github.com/FenixAra/go@v0.0.0-20170127160404-96ea0918e670/src/cmd/compile/internal/ssa/rewriteMIPS64.go (about)

     1  // autogenerated from gen/MIPS64.rules: do not edit!
     2  // generated with: cd gen; go run *.go
     3  
     4  package ssa
     5  
     6  import "math"
     7  
     8  var _ = math.MinInt8 // in case not otherwise used
// rewriteValueMIPS64 dispatches v to the rewrite function generated for
// its opcode. It returns true if a rule fired and rewrote v in place.
// Ops with no MIPS64 lowering/optimization rules fall through the switch
// and return false, leaving v unchanged.
func rewriteValueMIPS64(v *Value, config *Config) bool {
	switch v.Op {
	case OpAdd16:
		return rewriteValueMIPS64_OpAdd16(v, config)
	case OpAdd32:
		return rewriteValueMIPS64_OpAdd32(v, config)
	case OpAdd32F:
		return rewriteValueMIPS64_OpAdd32F(v, config)
	case OpAdd64:
		return rewriteValueMIPS64_OpAdd64(v, config)
	case OpAdd64F:
		return rewriteValueMIPS64_OpAdd64F(v, config)
	case OpAdd8:
		return rewriteValueMIPS64_OpAdd8(v, config)
	case OpAddPtr:
		return rewriteValueMIPS64_OpAddPtr(v, config)
	case OpAddr:
		return rewriteValueMIPS64_OpAddr(v, config)
	case OpAnd16:
		return rewriteValueMIPS64_OpAnd16(v, config)
	case OpAnd32:
		return rewriteValueMIPS64_OpAnd32(v, config)
	case OpAnd64:
		return rewriteValueMIPS64_OpAnd64(v, config)
	case OpAnd8:
		return rewriteValueMIPS64_OpAnd8(v, config)
	case OpAndB:
		return rewriteValueMIPS64_OpAndB(v, config)
	case OpAvg64u:
		return rewriteValueMIPS64_OpAvg64u(v, config)
	case OpClosureCall:
		return rewriteValueMIPS64_OpClosureCall(v, config)
	case OpCom16:
		return rewriteValueMIPS64_OpCom16(v, config)
	case OpCom32:
		return rewriteValueMIPS64_OpCom32(v, config)
	case OpCom64:
		return rewriteValueMIPS64_OpCom64(v, config)
	case OpCom8:
		return rewriteValueMIPS64_OpCom8(v, config)
	case OpConst16:
		return rewriteValueMIPS64_OpConst16(v, config)
	case OpConst32:
		return rewriteValueMIPS64_OpConst32(v, config)
	case OpConst32F:
		return rewriteValueMIPS64_OpConst32F(v, config)
	case OpConst64:
		return rewriteValueMIPS64_OpConst64(v, config)
	case OpConst64F:
		return rewriteValueMIPS64_OpConst64F(v, config)
	case OpConst8:
		return rewriteValueMIPS64_OpConst8(v, config)
	case OpConstBool:
		return rewriteValueMIPS64_OpConstBool(v, config)
	case OpConstNil:
		return rewriteValueMIPS64_OpConstNil(v, config)
	case OpConvert:
		return rewriteValueMIPS64_OpConvert(v, config)
	case OpCvt32Fto32:
		return rewriteValueMIPS64_OpCvt32Fto32(v, config)
	case OpCvt32Fto64:
		return rewriteValueMIPS64_OpCvt32Fto64(v, config)
	case OpCvt32Fto64F:
		return rewriteValueMIPS64_OpCvt32Fto64F(v, config)
	case OpCvt32to32F:
		return rewriteValueMIPS64_OpCvt32to32F(v, config)
	case OpCvt32to64F:
		return rewriteValueMIPS64_OpCvt32to64F(v, config)
	case OpCvt64Fto32:
		return rewriteValueMIPS64_OpCvt64Fto32(v, config)
	case OpCvt64Fto32F:
		return rewriteValueMIPS64_OpCvt64Fto32F(v, config)
	case OpCvt64Fto64:
		return rewriteValueMIPS64_OpCvt64Fto64(v, config)
	case OpCvt64to32F:
		return rewriteValueMIPS64_OpCvt64to32F(v, config)
	case OpCvt64to64F:
		return rewriteValueMIPS64_OpCvt64to64F(v, config)
	case OpDeferCall:
		return rewriteValueMIPS64_OpDeferCall(v, config)
	case OpDiv16:
		return rewriteValueMIPS64_OpDiv16(v, config)
	case OpDiv16u:
		return rewriteValueMIPS64_OpDiv16u(v, config)
	case OpDiv32:
		return rewriteValueMIPS64_OpDiv32(v, config)
	case OpDiv32F:
		return rewriteValueMIPS64_OpDiv32F(v, config)
	case OpDiv32u:
		return rewriteValueMIPS64_OpDiv32u(v, config)
	case OpDiv64:
		return rewriteValueMIPS64_OpDiv64(v, config)
	case OpDiv64F:
		return rewriteValueMIPS64_OpDiv64F(v, config)
	case OpDiv64u:
		return rewriteValueMIPS64_OpDiv64u(v, config)
	case OpDiv8:
		return rewriteValueMIPS64_OpDiv8(v, config)
	case OpDiv8u:
		return rewriteValueMIPS64_OpDiv8u(v, config)
	case OpEq16:
		return rewriteValueMIPS64_OpEq16(v, config)
	case OpEq32:
		return rewriteValueMIPS64_OpEq32(v, config)
	case OpEq32F:
		return rewriteValueMIPS64_OpEq32F(v, config)
	case OpEq64:
		return rewriteValueMIPS64_OpEq64(v, config)
	case OpEq64F:
		return rewriteValueMIPS64_OpEq64F(v, config)
	case OpEq8:
		return rewriteValueMIPS64_OpEq8(v, config)
	case OpEqB:
		return rewriteValueMIPS64_OpEqB(v, config)
	case OpEqPtr:
		return rewriteValueMIPS64_OpEqPtr(v, config)
	case OpGeq16:
		return rewriteValueMIPS64_OpGeq16(v, config)
	case OpGeq16U:
		return rewriteValueMIPS64_OpGeq16U(v, config)
	case OpGeq32:
		return rewriteValueMIPS64_OpGeq32(v, config)
	case OpGeq32F:
		return rewriteValueMIPS64_OpGeq32F(v, config)
	case OpGeq32U:
		return rewriteValueMIPS64_OpGeq32U(v, config)
	case OpGeq64:
		return rewriteValueMIPS64_OpGeq64(v, config)
	case OpGeq64F:
		return rewriteValueMIPS64_OpGeq64F(v, config)
	case OpGeq64U:
		return rewriteValueMIPS64_OpGeq64U(v, config)
	case OpGeq8:
		return rewriteValueMIPS64_OpGeq8(v, config)
	case OpGeq8U:
		return rewriteValueMIPS64_OpGeq8U(v, config)
	case OpGetClosurePtr:
		return rewriteValueMIPS64_OpGetClosurePtr(v, config)
	case OpGoCall:
		return rewriteValueMIPS64_OpGoCall(v, config)
	case OpGreater16:
		return rewriteValueMIPS64_OpGreater16(v, config)
	case OpGreater16U:
		return rewriteValueMIPS64_OpGreater16U(v, config)
	case OpGreater32:
		return rewriteValueMIPS64_OpGreater32(v, config)
	case OpGreater32F:
		return rewriteValueMIPS64_OpGreater32F(v, config)
	case OpGreater32U:
		return rewriteValueMIPS64_OpGreater32U(v, config)
	case OpGreater64:
		return rewriteValueMIPS64_OpGreater64(v, config)
	case OpGreater64F:
		return rewriteValueMIPS64_OpGreater64F(v, config)
	case OpGreater64U:
		return rewriteValueMIPS64_OpGreater64U(v, config)
	case OpGreater8:
		return rewriteValueMIPS64_OpGreater8(v, config)
	case OpGreater8U:
		return rewriteValueMIPS64_OpGreater8U(v, config)
	case OpHmul16:
		return rewriteValueMIPS64_OpHmul16(v, config)
	case OpHmul16u:
		return rewriteValueMIPS64_OpHmul16u(v, config)
	case OpHmul32:
		return rewriteValueMIPS64_OpHmul32(v, config)
	case OpHmul32u:
		return rewriteValueMIPS64_OpHmul32u(v, config)
	case OpHmul64:
		return rewriteValueMIPS64_OpHmul64(v, config)
	case OpHmul64u:
		return rewriteValueMIPS64_OpHmul64u(v, config)
	case OpHmul8:
		return rewriteValueMIPS64_OpHmul8(v, config)
	case OpHmul8u:
		return rewriteValueMIPS64_OpHmul8u(v, config)
	case OpInterCall:
		return rewriteValueMIPS64_OpInterCall(v, config)
	case OpIsInBounds:
		return rewriteValueMIPS64_OpIsInBounds(v, config)
	case OpIsNonNil:
		return rewriteValueMIPS64_OpIsNonNil(v, config)
	case OpIsSliceInBounds:
		return rewriteValueMIPS64_OpIsSliceInBounds(v, config)
	case OpLeq16:
		return rewriteValueMIPS64_OpLeq16(v, config)
	case OpLeq16U:
		return rewriteValueMIPS64_OpLeq16U(v, config)
	case OpLeq32:
		return rewriteValueMIPS64_OpLeq32(v, config)
	case OpLeq32F:
		return rewriteValueMIPS64_OpLeq32F(v, config)
	case OpLeq32U:
		return rewriteValueMIPS64_OpLeq32U(v, config)
	case OpLeq64:
		return rewriteValueMIPS64_OpLeq64(v, config)
	case OpLeq64F:
		return rewriteValueMIPS64_OpLeq64F(v, config)
	case OpLeq64U:
		return rewriteValueMIPS64_OpLeq64U(v, config)
	case OpLeq8:
		return rewriteValueMIPS64_OpLeq8(v, config)
	case OpLeq8U:
		return rewriteValueMIPS64_OpLeq8U(v, config)
	case OpLess16:
		return rewriteValueMIPS64_OpLess16(v, config)
	case OpLess16U:
		return rewriteValueMIPS64_OpLess16U(v, config)
	case OpLess32:
		return rewriteValueMIPS64_OpLess32(v, config)
	case OpLess32F:
		return rewriteValueMIPS64_OpLess32F(v, config)
	case OpLess32U:
		return rewriteValueMIPS64_OpLess32U(v, config)
	case OpLess64:
		return rewriteValueMIPS64_OpLess64(v, config)
	case OpLess64F:
		return rewriteValueMIPS64_OpLess64F(v, config)
	case OpLess64U:
		return rewriteValueMIPS64_OpLess64U(v, config)
	case OpLess8:
		return rewriteValueMIPS64_OpLess8(v, config)
	case OpLess8U:
		return rewriteValueMIPS64_OpLess8U(v, config)
	case OpLoad:
		return rewriteValueMIPS64_OpLoad(v, config)
	case OpLsh16x16:
		return rewriteValueMIPS64_OpLsh16x16(v, config)
	case OpLsh16x32:
		return rewriteValueMIPS64_OpLsh16x32(v, config)
	case OpLsh16x64:
		return rewriteValueMIPS64_OpLsh16x64(v, config)
	case OpLsh16x8:
		return rewriteValueMIPS64_OpLsh16x8(v, config)
	case OpLsh32x16:
		return rewriteValueMIPS64_OpLsh32x16(v, config)
	case OpLsh32x32:
		return rewriteValueMIPS64_OpLsh32x32(v, config)
	case OpLsh32x64:
		return rewriteValueMIPS64_OpLsh32x64(v, config)
	case OpLsh32x8:
		return rewriteValueMIPS64_OpLsh32x8(v, config)
	case OpLsh64x16:
		return rewriteValueMIPS64_OpLsh64x16(v, config)
	case OpLsh64x32:
		return rewriteValueMIPS64_OpLsh64x32(v, config)
	case OpLsh64x64:
		return rewriteValueMIPS64_OpLsh64x64(v, config)
	case OpLsh64x8:
		return rewriteValueMIPS64_OpLsh64x8(v, config)
	case OpLsh8x16:
		return rewriteValueMIPS64_OpLsh8x16(v, config)
	case OpLsh8x32:
		return rewriteValueMIPS64_OpLsh8x32(v, config)
	case OpLsh8x64:
		return rewriteValueMIPS64_OpLsh8x64(v, config)
	case OpLsh8x8:
		return rewriteValueMIPS64_OpLsh8x8(v, config)
	case OpMIPS64ADDV:
		return rewriteValueMIPS64_OpMIPS64ADDV(v, config)
	case OpMIPS64ADDVconst:
		return rewriteValueMIPS64_OpMIPS64ADDVconst(v, config)
	case OpMIPS64AND:
		return rewriteValueMIPS64_OpMIPS64AND(v, config)
	case OpMIPS64ANDconst:
		return rewriteValueMIPS64_OpMIPS64ANDconst(v, config)
	case OpMIPS64MOVBUload:
		return rewriteValueMIPS64_OpMIPS64MOVBUload(v, config)
	case OpMIPS64MOVBUreg:
		return rewriteValueMIPS64_OpMIPS64MOVBUreg(v, config)
	case OpMIPS64MOVBload:
		return rewriteValueMIPS64_OpMIPS64MOVBload(v, config)
	case OpMIPS64MOVBreg:
		return rewriteValueMIPS64_OpMIPS64MOVBreg(v, config)
	case OpMIPS64MOVBstore:
		return rewriteValueMIPS64_OpMIPS64MOVBstore(v, config)
	case OpMIPS64MOVBstorezero:
		return rewriteValueMIPS64_OpMIPS64MOVBstorezero(v, config)
	case OpMIPS64MOVDload:
		return rewriteValueMIPS64_OpMIPS64MOVDload(v, config)
	case OpMIPS64MOVDstore:
		return rewriteValueMIPS64_OpMIPS64MOVDstore(v, config)
	case OpMIPS64MOVFload:
		return rewriteValueMIPS64_OpMIPS64MOVFload(v, config)
	case OpMIPS64MOVFstore:
		return rewriteValueMIPS64_OpMIPS64MOVFstore(v, config)
	case OpMIPS64MOVHUload:
		return rewriteValueMIPS64_OpMIPS64MOVHUload(v, config)
	case OpMIPS64MOVHUreg:
		return rewriteValueMIPS64_OpMIPS64MOVHUreg(v, config)
	case OpMIPS64MOVHload:
		return rewriteValueMIPS64_OpMIPS64MOVHload(v, config)
	case OpMIPS64MOVHreg:
		return rewriteValueMIPS64_OpMIPS64MOVHreg(v, config)
	case OpMIPS64MOVHstore:
		return rewriteValueMIPS64_OpMIPS64MOVHstore(v, config)
	case OpMIPS64MOVHstorezero:
		return rewriteValueMIPS64_OpMIPS64MOVHstorezero(v, config)
	case OpMIPS64MOVVload:
		return rewriteValueMIPS64_OpMIPS64MOVVload(v, config)
	case OpMIPS64MOVVreg:
		return rewriteValueMIPS64_OpMIPS64MOVVreg(v, config)
	case OpMIPS64MOVVstore:
		return rewriteValueMIPS64_OpMIPS64MOVVstore(v, config)
	case OpMIPS64MOVVstorezero:
		return rewriteValueMIPS64_OpMIPS64MOVVstorezero(v, config)
	case OpMIPS64MOVWUload:
		return rewriteValueMIPS64_OpMIPS64MOVWUload(v, config)
	case OpMIPS64MOVWUreg:
		return rewriteValueMIPS64_OpMIPS64MOVWUreg(v, config)
	case OpMIPS64MOVWload:
		return rewriteValueMIPS64_OpMIPS64MOVWload(v, config)
	case OpMIPS64MOVWreg:
		return rewriteValueMIPS64_OpMIPS64MOVWreg(v, config)
	case OpMIPS64MOVWstore:
		return rewriteValueMIPS64_OpMIPS64MOVWstore(v, config)
	case OpMIPS64MOVWstorezero:
		return rewriteValueMIPS64_OpMIPS64MOVWstorezero(v, config)
	case OpMIPS64NEGV:
		return rewriteValueMIPS64_OpMIPS64NEGV(v, config)
	case OpMIPS64NOR:
		return rewriteValueMIPS64_OpMIPS64NOR(v, config)
	case OpMIPS64NORconst:
		return rewriteValueMIPS64_OpMIPS64NORconst(v, config)
	case OpMIPS64OR:
		return rewriteValueMIPS64_OpMIPS64OR(v, config)
	case OpMIPS64ORconst:
		return rewriteValueMIPS64_OpMIPS64ORconst(v, config)
	case OpMIPS64SGT:
		return rewriteValueMIPS64_OpMIPS64SGT(v, config)
	case OpMIPS64SGTU:
		return rewriteValueMIPS64_OpMIPS64SGTU(v, config)
	case OpMIPS64SGTUconst:
		return rewriteValueMIPS64_OpMIPS64SGTUconst(v, config)
	case OpMIPS64SGTconst:
		return rewriteValueMIPS64_OpMIPS64SGTconst(v, config)
	case OpMIPS64SLLV:
		return rewriteValueMIPS64_OpMIPS64SLLV(v, config)
	case OpMIPS64SLLVconst:
		return rewriteValueMIPS64_OpMIPS64SLLVconst(v, config)
	case OpMIPS64SRAV:
		return rewriteValueMIPS64_OpMIPS64SRAV(v, config)
	case OpMIPS64SRAVconst:
		return rewriteValueMIPS64_OpMIPS64SRAVconst(v, config)
	case OpMIPS64SRLV:
		return rewriteValueMIPS64_OpMIPS64SRLV(v, config)
	case OpMIPS64SRLVconst:
		return rewriteValueMIPS64_OpMIPS64SRLVconst(v, config)
	case OpMIPS64SUBV:
		return rewriteValueMIPS64_OpMIPS64SUBV(v, config)
	case OpMIPS64SUBVconst:
		return rewriteValueMIPS64_OpMIPS64SUBVconst(v, config)
	case OpMIPS64XOR:
		return rewriteValueMIPS64_OpMIPS64XOR(v, config)
	case OpMIPS64XORconst:
		return rewriteValueMIPS64_OpMIPS64XORconst(v, config)
	case OpMod16:
		return rewriteValueMIPS64_OpMod16(v, config)
	case OpMod16u:
		return rewriteValueMIPS64_OpMod16u(v, config)
	case OpMod32:
		return rewriteValueMIPS64_OpMod32(v, config)
	case OpMod32u:
		return rewriteValueMIPS64_OpMod32u(v, config)
	case OpMod64:
		return rewriteValueMIPS64_OpMod64(v, config)
	case OpMod64u:
		return rewriteValueMIPS64_OpMod64u(v, config)
	case OpMod8:
		return rewriteValueMIPS64_OpMod8(v, config)
	case OpMod8u:
		return rewriteValueMIPS64_OpMod8u(v, config)
	case OpMove:
		return rewriteValueMIPS64_OpMove(v, config)
	case OpMul16:
		return rewriteValueMIPS64_OpMul16(v, config)
	case OpMul32:
		return rewriteValueMIPS64_OpMul32(v, config)
	case OpMul32F:
		return rewriteValueMIPS64_OpMul32F(v, config)
	case OpMul64:
		return rewriteValueMIPS64_OpMul64(v, config)
	case OpMul64F:
		return rewriteValueMIPS64_OpMul64F(v, config)
	case OpMul8:
		return rewriteValueMIPS64_OpMul8(v, config)
	case OpNeg16:
		return rewriteValueMIPS64_OpNeg16(v, config)
	case OpNeg32:
		return rewriteValueMIPS64_OpNeg32(v, config)
	case OpNeg32F:
		return rewriteValueMIPS64_OpNeg32F(v, config)
	case OpNeg64:
		return rewriteValueMIPS64_OpNeg64(v, config)
	case OpNeg64F:
		return rewriteValueMIPS64_OpNeg64F(v, config)
	case OpNeg8:
		return rewriteValueMIPS64_OpNeg8(v, config)
	case OpNeq16:
		return rewriteValueMIPS64_OpNeq16(v, config)
	case OpNeq32:
		return rewriteValueMIPS64_OpNeq32(v, config)
	case OpNeq32F:
		return rewriteValueMIPS64_OpNeq32F(v, config)
	case OpNeq64:
		return rewriteValueMIPS64_OpNeq64(v, config)
	case OpNeq64F:
		return rewriteValueMIPS64_OpNeq64F(v, config)
	case OpNeq8:
		return rewriteValueMIPS64_OpNeq8(v, config)
	case OpNeqB:
		return rewriteValueMIPS64_OpNeqB(v, config)
	case OpNeqPtr:
		return rewriteValueMIPS64_OpNeqPtr(v, config)
	case OpNilCheck:
		return rewriteValueMIPS64_OpNilCheck(v, config)
	case OpNot:
		return rewriteValueMIPS64_OpNot(v, config)
	case OpOffPtr:
		return rewriteValueMIPS64_OpOffPtr(v, config)
	case OpOr16:
		return rewriteValueMIPS64_OpOr16(v, config)
	case OpOr32:
		return rewriteValueMIPS64_OpOr32(v, config)
	case OpOr64:
		return rewriteValueMIPS64_OpOr64(v, config)
	case OpOr8:
		return rewriteValueMIPS64_OpOr8(v, config)
	case OpOrB:
		return rewriteValueMIPS64_OpOrB(v, config)
	case OpRsh16Ux16:
		return rewriteValueMIPS64_OpRsh16Ux16(v, config)
	case OpRsh16Ux32:
		return rewriteValueMIPS64_OpRsh16Ux32(v, config)
	case OpRsh16Ux64:
		return rewriteValueMIPS64_OpRsh16Ux64(v, config)
	case OpRsh16Ux8:
		return rewriteValueMIPS64_OpRsh16Ux8(v, config)
	case OpRsh16x16:
		return rewriteValueMIPS64_OpRsh16x16(v, config)
	case OpRsh16x32:
		return rewriteValueMIPS64_OpRsh16x32(v, config)
	case OpRsh16x64:
		return rewriteValueMIPS64_OpRsh16x64(v, config)
	case OpRsh16x8:
		return rewriteValueMIPS64_OpRsh16x8(v, config)
	case OpRsh32Ux16:
		return rewriteValueMIPS64_OpRsh32Ux16(v, config)
	case OpRsh32Ux32:
		return rewriteValueMIPS64_OpRsh32Ux32(v, config)
	case OpRsh32Ux64:
		return rewriteValueMIPS64_OpRsh32Ux64(v, config)
	case OpRsh32Ux8:
		return rewriteValueMIPS64_OpRsh32Ux8(v, config)
	case OpRsh32x16:
		return rewriteValueMIPS64_OpRsh32x16(v, config)
	case OpRsh32x32:
		return rewriteValueMIPS64_OpRsh32x32(v, config)
	case OpRsh32x64:
		return rewriteValueMIPS64_OpRsh32x64(v, config)
	case OpRsh32x8:
		return rewriteValueMIPS64_OpRsh32x8(v, config)
	case OpRsh64Ux16:
		return rewriteValueMIPS64_OpRsh64Ux16(v, config)
	case OpRsh64Ux32:
		return rewriteValueMIPS64_OpRsh64Ux32(v, config)
	case OpRsh64Ux64:
		return rewriteValueMIPS64_OpRsh64Ux64(v, config)
	case OpRsh64Ux8:
		return rewriteValueMIPS64_OpRsh64Ux8(v, config)
	case OpRsh64x16:
		return rewriteValueMIPS64_OpRsh64x16(v, config)
	case OpRsh64x32:
		return rewriteValueMIPS64_OpRsh64x32(v, config)
	case OpRsh64x64:
		return rewriteValueMIPS64_OpRsh64x64(v, config)
	case OpRsh64x8:
		return rewriteValueMIPS64_OpRsh64x8(v, config)
	case OpRsh8Ux16:
		return rewriteValueMIPS64_OpRsh8Ux16(v, config)
	case OpRsh8Ux32:
		return rewriteValueMIPS64_OpRsh8Ux32(v, config)
	case OpRsh8Ux64:
		return rewriteValueMIPS64_OpRsh8Ux64(v, config)
	case OpRsh8Ux8:
		return rewriteValueMIPS64_OpRsh8Ux8(v, config)
	case OpRsh8x16:
		return rewriteValueMIPS64_OpRsh8x16(v, config)
	case OpRsh8x32:
		return rewriteValueMIPS64_OpRsh8x32(v, config)
	case OpRsh8x64:
		return rewriteValueMIPS64_OpRsh8x64(v, config)
	case OpRsh8x8:
		return rewriteValueMIPS64_OpRsh8x8(v, config)
	case OpSelect0:
		return rewriteValueMIPS64_OpSelect0(v, config)
	case OpSelect1:
		return rewriteValueMIPS64_OpSelect1(v, config)
	case OpSignExt16to32:
		return rewriteValueMIPS64_OpSignExt16to32(v, config)
	case OpSignExt16to64:
		return rewriteValueMIPS64_OpSignExt16to64(v, config)
	case OpSignExt32to64:
		return rewriteValueMIPS64_OpSignExt32to64(v, config)
	case OpSignExt8to16:
		return rewriteValueMIPS64_OpSignExt8to16(v, config)
	case OpSignExt8to32:
		return rewriteValueMIPS64_OpSignExt8to32(v, config)
	case OpSignExt8to64:
		return rewriteValueMIPS64_OpSignExt8to64(v, config)
	case OpSlicemask:
		return rewriteValueMIPS64_OpSlicemask(v, config)
	case OpStaticCall:
		return rewriteValueMIPS64_OpStaticCall(v, config)
	case OpStore:
		return rewriteValueMIPS64_OpStore(v, config)
	case OpSub16:
		return rewriteValueMIPS64_OpSub16(v, config)
	case OpSub32:
		return rewriteValueMIPS64_OpSub32(v, config)
	case OpSub32F:
		return rewriteValueMIPS64_OpSub32F(v, config)
	case OpSub64:
		return rewriteValueMIPS64_OpSub64(v, config)
	case OpSub64F:
		return rewriteValueMIPS64_OpSub64F(v, config)
	case OpSub8:
		return rewriteValueMIPS64_OpSub8(v, config)
	case OpSubPtr:
		return rewriteValueMIPS64_OpSubPtr(v, config)
	case OpTrunc16to8:
		return rewriteValueMIPS64_OpTrunc16to8(v, config)
	case OpTrunc32to16:
		return rewriteValueMIPS64_OpTrunc32to16(v, config)
	case OpTrunc32to8:
		return rewriteValueMIPS64_OpTrunc32to8(v, config)
	case OpTrunc64to16:
		return rewriteValueMIPS64_OpTrunc64to16(v, config)
	case OpTrunc64to32:
		return rewriteValueMIPS64_OpTrunc64to32(v, config)
	case OpTrunc64to8:
		return rewriteValueMIPS64_OpTrunc64to8(v, config)
	case OpXor16:
		return rewriteValueMIPS64_OpXor16(v, config)
	case OpXor32:
		return rewriteValueMIPS64_OpXor32(v, config)
	case OpXor64:
		return rewriteValueMIPS64_OpXor64(v, config)
	case OpXor8:
		return rewriteValueMIPS64_OpXor8(v, config)
	case OpZero:
		return rewriteValueMIPS64_OpZero(v, config)
	case OpZeroExt16to32:
		return rewriteValueMIPS64_OpZeroExt16to32(v, config)
	case OpZeroExt16to64:
		return rewriteValueMIPS64_OpZeroExt16to64(v, config)
	case OpZeroExt32to64:
		return rewriteValueMIPS64_OpZeroExt32to64(v, config)
	case OpZeroExt8to16:
		return rewriteValueMIPS64_OpZeroExt8to16(v, config)
	case OpZeroExt8to32:
		return rewriteValueMIPS64_OpZeroExt8to32(v, config)
	case OpZeroExt8to64:
		return rewriteValueMIPS64_OpZeroExt8to64(v, config)
	}
	// No rule matched this opcode; leave v untouched.
	return false
}
   576  func rewriteValueMIPS64_OpAdd16(v *Value, config *Config) bool {
   577  	b := v.Block
   578  	_ = b
   579  	// match: (Add16 x y)
   580  	// cond:
   581  	// result: (ADDV x y)
   582  	for {
   583  		x := v.Args[0]
   584  		y := v.Args[1]
   585  		v.reset(OpMIPS64ADDV)
   586  		v.AddArg(x)
   587  		v.AddArg(y)
   588  		return true
   589  	}
   590  }
   591  func rewriteValueMIPS64_OpAdd32(v *Value, config *Config) bool {
   592  	b := v.Block
   593  	_ = b
   594  	// match: (Add32 x y)
   595  	// cond:
   596  	// result: (ADDV x y)
   597  	for {
   598  		x := v.Args[0]
   599  		y := v.Args[1]
   600  		v.reset(OpMIPS64ADDV)
   601  		v.AddArg(x)
   602  		v.AddArg(y)
   603  		return true
   604  	}
   605  }
   606  func rewriteValueMIPS64_OpAdd32F(v *Value, config *Config) bool {
   607  	b := v.Block
   608  	_ = b
   609  	// match: (Add32F x y)
   610  	// cond:
   611  	// result: (ADDF x y)
   612  	for {
   613  		x := v.Args[0]
   614  		y := v.Args[1]
   615  		v.reset(OpMIPS64ADDF)
   616  		v.AddArg(x)
   617  		v.AddArg(y)
   618  		return true
   619  	}
   620  }
   621  func rewriteValueMIPS64_OpAdd64(v *Value, config *Config) bool {
   622  	b := v.Block
   623  	_ = b
   624  	// match: (Add64 x y)
   625  	// cond:
   626  	// result: (ADDV x y)
   627  	for {
   628  		x := v.Args[0]
   629  		y := v.Args[1]
   630  		v.reset(OpMIPS64ADDV)
   631  		v.AddArg(x)
   632  		v.AddArg(y)
   633  		return true
   634  	}
   635  }
   636  func rewriteValueMIPS64_OpAdd64F(v *Value, config *Config) bool {
   637  	b := v.Block
   638  	_ = b
   639  	// match: (Add64F x y)
   640  	// cond:
   641  	// result: (ADDD x y)
   642  	for {
   643  		x := v.Args[0]
   644  		y := v.Args[1]
   645  		v.reset(OpMIPS64ADDD)
   646  		v.AddArg(x)
   647  		v.AddArg(y)
   648  		return true
   649  	}
   650  }
   651  func rewriteValueMIPS64_OpAdd8(v *Value, config *Config) bool {
   652  	b := v.Block
   653  	_ = b
   654  	// match: (Add8 x y)
   655  	// cond:
   656  	// result: (ADDV x y)
   657  	for {
   658  		x := v.Args[0]
   659  		y := v.Args[1]
   660  		v.reset(OpMIPS64ADDV)
   661  		v.AddArg(x)
   662  		v.AddArg(y)
   663  		return true
   664  	}
   665  }
   666  func rewriteValueMIPS64_OpAddPtr(v *Value, config *Config) bool {
   667  	b := v.Block
   668  	_ = b
   669  	// match: (AddPtr x y)
   670  	// cond:
   671  	// result: (ADDV x y)
   672  	for {
   673  		x := v.Args[0]
   674  		y := v.Args[1]
   675  		v.reset(OpMIPS64ADDV)
   676  		v.AddArg(x)
   677  		v.AddArg(y)
   678  		return true
   679  	}
   680  }
   681  func rewriteValueMIPS64_OpAddr(v *Value, config *Config) bool {
   682  	b := v.Block
   683  	_ = b
   684  	// match: (Addr {sym} base)
   685  	// cond:
   686  	// result: (MOVVaddr {sym} base)
   687  	for {
   688  		sym := v.Aux
   689  		base := v.Args[0]
   690  		v.reset(OpMIPS64MOVVaddr)
   691  		v.Aux = sym
   692  		v.AddArg(base)
   693  		return true
   694  	}
   695  }
   696  func rewriteValueMIPS64_OpAnd16(v *Value, config *Config) bool {
   697  	b := v.Block
   698  	_ = b
   699  	// match: (And16 x y)
   700  	// cond:
   701  	// result: (AND x y)
   702  	for {
   703  		x := v.Args[0]
   704  		y := v.Args[1]
   705  		v.reset(OpMIPS64AND)
   706  		v.AddArg(x)
   707  		v.AddArg(y)
   708  		return true
   709  	}
   710  }
   711  func rewriteValueMIPS64_OpAnd32(v *Value, config *Config) bool {
   712  	b := v.Block
   713  	_ = b
   714  	// match: (And32 x y)
   715  	// cond:
   716  	// result: (AND x y)
   717  	for {
   718  		x := v.Args[0]
   719  		y := v.Args[1]
   720  		v.reset(OpMIPS64AND)
   721  		v.AddArg(x)
   722  		v.AddArg(y)
   723  		return true
   724  	}
   725  }
   726  func rewriteValueMIPS64_OpAnd64(v *Value, config *Config) bool {
   727  	b := v.Block
   728  	_ = b
   729  	// match: (And64 x y)
   730  	// cond:
   731  	// result: (AND x y)
   732  	for {
   733  		x := v.Args[0]
   734  		y := v.Args[1]
   735  		v.reset(OpMIPS64AND)
   736  		v.AddArg(x)
   737  		v.AddArg(y)
   738  		return true
   739  	}
   740  }
   741  func rewriteValueMIPS64_OpAnd8(v *Value, config *Config) bool {
   742  	b := v.Block
   743  	_ = b
   744  	// match: (And8 x y)
   745  	// cond:
   746  	// result: (AND x y)
   747  	for {
   748  		x := v.Args[0]
   749  		y := v.Args[1]
   750  		v.reset(OpMIPS64AND)
   751  		v.AddArg(x)
   752  		v.AddArg(y)
   753  		return true
   754  	}
   755  }
   756  func rewriteValueMIPS64_OpAndB(v *Value, config *Config) bool {
   757  	b := v.Block
   758  	_ = b
   759  	// match: (AndB x y)
   760  	// cond:
   761  	// result: (AND x y)
   762  	for {
   763  		x := v.Args[0]
   764  		y := v.Args[1]
   765  		v.reset(OpMIPS64AND)
   766  		v.AddArg(x)
   767  		v.AddArg(y)
   768  		return true
   769  	}
   770  }
   771  func rewriteValueMIPS64_OpAvg64u(v *Value, config *Config) bool {
   772  	b := v.Block
   773  	_ = b
   774  	// match: (Avg64u <t> x y)
   775  	// cond:
   776  	// result: (ADDV (ADDV <t> (SRLVconst <t> x [1]) (SRLVconst <t> y [1])) (AND <t> (AND <t> x y) (MOVVconst [1])))
   777  	for {
   778  		t := v.Type
   779  		x := v.Args[0]
   780  		y := v.Args[1]
   781  		v.reset(OpMIPS64ADDV)
   782  		v0 := b.NewValue0(v.Line, OpMIPS64ADDV, t)
   783  		v1 := b.NewValue0(v.Line, OpMIPS64SRLVconst, t)
   784  		v1.AuxInt = 1
   785  		v1.AddArg(x)
   786  		v0.AddArg(v1)
   787  		v2 := b.NewValue0(v.Line, OpMIPS64SRLVconst, t)
   788  		v2.AuxInt = 1
   789  		v2.AddArg(y)
   790  		v0.AddArg(v2)
   791  		v.AddArg(v0)
   792  		v3 := b.NewValue0(v.Line, OpMIPS64AND, t)
   793  		v4 := b.NewValue0(v.Line, OpMIPS64AND, t)
   794  		v4.AddArg(x)
   795  		v4.AddArg(y)
   796  		v3.AddArg(v4)
   797  		v5 := b.NewValue0(v.Line, OpMIPS64MOVVconst, config.fe.TypeUInt64())
   798  		v5.AuxInt = 1
   799  		v3.AddArg(v5)
   800  		v.AddArg(v3)
   801  		return true
   802  	}
   803  }
   804  func rewriteValueMIPS64_OpClosureCall(v *Value, config *Config) bool {
   805  	b := v.Block
   806  	_ = b
   807  	// match: (ClosureCall [argwid] entry closure mem)
   808  	// cond:
   809  	// result: (CALLclosure [argwid] entry closure mem)
   810  	for {
   811  		argwid := v.AuxInt
   812  		entry := v.Args[0]
   813  		closure := v.Args[1]
   814  		mem := v.Args[2]
   815  		v.reset(OpMIPS64CALLclosure)
   816  		v.AuxInt = argwid
   817  		v.AddArg(entry)
   818  		v.AddArg(closure)
   819  		v.AddArg(mem)
   820  		return true
   821  	}
   822  }
   823  func rewriteValueMIPS64_OpCom16(v *Value, config *Config) bool {
   824  	b := v.Block
   825  	_ = b
   826  	// match: (Com16 x)
   827  	// cond:
   828  	// result: (NOR (MOVVconst [0]) x)
   829  	for {
   830  		x := v.Args[0]
   831  		v.reset(OpMIPS64NOR)
   832  		v0 := b.NewValue0(v.Line, OpMIPS64MOVVconst, config.fe.TypeUInt64())
   833  		v0.AuxInt = 0
   834  		v.AddArg(v0)
   835  		v.AddArg(x)
   836  		return true
   837  	}
   838  }
   839  func rewriteValueMIPS64_OpCom32(v *Value, config *Config) bool {
   840  	b := v.Block
   841  	_ = b
   842  	// match: (Com32 x)
   843  	// cond:
   844  	// result: (NOR (MOVVconst [0]) x)
   845  	for {
   846  		x := v.Args[0]
   847  		v.reset(OpMIPS64NOR)
   848  		v0 := b.NewValue0(v.Line, OpMIPS64MOVVconst, config.fe.TypeUInt64())
   849  		v0.AuxInt = 0
   850  		v.AddArg(v0)
   851  		v.AddArg(x)
   852  		return true
   853  	}
   854  }
// rewriteValueMIPS64_OpCom64 lowers Com64 (bitwise complement) to
// NOR(0, x), since MIPS64 has no dedicated NOT instruction.
func rewriteValueMIPS64_OpCom64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Com64 x)
	// cond:
	// result: (NOR (MOVVconst [0]) x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64NOR)
		v0 := b.NewValue0(v.Line, OpMIPS64MOVVconst, config.fe.TypeUInt64())
		v0.AuxInt = 0
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpCom8 lowers Com8 (bitwise complement) to
// NOR(0, x), since MIPS64 has no dedicated NOT instruction.
func rewriteValueMIPS64_OpCom8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Com8 x)
	// cond:
	// result: (NOR (MOVVconst [0]) x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64NOR)
		v0 := b.NewValue0(v.Line, OpMIPS64MOVVconst, config.fe.TypeUInt64())
		v0.AuxInt = 0
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpConst16 lowers a generic 16-bit integer constant to a
// MIPS64 MOVVconst, carrying the value through AuxInt unchanged.
func rewriteValueMIPS64_OpConst16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Const16 [val])
	// cond:
	// result: (MOVVconst [val])
	for {
		val := v.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = val
		return true
	}
}
// rewriteValueMIPS64_OpConst32 lowers a generic 32-bit integer constant to a
// MIPS64 MOVVconst, carrying the value through AuxInt unchanged.
func rewriteValueMIPS64_OpConst32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Const32 [val])
	// cond:
	// result: (MOVVconst [val])
	for {
		val := v.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = val
		return true
	}
}
// rewriteValueMIPS64_OpConst32F lowers a generic 32-bit float constant to a
// MIPS64 MOVFconst, carrying the value through AuxInt unchanged.
func rewriteValueMIPS64_OpConst32F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Const32F [val])
	// cond:
	// result: (MOVFconst [val])
	for {
		val := v.AuxInt
		v.reset(OpMIPS64MOVFconst)
		v.AuxInt = val
		return true
	}
}
// rewriteValueMIPS64_OpConst64 lowers a generic 64-bit integer constant to a
// MIPS64 MOVVconst, carrying the value through AuxInt unchanged.
func rewriteValueMIPS64_OpConst64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Const64 [val])
	// cond:
	// result: (MOVVconst [val])
	for {
		val := v.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = val
		return true
	}
}
// rewriteValueMIPS64_OpConst64F lowers a generic 64-bit float constant to a
// MIPS64 MOVDconst, carrying the value through AuxInt unchanged.
func rewriteValueMIPS64_OpConst64F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Const64F [val])
	// cond:
	// result: (MOVDconst [val])
	for {
		val := v.AuxInt
		v.reset(OpMIPS64MOVDconst)
		v.AuxInt = val
		return true
	}
}
// rewriteValueMIPS64_OpConst8 lowers a generic 8-bit integer constant to a
// MIPS64 MOVVconst, carrying the value through AuxInt unchanged.
func rewriteValueMIPS64_OpConst8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Const8 [val])
	// cond:
	// result: (MOVVconst [val])
	for {
		val := v.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = val
		return true
	}
}
// rewriteValueMIPS64_OpConstBool lowers a boolean constant to a MIPS64
// MOVVconst. Note the local b here shadows the block variable; it holds
// the AuxInt-encoded bool value.
func rewriteValueMIPS64_OpConstBool(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (ConstBool [b])
	// cond:
	// result: (MOVVconst [b])
	for {
		b := v.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = b
		return true
	}
}
// rewriteValueMIPS64_OpConstNil lowers the nil-pointer constant to
// MOVVconst [0].
func rewriteValueMIPS64_OpConstNil(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (ConstNil)
	// cond:
	// result: (MOVVconst [0])
	for {
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
}
// rewriteValueMIPS64_OpConvert lowers the generic Convert op (pointer/uintptr
// conversion tracked for GC) to the MIPS64 MOVVconvert.
func rewriteValueMIPS64_OpConvert(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Convert x mem)
	// cond:
	// result: (MOVVconvert x mem)
	for {
		x := v.Args[0]
		mem := v.Args[1]
		v.reset(OpMIPS64MOVVconvert)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
}
// rewriteValueMIPS64_OpCvt32Fto32 lowers float32-to-int32 conversion to the
// MIPS64 TRUNCFW instruction.
func rewriteValueMIPS64_OpCvt32Fto32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Cvt32Fto32 x)
	// cond:
	// result: (TRUNCFW x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64TRUNCFW)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpCvt32Fto64 lowers float32-to-int64 conversion to the
// MIPS64 TRUNCFV instruction.
func rewriteValueMIPS64_OpCvt32Fto64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Cvt32Fto64 x)
	// cond:
	// result: (TRUNCFV x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64TRUNCFV)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpCvt32Fto64F lowers float32-to-float64 widening to the
// MIPS64 MOVFD instruction.
func rewriteValueMIPS64_OpCvt32Fto64F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Cvt32Fto64F x)
	// cond:
	// result: (MOVFD x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64MOVFD)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpCvt32to32F lowers int32-to-float32 conversion to the
// MIPS64 MOVWF instruction.
func rewriteValueMIPS64_OpCvt32to32F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Cvt32to32F x)
	// cond:
	// result: (MOVWF x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64MOVWF)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpCvt32to64F lowers int32-to-float64 conversion to the
// MIPS64 MOVWD instruction.
func rewriteValueMIPS64_OpCvt32to64F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Cvt32to64F x)
	// cond:
	// result: (MOVWD x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64MOVWD)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpCvt64Fto32 lowers float64-to-int32 conversion to the
// MIPS64 TRUNCDW instruction.
func rewriteValueMIPS64_OpCvt64Fto32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Cvt64Fto32 x)
	// cond:
	// result: (TRUNCDW x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64TRUNCDW)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpCvt64Fto32F lowers float64-to-float32 narrowing to the
// MIPS64 MOVDF instruction.
func rewriteValueMIPS64_OpCvt64Fto32F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Cvt64Fto32F x)
	// cond:
	// result: (MOVDF x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64MOVDF)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpCvt64Fto64 lowers float64-to-int64 conversion to the
// MIPS64 TRUNCDV instruction.
func rewriteValueMIPS64_OpCvt64Fto64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Cvt64Fto64 x)
	// cond:
	// result: (TRUNCDV x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64TRUNCDV)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpCvt64to32F lowers int64-to-float32 conversion to the
// MIPS64 MOVVF instruction.
func rewriteValueMIPS64_OpCvt64to32F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Cvt64to32F x)
	// cond:
	// result: (MOVVF x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64MOVVF)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpCvt64to64F lowers int64-to-float64 conversion to the
// MIPS64 MOVVD instruction.
func rewriteValueMIPS64_OpCvt64to64F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Cvt64to64F x)
	// cond:
	// result: (MOVVD x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64MOVVD)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpDeferCall lowers the generic DeferCall op to the
// MIPS64-specific CALLdefer, preserving argument width and memory argument.
func rewriteValueMIPS64_OpDeferCall(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (DeferCall [argwid] mem)
	// cond:
	// result: (CALLdefer [argwid] mem)
	for {
		argwid := v.AuxInt
		mem := v.Args[0]
		v.reset(OpMIPS64CALLdefer)
		v.AuxInt = argwid
		v.AddArg(mem)
		return true
	}
}
// rewriteValueMIPS64_OpDiv16 lowers signed 16-bit division: both operands are
// sign-extended to 64 bits, divided with DIVV (which produces a two-element
// tuple), and Select1 extracts the quotient half of that tuple.
func rewriteValueMIPS64_OpDiv16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Div16 x y)
	// cond:
	// result: (Select1 (DIVV (SignExt16to64 x) (SignExt16to64 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Line, OpMIPS64DIVV, MakeTuple(config.fe.TypeInt64(), config.fe.TypeInt64()))
		v1 := b.NewValue0(v.Line, OpSignExt16to64, config.fe.TypeInt64())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpSignExt16to64, config.fe.TypeInt64())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpDiv16u lowers unsigned 16-bit division: both operands
// are zero-extended to 64 bits, divided with DIVVU, and Select1 extracts the
// quotient half of the resulting tuple.
func rewriteValueMIPS64_OpDiv16u(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Div16u x y)
	// cond:
	// result: (Select1 (DIVVU (ZeroExt16to64 x) (ZeroExt16to64 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Line, OpMIPS64DIVVU, MakeTuple(config.fe.TypeUInt64(), config.fe.TypeUInt64()))
		v1 := b.NewValue0(v.Line, OpZeroExt16to64, config.fe.TypeUInt64())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpZeroExt16to64, config.fe.TypeUInt64())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpDiv32 lowers signed 32-bit division: both operands are
// sign-extended to 64 bits, divided with DIVV, and Select1 extracts the
// quotient half of the resulting tuple.
func rewriteValueMIPS64_OpDiv32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Div32 x y)
	// cond:
	// result: (Select1 (DIVV (SignExt32to64 x) (SignExt32to64 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Line, OpMIPS64DIVV, MakeTuple(config.fe.TypeInt64(), config.fe.TypeInt64()))
		v1 := b.NewValue0(v.Line, OpSignExt32to64, config.fe.TypeInt64())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpSignExt32to64, config.fe.TypeInt64())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpDiv32F lowers 32-bit float division directly to the
// MIPS64 DIVF instruction.
func rewriteValueMIPS64_OpDiv32F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Div32F x y)
	// cond:
	// result: (DIVF x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64DIVF)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpDiv32u lowers unsigned 32-bit division: both operands
// are zero-extended to 64 bits, divided with DIVVU, and Select1 extracts the
// quotient half of the resulting tuple.
func rewriteValueMIPS64_OpDiv32u(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Div32u x y)
	// cond:
	// result: (Select1 (DIVVU (ZeroExt32to64 x) (ZeroExt32to64 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Line, OpMIPS64DIVVU, MakeTuple(config.fe.TypeUInt64(), config.fe.TypeUInt64()))
		v1 := b.NewValue0(v.Line, OpZeroExt32to64, config.fe.TypeUInt64())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpZeroExt32to64, config.fe.TypeUInt64())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpDiv64 lowers signed 64-bit division to DIVV; no
// extension is needed at full width. Select1 extracts the quotient half of
// the DIVV tuple.
func rewriteValueMIPS64_OpDiv64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Div64 x y)
	// cond:
	// result: (Select1 (DIVV x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Line, OpMIPS64DIVV, MakeTuple(config.fe.TypeInt64(), config.fe.TypeInt64()))
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpDiv64F lowers 64-bit float division directly to the
// MIPS64 DIVD instruction.
func rewriteValueMIPS64_OpDiv64F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Div64F x y)
	// cond:
	// result: (DIVD x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64DIVD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpDiv64u lowers unsigned 64-bit division to DIVVU; no
// extension is needed at full width. Select1 extracts the quotient half of
// the DIVVU tuple.
func rewriteValueMIPS64_OpDiv64u(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Div64u x y)
	// cond:
	// result: (Select1 (DIVVU x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Line, OpMIPS64DIVVU, MakeTuple(config.fe.TypeUInt64(), config.fe.TypeUInt64()))
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpDiv8 lowers signed 8-bit division: both operands are
// sign-extended to 64 bits, divided with DIVV, and Select1 extracts the
// quotient half of the resulting tuple.
func rewriteValueMIPS64_OpDiv8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Div8 x y)
	// cond:
	// result: (Select1 (DIVV (SignExt8to64 x) (SignExt8to64 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Line, OpMIPS64DIVV, MakeTuple(config.fe.TypeInt64(), config.fe.TypeInt64()))
		v1 := b.NewValue0(v.Line, OpSignExt8to64, config.fe.TypeInt64())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpSignExt8to64, config.fe.TypeInt64())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpDiv8u lowers unsigned 8-bit division: both operands
// are zero-extended to 64 bits, divided with DIVVU, and Select1 extracts the
// quotient half of the resulting tuple.
func rewriteValueMIPS64_OpDiv8u(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Div8u x y)
	// cond:
	// result: (Select1 (DIVVU (ZeroExt8to64 x) (ZeroExt8to64 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Line, OpMIPS64DIVVU, MakeTuple(config.fe.TypeUInt64(), config.fe.TypeUInt64()))
		v1 := b.NewValue0(v.Line, OpZeroExt8to64, config.fe.TypeUInt64())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpZeroExt8to64, config.fe.TypeUInt64())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpEq16 lowers 16-bit equality as 1 >u (x^y) on the
// zero-extended operands: the XOR is zero exactly when x == y, and SGTU with
// constant 1 yields the boolean.
func rewriteValueMIPS64_OpEq16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Eq16 x y)
	// cond:
	// result: (SGTU (MOVVconst [1]) (XOR (ZeroExt16to64 x) (ZeroExt16to64 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Line, OpMIPS64MOVVconst, config.fe.TypeUInt64())
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpMIPS64XOR, config.fe.TypeUInt64())
		v2 := b.NewValue0(v.Line, OpZeroExt16to64, config.fe.TypeUInt64())
		v2.AddArg(x)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpZeroExt16to64, config.fe.TypeUInt64())
		v3.AddArg(y)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpEq32 lowers 32-bit equality as 1 >u (x^y) on the
// zero-extended operands: the XOR is zero exactly when x == y, and SGTU with
// constant 1 yields the boolean.
func rewriteValueMIPS64_OpEq32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Eq32 x y)
	// cond:
	// result: (SGTU (MOVVconst [1]) (XOR (ZeroExt32to64 x) (ZeroExt32to64 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Line, OpMIPS64MOVVconst, config.fe.TypeUInt64())
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpMIPS64XOR, config.fe.TypeUInt64())
		v2 := b.NewValue0(v.Line, OpZeroExt32to64, config.fe.TypeUInt64())
		v2.AddArg(x)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpZeroExt32to64, config.fe.TypeUInt64())
		v3.AddArg(y)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpEq32F lowers 32-bit float equality to a CMPEQF compare
// that sets the FP condition flag, read back with FPFlagTrue.
func rewriteValueMIPS64_OpEq32F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Eq32F x y)
	// cond:
	// result: (FPFlagTrue (CMPEQF x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Line, OpMIPS64CMPEQF, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpEq64 lowers 64-bit equality as 1 >u (x^y): the XOR is
// zero exactly when x == y, and SGTU with constant 1 yields the boolean.
func rewriteValueMIPS64_OpEq64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Eq64 x y)
	// cond:
	// result: (SGTU (MOVVconst [1]) (XOR x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Line, OpMIPS64MOVVconst, config.fe.TypeUInt64())
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpMIPS64XOR, config.fe.TypeUInt64())
		v1.AddArg(x)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpEq64F lowers 64-bit float equality to a CMPEQD compare
// that sets the FP condition flag, read back with FPFlagTrue.
func rewriteValueMIPS64_OpEq64F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Eq64F x y)
	// cond:
	// result: (FPFlagTrue (CMPEQD x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Line, OpMIPS64CMPEQD, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpEq8 lowers 8-bit equality as 1 >u (x^y) on the
// zero-extended operands: the XOR is zero exactly when x == y, and SGTU with
// constant 1 yields the boolean.
func rewriteValueMIPS64_OpEq8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Eq8 x y)
	// cond:
	// result: (SGTU (MOVVconst [1]) (XOR (ZeroExt8to64 x) (ZeroExt8to64 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Line, OpMIPS64MOVVconst, config.fe.TypeUInt64())
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpMIPS64XOR, config.fe.TypeUInt64())
		v2 := b.NewValue0(v.Line, OpZeroExt8to64, config.fe.TypeUInt64())
		v2.AddArg(x)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpZeroExt8to64, config.fe.TypeUInt64())
		v3.AddArg(y)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpEqB lowers boolean equality as 1 ^ (x ^ y): for 0/1
// values, x^y gives inequality, and XOR-ing with 1 inverts it to equality.
func rewriteValueMIPS64_OpEqB(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (EqB x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (XOR <config.fe.TypeBool()> x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Line, OpMIPS64MOVVconst, config.fe.TypeUInt64())
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpMIPS64XOR, config.fe.TypeBool())
		v1.AddArg(x)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpEqPtr lowers pointer equality the same way as Eq64:
// 1 >u (x^y), which is true exactly when the pointers are bitwise equal.
func rewriteValueMIPS64_OpEqPtr(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (EqPtr x y)
	// cond:
	// result: (SGTU (MOVVconst [1]) (XOR x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Line, OpMIPS64MOVVconst, config.fe.TypeUInt64())
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpMIPS64XOR, config.fe.TypeUInt64())
		v1.AddArg(x)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpGeq16 lowers signed x >= y as !(y > x): the SGT on the
// sign-extended operands (note y first) computes y > x, and the XOR with 1
// inverts the boolean.
func rewriteValueMIPS64_OpGeq16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Geq16 x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGT (SignExt16to64 y) (SignExt16to64 x)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Line, OpMIPS64MOVVconst, config.fe.TypeUInt64())
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpMIPS64SGT, config.fe.TypeBool())
		v2 := b.NewValue0(v.Line, OpSignExt16to64, config.fe.TypeInt64())
		v2.AddArg(y)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpSignExt16to64, config.fe.TypeInt64())
		v3.AddArg(x)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpGeq16U lowers unsigned x >= y as !(y >u x): the SGTU
// on the zero-extended operands (note y first) computes y > x unsigned, and
// the XOR with 1 inverts the boolean.
func rewriteValueMIPS64_OpGeq16U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Geq16U x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt16to64 y) (ZeroExt16to64 x)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Line, OpMIPS64MOVVconst, config.fe.TypeUInt64())
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpMIPS64SGTU, config.fe.TypeBool())
		v2 := b.NewValue0(v.Line, OpZeroExt16to64, config.fe.TypeUInt64())
		v2.AddArg(y)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpZeroExt16to64, config.fe.TypeUInt64())
		v3.AddArg(x)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpGeq32 lowers signed x >= y as !(y > x): the SGT on the
// sign-extended operands (note y first) computes y > x, and the XOR with 1
// inverts the boolean.
func rewriteValueMIPS64_OpGeq32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Geq32 x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGT (SignExt32to64 y) (SignExt32to64 x)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Line, OpMIPS64MOVVconst, config.fe.TypeUInt64())
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpMIPS64SGT, config.fe.TypeBool())
		v2 := b.NewValue0(v.Line, OpSignExt32to64, config.fe.TypeInt64())
		v2.AddArg(y)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpSignExt32to64, config.fe.TypeInt64())
		v3.AddArg(x)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpGeq32F lowers 32-bit float >= to a CMPGEF compare that
// sets the FP condition flag, read back with FPFlagTrue.
func rewriteValueMIPS64_OpGeq32F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Geq32F x y)
	// cond:
	// result: (FPFlagTrue (CMPGEF x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Line, OpMIPS64CMPGEF, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpGeq32U lowers unsigned x >= y as !(y >u x): the SGTU
// on the zero-extended operands (note y first) computes y > x unsigned, and
// the XOR with 1 inverts the boolean.
func rewriteValueMIPS64_OpGeq32U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Geq32U x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt32to64 y) (ZeroExt32to64 x)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Line, OpMIPS64MOVVconst, config.fe.TypeUInt64())
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpMIPS64SGTU, config.fe.TypeBool())
		v2 := b.NewValue0(v.Line, OpZeroExt32to64, config.fe.TypeUInt64())
		v2.AddArg(y)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpZeroExt32to64, config.fe.TypeUInt64())
		v3.AddArg(x)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpGeq64 lowers signed 64-bit x >= y as !(y > x): SGT
// with operands swapped computes y > x, and the XOR with 1 inverts it.
func rewriteValueMIPS64_OpGeq64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Geq64 x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGT y x))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Line, OpMIPS64MOVVconst, config.fe.TypeUInt64())
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpMIPS64SGT, config.fe.TypeBool())
		v1.AddArg(y)
		v1.AddArg(x)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpGeq64F lowers 64-bit float >= to a CMPGED compare that
// sets the FP condition flag, read back with FPFlagTrue.
func rewriteValueMIPS64_OpGeq64F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Geq64F x y)
	// cond:
	// result: (FPFlagTrue (CMPGED x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Line, OpMIPS64CMPGED, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpGeq64U lowers unsigned 64-bit x >= y as !(y >u x):
// SGTU with operands swapped computes y > x unsigned, and the XOR with 1
// inverts it.
func rewriteValueMIPS64_OpGeq64U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Geq64U x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGTU y x))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Line, OpMIPS64MOVVconst, config.fe.TypeUInt64())
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpMIPS64SGTU, config.fe.TypeBool())
		v1.AddArg(y)
		v1.AddArg(x)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpGeq8 lowers signed x >= y as !(y > x): the SGT on the
// sign-extended operands (note y first) computes y > x, and the XOR with 1
// inverts the boolean.
func rewriteValueMIPS64_OpGeq8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Geq8 x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGT (SignExt8to64 y) (SignExt8to64 x)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Line, OpMIPS64MOVVconst, config.fe.TypeUInt64())
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpMIPS64SGT, config.fe.TypeBool())
		v2 := b.NewValue0(v.Line, OpSignExt8to64, config.fe.TypeInt64())
		v2.AddArg(y)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpSignExt8to64, config.fe.TypeInt64())
		v3.AddArg(x)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpGeq8U lowers unsigned x >= y as !(y >u x): the SGTU
// on the zero-extended operands (note y first) computes y > x unsigned, and
// the XOR with 1 inverts the boolean.
func rewriteValueMIPS64_OpGeq8U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Geq8U x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt8to64 y) (ZeroExt8to64 x)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Line, OpMIPS64MOVVconst, config.fe.TypeUInt64())
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpMIPS64SGTU, config.fe.TypeBool())
		v2 := b.NewValue0(v.Line, OpZeroExt8to64, config.fe.TypeUInt64())
		v2.AddArg(y)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpZeroExt8to64, config.fe.TypeUInt64())
		v3.AddArg(x)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpGetClosurePtr lowers GetClosurePtr to the arch-specific
// LoweredGetClosurePtr pseudo-op.
func rewriteValueMIPS64_OpGetClosurePtr(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (GetClosurePtr)
	// cond:
	// result: (LoweredGetClosurePtr)
	for {
		v.reset(OpMIPS64LoweredGetClosurePtr)
		return true
	}
}
// rewriteValueMIPS64_OpGoCall lowers the generic GoCall op to the
// MIPS64-specific CALLgo, preserving argument width and memory argument.
func rewriteValueMIPS64_OpGoCall(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (GoCall [argwid] mem)
	// cond:
	// result: (CALLgo [argwid] mem)
	for {
		argwid := v.AuxInt
		mem := v.Args[0]
		v.reset(OpMIPS64CALLgo)
		v.AuxInt = argwid
		v.AddArg(mem)
		return true
	}
}
// rewriteValueMIPS64_OpGreater16 lowers signed 16-bit x > y to SGT on the
// sign-extended operands.
func rewriteValueMIPS64_OpGreater16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Greater16 x y)
	// cond:
	// result: (SGT (SignExt16to64 x) (SignExt16to64 y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGT)
		v0 := b.NewValue0(v.Line, OpSignExt16to64, config.fe.TypeInt64())
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpSignExt16to64, config.fe.TypeInt64())
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpGreater16U lowers unsigned 16-bit x > y to SGTU on the
// zero-extended operands.
func rewriteValueMIPS64_OpGreater16U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Greater16U x y)
	// cond:
	// result: (SGTU (ZeroExt16to64 x) (ZeroExt16to64 y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Line, OpZeroExt16to64, config.fe.TypeUInt64())
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpZeroExt16to64, config.fe.TypeUInt64())
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpGreater32 lowers signed 32-bit x > y to SGT on the
// sign-extended operands.
func rewriteValueMIPS64_OpGreater32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Greater32 x y)
	// cond:
	// result: (SGT (SignExt32to64 x) (SignExt32to64 y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGT)
		v0 := b.NewValue0(v.Line, OpSignExt32to64, config.fe.TypeInt64())
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpSignExt32to64, config.fe.TypeInt64())
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpGreater32F lowers 32-bit float > to a CMPGTF compare
// that sets the FP condition flag, read back with FPFlagTrue.
func rewriteValueMIPS64_OpGreater32F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Greater32F x y)
	// cond:
	// result: (FPFlagTrue (CMPGTF x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Line, OpMIPS64CMPGTF, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpGreater32U lowers unsigned 32-bit x > y to SGTU on the
// zero-extended operands.
func rewriteValueMIPS64_OpGreater32U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Greater32U x y)
	// cond:
	// result: (SGTU (ZeroExt32to64 x) (ZeroExt32to64 y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Line, OpZeroExt32to64, config.fe.TypeUInt64())
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpZeroExt32to64, config.fe.TypeUInt64())
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpGreater64 lowers signed 64-bit x > y directly to SGT;
// no extension is needed at full width.
func rewriteValueMIPS64_OpGreater64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Greater64 x y)
	// cond:
	// result: (SGT x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGT)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpGreater64F lowers 64-bit float > to a CMPGTD compare
// that sets the FP condition flag, read back with FPFlagTrue.
func rewriteValueMIPS64_OpGreater64F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Greater64F x y)
	// cond:
	// result: (FPFlagTrue (CMPGTD x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Line, OpMIPS64CMPGTD, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpGreater64U lowers unsigned 64-bit x > y directly to
// SGTU; no extension is needed at full width.
func rewriteValueMIPS64_OpGreater64U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Greater64U x y)
	// cond:
	// result: (SGTU x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpGreater8 lowers signed 8-bit x > y to SGT on the
// sign-extended operands.
func rewriteValueMIPS64_OpGreater8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Greater8 x y)
	// cond:
	// result: (SGT (SignExt8to64 x) (SignExt8to64 y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGT)
		v0 := b.NewValue0(v.Line, OpSignExt8to64, config.fe.TypeInt64())
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpSignExt8to64, config.fe.TypeInt64())
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpGreater8U lowers Greater8U to SGTU after zero-extending
// both 8-bit operands to 64 bits so the unsigned compare is correct.
// Autogenerated from gen/MIPS64.rules; regenerate instead of editing by hand.
func rewriteValueMIPS64_OpGreater8U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Greater8U x y)
	// cond:
	// result: (SGTU (ZeroExt8to64 x) (ZeroExt8to64 y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Line, OpZeroExt8to64, config.fe.TypeUInt64())
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpZeroExt8to64, config.fe.TypeUInt64())
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpHmul16 lowers Hmul16 (high 16 bits of a signed 16x16
// multiply): sign-extend both operands, take the low half of the full MULV
// result (Select1), then arithmetic-shift right by 16.
// Autogenerated from gen/MIPS64.rules; regenerate instead of editing by hand.
func rewriteValueMIPS64_OpHmul16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Hmul16 x y)
	// cond:
	// result: (SRAVconst (Select1 <config.fe.TypeInt32()> (MULV (SignExt16to64 x) (SignExt16to64 y))) [16])
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAVconst)
		v.AuxInt = 16
		v0 := b.NewValue0(v.Line, OpSelect1, config.fe.TypeInt32())
		v1 := b.NewValue0(v.Line, OpMIPS64MULV, MakeTuple(config.fe.TypeInt64(), config.fe.TypeInt64()))
		v2 := b.NewValue0(v.Line, OpSignExt16to64, config.fe.TypeInt64())
		v2.AddArg(x)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpSignExt16to64, config.fe.TypeInt64())
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpHmul16u lowers Hmul16u (high 16 bits of an unsigned
// 16x16 multiply): zero-extend both operands, take the low half of the full
// MULVU result (Select1), then logical-shift right by 16.
// Autogenerated from gen/MIPS64.rules; regenerate instead of editing by hand.
func rewriteValueMIPS64_OpHmul16u(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Hmul16u x y)
	// cond:
	// result: (SRLVconst (Select1 <config.fe.TypeUInt32()> (MULVU (ZeroExt16to64 x) (ZeroExt16to64 y))) [16])
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRLVconst)
		v.AuxInt = 16
		v0 := b.NewValue0(v.Line, OpSelect1, config.fe.TypeUInt32())
		v1 := b.NewValue0(v.Line, OpMIPS64MULVU, MakeTuple(config.fe.TypeUInt64(), config.fe.TypeUInt64()))
		v2 := b.NewValue0(v.Line, OpZeroExt16to64, config.fe.TypeUInt64())
		v2.AddArg(x)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpZeroExt16to64, config.fe.TypeUInt64())
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpHmul32 lowers Hmul32 (high 32 bits of a signed 32x32
// multiply): sign-extend both operands, take the low half of the full MULV
// result (Select1), then arithmetic-shift right by 32.
// Autogenerated from gen/MIPS64.rules; regenerate instead of editing by hand.
func rewriteValueMIPS64_OpHmul32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Hmul32 x y)
	// cond:
	// result: (SRAVconst (Select1 <config.fe.TypeInt64()> (MULV (SignExt32to64 x) (SignExt32to64 y))) [32])
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAVconst)
		v.AuxInt = 32
		v0 := b.NewValue0(v.Line, OpSelect1, config.fe.TypeInt64())
		v1 := b.NewValue0(v.Line, OpMIPS64MULV, MakeTuple(config.fe.TypeInt64(), config.fe.TypeInt64()))
		v2 := b.NewValue0(v.Line, OpSignExt32to64, config.fe.TypeInt64())
		v2.AddArg(x)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpSignExt32to64, config.fe.TypeInt64())
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpHmul32u lowers Hmul32u (high 32 bits of an unsigned
// 32x32 multiply): zero-extend both operands, take the low half of the full
// MULVU result (Select1), then logical-shift right by 32.
// Autogenerated from gen/MIPS64.rules; regenerate instead of editing by hand.
func rewriteValueMIPS64_OpHmul32u(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Hmul32u x y)
	// cond:
	// result: (SRLVconst (Select1 <config.fe.TypeUInt64()> (MULVU (ZeroExt32to64 x) (ZeroExt32to64 y))) [32])
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRLVconst)
		v.AuxInt = 32
		v0 := b.NewValue0(v.Line, OpSelect1, config.fe.TypeUInt64())
		v1 := b.NewValue0(v.Line, OpMIPS64MULVU, MakeTuple(config.fe.TypeUInt64(), config.fe.TypeUInt64()))
		v2 := b.NewValue0(v.Line, OpZeroExt32to64, config.fe.TypeUInt64())
		v2.AddArg(x)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpZeroExt32to64, config.fe.TypeUInt64())
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpHmul64 lowers Hmul64 (high 64 bits of a signed 64x64
// multiply) by selecting the first element of the MULV tuple result.
// Autogenerated from gen/MIPS64.rules; regenerate instead of editing by hand.
func rewriteValueMIPS64_OpHmul64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Hmul64 x y)
	// cond:
	// result: (Select0 (MULV x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Line, OpMIPS64MULV, MakeTuple(config.fe.TypeInt64(), config.fe.TypeInt64()))
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpHmul64u lowers Hmul64u (high 64 bits of an unsigned
// 64x64 multiply) by selecting the first element of the MULVU tuple result.
// Autogenerated from gen/MIPS64.rules; regenerate instead of editing by hand.
func rewriteValueMIPS64_OpHmul64u(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Hmul64u x y)
	// cond:
	// result: (Select0 (MULVU x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Line, OpMIPS64MULVU, MakeTuple(config.fe.TypeUInt64(), config.fe.TypeUInt64()))
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpHmul8 lowers Hmul8 (high 8 bits of a signed 8x8
// multiply): sign-extend both operands, take the low half of the full MULV
// result (Select1), then arithmetic-shift right by 8.
// Autogenerated from gen/MIPS64.rules; regenerate instead of editing by hand.
func rewriteValueMIPS64_OpHmul8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Hmul8 x y)
	// cond:
	// result: (SRAVconst (Select1 <config.fe.TypeInt16()> (MULV (SignExt8to64 x) (SignExt8to64 y))) [8])
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAVconst)
		v.AuxInt = 8
		v0 := b.NewValue0(v.Line, OpSelect1, config.fe.TypeInt16())
		v1 := b.NewValue0(v.Line, OpMIPS64MULV, MakeTuple(config.fe.TypeInt64(), config.fe.TypeInt64()))
		v2 := b.NewValue0(v.Line, OpSignExt8to64, config.fe.TypeInt64())
		v2.AddArg(x)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpSignExt8to64, config.fe.TypeInt64())
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpHmul8u lowers Hmul8u (high 8 bits of an unsigned 8x8
// multiply): zero-extend both operands, take the low half of the full MULVU
// result (Select1), then logical-shift right by 8.
// Autogenerated from gen/MIPS64.rules; regenerate instead of editing by hand.
func rewriteValueMIPS64_OpHmul8u(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Hmul8u x y)
	// cond:
	// result: (SRLVconst (Select1 <config.fe.TypeUInt16()> (MULVU (ZeroExt8to64 x) (ZeroExt8to64 y))) [8])
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRLVconst)
		v.AuxInt = 8
		v0 := b.NewValue0(v.Line, OpSelect1, config.fe.TypeUInt16())
		v1 := b.NewValue0(v.Line, OpMIPS64MULVU, MakeTuple(config.fe.TypeUInt64(), config.fe.TypeUInt64()))
		v2 := b.NewValue0(v.Line, OpZeroExt8to64, config.fe.TypeUInt64())
		v2.AddArg(x)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpZeroExt8to64, config.fe.TypeUInt64())
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpInterCall lowers a generic interface call to the
// MIPS64 CALLinter op, carrying the argument width through AuxInt.
// Autogenerated from gen/MIPS64.rules; regenerate instead of editing by hand.
func rewriteValueMIPS64_OpInterCall(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (InterCall [argwid] entry mem)
	// cond:
	// result: (CALLinter [argwid] entry mem)
	for {
		argwid := v.AuxInt
		entry := v.Args[0]
		mem := v.Args[1]
		v.reset(OpMIPS64CALLinter)
		v.AuxInt = argwid
		v.AddArg(entry)
		v.AddArg(mem)
		return true
	}
}
// rewriteValueMIPS64_OpIsInBounds lowers IsInBounds (idx < len) as an
// unsigned compare with the operands swapped: SGTU len idx.
// Autogenerated from gen/MIPS64.rules; regenerate instead of editing by hand.
func rewriteValueMIPS64_OpIsInBounds(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (IsInBounds idx len)
	// cond:
	// result: (SGTU len idx)
	for {
		idx := v.Args[0]
		len := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v.AddArg(len)
		v.AddArg(idx)
		return true
	}
}
// rewriteValueMIPS64_OpIsNonNil lowers IsNonNil as an unsigned compare of the
// pointer against the constant 0: SGTU ptr 0.
// Autogenerated from gen/MIPS64.rules; regenerate instead of editing by hand.
func rewriteValueMIPS64_OpIsNonNil(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (IsNonNil ptr)
	// cond:
	// result: (SGTU ptr (MOVVconst [0]))
	for {
		ptr := v.Args[0]
		v.reset(OpMIPS64SGTU)
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Line, OpMIPS64MOVVconst, config.fe.TypeUInt64())
		v0.AuxInt = 0
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpIsSliceInBounds lowers IsSliceInBounds (idx <= len) as
// the boolean complement of idx > len: XOR 1 with (SGTU idx len).
// Autogenerated from gen/MIPS64.rules; regenerate instead of editing by hand.
func rewriteValueMIPS64_OpIsSliceInBounds(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (IsSliceInBounds idx len)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGTU idx len))
	for {
		idx := v.Args[0]
		len := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Line, OpMIPS64MOVVconst, config.fe.TypeUInt64())
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpMIPS64SGTU, config.fe.TypeBool())
		v1.AddArg(idx)
		v1.AddArg(len)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpLeq16 lowers Leq16 (x <= y, signed) as the complement
// of x > y: XOR 1 with SGT over the sign-extended operands.
// Autogenerated from gen/MIPS64.rules; regenerate instead of editing by hand.
func rewriteValueMIPS64_OpLeq16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Leq16 x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGT (SignExt16to64 x) (SignExt16to64 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Line, OpMIPS64MOVVconst, config.fe.TypeUInt64())
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpMIPS64SGT, config.fe.TypeBool())
		v2 := b.NewValue0(v.Line, OpSignExt16to64, config.fe.TypeInt64())
		v2.AddArg(x)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpSignExt16to64, config.fe.TypeInt64())
		v3.AddArg(y)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpLeq16U lowers Leq16U (x <= y, unsigned) as the
// complement of x > y: XOR 1 with SGTU over the zero-extended operands.
// Autogenerated from gen/MIPS64.rules; regenerate instead of editing by hand.
func rewriteValueMIPS64_OpLeq16U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Leq16U x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt16to64 x) (ZeroExt16to64 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Line, OpMIPS64MOVVconst, config.fe.TypeUInt64())
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpMIPS64SGTU, config.fe.TypeBool())
		v2 := b.NewValue0(v.Line, OpZeroExt16to64, config.fe.TypeUInt64())
		v2.AddArg(x)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpZeroExt16to64, config.fe.TypeUInt64())
		v3.AddArg(y)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpLeq32 lowers Leq32 (x <= y, signed) as the complement
// of x > y: XOR 1 with SGT over the sign-extended operands.
// Autogenerated from gen/MIPS64.rules; regenerate instead of editing by hand.
func rewriteValueMIPS64_OpLeq32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Leq32 x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGT (SignExt32to64 x) (SignExt32to64 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Line, OpMIPS64MOVVconst, config.fe.TypeUInt64())
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpMIPS64SGT, config.fe.TypeBool())
		v2 := b.NewValue0(v.Line, OpSignExt32to64, config.fe.TypeInt64())
		v2.AddArg(x)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpSignExt32to64, config.fe.TypeInt64())
		v3.AddArg(y)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpLeq32F lowers Leq32F (x <= y) as the single-precision
// compare y >= x: FPFlagTrue of (CMPGEF y x) — note the swapped operands.
// Autogenerated from gen/MIPS64.rules; regenerate instead of editing by hand.
func rewriteValueMIPS64_OpLeq32F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Leq32F x y)
	// cond:
	// result: (FPFlagTrue (CMPGEF y x))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Line, OpMIPS64CMPGEF, TypeFlags)
		v0.AddArg(y)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpLeq32U lowers Leq32U (x <= y, unsigned) as the
// complement of x > y: XOR 1 with SGTU over the zero-extended operands.
// Autogenerated from gen/MIPS64.rules; regenerate instead of editing by hand.
func rewriteValueMIPS64_OpLeq32U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Leq32U x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt32to64 x) (ZeroExt32to64 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Line, OpMIPS64MOVVconst, config.fe.TypeUInt64())
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpMIPS64SGTU, config.fe.TypeBool())
		v2 := b.NewValue0(v.Line, OpZeroExt32to64, config.fe.TypeUInt64())
		v2.AddArg(x)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpZeroExt32to64, config.fe.TypeUInt64())
		v3.AddArg(y)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpLeq64 lowers Leq64 (x <= y, signed) as the complement
// of x > y: XOR 1 with (SGT x y).
// Autogenerated from gen/MIPS64.rules; regenerate instead of editing by hand.
func rewriteValueMIPS64_OpLeq64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Leq64 x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGT x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Line, OpMIPS64MOVVconst, config.fe.TypeUInt64())
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpMIPS64SGT, config.fe.TypeBool())
		v1.AddArg(x)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpLeq64F lowers Leq64F (x <= y) as the double-precision
// compare y >= x: FPFlagTrue of (CMPGED y x) — note the swapped operands.
// Autogenerated from gen/MIPS64.rules; regenerate instead of editing by hand.
func rewriteValueMIPS64_OpLeq64F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Leq64F x y)
	// cond:
	// result: (FPFlagTrue (CMPGED y x))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Line, OpMIPS64CMPGED, TypeFlags)
		v0.AddArg(y)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpLeq64U lowers Leq64U (x <= y, unsigned) as the
// complement of x > y: XOR 1 with (SGTU x y).
// Autogenerated from gen/MIPS64.rules; regenerate instead of editing by hand.
func rewriteValueMIPS64_OpLeq64U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Leq64U x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGTU x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Line, OpMIPS64MOVVconst, config.fe.TypeUInt64())
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpMIPS64SGTU, config.fe.TypeBool())
		v1.AddArg(x)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpLeq8 lowers Leq8 (x <= y, signed) as the complement of
// x > y: XOR 1 with SGT over the sign-extended operands.
// Autogenerated from gen/MIPS64.rules; regenerate instead of editing by hand.
func rewriteValueMIPS64_OpLeq8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Leq8 x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGT (SignExt8to64 x) (SignExt8to64 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Line, OpMIPS64MOVVconst, config.fe.TypeUInt64())
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpMIPS64SGT, config.fe.TypeBool())
		v2 := b.NewValue0(v.Line, OpSignExt8to64, config.fe.TypeInt64())
		v2.AddArg(x)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpSignExt8to64, config.fe.TypeInt64())
		v3.AddArg(y)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpLeq8U lowers Leq8U (x <= y, unsigned) as the
// complement of x > y: XOR 1 with SGTU over the zero-extended operands.
// Autogenerated from gen/MIPS64.rules; regenerate instead of editing by hand.
func rewriteValueMIPS64_OpLeq8U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Leq8U x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt8to64 x) (ZeroExt8to64 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Line, OpMIPS64MOVVconst, config.fe.TypeUInt64())
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpMIPS64SGTU, config.fe.TypeBool())
		v2 := b.NewValue0(v.Line, OpZeroExt8to64, config.fe.TypeUInt64())
		v2.AddArg(x)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpZeroExt8to64, config.fe.TypeUInt64())
		v3.AddArg(y)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpLess16 lowers Less16 (x < y, signed) as y > x:
// SGT over the sign-extended operands with arguments swapped.
// Autogenerated from gen/MIPS64.rules; regenerate instead of editing by hand.
func rewriteValueMIPS64_OpLess16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Less16 x y)
	// cond:
	// result: (SGT (SignExt16to64 y) (SignExt16to64 x))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGT)
		v0 := b.NewValue0(v.Line, OpSignExt16to64, config.fe.TypeInt64())
		v0.AddArg(y)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpSignExt16to64, config.fe.TypeInt64())
		v1.AddArg(x)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpLess16U lowers Less16U (x < y, unsigned) as y > x:
// SGTU over the zero-extended operands with arguments swapped.
// Autogenerated from gen/MIPS64.rules; regenerate instead of editing by hand.
func rewriteValueMIPS64_OpLess16U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Less16U x y)
	// cond:
	// result: (SGTU (ZeroExt16to64 y) (ZeroExt16to64 x))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Line, OpZeroExt16to64, config.fe.TypeUInt64())
		v0.AddArg(y)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpZeroExt16to64, config.fe.TypeUInt64())
		v1.AddArg(x)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpLess32 lowers Less32 (x < y, signed) as y > x:
// SGT over the sign-extended operands with arguments swapped.
// Autogenerated from gen/MIPS64.rules; regenerate instead of editing by hand.
func rewriteValueMIPS64_OpLess32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Less32 x y)
	// cond:
	// result: (SGT (SignExt32to64 y) (SignExt32to64 x))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGT)
		v0 := b.NewValue0(v.Line, OpSignExt32to64, config.fe.TypeInt64())
		v0.AddArg(y)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpSignExt32to64, config.fe.TypeInt64())
		v1.AddArg(x)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpLess32F lowers Less32F (x < y) as the single-precision
// compare y > x: FPFlagTrue of (CMPGTF y x) — note the swapped operands.
// Autogenerated from gen/MIPS64.rules; regenerate instead of editing by hand.
func rewriteValueMIPS64_OpLess32F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Less32F x y)
	// cond:
	// result: (FPFlagTrue (CMPGTF y x))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Line, OpMIPS64CMPGTF, TypeFlags)
		v0.AddArg(y)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpLess32U lowers Less32U (x < y, unsigned) as y > x:
// SGTU over the zero-extended operands with arguments swapped.
// Autogenerated from gen/MIPS64.rules; regenerate instead of editing by hand.
func rewriteValueMIPS64_OpLess32U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Less32U x y)
	// cond:
	// result: (SGTU (ZeroExt32to64 y) (ZeroExt32to64 x))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Line, OpZeroExt32to64, config.fe.TypeUInt64())
		v0.AddArg(y)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpZeroExt32to64, config.fe.TypeUInt64())
		v1.AddArg(x)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpLess64 lowers Less64 (x < y, signed) as y > x:
// SGT with the arguments swapped.
// Autogenerated from gen/MIPS64.rules; regenerate instead of editing by hand.
func rewriteValueMIPS64_OpLess64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Less64 x y)
	// cond:
	// result: (SGT y x)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGT)
		v.AddArg(y)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpLess64F lowers Less64F (x < y) as the double-precision
// compare y > x: FPFlagTrue of (CMPGTD y x) — note the swapped operands.
// Autogenerated from gen/MIPS64.rules; regenerate instead of editing by hand.
func rewriteValueMIPS64_OpLess64F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Less64F x y)
	// cond:
	// result: (FPFlagTrue (CMPGTD y x))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Line, OpMIPS64CMPGTD, TypeFlags)
		v0.AddArg(y)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpLess64U lowers Less64U (x < y, unsigned) as y > x:
// SGTU with the arguments swapped.
// Autogenerated from gen/MIPS64.rules; regenerate instead of editing by hand.
func rewriteValueMIPS64_OpLess64U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Less64U x y)
	// cond:
	// result: (SGTU y x)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v.AddArg(y)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpLess8 lowers Less8 (x < y, signed) as y > x:
// SGT over the sign-extended operands with arguments swapped.
// Autogenerated from gen/MIPS64.rules; regenerate instead of editing by hand.
func rewriteValueMIPS64_OpLess8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Less8 x y)
	// cond:
	// result: (SGT (SignExt8to64 y) (SignExt8to64 x))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGT)
		v0 := b.NewValue0(v.Line, OpSignExt8to64, config.fe.TypeInt64())
		v0.AddArg(y)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpSignExt8to64, config.fe.TypeInt64())
		v1.AddArg(x)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpLess8U lowers Less8U (x < y, unsigned) as y > x:
// SGTU over the zero-extended operands with arguments swapped.
// Autogenerated from gen/MIPS64.rules; regenerate instead of editing by hand.
func rewriteValueMIPS64_OpLess8U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Less8U x y)
	// cond:
	// result: (SGTU (ZeroExt8to64 y) (ZeroExt8to64 x))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Line, OpZeroExt8to64, config.fe.TypeUInt64())
		v0.AddArg(y)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpZeroExt8to64, config.fe.TypeUInt64())
		v1.AddArg(x)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpLoad lowers a generic Load to the MIPS64 load op that
// matches the loaded type: bool and unsigned sub-word types use the unsigned
// (zero-extending) variants, signed sub-word types the sign-extending ones,
// 64-bit ints and pointers use MOVVload, and floats use MOVFload/MOVDload.
// Returns false if no rule matched (e.g. an unhandled type).
// Autogenerated from gen/MIPS64.rules; regenerate instead of editing by hand.
func rewriteValueMIPS64_OpLoad(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Load <t> ptr mem)
	// cond: t.IsBoolean()
	// result: (MOVBUload ptr mem)
	for {
		t := v.Type
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(t.IsBoolean()) {
			break
		}
		v.reset(OpMIPS64MOVBUload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is8BitInt(t) && isSigned(t))
	// result: (MOVBload ptr mem)
	for {
		t := v.Type
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(is8BitInt(t) && isSigned(t)) {
			break
		}
		v.reset(OpMIPS64MOVBload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is8BitInt(t) && !isSigned(t))
	// result: (MOVBUload ptr mem)
	for {
		t := v.Type
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(is8BitInt(t) && !isSigned(t)) {
			break
		}
		v.reset(OpMIPS64MOVBUload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is16BitInt(t) && isSigned(t))
	// result: (MOVHload ptr mem)
	for {
		t := v.Type
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(is16BitInt(t) && isSigned(t)) {
			break
		}
		v.reset(OpMIPS64MOVHload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is16BitInt(t) && !isSigned(t))
	// result: (MOVHUload ptr mem)
	for {
		t := v.Type
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(is16BitInt(t) && !isSigned(t)) {
			break
		}
		v.reset(OpMIPS64MOVHUload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is32BitInt(t) && isSigned(t))
	// result: (MOVWload ptr mem)
	for {
		t := v.Type
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(is32BitInt(t) && isSigned(t)) {
			break
		}
		v.reset(OpMIPS64MOVWload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is32BitInt(t) && !isSigned(t))
	// result: (MOVWUload ptr mem)
	for {
		t := v.Type
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(is32BitInt(t) && !isSigned(t)) {
			break
		}
		v.reset(OpMIPS64MOVWUload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is64BitInt(t) || isPtr(t))
	// result: (MOVVload ptr mem)
	for {
		t := v.Type
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(is64BitInt(t) || isPtr(t)) {
			break
		}
		v.reset(OpMIPS64MOVVload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: is32BitFloat(t)
	// result: (MOVFload ptr mem)
	for {
		t := v.Type
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(is32BitFloat(t)) {
			break
		}
		v.reset(OpMIPS64MOVFload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: is64BitFloat(t)
	// result: (MOVDload ptr mem)
	for {
		t := v.Type
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(is64BitFloat(t)) {
			break
		}
		v.reset(OpMIPS64MOVDload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpLsh16x16 lowers Lsh16x16 with Go shift semantics:
// the SLLV result is ANDed with NEGV(SGTU 64, y'), a mask that is all ones
// when the (zero-extended) shift amount is < 64 and zero otherwise, so
// oversized shifts yield 0 rather than the hardware's modulo behavior.
// Autogenerated from gen/MIPS64.rules; regenerate instead of editing by hand.
func rewriteValueMIPS64_OpLsh16x16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Lsh16x16 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <config.fe.TypeUInt64()> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Line, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Line, OpMIPS64SGTU, config.fe.TypeBool())
		v2 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64())
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpZeroExt16to64, config.fe.TypeUInt64())
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Line, OpMIPS64SLLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Line, OpZeroExt16to64, config.fe.TypeUInt64())
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh16x32 lowers Lsh16x32 with Go shift semantics:
// SLLV masked by NEGV(SGTU 64, y') so shifts of 64 or more produce 0
// (y' is the 32-bit shift amount zero-extended to 64 bits).
// Autogenerated from gen/MIPS64.rules; regenerate instead of editing by hand.
func rewriteValueMIPS64_OpLsh16x32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Lsh16x32 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <config.fe.TypeUInt64()> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Line, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Line, OpMIPS64SGTU, config.fe.TypeBool())
		v2 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64())
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpZeroExt32to64, config.fe.TypeUInt64())
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Line, OpMIPS64SLLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Line, OpZeroExt32to64, config.fe.TypeUInt64())
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh16x64 lowers Lsh16x64 with Go shift semantics:
// SLLV masked by NEGV(SGTU 64, y) so shifts of 64 or more produce 0
// (no extension needed — the shift amount is already 64 bits).
// Autogenerated from gen/MIPS64.rules; regenerate instead of editing by hand.
func rewriteValueMIPS64_OpLsh16x64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Lsh16x64 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <config.fe.TypeUInt64()> [64]) y)) (SLLV <t> x y))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Line, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Line, OpMIPS64SGTU, config.fe.TypeBool())
		v2 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64())
		v2.AuxInt = 64
		v1.AddArg(v2)
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Line, OpMIPS64SLLV, t)
		v3.AddArg(x)
		v3.AddArg(y)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueMIPS64_OpLsh16x8 lowers Lsh16x8 with Go shift semantics:
// SLLV masked by NEGV(SGTU 64, y') so shifts of 64 or more produce 0
// (y' is the 8-bit shift amount zero-extended to 64 bits).
// Autogenerated from gen/MIPS64.rules; regenerate instead of editing by hand.
func rewriteValueMIPS64_OpLsh16x8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Lsh16x8  <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <config.fe.TypeUInt64()> [64]) (ZeroExt8to64  y))) (SLLV <t> x (ZeroExt8to64  y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Line, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Line, OpMIPS64SGTU, config.fe.TypeBool())
		v2 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64())
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpZeroExt8to64, config.fe.TypeUInt64())
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Line, OpMIPS64SLLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Line, OpZeroExt8to64, config.fe.TypeUInt64())
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh32x16 lowers Lsh32x16 with Go shift semantics:
// SLLV masked by NEGV(SGTU 64, y') so shifts of 64 or more produce 0
// (y' is the 16-bit shift amount zero-extended to 64 bits).
// Autogenerated from gen/MIPS64.rules; regenerate instead of editing by hand.
func rewriteValueMIPS64_OpLsh32x16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Lsh32x16 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <config.fe.TypeUInt64()> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Line, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Line, OpMIPS64SGTU, config.fe.TypeBool())
		v2 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64())
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpZeroExt16to64, config.fe.TypeUInt64())
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Line, OpMIPS64SLLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Line, OpZeroExt16to64, config.fe.TypeUInt64())
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh32x32 lowers Lsh32x32 with Go shift semantics:
// SLLV masked by NEGV(SGTU 64, y') so shifts of 64 or more produce 0
// (y' is the 32-bit shift amount zero-extended to 64 bits).
// Autogenerated from gen/MIPS64.rules; regenerate instead of editing by hand.
func rewriteValueMIPS64_OpLsh32x32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Lsh32x32 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <config.fe.TypeUInt64()> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Line, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Line, OpMIPS64SGTU, config.fe.TypeBool())
		v2 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64())
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpZeroExt32to64, config.fe.TypeUInt64())
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Line, OpMIPS64SLLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Line, OpZeroExt32to64, config.fe.TypeUInt64())
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh32x64 lowers the generic Lsh32x64 op to MIPS64 ops:
// y is already 64-bit so no extension is emitted; the SLLV result is ANDed
// with a NEGV(SGTU 64 y) mask so that shift amounts >= 64 produce 0. The rule
// is unconditional, so this always returns true.
func rewriteValueMIPS64_OpLsh32x64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Lsh32x64 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <config.fe.TypeUInt64()> [64]) y)) (SLLV <t> x y))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Line, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Line, OpMIPS64SGTU, config.fe.TypeBool())
		v2 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64())
		v2.AuxInt = 64
		v1.AddArg(v2)
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Line, OpMIPS64SLLV, t)
		v3.AddArg(x)
		v3.AddArg(y)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueMIPS64_OpLsh32x8 lowers the generic Lsh32x8 op to MIPS64 ops:
// the SLLV result is ANDed with a NEGV(SGTU 64 (ZeroExt8to64 y)) mask so that
// shift amounts >= 64 produce 0. The rule is unconditional, so this always
// returns true.
func rewriteValueMIPS64_OpLsh32x8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Lsh32x8  <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <config.fe.TypeUInt64()> [64]) (ZeroExt8to64  y))) (SLLV <t> x (ZeroExt8to64  y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Line, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Line, OpMIPS64SGTU, config.fe.TypeBool())
		v2 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64())
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpZeroExt8to64, config.fe.TypeUInt64())
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Line, OpMIPS64SLLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Line, OpZeroExt8to64, config.fe.TypeUInt64())
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh64x16 lowers the generic Lsh64x16 op to MIPS64 ops:
// the SLLV result is ANDed with a NEGV(SGTU 64 (ZeroExt16to64 y)) mask so that
// shift amounts >= 64 produce 0. The rule is unconditional, so this always
// returns true.
func rewriteValueMIPS64_OpLsh64x16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Lsh64x16 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <config.fe.TypeUInt64()> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Line, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Line, OpMIPS64SGTU, config.fe.TypeBool())
		v2 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64())
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpZeroExt16to64, config.fe.TypeUInt64())
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Line, OpMIPS64SLLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Line, OpZeroExt16to64, config.fe.TypeUInt64())
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh64x32 lowers the generic Lsh64x32 op to MIPS64 ops:
// the SLLV result is ANDed with a NEGV(SGTU 64 (ZeroExt32to64 y)) mask so that
// shift amounts >= 64 produce 0. The rule is unconditional, so this always
// returns true.
func rewriteValueMIPS64_OpLsh64x32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Lsh64x32 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <config.fe.TypeUInt64()> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Line, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Line, OpMIPS64SGTU, config.fe.TypeBool())
		v2 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64())
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpZeroExt32to64, config.fe.TypeUInt64())
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Line, OpMIPS64SLLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Line, OpZeroExt32to64, config.fe.TypeUInt64())
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh64x64 lowers the generic Lsh64x64 op to MIPS64 ops:
// y is already 64-bit so no extension is emitted; the SLLV result is ANDed
// with a NEGV(SGTU 64 y) mask so that shift amounts >= 64 produce 0. The rule
// is unconditional, so this always returns true.
func rewriteValueMIPS64_OpLsh64x64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Lsh64x64 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <config.fe.TypeUInt64()> [64]) y)) (SLLV <t> x y))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Line, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Line, OpMIPS64SGTU, config.fe.TypeBool())
		v2 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64())
		v2.AuxInt = 64
		v1.AddArg(v2)
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Line, OpMIPS64SLLV, t)
		v3.AddArg(x)
		v3.AddArg(y)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueMIPS64_OpLsh64x8 lowers the generic Lsh64x8 op to MIPS64 ops:
// the SLLV result is ANDed with a NEGV(SGTU 64 (ZeroExt8to64 y)) mask so that
// shift amounts >= 64 produce 0. The rule is unconditional, so this always
// returns true.
func rewriteValueMIPS64_OpLsh64x8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Lsh64x8  <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <config.fe.TypeUInt64()> [64]) (ZeroExt8to64  y))) (SLLV <t> x (ZeroExt8to64  y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Line, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Line, OpMIPS64SGTU, config.fe.TypeBool())
		v2 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64())
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpZeroExt8to64, config.fe.TypeUInt64())
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Line, OpMIPS64SLLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Line, OpZeroExt8to64, config.fe.TypeUInt64())
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh8x16 lowers the generic Lsh8x16 op to MIPS64 ops:
// the SLLV result is ANDed with a NEGV(SGTU 64 (ZeroExt16to64 y)) mask so that
// shift amounts >= 64 produce 0. The rule is unconditional, so this always
// returns true.
func rewriteValueMIPS64_OpLsh8x16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Lsh8x16 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <config.fe.TypeUInt64()> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Line, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Line, OpMIPS64SGTU, config.fe.TypeBool())
		v2 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64())
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpZeroExt16to64, config.fe.TypeUInt64())
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Line, OpMIPS64SLLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Line, OpZeroExt16to64, config.fe.TypeUInt64())
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh8x32 lowers the generic Lsh8x32 op to MIPS64 ops:
// the SLLV result is ANDed with a NEGV(SGTU 64 (ZeroExt32to64 y)) mask so that
// shift amounts >= 64 produce 0. The rule is unconditional, so this always
// returns true.
func rewriteValueMIPS64_OpLsh8x32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Lsh8x32 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <config.fe.TypeUInt64()> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Line, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Line, OpMIPS64SGTU, config.fe.TypeBool())
		v2 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64())
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpZeroExt32to64, config.fe.TypeUInt64())
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Line, OpMIPS64SLLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Line, OpZeroExt32to64, config.fe.TypeUInt64())
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh8x64 lowers the generic Lsh8x64 op to MIPS64 ops:
// y is already 64-bit so no extension is emitted; the SLLV result is ANDed
// with a NEGV(SGTU 64 y) mask so that shift amounts >= 64 produce 0. The rule
// is unconditional, so this always returns true.
func rewriteValueMIPS64_OpLsh8x64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Lsh8x64 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <config.fe.TypeUInt64()> [64]) y)) (SLLV <t> x y))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Line, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Line, OpMIPS64SGTU, config.fe.TypeBool())
		v2 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64())
		v2.AuxInt = 64
		v1.AddArg(v2)
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Line, OpMIPS64SLLV, t)
		v3.AddArg(x)
		v3.AddArg(y)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueMIPS64_OpLsh8x8 lowers the generic Lsh8x8 op to MIPS64 ops:
// the SLLV result is ANDed with a NEGV(SGTU 64 (ZeroExt8to64 y)) mask so that
// shift amounts >= 64 produce 0. The rule is unconditional, so this always
// returns true.
func rewriteValueMIPS64_OpLsh8x8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Lsh8x8  <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <config.fe.TypeUInt64()> [64]) (ZeroExt8to64  y))) (SLLV <t> x (ZeroExt8to64  y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Line, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Line, OpMIPS64SGTU, config.fe.TypeBool())
		v2 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64())
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpZeroExt8to64, config.fe.TypeUInt64())
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Line, OpMIPS64SLLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Line, OpZeroExt8to64, config.fe.TypeUInt64())
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpMIPS64ADDV simplifies an ADDV value: a 32-bit-immediate
// constant operand (either side) is folded into an ADDVconst, and adding a
// NEGV (either side) becomes a SUBV. Rules are tried in order; returns true on
// the first match, false if none applies.
func rewriteValueMIPS64_OpMIPS64ADDV(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (ADDV (MOVVconst [c]) x)
	// cond: is32Bit(c)
	// result: (ADDVconst [c] x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64ADDVconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (ADDV x (MOVVconst [c]))
	// cond: is32Bit(c)
	// result: (ADDVconst [c] x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_1.AuxInt
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64ADDVconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (ADDV x (NEGV y))
	// cond:
	// result: (SUBV x y)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64NEGV {
			break
		}
		y := v_1.Args[0]
		v.reset(OpMIPS64SUBV)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (ADDV (NEGV y) x)
	// cond:
	// result: (SUBV x y)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64NEGV {
			break
		}
		y := v_0.Args[0]
		x := v.Args[1]
		v.reset(OpMIPS64SUBV)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64ADDVconst simplifies an ADDVconst value: folds
// the offset into a MOVVaddr, eliminates adding 0, does constant arithmetic
// with MOVVconst, and combines with a nested ADDVconst/SUBVconst when the
// combined offset still fits in 32 bits. Rules are tried in order; returns
// true on the first match, false if none applies.
func rewriteValueMIPS64_OpMIPS64ADDVconst(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (ADDVconst [off1] (MOVVaddr [off2] {sym} ptr))
	// cond:
	// result: (MOVVaddr [off1+off2] {sym} ptr)
	for {
		off1 := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym := v_0.Aux
		ptr := v_0.Args[0]
		v.reset(OpMIPS64MOVVaddr)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		return true
	}
	// match: (ADDVconst [0]  x)
	// cond:
	// result: x
	for {
		if v.AuxInt != 0 {
			break
		}
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (ADDVconst [c] (MOVVconst [d]))
	// cond:
	// result: (MOVVconst [c+d])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = c + d
		return true
	}
	// match: (ADDVconst [c] (ADDVconst [d] x))
	// cond: is32Bit(c+d)
	// result: (ADDVconst [c+d] x)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		d := v_0.AuxInt
		x := v_0.Args[0]
		if !(is32Bit(c + d)) {
			break
		}
		v.reset(OpMIPS64ADDVconst)
		v.AuxInt = c + d
		v.AddArg(x)
		return true
	}
	// match: (ADDVconst [c] (SUBVconst [d] x))
	// cond: is32Bit(c-d)
	// result: (ADDVconst [c-d] x)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64SUBVconst {
			break
		}
		d := v_0.AuxInt
		x := v_0.Args[0]
		if !(is32Bit(c - d)) {
			break
		}
		v.reset(OpMIPS64ADDVconst)
		v.AuxInt = c - d
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64AND simplifies an AND value: a 32-bit-immediate
// constant operand (either side) is folded into an ANDconst, and (AND x x)
// reduces to x. Rules are tried in order; returns true on the first match,
// false if none applies.
func rewriteValueMIPS64_OpMIPS64AND(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (AND (MOVVconst [c]) x)
	// cond: is32Bit(c)
	// result: (ANDconst [c] x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64ANDconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (AND x (MOVVconst [c]))
	// cond: is32Bit(c)
	// result: (ANDconst [c] x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_1.AuxInt
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64ANDconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (AND x x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64ANDconst simplifies an ANDconst value: masking
// with 0 yields the constant 0, masking with -1 is the identity, a constant
// operand is folded arithmetically, and nested ANDconst masks are merged with
// &. Rules are tried in order; returns true on the first match, false if none
// applies.
func rewriteValueMIPS64_OpMIPS64ANDconst(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (ANDconst [0]  _)
	// cond:
	// result: (MOVVconst [0])
	for {
		if v.AuxInt != 0 {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	// match: (ANDconst [-1] x)
	// cond:
	// result: x
	for {
		if v.AuxInt != -1 {
			break
		}
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (ANDconst [c] (MOVVconst [d]))
	// cond:
	// result: (MOVVconst [c&d])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = c & d
		return true
	}
	// match: (ANDconst [c] (ANDconst [d] x))
	// cond:
	// result: (ANDconst [c&d] x)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ANDconst {
			break
		}
		d := v_0.AuxInt
		x := v_0.Args[0]
		v.reset(OpMIPS64ANDconst)
		v.AuxInt = c & d
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVBUload folds addressing arithmetic into a
// MOVBUload: an ADDVconst base has its offset merged in, and a MOVVaddr base
// has both its offset and symbol merged in, provided the combined offset fits
// in 32 bits (and, for symbols, canMergeSym allows it). Returns true on the
// first match, false if none applies.
func rewriteValueMIPS64_OpMIPS64MOVBUload(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (MOVBUload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVBUload [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVBUload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBUload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVBUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVBUload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVBUreg simplifies a zero-extending MOVBUreg:
// it is redundant after a MOVBUload or another MOVBUreg (the value is already
// zero-extended, so only a MOVVreg is needed), and applied to a constant it
// folds to the low-byte value int64(uint8(c)). Returns true on the first
// match, false if none applies.
func rewriteValueMIPS64_OpMIPS64MOVBUreg(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (MOVBUreg x:(MOVBUload _ _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVBUload {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg x:(MOVBUreg _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVBUreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg (MOVVconst [c]))
	// cond:
	// result: (MOVVconst [int64(uint8(c))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64(uint8(c))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVBload folds addressing arithmetic into a
// MOVBload: an ADDVconst base has its offset merged in, and a MOVVaddr base
// has both its offset and symbol merged in, provided the combined offset fits
// in 32 bits (and, for symbols, canMergeSym allows it). Returns true on the
// first match, false if none applies.
func rewriteValueMIPS64_OpMIPS64MOVBload(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (MOVBload  [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVBload  [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVBload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVBload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVBload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVBreg simplifies a sign-extending MOVBreg:
// it is redundant after a MOVBload or another MOVBreg (the value is already
// sign-extended, so only a MOVVreg is needed), and applied to a constant it
// folds to int64(int8(c)). Returns true on the first match, false if none
// applies.
func rewriteValueMIPS64_OpMIPS64MOVBreg(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (MOVBreg x:(MOVBload _ _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVBload {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBreg x:(MOVBreg _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVBreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBreg  (MOVVconst [c]))
	// cond:
	// result: (MOVVconst [int64(int8(c))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64(int8(c))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVBstore simplifies a MOVBstore: folds
// ADDVconst/MOVVaddr addressing into the offset/symbol (when the combined
// offset fits in 32 bits), turns a store of constant 0 into MOVBstorezero,
// and drops a redundant sign/zero extension (MOVBreg, MOVBUreg, MOVHreg,
// MOVHUreg, MOVWreg, MOVWUreg) of the stored value, since only the low byte
// is stored. Returns true on the first match, false if none applies.
func rewriteValueMIPS64_OpMIPS64MOVBstore(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (MOVBstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVBstore [off1+off2] {sym} ptr val mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVBstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVVconst [0]) mem)
	// cond:
	// result: (MOVBstorezero [off] {sym} ptr mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		mem := v.Args[2]
		v.reset(OpMIPS64MOVBstorezero)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVBreg x) mem)
	// cond:
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVBreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVBUreg x) mem)
	// cond:
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVBUreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVHreg x) mem)
	// cond:
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVHreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVHUreg x) mem)
	// cond:
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVHUreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVWreg x) mem)
	// cond:
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVWreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVWUreg x) mem)
	// cond:
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVWUreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVBstorezero folds addressing arithmetic into a
// MOVBstorezero: an ADDVconst base has its offset merged in, and a MOVVaddr
// base has both its offset and symbol merged in, provided the combined offset
// fits in 32 bits (and, for symbols, canMergeSym allows it). Returns true on
// the first match, false if none applies.
func rewriteValueMIPS64_OpMIPS64MOVBstorezero(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (MOVBstorezero [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVBstorezero [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVBstorezero)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstorezero [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVBstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVBstorezero)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVDload folds addressing arithmetic into a
// MOVDload: an ADDVconst base has its offset merged in, and a MOVVaddr base
// has both its offset and symbol merged in, provided the combined offset fits
// in 32 bits (and, for symbols, canMergeSym allows it). Returns true on the
// first match, false if none applies.
func rewriteValueMIPS64_OpMIPS64MOVDload(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (MOVDload  [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVDload  [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVDload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVDload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVDload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVDstore folds addressing arithmetic into a
// MOVDstore: an ADDVconst base has its offset merged in, and a MOVVaddr base
// has both its offset and symbol merged in, provided the combined offset fits
// in 32 bits (and, for symbols, canMergeSym allows it). Returns true on the
// first match, false if none applies.
func rewriteValueMIPS64_OpMIPS64MOVDstore(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (MOVDstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVDstore [off1+off2] {sym} ptr val mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVDstore)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVDstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVDstore)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVFload rewrites OpMIPS64MOVFload values,
// folding an ADDVconst base into the load offset and merging a MOVVaddr
// base's symbol/offset into the load while the combined offset stays 32-bit.
// Rules are tried in order; the function reports whether v was rewritten.
func rewriteValueMIPS64_OpMIPS64MOVFload(v *Value, config *Config) bool {
	b := v.Block
	_ = b // b is unused in this function; the blank assignment keeps the generated code compiling
	// match: (MOVFload  [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVFload  [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVFload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVFload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVFload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVFload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVFstore rewrites OpMIPS64MOVFstore values,
// folding an ADDVconst base into the store offset and merging a MOVVaddr
// base's symbol/offset into the store while the combined offset stays 32-bit.
// Rules are tried in order; the function reports whether v was rewritten.
func rewriteValueMIPS64_OpMIPS64MOVFstore(v *Value, config *Config) bool {
	b := v.Block
	_ = b // b is unused in this function; the blank assignment keeps the generated code compiling
	// match: (MOVFstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVFstore [off1+off2] {sym} ptr val mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVFstore)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVFstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVFstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVFstore)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVHUload rewrites OpMIPS64MOVHUload values,
// folding an ADDVconst base into the load offset and merging a MOVVaddr
// base's symbol/offset into the load while the combined offset stays 32-bit.
// Rules are tried in order; the function reports whether v was rewritten.
func rewriteValueMIPS64_OpMIPS64MOVHUload(v *Value, config *Config) bool {
	b := v.Block
	_ = b // b is unused in this function; the blank assignment keeps the generated code compiling
	// match: (MOVHUload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVHUload [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVHUload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHUload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVHUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVHUload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVHUreg rewrites OpMIPS64MOVHUreg values.
// When the operand is already narrow/zero-extended by an earlier op
// (MOVBUload, MOVHUload, MOVBUreg, MOVHUreg), the extension is redundant and
// is replaced by a MOVVreg move; a MOVVconst operand is folded to the
// truncated constant. Rules are tried in order; the function reports whether
// v was rewritten.
func rewriteValueMIPS64_OpMIPS64MOVHUreg(v *Value, config *Config) bool {
	b := v.Block
	_ = b // b is unused in this function; the blank assignment keeps the generated code compiling
	// match: (MOVHUreg x:(MOVBUload _ _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVBUload {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVHUload _ _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVHUload {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVBUreg _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVBUreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVHUreg _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVHUreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg (MOVVconst [c]))
	// cond:
	// result: (MOVVconst [int64(uint16(c))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64(uint16(c)) // zero-extend the low 16 bits of the constant
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVHload rewrites OpMIPS64MOVHload values,
// folding an ADDVconst base into the load offset and merging a MOVVaddr
// base's symbol/offset into the load while the combined offset stays 32-bit.
// Rules are tried in order; the function reports whether v was rewritten.
func rewriteValueMIPS64_OpMIPS64MOVHload(v *Value, config *Config) bool {
	b := v.Block
	_ = b // b is unused in this function; the blank assignment keeps the generated code compiling
	// match: (MOVHload  [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVHload  [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVHload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVHload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVHload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVHreg rewrites OpMIPS64MOVHreg values.
// When the operand was already produced by a narrower or equal-width
// extension (MOVBload, MOVBUload, MOVHload, MOVBreg, MOVBUreg, MOVHreg),
// the re-extension is redundant and is replaced by a MOVVreg move; a
// MOVVconst operand is folded to the sign-extended constant. Rules are
// tried in order; the function reports whether v was rewritten.
func rewriteValueMIPS64_OpMIPS64MOVHreg(v *Value, config *Config) bool {
	b := v.Block
	_ = b // b is unused in this function; the blank assignment keeps the generated code compiling
	// match: (MOVHreg x:(MOVBload _ _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVBload {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBUload _ _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVBUload {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVHload _ _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVHload {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBreg _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVBreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBUreg _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVBUreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVHreg _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVHreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg  (MOVVconst [c]))
	// cond:
	// result: (MOVVconst [int64(int16(c))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64(int16(c)) // sign-extend the low 16 bits of the constant
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVHstore rewrites OpMIPS64MOVHstore values:
// it folds an ADDVconst base into the store offset, merges a MOVVaddr base's
// symbol/offset into the store (both while the combined offset stays 32-bit),
// turns a store of constant zero into MOVHstorezero, and drops a redundant
// extension op (MOVHreg/MOVHUreg/MOVWreg/MOVWUreg) on the stored value, since
// only the low bits are written. Rules are tried in order; the function
// reports whether v was rewritten.
func rewriteValueMIPS64_OpMIPS64MOVHstore(v *Value, config *Config) bool {
	b := v.Block
	_ = b // b is unused in this function; the blank assignment keeps the generated code compiling
	// match: (MOVHstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVHstore [off1+off2] {sym} ptr val mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVHstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVVconst [0]) mem)
	// cond:
	// result: (MOVHstorezero [off] {sym} ptr mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		mem := v.Args[2]
		v.reset(OpMIPS64MOVHstorezero)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVHreg x) mem)
	// cond:
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVHreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVHUreg x) mem)
	// cond:
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVHUreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVWreg x) mem)
	// cond:
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVWreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVWUreg x) mem)
	// cond:
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVWUreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVHstorezero rewrites OpMIPS64MOVHstorezero
// values, folding an ADDVconst base into the store offset and merging a
// MOVVaddr base's symbol/offset into the store while the combined offset
// stays 32-bit. Rules are tried in order; the function reports whether v
// was rewritten.
func rewriteValueMIPS64_OpMIPS64MOVHstorezero(v *Value, config *Config) bool {
	b := v.Block
	_ = b // b is unused in this function; the blank assignment keeps the generated code compiling
	// match: (MOVHstorezero [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVHstorezero [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVHstorezero)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHstorezero [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVHstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVHstorezero)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVVload rewrites OpMIPS64MOVVload values,
// folding an ADDVconst base into the load offset and merging a MOVVaddr
// base's symbol/offset into the load while the combined offset stays 32-bit.
// Rules are tried in order; the function reports whether v was rewritten.
func rewriteValueMIPS64_OpMIPS64MOVVload(v *Value, config *Config) bool {
	b := v.Block
	_ = b // b is unused in this function; the blank assignment keeps the generated code compiling
	// match: (MOVVload  [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVVload  [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVVload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVVload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVVload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVVload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVVreg rewrites OpMIPS64MOVVreg values:
// a single-use operand lets the move degrade to a MOVVnop, and a MOVVconst
// operand is folded to the constant itself. Rules are tried in order; the
// function reports whether v was rewritten.
func rewriteValueMIPS64_OpMIPS64MOVVreg(v *Value, config *Config) bool {
	b := v.Block
	_ = b // b is unused in this function; the blank assignment keeps the generated code compiling
	// match: (MOVVreg x)
	// cond: x.Uses == 1
	// result: (MOVVnop x)
	for {
		x := v.Args[0]
		if !(x.Uses == 1) {
			break
		}
		v.reset(OpMIPS64MOVVnop)
		v.AddArg(x)
		return true
	}
	// match: (MOVVreg  (MOVVconst [c]))
	// cond:
	// result: (MOVVconst [c])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = c
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVVstore rewrites OpMIPS64MOVVstore values:
// it folds an ADDVconst base into the store offset, merges a MOVVaddr base's
// symbol/offset into the store (both while the combined offset stays 32-bit),
// and turns a store of constant zero into MOVVstorezero. Rules are tried in
// order; the function reports whether v was rewritten.
func rewriteValueMIPS64_OpMIPS64MOVVstore(v *Value, config *Config) bool {
	b := v.Block
	_ = b // b is unused in this function; the blank assignment keeps the generated code compiling
	// match: (MOVVstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVVstore [off1+off2] {sym} ptr val mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVVstore)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVVstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVVstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVVstore)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVVstore [off] {sym} ptr (MOVVconst [0]) mem)
	// cond:
	// result: (MOVVstorezero [off] {sym} ptr mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		mem := v.Args[2]
		v.reset(OpMIPS64MOVVstorezero)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVVstorezero rewrites OpMIPS64MOVVstorezero
// values, folding an ADDVconst base into the store offset and merging a
// MOVVaddr base's symbol/offset into the store while the combined offset
// stays 32-bit. Rules are tried in order; the function reports whether v
// was rewritten.
func rewriteValueMIPS64_OpMIPS64MOVVstorezero(v *Value, config *Config) bool {
	b := v.Block
	_ = b // b is unused in this function; the blank assignment keeps the generated code compiling
	// match: (MOVVstorezero [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVVstorezero [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVVstorezero)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVVstorezero [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVVstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVVstorezero)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVWUload rewrites OpMIPS64MOVWUload values,
// folding an ADDVconst base into the load offset and merging a MOVVaddr
// base's symbol/offset into the load while the combined offset stays 32-bit.
// Rules are tried in order; the function reports whether v was rewritten.
func rewriteValueMIPS64_OpMIPS64MOVWUload(v *Value, config *Config) bool {
	b := v.Block
	_ = b // b is unused in this function; the blank assignment keeps the generated code compiling
	// match: (MOVWUload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVWUload [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVWUload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWUload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVWUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVWUload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVWUreg rewrites OpMIPS64MOVWUreg values.
// When the operand is already narrow/zero-extended by an earlier op
// (MOVBUload, MOVHUload, MOVWUload, MOVBUreg, MOVHUreg, MOVWUreg), the
// extension is redundant and is replaced by a MOVVreg move; a MOVVconst
// operand is folded to the truncated constant. Rules are tried in order;
// the function reports whether v was rewritten.
func rewriteValueMIPS64_OpMIPS64MOVWUreg(v *Value, config *Config) bool {
	b := v.Block
	_ = b // b is unused in this function; the blank assignment keeps the generated code compiling
	// match: (MOVWUreg x:(MOVBUload _ _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVBUload {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVHUload _ _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVHUload {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVWUload _ _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVWUload {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVBUreg _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVBUreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVHUreg _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVHUreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVWUreg _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVWUreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg (MOVVconst [c]))
	// cond:
	// result: (MOVVconst [int64(uint32(c))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64(uint32(c)) // zero-extend the low 32 bits of the constant
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVWload rewrites OpMIPS64MOVWload values,
// folding an ADDVconst base into the load offset and merging a MOVVaddr
// base's symbol/offset into the load while the combined offset stays 32-bit.
// Rules are tried in order; the function reports whether v was rewritten.
func rewriteValueMIPS64_OpMIPS64MOVWload(v *Value, config *Config) bool {
	b := v.Block
	_ = b // b is unused in this function; the blank assignment keeps the generated code compiling
	// match: (MOVWload  [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVWload  [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVWload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVWload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVWload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
  5001  func rewriteValueMIPS64_OpMIPS64MOVWreg(v *Value, config *Config) bool {
  5002  	b := v.Block
  5003  	_ = b
  5004  	// match: (MOVWreg x:(MOVBload _ _))
  5005  	// cond:
  5006  	// result: (MOVVreg x)
  5007  	for {
  5008  		x := v.Args[0]
  5009  		if x.Op != OpMIPS64MOVBload {
  5010  			break
  5011  		}
  5012  		v.reset(OpMIPS64MOVVreg)
  5013  		v.AddArg(x)
  5014  		return true
  5015  	}
  5016  	// match: (MOVWreg x:(MOVBUload _ _))
  5017  	// cond:
  5018  	// result: (MOVVreg x)
  5019  	for {
  5020  		x := v.Args[0]
  5021  		if x.Op != OpMIPS64MOVBUload {
  5022  			break
  5023  		}
  5024  		v.reset(OpMIPS64MOVVreg)
  5025  		v.AddArg(x)
  5026  		return true
  5027  	}
  5028  	// match: (MOVWreg x:(MOVHload _ _))
  5029  	// cond:
  5030  	// result: (MOVVreg x)
  5031  	for {
  5032  		x := v.Args[0]
  5033  		if x.Op != OpMIPS64MOVHload {
  5034  			break
  5035  		}
  5036  		v.reset(OpMIPS64MOVVreg)
  5037  		v.AddArg(x)
  5038  		return true
  5039  	}
  5040  	// match: (MOVWreg x:(MOVHUload _ _))
  5041  	// cond:
  5042  	// result: (MOVVreg x)
  5043  	for {
  5044  		x := v.Args[0]
  5045  		if x.Op != OpMIPS64MOVHUload {
  5046  			break
  5047  		}
  5048  		v.reset(OpMIPS64MOVVreg)
  5049  		v.AddArg(x)
  5050  		return true
  5051  	}
  5052  	// match: (MOVWreg x:(MOVWload _ _))
  5053  	// cond:
  5054  	// result: (MOVVreg x)
  5055  	for {
  5056  		x := v.Args[0]
  5057  		if x.Op != OpMIPS64MOVWload {
  5058  			break
  5059  		}
  5060  		v.reset(OpMIPS64MOVVreg)
  5061  		v.AddArg(x)
  5062  		return true
  5063  	}
  5064  	// match: (MOVWreg x:(MOVBreg _))
  5065  	// cond:
  5066  	// result: (MOVVreg x)
  5067  	for {
  5068  		x := v.Args[0]
  5069  		if x.Op != OpMIPS64MOVBreg {
  5070  			break
  5071  		}
  5072  		v.reset(OpMIPS64MOVVreg)
  5073  		v.AddArg(x)
  5074  		return true
  5075  	}
  5076  	// match: (MOVWreg x:(MOVBUreg _))
  5077  	// cond:
  5078  	// result: (MOVVreg x)
  5079  	for {
  5080  		x := v.Args[0]
  5081  		if x.Op != OpMIPS64MOVBUreg {
  5082  			break
  5083  		}
  5084  		v.reset(OpMIPS64MOVVreg)
  5085  		v.AddArg(x)
  5086  		return true
  5087  	}
  5088  	// match: (MOVWreg x:(MOVHreg _))
  5089  	// cond:
  5090  	// result: (MOVVreg x)
  5091  	for {
  5092  		x := v.Args[0]
  5093  		if x.Op != OpMIPS64MOVHreg {
  5094  			break
  5095  		}
  5096  		v.reset(OpMIPS64MOVVreg)
  5097  		v.AddArg(x)
  5098  		return true
  5099  	}
  5100  	// match: (MOVWreg x:(MOVHreg _))
  5101  	// cond:
  5102  	// result: (MOVVreg x)
  5103  	for {
  5104  		x := v.Args[0]
  5105  		if x.Op != OpMIPS64MOVHreg {
  5106  			break
  5107  		}
  5108  		v.reset(OpMIPS64MOVVreg)
  5109  		v.AddArg(x)
  5110  		return true
  5111  	}
  5112  	// match: (MOVWreg x:(MOVWreg _))
  5113  	// cond:
  5114  	// result: (MOVVreg x)
  5115  	for {
  5116  		x := v.Args[0]
  5117  		if x.Op != OpMIPS64MOVWreg {
  5118  			break
  5119  		}
  5120  		v.reset(OpMIPS64MOVVreg)
  5121  		v.AddArg(x)
  5122  		return true
  5123  	}
  5124  	// match: (MOVWreg  (MOVVconst [c]))
  5125  	// cond:
  5126  	// result: (MOVVconst [int64(int32(c))])
  5127  	for {
  5128  		v_0 := v.Args[0]
  5129  		if v_0.Op != OpMIPS64MOVVconst {
  5130  			break
  5131  		}
  5132  		c := v_0.AuxInt
  5133  		v.reset(OpMIPS64MOVVconst)
  5134  		v.AuxInt = int64(int32(c))
  5135  		return true
  5136  	}
  5137  	return false
  5138  }
// rewriteValueMIPS64_OpMIPS64MOVWstore applies the MIPS64.rules rewrites for
// MOVWstore (32-bit store), shown in the match/cond/result comments below:
// fold a constant address offset (ADDVconst) or a symbolic address
// (MOVVaddr) into the store's AuxInt/Aux, turn a store of constant zero into
// MOVWstorezero, and drop a MOVWreg/MOVWUreg extension of the stored value
// (only the low 32 bits are stored, so the extension is irrelevant).
// It reports whether v was rewritten.
func rewriteValueMIPS64_OpMIPS64MOVWstore(v *Value, config *Config) bool {
	b := v.Block
	_ = b // b is declared by the generator for uniformity; unused here.
	// match: (MOVWstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVWstore [off1+off2] {sym} ptr val mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVWstore)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVWstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVWstore)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWstore [off] {sym} ptr (MOVVconst [0]) mem)
	// cond:
	// result: (MOVWstorezero [off] {sym} ptr mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		mem := v.Args[2]
		v.reset(OpMIPS64MOVWstorezero)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWstore [off] {sym} ptr (MOVWreg x) mem)
	// cond:
	// result: (MOVWstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVWreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpMIPS64MOVWstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWstore [off] {sym} ptr (MOVWUreg x) mem)
	// cond:
	// result: (MOVWstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVWUreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpMIPS64MOVWstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVWstorezero applies the MIPS64.rules rewrites
// for MOVWstorezero (store of a 32-bit zero): fold a constant address offset
// (ADDVconst) or a symbolic address (MOVVaddr) into the op's AuxInt/Aux,
// provided the combined offset still fits in 32 bits.
// It reports whether v was rewritten.
func rewriteValueMIPS64_OpMIPS64MOVWstorezero(v *Value, config *Config) bool {
	b := v.Block
	_ = b // b is declared by the generator for uniformity; unused here.
	// match: (MOVWstorezero [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVWstorezero [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVWstorezero)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWstorezero [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVWstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVWstorezero)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64NEGV folds negation of a constant:
// (NEGV (MOVVconst [c])) -> (MOVVconst [-c]). The negation uses Go's
// wrapping int64 arithmetic, matching the machine's two's-complement NEG.
// It reports whether v was rewritten.
func rewriteValueMIPS64_OpMIPS64NEGV(v *Value, config *Config) bool {
	b := v.Block
	_ = b // b is declared by the generator for uniformity; unused here.
	// match: (NEGV (MOVVconst [c]))
	// cond:
	// result: (MOVVconst [-c])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = -c
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64NOR canonicalizes NOR with a constant operand
// (on either side, since NOR is commutative) into NORconst, provided the
// constant fits in 32 bits so it can be encoded as an immediate.
// It reports whether v was rewritten.
func rewriteValueMIPS64_OpMIPS64NOR(v *Value, config *Config) bool {
	b := v.Block
	_ = b // b is declared by the generator for uniformity; unused here.
	// match: (NOR (MOVVconst [c]) x)
	// cond: is32Bit(c)
	// result: (NORconst [c] x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64NORconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (NOR x (MOVVconst [c]))
	// cond: is32Bit(c)
	// result: (NORconst [c] x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_1.AuxInt
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64NORconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64NORconst folds NORconst applied to a constant:
// (NORconst [c] (MOVVconst [d])) -> (MOVVconst [^(c|d)]), i.e. the bitwise
// NOR of the two constants computed at compile time.
// It reports whether v was rewritten.
func rewriteValueMIPS64_OpMIPS64NORconst(v *Value, config *Config) bool {
	b := v.Block
	_ = b // b is declared by the generator for uniformity; unused here.
	// match: (NORconst [c] (MOVVconst [d]))
	// cond:
	// result: (MOVVconst [^(c|d)])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = ^(c | d)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64OR canonicalizes OR: a 32-bit-immediate constant
// operand (either side; OR is commutative) becomes ORconst, and (OR x x)
// simplifies to x since OR is idempotent.
// It reports whether v was rewritten.
func rewriteValueMIPS64_OpMIPS64OR(v *Value, config *Config) bool {
	b := v.Block
	_ = b // b is declared by the generator for uniformity; unused here.
	// match: (OR  (MOVVconst [c]) x)
	// cond: is32Bit(c)
	// result: (ORconst  [c] x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64ORconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (OR  x (MOVVconst [c]))
	// cond: is32Bit(c)
	// result: (ORconst  [c] x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_1.AuxInt
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64ORconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (OR  x x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64ORconst simplifies ORconst: OR with 0 is the
// identity (becomes a copy of x), OR with -1 is all-ones (becomes the
// constant -1), OR of two constants folds, and nested ORconsts merge into
// one when the combined constant still fits in 32 bits.
// It reports whether v was rewritten.
func rewriteValueMIPS64_OpMIPS64ORconst(v *Value, config *Config) bool {
	b := v.Block
	_ = b // b is declared by the generator for uniformity; unused here.
	// match: (ORconst  [0]  x)
	// cond:
	// result: x
	for {
		if v.AuxInt != 0 {
			break
		}
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (ORconst  [-1] _)
	// cond:
	// result: (MOVVconst [-1])
	for {
		if v.AuxInt != -1 {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = -1
		return true
	}
	// match: (ORconst [c] (MOVVconst [d]))
	// cond:
	// result: (MOVVconst [c|d])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = c | d
		return true
	}
	// match: (ORconst [c] (ORconst [d] x))
	// cond: is32Bit(c|d)
	// result: (ORconst [c|d] x)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ORconst {
			break
		}
		d := v_0.AuxInt
		x := v_0.Args[0]
		if !(is32Bit(c | d)) {
			break
		}
		v.reset(OpMIPS64ORconst)
		v.AuxInt = c | d
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SGT (signed set-on-greater-than) rewrites
// (SGT (MOVVconst [c]) x) into (SGTconst [c] x) when c fits in 32 bits,
// so the constant can be used as an immediate. Only the constant-first form
// is matched: SGT is not commutative.
// It reports whether v was rewritten.
func rewriteValueMIPS64_OpMIPS64SGT(v *Value, config *Config) bool {
	b := v.Block
	_ = b // b is declared by the generator for uniformity; unused here.
	// match: (SGT  (MOVVconst [c]) x)
	// cond: is32Bit(c)
	// result: (SGTconst  [c] x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64SGTconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SGTU (unsigned set-on-greater-than) rewrites
// (SGTU (MOVVconst [c]) x) into (SGTUconst [c] x) when c fits in 32 bits.
// Only the constant-first form is matched: SGTU is not commutative.
// It reports whether v was rewritten.
func rewriteValueMIPS64_OpMIPS64SGTU(v *Value, config *Config) bool {
	b := v.Block
	_ = b // b is declared by the generator for uniformity; unused here.
	// match: (SGTU (MOVVconst [c]) x)
	// cond: is32Bit(c)
	// result: (SGTUconst [c] x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64SGTUconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SGTUconst evaluates SGTUconst (1 if constant c
// is unsigned-greater than the operand, else 0) at compile time where
// possible: against another constant directly, or against operands whose
// unsigned range is statically bounded (zero-extended bytes/halfwords,
// AND with a constant mask, or a right shift by a known amount).
// It reports whether v was rewritten.
func rewriteValueMIPS64_OpMIPS64SGTUconst(v *Value, config *Config) bool {
	b := v.Block
	_ = b // b is declared by the generator for uniformity; unused here.
	// match: (SGTUconst [c] (MOVVconst [d]))
	// cond: uint64(c)>uint64(d)
	// result: (MOVVconst [1])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0.AuxInt
		if !(uint64(c) > uint64(d)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 1
		return true
	}
	// match: (SGTUconst [c] (MOVVconst [d]))
	// cond: uint64(c)<=uint64(d)
	// result: (MOVVconst [0])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0.AuxInt
		if !(uint64(c) <= uint64(d)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	// match: (SGTUconst [c] (MOVBUreg _))
	// cond: 0xff < uint64(c)
	// result: (MOVVconst [1])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVBUreg {
			break
		}
		if !(0xff < uint64(c)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 1
		return true
	}
	// match: (SGTUconst [c] (MOVHUreg _))
	// cond: 0xffff < uint64(c)
	// result: (MOVVconst [1])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVHUreg {
			break
		}
		if !(0xffff < uint64(c)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 1
		return true
	}
	// match: (SGTUconst [c] (ANDconst [m] _))
	// cond: uint64(m) < uint64(c)
	// result: (MOVVconst [1])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ANDconst {
			break
		}
		m := v_0.AuxInt
		if !(uint64(m) < uint64(c)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 1
		return true
	}
	// match: (SGTUconst [c] (SRLVconst _ [d]))
	// cond: 0 < d && d <= 63 && 1<<uint64(64-d) <= uint64(c)
	// result: (MOVVconst [1])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64SRLVconst {
			break
		}
		d := v_0.AuxInt
		if !(0 < d && d <= 63 && 1<<uint64(64-d) <= uint64(c)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 1
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SGTconst evaluates SGTconst (1 if constant c is
// signed-greater than the operand, else 0) at compile time where possible:
// against another constant directly, or against operands whose signed range
// is statically bounded — sign/zero-extended bytes, halfwords and words,
// AND with a non-negative constant mask, or a logical right shift by a
// known amount. Each bounded-range case has a "always 1" and/or "always 0"
// rule depending on where c sits relative to that range.
// It reports whether v was rewritten.
func rewriteValueMIPS64_OpMIPS64SGTconst(v *Value, config *Config) bool {
	b := v.Block
	_ = b // b is declared by the generator for uniformity; unused here.
	// match: (SGTconst [c] (MOVVconst [d]))
	// cond: int64(c)>int64(d)
	// result: (MOVVconst [1])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0.AuxInt
		if !(int64(c) > int64(d)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 1
		return true
	}
	// match: (SGTconst [c] (MOVVconst [d]))
	// cond: int64(c)<=int64(d)
	// result: (MOVVconst [0])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0.AuxInt
		if !(int64(c) <= int64(d)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	// match: (SGTconst [c] (MOVBreg _))
	// cond: 0x7f < int64(c)
	// result: (MOVVconst [1])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVBreg {
			break
		}
		if !(0x7f < int64(c)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 1
		return true
	}
	// match: (SGTconst [c] (MOVBreg _))
	// cond: int64(c) <= -0x80
	// result: (MOVVconst [0])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVBreg {
			break
		}
		if !(int64(c) <= -0x80) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	// match: (SGTconst [c] (MOVBUreg _))
	// cond: 0xff < int64(c)
	// result: (MOVVconst [1])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVBUreg {
			break
		}
		if !(0xff < int64(c)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 1
		return true
	}
	// match: (SGTconst [c] (MOVBUreg _))
	// cond: int64(c) < 0
	// result: (MOVVconst [0])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVBUreg {
			break
		}
		if !(int64(c) < 0) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	// match: (SGTconst [c] (MOVHreg _))
	// cond: 0x7fff < int64(c)
	// result: (MOVVconst [1])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVHreg {
			break
		}
		if !(0x7fff < int64(c)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 1
		return true
	}
	// match: (SGTconst [c] (MOVHreg _))
	// cond: int64(c) <= -0x8000
	// result: (MOVVconst [0])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVHreg {
			break
		}
		if !(int64(c) <= -0x8000) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	// match: (SGTconst [c] (MOVHUreg _))
	// cond: 0xffff < int64(c)
	// result: (MOVVconst [1])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVHUreg {
			break
		}
		if !(0xffff < int64(c)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 1
		return true
	}
	// match: (SGTconst [c] (MOVHUreg _))
	// cond: int64(c) < 0
	// result: (MOVVconst [0])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVHUreg {
			break
		}
		if !(int64(c) < 0) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	// match: (SGTconst [c] (MOVWUreg _))
	// cond: int64(c) < 0
	// result: (MOVVconst [0])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVWUreg {
			break
		}
		if !(int64(c) < 0) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	// match: (SGTconst [c] (ANDconst [m] _))
	// cond: 0 <= m && m < c
	// result: (MOVVconst [1])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ANDconst {
			break
		}
		m := v_0.AuxInt
		if !(0 <= m && m < c) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 1
		return true
	}
	// match: (SGTconst [c] (SRLVconst _ [d]))
	// cond: 0 <= c && 0 < d && d <= 63 && 1<<uint64(64-d) <= c
	// result: (MOVVconst [1])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64SRLVconst {
			break
		}
		d := v_0.AuxInt
		if !(0 <= c && 0 < d && d <= 63 && 1<<uint64(64-d) <= c) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 1
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SLLV rewrites variable left shifts with a
// constant shift amount: a shift by 64 or more yields constant 0; otherwise
// the shift becomes SLLVconst. The >=64 rule must be tried first, as the
// generic SLLVconst rule would otherwise capture those amounts too.
// It reports whether v was rewritten.
func rewriteValueMIPS64_OpMIPS64SLLV(v *Value, config *Config) bool {
	b := v.Block
	_ = b // b is declared by the generator for uniformity; unused here.
	// match: (SLLV _ (MOVVconst [c]))
	// cond: uint64(c)>=64
	// result: (MOVVconst [0])
	for {
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_1.AuxInt
		if !(uint64(c) >= 64) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	// match: (SLLV x (MOVVconst [c]))
	// cond:
	// result: (SLLVconst x [c])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpMIPS64SLLVconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SLLVconst folds a constant left shift of a
// constant: (SLLVconst [c] (MOVVconst [d])) -> (MOVVconst [d<<c]).
// It reports whether v was rewritten.
func rewriteValueMIPS64_OpMIPS64SLLVconst(v *Value, config *Config) bool {
	b := v.Block
	_ = b // b is declared by the generator for uniformity; unused here.
	// match: (SLLVconst [c] (MOVVconst [d]))
	// cond:
	// result: (MOVVconst [int64(d)<<uint64(c)])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64(d) << uint64(c)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SRAV rewrites variable arithmetic right shifts
// with a constant shift amount: shifts by 64 or more are clamped to
// SRAVconst by 63 (which propagates the sign bit across all 64 bits);
// smaller amounts become SRAVconst directly. The >=64 clamp must be tried
// before the generic rule.
// It reports whether v was rewritten.
func rewriteValueMIPS64_OpMIPS64SRAV(v *Value, config *Config) bool {
	b := v.Block
	_ = b // b is declared by the generator for uniformity; unused here.
	// match: (SRAV x (MOVVconst [c]))
	// cond: uint64(c)>=64
	// result: (SRAVconst x [63])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_1.AuxInt
		if !(uint64(c) >= 64) {
			break
		}
		v.reset(OpMIPS64SRAVconst)
		v.AuxInt = 63
		v.AddArg(x)
		return true
	}
	// match: (SRAV x (MOVVconst [c]))
	// cond:
	// result: (SRAVconst x [c])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpMIPS64SRAVconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SRAVconst folds a constant arithmetic right
// shift of a constant: (SRAVconst [c] (MOVVconst [d])) -> (MOVVconst [d>>c])
// using Go's signed (sign-propagating) shift on int64.
// It reports whether v was rewritten.
func rewriteValueMIPS64_OpMIPS64SRAVconst(v *Value, config *Config) bool {
	b := v.Block
	_ = b // b is declared by the generator for uniformity; unused here.
	// match: (SRAVconst [c] (MOVVconst [d]))
	// cond:
	// result: (MOVVconst [int64(d)>>uint64(c)])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64(d) >> uint64(c)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SRLV rewrites variable logical right shifts
// with a constant shift amount: a shift by 64 or more yields constant 0;
// otherwise the shift becomes SRLVconst. The >=64 rule must be tried first,
// as the generic SRLVconst rule would otherwise capture those amounts too.
// It reports whether v was rewritten.
func rewriteValueMIPS64_OpMIPS64SRLV(v *Value, config *Config) bool {
	b := v.Block
	_ = b // b is declared by the generator for uniformity; unused here.
	// match: (SRLV _ (MOVVconst [c]))
	// cond: uint64(c)>=64
	// result: (MOVVconst [0])
	for {
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_1.AuxInt
		if !(uint64(c) >= 64) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	// match: (SRLV x (MOVVconst [c]))
	// cond:
	// result: (SRLVconst x [c])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpMIPS64SRLVconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SRLVconst folds a constant logical right shift
// of a constant: (SRLVconst [c] (MOVVconst [d])) shifts d as an unsigned
// 64-bit value (zero-filling) and stores the result as an int64.
// It reports whether v was rewritten.
func rewriteValueMIPS64_OpMIPS64SRLVconst(v *Value, config *Config) bool {
	b := v.Block
	_ = b // b is declared by the generator for uniformity; unused here.
	// match: (SRLVconst [c] (MOVVconst [d]))
	// cond:
	// result: (MOVVconst [int64(uint64(d)>>uint64(c))])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64(uint64(d) >> uint64(c))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SUBV simplifies 64-bit subtraction: a 32-bit
// constant subtrahend becomes SUBVconst, (SUBV x x) is constant 0, and
// subtracting from constant 0 becomes NEGV.
// It reports whether v was rewritten.
func rewriteValueMIPS64_OpMIPS64SUBV(v *Value, config *Config) bool {
	b := v.Block
	_ = b // b is declared by the generator for uniformity; unused here.
	// match: (SUBV x (MOVVconst [c]))
	// cond: is32Bit(c)
	// result: (SUBVconst [c] x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_1.AuxInt
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64SUBVconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (SUBV x x)
	// cond:
	// result: (MOVVconst [0])
	for {
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	// match: (SUBV (MOVVconst [0]) x)
	// cond:
	// result: (NEGV x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		x := v.Args[1]
		v.reset(OpMIPS64NEGV)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SUBVconst simplifies SUBVconst: subtracting 0
// is the identity (becomes a copy of x), subtracting from a constant folds
// to d-c, and nested SUBVconst/ADDVconst chains collapse into a single
// ADDVconst when the combined offset still fits in 32 bits.
// It reports whether v was rewritten.
func rewriteValueMIPS64_OpMIPS64SUBVconst(v *Value, config *Config) bool {
	b := v.Block
	_ = b // b is declared by the generator for uniformity; unused here.
	// match: (SUBVconst [0]  x)
	// cond:
	// result: x
	for {
		if v.AuxInt != 0 {
			break
		}
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (SUBVconst [c] (MOVVconst [d]))
	// cond:
	// result: (MOVVconst [d-c])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = d - c
		return true
	}
	// match: (SUBVconst [c] (SUBVconst [d] x))
	// cond: is32Bit(-c-d)
	// result: (ADDVconst [-c-d] x)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64SUBVconst {
			break
		}
		d := v_0.AuxInt
		x := v_0.Args[0]
		if !(is32Bit(-c - d)) {
			break
		}
		v.reset(OpMIPS64ADDVconst)
		v.AuxInt = -c - d
		v.AddArg(x)
		return true
	}
	// match: (SUBVconst [c] (ADDVconst [d] x))
	// cond: is32Bit(-c+d)
	// result: (ADDVconst [-c+d] x)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		d := v_0.AuxInt
		x := v_0.Args[0]
		if !(is32Bit(-c + d)) {
			break
		}
		v.reset(OpMIPS64ADDVconst)
		v.AuxInt = -c + d
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64XOR canonicalizes XOR: a 32-bit-immediate
// constant operand (either side; XOR is commutative) becomes XORconst, and
// (XOR x x) simplifies to constant 0.
// It reports whether v was rewritten.
func rewriteValueMIPS64_OpMIPS64XOR(v *Value, config *Config) bool {
	b := v.Block
	_ = b // b is declared by the generator for uniformity; unused here.
	// match: (XOR (MOVVconst [c]) x)
	// cond: is32Bit(c)
	// result: (XORconst [c] x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64XORconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (XOR x (MOVVconst [c]))
	// cond: is32Bit(c)
	// result: (XORconst [c] x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_1.AuxInt
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64XORconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (XOR x x)
	// cond:
	// result: (MOVVconst [0])
	for {
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64XORconst simplifies XORconst: XOR with 0 is the
// identity (becomes a copy of x), XOR with -1 is bitwise NOT (expressed as
// NORconst [0]), XOR of two constants folds, and nested XORconsts merge when
// the combined constant still fits in 32 bits.
// It reports whether v was rewritten.
func rewriteValueMIPS64_OpMIPS64XORconst(v *Value, config *Config) bool {
	b := v.Block
	_ = b // b is declared by the generator for uniformity; unused here.
	// match: (XORconst [0]  x)
	// cond:
	// result: x
	for {
		if v.AuxInt != 0 {
			break
		}
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (XORconst [-1] x)
	// cond:
	// result: (NORconst [0] x)
	for {
		if v.AuxInt != -1 {
			break
		}
		x := v.Args[0]
		v.reset(OpMIPS64NORconst)
		v.AuxInt = 0
		v.AddArg(x)
		return true
	}
	// match: (XORconst [c] (MOVVconst [d]))
	// cond:
	// result: (MOVVconst [c^d])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = c ^ d
		return true
	}
	// match: (XORconst [c] (XORconst [d] x))
	// cond: is32Bit(c^d)
	// result: (XORconst [c^d] x)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64XORconst {
			break
		}
		d := v_0.AuxInt
		x := v_0.Args[0]
		if !(is32Bit(c ^ d)) {
			break
		}
		v.reset(OpMIPS64XORconst)
		v.AuxInt = c ^ d
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMod16 lowers the generic 16-bit signed modulus to
// machine ops: sign-extend both operands to 64 bits, run DIVV (which
// produces a (mod, div) tuple), and take Select0 (the remainder).
// This lowering is unconditional, so it always returns true.
func rewriteValueMIPS64_OpMod16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Mod16 x y)
	// cond:
	// result: (Select0 (DIVV (SignExt16to64 x) (SignExt16to64 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Line, OpMIPS64DIVV, MakeTuple(config.fe.TypeInt64(), config.fe.TypeInt64()))
		v1 := b.NewValue0(v.Line, OpSignExt16to64, config.fe.TypeInt64())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpSignExt16to64, config.fe.TypeInt64())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpMod16u lowers the generic 16-bit unsigned modulus to
// machine ops: zero-extend both operands to 64 bits, run DIVVU (which
// produces a (mod, div) tuple), and take Select0 (the remainder).
// This lowering is unconditional, so it always returns true.
func rewriteValueMIPS64_OpMod16u(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Mod16u x y)
	// cond:
	// result: (Select0 (DIVVU (ZeroExt16to64 x) (ZeroExt16to64 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Line, OpMIPS64DIVVU, MakeTuple(config.fe.TypeUInt64(), config.fe.TypeUInt64()))
		v1 := b.NewValue0(v.Line, OpZeroExt16to64, config.fe.TypeUInt64())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpZeroExt16to64, config.fe.TypeUInt64())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpMod32 lowers the generic Mod32 op: both 32-bit
// operands are sign-extended to 64 bits and divided with DIVV; Select0
// extracts the remainder element of the DIVV result tuple.
// Autogenerated from gen/MIPS64.rules; do not edit by hand.
func rewriteValueMIPS64_OpMod32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Mod32 x y)
	// cond:
	// result: (Select0 (DIVV (SignExt32to64 x) (SignExt32to64 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Line, OpMIPS64DIVV, MakeTuple(config.fe.TypeInt64(), config.fe.TypeInt64()))
		v1 := b.NewValue0(v.Line, OpSignExt32to64, config.fe.TypeInt64())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpSignExt32to64, config.fe.TypeInt64())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpMod32u lowers the generic Mod32u op: both 32-bit
// operands are zero-extended to 64 bits and divided with DIVVU; Select0
// extracts the remainder element of the DIVVU result tuple.
// Autogenerated from gen/MIPS64.rules; do not edit by hand.
func rewriteValueMIPS64_OpMod32u(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Mod32u x y)
	// cond:
	// result: (Select0 (DIVVU (ZeroExt32to64 x) (ZeroExt32to64 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Line, OpMIPS64DIVVU, MakeTuple(config.fe.TypeUInt64(), config.fe.TypeUInt64()))
		v1 := b.NewValue0(v.Line, OpZeroExt32to64, config.fe.TypeUInt64())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpZeroExt32to64, config.fe.TypeUInt64())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpMod64 lowers the generic Mod64 op directly to DIVV
// (no widening needed at 64 bits); Select0 extracts the remainder element
// of the DIVV result tuple.
// Autogenerated from gen/MIPS64.rules; do not edit by hand.
func rewriteValueMIPS64_OpMod64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Mod64 x y)
	// cond:
	// result: (Select0 (DIVV x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Line, OpMIPS64DIVV, MakeTuple(config.fe.TypeInt64(), config.fe.TypeInt64()))
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpMod64u lowers the generic Mod64u op directly to
// DIVVU (no widening needed at 64 bits); Select0 extracts the remainder
// element of the DIVVU result tuple.
// Autogenerated from gen/MIPS64.rules; do not edit by hand.
func rewriteValueMIPS64_OpMod64u(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Mod64u x y)
	// cond:
	// result: (Select0 (DIVVU x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Line, OpMIPS64DIVVU, MakeTuple(config.fe.TypeUInt64(), config.fe.TypeUInt64()))
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpMod8 lowers the generic Mod8 op: both 8-bit
// operands are sign-extended to 64 bits and divided with DIVV; Select0
// extracts the remainder element of the DIVV result tuple.
// Autogenerated from gen/MIPS64.rules; do not edit by hand.
func rewriteValueMIPS64_OpMod8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Mod8 x y)
	// cond:
	// result: (Select0 (DIVV (SignExt8to64 x) (SignExt8to64 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Line, OpMIPS64DIVV, MakeTuple(config.fe.TypeInt64(), config.fe.TypeInt64()))
		v1 := b.NewValue0(v.Line, OpSignExt8to64, config.fe.TypeInt64())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpSignExt8to64, config.fe.TypeInt64())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpMod8u lowers the generic Mod8u op: both 8-bit
// operands are zero-extended to 64 bits and divided with DIVVU; Select0
// extracts the remainder element of the DIVVU result tuple.
// Autogenerated from gen/MIPS64.rules; do not edit by hand.
func rewriteValueMIPS64_OpMod8u(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Mod8u x y)
	// cond:
	// result: (Select0 (DIVVU (ZeroExt8to64 x) (ZeroExt8to64 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Line, OpMIPS64DIVVU, MakeTuple(config.fe.TypeUInt64(), config.fe.TypeUInt64()))
		v1 := b.NewValue0(v.Line, OpZeroExt8to64, config.fe.TypeUInt64())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpZeroExt8to64, config.fe.TypeUInt64())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpMove lowers generic Move (memmove) ops. The cases
// are tried in order and the first whose size/alignment condition holds
// wins (each `for { ... break }` block is one rule; `break` means "rule did
// not apply, try the next"). Small fixed-size moves are expanded into
// explicit load/store chains using the widest access the alignment allows
// (MOVV 8-byte, MOVW 4-byte, MOVH 2-byte, MOVB 1-byte), threading the memory
// argument through each store. Anything larger than 24 bytes, or not
// 8-byte-aligned outside the special-cased sizes, falls back to LoweredMove.
// Autogenerated from gen/MIPS64.rules; do not edit by hand.
func rewriteValueMIPS64_OpMove(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Move [s] _ _ mem)
	// cond: SizeAndAlign(s).Size() == 0
	// result: mem
	for {
		s := v.AuxInt
		mem := v.Args[2]
		if !(SizeAndAlign(s).Size() == 0) {
			break
		}
		v.reset(OpCopy)
		v.Type = mem.Type
		v.AddArg(mem)
		return true
	}
	// match: (Move [s] dst src mem)
	// cond: SizeAndAlign(s).Size() == 1
	// result: (MOVBstore dst (MOVBload src mem) mem)
	for {
		s := v.AuxInt
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		if !(SizeAndAlign(s).Size() == 1) {
			break
		}
		v.reset(OpMIPS64MOVBstore)
		v.AddArg(dst)
		v0 := b.NewValue0(v.Line, OpMIPS64MOVBload, config.fe.TypeInt8())
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	// match: (Move [s] dst src mem)
	// cond: SizeAndAlign(s).Size() == 2 && SizeAndAlign(s).Align()%2 == 0
	// result: (MOVHstore dst (MOVHload src mem) mem)
	for {
		s := v.AuxInt
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		if !(SizeAndAlign(s).Size() == 2 && SizeAndAlign(s).Align()%2 == 0) {
			break
		}
		v.reset(OpMIPS64MOVHstore)
		v.AddArg(dst)
		v0 := b.NewValue0(v.Line, OpMIPS64MOVHload, config.fe.TypeInt16())
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	// match: (Move [s] dst src mem)
	// cond: SizeAndAlign(s).Size() == 2
	// result: (MOVBstore [1] dst (MOVBload [1] src mem) 		(MOVBstore dst (MOVBload src mem) mem))
	for {
		s := v.AuxInt
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		if !(SizeAndAlign(s).Size() == 2) {
			break
		}
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = 1
		v.AddArg(dst)
		v0 := b.NewValue0(v.Line, OpMIPS64MOVBload, config.fe.TypeInt8())
		v0.AuxInt = 1
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpMIPS64MOVBstore, TypeMem)
		v1.AddArg(dst)
		v2 := b.NewValue0(v.Line, OpMIPS64MOVBload, config.fe.TypeInt8())
		v2.AddArg(src)
		v2.AddArg(mem)
		v1.AddArg(v2)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	// match: (Move [s] dst src mem)
	// cond: SizeAndAlign(s).Size() == 4 && SizeAndAlign(s).Align()%4 == 0
	// result: (MOVWstore dst (MOVWload src mem) mem)
	for {
		s := v.AuxInt
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		if !(SizeAndAlign(s).Size() == 4 && SizeAndAlign(s).Align()%4 == 0) {
			break
		}
		v.reset(OpMIPS64MOVWstore)
		v.AddArg(dst)
		v0 := b.NewValue0(v.Line, OpMIPS64MOVWload, config.fe.TypeInt32())
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	// match: (Move [s] dst src mem)
	// cond: SizeAndAlign(s).Size() == 4 && SizeAndAlign(s).Align()%2 == 0
	// result: (MOVHstore [2] dst (MOVHload [2] src mem) 		(MOVHstore dst (MOVHload src mem) mem))
	for {
		s := v.AuxInt
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		if !(SizeAndAlign(s).Size() == 4 && SizeAndAlign(s).Align()%2 == 0) {
			break
		}
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = 2
		v.AddArg(dst)
		v0 := b.NewValue0(v.Line, OpMIPS64MOVHload, config.fe.TypeInt16())
		v0.AuxInt = 2
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpMIPS64MOVHstore, TypeMem)
		v1.AddArg(dst)
		v2 := b.NewValue0(v.Line, OpMIPS64MOVHload, config.fe.TypeInt16())
		v2.AddArg(src)
		v2.AddArg(mem)
		v1.AddArg(v2)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	// match: (Move [s] dst src mem)
	// cond: SizeAndAlign(s).Size() == 4
	// result: (MOVBstore [3] dst (MOVBload [3] src mem) 		(MOVBstore [2] dst (MOVBload [2] src mem) 			(MOVBstore [1] dst (MOVBload [1] src mem) 				(MOVBstore dst (MOVBload src mem) mem))))
	for {
		s := v.AuxInt
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		if !(SizeAndAlign(s).Size() == 4) {
			break
		}
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = 3
		v.AddArg(dst)
		v0 := b.NewValue0(v.Line, OpMIPS64MOVBload, config.fe.TypeInt8())
		v0.AuxInt = 3
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpMIPS64MOVBstore, TypeMem)
		v1.AuxInt = 2
		v1.AddArg(dst)
		v2 := b.NewValue0(v.Line, OpMIPS64MOVBload, config.fe.TypeInt8())
		v2.AuxInt = 2
		v2.AddArg(src)
		v2.AddArg(mem)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpMIPS64MOVBstore, TypeMem)
		v3.AuxInt = 1
		v3.AddArg(dst)
		v4 := b.NewValue0(v.Line, OpMIPS64MOVBload, config.fe.TypeInt8())
		v4.AuxInt = 1
		v4.AddArg(src)
		v4.AddArg(mem)
		v3.AddArg(v4)
		v5 := b.NewValue0(v.Line, OpMIPS64MOVBstore, TypeMem)
		v5.AddArg(dst)
		v6 := b.NewValue0(v.Line, OpMIPS64MOVBload, config.fe.TypeInt8())
		v6.AddArg(src)
		v6.AddArg(mem)
		v5.AddArg(v6)
		v5.AddArg(mem)
		v3.AddArg(v5)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
	// match: (Move [s] dst src mem)
	// cond: SizeAndAlign(s).Size() == 8 && SizeAndAlign(s).Align()%8 == 0
	// result: (MOVVstore dst (MOVVload src mem) mem)
	for {
		s := v.AuxInt
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		if !(SizeAndAlign(s).Size() == 8 && SizeAndAlign(s).Align()%8 == 0) {
			break
		}
		v.reset(OpMIPS64MOVVstore)
		v.AddArg(dst)
		v0 := b.NewValue0(v.Line, OpMIPS64MOVVload, config.fe.TypeUInt64())
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	// match: (Move [s] dst src mem)
	// cond: SizeAndAlign(s).Size() == 8 && SizeAndAlign(s).Align()%4 == 0
	// result: (MOVWstore [4] dst (MOVWload [4] src mem) 		(MOVWstore dst (MOVWload src mem) mem))
	for {
		s := v.AuxInt
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		if !(SizeAndAlign(s).Size() == 8 && SizeAndAlign(s).Align()%4 == 0) {
			break
		}
		v.reset(OpMIPS64MOVWstore)
		v.AuxInt = 4
		v.AddArg(dst)
		v0 := b.NewValue0(v.Line, OpMIPS64MOVWload, config.fe.TypeInt32())
		v0.AuxInt = 4
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpMIPS64MOVWstore, TypeMem)
		v1.AddArg(dst)
		v2 := b.NewValue0(v.Line, OpMIPS64MOVWload, config.fe.TypeInt32())
		v2.AddArg(src)
		v2.AddArg(mem)
		v1.AddArg(v2)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	// match: (Move [s] dst src mem)
	// cond: SizeAndAlign(s).Size() == 8 && SizeAndAlign(s).Align()%2 == 0
	// result: (MOVHstore [6] dst (MOVHload [6] src mem) 		(MOVHstore [4] dst (MOVHload [4] src mem) 			(MOVHstore [2] dst (MOVHload [2] src mem) 				(MOVHstore dst (MOVHload src mem) mem))))
	for {
		s := v.AuxInt
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		if !(SizeAndAlign(s).Size() == 8 && SizeAndAlign(s).Align()%2 == 0) {
			break
		}
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = 6
		v.AddArg(dst)
		v0 := b.NewValue0(v.Line, OpMIPS64MOVHload, config.fe.TypeInt16())
		v0.AuxInt = 6
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpMIPS64MOVHstore, TypeMem)
		v1.AuxInt = 4
		v1.AddArg(dst)
		v2 := b.NewValue0(v.Line, OpMIPS64MOVHload, config.fe.TypeInt16())
		v2.AuxInt = 4
		v2.AddArg(src)
		v2.AddArg(mem)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpMIPS64MOVHstore, TypeMem)
		v3.AuxInt = 2
		v3.AddArg(dst)
		v4 := b.NewValue0(v.Line, OpMIPS64MOVHload, config.fe.TypeInt16())
		v4.AuxInt = 2
		v4.AddArg(src)
		v4.AddArg(mem)
		v3.AddArg(v4)
		v5 := b.NewValue0(v.Line, OpMIPS64MOVHstore, TypeMem)
		v5.AddArg(dst)
		v6 := b.NewValue0(v.Line, OpMIPS64MOVHload, config.fe.TypeInt16())
		v6.AddArg(src)
		v6.AddArg(mem)
		v5.AddArg(v6)
		v5.AddArg(mem)
		v3.AddArg(v5)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
	// match: (Move [s] dst src mem)
	// cond: SizeAndAlign(s).Size() == 3
	// result: (MOVBstore [2] dst (MOVBload [2] src mem) 		(MOVBstore [1] dst (MOVBload [1] src mem) 			(MOVBstore dst (MOVBload src mem) mem)))
	for {
		s := v.AuxInt
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		if !(SizeAndAlign(s).Size() == 3) {
			break
		}
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = 2
		v.AddArg(dst)
		v0 := b.NewValue0(v.Line, OpMIPS64MOVBload, config.fe.TypeInt8())
		v0.AuxInt = 2
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpMIPS64MOVBstore, TypeMem)
		v1.AuxInt = 1
		v1.AddArg(dst)
		v2 := b.NewValue0(v.Line, OpMIPS64MOVBload, config.fe.TypeInt8())
		v2.AuxInt = 1
		v2.AddArg(src)
		v2.AddArg(mem)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpMIPS64MOVBstore, TypeMem)
		v3.AddArg(dst)
		v4 := b.NewValue0(v.Line, OpMIPS64MOVBload, config.fe.TypeInt8())
		v4.AddArg(src)
		v4.AddArg(mem)
		v3.AddArg(v4)
		v3.AddArg(mem)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
	// match: (Move [s] dst src mem)
	// cond: SizeAndAlign(s).Size() == 6 && SizeAndAlign(s).Align()%2 == 0
	// result: (MOVHstore [4] dst (MOVHload [4] src mem) 		(MOVHstore [2] dst (MOVHload [2] src mem) 			(MOVHstore dst (MOVHload src mem) mem)))
	for {
		s := v.AuxInt
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		if !(SizeAndAlign(s).Size() == 6 && SizeAndAlign(s).Align()%2 == 0) {
			break
		}
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = 4
		v.AddArg(dst)
		v0 := b.NewValue0(v.Line, OpMIPS64MOVHload, config.fe.TypeInt16())
		v0.AuxInt = 4
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpMIPS64MOVHstore, TypeMem)
		v1.AuxInt = 2
		v1.AddArg(dst)
		v2 := b.NewValue0(v.Line, OpMIPS64MOVHload, config.fe.TypeInt16())
		v2.AuxInt = 2
		v2.AddArg(src)
		v2.AddArg(mem)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpMIPS64MOVHstore, TypeMem)
		v3.AddArg(dst)
		v4 := b.NewValue0(v.Line, OpMIPS64MOVHload, config.fe.TypeInt16())
		v4.AddArg(src)
		v4.AddArg(mem)
		v3.AddArg(v4)
		v3.AddArg(mem)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
	// match: (Move [s] dst src mem)
	// cond: SizeAndAlign(s).Size() == 12 && SizeAndAlign(s).Align()%4 == 0
	// result: (MOVWstore [8] dst (MOVWload [8] src mem) 		(MOVWstore [4] dst (MOVWload [4] src mem) 			(MOVWstore dst (MOVWload src mem) mem)))
	for {
		s := v.AuxInt
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		if !(SizeAndAlign(s).Size() == 12 && SizeAndAlign(s).Align()%4 == 0) {
			break
		}
		v.reset(OpMIPS64MOVWstore)
		v.AuxInt = 8
		v.AddArg(dst)
		v0 := b.NewValue0(v.Line, OpMIPS64MOVWload, config.fe.TypeInt32())
		v0.AuxInt = 8
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpMIPS64MOVWstore, TypeMem)
		v1.AuxInt = 4
		v1.AddArg(dst)
		v2 := b.NewValue0(v.Line, OpMIPS64MOVWload, config.fe.TypeInt32())
		v2.AuxInt = 4
		v2.AddArg(src)
		v2.AddArg(mem)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpMIPS64MOVWstore, TypeMem)
		v3.AddArg(dst)
		v4 := b.NewValue0(v.Line, OpMIPS64MOVWload, config.fe.TypeInt32())
		v4.AddArg(src)
		v4.AddArg(mem)
		v3.AddArg(v4)
		v3.AddArg(mem)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
	// match: (Move [s] dst src mem)
	// cond: SizeAndAlign(s).Size() == 16 && SizeAndAlign(s).Align()%8 == 0
	// result: (MOVVstore [8] dst (MOVVload [8] src mem) 		(MOVVstore dst (MOVVload src mem) mem))
	for {
		s := v.AuxInt
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		if !(SizeAndAlign(s).Size() == 16 && SizeAndAlign(s).Align()%8 == 0) {
			break
		}
		v.reset(OpMIPS64MOVVstore)
		v.AuxInt = 8
		v.AddArg(dst)
		v0 := b.NewValue0(v.Line, OpMIPS64MOVVload, config.fe.TypeUInt64())
		v0.AuxInt = 8
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpMIPS64MOVVstore, TypeMem)
		v1.AddArg(dst)
		v2 := b.NewValue0(v.Line, OpMIPS64MOVVload, config.fe.TypeUInt64())
		v2.AddArg(src)
		v2.AddArg(mem)
		v1.AddArg(v2)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	// match: (Move [s] dst src mem)
	// cond: SizeAndAlign(s).Size() == 24 && SizeAndAlign(s).Align()%8 == 0
	// result: (MOVVstore [16] dst (MOVVload [16] src mem) 		(MOVVstore [8] dst (MOVVload [8] src mem) 			(MOVVstore dst (MOVVload src mem) mem)))
	for {
		s := v.AuxInt
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		if !(SizeAndAlign(s).Size() == 24 && SizeAndAlign(s).Align()%8 == 0) {
			break
		}
		v.reset(OpMIPS64MOVVstore)
		v.AuxInt = 16
		v.AddArg(dst)
		v0 := b.NewValue0(v.Line, OpMIPS64MOVVload, config.fe.TypeUInt64())
		v0.AuxInt = 16
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpMIPS64MOVVstore, TypeMem)
		v1.AuxInt = 8
		v1.AddArg(dst)
		v2 := b.NewValue0(v.Line, OpMIPS64MOVVload, config.fe.TypeUInt64())
		v2.AuxInt = 8
		v2.AddArg(src)
		v2.AddArg(mem)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpMIPS64MOVVstore, TypeMem)
		v3.AddArg(dst)
		v4 := b.NewValue0(v.Line, OpMIPS64MOVVload, config.fe.TypeUInt64())
		v4.AddArg(src)
		v4.AddArg(mem)
		v3.AddArg(v4)
		v3.AddArg(mem)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
	// match: (Move [s] dst src mem)
	// cond: SizeAndAlign(s).Size() > 24 || SizeAndAlign(s).Align()%8 != 0
	// result: (LoweredMove [SizeAndAlign(s).Align()] 		dst 		src 		(ADDVconst <src.Type> src [SizeAndAlign(s).Size()-moveSize(SizeAndAlign(s).Align(), config)]) 		mem)
	for {
		s := v.AuxInt
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		if !(SizeAndAlign(s).Size() > 24 || SizeAndAlign(s).Align()%8 != 0) {
			break
		}
		v.reset(OpMIPS64LoweredMove)
		v.AuxInt = SizeAndAlign(s).Align()
		v.AddArg(dst)
		v.AddArg(src)
		// The third argument is the address of the last element to move,
		// computed as src + size - one move-unit (moveSize picks the unit
		// from the alignment).
		v0 := b.NewValue0(v.Line, OpMIPS64ADDVconst, src.Type)
		v0.AuxInt = SizeAndAlign(s).Size() - moveSize(SizeAndAlign(s).Align(), config)
		v0.AddArg(src)
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMul16 lowers the generic Mul16 op to MULVU;
// Select1 extracts the (low-bits) product element of the MULVU result
// tuple. Upper bits beyond 16 are irrelevant for a 16-bit multiply.
// Autogenerated from gen/MIPS64.rules; do not edit by hand.
func rewriteValueMIPS64_OpMul16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Mul16 x y)
	// cond:
	// result: (Select1 (MULVU x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Line, OpMIPS64MULVU, MakeTuple(config.fe.TypeUInt64(), config.fe.TypeUInt64()))
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpMul32 lowers the generic Mul32 op to MULVU;
// Select1 extracts the (low-bits) product element of the MULVU result
// tuple. Upper bits beyond 32 are irrelevant for a 32-bit multiply.
// Autogenerated from gen/MIPS64.rules; do not edit by hand.
func rewriteValueMIPS64_OpMul32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Mul32 x y)
	// cond:
	// result: (Select1 (MULVU x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Line, OpMIPS64MULVU, MakeTuple(config.fe.TypeUInt64(), config.fe.TypeUInt64()))
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpMul32F lowers the generic Mul32F op directly to the
// single-precision float multiply MULF.
// Autogenerated from gen/MIPS64.rules; do not edit by hand.
func rewriteValueMIPS64_OpMul32F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Mul32F x y)
	// cond:
	// result: (MULF x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64MULF)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpMul64 lowers the generic Mul64 op to MULVU;
// Select1 extracts the low-64-bit product element of the MULVU result
// tuple (signed and unsigned multiply agree on the low 64 bits).
// Autogenerated from gen/MIPS64.rules; do not edit by hand.
func rewriteValueMIPS64_OpMul64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Mul64 x y)
	// cond:
	// result: (Select1 (MULVU x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Line, OpMIPS64MULVU, MakeTuple(config.fe.TypeUInt64(), config.fe.TypeUInt64()))
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpMul64F lowers the generic Mul64F op directly to the
// double-precision float multiply MULD.
// Autogenerated from gen/MIPS64.rules; do not edit by hand.
func rewriteValueMIPS64_OpMul64F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Mul64F x y)
	// cond:
	// result: (MULD x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64MULD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpMul8 lowers the generic Mul8 op to MULVU;
// Select1 extracts the (low-bits) product element of the MULVU result
// tuple. Upper bits beyond 8 are irrelevant for an 8-bit multiply.
// Autogenerated from gen/MIPS64.rules; do not edit by hand.
func rewriteValueMIPS64_OpMul8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Mul8 x y)
	// cond:
	// result: (Select1 (MULVU x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Line, OpMIPS64MULVU, MakeTuple(config.fe.TypeUInt64(), config.fe.TypeUInt64()))
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpNeg16 lowers the generic Neg16 op to the 64-bit
// negate NEGV (only the low 16 bits of the result are meaningful).
// Autogenerated from gen/MIPS64.rules; do not edit by hand.
func rewriteValueMIPS64_OpNeg16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Neg16 x)
	// cond:
	// result: (NEGV x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64NEGV)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpNeg32 lowers the generic Neg32 op to the 64-bit
// negate NEGV (only the low 32 bits of the result are meaningful).
// Autogenerated from gen/MIPS64.rules; do not edit by hand.
func rewriteValueMIPS64_OpNeg32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Neg32 x)
	// cond:
	// result: (NEGV x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64NEGV)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpNeg32F lowers the generic Neg32F op directly to the
// single-precision float negate NEGF.
// Autogenerated from gen/MIPS64.rules; do not edit by hand.
func rewriteValueMIPS64_OpNeg32F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Neg32F x)
	// cond:
	// result: (NEGF x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64NEGF)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpNeg64 lowers the generic Neg64 op directly to the
// 64-bit negate NEGV.
// Autogenerated from gen/MIPS64.rules; do not edit by hand.
func rewriteValueMIPS64_OpNeg64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Neg64 x)
	// cond:
	// result: (NEGV x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64NEGV)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpNeg64F lowers the generic Neg64F op directly to the
// double-precision float negate NEGD.
// Autogenerated from gen/MIPS64.rules; do not edit by hand.
func rewriteValueMIPS64_OpNeg64F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Neg64F x)
	// cond:
	// result: (NEGD x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64NEGD)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpNeg8 lowers the generic Neg8 op to the 64-bit
// negate NEGV (only the low 8 bits of the result are meaningful).
// Autogenerated from gen/MIPS64.rules; do not edit by hand.
func rewriteValueMIPS64_OpNeg8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Neg8 x)
	// cond:
	// result: (NEGV x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64NEGV)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpNeq16 lowers the generic Neq16 op: zero-extend the
// operands, XOR them (nonzero iff unequal), then compare the XOR result
// against zero with SGTU (unsigned x != 0 <=> x >u 0).
//
// NOTE(review): x is widened with ZeroExt16to32 while y is widened with
// ZeroExt16to64 (the match comment shows the same asymmetry, so this
// mirrors gen/MIPS64.rules rather than a generator bug). Presumably both
// extensions clear all upper bits on this target, making the XOR correct
// either way — confirm against the rules file before changing anything,
// and fix it there, not here.
// Autogenerated from gen/MIPS64.rules; do not edit by hand.
func rewriteValueMIPS64_OpNeq16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Neq16 x y)
	// cond:
	// result: (SGTU (XOR (ZeroExt16to32 x) (ZeroExt16to64 y)) (MOVVconst [0]))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Line, OpMIPS64XOR, config.fe.TypeUInt64())
		v1 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpZeroExt16to64, config.fe.TypeUInt64())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Line, OpMIPS64MOVVconst, config.fe.TypeUInt64())
		v3.AuxInt = 0
		v.AddArg(v3)
		return true
	}
}
// rewriteValueMIPS64_OpNeq32 lowers the generic Neq32 op: zero-extend both
// operands to 64 bits, XOR them (nonzero iff unequal), then compare the
// XOR result against zero with SGTU (unsigned x != 0 <=> x >u 0).
// Autogenerated from gen/MIPS64.rules; do not edit by hand.
func rewriteValueMIPS64_OpNeq32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Neq32 x y)
	// cond:
	// result: (SGTU (XOR (ZeroExt32to64 x) (ZeroExt32to64 y)) (MOVVconst [0]))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Line, OpMIPS64XOR, config.fe.TypeUInt64())
		v1 := b.NewValue0(v.Line, OpZeroExt32to64, config.fe.TypeUInt64())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpZeroExt32to64, config.fe.TypeUInt64())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Line, OpMIPS64MOVVconst, config.fe.TypeUInt64())
		v3.AuxInt = 0
		v.AddArg(v3)
		return true
	}
}
// rewriteValueMIPS64_OpNeq32F lowers the generic Neq32F op: compare with
// the single-precision float equality CMPEQF and invert the flag with
// FPFlagFalse (x != y <=> NOT (x == y); this also makes NaN compare
// unequal, since CMPEQF does not set the flag for NaN operands — presumed
// from IEEE semantics, confirm against the MIPS FPU compare definition).
// Autogenerated from gen/MIPS64.rules; do not edit by hand.
func rewriteValueMIPS64_OpNeq32F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Neq32F x y)
	// cond:
	// result: (FPFlagFalse (CMPEQF x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64FPFlagFalse)
		v0 := b.NewValue0(v.Line, OpMIPS64CMPEQF, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpNeq64 lowers the generic Neq64 op: XOR the operands
// (nonzero iff unequal) and compare the result against zero with SGTU
// (unsigned x != 0 <=> x >u 0). No widening needed at 64 bits.
// Autogenerated from gen/MIPS64.rules; do not edit by hand.
func rewriteValueMIPS64_OpNeq64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Neq64 x y)
	// cond:
	// result: (SGTU (XOR x y) (MOVVconst [0]))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Line, OpMIPS64XOR, config.fe.TypeUInt64())
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpMIPS64MOVVconst, config.fe.TypeUInt64())
		v1.AuxInt = 0
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpNeq64F lowers the generic Neq64F op: compare with
// the double-precision float equality CMPEQD and invert the flag with
// FPFlagFalse (x != y <=> NOT (x == y)).
// Autogenerated from gen/MIPS64.rules; do not edit by hand.
func rewriteValueMIPS64_OpNeq64F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Neq64F x y)
	// cond:
	// result: (FPFlagFalse (CMPEQD x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64FPFlagFalse)
		v0 := b.NewValue0(v.Line, OpMIPS64CMPEQD, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpNeq8 lowers the generic Neq8 op: zero-extend both
// operands to 64 bits, XOR them (nonzero iff unequal), then compare the
// XOR result against zero with SGTU (unsigned x != 0 <=> x >u 0).
// Autogenerated from gen/MIPS64.rules; do not edit by hand.
func rewriteValueMIPS64_OpNeq8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Neq8 x y)
	// cond:
	// result: (SGTU (XOR (ZeroExt8to64 x) (ZeroExt8to64 y)) (MOVVconst [0]))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Line, OpMIPS64XOR, config.fe.TypeUInt64())
		v1 := b.NewValue0(v.Line, OpZeroExt8to64, config.fe.TypeUInt64())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpZeroExt8to64, config.fe.TypeUInt64())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Line, OpMIPS64MOVVconst, config.fe.TypeUInt64())
		v3.AuxInt = 0
		v.AddArg(v3)
		return true
	}
}
// rewriteValueMIPS64_OpNeqB lowers the generic NeqB (boolean inequality)
// op to a plain XOR: for 0/1-valued booleans, x XOR y is 1 exactly when
// they differ.
// Autogenerated from gen/MIPS64.rules; do not edit by hand.
func rewriteValueMIPS64_OpNeqB(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (NeqB x y)
	// cond:
	// result: (XOR x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpNeqPtr lowers the generic NeqPtr op the same way as
// Neq64: XOR the pointers (nonzero iff unequal) and compare against zero
// with SGTU (unsigned x != 0 <=> x >u 0).
// Autogenerated from gen/MIPS64.rules; do not edit by hand.
func rewriteValueMIPS64_OpNeqPtr(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (NeqPtr x y)
	// cond:
	// result: (SGTU (XOR x y) (MOVVconst [0]))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Line, OpMIPS64XOR, config.fe.TypeUInt64())
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpMIPS64MOVVconst, config.fe.TypeUInt64())
		v1.AuxInt = 0
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpNilCheck lowers the generic NilCheck op to the
// MIPS64-specific LoweredNilCheck pseudo-op, passing the pointer and
// memory arguments through unchanged.
// Autogenerated from gen/MIPS64.rules; do not edit by hand.
func rewriteValueMIPS64_OpNilCheck(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (NilCheck ptr mem)
	// cond:
	// result: (LoweredNilCheck ptr mem)
	for {
		ptr := v.Args[0]
		mem := v.Args[1]
		v.reset(OpMIPS64LoweredNilCheck)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
}
// rewriteValueMIPS64_OpNot lowers boolean negation to XORconst [1], flipping
// the low bit of the 0/1 boolean representation.
// Autogenerated from gen/MIPS64.rules — change the rule there, not this code.
func rewriteValueMIPS64_OpNot(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Not x)
	// cond:
	// result: (XORconst [1] x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64XORconst)
		v.AuxInt = 1
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpOffPtr lowers pointer-offset arithmetic. Two rule
// arms: an offset from SP becomes a MOVVaddr address computation; any other
// base falls through (via the break) to a plain ADDVconst addition.
// Autogenerated from gen/MIPS64.rules — change the rule there, not this code.
func rewriteValueMIPS64_OpOffPtr(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (OffPtr [off] ptr:(SP))
	// cond:
	// result: (MOVVaddr [off] ptr)
	for {
		off := v.AuxInt
		ptr := v.Args[0]
		if ptr.Op != OpSP {
			break
		}
		v.reset(OpMIPS64MOVVaddr)
		v.AuxInt = off
		v.AddArg(ptr)
		return true
	}
	// match: (OffPtr [off] ptr)
	// cond:
	// result: (ADDVconst [off] ptr)
	for {
		off := v.AuxInt
		ptr := v.Args[0]
		v.reset(OpMIPS64ADDVconst)
		v.AuxInt = off
		v.AddArg(ptr)
		return true
	}
}
// rewriteValueMIPS64_OpOr16 lowers Or16 to the 64-bit machine OR; OR is
// bitwise-parallel, so narrower widths need no extension or masking.
// Autogenerated from gen/MIPS64.rules — change the rule there, not this code.
func rewriteValueMIPS64_OpOr16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Or16 x y)
	// cond:
	// result: (OR x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64OR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpOr32 lowers Or32 to the 64-bit machine OR; OR is
// bitwise-parallel, so narrower widths need no extension or masking.
// Autogenerated from gen/MIPS64.rules — change the rule there, not this code.
func rewriteValueMIPS64_OpOr32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Or32 x y)
	// cond:
	// result: (OR x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64OR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpOr64 lowers Or64 to the native 64-bit machine OR.
// Autogenerated from gen/MIPS64.rules — change the rule there, not this code.
func rewriteValueMIPS64_OpOr64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Or64 x y)
	// cond:
	// result: (OR x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64OR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpOr8 lowers Or8 to the 64-bit machine OR; OR is
// bitwise-parallel, so narrower widths need no extension or masking.
// Autogenerated from gen/MIPS64.rules — change the rule there, not this code.
func rewriteValueMIPS64_OpOr8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Or8 x y)
	// cond:
	// result: (OR x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64OR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpOrB lowers boolean OR to the machine OR; booleans are
// 0/1 in SSA, so bitwise OR computes logical OR.
// Autogenerated from gen/MIPS64.rules — change the rule there, not this code.
func rewriteValueMIPS64_OpOrB(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (OrB x y)
	// cond:
	// result: (OR x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64OR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpRsh16Ux16 lowers a logical right shift of a 16-bit
// value by a 16-bit amount. Both operands are zero-extended to 64 bits for
// SRLV; the result is ANDed with (NEGV (SGTU 64 y)) — all ones when y < 64,
// zero otherwise — so oversized shifts yield 0 as Go's shift semantics
// require (amounts in 16..63 already give 0 naturally via the zero-extend).
// Autogenerated from gen/MIPS64.rules — change the rule there, not this code.
func rewriteValueMIPS64_OpRsh16Ux16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh16Ux16 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <config.fe.TypeUInt64()> [64]) (ZeroExt16to64 y))) (SRLV <t> (ZeroExt16to64 x) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Line, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Line, OpMIPS64SGTU, config.fe.TypeBool())
		v2 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64())
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpZeroExt16to64, config.fe.TypeUInt64())
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Line, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Line, OpZeroExt16to64, config.fe.TypeUInt64())
		v5.AddArg(x)
		v4.AddArg(v5)
		v6 := b.NewValue0(v.Line, OpZeroExt16to64, config.fe.TypeUInt64())
		v6.AddArg(y)
		v4.AddArg(v6)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh16Ux32 lowers a logical right shift of a 16-bit
// value by a 32-bit amount: zero-extend both operands to 64 bits, SRLV, and
// mask with (NEGV (SGTU 64 y)) so shift amounts >= 64 produce 0.
// Autogenerated from gen/MIPS64.rules — change the rule there, not this code.
func rewriteValueMIPS64_OpRsh16Ux32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh16Ux32 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <config.fe.TypeUInt64()> [64]) (ZeroExt32to64 y))) (SRLV <t> (ZeroExt16to64 x) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Line, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Line, OpMIPS64SGTU, config.fe.TypeBool())
		v2 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64())
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpZeroExt32to64, config.fe.TypeUInt64())
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Line, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Line, OpZeroExt16to64, config.fe.TypeUInt64())
		v5.AddArg(x)
		v4.AddArg(v5)
		v6 := b.NewValue0(v.Line, OpZeroExt32to64, config.fe.TypeUInt64())
		v6.AddArg(y)
		v4.AddArg(v6)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh16Ux64 lowers a logical right shift of a 16-bit
// value by a 64-bit amount; y is already full width so only x is
// zero-extended. The (NEGV (SGTU 64 y)) mask zeroes the result for y >= 64.
// Autogenerated from gen/MIPS64.rules — change the rule there, not this code.
func rewriteValueMIPS64_OpRsh16Ux64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh16Ux64 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <config.fe.TypeUInt64()> [64]) y)) (SRLV <t> (ZeroExt16to64 x) y))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Line, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Line, OpMIPS64SGTU, config.fe.TypeBool())
		v2 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64())
		v2.AuxInt = 64
		v1.AddArg(v2)
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Line, OpMIPS64SRLV, t)
		v4 := b.NewValue0(v.Line, OpZeroExt16to64, config.fe.TypeUInt64())
		v4.AddArg(x)
		v3.AddArg(v4)
		v3.AddArg(y)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueMIPS64_OpRsh16Ux8 lowers a logical right shift of a 16-bit
// value by an 8-bit amount: zero-extend both operands to 64 bits, SRLV, and
// mask with (NEGV (SGTU 64 y)) so shift amounts >= 64 produce 0.
// Autogenerated from gen/MIPS64.rules — change the rule there, not this code.
func rewriteValueMIPS64_OpRsh16Ux8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh16Ux8  <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <config.fe.TypeUInt64()> [64]) (ZeroExt8to64  y))) (SRLV <t> (ZeroExt16to64 x) (ZeroExt8to64  y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Line, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Line, OpMIPS64SGTU, config.fe.TypeBool())
		v2 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64())
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpZeroExt8to64, config.fe.TypeUInt64())
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Line, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Line, OpZeroExt16to64, config.fe.TypeUInt64())
		v5.AddArg(x)
		v4.AddArg(v5)
		v6 := b.NewValue0(v.Line, OpZeroExt8to64, config.fe.TypeUInt64())
		v6.AddArg(y)
		v4.AddArg(v6)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh16x16 lowers an arithmetic right shift of a 16-bit
// value by a 16-bit amount. x is sign-extended to 64 bits; the shift amount
// is (OR y (NEGV (SGTU y 63))): when y > 63 the OR forces the amount to all
// ones, so SRAV saturates to the sign bit — Go's semantics for oversized
// signed shifts.
// Autogenerated from gen/MIPS64.rules — change the rule there, not this code.
func rewriteValueMIPS64_OpRsh16x16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh16x16 <t> x y)
	// cond:
	// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (Const64 <config.fe.TypeUInt64()> [63]))) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Line, OpSignExt16to64, config.fe.TypeInt64())
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Line, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Line, OpMIPS64SGTU, config.fe.TypeBool())
		v4 := b.NewValue0(v.Line, OpZeroExt16to64, config.fe.TypeUInt64())
		v4.AddArg(y)
		v3.AddArg(v4)
		v5 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64())
		v5.AuxInt = 63
		v3.AddArg(v5)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v6 := b.NewValue0(v.Line, OpZeroExt16to64, config.fe.TypeUInt64())
		v6.AddArg(y)
		v1.AddArg(v6)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh16x32 lowers an arithmetic right shift of a 16-bit
// value by a 32-bit amount: sign-extend x, zero-extend y, and clamp the shift
// amount via (OR y (NEGV (SGTU y 63))) so amounts > 63 saturate SRAV to the
// sign bit.
// Autogenerated from gen/MIPS64.rules — change the rule there, not this code.
func rewriteValueMIPS64_OpRsh16x32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh16x32 <t> x y)
	// cond:
	// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (Const64 <config.fe.TypeUInt64()> [63]))) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Line, OpSignExt16to64, config.fe.TypeInt64())
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Line, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Line, OpMIPS64SGTU, config.fe.TypeBool())
		v4 := b.NewValue0(v.Line, OpZeroExt32to64, config.fe.TypeUInt64())
		v4.AddArg(y)
		v3.AddArg(v4)
		v5 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64())
		v5.AuxInt = 63
		v3.AddArg(v5)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v6 := b.NewValue0(v.Line, OpZeroExt32to64, config.fe.TypeUInt64())
		v6.AddArg(y)
		v1.AddArg(v6)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh16x64 lowers an arithmetic right shift of a 16-bit
// value by a 64-bit amount; y is already full width. The amount is clamped
// via (OR y (NEGV (SGTU y 63))) so amounts > 63 saturate SRAV to the sign
// bit.
// Autogenerated from gen/MIPS64.rules — change the rule there, not this code.
func rewriteValueMIPS64_OpRsh16x64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh16x64 <t> x y)
	// cond:
	// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU y (Const64 <config.fe.TypeUInt64()> [63]))) y))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Line, OpSignExt16to64, config.fe.TypeInt64())
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Line, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Line, OpMIPS64SGTU, config.fe.TypeBool())
		v3.AddArg(y)
		v4 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64())
		v4.AuxInt = 63
		v3.AddArg(v4)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh16x8 lowers an arithmetic right shift of a 16-bit
// value by an 8-bit amount: sign-extend x, zero-extend y, and clamp the shift
// amount via (OR y (NEGV (SGTU y 63))) so amounts > 63 saturate SRAV to the
// sign bit.
// Autogenerated from gen/MIPS64.rules — change the rule there, not this code.
func rewriteValueMIPS64_OpRsh16x8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh16x8  <t> x y)
	// cond:
	// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64  y) (Const64 <config.fe.TypeUInt64()> [63]))) (ZeroExt8to64  y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Line, OpSignExt16to64, config.fe.TypeInt64())
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Line, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Line, OpMIPS64SGTU, config.fe.TypeBool())
		v4 := b.NewValue0(v.Line, OpZeroExt8to64, config.fe.TypeUInt64())
		v4.AddArg(y)
		v3.AddArg(v4)
		v5 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64())
		v5.AuxInt = 63
		v3.AddArg(v5)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v6 := b.NewValue0(v.Line, OpZeroExt8to64, config.fe.TypeUInt64())
		v6.AddArg(y)
		v1.AddArg(v6)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh32Ux16 lowers a logical right shift of a 32-bit
// value by a 16-bit amount: zero-extend both operands to 64 bits, SRLV, and
// mask with (NEGV (SGTU 64 y)) so shift amounts >= 64 produce 0.
// Autogenerated from gen/MIPS64.rules — change the rule there, not this code.
func rewriteValueMIPS64_OpRsh32Ux16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh32Ux16 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <config.fe.TypeUInt64()> [64]) (ZeroExt16to64 y))) (SRLV <t> (ZeroExt32to64 x) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Line, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Line, OpMIPS64SGTU, config.fe.TypeBool())
		v2 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64())
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpZeroExt16to64, config.fe.TypeUInt64())
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Line, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Line, OpZeroExt32to64, config.fe.TypeUInt64())
		v5.AddArg(x)
		v4.AddArg(v5)
		v6 := b.NewValue0(v.Line, OpZeroExt16to64, config.fe.TypeUInt64())
		v6.AddArg(y)
		v4.AddArg(v6)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh32Ux32 lowers a logical right shift of a 32-bit
// value by a 32-bit amount: zero-extend both operands to 64 bits, SRLV, and
// mask with (NEGV (SGTU 64 y)) so shift amounts >= 64 produce 0.
// Autogenerated from gen/MIPS64.rules — change the rule there, not this code.
func rewriteValueMIPS64_OpRsh32Ux32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh32Ux32 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <config.fe.TypeUInt64()> [64]) (ZeroExt32to64 y))) (SRLV <t> (ZeroExt32to64 x) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Line, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Line, OpMIPS64SGTU, config.fe.TypeBool())
		v2 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64())
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpZeroExt32to64, config.fe.TypeUInt64())
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Line, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Line, OpZeroExt32to64, config.fe.TypeUInt64())
		v5.AddArg(x)
		v4.AddArg(v5)
		v6 := b.NewValue0(v.Line, OpZeroExt32to64, config.fe.TypeUInt64())
		v6.AddArg(y)
		v4.AddArg(v6)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh32Ux64 lowers a logical right shift of a 32-bit
// value by a 64-bit amount; y is already full width so only x is
// zero-extended. The (NEGV (SGTU 64 y)) mask zeroes the result for y >= 64.
// Autogenerated from gen/MIPS64.rules — change the rule there, not this code.
func rewriteValueMIPS64_OpRsh32Ux64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh32Ux64 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <config.fe.TypeUInt64()> [64]) y)) (SRLV <t> (ZeroExt32to64 x) y))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Line, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Line, OpMIPS64SGTU, config.fe.TypeBool())
		v2 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64())
		v2.AuxInt = 64
		v1.AddArg(v2)
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Line, OpMIPS64SRLV, t)
		v4 := b.NewValue0(v.Line, OpZeroExt32to64, config.fe.TypeUInt64())
		v4.AddArg(x)
		v3.AddArg(v4)
		v3.AddArg(y)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueMIPS64_OpRsh32Ux8 lowers a logical right shift of a 32-bit
// value by an 8-bit amount: zero-extend both operands to 64 bits, SRLV, and
// mask with (NEGV (SGTU 64 y)) so shift amounts >= 64 produce 0.
// Autogenerated from gen/MIPS64.rules — change the rule there, not this code.
func rewriteValueMIPS64_OpRsh32Ux8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh32Ux8  <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <config.fe.TypeUInt64()> [64]) (ZeroExt8to64  y))) (SRLV <t> (ZeroExt32to64 x) (ZeroExt8to64  y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Line, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Line, OpMIPS64SGTU, config.fe.TypeBool())
		v2 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64())
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpZeroExt8to64, config.fe.TypeUInt64())
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Line, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Line, OpZeroExt32to64, config.fe.TypeUInt64())
		v5.AddArg(x)
		v4.AddArg(v5)
		v6 := b.NewValue0(v.Line, OpZeroExt8to64, config.fe.TypeUInt64())
		v6.AddArg(y)
		v4.AddArg(v6)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh32x16 lowers an arithmetic right shift of a 32-bit
// value by a 16-bit amount: sign-extend x, zero-extend y, and clamp the shift
// amount via (OR y (NEGV (SGTU y 63))) so amounts > 63 saturate SRAV to the
// sign bit.
// Autogenerated from gen/MIPS64.rules — change the rule there, not this code.
func rewriteValueMIPS64_OpRsh32x16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh32x16 <t> x y)
	// cond:
	// result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (Const64 <config.fe.TypeUInt64()> [63]))) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Line, OpSignExt32to64, config.fe.TypeInt64())
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Line, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Line, OpMIPS64SGTU, config.fe.TypeBool())
		v4 := b.NewValue0(v.Line, OpZeroExt16to64, config.fe.TypeUInt64())
		v4.AddArg(y)
		v3.AddArg(v4)
		v5 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64())
		v5.AuxInt = 63
		v3.AddArg(v5)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v6 := b.NewValue0(v.Line, OpZeroExt16to64, config.fe.TypeUInt64())
		v6.AddArg(y)
		v1.AddArg(v6)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh32x32 lowers an arithmetic right shift of a 32-bit
// value by a 32-bit amount: sign-extend x, zero-extend y, and clamp the shift
// amount via (OR y (NEGV (SGTU y 63))) so amounts > 63 saturate SRAV to the
// sign bit.
// Autogenerated from gen/MIPS64.rules — change the rule there, not this code.
func rewriteValueMIPS64_OpRsh32x32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh32x32 <t> x y)
	// cond:
	// result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (Const64 <config.fe.TypeUInt64()> [63]))) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Line, OpSignExt32to64, config.fe.TypeInt64())
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Line, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Line, OpMIPS64SGTU, config.fe.TypeBool())
		v4 := b.NewValue0(v.Line, OpZeroExt32to64, config.fe.TypeUInt64())
		v4.AddArg(y)
		v3.AddArg(v4)
		v5 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64())
		v5.AuxInt = 63
		v3.AddArg(v5)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v6 := b.NewValue0(v.Line, OpZeroExt32to64, config.fe.TypeUInt64())
		v6.AddArg(y)
		v1.AddArg(v6)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh32x64 lowers an arithmetic right shift of a 32-bit
// value by a 64-bit amount; y is already full width. The amount is clamped
// via (OR y (NEGV (SGTU y 63))) so amounts > 63 saturate SRAV to the sign
// bit.
// Autogenerated from gen/MIPS64.rules — change the rule there, not this code.
func rewriteValueMIPS64_OpRsh32x64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh32x64 <t> x y)
	// cond:
	// result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU y (Const64 <config.fe.TypeUInt64()> [63]))) y))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Line, OpSignExt32to64, config.fe.TypeInt64())
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Line, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Line, OpMIPS64SGTU, config.fe.TypeBool())
		v3.AddArg(y)
		v4 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64())
		v4.AuxInt = 63
		v3.AddArg(v4)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh32x8 lowers an arithmetic right shift of a 32-bit
// value by an 8-bit amount: sign-extend x, zero-extend y, and clamp the shift
// amount via (OR y (NEGV (SGTU y 63))) so amounts > 63 saturate SRAV to the
// sign bit.
// Autogenerated from gen/MIPS64.rules — change the rule there, not this code.
func rewriteValueMIPS64_OpRsh32x8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh32x8  <t> x y)
	// cond:
	// result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64  y) (Const64 <config.fe.TypeUInt64()> [63]))) (ZeroExt8to64  y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Line, OpSignExt32to64, config.fe.TypeInt64())
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Line, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Line, OpMIPS64SGTU, config.fe.TypeBool())
		v4 := b.NewValue0(v.Line, OpZeroExt8to64, config.fe.TypeUInt64())
		v4.AddArg(y)
		v3.AddArg(v4)
		v5 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64())
		v5.AuxInt = 63
		v3.AddArg(v5)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v6 := b.NewValue0(v.Line, OpZeroExt8to64, config.fe.TypeUInt64())
		v6.AddArg(y)
		v1.AddArg(v6)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh64Ux16 lowers a logical right shift of a 64-bit
// value by a 16-bit amount; x is already full width so only y is
// zero-extended. The (NEGV (SGTU 64 y)) mask zeroes the result for y >= 64.
// Autogenerated from gen/MIPS64.rules — change the rule there, not this code.
func rewriteValueMIPS64_OpRsh64Ux16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh64Ux16 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <config.fe.TypeUInt64()> [64]) (ZeroExt16to64 y))) (SRLV <t> x (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Line, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Line, OpMIPS64SGTU, config.fe.TypeBool())
		v2 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64())
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpZeroExt16to64, config.fe.TypeUInt64())
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Line, OpMIPS64SRLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Line, OpZeroExt16to64, config.fe.TypeUInt64())
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh64Ux32 lowers a logical right shift of a 64-bit
// value by a 32-bit amount; x is already full width so only y is
// zero-extended. The (NEGV (SGTU 64 y)) mask zeroes the result for y >= 64.
// Autogenerated from gen/MIPS64.rules — change the rule there, not this code.
func rewriteValueMIPS64_OpRsh64Ux32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh64Ux32 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <config.fe.TypeUInt64()> [64]) (ZeroExt32to64 y))) (SRLV <t> x (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Line, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Line, OpMIPS64SGTU, config.fe.TypeBool())
		v2 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64())
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpZeroExt32to64, config.fe.TypeUInt64())
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Line, OpMIPS64SRLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Line, OpZeroExt32to64, config.fe.TypeUInt64())
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh64Ux64 lowers a full-width logical right shift;
// neither operand needs extension. The (NEGV (SGTU 64 y)) mask zeroes the
// result for y >= 64, matching Go's shift semantics.
// Autogenerated from gen/MIPS64.rules — change the rule there, not this code.
func rewriteValueMIPS64_OpRsh64Ux64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh64Ux64 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <config.fe.TypeUInt64()> [64]) y)) (SRLV <t> x y))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Line, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Line, OpMIPS64SGTU, config.fe.TypeBool())
		v2 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64())
		v2.AuxInt = 64
		v1.AddArg(v2)
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Line, OpMIPS64SRLV, t)
		v3.AddArg(x)
		v3.AddArg(y)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueMIPS64_OpRsh64Ux8 lowers a logical right shift of a 64-bit
// value by an 8-bit amount; x is already full width so only y is
// zero-extended. The (NEGV (SGTU 64 y)) mask zeroes the result for y >= 64.
// Autogenerated from gen/MIPS64.rules — change the rule there, not this code.
func rewriteValueMIPS64_OpRsh64Ux8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh64Ux8  <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <config.fe.TypeUInt64()> [64]) (ZeroExt8to64  y))) (SRLV <t> x (ZeroExt8to64  y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Line, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Line, OpMIPS64SGTU, config.fe.TypeBool())
		v2 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64())
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpZeroExt8to64, config.fe.TypeUInt64())
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Line, OpMIPS64SRLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Line, OpZeroExt8to64, config.fe.TypeUInt64())
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh64x16 lowers an arithmetic right shift of a 64-bit
// value by a 16-bit amount; x needs no extension. The shift amount is
// (OR y (NEGV (SGTU y 63))), clamping amounts > 63 to all ones so SRAV
// saturates to the sign bit.
// Autogenerated from gen/MIPS64.rules — change the rule there, not this code.
func rewriteValueMIPS64_OpRsh64x16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh64x16 <t> x y)
	// cond:
	// result: (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (Const64 <config.fe.TypeUInt64()> [63]))) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpMIPS64OR, t)
		v1 := b.NewValue0(v.Line, OpMIPS64NEGV, t)
		v2 := b.NewValue0(v.Line, OpMIPS64SGTU, config.fe.TypeBool())
		v3 := b.NewValue0(v.Line, OpZeroExt16to64, config.fe.TypeUInt64())
		v3.AddArg(y)
		v2.AddArg(v3)
		v4 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64())
		v4.AuxInt = 63
		v2.AddArg(v4)
		v1.AddArg(v2)
		v0.AddArg(v1)
		v5 := b.NewValue0(v.Line, OpZeroExt16to64, config.fe.TypeUInt64())
		v5.AddArg(y)
		v0.AddArg(v5)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpRsh64x32 lowers an arithmetic right shift of a 64-bit
// value by a 32-bit amount; x needs no extension. The shift amount is
// (OR y (NEGV (SGTU y 63))), clamping amounts > 63 to all ones so SRAV
// saturates to the sign bit.
// Autogenerated from gen/MIPS64.rules — change the rule there, not this code.
func rewriteValueMIPS64_OpRsh64x32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh64x32 <t> x y)
	// cond:
	// result: (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (Const64 <config.fe.TypeUInt64()> [63]))) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpMIPS64OR, t)
		v1 := b.NewValue0(v.Line, OpMIPS64NEGV, t)
		v2 := b.NewValue0(v.Line, OpMIPS64SGTU, config.fe.TypeBool())
		v3 := b.NewValue0(v.Line, OpZeroExt32to64, config.fe.TypeUInt64())
		v3.AddArg(y)
		v2.AddArg(v3)
		v4 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64())
		v4.AuxInt = 63
		v2.AddArg(v4)
		v1.AddArg(v2)
		v0.AddArg(v1)
		v5 := b.NewValue0(v.Line, OpZeroExt32to64, config.fe.TypeUInt64())
		v5.AddArg(y)
		v0.AddArg(v5)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpRsh64x64 lowers a full-width arithmetic right shift;
// neither operand needs extension. The amount is clamped via
// (OR y (NEGV (SGTU y 63))) so amounts > 63 saturate SRAV to the sign bit.
// Autogenerated from gen/MIPS64.rules — change the rule there, not this code.
func rewriteValueMIPS64_OpRsh64x64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh64x64 <t> x y)
	// cond:
	// result: (SRAV x (OR <t> (NEGV <t> (SGTU y (Const64 <config.fe.TypeUInt64()> [63]))) y))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpMIPS64OR, t)
		v1 := b.NewValue0(v.Line, OpMIPS64NEGV, t)
		v2 := b.NewValue0(v.Line, OpMIPS64SGTU, config.fe.TypeBool())
		v2.AddArg(y)
		v3 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64())
		v3.AuxInt = 63
		v2.AddArg(v3)
		v1.AddArg(v2)
		v0.AddArg(v1)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpRsh64x8 lowers an arithmetic right shift of a 64-bit
// value by an 8-bit amount; x needs no extension. The shift amount is
// (OR y (NEGV (SGTU y 63))), clamping amounts > 63 to all ones so SRAV
// saturates to the sign bit.
// Autogenerated from gen/MIPS64.rules — change the rule there, not this code.
func rewriteValueMIPS64_OpRsh64x8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh64x8  <t> x y)
	// cond:
	// result: (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt8to64  y) (Const64 <config.fe.TypeUInt64()> [63]))) (ZeroExt8to64  y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpMIPS64OR, t)
		v1 := b.NewValue0(v.Line, OpMIPS64NEGV, t)
		v2 := b.NewValue0(v.Line, OpMIPS64SGTU, config.fe.TypeBool())
		v3 := b.NewValue0(v.Line, OpZeroExt8to64, config.fe.TypeUInt64())
		v3.AddArg(y)
		v2.AddArg(v3)
		v4 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64())
		v4.AuxInt = 63
		v2.AddArg(v4)
		v1.AddArg(v2)
		v0.AddArg(v1)
		v5 := b.NewValue0(v.Line, OpZeroExt8to64, config.fe.TypeUInt64())
		v5.AddArg(y)
		v0.AddArg(v5)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpRsh8Ux16 lowers a logical right shift of an 8-bit
// value by a 16-bit amount: zero-extend both operands to 64 bits, SRLV, and
// mask with (NEGV (SGTU 64 y)) so shift amounts >= 64 produce 0.
// Autogenerated from gen/MIPS64.rules — change the rule there, not this code.
func rewriteValueMIPS64_OpRsh8Ux16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh8Ux16 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <config.fe.TypeUInt64()> [64]) (ZeroExt16to64 y))) (SRLV <t> (ZeroExt8to64 x) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Line, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Line, OpMIPS64SGTU, config.fe.TypeBool())
		v2 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64())
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpZeroExt16to64, config.fe.TypeUInt64())
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Line, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Line, OpZeroExt8to64, config.fe.TypeUInt64())
		v5.AddArg(x)
		v4.AddArg(v5)
		v6 := b.NewValue0(v.Line, OpZeroExt16to64, config.fe.TypeUInt64())
		v6.AddArg(y)
		v4.AddArg(v6)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh8Ux32 lowers a logical right shift of an 8-bit
// value by a 32-bit amount: zero-extend both operands to 64 bits, SRLV, and
// mask with (NEGV (SGTU 64 y)) so shift amounts >= 64 produce 0.
// Autogenerated from gen/MIPS64.rules — change the rule there, not this code.
func rewriteValueMIPS64_OpRsh8Ux32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh8Ux32 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <config.fe.TypeUInt64()> [64]) (ZeroExt32to64 y))) (SRLV <t> (ZeroExt8to64 x) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Line, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Line, OpMIPS64SGTU, config.fe.TypeBool())
		v2 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64())
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpZeroExt32to64, config.fe.TypeUInt64())
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Line, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Line, OpZeroExt8to64, config.fe.TypeUInt64())
		v5.AddArg(x)
		v4.AddArg(v5)
		v6 := b.NewValue0(v.Line, OpZeroExt32to64, config.fe.TypeUInt64())
		v6.AddArg(y)
		v4.AddArg(v6)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh8Ux64 lowers an unsigned right shift of an 8-bit
// value by a 64-bit count; the count needs no extension. The AND with
// NEGV(SGTU(64, y)) zeroes the result when the count is >= 64.
func rewriteValueMIPS64_OpRsh8Ux64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh8Ux64 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <config.fe.TypeUInt64()> [64]) y)) (SRLV <t> (ZeroExt8to64 x) y))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Line, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Line, OpMIPS64SGTU, config.fe.TypeBool())
		v2 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64())
		v2.AuxInt = 64
		v1.AddArg(v2)
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Line, OpMIPS64SRLV, t)
		v4 := b.NewValue0(v.Line, OpZeroExt8to64, config.fe.TypeUInt64())
		v4.AddArg(x)
		v3.AddArg(v4)
		v3.AddArg(y)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueMIPS64_OpRsh8Ux8 lowers an unsigned right shift of an 8-bit
// value by an 8-bit count: both operands are zero-extended to 64 bits and the
// SRLV result is masked to zero when the count is >= 64.
func rewriteValueMIPS64_OpRsh8Ux8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh8Ux8  <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <config.fe.TypeUInt64()> [64]) (ZeroExt8to64  y))) (SRLV <t> (ZeroExt8to64 x) (ZeroExt8to64  y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Line, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Line, OpMIPS64SGTU, config.fe.TypeBool())
		v2 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64())
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpZeroExt8to64, config.fe.TypeUInt64())
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Line, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Line, OpZeroExt8to64, config.fe.TypeUInt64())
		v5.AddArg(x)
		v4.AddArg(v5)
		v6 := b.NewValue0(v.Line, OpZeroExt8to64, config.fe.TypeUInt64())
		v6.AddArg(y)
		v4.AddArg(v6)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh8x16 lowers a signed right shift of an 8-bit value
// by a 16-bit count. Instead of masking the result, the count itself is
// saturated: OR-ing in NEGV(SGTU(count, 63)) makes the effective count >= 63
// when the original count exceeds 63, so SRAV produces the sign-fill result
// Go requires for oversized signed shifts.
func rewriteValueMIPS64_OpRsh8x16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh8x16 <t> x y)
	// cond:
	// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (Const64 <config.fe.TypeUInt64()> [63]))) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Line, OpSignExt8to64, config.fe.TypeInt64())
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Line, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Line, OpMIPS64SGTU, config.fe.TypeBool())
		v4 := b.NewValue0(v.Line, OpZeroExt16to64, config.fe.TypeUInt64())
		v4.AddArg(y)
		v3.AddArg(v4)
		v5 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64())
		v5.AuxInt = 63
		v3.AddArg(v5)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v6 := b.NewValue0(v.Line, OpZeroExt16to64, config.fe.TypeUInt64())
		v6.AddArg(y)
		v1.AddArg(v6)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh8x32 lowers a signed right shift of an 8-bit value
// by a 32-bit count, saturating the count to >= 63 (via the OR/NEGV/SGTU
// combination) so oversized shifts yield the sign-fill result.
func rewriteValueMIPS64_OpRsh8x32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh8x32 <t> x y)
	// cond:
	// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (Const64 <config.fe.TypeUInt64()> [63]))) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Line, OpSignExt8to64, config.fe.TypeInt64())
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Line, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Line, OpMIPS64SGTU, config.fe.TypeBool())
		v4 := b.NewValue0(v.Line, OpZeroExt32to64, config.fe.TypeUInt64())
		v4.AddArg(y)
		v3.AddArg(v4)
		v5 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64())
		v5.AuxInt = 63
		v3.AddArg(v5)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v6 := b.NewValue0(v.Line, OpZeroExt32to64, config.fe.TypeUInt64())
		v6.AddArg(y)
		v1.AddArg(v6)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh8x64 lowers a signed right shift of an 8-bit value
// by a 64-bit count (no count extension needed), saturating the count to
// >= 63 for oversized shifts so SRAV produces the sign-fill result.
func rewriteValueMIPS64_OpRsh8x64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh8x64 <t> x y)
	// cond:
	// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU y (Const64 <config.fe.TypeUInt64()> [63]))) y))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Line, OpSignExt8to64, config.fe.TypeInt64())
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Line, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Line, OpMIPS64SGTU, config.fe.TypeBool())
		v3.AddArg(y)
		v4 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64())
		v4.AuxInt = 63
		v3.AddArg(v4)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh8x8 lowers a signed right shift of an 8-bit value
// by an 8-bit count, saturating the (zero-extended) count to >= 63 for
// oversized shifts so SRAV produces the sign-fill result.
func rewriteValueMIPS64_OpRsh8x8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh8x8  <t> x y)
	// cond:
	// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64  y) (Const64 <config.fe.TypeUInt64()> [63]))) (ZeroExt8to64  y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Line, OpSignExt8to64, config.fe.TypeInt64())
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Line, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Line, OpMIPS64SGTU, config.fe.TypeBool())
		v4 := b.NewValue0(v.Line, OpZeroExt8to64, config.fe.TypeUInt64())
		v4.AddArg(y)
		v3.AddArg(v4)
		v5 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64())
		v5.AuxInt = 63
		v3.AddArg(v5)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v6 := b.NewValue0(v.Line, OpZeroExt8to64, config.fe.TypeUInt64())
		v6.AddArg(y)
		v1.AddArg(v6)
		v.AddArg(v1)
		return true
	}
}
  8367  func rewriteValueMIPS64_OpSelect0(v *Value, config *Config) bool {
  8368  	b := v.Block
  8369  	_ = b
  8370  	// match: (Select0 (DIVVU _ (MOVVconst [1])))
  8371  	// cond:
  8372  	// result: (MOVVconst [0])
  8373  	for {
  8374  		v_0 := v.Args[0]
  8375  		if v_0.Op != OpMIPS64DIVVU {
  8376  			break
  8377  		}
  8378  		v_0_1 := v_0.Args[1]
  8379  		if v_0_1.Op != OpMIPS64MOVVconst {
  8380  			break
  8381  		}
  8382  		if v_0_1.AuxInt != 1 {
  8383  			break
  8384  		}
  8385  		v.reset(OpMIPS64MOVVconst)
  8386  		v.AuxInt = 0
  8387  		return true
  8388  	}
  8389  	// match: (Select0 (DIVVU x (MOVVconst [c])))
  8390  	// cond: isPowerOfTwo(c)
  8391  	// result: (ANDconst [c-1] x)
  8392  	for {
  8393  		v_0 := v.Args[0]
  8394  		if v_0.Op != OpMIPS64DIVVU {
  8395  			break
  8396  		}
  8397  		x := v_0.Args[0]
  8398  		v_0_1 := v_0.Args[1]
  8399  		if v_0_1.Op != OpMIPS64MOVVconst {
  8400  			break
  8401  		}
  8402  		c := v_0_1.AuxInt
  8403  		if !(isPowerOfTwo(c)) {
  8404  			break
  8405  		}
  8406  		v.reset(OpMIPS64ANDconst)
  8407  		v.AuxInt = c - 1
  8408  		v.AddArg(x)
  8409  		return true
  8410  	}
  8411  	// match: (Select0 (DIVV  (MOVVconst [c]) (MOVVconst [d])))
  8412  	// cond:
  8413  	// result: (MOVVconst [int64(c)%int64(d)])
  8414  	for {
  8415  		v_0 := v.Args[0]
  8416  		if v_0.Op != OpMIPS64DIVV {
  8417  			break
  8418  		}
  8419  		v_0_0 := v_0.Args[0]
  8420  		if v_0_0.Op != OpMIPS64MOVVconst {
  8421  			break
  8422  		}
  8423  		c := v_0_0.AuxInt
  8424  		v_0_1 := v_0.Args[1]
  8425  		if v_0_1.Op != OpMIPS64MOVVconst {
  8426  			break
  8427  		}
  8428  		d := v_0_1.AuxInt
  8429  		v.reset(OpMIPS64MOVVconst)
  8430  		v.AuxInt = int64(c) % int64(d)
  8431  		return true
  8432  	}
  8433  	// match: (Select0 (DIVVU (MOVVconst [c]) (MOVVconst [d])))
  8434  	// cond:
  8435  	// result: (MOVVconst [int64(uint64(c)%uint64(d))])
  8436  	for {
  8437  		v_0 := v.Args[0]
  8438  		if v_0.Op != OpMIPS64DIVVU {
  8439  			break
  8440  		}
  8441  		v_0_0 := v_0.Args[0]
  8442  		if v_0_0.Op != OpMIPS64MOVVconst {
  8443  			break
  8444  		}
  8445  		c := v_0_0.AuxInt
  8446  		v_0_1 := v_0.Args[1]
  8447  		if v_0_1.Op != OpMIPS64MOVVconst {
  8448  			break
  8449  		}
  8450  		d := v_0_1.AuxInt
  8451  		v.reset(OpMIPS64MOVVconst)
  8452  		v.AuxInt = int64(uint64(c) % uint64(d))
  8453  		return true
  8454  	}
  8455  	return false
  8456  }
  8457  func rewriteValueMIPS64_OpSelect1(v *Value, config *Config) bool {
  8458  	b := v.Block
  8459  	_ = b
  8460  	// match: (Select1 (MULVU x (MOVVconst [-1])))
  8461  	// cond:
  8462  	// result: (NEGV x)
  8463  	for {
  8464  		v_0 := v.Args[0]
  8465  		if v_0.Op != OpMIPS64MULVU {
  8466  			break
  8467  		}
  8468  		x := v_0.Args[0]
  8469  		v_0_1 := v_0.Args[1]
  8470  		if v_0_1.Op != OpMIPS64MOVVconst {
  8471  			break
  8472  		}
  8473  		if v_0_1.AuxInt != -1 {
  8474  			break
  8475  		}
  8476  		v.reset(OpMIPS64NEGV)
  8477  		v.AddArg(x)
  8478  		return true
  8479  	}
  8480  	// match: (Select1 (MULVU _ (MOVVconst [0])))
  8481  	// cond:
  8482  	// result: (MOVVconst [0])
  8483  	for {
  8484  		v_0 := v.Args[0]
  8485  		if v_0.Op != OpMIPS64MULVU {
  8486  			break
  8487  		}
  8488  		v_0_1 := v_0.Args[1]
  8489  		if v_0_1.Op != OpMIPS64MOVVconst {
  8490  			break
  8491  		}
  8492  		if v_0_1.AuxInt != 0 {
  8493  			break
  8494  		}
  8495  		v.reset(OpMIPS64MOVVconst)
  8496  		v.AuxInt = 0
  8497  		return true
  8498  	}
  8499  	// match: (Select1 (MULVU x (MOVVconst [1])))
  8500  	// cond:
  8501  	// result: x
  8502  	for {
  8503  		v_0 := v.Args[0]
  8504  		if v_0.Op != OpMIPS64MULVU {
  8505  			break
  8506  		}
  8507  		x := v_0.Args[0]
  8508  		v_0_1 := v_0.Args[1]
  8509  		if v_0_1.Op != OpMIPS64MOVVconst {
  8510  			break
  8511  		}
  8512  		if v_0_1.AuxInt != 1 {
  8513  			break
  8514  		}
  8515  		v.reset(OpCopy)
  8516  		v.Type = x.Type
  8517  		v.AddArg(x)
  8518  		return true
  8519  	}
  8520  	// match: (Select1 (MULVU x (MOVVconst [c])))
  8521  	// cond: isPowerOfTwo(c)
  8522  	// result: (SLLVconst [log2(c)] x)
  8523  	for {
  8524  		v_0 := v.Args[0]
  8525  		if v_0.Op != OpMIPS64MULVU {
  8526  			break
  8527  		}
  8528  		x := v_0.Args[0]
  8529  		v_0_1 := v_0.Args[1]
  8530  		if v_0_1.Op != OpMIPS64MOVVconst {
  8531  			break
  8532  		}
  8533  		c := v_0_1.AuxInt
  8534  		if !(isPowerOfTwo(c)) {
  8535  			break
  8536  		}
  8537  		v.reset(OpMIPS64SLLVconst)
  8538  		v.AuxInt = log2(c)
  8539  		v.AddArg(x)
  8540  		return true
  8541  	}
  8542  	// match: (Select1 (MULVU (MOVVconst [-1]) x))
  8543  	// cond:
  8544  	// result: (NEGV x)
  8545  	for {
  8546  		v_0 := v.Args[0]
  8547  		if v_0.Op != OpMIPS64MULVU {
  8548  			break
  8549  		}
  8550  		v_0_0 := v_0.Args[0]
  8551  		if v_0_0.Op != OpMIPS64MOVVconst {
  8552  			break
  8553  		}
  8554  		if v_0_0.AuxInt != -1 {
  8555  			break
  8556  		}
  8557  		x := v_0.Args[1]
  8558  		v.reset(OpMIPS64NEGV)
  8559  		v.AddArg(x)
  8560  		return true
  8561  	}
  8562  	// match: (Select1 (MULVU (MOVVconst [0]) _))
  8563  	// cond:
  8564  	// result: (MOVVconst [0])
  8565  	for {
  8566  		v_0 := v.Args[0]
  8567  		if v_0.Op != OpMIPS64MULVU {
  8568  			break
  8569  		}
  8570  		v_0_0 := v_0.Args[0]
  8571  		if v_0_0.Op != OpMIPS64MOVVconst {
  8572  			break
  8573  		}
  8574  		if v_0_0.AuxInt != 0 {
  8575  			break
  8576  		}
  8577  		v.reset(OpMIPS64MOVVconst)
  8578  		v.AuxInt = 0
  8579  		return true
  8580  	}
  8581  	// match: (Select1 (MULVU (MOVVconst [1]) x))
  8582  	// cond:
  8583  	// result: x
  8584  	for {
  8585  		v_0 := v.Args[0]
  8586  		if v_0.Op != OpMIPS64MULVU {
  8587  			break
  8588  		}
  8589  		v_0_0 := v_0.Args[0]
  8590  		if v_0_0.Op != OpMIPS64MOVVconst {
  8591  			break
  8592  		}
  8593  		if v_0_0.AuxInt != 1 {
  8594  			break
  8595  		}
  8596  		x := v_0.Args[1]
  8597  		v.reset(OpCopy)
  8598  		v.Type = x.Type
  8599  		v.AddArg(x)
  8600  		return true
  8601  	}
  8602  	// match: (Select1 (MULVU (MOVVconst [c]) x))
  8603  	// cond: isPowerOfTwo(c)
  8604  	// result: (SLLVconst [log2(c)] x)
  8605  	for {
  8606  		v_0 := v.Args[0]
  8607  		if v_0.Op != OpMIPS64MULVU {
  8608  			break
  8609  		}
  8610  		v_0_0 := v_0.Args[0]
  8611  		if v_0_0.Op != OpMIPS64MOVVconst {
  8612  			break
  8613  		}
  8614  		c := v_0_0.AuxInt
  8615  		x := v_0.Args[1]
  8616  		if !(isPowerOfTwo(c)) {
  8617  			break
  8618  		}
  8619  		v.reset(OpMIPS64SLLVconst)
  8620  		v.AuxInt = log2(c)
  8621  		v.AddArg(x)
  8622  		return true
  8623  	}
  8624  	// match: (Select1 (DIVVU x (MOVVconst [1])))
  8625  	// cond:
  8626  	// result: x
  8627  	for {
  8628  		v_0 := v.Args[0]
  8629  		if v_0.Op != OpMIPS64DIVVU {
  8630  			break
  8631  		}
  8632  		x := v_0.Args[0]
  8633  		v_0_1 := v_0.Args[1]
  8634  		if v_0_1.Op != OpMIPS64MOVVconst {
  8635  			break
  8636  		}
  8637  		if v_0_1.AuxInt != 1 {
  8638  			break
  8639  		}
  8640  		v.reset(OpCopy)
  8641  		v.Type = x.Type
  8642  		v.AddArg(x)
  8643  		return true
  8644  	}
  8645  	// match: (Select1 (DIVVU x (MOVVconst [c])))
  8646  	// cond: isPowerOfTwo(c)
  8647  	// result: (SRLVconst [log2(c)] x)
  8648  	for {
  8649  		v_0 := v.Args[0]
  8650  		if v_0.Op != OpMIPS64DIVVU {
  8651  			break
  8652  		}
  8653  		x := v_0.Args[0]
  8654  		v_0_1 := v_0.Args[1]
  8655  		if v_0_1.Op != OpMIPS64MOVVconst {
  8656  			break
  8657  		}
  8658  		c := v_0_1.AuxInt
  8659  		if !(isPowerOfTwo(c)) {
  8660  			break
  8661  		}
  8662  		v.reset(OpMIPS64SRLVconst)
  8663  		v.AuxInt = log2(c)
  8664  		v.AddArg(x)
  8665  		return true
  8666  	}
  8667  	// match: (Select1 (MULVU (MOVVconst [c]) (MOVVconst [d])))
  8668  	// cond:
  8669  	// result: (MOVVconst [c*d])
  8670  	for {
  8671  		v_0 := v.Args[0]
  8672  		if v_0.Op != OpMIPS64MULVU {
  8673  			break
  8674  		}
  8675  		v_0_0 := v_0.Args[0]
  8676  		if v_0_0.Op != OpMIPS64MOVVconst {
  8677  			break
  8678  		}
  8679  		c := v_0_0.AuxInt
  8680  		v_0_1 := v_0.Args[1]
  8681  		if v_0_1.Op != OpMIPS64MOVVconst {
  8682  			break
  8683  		}
  8684  		d := v_0_1.AuxInt
  8685  		v.reset(OpMIPS64MOVVconst)
  8686  		v.AuxInt = c * d
  8687  		return true
  8688  	}
  8689  	// match: (Select1 (DIVV  (MOVVconst [c]) (MOVVconst [d])))
  8690  	// cond:
  8691  	// result: (MOVVconst [int64(c)/int64(d)])
  8692  	for {
  8693  		v_0 := v.Args[0]
  8694  		if v_0.Op != OpMIPS64DIVV {
  8695  			break
  8696  		}
  8697  		v_0_0 := v_0.Args[0]
  8698  		if v_0_0.Op != OpMIPS64MOVVconst {
  8699  			break
  8700  		}
  8701  		c := v_0_0.AuxInt
  8702  		v_0_1 := v_0.Args[1]
  8703  		if v_0_1.Op != OpMIPS64MOVVconst {
  8704  			break
  8705  		}
  8706  		d := v_0_1.AuxInt
  8707  		v.reset(OpMIPS64MOVVconst)
  8708  		v.AuxInt = int64(c) / int64(d)
  8709  		return true
  8710  	}
  8711  	// match: (Select1 (DIVVU (MOVVconst [c]) (MOVVconst [d])))
  8712  	// cond:
  8713  	// result: (MOVVconst [int64(uint64(c)/uint64(d))])
  8714  	for {
  8715  		v_0 := v.Args[0]
  8716  		if v_0.Op != OpMIPS64DIVVU {
  8717  			break
  8718  		}
  8719  		v_0_0 := v_0.Args[0]
  8720  		if v_0_0.Op != OpMIPS64MOVVconst {
  8721  			break
  8722  		}
  8723  		c := v_0_0.AuxInt
  8724  		v_0_1 := v_0.Args[1]
  8725  		if v_0_1.Op != OpMIPS64MOVVconst {
  8726  			break
  8727  		}
  8728  		d := v_0_1.AuxInt
  8729  		v.reset(OpMIPS64MOVVconst)
  8730  		v.AuxInt = int64(uint64(c) / uint64(d))
  8731  		return true
  8732  	}
  8733  	return false
  8734  }
// rewriteValueMIPS64_OpSignExt16to32 lowers SignExt16to32 to MOVHreg, the
// MIPS64 sign-extend-halfword op; the full-register extension covers the
// narrower 32-bit target width.
func rewriteValueMIPS64_OpSignExt16to32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (SignExt16to32 x)
	// cond:
	// result: (MOVHreg x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64MOVHreg)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpSignExt16to64 lowers SignExt16to64 to MOVHreg
// (sign-extend halfword to the full 64-bit register).
func rewriteValueMIPS64_OpSignExt16to64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (SignExt16to64 x)
	// cond:
	// result: (MOVHreg x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64MOVHreg)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpSignExt32to64 lowers SignExt32to64 to MOVWreg
// (sign-extend word to the full 64-bit register).
func rewriteValueMIPS64_OpSignExt32to64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (SignExt32to64 x)
	// cond:
	// result: (MOVWreg x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64MOVWreg)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpSignExt8to16 lowers SignExt8to16 to MOVBreg; the
// byte is sign-extended to the full register, which covers the 16-bit target.
func rewriteValueMIPS64_OpSignExt8to16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (SignExt8to16 x)
	// cond:
	// result: (MOVBreg x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64MOVBreg)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpSignExt8to32 lowers SignExt8to32 to MOVBreg; the
// byte is sign-extended to the full register, which covers the 32-bit target.
func rewriteValueMIPS64_OpSignExt8to32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (SignExt8to32 x)
	// cond:
	// result: (MOVBreg x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64MOVBreg)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpSignExt8to64 lowers SignExt8to64 to MOVBreg
// (sign-extend byte to the full 64-bit register).
func rewriteValueMIPS64_OpSignExt8to64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (SignExt8to64 x)
	// cond:
	// result: (MOVBreg x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64MOVBreg)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpSlicemask lowers Slicemask as ^((x-1)>>63): the
// arithmetic shift of x-1 by 63 is -1 when x == 0 and 0 for positive x, and
// the NOR with 0 inverts that, yielding 0 for x == 0 and all ones otherwise.
func rewriteValueMIPS64_OpSlicemask(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Slicemask <t> x)
	// cond:
	// result: (NORconst [0] (SRAVconst <t> (SUBVconst <t> x [1]) [63]))
	for {
		t := v.Type
		x := v.Args[0]
		v.reset(OpMIPS64NORconst)
		v.AuxInt = 0
		v0 := b.NewValue0(v.Line, OpMIPS64SRAVconst, t)
		v0.AuxInt = 63
		v1 := b.NewValue0(v.Line, OpMIPS64SUBVconst, t)
		v1.AuxInt = 1
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpStaticCall lowers a generic StaticCall to the
// MIPS64-specific CALLstatic op, carrying over the argument width (AuxInt),
// call target (Aux), and memory argument unchanged.
func rewriteValueMIPS64_OpStaticCall(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (StaticCall [argwid] {target} mem)
	// cond:
	// result: (CALLstatic [argwid] {target} mem)
	for {
		argwid := v.AuxInt
		target := v.Aux
		mem := v.Args[0]
		v.reset(OpMIPS64CALLstatic)
		v.AuxInt = argwid
		v.Aux = target
		v.AddArg(mem)
		return true
	}
}
// rewriteValueMIPS64_OpStore lowers a generic Store to the width-specific
// MIPS64 store op, selected by the store size in AuxInt (1/2/4/8 bytes) and,
// for 4- and 8-byte stores, by whether the value is a float (MOVFstore /
// MOVDstore) or an integer (MOVWstore / MOVVstore). Returns false for sizes
// with no matching rule.
func rewriteValueMIPS64_OpStore(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Store [1] ptr val mem)
	// cond:
	// result: (MOVBstore ptr val mem)
	for {
		if v.AuxInt != 1 {
			break
		}
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		v.reset(OpMIPS64MOVBstore)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (Store [2] ptr val mem)
	// cond:
	// result: (MOVHstore ptr val mem)
	for {
		if v.AuxInt != 2 {
			break
		}
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		v.reset(OpMIPS64MOVHstore)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (Store [4] ptr val mem)
	// cond: !is32BitFloat(val.Type)
	// result: (MOVWstore ptr val mem)
	for {
		if v.AuxInt != 4 {
			break
		}
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(!is32BitFloat(val.Type)) {
			break
		}
		v.reset(OpMIPS64MOVWstore)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (Store [8] ptr val mem)
	// cond: !is64BitFloat(val.Type)
	// result: (MOVVstore ptr val mem)
	for {
		if v.AuxInt != 8 {
			break
		}
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(!is64BitFloat(val.Type)) {
			break
		}
		v.reset(OpMIPS64MOVVstore)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (Store [4] ptr val mem)
	// cond: is32BitFloat(val.Type)
	// result: (MOVFstore ptr val mem)
	for {
		if v.AuxInt != 4 {
			break
		}
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(is32BitFloat(val.Type)) {
			break
		}
		v.reset(OpMIPS64MOVFstore)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (Store [8] ptr val mem)
	// cond: is64BitFloat(val.Type)
	// result: (MOVDstore ptr val mem)
	for {
		if v.AuxInt != 8 {
			break
		}
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(is64BitFloat(val.Type)) {
			break
		}
		v.reset(OpMIPS64MOVDstore)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpSub16 lowers Sub16 to the 64-bit SUBV; consumers of a
// sub-word result are expected to use only the low 16 bits.
func rewriteValueMIPS64_OpSub16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Sub16 x y)
	// cond:
	// result: (SUBV x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SUBV)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpSub32 lowers Sub32 to the 64-bit SUBV; consumers of a
// sub-word result are expected to use only the low 32 bits.
func rewriteValueMIPS64_OpSub32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Sub32 x y)
	// cond:
	// result: (SUBV x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SUBV)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpSub32F lowers Sub32F to the single-precision
// floating-point subtract SUBF.
func rewriteValueMIPS64_OpSub32F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Sub32F x y)
	// cond:
	// result: (SUBF x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SUBF)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpSub64 lowers Sub64 directly to the 64-bit SUBV.
func rewriteValueMIPS64_OpSub64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Sub64 x y)
	// cond:
	// result: (SUBV x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SUBV)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpSub64F lowers Sub64F to the double-precision
// floating-point subtract SUBD.
func rewriteValueMIPS64_OpSub64F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Sub64F x y)
	// cond:
	// result: (SUBD x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SUBD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpSub8 lowers Sub8 to the 64-bit SUBV; consumers of a
// sub-word result are expected to use only the low 8 bits.
func rewriteValueMIPS64_OpSub8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Sub8 x y)
	// cond:
	// result: (SUBV x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SUBV)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpSubPtr lowers pointer subtraction to the 64-bit SUBV
// (pointers are 64-bit on this target).
func rewriteValueMIPS64_OpSubPtr(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (SubPtr x y)
	// cond:
	// result: (SUBV x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SUBV)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpTrunc16to8 rewrites the truncation to a no-op Copy:
// narrowing only reinterprets the low bits, so no instruction is needed.
func rewriteValueMIPS64_OpTrunc16to8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Trunc16to8 x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpTrunc32to16 rewrites the truncation to a no-op Copy:
// narrowing only reinterprets the low bits, so no instruction is needed.
func rewriteValueMIPS64_OpTrunc32to16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Trunc32to16 x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpTrunc32to8 rewrites the truncation to a no-op Copy:
// narrowing only reinterprets the low bits, so no instruction is needed.
func rewriteValueMIPS64_OpTrunc32to8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Trunc32to8 x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpTrunc64to16 rewrites the truncation to a no-op Copy:
// narrowing only reinterprets the low bits, so no instruction is needed.
func rewriteValueMIPS64_OpTrunc64to16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Trunc64to16 x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpTrunc64to32 rewrites the truncation to a no-op Copy:
// narrowing only reinterprets the low bits, so no instruction is needed.
func rewriteValueMIPS64_OpTrunc64to32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Trunc64to32 x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpTrunc64to8 rewrites the truncation to a no-op Copy:
// narrowing only reinterprets the low bits, so no instruction is needed.
func rewriteValueMIPS64_OpTrunc64to8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Trunc64to8 x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpXor16 lowers Xor16 to the full-register XOR; bitwise
// ops are width-agnostic, so one instruction serves all integer widths.
func rewriteValueMIPS64_OpXor16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Xor16 x y)
	// cond:
	// result: (XOR x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpXor32 lowers Xor32 to the full-register XOR; bitwise
// ops are width-agnostic, so one instruction serves all integer widths.
func rewriteValueMIPS64_OpXor32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Xor32 x y)
	// cond:
	// result: (XOR x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpXor64 lowers Xor64 directly to the 64-bit XOR.
func rewriteValueMIPS64_OpXor64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Xor64 x y)
	// cond:
	// result: (XOR x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpXor8 lowers Xor8 to the full-register XOR; bitwise
// ops are width-agnostic, so one instruction serves all integer widths.
func rewriteValueMIPS64_OpXor8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Xor8 x y)
	// cond:
	// result: (XOR x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
  9213  func rewriteValueMIPS64_OpZero(v *Value, config *Config) bool {
  9214  	b := v.Block
  9215  	_ = b
  9216  	// match: (Zero [s] _ mem)
  9217  	// cond: SizeAndAlign(s).Size() == 0
  9218  	// result: mem
  9219  	for {
  9220  		s := v.AuxInt
  9221  		mem := v.Args[1]
  9222  		if !(SizeAndAlign(s).Size() == 0) {
  9223  			break
  9224  		}
  9225  		v.reset(OpCopy)
  9226  		v.Type = mem.Type
  9227  		v.AddArg(mem)
  9228  		return true
  9229  	}
  9230  	// match: (Zero [s] ptr mem)
  9231  	// cond: SizeAndAlign(s).Size() == 1
  9232  	// result: (MOVBstore ptr (MOVVconst [0]) mem)
  9233  	for {
  9234  		s := v.AuxInt
  9235  		ptr := v.Args[0]
  9236  		mem := v.Args[1]
  9237  		if !(SizeAndAlign(s).Size() == 1) {
  9238  			break
  9239  		}
  9240  		v.reset(OpMIPS64MOVBstore)
  9241  		v.AddArg(ptr)
  9242  		v0 := b.NewValue0(v.Line, OpMIPS64MOVVconst, config.fe.TypeUInt64())
  9243  		v0.AuxInt = 0
  9244  		v.AddArg(v0)
  9245  		v.AddArg(mem)
  9246  		return true
  9247  	}
  9248  	// match: (Zero [s] ptr mem)
  9249  	// cond: SizeAndAlign(s).Size() == 2 && SizeAndAlign(s).Align()%2 == 0
  9250  	// result: (MOVHstore ptr (MOVVconst [0]) mem)
  9251  	for {
  9252  		s := v.AuxInt
  9253  		ptr := v.Args[0]
  9254  		mem := v.Args[1]
  9255  		if !(SizeAndAlign(s).Size() == 2 && SizeAndAlign(s).Align()%2 == 0) {
  9256  			break
  9257  		}
  9258  		v.reset(OpMIPS64MOVHstore)
  9259  		v.AddArg(ptr)
  9260  		v0 := b.NewValue0(v.Line, OpMIPS64MOVVconst, config.fe.TypeUInt64())
  9261  		v0.AuxInt = 0
  9262  		v.AddArg(v0)
  9263  		v.AddArg(mem)
  9264  		return true
  9265  	}
  9266  	// match: (Zero [s] ptr mem)
  9267  	// cond: SizeAndAlign(s).Size() == 2
  9268  	// result: (MOVBstore [1] ptr (MOVVconst [0]) 		(MOVBstore [0] ptr (MOVVconst [0]) mem))
  9269  	for {
  9270  		s := v.AuxInt
  9271  		ptr := v.Args[0]
  9272  		mem := v.Args[1]
  9273  		if !(SizeAndAlign(s).Size() == 2) {
  9274  			break
  9275  		}
  9276  		v.reset(OpMIPS64MOVBstore)
  9277  		v.AuxInt = 1
  9278  		v.AddArg(ptr)
  9279  		v0 := b.NewValue0(v.Line, OpMIPS64MOVVconst, config.fe.TypeUInt64())
  9280  		v0.AuxInt = 0
  9281  		v.AddArg(v0)
  9282  		v1 := b.NewValue0(v.Line, OpMIPS64MOVBstore, TypeMem)
  9283  		v1.AuxInt = 0
  9284  		v1.AddArg(ptr)
  9285  		v2 := b.NewValue0(v.Line, OpMIPS64MOVVconst, config.fe.TypeUInt64())
  9286  		v2.AuxInt = 0
  9287  		v1.AddArg(v2)
  9288  		v1.AddArg(mem)
  9289  		v.AddArg(v1)
  9290  		return true
  9291  	}
  9292  	// match: (Zero [s] ptr mem)
  9293  	// cond: SizeAndAlign(s).Size() == 4 && SizeAndAlign(s).Align()%4 == 0
  9294  	// result: (MOVWstore ptr (MOVVconst [0]) mem)
  9295  	for {
  9296  		s := v.AuxInt
  9297  		ptr := v.Args[0]
  9298  		mem := v.Args[1]
  9299  		if !(SizeAndAlign(s).Size() == 4 && SizeAndAlign(s).Align()%4 == 0) {
  9300  			break
  9301  		}
  9302  		v.reset(OpMIPS64MOVWstore)
  9303  		v.AddArg(ptr)
  9304  		v0 := b.NewValue0(v.Line, OpMIPS64MOVVconst, config.fe.TypeUInt64())
  9305  		v0.AuxInt = 0
  9306  		v.AddArg(v0)
  9307  		v.AddArg(mem)
  9308  		return true
  9309  	}
  9310  	// match: (Zero [s] ptr mem)
  9311  	// cond: SizeAndAlign(s).Size() == 4 && SizeAndAlign(s).Align()%2 == 0
  9312  	// result: (MOVHstore [2] ptr (MOVVconst [0]) 		(MOVHstore [0] ptr (MOVVconst [0]) mem))
  9313  	for {
  9314  		s := v.AuxInt
  9315  		ptr := v.Args[0]
  9316  		mem := v.Args[1]
  9317  		if !(SizeAndAlign(s).Size() == 4 && SizeAndAlign(s).Align()%2 == 0) {
  9318  			break
  9319  		}
  9320  		v.reset(OpMIPS64MOVHstore)
  9321  		v.AuxInt = 2
  9322  		v.AddArg(ptr)
  9323  		v0 := b.NewValue0(v.Line, OpMIPS64MOVVconst, config.fe.TypeUInt64())
  9324  		v0.AuxInt = 0
  9325  		v.AddArg(v0)
  9326  		v1 := b.NewValue0(v.Line, OpMIPS64MOVHstore, TypeMem)
  9327  		v1.AuxInt = 0
  9328  		v1.AddArg(ptr)
  9329  		v2 := b.NewValue0(v.Line, OpMIPS64MOVVconst, config.fe.TypeUInt64())
  9330  		v2.AuxInt = 0
  9331  		v1.AddArg(v2)
  9332  		v1.AddArg(mem)
  9333  		v.AddArg(v1)
  9334  		return true
  9335  	}
  9336  	// match: (Zero [s] ptr mem)
  9337  	// cond: SizeAndAlign(s).Size() == 4
  9338  	// result: (MOVBstore [3] ptr (MOVVconst [0]) 		(MOVBstore [2] ptr (MOVVconst [0]) 			(MOVBstore [1] ptr (MOVVconst [0]) 				(MOVBstore [0] ptr (MOVVconst [0]) mem))))
  9339  	for {
  9340  		s := v.AuxInt
  9341  		ptr := v.Args[0]
  9342  		mem := v.Args[1]
  9343  		if !(SizeAndAlign(s).Size() == 4) {
  9344  			break
  9345  		}
  9346  		v.reset(OpMIPS64MOVBstore)
  9347  		v.AuxInt = 3
  9348  		v.AddArg(ptr)
  9349  		v0 := b.NewValue0(v.Line, OpMIPS64MOVVconst, config.fe.TypeUInt64())
  9350  		v0.AuxInt = 0
  9351  		v.AddArg(v0)
  9352  		v1 := b.NewValue0(v.Line, OpMIPS64MOVBstore, TypeMem)
  9353  		v1.AuxInt = 2
  9354  		v1.AddArg(ptr)
  9355  		v2 := b.NewValue0(v.Line, OpMIPS64MOVVconst, config.fe.TypeUInt64())
  9356  		v2.AuxInt = 0
  9357  		v1.AddArg(v2)
  9358  		v3 := b.NewValue0(v.Line, OpMIPS64MOVBstore, TypeMem)
  9359  		v3.AuxInt = 1
  9360  		v3.AddArg(ptr)
  9361  		v4 := b.NewValue0(v.Line, OpMIPS64MOVVconst, config.fe.TypeUInt64())
  9362  		v4.AuxInt = 0
  9363  		v3.AddArg(v4)
  9364  		v5 := b.NewValue0(v.Line, OpMIPS64MOVBstore, TypeMem)
  9365  		v5.AuxInt = 0
  9366  		v5.AddArg(ptr)
  9367  		v6 := b.NewValue0(v.Line, OpMIPS64MOVVconst, config.fe.TypeUInt64())
  9368  		v6.AuxInt = 0
  9369  		v5.AddArg(v6)
  9370  		v5.AddArg(mem)
  9371  		v3.AddArg(v5)
  9372  		v1.AddArg(v3)
  9373  		v.AddArg(v1)
  9374  		return true
  9375  	}
  9376  	// match: (Zero [s] ptr mem)
  9377  	// cond: SizeAndAlign(s).Size() == 8 && SizeAndAlign(s).Align()%8 == 0
  9378  	// result: (MOVVstore ptr (MOVVconst [0]) mem)
  9379  	for {
  9380  		s := v.AuxInt
  9381  		ptr := v.Args[0]
  9382  		mem := v.Args[1]
  9383  		if !(SizeAndAlign(s).Size() == 8 && SizeAndAlign(s).Align()%8 == 0) {
  9384  			break
  9385  		}
  9386  		v.reset(OpMIPS64MOVVstore)
  9387  		v.AddArg(ptr)
  9388  		v0 := b.NewValue0(v.Line, OpMIPS64MOVVconst, config.fe.TypeUInt64())
  9389  		v0.AuxInt = 0
  9390  		v.AddArg(v0)
  9391  		v.AddArg(mem)
  9392  		return true
  9393  	}
  9394  	// match: (Zero [s] ptr mem)
  9395  	// cond: SizeAndAlign(s).Size() == 8 && SizeAndAlign(s).Align()%4 == 0
  9396  	// result: (MOVWstore [4] ptr (MOVVconst [0]) 		(MOVWstore [0] ptr (MOVVconst [0]) mem))
  9397  	for {
  9398  		s := v.AuxInt
  9399  		ptr := v.Args[0]
  9400  		mem := v.Args[1]
  9401  		if !(SizeAndAlign(s).Size() == 8 && SizeAndAlign(s).Align()%4 == 0) {
  9402  			break
  9403  		}
  9404  		v.reset(OpMIPS64MOVWstore)
  9405  		v.AuxInt = 4
  9406  		v.AddArg(ptr)
  9407  		v0 := b.NewValue0(v.Line, OpMIPS64MOVVconst, config.fe.TypeUInt64())
  9408  		v0.AuxInt = 0
  9409  		v.AddArg(v0)
  9410  		v1 := b.NewValue0(v.Line, OpMIPS64MOVWstore, TypeMem)
  9411  		v1.AuxInt = 0
  9412  		v1.AddArg(ptr)
  9413  		v2 := b.NewValue0(v.Line, OpMIPS64MOVVconst, config.fe.TypeUInt64())
  9414  		v2.AuxInt = 0
  9415  		v1.AddArg(v2)
  9416  		v1.AddArg(mem)
  9417  		v.AddArg(v1)
  9418  		return true
  9419  	}
  9420  	// match: (Zero [s] ptr mem)
  9421  	// cond: SizeAndAlign(s).Size() == 4
  9422  	// result: (MOVHstore [6] ptr (MOVVconst [0]) 		(MOVHstore [4] ptr (MOVVconst [0]) 			(MOVHstore [2] ptr (MOVVconst [0]) 				(MOVHstore [0] ptr (MOVVconst [0]) mem))))
  9423  	for {
  9424  		s := v.AuxInt
  9425  		ptr := v.Args[0]
  9426  		mem := v.Args[1]
  9427  		if !(SizeAndAlign(s).Size() == 4) {
  9428  			break
  9429  		}
  9430  		v.reset(OpMIPS64MOVHstore)
  9431  		v.AuxInt = 6
  9432  		v.AddArg(ptr)
  9433  		v0 := b.NewValue0(v.Line, OpMIPS64MOVVconst, config.fe.TypeUInt64())
  9434  		v0.AuxInt = 0
  9435  		v.AddArg(v0)
  9436  		v1 := b.NewValue0(v.Line, OpMIPS64MOVHstore, TypeMem)
  9437  		v1.AuxInt = 4
  9438  		v1.AddArg(ptr)
  9439  		v2 := b.NewValue0(v.Line, OpMIPS64MOVVconst, config.fe.TypeUInt64())
  9440  		v2.AuxInt = 0
  9441  		v1.AddArg(v2)
  9442  		v3 := b.NewValue0(v.Line, OpMIPS64MOVHstore, TypeMem)
  9443  		v3.AuxInt = 2
  9444  		v3.AddArg(ptr)
  9445  		v4 := b.NewValue0(v.Line, OpMIPS64MOVVconst, config.fe.TypeUInt64())
  9446  		v4.AuxInt = 0
  9447  		v3.AddArg(v4)
  9448  		v5 := b.NewValue0(v.Line, OpMIPS64MOVHstore, TypeMem)
  9449  		v5.AuxInt = 0
  9450  		v5.AddArg(ptr)
  9451  		v6 := b.NewValue0(v.Line, OpMIPS64MOVVconst, config.fe.TypeUInt64())
  9452  		v6.AuxInt = 0
  9453  		v5.AddArg(v6)
  9454  		v5.AddArg(mem)
  9455  		v3.AddArg(v5)
  9456  		v1.AddArg(v3)
  9457  		v.AddArg(v1)
  9458  		return true
  9459  	}
  9460  	// match: (Zero [s] ptr mem)
  9461  	// cond: SizeAndAlign(s).Size() == 3
  9462  	// result: (MOVBstore [2] ptr (MOVVconst [0]) 		(MOVBstore [1] ptr (MOVVconst [0]) 			(MOVBstore [0] ptr (MOVVconst [0]) mem)))
  9463  	for {
  9464  		s := v.AuxInt
  9465  		ptr := v.Args[0]
  9466  		mem := v.Args[1]
  9467  		if !(SizeAndAlign(s).Size() == 3) {
  9468  			break
  9469  		}
  9470  		v.reset(OpMIPS64MOVBstore)
  9471  		v.AuxInt = 2
  9472  		v.AddArg(ptr)
  9473  		v0 := b.NewValue0(v.Line, OpMIPS64MOVVconst, config.fe.TypeUInt64())
  9474  		v0.AuxInt = 0
  9475  		v.AddArg(v0)
  9476  		v1 := b.NewValue0(v.Line, OpMIPS64MOVBstore, TypeMem)
  9477  		v1.AuxInt = 1
  9478  		v1.AddArg(ptr)
  9479  		v2 := b.NewValue0(v.Line, OpMIPS64MOVVconst, config.fe.TypeUInt64())
  9480  		v2.AuxInt = 0
  9481  		v1.AddArg(v2)
  9482  		v3 := b.NewValue0(v.Line, OpMIPS64MOVBstore, TypeMem)
  9483  		v3.AuxInt = 0
  9484  		v3.AddArg(ptr)
  9485  		v4 := b.NewValue0(v.Line, OpMIPS64MOVVconst, config.fe.TypeUInt64())
  9486  		v4.AuxInt = 0
  9487  		v3.AddArg(v4)
  9488  		v3.AddArg(mem)
  9489  		v1.AddArg(v3)
  9490  		v.AddArg(v1)
  9491  		return true
  9492  	}
  9493  	// match: (Zero [s] ptr mem)
  9494  	// cond: SizeAndAlign(s).Size() == 6 && SizeAndAlign(s).Align()%2 == 0
  9495  	// result: (MOVHstore [4] ptr (MOVVconst [0]) 		(MOVHstore [2] ptr (MOVVconst [0]) 			(MOVHstore [0] ptr (MOVVconst [0]) mem)))
  9496  	for {
  9497  		s := v.AuxInt
  9498  		ptr := v.Args[0]
  9499  		mem := v.Args[1]
  9500  		if !(SizeAndAlign(s).Size() == 6 && SizeAndAlign(s).Align()%2 == 0) {
  9501  			break
  9502  		}
  9503  		v.reset(OpMIPS64MOVHstore)
  9504  		v.AuxInt = 4
  9505  		v.AddArg(ptr)
  9506  		v0 := b.NewValue0(v.Line, OpMIPS64MOVVconst, config.fe.TypeUInt64())
  9507  		v0.AuxInt = 0
  9508  		v.AddArg(v0)
  9509  		v1 := b.NewValue0(v.Line, OpMIPS64MOVHstore, TypeMem)
  9510  		v1.AuxInt = 2
  9511  		v1.AddArg(ptr)
  9512  		v2 := b.NewValue0(v.Line, OpMIPS64MOVVconst, config.fe.TypeUInt64())
  9513  		v2.AuxInt = 0
  9514  		v1.AddArg(v2)
  9515  		v3 := b.NewValue0(v.Line, OpMIPS64MOVHstore, TypeMem)
  9516  		v3.AuxInt = 0
  9517  		v3.AddArg(ptr)
  9518  		v4 := b.NewValue0(v.Line, OpMIPS64MOVVconst, config.fe.TypeUInt64())
  9519  		v4.AuxInt = 0
  9520  		v3.AddArg(v4)
  9521  		v3.AddArg(mem)
  9522  		v1.AddArg(v3)
  9523  		v.AddArg(v1)
  9524  		return true
  9525  	}
  9526  	// match: (Zero [s] ptr mem)
  9527  	// cond: SizeAndAlign(s).Size() == 12 && SizeAndAlign(s).Align()%4 == 0
  9528  	// result: (MOVWstore [8] ptr (MOVVconst [0]) 		(MOVWstore [4] ptr (MOVVconst [0]) 			(MOVWstore [0] ptr (MOVVconst [0]) mem)))
  9529  	for {
  9530  		s := v.AuxInt
  9531  		ptr := v.Args[0]
  9532  		mem := v.Args[1]
  9533  		if !(SizeAndAlign(s).Size() == 12 && SizeAndAlign(s).Align()%4 == 0) {
  9534  			break
  9535  		}
  9536  		v.reset(OpMIPS64MOVWstore)
  9537  		v.AuxInt = 8
  9538  		v.AddArg(ptr)
  9539  		v0 := b.NewValue0(v.Line, OpMIPS64MOVVconst, config.fe.TypeUInt64())
  9540  		v0.AuxInt = 0
  9541  		v.AddArg(v0)
  9542  		v1 := b.NewValue0(v.Line, OpMIPS64MOVWstore, TypeMem)
  9543  		v1.AuxInt = 4
  9544  		v1.AddArg(ptr)
  9545  		v2 := b.NewValue0(v.Line, OpMIPS64MOVVconst, config.fe.TypeUInt64())
  9546  		v2.AuxInt = 0
  9547  		v1.AddArg(v2)
  9548  		v3 := b.NewValue0(v.Line, OpMIPS64MOVWstore, TypeMem)
  9549  		v3.AuxInt = 0
  9550  		v3.AddArg(ptr)
  9551  		v4 := b.NewValue0(v.Line, OpMIPS64MOVVconst, config.fe.TypeUInt64())
  9552  		v4.AuxInt = 0
  9553  		v3.AddArg(v4)
  9554  		v3.AddArg(mem)
  9555  		v1.AddArg(v3)
  9556  		v.AddArg(v1)
  9557  		return true
  9558  	}
  9559  	// match: (Zero [s] ptr mem)
  9560  	// cond: SizeAndAlign(s).Size() == 16 && SizeAndAlign(s).Align()%8 == 0
  9561  	// result: (MOVVstore [8] ptr (MOVVconst [0]) 		(MOVVstore [0] ptr (MOVVconst [0]) mem))
  9562  	for {
  9563  		s := v.AuxInt
  9564  		ptr := v.Args[0]
  9565  		mem := v.Args[1]
  9566  		if !(SizeAndAlign(s).Size() == 16 && SizeAndAlign(s).Align()%8 == 0) {
  9567  			break
  9568  		}
  9569  		v.reset(OpMIPS64MOVVstore)
  9570  		v.AuxInt = 8
  9571  		v.AddArg(ptr)
  9572  		v0 := b.NewValue0(v.Line, OpMIPS64MOVVconst, config.fe.TypeUInt64())
  9573  		v0.AuxInt = 0
  9574  		v.AddArg(v0)
  9575  		v1 := b.NewValue0(v.Line, OpMIPS64MOVVstore, TypeMem)
  9576  		v1.AuxInt = 0
  9577  		v1.AddArg(ptr)
  9578  		v2 := b.NewValue0(v.Line, OpMIPS64MOVVconst, config.fe.TypeUInt64())
  9579  		v2.AuxInt = 0
  9580  		v1.AddArg(v2)
  9581  		v1.AddArg(mem)
  9582  		v.AddArg(v1)
  9583  		return true
  9584  	}
  9585  	// match: (Zero [s] ptr mem)
  9586  	// cond: SizeAndAlign(s).Size() == 24 && SizeAndAlign(s).Align()%8 == 0
  9587  	// result: (MOVVstore [16] ptr (MOVVconst [0]) 		(MOVVstore [8] ptr (MOVVconst [0]) 			(MOVVstore [0] ptr (MOVVconst [0]) mem)))
  9588  	for {
  9589  		s := v.AuxInt
  9590  		ptr := v.Args[0]
  9591  		mem := v.Args[1]
  9592  		if !(SizeAndAlign(s).Size() == 24 && SizeAndAlign(s).Align()%8 == 0) {
  9593  			break
  9594  		}
  9595  		v.reset(OpMIPS64MOVVstore)
  9596  		v.AuxInt = 16
  9597  		v.AddArg(ptr)
  9598  		v0 := b.NewValue0(v.Line, OpMIPS64MOVVconst, config.fe.TypeUInt64())
  9599  		v0.AuxInt = 0
  9600  		v.AddArg(v0)
  9601  		v1 := b.NewValue0(v.Line, OpMIPS64MOVVstore, TypeMem)
  9602  		v1.AuxInt = 8
  9603  		v1.AddArg(ptr)
  9604  		v2 := b.NewValue0(v.Line, OpMIPS64MOVVconst, config.fe.TypeUInt64())
  9605  		v2.AuxInt = 0
  9606  		v1.AddArg(v2)
  9607  		v3 := b.NewValue0(v.Line, OpMIPS64MOVVstore, TypeMem)
  9608  		v3.AuxInt = 0
  9609  		v3.AddArg(ptr)
  9610  		v4 := b.NewValue0(v.Line, OpMIPS64MOVVconst, config.fe.TypeUInt64())
  9611  		v4.AuxInt = 0
  9612  		v3.AddArg(v4)
  9613  		v3.AddArg(mem)
  9614  		v1.AddArg(v3)
  9615  		v.AddArg(v1)
  9616  		return true
  9617  	}
  9618  	// match: (Zero [s] ptr mem)
  9619  	// cond: SizeAndAlign(s).Size()%8 == 0 && SizeAndAlign(s).Size() > 24 && SizeAndAlign(s).Size() <= 8*128 	&& SizeAndAlign(s).Align()%8 == 0 && !config.noDuffDevice
  9620  	// result: (DUFFZERO [8 * (128 - int64(SizeAndAlign(s).Size()/8))] ptr mem)
  9621  	for {
  9622  		s := v.AuxInt
  9623  		ptr := v.Args[0]
  9624  		mem := v.Args[1]
  9625  		if !(SizeAndAlign(s).Size()%8 == 0 && SizeAndAlign(s).Size() > 24 && SizeAndAlign(s).Size() <= 8*128 && SizeAndAlign(s).Align()%8 == 0 && !config.noDuffDevice) {
  9626  			break
  9627  		}
  9628  		v.reset(OpMIPS64DUFFZERO)
  9629  		v.AuxInt = 8 * (128 - int64(SizeAndAlign(s).Size()/8))
  9630  		v.AddArg(ptr)
  9631  		v.AddArg(mem)
  9632  		return true
  9633  	}
  9634  	// match: (Zero [s] ptr mem)
  9635  	// cond: (SizeAndAlign(s).Size() > 8*128 || config.noDuffDevice) || SizeAndAlign(s).Align()%8 != 0
  9636  	// result: (LoweredZero [SizeAndAlign(s).Align()] 		ptr 		(ADDVconst <ptr.Type> ptr [SizeAndAlign(s).Size()-moveSize(SizeAndAlign(s).Align(), config)]) 		mem)
  9637  	for {
  9638  		s := v.AuxInt
  9639  		ptr := v.Args[0]
  9640  		mem := v.Args[1]
  9641  		if !((SizeAndAlign(s).Size() > 8*128 || config.noDuffDevice) || SizeAndAlign(s).Align()%8 != 0) {
  9642  			break
  9643  		}
  9644  		v.reset(OpMIPS64LoweredZero)
  9645  		v.AuxInt = SizeAndAlign(s).Align()
  9646  		v.AddArg(ptr)
  9647  		v0 := b.NewValue0(v.Line, OpMIPS64ADDVconst, ptr.Type)
  9648  		v0.AuxInt = SizeAndAlign(s).Size() - moveSize(SizeAndAlign(s).Align(), config)
  9649  		v0.AddArg(ptr)
  9650  		v.AddArg(v0)
  9651  		v.AddArg(mem)
  9652  		return true
  9653  	}
  9654  	return false
  9655  }
  9656  func rewriteValueMIPS64_OpZeroExt16to32(v *Value, config *Config) bool {
  9657  	b := v.Block
  9658  	_ = b
  9659  	// match: (ZeroExt16to32 x)
  9660  	// cond:
  9661  	// result: (MOVHUreg x)
  9662  	for {
  9663  		x := v.Args[0]
  9664  		v.reset(OpMIPS64MOVHUreg)
  9665  		v.AddArg(x)
  9666  		return true
  9667  	}
  9668  }
  9669  func rewriteValueMIPS64_OpZeroExt16to64(v *Value, config *Config) bool {
  9670  	b := v.Block
  9671  	_ = b
  9672  	// match: (ZeroExt16to64 x)
  9673  	// cond:
  9674  	// result: (MOVHUreg x)
  9675  	for {
  9676  		x := v.Args[0]
  9677  		v.reset(OpMIPS64MOVHUreg)
  9678  		v.AddArg(x)
  9679  		return true
  9680  	}
  9681  }
  9682  func rewriteValueMIPS64_OpZeroExt32to64(v *Value, config *Config) bool {
  9683  	b := v.Block
  9684  	_ = b
  9685  	// match: (ZeroExt32to64 x)
  9686  	// cond:
  9687  	// result: (MOVWUreg x)
  9688  	for {
  9689  		x := v.Args[0]
  9690  		v.reset(OpMIPS64MOVWUreg)
  9691  		v.AddArg(x)
  9692  		return true
  9693  	}
  9694  }
  9695  func rewriteValueMIPS64_OpZeroExt8to16(v *Value, config *Config) bool {
  9696  	b := v.Block
  9697  	_ = b
  9698  	// match: (ZeroExt8to16 x)
  9699  	// cond:
  9700  	// result: (MOVBUreg x)
  9701  	for {
  9702  		x := v.Args[0]
  9703  		v.reset(OpMIPS64MOVBUreg)
  9704  		v.AddArg(x)
  9705  		return true
  9706  	}
  9707  }
  9708  func rewriteValueMIPS64_OpZeroExt8to32(v *Value, config *Config) bool {
  9709  	b := v.Block
  9710  	_ = b
  9711  	// match: (ZeroExt8to32 x)
  9712  	// cond:
  9713  	// result: (MOVBUreg x)
  9714  	for {
  9715  		x := v.Args[0]
  9716  		v.reset(OpMIPS64MOVBUreg)
  9717  		v.AddArg(x)
  9718  		return true
  9719  	}
  9720  }
  9721  func rewriteValueMIPS64_OpZeroExt8to64(v *Value, config *Config) bool {
  9722  	b := v.Block
  9723  	_ = b
  9724  	// match: (ZeroExt8to64 x)
  9725  	// cond:
  9726  	// result: (MOVBUreg x)
  9727  	for {
  9728  		x := v.Args[0]
  9729  		v.reset(OpMIPS64MOVBUreg)
  9730  		v.AddArg(x)
  9731  		return true
  9732  	}
  9733  }
// rewriteBlockMIPS64 lowers generic control-flow blocks to MIPS64 block
// kinds and simplifies already-lowered MIPS64 conditional blocks. It
// dispatches on b.Kind, inspects the block's control value, and on a
// match mutates b in place — new Kind, new control via SetControl, and
// (for constant-condition folds) branch order via swapSuccessors —
// then reports true so the driver re-runs the rule set to fixpoint.
// Rules within each case are tried in generated order; the first match
// wins. Reports false when no rule applies.
//
// NOTE(review): this file is autogenerated from gen/MIPS64.rules — any
// real change must be made in the rules file and regenerated.
func rewriteBlockMIPS64(b *Block, config *Config) bool {
	switch b.Kind {
	case BlockMIPS64EQ:
		// EQ: fold FP comparison flags into FPF/FPT branches, cancel the
		// (XORconst [1] cmp) boolean-negation wrapper by flipping to NE,
		// map compare-with-zero controls onto sign-test blocks, and
		// resolve constant controls to an unconditional First block.
		// match: (EQ (FPFlagTrue cmp) yes no)
		// cond:
		// result: (FPF cmp yes no)
		for {
			v := b.Control
			if v.Op != OpMIPS64FPFlagTrue {
				break
			}
			cmp := v.Args[0]
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockMIPS64FPF
			b.SetControl(cmp)
			_ = yes
			_ = no
			return true
		}
		// match: (EQ (FPFlagFalse cmp) yes no)
		// cond:
		// result: (FPT cmp yes no)
		for {
			v := b.Control
			if v.Op != OpMIPS64FPFlagFalse {
				break
			}
			cmp := v.Args[0]
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockMIPS64FPT
			b.SetControl(cmp)
			_ = yes
			_ = no
			return true
		}
		// match: (EQ (XORconst [1] cmp:(SGT _ _)) yes no)
		// cond:
		// result: (NE cmp yes no)
		for {
			v := b.Control
			if v.Op != OpMIPS64XORconst {
				break
			}
			if v.AuxInt != 1 {
				break
			}
			cmp := v.Args[0]
			if cmp.Op != OpMIPS64SGT {
				break
			}
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockMIPS64NE
			b.SetControl(cmp)
			_ = yes
			_ = no
			return true
		}
		// match: (EQ (XORconst [1] cmp:(SGTU _ _)) yes no)
		// cond:
		// result: (NE cmp yes no)
		for {
			v := b.Control
			if v.Op != OpMIPS64XORconst {
				break
			}
			if v.AuxInt != 1 {
				break
			}
			cmp := v.Args[0]
			if cmp.Op != OpMIPS64SGTU {
				break
			}
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockMIPS64NE
			b.SetControl(cmp)
			_ = yes
			_ = no
			return true
		}
		// match: (EQ (XORconst [1] cmp:(SGTconst _)) yes no)
		// cond:
		// result: (NE cmp yes no)
		for {
			v := b.Control
			if v.Op != OpMIPS64XORconst {
				break
			}
			if v.AuxInt != 1 {
				break
			}
			cmp := v.Args[0]
			if cmp.Op != OpMIPS64SGTconst {
				break
			}
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockMIPS64NE
			b.SetControl(cmp)
			_ = yes
			_ = no
			return true
		}
		// match: (EQ (XORconst [1] cmp:(SGTUconst _)) yes no)
		// cond:
		// result: (NE cmp yes no)
		for {
			v := b.Control
			if v.Op != OpMIPS64XORconst {
				break
			}
			if v.AuxInt != 1 {
				break
			}
			cmp := v.Args[0]
			if cmp.Op != OpMIPS64SGTUconst {
				break
			}
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockMIPS64NE
			b.SetControl(cmp)
			_ = yes
			_ = no
			return true
		}
		// match: (EQ (SGTUconst [1] x) yes no)
		// cond:
		// result: (NE x yes no)
		for {
			v := b.Control
			if v.Op != OpMIPS64SGTUconst {
				break
			}
			if v.AuxInt != 1 {
				break
			}
			x := v.Args[0]
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockMIPS64NE
			b.SetControl(x)
			_ = yes
			_ = no
			return true
		}
		// match: (EQ (SGTU x (MOVVconst [0])) yes no)
		// cond:
		// result: (EQ x yes no)
		for {
			v := b.Control
			if v.Op != OpMIPS64SGTU {
				break
			}
			x := v.Args[0]
			v_1 := v.Args[1]
			if v_1.Op != OpMIPS64MOVVconst {
				break
			}
			if v_1.AuxInt != 0 {
				break
			}
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockMIPS64EQ
			b.SetControl(x)
			_ = yes
			_ = no
			return true
		}
		// match: (EQ (SGTconst [0] x) yes no)
		// cond:
		// result: (GEZ x yes no)
		for {
			v := b.Control
			if v.Op != OpMIPS64SGTconst {
				break
			}
			if v.AuxInt != 0 {
				break
			}
			x := v.Args[0]
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockMIPS64GEZ
			b.SetControl(x)
			_ = yes
			_ = no
			return true
		}
		// match: (EQ (SGT x (MOVVconst [0])) yes no)
		// cond:
		// result: (LEZ x yes no)
		for {
			v := b.Control
			if v.Op != OpMIPS64SGT {
				break
			}
			x := v.Args[0]
			v_1 := v.Args[1]
			if v_1.Op != OpMIPS64MOVVconst {
				break
			}
			if v_1.AuxInt != 0 {
				break
			}
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockMIPS64LEZ
			b.SetControl(x)
			_ = yes
			_ = no
			return true
		}
		// match: (EQ  (MOVVconst [0]) yes no)
		// cond:
		// result: (First nil yes no)
		for {
			v := b.Control
			if v.Op != OpMIPS64MOVVconst {
				break
			}
			if v.AuxInt != 0 {
				break
			}
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockFirst
			b.SetControl(nil)
			_ = yes
			_ = no
			return true
		}
		// match: (EQ  (MOVVconst [c]) yes no)
		// cond: c != 0
		// result: (First nil no yes)
		for {
			v := b.Control
			if v.Op != OpMIPS64MOVVconst {
				break
			}
			c := v.AuxInt
			yes := b.Succs[0]
			no := b.Succs[1]
			if !(c != 0) {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.swapSuccessors()
			_ = no
			_ = yes
			return true
		}
	case BlockMIPS64GEZ:
		// GEZ: a constant control decides the branch statically.
		// match: (GEZ (MOVVconst [c]) yes no)
		// cond: c >= 0
		// result: (First nil yes no)
		for {
			v := b.Control
			if v.Op != OpMIPS64MOVVconst {
				break
			}
			c := v.AuxInt
			yes := b.Succs[0]
			no := b.Succs[1]
			if !(c >= 0) {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			_ = yes
			_ = no
			return true
		}
		// match: (GEZ (MOVVconst [c]) yes no)
		// cond: c <  0
		// result: (First nil no yes)
		for {
			v := b.Control
			if v.Op != OpMIPS64MOVVconst {
				break
			}
			c := v.AuxInt
			yes := b.Succs[0]
			no := b.Succs[1]
			if !(c < 0) {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.swapSuccessors()
			_ = no
			_ = yes
			return true
		}
	case BlockMIPS64GTZ:
		// GTZ: a constant control decides the branch statically.
		// match: (GTZ (MOVVconst [c]) yes no)
		// cond: c >  0
		// result: (First nil yes no)
		for {
			v := b.Control
			if v.Op != OpMIPS64MOVVconst {
				break
			}
			c := v.AuxInt
			yes := b.Succs[0]
			no := b.Succs[1]
			if !(c > 0) {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			_ = yes
			_ = no
			return true
		}
		// match: (GTZ (MOVVconst [c]) yes no)
		// cond: c <= 0
		// result: (First nil no yes)
		for {
			v := b.Control
			if v.Op != OpMIPS64MOVVconst {
				break
			}
			c := v.AuxInt
			yes := b.Succs[0]
			no := b.Succs[1]
			if !(c <= 0) {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.swapSuccessors()
			_ = no
			_ = yes
			return true
		}
	case BlockIf:
		// If: the generic boolean branch lowers to NE (branch if the
		// condition value is non-zero).
		// match: (If cond yes no)
		// cond:
		// result: (NE cond yes no)
		for {
			v := b.Control
			_ = v
			cond := b.Control
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockMIPS64NE
			b.SetControl(cond)
			_ = yes
			_ = no
			return true
		}
	case BlockMIPS64LEZ:
		// LEZ: a constant control decides the branch statically.
		// match: (LEZ (MOVVconst [c]) yes no)
		// cond: c <= 0
		// result: (First nil yes no)
		for {
			v := b.Control
			if v.Op != OpMIPS64MOVVconst {
				break
			}
			c := v.AuxInt
			yes := b.Succs[0]
			no := b.Succs[1]
			if !(c <= 0) {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			_ = yes
			_ = no
			return true
		}
		// match: (LEZ (MOVVconst [c]) yes no)
		// cond: c >  0
		// result: (First nil no yes)
		for {
			v := b.Control
			if v.Op != OpMIPS64MOVVconst {
				break
			}
			c := v.AuxInt
			yes := b.Succs[0]
			no := b.Succs[1]
			if !(c > 0) {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.swapSuccessors()
			_ = no
			_ = yes
			return true
		}
	case BlockMIPS64LTZ:
		// LTZ: a constant control decides the branch statically.
		// match: (LTZ (MOVVconst [c]) yes no)
		// cond: c <  0
		// result: (First nil yes no)
		for {
			v := b.Control
			if v.Op != OpMIPS64MOVVconst {
				break
			}
			c := v.AuxInt
			yes := b.Succs[0]
			no := b.Succs[1]
			if !(c < 0) {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			_ = yes
			_ = no
			return true
		}
		// match: (LTZ (MOVVconst [c]) yes no)
		// cond: c >= 0
		// result: (First nil no yes)
		for {
			v := b.Control
			if v.Op != OpMIPS64MOVVconst {
				break
			}
			c := v.AuxInt
			yes := b.Succs[0]
			no := b.Succs[1]
			if !(c >= 0) {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.swapSuccessors()
			_ = no
			_ = yes
			return true
		}
	case BlockMIPS64NE:
		// NE: mirror image of the EQ case — fold FP flags, cancel
		// (XORconst [1] cmp) negations by flipping to EQ, map
		// compare-with-zero controls onto sign-test blocks, and resolve
		// constant controls to First.
		// match: (NE (FPFlagTrue cmp) yes no)
		// cond:
		// result: (FPT cmp yes no)
		for {
			v := b.Control
			if v.Op != OpMIPS64FPFlagTrue {
				break
			}
			cmp := v.Args[0]
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockMIPS64FPT
			b.SetControl(cmp)
			_ = yes
			_ = no
			return true
		}
		// match: (NE (FPFlagFalse cmp) yes no)
		// cond:
		// result: (FPF cmp yes no)
		for {
			v := b.Control
			if v.Op != OpMIPS64FPFlagFalse {
				break
			}
			cmp := v.Args[0]
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockMIPS64FPF
			b.SetControl(cmp)
			_ = yes
			_ = no
			return true
		}
		// match: (NE (XORconst [1] cmp:(SGT _ _)) yes no)
		// cond:
		// result: (EQ cmp yes no)
		for {
			v := b.Control
			if v.Op != OpMIPS64XORconst {
				break
			}
			if v.AuxInt != 1 {
				break
			}
			cmp := v.Args[0]
			if cmp.Op != OpMIPS64SGT {
				break
			}
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockMIPS64EQ
			b.SetControl(cmp)
			_ = yes
			_ = no
			return true
		}
		// match: (NE (XORconst [1] cmp:(SGTU _ _)) yes no)
		// cond:
		// result: (EQ cmp yes no)
		for {
			v := b.Control
			if v.Op != OpMIPS64XORconst {
				break
			}
			if v.AuxInt != 1 {
				break
			}
			cmp := v.Args[0]
			if cmp.Op != OpMIPS64SGTU {
				break
			}
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockMIPS64EQ
			b.SetControl(cmp)
			_ = yes
			_ = no
			return true
		}
		// match: (NE (XORconst [1] cmp:(SGTconst _)) yes no)
		// cond:
		// result: (EQ cmp yes no)
		for {
			v := b.Control
			if v.Op != OpMIPS64XORconst {
				break
			}
			if v.AuxInt != 1 {
				break
			}
			cmp := v.Args[0]
			if cmp.Op != OpMIPS64SGTconst {
				break
			}
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockMIPS64EQ
			b.SetControl(cmp)
			_ = yes
			_ = no
			return true
		}
		// match: (NE (XORconst [1] cmp:(SGTUconst _)) yes no)
		// cond:
		// result: (EQ cmp yes no)
		for {
			v := b.Control
			if v.Op != OpMIPS64XORconst {
				break
			}
			if v.AuxInt != 1 {
				break
			}
			cmp := v.Args[0]
			if cmp.Op != OpMIPS64SGTUconst {
				break
			}
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockMIPS64EQ
			b.SetControl(cmp)
			_ = yes
			_ = no
			return true
		}
		// match: (NE (SGTUconst [1] x) yes no)
		// cond:
		// result: (EQ x yes no)
		for {
			v := b.Control
			if v.Op != OpMIPS64SGTUconst {
				break
			}
			if v.AuxInt != 1 {
				break
			}
			x := v.Args[0]
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockMIPS64EQ
			b.SetControl(x)
			_ = yes
			_ = no
			return true
		}
		// match: (NE (SGTU x (MOVVconst [0])) yes no)
		// cond:
		// result: (NE x yes no)
		for {
			v := b.Control
			if v.Op != OpMIPS64SGTU {
				break
			}
			x := v.Args[0]
			v_1 := v.Args[1]
			if v_1.Op != OpMIPS64MOVVconst {
				break
			}
			if v_1.AuxInt != 0 {
				break
			}
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockMIPS64NE
			b.SetControl(x)
			_ = yes
			_ = no
			return true
		}
		// match: (NE (SGTconst [0] x) yes no)
		// cond:
		// result: (LTZ x yes no)
		for {
			v := b.Control
			if v.Op != OpMIPS64SGTconst {
				break
			}
			if v.AuxInt != 0 {
				break
			}
			x := v.Args[0]
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockMIPS64LTZ
			b.SetControl(x)
			_ = yes
			_ = no
			return true
		}
		// match: (NE (SGT x (MOVVconst [0])) yes no)
		// cond:
		// result: (GTZ x yes no)
		for {
			v := b.Control
			if v.Op != OpMIPS64SGT {
				break
			}
			x := v.Args[0]
			v_1 := v.Args[1]
			if v_1.Op != OpMIPS64MOVVconst {
				break
			}
			if v_1.AuxInt != 0 {
				break
			}
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockMIPS64GTZ
			b.SetControl(x)
			_ = yes
			_ = no
			return true
		}
		// match: (NE  (MOVVconst [0]) yes no)
		// cond:
		// result: (First nil no yes)
		for {
			v := b.Control
			if v.Op != OpMIPS64MOVVconst {
				break
			}
			if v.AuxInt != 0 {
				break
			}
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.swapSuccessors()
			_ = no
			_ = yes
			return true
		}
		// match: (NE  (MOVVconst [c]) yes no)
		// cond: c != 0
		// result: (First nil yes no)
		for {
			v := b.Control
			if v.Op != OpMIPS64MOVVconst {
				break
			}
			c := v.AuxInt
			yes := b.Succs[0]
			no := b.Succs[1]
			if !(c != 0) {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			_ = yes
			_ = no
			return true
		}
	}
	return false
}