github.com/tidwall/go@v0.0.0-20170415222209-6694a6888b7d/src/cmd/compile/internal/ssa/rewriteMIPS64.go (about)

     1  // Code generated from gen/MIPS64.rules; DO NOT EDIT.
     2  // generated with: cd gen; go run *.go
     3  
     4  package ssa
     5  
     6  import "math"
     7  import "cmd/internal/obj"
     8  
     9  var _ = math.MinInt8 // in case not otherwise used
    10  var _ = obj.ANOP     // in case not otherwise used
// rewriteValueMIPS64 dispatches a generic SSA value to the per-opcode
// rewrite function for the MIPS64 lowering pass. It reports whether any
// rewrite rule changed v. Opcodes without a case fall through and are
// left untouched (return false).
func rewriteValueMIPS64(v *Value) bool {
	switch v.Op {
	case OpAdd16:
		return rewriteValueMIPS64_OpAdd16(v)
	case OpAdd32:
		return rewriteValueMIPS64_OpAdd32(v)
	case OpAdd32F:
		return rewriteValueMIPS64_OpAdd32F(v)
	case OpAdd64:
		return rewriteValueMIPS64_OpAdd64(v)
	case OpAdd64F:
		return rewriteValueMIPS64_OpAdd64F(v)
	case OpAdd8:
		return rewriteValueMIPS64_OpAdd8(v)
	case OpAddPtr:
		return rewriteValueMIPS64_OpAddPtr(v)
	case OpAddr:
		return rewriteValueMIPS64_OpAddr(v)
	case OpAnd16:
		return rewriteValueMIPS64_OpAnd16(v)
	case OpAnd32:
		return rewriteValueMIPS64_OpAnd32(v)
	case OpAnd64:
		return rewriteValueMIPS64_OpAnd64(v)
	case OpAnd8:
		return rewriteValueMIPS64_OpAnd8(v)
	case OpAndB:
		return rewriteValueMIPS64_OpAndB(v)
	case OpAvg64u:
		return rewriteValueMIPS64_OpAvg64u(v)
	case OpClosureCall:
		return rewriteValueMIPS64_OpClosureCall(v)
	case OpCom16:
		return rewriteValueMIPS64_OpCom16(v)
	case OpCom32:
		return rewriteValueMIPS64_OpCom32(v)
	case OpCom64:
		return rewriteValueMIPS64_OpCom64(v)
	case OpCom8:
		return rewriteValueMIPS64_OpCom8(v)
	case OpConst16:
		return rewriteValueMIPS64_OpConst16(v)
	case OpConst32:
		return rewriteValueMIPS64_OpConst32(v)
	case OpConst32F:
		return rewriteValueMIPS64_OpConst32F(v)
	case OpConst64:
		return rewriteValueMIPS64_OpConst64(v)
	case OpConst64F:
		return rewriteValueMIPS64_OpConst64F(v)
	case OpConst8:
		return rewriteValueMIPS64_OpConst8(v)
	case OpConstBool:
		return rewriteValueMIPS64_OpConstBool(v)
	case OpConstNil:
		return rewriteValueMIPS64_OpConstNil(v)
	case OpConvert:
		return rewriteValueMIPS64_OpConvert(v)
	case OpCvt32Fto32:
		return rewriteValueMIPS64_OpCvt32Fto32(v)
	case OpCvt32Fto64:
		return rewriteValueMIPS64_OpCvt32Fto64(v)
	case OpCvt32Fto64F:
		return rewriteValueMIPS64_OpCvt32Fto64F(v)
	case OpCvt32to32F:
		return rewriteValueMIPS64_OpCvt32to32F(v)
	case OpCvt32to64F:
		return rewriteValueMIPS64_OpCvt32to64F(v)
	case OpCvt64Fto32:
		return rewriteValueMIPS64_OpCvt64Fto32(v)
	case OpCvt64Fto32F:
		return rewriteValueMIPS64_OpCvt64Fto32F(v)
	case OpCvt64Fto64:
		return rewriteValueMIPS64_OpCvt64Fto64(v)
	case OpCvt64to32F:
		return rewriteValueMIPS64_OpCvt64to32F(v)
	case OpCvt64to64F:
		return rewriteValueMIPS64_OpCvt64to64F(v)
	case OpDiv16:
		return rewriteValueMIPS64_OpDiv16(v)
	case OpDiv16u:
		return rewriteValueMIPS64_OpDiv16u(v)
	case OpDiv32:
		return rewriteValueMIPS64_OpDiv32(v)
	case OpDiv32F:
		return rewriteValueMIPS64_OpDiv32F(v)
	case OpDiv32u:
		return rewriteValueMIPS64_OpDiv32u(v)
	case OpDiv64:
		return rewriteValueMIPS64_OpDiv64(v)
	case OpDiv64F:
		return rewriteValueMIPS64_OpDiv64F(v)
	case OpDiv64u:
		return rewriteValueMIPS64_OpDiv64u(v)
	case OpDiv8:
		return rewriteValueMIPS64_OpDiv8(v)
	case OpDiv8u:
		return rewriteValueMIPS64_OpDiv8u(v)
	case OpEq16:
		return rewriteValueMIPS64_OpEq16(v)
	case OpEq32:
		return rewriteValueMIPS64_OpEq32(v)
	case OpEq32F:
		return rewriteValueMIPS64_OpEq32F(v)
	case OpEq64:
		return rewriteValueMIPS64_OpEq64(v)
	case OpEq64F:
		return rewriteValueMIPS64_OpEq64F(v)
	case OpEq8:
		return rewriteValueMIPS64_OpEq8(v)
	case OpEqB:
		return rewriteValueMIPS64_OpEqB(v)
	case OpEqPtr:
		return rewriteValueMIPS64_OpEqPtr(v)
	case OpGeq16:
		return rewriteValueMIPS64_OpGeq16(v)
	case OpGeq16U:
		return rewriteValueMIPS64_OpGeq16U(v)
	case OpGeq32:
		return rewriteValueMIPS64_OpGeq32(v)
	case OpGeq32F:
		return rewriteValueMIPS64_OpGeq32F(v)
	case OpGeq32U:
		return rewriteValueMIPS64_OpGeq32U(v)
	case OpGeq64:
		return rewriteValueMIPS64_OpGeq64(v)
	case OpGeq64F:
		return rewriteValueMIPS64_OpGeq64F(v)
	case OpGeq64U:
		return rewriteValueMIPS64_OpGeq64U(v)
	case OpGeq8:
		return rewriteValueMIPS64_OpGeq8(v)
	case OpGeq8U:
		return rewriteValueMIPS64_OpGeq8U(v)
	case OpGetClosurePtr:
		return rewriteValueMIPS64_OpGetClosurePtr(v)
	case OpGreater16:
		return rewriteValueMIPS64_OpGreater16(v)
	case OpGreater16U:
		return rewriteValueMIPS64_OpGreater16U(v)
	case OpGreater32:
		return rewriteValueMIPS64_OpGreater32(v)
	case OpGreater32F:
		return rewriteValueMIPS64_OpGreater32F(v)
	case OpGreater32U:
		return rewriteValueMIPS64_OpGreater32U(v)
	case OpGreater64:
		return rewriteValueMIPS64_OpGreater64(v)
	case OpGreater64F:
		return rewriteValueMIPS64_OpGreater64F(v)
	case OpGreater64U:
		return rewriteValueMIPS64_OpGreater64U(v)
	case OpGreater8:
		return rewriteValueMIPS64_OpGreater8(v)
	case OpGreater8U:
		return rewriteValueMIPS64_OpGreater8U(v)
	case OpHmul32:
		return rewriteValueMIPS64_OpHmul32(v)
	case OpHmul32u:
		return rewriteValueMIPS64_OpHmul32u(v)
	case OpHmul64:
		return rewriteValueMIPS64_OpHmul64(v)
	case OpHmul64u:
		return rewriteValueMIPS64_OpHmul64u(v)
	case OpInterCall:
		return rewriteValueMIPS64_OpInterCall(v)
	case OpIsInBounds:
		return rewriteValueMIPS64_OpIsInBounds(v)
	case OpIsNonNil:
		return rewriteValueMIPS64_OpIsNonNil(v)
	case OpIsSliceInBounds:
		return rewriteValueMIPS64_OpIsSliceInBounds(v)
	case OpLeq16:
		return rewriteValueMIPS64_OpLeq16(v)
	case OpLeq16U:
		return rewriteValueMIPS64_OpLeq16U(v)
	case OpLeq32:
		return rewriteValueMIPS64_OpLeq32(v)
	case OpLeq32F:
		return rewriteValueMIPS64_OpLeq32F(v)
	case OpLeq32U:
		return rewriteValueMIPS64_OpLeq32U(v)
	case OpLeq64:
		return rewriteValueMIPS64_OpLeq64(v)
	case OpLeq64F:
		return rewriteValueMIPS64_OpLeq64F(v)
	case OpLeq64U:
		return rewriteValueMIPS64_OpLeq64U(v)
	case OpLeq8:
		return rewriteValueMIPS64_OpLeq8(v)
	case OpLeq8U:
		return rewriteValueMIPS64_OpLeq8U(v)
	case OpLess16:
		return rewriteValueMIPS64_OpLess16(v)
	case OpLess16U:
		return rewriteValueMIPS64_OpLess16U(v)
	case OpLess32:
		return rewriteValueMIPS64_OpLess32(v)
	case OpLess32F:
		return rewriteValueMIPS64_OpLess32F(v)
	case OpLess32U:
		return rewriteValueMIPS64_OpLess32U(v)
	case OpLess64:
		return rewriteValueMIPS64_OpLess64(v)
	case OpLess64F:
		return rewriteValueMIPS64_OpLess64F(v)
	case OpLess64U:
		return rewriteValueMIPS64_OpLess64U(v)
	case OpLess8:
		return rewriteValueMIPS64_OpLess8(v)
	case OpLess8U:
		return rewriteValueMIPS64_OpLess8U(v)
	case OpLoad:
		return rewriteValueMIPS64_OpLoad(v)
	case OpLsh16x16:
		return rewriteValueMIPS64_OpLsh16x16(v)
	case OpLsh16x32:
		return rewriteValueMIPS64_OpLsh16x32(v)
	case OpLsh16x64:
		return rewriteValueMIPS64_OpLsh16x64(v)
	case OpLsh16x8:
		return rewriteValueMIPS64_OpLsh16x8(v)
	case OpLsh32x16:
		return rewriteValueMIPS64_OpLsh32x16(v)
	case OpLsh32x32:
		return rewriteValueMIPS64_OpLsh32x32(v)
	case OpLsh32x64:
		return rewriteValueMIPS64_OpLsh32x64(v)
	case OpLsh32x8:
		return rewriteValueMIPS64_OpLsh32x8(v)
	case OpLsh64x16:
		return rewriteValueMIPS64_OpLsh64x16(v)
	case OpLsh64x32:
		return rewriteValueMIPS64_OpLsh64x32(v)
	case OpLsh64x64:
		return rewriteValueMIPS64_OpLsh64x64(v)
	case OpLsh64x8:
		return rewriteValueMIPS64_OpLsh64x8(v)
	case OpLsh8x16:
		return rewriteValueMIPS64_OpLsh8x16(v)
	case OpLsh8x32:
		return rewriteValueMIPS64_OpLsh8x32(v)
	case OpLsh8x64:
		return rewriteValueMIPS64_OpLsh8x64(v)
	case OpLsh8x8:
		return rewriteValueMIPS64_OpLsh8x8(v)
	case OpMIPS64ADDV:
		return rewriteValueMIPS64_OpMIPS64ADDV(v)
	case OpMIPS64ADDVconst:
		return rewriteValueMIPS64_OpMIPS64ADDVconst(v)
	case OpMIPS64AND:
		return rewriteValueMIPS64_OpMIPS64AND(v)
	case OpMIPS64ANDconst:
		return rewriteValueMIPS64_OpMIPS64ANDconst(v)
	case OpMIPS64MOVBUload:
		return rewriteValueMIPS64_OpMIPS64MOVBUload(v)
	case OpMIPS64MOVBUreg:
		return rewriteValueMIPS64_OpMIPS64MOVBUreg(v)
	case OpMIPS64MOVBload:
		return rewriteValueMIPS64_OpMIPS64MOVBload(v)
	case OpMIPS64MOVBreg:
		return rewriteValueMIPS64_OpMIPS64MOVBreg(v)
	case OpMIPS64MOVBstore:
		return rewriteValueMIPS64_OpMIPS64MOVBstore(v)
	case OpMIPS64MOVBstorezero:
		return rewriteValueMIPS64_OpMIPS64MOVBstorezero(v)
	case OpMIPS64MOVDload:
		return rewriteValueMIPS64_OpMIPS64MOVDload(v)
	case OpMIPS64MOVDstore:
		return rewriteValueMIPS64_OpMIPS64MOVDstore(v)
	case OpMIPS64MOVFload:
		return rewriteValueMIPS64_OpMIPS64MOVFload(v)
	case OpMIPS64MOVFstore:
		return rewriteValueMIPS64_OpMIPS64MOVFstore(v)
	case OpMIPS64MOVHUload:
		return rewriteValueMIPS64_OpMIPS64MOVHUload(v)
	case OpMIPS64MOVHUreg:
		return rewriteValueMIPS64_OpMIPS64MOVHUreg(v)
	case OpMIPS64MOVHload:
		return rewriteValueMIPS64_OpMIPS64MOVHload(v)
	case OpMIPS64MOVHreg:
		return rewriteValueMIPS64_OpMIPS64MOVHreg(v)
	case OpMIPS64MOVHstore:
		return rewriteValueMIPS64_OpMIPS64MOVHstore(v)
	case OpMIPS64MOVHstorezero:
		return rewriteValueMIPS64_OpMIPS64MOVHstorezero(v)
	case OpMIPS64MOVVload:
		return rewriteValueMIPS64_OpMIPS64MOVVload(v)
	case OpMIPS64MOVVreg:
		return rewriteValueMIPS64_OpMIPS64MOVVreg(v)
	case OpMIPS64MOVVstore:
		return rewriteValueMIPS64_OpMIPS64MOVVstore(v)
	case OpMIPS64MOVVstorezero:
		return rewriteValueMIPS64_OpMIPS64MOVVstorezero(v)
	case OpMIPS64MOVWUload:
		return rewriteValueMIPS64_OpMIPS64MOVWUload(v)
	case OpMIPS64MOVWUreg:
		return rewriteValueMIPS64_OpMIPS64MOVWUreg(v)
	case OpMIPS64MOVWload:
		return rewriteValueMIPS64_OpMIPS64MOVWload(v)
	case OpMIPS64MOVWreg:
		return rewriteValueMIPS64_OpMIPS64MOVWreg(v)
	case OpMIPS64MOVWstore:
		return rewriteValueMIPS64_OpMIPS64MOVWstore(v)
	case OpMIPS64MOVWstorezero:
		return rewriteValueMIPS64_OpMIPS64MOVWstorezero(v)
	case OpMIPS64NEGV:
		return rewriteValueMIPS64_OpMIPS64NEGV(v)
	case OpMIPS64NOR:
		return rewriteValueMIPS64_OpMIPS64NOR(v)
	case OpMIPS64NORconst:
		return rewriteValueMIPS64_OpMIPS64NORconst(v)
	case OpMIPS64OR:
		return rewriteValueMIPS64_OpMIPS64OR(v)
	case OpMIPS64ORconst:
		return rewriteValueMIPS64_OpMIPS64ORconst(v)
	case OpMIPS64SGT:
		return rewriteValueMIPS64_OpMIPS64SGT(v)
	case OpMIPS64SGTU:
		return rewriteValueMIPS64_OpMIPS64SGTU(v)
	case OpMIPS64SGTUconst:
		return rewriteValueMIPS64_OpMIPS64SGTUconst(v)
	case OpMIPS64SGTconst:
		return rewriteValueMIPS64_OpMIPS64SGTconst(v)
	case OpMIPS64SLLV:
		return rewriteValueMIPS64_OpMIPS64SLLV(v)
	case OpMIPS64SLLVconst:
		return rewriteValueMIPS64_OpMIPS64SLLVconst(v)
	case OpMIPS64SRAV:
		return rewriteValueMIPS64_OpMIPS64SRAV(v)
	case OpMIPS64SRAVconst:
		return rewriteValueMIPS64_OpMIPS64SRAVconst(v)
	case OpMIPS64SRLV:
		return rewriteValueMIPS64_OpMIPS64SRLV(v)
	case OpMIPS64SRLVconst:
		return rewriteValueMIPS64_OpMIPS64SRLVconst(v)
	case OpMIPS64SUBV:
		return rewriteValueMIPS64_OpMIPS64SUBV(v)
	case OpMIPS64SUBVconst:
		return rewriteValueMIPS64_OpMIPS64SUBVconst(v)
	case OpMIPS64XOR:
		return rewriteValueMIPS64_OpMIPS64XOR(v)
	case OpMIPS64XORconst:
		return rewriteValueMIPS64_OpMIPS64XORconst(v)
	case OpMod16:
		return rewriteValueMIPS64_OpMod16(v)
	case OpMod16u:
		return rewriteValueMIPS64_OpMod16u(v)
	case OpMod32:
		return rewriteValueMIPS64_OpMod32(v)
	case OpMod32u:
		return rewriteValueMIPS64_OpMod32u(v)
	case OpMod64:
		return rewriteValueMIPS64_OpMod64(v)
	case OpMod64u:
		return rewriteValueMIPS64_OpMod64u(v)
	case OpMod8:
		return rewriteValueMIPS64_OpMod8(v)
	case OpMod8u:
		return rewriteValueMIPS64_OpMod8u(v)
	case OpMove:
		return rewriteValueMIPS64_OpMove(v)
	case OpMul16:
		return rewriteValueMIPS64_OpMul16(v)
	case OpMul32:
		return rewriteValueMIPS64_OpMul32(v)
	case OpMul32F:
		return rewriteValueMIPS64_OpMul32F(v)
	case OpMul64:
		return rewriteValueMIPS64_OpMul64(v)
	case OpMul64F:
		return rewriteValueMIPS64_OpMul64F(v)
	case OpMul8:
		return rewriteValueMIPS64_OpMul8(v)
	case OpNeg16:
		return rewriteValueMIPS64_OpNeg16(v)
	case OpNeg32:
		return rewriteValueMIPS64_OpNeg32(v)
	case OpNeg32F:
		return rewriteValueMIPS64_OpNeg32F(v)
	case OpNeg64:
		return rewriteValueMIPS64_OpNeg64(v)
	case OpNeg64F:
		return rewriteValueMIPS64_OpNeg64F(v)
	case OpNeg8:
		return rewriteValueMIPS64_OpNeg8(v)
	case OpNeq16:
		return rewriteValueMIPS64_OpNeq16(v)
	case OpNeq32:
		return rewriteValueMIPS64_OpNeq32(v)
	case OpNeq32F:
		return rewriteValueMIPS64_OpNeq32F(v)
	case OpNeq64:
		return rewriteValueMIPS64_OpNeq64(v)
	case OpNeq64F:
		return rewriteValueMIPS64_OpNeq64F(v)
	case OpNeq8:
		return rewriteValueMIPS64_OpNeq8(v)
	case OpNeqB:
		return rewriteValueMIPS64_OpNeqB(v)
	case OpNeqPtr:
		return rewriteValueMIPS64_OpNeqPtr(v)
	case OpNilCheck:
		return rewriteValueMIPS64_OpNilCheck(v)
	case OpNot:
		return rewriteValueMIPS64_OpNot(v)
	case OpOffPtr:
		return rewriteValueMIPS64_OpOffPtr(v)
	case OpOr16:
		return rewriteValueMIPS64_OpOr16(v)
	case OpOr32:
		return rewriteValueMIPS64_OpOr32(v)
	case OpOr64:
		return rewriteValueMIPS64_OpOr64(v)
	case OpOr8:
		return rewriteValueMIPS64_OpOr8(v)
	case OpOrB:
		return rewriteValueMIPS64_OpOrB(v)
	case OpRound32F:
		return rewriteValueMIPS64_OpRound32F(v)
	case OpRound64F:
		return rewriteValueMIPS64_OpRound64F(v)
	case OpRsh16Ux16:
		return rewriteValueMIPS64_OpRsh16Ux16(v)
	case OpRsh16Ux32:
		return rewriteValueMIPS64_OpRsh16Ux32(v)
	case OpRsh16Ux64:
		return rewriteValueMIPS64_OpRsh16Ux64(v)
	case OpRsh16Ux8:
		return rewriteValueMIPS64_OpRsh16Ux8(v)
	case OpRsh16x16:
		return rewriteValueMIPS64_OpRsh16x16(v)
	case OpRsh16x32:
		return rewriteValueMIPS64_OpRsh16x32(v)
	case OpRsh16x64:
		return rewriteValueMIPS64_OpRsh16x64(v)
	case OpRsh16x8:
		return rewriteValueMIPS64_OpRsh16x8(v)
	case OpRsh32Ux16:
		return rewriteValueMIPS64_OpRsh32Ux16(v)
	case OpRsh32Ux32:
		return rewriteValueMIPS64_OpRsh32Ux32(v)
	case OpRsh32Ux64:
		return rewriteValueMIPS64_OpRsh32Ux64(v)
	case OpRsh32Ux8:
		return rewriteValueMIPS64_OpRsh32Ux8(v)
	case OpRsh32x16:
		return rewriteValueMIPS64_OpRsh32x16(v)
	case OpRsh32x32:
		return rewriteValueMIPS64_OpRsh32x32(v)
	case OpRsh32x64:
		return rewriteValueMIPS64_OpRsh32x64(v)
	case OpRsh32x8:
		return rewriteValueMIPS64_OpRsh32x8(v)
	case OpRsh64Ux16:
		return rewriteValueMIPS64_OpRsh64Ux16(v)
	case OpRsh64Ux32:
		return rewriteValueMIPS64_OpRsh64Ux32(v)
	case OpRsh64Ux64:
		return rewriteValueMIPS64_OpRsh64Ux64(v)
	case OpRsh64Ux8:
		return rewriteValueMIPS64_OpRsh64Ux8(v)
	case OpRsh64x16:
		return rewriteValueMIPS64_OpRsh64x16(v)
	case OpRsh64x32:
		return rewriteValueMIPS64_OpRsh64x32(v)
	case OpRsh64x64:
		return rewriteValueMIPS64_OpRsh64x64(v)
	case OpRsh64x8:
		return rewriteValueMIPS64_OpRsh64x8(v)
	case OpRsh8Ux16:
		return rewriteValueMIPS64_OpRsh8Ux16(v)
	case OpRsh8Ux32:
		return rewriteValueMIPS64_OpRsh8Ux32(v)
	case OpRsh8Ux64:
		return rewriteValueMIPS64_OpRsh8Ux64(v)
	case OpRsh8Ux8:
		return rewriteValueMIPS64_OpRsh8Ux8(v)
	case OpRsh8x16:
		return rewriteValueMIPS64_OpRsh8x16(v)
	case OpRsh8x32:
		return rewriteValueMIPS64_OpRsh8x32(v)
	case OpRsh8x64:
		return rewriteValueMIPS64_OpRsh8x64(v)
	case OpRsh8x8:
		return rewriteValueMIPS64_OpRsh8x8(v)
	case OpSelect0:
		return rewriteValueMIPS64_OpSelect0(v)
	case OpSelect1:
		return rewriteValueMIPS64_OpSelect1(v)
	case OpSignExt16to32:
		return rewriteValueMIPS64_OpSignExt16to32(v)
	case OpSignExt16to64:
		return rewriteValueMIPS64_OpSignExt16to64(v)
	case OpSignExt32to64:
		return rewriteValueMIPS64_OpSignExt32to64(v)
	case OpSignExt8to16:
		return rewriteValueMIPS64_OpSignExt8to16(v)
	case OpSignExt8to32:
		return rewriteValueMIPS64_OpSignExt8to32(v)
	case OpSignExt8to64:
		return rewriteValueMIPS64_OpSignExt8to64(v)
	case OpSlicemask:
		return rewriteValueMIPS64_OpSlicemask(v)
	case OpStaticCall:
		return rewriteValueMIPS64_OpStaticCall(v)
	case OpStore:
		return rewriteValueMIPS64_OpStore(v)
	case OpSub16:
		return rewriteValueMIPS64_OpSub16(v)
	case OpSub32:
		return rewriteValueMIPS64_OpSub32(v)
	case OpSub32F:
		return rewriteValueMIPS64_OpSub32F(v)
	case OpSub64:
		return rewriteValueMIPS64_OpSub64(v)
	case OpSub64F:
		return rewriteValueMIPS64_OpSub64F(v)
	case OpSub8:
		return rewriteValueMIPS64_OpSub8(v)
	case OpSubPtr:
		return rewriteValueMIPS64_OpSubPtr(v)
	case OpTrunc16to8:
		return rewriteValueMIPS64_OpTrunc16to8(v)
	case OpTrunc32to16:
		return rewriteValueMIPS64_OpTrunc32to16(v)
	case OpTrunc32to8:
		return rewriteValueMIPS64_OpTrunc32to8(v)
	case OpTrunc64to16:
		return rewriteValueMIPS64_OpTrunc64to16(v)
	case OpTrunc64to32:
		return rewriteValueMIPS64_OpTrunc64to32(v)
	case OpTrunc64to8:
		return rewriteValueMIPS64_OpTrunc64to8(v)
	case OpXor16:
		return rewriteValueMIPS64_OpXor16(v)
	case OpXor32:
		return rewriteValueMIPS64_OpXor32(v)
	case OpXor64:
		return rewriteValueMIPS64_OpXor64(v)
	case OpXor8:
		return rewriteValueMIPS64_OpXor8(v)
	case OpZero:
		return rewriteValueMIPS64_OpZero(v)
	case OpZeroExt16to32:
		return rewriteValueMIPS64_OpZeroExt16to32(v)
	case OpZeroExt16to64:
		return rewriteValueMIPS64_OpZeroExt16to64(v)
	case OpZeroExt32to64:
		return rewriteValueMIPS64_OpZeroExt32to64(v)
	case OpZeroExt8to16:
		return rewriteValueMIPS64_OpZeroExt8to16(v)
	case OpZeroExt8to32:
		return rewriteValueMIPS64_OpZeroExt8to32(v)
	case OpZeroExt8to64:
		return rewriteValueMIPS64_OpZeroExt8to64(v)
	}
	// No rewrite rule exists for this opcode.
	return false
}
// rewriteValueMIPS64_OpAdd16 rewrites (Add16 x y) to (ADDV x y).
// The rule is unconditional, so it always fires and returns true.
func rewriteValueMIPS64_OpAdd16(v *Value) bool {
	// match: (Add16 x y)
	// cond:
	// result: (ADDV x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64ADDV)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpAdd32 rewrites (Add32 x y) to (ADDV x y).
// The rule is unconditional, so it always fires and returns true.
func rewriteValueMIPS64_OpAdd32(v *Value) bool {
	// match: (Add32 x y)
	// cond:
	// result: (ADDV x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64ADDV)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpAdd32F rewrites (Add32F x y) to the single-precision
// float add (ADDF x y). Unconditional; always returns true.
func rewriteValueMIPS64_OpAdd32F(v *Value) bool {
	// match: (Add32F x y)
	// cond:
	// result: (ADDF x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64ADDF)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpAdd64 rewrites (Add64 x y) to (ADDV x y).
// The rule is unconditional, so it always fires and returns true.
func rewriteValueMIPS64_OpAdd64(v *Value) bool {
	// match: (Add64 x y)
	// cond:
	// result: (ADDV x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64ADDV)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpAdd64F rewrites (Add64F x y) to the double-precision
// float add (ADDD x y). Unconditional; always returns true.
func rewriteValueMIPS64_OpAdd64F(v *Value) bool {
	// match: (Add64F x y)
	// cond:
	// result: (ADDD x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64ADDD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpAdd8 rewrites (Add8 x y) to (ADDV x y).
// The rule is unconditional, so it always fires and returns true.
func rewriteValueMIPS64_OpAdd8(v *Value) bool {
	// match: (Add8 x y)
	// cond:
	// result: (ADDV x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64ADDV)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpAddPtr rewrites (AddPtr x y) to (ADDV x y); pointer
// arithmetic uses the same 64-bit add as integers. Always returns true.
func rewriteValueMIPS64_OpAddPtr(v *Value) bool {
	// match: (AddPtr x y)
	// cond:
	// result: (ADDV x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64ADDV)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpAddr rewrites (Addr {sym} base) to
// (MOVVaddr {sym} base), carrying the symbol aux through unchanged.
// Unconditional; always returns true.
func rewriteValueMIPS64_OpAddr(v *Value) bool {
	// match: (Addr {sym} base)
	// cond:
	// result: (MOVVaddr {sym} base)
	for {
		sym := v.Aux
		base := v.Args[0]
		v.reset(OpMIPS64MOVVaddr)
		v.Aux = sym
		v.AddArg(base)
		return true
	}
}
// rewriteValueMIPS64_OpAnd16 rewrites (And16 x y) to (AND x y).
// Unconditional; always returns true.
func rewriteValueMIPS64_OpAnd16(v *Value) bool {
	// match: (And16 x y)
	// cond:
	// result: (AND x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpAnd32 rewrites (And32 x y) to (AND x y).
// Unconditional; always returns true.
func rewriteValueMIPS64_OpAnd32(v *Value) bool {
	// match: (And32 x y)
	// cond:
	// result: (AND x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpAnd64 rewrites (And64 x y) to (AND x y).
// Unconditional; always returns true.
func rewriteValueMIPS64_OpAnd64(v *Value) bool {
	// match: (And64 x y)
	// cond:
	// result: (AND x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpAnd8 rewrites (And8 x y) to (AND x y).
// Unconditional; always returns true.
func rewriteValueMIPS64_OpAnd8(v *Value) bool {
	// match: (And8 x y)
	// cond:
	// result: (AND x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpAndB rewrites the boolean (AndB x y) to the same
// bitwise (AND x y) used for integers. Unconditional; always returns true.
func rewriteValueMIPS64_OpAndB(v *Value) bool {
	// match: (AndB x y)
	// cond:
	// result: (AND x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpAvg64u rewrites (Avg64u <t> x y) to
// (ADDV (SRLVconst <t> (SUBV <t> x y) [1]) y), i.e. y + (x-y)>>1.
// NOTE(review): this equals the unsigned average floor((x+y)/2) without
// overflowing only when x >= y — presumably the generic Avg64u op
// guarantees that precondition; confirm against the generic-op definition.
// The rule is unconditional and always returns true.
func rewriteValueMIPS64_OpAvg64u(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Avg64u <t> x y)
	// cond:
	// result: (ADDV (SRLVconst <t> (SUBV <t> x y) [1]) y)
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64ADDV)
		// (SUBV x y) >> 1, built as two new values in the same block.
		v0 := b.NewValue0(v.Pos, OpMIPS64SRLVconst, t)
		v0.AuxInt = 1
		v1 := b.NewValue0(v.Pos, OpMIPS64SUBV, t)
		v1.AddArg(x)
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpClosureCall rewrites
// (ClosureCall [argwid] entry closure mem) to
// (CALLclosure [argwid] entry closure mem), preserving the argument-width
// AuxInt and all three operands. Unconditional; always returns true.
func rewriteValueMIPS64_OpClosureCall(v *Value) bool {
	// match: (ClosureCall [argwid] entry closure mem)
	// cond:
	// result: (CALLclosure [argwid] entry closure mem)
	for {
		argwid := v.AuxInt
		entry := v.Args[0]
		closure := v.Args[1]
		mem := v.Args[2]
		v.reset(OpMIPS64CALLclosure)
		v.AuxInt = argwid
		v.AddArg(entry)
		v.AddArg(closure)
		v.AddArg(mem)
		return true
	}
}
// rewriteValueMIPS64_OpCom16 rewrites (Com16 x) to (NOR (MOVVconst [0]) x):
// NOR with a zero constant yields the bitwise complement of x.
// Unconditional; always returns true.
func rewriteValueMIPS64_OpCom16(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Com16 x)
	// cond:
	// result: (NOR (MOVVconst [0]) x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64NOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpCom32 rewrites (Com32 x) to (NOR (MOVVconst [0]) x):
// NOR with a zero constant yields the bitwise complement of x.
// Unconditional; always returns true.
func rewriteValueMIPS64_OpCom32(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Com32 x)
	// cond:
	// result: (NOR (MOVVconst [0]) x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64NOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpCom64 rewrites (Com64 x) to (NOR (MOVVconst [0]) x):
// NOR with a zero constant yields the bitwise complement of x.
// Unconditional; always returns true.
func rewriteValueMIPS64_OpCom64(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Com64 x)
	// cond:
	// result: (NOR (MOVVconst [0]) x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64NOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpCom8 rewrites (Com8 x) to (NOR (MOVVconst [0]) x):
// NOR with a zero constant yields the bitwise complement of x.
// Unconditional; always returns true.
func rewriteValueMIPS64_OpCom8(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Com8 x)
	// cond:
	// result: (NOR (MOVVconst [0]) x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64NOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpConst16 rewrites (Const16 [val]) to (MOVVconst [val]),
// carrying the AuxInt through. Unconditional; always returns true.
func rewriteValueMIPS64_OpConst16(v *Value) bool {
	// match: (Const16 [val])
	// cond:
	// result: (MOVVconst [val])
	for {
		val := v.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = val
		return true
	}
}
// rewriteValueMIPS64_OpConst32 rewrites (Const32 [val]) to (MOVVconst [val]),
// carrying the AuxInt through. Unconditional; always returns true.
func rewriteValueMIPS64_OpConst32(v *Value) bool {
	// match: (Const32 [val])
	// cond:
	// result: (MOVVconst [val])
	for {
		val := v.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = val
		return true
	}
}
// rewriteValueMIPS64_OpConst32F rewrites (Const32F [val]) to the
// single-precision float constant (MOVFconst [val]).
// Unconditional; always returns true.
func rewriteValueMIPS64_OpConst32F(v *Value) bool {
	// match: (Const32F [val])
	// cond:
	// result: (MOVFconst [val])
	for {
		val := v.AuxInt
		v.reset(OpMIPS64MOVFconst)
		v.AuxInt = val
		return true
	}
}
// rewriteValueMIPS64_OpConst64 rewrites (Const64 [val]) to (MOVVconst [val]),
// carrying the AuxInt through. Unconditional; always returns true.
func rewriteValueMIPS64_OpConst64(v *Value) bool {
	// match: (Const64 [val])
	// cond:
	// result: (MOVVconst [val])
	for {
		val := v.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = val
		return true
	}
}
// rewriteValueMIPS64_OpConst64F rewrites (Const64F [val]) to the
// double-precision float constant (MOVDconst [val]).
// Unconditional; always returns true.
func rewriteValueMIPS64_OpConst64F(v *Value) bool {
	// match: (Const64F [val])
	// cond:
	// result: (MOVDconst [val])
	for {
		val := v.AuxInt
		v.reset(OpMIPS64MOVDconst)
		v.AuxInt = val
		return true
	}
}
// rewriteValueMIPS64_OpConst8 rewrites (Const8 [val]) to (MOVVconst [val]),
// carrying the AuxInt through. Unconditional; always returns true.
func rewriteValueMIPS64_OpConst8(v *Value) bool {
	// match: (Const8 [val])
	// cond:
	// result: (MOVVconst [val])
	for {
		val := v.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = val
		return true
	}
}
// rewriteValueMIPS64_OpConstBool rewrites (ConstBool [b]) to (MOVVconst [b]):
// booleans are materialized as integer constants, AuxInt carried through.
// Unconditional; always returns true.
func rewriteValueMIPS64_OpConstBool(v *Value) bool {
	// match: (ConstBool [b])
	// cond:
	// result: (MOVVconst [b])
	for {
		b := v.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = b
		return true
	}
}
// rewriteValueMIPS64_OpConstNil rewrites (ConstNil) to (MOVVconst [0]):
// the nil pointer is the zero address. Unconditional; always returns true.
func rewriteValueMIPS64_OpConstNil(v *Value) bool {
	// match: (ConstNil)
	// cond:
	// result: (MOVVconst [0])
	for {
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
}
// rewriteValueMIPS64_OpConvert rewrites (Convert x mem) to
// (MOVVconvert x mem), keeping the memory operand.
// Unconditional; always returns true.
func rewriteValueMIPS64_OpConvert(v *Value) bool {
	// match: (Convert x mem)
	// cond:
	// result: (MOVVconvert x mem)
	for {
		x := v.Args[0]
		mem := v.Args[1]
		v.reset(OpMIPS64MOVVconvert)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
}
// rewriteValueMIPS64_OpCvt32Fto32 rewrites (Cvt32Fto32 x) to the single
// machine op (TRUNCFW x). Unconditional; always returns true.
func rewriteValueMIPS64_OpCvt32Fto32(v *Value) bool {
	// match: (Cvt32Fto32 x)
	// cond:
	// result: (TRUNCFW x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64TRUNCFW)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpCvt32Fto64 rewrites (Cvt32Fto64 x) to the single
// machine op (TRUNCFV x). Unconditional; always returns true.
func rewriteValueMIPS64_OpCvt32Fto64(v *Value) bool {
	// match: (Cvt32Fto64 x)
	// cond:
	// result: (TRUNCFV x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64TRUNCFV)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpCvt32Fto64F lowers Cvt32Fto64F to the MIPS64 MOVFD op.
func rewriteValueMIPS64_OpCvt32Fto64F(v *Value) bool {
	// match: (Cvt32Fto64F x)
	// cond:
	// result: (MOVFD x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64MOVFD)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpCvt32to32F lowers Cvt32to32F to the MIPS64 MOVWF op.
func rewriteValueMIPS64_OpCvt32to32F(v *Value) bool {
	// match: (Cvt32to32F x)
	// cond:
	// result: (MOVWF x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64MOVWF)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpCvt32to64F lowers Cvt32to64F to the MIPS64 MOVWD op.
func rewriteValueMIPS64_OpCvt32to64F(v *Value) bool {
	// match: (Cvt32to64F x)
	// cond:
	// result: (MOVWD x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64MOVWD)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpCvt64Fto32 lowers Cvt64Fto32 to the MIPS64 TRUNCDW op.
func rewriteValueMIPS64_OpCvt64Fto32(v *Value) bool {
	// match: (Cvt64Fto32 x)
	// cond:
	// result: (TRUNCDW x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64TRUNCDW)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpCvt64Fto32F lowers Cvt64Fto32F to the MIPS64 MOVDF op.
func rewriteValueMIPS64_OpCvt64Fto32F(v *Value) bool {
	// match: (Cvt64Fto32F x)
	// cond:
	// result: (MOVDF x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64MOVDF)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpCvt64Fto64 lowers Cvt64Fto64 to the MIPS64 TRUNCDV op.
func rewriteValueMIPS64_OpCvt64Fto64(v *Value) bool {
	// match: (Cvt64Fto64 x)
	// cond:
	// result: (TRUNCDV x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64TRUNCDV)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpCvt64to32F lowers Cvt64to32F to the MIPS64 MOVVF op.
func rewriteValueMIPS64_OpCvt64to32F(v *Value) bool {
	// match: (Cvt64to32F x)
	// cond:
	// result: (MOVVF x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64MOVVF)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpCvt64to64F lowers Cvt64to64F to the MIPS64 MOVVD op.
func rewriteValueMIPS64_OpCvt64to64F(v *Value) bool {
	// match: (Cvt64to64F x)
	// cond:
	// result: (MOVVD x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64MOVVD)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpDiv16 lowers Div16 to the quotient half (Select1) of a
// 64-bit DIVV whose operands are first sign-extended to 64 bits.
func rewriteValueMIPS64_OpDiv16(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Div16 x y)
	// cond:
	// result: (Select1 (DIVV (SignExt16to64 x) (SignExt16to64 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, MakeTuple(types.Int64, types.Int64))
		v1 := b.NewValue0(v.Pos, OpSignExt16to64, types.Int64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpSignExt16to64, types.Int64)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpDiv16u lowers Div16u to the quotient half (Select1) of
// an unsigned 64-bit DIVVU whose operands are first zero-extended to 64 bits.
func rewriteValueMIPS64_OpDiv16u(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Div16u x y)
	// cond:
	// result: (Select1 (DIVVU (ZeroExt16to64 x) (ZeroExt16to64 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, MakeTuple(types.UInt64, types.UInt64))
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpDiv32 lowers Div32 to the quotient half (Select1) of a
// 64-bit DIVV whose operands are first sign-extended to 64 bits.
func rewriteValueMIPS64_OpDiv32(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Div32 x y)
	// cond:
	// result: (Select1 (DIVV (SignExt32to64 x) (SignExt32to64 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, MakeTuple(types.Int64, types.Int64))
		v1 := b.NewValue0(v.Pos, OpSignExt32to64, types.Int64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpSignExt32to64, types.Int64)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpDiv32F lowers Div32F directly to the MIPS64 DIVF op.
func rewriteValueMIPS64_OpDiv32F(v *Value) bool {
	// match: (Div32F x y)
	// cond:
	// result: (DIVF x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64DIVF)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpDiv32u lowers Div32u to the quotient half (Select1) of
// an unsigned 64-bit DIVVU whose operands are first zero-extended to 64 bits.
func rewriteValueMIPS64_OpDiv32u(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Div32u x y)
	// cond:
	// result: (Select1 (DIVVU (ZeroExt32to64 x) (ZeroExt32to64 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, MakeTuple(types.UInt64, types.UInt64))
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpDiv64 lowers Div64 to the quotient half (Select1) of a
// DIVV tuple; no operand extension is needed at full width.
func rewriteValueMIPS64_OpDiv64(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Div64 x y)
	// cond:
	// result: (Select1 (DIVV x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, MakeTuple(types.Int64, types.Int64))
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpDiv64F lowers Div64F directly to the MIPS64 DIVD op.
func rewriteValueMIPS64_OpDiv64F(v *Value) bool {
	// match: (Div64F x y)
	// cond:
	// result: (DIVD x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64DIVD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpDiv64u lowers Div64u to the quotient half (Select1) of
// a DIVVU tuple; no operand extension is needed at full width.
func rewriteValueMIPS64_OpDiv64u(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Div64u x y)
	// cond:
	// result: (Select1 (DIVVU x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, MakeTuple(types.UInt64, types.UInt64))
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpDiv8 lowers Div8 to the quotient half (Select1) of a
// 64-bit DIVV whose operands are first sign-extended to 64 bits.
func rewriteValueMIPS64_OpDiv8(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Div8 x y)
	// cond:
	// result: (Select1 (DIVV (SignExt8to64 x) (SignExt8to64 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, MakeTuple(types.Int64, types.Int64))
		v1 := b.NewValue0(v.Pos, OpSignExt8to64, types.Int64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpSignExt8to64, types.Int64)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpDiv8u lowers Div8u to the quotient half (Select1) of
// an unsigned 64-bit DIVVU whose operands are first zero-extended to 64 bits.
func rewriteValueMIPS64_OpDiv8u(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Div8u x y)
	// cond:
	// result: (Select1 (DIVVU (ZeroExt8to64 x) (ZeroExt8to64 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, MakeTuple(types.UInt64, types.UInt64))
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpEq16 lowers Eq16 to (SGTU 1 (XOR zx zy)) on
// zero-extended operands: the XOR is zero iff the operands are equal, and the
// unsigned compare 1 >u 0 then produces the boolean result.
func rewriteValueMIPS64_OpEq16(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Eq16 x y)
	// cond:
	// result: (SGTU (MOVVconst [1]) (XOR (ZeroExt16to64 x) (ZeroExt16to64 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64XOR, types.UInt64)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
		v2.AddArg(x)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpEq32 lowers Eq32 to (SGTU 1 (XOR zx zy)) on
// zero-extended operands: XOR is zero iff the operands are equal.
func rewriteValueMIPS64_OpEq32(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Eq32 x y)
	// cond:
	// result: (SGTU (MOVVconst [1]) (XOR (ZeroExt32to64 x) (ZeroExt32to64 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64XOR, types.UInt64)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v2.AddArg(x)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpEq32F lowers Eq32F to FPFlagTrue over a flag-producing
// CMPEQF floating-point compare.
func rewriteValueMIPS64_OpEq32F(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Eq32F x y)
	// cond:
	// result: (FPFlagTrue (CMPEQF x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPEQF, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpEq64 lowers Eq64 to (SGTU 1 (XOR x y)): the XOR is
// zero iff the operands are equal, and 1 >u 0 yields the boolean.
func rewriteValueMIPS64_OpEq64(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Eq64 x y)
	// cond:
	// result: (SGTU (MOVVconst [1]) (XOR x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64XOR, types.UInt64)
		v1.AddArg(x)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpEq64F lowers Eq64F to FPFlagTrue over a flag-producing
// CMPEQD floating-point compare.
func rewriteValueMIPS64_OpEq64F(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Eq64F x y)
	// cond:
	// result: (FPFlagTrue (CMPEQD x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPEQD, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpEq8 lowers Eq8 to (SGTU 1 (XOR zx zy)) on
// zero-extended operands: XOR is zero iff the operands are equal.
func rewriteValueMIPS64_OpEq8(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Eq8 x y)
	// cond:
	// result: (SGTU (MOVVconst [1]) (XOR (ZeroExt8to64 x) (ZeroExt8to64 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64XOR, types.UInt64)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v2.AddArg(x)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpEqB lowers EqB to (XOR 1 (XOR x y)): for 0/1 boolean
// values the inner XOR computes "not equal", and XOR with 1 negates it.
func rewriteValueMIPS64_OpEqB(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (EqB x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (XOR <types.Bool> x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64XOR, types.Bool)
		v1.AddArg(x)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpEqPtr lowers EqPtr with the same trick as Eq64:
// (SGTU 1 (XOR x y)), since pointers are full 64-bit words here.
func rewriteValueMIPS64_OpEqPtr(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (EqPtr x y)
	// cond:
	// result: (SGTU (MOVVconst [1]) (XOR x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64XOR, types.UInt64)
		v1.AddArg(x)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpGeq16 lowers Geq16 as x >= y == !(y > x): an SGT on
// sign-extended operands with swapped order, negated by XOR with 1.
func rewriteValueMIPS64_OpGeq16(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Geq16 x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGT (SignExt16to64 y) (SignExt16to64 x)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGT, types.Bool)
		v2 := b.NewValue0(v.Pos, OpSignExt16to64, types.Int64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpSignExt16to64, types.Int64)
		v3.AddArg(x)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpGeq16U lowers Geq16U as x >= y == !(y >u x): an SGTU
// on zero-extended operands with swapped order, negated by XOR with 1.
func rewriteValueMIPS64_OpGeq16U(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Geq16U x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt16to64 y) (ZeroExt16to64 x)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
		v3.AddArg(x)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpGeq32 lowers Geq32 as x >= y == !(y > x): an SGT on
// sign-extended operands with swapped order, negated by XOR with 1.
func rewriteValueMIPS64_OpGeq32(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Geq32 x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGT (SignExt32to64 y) (SignExt32to64 x)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGT, types.Bool)
		v2 := b.NewValue0(v.Pos, OpSignExt32to64, types.Int64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpSignExt32to64, types.Int64)
		v3.AddArg(x)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpGeq32F lowers Geq32F to FPFlagTrue over a
// flag-producing CMPGEF floating-point compare.
func rewriteValueMIPS64_OpGeq32F(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Geq32F x y)
	// cond:
	// result: (FPFlagTrue (CMPGEF x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPGEF, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpGeq32U lowers Geq32U as x >= y == !(y >u x): an SGTU
// on zero-extended operands with swapped order, negated by XOR with 1.
func rewriteValueMIPS64_OpGeq32U(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Geq32U x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt32to64 y) (ZeroExt32to64 x)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v3.AddArg(x)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpGeq64 lowers Geq64 as x >= y == !(y > x): SGT with
// swapped operands, negated by XOR with 1.
func rewriteValueMIPS64_OpGeq64(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Geq64 x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGT y x))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGT, types.Bool)
		v1.AddArg(y)
		v1.AddArg(x)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpGeq64F lowers Geq64F to FPFlagTrue over a
// flag-producing CMPGED floating-point compare.
func rewriteValueMIPS64_OpGeq64F(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Geq64F x y)
	// cond:
	// result: (FPFlagTrue (CMPGED x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPGED, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpGeq64U lowers Geq64U as x >= y == !(y >u x): SGTU with
// swapped operands, negated by XOR with 1.
func rewriteValueMIPS64_OpGeq64U(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Geq64U x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGTU y x))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v1.AddArg(y)
		v1.AddArg(x)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpGeq8 lowers Geq8 as x >= y == !(y > x): an SGT on
// sign-extended operands with swapped order, negated by XOR with 1.
func rewriteValueMIPS64_OpGeq8(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Geq8 x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGT (SignExt8to64 y) (SignExt8to64 x)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGT, types.Bool)
		v2 := b.NewValue0(v.Pos, OpSignExt8to64, types.Int64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpSignExt8to64, types.Int64)
		v3.AddArg(x)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpGeq8U lowers Geq8U as x >= y == !(y >u x): an SGTU on
// zero-extended operands with swapped order, negated by XOR with 1.
func rewriteValueMIPS64_OpGeq8U(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Geq8U x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt8to64 y) (ZeroExt8to64 x)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v3.AddArg(x)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpGetClosurePtr lowers GetClosurePtr to the
// architecture's LoweredGetClosurePtr pseudo-op.
func rewriteValueMIPS64_OpGetClosurePtr(v *Value) bool {
	// match: (GetClosurePtr)
	// cond:
	// result: (LoweredGetClosurePtr)
	for {
		v.reset(OpMIPS64LoweredGetClosurePtr)
		return true
	}
}
// rewriteValueMIPS64_OpGreater16 lowers Greater16 to SGT on operands
// sign-extended to 64 bits.
func rewriteValueMIPS64_OpGreater16(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Greater16 x y)
	// cond:
	// result: (SGT (SignExt16to64 x) (SignExt16to64 y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGT)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, types.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpSignExt16to64, types.Int64)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpGreater16U lowers Greater16U to SGTU on operands
// zero-extended to 64 bits.
func rewriteValueMIPS64_OpGreater16U(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Greater16U x y)
	// cond:
	// result: (SGTU (ZeroExt16to64 x) (ZeroExt16to64 y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpGreater32 lowers Greater32 to SGT on operands
// sign-extended to 64 bits.
func rewriteValueMIPS64_OpGreater32(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Greater32 x y)
	// cond:
	// result: (SGT (SignExt32to64 x) (SignExt32to64 y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGT)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, types.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpSignExt32to64, types.Int64)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpGreater32F lowers Greater32F to FPFlagTrue over a
// flag-producing CMPGTF floating-point compare.
func rewriteValueMIPS64_OpGreater32F(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Greater32F x y)
	// cond:
	// result: (FPFlagTrue (CMPGTF x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPGTF, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpGreater32U lowers Greater32U to SGTU on operands
// zero-extended to 64 bits.
func rewriteValueMIPS64_OpGreater32U(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Greater32U x y)
	// cond:
	// result: (SGTU (ZeroExt32to64 x) (ZeroExt32to64 y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpGreater64 lowers Greater64 directly to SGT; no operand
// extension is needed at full width.
func rewriteValueMIPS64_OpGreater64(v *Value) bool {
	// match: (Greater64 x y)
	// cond:
	// result: (SGT x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGT)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpGreater64F lowers Greater64F to FPFlagTrue over a
// flag-producing CMPGTD floating-point compare.
func rewriteValueMIPS64_OpGreater64F(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Greater64F x y)
	// cond:
	// result: (FPFlagTrue (CMPGTD x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPGTD, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpGreater64U lowers Greater64U directly to SGTU; no
// operand extension is needed at full width.
func rewriteValueMIPS64_OpGreater64U(v *Value) bool {
	// match: (Greater64U x y)
	// cond:
	// result: (SGTU x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpGreater8 lowers Greater8 to SGT on operands
// sign-extended to 64 bits.
func rewriteValueMIPS64_OpGreater8(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Greater8 x y)
	// cond:
	// result: (SGT (SignExt8to64 x) (SignExt8to64 y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGT)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, types.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpSignExt8to64, types.Int64)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpGreater8U lowers Greater8U to SGTU on operands
// zero-extended to 64 bits.
func rewriteValueMIPS64_OpGreater8U(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Greater8U x y)
	// cond:
	// result: (SGTU (ZeroExt8to64 x) (ZeroExt8to64 y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpHmul32 lowers Hmul32 by sign-extending both operands,
// taking the low word (Select1) of the MULV tuple — which holds the full
// 64-bit product of two 32-bit inputs — and arithmetically shifting it right
// by 32 to extract the high half.
func rewriteValueMIPS64_OpHmul32(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Hmul32 x y)
	// cond:
	// result: (SRAVconst (Select1 <types.Int64> (MULV (SignExt32to64 x) (SignExt32to64 y))) [32])
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAVconst)
		v.AuxInt = 32
		v0 := b.NewValue0(v.Pos, OpSelect1, types.Int64)
		v1 := b.NewValue0(v.Pos, OpMIPS64MULV, MakeTuple(types.Int64, types.Int64))
		v2 := b.NewValue0(v.Pos, OpSignExt32to64, types.Int64)
		v2.AddArg(x)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpSignExt32to64, types.Int64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpHmul32u lowers Hmul32u like Hmul32 but unsigned:
// zero-extend, take the low word (Select1) of the MULVU tuple, then shift
// logically right by 32.
func rewriteValueMIPS64_OpHmul32u(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Hmul32u x y)
	// cond:
	// result: (SRLVconst (Select1 <types.UInt64> (MULVU (ZeroExt32to64 x) (ZeroExt32to64 y))) [32])
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRLVconst)
		v.AuxInt = 32
		v0 := b.NewValue0(v.Pos, OpSelect1, types.UInt64)
		v1 := b.NewValue0(v.Pos, OpMIPS64MULVU, MakeTuple(types.UInt64, types.UInt64))
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v2.AddArg(x)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpHmul64 lowers Hmul64 to the high word (Select0) of the
// signed full-width MULV product tuple.
func rewriteValueMIPS64_OpHmul64(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Hmul64 x y)
	// cond:
	// result: (Select0 (MULV x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPS64MULV, MakeTuple(types.Int64, types.Int64))
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpHmul64u lowers Hmul64u to the high word (Select0) of
// the unsigned full-width MULVU product tuple.
func rewriteValueMIPS64_OpHmul64u(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Hmul64u x y)
	// cond:
	// result: (Select0 (MULVU x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, MakeTuple(types.UInt64, types.UInt64))
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpInterCall lowers InterCall to CALLinter, carrying the
// argument-area width (AuxInt) and both arguments through unchanged.
func rewriteValueMIPS64_OpInterCall(v *Value) bool {
	// match: (InterCall [argwid] entry mem)
	// cond:
	// result: (CALLinter [argwid] entry mem)
	for {
		argwid := v.AuxInt
		entry := v.Args[0]
		mem := v.Args[1]
		v.reset(OpMIPS64CALLinter)
		v.AuxInt = argwid
		v.AddArg(entry)
		v.AddArg(mem)
		return true
	}
}
// rewriteValueMIPS64_OpIsInBounds lowers IsInBounds to a single unsigned
// compare (SGTU len idx), i.e. idx <u len.
func rewriteValueMIPS64_OpIsInBounds(v *Value) bool {
	// match: (IsInBounds idx len)
	// cond:
	// result: (SGTU len idx)
	for {
		idx := v.Args[0]
		len := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v.AddArg(len)
		v.AddArg(idx)
		return true
	}
}
// rewriteValueMIPS64_OpIsNonNil lowers IsNonNil to the unsigned compare
// ptr >u 0, true for any non-zero pointer.
func rewriteValueMIPS64_OpIsNonNil(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (IsNonNil ptr)
	// cond:
	// result: (SGTU ptr (MOVVconst [0]))
	for {
		ptr := v.Args[0]
		v.reset(OpMIPS64SGTU)
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpIsSliceInBounds lowers IsSliceInBounds (idx <= len) as
// the negation !(idx >u len): SGTU negated by XOR with 1.
func rewriteValueMIPS64_OpIsSliceInBounds(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (IsSliceInBounds idx len)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGTU idx len))
	for {
		idx := v.Args[0]
		len := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v1.AddArg(idx)
		v1.AddArg(len)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpLeq16 lowers Leq16 as x <= y == !(x > y): an SGT on
// sign-extended operands, negated by XOR with 1.
func rewriteValueMIPS64_OpLeq16(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Leq16 x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGT (SignExt16to64 x) (SignExt16to64 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGT, types.Bool)
		v2 := b.NewValue0(v.Pos, OpSignExt16to64, types.Int64)
		v2.AddArg(x)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpSignExt16to64, types.Int64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpLeq16U lowers unsigned 16-bit <=:
// operands are zero-extended to 64 bits, compared with SGTU, and the
// result is inverted (XOR with 1) to turn x > y into x <= y.
func rewriteValueMIPS64_OpLeq16U(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Leq16U x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt16to64 x) (ZeroExt16to64 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
		v2.AddArg(x)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpLeq32 lowers signed 32-bit <=:
// operands are sign-extended to 64 bits, compared with SGT, and the
// result is inverted (XOR with 1) to turn x > y into x <= y.
func rewriteValueMIPS64_OpLeq32(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Leq32 x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGT (SignExt32to64 x) (SignExt32to64 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGT, types.Bool)
		v2 := b.NewValue0(v.Pos, OpSignExt32to64, types.Int64)
		v2.AddArg(x)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpSignExt32to64, types.Int64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpLeq32F lowers 32-bit float <=:
// (Leq32F x y) -> (FPFlagTrue (CMPGEF y x)) — the operands are swapped
// so the >= compare yields y >= x, i.e. x <= y, read via the FP flag.
func rewriteValueMIPS64_OpLeq32F(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Leq32F x y)
	// cond:
	// result: (FPFlagTrue (CMPGEF y x))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPGEF, TypeFlags)
		v0.AddArg(y)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpLeq32U lowers unsigned 32-bit <=:
// operands are zero-extended to 64 bits, compared with SGTU, and the
// result is inverted (XOR with 1) to turn x > y into x <= y.
func rewriteValueMIPS64_OpLeq32U(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Leq32U x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt32to64 x) (ZeroExt32to64 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v2.AddArg(x)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpLeq64 lowers signed 64-bit <=:
// no extension is needed at full width; SGT computes x > y and the
// XOR with 1 inverts it to x <= y.
func rewriteValueMIPS64_OpLeq64(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Leq64 x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGT x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGT, types.Bool)
		v1.AddArg(x)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpLeq64F lowers 64-bit float <=:
// (Leq64F x y) -> (FPFlagTrue (CMPGED y x)) — the operands are swapped
// so the >= compare yields y >= x, i.e. x <= y, read via the FP flag.
func rewriteValueMIPS64_OpLeq64F(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Leq64F x y)
	// cond:
	// result: (FPFlagTrue (CMPGED y x))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPGED, TypeFlags)
		v0.AddArg(y)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpLeq64U lowers unsigned 64-bit <=:
// no extension is needed at full width; SGTU computes x > y and the
// XOR with 1 inverts it to x <= y.
func rewriteValueMIPS64_OpLeq64U(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Leq64U x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGTU x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v1.AddArg(x)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpLeq8 lowers signed 8-bit <=:
// operands are sign-extended to 64 bits, compared with SGT, and the
// result is inverted (XOR with 1) to turn x > y into x <= y.
func rewriteValueMIPS64_OpLeq8(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Leq8 x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGT (SignExt8to64 x) (SignExt8to64 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGT, types.Bool)
		v2 := b.NewValue0(v.Pos, OpSignExt8to64, types.Int64)
		v2.AddArg(x)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpSignExt8to64, types.Int64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpLeq8U lowers unsigned 8-bit <=:
// operands are zero-extended to 64 bits, compared with SGTU, and the
// result is inverted (XOR with 1) to turn x > y into x <= y.
func rewriteValueMIPS64_OpLeq8U(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Leq8U x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt8to64 x) (ZeroExt8to64 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v2.AddArg(x)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpLess16 lowers signed 16-bit <:
// operands are sign-extended to 64 bits and swapped into SGT,
// since y > x is equivalent to x < y.
func rewriteValueMIPS64_OpLess16(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Less16 x y)
	// cond:
	// result: (SGT (SignExt16to64 y) (SignExt16to64 x))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGT)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, types.Int64)
		v0.AddArg(y)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpSignExt16to64, types.Int64)
		v1.AddArg(x)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpLess16U lowers unsigned 16-bit <:
// operands are zero-extended to 64 bits and swapped into SGTU,
// since y > x is equivalent to x < y.
func rewriteValueMIPS64_OpLess16U(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Less16U x y)
	// cond:
	// result: (SGTU (ZeroExt16to64 y) (ZeroExt16to64 x))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
		v0.AddArg(y)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
		v1.AddArg(x)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpLess32 lowers signed 32-bit <:
// operands are sign-extended to 64 bits and swapped into SGT,
// since y > x is equivalent to x < y.
func rewriteValueMIPS64_OpLess32(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Less32 x y)
	// cond:
	// result: (SGT (SignExt32to64 y) (SignExt32to64 x))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGT)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, types.Int64)
		v0.AddArg(y)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpSignExt32to64, types.Int64)
		v1.AddArg(x)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpLess32F lowers 32-bit float <:
// (Less32F x y) -> (FPFlagTrue (CMPGTF y x)) — the operands are swapped
// so the > compare yields y > x, i.e. x < y, read via the FP flag.
func rewriteValueMIPS64_OpLess32F(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Less32F x y)
	// cond:
	// result: (FPFlagTrue (CMPGTF y x))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPGTF, TypeFlags)
		v0.AddArg(y)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpLess32U lowers unsigned 32-bit <:
// operands are zero-extended to 64 bits and swapped into SGTU,
// since y > x is equivalent to x < y.
func rewriteValueMIPS64_OpLess32U(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Less32U x y)
	// cond:
	// result: (SGTU (ZeroExt32to64 y) (ZeroExt32to64 x))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v0.AddArg(y)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v1.AddArg(x)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpLess64 lowers signed 64-bit <:
// (Less64 x y) -> (SGT y x) — operands swapped, since y > x
// is equivalent to x < y.
func rewriteValueMIPS64_OpLess64(v *Value) bool {
	// match: (Less64 x y)
	// cond:
	// result: (SGT y x)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGT)
		v.AddArg(y)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpLess64F lowers 64-bit float <:
// (Less64F x y) -> (FPFlagTrue (CMPGTD y x)) — the operands are swapped
// so the > compare yields y > x, i.e. x < y, read via the FP flag.
func rewriteValueMIPS64_OpLess64F(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Less64F x y)
	// cond:
	// result: (FPFlagTrue (CMPGTD y x))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPGTD, TypeFlags)
		v0.AddArg(y)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpLess64U lowers unsigned 64-bit <:
// (Less64U x y) -> (SGTU y x) — operands swapped, since y > x
// is equivalent to x < y.
func rewriteValueMIPS64_OpLess64U(v *Value) bool {
	// match: (Less64U x y)
	// cond:
	// result: (SGTU y x)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v.AddArg(y)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpLess8 lowers signed 8-bit <:
// operands are sign-extended to 64 bits and swapped into SGT,
// since y > x is equivalent to x < y.
func rewriteValueMIPS64_OpLess8(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Less8 x y)
	// cond:
	// result: (SGT (SignExt8to64 y) (SignExt8to64 x))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGT)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, types.Int64)
		v0.AddArg(y)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpSignExt8to64, types.Int64)
		v1.AddArg(x)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpLess8U lowers unsigned 8-bit <:
// operands are zero-extended to 64 bits and swapped into SGTU,
// since y > x is equivalent to x < y.
func rewriteValueMIPS64_OpLess8U(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Less8U x y)
	// cond:
	// result: (SGTU (ZeroExt8to64 y) (ZeroExt8to64 x))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v0.AddArg(y)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v1.AddArg(x)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpLoad lowers the generic Load to the MIPS64 load
// of matching width, signedness, and register class, dispatching on the
// load's result type: MOVB/MOVBU (8-bit), MOVH/MOVHU (16-bit),
// MOVW/MOVWU (32-bit), MOVV (64-bit int or pointer), MOVF/MOVD (floats).
// Returns false if no case matches the type.
func rewriteValueMIPS64_OpLoad(v *Value) bool {
	// match: (Load <t> ptr mem)
	// cond: t.IsBoolean()
	// result: (MOVBUload ptr mem)
	for {
		t := v.Type
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(t.IsBoolean()) {
			break
		}
		v.reset(OpMIPS64MOVBUload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is8BitInt(t) && isSigned(t))
	// result: (MOVBload ptr mem)
	for {
		t := v.Type
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(is8BitInt(t) && isSigned(t)) {
			break
		}
		v.reset(OpMIPS64MOVBload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is8BitInt(t) && !isSigned(t))
	// result: (MOVBUload ptr mem)
	for {
		t := v.Type
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(is8BitInt(t) && !isSigned(t)) {
			break
		}
		v.reset(OpMIPS64MOVBUload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is16BitInt(t) && isSigned(t))
	// result: (MOVHload ptr mem)
	for {
		t := v.Type
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(is16BitInt(t) && isSigned(t)) {
			break
		}
		v.reset(OpMIPS64MOVHload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is16BitInt(t) && !isSigned(t))
	// result: (MOVHUload ptr mem)
	for {
		t := v.Type
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(is16BitInt(t) && !isSigned(t)) {
			break
		}
		v.reset(OpMIPS64MOVHUload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is32BitInt(t) && isSigned(t))
	// result: (MOVWload ptr mem)
	for {
		t := v.Type
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(is32BitInt(t) && isSigned(t)) {
			break
		}
		v.reset(OpMIPS64MOVWload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is32BitInt(t) && !isSigned(t))
	// result: (MOVWUload ptr mem)
	for {
		t := v.Type
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(is32BitInt(t) && !isSigned(t)) {
			break
		}
		v.reset(OpMIPS64MOVWUload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is64BitInt(t) || isPtr(t))
	// result: (MOVVload ptr mem)
	for {
		t := v.Type
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(is64BitInt(t) || isPtr(t)) {
			break
		}
		v.reset(OpMIPS64MOVVload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: is32BitFloat(t)
	// result: (MOVFload ptr mem)
	for {
		t := v.Type
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(is32BitFloat(t)) {
			break
		}
		v.reset(OpMIPS64MOVFload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: is64BitFloat(t)
	// result: (MOVDload ptr mem)
	for {
		t := v.Type
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(is64BitFloat(t)) {
			break
		}
		v.reset(OpMIPS64MOVDload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpLsh16x16 lowers 16-bit left shift by a 16-bit
// amount. The shift count is zero-extended; NEGV of (SGTU 64 count)
// produces an all-ones mask when count < 64 and zero otherwise, so
// ANDing it with the SLLV result yields 0 for oversized shifts
// (Go shift semantics).
func rewriteValueMIPS64_OpLsh16x16(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Lsh16x16 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <types.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v2 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh16x32 lowers 16-bit left shift by a 32-bit
// amount: zero-extend the count, mask the SLLV result to 0 when the
// count is >= 64 via NEGV(SGTU 64 count).
func rewriteValueMIPS64_OpLsh16x32(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Lsh16x32 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <types.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v2 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh16x64 lowers 16-bit left shift by a 64-bit
// amount: the count needs no extension; the SLLV result is masked to 0
// when the count is >= 64 via NEGV(SGTU 64 count).
func rewriteValueMIPS64_OpLsh16x64(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Lsh16x64 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <types.UInt64> [64]) y)) (SLLV <t> x y))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v2 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v3.AddArg(x)
		v3.AddArg(y)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueMIPS64_OpLsh16x8 lowers 16-bit left shift by an 8-bit
// amount: zero-extend the count, mask the SLLV result to 0 when the
// count is >= 64 via NEGV(SGTU 64 count).
func rewriteValueMIPS64_OpLsh16x8(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Lsh16x8 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <types.UInt64> [64]) (ZeroExt8to64  y))) (SLLV <t> x (ZeroExt8to64  y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v2 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh32x16 lowers 32-bit left shift by a 16-bit
// amount: zero-extend the count, mask the SLLV result to 0 when the
// count is >= 64 via NEGV(SGTU 64 count).
func rewriteValueMIPS64_OpLsh32x16(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Lsh32x16 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <types.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v2 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh32x32 lowers 32-bit left shift by a 32-bit
// amount: zero-extend the count, mask the SLLV result to 0 when the
// count is >= 64 via NEGV(SGTU 64 count).
func rewriteValueMIPS64_OpLsh32x32(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Lsh32x32 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <types.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v2 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh32x64 lowers 32-bit left shift by a 64-bit
// amount: the count needs no extension; the SLLV result is masked to 0
// when the count is >= 64 via NEGV(SGTU 64 count).
func rewriteValueMIPS64_OpLsh32x64(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Lsh32x64 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <types.UInt64> [64]) y)) (SLLV <t> x y))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v2 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v3.AddArg(x)
		v3.AddArg(y)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueMIPS64_OpLsh32x8 lowers 32-bit left shift by an 8-bit
// amount: zero-extend the count, mask the SLLV result to 0 when the
// count is >= 64 via NEGV(SGTU 64 count).
func rewriteValueMIPS64_OpLsh32x8(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Lsh32x8 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <types.UInt64> [64]) (ZeroExt8to64  y))) (SLLV <t> x (ZeroExt8to64  y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v2 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh64x16 lowers 64-bit left shift by a 16-bit
// amount: zero-extend the count, mask the SLLV result to 0 when the
// count is >= 64 via NEGV(SGTU 64 count).
func rewriteValueMIPS64_OpLsh64x16(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Lsh64x16 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <types.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v2 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh64x32 lowers 64-bit left shift by a 32-bit
// amount: zero-extend the count, mask the SLLV result to 0 when the
// count is >= 64 via NEGV(SGTU 64 count).
func rewriteValueMIPS64_OpLsh64x32(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Lsh64x32 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <types.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v2 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh64x64 lowers 64-bit left shift by a 64-bit
// amount: the count needs no extension; the SLLV result is masked to 0
// when the count is >= 64 via NEGV(SGTU 64 count).
func rewriteValueMIPS64_OpLsh64x64(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Lsh64x64 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <types.UInt64> [64]) y)) (SLLV <t> x y))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v2 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v3.AddArg(x)
		v3.AddArg(y)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueMIPS64_OpLsh64x8 lowers 64-bit left shift by an 8-bit
// amount: zero-extend the count, mask the SLLV result to 0 when the
// count is >= 64 via NEGV(SGTU 64 count).
func rewriteValueMIPS64_OpLsh64x8(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Lsh64x8 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <types.UInt64> [64]) (ZeroExt8to64  y))) (SLLV <t> x (ZeroExt8to64  y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v2 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh8x16 lowers 8-bit left shift by a 16-bit
// amount: zero-extend the count, mask the SLLV result to 0 when the
// count is >= 64 via NEGV(SGTU 64 count).
func rewriteValueMIPS64_OpLsh8x16(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Lsh8x16 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <types.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v2 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh8x32 lowers 8-bit left shift by a 32-bit
// amount: zero-extend the count, mask the SLLV result to 0 when the
// count is >= 64 via NEGV(SGTU 64 count).
func rewriteValueMIPS64_OpLsh8x32(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Lsh8x32 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <types.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v2 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh8x64 lowers 8-bit left shift by a 64-bit
// amount: the count needs no extension; the SLLV result is masked to 0
// when the count is >= 64 via NEGV(SGTU 64 count).
func rewriteValueMIPS64_OpLsh8x64(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Lsh8x64 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <types.UInt64> [64]) y)) (SLLV <t> x y))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v2 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v3.AddArg(x)
		v3.AddArg(y)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueMIPS64_OpLsh8x8 lowers the generic Lsh8x8 op to MIPS64 ops.
// The 8-bit shift count is zero-extended to 64 bits; the (AND (NEGV (SGTU
// 64 y')) ...) mask zeroes the result when the count is >= 64.
// It reports whether v was rewritten.
func rewriteValueMIPS64_OpLsh8x8(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Lsh8x8 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <types.UInt64> [64]) (ZeroExt8to64  y))) (SLLV <t> x (ZeroExt8to64  y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v2 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpMIPS64ADDV optimizes the machine ADDV op.
// It folds a constant operand (either side) into ADDVconst when the constant
// fits in 32 bits, and rewrites an addition of a negated value (either side)
// into SUBV. It reports whether v was rewritten.
func rewriteValueMIPS64_OpMIPS64ADDV(v *Value) bool {
	// match: (ADDV x (MOVVconst [c]))
	// cond: is32Bit(c)
	// result: (ADDVconst [c] x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_1.AuxInt
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64ADDVconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (ADDV (MOVVconst [c]) x)
	// cond: is32Bit(c)
	// result: (ADDVconst [c] x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64ADDVconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (ADDV x (NEGV y))
	// cond:
	// result: (SUBV x y)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64NEGV {
			break
		}
		y := v_1.Args[0]
		v.reset(OpMIPS64SUBV)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (ADDV (NEGV y) x)
	// cond:
	// result: (SUBV x y)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64NEGV {
			break
		}
		y := v_0.Args[0]
		x := v.Args[1]
		v.reset(OpMIPS64SUBV)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64ADDVconst simplifies the machine ADDVconst op:
// it folds the offset into a MOVVaddr, elides addition of 0, evaluates
// addition of a constant at compile time, and merges chained
// ADDVconst/SUBVconst when the combined offset still fits in 32 bits.
// It reports whether v was rewritten.
func rewriteValueMIPS64_OpMIPS64ADDVconst(v *Value) bool {
	// match: (ADDVconst [off1] (MOVVaddr [off2] {sym} ptr))
	// cond:
	// result: (MOVVaddr [off1+off2] {sym} ptr)
	for {
		off1 := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym := v_0.Aux
		ptr := v_0.Args[0]
		v.reset(OpMIPS64MOVVaddr)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		return true
	}
	// match: (ADDVconst [0] x)
	// cond:
	// result: x
	for {
		if v.AuxInt != 0 {
			break
		}
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (ADDVconst [c] (MOVVconst [d]))
	// cond:
	// result: (MOVVconst [c+d])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = c + d
		return true
	}
	// match: (ADDVconst [c] (ADDVconst [d] x))
	// cond: is32Bit(c+d)
	// result: (ADDVconst [c+d] x)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		d := v_0.AuxInt
		x := v_0.Args[0]
		if !(is32Bit(c + d)) {
			break
		}
		v.reset(OpMIPS64ADDVconst)
		v.AuxInt = c + d
		v.AddArg(x)
		return true
	}
	// match: (ADDVconst [c] (SUBVconst [d] x))
	// cond: is32Bit(c-d)
	// result: (ADDVconst [c-d] x)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64SUBVconst {
			break
		}
		d := v_0.AuxInt
		x := v_0.Args[0]
		if !(is32Bit(c - d)) {
			break
		}
		v.reset(OpMIPS64ADDVconst)
		v.AuxInt = c - d
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64AND optimizes the machine AND op: it folds a
// 32-bit-representable constant operand (either side) into ANDconst, and
// simplifies (AND x x) to x. It reports whether v was rewritten.
func rewriteValueMIPS64_OpMIPS64AND(v *Value) bool {
	// match: (AND x (MOVVconst [c]))
	// cond: is32Bit(c)
	// result: (ANDconst [c] x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_1.AuxInt
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64ANDconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (AND (MOVVconst [c]) x)
	// cond: is32Bit(c)
	// result: (ANDconst [c] x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64ANDconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (AND x x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64ANDconst simplifies the machine ANDconst op:
// AND with 0 yields the constant 0, AND with -1 is the identity, AND of a
// constant is evaluated at compile time, and nested ANDconst ops are merged
// by intersecting their masks. It reports whether v was rewritten.
func rewriteValueMIPS64_OpMIPS64ANDconst(v *Value) bool {
	// match: (ANDconst [0] _)
	// cond:
	// result: (MOVVconst [0])
	for {
		if v.AuxInt != 0 {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	// match: (ANDconst [-1] x)
	// cond:
	// result: x
	for {
		if v.AuxInt != -1 {
			break
		}
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (ANDconst [c] (MOVVconst [d]))
	// cond:
	// result: (MOVVconst [c&d])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = c & d
		return true
	}
	// match: (ANDconst [c] (ANDconst [d] x))
	// cond:
	// result: (ANDconst [c&d] x)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ANDconst {
			break
		}
		d := v_0.AuxInt
		x := v_0.Args[0]
		v.reset(OpMIPS64ANDconst)
		v.AuxInt = c & d
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVBUload folds address arithmetic into the
// unsigned byte load: an ADDVconst base has its offset merged into the load's
// offset, and a MOVVaddr base has both its offset and symbol merged, in each
// case only when the combined offset fits in 32 bits.
// It reports whether v was rewritten.
func rewriteValueMIPS64_OpMIPS64MOVBUload(v *Value) bool {
	// match: (MOVBUload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVBUload [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVBUload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBUload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVBUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVBUload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVBUreg removes redundant unsigned-byte
// extensions: the input is already zero-extended when it comes from a
// MOVBUload or another MOVBUreg (so a plain MOVVreg suffices), and an
// extension of a constant is evaluated at compile time via uint8 truncation.
// It reports whether v was rewritten.
func rewriteValueMIPS64_OpMIPS64MOVBUreg(v *Value) bool {
	// match: (MOVBUreg x:(MOVBUload _ _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVBUload {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg x:(MOVBUreg _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVBUreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg (MOVVconst [c]))
	// cond:
	// result: (MOVVconst [int64(uint8(c))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64(uint8(c))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVBload folds address arithmetic into the
// signed byte load, merging an ADDVconst offset or a MOVVaddr offset+symbol
// into the load when the combined offset fits in 32 bits.
// It reports whether v was rewritten.
func rewriteValueMIPS64_OpMIPS64MOVBload(v *Value) bool {
	// match: (MOVBload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVBload  [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVBload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVBload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVBload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVBreg removes redundant signed-byte
// extensions: the input is already sign-extended when it comes from a
// MOVBload or another MOVBreg (so a plain MOVVreg suffices), and an
// extension of a constant is evaluated at compile time via int8 truncation.
// It reports whether v was rewritten.
func rewriteValueMIPS64_OpMIPS64MOVBreg(v *Value) bool {
	// match: (MOVBreg x:(MOVBload _ _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVBload {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBreg x:(MOVBreg _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVBreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBreg (MOVVconst [c]))
	// cond:
	// result: (MOVVconst [int64(int8(c))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64(int8(c))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVBstore optimizes the byte store:
//   - folds ADDVconst / MOVVaddr address arithmetic into the store offset
//     (when the combined offset fits in 32 bits);
//   - rewrites a store of constant 0 to MOVBstorezero;
//   - drops a sign/zero extension (MOVBreg, MOVBUreg, MOVHreg, MOVHUreg,
//     MOVWreg, MOVWUreg) of the stored value, since only the low byte is
//     written anyway.
// It reports whether v was rewritten.
func rewriteValueMIPS64_OpMIPS64MOVBstore(v *Value) bool {
	// match: (MOVBstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVBstore [off1+off2] {sym} ptr val mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVBstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVVconst [0]) mem)
	// cond:
	// result: (MOVBstorezero [off] {sym} ptr mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		mem := v.Args[2]
		v.reset(OpMIPS64MOVBstorezero)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVBreg x) mem)
	// cond:
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVBreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVBUreg x) mem)
	// cond:
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVBUreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVHreg x) mem)
	// cond:
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVHreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVHUreg x) mem)
	// cond:
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVHUreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVWreg x) mem)
	// cond:
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVWreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVWUreg x) mem)
	// cond:
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVWUreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVBstorezero folds address arithmetic into the
// zero-byte store, merging an ADDVconst offset or a MOVVaddr offset+symbol
// into the store when the combined offset fits in 32 bits.
// It reports whether v was rewritten.
func rewriteValueMIPS64_OpMIPS64MOVBstorezero(v *Value) bool {
	// match: (MOVBstorezero [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVBstorezero [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVBstorezero)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstorezero [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVBstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVBstorezero)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVDload folds address arithmetic into the
// 64-bit float load, merging an ADDVconst offset or a MOVVaddr offset+symbol
// into the load when the combined offset fits in 32 bits.
// It reports whether v was rewritten.
func rewriteValueMIPS64_OpMIPS64MOVDload(v *Value) bool {
	// match: (MOVDload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVDload  [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVDload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVDload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVDload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVDstore folds address arithmetic into the
// 64-bit float store, merging an ADDVconst offset or a MOVVaddr
// offset+symbol into the store when the combined offset fits in 32 bits.
// It reports whether v was rewritten.
func rewriteValueMIPS64_OpMIPS64MOVDstore(v *Value) bool {
	// match: (MOVDstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVDstore [off1+off2] {sym} ptr val mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVDstore)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVDstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVDstore)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVFload folds address arithmetic into the
// 32-bit float load, merging an ADDVconst offset or a MOVVaddr offset+symbol
// into the load when the combined offset fits in 32 bits.
// It reports whether v was rewritten.
func rewriteValueMIPS64_OpMIPS64MOVFload(v *Value) bool {
	// match: (MOVFload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVFload  [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVFload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVFload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVFload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVFload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVFstore folds address arithmetic into the
// 32-bit float store, merging an ADDVconst offset or a MOVVaddr
// offset+symbol into the store when the combined offset fits in 32 bits.
// It reports whether v was rewritten.
func rewriteValueMIPS64_OpMIPS64MOVFstore(v *Value) bool {
	// match: (MOVFstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVFstore [off1+off2] {sym} ptr val mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVFstore)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVFstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVFstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVFstore)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVHUload folds address arithmetic into the
// unsigned halfword load, merging an ADDVconst offset or a MOVVaddr
// offset+symbol into the load when the combined offset fits in 32 bits.
// It reports whether v was rewritten.
func rewriteValueMIPS64_OpMIPS64MOVHUload(v *Value) bool {
	// match: (MOVHUload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVHUload [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVHUload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHUload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVHUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVHUload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVHUreg removes redundant unsigned-halfword
// extensions: the input is already zero-extended when it comes from a
// MOVBUload, MOVHUload, MOVBUreg, or MOVHUreg (so a plain MOVVreg suffices),
// and an extension of a constant is evaluated at compile time via uint16
// truncation. It reports whether v was rewritten.
func rewriteValueMIPS64_OpMIPS64MOVHUreg(v *Value) bool {
	// match: (MOVHUreg x:(MOVBUload _ _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVBUload {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVHUload _ _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVHUload {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVBUreg _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVBUreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVHUreg _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVHUreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg (MOVVconst [c]))
	// cond:
	// result: (MOVVconst [int64(uint16(c))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64(uint16(c))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVHload folds address arithmetic into the
// signed halfword load, merging an ADDVconst offset or a MOVVaddr
// offset+symbol into the load when the combined offset fits in 32 bits.
// It reports whether v was rewritten.
func rewriteValueMIPS64_OpMIPS64MOVHload(v *Value) bool {
	// match: (MOVHload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVHload  [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVHload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVHload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVHload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVHreg removes redundant signed-halfword
// extensions: the input already fits a sign-extended halfword when it comes
// from a MOVBload, MOVBUload, MOVHload, MOVBreg, MOVBUreg, or MOVHreg (so a
// plain MOVVreg suffices), and an extension of a constant is evaluated at
// compile time via int16 truncation. It reports whether v was rewritten.
func rewriteValueMIPS64_OpMIPS64MOVHreg(v *Value) bool {
	// match: (MOVHreg x:(MOVBload _ _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVBload {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBUload _ _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVBUload {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVHload _ _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVHload {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBreg _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVBreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBUreg _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVBUreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVHreg _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVHreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg (MOVVconst [c]))
	// cond:
	// result: (MOVVconst [int64(int16(c))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64(int16(c))
		return true
	}
	return false
}
  4252  func rewriteValueMIPS64_OpMIPS64MOVHstore(v *Value) bool {
  4253  	// match: (MOVHstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
  4254  	// cond: is32Bit(off1+off2)
  4255  	// result: (MOVHstore [off1+off2] {sym} ptr val mem)
  4256  	for {
  4257  		off1 := v.AuxInt
  4258  		sym := v.Aux
  4259  		v_0 := v.Args[0]
  4260  		if v_0.Op != OpMIPS64ADDVconst {
  4261  			break
  4262  		}
  4263  		off2 := v_0.AuxInt
  4264  		ptr := v_0.Args[0]
  4265  		val := v.Args[1]
  4266  		mem := v.Args[2]
  4267  		if !(is32Bit(off1 + off2)) {
  4268  			break
  4269  		}
  4270  		v.reset(OpMIPS64MOVHstore)
  4271  		v.AuxInt = off1 + off2
  4272  		v.Aux = sym
  4273  		v.AddArg(ptr)
  4274  		v.AddArg(val)
  4275  		v.AddArg(mem)
  4276  		return true
  4277  	}
  4278  	// match: (MOVHstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
  4279  	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
  4280  	// result: (MOVHstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
  4281  	for {
  4282  		off1 := v.AuxInt
  4283  		sym1 := v.Aux
  4284  		v_0 := v.Args[0]
  4285  		if v_0.Op != OpMIPS64MOVVaddr {
  4286  			break
  4287  		}
  4288  		off2 := v_0.AuxInt
  4289  		sym2 := v_0.Aux
  4290  		ptr := v_0.Args[0]
  4291  		val := v.Args[1]
  4292  		mem := v.Args[2]
  4293  		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
  4294  			break
  4295  		}
  4296  		v.reset(OpMIPS64MOVHstore)
  4297  		v.AuxInt = off1 + off2
  4298  		v.Aux = mergeSym(sym1, sym2)
  4299  		v.AddArg(ptr)
  4300  		v.AddArg(val)
  4301  		v.AddArg(mem)
  4302  		return true
  4303  	}
  4304  	// match: (MOVHstore [off] {sym} ptr (MOVVconst [0]) mem)
  4305  	// cond:
  4306  	// result: (MOVHstorezero [off] {sym} ptr mem)
  4307  	for {
  4308  		off := v.AuxInt
  4309  		sym := v.Aux
  4310  		ptr := v.Args[0]
  4311  		v_1 := v.Args[1]
  4312  		if v_1.Op != OpMIPS64MOVVconst {
  4313  			break
  4314  		}
  4315  		if v_1.AuxInt != 0 {
  4316  			break
  4317  		}
  4318  		mem := v.Args[2]
  4319  		v.reset(OpMIPS64MOVHstorezero)
  4320  		v.AuxInt = off
  4321  		v.Aux = sym
  4322  		v.AddArg(ptr)
  4323  		v.AddArg(mem)
  4324  		return true
  4325  	}
  4326  	// match: (MOVHstore [off] {sym} ptr (MOVHreg x) mem)
  4327  	// cond:
  4328  	// result: (MOVHstore [off] {sym} ptr x mem)
  4329  	for {
  4330  		off := v.AuxInt
  4331  		sym := v.Aux
  4332  		ptr := v.Args[0]
  4333  		v_1 := v.Args[1]
  4334  		if v_1.Op != OpMIPS64MOVHreg {
  4335  			break
  4336  		}
  4337  		x := v_1.Args[0]
  4338  		mem := v.Args[2]
  4339  		v.reset(OpMIPS64MOVHstore)
  4340  		v.AuxInt = off
  4341  		v.Aux = sym
  4342  		v.AddArg(ptr)
  4343  		v.AddArg(x)
  4344  		v.AddArg(mem)
  4345  		return true
  4346  	}
  4347  	// match: (MOVHstore [off] {sym} ptr (MOVHUreg x) mem)
  4348  	// cond:
  4349  	// result: (MOVHstore [off] {sym} ptr x mem)
  4350  	for {
  4351  		off := v.AuxInt
  4352  		sym := v.Aux
  4353  		ptr := v.Args[0]
  4354  		v_1 := v.Args[1]
  4355  		if v_1.Op != OpMIPS64MOVHUreg {
  4356  			break
  4357  		}
  4358  		x := v_1.Args[0]
  4359  		mem := v.Args[2]
  4360  		v.reset(OpMIPS64MOVHstore)
  4361  		v.AuxInt = off
  4362  		v.Aux = sym
  4363  		v.AddArg(ptr)
  4364  		v.AddArg(x)
  4365  		v.AddArg(mem)
  4366  		return true
  4367  	}
  4368  	// match: (MOVHstore [off] {sym} ptr (MOVWreg x) mem)
  4369  	// cond:
  4370  	// result: (MOVHstore [off] {sym} ptr x mem)
  4371  	for {
  4372  		off := v.AuxInt
  4373  		sym := v.Aux
  4374  		ptr := v.Args[0]
  4375  		v_1 := v.Args[1]
  4376  		if v_1.Op != OpMIPS64MOVWreg {
  4377  			break
  4378  		}
  4379  		x := v_1.Args[0]
  4380  		mem := v.Args[2]
  4381  		v.reset(OpMIPS64MOVHstore)
  4382  		v.AuxInt = off
  4383  		v.Aux = sym
  4384  		v.AddArg(ptr)
  4385  		v.AddArg(x)
  4386  		v.AddArg(mem)
  4387  		return true
  4388  	}
  4389  	// match: (MOVHstore [off] {sym} ptr (MOVWUreg x) mem)
  4390  	// cond:
  4391  	// result: (MOVHstore [off] {sym} ptr x mem)
  4392  	for {
  4393  		off := v.AuxInt
  4394  		sym := v.Aux
  4395  		ptr := v.Args[0]
  4396  		v_1 := v.Args[1]
  4397  		if v_1.Op != OpMIPS64MOVWUreg {
  4398  			break
  4399  		}
  4400  		x := v_1.Args[0]
  4401  		mem := v.Args[2]
  4402  		v.reset(OpMIPS64MOVHstore)
  4403  		v.AuxInt = off
  4404  		v.Aux = sym
  4405  		v.AddArg(ptr)
  4406  		v.AddArg(x)
  4407  		v.AddArg(mem)
  4408  		return true
  4409  	}
  4410  	return false
  4411  }
  4412  func rewriteValueMIPS64_OpMIPS64MOVHstorezero(v *Value) bool {
  4413  	// match: (MOVHstorezero [off1] {sym} (ADDVconst [off2] ptr) mem)
  4414  	// cond: is32Bit(off1+off2)
  4415  	// result: (MOVHstorezero [off1+off2] {sym} ptr mem)
  4416  	for {
  4417  		off1 := v.AuxInt
  4418  		sym := v.Aux
  4419  		v_0 := v.Args[0]
  4420  		if v_0.Op != OpMIPS64ADDVconst {
  4421  			break
  4422  		}
  4423  		off2 := v_0.AuxInt
  4424  		ptr := v_0.Args[0]
  4425  		mem := v.Args[1]
  4426  		if !(is32Bit(off1 + off2)) {
  4427  			break
  4428  		}
  4429  		v.reset(OpMIPS64MOVHstorezero)
  4430  		v.AuxInt = off1 + off2
  4431  		v.Aux = sym
  4432  		v.AddArg(ptr)
  4433  		v.AddArg(mem)
  4434  		return true
  4435  	}
  4436  	// match: (MOVHstorezero [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
  4437  	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
  4438  	// result: (MOVHstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
  4439  	for {
  4440  		off1 := v.AuxInt
  4441  		sym1 := v.Aux
  4442  		v_0 := v.Args[0]
  4443  		if v_0.Op != OpMIPS64MOVVaddr {
  4444  			break
  4445  		}
  4446  		off2 := v_0.AuxInt
  4447  		sym2 := v_0.Aux
  4448  		ptr := v_0.Args[0]
  4449  		mem := v.Args[1]
  4450  		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
  4451  			break
  4452  		}
  4453  		v.reset(OpMIPS64MOVHstorezero)
  4454  		v.AuxInt = off1 + off2
  4455  		v.Aux = mergeSym(sym1, sym2)
  4456  		v.AddArg(ptr)
  4457  		v.AddArg(mem)
  4458  		return true
  4459  	}
  4460  	return false
  4461  }
  4462  func rewriteValueMIPS64_OpMIPS64MOVVload(v *Value) bool {
  4463  	// match: (MOVVload [off1] {sym} (ADDVconst [off2] ptr) mem)
  4464  	// cond: is32Bit(off1+off2)
  4465  	// result: (MOVVload  [off1+off2] {sym} ptr mem)
  4466  	for {
  4467  		off1 := v.AuxInt
  4468  		sym := v.Aux
  4469  		v_0 := v.Args[0]
  4470  		if v_0.Op != OpMIPS64ADDVconst {
  4471  			break
  4472  		}
  4473  		off2 := v_0.AuxInt
  4474  		ptr := v_0.Args[0]
  4475  		mem := v.Args[1]
  4476  		if !(is32Bit(off1 + off2)) {
  4477  			break
  4478  		}
  4479  		v.reset(OpMIPS64MOVVload)
  4480  		v.AuxInt = off1 + off2
  4481  		v.Aux = sym
  4482  		v.AddArg(ptr)
  4483  		v.AddArg(mem)
  4484  		return true
  4485  	}
  4486  	// match: (MOVVload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
  4487  	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
  4488  	// result: (MOVVload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
  4489  	for {
  4490  		off1 := v.AuxInt
  4491  		sym1 := v.Aux
  4492  		v_0 := v.Args[0]
  4493  		if v_0.Op != OpMIPS64MOVVaddr {
  4494  			break
  4495  		}
  4496  		off2 := v_0.AuxInt
  4497  		sym2 := v_0.Aux
  4498  		ptr := v_0.Args[0]
  4499  		mem := v.Args[1]
  4500  		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
  4501  			break
  4502  		}
  4503  		v.reset(OpMIPS64MOVVload)
  4504  		v.AuxInt = off1 + off2
  4505  		v.Aux = mergeSym(sym1, sym2)
  4506  		v.AddArg(ptr)
  4507  		v.AddArg(mem)
  4508  		return true
  4509  	}
  4510  	return false
  4511  }
  4512  func rewriteValueMIPS64_OpMIPS64MOVVreg(v *Value) bool {
  4513  	// match: (MOVVreg x)
  4514  	// cond: x.Uses == 1
  4515  	// result: (MOVVnop x)
  4516  	for {
  4517  		x := v.Args[0]
  4518  		if !(x.Uses == 1) {
  4519  			break
  4520  		}
  4521  		v.reset(OpMIPS64MOVVnop)
  4522  		v.AddArg(x)
  4523  		return true
  4524  	}
  4525  	// match: (MOVVreg (MOVVconst [c]))
  4526  	// cond:
  4527  	// result: (MOVVconst [c])
  4528  	for {
  4529  		v_0 := v.Args[0]
  4530  		if v_0.Op != OpMIPS64MOVVconst {
  4531  			break
  4532  		}
  4533  		c := v_0.AuxInt
  4534  		v.reset(OpMIPS64MOVVconst)
  4535  		v.AuxInt = c
  4536  		return true
  4537  	}
  4538  	return false
  4539  }
  4540  func rewriteValueMIPS64_OpMIPS64MOVVstore(v *Value) bool {
  4541  	// match: (MOVVstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
  4542  	// cond: is32Bit(off1+off2)
  4543  	// result: (MOVVstore [off1+off2] {sym} ptr val mem)
  4544  	for {
  4545  		off1 := v.AuxInt
  4546  		sym := v.Aux
  4547  		v_0 := v.Args[0]
  4548  		if v_0.Op != OpMIPS64ADDVconst {
  4549  			break
  4550  		}
  4551  		off2 := v_0.AuxInt
  4552  		ptr := v_0.Args[0]
  4553  		val := v.Args[1]
  4554  		mem := v.Args[2]
  4555  		if !(is32Bit(off1 + off2)) {
  4556  			break
  4557  		}
  4558  		v.reset(OpMIPS64MOVVstore)
  4559  		v.AuxInt = off1 + off2
  4560  		v.Aux = sym
  4561  		v.AddArg(ptr)
  4562  		v.AddArg(val)
  4563  		v.AddArg(mem)
  4564  		return true
  4565  	}
  4566  	// match: (MOVVstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
  4567  	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
  4568  	// result: (MOVVstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
  4569  	for {
  4570  		off1 := v.AuxInt
  4571  		sym1 := v.Aux
  4572  		v_0 := v.Args[0]
  4573  		if v_0.Op != OpMIPS64MOVVaddr {
  4574  			break
  4575  		}
  4576  		off2 := v_0.AuxInt
  4577  		sym2 := v_0.Aux
  4578  		ptr := v_0.Args[0]
  4579  		val := v.Args[1]
  4580  		mem := v.Args[2]
  4581  		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
  4582  			break
  4583  		}
  4584  		v.reset(OpMIPS64MOVVstore)
  4585  		v.AuxInt = off1 + off2
  4586  		v.Aux = mergeSym(sym1, sym2)
  4587  		v.AddArg(ptr)
  4588  		v.AddArg(val)
  4589  		v.AddArg(mem)
  4590  		return true
  4591  	}
  4592  	// match: (MOVVstore [off] {sym} ptr (MOVVconst [0]) mem)
  4593  	// cond:
  4594  	// result: (MOVVstorezero [off] {sym} ptr mem)
  4595  	for {
  4596  		off := v.AuxInt
  4597  		sym := v.Aux
  4598  		ptr := v.Args[0]
  4599  		v_1 := v.Args[1]
  4600  		if v_1.Op != OpMIPS64MOVVconst {
  4601  			break
  4602  		}
  4603  		if v_1.AuxInt != 0 {
  4604  			break
  4605  		}
  4606  		mem := v.Args[2]
  4607  		v.reset(OpMIPS64MOVVstorezero)
  4608  		v.AuxInt = off
  4609  		v.Aux = sym
  4610  		v.AddArg(ptr)
  4611  		v.AddArg(mem)
  4612  		return true
  4613  	}
  4614  	return false
  4615  }
  4616  func rewriteValueMIPS64_OpMIPS64MOVVstorezero(v *Value) bool {
  4617  	// match: (MOVVstorezero [off1] {sym} (ADDVconst [off2] ptr) mem)
  4618  	// cond: is32Bit(off1+off2)
  4619  	// result: (MOVVstorezero [off1+off2] {sym} ptr mem)
  4620  	for {
  4621  		off1 := v.AuxInt
  4622  		sym := v.Aux
  4623  		v_0 := v.Args[0]
  4624  		if v_0.Op != OpMIPS64ADDVconst {
  4625  			break
  4626  		}
  4627  		off2 := v_0.AuxInt
  4628  		ptr := v_0.Args[0]
  4629  		mem := v.Args[1]
  4630  		if !(is32Bit(off1 + off2)) {
  4631  			break
  4632  		}
  4633  		v.reset(OpMIPS64MOVVstorezero)
  4634  		v.AuxInt = off1 + off2
  4635  		v.Aux = sym
  4636  		v.AddArg(ptr)
  4637  		v.AddArg(mem)
  4638  		return true
  4639  	}
  4640  	// match: (MOVVstorezero [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
  4641  	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
  4642  	// result: (MOVVstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
  4643  	for {
  4644  		off1 := v.AuxInt
  4645  		sym1 := v.Aux
  4646  		v_0 := v.Args[0]
  4647  		if v_0.Op != OpMIPS64MOVVaddr {
  4648  			break
  4649  		}
  4650  		off2 := v_0.AuxInt
  4651  		sym2 := v_0.Aux
  4652  		ptr := v_0.Args[0]
  4653  		mem := v.Args[1]
  4654  		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
  4655  			break
  4656  		}
  4657  		v.reset(OpMIPS64MOVVstorezero)
  4658  		v.AuxInt = off1 + off2
  4659  		v.Aux = mergeSym(sym1, sym2)
  4660  		v.AddArg(ptr)
  4661  		v.AddArg(mem)
  4662  		return true
  4663  	}
  4664  	return false
  4665  }
  4666  func rewriteValueMIPS64_OpMIPS64MOVWUload(v *Value) bool {
  4667  	// match: (MOVWUload [off1] {sym} (ADDVconst [off2] ptr) mem)
  4668  	// cond: is32Bit(off1+off2)
  4669  	// result: (MOVWUload [off1+off2] {sym} ptr mem)
  4670  	for {
  4671  		off1 := v.AuxInt
  4672  		sym := v.Aux
  4673  		v_0 := v.Args[0]
  4674  		if v_0.Op != OpMIPS64ADDVconst {
  4675  			break
  4676  		}
  4677  		off2 := v_0.AuxInt
  4678  		ptr := v_0.Args[0]
  4679  		mem := v.Args[1]
  4680  		if !(is32Bit(off1 + off2)) {
  4681  			break
  4682  		}
  4683  		v.reset(OpMIPS64MOVWUload)
  4684  		v.AuxInt = off1 + off2
  4685  		v.Aux = sym
  4686  		v.AddArg(ptr)
  4687  		v.AddArg(mem)
  4688  		return true
  4689  	}
  4690  	// match: (MOVWUload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
  4691  	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
  4692  	// result: (MOVWUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
  4693  	for {
  4694  		off1 := v.AuxInt
  4695  		sym1 := v.Aux
  4696  		v_0 := v.Args[0]
  4697  		if v_0.Op != OpMIPS64MOVVaddr {
  4698  			break
  4699  		}
  4700  		off2 := v_0.AuxInt
  4701  		sym2 := v_0.Aux
  4702  		ptr := v_0.Args[0]
  4703  		mem := v.Args[1]
  4704  		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
  4705  			break
  4706  		}
  4707  		v.reset(OpMIPS64MOVWUload)
  4708  		v.AuxInt = off1 + off2
  4709  		v.Aux = mergeSym(sym1, sym2)
  4710  		v.AddArg(ptr)
  4711  		v.AddArg(mem)
  4712  		return true
  4713  	}
  4714  	return false
  4715  }
  4716  func rewriteValueMIPS64_OpMIPS64MOVWUreg(v *Value) bool {
  4717  	// match: (MOVWUreg x:(MOVBUload _ _))
  4718  	// cond:
  4719  	// result: (MOVVreg x)
  4720  	for {
  4721  		x := v.Args[0]
  4722  		if x.Op != OpMIPS64MOVBUload {
  4723  			break
  4724  		}
  4725  		v.reset(OpMIPS64MOVVreg)
  4726  		v.AddArg(x)
  4727  		return true
  4728  	}
  4729  	// match: (MOVWUreg x:(MOVHUload _ _))
  4730  	// cond:
  4731  	// result: (MOVVreg x)
  4732  	for {
  4733  		x := v.Args[0]
  4734  		if x.Op != OpMIPS64MOVHUload {
  4735  			break
  4736  		}
  4737  		v.reset(OpMIPS64MOVVreg)
  4738  		v.AddArg(x)
  4739  		return true
  4740  	}
  4741  	// match: (MOVWUreg x:(MOVWUload _ _))
  4742  	// cond:
  4743  	// result: (MOVVreg x)
  4744  	for {
  4745  		x := v.Args[0]
  4746  		if x.Op != OpMIPS64MOVWUload {
  4747  			break
  4748  		}
  4749  		v.reset(OpMIPS64MOVVreg)
  4750  		v.AddArg(x)
  4751  		return true
  4752  	}
  4753  	// match: (MOVWUreg x:(MOVBUreg _))
  4754  	// cond:
  4755  	// result: (MOVVreg x)
  4756  	for {
  4757  		x := v.Args[0]
  4758  		if x.Op != OpMIPS64MOVBUreg {
  4759  			break
  4760  		}
  4761  		v.reset(OpMIPS64MOVVreg)
  4762  		v.AddArg(x)
  4763  		return true
  4764  	}
  4765  	// match: (MOVWUreg x:(MOVHUreg _))
  4766  	// cond:
  4767  	// result: (MOVVreg x)
  4768  	for {
  4769  		x := v.Args[0]
  4770  		if x.Op != OpMIPS64MOVHUreg {
  4771  			break
  4772  		}
  4773  		v.reset(OpMIPS64MOVVreg)
  4774  		v.AddArg(x)
  4775  		return true
  4776  	}
  4777  	// match: (MOVWUreg x:(MOVWUreg _))
  4778  	// cond:
  4779  	// result: (MOVVreg x)
  4780  	for {
  4781  		x := v.Args[0]
  4782  		if x.Op != OpMIPS64MOVWUreg {
  4783  			break
  4784  		}
  4785  		v.reset(OpMIPS64MOVVreg)
  4786  		v.AddArg(x)
  4787  		return true
  4788  	}
  4789  	// match: (MOVWUreg (MOVVconst [c]))
  4790  	// cond:
  4791  	// result: (MOVVconst [int64(uint32(c))])
  4792  	for {
  4793  		v_0 := v.Args[0]
  4794  		if v_0.Op != OpMIPS64MOVVconst {
  4795  			break
  4796  		}
  4797  		c := v_0.AuxInt
  4798  		v.reset(OpMIPS64MOVVconst)
  4799  		v.AuxInt = int64(uint32(c))
  4800  		return true
  4801  	}
  4802  	return false
  4803  }
  4804  func rewriteValueMIPS64_OpMIPS64MOVWload(v *Value) bool {
  4805  	// match: (MOVWload [off1] {sym} (ADDVconst [off2] ptr) mem)
  4806  	// cond: is32Bit(off1+off2)
  4807  	// result: (MOVWload  [off1+off2] {sym} ptr mem)
  4808  	for {
  4809  		off1 := v.AuxInt
  4810  		sym := v.Aux
  4811  		v_0 := v.Args[0]
  4812  		if v_0.Op != OpMIPS64ADDVconst {
  4813  			break
  4814  		}
  4815  		off2 := v_0.AuxInt
  4816  		ptr := v_0.Args[0]
  4817  		mem := v.Args[1]
  4818  		if !(is32Bit(off1 + off2)) {
  4819  			break
  4820  		}
  4821  		v.reset(OpMIPS64MOVWload)
  4822  		v.AuxInt = off1 + off2
  4823  		v.Aux = sym
  4824  		v.AddArg(ptr)
  4825  		v.AddArg(mem)
  4826  		return true
  4827  	}
  4828  	// match: (MOVWload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
  4829  	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
  4830  	// result: (MOVWload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
  4831  	for {
  4832  		off1 := v.AuxInt
  4833  		sym1 := v.Aux
  4834  		v_0 := v.Args[0]
  4835  		if v_0.Op != OpMIPS64MOVVaddr {
  4836  			break
  4837  		}
  4838  		off2 := v_0.AuxInt
  4839  		sym2 := v_0.Aux
  4840  		ptr := v_0.Args[0]
  4841  		mem := v.Args[1]
  4842  		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
  4843  			break
  4844  		}
  4845  		v.reset(OpMIPS64MOVWload)
  4846  		v.AuxInt = off1 + off2
  4847  		v.Aux = mergeSym(sym1, sym2)
  4848  		v.AddArg(ptr)
  4849  		v.AddArg(mem)
  4850  		return true
  4851  	}
  4852  	return false
  4853  }
  4854  func rewriteValueMIPS64_OpMIPS64MOVWreg(v *Value) bool {
  4855  	// match: (MOVWreg x:(MOVBload _ _))
  4856  	// cond:
  4857  	// result: (MOVVreg x)
  4858  	for {
  4859  		x := v.Args[0]
  4860  		if x.Op != OpMIPS64MOVBload {
  4861  			break
  4862  		}
  4863  		v.reset(OpMIPS64MOVVreg)
  4864  		v.AddArg(x)
  4865  		return true
  4866  	}
  4867  	// match: (MOVWreg x:(MOVBUload _ _))
  4868  	// cond:
  4869  	// result: (MOVVreg x)
  4870  	for {
  4871  		x := v.Args[0]
  4872  		if x.Op != OpMIPS64MOVBUload {
  4873  			break
  4874  		}
  4875  		v.reset(OpMIPS64MOVVreg)
  4876  		v.AddArg(x)
  4877  		return true
  4878  	}
  4879  	// match: (MOVWreg x:(MOVHload _ _))
  4880  	// cond:
  4881  	// result: (MOVVreg x)
  4882  	for {
  4883  		x := v.Args[0]
  4884  		if x.Op != OpMIPS64MOVHload {
  4885  			break
  4886  		}
  4887  		v.reset(OpMIPS64MOVVreg)
  4888  		v.AddArg(x)
  4889  		return true
  4890  	}
  4891  	// match: (MOVWreg x:(MOVHUload _ _))
  4892  	// cond:
  4893  	// result: (MOVVreg x)
  4894  	for {
  4895  		x := v.Args[0]
  4896  		if x.Op != OpMIPS64MOVHUload {
  4897  			break
  4898  		}
  4899  		v.reset(OpMIPS64MOVVreg)
  4900  		v.AddArg(x)
  4901  		return true
  4902  	}
  4903  	// match: (MOVWreg x:(MOVWload _ _))
  4904  	// cond:
  4905  	// result: (MOVVreg x)
  4906  	for {
  4907  		x := v.Args[0]
  4908  		if x.Op != OpMIPS64MOVWload {
  4909  			break
  4910  		}
  4911  		v.reset(OpMIPS64MOVVreg)
  4912  		v.AddArg(x)
  4913  		return true
  4914  	}
  4915  	// match: (MOVWreg x:(MOVBreg _))
  4916  	// cond:
  4917  	// result: (MOVVreg x)
  4918  	for {
  4919  		x := v.Args[0]
  4920  		if x.Op != OpMIPS64MOVBreg {
  4921  			break
  4922  		}
  4923  		v.reset(OpMIPS64MOVVreg)
  4924  		v.AddArg(x)
  4925  		return true
  4926  	}
  4927  	// match: (MOVWreg x:(MOVBUreg _))
  4928  	// cond:
  4929  	// result: (MOVVreg x)
  4930  	for {
  4931  		x := v.Args[0]
  4932  		if x.Op != OpMIPS64MOVBUreg {
  4933  			break
  4934  		}
  4935  		v.reset(OpMIPS64MOVVreg)
  4936  		v.AddArg(x)
  4937  		return true
  4938  	}
  4939  	// match: (MOVWreg x:(MOVHreg _))
  4940  	// cond:
  4941  	// result: (MOVVreg x)
  4942  	for {
  4943  		x := v.Args[0]
  4944  		if x.Op != OpMIPS64MOVHreg {
  4945  			break
  4946  		}
  4947  		v.reset(OpMIPS64MOVVreg)
  4948  		v.AddArg(x)
  4949  		return true
  4950  	}
  4951  	// match: (MOVWreg x:(MOVHreg _))
  4952  	// cond:
  4953  	// result: (MOVVreg x)
  4954  	for {
  4955  		x := v.Args[0]
  4956  		if x.Op != OpMIPS64MOVHreg {
  4957  			break
  4958  		}
  4959  		v.reset(OpMIPS64MOVVreg)
  4960  		v.AddArg(x)
  4961  		return true
  4962  	}
  4963  	// match: (MOVWreg x:(MOVWreg _))
  4964  	// cond:
  4965  	// result: (MOVVreg x)
  4966  	for {
  4967  		x := v.Args[0]
  4968  		if x.Op != OpMIPS64MOVWreg {
  4969  			break
  4970  		}
  4971  		v.reset(OpMIPS64MOVVreg)
  4972  		v.AddArg(x)
  4973  		return true
  4974  	}
  4975  	// match: (MOVWreg (MOVVconst [c]))
  4976  	// cond:
  4977  	// result: (MOVVconst [int64(int32(c))])
  4978  	for {
  4979  		v_0 := v.Args[0]
  4980  		if v_0.Op != OpMIPS64MOVVconst {
  4981  			break
  4982  		}
  4983  		c := v_0.AuxInt
  4984  		v.reset(OpMIPS64MOVVconst)
  4985  		v.AuxInt = int64(int32(c))
  4986  		return true
  4987  	}
  4988  	return false
  4989  }
  4990  func rewriteValueMIPS64_OpMIPS64MOVWstore(v *Value) bool {
  4991  	// match: (MOVWstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
  4992  	// cond: is32Bit(off1+off2)
  4993  	// result: (MOVWstore [off1+off2] {sym} ptr val mem)
  4994  	for {
  4995  		off1 := v.AuxInt
  4996  		sym := v.Aux
  4997  		v_0 := v.Args[0]
  4998  		if v_0.Op != OpMIPS64ADDVconst {
  4999  			break
  5000  		}
  5001  		off2 := v_0.AuxInt
  5002  		ptr := v_0.Args[0]
  5003  		val := v.Args[1]
  5004  		mem := v.Args[2]
  5005  		if !(is32Bit(off1 + off2)) {
  5006  			break
  5007  		}
  5008  		v.reset(OpMIPS64MOVWstore)
  5009  		v.AuxInt = off1 + off2
  5010  		v.Aux = sym
  5011  		v.AddArg(ptr)
  5012  		v.AddArg(val)
  5013  		v.AddArg(mem)
  5014  		return true
  5015  	}
  5016  	// match: (MOVWstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
  5017  	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
  5018  	// result: (MOVWstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
  5019  	for {
  5020  		off1 := v.AuxInt
  5021  		sym1 := v.Aux
  5022  		v_0 := v.Args[0]
  5023  		if v_0.Op != OpMIPS64MOVVaddr {
  5024  			break
  5025  		}
  5026  		off2 := v_0.AuxInt
  5027  		sym2 := v_0.Aux
  5028  		ptr := v_0.Args[0]
  5029  		val := v.Args[1]
  5030  		mem := v.Args[2]
  5031  		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
  5032  			break
  5033  		}
  5034  		v.reset(OpMIPS64MOVWstore)
  5035  		v.AuxInt = off1 + off2
  5036  		v.Aux = mergeSym(sym1, sym2)
  5037  		v.AddArg(ptr)
  5038  		v.AddArg(val)
  5039  		v.AddArg(mem)
  5040  		return true
  5041  	}
  5042  	// match: (MOVWstore [off] {sym} ptr (MOVVconst [0]) mem)
  5043  	// cond:
  5044  	// result: (MOVWstorezero [off] {sym} ptr mem)
  5045  	for {
  5046  		off := v.AuxInt
  5047  		sym := v.Aux
  5048  		ptr := v.Args[0]
  5049  		v_1 := v.Args[1]
  5050  		if v_1.Op != OpMIPS64MOVVconst {
  5051  			break
  5052  		}
  5053  		if v_1.AuxInt != 0 {
  5054  			break
  5055  		}
  5056  		mem := v.Args[2]
  5057  		v.reset(OpMIPS64MOVWstorezero)
  5058  		v.AuxInt = off
  5059  		v.Aux = sym
  5060  		v.AddArg(ptr)
  5061  		v.AddArg(mem)
  5062  		return true
  5063  	}
  5064  	// match: (MOVWstore [off] {sym} ptr (MOVWreg x) mem)
  5065  	// cond:
  5066  	// result: (MOVWstore [off] {sym} ptr x mem)
  5067  	for {
  5068  		off := v.AuxInt
  5069  		sym := v.Aux
  5070  		ptr := v.Args[0]
  5071  		v_1 := v.Args[1]
  5072  		if v_1.Op != OpMIPS64MOVWreg {
  5073  			break
  5074  		}
  5075  		x := v_1.Args[0]
  5076  		mem := v.Args[2]
  5077  		v.reset(OpMIPS64MOVWstore)
  5078  		v.AuxInt = off
  5079  		v.Aux = sym
  5080  		v.AddArg(ptr)
  5081  		v.AddArg(x)
  5082  		v.AddArg(mem)
  5083  		return true
  5084  	}
  5085  	// match: (MOVWstore [off] {sym} ptr (MOVWUreg x) mem)
  5086  	// cond:
  5087  	// result: (MOVWstore [off] {sym} ptr x mem)
  5088  	for {
  5089  		off := v.AuxInt
  5090  		sym := v.Aux
  5091  		ptr := v.Args[0]
  5092  		v_1 := v.Args[1]
  5093  		if v_1.Op != OpMIPS64MOVWUreg {
  5094  			break
  5095  		}
  5096  		x := v_1.Args[0]
  5097  		mem := v.Args[2]
  5098  		v.reset(OpMIPS64MOVWstore)
  5099  		v.AuxInt = off
  5100  		v.Aux = sym
  5101  		v.AddArg(ptr)
  5102  		v.AddArg(x)
  5103  		v.AddArg(mem)
  5104  		return true
  5105  	}
  5106  	return false
  5107  }
  5108  func rewriteValueMIPS64_OpMIPS64MOVWstorezero(v *Value) bool {
  5109  	// match: (MOVWstorezero [off1] {sym} (ADDVconst [off2] ptr) mem)
  5110  	// cond: is32Bit(off1+off2)
  5111  	// result: (MOVWstorezero [off1+off2] {sym} ptr mem)
  5112  	for {
  5113  		off1 := v.AuxInt
  5114  		sym := v.Aux
  5115  		v_0 := v.Args[0]
  5116  		if v_0.Op != OpMIPS64ADDVconst {
  5117  			break
  5118  		}
  5119  		off2 := v_0.AuxInt
  5120  		ptr := v_0.Args[0]
  5121  		mem := v.Args[1]
  5122  		if !(is32Bit(off1 + off2)) {
  5123  			break
  5124  		}
  5125  		v.reset(OpMIPS64MOVWstorezero)
  5126  		v.AuxInt = off1 + off2
  5127  		v.Aux = sym
  5128  		v.AddArg(ptr)
  5129  		v.AddArg(mem)
  5130  		return true
  5131  	}
  5132  	// match: (MOVWstorezero [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
  5133  	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
  5134  	// result: (MOVWstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
  5135  	for {
  5136  		off1 := v.AuxInt
  5137  		sym1 := v.Aux
  5138  		v_0 := v.Args[0]
  5139  		if v_0.Op != OpMIPS64MOVVaddr {
  5140  			break
  5141  		}
  5142  		off2 := v_0.AuxInt
  5143  		sym2 := v_0.Aux
  5144  		ptr := v_0.Args[0]
  5145  		mem := v.Args[1]
  5146  		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
  5147  			break
  5148  		}
  5149  		v.reset(OpMIPS64MOVWstorezero)
  5150  		v.AuxInt = off1 + off2
  5151  		v.Aux = mergeSym(sym1, sym2)
  5152  		v.AddArg(ptr)
  5153  		v.AddArg(mem)
  5154  		return true
  5155  	}
  5156  	return false
  5157  }
  5158  func rewriteValueMIPS64_OpMIPS64NEGV(v *Value) bool {
  5159  	// match: (NEGV (MOVVconst [c]))
  5160  	// cond:
  5161  	// result: (MOVVconst [-c])
  5162  	for {
  5163  		v_0 := v.Args[0]
  5164  		if v_0.Op != OpMIPS64MOVVconst {
  5165  			break
  5166  		}
  5167  		c := v_0.AuxInt
  5168  		v.reset(OpMIPS64MOVVconst)
  5169  		v.AuxInt = -c
  5170  		return true
  5171  	}
  5172  	return false
  5173  }
  5174  func rewriteValueMIPS64_OpMIPS64NOR(v *Value) bool {
  5175  	// match: (NOR x (MOVVconst [c]))
  5176  	// cond: is32Bit(c)
  5177  	// result: (NORconst [c] x)
  5178  	for {
  5179  		x := v.Args[0]
  5180  		v_1 := v.Args[1]
  5181  		if v_1.Op != OpMIPS64MOVVconst {
  5182  			break
  5183  		}
  5184  		c := v_1.AuxInt
  5185  		if !(is32Bit(c)) {
  5186  			break
  5187  		}
  5188  		v.reset(OpMIPS64NORconst)
  5189  		v.AuxInt = c
  5190  		v.AddArg(x)
  5191  		return true
  5192  	}
  5193  	// match: (NOR (MOVVconst [c]) x)
  5194  	// cond: is32Bit(c)
  5195  	// result: (NORconst [c] x)
  5196  	for {
  5197  		v_0 := v.Args[0]
  5198  		if v_0.Op != OpMIPS64MOVVconst {
  5199  			break
  5200  		}
  5201  		c := v_0.AuxInt
  5202  		x := v.Args[1]
  5203  		if !(is32Bit(c)) {
  5204  			break
  5205  		}
  5206  		v.reset(OpMIPS64NORconst)
  5207  		v.AuxInt = c
  5208  		v.AddArg(x)
  5209  		return true
  5210  	}
  5211  	return false
  5212  }
  5213  func rewriteValueMIPS64_OpMIPS64NORconst(v *Value) bool {
  5214  	// match: (NORconst [c] (MOVVconst [d]))
  5215  	// cond:
  5216  	// result: (MOVVconst [^(c|d)])
  5217  	for {
  5218  		c := v.AuxInt
  5219  		v_0 := v.Args[0]
  5220  		if v_0.Op != OpMIPS64MOVVconst {
  5221  			break
  5222  		}
  5223  		d := v_0.AuxInt
  5224  		v.reset(OpMIPS64MOVVconst)
  5225  		v.AuxInt = ^(c | d)
  5226  		return true
  5227  	}
  5228  	return false
  5229  }
  5230  func rewriteValueMIPS64_OpMIPS64OR(v *Value) bool {
  5231  	// match: (OR x (MOVVconst [c]))
  5232  	// cond: is32Bit(c)
  5233  	// result: (ORconst  [c] x)
  5234  	for {
  5235  		x := v.Args[0]
  5236  		v_1 := v.Args[1]
  5237  		if v_1.Op != OpMIPS64MOVVconst {
  5238  			break
  5239  		}
  5240  		c := v_1.AuxInt
  5241  		if !(is32Bit(c)) {
  5242  			break
  5243  		}
  5244  		v.reset(OpMIPS64ORconst)
  5245  		v.AuxInt = c
  5246  		v.AddArg(x)
  5247  		return true
  5248  	}
  5249  	// match: (OR (MOVVconst [c]) x)
  5250  	// cond: is32Bit(c)
  5251  	// result: (ORconst  [c] x)
  5252  	for {
  5253  		v_0 := v.Args[0]
  5254  		if v_0.Op != OpMIPS64MOVVconst {
  5255  			break
  5256  		}
  5257  		c := v_0.AuxInt
  5258  		x := v.Args[1]
  5259  		if !(is32Bit(c)) {
  5260  			break
  5261  		}
  5262  		v.reset(OpMIPS64ORconst)
  5263  		v.AuxInt = c
  5264  		v.AddArg(x)
  5265  		return true
  5266  	}
  5267  	// match: (OR x x)
  5268  	// cond:
  5269  	// result: x
  5270  	for {
  5271  		x := v.Args[0]
  5272  		if x != v.Args[1] {
  5273  			break
  5274  		}
  5275  		v.reset(OpCopy)
  5276  		v.Type = x.Type
  5277  		v.AddArg(x)
  5278  		return true
  5279  	}
  5280  	return false
  5281  }
  5282  func rewriteValueMIPS64_OpMIPS64ORconst(v *Value) bool {
  5283  	// match: (ORconst [0] x)
  5284  	// cond:
  5285  	// result: x
  5286  	for {
  5287  		if v.AuxInt != 0 {
  5288  			break
  5289  		}
  5290  		x := v.Args[0]
  5291  		v.reset(OpCopy)
  5292  		v.Type = x.Type
  5293  		v.AddArg(x)
  5294  		return true
  5295  	}
  5296  	// match: (ORconst [-1] _)
  5297  	// cond:
  5298  	// result: (MOVVconst [-1])
  5299  	for {
  5300  		if v.AuxInt != -1 {
  5301  			break
  5302  		}
  5303  		v.reset(OpMIPS64MOVVconst)
  5304  		v.AuxInt = -1
  5305  		return true
  5306  	}
  5307  	// match: (ORconst [c] (MOVVconst [d]))
  5308  	// cond:
  5309  	// result: (MOVVconst [c|d])
  5310  	for {
  5311  		c := v.AuxInt
  5312  		v_0 := v.Args[0]
  5313  		if v_0.Op != OpMIPS64MOVVconst {
  5314  			break
  5315  		}
  5316  		d := v_0.AuxInt
  5317  		v.reset(OpMIPS64MOVVconst)
  5318  		v.AuxInt = c | d
  5319  		return true
  5320  	}
  5321  	// match: (ORconst [c] (ORconst [d] x))
  5322  	// cond: is32Bit(c|d)
  5323  	// result: (ORconst [c|d] x)
  5324  	for {
  5325  		c := v.AuxInt
  5326  		v_0 := v.Args[0]
  5327  		if v_0.Op != OpMIPS64ORconst {
  5328  			break
  5329  		}
  5330  		d := v_0.AuxInt
  5331  		x := v_0.Args[0]
  5332  		if !(is32Bit(c | d)) {
  5333  			break
  5334  		}
  5335  		v.reset(OpMIPS64ORconst)
  5336  		v.AuxInt = c | d
  5337  		v.AddArg(x)
  5338  		return true
  5339  	}
  5340  	return false
  5341  }
  5342  func rewriteValueMIPS64_OpMIPS64SGT(v *Value) bool {
  5343  	// match: (SGT (MOVVconst [c]) x)
  5344  	// cond: is32Bit(c)
  5345  	// result: (SGTconst  [c] x)
  5346  	for {
  5347  		v_0 := v.Args[0]
  5348  		if v_0.Op != OpMIPS64MOVVconst {
  5349  			break
  5350  		}
  5351  		c := v_0.AuxInt
  5352  		x := v.Args[1]
  5353  		if !(is32Bit(c)) {
  5354  			break
  5355  		}
  5356  		v.reset(OpMIPS64SGTconst)
  5357  		v.AuxInt = c
  5358  		v.AddArg(x)
  5359  		return true
  5360  	}
  5361  	return false
  5362  }
  5363  func rewriteValueMIPS64_OpMIPS64SGTU(v *Value) bool {
  5364  	// match: (SGTU (MOVVconst [c]) x)
  5365  	// cond: is32Bit(c)
  5366  	// result: (SGTUconst [c] x)
  5367  	for {
  5368  		v_0 := v.Args[0]
  5369  		if v_0.Op != OpMIPS64MOVVconst {
  5370  			break
  5371  		}
  5372  		c := v_0.AuxInt
  5373  		x := v.Args[1]
  5374  		if !(is32Bit(c)) {
  5375  			break
  5376  		}
  5377  		v.reset(OpMIPS64SGTUconst)
  5378  		v.AuxInt = c
  5379  		v.AddArg(x)
  5380  		return true
  5381  	}
  5382  	return false
  5383  }
// rewriteValueMIPS64_OpMIPS64SGTUconst statically resolves unsigned
// compare-with-constant values whenever the operand's range is known:
// constant operands fold outright, and zero/width-limited operands
// (MOVBUreg, MOVHUreg, ANDconst, SRLVconst) resolve when the constant lies
// outside their possible range. Rules are tried in order; the first match
// wins.
func rewriteValueMIPS64_OpMIPS64SGTUconst(v *Value) bool {
	// match: (SGTUconst [c] (MOVVconst [d]))
	// cond: uint64(c)>uint64(d)
	// result: (MOVVconst [1])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0.AuxInt
		if !(uint64(c) > uint64(d)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 1
		return true
	}
	// match: (SGTUconst [c] (MOVVconst [d]))
	// cond: uint64(c)<=uint64(d)
	// result: (MOVVconst [0])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0.AuxInt
		if !(uint64(c) <= uint64(d)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	// match: (SGTUconst [c] (MOVBUreg _))
	// cond: 0xff < uint64(c)
	// result: (MOVVconst [1])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVBUreg {
			break
		}
		// A zero-extended byte is at most 0xff, so any larger constant
		// compares greater unconditionally.
		if !(0xff < uint64(c)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 1
		return true
	}
	// match: (SGTUconst [c] (MOVHUreg _))
	// cond: 0xffff < uint64(c)
	// result: (MOVVconst [1])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVHUreg {
			break
		}
		// A zero-extended halfword is at most 0xffff.
		if !(0xffff < uint64(c)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 1
		return true
	}
	// match: (SGTUconst [c] (ANDconst [m] _))
	// cond: uint64(m) < uint64(c)
	// result: (MOVVconst [1])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ANDconst {
			break
		}
		m := v_0.AuxInt
		// A value masked with m is at most m (unsigned).
		if !(uint64(m) < uint64(c)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 1
		return true
	}
	// match: (SGTUconst [c] (SRLVconst _ [d]))
	// cond: 0 < d && d <= 63 && 1<<uint64(64-d) <= uint64(c)
	// result: (MOVVconst [1])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64SRLVconst {
			break
		}
		d := v_0.AuxInt
		// Logical right shift by d leaves a value below 1<<(64-d); if c
		// reaches that bound the comparison is always true.
		if !(0 < d && d <= 63 && 1<<uint64(64-d) <= uint64(c)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 1
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SGTconst statically resolves signed
// compare-with-constant values. Constant operands fold outright; operands
// whose signed range is bounded by a sign/zero extension (MOVBreg, MOVBUreg,
// MOVHreg, MOVHUreg, MOVWUreg), a mask (ANDconst), or a logical right shift
// (SRLVconst) resolve to 0 or 1 when the constant lies outside that range.
// Rules are tried in order; the first match wins.
func rewriteValueMIPS64_OpMIPS64SGTconst(v *Value) bool {
	// match: (SGTconst [c] (MOVVconst [d]))
	// cond: int64(c)>int64(d)
	// result: (MOVVconst [1])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0.AuxInt
		if !(int64(c) > int64(d)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 1
		return true
	}
	// match: (SGTconst [c] (MOVVconst [d]))
	// cond: int64(c)<=int64(d)
	// result: (MOVVconst [0])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0.AuxInt
		if !(int64(c) <= int64(d)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	// match: (SGTconst [c] (MOVBreg _))
	// cond: 0x7f < int64(c)
	// result: (MOVVconst [1])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVBreg {
			break
		}
		// A sign-extended byte is in [-0x80, 0x7f].
		if !(0x7f < int64(c)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 1
		return true
	}
	// match: (SGTconst [c] (MOVBreg _))
	// cond: int64(c) <= -0x80
	// result: (MOVVconst [0])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVBreg {
			break
		}
		if !(int64(c) <= -0x80) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	// match: (SGTconst [c] (MOVBUreg _))
	// cond: 0xff < int64(c)
	// result: (MOVVconst [1])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVBUreg {
			break
		}
		// A zero-extended byte is in [0, 0xff].
		if !(0xff < int64(c)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 1
		return true
	}
	// match: (SGTconst [c] (MOVBUreg _))
	// cond: int64(c) < 0
	// result: (MOVVconst [0])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVBUreg {
			break
		}
		if !(int64(c) < 0) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	// match: (SGTconst [c] (MOVHreg _))
	// cond: 0x7fff < int64(c)
	// result: (MOVVconst [1])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVHreg {
			break
		}
		// A sign-extended halfword is in [-0x8000, 0x7fff].
		if !(0x7fff < int64(c)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 1
		return true
	}
	// match: (SGTconst [c] (MOVHreg _))
	// cond: int64(c) <= -0x8000
	// result: (MOVVconst [0])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVHreg {
			break
		}
		if !(int64(c) <= -0x8000) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	// match: (SGTconst [c] (MOVHUreg _))
	// cond: 0xffff < int64(c)
	// result: (MOVVconst [1])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVHUreg {
			break
		}
		// A zero-extended halfword is in [0, 0xffff].
		if !(0xffff < int64(c)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 1
		return true
	}
	// match: (SGTconst [c] (MOVHUreg _))
	// cond: int64(c) < 0
	// result: (MOVVconst [0])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVHUreg {
			break
		}
		if !(int64(c) < 0) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	// match: (SGTconst [c] (MOVWUreg _))
	// cond: int64(c) < 0
	// result: (MOVVconst [0])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVWUreg {
			break
		}
		// A zero-extended word is never negative, so it can't be less
		// than a negative constant.
		if !(int64(c) < 0) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	// match: (SGTconst [c] (ANDconst [m] _))
	// cond: 0 <= m && m < c
	// result: (MOVVconst [1])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ANDconst {
			break
		}
		m := v_0.AuxInt
		// With a non-negative mask the result lies in [0, m].
		if !(0 <= m && m < c) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 1
		return true
	}
	// match: (SGTconst [c] (SRLVconst _ [d]))
	// cond: 0 <= c && 0 < d && d <= 63 && 1<<uint64(64-d) <= c
	// result: (MOVVconst [1])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64SRLVconst {
			break
		}
		d := v_0.AuxInt
		// Logical right shift by d leaves a non-negative value below
		// 1<<(64-d); a constant at or above that bound always wins.
		if !(0 <= c && 0 < d && d <= 63 && 1<<uint64(64-d) <= c) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 1
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SLLV rewrites variable left shifts whose count
// is a constant. Rule order matters: counts >= 64 are rewritten to zero
// first, so the generic rule below only ever produces SLLVconst with an
// in-range count.
func rewriteValueMIPS64_OpMIPS64SLLV(v *Value) bool {
	// match: (SLLV _ (MOVVconst [c]))
	// cond: uint64(c)>=64
	// result: (MOVVconst [0])
	for {
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_1.AuxInt
		if !(uint64(c) >= 64) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	// match: (SLLV x (MOVVconst [c]))
	// cond:
	// result: (SLLVconst x [c])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpMIPS64SLLVconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	return false
}
  5736  func rewriteValueMIPS64_OpMIPS64SLLVconst(v *Value) bool {
  5737  	// match: (SLLVconst [c] (MOVVconst [d]))
  5738  	// cond:
  5739  	// result: (MOVVconst [int64(d)<<uint64(c)])
  5740  	for {
  5741  		c := v.AuxInt
  5742  		v_0 := v.Args[0]
  5743  		if v_0.Op != OpMIPS64MOVVconst {
  5744  			break
  5745  		}
  5746  		d := v_0.AuxInt
  5747  		v.reset(OpMIPS64MOVVconst)
  5748  		v.AuxInt = int64(d) << uint64(c)
  5749  		return true
  5750  	}
  5751  	return false
  5752  }
// rewriteValueMIPS64_OpMIPS64SRAV rewrites variable arithmetic right shifts
// whose count is a constant. Rule order matters: counts >= 64 are clamped
// to 63 first (filling the result with the sign bit), so the generic rule
// below only ever produces SRAVconst with an in-range count.
func rewriteValueMIPS64_OpMIPS64SRAV(v *Value) bool {
	// match: (SRAV x (MOVVconst [c]))
	// cond: uint64(c)>=64
	// result: (SRAVconst x [63])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_1.AuxInt
		if !(uint64(c) >= 64) {
			break
		}
		v.reset(OpMIPS64SRAVconst)
		v.AuxInt = 63
		v.AddArg(x)
		return true
	}
	// match: (SRAV x (MOVVconst [c]))
	// cond:
	// result: (SRAVconst x [c])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpMIPS64SRAVconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	return false
}
  5789  func rewriteValueMIPS64_OpMIPS64SRAVconst(v *Value) bool {
  5790  	// match: (SRAVconst [c] (MOVVconst [d]))
  5791  	// cond:
  5792  	// result: (MOVVconst [int64(d)>>uint64(c)])
  5793  	for {
  5794  		c := v.AuxInt
  5795  		v_0 := v.Args[0]
  5796  		if v_0.Op != OpMIPS64MOVVconst {
  5797  			break
  5798  		}
  5799  		d := v_0.AuxInt
  5800  		v.reset(OpMIPS64MOVVconst)
  5801  		v.AuxInt = int64(d) >> uint64(c)
  5802  		return true
  5803  	}
  5804  	return false
  5805  }
// rewriteValueMIPS64_OpMIPS64SRLV rewrites variable logical right shifts
// whose count is a constant. Rule order matters: counts >= 64 are rewritten
// to zero first, so the generic rule below only ever produces SRLVconst
// with an in-range count.
func rewriteValueMIPS64_OpMIPS64SRLV(v *Value) bool {
	// match: (SRLV _ (MOVVconst [c]))
	// cond: uint64(c)>=64
	// result: (MOVVconst [0])
	for {
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_1.AuxInt
		if !(uint64(c) >= 64) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	// match: (SRLV x (MOVVconst [c]))
	// cond:
	// result: (SRLVconst x [c])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpMIPS64SRLVconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	return false
}
  5840  func rewriteValueMIPS64_OpMIPS64SRLVconst(v *Value) bool {
  5841  	// match: (SRLVconst [c] (MOVVconst [d]))
  5842  	// cond:
  5843  	// result: (MOVVconst [int64(uint64(d)>>uint64(c))])
  5844  	for {
  5845  		c := v.AuxInt
  5846  		v_0 := v.Args[0]
  5847  		if v_0.Op != OpMIPS64MOVVconst {
  5848  			break
  5849  		}
  5850  		d := v_0.AuxInt
  5851  		v.reset(OpMIPS64MOVVconst)
  5852  		v.AuxInt = int64(uint64(d) >> uint64(c))
  5853  		return true
  5854  	}
  5855  	return false
  5856  }
// rewriteValueMIPS64_OpMIPS64SUBV simplifies 64-bit subtraction: a constant
// subtrahend becomes SUBVconst, x-x becomes 0, and 0-x becomes NEGV.
// Rules are tried in order; the first match wins.
func rewriteValueMIPS64_OpMIPS64SUBV(v *Value) bool {
	// match: (SUBV x (MOVVconst [c]))
	// cond: is32Bit(c)
	// result: (SUBVconst [c] x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_1.AuxInt
		// The constant must fit the 32-bit immediate range.
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64SUBVconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (SUBV x x)
	// cond:
	// result: (MOVVconst [0])
	for {
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	// match: (SUBV (MOVVconst [0]) x)
	// cond:
	// result: (NEGV x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		x := v.Args[1]
		v.reset(OpMIPS64NEGV)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SUBVconst simplifies subtract-constant:
// identity with 0, folding of constant operands, and merging of chained
// SUBVconst/ADDVconst by negating into a single ADDVconst. Rules are tried
// in order; the first match wins.
func rewriteValueMIPS64_OpMIPS64SUBVconst(v *Value) bool {
	// match: (SUBVconst [0] x)
	// cond:
	// result: x
	for {
		if v.AuxInt != 0 {
			break
		}
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (SUBVconst [c] (MOVVconst [d]))
	// cond:
	// result: (MOVVconst [d-c])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = d - c
		return true
	}
	// match: (SUBVconst [c] (SUBVconst [d] x))
	// cond: is32Bit(-c-d)
	// result: (ADDVconst [-c-d] x)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64SUBVconst {
			break
		}
		d := v_0.AuxInt
		x := v_0.Args[0]
		// (x-d)-c == x+(-c-d); merge only if the sum fits the immediate.
		if !(is32Bit(-c - d)) {
			break
		}
		v.reset(OpMIPS64ADDVconst)
		v.AuxInt = -c - d
		v.AddArg(x)
		return true
	}
	// match: (SUBVconst [c] (ADDVconst [d] x))
	// cond: is32Bit(-c+d)
	// result: (ADDVconst [-c+d] x)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		d := v_0.AuxInt
		x := v_0.Args[0]
		// (x+d)-c == x+(-c+d); merge only if the sum fits the immediate.
		if !(is32Bit(-c + d)) {
			break
		}
		v.reset(OpMIPS64ADDVconst)
		v.AuxInt = -c + d
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64XOR canonicalizes XOR: a small constant on
// either side (XOR commutes) becomes XORconst, and x^x becomes 0. Rules are
// tried in order; the first match wins.
func rewriteValueMIPS64_OpMIPS64XOR(v *Value) bool {
	// match: (XOR x (MOVVconst [c]))
	// cond: is32Bit(c)
	// result: (XORconst [c] x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_1.AuxInt
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64XORconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (XOR (MOVVconst [c]) x)
	// cond: is32Bit(c)
	// result: (XORconst [c] x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64XORconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (XOR x x)
	// cond:
	// result: (MOVVconst [0])
	for {
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64XORconst simplifies XORconst: identity with 0,
// bitwise NOT (expressed as NORconst 0) with -1, and folding of constant or
// nested XORconst operands. Rules are tried in order; the first match wins.
func rewriteValueMIPS64_OpMIPS64XORconst(v *Value) bool {
	// match: (XORconst [0] x)
	// cond:
	// result: x
	for {
		if v.AuxInt != 0 {
			break
		}
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (XORconst [-1] x)
	// cond:
	// result: (NORconst [0] x)
	for {
		if v.AuxInt != -1 {
			break
		}
		x := v.Args[0]
		// x ^ -1 is bitwise NOT, written as NOR with zero.
		v.reset(OpMIPS64NORconst)
		v.AuxInt = 0
		v.AddArg(x)
		return true
	}
	// match: (XORconst [c] (MOVVconst [d]))
	// cond:
	// result: (MOVVconst [c^d])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = c ^ d
		return true
	}
	// match: (XORconst [c] (XORconst [d] x))
	// cond: is32Bit(c^d)
	// result: (XORconst [c^d] x)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64XORconst {
			break
		}
		d := v_0.AuxInt
		x := v_0.Args[0]
		// Only merge when the combined constant still fits the 32-bit
		// immediate range that XORconst accepts.
		if !(is32Bit(c ^ d)) {
			break
		}
		v.reset(OpMIPS64XORconst)
		v.AuxInt = c ^ d
		v.AddArg(x)
		return true
	}
	return false
}
  6087  func rewriteValueMIPS64_OpMod16(v *Value) bool {
  6088  	b := v.Block
  6089  	_ = b
  6090  	types := &b.Func.Config.Types
  6091  	_ = types
  6092  	// match: (Mod16 x y)
  6093  	// cond:
  6094  	// result: (Select0 (DIVV (SignExt16to64 x) (SignExt16to64 y)))
  6095  	for {
  6096  		x := v.Args[0]
  6097  		y := v.Args[1]
  6098  		v.reset(OpSelect0)
  6099  		v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, MakeTuple(types.Int64, types.Int64))
  6100  		v1 := b.NewValue0(v.Pos, OpSignExt16to64, types.Int64)
  6101  		v1.AddArg(x)
  6102  		v0.AddArg(v1)
  6103  		v2 := b.NewValue0(v.Pos, OpSignExt16to64, types.Int64)
  6104  		v2.AddArg(y)
  6105  		v0.AddArg(v2)
  6106  		v.AddArg(v0)
  6107  		return true
  6108  	}
  6109  }
  6110  func rewriteValueMIPS64_OpMod16u(v *Value) bool {
  6111  	b := v.Block
  6112  	_ = b
  6113  	types := &b.Func.Config.Types
  6114  	_ = types
  6115  	// match: (Mod16u x y)
  6116  	// cond:
  6117  	// result: (Select0 (DIVVU (ZeroExt16to64 x) (ZeroExt16to64 y)))
  6118  	for {
  6119  		x := v.Args[0]
  6120  		y := v.Args[1]
  6121  		v.reset(OpSelect0)
  6122  		v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, MakeTuple(types.UInt64, types.UInt64))
  6123  		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
  6124  		v1.AddArg(x)
  6125  		v0.AddArg(v1)
  6126  		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
  6127  		v2.AddArg(y)
  6128  		v0.AddArg(v2)
  6129  		v.AddArg(v0)
  6130  		return true
  6131  	}
  6132  }
  6133  func rewriteValueMIPS64_OpMod32(v *Value) bool {
  6134  	b := v.Block
  6135  	_ = b
  6136  	types := &b.Func.Config.Types
  6137  	_ = types
  6138  	// match: (Mod32 x y)
  6139  	// cond:
  6140  	// result: (Select0 (DIVV (SignExt32to64 x) (SignExt32to64 y)))
  6141  	for {
  6142  		x := v.Args[0]
  6143  		y := v.Args[1]
  6144  		v.reset(OpSelect0)
  6145  		v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, MakeTuple(types.Int64, types.Int64))
  6146  		v1 := b.NewValue0(v.Pos, OpSignExt32to64, types.Int64)
  6147  		v1.AddArg(x)
  6148  		v0.AddArg(v1)
  6149  		v2 := b.NewValue0(v.Pos, OpSignExt32to64, types.Int64)
  6150  		v2.AddArg(y)
  6151  		v0.AddArg(v2)
  6152  		v.AddArg(v0)
  6153  		return true
  6154  	}
  6155  }
  6156  func rewriteValueMIPS64_OpMod32u(v *Value) bool {
  6157  	b := v.Block
  6158  	_ = b
  6159  	types := &b.Func.Config.Types
  6160  	_ = types
  6161  	// match: (Mod32u x y)
  6162  	// cond:
  6163  	// result: (Select0 (DIVVU (ZeroExt32to64 x) (ZeroExt32to64 y)))
  6164  	for {
  6165  		x := v.Args[0]
  6166  		y := v.Args[1]
  6167  		v.reset(OpSelect0)
  6168  		v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, MakeTuple(types.UInt64, types.UInt64))
  6169  		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
  6170  		v1.AddArg(x)
  6171  		v0.AddArg(v1)
  6172  		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
  6173  		v2.AddArg(y)
  6174  		v0.AddArg(v2)
  6175  		v.AddArg(v0)
  6176  		return true
  6177  	}
  6178  }
  6179  func rewriteValueMIPS64_OpMod64(v *Value) bool {
  6180  	b := v.Block
  6181  	_ = b
  6182  	types := &b.Func.Config.Types
  6183  	_ = types
  6184  	// match: (Mod64 x y)
  6185  	// cond:
  6186  	// result: (Select0 (DIVV x y))
  6187  	for {
  6188  		x := v.Args[0]
  6189  		y := v.Args[1]
  6190  		v.reset(OpSelect0)
  6191  		v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, MakeTuple(types.Int64, types.Int64))
  6192  		v0.AddArg(x)
  6193  		v0.AddArg(y)
  6194  		v.AddArg(v0)
  6195  		return true
  6196  	}
  6197  }
  6198  func rewriteValueMIPS64_OpMod64u(v *Value) bool {
  6199  	b := v.Block
  6200  	_ = b
  6201  	types := &b.Func.Config.Types
  6202  	_ = types
  6203  	// match: (Mod64u x y)
  6204  	// cond:
  6205  	// result: (Select0 (DIVVU x y))
  6206  	for {
  6207  		x := v.Args[0]
  6208  		y := v.Args[1]
  6209  		v.reset(OpSelect0)
  6210  		v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, MakeTuple(types.UInt64, types.UInt64))
  6211  		v0.AddArg(x)
  6212  		v0.AddArg(y)
  6213  		v.AddArg(v0)
  6214  		return true
  6215  	}
  6216  }
  6217  func rewriteValueMIPS64_OpMod8(v *Value) bool {
  6218  	b := v.Block
  6219  	_ = b
  6220  	types := &b.Func.Config.Types
  6221  	_ = types
  6222  	// match: (Mod8 x y)
  6223  	// cond:
  6224  	// result: (Select0 (DIVV (SignExt8to64 x) (SignExt8to64 y)))
  6225  	for {
  6226  		x := v.Args[0]
  6227  		y := v.Args[1]
  6228  		v.reset(OpSelect0)
  6229  		v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, MakeTuple(types.Int64, types.Int64))
  6230  		v1 := b.NewValue0(v.Pos, OpSignExt8to64, types.Int64)
  6231  		v1.AddArg(x)
  6232  		v0.AddArg(v1)
  6233  		v2 := b.NewValue0(v.Pos, OpSignExt8to64, types.Int64)
  6234  		v2.AddArg(y)
  6235  		v0.AddArg(v2)
  6236  		v.AddArg(v0)
  6237  		return true
  6238  	}
  6239  }
  6240  func rewriteValueMIPS64_OpMod8u(v *Value) bool {
  6241  	b := v.Block
  6242  	_ = b
  6243  	types := &b.Func.Config.Types
  6244  	_ = types
  6245  	// match: (Mod8u x y)
  6246  	// cond:
  6247  	// result: (Select0 (DIVVU (ZeroExt8to64 x) (ZeroExt8to64 y)))
  6248  	for {
  6249  		x := v.Args[0]
  6250  		y := v.Args[1]
  6251  		v.reset(OpSelect0)
  6252  		v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, MakeTuple(types.UInt64, types.UInt64))
  6253  		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
  6254  		v1.AddArg(x)
  6255  		v0.AddArg(v1)
  6256  		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
  6257  		v2.AddArg(y)
  6258  		v0.AddArg(v2)
  6259  		v.AddArg(v0)
  6260  		return true
  6261  	}
  6262  }
  6263  func rewriteValueMIPS64_OpMove(v *Value) bool {
  6264  	b := v.Block
  6265  	_ = b
  6266  	config := b.Func.Config
  6267  	_ = config
  6268  	types := &b.Func.Config.Types
  6269  	_ = types
  6270  	// match: (Move [0] _ _ mem)
  6271  	// cond:
  6272  	// result: mem
  6273  	for {
  6274  		if v.AuxInt != 0 {
  6275  			break
  6276  		}
  6277  		mem := v.Args[2]
  6278  		v.reset(OpCopy)
  6279  		v.Type = mem.Type
  6280  		v.AddArg(mem)
  6281  		return true
  6282  	}
  6283  	// match: (Move [1] dst src mem)
  6284  	// cond:
  6285  	// result: (MOVBstore dst (MOVBload src mem) mem)
  6286  	for {
  6287  		if v.AuxInt != 1 {
  6288  			break
  6289  		}
  6290  		dst := v.Args[0]
  6291  		src := v.Args[1]
  6292  		mem := v.Args[2]
  6293  		v.reset(OpMIPS64MOVBstore)
  6294  		v.AddArg(dst)
  6295  		v0 := b.NewValue0(v.Pos, OpMIPS64MOVBload, types.Int8)
  6296  		v0.AddArg(src)
  6297  		v0.AddArg(mem)
  6298  		v.AddArg(v0)
  6299  		v.AddArg(mem)
  6300  		return true
  6301  	}
  6302  	// match: (Move [2] {t} dst src mem)
  6303  	// cond: t.(Type).Alignment()%2 == 0
  6304  	// result: (MOVHstore dst (MOVHload src mem) mem)
  6305  	for {
  6306  		if v.AuxInt != 2 {
  6307  			break
  6308  		}
  6309  		t := v.Aux
  6310  		dst := v.Args[0]
  6311  		src := v.Args[1]
  6312  		mem := v.Args[2]
  6313  		if !(t.(Type).Alignment()%2 == 0) {
  6314  			break
  6315  		}
  6316  		v.reset(OpMIPS64MOVHstore)
  6317  		v.AddArg(dst)
  6318  		v0 := b.NewValue0(v.Pos, OpMIPS64MOVHload, types.Int16)
  6319  		v0.AddArg(src)
  6320  		v0.AddArg(mem)
  6321  		v.AddArg(v0)
  6322  		v.AddArg(mem)
  6323  		return true
  6324  	}
  6325  	// match: (Move [2] dst src mem)
  6326  	// cond:
  6327  	// result: (MOVBstore [1] dst (MOVBload [1] src mem) 		(MOVBstore dst (MOVBload src mem) mem))
  6328  	for {
  6329  		if v.AuxInt != 2 {
  6330  			break
  6331  		}
  6332  		dst := v.Args[0]
  6333  		src := v.Args[1]
  6334  		mem := v.Args[2]
  6335  		v.reset(OpMIPS64MOVBstore)
  6336  		v.AuxInt = 1
  6337  		v.AddArg(dst)
  6338  		v0 := b.NewValue0(v.Pos, OpMIPS64MOVBload, types.Int8)
  6339  		v0.AuxInt = 1
  6340  		v0.AddArg(src)
  6341  		v0.AddArg(mem)
  6342  		v.AddArg(v0)
  6343  		v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, TypeMem)
  6344  		v1.AddArg(dst)
  6345  		v2 := b.NewValue0(v.Pos, OpMIPS64MOVBload, types.Int8)
  6346  		v2.AddArg(src)
  6347  		v2.AddArg(mem)
  6348  		v1.AddArg(v2)
  6349  		v1.AddArg(mem)
  6350  		v.AddArg(v1)
  6351  		return true
  6352  	}
  6353  	// match: (Move [4] {t} dst src mem)
  6354  	// cond: t.(Type).Alignment()%4 == 0
  6355  	// result: (MOVWstore dst (MOVWload src mem) mem)
  6356  	for {
  6357  		if v.AuxInt != 4 {
  6358  			break
  6359  		}
  6360  		t := v.Aux
  6361  		dst := v.Args[0]
  6362  		src := v.Args[1]
  6363  		mem := v.Args[2]
  6364  		if !(t.(Type).Alignment()%4 == 0) {
  6365  			break
  6366  		}
  6367  		v.reset(OpMIPS64MOVWstore)
  6368  		v.AddArg(dst)
  6369  		v0 := b.NewValue0(v.Pos, OpMIPS64MOVWload, types.Int32)
  6370  		v0.AddArg(src)
  6371  		v0.AddArg(mem)
  6372  		v.AddArg(v0)
  6373  		v.AddArg(mem)
  6374  		return true
  6375  	}
  6376  	// match: (Move [4] {t} dst src mem)
  6377  	// cond: t.(Type).Alignment()%2 == 0
  6378  	// result: (MOVHstore [2] dst (MOVHload [2] src mem) 		(MOVHstore dst (MOVHload src mem) mem))
  6379  	for {
  6380  		if v.AuxInt != 4 {
  6381  			break
  6382  		}
  6383  		t := v.Aux
  6384  		dst := v.Args[0]
  6385  		src := v.Args[1]
  6386  		mem := v.Args[2]
  6387  		if !(t.(Type).Alignment()%2 == 0) {
  6388  			break
  6389  		}
  6390  		v.reset(OpMIPS64MOVHstore)
  6391  		v.AuxInt = 2
  6392  		v.AddArg(dst)
  6393  		v0 := b.NewValue0(v.Pos, OpMIPS64MOVHload, types.Int16)
  6394  		v0.AuxInt = 2
  6395  		v0.AddArg(src)
  6396  		v0.AddArg(mem)
  6397  		v.AddArg(v0)
  6398  		v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, TypeMem)
  6399  		v1.AddArg(dst)
  6400  		v2 := b.NewValue0(v.Pos, OpMIPS64MOVHload, types.Int16)
  6401  		v2.AddArg(src)
  6402  		v2.AddArg(mem)
  6403  		v1.AddArg(v2)
  6404  		v1.AddArg(mem)
  6405  		v.AddArg(v1)
  6406  		return true
  6407  	}
  6408  	// match: (Move [4] dst src mem)
  6409  	// cond:
  6410  	// result: (MOVBstore [3] dst (MOVBload [3] src mem) 		(MOVBstore [2] dst (MOVBload [2] src mem) 			(MOVBstore [1] dst (MOVBload [1] src mem) 				(MOVBstore dst (MOVBload src mem) mem))))
  6411  	for {
  6412  		if v.AuxInt != 4 {
  6413  			break
  6414  		}
  6415  		dst := v.Args[0]
  6416  		src := v.Args[1]
  6417  		mem := v.Args[2]
  6418  		v.reset(OpMIPS64MOVBstore)
  6419  		v.AuxInt = 3
  6420  		v.AddArg(dst)
  6421  		v0 := b.NewValue0(v.Pos, OpMIPS64MOVBload, types.Int8)
  6422  		v0.AuxInt = 3
  6423  		v0.AddArg(src)
  6424  		v0.AddArg(mem)
  6425  		v.AddArg(v0)
  6426  		v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, TypeMem)
  6427  		v1.AuxInt = 2
  6428  		v1.AddArg(dst)
  6429  		v2 := b.NewValue0(v.Pos, OpMIPS64MOVBload, types.Int8)
  6430  		v2.AuxInt = 2
  6431  		v2.AddArg(src)
  6432  		v2.AddArg(mem)
  6433  		v1.AddArg(v2)
  6434  		v3 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, TypeMem)
  6435  		v3.AuxInt = 1
  6436  		v3.AddArg(dst)
  6437  		v4 := b.NewValue0(v.Pos, OpMIPS64MOVBload, types.Int8)
  6438  		v4.AuxInt = 1
  6439  		v4.AddArg(src)
  6440  		v4.AddArg(mem)
  6441  		v3.AddArg(v4)
  6442  		v5 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, TypeMem)
  6443  		v5.AddArg(dst)
  6444  		v6 := b.NewValue0(v.Pos, OpMIPS64MOVBload, types.Int8)
  6445  		v6.AddArg(src)
  6446  		v6.AddArg(mem)
  6447  		v5.AddArg(v6)
  6448  		v5.AddArg(mem)
  6449  		v3.AddArg(v5)
  6450  		v1.AddArg(v3)
  6451  		v.AddArg(v1)
  6452  		return true
  6453  	}
  6454  	// match: (Move [8] {t} dst src mem)
  6455  	// cond: t.(Type).Alignment()%8 == 0
  6456  	// result: (MOVVstore dst (MOVVload src mem) mem)
  6457  	for {
  6458  		if v.AuxInt != 8 {
  6459  			break
  6460  		}
  6461  		t := v.Aux
  6462  		dst := v.Args[0]
  6463  		src := v.Args[1]
  6464  		mem := v.Args[2]
  6465  		if !(t.(Type).Alignment()%8 == 0) {
  6466  			break
  6467  		}
  6468  		v.reset(OpMIPS64MOVVstore)
  6469  		v.AddArg(dst)
  6470  		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVload, types.UInt64)
  6471  		v0.AddArg(src)
  6472  		v0.AddArg(mem)
  6473  		v.AddArg(v0)
  6474  		v.AddArg(mem)
  6475  		return true
  6476  	}
  6477  	// match: (Move [8] {t} dst src mem)
  6478  	// cond: t.(Type).Alignment()%4 == 0
  6479  	// result: (MOVWstore [4] dst (MOVWload [4] src mem) 		(MOVWstore dst (MOVWload src mem) mem))
  6480  	for {
  6481  		if v.AuxInt != 8 {
  6482  			break
  6483  		}
  6484  		t := v.Aux
  6485  		dst := v.Args[0]
  6486  		src := v.Args[1]
  6487  		mem := v.Args[2]
  6488  		if !(t.(Type).Alignment()%4 == 0) {
  6489  			break
  6490  		}
  6491  		v.reset(OpMIPS64MOVWstore)
  6492  		v.AuxInt = 4
  6493  		v.AddArg(dst)
  6494  		v0 := b.NewValue0(v.Pos, OpMIPS64MOVWload, types.Int32)
  6495  		v0.AuxInt = 4
  6496  		v0.AddArg(src)
  6497  		v0.AddArg(mem)
  6498  		v.AddArg(v0)
  6499  		v1 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, TypeMem)
  6500  		v1.AddArg(dst)
  6501  		v2 := b.NewValue0(v.Pos, OpMIPS64MOVWload, types.Int32)
  6502  		v2.AddArg(src)
  6503  		v2.AddArg(mem)
  6504  		v1.AddArg(v2)
  6505  		v1.AddArg(mem)
  6506  		v.AddArg(v1)
  6507  		return true
  6508  	}
  6509  	// match: (Move [8] {t} dst src mem)
  6510  	// cond: t.(Type).Alignment()%2 == 0
  6511  	// result: (MOVHstore [6] dst (MOVHload [6] src mem) 		(MOVHstore [4] dst (MOVHload [4] src mem) 			(MOVHstore [2] dst (MOVHload [2] src mem) 				(MOVHstore dst (MOVHload src mem) mem))))
  6512  	for {
  6513  		if v.AuxInt != 8 {
  6514  			break
  6515  		}
  6516  		t := v.Aux
  6517  		dst := v.Args[0]
  6518  		src := v.Args[1]
  6519  		mem := v.Args[2]
  6520  		if !(t.(Type).Alignment()%2 == 0) {
  6521  			break
  6522  		}
  6523  		v.reset(OpMIPS64MOVHstore)
  6524  		v.AuxInt = 6
  6525  		v.AddArg(dst)
  6526  		v0 := b.NewValue0(v.Pos, OpMIPS64MOVHload, types.Int16)
  6527  		v0.AuxInt = 6
  6528  		v0.AddArg(src)
  6529  		v0.AddArg(mem)
  6530  		v.AddArg(v0)
  6531  		v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, TypeMem)
  6532  		v1.AuxInt = 4
  6533  		v1.AddArg(dst)
  6534  		v2 := b.NewValue0(v.Pos, OpMIPS64MOVHload, types.Int16)
  6535  		v2.AuxInt = 4
  6536  		v2.AddArg(src)
  6537  		v2.AddArg(mem)
  6538  		v1.AddArg(v2)
  6539  		v3 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, TypeMem)
  6540  		v3.AuxInt = 2
  6541  		v3.AddArg(dst)
  6542  		v4 := b.NewValue0(v.Pos, OpMIPS64MOVHload, types.Int16)
  6543  		v4.AuxInt = 2
  6544  		v4.AddArg(src)
  6545  		v4.AddArg(mem)
  6546  		v3.AddArg(v4)
  6547  		v5 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, TypeMem)
  6548  		v5.AddArg(dst)
  6549  		v6 := b.NewValue0(v.Pos, OpMIPS64MOVHload, types.Int16)
  6550  		v6.AddArg(src)
  6551  		v6.AddArg(mem)
  6552  		v5.AddArg(v6)
  6553  		v5.AddArg(mem)
  6554  		v3.AddArg(v5)
  6555  		v1.AddArg(v3)
  6556  		v.AddArg(v1)
  6557  		return true
  6558  	}
  6559  	// match: (Move [3] dst src mem)
  6560  	// cond:
  6561  	// result: (MOVBstore [2] dst (MOVBload [2] src mem) 		(MOVBstore [1] dst (MOVBload [1] src mem) 			(MOVBstore dst (MOVBload src mem) mem)))
  6562  	for {
  6563  		if v.AuxInt != 3 {
  6564  			break
  6565  		}
  6566  		dst := v.Args[0]
  6567  		src := v.Args[1]
  6568  		mem := v.Args[2]
  6569  		v.reset(OpMIPS64MOVBstore)
  6570  		v.AuxInt = 2
  6571  		v.AddArg(dst)
  6572  		v0 := b.NewValue0(v.Pos, OpMIPS64MOVBload, types.Int8)
  6573  		v0.AuxInt = 2
  6574  		v0.AddArg(src)
  6575  		v0.AddArg(mem)
  6576  		v.AddArg(v0)
  6577  		v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, TypeMem)
  6578  		v1.AuxInt = 1
  6579  		v1.AddArg(dst)
  6580  		v2 := b.NewValue0(v.Pos, OpMIPS64MOVBload, types.Int8)
  6581  		v2.AuxInt = 1
  6582  		v2.AddArg(src)
  6583  		v2.AddArg(mem)
  6584  		v1.AddArg(v2)
  6585  		v3 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, TypeMem)
  6586  		v3.AddArg(dst)
  6587  		v4 := b.NewValue0(v.Pos, OpMIPS64MOVBload, types.Int8)
  6588  		v4.AddArg(src)
  6589  		v4.AddArg(mem)
  6590  		v3.AddArg(v4)
  6591  		v3.AddArg(mem)
  6592  		v1.AddArg(v3)
  6593  		v.AddArg(v1)
  6594  		return true
  6595  	}
  6596  	// match: (Move [6] {t} dst src mem)
  6597  	// cond: t.(Type).Alignment()%2 == 0
  6598  	// result: (MOVHstore [4] dst (MOVHload [4] src mem) 		(MOVHstore [2] dst (MOVHload [2] src mem) 			(MOVHstore dst (MOVHload src mem) mem)))
  6599  	for {
  6600  		if v.AuxInt != 6 {
  6601  			break
  6602  		}
  6603  		t := v.Aux
  6604  		dst := v.Args[0]
  6605  		src := v.Args[1]
  6606  		mem := v.Args[2]
  6607  		if !(t.(Type).Alignment()%2 == 0) {
  6608  			break
  6609  		}
  6610  		v.reset(OpMIPS64MOVHstore)
  6611  		v.AuxInt = 4
  6612  		v.AddArg(dst)
  6613  		v0 := b.NewValue0(v.Pos, OpMIPS64MOVHload, types.Int16)
  6614  		v0.AuxInt = 4
  6615  		v0.AddArg(src)
  6616  		v0.AddArg(mem)
  6617  		v.AddArg(v0)
  6618  		v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, TypeMem)
  6619  		v1.AuxInt = 2
  6620  		v1.AddArg(dst)
  6621  		v2 := b.NewValue0(v.Pos, OpMIPS64MOVHload, types.Int16)
  6622  		v2.AuxInt = 2
  6623  		v2.AddArg(src)
  6624  		v2.AddArg(mem)
  6625  		v1.AddArg(v2)
  6626  		v3 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, TypeMem)
  6627  		v3.AddArg(dst)
  6628  		v4 := b.NewValue0(v.Pos, OpMIPS64MOVHload, types.Int16)
  6629  		v4.AddArg(src)
  6630  		v4.AddArg(mem)
  6631  		v3.AddArg(v4)
  6632  		v3.AddArg(mem)
  6633  		v1.AddArg(v3)
  6634  		v.AddArg(v1)
  6635  		return true
  6636  	}
  6637  	// match: (Move [12] {t} dst src mem)
  6638  	// cond: t.(Type).Alignment()%4 == 0
  6639  	// result: (MOVWstore [8] dst (MOVWload [8] src mem) 		(MOVWstore [4] dst (MOVWload [4] src mem) 			(MOVWstore dst (MOVWload src mem) mem)))
  6640  	for {
  6641  		if v.AuxInt != 12 {
  6642  			break
  6643  		}
  6644  		t := v.Aux
  6645  		dst := v.Args[0]
  6646  		src := v.Args[1]
  6647  		mem := v.Args[2]
  6648  		if !(t.(Type).Alignment()%4 == 0) {
  6649  			break
  6650  		}
  6651  		v.reset(OpMIPS64MOVWstore)
  6652  		v.AuxInt = 8
  6653  		v.AddArg(dst)
  6654  		v0 := b.NewValue0(v.Pos, OpMIPS64MOVWload, types.Int32)
  6655  		v0.AuxInt = 8
  6656  		v0.AddArg(src)
  6657  		v0.AddArg(mem)
  6658  		v.AddArg(v0)
  6659  		v1 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, TypeMem)
  6660  		v1.AuxInt = 4
  6661  		v1.AddArg(dst)
  6662  		v2 := b.NewValue0(v.Pos, OpMIPS64MOVWload, types.Int32)
  6663  		v2.AuxInt = 4
  6664  		v2.AddArg(src)
  6665  		v2.AddArg(mem)
  6666  		v1.AddArg(v2)
  6667  		v3 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, TypeMem)
  6668  		v3.AddArg(dst)
  6669  		v4 := b.NewValue0(v.Pos, OpMIPS64MOVWload, types.Int32)
  6670  		v4.AddArg(src)
  6671  		v4.AddArg(mem)
  6672  		v3.AddArg(v4)
  6673  		v3.AddArg(mem)
  6674  		v1.AddArg(v3)
  6675  		v.AddArg(v1)
  6676  		return true
  6677  	}
  6678  	// match: (Move [16] {t} dst src mem)
  6679  	// cond: t.(Type).Alignment()%8 == 0
  6680  	// result: (MOVVstore [8] dst (MOVVload [8] src mem) 		(MOVVstore dst (MOVVload src mem) mem))
  6681  	for {
  6682  		if v.AuxInt != 16 {
  6683  			break
  6684  		}
  6685  		t := v.Aux
  6686  		dst := v.Args[0]
  6687  		src := v.Args[1]
  6688  		mem := v.Args[2]
  6689  		if !(t.(Type).Alignment()%8 == 0) {
  6690  			break
  6691  		}
  6692  		v.reset(OpMIPS64MOVVstore)
  6693  		v.AuxInt = 8
  6694  		v.AddArg(dst)
  6695  		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVload, types.UInt64)
  6696  		v0.AuxInt = 8
  6697  		v0.AddArg(src)
  6698  		v0.AddArg(mem)
  6699  		v.AddArg(v0)
  6700  		v1 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, TypeMem)
  6701  		v1.AddArg(dst)
  6702  		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVload, types.UInt64)
  6703  		v2.AddArg(src)
  6704  		v2.AddArg(mem)
  6705  		v1.AddArg(v2)
  6706  		v1.AddArg(mem)
  6707  		v.AddArg(v1)
  6708  		return true
  6709  	}
  6710  	// match: (Move [24] {t} dst src mem)
  6711  	// cond: t.(Type).Alignment()%8 == 0
  6712  	// result: (MOVVstore [16] dst (MOVVload [16] src mem) 		(MOVVstore [8] dst (MOVVload [8] src mem) 			(MOVVstore dst (MOVVload src mem) mem)))
  6713  	for {
  6714  		if v.AuxInt != 24 {
  6715  			break
  6716  		}
  6717  		t := v.Aux
  6718  		dst := v.Args[0]
  6719  		src := v.Args[1]
  6720  		mem := v.Args[2]
  6721  		if !(t.(Type).Alignment()%8 == 0) {
  6722  			break
  6723  		}
  6724  		v.reset(OpMIPS64MOVVstore)
  6725  		v.AuxInt = 16
  6726  		v.AddArg(dst)
  6727  		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVload, types.UInt64)
  6728  		v0.AuxInt = 16
  6729  		v0.AddArg(src)
  6730  		v0.AddArg(mem)
  6731  		v.AddArg(v0)
  6732  		v1 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, TypeMem)
  6733  		v1.AuxInt = 8
  6734  		v1.AddArg(dst)
  6735  		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVload, types.UInt64)
  6736  		v2.AuxInt = 8
  6737  		v2.AddArg(src)
  6738  		v2.AddArg(mem)
  6739  		v1.AddArg(v2)
  6740  		v3 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, TypeMem)
  6741  		v3.AddArg(dst)
  6742  		v4 := b.NewValue0(v.Pos, OpMIPS64MOVVload, types.UInt64)
  6743  		v4.AddArg(src)
  6744  		v4.AddArg(mem)
  6745  		v3.AddArg(v4)
  6746  		v3.AddArg(mem)
  6747  		v1.AddArg(v3)
  6748  		v.AddArg(v1)
  6749  		return true
  6750  	}
  6751  	// match: (Move [s] {t} dst src mem)
  6752  	// cond: s > 24 || t.(Type).Alignment()%8 != 0
  6753  	// result: (LoweredMove [t.(Type).Alignment()] 		dst 		src 		(ADDVconst <src.Type> src [s-moveSize(t.(Type).Alignment(), config)]) 		mem)
  6754  	for {
  6755  		s := v.AuxInt
  6756  		t := v.Aux
  6757  		dst := v.Args[0]
  6758  		src := v.Args[1]
  6759  		mem := v.Args[2]
  6760  		if !(s > 24 || t.(Type).Alignment()%8 != 0) {
  6761  			break
  6762  		}
  6763  		v.reset(OpMIPS64LoweredMove)
  6764  		v.AuxInt = t.(Type).Alignment()
  6765  		v.AddArg(dst)
  6766  		v.AddArg(src)
  6767  		v0 := b.NewValue0(v.Pos, OpMIPS64ADDVconst, src.Type)
  6768  		v0.AuxInt = s - moveSize(t.(Type).Alignment(), config)
  6769  		v0.AddArg(src)
  6770  		v.AddArg(v0)
  6771  		v.AddArg(mem)
  6772  		return true
  6773  	}
  6774  	return false
  6775  }
  6776  func rewriteValueMIPS64_OpMul16(v *Value) bool {
  6777  	b := v.Block
  6778  	_ = b
  6779  	types := &b.Func.Config.Types
  6780  	_ = types
  6781  	// match: (Mul16 x y)
  6782  	// cond:
  6783  	// result: (Select1 (MULVU x y))
  6784  	for {
  6785  		x := v.Args[0]
  6786  		y := v.Args[1]
  6787  		v.reset(OpSelect1)
  6788  		v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, MakeTuple(types.UInt64, types.UInt64))
  6789  		v0.AddArg(x)
  6790  		v0.AddArg(y)
  6791  		v.AddArg(v0)
  6792  		return true
  6793  	}
  6794  }
  6795  func rewriteValueMIPS64_OpMul32(v *Value) bool {
  6796  	b := v.Block
  6797  	_ = b
  6798  	types := &b.Func.Config.Types
  6799  	_ = types
  6800  	// match: (Mul32 x y)
  6801  	// cond:
  6802  	// result: (Select1 (MULVU x y))
  6803  	for {
  6804  		x := v.Args[0]
  6805  		y := v.Args[1]
  6806  		v.reset(OpSelect1)
  6807  		v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, MakeTuple(types.UInt64, types.UInt64))
  6808  		v0.AddArg(x)
  6809  		v0.AddArg(y)
  6810  		v.AddArg(v0)
  6811  		return true
  6812  	}
  6813  }
  6814  func rewriteValueMIPS64_OpMul32F(v *Value) bool {
  6815  	// match: (Mul32F x y)
  6816  	// cond:
  6817  	// result: (MULF x y)
  6818  	for {
  6819  		x := v.Args[0]
  6820  		y := v.Args[1]
  6821  		v.reset(OpMIPS64MULF)
  6822  		v.AddArg(x)
  6823  		v.AddArg(y)
  6824  		return true
  6825  	}
  6826  }
  6827  func rewriteValueMIPS64_OpMul64(v *Value) bool {
  6828  	b := v.Block
  6829  	_ = b
  6830  	types := &b.Func.Config.Types
  6831  	_ = types
  6832  	// match: (Mul64 x y)
  6833  	// cond:
  6834  	// result: (Select1 (MULVU x y))
  6835  	for {
  6836  		x := v.Args[0]
  6837  		y := v.Args[1]
  6838  		v.reset(OpSelect1)
  6839  		v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, MakeTuple(types.UInt64, types.UInt64))
  6840  		v0.AddArg(x)
  6841  		v0.AddArg(y)
  6842  		v.AddArg(v0)
  6843  		return true
  6844  	}
  6845  }
  6846  func rewriteValueMIPS64_OpMul64F(v *Value) bool {
  6847  	// match: (Mul64F x y)
  6848  	// cond:
  6849  	// result: (MULD x y)
  6850  	for {
  6851  		x := v.Args[0]
  6852  		y := v.Args[1]
  6853  		v.reset(OpMIPS64MULD)
  6854  		v.AddArg(x)
  6855  		v.AddArg(y)
  6856  		return true
  6857  	}
  6858  }
  6859  func rewriteValueMIPS64_OpMul8(v *Value) bool {
  6860  	b := v.Block
  6861  	_ = b
  6862  	types := &b.Func.Config.Types
  6863  	_ = types
  6864  	// match: (Mul8 x y)
  6865  	// cond:
  6866  	// result: (Select1 (MULVU x y))
  6867  	for {
  6868  		x := v.Args[0]
  6869  		y := v.Args[1]
  6870  		v.reset(OpSelect1)
  6871  		v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, MakeTuple(types.UInt64, types.UInt64))
  6872  		v0.AddArg(x)
  6873  		v0.AddArg(y)
  6874  		v.AddArg(v0)
  6875  		return true
  6876  	}
  6877  }
  6878  func rewriteValueMIPS64_OpNeg16(v *Value) bool {
  6879  	// match: (Neg16 x)
  6880  	// cond:
  6881  	// result: (NEGV x)
  6882  	for {
  6883  		x := v.Args[0]
  6884  		v.reset(OpMIPS64NEGV)
  6885  		v.AddArg(x)
  6886  		return true
  6887  	}
  6888  }
  6889  func rewriteValueMIPS64_OpNeg32(v *Value) bool {
  6890  	// match: (Neg32 x)
  6891  	// cond:
  6892  	// result: (NEGV x)
  6893  	for {
  6894  		x := v.Args[0]
  6895  		v.reset(OpMIPS64NEGV)
  6896  		v.AddArg(x)
  6897  		return true
  6898  	}
  6899  }
  6900  func rewriteValueMIPS64_OpNeg32F(v *Value) bool {
  6901  	// match: (Neg32F x)
  6902  	// cond:
  6903  	// result: (NEGF x)
  6904  	for {
  6905  		x := v.Args[0]
  6906  		v.reset(OpMIPS64NEGF)
  6907  		v.AddArg(x)
  6908  		return true
  6909  	}
  6910  }
  6911  func rewriteValueMIPS64_OpNeg64(v *Value) bool {
  6912  	// match: (Neg64 x)
  6913  	// cond:
  6914  	// result: (NEGV x)
  6915  	for {
  6916  		x := v.Args[0]
  6917  		v.reset(OpMIPS64NEGV)
  6918  		v.AddArg(x)
  6919  		return true
  6920  	}
  6921  }
  6922  func rewriteValueMIPS64_OpNeg64F(v *Value) bool {
  6923  	// match: (Neg64F x)
  6924  	// cond:
  6925  	// result: (NEGD x)
  6926  	for {
  6927  		x := v.Args[0]
  6928  		v.reset(OpMIPS64NEGD)
  6929  		v.AddArg(x)
  6930  		return true
  6931  	}
  6932  }
  6933  func rewriteValueMIPS64_OpNeg8(v *Value) bool {
  6934  	// match: (Neg8 x)
  6935  	// cond:
  6936  	// result: (NEGV x)
  6937  	for {
  6938  		x := v.Args[0]
  6939  		v.reset(OpMIPS64NEGV)
  6940  		v.AddArg(x)
  6941  		return true
  6942  	}
  6943  }
  6944  func rewriteValueMIPS64_OpNeq16(v *Value) bool {
  6945  	b := v.Block
  6946  	_ = b
  6947  	types := &b.Func.Config.Types
  6948  	_ = types
  6949  	// match: (Neq16 x y)
  6950  	// cond:
  6951  	// result: (SGTU (XOR (ZeroExt16to32 x) (ZeroExt16to64 y)) (MOVVconst [0]))
  6952  	for {
  6953  		x := v.Args[0]
  6954  		y := v.Args[1]
  6955  		v.reset(OpMIPS64SGTU)
  6956  		v0 := b.NewValue0(v.Pos, OpMIPS64XOR, types.UInt64)
  6957  		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, types.UInt32)
  6958  		v1.AddArg(x)
  6959  		v0.AddArg(v1)
  6960  		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
  6961  		v2.AddArg(y)
  6962  		v0.AddArg(v2)
  6963  		v.AddArg(v0)
  6964  		v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
  6965  		v3.AuxInt = 0
  6966  		v.AddArg(v3)
  6967  		return true
  6968  	}
  6969  }
  6970  func rewriteValueMIPS64_OpNeq32(v *Value) bool {
  6971  	b := v.Block
  6972  	_ = b
  6973  	types := &b.Func.Config.Types
  6974  	_ = types
  6975  	// match: (Neq32 x y)
  6976  	// cond:
  6977  	// result: (SGTU (XOR (ZeroExt32to64 x) (ZeroExt32to64 y)) (MOVVconst [0]))
  6978  	for {
  6979  		x := v.Args[0]
  6980  		y := v.Args[1]
  6981  		v.reset(OpMIPS64SGTU)
  6982  		v0 := b.NewValue0(v.Pos, OpMIPS64XOR, types.UInt64)
  6983  		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
  6984  		v1.AddArg(x)
  6985  		v0.AddArg(v1)
  6986  		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
  6987  		v2.AddArg(y)
  6988  		v0.AddArg(v2)
  6989  		v.AddArg(v0)
  6990  		v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
  6991  		v3.AuxInt = 0
  6992  		v.AddArg(v3)
  6993  		return true
  6994  	}
  6995  }
  6996  func rewriteValueMIPS64_OpNeq32F(v *Value) bool {
  6997  	b := v.Block
  6998  	_ = b
  6999  	// match: (Neq32F x y)
  7000  	// cond:
  7001  	// result: (FPFlagFalse (CMPEQF x y))
  7002  	for {
  7003  		x := v.Args[0]
  7004  		y := v.Args[1]
  7005  		v.reset(OpMIPS64FPFlagFalse)
  7006  		v0 := b.NewValue0(v.Pos, OpMIPS64CMPEQF, TypeFlags)
  7007  		v0.AddArg(x)
  7008  		v0.AddArg(y)
  7009  		v.AddArg(v0)
  7010  		return true
  7011  	}
  7012  }
  7013  func rewriteValueMIPS64_OpNeq64(v *Value) bool {
  7014  	b := v.Block
  7015  	_ = b
  7016  	types := &b.Func.Config.Types
  7017  	_ = types
  7018  	// match: (Neq64 x y)
  7019  	// cond:
  7020  	// result: (SGTU (XOR x y) (MOVVconst [0]))
  7021  	for {
  7022  		x := v.Args[0]
  7023  		y := v.Args[1]
  7024  		v.reset(OpMIPS64SGTU)
  7025  		v0 := b.NewValue0(v.Pos, OpMIPS64XOR, types.UInt64)
  7026  		v0.AddArg(x)
  7027  		v0.AddArg(y)
  7028  		v.AddArg(v0)
  7029  		v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
  7030  		v1.AuxInt = 0
  7031  		v.AddArg(v1)
  7032  		return true
  7033  	}
  7034  }
  7035  func rewriteValueMIPS64_OpNeq64F(v *Value) bool {
  7036  	b := v.Block
  7037  	_ = b
  7038  	// match: (Neq64F x y)
  7039  	// cond:
  7040  	// result: (FPFlagFalse (CMPEQD x y))
  7041  	for {
  7042  		x := v.Args[0]
  7043  		y := v.Args[1]
  7044  		v.reset(OpMIPS64FPFlagFalse)
  7045  		v0 := b.NewValue0(v.Pos, OpMIPS64CMPEQD, TypeFlags)
  7046  		v0.AddArg(x)
  7047  		v0.AddArg(y)
  7048  		v.AddArg(v0)
  7049  		return true
  7050  	}
  7051  }
  7052  func rewriteValueMIPS64_OpNeq8(v *Value) bool {
  7053  	b := v.Block
  7054  	_ = b
  7055  	types := &b.Func.Config.Types
  7056  	_ = types
  7057  	// match: (Neq8 x y)
  7058  	// cond:
  7059  	// result: (SGTU (XOR (ZeroExt8to64 x) (ZeroExt8to64 y)) (MOVVconst [0]))
  7060  	for {
  7061  		x := v.Args[0]
  7062  		y := v.Args[1]
  7063  		v.reset(OpMIPS64SGTU)
  7064  		v0 := b.NewValue0(v.Pos, OpMIPS64XOR, types.UInt64)
  7065  		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
  7066  		v1.AddArg(x)
  7067  		v0.AddArg(v1)
  7068  		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
  7069  		v2.AddArg(y)
  7070  		v0.AddArg(v2)
  7071  		v.AddArg(v0)
  7072  		v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
  7073  		v3.AuxInt = 0
  7074  		v.AddArg(v3)
  7075  		return true
  7076  	}
  7077  }
  7078  func rewriteValueMIPS64_OpNeqB(v *Value) bool {
  7079  	// match: (NeqB x y)
  7080  	// cond:
  7081  	// result: (XOR x y)
  7082  	for {
  7083  		x := v.Args[0]
  7084  		y := v.Args[1]
  7085  		v.reset(OpMIPS64XOR)
  7086  		v.AddArg(x)
  7087  		v.AddArg(y)
  7088  		return true
  7089  	}
  7090  }
  7091  func rewriteValueMIPS64_OpNeqPtr(v *Value) bool {
  7092  	b := v.Block
  7093  	_ = b
  7094  	types := &b.Func.Config.Types
  7095  	_ = types
  7096  	// match: (NeqPtr x y)
  7097  	// cond:
  7098  	// result: (SGTU (XOR x y) (MOVVconst [0]))
  7099  	for {
  7100  		x := v.Args[0]
  7101  		y := v.Args[1]
  7102  		v.reset(OpMIPS64SGTU)
  7103  		v0 := b.NewValue0(v.Pos, OpMIPS64XOR, types.UInt64)
  7104  		v0.AddArg(x)
  7105  		v0.AddArg(y)
  7106  		v.AddArg(v0)
  7107  		v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
  7108  		v1.AuxInt = 0
  7109  		v.AddArg(v1)
  7110  		return true
  7111  	}
  7112  }
  7113  func rewriteValueMIPS64_OpNilCheck(v *Value) bool {
  7114  	// match: (NilCheck ptr mem)
  7115  	// cond:
  7116  	// result: (LoweredNilCheck ptr mem)
  7117  	for {
  7118  		ptr := v.Args[0]
  7119  		mem := v.Args[1]
  7120  		v.reset(OpMIPS64LoweredNilCheck)
  7121  		v.AddArg(ptr)
  7122  		v.AddArg(mem)
  7123  		return true
  7124  	}
  7125  }
  7126  func rewriteValueMIPS64_OpNot(v *Value) bool {
  7127  	// match: (Not x)
  7128  	// cond:
  7129  	// result: (XORconst [1] x)
  7130  	for {
  7131  		x := v.Args[0]
  7132  		v.reset(OpMIPS64XORconst)
  7133  		v.AuxInt = 1
  7134  		v.AddArg(x)
  7135  		return true
  7136  	}
  7137  }
  7138  func rewriteValueMIPS64_OpOffPtr(v *Value) bool {
  7139  	// match: (OffPtr [off] ptr:(SP))
  7140  	// cond:
  7141  	// result: (MOVVaddr [off] ptr)
  7142  	for {
  7143  		off := v.AuxInt
  7144  		ptr := v.Args[0]
  7145  		if ptr.Op != OpSP {
  7146  			break
  7147  		}
  7148  		v.reset(OpMIPS64MOVVaddr)
  7149  		v.AuxInt = off
  7150  		v.AddArg(ptr)
  7151  		return true
  7152  	}
  7153  	// match: (OffPtr [off] ptr)
  7154  	// cond:
  7155  	// result: (ADDVconst [off] ptr)
  7156  	for {
  7157  		off := v.AuxInt
  7158  		ptr := v.Args[0]
  7159  		v.reset(OpMIPS64ADDVconst)
  7160  		v.AuxInt = off
  7161  		v.AddArg(ptr)
  7162  		return true
  7163  	}
  7164  }
  7165  func rewriteValueMIPS64_OpOr16(v *Value) bool {
  7166  	// match: (Or16 x y)
  7167  	// cond:
  7168  	// result: (OR x y)
  7169  	for {
  7170  		x := v.Args[0]
  7171  		y := v.Args[1]
  7172  		v.reset(OpMIPS64OR)
  7173  		v.AddArg(x)
  7174  		v.AddArg(y)
  7175  		return true
  7176  	}
  7177  }
  7178  func rewriteValueMIPS64_OpOr32(v *Value) bool {
  7179  	// match: (Or32 x y)
  7180  	// cond:
  7181  	// result: (OR x y)
  7182  	for {
  7183  		x := v.Args[0]
  7184  		y := v.Args[1]
  7185  		v.reset(OpMIPS64OR)
  7186  		v.AddArg(x)
  7187  		v.AddArg(y)
  7188  		return true
  7189  	}
  7190  }
  7191  func rewriteValueMIPS64_OpOr64(v *Value) bool {
  7192  	// match: (Or64 x y)
  7193  	// cond:
  7194  	// result: (OR x y)
  7195  	for {
  7196  		x := v.Args[0]
  7197  		y := v.Args[1]
  7198  		v.reset(OpMIPS64OR)
  7199  		v.AddArg(x)
  7200  		v.AddArg(y)
  7201  		return true
  7202  	}
  7203  }
  7204  func rewriteValueMIPS64_OpOr8(v *Value) bool {
  7205  	// match: (Or8 x y)
  7206  	// cond:
  7207  	// result: (OR x y)
  7208  	for {
  7209  		x := v.Args[0]
  7210  		y := v.Args[1]
  7211  		v.reset(OpMIPS64OR)
  7212  		v.AddArg(x)
  7213  		v.AddArg(y)
  7214  		return true
  7215  	}
  7216  }
  7217  func rewriteValueMIPS64_OpOrB(v *Value) bool {
  7218  	// match: (OrB x y)
  7219  	// cond:
  7220  	// result: (OR x y)
  7221  	for {
  7222  		x := v.Args[0]
  7223  		y := v.Args[1]
  7224  		v.reset(OpMIPS64OR)
  7225  		v.AddArg(x)
  7226  		v.AddArg(y)
  7227  		return true
  7228  	}
  7229  }
  7230  func rewriteValueMIPS64_OpRound32F(v *Value) bool {
  7231  	// match: (Round32F x)
  7232  	// cond:
  7233  	// result: x
  7234  	for {
  7235  		x := v.Args[0]
  7236  		v.reset(OpCopy)
  7237  		v.Type = x.Type
  7238  		v.AddArg(x)
  7239  		return true
  7240  	}
  7241  }
  7242  func rewriteValueMIPS64_OpRound64F(v *Value) bool {
  7243  	// match: (Round64F x)
  7244  	// cond:
  7245  	// result: x
  7246  	for {
  7247  		x := v.Args[0]
  7248  		v.reset(OpCopy)
  7249  		v.Type = x.Type
  7250  		v.AddArg(x)
  7251  		return true
  7252  	}
  7253  }
// rewriteValueMIPS64_OpRsh16Ux16 lowers (Rsh16Ux16 <t> x y), an unsigned
// right shift of a 16-bit value by a 16-bit count. Both operands are
// zero-extended to 64 bits and shifted with SRLV; the result is ANDed with
// (NEGV (SGTU 64 count)) — all-ones when count < 64, zero otherwise — so
// counts >= 64 yield 0 as Go's shift semantics require.
func rewriteValueMIPS64_OpRsh16Ux16(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Rsh16Ux16 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <types.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> (ZeroExt16to64 x) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		// Mask operand: -(64 >u count), i.e. all-ones iff count < 64.
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v2 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		// Shift operand: 64-bit logical right shift of the extended value.
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
		v5.AddArg(x)
		v4.AddArg(v5)
		v6 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
		v6.AddArg(y)
		v4.AddArg(v6)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh16Ux32 lowers (Rsh16Ux32 <t> x y), an unsigned
// right shift of a 16-bit value by a 32-bit count. Operands are
// zero-extended to 64 bits and shifted with SRLV; the result is masked
// with (NEGV (SGTU 64 count)) so counts >= 64 yield 0.
func rewriteValueMIPS64_OpRsh16Ux32(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Rsh16Ux32 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <types.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> (ZeroExt16to64 x) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		// Mask operand: all-ones iff count < 64.
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v2 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		// Shift operand.
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
		v5.AddArg(x)
		v4.AddArg(v5)
		v6 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v6.AddArg(y)
		v4.AddArg(v6)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh16Ux64 lowers (Rsh16Ux64 <t> x y), an unsigned
// right shift of a 16-bit value by a 64-bit count (no count extension
// needed). The SRLV result is masked with (NEGV (SGTU 64 y)) so counts
// >= 64 yield 0.
func rewriteValueMIPS64_OpRsh16Ux64(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Rsh16Ux64 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <types.UInt64> [64]) y)) (SRLV <t> (ZeroExt16to64 x) y))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		// Mask operand: all-ones iff y < 64.
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v2 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		// Shift operand.
		v3 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
		v4.AddArg(x)
		v3.AddArg(v4)
		v3.AddArg(y)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueMIPS64_OpRsh16Ux8 lowers (Rsh16Ux8 <t> x y), an unsigned
// right shift of a 16-bit value by an 8-bit count. Operands are
// zero-extended to 64 bits and shifted with SRLV; the result is masked
// with (NEGV (SGTU 64 count)) so counts >= 64 yield 0.
func rewriteValueMIPS64_OpRsh16Ux8(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Rsh16Ux8 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <types.UInt64> [64]) (ZeroExt8to64  y))) (SRLV <t> (ZeroExt16to64 x) (ZeroExt8to64  y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		// Mask operand: all-ones iff count < 64.
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v2 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		// Shift operand.
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
		v5.AddArg(x)
		v4.AddArg(v5)
		v6 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v6.AddArg(y)
		v4.AddArg(v6)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh16x16 lowers (Rsh16x16 <t> x y), an arithmetic
// right shift of a 16-bit value by a 16-bit count. The value is
// sign-extended and shifted with SRAV; the count is ORed with
// (NEGV (SGTU count 63)), which is all-ones when count > 63, driving the
// effective shift amount to its maximum so oversized counts produce a
// full sign-bit fill (NOTE(review): presumably SRAV uses only the low
// bits of the count — confirm against the MIPS64 backend).
func rewriteValueMIPS64_OpRsh16x16(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Rsh16x16 <t> x y)
	// cond:
	// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (Const64 <types.UInt64> [63]))) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, types.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		// Saturating count: count | -(count >u 63).
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v5 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v5.AuxInt = 63
		v3.AddArg(v5)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v6 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
		v6.AddArg(y)
		v1.AddArg(v6)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh16x32 lowers (Rsh16x32 <t> x y), an arithmetic
// right shift of a 16-bit value by a 32-bit count. Same scheme as
// Rsh16x16: sign-extend the value, SRAV, and saturate the count with
// (OR count (NEGV (SGTU count 63))) so oversized counts sign-fill.
func rewriteValueMIPS64_OpRsh16x32(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Rsh16x32 <t> x y)
	// cond:
	// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (Const64 <types.UInt64> [63]))) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, types.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		// Saturating count: count | -(count >u 63).
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v5 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v5.AuxInt = 63
		v3.AddArg(v5)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v6 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v6.AddArg(y)
		v1.AddArg(v6)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh16x64 lowers (Rsh16x64 <t> x y), an arithmetic
// right shift of a 16-bit value by a 64-bit count (no count extension
// needed). The value is sign-extended and shifted with SRAV, with the
// count saturated via (OR y (NEGV (SGTU y 63))) so oversized counts
// sign-fill.
func rewriteValueMIPS64_OpRsh16x64(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Rsh16x64 <t> x y)
	// cond:
	// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU y (Const64 <types.UInt64> [63]))) y))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, types.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		// Saturating count: y | -(y >u 63).
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v3.AddArg(y)
		v4 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v4.AuxInt = 63
		v3.AddArg(v4)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh16x8 lowers (Rsh16x8 <t> x y), an arithmetic
// right shift of a 16-bit value by an 8-bit count. Same scheme as
// Rsh16x16: sign-extend the value, SRAV, and saturate the count with
// (OR count (NEGV (SGTU count 63))) so oversized counts sign-fill.
func rewriteValueMIPS64_OpRsh16x8(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Rsh16x8 <t> x y)
	// cond:
	// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64  y) (Const64 <types.UInt64> [63]))) (ZeroExt8to64  y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, types.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		// Saturating count: count | -(count >u 63).
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v5 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v5.AuxInt = 63
		v3.AddArg(v5)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v6 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v6.AddArg(y)
		v1.AddArg(v6)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh32Ux16 lowers (Rsh32Ux16 <t> x y), an unsigned
// right shift of a 32-bit value by a 16-bit count. Operands are
// zero-extended to 64 bits and shifted with SRLV; the result is masked
// with (NEGV (SGTU 64 count)) so counts >= 64 yield 0.
func rewriteValueMIPS64_OpRsh32Ux16(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Rsh32Ux16 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <types.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> (ZeroExt32to64 x) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		// Mask operand: all-ones iff count < 64.
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v2 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		// Shift operand.
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v5.AddArg(x)
		v4.AddArg(v5)
		v6 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
		v6.AddArg(y)
		v4.AddArg(v6)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh32Ux32 lowers the generic Rsh32Ux32 (unsigned
// right shift of a 32-bit value by a 32-bit amount) to MIPS64 ops.
// Go requires a zero result when the shift amount is >= the width, so the
// SRLV result is ANDed with a mask that is all ones when y < 64 and zero
// otherwise.
func rewriteValueMIPS64_OpRsh32Ux32(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Rsh32Ux32 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <types.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> (ZeroExt32to64 x) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		// Mask: NEGV(SGTU 64 y) is -1 (all ones) when y < 64, else 0.
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v2 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		// The shift itself: both operands are zero-extended to 64 bits.
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v5.AddArg(x)
		v4.AddArg(v5)
		v6 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v6.AddArg(y)
		v4.AddArg(v6)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh32Ux64 lowers the generic Rsh32Ux64 (unsigned
// right shift of a 32-bit value by a 64-bit amount) to MIPS64 ops.
// The shift amount y is already 64 bits, so no extension is needed for it.
// Go requires a zero result when y >= 64, hence the AND with the
// NEGV(SGTU 64 y) mask (all ones when y < 64, zero otherwise).
func rewriteValueMIPS64_OpRsh32Ux64(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Rsh32Ux64 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <types.UInt64> [64]) y)) (SRLV <t> (ZeroExt32to64 x) y))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v2 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v4.AddArg(x)
		v3.AddArg(v4)
		v3.AddArg(y)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueMIPS64_OpRsh32Ux8 lowers the generic Rsh32Ux8 (unsigned
// right shift of a 32-bit value by an 8-bit amount) to MIPS64 ops.
// Go requires a zero result when the shift amount is >= the width, so the
// SRLV result is ANDed with a mask that is all ones when y < 64 and zero
// otherwise.
func rewriteValueMIPS64_OpRsh32Ux8(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Rsh32Ux8 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <types.UInt64> [64]) (ZeroExt8to64  y))) (SRLV <t> (ZeroExt32to64 x) (ZeroExt8to64  y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		// Mask: NEGV(SGTU 64 y) is -1 (all ones) when y < 64, else 0.
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v2 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		// The shift itself: both operands are zero-extended to 64 bits.
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v5.AddArg(x)
		v4.AddArg(v5)
		v6 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v6.AddArg(y)
		v4.AddArg(v6)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh32x16 lowers the generic Rsh32x16 (signed right
// shift of a 32-bit value by a 16-bit amount) to MIPS64 ops. Go requires
// over-large signed shifts to produce the sign-fill value, so the shift
// amount is saturated: when y > 63, ORing in NEGV(SGTU y 63) == -1 forces
// an arithmetic shift by the maximum amount.
func rewriteValueMIPS64_OpRsh32x16(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Rsh32x16 <t> x y)
	// cond:
	// result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (Const64 <types.UInt64> [63]))) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		// Sign-extend the 32-bit operand so SRAV shifts in the right sign bit.
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, types.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		// Saturating shift amount: OR(NEGV(SGTU y 63), y).
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v5 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v5.AuxInt = 63
		v3.AddArg(v5)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v6 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
		v6.AddArg(y)
		v1.AddArg(v6)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh32x32 lowers the generic Rsh32x32 (signed right
// shift of a 32-bit value by a 32-bit amount) to MIPS64 ops. Go requires
// over-large signed shifts to produce the sign-fill value, so the shift
// amount is saturated: when y > 63, ORing in NEGV(SGTU y 63) == -1 forces
// an arithmetic shift by the maximum amount.
func rewriteValueMIPS64_OpRsh32x32(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Rsh32x32 <t> x y)
	// cond:
	// result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (Const64 <types.UInt64> [63]))) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		// Sign-extend the 32-bit operand so SRAV shifts in the right sign bit.
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, types.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		// Saturating shift amount: OR(NEGV(SGTU y 63), y).
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v5 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v5.AuxInt = 63
		v3.AddArg(v5)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v6 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v6.AddArg(y)
		v1.AddArg(v6)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh32x64 lowers the generic Rsh32x64 (signed right
// shift of a 32-bit value by a 64-bit amount) to MIPS64 ops. The shift
// amount y is already 64 bits, so no extension is needed for it. Go
// requires over-large signed shifts to produce the sign-fill value, so the
// shift amount is saturated via OR(NEGV(SGTU y 63), y).
func rewriteValueMIPS64_OpRsh32x64(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Rsh32x64 <t> x y)
	// cond:
	// result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU y (Const64 <types.UInt64> [63]))) y))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		// Sign-extend the 32-bit operand so SRAV shifts in the right sign bit.
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, types.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v3.AddArg(y)
		v4 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v4.AuxInt = 63
		v3.AddArg(v4)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh32x8 lowers the generic Rsh32x8 (signed right
// shift of a 32-bit value by an 8-bit amount) to MIPS64 ops. Go requires
// over-large signed shifts to produce the sign-fill value, so the shift
// amount is saturated: when y > 63, ORing in NEGV(SGTU y 63) == -1 forces
// an arithmetic shift by the maximum amount.
func rewriteValueMIPS64_OpRsh32x8(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Rsh32x8 <t> x y)
	// cond:
	// result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64  y) (Const64 <types.UInt64> [63]))) (ZeroExt8to64  y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		// Sign-extend the 32-bit operand so SRAV shifts in the right sign bit.
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, types.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		// Saturating shift amount: OR(NEGV(SGTU y 63), y).
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v5 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v5.AuxInt = 63
		v3.AddArg(v5)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v6 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v6.AddArg(y)
		v1.AddArg(v6)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh64Ux16 lowers the generic Rsh64Ux16 (unsigned
// right shift of a 64-bit value by a 16-bit amount) to MIPS64 ops.
// x is already 64 bits so only the shift amount is extended. Go requires
// a zero result when the shift amount is >= 64, so the SRLV result is
// ANDed with a mask that is all ones when y < 64 and zero otherwise.
func rewriteValueMIPS64_OpRsh64Ux16(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Rsh64Ux16 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <types.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> x (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		// Mask: NEGV(SGTU 64 y) is -1 (all ones) when y < 64, else 0.
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v2 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh64Ux32 lowers the generic Rsh64Ux32 (unsigned
// right shift of a 64-bit value by a 32-bit amount) to MIPS64 ops.
// x is already 64 bits so only the shift amount is extended. Go requires
// a zero result when the shift amount is >= 64, so the SRLV result is
// ANDed with a mask that is all ones when y < 64 and zero otherwise.
func rewriteValueMIPS64_OpRsh64Ux32(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Rsh64Ux32 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <types.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> x (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		// Mask: NEGV(SGTU 64 y) is -1 (all ones) when y < 64, else 0.
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v2 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh64Ux64 lowers the generic Rsh64Ux64 (unsigned
// right shift of a 64-bit value by a 64-bit amount) to MIPS64 ops.
// Both operands are already 64 bits, so no extensions are needed. Go
// requires a zero result when y >= 64, hence the AND with the
// NEGV(SGTU 64 y) mask (all ones when y < 64, zero otherwise).
func rewriteValueMIPS64_OpRsh64Ux64(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Rsh64Ux64 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <types.UInt64> [64]) y)) (SRLV <t> x y))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v2 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v3.AddArg(x)
		v3.AddArg(y)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueMIPS64_OpRsh64Ux8 lowers the generic Rsh64Ux8 (unsigned
// right shift of a 64-bit value by an 8-bit amount) to MIPS64 ops.
// x is already 64 bits so only the shift amount is extended. Go requires
// a zero result when the shift amount is >= 64, so the SRLV result is
// ANDed with a mask that is all ones when y < 64 and zero otherwise.
func rewriteValueMIPS64_OpRsh64Ux8(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Rsh64Ux8 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <types.UInt64> [64]) (ZeroExt8to64  y))) (SRLV <t> x (ZeroExt8to64  y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		// Mask: NEGV(SGTU 64 y) is -1 (all ones) when y < 64, else 0.
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v2 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh64x16 lowers the generic Rsh64x16 (signed right
// shift of a 64-bit value by a 16-bit amount) to MIPS64 ops. x is already
// 64 bits so no sign-extension is needed. Go requires over-large signed
// shifts to produce the sign-fill value, so the shift amount is saturated:
// when y > 63, ORing in NEGV(SGTU y 63) == -1 forces an arithmetic shift
// by the maximum amount.
func rewriteValueMIPS64_OpRsh64x16(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Rsh64x16 <t> x y)
	// cond:
	// result: (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (Const64 <types.UInt64> [63]))) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		v.AddArg(x)
		// Saturating shift amount: OR(NEGV(SGTU y 63), y).
		v0 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v4 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v4.AuxInt = 63
		v2.AddArg(v4)
		v1.AddArg(v2)
		v0.AddArg(v1)
		v5 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
		v5.AddArg(y)
		v0.AddArg(v5)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpRsh64x32 lowers the generic Rsh64x32 (signed right
// shift of a 64-bit value by a 32-bit amount) to MIPS64 ops. x is already
// 64 bits so no sign-extension is needed. Go requires over-large signed
// shifts to produce the sign-fill value, so the shift amount is saturated:
// when y > 63, ORing in NEGV(SGTU y 63) == -1 forces an arithmetic shift
// by the maximum amount.
func rewriteValueMIPS64_OpRsh64x32(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Rsh64x32 <t> x y)
	// cond:
	// result: (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (Const64 <types.UInt64> [63]))) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		v.AddArg(x)
		// Saturating shift amount: OR(NEGV(SGTU y 63), y).
		v0 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v4 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v4.AuxInt = 63
		v2.AddArg(v4)
		v1.AddArg(v2)
		v0.AddArg(v1)
		v5 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v5.AddArg(y)
		v0.AddArg(v5)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpRsh64x64 lowers the generic Rsh64x64 (signed right
// shift of a 64-bit value by a 64-bit amount) to MIPS64 ops. Both operands
// are already 64 bits, so no extensions are needed. Go requires over-large
// signed shifts to produce the sign-fill value, so the shift amount is
// saturated via OR(NEGV(SGTU y 63), y).
func rewriteValueMIPS64_OpRsh64x64(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Rsh64x64 <t> x y)
	// cond:
	// result: (SRAV x (OR <t> (NEGV <t> (SGTU y (Const64 <types.UInt64> [63]))) y))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		v.AddArg(x)
		v0 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v2.AddArg(y)
		v3 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v3.AuxInt = 63
		v2.AddArg(v3)
		v1.AddArg(v2)
		v0.AddArg(v1)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpRsh64x8 lowers the generic Rsh64x8 (signed right
// shift of a 64-bit value by an 8-bit amount) to MIPS64 ops. x is already
// 64 bits so no sign-extension is needed. Go requires over-large signed
// shifts to produce the sign-fill value, so the shift amount is saturated:
// when y > 63, ORing in NEGV(SGTU y 63) == -1 forces an arithmetic shift
// by the maximum amount.
func rewriteValueMIPS64_OpRsh64x8(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Rsh64x8 <t> x y)
	// cond:
	// result: (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt8to64  y) (Const64 <types.UInt64> [63]))) (ZeroExt8to64  y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		v.AddArg(x)
		// Saturating shift amount: OR(NEGV(SGTU y 63), y).
		v0 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v4 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v4.AuxInt = 63
		v2.AddArg(v4)
		v1.AddArg(v2)
		v0.AddArg(v1)
		v5 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v5.AddArg(y)
		v0.AddArg(v5)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpRsh8Ux16 lowers the generic Rsh8Ux16 (unsigned
// right shift of an 8-bit value by a 16-bit amount) to MIPS64 ops.
// Go requires a zero result when the shift amount is >= the width, so the
// SRLV result is ANDed with a mask that is all ones when y < 64 and zero
// otherwise.
func rewriteValueMIPS64_OpRsh8Ux16(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Rsh8Ux16 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <types.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> (ZeroExt8to64 x) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		// Mask: NEGV(SGTU 64 y) is -1 (all ones) when y < 64, else 0.
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v2 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		// The shift itself: both operands are zero-extended to 64 bits.
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v5.AddArg(x)
		v4.AddArg(v5)
		v6 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
		v6.AddArg(y)
		v4.AddArg(v6)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh8Ux32 lowers the generic Rsh8Ux32 (unsigned
// right shift of an 8-bit value by a 32-bit amount) to MIPS64 ops.
// Go requires a zero result when the shift amount is >= the width, so the
// SRLV result is ANDed with a mask that is all ones when y < 64 and zero
// otherwise.
func rewriteValueMIPS64_OpRsh8Ux32(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Rsh8Ux32 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <types.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> (ZeroExt8to64 x) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		// Mask: NEGV(SGTU 64 y) is -1 (all ones) when y < 64, else 0.
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v2 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		// The shift itself: both operands are zero-extended to 64 bits.
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v5.AddArg(x)
		v4.AddArg(v5)
		v6 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v6.AddArg(y)
		v4.AddArg(v6)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh8Ux64 lowers the generic Rsh8Ux64 (unsigned
// right shift of an 8-bit value by a 64-bit amount) to MIPS64 ops.
// The shift amount y is already 64 bits, so no extension is needed for it.
// Go requires a zero result when y >= 64, hence the AND with the
// NEGV(SGTU 64 y) mask (all ones when y < 64, zero otherwise).
func rewriteValueMIPS64_OpRsh8Ux64(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Rsh8Ux64 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <types.UInt64> [64]) y)) (SRLV <t> (ZeroExt8to64 x) y))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v2 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v4.AddArg(x)
		v3.AddArg(v4)
		v3.AddArg(y)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueMIPS64_OpRsh8Ux8 lowers the generic Rsh8Ux8 (unsigned right
// shift of an 8-bit value by an 8-bit amount) to MIPS64 ops.
// Go requires a zero result when the shift amount is >= the width, so the
// SRLV result is ANDed with a mask that is all ones when y < 64 and zero
// otherwise.
func rewriteValueMIPS64_OpRsh8Ux8(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Rsh8Ux8 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <types.UInt64> [64]) (ZeroExt8to64  y))) (SRLV <t> (ZeroExt8to64 x) (ZeroExt8to64  y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		// Mask: NEGV(SGTU 64 y) is -1 (all ones) when y < 64, else 0.
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v2 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		// The shift itself: both operands are zero-extended to 64 bits.
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v5.AddArg(x)
		v4.AddArg(v5)
		v6 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v6.AddArg(y)
		v4.AddArg(v6)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh8x16 lowers the generic Rsh8x16 (signed right
// shift of an 8-bit value by a 16-bit amount) to MIPS64 ops. Go requires
// over-large signed shifts to produce the sign-fill value, so the shift
// amount is saturated: when y > 63, ORing in NEGV(SGTU y 63) == -1 forces
// an arithmetic shift by the maximum amount.
func rewriteValueMIPS64_OpRsh8x16(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Rsh8x16 <t> x y)
	// cond:
	// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (Const64 <types.UInt64> [63]))) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		// Sign-extend the 8-bit operand so SRAV shifts in the right sign bit.
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, types.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		// Saturating shift amount: OR(NEGV(SGTU y 63), y).
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v5 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v5.AuxInt = 63
		v3.AddArg(v5)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v6 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
		v6.AddArg(y)
		v1.AddArg(v6)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh8x32 lowers the generic Rsh8x32 (signed right
// shift of an 8-bit value by a 32-bit amount) to MIPS64 ops. Go requires
// over-large signed shifts to produce the sign-fill value, so the shift
// amount is saturated: when y > 63, ORing in NEGV(SGTU y 63) == -1 forces
// an arithmetic shift by the maximum amount.
func rewriteValueMIPS64_OpRsh8x32(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Rsh8x32 <t> x y)
	// cond:
	// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (Const64 <types.UInt64> [63]))) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		// Sign-extend the 8-bit operand so SRAV shifts in the right sign bit.
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, types.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		// Saturating shift amount: OR(NEGV(SGTU y 63), y).
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v5 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v5.AuxInt = 63
		v3.AddArg(v5)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v6 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v6.AddArg(y)
		v1.AddArg(v6)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh8x64 lowers the generic Rsh8x64 (signed right
// shift of an 8-bit value by a 64-bit amount) to MIPS64 ops. The shift
// amount y is already 64 bits, so no extension is needed for it. Go
// requires over-large signed shifts to produce the sign-fill value, so the
// shift amount is saturated via OR(NEGV(SGTU y 63), y).
func rewriteValueMIPS64_OpRsh8x64(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Rsh8x64 <t> x y)
	// cond:
	// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU y (Const64 <types.UInt64> [63]))) y))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		// Sign-extend the 8-bit operand so SRAV shifts in the right sign bit.
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, types.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v3.AddArg(y)
		v4 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v4.AuxInt = 63
		v3.AddArg(v4)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh8x8 lowers the generic Rsh8x8 (signed right
// shift of an 8-bit value by an 8-bit amount) to MIPS64 ops. Go requires
// over-large signed shifts to produce the sign-fill value, so the shift
// amount is saturated: when y > 63, ORing in NEGV(SGTU y 63) == -1 forces
// an arithmetic shift by the maximum amount.
func rewriteValueMIPS64_OpRsh8x8(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Rsh8x8 <t> x y)
	// cond:
	// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64  y) (Const64 <types.UInt64> [63]))) (ZeroExt8to64  y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		// Sign-extend the 8-bit operand so SRAV shifts in the right sign bit.
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, types.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		// Saturating shift amount: OR(NEGV(SGTU y 63), y).
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v5 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v5.AuxInt = 63
		v3.AddArg(v5)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v6 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v6.AddArg(y)
		v1.AddArg(v6)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpSelect0 simplifies Select0 of MIPS64 division ops.
// As the rules below show, Select0 of DIVV/DIVVU is the remainder half of
// the division result: modulo by 1 folds to 0, modulo by a power of two
// becomes a bit mask, and fully-constant operands are folded at compile
// time. Returns false if no rule matched.
func rewriteValueMIPS64_OpSelect0(v *Value) bool {
	// match: (Select0 (DIVVU _ (MOVVconst [1])))
	// cond:
	// result: (MOVVconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64DIVVU {
			break
		}
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		if v_0_1.AuxInt != 1 {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	// match: (Select0 (DIVVU x (MOVVconst [c])))
	// cond: isPowerOfTwo(c)
	// result: (ANDconst [c-1] x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64DIVVU {
			break
		}
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0_1.AuxInt
		if !(isPowerOfTwo(c)) {
			break
		}
		// x % 2^k == x & (2^k - 1) for unsigned x.
		v.reset(OpMIPS64ANDconst)
		v.AuxInt = c - 1
		v.AddArg(x)
		return true
	}
	// match: (Select0 (DIVV (MOVVconst [c]) (MOVVconst [d])))
	// cond:
	// result: (MOVVconst [int64(c)%int64(d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64DIVV {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0_0.AuxInt
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0_1.AuxInt
		// Fold signed constant remainder at compile time.
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64(c) % int64(d)
		return true
	}
	// match: (Select0 (DIVVU (MOVVconst [c]) (MOVVconst [d])))
	// cond:
	// result: (MOVVconst [int64(uint64(c)%uint64(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64DIVVU {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0_0.AuxInt
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0_1.AuxInt
		// Fold unsigned constant remainder at compile time.
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64(uint64(c) % uint64(d))
		return true
	}
	return false
}
  8382  func rewriteValueMIPS64_OpSelect1(v *Value) bool {
  8383  	// match: (Select1 (MULVU x (MOVVconst [-1])))
  8384  	// cond:
  8385  	// result: (NEGV x)
  8386  	for {
  8387  		v_0 := v.Args[0]
  8388  		if v_0.Op != OpMIPS64MULVU {
  8389  			break
  8390  		}
  8391  		x := v_0.Args[0]
  8392  		v_0_1 := v_0.Args[1]
  8393  		if v_0_1.Op != OpMIPS64MOVVconst {
  8394  			break
  8395  		}
  8396  		if v_0_1.AuxInt != -1 {
  8397  			break
  8398  		}
  8399  		v.reset(OpMIPS64NEGV)
  8400  		v.AddArg(x)
  8401  		return true
  8402  	}
  8403  	// match: (Select1 (MULVU (MOVVconst [-1]) x))
  8404  	// cond:
  8405  	// result: (NEGV x)
  8406  	for {
  8407  		v_0 := v.Args[0]
  8408  		if v_0.Op != OpMIPS64MULVU {
  8409  			break
  8410  		}
  8411  		v_0_0 := v_0.Args[0]
  8412  		if v_0_0.Op != OpMIPS64MOVVconst {
  8413  			break
  8414  		}
  8415  		if v_0_0.AuxInt != -1 {
  8416  			break
  8417  		}
  8418  		x := v_0.Args[1]
  8419  		v.reset(OpMIPS64NEGV)
  8420  		v.AddArg(x)
  8421  		return true
  8422  	}
  8423  	// match: (Select1 (MULVU _ (MOVVconst [0])))
  8424  	// cond:
  8425  	// result: (MOVVconst [0])
  8426  	for {
  8427  		v_0 := v.Args[0]
  8428  		if v_0.Op != OpMIPS64MULVU {
  8429  			break
  8430  		}
  8431  		v_0_1 := v_0.Args[1]
  8432  		if v_0_1.Op != OpMIPS64MOVVconst {
  8433  			break
  8434  		}
  8435  		if v_0_1.AuxInt != 0 {
  8436  			break
  8437  		}
  8438  		v.reset(OpMIPS64MOVVconst)
  8439  		v.AuxInt = 0
  8440  		return true
  8441  	}
  8442  	// match: (Select1 (MULVU (MOVVconst [0]) _))
  8443  	// cond:
  8444  	// result: (MOVVconst [0])
  8445  	for {
  8446  		v_0 := v.Args[0]
  8447  		if v_0.Op != OpMIPS64MULVU {
  8448  			break
  8449  		}
  8450  		v_0_0 := v_0.Args[0]
  8451  		if v_0_0.Op != OpMIPS64MOVVconst {
  8452  			break
  8453  		}
  8454  		if v_0_0.AuxInt != 0 {
  8455  			break
  8456  		}
  8457  		v.reset(OpMIPS64MOVVconst)
  8458  		v.AuxInt = 0
  8459  		return true
  8460  	}
  8461  	// match: (Select1 (MULVU x (MOVVconst [1])))
  8462  	// cond:
  8463  	// result: x
  8464  	for {
  8465  		v_0 := v.Args[0]
  8466  		if v_0.Op != OpMIPS64MULVU {
  8467  			break
  8468  		}
  8469  		x := v_0.Args[0]
  8470  		v_0_1 := v_0.Args[1]
  8471  		if v_0_1.Op != OpMIPS64MOVVconst {
  8472  			break
  8473  		}
  8474  		if v_0_1.AuxInt != 1 {
  8475  			break
  8476  		}
  8477  		v.reset(OpCopy)
  8478  		v.Type = x.Type
  8479  		v.AddArg(x)
  8480  		return true
  8481  	}
  8482  	// match: (Select1 (MULVU (MOVVconst [1]) x))
  8483  	// cond:
  8484  	// result: x
  8485  	for {
  8486  		v_0 := v.Args[0]
  8487  		if v_0.Op != OpMIPS64MULVU {
  8488  			break
  8489  		}
  8490  		v_0_0 := v_0.Args[0]
  8491  		if v_0_0.Op != OpMIPS64MOVVconst {
  8492  			break
  8493  		}
  8494  		if v_0_0.AuxInt != 1 {
  8495  			break
  8496  		}
  8497  		x := v_0.Args[1]
  8498  		v.reset(OpCopy)
  8499  		v.Type = x.Type
  8500  		v.AddArg(x)
  8501  		return true
  8502  	}
  8503  	// match: (Select1 (MULVU x (MOVVconst [c])))
  8504  	// cond: isPowerOfTwo(c)
  8505  	// result: (SLLVconst [log2(c)] x)
  8506  	for {
  8507  		v_0 := v.Args[0]
  8508  		if v_0.Op != OpMIPS64MULVU {
  8509  			break
  8510  		}
  8511  		x := v_0.Args[0]
  8512  		v_0_1 := v_0.Args[1]
  8513  		if v_0_1.Op != OpMIPS64MOVVconst {
  8514  			break
  8515  		}
  8516  		c := v_0_1.AuxInt
  8517  		if !(isPowerOfTwo(c)) {
  8518  			break
  8519  		}
  8520  		v.reset(OpMIPS64SLLVconst)
  8521  		v.AuxInt = log2(c)
  8522  		v.AddArg(x)
  8523  		return true
  8524  	}
  8525  	// match: (Select1 (MULVU (MOVVconst [c]) x))
  8526  	// cond: isPowerOfTwo(c)
  8527  	// result: (SLLVconst [log2(c)] x)
  8528  	for {
  8529  		v_0 := v.Args[0]
  8530  		if v_0.Op != OpMIPS64MULVU {
  8531  			break
  8532  		}
  8533  		v_0_0 := v_0.Args[0]
  8534  		if v_0_0.Op != OpMIPS64MOVVconst {
  8535  			break
  8536  		}
  8537  		c := v_0_0.AuxInt
  8538  		x := v_0.Args[1]
  8539  		if !(isPowerOfTwo(c)) {
  8540  			break
  8541  		}
  8542  		v.reset(OpMIPS64SLLVconst)
  8543  		v.AuxInt = log2(c)
  8544  		v.AddArg(x)
  8545  		return true
  8546  	}
  8547  	// match: (Select1 (MULVU (MOVVconst [-1]) x))
  8548  	// cond:
  8549  	// result: (NEGV x)
  8550  	for {
  8551  		v_0 := v.Args[0]
  8552  		if v_0.Op != OpMIPS64MULVU {
  8553  			break
  8554  		}
  8555  		v_0_0 := v_0.Args[0]
  8556  		if v_0_0.Op != OpMIPS64MOVVconst {
  8557  			break
  8558  		}
  8559  		if v_0_0.AuxInt != -1 {
  8560  			break
  8561  		}
  8562  		x := v_0.Args[1]
  8563  		v.reset(OpMIPS64NEGV)
  8564  		v.AddArg(x)
  8565  		return true
  8566  	}
  8567  	// match: (Select1 (MULVU x (MOVVconst [-1])))
  8568  	// cond:
  8569  	// result: (NEGV x)
  8570  	for {
  8571  		v_0 := v.Args[0]
  8572  		if v_0.Op != OpMIPS64MULVU {
  8573  			break
  8574  		}
  8575  		x := v_0.Args[0]
  8576  		v_0_1 := v_0.Args[1]
  8577  		if v_0_1.Op != OpMIPS64MOVVconst {
  8578  			break
  8579  		}
  8580  		if v_0_1.AuxInt != -1 {
  8581  			break
  8582  		}
  8583  		v.reset(OpMIPS64NEGV)
  8584  		v.AddArg(x)
  8585  		return true
  8586  	}
  8587  	// match: (Select1 (MULVU (MOVVconst [0]) _))
  8588  	// cond:
  8589  	// result: (MOVVconst [0])
  8590  	for {
  8591  		v_0 := v.Args[0]
  8592  		if v_0.Op != OpMIPS64MULVU {
  8593  			break
  8594  		}
  8595  		v_0_0 := v_0.Args[0]
  8596  		if v_0_0.Op != OpMIPS64MOVVconst {
  8597  			break
  8598  		}
  8599  		if v_0_0.AuxInt != 0 {
  8600  			break
  8601  		}
  8602  		v.reset(OpMIPS64MOVVconst)
  8603  		v.AuxInt = 0
  8604  		return true
  8605  	}
  8606  	// match: (Select1 (MULVU _ (MOVVconst [0])))
  8607  	// cond:
  8608  	// result: (MOVVconst [0])
  8609  	for {
  8610  		v_0 := v.Args[0]
  8611  		if v_0.Op != OpMIPS64MULVU {
  8612  			break
  8613  		}
  8614  		v_0_1 := v_0.Args[1]
  8615  		if v_0_1.Op != OpMIPS64MOVVconst {
  8616  			break
  8617  		}
  8618  		if v_0_1.AuxInt != 0 {
  8619  			break
  8620  		}
  8621  		v.reset(OpMIPS64MOVVconst)
  8622  		v.AuxInt = 0
  8623  		return true
  8624  	}
  8625  	// match: (Select1 (MULVU (MOVVconst [1]) x))
  8626  	// cond:
  8627  	// result: x
  8628  	for {
  8629  		v_0 := v.Args[0]
  8630  		if v_0.Op != OpMIPS64MULVU {
  8631  			break
  8632  		}
  8633  		v_0_0 := v_0.Args[0]
  8634  		if v_0_0.Op != OpMIPS64MOVVconst {
  8635  			break
  8636  		}
  8637  		if v_0_0.AuxInt != 1 {
  8638  			break
  8639  		}
  8640  		x := v_0.Args[1]
  8641  		v.reset(OpCopy)
  8642  		v.Type = x.Type
  8643  		v.AddArg(x)
  8644  		return true
  8645  	}
  8646  	// match: (Select1 (MULVU x (MOVVconst [1])))
  8647  	// cond:
  8648  	// result: x
  8649  	for {
  8650  		v_0 := v.Args[0]
  8651  		if v_0.Op != OpMIPS64MULVU {
  8652  			break
  8653  		}
  8654  		x := v_0.Args[0]
  8655  		v_0_1 := v_0.Args[1]
  8656  		if v_0_1.Op != OpMIPS64MOVVconst {
  8657  			break
  8658  		}
  8659  		if v_0_1.AuxInt != 1 {
  8660  			break
  8661  		}
  8662  		v.reset(OpCopy)
  8663  		v.Type = x.Type
  8664  		v.AddArg(x)
  8665  		return true
  8666  	}
  8667  	// match: (Select1 (MULVU (MOVVconst [c]) x))
  8668  	// cond: isPowerOfTwo(c)
  8669  	// result: (SLLVconst [log2(c)] x)
  8670  	for {
  8671  		v_0 := v.Args[0]
  8672  		if v_0.Op != OpMIPS64MULVU {
  8673  			break
  8674  		}
  8675  		v_0_0 := v_0.Args[0]
  8676  		if v_0_0.Op != OpMIPS64MOVVconst {
  8677  			break
  8678  		}
  8679  		c := v_0_0.AuxInt
  8680  		x := v_0.Args[1]
  8681  		if !(isPowerOfTwo(c)) {
  8682  			break
  8683  		}
  8684  		v.reset(OpMIPS64SLLVconst)
  8685  		v.AuxInt = log2(c)
  8686  		v.AddArg(x)
  8687  		return true
  8688  	}
  8689  	// match: (Select1 (MULVU x (MOVVconst [c])))
  8690  	// cond: isPowerOfTwo(c)
  8691  	// result: (SLLVconst [log2(c)] x)
  8692  	for {
  8693  		v_0 := v.Args[0]
  8694  		if v_0.Op != OpMIPS64MULVU {
  8695  			break
  8696  		}
  8697  		x := v_0.Args[0]
  8698  		v_0_1 := v_0.Args[1]
  8699  		if v_0_1.Op != OpMIPS64MOVVconst {
  8700  			break
  8701  		}
  8702  		c := v_0_1.AuxInt
  8703  		if !(isPowerOfTwo(c)) {
  8704  			break
  8705  		}
  8706  		v.reset(OpMIPS64SLLVconst)
  8707  		v.AuxInt = log2(c)
  8708  		v.AddArg(x)
  8709  		return true
  8710  	}
  8711  	// match: (Select1 (DIVVU x (MOVVconst [1])))
  8712  	// cond:
  8713  	// result: x
  8714  	for {
  8715  		v_0 := v.Args[0]
  8716  		if v_0.Op != OpMIPS64DIVVU {
  8717  			break
  8718  		}
  8719  		x := v_0.Args[0]
  8720  		v_0_1 := v_0.Args[1]
  8721  		if v_0_1.Op != OpMIPS64MOVVconst {
  8722  			break
  8723  		}
  8724  		if v_0_1.AuxInt != 1 {
  8725  			break
  8726  		}
  8727  		v.reset(OpCopy)
  8728  		v.Type = x.Type
  8729  		v.AddArg(x)
  8730  		return true
  8731  	}
  8732  	// match: (Select1 (DIVVU x (MOVVconst [c])))
  8733  	// cond: isPowerOfTwo(c)
  8734  	// result: (SRLVconst [log2(c)] x)
  8735  	for {
  8736  		v_0 := v.Args[0]
  8737  		if v_0.Op != OpMIPS64DIVVU {
  8738  			break
  8739  		}
  8740  		x := v_0.Args[0]
  8741  		v_0_1 := v_0.Args[1]
  8742  		if v_0_1.Op != OpMIPS64MOVVconst {
  8743  			break
  8744  		}
  8745  		c := v_0_1.AuxInt
  8746  		if !(isPowerOfTwo(c)) {
  8747  			break
  8748  		}
  8749  		v.reset(OpMIPS64SRLVconst)
  8750  		v.AuxInt = log2(c)
  8751  		v.AddArg(x)
  8752  		return true
  8753  	}
  8754  	// match: (Select1 (MULVU (MOVVconst [c]) (MOVVconst [d])))
  8755  	// cond:
  8756  	// result: (MOVVconst [c*d])
  8757  	for {
  8758  		v_0 := v.Args[0]
  8759  		if v_0.Op != OpMIPS64MULVU {
  8760  			break
  8761  		}
  8762  		v_0_0 := v_0.Args[0]
  8763  		if v_0_0.Op != OpMIPS64MOVVconst {
  8764  			break
  8765  		}
  8766  		c := v_0_0.AuxInt
  8767  		v_0_1 := v_0.Args[1]
  8768  		if v_0_1.Op != OpMIPS64MOVVconst {
  8769  			break
  8770  		}
  8771  		d := v_0_1.AuxInt
  8772  		v.reset(OpMIPS64MOVVconst)
  8773  		v.AuxInt = c * d
  8774  		return true
  8775  	}
  8776  	// match: (Select1 (MULVU (MOVVconst [d]) (MOVVconst [c])))
  8777  	// cond:
  8778  	// result: (MOVVconst [c*d])
  8779  	for {
  8780  		v_0 := v.Args[0]
  8781  		if v_0.Op != OpMIPS64MULVU {
  8782  			break
  8783  		}
  8784  		v_0_0 := v_0.Args[0]
  8785  		if v_0_0.Op != OpMIPS64MOVVconst {
  8786  			break
  8787  		}
  8788  		d := v_0_0.AuxInt
  8789  		v_0_1 := v_0.Args[1]
  8790  		if v_0_1.Op != OpMIPS64MOVVconst {
  8791  			break
  8792  		}
  8793  		c := v_0_1.AuxInt
  8794  		v.reset(OpMIPS64MOVVconst)
  8795  		v.AuxInt = c * d
  8796  		return true
  8797  	}
  8798  	// match: (Select1 (DIVV (MOVVconst [c]) (MOVVconst [d])))
  8799  	// cond:
  8800  	// result: (MOVVconst [int64(c)/int64(d)])
  8801  	for {
  8802  		v_0 := v.Args[0]
  8803  		if v_0.Op != OpMIPS64DIVV {
  8804  			break
  8805  		}
  8806  		v_0_0 := v_0.Args[0]
  8807  		if v_0_0.Op != OpMIPS64MOVVconst {
  8808  			break
  8809  		}
  8810  		c := v_0_0.AuxInt
  8811  		v_0_1 := v_0.Args[1]
  8812  		if v_0_1.Op != OpMIPS64MOVVconst {
  8813  			break
  8814  		}
  8815  		d := v_0_1.AuxInt
  8816  		v.reset(OpMIPS64MOVVconst)
  8817  		v.AuxInt = int64(c) / int64(d)
  8818  		return true
  8819  	}
  8820  	// match: (Select1 (DIVVU (MOVVconst [c]) (MOVVconst [d])))
  8821  	// cond:
  8822  	// result: (MOVVconst [int64(uint64(c)/uint64(d))])
  8823  	for {
  8824  		v_0 := v.Args[0]
  8825  		if v_0.Op != OpMIPS64DIVVU {
  8826  			break
  8827  		}
  8828  		v_0_0 := v_0.Args[0]
  8829  		if v_0_0.Op != OpMIPS64MOVVconst {
  8830  			break
  8831  		}
  8832  		c := v_0_0.AuxInt
  8833  		v_0_1 := v_0.Args[1]
  8834  		if v_0_1.Op != OpMIPS64MOVVconst {
  8835  			break
  8836  		}
  8837  		d := v_0_1.AuxInt
  8838  		v.reset(OpMIPS64MOVVconst)
  8839  		v.AuxInt = int64(uint64(c) / uint64(d))
  8840  		return true
  8841  	}
  8842  	return false
  8843  }
  8844  func rewriteValueMIPS64_OpSignExt16to32(v *Value) bool {
  8845  	// match: (SignExt16to32 x)
  8846  	// cond:
  8847  	// result: (MOVHreg x)
  8848  	for {
  8849  		x := v.Args[0]
  8850  		v.reset(OpMIPS64MOVHreg)
  8851  		v.AddArg(x)
  8852  		return true
  8853  	}
  8854  }
  8855  func rewriteValueMIPS64_OpSignExt16to64(v *Value) bool {
  8856  	// match: (SignExt16to64 x)
  8857  	// cond:
  8858  	// result: (MOVHreg x)
  8859  	for {
  8860  		x := v.Args[0]
  8861  		v.reset(OpMIPS64MOVHreg)
  8862  		v.AddArg(x)
  8863  		return true
  8864  	}
  8865  }
  8866  func rewriteValueMIPS64_OpSignExt32to64(v *Value) bool {
  8867  	// match: (SignExt32to64 x)
  8868  	// cond:
  8869  	// result: (MOVWreg x)
  8870  	for {
  8871  		x := v.Args[0]
  8872  		v.reset(OpMIPS64MOVWreg)
  8873  		v.AddArg(x)
  8874  		return true
  8875  	}
  8876  }
  8877  func rewriteValueMIPS64_OpSignExt8to16(v *Value) bool {
  8878  	// match: (SignExt8to16 x)
  8879  	// cond:
  8880  	// result: (MOVBreg x)
  8881  	for {
  8882  		x := v.Args[0]
  8883  		v.reset(OpMIPS64MOVBreg)
  8884  		v.AddArg(x)
  8885  		return true
  8886  	}
  8887  }
  8888  func rewriteValueMIPS64_OpSignExt8to32(v *Value) bool {
  8889  	// match: (SignExt8to32 x)
  8890  	// cond:
  8891  	// result: (MOVBreg x)
  8892  	for {
  8893  		x := v.Args[0]
  8894  		v.reset(OpMIPS64MOVBreg)
  8895  		v.AddArg(x)
  8896  		return true
  8897  	}
  8898  }
  8899  func rewriteValueMIPS64_OpSignExt8to64(v *Value) bool {
  8900  	// match: (SignExt8to64 x)
  8901  	// cond:
  8902  	// result: (MOVBreg x)
  8903  	for {
  8904  		x := v.Args[0]
  8905  		v.reset(OpMIPS64MOVBreg)
  8906  		v.AddArg(x)
  8907  		return true
  8908  	}
  8909  }
  8910  func rewriteValueMIPS64_OpSlicemask(v *Value) bool {
  8911  	b := v.Block
  8912  	_ = b
  8913  	// match: (Slicemask <t> x)
  8914  	// cond:
  8915  	// result: (SRAVconst (NEGV <t> x) [63])
  8916  	for {
  8917  		t := v.Type
  8918  		x := v.Args[0]
  8919  		v.reset(OpMIPS64SRAVconst)
  8920  		v.AuxInt = 63
  8921  		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
  8922  		v0.AddArg(x)
  8923  		v.AddArg(v0)
  8924  		return true
  8925  	}
  8926  }
  8927  func rewriteValueMIPS64_OpStaticCall(v *Value) bool {
  8928  	// match: (StaticCall [argwid] {target} mem)
  8929  	// cond:
  8930  	// result: (CALLstatic [argwid] {target} mem)
  8931  	for {
  8932  		argwid := v.AuxInt
  8933  		target := v.Aux
  8934  		mem := v.Args[0]
  8935  		v.reset(OpMIPS64CALLstatic)
  8936  		v.AuxInt = argwid
  8937  		v.Aux = target
  8938  		v.AddArg(mem)
  8939  		return true
  8940  	}
  8941  }
  8942  func rewriteValueMIPS64_OpStore(v *Value) bool {
  8943  	// match: (Store {t} ptr val mem)
  8944  	// cond: t.(Type).Size() == 1
  8945  	// result: (MOVBstore ptr val mem)
  8946  	for {
  8947  		t := v.Aux
  8948  		ptr := v.Args[0]
  8949  		val := v.Args[1]
  8950  		mem := v.Args[2]
  8951  		if !(t.(Type).Size() == 1) {
  8952  			break
  8953  		}
  8954  		v.reset(OpMIPS64MOVBstore)
  8955  		v.AddArg(ptr)
  8956  		v.AddArg(val)
  8957  		v.AddArg(mem)
  8958  		return true
  8959  	}
  8960  	// match: (Store {t} ptr val mem)
  8961  	// cond: t.(Type).Size() == 2
  8962  	// result: (MOVHstore ptr val mem)
  8963  	for {
  8964  		t := v.Aux
  8965  		ptr := v.Args[0]
  8966  		val := v.Args[1]
  8967  		mem := v.Args[2]
  8968  		if !(t.(Type).Size() == 2) {
  8969  			break
  8970  		}
  8971  		v.reset(OpMIPS64MOVHstore)
  8972  		v.AddArg(ptr)
  8973  		v.AddArg(val)
  8974  		v.AddArg(mem)
  8975  		return true
  8976  	}
  8977  	// match: (Store {t} ptr val mem)
  8978  	// cond: t.(Type).Size() == 4 && !is32BitFloat(val.Type)
  8979  	// result: (MOVWstore ptr val mem)
  8980  	for {
  8981  		t := v.Aux
  8982  		ptr := v.Args[0]
  8983  		val := v.Args[1]
  8984  		mem := v.Args[2]
  8985  		if !(t.(Type).Size() == 4 && !is32BitFloat(val.Type)) {
  8986  			break
  8987  		}
  8988  		v.reset(OpMIPS64MOVWstore)
  8989  		v.AddArg(ptr)
  8990  		v.AddArg(val)
  8991  		v.AddArg(mem)
  8992  		return true
  8993  	}
  8994  	// match: (Store {t} ptr val mem)
  8995  	// cond: t.(Type).Size() == 8 && !is64BitFloat(val.Type)
  8996  	// result: (MOVVstore ptr val mem)
  8997  	for {
  8998  		t := v.Aux
  8999  		ptr := v.Args[0]
  9000  		val := v.Args[1]
  9001  		mem := v.Args[2]
  9002  		if !(t.(Type).Size() == 8 && !is64BitFloat(val.Type)) {
  9003  			break
  9004  		}
  9005  		v.reset(OpMIPS64MOVVstore)
  9006  		v.AddArg(ptr)
  9007  		v.AddArg(val)
  9008  		v.AddArg(mem)
  9009  		return true
  9010  	}
  9011  	// match: (Store {t} ptr val mem)
  9012  	// cond: t.(Type).Size() == 4 && is32BitFloat(val.Type)
  9013  	// result: (MOVFstore ptr val mem)
  9014  	for {
  9015  		t := v.Aux
  9016  		ptr := v.Args[0]
  9017  		val := v.Args[1]
  9018  		mem := v.Args[2]
  9019  		if !(t.(Type).Size() == 4 && is32BitFloat(val.Type)) {
  9020  			break
  9021  		}
  9022  		v.reset(OpMIPS64MOVFstore)
  9023  		v.AddArg(ptr)
  9024  		v.AddArg(val)
  9025  		v.AddArg(mem)
  9026  		return true
  9027  	}
  9028  	// match: (Store {t} ptr val mem)
  9029  	// cond: t.(Type).Size() == 8 && is64BitFloat(val.Type)
  9030  	// result: (MOVDstore ptr val mem)
  9031  	for {
  9032  		t := v.Aux
  9033  		ptr := v.Args[0]
  9034  		val := v.Args[1]
  9035  		mem := v.Args[2]
  9036  		if !(t.(Type).Size() == 8 && is64BitFloat(val.Type)) {
  9037  			break
  9038  		}
  9039  		v.reset(OpMIPS64MOVDstore)
  9040  		v.AddArg(ptr)
  9041  		v.AddArg(val)
  9042  		v.AddArg(mem)
  9043  		return true
  9044  	}
  9045  	return false
  9046  }
  9047  func rewriteValueMIPS64_OpSub16(v *Value) bool {
  9048  	// match: (Sub16 x y)
  9049  	// cond:
  9050  	// result: (SUBV x y)
  9051  	for {
  9052  		x := v.Args[0]
  9053  		y := v.Args[1]
  9054  		v.reset(OpMIPS64SUBV)
  9055  		v.AddArg(x)
  9056  		v.AddArg(y)
  9057  		return true
  9058  	}
  9059  }
  9060  func rewriteValueMIPS64_OpSub32(v *Value) bool {
  9061  	// match: (Sub32 x y)
  9062  	// cond:
  9063  	// result: (SUBV x y)
  9064  	for {
  9065  		x := v.Args[0]
  9066  		y := v.Args[1]
  9067  		v.reset(OpMIPS64SUBV)
  9068  		v.AddArg(x)
  9069  		v.AddArg(y)
  9070  		return true
  9071  	}
  9072  }
  9073  func rewriteValueMIPS64_OpSub32F(v *Value) bool {
  9074  	// match: (Sub32F x y)
  9075  	// cond:
  9076  	// result: (SUBF x y)
  9077  	for {
  9078  		x := v.Args[0]
  9079  		y := v.Args[1]
  9080  		v.reset(OpMIPS64SUBF)
  9081  		v.AddArg(x)
  9082  		v.AddArg(y)
  9083  		return true
  9084  	}
  9085  }
  9086  func rewriteValueMIPS64_OpSub64(v *Value) bool {
  9087  	// match: (Sub64 x y)
  9088  	// cond:
  9089  	// result: (SUBV x y)
  9090  	for {
  9091  		x := v.Args[0]
  9092  		y := v.Args[1]
  9093  		v.reset(OpMIPS64SUBV)
  9094  		v.AddArg(x)
  9095  		v.AddArg(y)
  9096  		return true
  9097  	}
  9098  }
  9099  func rewriteValueMIPS64_OpSub64F(v *Value) bool {
  9100  	// match: (Sub64F x y)
  9101  	// cond:
  9102  	// result: (SUBD x y)
  9103  	for {
  9104  		x := v.Args[0]
  9105  		y := v.Args[1]
  9106  		v.reset(OpMIPS64SUBD)
  9107  		v.AddArg(x)
  9108  		v.AddArg(y)
  9109  		return true
  9110  	}
  9111  }
  9112  func rewriteValueMIPS64_OpSub8(v *Value) bool {
  9113  	// match: (Sub8 x y)
  9114  	// cond:
  9115  	// result: (SUBV x y)
  9116  	for {
  9117  		x := v.Args[0]
  9118  		y := v.Args[1]
  9119  		v.reset(OpMIPS64SUBV)
  9120  		v.AddArg(x)
  9121  		v.AddArg(y)
  9122  		return true
  9123  	}
  9124  }
  9125  func rewriteValueMIPS64_OpSubPtr(v *Value) bool {
  9126  	// match: (SubPtr x y)
  9127  	// cond:
  9128  	// result: (SUBV x y)
  9129  	for {
  9130  		x := v.Args[0]
  9131  		y := v.Args[1]
  9132  		v.reset(OpMIPS64SUBV)
  9133  		v.AddArg(x)
  9134  		v.AddArg(y)
  9135  		return true
  9136  	}
  9137  }
  9138  func rewriteValueMIPS64_OpTrunc16to8(v *Value) bool {
  9139  	// match: (Trunc16to8 x)
  9140  	// cond:
  9141  	// result: x
  9142  	for {
  9143  		x := v.Args[0]
  9144  		v.reset(OpCopy)
  9145  		v.Type = x.Type
  9146  		v.AddArg(x)
  9147  		return true
  9148  	}
  9149  }
  9150  func rewriteValueMIPS64_OpTrunc32to16(v *Value) bool {
  9151  	// match: (Trunc32to16 x)
  9152  	// cond:
  9153  	// result: x
  9154  	for {
  9155  		x := v.Args[0]
  9156  		v.reset(OpCopy)
  9157  		v.Type = x.Type
  9158  		v.AddArg(x)
  9159  		return true
  9160  	}
  9161  }
  9162  func rewriteValueMIPS64_OpTrunc32to8(v *Value) bool {
  9163  	// match: (Trunc32to8 x)
  9164  	// cond:
  9165  	// result: x
  9166  	for {
  9167  		x := v.Args[0]
  9168  		v.reset(OpCopy)
  9169  		v.Type = x.Type
  9170  		v.AddArg(x)
  9171  		return true
  9172  	}
  9173  }
  9174  func rewriteValueMIPS64_OpTrunc64to16(v *Value) bool {
  9175  	// match: (Trunc64to16 x)
  9176  	// cond:
  9177  	// result: x
  9178  	for {
  9179  		x := v.Args[0]
  9180  		v.reset(OpCopy)
  9181  		v.Type = x.Type
  9182  		v.AddArg(x)
  9183  		return true
  9184  	}
  9185  }
  9186  func rewriteValueMIPS64_OpTrunc64to32(v *Value) bool {
  9187  	// match: (Trunc64to32 x)
  9188  	// cond:
  9189  	// result: x
  9190  	for {
  9191  		x := v.Args[0]
  9192  		v.reset(OpCopy)
  9193  		v.Type = x.Type
  9194  		v.AddArg(x)
  9195  		return true
  9196  	}
  9197  }
  9198  func rewriteValueMIPS64_OpTrunc64to8(v *Value) bool {
  9199  	// match: (Trunc64to8 x)
  9200  	// cond:
  9201  	// result: x
  9202  	for {
  9203  		x := v.Args[0]
  9204  		v.reset(OpCopy)
  9205  		v.Type = x.Type
  9206  		v.AddArg(x)
  9207  		return true
  9208  	}
  9209  }
  9210  func rewriteValueMIPS64_OpXor16(v *Value) bool {
  9211  	// match: (Xor16 x y)
  9212  	// cond:
  9213  	// result: (XOR x y)
  9214  	for {
  9215  		x := v.Args[0]
  9216  		y := v.Args[1]
  9217  		v.reset(OpMIPS64XOR)
  9218  		v.AddArg(x)
  9219  		v.AddArg(y)
  9220  		return true
  9221  	}
  9222  }
  9223  func rewriteValueMIPS64_OpXor32(v *Value) bool {
  9224  	// match: (Xor32 x y)
  9225  	// cond:
  9226  	// result: (XOR x y)
  9227  	for {
  9228  		x := v.Args[0]
  9229  		y := v.Args[1]
  9230  		v.reset(OpMIPS64XOR)
  9231  		v.AddArg(x)
  9232  		v.AddArg(y)
  9233  		return true
  9234  	}
  9235  }
  9236  func rewriteValueMIPS64_OpXor64(v *Value) bool {
  9237  	// match: (Xor64 x y)
  9238  	// cond:
  9239  	// result: (XOR x y)
  9240  	for {
  9241  		x := v.Args[0]
  9242  		y := v.Args[1]
  9243  		v.reset(OpMIPS64XOR)
  9244  		v.AddArg(x)
  9245  		v.AddArg(y)
  9246  		return true
  9247  	}
  9248  }
  9249  func rewriteValueMIPS64_OpXor8(v *Value) bool {
  9250  	// match: (Xor8 x y)
  9251  	// cond:
  9252  	// result: (XOR x y)
  9253  	for {
  9254  		x := v.Args[0]
  9255  		y := v.Args[1]
  9256  		v.reset(OpMIPS64XOR)
  9257  		v.AddArg(x)
  9258  		v.AddArg(y)
  9259  		return true
  9260  	}
  9261  }
  9262  func rewriteValueMIPS64_OpZero(v *Value) bool {
  9263  	b := v.Block
  9264  	_ = b
  9265  	config := b.Func.Config
  9266  	_ = config
  9267  	types := &b.Func.Config.Types
  9268  	_ = types
  9269  	// match: (Zero [0] _ mem)
  9270  	// cond:
  9271  	// result: mem
  9272  	for {
  9273  		if v.AuxInt != 0 {
  9274  			break
  9275  		}
  9276  		mem := v.Args[1]
  9277  		v.reset(OpCopy)
  9278  		v.Type = mem.Type
  9279  		v.AddArg(mem)
  9280  		return true
  9281  	}
  9282  	// match: (Zero [1] ptr mem)
  9283  	// cond:
  9284  	// result: (MOVBstore ptr (MOVVconst [0]) mem)
  9285  	for {
  9286  		if v.AuxInt != 1 {
  9287  			break
  9288  		}
  9289  		ptr := v.Args[0]
  9290  		mem := v.Args[1]
  9291  		v.reset(OpMIPS64MOVBstore)
  9292  		v.AddArg(ptr)
  9293  		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
  9294  		v0.AuxInt = 0
  9295  		v.AddArg(v0)
  9296  		v.AddArg(mem)
  9297  		return true
  9298  	}
  9299  	// match: (Zero [2] {t} ptr mem)
  9300  	// cond: t.(Type).Alignment()%2 == 0
  9301  	// result: (MOVHstore ptr (MOVVconst [0]) mem)
  9302  	for {
  9303  		if v.AuxInt != 2 {
  9304  			break
  9305  		}
  9306  		t := v.Aux
  9307  		ptr := v.Args[0]
  9308  		mem := v.Args[1]
  9309  		if !(t.(Type).Alignment()%2 == 0) {
  9310  			break
  9311  		}
  9312  		v.reset(OpMIPS64MOVHstore)
  9313  		v.AddArg(ptr)
  9314  		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
  9315  		v0.AuxInt = 0
  9316  		v.AddArg(v0)
  9317  		v.AddArg(mem)
  9318  		return true
  9319  	}
  9320  	// match: (Zero [2] ptr mem)
  9321  	// cond:
  9322  	// result: (MOVBstore [1] ptr (MOVVconst [0]) 		(MOVBstore [0] ptr (MOVVconst [0]) mem))
  9323  	for {
  9324  		if v.AuxInt != 2 {
  9325  			break
  9326  		}
  9327  		ptr := v.Args[0]
  9328  		mem := v.Args[1]
  9329  		v.reset(OpMIPS64MOVBstore)
  9330  		v.AuxInt = 1
  9331  		v.AddArg(ptr)
  9332  		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
  9333  		v0.AuxInt = 0
  9334  		v.AddArg(v0)
  9335  		v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, TypeMem)
  9336  		v1.AuxInt = 0
  9337  		v1.AddArg(ptr)
  9338  		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
  9339  		v2.AuxInt = 0
  9340  		v1.AddArg(v2)
  9341  		v1.AddArg(mem)
  9342  		v.AddArg(v1)
  9343  		return true
  9344  	}
  9345  	// match: (Zero [4] {t} ptr mem)
  9346  	// cond: t.(Type).Alignment()%4 == 0
  9347  	// result: (MOVWstore ptr (MOVVconst [0]) mem)
  9348  	for {
  9349  		if v.AuxInt != 4 {
  9350  			break
  9351  		}
  9352  		t := v.Aux
  9353  		ptr := v.Args[0]
  9354  		mem := v.Args[1]
  9355  		if !(t.(Type).Alignment()%4 == 0) {
  9356  			break
  9357  		}
  9358  		v.reset(OpMIPS64MOVWstore)
  9359  		v.AddArg(ptr)
  9360  		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
  9361  		v0.AuxInt = 0
  9362  		v.AddArg(v0)
  9363  		v.AddArg(mem)
  9364  		return true
  9365  	}
  9366  	// match: (Zero [4] {t} ptr mem)
  9367  	// cond: t.(Type).Alignment()%2 == 0
  9368  	// result: (MOVHstore [2] ptr (MOVVconst [0]) 		(MOVHstore [0] ptr (MOVVconst [0]) mem))
  9369  	for {
  9370  		if v.AuxInt != 4 {
  9371  			break
  9372  		}
  9373  		t := v.Aux
  9374  		ptr := v.Args[0]
  9375  		mem := v.Args[1]
  9376  		if !(t.(Type).Alignment()%2 == 0) {
  9377  			break
  9378  		}
  9379  		v.reset(OpMIPS64MOVHstore)
  9380  		v.AuxInt = 2
  9381  		v.AddArg(ptr)
  9382  		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
  9383  		v0.AuxInt = 0
  9384  		v.AddArg(v0)
  9385  		v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, TypeMem)
  9386  		v1.AuxInt = 0
  9387  		v1.AddArg(ptr)
  9388  		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
  9389  		v2.AuxInt = 0
  9390  		v1.AddArg(v2)
  9391  		v1.AddArg(mem)
  9392  		v.AddArg(v1)
  9393  		return true
  9394  	}
  9395  	// match: (Zero [4] ptr mem)
  9396  	// cond:
  9397  	// result: (MOVBstore [3] ptr (MOVVconst [0]) 		(MOVBstore [2] ptr (MOVVconst [0]) 			(MOVBstore [1] ptr (MOVVconst [0]) 				(MOVBstore [0] ptr (MOVVconst [0]) mem))))
  9398  	for {
  9399  		if v.AuxInt != 4 {
  9400  			break
  9401  		}
  9402  		ptr := v.Args[0]
  9403  		mem := v.Args[1]
  9404  		v.reset(OpMIPS64MOVBstore)
  9405  		v.AuxInt = 3
  9406  		v.AddArg(ptr)
  9407  		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
  9408  		v0.AuxInt = 0
  9409  		v.AddArg(v0)
  9410  		v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, TypeMem)
  9411  		v1.AuxInt = 2
  9412  		v1.AddArg(ptr)
  9413  		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
  9414  		v2.AuxInt = 0
  9415  		v1.AddArg(v2)
  9416  		v3 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, TypeMem)
  9417  		v3.AuxInt = 1
  9418  		v3.AddArg(ptr)
  9419  		v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
  9420  		v4.AuxInt = 0
  9421  		v3.AddArg(v4)
  9422  		v5 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, TypeMem)
  9423  		v5.AuxInt = 0
  9424  		v5.AddArg(ptr)
  9425  		v6 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
  9426  		v6.AuxInt = 0
  9427  		v5.AddArg(v6)
  9428  		v5.AddArg(mem)
  9429  		v3.AddArg(v5)
  9430  		v1.AddArg(v3)
  9431  		v.AddArg(v1)
  9432  		return true
  9433  	}
  9434  	// match: (Zero [8] {t} ptr mem)
  9435  	// cond: t.(Type).Alignment()%8 == 0
  9436  	// result: (MOVVstore ptr (MOVVconst [0]) mem)
  9437  	for {
  9438  		if v.AuxInt != 8 {
  9439  			break
  9440  		}
  9441  		t := v.Aux
  9442  		ptr := v.Args[0]
  9443  		mem := v.Args[1]
  9444  		if !(t.(Type).Alignment()%8 == 0) {
  9445  			break
  9446  		}
  9447  		v.reset(OpMIPS64MOVVstore)
  9448  		v.AddArg(ptr)
  9449  		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
  9450  		v0.AuxInt = 0
  9451  		v.AddArg(v0)
  9452  		v.AddArg(mem)
  9453  		return true
  9454  	}
  9455  	// match: (Zero [8] {t} ptr mem)
  9456  	// cond: t.(Type).Alignment()%4 == 0
  9457  	// result: (MOVWstore [4] ptr (MOVVconst [0]) 		(MOVWstore [0] ptr (MOVVconst [0]) mem))
  9458  	for {
  9459  		if v.AuxInt != 8 {
  9460  			break
  9461  		}
  9462  		t := v.Aux
  9463  		ptr := v.Args[0]
  9464  		mem := v.Args[1]
  9465  		if !(t.(Type).Alignment()%4 == 0) {
  9466  			break
  9467  		}
  9468  		v.reset(OpMIPS64MOVWstore)
  9469  		v.AuxInt = 4
  9470  		v.AddArg(ptr)
  9471  		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
  9472  		v0.AuxInt = 0
  9473  		v.AddArg(v0)
  9474  		v1 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, TypeMem)
  9475  		v1.AuxInt = 0
  9476  		v1.AddArg(ptr)
  9477  		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
  9478  		v2.AuxInt = 0
  9479  		v1.AddArg(v2)
  9480  		v1.AddArg(mem)
  9481  		v.AddArg(v1)
  9482  		return true
  9483  	}
  9484  	// match: (Zero [8] {t} ptr mem)
  9485  	// cond: t.(Type).Alignment()%2 == 0
  9486  	// result: (MOVHstore [6] ptr (MOVVconst [0]) 		(MOVHstore [4] ptr (MOVVconst [0]) 			(MOVHstore [2] ptr (MOVVconst [0]) 				(MOVHstore [0] ptr (MOVVconst [0]) mem))))
  9487  	for {
  9488  		if v.AuxInt != 8 {
  9489  			break
  9490  		}
  9491  		t := v.Aux
  9492  		ptr := v.Args[0]
  9493  		mem := v.Args[1]
  9494  		if !(t.(Type).Alignment()%2 == 0) {
  9495  			break
  9496  		}
  9497  		v.reset(OpMIPS64MOVHstore)
  9498  		v.AuxInt = 6
  9499  		v.AddArg(ptr)
  9500  		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
  9501  		v0.AuxInt = 0
  9502  		v.AddArg(v0)
  9503  		v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, TypeMem)
  9504  		v1.AuxInt = 4
  9505  		v1.AddArg(ptr)
  9506  		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
  9507  		v2.AuxInt = 0
  9508  		v1.AddArg(v2)
  9509  		v3 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, TypeMem)
  9510  		v3.AuxInt = 2
  9511  		v3.AddArg(ptr)
  9512  		v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
  9513  		v4.AuxInt = 0
  9514  		v3.AddArg(v4)
  9515  		v5 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, TypeMem)
  9516  		v5.AuxInt = 0
  9517  		v5.AddArg(ptr)
  9518  		v6 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
  9519  		v6.AuxInt = 0
  9520  		v5.AddArg(v6)
  9521  		v5.AddArg(mem)
  9522  		v3.AddArg(v5)
  9523  		v1.AddArg(v3)
  9524  		v.AddArg(v1)
  9525  		return true
  9526  	}
  9527  	// match: (Zero [3] ptr mem)
  9528  	// cond:
  9529  	// result: (MOVBstore [2] ptr (MOVVconst [0]) 		(MOVBstore [1] ptr (MOVVconst [0]) 			(MOVBstore [0] ptr (MOVVconst [0]) mem)))
  9530  	for {
  9531  		if v.AuxInt != 3 {
  9532  			break
  9533  		}
  9534  		ptr := v.Args[0]
  9535  		mem := v.Args[1]
  9536  		v.reset(OpMIPS64MOVBstore)
  9537  		v.AuxInt = 2
  9538  		v.AddArg(ptr)
  9539  		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
  9540  		v0.AuxInt = 0
  9541  		v.AddArg(v0)
  9542  		v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, TypeMem)
  9543  		v1.AuxInt = 1
  9544  		v1.AddArg(ptr)
  9545  		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
  9546  		v2.AuxInt = 0
  9547  		v1.AddArg(v2)
  9548  		v3 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, TypeMem)
  9549  		v3.AuxInt = 0
  9550  		v3.AddArg(ptr)
  9551  		v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
  9552  		v4.AuxInt = 0
  9553  		v3.AddArg(v4)
  9554  		v3.AddArg(mem)
  9555  		v1.AddArg(v3)
  9556  		v.AddArg(v1)
  9557  		return true
  9558  	}
  9559  	// match: (Zero [6] {t} ptr mem)
  9560  	// cond: t.(Type).Alignment()%2 == 0
  9561  	// result: (MOVHstore [4] ptr (MOVVconst [0]) 		(MOVHstore [2] ptr (MOVVconst [0]) 			(MOVHstore [0] ptr (MOVVconst [0]) mem)))
  9562  	for {
  9563  		if v.AuxInt != 6 {
  9564  			break
  9565  		}
  9566  		t := v.Aux
  9567  		ptr := v.Args[0]
  9568  		mem := v.Args[1]
  9569  		if !(t.(Type).Alignment()%2 == 0) {
  9570  			break
  9571  		}
  9572  		v.reset(OpMIPS64MOVHstore)
  9573  		v.AuxInt = 4
  9574  		v.AddArg(ptr)
  9575  		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
  9576  		v0.AuxInt = 0
  9577  		v.AddArg(v0)
  9578  		v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, TypeMem)
  9579  		v1.AuxInt = 2
  9580  		v1.AddArg(ptr)
  9581  		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
  9582  		v2.AuxInt = 0
  9583  		v1.AddArg(v2)
  9584  		v3 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, TypeMem)
  9585  		v3.AuxInt = 0
  9586  		v3.AddArg(ptr)
  9587  		v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
  9588  		v4.AuxInt = 0
  9589  		v3.AddArg(v4)
  9590  		v3.AddArg(mem)
  9591  		v1.AddArg(v3)
  9592  		v.AddArg(v1)
  9593  		return true
  9594  	}
  9595  	// match: (Zero [12] {t} ptr mem)
  9596  	// cond: t.(Type).Alignment()%4 == 0
  9597  	// result: (MOVWstore [8] ptr (MOVVconst [0]) 		(MOVWstore [4] ptr (MOVVconst [0]) 			(MOVWstore [0] ptr (MOVVconst [0]) mem)))
  9598  	for {
  9599  		if v.AuxInt != 12 {
  9600  			break
  9601  		}
  9602  		t := v.Aux
  9603  		ptr := v.Args[0]
  9604  		mem := v.Args[1]
  9605  		if !(t.(Type).Alignment()%4 == 0) {
  9606  			break
  9607  		}
  9608  		v.reset(OpMIPS64MOVWstore)
  9609  		v.AuxInt = 8
  9610  		v.AddArg(ptr)
  9611  		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
  9612  		v0.AuxInt = 0
  9613  		v.AddArg(v0)
  9614  		v1 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, TypeMem)
  9615  		v1.AuxInt = 4
  9616  		v1.AddArg(ptr)
  9617  		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
  9618  		v2.AuxInt = 0
  9619  		v1.AddArg(v2)
  9620  		v3 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, TypeMem)
  9621  		v3.AuxInt = 0
  9622  		v3.AddArg(ptr)
  9623  		v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
  9624  		v4.AuxInt = 0
  9625  		v3.AddArg(v4)
  9626  		v3.AddArg(mem)
  9627  		v1.AddArg(v3)
  9628  		v.AddArg(v1)
  9629  		return true
  9630  	}
  9631  	// match: (Zero [16] {t} ptr mem)
  9632  	// cond: t.(Type).Alignment()%8 == 0
  9633  	// result: (MOVVstore [8] ptr (MOVVconst [0]) 		(MOVVstore [0] ptr (MOVVconst [0]) mem))
  9634  	for {
  9635  		if v.AuxInt != 16 {
  9636  			break
  9637  		}
  9638  		t := v.Aux
  9639  		ptr := v.Args[0]
  9640  		mem := v.Args[1]
  9641  		if !(t.(Type).Alignment()%8 == 0) {
  9642  			break
  9643  		}
  9644  		v.reset(OpMIPS64MOVVstore)
  9645  		v.AuxInt = 8
  9646  		v.AddArg(ptr)
  9647  		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
  9648  		v0.AuxInt = 0
  9649  		v.AddArg(v0)
  9650  		v1 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, TypeMem)
  9651  		v1.AuxInt = 0
  9652  		v1.AddArg(ptr)
  9653  		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
  9654  		v2.AuxInt = 0
  9655  		v1.AddArg(v2)
  9656  		v1.AddArg(mem)
  9657  		v.AddArg(v1)
  9658  		return true
  9659  	}
  9660  	// match: (Zero [24] {t} ptr mem)
  9661  	// cond: t.(Type).Alignment()%8 == 0
  9662  	// result: (MOVVstore [16] ptr (MOVVconst [0]) 		(MOVVstore [8] ptr (MOVVconst [0]) 			(MOVVstore [0] ptr (MOVVconst [0]) mem)))
  9663  	for {
  9664  		if v.AuxInt != 24 {
  9665  			break
  9666  		}
  9667  		t := v.Aux
  9668  		ptr := v.Args[0]
  9669  		mem := v.Args[1]
  9670  		if !(t.(Type).Alignment()%8 == 0) {
  9671  			break
  9672  		}
  9673  		v.reset(OpMIPS64MOVVstore)
  9674  		v.AuxInt = 16
  9675  		v.AddArg(ptr)
  9676  		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
  9677  		v0.AuxInt = 0
  9678  		v.AddArg(v0)
  9679  		v1 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, TypeMem)
  9680  		v1.AuxInt = 8
  9681  		v1.AddArg(ptr)
  9682  		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
  9683  		v2.AuxInt = 0
  9684  		v1.AddArg(v2)
  9685  		v3 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, TypeMem)
  9686  		v3.AuxInt = 0
  9687  		v3.AddArg(ptr)
  9688  		v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
  9689  		v4.AuxInt = 0
  9690  		v3.AddArg(v4)
  9691  		v3.AddArg(mem)
  9692  		v1.AddArg(v3)
  9693  		v.AddArg(v1)
  9694  		return true
  9695  	}
  9696  	// match: (Zero [s] {t} ptr mem)
  9697  	// cond: s%8 == 0 && s > 24 && s <= 8*128 	&& t.(Type).Alignment()%8 == 0 && !config.noDuffDevice
  9698  	// result: (DUFFZERO [8 * (128 - int64(s/8))] ptr mem)
  9699  	for {
  9700  		s := v.AuxInt
  9701  		t := v.Aux
  9702  		ptr := v.Args[0]
  9703  		mem := v.Args[1]
  9704  		if !(s%8 == 0 && s > 24 && s <= 8*128 && t.(Type).Alignment()%8 == 0 && !config.noDuffDevice) {
  9705  			break
  9706  		}
  9707  		v.reset(OpMIPS64DUFFZERO)
  9708  		v.AuxInt = 8 * (128 - int64(s/8))
  9709  		v.AddArg(ptr)
  9710  		v.AddArg(mem)
  9711  		return true
  9712  	}
  9713  	// match: (Zero [s] {t} ptr mem)
  9714  	// cond: (s > 8*128 || config.noDuffDevice) || t.(Type).Alignment()%8 != 0
  9715  	// result: (LoweredZero [t.(Type).Alignment()] 		ptr 		(ADDVconst <ptr.Type> ptr [s-moveSize(t.(Type).Alignment(), config)]) 		mem)
  9716  	for {
  9717  		s := v.AuxInt
  9718  		t := v.Aux
  9719  		ptr := v.Args[0]
  9720  		mem := v.Args[1]
  9721  		if !((s > 8*128 || config.noDuffDevice) || t.(Type).Alignment()%8 != 0) {
  9722  			break
  9723  		}
  9724  		v.reset(OpMIPS64LoweredZero)
  9725  		v.AuxInt = t.(Type).Alignment()
  9726  		v.AddArg(ptr)
  9727  		v0 := b.NewValue0(v.Pos, OpMIPS64ADDVconst, ptr.Type)
  9728  		v0.AuxInt = s - moveSize(t.(Type).Alignment(), config)
  9729  		v0.AddArg(ptr)
  9730  		v.AddArg(v0)
  9731  		v.AddArg(mem)
  9732  		return true
  9733  	}
  9734  	return false
  9735  }
  9736  func rewriteValueMIPS64_OpZeroExt16to32(v *Value) bool {
  9737  	// match: (ZeroExt16to32 x)
  9738  	// cond:
  9739  	// result: (MOVHUreg x)
  9740  	for {
  9741  		x := v.Args[0]
  9742  		v.reset(OpMIPS64MOVHUreg)
  9743  		v.AddArg(x)
  9744  		return true
  9745  	}
  9746  }
  9747  func rewriteValueMIPS64_OpZeroExt16to64(v *Value) bool {
  9748  	// match: (ZeroExt16to64 x)
  9749  	// cond:
  9750  	// result: (MOVHUreg x)
  9751  	for {
  9752  		x := v.Args[0]
  9753  		v.reset(OpMIPS64MOVHUreg)
  9754  		v.AddArg(x)
  9755  		return true
  9756  	}
  9757  }
  9758  func rewriteValueMIPS64_OpZeroExt32to64(v *Value) bool {
  9759  	// match: (ZeroExt32to64 x)
  9760  	// cond:
  9761  	// result: (MOVWUreg x)
  9762  	for {
  9763  		x := v.Args[0]
  9764  		v.reset(OpMIPS64MOVWUreg)
  9765  		v.AddArg(x)
  9766  		return true
  9767  	}
  9768  }
  9769  func rewriteValueMIPS64_OpZeroExt8to16(v *Value) bool {
  9770  	// match: (ZeroExt8to16 x)
  9771  	// cond:
  9772  	// result: (MOVBUreg x)
  9773  	for {
  9774  		x := v.Args[0]
  9775  		v.reset(OpMIPS64MOVBUreg)
  9776  		v.AddArg(x)
  9777  		return true
  9778  	}
  9779  }
  9780  func rewriteValueMIPS64_OpZeroExt8to32(v *Value) bool {
  9781  	// match: (ZeroExt8to32 x)
  9782  	// cond:
  9783  	// result: (MOVBUreg x)
  9784  	for {
  9785  		x := v.Args[0]
  9786  		v.reset(OpMIPS64MOVBUreg)
  9787  		v.AddArg(x)
  9788  		return true
  9789  	}
  9790  }
  9791  func rewriteValueMIPS64_OpZeroExt8to64(v *Value) bool {
  9792  	// match: (ZeroExt8to64 x)
  9793  	// cond:
  9794  	// result: (MOVBUreg x)
  9795  	for {
  9796  		x := v.Args[0]
  9797  		v.reset(OpMIPS64MOVBUreg)
  9798  		v.AddArg(x)
  9799  		return true
  9800  	}
  9801  }
// rewriteBlockMIPS64 applies the MIPS64 block-lowering rules (generated
// from gen/MIPS64.rules) to block b. Each rule matches on b.Kind and the
// shape of b.Control; on a match it may change b.Kind, replace the control
// value, or swap the successor order, and reports true. Rules are tried in
// order; the first match wins.
func rewriteBlockMIPS64(b *Block) bool {
	config := b.Func.Config
	_ = config
	fe := b.Func.fe
	_ = fe
	types := &config.Types
	_ = types
	// The blank assignments above (and the `_ = yes` / `_ = no` pairs
	// below) silence "declared but not used" errors for rule sets that do
	// not reference every generated binding.
	switch b.Kind {
	// EQ branches to yes when its control value is zero — see the
	// MOVVconst folding rules at the end of this case.
	case BlockMIPS64EQ:
		// match: (EQ (FPFlagTrue cmp) yes no)
		// cond:
		// result: (FPF cmp yes no)
		for {
			v := b.Control
			if v.Op != OpMIPS64FPFlagTrue {
				break
			}
			cmp := v.Args[0]
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockMIPS64FPF
			b.SetControl(cmp)
			_ = yes
			_ = no
			return true
		}
		// match: (EQ (FPFlagFalse cmp) yes no)
		// cond:
		// result: (FPT cmp yes no)
		for {
			v := b.Control
			if v.Op != OpMIPS64FPFlagFalse {
				break
			}
			cmp := v.Args[0]
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockMIPS64FPT
			b.SetControl(cmp)
			_ = yes
			_ = no
			return true
		}
		// (XORconst [1] x) flips a 0/1 comparison result, so EQ on the
		// inverted value is NE on the original comparison.
		// match: (EQ (XORconst [1] cmp:(SGT _ _)) yes no)
		// cond:
		// result: (NE cmp yes no)
		for {
			v := b.Control
			if v.Op != OpMIPS64XORconst {
				break
			}
			if v.AuxInt != 1 {
				break
			}
			cmp := v.Args[0]
			if cmp.Op != OpMIPS64SGT {
				break
			}
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockMIPS64NE
			b.SetControl(cmp)
			_ = yes
			_ = no
			return true
		}
		// match: (EQ (XORconst [1] cmp:(SGTU _ _)) yes no)
		// cond:
		// result: (NE cmp yes no)
		for {
			v := b.Control
			if v.Op != OpMIPS64XORconst {
				break
			}
			if v.AuxInt != 1 {
				break
			}
			cmp := v.Args[0]
			if cmp.Op != OpMIPS64SGTU {
				break
			}
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockMIPS64NE
			b.SetControl(cmp)
			_ = yes
			_ = no
			return true
		}
		// match: (EQ (XORconst [1] cmp:(SGTconst _)) yes no)
		// cond:
		// result: (NE cmp yes no)
		for {
			v := b.Control
			if v.Op != OpMIPS64XORconst {
				break
			}
			if v.AuxInt != 1 {
				break
			}
			cmp := v.Args[0]
			if cmp.Op != OpMIPS64SGTconst {
				break
			}
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockMIPS64NE
			b.SetControl(cmp)
			_ = yes
			_ = no
			return true
		}
		// match: (EQ (XORconst [1] cmp:(SGTUconst _)) yes no)
		// cond:
		// result: (NE cmp yes no)
		for {
			v := b.Control
			if v.Op != OpMIPS64XORconst {
				break
			}
			if v.AuxInt != 1 {
				break
			}
			cmp := v.Args[0]
			if cmp.Op != OpMIPS64SGTUconst {
				break
			}
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockMIPS64NE
			b.SetControl(cmp)
			_ = yes
			_ = no
			return true
		}
		// match: (EQ (SGTUconst [1] x) yes no)
		// cond:
		// result: (NE x yes no)
		for {
			v := b.Control
			if v.Op != OpMIPS64SGTUconst {
				break
			}
			if v.AuxInt != 1 {
				break
			}
			x := v.Args[0]
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockMIPS64NE
			b.SetControl(x)
			_ = yes
			_ = no
			return true
		}
		// match: (EQ (SGTU x (MOVVconst [0])) yes no)
		// cond:
		// result: (EQ x yes no)
		for {
			v := b.Control
			if v.Op != OpMIPS64SGTU {
				break
			}
			x := v.Args[0]
			v_1 := v.Args[1]
			if v_1.Op != OpMIPS64MOVVconst {
				break
			}
			if v_1.AuxInt != 0 {
				break
			}
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockMIPS64EQ
			b.SetControl(x)
			_ = yes
			_ = no
			return true
		}
		// match: (EQ (SGTconst [0] x) yes no)
		// cond:
		// result: (GEZ x yes no)
		for {
			v := b.Control
			if v.Op != OpMIPS64SGTconst {
				break
			}
			if v.AuxInt != 0 {
				break
			}
			x := v.Args[0]
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockMIPS64GEZ
			b.SetControl(x)
			_ = yes
			_ = no
			return true
		}
		// match: (EQ (SGT x (MOVVconst [0])) yes no)
		// cond:
		// result: (LEZ x yes no)
		for {
			v := b.Control
			if v.Op != OpMIPS64SGT {
				break
			}
			x := v.Args[0]
			v_1 := v.Args[1]
			if v_1.Op != OpMIPS64MOVVconst {
				break
			}
			if v_1.AuxInt != 0 {
				break
			}
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockMIPS64LEZ
			b.SetControl(x)
			_ = yes
			_ = no
			return true
		}
		// Constant folding: control is a known constant, so the branch
		// direction is decided at compile time.
		// match: (EQ (MOVVconst [0]) yes no)
		// cond:
		// result: (First nil yes no)
		for {
			v := b.Control
			if v.Op != OpMIPS64MOVVconst {
				break
			}
			if v.AuxInt != 0 {
				break
			}
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockFirst
			b.SetControl(nil)
			_ = yes
			_ = no
			return true
		}
		// match: (EQ (MOVVconst [c]) yes no)
		// cond: c != 0
		// result: (First nil no yes)
		for {
			v := b.Control
			if v.Op != OpMIPS64MOVVconst {
				break
			}
			c := v.AuxInt
			yes := b.Succs[0]
			no := b.Succs[1]
			if !(c != 0) {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			// Condition is known false: swap so the "no" edge is taken.
			b.swapSuccessors()
			_ = no
			_ = yes
			return true
		}
	// GEZ branches to yes when its control value is >= 0; only constant
	// folding applies here.
	case BlockMIPS64GEZ:
		// match: (GEZ (MOVVconst [c]) yes no)
		// cond: c >= 0
		// result: (First nil yes no)
		for {
			v := b.Control
			if v.Op != OpMIPS64MOVVconst {
				break
			}
			c := v.AuxInt
			yes := b.Succs[0]
			no := b.Succs[1]
			if !(c >= 0) {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			_ = yes
			_ = no
			return true
		}
		// match: (GEZ (MOVVconst [c]) yes no)
		// cond: c <  0
		// result: (First nil no yes)
		for {
			v := b.Control
			if v.Op != OpMIPS64MOVVconst {
				break
			}
			c := v.AuxInt
			yes := b.Succs[0]
			no := b.Succs[1]
			if !(c < 0) {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.swapSuccessors()
			_ = no
			_ = yes
			return true
		}
	// GTZ branches to yes when its control value is > 0.
	case BlockMIPS64GTZ:
		// match: (GTZ (MOVVconst [c]) yes no)
		// cond: c >  0
		// result: (First nil yes no)
		for {
			v := b.Control
			if v.Op != OpMIPS64MOVVconst {
				break
			}
			c := v.AuxInt
			yes := b.Succs[0]
			no := b.Succs[1]
			if !(c > 0) {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			_ = yes
			_ = no
			return true
		}
		// match: (GTZ (MOVVconst [c]) yes no)
		// cond: c <= 0
		// result: (First nil no yes)
		for {
			v := b.Control
			if v.Op != OpMIPS64MOVVconst {
				break
			}
			c := v.AuxInt
			yes := b.Succs[0]
			no := b.Succs[1]
			if !(c <= 0) {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.swapSuccessors()
			_ = no
			_ = yes
			return true
		}
	// The generic If block lowers to NE on its boolean control value.
	case BlockIf:
		// match: (If cond yes no)
		// cond:
		// result: (NE cond yes no)
		for {
			v := b.Control
			_ = v
			cond := b.Control
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockMIPS64NE
			b.SetControl(cond)
			_ = yes
			_ = no
			return true
		}
	// LEZ branches to yes when its control value is <= 0.
	case BlockMIPS64LEZ:
		// match: (LEZ (MOVVconst [c]) yes no)
		// cond: c <= 0
		// result: (First nil yes no)
		for {
			v := b.Control
			if v.Op != OpMIPS64MOVVconst {
				break
			}
			c := v.AuxInt
			yes := b.Succs[0]
			no := b.Succs[1]
			if !(c <= 0) {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			_ = yes
			_ = no
			return true
		}
		// match: (LEZ (MOVVconst [c]) yes no)
		// cond: c >  0
		// result: (First nil no yes)
		for {
			v := b.Control
			if v.Op != OpMIPS64MOVVconst {
				break
			}
			c := v.AuxInt
			yes := b.Succs[0]
			no := b.Succs[1]
			if !(c > 0) {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.swapSuccessors()
			_ = no
			_ = yes
			return true
		}
	// LTZ branches to yes when its control value is < 0.
	case BlockMIPS64LTZ:
		// match: (LTZ (MOVVconst [c]) yes no)
		// cond: c <  0
		// result: (First nil yes no)
		for {
			v := b.Control
			if v.Op != OpMIPS64MOVVconst {
				break
			}
			c := v.AuxInt
			yes := b.Succs[0]
			no := b.Succs[1]
			if !(c < 0) {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			_ = yes
			_ = no
			return true
		}
		// match: (LTZ (MOVVconst [c]) yes no)
		// cond: c >= 0
		// result: (First nil no yes)
		for {
			v := b.Control
			if v.Op != OpMIPS64MOVVconst {
				break
			}
			c := v.AuxInt
			yes := b.Succs[0]
			no := b.Succs[1]
			if !(c >= 0) {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.swapSuccessors()
			_ = no
			_ = yes
			return true
		}
	// NE branches to yes when its control value is non-zero; these rules
	// mirror the EQ case with the branch kinds inverted.
	case BlockMIPS64NE:
		// match: (NE (FPFlagTrue cmp) yes no)
		// cond:
		// result: (FPT cmp yes no)
		for {
			v := b.Control
			if v.Op != OpMIPS64FPFlagTrue {
				break
			}
			cmp := v.Args[0]
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockMIPS64FPT
			b.SetControl(cmp)
			_ = yes
			_ = no
			return true
		}
		// match: (NE (FPFlagFalse cmp) yes no)
		// cond:
		// result: (FPF cmp yes no)
		for {
			v := b.Control
			if v.Op != OpMIPS64FPFlagFalse {
				break
			}
			cmp := v.Args[0]
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockMIPS64FPF
			b.SetControl(cmp)
			_ = yes
			_ = no
			return true
		}
		// match: (NE (XORconst [1] cmp:(SGT _ _)) yes no)
		// cond:
		// result: (EQ cmp yes no)
		for {
			v := b.Control
			if v.Op != OpMIPS64XORconst {
				break
			}
			if v.AuxInt != 1 {
				break
			}
			cmp := v.Args[0]
			if cmp.Op != OpMIPS64SGT {
				break
			}
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockMIPS64EQ
			b.SetControl(cmp)
			_ = yes
			_ = no
			return true
		}
		// match: (NE (XORconst [1] cmp:(SGTU _ _)) yes no)
		// cond:
		// result: (EQ cmp yes no)
		for {
			v := b.Control
			if v.Op != OpMIPS64XORconst {
				break
			}
			if v.AuxInt != 1 {
				break
			}
			cmp := v.Args[0]
			if cmp.Op != OpMIPS64SGTU {
				break
			}
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockMIPS64EQ
			b.SetControl(cmp)
			_ = yes
			_ = no
			return true
		}
		// match: (NE (XORconst [1] cmp:(SGTconst _)) yes no)
		// cond:
		// result: (EQ cmp yes no)
		for {
			v := b.Control
			if v.Op != OpMIPS64XORconst {
				break
			}
			if v.AuxInt != 1 {
				break
			}
			cmp := v.Args[0]
			if cmp.Op != OpMIPS64SGTconst {
				break
			}
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockMIPS64EQ
			b.SetControl(cmp)
			_ = yes
			_ = no
			return true
		}
		// match: (NE (XORconst [1] cmp:(SGTUconst _)) yes no)
		// cond:
		// result: (EQ cmp yes no)
		for {
			v := b.Control
			if v.Op != OpMIPS64XORconst {
				break
			}
			if v.AuxInt != 1 {
				break
			}
			cmp := v.Args[0]
			if cmp.Op != OpMIPS64SGTUconst {
				break
			}
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockMIPS64EQ
			b.SetControl(cmp)
			_ = yes
			_ = no
			return true
		}
		// match: (NE (SGTUconst [1] x) yes no)
		// cond:
		// result: (EQ x yes no)
		for {
			v := b.Control
			if v.Op != OpMIPS64SGTUconst {
				break
			}
			if v.AuxInt != 1 {
				break
			}
			x := v.Args[0]
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockMIPS64EQ
			b.SetControl(x)
			_ = yes
			_ = no
			return true
		}
		// match: (NE (SGTU x (MOVVconst [0])) yes no)
		// cond:
		// result: (NE x yes no)
		for {
			v := b.Control
			if v.Op != OpMIPS64SGTU {
				break
			}
			x := v.Args[0]
			v_1 := v.Args[1]
			if v_1.Op != OpMIPS64MOVVconst {
				break
			}
			if v_1.AuxInt != 0 {
				break
			}
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockMIPS64NE
			b.SetControl(x)
			_ = yes
			_ = no
			return true
		}
		// match: (NE (SGTconst [0] x) yes no)
		// cond:
		// result: (LTZ x yes no)
		for {
			v := b.Control
			if v.Op != OpMIPS64SGTconst {
				break
			}
			if v.AuxInt != 0 {
				break
			}
			x := v.Args[0]
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockMIPS64LTZ
			b.SetControl(x)
			_ = yes
			_ = no
			return true
		}
		// match: (NE (SGT x (MOVVconst [0])) yes no)
		// cond:
		// result: (GTZ x yes no)
		for {
			v := b.Control
			if v.Op != OpMIPS64SGT {
				break
			}
			x := v.Args[0]
			v_1 := v.Args[1]
			if v_1.Op != OpMIPS64MOVVconst {
				break
			}
			if v_1.AuxInt != 0 {
				break
			}
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockMIPS64GTZ
			b.SetControl(x)
			_ = yes
			_ = no
			return true
		}
		// match: (NE (MOVVconst [0]) yes no)
		// cond:
		// result: (First nil no yes)
		for {
			v := b.Control
			if v.Op != OpMIPS64MOVVconst {
				break
			}
			if v.AuxInt != 0 {
				break
			}
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.swapSuccessors()
			_ = no
			_ = yes
			return true
		}
		// match: (NE (MOVVconst [c]) yes no)
		// cond: c != 0
		// result: (First nil yes no)
		for {
			v := b.Control
			if v.Op != OpMIPS64MOVVconst {
				break
			}
			c := v.AuxInt
			yes := b.Succs[0]
			no := b.Succs[1]
			if !(c != 0) {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			_ = yes
			_ = no
			return true
		}
	}
	// No rule matched this block.
	return false
}