github.com/corona10/go@v0.0.0-20180224231303-7a218942be57/src/cmd/compile/internal/ssa/rewriteMIPS64.go (about)

     1  // Code generated from gen/MIPS64.rules; DO NOT EDIT.
     2  // generated with: cd gen; go run *.go
     3  
     4  package ssa
     5  
     6  import "math"
     7  import "cmd/internal/obj"
     8  import "cmd/internal/objabi"
     9  import "cmd/compile/internal/types"
    10  
    11  var _ = math.MinInt8  // in case not otherwise used
    12  var _ = obj.ANOP      // in case not otherwise used
    13  var _ = objabi.GOROOT // in case not otherwise used
    14  var _ = types.TypeMem // in case not otherwise used
    15  
// rewriteValueMIPS64 dispatches v to the per-opcode rewrite helper for the
// MIPS64 lowering rules. It reports whether some rule fired and rewrote v in
// place. The case list is exhaustive over the ops that have rules; it is
// machine-generated from gen/MIPS64.rules — change the rules file and
// regenerate rather than editing this switch by hand.
func rewriteValueMIPS64(v *Value) bool {
	switch v.Op {
	case OpAdd16:
		return rewriteValueMIPS64_OpAdd16_0(v)
	case OpAdd32:
		return rewriteValueMIPS64_OpAdd32_0(v)
	case OpAdd32F:
		return rewriteValueMIPS64_OpAdd32F_0(v)
	case OpAdd64:
		return rewriteValueMIPS64_OpAdd64_0(v)
	case OpAdd64F:
		return rewriteValueMIPS64_OpAdd64F_0(v)
	case OpAdd8:
		return rewriteValueMIPS64_OpAdd8_0(v)
	case OpAddPtr:
		return rewriteValueMIPS64_OpAddPtr_0(v)
	case OpAddr:
		return rewriteValueMIPS64_OpAddr_0(v)
	case OpAnd16:
		return rewriteValueMIPS64_OpAnd16_0(v)
	case OpAnd32:
		return rewriteValueMIPS64_OpAnd32_0(v)
	case OpAnd64:
		return rewriteValueMIPS64_OpAnd64_0(v)
	case OpAnd8:
		return rewriteValueMIPS64_OpAnd8_0(v)
	case OpAndB:
		return rewriteValueMIPS64_OpAndB_0(v)
	case OpAtomicAdd32:
		return rewriteValueMIPS64_OpAtomicAdd32_0(v)
	case OpAtomicAdd64:
		return rewriteValueMIPS64_OpAtomicAdd64_0(v)
	case OpAtomicCompareAndSwap32:
		return rewriteValueMIPS64_OpAtomicCompareAndSwap32_0(v)
	case OpAtomicCompareAndSwap64:
		return rewriteValueMIPS64_OpAtomicCompareAndSwap64_0(v)
	case OpAtomicExchange32:
		return rewriteValueMIPS64_OpAtomicExchange32_0(v)
	case OpAtomicExchange64:
		return rewriteValueMIPS64_OpAtomicExchange64_0(v)
	case OpAtomicLoad32:
		return rewriteValueMIPS64_OpAtomicLoad32_0(v)
	case OpAtomicLoad64:
		return rewriteValueMIPS64_OpAtomicLoad64_0(v)
	case OpAtomicLoadPtr:
		return rewriteValueMIPS64_OpAtomicLoadPtr_0(v)
	case OpAtomicStore32:
		return rewriteValueMIPS64_OpAtomicStore32_0(v)
	case OpAtomicStore64:
		return rewriteValueMIPS64_OpAtomicStore64_0(v)
	case OpAtomicStorePtrNoWB:
		return rewriteValueMIPS64_OpAtomicStorePtrNoWB_0(v)
	case OpAvg64u:
		return rewriteValueMIPS64_OpAvg64u_0(v)
	case OpClosureCall:
		return rewriteValueMIPS64_OpClosureCall_0(v)
	case OpCom16:
		return rewriteValueMIPS64_OpCom16_0(v)
	case OpCom32:
		return rewriteValueMIPS64_OpCom32_0(v)
	case OpCom64:
		return rewriteValueMIPS64_OpCom64_0(v)
	case OpCom8:
		return rewriteValueMIPS64_OpCom8_0(v)
	case OpConst16:
		return rewriteValueMIPS64_OpConst16_0(v)
	case OpConst32:
		return rewriteValueMIPS64_OpConst32_0(v)
	case OpConst32F:
		return rewriteValueMIPS64_OpConst32F_0(v)
	case OpConst64:
		return rewriteValueMIPS64_OpConst64_0(v)
	case OpConst64F:
		return rewriteValueMIPS64_OpConst64F_0(v)
	case OpConst8:
		return rewriteValueMIPS64_OpConst8_0(v)
	case OpConstBool:
		return rewriteValueMIPS64_OpConstBool_0(v)
	case OpConstNil:
		return rewriteValueMIPS64_OpConstNil_0(v)
	case OpConvert:
		return rewriteValueMIPS64_OpConvert_0(v)
	case OpCvt32Fto32:
		return rewriteValueMIPS64_OpCvt32Fto32_0(v)
	case OpCvt32Fto64:
		return rewriteValueMIPS64_OpCvt32Fto64_0(v)
	case OpCvt32Fto64F:
		return rewriteValueMIPS64_OpCvt32Fto64F_0(v)
	case OpCvt32to32F:
		return rewriteValueMIPS64_OpCvt32to32F_0(v)
	case OpCvt32to64F:
		return rewriteValueMIPS64_OpCvt32to64F_0(v)
	case OpCvt64Fto32:
		return rewriteValueMIPS64_OpCvt64Fto32_0(v)
	case OpCvt64Fto32F:
		return rewriteValueMIPS64_OpCvt64Fto32F_0(v)
	case OpCvt64Fto64:
		return rewriteValueMIPS64_OpCvt64Fto64_0(v)
	case OpCvt64to32F:
		return rewriteValueMIPS64_OpCvt64to32F_0(v)
	case OpCvt64to64F:
		return rewriteValueMIPS64_OpCvt64to64F_0(v)
	case OpDiv16:
		return rewriteValueMIPS64_OpDiv16_0(v)
	case OpDiv16u:
		return rewriteValueMIPS64_OpDiv16u_0(v)
	case OpDiv32:
		return rewriteValueMIPS64_OpDiv32_0(v)
	case OpDiv32F:
		return rewriteValueMIPS64_OpDiv32F_0(v)
	case OpDiv32u:
		return rewriteValueMIPS64_OpDiv32u_0(v)
	case OpDiv64:
		return rewriteValueMIPS64_OpDiv64_0(v)
	case OpDiv64F:
		return rewriteValueMIPS64_OpDiv64F_0(v)
	case OpDiv64u:
		return rewriteValueMIPS64_OpDiv64u_0(v)
	case OpDiv8:
		return rewriteValueMIPS64_OpDiv8_0(v)
	case OpDiv8u:
		return rewriteValueMIPS64_OpDiv8u_0(v)
	case OpEq16:
		return rewriteValueMIPS64_OpEq16_0(v)
	case OpEq32:
		return rewriteValueMIPS64_OpEq32_0(v)
	case OpEq32F:
		return rewriteValueMIPS64_OpEq32F_0(v)
	case OpEq64:
		return rewriteValueMIPS64_OpEq64_0(v)
	case OpEq64F:
		return rewriteValueMIPS64_OpEq64F_0(v)
	case OpEq8:
		return rewriteValueMIPS64_OpEq8_0(v)
	case OpEqB:
		return rewriteValueMIPS64_OpEqB_0(v)
	case OpEqPtr:
		return rewriteValueMIPS64_OpEqPtr_0(v)
	case OpGeq16:
		return rewriteValueMIPS64_OpGeq16_0(v)
	case OpGeq16U:
		return rewriteValueMIPS64_OpGeq16U_0(v)
	case OpGeq32:
		return rewriteValueMIPS64_OpGeq32_0(v)
	case OpGeq32F:
		return rewriteValueMIPS64_OpGeq32F_0(v)
	case OpGeq32U:
		return rewriteValueMIPS64_OpGeq32U_0(v)
	case OpGeq64:
		return rewriteValueMIPS64_OpGeq64_0(v)
	case OpGeq64F:
		return rewriteValueMIPS64_OpGeq64F_0(v)
	case OpGeq64U:
		return rewriteValueMIPS64_OpGeq64U_0(v)
	case OpGeq8:
		return rewriteValueMIPS64_OpGeq8_0(v)
	case OpGeq8U:
		return rewriteValueMIPS64_OpGeq8U_0(v)
	case OpGetCallerSP:
		return rewriteValueMIPS64_OpGetCallerSP_0(v)
	case OpGetClosurePtr:
		return rewriteValueMIPS64_OpGetClosurePtr_0(v)
	case OpGreater16:
		return rewriteValueMIPS64_OpGreater16_0(v)
	case OpGreater16U:
		return rewriteValueMIPS64_OpGreater16U_0(v)
	case OpGreater32:
		return rewriteValueMIPS64_OpGreater32_0(v)
	case OpGreater32F:
		return rewriteValueMIPS64_OpGreater32F_0(v)
	case OpGreater32U:
		return rewriteValueMIPS64_OpGreater32U_0(v)
	case OpGreater64:
		return rewriteValueMIPS64_OpGreater64_0(v)
	case OpGreater64F:
		return rewriteValueMIPS64_OpGreater64F_0(v)
	case OpGreater64U:
		return rewriteValueMIPS64_OpGreater64U_0(v)
	case OpGreater8:
		return rewriteValueMIPS64_OpGreater8_0(v)
	case OpGreater8U:
		return rewriteValueMIPS64_OpGreater8U_0(v)
	case OpHmul32:
		return rewriteValueMIPS64_OpHmul32_0(v)
	case OpHmul32u:
		return rewriteValueMIPS64_OpHmul32u_0(v)
	case OpHmul64:
		return rewriteValueMIPS64_OpHmul64_0(v)
	case OpHmul64u:
		return rewriteValueMIPS64_OpHmul64u_0(v)
	case OpInterCall:
		return rewriteValueMIPS64_OpInterCall_0(v)
	case OpIsInBounds:
		return rewriteValueMIPS64_OpIsInBounds_0(v)
	case OpIsNonNil:
		return rewriteValueMIPS64_OpIsNonNil_0(v)
	case OpIsSliceInBounds:
		return rewriteValueMIPS64_OpIsSliceInBounds_0(v)
	case OpLeq16:
		return rewriteValueMIPS64_OpLeq16_0(v)
	case OpLeq16U:
		return rewriteValueMIPS64_OpLeq16U_0(v)
	case OpLeq32:
		return rewriteValueMIPS64_OpLeq32_0(v)
	case OpLeq32F:
		return rewriteValueMIPS64_OpLeq32F_0(v)
	case OpLeq32U:
		return rewriteValueMIPS64_OpLeq32U_0(v)
	case OpLeq64:
		return rewriteValueMIPS64_OpLeq64_0(v)
	case OpLeq64F:
		return rewriteValueMIPS64_OpLeq64F_0(v)
	case OpLeq64U:
		return rewriteValueMIPS64_OpLeq64U_0(v)
	case OpLeq8:
		return rewriteValueMIPS64_OpLeq8_0(v)
	case OpLeq8U:
		return rewriteValueMIPS64_OpLeq8U_0(v)
	case OpLess16:
		return rewriteValueMIPS64_OpLess16_0(v)
	case OpLess16U:
		return rewriteValueMIPS64_OpLess16U_0(v)
	case OpLess32:
		return rewriteValueMIPS64_OpLess32_0(v)
	case OpLess32F:
		return rewriteValueMIPS64_OpLess32F_0(v)
	case OpLess32U:
		return rewriteValueMIPS64_OpLess32U_0(v)
	case OpLess64:
		return rewriteValueMIPS64_OpLess64_0(v)
	case OpLess64F:
		return rewriteValueMIPS64_OpLess64F_0(v)
	case OpLess64U:
		return rewriteValueMIPS64_OpLess64U_0(v)
	case OpLess8:
		return rewriteValueMIPS64_OpLess8_0(v)
	case OpLess8U:
		return rewriteValueMIPS64_OpLess8U_0(v)
	case OpLoad:
		return rewriteValueMIPS64_OpLoad_0(v)
	case OpLsh16x16:
		return rewriteValueMIPS64_OpLsh16x16_0(v)
	case OpLsh16x32:
		return rewriteValueMIPS64_OpLsh16x32_0(v)
	case OpLsh16x64:
		return rewriteValueMIPS64_OpLsh16x64_0(v)
	case OpLsh16x8:
		return rewriteValueMIPS64_OpLsh16x8_0(v)
	case OpLsh32x16:
		return rewriteValueMIPS64_OpLsh32x16_0(v)
	case OpLsh32x32:
		return rewriteValueMIPS64_OpLsh32x32_0(v)
	case OpLsh32x64:
		return rewriteValueMIPS64_OpLsh32x64_0(v)
	case OpLsh32x8:
		return rewriteValueMIPS64_OpLsh32x8_0(v)
	case OpLsh64x16:
		return rewriteValueMIPS64_OpLsh64x16_0(v)
	case OpLsh64x32:
		return rewriteValueMIPS64_OpLsh64x32_0(v)
	case OpLsh64x64:
		return rewriteValueMIPS64_OpLsh64x64_0(v)
	case OpLsh64x8:
		return rewriteValueMIPS64_OpLsh64x8_0(v)
	case OpLsh8x16:
		return rewriteValueMIPS64_OpLsh8x16_0(v)
	case OpLsh8x32:
		return rewriteValueMIPS64_OpLsh8x32_0(v)
	case OpLsh8x64:
		return rewriteValueMIPS64_OpLsh8x64_0(v)
	case OpLsh8x8:
		return rewriteValueMIPS64_OpLsh8x8_0(v)
	case OpMIPS64ADDV:
		return rewriteValueMIPS64_OpMIPS64ADDV_0(v)
	case OpMIPS64ADDVconst:
		return rewriteValueMIPS64_OpMIPS64ADDVconst_0(v)
	case OpMIPS64AND:
		return rewriteValueMIPS64_OpMIPS64AND_0(v)
	case OpMIPS64ANDconst:
		return rewriteValueMIPS64_OpMIPS64ANDconst_0(v)
	case OpMIPS64LoweredAtomicAdd32:
		return rewriteValueMIPS64_OpMIPS64LoweredAtomicAdd32_0(v)
	case OpMIPS64LoweredAtomicAdd64:
		return rewriteValueMIPS64_OpMIPS64LoweredAtomicAdd64_0(v)
	case OpMIPS64LoweredAtomicStore32:
		return rewriteValueMIPS64_OpMIPS64LoweredAtomicStore32_0(v)
	case OpMIPS64LoweredAtomicStore64:
		return rewriteValueMIPS64_OpMIPS64LoweredAtomicStore64_0(v)
	case OpMIPS64MOVBUload:
		return rewriteValueMIPS64_OpMIPS64MOVBUload_0(v)
	case OpMIPS64MOVBUreg:
		return rewriteValueMIPS64_OpMIPS64MOVBUreg_0(v)
	case OpMIPS64MOVBload:
		return rewriteValueMIPS64_OpMIPS64MOVBload_0(v)
	case OpMIPS64MOVBreg:
		return rewriteValueMIPS64_OpMIPS64MOVBreg_0(v)
	case OpMIPS64MOVBstore:
		return rewriteValueMIPS64_OpMIPS64MOVBstore_0(v)
	case OpMIPS64MOVBstorezero:
		return rewriteValueMIPS64_OpMIPS64MOVBstorezero_0(v)
	case OpMIPS64MOVDload:
		return rewriteValueMIPS64_OpMIPS64MOVDload_0(v)
	case OpMIPS64MOVDstore:
		return rewriteValueMIPS64_OpMIPS64MOVDstore_0(v)
	case OpMIPS64MOVFload:
		return rewriteValueMIPS64_OpMIPS64MOVFload_0(v)
	case OpMIPS64MOVFstore:
		return rewriteValueMIPS64_OpMIPS64MOVFstore_0(v)
	case OpMIPS64MOVHUload:
		return rewriteValueMIPS64_OpMIPS64MOVHUload_0(v)
	case OpMIPS64MOVHUreg:
		return rewriteValueMIPS64_OpMIPS64MOVHUreg_0(v)
	case OpMIPS64MOVHload:
		return rewriteValueMIPS64_OpMIPS64MOVHload_0(v)
	case OpMIPS64MOVHreg:
		return rewriteValueMIPS64_OpMIPS64MOVHreg_0(v)
	case OpMIPS64MOVHstore:
		return rewriteValueMIPS64_OpMIPS64MOVHstore_0(v)
	case OpMIPS64MOVHstorezero:
		return rewriteValueMIPS64_OpMIPS64MOVHstorezero_0(v)
	case OpMIPS64MOVVload:
		return rewriteValueMIPS64_OpMIPS64MOVVload_0(v)
	case OpMIPS64MOVVreg:
		return rewriteValueMIPS64_OpMIPS64MOVVreg_0(v)
	case OpMIPS64MOVVstore:
		return rewriteValueMIPS64_OpMIPS64MOVVstore_0(v)
	case OpMIPS64MOVVstorezero:
		return rewriteValueMIPS64_OpMIPS64MOVVstorezero_0(v)
	case OpMIPS64MOVWUload:
		return rewriteValueMIPS64_OpMIPS64MOVWUload_0(v)
	case OpMIPS64MOVWUreg:
		return rewriteValueMIPS64_OpMIPS64MOVWUreg_0(v)
	case OpMIPS64MOVWload:
		return rewriteValueMIPS64_OpMIPS64MOVWload_0(v)
	case OpMIPS64MOVWreg:
		// Rules for this op overflow one helper; try each ten-rule chunk in order.
		return rewriteValueMIPS64_OpMIPS64MOVWreg_0(v) || rewriteValueMIPS64_OpMIPS64MOVWreg_10(v)
	case OpMIPS64MOVWstore:
		return rewriteValueMIPS64_OpMIPS64MOVWstore_0(v)
	case OpMIPS64MOVWstorezero:
		return rewriteValueMIPS64_OpMIPS64MOVWstorezero_0(v)
	case OpMIPS64NEGV:
		return rewriteValueMIPS64_OpMIPS64NEGV_0(v)
	case OpMIPS64NOR:
		return rewriteValueMIPS64_OpMIPS64NOR_0(v)
	case OpMIPS64NORconst:
		return rewriteValueMIPS64_OpMIPS64NORconst_0(v)
	case OpMIPS64OR:
		return rewriteValueMIPS64_OpMIPS64OR_0(v)
	case OpMIPS64ORconst:
		return rewriteValueMIPS64_OpMIPS64ORconst_0(v)
	case OpMIPS64SGT:
		return rewriteValueMIPS64_OpMIPS64SGT_0(v)
	case OpMIPS64SGTU:
		return rewriteValueMIPS64_OpMIPS64SGTU_0(v)
	case OpMIPS64SGTUconst:
		return rewriteValueMIPS64_OpMIPS64SGTUconst_0(v)
	case OpMIPS64SGTconst:
		return rewriteValueMIPS64_OpMIPS64SGTconst_0(v) || rewriteValueMIPS64_OpMIPS64SGTconst_10(v)
	case OpMIPS64SLLV:
		return rewriteValueMIPS64_OpMIPS64SLLV_0(v)
	case OpMIPS64SLLVconst:
		return rewriteValueMIPS64_OpMIPS64SLLVconst_0(v)
	case OpMIPS64SRAV:
		return rewriteValueMIPS64_OpMIPS64SRAV_0(v)
	case OpMIPS64SRAVconst:
		return rewriteValueMIPS64_OpMIPS64SRAVconst_0(v)
	case OpMIPS64SRLV:
		return rewriteValueMIPS64_OpMIPS64SRLV_0(v)
	case OpMIPS64SRLVconst:
		return rewriteValueMIPS64_OpMIPS64SRLVconst_0(v)
	case OpMIPS64SUBV:
		return rewriteValueMIPS64_OpMIPS64SUBV_0(v)
	case OpMIPS64SUBVconst:
		return rewriteValueMIPS64_OpMIPS64SUBVconst_0(v)
	case OpMIPS64XOR:
		return rewriteValueMIPS64_OpMIPS64XOR_0(v)
	case OpMIPS64XORconst:
		return rewriteValueMIPS64_OpMIPS64XORconst_0(v)
	case OpMod16:
		return rewriteValueMIPS64_OpMod16_0(v)
	case OpMod16u:
		return rewriteValueMIPS64_OpMod16u_0(v)
	case OpMod32:
		return rewriteValueMIPS64_OpMod32_0(v)
	case OpMod32u:
		return rewriteValueMIPS64_OpMod32u_0(v)
	case OpMod64:
		return rewriteValueMIPS64_OpMod64_0(v)
	case OpMod64u:
		return rewriteValueMIPS64_OpMod64u_0(v)
	case OpMod8:
		return rewriteValueMIPS64_OpMod8_0(v)
	case OpMod8u:
		return rewriteValueMIPS64_OpMod8u_0(v)
	case OpMove:
		return rewriteValueMIPS64_OpMove_0(v) || rewriteValueMIPS64_OpMove_10(v)
	case OpMul16:
		return rewriteValueMIPS64_OpMul16_0(v)
	case OpMul32:
		return rewriteValueMIPS64_OpMul32_0(v)
	case OpMul32F:
		return rewriteValueMIPS64_OpMul32F_0(v)
	case OpMul64:
		return rewriteValueMIPS64_OpMul64_0(v)
	case OpMul64F:
		return rewriteValueMIPS64_OpMul64F_0(v)
	case OpMul8:
		return rewriteValueMIPS64_OpMul8_0(v)
	case OpNeg16:
		return rewriteValueMIPS64_OpNeg16_0(v)
	case OpNeg32:
		return rewriteValueMIPS64_OpNeg32_0(v)
	case OpNeg32F:
		return rewriteValueMIPS64_OpNeg32F_0(v)
	case OpNeg64:
		return rewriteValueMIPS64_OpNeg64_0(v)
	case OpNeg64F:
		return rewriteValueMIPS64_OpNeg64F_0(v)
	case OpNeg8:
		return rewriteValueMIPS64_OpNeg8_0(v)
	case OpNeq16:
		return rewriteValueMIPS64_OpNeq16_0(v)
	case OpNeq32:
		return rewriteValueMIPS64_OpNeq32_0(v)
	case OpNeq32F:
		return rewriteValueMIPS64_OpNeq32F_0(v)
	case OpNeq64:
		return rewriteValueMIPS64_OpNeq64_0(v)
	case OpNeq64F:
		return rewriteValueMIPS64_OpNeq64F_0(v)
	case OpNeq8:
		return rewriteValueMIPS64_OpNeq8_0(v)
	case OpNeqB:
		return rewriteValueMIPS64_OpNeqB_0(v)
	case OpNeqPtr:
		return rewriteValueMIPS64_OpNeqPtr_0(v)
	case OpNilCheck:
		return rewriteValueMIPS64_OpNilCheck_0(v)
	case OpNot:
		return rewriteValueMIPS64_OpNot_0(v)
	case OpOffPtr:
		return rewriteValueMIPS64_OpOffPtr_0(v)
	case OpOr16:
		return rewriteValueMIPS64_OpOr16_0(v)
	case OpOr32:
		return rewriteValueMIPS64_OpOr32_0(v)
	case OpOr64:
		return rewriteValueMIPS64_OpOr64_0(v)
	case OpOr8:
		return rewriteValueMIPS64_OpOr8_0(v)
	case OpOrB:
		return rewriteValueMIPS64_OpOrB_0(v)
	case OpRound32F:
		return rewriteValueMIPS64_OpRound32F_0(v)
	case OpRound64F:
		return rewriteValueMIPS64_OpRound64F_0(v)
	case OpRsh16Ux16:
		return rewriteValueMIPS64_OpRsh16Ux16_0(v)
	case OpRsh16Ux32:
		return rewriteValueMIPS64_OpRsh16Ux32_0(v)
	case OpRsh16Ux64:
		return rewriteValueMIPS64_OpRsh16Ux64_0(v)
	case OpRsh16Ux8:
		return rewriteValueMIPS64_OpRsh16Ux8_0(v)
	case OpRsh16x16:
		return rewriteValueMIPS64_OpRsh16x16_0(v)
	case OpRsh16x32:
		return rewriteValueMIPS64_OpRsh16x32_0(v)
	case OpRsh16x64:
		return rewriteValueMIPS64_OpRsh16x64_0(v)
	case OpRsh16x8:
		return rewriteValueMIPS64_OpRsh16x8_0(v)
	case OpRsh32Ux16:
		return rewriteValueMIPS64_OpRsh32Ux16_0(v)
	case OpRsh32Ux32:
		return rewriteValueMIPS64_OpRsh32Ux32_0(v)
	case OpRsh32Ux64:
		return rewriteValueMIPS64_OpRsh32Ux64_0(v)
	case OpRsh32Ux8:
		return rewriteValueMIPS64_OpRsh32Ux8_0(v)
	case OpRsh32x16:
		return rewriteValueMIPS64_OpRsh32x16_0(v)
	case OpRsh32x32:
		return rewriteValueMIPS64_OpRsh32x32_0(v)
	case OpRsh32x64:
		return rewriteValueMIPS64_OpRsh32x64_0(v)
	case OpRsh32x8:
		return rewriteValueMIPS64_OpRsh32x8_0(v)
	case OpRsh64Ux16:
		return rewriteValueMIPS64_OpRsh64Ux16_0(v)
	case OpRsh64Ux32:
		return rewriteValueMIPS64_OpRsh64Ux32_0(v)
	case OpRsh64Ux64:
		return rewriteValueMIPS64_OpRsh64Ux64_0(v)
	case OpRsh64Ux8:
		return rewriteValueMIPS64_OpRsh64Ux8_0(v)
	case OpRsh64x16:
		return rewriteValueMIPS64_OpRsh64x16_0(v)
	case OpRsh64x32:
		return rewriteValueMIPS64_OpRsh64x32_0(v)
	case OpRsh64x64:
		return rewriteValueMIPS64_OpRsh64x64_0(v)
	case OpRsh64x8:
		return rewriteValueMIPS64_OpRsh64x8_0(v)
	case OpRsh8Ux16:
		return rewriteValueMIPS64_OpRsh8Ux16_0(v)
	case OpRsh8Ux32:
		return rewriteValueMIPS64_OpRsh8Ux32_0(v)
	case OpRsh8Ux64:
		return rewriteValueMIPS64_OpRsh8Ux64_0(v)
	case OpRsh8Ux8:
		return rewriteValueMIPS64_OpRsh8Ux8_0(v)
	case OpRsh8x16:
		return rewriteValueMIPS64_OpRsh8x16_0(v)
	case OpRsh8x32:
		return rewriteValueMIPS64_OpRsh8x32_0(v)
	case OpRsh8x64:
		return rewriteValueMIPS64_OpRsh8x64_0(v)
	case OpRsh8x8:
		return rewriteValueMIPS64_OpRsh8x8_0(v)
	case OpSelect0:
		return rewriteValueMIPS64_OpSelect0_0(v)
	case OpSelect1:
		return rewriteValueMIPS64_OpSelect1_0(v) || rewriteValueMIPS64_OpSelect1_10(v) || rewriteValueMIPS64_OpSelect1_20(v)
	case OpSignExt16to32:
		return rewriteValueMIPS64_OpSignExt16to32_0(v)
	case OpSignExt16to64:
		return rewriteValueMIPS64_OpSignExt16to64_0(v)
	case OpSignExt32to64:
		return rewriteValueMIPS64_OpSignExt32to64_0(v)
	case OpSignExt8to16:
		return rewriteValueMIPS64_OpSignExt8to16_0(v)
	case OpSignExt8to32:
		return rewriteValueMIPS64_OpSignExt8to32_0(v)
	case OpSignExt8to64:
		return rewriteValueMIPS64_OpSignExt8to64_0(v)
	case OpSlicemask:
		return rewriteValueMIPS64_OpSlicemask_0(v)
	case OpSqrt:
		return rewriteValueMIPS64_OpSqrt_0(v)
	case OpStaticCall:
		return rewriteValueMIPS64_OpStaticCall_0(v)
	case OpStore:
		return rewriteValueMIPS64_OpStore_0(v)
	case OpSub16:
		return rewriteValueMIPS64_OpSub16_0(v)
	case OpSub32:
		return rewriteValueMIPS64_OpSub32_0(v)
	case OpSub32F:
		return rewriteValueMIPS64_OpSub32F_0(v)
	case OpSub64:
		return rewriteValueMIPS64_OpSub64_0(v)
	case OpSub64F:
		return rewriteValueMIPS64_OpSub64F_0(v)
	case OpSub8:
		return rewriteValueMIPS64_OpSub8_0(v)
	case OpSubPtr:
		return rewriteValueMIPS64_OpSubPtr_0(v)
	case OpTrunc16to8:
		return rewriteValueMIPS64_OpTrunc16to8_0(v)
	case OpTrunc32to16:
		return rewriteValueMIPS64_OpTrunc32to16_0(v)
	case OpTrunc32to8:
		return rewriteValueMIPS64_OpTrunc32to8_0(v)
	case OpTrunc64to16:
		return rewriteValueMIPS64_OpTrunc64to16_0(v)
	case OpTrunc64to32:
		return rewriteValueMIPS64_OpTrunc64to32_0(v)
	case OpTrunc64to8:
		return rewriteValueMIPS64_OpTrunc64to8_0(v)
	case OpWB:
		return rewriteValueMIPS64_OpWB_0(v)
	case OpXor16:
		return rewriteValueMIPS64_OpXor16_0(v)
	case OpXor32:
		return rewriteValueMIPS64_OpXor32_0(v)
	case OpXor64:
		return rewriteValueMIPS64_OpXor64_0(v)
	case OpXor8:
		return rewriteValueMIPS64_OpXor8_0(v)
	case OpZero:
		return rewriteValueMIPS64_OpZero_0(v) || rewriteValueMIPS64_OpZero_10(v)
	case OpZeroExt16to32:
		return rewriteValueMIPS64_OpZeroExt16to32_0(v)
	case OpZeroExt16to64:
		return rewriteValueMIPS64_OpZeroExt16to64_0(v)
	case OpZeroExt32to64:
		return rewriteValueMIPS64_OpZeroExt32to64_0(v)
	case OpZeroExt8to16:
		return rewriteValueMIPS64_OpZeroExt8to16_0(v)
	case OpZeroExt8to32:
		return rewriteValueMIPS64_OpZeroExt8to32_0(v)
	case OpZeroExt8to64:
		return rewriteValueMIPS64_OpZeroExt8to64_0(v)
	}
	// Op has no MIPS64 rewrite rules; leave v untouched.
	return false
}
   613  func rewriteValueMIPS64_OpAdd16_0(v *Value) bool {
   614  	// match: (Add16 x y)
   615  	// cond:
   616  	// result: (ADDV x y)
   617  	for {
   618  		_ = v.Args[1]
   619  		x := v.Args[0]
   620  		y := v.Args[1]
   621  		v.reset(OpMIPS64ADDV)
   622  		v.AddArg(x)
   623  		v.AddArg(y)
   624  		return true
   625  	}
   626  }
   627  func rewriteValueMIPS64_OpAdd32_0(v *Value) bool {
   628  	// match: (Add32 x y)
   629  	// cond:
   630  	// result: (ADDV x y)
   631  	for {
   632  		_ = v.Args[1]
   633  		x := v.Args[0]
   634  		y := v.Args[1]
   635  		v.reset(OpMIPS64ADDV)
   636  		v.AddArg(x)
   637  		v.AddArg(y)
   638  		return true
   639  	}
   640  }
   641  func rewriteValueMIPS64_OpAdd32F_0(v *Value) bool {
   642  	// match: (Add32F x y)
   643  	// cond:
   644  	// result: (ADDF x y)
   645  	for {
   646  		_ = v.Args[1]
   647  		x := v.Args[0]
   648  		y := v.Args[1]
   649  		v.reset(OpMIPS64ADDF)
   650  		v.AddArg(x)
   651  		v.AddArg(y)
   652  		return true
   653  	}
   654  }
   655  func rewriteValueMIPS64_OpAdd64_0(v *Value) bool {
   656  	// match: (Add64 x y)
   657  	// cond:
   658  	// result: (ADDV x y)
   659  	for {
   660  		_ = v.Args[1]
   661  		x := v.Args[0]
   662  		y := v.Args[1]
   663  		v.reset(OpMIPS64ADDV)
   664  		v.AddArg(x)
   665  		v.AddArg(y)
   666  		return true
   667  	}
   668  }
   669  func rewriteValueMIPS64_OpAdd64F_0(v *Value) bool {
   670  	// match: (Add64F x y)
   671  	// cond:
   672  	// result: (ADDD x y)
   673  	for {
   674  		_ = v.Args[1]
   675  		x := v.Args[0]
   676  		y := v.Args[1]
   677  		v.reset(OpMIPS64ADDD)
   678  		v.AddArg(x)
   679  		v.AddArg(y)
   680  		return true
   681  	}
   682  }
   683  func rewriteValueMIPS64_OpAdd8_0(v *Value) bool {
   684  	// match: (Add8 x y)
   685  	// cond:
   686  	// result: (ADDV x y)
   687  	for {
   688  		_ = v.Args[1]
   689  		x := v.Args[0]
   690  		y := v.Args[1]
   691  		v.reset(OpMIPS64ADDV)
   692  		v.AddArg(x)
   693  		v.AddArg(y)
   694  		return true
   695  	}
   696  }
   697  func rewriteValueMIPS64_OpAddPtr_0(v *Value) bool {
   698  	// match: (AddPtr x y)
   699  	// cond:
   700  	// result: (ADDV x y)
   701  	for {
   702  		_ = v.Args[1]
   703  		x := v.Args[0]
   704  		y := v.Args[1]
   705  		v.reset(OpMIPS64ADDV)
   706  		v.AddArg(x)
   707  		v.AddArg(y)
   708  		return true
   709  	}
   710  }
   711  func rewriteValueMIPS64_OpAddr_0(v *Value) bool {
   712  	// match: (Addr {sym} base)
   713  	// cond:
   714  	// result: (MOVVaddr {sym} base)
   715  	for {
   716  		sym := v.Aux
   717  		base := v.Args[0]
   718  		v.reset(OpMIPS64MOVVaddr)
   719  		v.Aux = sym
   720  		v.AddArg(base)
   721  		return true
   722  	}
   723  }
   724  func rewriteValueMIPS64_OpAnd16_0(v *Value) bool {
   725  	// match: (And16 x y)
   726  	// cond:
   727  	// result: (AND x y)
   728  	for {
   729  		_ = v.Args[1]
   730  		x := v.Args[0]
   731  		y := v.Args[1]
   732  		v.reset(OpMIPS64AND)
   733  		v.AddArg(x)
   734  		v.AddArg(y)
   735  		return true
   736  	}
   737  }
   738  func rewriteValueMIPS64_OpAnd32_0(v *Value) bool {
   739  	// match: (And32 x y)
   740  	// cond:
   741  	// result: (AND x y)
   742  	for {
   743  		_ = v.Args[1]
   744  		x := v.Args[0]
   745  		y := v.Args[1]
   746  		v.reset(OpMIPS64AND)
   747  		v.AddArg(x)
   748  		v.AddArg(y)
   749  		return true
   750  	}
   751  }
   752  func rewriteValueMIPS64_OpAnd64_0(v *Value) bool {
   753  	// match: (And64 x y)
   754  	// cond:
   755  	// result: (AND x y)
   756  	for {
   757  		_ = v.Args[1]
   758  		x := v.Args[0]
   759  		y := v.Args[1]
   760  		v.reset(OpMIPS64AND)
   761  		v.AddArg(x)
   762  		v.AddArg(y)
   763  		return true
   764  	}
   765  }
   766  func rewriteValueMIPS64_OpAnd8_0(v *Value) bool {
   767  	// match: (And8 x y)
   768  	// cond:
   769  	// result: (AND x y)
   770  	for {
   771  		_ = v.Args[1]
   772  		x := v.Args[0]
   773  		y := v.Args[1]
   774  		v.reset(OpMIPS64AND)
   775  		v.AddArg(x)
   776  		v.AddArg(y)
   777  		return true
   778  	}
   779  }
   780  func rewriteValueMIPS64_OpAndB_0(v *Value) bool {
   781  	// match: (AndB x y)
   782  	// cond:
   783  	// result: (AND x y)
   784  	for {
   785  		_ = v.Args[1]
   786  		x := v.Args[0]
   787  		y := v.Args[1]
   788  		v.reset(OpMIPS64AND)
   789  		v.AddArg(x)
   790  		v.AddArg(y)
   791  		return true
   792  	}
   793  }
   794  func rewriteValueMIPS64_OpAtomicAdd32_0(v *Value) bool {
   795  	// match: (AtomicAdd32 ptr val mem)
   796  	// cond:
   797  	// result: (LoweredAtomicAdd32 ptr val mem)
   798  	for {
   799  		_ = v.Args[2]
   800  		ptr := v.Args[0]
   801  		val := v.Args[1]
   802  		mem := v.Args[2]
   803  		v.reset(OpMIPS64LoweredAtomicAdd32)
   804  		v.AddArg(ptr)
   805  		v.AddArg(val)
   806  		v.AddArg(mem)
   807  		return true
   808  	}
   809  }
   810  func rewriteValueMIPS64_OpAtomicAdd64_0(v *Value) bool {
   811  	// match: (AtomicAdd64 ptr val mem)
   812  	// cond:
   813  	// result: (LoweredAtomicAdd64 ptr val mem)
   814  	for {
   815  		_ = v.Args[2]
   816  		ptr := v.Args[0]
   817  		val := v.Args[1]
   818  		mem := v.Args[2]
   819  		v.reset(OpMIPS64LoweredAtomicAdd64)
   820  		v.AddArg(ptr)
   821  		v.AddArg(val)
   822  		v.AddArg(mem)
   823  		return true
   824  	}
   825  }
   826  func rewriteValueMIPS64_OpAtomicCompareAndSwap32_0(v *Value) bool {
   827  	// match: (AtomicCompareAndSwap32 ptr old new_ mem)
   828  	// cond:
   829  	// result: (LoweredAtomicCas32 ptr old new_ mem)
   830  	for {
   831  		_ = v.Args[3]
   832  		ptr := v.Args[0]
   833  		old := v.Args[1]
   834  		new_ := v.Args[2]
   835  		mem := v.Args[3]
   836  		v.reset(OpMIPS64LoweredAtomicCas32)
   837  		v.AddArg(ptr)
   838  		v.AddArg(old)
   839  		v.AddArg(new_)
   840  		v.AddArg(mem)
   841  		return true
   842  	}
   843  }
   844  func rewriteValueMIPS64_OpAtomicCompareAndSwap64_0(v *Value) bool {
   845  	// match: (AtomicCompareAndSwap64 ptr old new_ mem)
   846  	// cond:
   847  	// result: (LoweredAtomicCas64 ptr old new_ mem)
   848  	for {
   849  		_ = v.Args[3]
   850  		ptr := v.Args[0]
   851  		old := v.Args[1]
   852  		new_ := v.Args[2]
   853  		mem := v.Args[3]
   854  		v.reset(OpMIPS64LoweredAtomicCas64)
   855  		v.AddArg(ptr)
   856  		v.AddArg(old)
   857  		v.AddArg(new_)
   858  		v.AddArg(mem)
   859  		return true
   860  	}
   861  }
   862  func rewriteValueMIPS64_OpAtomicExchange32_0(v *Value) bool {
   863  	// match: (AtomicExchange32 ptr val mem)
   864  	// cond:
   865  	// result: (LoweredAtomicExchange32 ptr val mem)
   866  	for {
   867  		_ = v.Args[2]
   868  		ptr := v.Args[0]
   869  		val := v.Args[1]
   870  		mem := v.Args[2]
   871  		v.reset(OpMIPS64LoweredAtomicExchange32)
   872  		v.AddArg(ptr)
   873  		v.AddArg(val)
   874  		v.AddArg(mem)
   875  		return true
   876  	}
   877  }
   878  func rewriteValueMIPS64_OpAtomicExchange64_0(v *Value) bool {
   879  	// match: (AtomicExchange64 ptr val mem)
   880  	// cond:
   881  	// result: (LoweredAtomicExchange64 ptr val mem)
   882  	for {
   883  		_ = v.Args[2]
   884  		ptr := v.Args[0]
   885  		val := v.Args[1]
   886  		mem := v.Args[2]
   887  		v.reset(OpMIPS64LoweredAtomicExchange64)
   888  		v.AddArg(ptr)
   889  		v.AddArg(val)
   890  		v.AddArg(mem)
   891  		return true
   892  	}
   893  }
// rewriteValueMIPS64_OpAtomicLoad32_0 lowers AtomicLoad32 to LoweredAtomicLoad32.
// Always fires.
func rewriteValueMIPS64_OpAtomicLoad32_0(v *Value) bool {
	// match: (AtomicLoad32 ptr mem)
	// cond:
	// result: (LoweredAtomicLoad32 ptr mem)
	for {
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		v.reset(OpMIPS64LoweredAtomicLoad32)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
}
// rewriteValueMIPS64_OpAtomicLoad64_0 lowers AtomicLoad64 to LoweredAtomicLoad64.
// Always fires.
func rewriteValueMIPS64_OpAtomicLoad64_0(v *Value) bool {
	// match: (AtomicLoad64 ptr mem)
	// cond:
	// result: (LoweredAtomicLoad64 ptr mem)
	for {
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		v.reset(OpMIPS64LoweredAtomicLoad64)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
}
// rewriteValueMIPS64_OpAtomicLoadPtr_0 lowers AtomicLoadPtr to LoweredAtomicLoad64;
// pointers are 64 bits wide on mips64, so the 64-bit atomic load is reused. Always fires.
func rewriteValueMIPS64_OpAtomicLoadPtr_0(v *Value) bool {
	// match: (AtomicLoadPtr ptr mem)
	// cond:
	// result: (LoweredAtomicLoad64 ptr mem)
	for {
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		v.reset(OpMIPS64LoweredAtomicLoad64)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
}
// rewriteValueMIPS64_OpAtomicStore32_0 lowers AtomicStore32 to LoweredAtomicStore32.
// Always fires.
func rewriteValueMIPS64_OpAtomicStore32_0(v *Value) bool {
	// match: (AtomicStore32 ptr val mem)
	// cond:
	// result: (LoweredAtomicStore32 ptr val mem)
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		v.reset(OpMIPS64LoweredAtomicStore32)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
}
// rewriteValueMIPS64_OpAtomicStore64_0 lowers AtomicStore64 to LoweredAtomicStore64.
// Always fires.
func rewriteValueMIPS64_OpAtomicStore64_0(v *Value) bool {
	// match: (AtomicStore64 ptr val mem)
	// cond:
	// result: (LoweredAtomicStore64 ptr val mem)
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		v.reset(OpMIPS64LoweredAtomicStore64)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
}
// rewriteValueMIPS64_OpAtomicStorePtrNoWB_0 lowers AtomicStorePtrNoWB to
// LoweredAtomicStore64; a pointer store without write barrier is just a
// 64-bit atomic store on mips64. Always fires.
func rewriteValueMIPS64_OpAtomicStorePtrNoWB_0(v *Value) bool {
	// match: (AtomicStorePtrNoWB ptr val mem)
	// cond:
	// result: (LoweredAtomicStore64 ptr val mem)
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		v.reset(OpMIPS64LoweredAtomicStore64)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
}
// rewriteValueMIPS64_OpAvg64u_0 lowers Avg64u <t> x y to ((x - y) >> 1) + y,
// built from SUBV, SRLVconst [1], and ADDV.
// NOTE(review): this form presumably avoids computing x+y, which could overflow
// 64 bits — confirm against the comment in gen/MIPS64.rules.
func rewriteValueMIPS64_OpAvg64u_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Avg64u <t> x y)
	// cond:
	// result: (ADDV (SRLVconst <t> (SUBV <t> x y) [1]) y)
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64ADDV)
		v0 := b.NewValue0(v.Pos, OpMIPS64SRLVconst, t) // logical shift right by 1
		v0.AuxInt = 1
		v1 := b.NewValue0(v.Pos, OpMIPS64SUBV, t)
		v1.AddArg(x)
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpClosureCall_0 lowers ClosureCall to the machine-specific
// CALLclosure, carrying over the argument-size AuxInt and the
// (entry, closure, mem) arguments. Always fires.
func rewriteValueMIPS64_OpClosureCall_0(v *Value) bool {
	// match: (ClosureCall [argwid] entry closure mem)
	// cond:
	// result: (CALLclosure [argwid] entry closure mem)
	for {
		argwid := v.AuxInt
		_ = v.Args[2]
		entry := v.Args[0]
		closure := v.Args[1]
		mem := v.Args[2]
		v.reset(OpMIPS64CALLclosure)
		v.AuxInt = argwid
		v.AddArg(entry)
		v.AddArg(closure)
		v.AddArg(mem)
		return true
	}
}
// rewriteValueMIPS64_OpCom16_0 lowers Com16 (bitwise complement) to
// NOR(0, x), since NOR with zero is ~x. Always fires.
func rewriteValueMIPS64_OpCom16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Com16 x)
	// cond:
	// result: (NOR (MOVVconst [0]) x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64NOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpCom32_0 lowers Com32 (bitwise complement) to
// NOR(0, x), since NOR with zero is ~x. Always fires.
func rewriteValueMIPS64_OpCom32_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Com32 x)
	// cond:
	// result: (NOR (MOVVconst [0]) x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64NOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpCom64_0 lowers Com64 (bitwise complement) to
// NOR(0, x), since NOR with zero is ~x. Always fires.
func rewriteValueMIPS64_OpCom64_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Com64 x)
	// cond:
	// result: (NOR (MOVVconst [0]) x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64NOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpCom8_0 lowers Com8 (bitwise complement) to
// NOR(0, x), since NOR with zero is ~x. Always fires.
func rewriteValueMIPS64_OpCom8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Com8 x)
	// cond:
	// result: (NOR (MOVVconst [0]) x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64NOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpConst16_0 lowers Const16 to MOVVconst with the same AuxInt.
// Always fires.
func rewriteValueMIPS64_OpConst16_0(v *Value) bool {
	// match: (Const16 [val])
	// cond:
	// result: (MOVVconst [val])
	for {
		val := v.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = val
		return true
	}
}
// rewriteValueMIPS64_OpConst32_0 lowers Const32 to MOVVconst with the same AuxInt.
// Always fires.
func rewriteValueMIPS64_OpConst32_0(v *Value) bool {
	// match: (Const32 [val])
	// cond:
	// result: (MOVVconst [val])
	for {
		val := v.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = val
		return true
	}
}
// rewriteValueMIPS64_OpConst32F_0 lowers Const32F to the float MOVFconst,
// copying the AuxInt-encoded constant. Always fires.
func rewriteValueMIPS64_OpConst32F_0(v *Value) bool {
	// match: (Const32F [val])
	// cond:
	// result: (MOVFconst [val])
	for {
		val := v.AuxInt
		v.reset(OpMIPS64MOVFconst)
		v.AuxInt = val
		return true
	}
}
// rewriteValueMIPS64_OpConst64_0 lowers Const64 to MOVVconst with the same AuxInt.
// Always fires.
func rewriteValueMIPS64_OpConst64_0(v *Value) bool {
	// match: (Const64 [val])
	// cond:
	// result: (MOVVconst [val])
	for {
		val := v.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = val
		return true
	}
}
// rewriteValueMIPS64_OpConst64F_0 lowers Const64F to the double MOVDconst,
// copying the AuxInt-encoded constant. Always fires.
func rewriteValueMIPS64_OpConst64F_0(v *Value) bool {
	// match: (Const64F [val])
	// cond:
	// result: (MOVDconst [val])
	for {
		val := v.AuxInt
		v.reset(OpMIPS64MOVDconst)
		v.AuxInt = val
		return true
	}
}
// rewriteValueMIPS64_OpConst8_0 lowers Const8 to MOVVconst with the same AuxInt.
// Always fires.
func rewriteValueMIPS64_OpConst8_0(v *Value) bool {
	// match: (Const8 [val])
	// cond:
	// result: (MOVVconst [val])
	for {
		val := v.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = val
		return true
	}
}
// rewriteValueMIPS64_OpConstBool_0 lowers ConstBool to MOVVconst, reusing the
// AuxInt (0 or 1) as the integer constant. Always fires.
func rewriteValueMIPS64_OpConstBool_0(v *Value) bool {
	// match: (ConstBool [b])
	// cond:
	// result: (MOVVconst [b])
	for {
		b := v.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = b
		return true
	}
}
// rewriteValueMIPS64_OpConstNil_0 lowers ConstNil to MOVVconst [0]
// (nil is the all-zero pointer). Always fires.
func rewriteValueMIPS64_OpConstNil_0(v *Value) bool {
	// match: (ConstNil)
	// cond:
	// result: (MOVVconst [0])
	for {
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
}
// rewriteValueMIPS64_OpConvert_0 lowers Convert to MOVVconvert with the same
// (x, mem) arguments. Always fires.
func rewriteValueMIPS64_OpConvert_0(v *Value) bool {
	// match: (Convert x mem)
	// cond:
	// result: (MOVVconvert x mem)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		mem := v.Args[1]
		v.reset(OpMIPS64MOVVconvert)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
}
// rewriteValueMIPS64_OpCvt32Fto32_0 lowers Cvt32Fto32 to the MIPS64 TRUNCFW
// instruction. Always fires.
func rewriteValueMIPS64_OpCvt32Fto32_0(v *Value) bool {
	// match: (Cvt32Fto32 x)
	// cond:
	// result: (TRUNCFW x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64TRUNCFW)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpCvt32Fto64_0 lowers Cvt32Fto64 to the MIPS64 TRUNCFV
// instruction. Always fires.
func rewriteValueMIPS64_OpCvt32Fto64_0(v *Value) bool {
	// match: (Cvt32Fto64 x)
	// cond:
	// result: (TRUNCFV x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64TRUNCFV)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpCvt32Fto64F_0 lowers Cvt32Fto64F to the MIPS64 MOVFD
// instruction. Always fires.
func rewriteValueMIPS64_OpCvt32Fto64F_0(v *Value) bool {
	// match: (Cvt32Fto64F x)
	// cond:
	// result: (MOVFD x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64MOVFD)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpCvt32to32F_0 lowers Cvt32to32F to the MIPS64 MOVWF
// instruction. Always fires.
func rewriteValueMIPS64_OpCvt32to32F_0(v *Value) bool {
	// match: (Cvt32to32F x)
	// cond:
	// result: (MOVWF x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64MOVWF)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpCvt32to64F_0 lowers Cvt32to64F to the MIPS64 MOVWD
// instruction. Always fires.
func rewriteValueMIPS64_OpCvt32to64F_0(v *Value) bool {
	// match: (Cvt32to64F x)
	// cond:
	// result: (MOVWD x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64MOVWD)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpCvt64Fto32_0 lowers Cvt64Fto32 to the MIPS64 TRUNCDW
// instruction. Always fires.
func rewriteValueMIPS64_OpCvt64Fto32_0(v *Value) bool {
	// match: (Cvt64Fto32 x)
	// cond:
	// result: (TRUNCDW x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64TRUNCDW)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpCvt64Fto32F_0 lowers Cvt64Fto32F to the MIPS64 MOVDF
// instruction. Always fires.
func rewriteValueMIPS64_OpCvt64Fto32F_0(v *Value) bool {
	// match: (Cvt64Fto32F x)
	// cond:
	// result: (MOVDF x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64MOVDF)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpCvt64Fto64_0 lowers Cvt64Fto64 to the MIPS64 TRUNCDV
// instruction. Always fires.
func rewriteValueMIPS64_OpCvt64Fto64_0(v *Value) bool {
	// match: (Cvt64Fto64 x)
	// cond:
	// result: (TRUNCDV x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64TRUNCDV)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpCvt64to32F_0 lowers Cvt64to32F to the MIPS64 MOVVF
// instruction. Always fires.
func rewriteValueMIPS64_OpCvt64to32F_0(v *Value) bool {
	// match: (Cvt64to32F x)
	// cond:
	// result: (MOVVF x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64MOVVF)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpCvt64to64F_0 lowers Cvt64to64F to the MIPS64 MOVVD
// instruction. Always fires.
func rewriteValueMIPS64_OpCvt64to64F_0(v *Value) bool {
	// match: (Cvt64to64F x)
	// cond:
	// result: (MOVVD x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64MOVVD)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpDiv16_0 lowers Div16 to element 1 of the tuple produced
// by the 64-bit signed DIVV, after sign-extending both operands to 64 bits.
// Always fires.
func rewriteValueMIPS64_OpDiv16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Div16 x y)
	// cond:
	// result: (Select1 (DIVV (SignExt16to64 x) (SignExt16to64 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
		v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpDiv16u_0 lowers Div16u to element 1 of the tuple produced
// by the 64-bit unsigned DIVVU, after zero-extending both operands to 64 bits.
// Always fires.
func rewriteValueMIPS64_OpDiv16u_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Div16u x y)
	// cond:
	// result: (Select1 (DIVVU (ZeroExt16to64 x) (ZeroExt16to64 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpDiv32_0 lowers Div32 to element 1 of the tuple produced
// by the 64-bit signed DIVV, after sign-extending both operands to 64 bits.
// Always fires.
func rewriteValueMIPS64_OpDiv32_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Div32 x y)
	// cond:
	// result: (Select1 (DIVV (SignExt32to64 x) (SignExt32to64 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
		v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpDiv32F_0 lowers Div32F to the MIPS64 DIVF instruction.
// Always fires.
func rewriteValueMIPS64_OpDiv32F_0(v *Value) bool {
	// match: (Div32F x y)
	// cond:
	// result: (DIVF x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64DIVF)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpDiv32u_0 lowers Div32u to element 1 of the tuple produced
// by the 64-bit unsigned DIVVU, after zero-extending both operands to 64 bits.
// Always fires.
func rewriteValueMIPS64_OpDiv32u_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Div32u x y)
	// cond:
	// result: (Select1 (DIVVU (ZeroExt32to64 x) (ZeroExt32to64 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpDiv64_0 lowers Div64 to element 1 of the tuple produced
// by the 64-bit signed DIVV; no extension is needed at full width. Always fires.
func rewriteValueMIPS64_OpDiv64_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Div64 x y)
	// cond:
	// result: (Select1 (DIVV x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpDiv64F_0 lowers Div64F to the MIPS64 DIVD instruction.
// Always fires.
func rewriteValueMIPS64_OpDiv64F_0(v *Value) bool {
	// match: (Div64F x y)
	// cond:
	// result: (DIVD x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64DIVD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpDiv64u_0 lowers Div64u to element 1 of the tuple produced
// by the 64-bit unsigned DIVVU; no extension is needed at full width. Always fires.
func rewriteValueMIPS64_OpDiv64u_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Div64u x y)
	// cond:
	// result: (Select1 (DIVVU x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpDiv8_0 lowers Div8 to element 1 of the tuple produced
// by the 64-bit signed DIVV, after sign-extending both operands to 64 bits.
// Always fires.
func rewriteValueMIPS64_OpDiv8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Div8 x y)
	// cond:
	// result: (Select1 (DIVV (SignExt8to64 x) (SignExt8to64 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
		v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpDiv8u_0 lowers Div8u to element 1 of the tuple produced
// by the 64-bit unsigned DIVVU, after zero-extending both operands to 64 bits.
// Always fires.
func rewriteValueMIPS64_OpDiv8u_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Div8u x y)
	// cond:
	// result: (Select1 (DIVVU (ZeroExt8to64 x) (ZeroExt8to64 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpEq16_0 lowers Eq16 to SGTU(1, x^y) on zero-extended
// operands: the XOR is zero exactly when x == y, and 1 >u z holds only for z == 0.
// Always fires.
func rewriteValueMIPS64_OpEq16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Eq16 x y)
	// cond:
	// result: (SGTU (MOVVconst [1]) (XOR (ZeroExt16to64 x) (ZeroExt16to64 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v2.AddArg(x)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpEq32_0 lowers Eq32 to SGTU(1, x^y) on zero-extended
// operands: the XOR is zero exactly when x == y, and 1 >u z holds only for z == 0.
// Always fires.
func rewriteValueMIPS64_OpEq32_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Eq32 x y)
	// cond:
	// result: (SGTU (MOVVconst [1]) (XOR (ZeroExt32to64 x) (ZeroExt32to64 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(x)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpEq32F_0 lowers Eq32F to FPFlagTrue reading the flags
// produced by the float compare CMPEQF. Always fires.
func rewriteValueMIPS64_OpEq32F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Eq32F x y)
	// cond:
	// result: (FPFlagTrue (CMPEQF x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPEQF, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpEq64_0 lowers Eq64 to SGTU(1, x^y): the XOR is zero
// exactly when x == y, and 1 >u z holds only for z == 0. Always fires.
func rewriteValueMIPS64_OpEq64_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Eq64 x y)
	// cond:
	// result: (SGTU (MOVVconst [1]) (XOR x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
		v1.AddArg(x)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpEq64F_0 lowers Eq64F to FPFlagTrue reading the flags
// produced by the double compare CMPEQD. Always fires.
func rewriteValueMIPS64_OpEq64F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Eq64F x y)
	// cond:
	// result: (FPFlagTrue (CMPEQD x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPEQD, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpEq8_0 lowers Eq8 to SGTU(1, x^y) on zero-extended
// operands: the XOR is zero exactly when x == y, and 1 >u z holds only for z == 0.
// Always fires.
func rewriteValueMIPS64_OpEq8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Eq8 x y)
	// cond:
	// result: (SGTU (MOVVconst [1]) (XOR (ZeroExt8to64 x) (ZeroExt8to64 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v2.AddArg(x)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpEqB_0 lowers boolean equality EqB to 1 XOR (x XOR y):
// x XOR y is 1 when the booleans differ, and XOR-ing with 1 negates that.
// Always fires.
func rewriteValueMIPS64_OpEqB_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (EqB x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (XOR <typ.Bool> x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.Bool)
		v1.AddArg(x)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpEqPtr_0 lowers pointer equality EqPtr to SGTU(1, x^y),
// the same zero-test-via-XOR pattern used for Eq64. Always fires.
func rewriteValueMIPS64_OpEqPtr_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (EqPtr x y)
	// cond:
	// result: (SGTU (MOVVconst [1]) (XOR x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
		v1.AddArg(x)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpGeq16_0 lowers Geq16 as !(y > x): SGT on sign-extended
// operands (note the swapped order, y first), negated by XOR with constant 1.
// Always fires.
func rewriteValueMIPS64_OpGeq16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Geq16 x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGT (SignExt16to64 y) (SignExt16to64 x)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v3.AddArg(x)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpGeq16U_0 lowers Geq16U as !(y >u x): SGTU on zero-extended
// operands (swapped order, y first), negated by XOR with constant 1. Always fires.
func rewriteValueMIPS64_OpGeq16U_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Geq16U x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt16to64 y) (ZeroExt16to64 x)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(x)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpGeq32_0 lowers Geq32 as !(y > x): SGT on sign-extended
// operands (swapped order, y first), negated by XOR with constant 1. Always fires.
func rewriteValueMIPS64_OpGeq32_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Geq32 x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGT (SignExt32to64 y) (SignExt32to64 x)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v3.AddArg(x)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpGeq32F_0 lowers Geq32F to FPFlagTrue reading the flags
// produced by the float compare CMPGEF. Always fires.
func rewriteValueMIPS64_OpGeq32F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Geq32F x y)
	// cond:
	// result: (FPFlagTrue (CMPGEF x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPGEF, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpGeq32U_0 lowers Geq32U as !(y >u x): SGTU on zero-extended
// operands (swapped order, y first), negated by XOR with constant 1. Always fires.
func rewriteValueMIPS64_OpGeq32U_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Geq32U x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt32to64 y) (ZeroExt32to64 x)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(x)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpGeq64_0 lowers Geq64 as !(y > x): SGT with swapped
// operands (y first), negated by XOR with constant 1. Always fires.
func rewriteValueMIPS64_OpGeq64_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Geq64 x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGT y x))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool)
		v1.AddArg(y)
		v1.AddArg(x)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpGeq64F_0 lowers Geq64F to FPFlagTrue reading the flags
// produced by the double compare CMPGED. Always fires.
func rewriteValueMIPS64_OpGeq64F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Geq64F x y)
	// cond:
	// result: (FPFlagTrue (CMPGED x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPGED, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpGeq64U_0 lowers Geq64U as !(y >u x): SGTU with swapped
// operands (y first), negated by XOR with constant 1. Always fires.
func rewriteValueMIPS64_OpGeq64U_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Geq64U x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGTU y x))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v1.AddArg(y)
		v1.AddArg(x)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpGeq8_0 lowers Geq8 as !(y > x): SGT on sign-extended
// operands (swapped order, y first), negated by XOR with constant 1. Always fires.
func rewriteValueMIPS64_OpGeq8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Geq8 x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGT (SignExt8to64 y) (SignExt8to64 x)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v3.AddArg(x)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpGeq8U_0 lowers Geq8U as !(y >u x): SGTU on zero-extended
// operands (swapped order, y first), negated by XOR with constant 1. Always fires.
func rewriteValueMIPS64_OpGeq8U_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Geq8U x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt8to64 y) (ZeroExt8to64 x)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(x)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpGetCallerSP_0 lowers GetCallerSP to LoweredGetCallerSP.
// Always fires.
func rewriteValueMIPS64_OpGetCallerSP_0(v *Value) bool {
	// match: (GetCallerSP)
	// cond:
	// result: (LoweredGetCallerSP)
	for {
		v.reset(OpMIPS64LoweredGetCallerSP)
		return true
	}
}
// rewriteValueMIPS64_OpGetClosurePtr_0 lowers GetClosurePtr to LoweredGetClosurePtr.
// Always fires.
func rewriteValueMIPS64_OpGetClosurePtr_0(v *Value) bool {
	// match: (GetClosurePtr)
	// cond:
	// result: (LoweredGetClosurePtr)
	for {
		v.reset(OpMIPS64LoweredGetClosurePtr)
		return true
	}
}
// rewriteValueMIPS64_OpGreater16_0 lowers Greater16 to a signed SGT after
// sign-extending both operands to 64 bits. Always fires.
func rewriteValueMIPS64_OpGreater16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Greater16 x y)
	// cond:
	// result: (SGT (SignExt16to64 x) (SignExt16to64 y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGT)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpGreater16U_0 lowers Greater16U to an unsigned SGTU after
// zero-extending both operands to 64 bits. Always fires.
func rewriteValueMIPS64_OpGreater16U_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Greater16U x y)
	// cond:
	// result: (SGTU (ZeroExt16to64 x) (ZeroExt16to64 y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpGreater32_0 lowers Greater32: both 32-bit operands are
// sign-extended to 64 bits, then compared with SGT.
func rewriteValueMIPS64_OpGreater32_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Greater32 x y)
	// cond:
	// result: (SGT (SignExt32to64 x) (SignExt32to64 y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGT)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpGreater32F_0 lowers Greater32F: the single-precision FP
// compare CMPGTF sets the FP condition flag, which FPFlagTrue materializes as a bool.
func rewriteValueMIPS64_OpGreater32F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Greater32F x y)
	// cond:
	// result: (FPFlagTrue (CMPGTF x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPGTF, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpGreater32U_0 lowers Greater32U: both 32-bit operands are
// zero-extended to 64 bits, then compared with SGTU.
func rewriteValueMIPS64_OpGreater32U_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Greater32U x y)
	// cond:
	// result: (SGTU (ZeroExt32to64 x) (ZeroExt32to64 y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpGreater64_0 lowers Greater64 directly to SGT; the operands
// are already full-width so no extension is needed.
func rewriteValueMIPS64_OpGreater64_0(v *Value) bool {
	// match: (Greater64 x y)
	// cond:
	// result: (SGT x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGT)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpGreater64F_0 lowers Greater64F: the double-precision FP
// compare CMPGTD sets the FP condition flag, which FPFlagTrue materializes as a bool.
func rewriteValueMIPS64_OpGreater64F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Greater64F x y)
	// cond:
	// result: (FPFlagTrue (CMPGTD x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPGTD, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpGreater64U_0 lowers Greater64U directly to SGTU; the operands
// are already full-width so no extension is needed.
func rewriteValueMIPS64_OpGreater64U_0(v *Value) bool {
	// match: (Greater64U x y)
	// cond:
	// result: (SGTU x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpGreater8_0 lowers Greater8: both 8-bit operands are
// sign-extended to 64 bits, then compared with SGT.
func rewriteValueMIPS64_OpGreater8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Greater8 x y)
	// cond:
	// result: (SGT (SignExt8to64 x) (SignExt8to64 y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGT)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpGreater8U_0 lowers Greater8U: both 8-bit operands are
// zero-extended to 64 bits, then compared with SGTU.
func rewriteValueMIPS64_OpGreater8U_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Greater8U x y)
	// cond:
	// result: (SGTU (ZeroExt8to64 x) (ZeroExt8to64 y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpHmul32_0 lowers Hmul32 (high 32 bits of a signed 32x32
// multiply): sign-extend both operands, take the low 64-bit half of the MULV
// tuple (Select1), and arithmetic-shift it right 32 — the full 32x32 product
// fits in 64 bits, so its top half is the desired result.
func rewriteValueMIPS64_OpHmul32_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Hmul32 x y)
	// cond:
	// result: (SRAVconst (Select1 <typ.Int64> (MULV (SignExt32to64 x) (SignExt32to64 y))) [32])
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAVconst)
		v.AuxInt = 32
		v0 := b.NewValue0(v.Pos, OpSelect1, typ.Int64)
		v1 := b.NewValue0(v.Pos, OpMIPS64MULV, types.NewTuple(typ.Int64, typ.Int64))
		v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v2.AddArg(x)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpHmul32u_0 lowers Hmul32u (high 32 bits of an unsigned
// 32x32 multiply): zero-extend both operands, take the low 64-bit half of the
// MULVU tuple (Select1), and logical-shift it right 32.
func rewriteValueMIPS64_OpHmul32u_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Hmul32u x y)
	// cond:
	// result: (SRLVconst (Select1 <typ.UInt64> (MULVU (ZeroExt32to64 x) (ZeroExt32to64 y))) [32])
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRLVconst)
		v.AuxInt = 32
		v0 := b.NewValue0(v.Pos, OpSelect1, typ.UInt64)
		v1 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(x)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpHmul64_0 lowers Hmul64 (high 64 bits of a signed 64x64
// multiply) to Select0 of the MULV tuple — the first tuple element carries the
// high half (contrast with Hmul32, which uses Select1 for the low half).
func rewriteValueMIPS64_OpHmul64_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Hmul64 x y)
	// cond:
	// result: (Select0 (MULV x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPS64MULV, types.NewTuple(typ.Int64, typ.Int64))
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpHmul64u_0 lowers Hmul64u (high 64 bits of an unsigned
// 64x64 multiply) to Select0 of the MULVU tuple.
func rewriteValueMIPS64_OpHmul64u_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Hmul64u x y)
	// cond:
	// result: (Select0 (MULVU x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpInterCall_0 lowers an interface-method call to the MIPS64
// CALLinter op, preserving the argument width (AuxInt) and the entry/mem args.
func rewriteValueMIPS64_OpInterCall_0(v *Value) bool {
	// match: (InterCall [argwid] entry mem)
	// cond:
	// result: (CALLinter [argwid] entry mem)
	for {
		argwid := v.AuxInt
		_ = v.Args[1]
		entry := v.Args[0]
		mem := v.Args[1]
		v.reset(OpMIPS64CALLinter)
		v.AuxInt = argwid
		v.AddArg(entry)
		v.AddArg(mem)
		return true
	}
}
// rewriteValueMIPS64_OpIsInBounds_0 lowers IsInBounds (0 <= idx < len) to an
// unsigned compare: SGTU len idx is true iff len > idx, which — with idx
// treated as unsigned — also rejects negative indexes.
func rewriteValueMIPS64_OpIsInBounds_0(v *Value) bool {
	// match: (IsInBounds idx len)
	// cond:
	// result: (SGTU len idx)
	for {
		_ = v.Args[1]
		idx := v.Args[0]
		len := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v.AddArg(len)
		v.AddArg(idx)
		return true
	}
}
// rewriteValueMIPS64_OpIsNonNil_0 lowers IsNonNil to an unsigned compare of the
// pointer against the constant 0: SGTU ptr 0 is true iff ptr != 0.
func rewriteValueMIPS64_OpIsNonNil_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (IsNonNil ptr)
	// cond:
	// result: (SGTU ptr (MOVVconst [0]))
	for {
		ptr := v.Args[0]
		v.reset(OpMIPS64SGTU)
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpIsSliceInBounds_0 lowers IsSliceInBounds (idx <= len).
// MIPS64 only has set-on-greater-than, so idx <= len is computed as the
// negation !(idx > len), done by XORing the SGTU result with 1.
func rewriteValueMIPS64_OpIsSliceInBounds_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (IsSliceInBounds idx len)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGTU idx len))
	for {
		_ = v.Args[1]
		idx := v.Args[0]
		len := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v1.AddArg(idx)
		v1.AddArg(len)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpLeq16_0 lowers Leq16: x <= y is computed as !(x > y),
// i.e. (SGT on sign-extended operands) XOR 1, since MIPS64 has no
// set-on-less-or-equal instruction.
func rewriteValueMIPS64_OpLeq16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Leq16 x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGT (SignExt16to64 x) (SignExt16to64 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v2.AddArg(x)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpLeq16U_0 lowers Leq16U: x <= y (unsigned) is computed as
// !(x > y), i.e. (SGTU on zero-extended operands) XOR 1.
func rewriteValueMIPS64_OpLeq16U_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Leq16U x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt16to64 x) (ZeroExt16to64 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v2.AddArg(x)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpLeq32_0 lowers Leq32: x <= y is computed as !(x > y),
// i.e. (SGT on sign-extended operands) XOR 1.
func rewriteValueMIPS64_OpLeq32_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Leq32 x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGT (SignExt32to64 x) (SignExt32to64 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v2.AddArg(x)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpLeq32F_0 lowers Leq32F: x <= y is rewritten as y >= x,
// so the operands are swapped into CMPGEF and the flag read back with FPFlagTrue.
func rewriteValueMIPS64_OpLeq32F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Leq32F x y)
	// cond:
	// result: (FPFlagTrue (CMPGEF y x))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPGEF, types.TypeFlags)
		v0.AddArg(y)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpLeq32U_0 lowers Leq32U: x <= y (unsigned) is computed as
// !(x > y), i.e. (SGTU on zero-extended operands) XOR 1.
func rewriteValueMIPS64_OpLeq32U_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Leq32U x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt32to64 x) (ZeroExt32to64 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(x)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpLeq64_0 lowers Leq64: x <= y is computed as !(x > y),
// i.e. (SGT x y) XOR 1; full-width operands need no extension.
func rewriteValueMIPS64_OpLeq64_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Leq64 x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGT x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool)
		v1.AddArg(x)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpLeq64F_0 lowers Leq64F: x <= y is rewritten as y >= x,
// so the operands are swapped into CMPGED and the flag read back with FPFlagTrue.
func rewriteValueMIPS64_OpLeq64F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Leq64F x y)
	// cond:
	// result: (FPFlagTrue (CMPGED y x))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPGED, types.TypeFlags)
		v0.AddArg(y)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpLeq64U_0 lowers Leq64U: x <= y (unsigned) is computed as
// !(x > y), i.e. (SGTU x y) XOR 1; full-width operands need no extension.
func rewriteValueMIPS64_OpLeq64U_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Leq64U x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGTU x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v1.AddArg(x)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpLeq8_0 lowers Leq8: x <= y is computed as !(x > y),
// i.e. (SGT on sign-extended operands) XOR 1.
func rewriteValueMIPS64_OpLeq8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Leq8 x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGT (SignExt8to64 x) (SignExt8to64 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v2.AddArg(x)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpLeq8U_0 lowers Leq8U: x <= y (unsigned) is computed as
// !(x > y), i.e. (SGTU on zero-extended operands) XOR 1.
func rewriteValueMIPS64_OpLeq8U_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Leq8U x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt8to64 x) (ZeroExt8to64 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v2.AddArg(x)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpLess16_0 lowers Less16: x < y is rewritten as y > x, so
// the sign-extended operands are swapped into SGT (MIPS64 has no set-on-less).
func rewriteValueMIPS64_OpLess16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Less16 x y)
	// cond:
	// result: (SGT (SignExt16to64 y) (SignExt16to64 x))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGT)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(y)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v1.AddArg(x)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpLess16U_0 lowers Less16U: x < y is rewritten as y > x,
// so the zero-extended operands are swapped into SGTU.
func rewriteValueMIPS64_OpLess16U_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Less16U x y)
	// cond:
	// result: (SGTU (ZeroExt16to64 y) (ZeroExt16to64 x))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v0.AddArg(y)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpLess32_0 lowers Less32: x < y is rewritten as y > x, so
// the sign-extended operands are swapped into SGT.
func rewriteValueMIPS64_OpLess32_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Less32 x y)
	// cond:
	// result: (SGT (SignExt32to64 y) (SignExt32to64 x))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGT)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(y)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v1.AddArg(x)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpLess32F_0 lowers Less32F: x < y is rewritten as y > x,
// so the operands are swapped into CMPGTF and the flag read back with FPFlagTrue.
func rewriteValueMIPS64_OpLess32F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Less32F x y)
	// cond:
	// result: (FPFlagTrue (CMPGTF y x))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPGTF, types.TypeFlags)
		v0.AddArg(y)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpLess32U_0 lowers Less32U: x < y is rewritten as y > x,
// so the zero-extended operands are swapped into SGTU.
func rewriteValueMIPS64_OpLess32U_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Less32U x y)
	// cond:
	// result: (SGTU (ZeroExt32to64 y) (ZeroExt32to64 x))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v0.AddArg(y)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(x)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpLess64_0 lowers Less64: x < y is rewritten as y > x,
// i.e. SGT with swapped operands; full-width operands need no extension.
func rewriteValueMIPS64_OpLess64_0(v *Value) bool {
	// match: (Less64 x y)
	// cond:
	// result: (SGT y x)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGT)
		v.AddArg(y)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpLess64F_0 lowers Less64F: x < y is rewritten as y > x,
// so the operands are swapped into CMPGTD and the flag read back with FPFlagTrue.
func rewriteValueMIPS64_OpLess64F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Less64F x y)
	// cond:
	// result: (FPFlagTrue (CMPGTD y x))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPGTD, types.TypeFlags)
		v0.AddArg(y)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpLess64U_0 lowers Less64U: x < y is rewritten as y > x,
// i.e. SGTU with swapped operands.
func rewriteValueMIPS64_OpLess64U_0(v *Value) bool {
	// match: (Less64U x y)
	// cond:
	// result: (SGTU y x)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v.AddArg(y)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpLess8_0 lowers Less8: x < y is rewritten as y > x, so
// the sign-extended operands are swapped into SGT.
func rewriteValueMIPS64_OpLess8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Less8 x y)
	// cond:
	// result: (SGT (SignExt8to64 y) (SignExt8to64 x))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGT)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(y)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v1.AddArg(x)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpLess8U_0 lowers Less8U: x < y is rewritten as y > x, so
// the zero-extended operands are swapped into SGTU.
func rewriteValueMIPS64_OpLess8U_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Less8U x y)
	// cond:
	// result: (SGTU (ZeroExt8to64 y) (ZeroExt8to64 x))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(y)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpLoad_0 lowers the generic Load op by dispatching on the
// loaded type: booleans and unsigned small ints use zero-extending loads
// (MOVBU/MOVHU/MOVWU), signed small ints use sign-extending loads (MOVB/MOVH/MOVW),
// 64-bit ints and pointers use MOVV, and floats use MOVF/MOVD. The cases are
// mutually exclusive; each falls through (break) to the next when its type
// condition does not hold, and the function returns false if no case matches.
func rewriteValueMIPS64_OpLoad_0(v *Value) bool {
	// match: (Load <t> ptr mem)
	// cond: t.IsBoolean()
	// result: (MOVBUload ptr mem)
	for {
		t := v.Type
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(t.IsBoolean()) {
			break
		}
		v.reset(OpMIPS64MOVBUload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is8BitInt(t) && isSigned(t))
	// result: (MOVBload ptr mem)
	for {
		t := v.Type
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(is8BitInt(t) && isSigned(t)) {
			break
		}
		v.reset(OpMIPS64MOVBload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is8BitInt(t) && !isSigned(t))
	// result: (MOVBUload ptr mem)
	for {
		t := v.Type
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(is8BitInt(t) && !isSigned(t)) {
			break
		}
		v.reset(OpMIPS64MOVBUload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is16BitInt(t) && isSigned(t))
	// result: (MOVHload ptr mem)
	for {
		t := v.Type
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(is16BitInt(t) && isSigned(t)) {
			break
		}
		v.reset(OpMIPS64MOVHload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is16BitInt(t) && !isSigned(t))
	// result: (MOVHUload ptr mem)
	for {
		t := v.Type
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(is16BitInt(t) && !isSigned(t)) {
			break
		}
		v.reset(OpMIPS64MOVHUload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is32BitInt(t) && isSigned(t))
	// result: (MOVWload ptr mem)
	for {
		t := v.Type
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(is32BitInt(t) && isSigned(t)) {
			break
		}
		v.reset(OpMIPS64MOVWload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is32BitInt(t) && !isSigned(t))
	// result: (MOVWUload ptr mem)
	for {
		t := v.Type
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(is32BitInt(t) && !isSigned(t)) {
			break
		}
		v.reset(OpMIPS64MOVWUload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is64BitInt(t) || isPtr(t))
	// result: (MOVVload ptr mem)
	for {
		t := v.Type
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(is64BitInt(t) || isPtr(t)) {
			break
		}
		v.reset(OpMIPS64MOVVload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: is32BitFloat(t)
	// result: (MOVFload ptr mem)
	for {
		t := v.Type
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(is32BitFloat(t)) {
			break
		}
		v.reset(OpMIPS64MOVFload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: is64BitFloat(t)
	// result: (MOVDload ptr mem)
	for {
		t := v.Type
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(is64BitFloat(t)) {
			break
		}
		v.reset(OpMIPS64MOVDload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpLsh16x16_0 lowers Lsh16x16. Go requires shifts by >= the
// word size to yield 0, but the hardware SLLV only looks at the low bits of the
// shift amount, so the SLLV result is ANDed with a mask: NEGV(SGTU 64 y) is
// all-ones when the (zero-extended) shift amount is < 64 and zero otherwise.
func rewriteValueMIPS64_OpLsh16x16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Lsh16x16 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh16x32_0 lowers Lsh16x32. As with the other shift
// lowerings, the SLLV result is masked with NEGV(SGTU 64 y) so that shift
// amounts >= 64 produce 0 as the Go spec requires.
func rewriteValueMIPS64_OpLsh16x32_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Lsh16x32 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh16x64_0 lowers Lsh16x64. The 64-bit shift amount needs
// no extension; the SLLV result is masked with NEGV(SGTU 64 y) so that shift
// amounts >= 64 produce 0 as the Go spec requires.
func rewriteValueMIPS64_OpLsh16x64_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Lsh16x64 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SLLV <t> x y))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v3.AddArg(x)
		v3.AddArg(y)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueMIPS64_OpLsh16x8_0 lowers Lsh16x8. The 8-bit shift amount is
// zero-extended; the SLLV result is masked with NEGV(SGTU 64 y) so that shift
// amounts >= 64 produce 0 as the Go spec requires.
func rewriteValueMIPS64_OpLsh16x8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Lsh16x8 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh32x16_0 lowers Lsh32x16. The 16-bit shift amount is
// zero-extended; the SLLV result is masked with NEGV(SGTU 64 y) so that shift
// amounts >= 64 produce 0 as the Go spec requires.
func rewriteValueMIPS64_OpLsh32x16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Lsh32x16 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh32x32_0 lowers Lsh32x32 (left shift by a 32-bit
// amount). The NEGV(SGTU 64, zext(y)) term is an all-ones mask when the
// shift amount is < 64 and zero otherwise, so out-of-range shifts yield 0.
func rewriteValueMIPS64_OpLsh32x32_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Lsh32x32 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh32x64_0 lowers Lsh32x64. The shift amount y is
// already 64-bit, so no zero-extension is needed; NEGV(SGTU 64, y) masks
// the SLLV result to 0 when y >= 64.
func rewriteValueMIPS64_OpLsh32x64_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Lsh32x64 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SLLV <t> x y))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v3.AddArg(x)
		v3.AddArg(y)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueMIPS64_OpLsh32x8_0 lowers Lsh32x8 (left shift by an 8-bit
// amount). The NEGV(SGTU 64, zext(y)) term is an all-ones mask when the
// shift amount is < 64 and zero otherwise, so out-of-range shifts yield 0.
func rewriteValueMIPS64_OpLsh32x8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Lsh32x8 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh64x16_0 lowers Lsh64x16 (left shift by a 16-bit
// amount). The NEGV(SGTU 64, zext(y)) term is an all-ones mask when the
// shift amount is < 64 and zero otherwise, so out-of-range shifts yield 0.
func rewriteValueMIPS64_OpLsh64x16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Lsh64x16 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh64x32_0 lowers Lsh64x32 (left shift by a 32-bit
// amount). The NEGV(SGTU 64, zext(y)) term is an all-ones mask when the
// shift amount is < 64 and zero otherwise, so out-of-range shifts yield 0.
func rewriteValueMIPS64_OpLsh64x32_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Lsh64x32 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh64x64_0 lowers Lsh64x64. The shift amount y is
// already 64-bit, so no zero-extension is needed; NEGV(SGTU 64, y) masks
// the SLLV result to 0 when y >= 64.
func rewriteValueMIPS64_OpLsh64x64_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Lsh64x64 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SLLV <t> x y))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v3.AddArg(x)
		v3.AddArg(y)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueMIPS64_OpLsh64x8_0 lowers Lsh64x8 (left shift by an 8-bit
// amount). The NEGV(SGTU 64, zext(y)) term is an all-ones mask when the
// shift amount is < 64 and zero otherwise, so out-of-range shifts yield 0.
func rewriteValueMIPS64_OpLsh64x8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Lsh64x8 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh8x16_0 lowers Lsh8x16 (left shift by a 16-bit
// amount). The NEGV(SGTU 64, zext(y)) term is an all-ones mask when the
// shift amount is < 64 and zero otherwise, so out-of-range shifts yield 0.
func rewriteValueMIPS64_OpLsh8x16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Lsh8x16 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh8x32_0 lowers Lsh8x32 (left shift by a 32-bit
// amount). The NEGV(SGTU 64, zext(y)) term is an all-ones mask when the
// shift amount is < 64 and zero otherwise, so out-of-range shifts yield 0.
func rewriteValueMIPS64_OpLsh8x32_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Lsh8x32 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh8x64_0 lowers Lsh8x64. The shift amount y is
// already 64-bit, so no zero-extension is needed; NEGV(SGTU 64, y) masks
// the SLLV result to 0 when y >= 64.
func rewriteValueMIPS64_OpLsh8x64_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Lsh8x64 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SLLV <t> x y))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v3.AddArg(x)
		v3.AddArg(y)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueMIPS64_OpLsh8x8_0 lowers Lsh8x8 (left shift by an 8-bit
// amount). The NEGV(SGTU 64, zext(y)) term is an all-ones mask when the
// shift amount is < 64 and zero otherwise, so out-of-range shifts yield 0.
func rewriteValueMIPS64_OpLsh8x8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Lsh8x8 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpMIPS64ADDV_0 simplifies ADDV: an add with a 32-bit
// constant operand (either side) becomes ADDVconst, and an add of a negated
// value (either side) becomes SUBV. Each case returns true on rewrite;
// falls through to return false when no rule matches.
func rewriteValueMIPS64_OpMIPS64ADDV_0(v *Value) bool {
	// match: (ADDV x (MOVVconst [c]))
	// cond: is32Bit(c)
	// result: (ADDVconst [c] x)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_1.AuxInt
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64ADDVconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (ADDV (MOVVconst [c]) x)
	// cond: is32Bit(c)
	// result: (ADDVconst [c] x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64ADDVconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (ADDV x (NEGV y))
	// cond:
	// result: (SUBV x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64NEGV {
			break
		}
		y := v_1.Args[0]
		v.reset(OpMIPS64SUBV)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (ADDV (NEGV y) x)
	// cond:
	// result: (SUBV x y)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64NEGV {
			break
		}
		y := v_0.Args[0]
		x := v.Args[1]
		v.reset(OpMIPS64SUBV)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64ADDVconst_0 folds constants through ADDVconst:
// folds the offset into MOVVaddr, eliminates adding 0, folds into MOVVconst,
// and merges nested ADDVconst/SUBVconst when the combined constant still
// fits in 32 bits.
func rewriteValueMIPS64_OpMIPS64ADDVconst_0(v *Value) bool {
	// match: (ADDVconst [off1] (MOVVaddr [off2] {sym} ptr))
	// cond:
	// result: (MOVVaddr [off1+off2] {sym} ptr)
	for {
		off1 := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym := v_0.Aux
		ptr := v_0.Args[0]
		v.reset(OpMIPS64MOVVaddr)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		return true
	}
	// match: (ADDVconst [0] x)
	// cond:
	// result: x
	for {
		if v.AuxInt != 0 {
			break
		}
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (ADDVconst [c] (MOVVconst [d]))
	// cond:
	// result: (MOVVconst [c+d])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = c + d
		return true
	}
	// match: (ADDVconst [c] (ADDVconst [d] x))
	// cond: is32Bit(c+d)
	// result: (ADDVconst [c+d] x)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		d := v_0.AuxInt
		x := v_0.Args[0]
		if !(is32Bit(c + d)) {
			break
		}
		v.reset(OpMIPS64ADDVconst)
		v.AuxInt = c + d
		v.AddArg(x)
		return true
	}
	// match: (ADDVconst [c] (SUBVconst [d] x))
	// cond: is32Bit(c-d)
	// result: (ADDVconst [c-d] x)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64SUBVconst {
			break
		}
		d := v_0.AuxInt
		x := v_0.Args[0]
		if !(is32Bit(c - d)) {
			break
		}
		v.reset(OpMIPS64ADDVconst)
		v.AuxInt = c - d
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64AND_0 simplifies AND: an AND with a 32-bit
// constant operand (either side) becomes ANDconst, and AND of a value with
// itself reduces to the value (x & x == x).
func rewriteValueMIPS64_OpMIPS64AND_0(v *Value) bool {
	// match: (AND x (MOVVconst [c]))
	// cond: is32Bit(c)
	// result: (ANDconst [c] x)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_1.AuxInt
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64ANDconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (AND (MOVVconst [c]) x)
	// cond: is32Bit(c)
	// result: (ANDconst [c] x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64ANDconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (AND x x)
	// cond:
	// result: x
	for {
		_ = v.Args[1]
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64ANDconst_0 folds constants through ANDconst:
// AND with 0 is 0, AND with -1 (all ones) is the identity, AND of two
// constants folds to one constant, and nested ANDconst masks combine
// via c&d.
func rewriteValueMIPS64_OpMIPS64ANDconst_0(v *Value) bool {
	// match: (ANDconst [0] _)
	// cond:
	// result: (MOVVconst [0])
	for {
		if v.AuxInt != 0 {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	// match: (ANDconst [-1] x)
	// cond:
	// result: x
	for {
		if v.AuxInt != -1 {
			break
		}
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (ANDconst [c] (MOVVconst [d]))
	// cond:
	// result: (MOVVconst [c&d])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = c & d
		return true
	}
	// match: (ANDconst [c] (ANDconst [d] x))
	// cond:
	// result: (ANDconst [c&d] x)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ANDconst {
			break
		}
		d := v_0.AuxInt
		x := v_0.Args[0]
		v.reset(OpMIPS64ANDconst)
		v.AuxInt = c & d
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64LoweredAtomicAdd32_0 converts an atomic add of
// a 32-bit-representable constant into the immediate form
// LoweredAtomicAddconst32, saving the constant-materializing instruction.
func rewriteValueMIPS64_OpMIPS64LoweredAtomicAdd32_0(v *Value) bool {
	// match: (LoweredAtomicAdd32 ptr (MOVVconst [c]) mem)
	// cond: is32Bit(c)
	// result: (LoweredAtomicAddconst32 [c] ptr mem)
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_1.AuxInt
		mem := v.Args[2]
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64LoweredAtomicAddconst32)
		v.AuxInt = c
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64LoweredAtomicAdd64_0 converts an atomic add of
// a 32-bit-representable constant into the immediate form
// LoweredAtomicAddconst64, saving the constant-materializing instruction.
func rewriteValueMIPS64_OpMIPS64LoweredAtomicAdd64_0(v *Value) bool {
	// match: (LoweredAtomicAdd64 ptr (MOVVconst [c]) mem)
	// cond: is32Bit(c)
	// result: (LoweredAtomicAddconst64 [c] ptr mem)
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_1.AuxInt
		mem := v.Args[2]
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64LoweredAtomicAddconst64)
		v.AuxInt = c
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64LoweredAtomicStore32_0 converts an atomic store
// of constant 0 into LoweredAtomicStorezero32, which can use the hardware
// zero register instead of materializing the constant.
func rewriteValueMIPS64_OpMIPS64LoweredAtomicStore32_0(v *Value) bool {
	// match: (LoweredAtomicStore32 ptr (MOVVconst [0]) mem)
	// cond:
	// result: (LoweredAtomicStorezero32 ptr mem)
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		mem := v.Args[2]
		v.reset(OpMIPS64LoweredAtomicStorezero32)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64LoweredAtomicStore64_0 converts an atomic store
// of constant 0 into LoweredAtomicStorezero64, which can use the hardware
// zero register instead of materializing the constant.
func rewriteValueMIPS64_OpMIPS64LoweredAtomicStore64_0(v *Value) bool {
	// match: (LoweredAtomicStore64 ptr (MOVVconst [0]) mem)
	// cond:
	// result: (LoweredAtomicStorezero64 ptr mem)
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		mem := v.Args[2]
		v.reset(OpMIPS64LoweredAtomicStorezero64)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVBUload_0 folds address arithmetic into the
// unsigned byte load: an ADDVconst base merges its offset into the load's
// AuxInt, and a MOVVaddr base merges both offset and symbol (when the
// symbols are compatible and the combined offset fits in 32 bits).
func rewriteValueMIPS64_OpMIPS64MOVBUload_0(v *Value) bool {
	// match: (MOVBUload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVBUload [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVBUload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBUload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVBUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVBUload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVBUreg_0 removes redundant zero-extensions:
// the input of MOVBUreg is already zero-extended when it comes from an
// unsigned byte load or another MOVBUreg, so only a register move remains;
// a constant input is zero-extended at compile time via uint8 truncation.
func rewriteValueMIPS64_OpMIPS64MOVBUreg_0(v *Value) bool {
	// match: (MOVBUreg x:(MOVBUload _ _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVBUload {
			break
		}
		_ = x.Args[1]
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg x:(MOVBUreg _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVBUreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg (MOVVconst [c]))
	// cond:
	// result: (MOVVconst [int64(uint8(c))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64(uint8(c))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVBload_0 folds address arithmetic into the
// signed byte load: an ADDVconst base merges its offset into the load's
// AuxInt, and a MOVVaddr base merges both offset and symbol (when the
// symbols are compatible and the combined offset fits in 32 bits).
func rewriteValueMIPS64_OpMIPS64MOVBload_0(v *Value) bool {
	// match: (MOVBload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVBload [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVBload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVBload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVBload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVBreg_0 removes redundant sign-extensions:
// the input of MOVBreg is already sign-extended when it comes from a signed
// byte load or another MOVBreg, so only a register move remains; a constant
// input is sign-extended at compile time via int8 truncation.
func rewriteValueMIPS64_OpMIPS64MOVBreg_0(v *Value) bool {
	// match: (MOVBreg x:(MOVBload _ _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVBload {
			break
		}
		_ = x.Args[1]
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBreg x:(MOVBreg _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVBreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBreg (MOVVconst [c]))
	// cond:
	// result: (MOVVconst [int64(int8(c))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64(int8(c))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVBstore_0 simplifies byte stores: folds
// ADDVconst/MOVVaddr address arithmetic into the store's offset/symbol,
// converts a store of constant 0 into MOVBstorezero, and drops redundant
// sign/zero extensions of the stored value (a byte store only writes the
// low 8 bits, so any MOVB/MOVBU/MOVH/MOVHU/MOVW/MOVWU extension of the
// value is irrelevant).
func rewriteValueMIPS64_OpMIPS64MOVBstore_0(v *Value) bool {
	// match: (MOVBstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVBstore [off1+off2] {sym} ptr val mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVBstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVVconst [0]) mem)
	// cond:
	// result: (MOVBstorezero [off] {sym} ptr mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		mem := v.Args[2]
		v.reset(OpMIPS64MOVBstorezero)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVBreg x) mem)
	// cond:
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVBreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVBUreg x) mem)
	// cond:
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVBUreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVHreg x) mem)
	// cond:
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVHreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVHUreg x) mem)
	// cond:
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVHUreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVWreg x) mem)
	// cond:
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVWreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVWUreg x) mem)
	// cond:
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVWUreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVBstorezero_0 folds addressing arithmetic into
// a MOVBstorezero: a constant offset coming from ADDVconst, or a symbol+offset
// coming from MOVVaddr, is absorbed into the op's AuxInt/Aux when the combined
// offset still fits in 32 bits (and, for MOVVaddr, the symbols can be merged).
// It reports whether a rewrite was applied.
// NOTE(review): generated from gen/MIPS64.rules — change the rules, not this file.
func rewriteValueMIPS64_OpMIPS64MOVBstorezero_0(v *Value) bool {
	// match: (MOVBstorezero [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVBstorezero [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVBstorezero)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstorezero [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVBstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVBstorezero)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVDload_0 folds addressing arithmetic into a
// MOVDload (float64 load): an ADDVconst offset, or a MOVVaddr symbol+offset,
// is absorbed into the load's AuxInt/Aux when the combined offset fits in
// 32 bits (and, for MOVVaddr, the symbols can be merged).
// It reports whether a rewrite was applied.
func rewriteValueMIPS64_OpMIPS64MOVDload_0(v *Value) bool {
	// match: (MOVDload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVDload [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVDload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVDload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVDload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVDstore_0 folds addressing arithmetic into a
// MOVDstore (float64 store): an ADDVconst offset, or a MOVVaddr symbol+offset,
// is absorbed into the store's AuxInt/Aux when the combined offset fits in
// 32 bits (and, for MOVVaddr, the symbols can be merged).
// It reports whether a rewrite was applied.
func rewriteValueMIPS64_OpMIPS64MOVDstore_0(v *Value) bool {
	// match: (MOVDstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVDstore [off1+off2] {sym} ptr val mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVDstore)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVDstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVDstore)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVFload_0 folds addressing arithmetic into a
// MOVFload (float32 load): an ADDVconst offset, or a MOVVaddr symbol+offset,
// is absorbed into the load's AuxInt/Aux when the combined offset fits in
// 32 bits (and, for MOVVaddr, the symbols can be merged).
// It reports whether a rewrite was applied.
func rewriteValueMIPS64_OpMIPS64MOVFload_0(v *Value) bool {
	// match: (MOVFload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVFload [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVFload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVFload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVFload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVFload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVFstore_0 folds addressing arithmetic into a
// MOVFstore (float32 store): an ADDVconst offset, or a MOVVaddr symbol+offset,
// is absorbed into the store's AuxInt/Aux when the combined offset fits in
// 32 bits (and, for MOVVaddr, the symbols can be merged).
// It reports whether a rewrite was applied.
func rewriteValueMIPS64_OpMIPS64MOVFstore_0(v *Value) bool {
	// match: (MOVFstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVFstore [off1+off2] {sym} ptr val mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVFstore)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVFstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVFstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVFstore)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVHUload_0 folds addressing arithmetic into a
// MOVHUload (zero-extending halfword load): an ADDVconst offset, or a MOVVaddr
// symbol+offset, is absorbed into the load's AuxInt/Aux when the combined
// offset fits in 32 bits (and, for MOVVaddr, the symbols can be merged).
// It reports whether a rewrite was applied.
func rewriteValueMIPS64_OpMIPS64MOVHUload_0(v *Value) bool {
	// match: (MOVHUload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVHUload [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVHUload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHUload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVHUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVHUload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVHUreg_0 elides redundant 16-bit zero-extensions:
// when the operand is already zero-extended (an unsigned sub-word load, or a
// narrower/equal zero-extension op) the MOVHUreg is replaced by a plain MOVVreg
// move, and MOVHUreg of a constant is folded to the constant's low 16 bits.
// It reports whether a rewrite was applied.
func rewriteValueMIPS64_OpMIPS64MOVHUreg_0(v *Value) bool {
	// match: (MOVHUreg x:(MOVBUload _ _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVBUload {
			break
		}
		_ = x.Args[1]
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVHUload _ _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVHUload {
			break
		}
		_ = x.Args[1]
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVBUreg _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVBUreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVHUreg _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVHUreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg (MOVVconst [c]))
	// cond:
	// result: (MOVVconst [int64(uint16(c))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64(uint16(c))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVHload_0 folds addressing arithmetic into a
// MOVHload (sign-extending halfword load): an ADDVconst offset, or a MOVVaddr
// symbol+offset, is absorbed into the load's AuxInt/Aux when the combined
// offset fits in 32 bits (and, for MOVVaddr, the symbols can be merged).
// It reports whether a rewrite was applied.
func rewriteValueMIPS64_OpMIPS64MOVHload_0(v *Value) bool {
	// match: (MOVHload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVHload [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVHload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVHload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVHload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVHreg_0 elides redundant 16-bit sign-extensions:
// when the operand already fits in a sign-extended halfword (a sub-word load,
// or a narrower/equal extension op) the MOVHreg is replaced by a plain MOVVreg
// move, and MOVHreg of a constant is folded to int64(int16(c)).
// It reports whether a rewrite was applied.
func rewriteValueMIPS64_OpMIPS64MOVHreg_0(v *Value) bool {
	// match: (MOVHreg x:(MOVBload _ _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVBload {
			break
		}
		_ = x.Args[1]
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBUload _ _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVBUload {
			break
		}
		_ = x.Args[1]
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVHload _ _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVHload {
			break
		}
		_ = x.Args[1]
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBreg _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVBreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBUreg _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVBUreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVHreg _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVHreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg (MOVVconst [c]))
	// cond:
	// result: (MOVVconst [int64(int16(c))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64(int16(c))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVHstore_0 simplifies halfword stores: it folds
// ADDVconst offsets and MOVVaddr symbol+offsets into the store's AuxInt/Aux
// (when the combined offset fits in 32 bits and symbols merge), rewrites a
// store of constant zero to MOVHstorezero, and drops sign/zero-extension ops
// on the stored value, since a halfword store only writes the low 16 bits
// anyway. It reports whether a rewrite was applied.
func rewriteValueMIPS64_OpMIPS64MOVHstore_0(v *Value) bool {
	// match: (MOVHstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVHstore [off1+off2] {sym} ptr val mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVHstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVVconst [0]) mem)
	// cond:
	// result: (MOVHstorezero [off] {sym} ptr mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		mem := v.Args[2]
		v.reset(OpMIPS64MOVHstorezero)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVHreg x) mem)
	// cond:
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVHreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVHUreg x) mem)
	// cond:
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVHUreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVWreg x) mem)
	// cond:
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVWreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVWUreg x) mem)
	// cond:
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVWUreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVHstorezero_0 folds addressing arithmetic into
// a MOVHstorezero: an ADDVconst offset, or a MOVVaddr symbol+offset, is
// absorbed into the op's AuxInt/Aux when the combined offset fits in 32 bits
// (and, for MOVVaddr, the symbols can be merged).
// It reports whether a rewrite was applied.
func rewriteValueMIPS64_OpMIPS64MOVHstorezero_0(v *Value) bool {
	// match: (MOVHstorezero [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVHstorezero [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVHstorezero)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHstorezero [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVHstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVHstorezero)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVVload_0 folds addressing arithmetic into a
// MOVVload (doubleword load): an ADDVconst offset, or a MOVVaddr
// symbol+offset, is absorbed into the load's AuxInt/Aux when the combined
// offset fits in 32 bits (and, for MOVVaddr, the symbols can be merged).
// It reports whether a rewrite was applied.
func rewriteValueMIPS64_OpMIPS64MOVVload_0(v *Value) bool {
	// match: (MOVVload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVVload [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVVload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVVload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVVload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVVload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVVreg_0 simplifies register-to-register moves:
// a MOVVreg whose operand has exactly one use becomes a MOVVnop (no code is
// needed; register allocation can coalesce it), and MOVVreg of a constant
// folds to the constant itself. It reports whether a rewrite was applied.
func rewriteValueMIPS64_OpMIPS64MOVVreg_0(v *Value) bool {
	// match: (MOVVreg x)
	// cond: x.Uses == 1
	// result: (MOVVnop x)
	for {
		x := v.Args[0]
		if !(x.Uses == 1) {
			break
		}
		v.reset(OpMIPS64MOVVnop)
		v.AddArg(x)
		return true
	}
	// match: (MOVVreg (MOVVconst [c]))
	// cond:
	// result: (MOVVconst [c])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = c
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVVstore_0 simplifies doubleword stores: it
// folds ADDVconst offsets and MOVVaddr symbol+offsets into the store's
// AuxInt/Aux (when the combined offset fits in 32 bits and symbols merge),
// and rewrites a store of constant zero to MOVVstorezero.
// It reports whether a rewrite was applied.
func rewriteValueMIPS64_OpMIPS64MOVVstore_0(v *Value) bool {
	// match: (MOVVstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVVstore [off1+off2] {sym} ptr val mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVVstore)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVVstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVVstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVVstore)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVVstore [off] {sym} ptr (MOVVconst [0]) mem)
	// cond:
	// result: (MOVVstorezero [off] {sym} ptr mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		mem := v.Args[2]
		v.reset(OpMIPS64MOVVstorezero)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVVstorezero_0 folds addressing arithmetic into
// a MOVVstorezero: an ADDVconst offset, or a MOVVaddr symbol+offset, is
// absorbed into the op's AuxInt/Aux when the combined offset fits in 32 bits
// (and, for MOVVaddr, the symbols can be merged).
// It reports whether a rewrite was applied.
func rewriteValueMIPS64_OpMIPS64MOVVstorezero_0(v *Value) bool {
	// match: (MOVVstorezero [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVVstorezero [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVVstorezero)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVVstorezero [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVVstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVVstorezero)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVWUload_0 folds addressing arithmetic into a
// MOVWUload (zero-extending word load): an ADDVconst offset, or a MOVVaddr
// symbol+offset, is absorbed into the load's AuxInt/Aux when the combined
// offset fits in 32 bits (and, for MOVVaddr, the symbols can be merged).
// It reports whether a rewrite was applied.
func rewriteValueMIPS64_OpMIPS64MOVWUload_0(v *Value) bool {
	// match: (MOVWUload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVWUload [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVWUload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWUload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVWUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVWUload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVWUreg_0 elides redundant 32-bit zero-extensions:
// when the operand is already zero-extended (an unsigned sub-word load, or a
// narrower/equal zero-extension op) the MOVWUreg is replaced by a plain MOVVreg
// move, and MOVWUreg of a constant is folded to the constant's low 32 bits.
// It reports whether a rewrite was applied.
func rewriteValueMIPS64_OpMIPS64MOVWUreg_0(v *Value) bool {
	// match: (MOVWUreg x:(MOVBUload _ _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVBUload {
			break
		}
		_ = x.Args[1]
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVHUload _ _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVHUload {
			break
		}
		_ = x.Args[1]
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVWUload _ _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVWUload {
			break
		}
		_ = x.Args[1]
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVBUreg _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVBUreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVHUreg _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVHUreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVWUreg _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVWUreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg (MOVVconst [c]))
	// cond:
	// result: (MOVVconst [int64(uint32(c))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64(uint32(c))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVWload_0 folds address arithmetic into the
// MOVWload's offset: an ADDVconst base merges its constant into the load
// offset, and a MOVVaddr base merges both offset and symbol, provided the
// combined offset still fits in 32 bits and the symbols are mergeable.
func rewriteValueMIPS64_OpMIPS64MOVWload_0(v *Value) bool {
	// match: (MOVWload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVWload [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[1] // assert v has two args (addr, mem)
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVWload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVWload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVWload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
  5358  func rewriteValueMIPS64_OpMIPS64MOVWreg_0(v *Value) bool {
  5359  	// match: (MOVWreg x:(MOVBload _ _))
  5360  	// cond:
  5361  	// result: (MOVVreg x)
  5362  	for {
  5363  		x := v.Args[0]
  5364  		if x.Op != OpMIPS64MOVBload {
  5365  			break
  5366  		}
  5367  		_ = x.Args[1]
  5368  		v.reset(OpMIPS64MOVVreg)
  5369  		v.AddArg(x)
  5370  		return true
  5371  	}
  5372  	// match: (MOVWreg x:(MOVBUload _ _))
  5373  	// cond:
  5374  	// result: (MOVVreg x)
  5375  	for {
  5376  		x := v.Args[0]
  5377  		if x.Op != OpMIPS64MOVBUload {
  5378  			break
  5379  		}
  5380  		_ = x.Args[1]
  5381  		v.reset(OpMIPS64MOVVreg)
  5382  		v.AddArg(x)
  5383  		return true
  5384  	}
  5385  	// match: (MOVWreg x:(MOVHload _ _))
  5386  	// cond:
  5387  	// result: (MOVVreg x)
  5388  	for {
  5389  		x := v.Args[0]
  5390  		if x.Op != OpMIPS64MOVHload {
  5391  			break
  5392  		}
  5393  		_ = x.Args[1]
  5394  		v.reset(OpMIPS64MOVVreg)
  5395  		v.AddArg(x)
  5396  		return true
  5397  	}
  5398  	// match: (MOVWreg x:(MOVHUload _ _))
  5399  	// cond:
  5400  	// result: (MOVVreg x)
  5401  	for {
  5402  		x := v.Args[0]
  5403  		if x.Op != OpMIPS64MOVHUload {
  5404  			break
  5405  		}
  5406  		_ = x.Args[1]
  5407  		v.reset(OpMIPS64MOVVreg)
  5408  		v.AddArg(x)
  5409  		return true
  5410  	}
  5411  	// match: (MOVWreg x:(MOVWload _ _))
  5412  	// cond:
  5413  	// result: (MOVVreg x)
  5414  	for {
  5415  		x := v.Args[0]
  5416  		if x.Op != OpMIPS64MOVWload {
  5417  			break
  5418  		}
  5419  		_ = x.Args[1]
  5420  		v.reset(OpMIPS64MOVVreg)
  5421  		v.AddArg(x)
  5422  		return true
  5423  	}
  5424  	// match: (MOVWreg x:(MOVBreg _))
  5425  	// cond:
  5426  	// result: (MOVVreg x)
  5427  	for {
  5428  		x := v.Args[0]
  5429  		if x.Op != OpMIPS64MOVBreg {
  5430  			break
  5431  		}
  5432  		v.reset(OpMIPS64MOVVreg)
  5433  		v.AddArg(x)
  5434  		return true
  5435  	}
  5436  	// match: (MOVWreg x:(MOVBUreg _))
  5437  	// cond:
  5438  	// result: (MOVVreg x)
  5439  	for {
  5440  		x := v.Args[0]
  5441  		if x.Op != OpMIPS64MOVBUreg {
  5442  			break
  5443  		}
  5444  		v.reset(OpMIPS64MOVVreg)
  5445  		v.AddArg(x)
  5446  		return true
  5447  	}
  5448  	// match: (MOVWreg x:(MOVHreg _))
  5449  	// cond:
  5450  	// result: (MOVVreg x)
  5451  	for {
  5452  		x := v.Args[0]
  5453  		if x.Op != OpMIPS64MOVHreg {
  5454  			break
  5455  		}
  5456  		v.reset(OpMIPS64MOVVreg)
  5457  		v.AddArg(x)
  5458  		return true
  5459  	}
  5460  	// match: (MOVWreg x:(MOVHreg _))
  5461  	// cond:
  5462  	// result: (MOVVreg x)
  5463  	for {
  5464  		x := v.Args[0]
  5465  		if x.Op != OpMIPS64MOVHreg {
  5466  			break
  5467  		}
  5468  		v.reset(OpMIPS64MOVVreg)
  5469  		v.AddArg(x)
  5470  		return true
  5471  	}
  5472  	// match: (MOVWreg x:(MOVWreg _))
  5473  	// cond:
  5474  	// result: (MOVVreg x)
  5475  	for {
  5476  		x := v.Args[0]
  5477  		if x.Op != OpMIPS64MOVWreg {
  5478  			break
  5479  		}
  5480  		v.reset(OpMIPS64MOVVreg)
  5481  		v.AddArg(x)
  5482  		return true
  5483  	}
  5484  	return false
  5485  }
// rewriteValueMIPS64_OpMIPS64MOVWreg_10 is the continuation of the MOVWreg
// rules: it folds a constant argument by sign-extending its low 32 bits.
func rewriteValueMIPS64_OpMIPS64MOVWreg_10(v *Value) bool {
	// match: (MOVWreg (MOVVconst [c]))
	// cond:
	// result: (MOVVconst [int64(int32(c))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		// Sign-extension of a constant: truncate to int32, then widen.
		v.AuxInt = int64(int32(c))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVWstore_0 optimizes 32-bit stores: it folds
// address arithmetic (ADDVconst/MOVVaddr) into the store offset, turns a
// store of constant zero into MOVWstorezero, and drops a redundant
// sign/zero extension of the stored value (only the low 32 bits are
// written, so the extension cannot be observed).
func rewriteValueMIPS64_OpMIPS64MOVWstore_0(v *Value) bool {
	// match: (MOVWstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVWstore [off1+off2] {sym} ptr val mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[2] // assert v has three args (addr, val, mem)
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVWstore)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVWstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVWstore)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWstore [off] {sym} ptr (MOVVconst [0]) mem)
	// cond:
	// result: (MOVWstorezero [off] {sym} ptr mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		mem := v.Args[2]
		v.reset(OpMIPS64MOVWstorezero)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWstore [off] {sym} ptr (MOVWreg x) mem)
	// cond:
	// result: (MOVWstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVWreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpMIPS64MOVWstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWstore [off] {sym} ptr (MOVWUreg x) mem)
	// cond:
	// result: (MOVWstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVWUreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpMIPS64MOVWstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVWstorezero_0 folds address arithmetic into
// the zero-store's offset, mirroring the MOVWload/MOVWstore offset-merging
// rules (combined offset must stay within 32 bits; symbols must merge).
func rewriteValueMIPS64_OpMIPS64MOVWstorezero_0(v *Value) bool {
	// match: (MOVWstorezero [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVWstorezero [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[1] // assert v has two args (addr, mem)
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVWstorezero)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWstorezero [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVWstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVWstorezero)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64NEGV_0 folds negation of a constant into the
// constant itself (two's-complement wraparound matches hardware behavior).
func rewriteValueMIPS64_OpMIPS64NEGV_0(v *Value) bool {
	// match: (NEGV (MOVVconst [c]))
	// cond:
	// result: (MOVVconst [-c])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = -c
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64NOR_0 canonicalizes NOR with a constant operand
// (on either side — NOR is commutative) into NORconst, provided the constant
// fits in 32 bits so it can be encoded as an immediate.
func rewriteValueMIPS64_OpMIPS64NOR_0(v *Value) bool {
	// match: (NOR x (MOVVconst [c]))
	// cond: is32Bit(c)
	// result: (NORconst [c] x)
	for {
		_ = v.Args[1] // assert v has two args
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_1.AuxInt
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64NORconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (NOR (MOVVconst [c]) x)
	// cond: is32Bit(c)
	// result: (NORconst [c] x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64NORconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64NORconst_0 folds NORconst applied to a constant:
// NOR(c, d) == ^(c|d).
func rewriteValueMIPS64_OpMIPS64NORconst_0(v *Value) bool {
	// match: (NORconst [c] (MOVVconst [d]))
	// cond:
	// result: (MOVVconst [^(c|d)])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = ^(c | d)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64OR_0 canonicalizes OR with a 32-bit-encodable
// constant operand (either side) into ORconst, and simplifies the idempotent
// case OR x x to x.
func rewriteValueMIPS64_OpMIPS64OR_0(v *Value) bool {
	// match: (OR x (MOVVconst [c]))
	// cond: is32Bit(c)
	// result: (ORconst [c] x)
	for {
		_ = v.Args[1] // assert v has two args
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_1.AuxInt
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64ORconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (OR (MOVVconst [c]) x)
	// cond: is32Bit(c)
	// result: (ORconst [c] x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64ORconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (OR x x)
	// cond:
	// result: x
	for {
		_ = v.Args[1]
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64ORconst_0 simplifies ORconst: OR with 0 is the
// identity, OR with -1 saturates to -1, OR of two constants folds, and
// nested ORconsts combine when the merged constant is still encodable.
func rewriteValueMIPS64_OpMIPS64ORconst_0(v *Value) bool {
	// match: (ORconst [0] x)
	// cond:
	// result: x
	for {
		if v.AuxInt != 0 {
			break
		}
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (ORconst [-1] _)
	// cond:
	// result: (MOVVconst [-1])
	for {
		if v.AuxInt != -1 {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = -1
		return true
	}
	// match: (ORconst [c] (MOVVconst [d]))
	// cond:
	// result: (MOVVconst [c|d])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = c | d
		return true
	}
	// match: (ORconst [c] (ORconst [d] x))
	// cond: is32Bit(c|d)
	// result: (ORconst [c|d] x)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ORconst {
			break
		}
		d := v_0.AuxInt
		x := v_0.Args[0]
		if !(is32Bit(c | d)) {
			break
		}
		v.reset(OpMIPS64ORconst)
		v.AuxInt = c | d
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SGT_0 canonicalizes a signed set-greater-than
// whose first operand is a 32-bit-encodable constant into SGTconst.
// (SGT is not commutative, so only the constant-first form is rewritten.)
func rewriteValueMIPS64_OpMIPS64SGT_0(v *Value) bool {
	// match: (SGT (MOVVconst [c]) x)
	// cond: is32Bit(c)
	// result: (SGTconst [c] x)
	for {
		_ = v.Args[1] // assert v has two args
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64SGTconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SGTU_0 canonicalizes an unsigned
// set-greater-than whose first operand is a 32-bit-encodable constant into
// SGTUconst. (SGTU is not commutative, so only the constant-first form is
// rewritten.)
func rewriteValueMIPS64_OpMIPS64SGTU_0(v *Value) bool {
	// match: (SGTU (MOVVconst [c]) x)
	// cond: is32Bit(c)
	// result: (SGTUconst [c] x)
	for {
		_ = v.Args[1] // assert v has two args
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64SGTUconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SGTUconst_0 resolves unsigned comparisons
// against a constant at compile time: it folds two-constant comparisons
// outright, and proves c > arg when the argument's value range is bounded
// above by its producing op (zero-extension, masking, or right shift).
func rewriteValueMIPS64_OpMIPS64SGTUconst_0(v *Value) bool {
	// match: (SGTUconst [c] (MOVVconst [d]))
	// cond: uint64(c)>uint64(d)
	// result: (MOVVconst [1])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0.AuxInt
		if !(uint64(c) > uint64(d)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 1
		return true
	}
	// match: (SGTUconst [c] (MOVVconst [d]))
	// cond: uint64(c)<=uint64(d)
	// result: (MOVVconst [0])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0.AuxInt
		if !(uint64(c) <= uint64(d)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	// match: (SGTUconst [c] (MOVBUreg _))
	// cond: 0xff < uint64(c)
	// result: (MOVVconst [1])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVBUreg {
			break
		}
		// Zero-extended byte is at most 0xff.
		if !(0xff < uint64(c)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 1
		return true
	}
	// match: (SGTUconst [c] (MOVHUreg _))
	// cond: 0xffff < uint64(c)
	// result: (MOVVconst [1])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVHUreg {
			break
		}
		// Zero-extended halfword is at most 0xffff.
		if !(0xffff < uint64(c)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 1
		return true
	}
	// match: (SGTUconst [c] (ANDconst [m] _))
	// cond: uint64(m) < uint64(c)
	// result: (MOVVconst [1])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ANDconst {
			break
		}
		m := v_0.AuxInt
		// A value masked with m is at most m (as unsigned).
		if !(uint64(m) < uint64(c)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 1
		return true
	}
	// match: (SGTUconst [c] (SRLVconst _ [d]))
	// cond: 0 < d && d <= 63 && 1<<uint64(64-d) <= uint64(c)
	// result: (MOVVconst [1])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64SRLVconst {
			break
		}
		d := v_0.AuxInt
		// Logical right shift by d leaves a value < 1<<(64-d).
		if !(0 < d && d <= 63 && 1<<uint64(64-d) <= uint64(c)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 1
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SGTconst_0 resolves signed comparisons against
// a constant at compile time: it folds two-constant comparisons, and decides
// c > arg when the argument's signed range is bounded by its producing
// sign/zero-extension op. Continues in rewriteValueMIPS64_OpMIPS64SGTconst_10.
func rewriteValueMIPS64_OpMIPS64SGTconst_0(v *Value) bool {
	// match: (SGTconst [c] (MOVVconst [d]))
	// cond: int64(c)>int64(d)
	// result: (MOVVconst [1])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0.AuxInt
		if !(int64(c) > int64(d)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 1
		return true
	}
	// match: (SGTconst [c] (MOVVconst [d]))
	// cond: int64(c)<=int64(d)
	// result: (MOVVconst [0])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0.AuxInt
		if !(int64(c) <= int64(d)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	// match: (SGTconst [c] (MOVBreg _))
	// cond: 0x7f < int64(c)
	// result: (MOVVconst [1])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVBreg {
			break
		}
		// Sign-extended byte is in [-0x80, 0x7f].
		if !(0x7f < int64(c)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 1
		return true
	}
	// match: (SGTconst [c] (MOVBreg _))
	// cond: int64(c) <= -0x80
	// result: (MOVVconst [0])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVBreg {
			break
		}
		if !(int64(c) <= -0x80) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	// match: (SGTconst [c] (MOVBUreg _))
	// cond: 0xff < int64(c)
	// result: (MOVVconst [1])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVBUreg {
			break
		}
		// Zero-extended byte is in [0, 0xff].
		if !(0xff < int64(c)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 1
		return true
	}
	// match: (SGTconst [c] (MOVBUreg _))
	// cond: int64(c) < 0
	// result: (MOVVconst [0])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVBUreg {
			break
		}
		if !(int64(c) < 0) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	// match: (SGTconst [c] (MOVHreg _))
	// cond: 0x7fff < int64(c)
	// result: (MOVVconst [1])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVHreg {
			break
		}
		// Sign-extended halfword is in [-0x8000, 0x7fff].
		if !(0x7fff < int64(c)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 1
		return true
	}
	// match: (SGTconst [c] (MOVHreg _))
	// cond: int64(c) <= -0x8000
	// result: (MOVVconst [0])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVHreg {
			break
		}
		if !(int64(c) <= -0x8000) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	// match: (SGTconst [c] (MOVHUreg _))
	// cond: 0xffff < int64(c)
	// result: (MOVVconst [1])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVHUreg {
			break
		}
		// Zero-extended halfword is in [0, 0xffff].
		if !(0xffff < int64(c)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 1
		return true
	}
	// match: (SGTconst [c] (MOVHUreg _))
	// cond: int64(c) < 0
	// result: (MOVVconst [0])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVHUreg {
			break
		}
		if !(int64(c) < 0) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SGTconst_10 continues the SGTconst rules:
// range-based resolution against zero-extended words, masked values, and
// logically right-shifted values (all known non-negative and bounded).
func rewriteValueMIPS64_OpMIPS64SGTconst_10(v *Value) bool {
	// match: (SGTconst [c] (MOVWUreg _))
	// cond: int64(c) < 0
	// result: (MOVVconst [0])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVWUreg {
			break
		}
		// Zero-extended word is non-negative; a negative c can't exceed it.
		if !(int64(c) < 0) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	// match: (SGTconst [c] (ANDconst [m] _))
	// cond: 0 <= m && m < c
	// result: (MOVVconst [1])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ANDconst {
			break
		}
		m := v_0.AuxInt
		// A value masked with non-negative m lies in [0, m].
		if !(0 <= m && m < c) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 1
		return true
	}
	// match: (SGTconst [c] (SRLVconst _ [d]))
	// cond: 0 <= c && 0 < d && d <= 63 && 1<<uint64(64-d) <= c
	// result: (MOVVconst [1])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64SRLVconst {
			break
		}
		d := v_0.AuxInt
		// Logical right shift by d leaves a value in [0, 1<<(64-d)).
		if !(0 <= c && 0 < d && d <= 63 && 1<<uint64(64-d) <= c) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 1
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SLLV_0 rewrites variable left shifts by a
// constant: a shift amount of 64 or more produces 0; otherwise the shift
// becomes SLLVconst. Rule order matters — the >=64 case must be checked
// before the generic constant case.
func rewriteValueMIPS64_OpMIPS64SLLV_0(v *Value) bool {
	// match: (SLLV _ (MOVVconst [c]))
	// cond: uint64(c)>=64
	// result: (MOVVconst [0])
	for {
		_ = v.Args[1] // assert v has two args
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_1.AuxInt
		if !(uint64(c) >= 64) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	// match: (SLLV x (MOVVconst [c]))
	// cond:
	// result: (SLLVconst x [c])
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpMIPS64SLLVconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SLLVconst_0 folds a constant left shift of a
// constant. (The preceding SLLV rules only form SLLVconst with c < 64.)
func rewriteValueMIPS64_OpMIPS64SLLVconst_0(v *Value) bool {
	// match: (SLLVconst [c] (MOVVconst [d]))
	// cond:
	// result: (MOVVconst [int64(d)<<uint64(c)])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64(d) << uint64(c)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SRAV_0 rewrites variable arithmetic right
// shifts by a constant: an amount of 64 or more is clamped to 63 (which
// yields all sign bits); otherwise the shift becomes SRAVconst. Rule order
// matters — the >=64 clamp must precede the generic constant case.
func rewriteValueMIPS64_OpMIPS64SRAV_0(v *Value) bool {
	// match: (SRAV x (MOVVconst [c]))
	// cond: uint64(c)>=64
	// result: (SRAVconst x [63])
	for {
		_ = v.Args[1] // assert v has two args
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_1.AuxInt
		if !(uint64(c) >= 64) {
			break
		}
		v.reset(OpMIPS64SRAVconst)
		v.AuxInt = 63
		v.AddArg(x)
		return true
	}
	// match: (SRAV x (MOVVconst [c]))
	// cond:
	// result: (SRAVconst x [c])
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpMIPS64SRAVconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SRAVconst_0 folds a constant arithmetic right
// shift of a constant (Go's signed >> sign-extends, matching the hardware).
func rewriteValueMIPS64_OpMIPS64SRAVconst_0(v *Value) bool {
	// match: (SRAVconst [c] (MOVVconst [d]))
	// cond:
	// result: (MOVVconst [int64(d)>>uint64(c)])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64(d) >> uint64(c)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SRLV_0 rewrites variable logical right shifts
// by a constant: an amount of 64 or more yields 0; otherwise the shift
// becomes SRLVconst. Rule order matters — the >=64 case must come first.
func rewriteValueMIPS64_OpMIPS64SRLV_0(v *Value) bool {
	// match: (SRLV _ (MOVVconst [c]))
	// cond: uint64(c)>=64
	// result: (MOVVconst [0])
	for {
		_ = v.Args[1] // assert v has two args
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_1.AuxInt
		if !(uint64(c) >= 64) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	// match: (SRLV x (MOVVconst [c]))
	// cond:
	// result: (SRLVconst x [c])
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpMIPS64SRLVconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SRLVconst_0 folds a constant logical right
// shift of a constant (shift performed on the unsigned reinterpretation so
// zeros, not sign bits, are shifted in).
func rewriteValueMIPS64_OpMIPS64SRLVconst_0(v *Value) bool {
	// match: (SRLVconst [c] (MOVVconst [d]))
	// cond:
	// result: (MOVVconst [int64(uint64(d)>>uint64(c))])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64(uint64(d) >> uint64(c))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SUBV_0 simplifies 64-bit subtraction:
// subtracting an encodable constant becomes SUBVconst, x-x is 0, and
// 0-x becomes a negation.
func rewriteValueMIPS64_OpMIPS64SUBV_0(v *Value) bool {
	// match: (SUBV x (MOVVconst [c]))
	// cond: is32Bit(c)
	// result: (SUBVconst [c] x)
	for {
		_ = v.Args[1] // assert v has two args
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_1.AuxInt
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64SUBVconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (SUBV x x)
	// cond:
	// result: (MOVVconst [0])
	for {
		_ = v.Args[1]
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	// match: (SUBV (MOVVconst [0]) x)
	// cond:
	// result: (NEGV x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		x := v.Args[1]
		v.reset(OpMIPS64NEGV)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SUBVconst_0 simplifies SUBVconst: subtracting 0
// is the identity, subtracting from a constant folds, and chained
// SUBVconst/ADDVconst pairs merge into a single ADDVconst when the combined
// immediate still fits in 32 bits.
func rewriteValueMIPS64_OpMIPS64SUBVconst_0(v *Value) bool {
	// match: (SUBVconst [0] x)
	// cond:
	// result: x
	for {
		if v.AuxInt != 0 {
			break
		}
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (SUBVconst [c] (MOVVconst [d]))
	// cond:
	// result: (MOVVconst [d-c])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		// SUBVconst computes arg-c, hence d-c (not c-d).
		v.AuxInt = d - c
		return true
	}
	// match: (SUBVconst [c] (SUBVconst [d] x))
	// cond: is32Bit(-c-d)
	// result: (ADDVconst [-c-d] x)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64SUBVconst {
			break
		}
		d := v_0.AuxInt
		x := v_0.Args[0]
		if !(is32Bit(-c - d)) {
			break
		}
		v.reset(OpMIPS64ADDVconst)
		v.AuxInt = -c - d
		v.AddArg(x)
		return true
	}
	// match: (SUBVconst [c] (ADDVconst [d] x))
	// cond: is32Bit(-c+d)
	// result: (ADDVconst [-c+d] x)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		d := v_0.AuxInt
		x := v_0.Args[0]
		if !(is32Bit(-c + d)) {
			break
		}
		v.reset(OpMIPS64ADDVconst)
		v.AuxInt = -c + d
		v.AddArg(x)
		return true
	}
	return false
}
  6512  func rewriteValueMIPS64_OpMIPS64XOR_0(v *Value) bool {
  6513  	// match: (XOR x (MOVVconst [c]))
  6514  	// cond: is32Bit(c)
  6515  	// result: (XORconst [c] x)
  6516  	for {
  6517  		_ = v.Args[1]
  6518  		x := v.Args[0]
  6519  		v_1 := v.Args[1]
  6520  		if v_1.Op != OpMIPS64MOVVconst {
  6521  			break
  6522  		}
  6523  		c := v_1.AuxInt
  6524  		if !(is32Bit(c)) {
  6525  			break
  6526  		}
  6527  		v.reset(OpMIPS64XORconst)
  6528  		v.AuxInt = c
  6529  		v.AddArg(x)
  6530  		return true
  6531  	}
  6532  	// match: (XOR (MOVVconst [c]) x)
  6533  	// cond: is32Bit(c)
  6534  	// result: (XORconst [c] x)
  6535  	for {
  6536  		_ = v.Args[1]
  6537  		v_0 := v.Args[0]
  6538  		if v_0.Op != OpMIPS64MOVVconst {
  6539  			break
  6540  		}
  6541  		c := v_0.AuxInt
  6542  		x := v.Args[1]
  6543  		if !(is32Bit(c)) {
  6544  			break
  6545  		}
  6546  		v.reset(OpMIPS64XORconst)
  6547  		v.AuxInt = c
  6548  		v.AddArg(x)
  6549  		return true
  6550  	}
  6551  	// match: (XOR x x)
  6552  	// cond:
  6553  	// result: (MOVVconst [0])
  6554  	for {
  6555  		_ = v.Args[1]
  6556  		x := v.Args[0]
  6557  		if x != v.Args[1] {
  6558  			break
  6559  		}
  6560  		v.reset(OpMIPS64MOVVconst)
  6561  		v.AuxInt = 0
  6562  		return true
  6563  	}
  6564  	return false
  6565  }
  6566  func rewriteValueMIPS64_OpMIPS64XORconst_0(v *Value) bool {
  6567  	// match: (XORconst [0] x)
  6568  	// cond:
  6569  	// result: x
  6570  	for {
  6571  		if v.AuxInt != 0 {
  6572  			break
  6573  		}
  6574  		x := v.Args[0]
  6575  		v.reset(OpCopy)
  6576  		v.Type = x.Type
  6577  		v.AddArg(x)
  6578  		return true
  6579  	}
  6580  	// match: (XORconst [-1] x)
  6581  	// cond:
  6582  	// result: (NORconst [0] x)
  6583  	for {
  6584  		if v.AuxInt != -1 {
  6585  			break
  6586  		}
  6587  		x := v.Args[0]
  6588  		v.reset(OpMIPS64NORconst)
  6589  		v.AuxInt = 0
  6590  		v.AddArg(x)
  6591  		return true
  6592  	}
  6593  	// match: (XORconst [c] (MOVVconst [d]))
  6594  	// cond:
  6595  	// result: (MOVVconst [c^d])
  6596  	for {
  6597  		c := v.AuxInt
  6598  		v_0 := v.Args[0]
  6599  		if v_0.Op != OpMIPS64MOVVconst {
  6600  			break
  6601  		}
  6602  		d := v_0.AuxInt
  6603  		v.reset(OpMIPS64MOVVconst)
  6604  		v.AuxInt = c ^ d
  6605  		return true
  6606  	}
  6607  	// match: (XORconst [c] (XORconst [d] x))
  6608  	// cond: is32Bit(c^d)
  6609  	// result: (XORconst [c^d] x)
  6610  	for {
  6611  		c := v.AuxInt
  6612  		v_0 := v.Args[0]
  6613  		if v_0.Op != OpMIPS64XORconst {
  6614  			break
  6615  		}
  6616  		d := v_0.AuxInt
  6617  		x := v_0.Args[0]
  6618  		if !(is32Bit(c ^ d)) {
  6619  			break
  6620  		}
  6621  		v.reset(OpMIPS64XORconst)
  6622  		v.AuxInt = c ^ d
  6623  		v.AddArg(x)
  6624  		return true
  6625  	}
  6626  	return false
  6627  }
  6628  func rewriteValueMIPS64_OpMod16_0(v *Value) bool {
  6629  	b := v.Block
  6630  	_ = b
  6631  	typ := &b.Func.Config.Types
  6632  	_ = typ
  6633  	// match: (Mod16 x y)
  6634  	// cond:
  6635  	// result: (Select0 (DIVV (SignExt16to64 x) (SignExt16to64 y)))
  6636  	for {
  6637  		_ = v.Args[1]
  6638  		x := v.Args[0]
  6639  		y := v.Args[1]
  6640  		v.reset(OpSelect0)
  6641  		v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
  6642  		v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
  6643  		v1.AddArg(x)
  6644  		v0.AddArg(v1)
  6645  		v2 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
  6646  		v2.AddArg(y)
  6647  		v0.AddArg(v2)
  6648  		v.AddArg(v0)
  6649  		return true
  6650  	}
  6651  }
  6652  func rewriteValueMIPS64_OpMod16u_0(v *Value) bool {
  6653  	b := v.Block
  6654  	_ = b
  6655  	typ := &b.Func.Config.Types
  6656  	_ = typ
  6657  	// match: (Mod16u x y)
  6658  	// cond:
  6659  	// result: (Select0 (DIVVU (ZeroExt16to64 x) (ZeroExt16to64 y)))
  6660  	for {
  6661  		_ = v.Args[1]
  6662  		x := v.Args[0]
  6663  		y := v.Args[1]
  6664  		v.reset(OpSelect0)
  6665  		v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
  6666  		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  6667  		v1.AddArg(x)
  6668  		v0.AddArg(v1)
  6669  		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  6670  		v2.AddArg(y)
  6671  		v0.AddArg(v2)
  6672  		v.AddArg(v0)
  6673  		return true
  6674  	}
  6675  }
  6676  func rewriteValueMIPS64_OpMod32_0(v *Value) bool {
  6677  	b := v.Block
  6678  	_ = b
  6679  	typ := &b.Func.Config.Types
  6680  	_ = typ
  6681  	// match: (Mod32 x y)
  6682  	// cond:
  6683  	// result: (Select0 (DIVV (SignExt32to64 x) (SignExt32to64 y)))
  6684  	for {
  6685  		_ = v.Args[1]
  6686  		x := v.Args[0]
  6687  		y := v.Args[1]
  6688  		v.reset(OpSelect0)
  6689  		v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
  6690  		v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
  6691  		v1.AddArg(x)
  6692  		v0.AddArg(v1)
  6693  		v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
  6694  		v2.AddArg(y)
  6695  		v0.AddArg(v2)
  6696  		v.AddArg(v0)
  6697  		return true
  6698  	}
  6699  }
  6700  func rewriteValueMIPS64_OpMod32u_0(v *Value) bool {
  6701  	b := v.Block
  6702  	_ = b
  6703  	typ := &b.Func.Config.Types
  6704  	_ = typ
  6705  	// match: (Mod32u x y)
  6706  	// cond:
  6707  	// result: (Select0 (DIVVU (ZeroExt32to64 x) (ZeroExt32to64 y)))
  6708  	for {
  6709  		_ = v.Args[1]
  6710  		x := v.Args[0]
  6711  		y := v.Args[1]
  6712  		v.reset(OpSelect0)
  6713  		v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
  6714  		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  6715  		v1.AddArg(x)
  6716  		v0.AddArg(v1)
  6717  		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  6718  		v2.AddArg(y)
  6719  		v0.AddArg(v2)
  6720  		v.AddArg(v0)
  6721  		return true
  6722  	}
  6723  }
  6724  func rewriteValueMIPS64_OpMod64_0(v *Value) bool {
  6725  	b := v.Block
  6726  	_ = b
  6727  	typ := &b.Func.Config.Types
  6728  	_ = typ
  6729  	// match: (Mod64 x y)
  6730  	// cond:
  6731  	// result: (Select0 (DIVV x y))
  6732  	for {
  6733  		_ = v.Args[1]
  6734  		x := v.Args[0]
  6735  		y := v.Args[1]
  6736  		v.reset(OpSelect0)
  6737  		v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
  6738  		v0.AddArg(x)
  6739  		v0.AddArg(y)
  6740  		v.AddArg(v0)
  6741  		return true
  6742  	}
  6743  }
  6744  func rewriteValueMIPS64_OpMod64u_0(v *Value) bool {
  6745  	b := v.Block
  6746  	_ = b
  6747  	typ := &b.Func.Config.Types
  6748  	_ = typ
  6749  	// match: (Mod64u x y)
  6750  	// cond:
  6751  	// result: (Select0 (DIVVU x y))
  6752  	for {
  6753  		_ = v.Args[1]
  6754  		x := v.Args[0]
  6755  		y := v.Args[1]
  6756  		v.reset(OpSelect0)
  6757  		v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
  6758  		v0.AddArg(x)
  6759  		v0.AddArg(y)
  6760  		v.AddArg(v0)
  6761  		return true
  6762  	}
  6763  }
  6764  func rewriteValueMIPS64_OpMod8_0(v *Value) bool {
  6765  	b := v.Block
  6766  	_ = b
  6767  	typ := &b.Func.Config.Types
  6768  	_ = typ
  6769  	// match: (Mod8 x y)
  6770  	// cond:
  6771  	// result: (Select0 (DIVV (SignExt8to64 x) (SignExt8to64 y)))
  6772  	for {
  6773  		_ = v.Args[1]
  6774  		x := v.Args[0]
  6775  		y := v.Args[1]
  6776  		v.reset(OpSelect0)
  6777  		v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
  6778  		v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
  6779  		v1.AddArg(x)
  6780  		v0.AddArg(v1)
  6781  		v2 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
  6782  		v2.AddArg(y)
  6783  		v0.AddArg(v2)
  6784  		v.AddArg(v0)
  6785  		return true
  6786  	}
  6787  }
  6788  func rewriteValueMIPS64_OpMod8u_0(v *Value) bool {
  6789  	b := v.Block
  6790  	_ = b
  6791  	typ := &b.Func.Config.Types
  6792  	_ = typ
  6793  	// match: (Mod8u x y)
  6794  	// cond:
  6795  	// result: (Select0 (DIVVU (ZeroExt8to64 x) (ZeroExt8to64 y)))
  6796  	for {
  6797  		_ = v.Args[1]
  6798  		x := v.Args[0]
  6799  		y := v.Args[1]
  6800  		v.reset(OpSelect0)
  6801  		v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
  6802  		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  6803  		v1.AddArg(x)
  6804  		v0.AddArg(v1)
  6805  		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  6806  		v2.AddArg(y)
  6807  		v0.AddArg(v2)
  6808  		v.AddArg(v0)
  6809  		return true
  6810  	}
  6811  }
// rewriteValueMIPS64_OpMove_0 lowers small fixed-size Move ops (sizes 0, 1,
// 2, 4 and 8 bytes) into explicit load/store pairs, using the widest access
// the type's alignment permits and falling back to narrower copies
// otherwise. The individual stores are chained through the memory argument,
// highest offset first. Returns false when no rule here matches, so the
// remaining Move rules (rewriteValueMIPS64_OpMove_10) can be tried.
func rewriteValueMIPS64_OpMove_0(v *Value) bool {
	// b and typ are emitted unconditionally by the rule generator; the
	// blank assignments keep them referenced even for rule sets that do
	// not use them.
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// Each for-loop below is a single rewrite-rule attempt; "break" means
	// the rule did not match and the next one is tried.
	// match: (Move [0] _ _ mem)
	// cond:
	// result: mem
	for {
		if v.AuxInt != 0 {
			break
		}
		_ = v.Args[2]
		mem := v.Args[2]
		v.reset(OpCopy)
		v.Type = mem.Type
		v.AddArg(mem)
		return true
	}
	// match: (Move [1] dst src mem)
	// cond:
	// result: (MOVBstore dst (MOVBload src mem) mem)
	for {
		if v.AuxInt != 1 {
			break
		}
		_ = v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		v.reset(OpMIPS64MOVBstore)
		v.AddArg(dst)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	// match: (Move [2] {t} dst src mem)
	// cond: t.(*types.Type).Alignment()%2 == 0
	// result: (MOVHstore dst (MOVHload src mem) mem)
	for {
		if v.AuxInt != 2 {
			break
		}
		t := v.Aux
		_ = v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		if !(t.(*types.Type).Alignment()%2 == 0) {
			break
		}
		v.reset(OpMIPS64MOVHstore)
		v.AddArg(dst)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	// Fallback when 2-byte alignment cannot be assumed: two byte copies.
	// match: (Move [2] dst src mem)
	// cond:
	// result: (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem))
	for {
		if v.AuxInt != 2 {
			break
		}
		_ = v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = 1
		v.AddArg(dst)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
		v0.AuxInt = 1
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
		v1.AddArg(dst)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
		v2.AddArg(src)
		v2.AddArg(mem)
		v1.AddArg(v2)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	// match: (Move [4] {t} dst src mem)
	// cond: t.(*types.Type).Alignment()%4 == 0
	// result: (MOVWstore dst (MOVWload src mem) mem)
	for {
		if v.AuxInt != 4 {
			break
		}
		t := v.Aux
		_ = v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		if !(t.(*types.Type).Alignment()%4 == 0) {
			break
		}
		v.reset(OpMIPS64MOVWstore)
		v.AddArg(dst)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32)
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	// 2-byte-aligned fallback: two halfword copies.
	// match: (Move [4] {t} dst src mem)
	// cond: t.(*types.Type).Alignment()%2 == 0
	// result: (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem))
	for {
		if v.AuxInt != 4 {
			break
		}
		t := v.Aux
		_ = v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		if !(t.(*types.Type).Alignment()%2 == 0) {
			break
		}
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = 2
		v.AddArg(dst)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
		v0.AuxInt = 2
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
		v1.AddArg(dst)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
		v2.AddArg(src)
		v2.AddArg(mem)
		v1.AddArg(v2)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	// Unaligned fallback: four byte copies.
	// match: (Move [4] dst src mem)
	// cond:
	// result: (MOVBstore [3] dst (MOVBload [3] src mem) (MOVBstore [2] dst (MOVBload [2] src mem) (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem))))
	for {
		if v.AuxInt != 4 {
			break
		}
		_ = v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = 3
		v.AddArg(dst)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
		v0.AuxInt = 3
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
		v1.AuxInt = 2
		v1.AddArg(dst)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
		v2.AuxInt = 2
		v2.AddArg(src)
		v2.AddArg(mem)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
		v3.AuxInt = 1
		v3.AddArg(dst)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
		v4.AuxInt = 1
		v4.AddArg(src)
		v4.AddArg(mem)
		v3.AddArg(v4)
		v5 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
		v5.AddArg(dst)
		v6 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
		v6.AddArg(src)
		v6.AddArg(mem)
		v5.AddArg(v6)
		v5.AddArg(mem)
		v3.AddArg(v5)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
	// match: (Move [8] {t} dst src mem)
	// cond: t.(*types.Type).Alignment()%8 == 0
	// result: (MOVVstore dst (MOVVload src mem) mem)
	for {
		if v.AuxInt != 8 {
			break
		}
		t := v.Aux
		_ = v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		if !(t.(*types.Type).Alignment()%8 == 0) {
			break
		}
		v.reset(OpMIPS64MOVVstore)
		v.AddArg(dst)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64)
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	// 4-byte-aligned fallback: two word copies.
	// match: (Move [8] {t} dst src mem)
	// cond: t.(*types.Type).Alignment()%4 == 0
	// result: (MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore dst (MOVWload src mem) mem))
	for {
		if v.AuxInt != 8 {
			break
		}
		t := v.Aux
		_ = v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		if !(t.(*types.Type).Alignment()%4 == 0) {
			break
		}
		v.reset(OpMIPS64MOVWstore)
		v.AuxInt = 4
		v.AddArg(dst)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32)
		v0.AuxInt = 4
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem)
		v1.AddArg(dst)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32)
		v2.AddArg(src)
		v2.AddArg(mem)
		v1.AddArg(v2)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	// 2-byte-aligned fallback: four halfword copies.
	// match: (Move [8] {t} dst src mem)
	// cond: t.(*types.Type).Alignment()%2 == 0
	// result: (MOVHstore [6] dst (MOVHload [6] src mem) (MOVHstore [4] dst (MOVHload [4] src mem) (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem))))
	for {
		if v.AuxInt != 8 {
			break
		}
		t := v.Aux
		_ = v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		if !(t.(*types.Type).Alignment()%2 == 0) {
			break
		}
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = 6
		v.AddArg(dst)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
		v0.AuxInt = 6
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
		v1.AuxInt = 4
		v1.AddArg(dst)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
		v2.AuxInt = 4
		v2.AddArg(src)
		v2.AddArg(mem)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
		v3.AuxInt = 2
		v3.AddArg(dst)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
		v4.AuxInt = 2
		v4.AddArg(src)
		v4.AddArg(mem)
		v3.AddArg(v4)
		v5 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
		v5.AddArg(dst)
		v6 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
		v6.AddArg(src)
		v6.AddArg(mem)
		v5.AddArg(v6)
		v5.AddArg(mem)
		v3.AddArg(v5)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMove_10 handles the remaining Move sizes (3, 6, 12,
// 16 and 24 bytes with suitable alignment) as unrolled load/store chains,
// and finally falls back to the LoweredMove pseudo-op for anything larger
// than 24 bytes or not 8-byte aligned. Returns false if nothing matched.
func rewriteValueMIPS64_OpMove_10(v *Value) bool {
	// b, config and typ are emitted unconditionally by the rule
	// generator; the blank assignments keep them referenced.
	b := v.Block
	_ = b
	config := b.Func.Config
	_ = config
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Move [3] dst src mem)
	// cond:
	// result: (MOVBstore [2] dst (MOVBload [2] src mem) (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem)))
	for {
		if v.AuxInt != 3 {
			break
		}
		_ = v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = 2
		v.AddArg(dst)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
		v0.AuxInt = 2
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
		v1.AuxInt = 1
		v1.AddArg(dst)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
		v2.AuxInt = 1
		v2.AddArg(src)
		v2.AddArg(mem)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
		v3.AddArg(dst)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
		v4.AddArg(src)
		v4.AddArg(mem)
		v3.AddArg(v4)
		v3.AddArg(mem)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
	// match: (Move [6] {t} dst src mem)
	// cond: t.(*types.Type).Alignment()%2 == 0
	// result: (MOVHstore [4] dst (MOVHload [4] src mem) (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem)))
	for {
		if v.AuxInt != 6 {
			break
		}
		t := v.Aux
		_ = v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		if !(t.(*types.Type).Alignment()%2 == 0) {
			break
		}
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = 4
		v.AddArg(dst)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
		v0.AuxInt = 4
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
		v1.AuxInt = 2
		v1.AddArg(dst)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
		v2.AuxInt = 2
		v2.AddArg(src)
		v2.AddArg(mem)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
		v3.AddArg(dst)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
		v4.AddArg(src)
		v4.AddArg(mem)
		v3.AddArg(v4)
		v3.AddArg(mem)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
	// match: (Move [12] {t} dst src mem)
	// cond: t.(*types.Type).Alignment()%4 == 0
	// result: (MOVWstore [8] dst (MOVWload [8] src mem) (MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore dst (MOVWload src mem) mem)))
	for {
		if v.AuxInt != 12 {
			break
		}
		t := v.Aux
		_ = v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		if !(t.(*types.Type).Alignment()%4 == 0) {
			break
		}
		v.reset(OpMIPS64MOVWstore)
		v.AuxInt = 8
		v.AddArg(dst)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32)
		v0.AuxInt = 8
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem)
		v1.AuxInt = 4
		v1.AddArg(dst)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32)
		v2.AuxInt = 4
		v2.AddArg(src)
		v2.AddArg(mem)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem)
		v3.AddArg(dst)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32)
		v4.AddArg(src)
		v4.AddArg(mem)
		v3.AddArg(v4)
		v3.AddArg(mem)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
	// match: (Move [16] {t} dst src mem)
	// cond: t.(*types.Type).Alignment()%8 == 0
	// result: (MOVVstore [8] dst (MOVVload [8] src mem) (MOVVstore dst (MOVVload src mem) mem))
	for {
		if v.AuxInt != 16 {
			break
		}
		t := v.Aux
		_ = v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		if !(t.(*types.Type).Alignment()%8 == 0) {
			break
		}
		v.reset(OpMIPS64MOVVstore)
		v.AuxInt = 8
		v.AddArg(dst)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64)
		v0.AuxInt = 8
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem)
		v1.AddArg(dst)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64)
		v2.AddArg(src)
		v2.AddArg(mem)
		v1.AddArg(v2)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	// match: (Move [24] {t} dst src mem)
	// cond: t.(*types.Type).Alignment()%8 == 0
	// result: (MOVVstore [16] dst (MOVVload [16] src mem) (MOVVstore [8] dst (MOVVload [8] src mem) (MOVVstore dst (MOVVload src mem) mem)))
	for {
		if v.AuxInt != 24 {
			break
		}
		t := v.Aux
		_ = v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		if !(t.(*types.Type).Alignment()%8 == 0) {
			break
		}
		v.reset(OpMIPS64MOVVstore)
		v.AuxInt = 16
		v.AddArg(dst)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64)
		v0.AuxInt = 16
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem)
		v1.AuxInt = 8
		v1.AddArg(dst)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64)
		v2.AuxInt = 8
		v2.AddArg(src)
		v2.AddArg(mem)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem)
		v3.AddArg(dst)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64)
		v4.AddArg(src)
		v4.AddArg(mem)
		v3.AddArg(v4)
		v3.AddArg(mem)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
	// General case: emit a LoweredMove pseudo-op. The extra ADDVconst
	// operand is src plus s-moveSize(...), i.e. the address of the last
	// element to copy — presumably the loop bound for LoweredMove's
	// expansion (NOTE(review): confirm against the LoweredMove codegen).
	// match: (Move [s] {t} dst src mem)
	// cond: s > 24 || t.(*types.Type).Alignment()%8 != 0
	// result: (LoweredMove [t.(*types.Type).Alignment()] dst src (ADDVconst <src.Type> src [s-moveSize(t.(*types.Type).Alignment(), config)]) mem)
	for {
		s := v.AuxInt
		t := v.Aux
		_ = v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		if !(s > 24 || t.(*types.Type).Alignment()%8 != 0) {
			break
		}
		v.reset(OpMIPS64LoweredMove)
		v.AuxInt = t.(*types.Type).Alignment()
		v.AddArg(dst)
		v.AddArg(src)
		v0 := b.NewValue0(v.Pos, OpMIPS64ADDVconst, src.Type)
		v0.AuxInt = s - moveSize(t.(*types.Type).Alignment(), config)
		v0.AddArg(src)
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	return false
}
  7348  func rewriteValueMIPS64_OpMul16_0(v *Value) bool {
  7349  	b := v.Block
  7350  	_ = b
  7351  	typ := &b.Func.Config.Types
  7352  	_ = typ
  7353  	// match: (Mul16 x y)
  7354  	// cond:
  7355  	// result: (Select1 (MULVU x y))
  7356  	for {
  7357  		_ = v.Args[1]
  7358  		x := v.Args[0]
  7359  		y := v.Args[1]
  7360  		v.reset(OpSelect1)
  7361  		v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
  7362  		v0.AddArg(x)
  7363  		v0.AddArg(y)
  7364  		v.AddArg(v0)
  7365  		return true
  7366  	}
  7367  }
  7368  func rewriteValueMIPS64_OpMul32_0(v *Value) bool {
  7369  	b := v.Block
  7370  	_ = b
  7371  	typ := &b.Func.Config.Types
  7372  	_ = typ
  7373  	// match: (Mul32 x y)
  7374  	// cond:
  7375  	// result: (Select1 (MULVU x y))
  7376  	for {
  7377  		_ = v.Args[1]
  7378  		x := v.Args[0]
  7379  		y := v.Args[1]
  7380  		v.reset(OpSelect1)
  7381  		v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
  7382  		v0.AddArg(x)
  7383  		v0.AddArg(y)
  7384  		v.AddArg(v0)
  7385  		return true
  7386  	}
  7387  }
  7388  func rewriteValueMIPS64_OpMul32F_0(v *Value) bool {
  7389  	// match: (Mul32F x y)
  7390  	// cond:
  7391  	// result: (MULF x y)
  7392  	for {
  7393  		_ = v.Args[1]
  7394  		x := v.Args[0]
  7395  		y := v.Args[1]
  7396  		v.reset(OpMIPS64MULF)
  7397  		v.AddArg(x)
  7398  		v.AddArg(y)
  7399  		return true
  7400  	}
  7401  }
  7402  func rewriteValueMIPS64_OpMul64_0(v *Value) bool {
  7403  	b := v.Block
  7404  	_ = b
  7405  	typ := &b.Func.Config.Types
  7406  	_ = typ
  7407  	// match: (Mul64 x y)
  7408  	// cond:
  7409  	// result: (Select1 (MULVU x y))
  7410  	for {
  7411  		_ = v.Args[1]
  7412  		x := v.Args[0]
  7413  		y := v.Args[1]
  7414  		v.reset(OpSelect1)
  7415  		v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
  7416  		v0.AddArg(x)
  7417  		v0.AddArg(y)
  7418  		v.AddArg(v0)
  7419  		return true
  7420  	}
  7421  }
  7422  func rewriteValueMIPS64_OpMul64F_0(v *Value) bool {
  7423  	// match: (Mul64F x y)
  7424  	// cond:
  7425  	// result: (MULD x y)
  7426  	for {
  7427  		_ = v.Args[1]
  7428  		x := v.Args[0]
  7429  		y := v.Args[1]
  7430  		v.reset(OpMIPS64MULD)
  7431  		v.AddArg(x)
  7432  		v.AddArg(y)
  7433  		return true
  7434  	}
  7435  }
  7436  func rewriteValueMIPS64_OpMul8_0(v *Value) bool {
  7437  	b := v.Block
  7438  	_ = b
  7439  	typ := &b.Func.Config.Types
  7440  	_ = typ
  7441  	// match: (Mul8 x y)
  7442  	// cond:
  7443  	// result: (Select1 (MULVU x y))
  7444  	for {
  7445  		_ = v.Args[1]
  7446  		x := v.Args[0]
  7447  		y := v.Args[1]
  7448  		v.reset(OpSelect1)
  7449  		v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
  7450  		v0.AddArg(x)
  7451  		v0.AddArg(y)
  7452  		v.AddArg(v0)
  7453  		return true
  7454  	}
  7455  }
  7456  func rewriteValueMIPS64_OpNeg16_0(v *Value) bool {
  7457  	// match: (Neg16 x)
  7458  	// cond:
  7459  	// result: (NEGV x)
  7460  	for {
  7461  		x := v.Args[0]
  7462  		v.reset(OpMIPS64NEGV)
  7463  		v.AddArg(x)
  7464  		return true
  7465  	}
  7466  }
  7467  func rewriteValueMIPS64_OpNeg32_0(v *Value) bool {
  7468  	// match: (Neg32 x)
  7469  	// cond:
  7470  	// result: (NEGV x)
  7471  	for {
  7472  		x := v.Args[0]
  7473  		v.reset(OpMIPS64NEGV)
  7474  		v.AddArg(x)
  7475  		return true
  7476  	}
  7477  }
  7478  func rewriteValueMIPS64_OpNeg32F_0(v *Value) bool {
  7479  	// match: (Neg32F x)
  7480  	// cond:
  7481  	// result: (NEGF x)
  7482  	for {
  7483  		x := v.Args[0]
  7484  		v.reset(OpMIPS64NEGF)
  7485  		v.AddArg(x)
  7486  		return true
  7487  	}
  7488  }
  7489  func rewriteValueMIPS64_OpNeg64_0(v *Value) bool {
  7490  	// match: (Neg64 x)
  7491  	// cond:
  7492  	// result: (NEGV x)
  7493  	for {
  7494  		x := v.Args[0]
  7495  		v.reset(OpMIPS64NEGV)
  7496  		v.AddArg(x)
  7497  		return true
  7498  	}
  7499  }
  7500  func rewriteValueMIPS64_OpNeg64F_0(v *Value) bool {
  7501  	// match: (Neg64F x)
  7502  	// cond:
  7503  	// result: (NEGD x)
  7504  	for {
  7505  		x := v.Args[0]
  7506  		v.reset(OpMIPS64NEGD)
  7507  		v.AddArg(x)
  7508  		return true
  7509  	}
  7510  }
  7511  func rewriteValueMIPS64_OpNeg8_0(v *Value) bool {
  7512  	// match: (Neg8 x)
  7513  	// cond:
  7514  	// result: (NEGV x)
  7515  	for {
  7516  		x := v.Args[0]
  7517  		v.reset(OpMIPS64NEGV)
  7518  		v.AddArg(x)
  7519  		return true
  7520  	}
  7521  }
  7522  func rewriteValueMIPS64_OpNeq16_0(v *Value) bool {
  7523  	b := v.Block
  7524  	_ = b
  7525  	typ := &b.Func.Config.Types
  7526  	_ = typ
  7527  	// match: (Neq16 x y)
  7528  	// cond:
  7529  	// result: (SGTU (XOR (ZeroExt16to32 x) (ZeroExt16to64 y)) (MOVVconst [0]))
  7530  	for {
  7531  		_ = v.Args[1]
  7532  		x := v.Args[0]
  7533  		y := v.Args[1]
  7534  		v.reset(OpMIPS64SGTU)
  7535  		v0 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
  7536  		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  7537  		v1.AddArg(x)
  7538  		v0.AddArg(v1)
  7539  		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  7540  		v2.AddArg(y)
  7541  		v0.AddArg(v2)
  7542  		v.AddArg(v0)
  7543  		v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
  7544  		v3.AuxInt = 0
  7545  		v.AddArg(v3)
  7546  		return true
  7547  	}
  7548  }
// rewriteValueMIPS64_OpNeq32_0 lowers Neq32 to an unsigned compare:
// x != y  ⇒  (zext32(x) ^ zext32(y)) >u 0. Always fires.
func rewriteValueMIPS64_OpNeq32_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Neq32 x y)
	// cond:
	// result: (SGTU (XOR (ZeroExt32to64 x) (ZeroExt32to64 y)) (MOVVconst [0]))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v3.AuxInt = 0
		v.AddArg(v3)
		return true
	}
}
// rewriteValueMIPS64_OpNeq32F_0 lowers Neq32F via the FP compare unit:
// run CMPEQF and take the inverted flag (FPFlagFalse). Always fires.
func rewriteValueMIPS64_OpNeq32F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Neq32F x y)
	// cond:
	// result: (FPFlagFalse (CMPEQF x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64FPFlagFalse)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPEQF, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpNeq64_0 lowers Neq64: x != y  ⇒  (x^y) >u 0.
// No extension needed at full register width. Always fires.
func rewriteValueMIPS64_OpNeq64_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Neq64 x y)
	// cond:
	// result: (SGTU (XOR x y) (MOVVconst [0]))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v1.AuxInt = 0
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpNeq64F_0 lowers Neq64F via the FP compare unit:
// run CMPEQD and take the inverted flag (FPFlagFalse). Always fires.
func rewriteValueMIPS64_OpNeq64F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Neq64F x y)
	// cond:
	// result: (FPFlagFalse (CMPEQD x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64FPFlagFalse)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPEQD, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpNeq8_0 lowers Neq8 to an unsigned compare:
// x != y  ⇒  (zext8(x) ^ zext8(y)) >u 0. Always fires.
func rewriteValueMIPS64_OpNeq8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Neq8 x y)
	// cond:
	// result: (SGTU (XOR (ZeroExt8to64 x) (ZeroExt8to64 y)) (MOVVconst [0]))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v3.AuxInt = 0
		v.AddArg(v3)
		return true
	}
}
// rewriteValueMIPS64_OpNeqB_0 lowers NeqB: for 0/1-valued booleans,
// inequality is simply XOR. Always fires.
func rewriteValueMIPS64_OpNeqB_0(v *Value) bool {
	// match: (NeqB x y)
	// cond:
	// result: (XOR x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpNeqPtr_0 lowers NeqPtr exactly like Neq64:
// p != q  ⇒  (p^q) >u 0. Always fires.
func rewriteValueMIPS64_OpNeqPtr_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (NeqPtr x y)
	// cond:
	// result: (SGTU (XOR x y) (MOVVconst [0]))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v1.AuxInt = 0
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpNilCheck_0 lowers the generic NilCheck to the
// MIPS64-specific LoweredNilCheck, preserving the memory argument.
// Always fires.
func rewriteValueMIPS64_OpNilCheck_0(v *Value) bool {
	// match: (NilCheck ptr mem)
	// cond:
	// result: (LoweredNilCheck ptr mem)
	for {
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		v.reset(OpMIPS64LoweredNilCheck)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
}
// rewriteValueMIPS64_OpNot_0 lowers boolean Not: for 0/1-valued
// booleans, !x is x^1 (XORconst [1]). Always fires.
func rewriteValueMIPS64_OpNot_0(v *Value) bool {
	// match: (Not x)
	// cond:
	// result: (XORconst [1] x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64XORconst)
		v.AuxInt = 1
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpOffPtr_0 lowers OffPtr. Two rules are tried in
// order: an SP-based offset becomes a MOVVaddr (address-of form that
// folds into addressing modes); anything else becomes a plain
// ADDVconst. The second rule is unconditional, so this always returns
// true.
func rewriteValueMIPS64_OpOffPtr_0(v *Value) bool {
	// match: (OffPtr [off] ptr:(SP))
	// cond:
	// result: (MOVVaddr [off] ptr)
	for {
		off := v.AuxInt
		ptr := v.Args[0]
		if ptr.Op != OpSP {
			break // not stack-relative; fall through to the generic rule
		}
		v.reset(OpMIPS64MOVVaddr)
		v.AuxInt = off
		v.AddArg(ptr)
		return true
	}
	// match: (OffPtr [off] ptr)
	// cond:
	// result: (ADDVconst [off] ptr)
	for {
		off := v.AuxInt
		ptr := v.Args[0]
		v.reset(OpMIPS64ADDVconst)
		v.AuxInt = off
		v.AddArg(ptr)
		return true
	}
}
// rewriteValueMIPS64_OpOr16_0 lowers Or16 to the full-width MIPS64 OR;
// bitwise OR is width-agnostic so no extension is needed. Always fires.
func rewriteValueMIPS64_OpOr16_0(v *Value) bool {
	// match: (Or16 x y)
	// cond:
	// result: (OR x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64OR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpOr32_0 lowers Or32 to the full-width MIPS64 OR.
// Always fires.
func rewriteValueMIPS64_OpOr32_0(v *Value) bool {
	// match: (Or32 x y)
	// cond:
	// result: (OR x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64OR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpOr64_0 lowers Or64 to the MIPS64 OR. Always
// fires.
func rewriteValueMIPS64_OpOr64_0(v *Value) bool {
	// match: (Or64 x y)
	// cond:
	// result: (OR x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64OR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpOr8_0 lowers Or8 to the full-width MIPS64 OR.
// Always fires.
func rewriteValueMIPS64_OpOr8_0(v *Value) bool {
	// match: (Or8 x y)
	// cond:
	// result: (OR x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64OR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpOrB_0 lowers boolean OrB to the MIPS64 OR
// (booleans are 0/1, so bitwise OR is logical OR). Always fires.
func rewriteValueMIPS64_OpOrB_0(v *Value) bool {
	// match: (OrB x y)
	// cond:
	// result: (OR x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64OR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpRound32F_0 eliminates Round32F: it is a no-op on
// this target, so the value is replaced by a Copy of its argument
// (keeping the argument's type). Always fires.
func rewriteValueMIPS64_OpRound32F_0(v *Value) bool {
	// match: (Round32F x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpRound64F_0 eliminates Round64F: a no-op on this
// target, replaced by a Copy of the argument. Always fires.
func rewriteValueMIPS64_OpRound64F_0(v *Value) bool {
	// match: (Round64F x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpRsh16Ux16_0 lowers the unsigned 16-bit right
// shift with a 16-bit count. The SRLV result is ANDed with
// NEGV(SGTU(64, zext(y))) — all ones when y < 64, zero otherwise — so
// oversized shift counts yield 0, matching Go's shift semantics.
// Always fires.
func rewriteValueMIPS64_OpRsh16Ux16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh16Ux16 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> (ZeroExt16to64 x) (ZeroExt16to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v5.AddArg(x)
		v4.AddArg(v5)
		v6 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v6.AddArg(y)
		v4.AddArg(v6)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh16Ux32_0 lowers the unsigned 16-bit right
// shift with a 32-bit count; same mask-for-oversized-count scheme as
// Rsh16Ux16 (AND with NEGV(SGTU(64, zext(y)))). Always fires.
func rewriteValueMIPS64_OpRsh16Ux32_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh16Ux32 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> (ZeroExt16to64 x) (ZeroExt32to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v5.AddArg(x)
		v4.AddArg(v5)
		v6 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v6.AddArg(y)
		v4.AddArg(v6)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh16Ux64_0 lowers the unsigned 16-bit right
// shift with a 64-bit count; y needs no extension, mask scheme as in
// Rsh16Ux16. Always fires.
func rewriteValueMIPS64_OpRsh16Ux64_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh16Ux64 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SRLV <t> (ZeroExt16to64 x) y))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(x)
		v3.AddArg(v4)
		v3.AddArg(y)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueMIPS64_OpRsh16Ux8_0 lowers the unsigned 16-bit right
// shift with an 8-bit count; mask scheme as in Rsh16Ux16. Always fires.
func rewriteValueMIPS64_OpRsh16Ux8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh16Ux8 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> (ZeroExt16to64 x) (ZeroExt8to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v5.AddArg(x)
		v4.AddArg(v5)
		v6 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v6.AddArg(y)
		v4.AddArg(v6)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh16x16_0 lowers the signed 16-bit right shift
// with a 16-bit count. The count is ORed with NEGV(SGTU(zext(y), 63))
// — all ones when y > 63 — clamping oversized counts so the SRAV
// saturates to sign bits, matching Go's signed-shift semantics.
// Always fires.
func rewriteValueMIPS64_OpRsh16x16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh16x16 <t> x y)
	// cond:
	// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v5.AuxInt = 63
		v3.AddArg(v5)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v6 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v6.AddArg(y)
		v1.AddArg(v6)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh16x32_0 lowers the signed 16-bit right shift
// with a 32-bit count; same clamp-oversized-count scheme as Rsh16x16.
// Always fires.
func rewriteValueMIPS64_OpRsh16x32_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh16x32 <t> x y)
	// cond:
	// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v5.AuxInt = 63
		v3.AddArg(v5)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v6 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v6.AddArg(y)
		v1.AddArg(v6)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh16x64_0 lowers the signed 16-bit right shift
// with a 64-bit count; y needs no extension, clamp scheme as in
// Rsh16x16. Always fires.
func rewriteValueMIPS64_OpRsh16x64_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh16x64 <t> x y)
	// cond:
	// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v3.AddArg(y)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v4.AuxInt = 63
		v3.AddArg(v4)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh16x8_0 lowers the signed 16-bit right shift
// with an 8-bit count; clamp scheme as in Rsh16x16. Always fires.
func rewriteValueMIPS64_OpRsh16x8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh16x8 <t> x y)
	// cond:
	// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v5.AuxInt = 63
		v3.AddArg(v5)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v6 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v6.AddArg(y)
		v1.AddArg(v6)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh32Ux16_0 lowers the unsigned 32-bit right
// shift with a 16-bit count; AND with NEGV(SGTU(64, zext(y))) zeroes
// the result for counts >= 64 (as in Rsh16Ux16). Always fires.
func rewriteValueMIPS64_OpRsh32Ux16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh32Ux16 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> (ZeroExt32to64 x) (ZeroExt16to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v5.AddArg(x)
		v4.AddArg(v5)
		v6 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v6.AddArg(y)
		v4.AddArg(v6)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh32Ux32_0 lowers the unsigned 32-bit right
// shift with a 32-bit count; mask scheme as in Rsh32Ux16. Always fires.
func rewriteValueMIPS64_OpRsh32Ux32_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh32Ux32 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> (ZeroExt32to64 x) (ZeroExt32to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v5.AddArg(x)
		v4.AddArg(v5)
		v6 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v6.AddArg(y)
		v4.AddArg(v6)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh32Ux64_0 lowers the unsigned 32-bit right
// shift with a 64-bit count; y needs no extension, mask scheme as in
// Rsh32Ux16. Always fires.
func rewriteValueMIPS64_OpRsh32Ux64_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh32Ux64 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SRLV <t> (ZeroExt32to64 x) y))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(x)
		v3.AddArg(v4)
		v3.AddArg(y)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueMIPS64_OpRsh32Ux8_0 lowers the unsigned 32-bit right
// shift with an 8-bit count; mask scheme as in Rsh32Ux16. Always fires.
func rewriteValueMIPS64_OpRsh32Ux8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh32Ux8 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> (ZeroExt32to64 x) (ZeroExt8to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v5.AddArg(x)
		v4.AddArg(v5)
		v6 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v6.AddArg(y)
		v4.AddArg(v6)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh32x16_0 lowers the signed 32-bit right shift
// with a 16-bit count; the OR-with-NEGV(SGTU(y,63)) term clamps
// oversized counts so SRAV saturates to sign bits (as in Rsh16x16).
// Always fires.
func rewriteValueMIPS64_OpRsh32x16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh32x16 <t> x y)
	// cond:
	// result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v5.AuxInt = 63
		v3.AddArg(v5)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v6 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v6.AddArg(y)
		v1.AddArg(v6)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh32x32_0 lowers the signed 32-bit right shift
// with a 32-bit count; clamp scheme as in Rsh32x16. Always fires.
func rewriteValueMIPS64_OpRsh32x32_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh32x32 <t> x y)
	// cond:
	// result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v5.AuxInt = 63
		v3.AddArg(v5)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v6 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v6.AddArg(y)
		v1.AddArg(v6)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh32x64_0 lowers the signed 32-bit right shift
// with a 64-bit count; y needs no extension, clamp scheme as in
// Rsh32x16. Always fires.
func rewriteValueMIPS64_OpRsh32x64_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh32x64 <t> x y)
	// cond:
	// result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v3.AddArg(y)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v4.AuxInt = 63
		v3.AddArg(v4)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh32x8_0 lowers the signed 32-bit right shift
// with an 8-bit count; clamp scheme as in Rsh32x16. Always fires.
func rewriteValueMIPS64_OpRsh32x8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh32x8 <t> x y)
	// cond:
	// result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v5.AuxInt = 63
		v3.AddArg(v5)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v6 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v6.AddArg(y)
		v1.AddArg(v6)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh64Ux16_0 lowers the unsigned 64-bit right
// shift with a 16-bit count; x is already full width so only y is
// extended. Mask scheme as in Rsh16Ux16 (result zeroed for counts
// >= 64). Always fires.
func rewriteValueMIPS64_OpRsh64Ux16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh64Ux16 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> x (ZeroExt16to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh64Ux32_0 lowers the unsigned 64-bit right
// shift with a 32-bit count; mask scheme as in Rsh64Ux16. Always fires.
func rewriteValueMIPS64_OpRsh64Ux32_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh64Ux32 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> x (ZeroExt32to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh64Ux64_0 lowers the unsigned 64-bit right
// shift with a 64-bit count; neither operand needs extension. Mask
// scheme as in Rsh64Ux16. Always fires.
func rewriteValueMIPS64_OpRsh64Ux64_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh64Ux64 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SRLV <t> x y))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v3.AddArg(x)
		v3.AddArg(y)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueMIPS64_OpRsh64Ux8_0 lowers the unsigned 64-bit right
// shift with an 8-bit count; mask scheme as in Rsh64Ux16. Always fires.
func rewriteValueMIPS64_OpRsh64Ux8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh64Ux8 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> x (ZeroExt8to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh64x16_0 lowers the signed 64-bit right shift
// with a 16-bit count; x needs no extension. The OR-with-
// NEGV(SGTU(zext(y),63)) term clamps oversized counts so SRAV
// saturates to sign bits (as in Rsh16x16). Always fires.
func rewriteValueMIPS64_OpRsh64x16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh64x16 <t> x y)
	// cond:
	// result: (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		v.AddArg(x)
		v0 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v4.AuxInt = 63
		v2.AddArg(v4)
		v1.AddArg(v2)
		v0.AddArg(v1)
		v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v5.AddArg(y)
		v0.AddArg(v5)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpRsh64x32_0 lowers the signed 64-bit right shift
// with a 32-bit count; clamp scheme as in Rsh64x16. Always fires.
func rewriteValueMIPS64_OpRsh64x32_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh64x32 <t> x y)
	// cond:
	// result: (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		v.AddArg(x)
		v0 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v4.AuxInt = 63
		v2.AddArg(v4)
		v1.AddArg(v2)
		v0.AddArg(v1)
		v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v5.AddArg(y)
		v0.AddArg(v5)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpRsh64x64_0 lowers Rsh64x64 (signed 64-bit right shift
// by a 64-bit count) to MIPS64 SRAV. No zero-extension of the count is needed
// here since it is already 64 bits wide; the OR/NEGV/SGTU clamp still handles
// counts > 63 by forcing the effective shift to yield the sign bit.
func rewriteValueMIPS64_OpRsh64x64_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh64x64 <t> x y)
	// cond:
	// result: (SRAV x (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		v.AddArg(x)
		v0 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2.AddArg(y)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v3.AuxInt = 63
		v2.AddArg(v3)
		v1.AddArg(v2)
		v0.AddArg(v1)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpRsh64x8_0 lowers Rsh64x8 (signed 64-bit right shift by
// an 8-bit count) to MIPS64 SRAV with the zero-extended count clamped via the
// OR/NEGV/SGTU mask so counts > 63 produce the replicated sign bit.
func rewriteValueMIPS64_OpRsh64x8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh64x8 <t> x y)
	// cond:
	// result: (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		v.AddArg(x)
		v0 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v4.AuxInt = 63
		v2.AddArg(v4)
		v1.AddArg(v2)
		v0.AddArg(v1)
		v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v5.AddArg(y)
		v0.AddArg(v5)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpRsh8Ux16_0 lowers Rsh8Ux16 (unsigned 8-bit right shift
// by a 16-bit count) to MIPS64. The value and count are both zero-extended to
// 64 bits, shifted with SRLV, and the result is ANDed with NEGV(SGTU(64, count)):
// the mask is all-ones when count < 64 and zero otherwise, so over-wide shifts
// yield 0 as Go requires.
func rewriteValueMIPS64_OpRsh8Ux16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh8Ux16 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> (ZeroExt8to64 x) (ZeroExt16to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v5.AddArg(x)
		v4.AddArg(v5)
		v6 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v6.AddArg(y)
		v4.AddArg(v6)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh8Ux32_0 lowers Rsh8Ux32 (unsigned 8-bit right shift
// by a 32-bit count) to MIPS64 SRLV, masking the result to zero when the
// count is >= 64 (see the AND/NEGV/SGTU pattern used by all unsigned shifts).
func rewriteValueMIPS64_OpRsh8Ux32_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh8Ux32 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> (ZeroExt8to64 x) (ZeroExt32to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v5.AddArg(x)
		v4.AddArg(v5)
		v6 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v6.AddArg(y)
		v4.AddArg(v6)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh8Ux64_0 lowers Rsh8Ux64 (unsigned 8-bit right shift
// by a 64-bit count) to MIPS64 SRLV. The count needs no extension; the
// AND/NEGV/SGTU mask zeroes the result when the count is >= 64.
func rewriteValueMIPS64_OpRsh8Ux64_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh8Ux64 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SRLV <t> (ZeroExt8to64 x) y))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(x)
		v3.AddArg(v4)
		v3.AddArg(y)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueMIPS64_OpRsh8Ux8_0 lowers Rsh8Ux8 (unsigned 8-bit right shift by
// an 8-bit count) to MIPS64 SRLV, zero-extending both operands and masking the
// result to zero for counts >= 64 via the AND/NEGV/SGTU pattern.
func rewriteValueMIPS64_OpRsh8Ux8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh8Ux8 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> (ZeroExt8to64 x) (ZeroExt8to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v5.AddArg(x)
		v4.AddArg(v5)
		v6 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v6.AddArg(y)
		v4.AddArg(v6)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh8x16_0 lowers Rsh8x16 (signed 8-bit right shift by a
// 16-bit count) to MIPS64 SRAV. The value is sign-extended to 64 bits first;
// the count is zero-extended and clamped with the OR/NEGV/SGTU mask so counts
// > 63 produce the replicated sign bit.
func rewriteValueMIPS64_OpRsh8x16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh8x16 <t> x y)
	// cond:
	// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v5.AuxInt = 63
		v3.AddArg(v5)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v6 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v6.AddArg(y)
		v1.AddArg(v6)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh8x32_0 lowers Rsh8x32 (signed 8-bit right shift by a
// 32-bit count) to MIPS64 SRAV of the sign-extended value, with the count
// clamped via the OR/NEGV/SGTU mask for counts > 63.
func rewriteValueMIPS64_OpRsh8x32_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh8x32 <t> x y)
	// cond:
	// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v5.AuxInt = 63
		v3.AddArg(v5)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v6 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v6.AddArg(y)
		v1.AddArg(v6)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh8x64_0 lowers Rsh8x64 (signed 8-bit right shift by a
// 64-bit count) to MIPS64 SRAV of the sign-extended value. The count is
// already 64 bits, so only the OR/NEGV/SGTU clamp is applied.
func rewriteValueMIPS64_OpRsh8x64_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh8x64 <t> x y)
	// cond:
	// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v3.AddArg(y)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v4.AuxInt = 63
		v3.AddArg(v4)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh8x8_0 lowers Rsh8x8 (signed 8-bit right shift by an
// 8-bit count) to MIPS64 SRAV of the sign-extended value, with the
// zero-extended count clamped via the OR/NEGV/SGTU mask for counts > 63.
func rewriteValueMIPS64_OpRsh8x8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh8x8 <t> x y)
	// cond:
	// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v5.AuxInt = 63
		v3.AddArg(v5)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v6 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v6.AddArg(y)
		v1.AddArg(v6)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpSelect0_0 simplifies Select0 of MIPS64 divide tuples.
// The rules below treat Select0 of DIVV/DIVVU as the remainder component
// (evident from the power-of-two rule, which rewrites to ANDconst [c-1]):
//   - remainder by 1 is always 0;
//   - unsigned remainder by a power of two becomes a bit mask;
//   - remainder of two constants is folded at compile time.
// Rules are tried in order; the function returns true on the first match.
func rewriteValueMIPS64_OpSelect0_0(v *Value) bool {
	// match: (Select0 (DIVVU _ (MOVVconst [1])))
	// cond:
	// result: (MOVVconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64DIVVU {
			break
		}
		_ = v_0.Args[1]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		if v_0_1.AuxInt != 1 {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	// match: (Select0 (DIVVU x (MOVVconst [c])))
	// cond: isPowerOfTwo(c)
	// result: (ANDconst [c-1] x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64DIVVU {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0_1.AuxInt
		if !(isPowerOfTwo(c)) {
			break
		}
		v.reset(OpMIPS64ANDconst)
		v.AuxInt = c - 1
		v.AddArg(x)
		return true
	}
	// match: (Select0 (DIVV (MOVVconst [c]) (MOVVconst [d])))
	// cond:
	// result: (MOVVconst [int64(c)%int64(d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64DIVV {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0_0.AuxInt
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0_1.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64(c) % int64(d)
		return true
	}
	// match: (Select0 (DIVVU (MOVVconst [c]) (MOVVconst [d])))
	// cond:
	// result: (MOVVconst [int64(uint64(c)%uint64(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64DIVVU {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0_0.AuxInt
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0_1.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64(uint64(c) % uint64(d))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpSelect1_0 simplifies Select1 of MIPS64 multiply tuples
// (the low-word/product component, as shown by the x*1 => x rule):
// multiply by -1 becomes NEGV, by 0 becomes the constant 0, by 1 becomes the
// operand itself, and by a power of two becomes a left shift. Most rules
// appear twice with the constant on either side, since MULVU is commutative.
// Rules are tried in order; the function returns true on the first match.
func rewriteValueMIPS64_OpSelect1_0(v *Value) bool {
	// match: (Select1 (MULVU x (MOVVconst [-1])))
	// cond:
	// result: (NEGV x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MULVU {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		if v_0_1.AuxInt != -1 {
			break
		}
		v.reset(OpMIPS64NEGV)
		v.AddArg(x)
		return true
	}
	// match: (Select1 (MULVU (MOVVconst [-1]) x))
	// cond:
	// result: (NEGV x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MULVU {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpMIPS64MOVVconst {
			break
		}
		if v_0_0.AuxInt != -1 {
			break
		}
		x := v_0.Args[1]
		v.reset(OpMIPS64NEGV)
		v.AddArg(x)
		return true
	}
	// match: (Select1 (MULVU _ (MOVVconst [0])))
	// cond:
	// result: (MOVVconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MULVU {
			break
		}
		_ = v_0.Args[1]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		if v_0_1.AuxInt != 0 {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	// match: (Select1 (MULVU (MOVVconst [0]) _))
	// cond:
	// result: (MOVVconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MULVU {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpMIPS64MOVVconst {
			break
		}
		if v_0_0.AuxInt != 0 {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	// match: (Select1 (MULVU x (MOVVconst [1])))
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MULVU {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		if v_0_1.AuxInt != 1 {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Select1 (MULVU (MOVVconst [1]) x))
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MULVU {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpMIPS64MOVVconst {
			break
		}
		if v_0_0.AuxInt != 1 {
			break
		}
		x := v_0.Args[1]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Select1 (MULVU x (MOVVconst [c])))
	// cond: isPowerOfTwo(c)
	// result: (SLLVconst [log2(c)] x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MULVU {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0_1.AuxInt
		if !(isPowerOfTwo(c)) {
			break
		}
		v.reset(OpMIPS64SLLVconst)
		v.AuxInt = log2(c)
		v.AddArg(x)
		return true
	}
	// match: (Select1 (MULVU (MOVVconst [c]) x))
	// cond: isPowerOfTwo(c)
	// result: (SLLVconst [log2(c)] x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MULVU {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0_0.AuxInt
		x := v_0.Args[1]
		if !(isPowerOfTwo(c)) {
			break
		}
		v.reset(OpMIPS64SLLVconst)
		v.AuxInt = log2(c)
		v.AddArg(x)
		return true
	}
	// NOTE(review): the two rules below repeat the first two rules of this
	// function and so appear unreachable — presumably an artifact of the
	// generator's commutative-match expansion; harmless but worth confirming
	// against gen/MIPS64.rules.
	// match: (Select1 (MULVU (MOVVconst [-1]) x))
	// cond:
	// result: (NEGV x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MULVU {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpMIPS64MOVVconst {
			break
		}
		if v_0_0.AuxInt != -1 {
			break
		}
		x := v_0.Args[1]
		v.reset(OpMIPS64NEGV)
		v.AddArg(x)
		return true
	}
	// match: (Select1 (MULVU x (MOVVconst [-1])))
	// cond:
	// result: (NEGV x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MULVU {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		if v_0_1.AuxInt != -1 {
			break
		}
		v.reset(OpMIPS64NEGV)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpSelect1_10 continues the Select1 rules (split across
// several functions by the generator, ten rules each). It repeats several
// MULVU identities from rewriteValueMIPS64_OpSelect1_0 (commutative-expansion
// duplicates), then strength-reduces the quotient component of DIVVU:
// division by 1 is the identity and division by a power of two becomes a
// logical right shift. It finishes by constant-folding MULVU products.
func rewriteValueMIPS64_OpSelect1_10(v *Value) bool {
	// match: (Select1 (MULVU (MOVVconst [0]) _))
	// cond:
	// result: (MOVVconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MULVU {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpMIPS64MOVVconst {
			break
		}
		if v_0_0.AuxInt != 0 {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	// match: (Select1 (MULVU _ (MOVVconst [0])))
	// cond:
	// result: (MOVVconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MULVU {
			break
		}
		_ = v_0.Args[1]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		if v_0_1.AuxInt != 0 {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	// match: (Select1 (MULVU (MOVVconst [1]) x))
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MULVU {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpMIPS64MOVVconst {
			break
		}
		if v_0_0.AuxInt != 1 {
			break
		}
		x := v_0.Args[1]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Select1 (MULVU x (MOVVconst [1])))
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MULVU {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		if v_0_1.AuxInt != 1 {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Select1 (MULVU (MOVVconst [c]) x))
	// cond: isPowerOfTwo(c)
	// result: (SLLVconst [log2(c)] x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MULVU {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0_0.AuxInt
		x := v_0.Args[1]
		if !(isPowerOfTwo(c)) {
			break
		}
		v.reset(OpMIPS64SLLVconst)
		v.AuxInt = log2(c)
		v.AddArg(x)
		return true
	}
	// match: (Select1 (MULVU x (MOVVconst [c])))
	// cond: isPowerOfTwo(c)
	// result: (SLLVconst [log2(c)] x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MULVU {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0_1.AuxInt
		if !(isPowerOfTwo(c)) {
			break
		}
		v.reset(OpMIPS64SLLVconst)
		v.AuxInt = log2(c)
		v.AddArg(x)
		return true
	}
	// match: (Select1 (DIVVU x (MOVVconst [1])))
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64DIVVU {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		if v_0_1.AuxInt != 1 {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Select1 (DIVVU x (MOVVconst [c])))
	// cond: isPowerOfTwo(c)
	// result: (SRLVconst [log2(c)] x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64DIVVU {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0_1.AuxInt
		if !(isPowerOfTwo(c)) {
			break
		}
		v.reset(OpMIPS64SRLVconst)
		v.AuxInt = log2(c)
		v.AddArg(x)
		return true
	}
	// match: (Select1 (MULVU (MOVVconst [c]) (MOVVconst [d])))
	// cond:
	// result: (MOVVconst [c*d])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MULVU {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0_0.AuxInt
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0_1.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = c * d
		return true
	}
	// match: (Select1 (MULVU (MOVVconst [d]) (MOVVconst [c])))
	// cond:
	// result: (MOVVconst [c*d])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MULVU {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0_0.AuxInt
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0_1.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = c * d
		return true
	}
	return false
}
// rewriteValueMIPS64_OpSelect1_20 holds the final Select1 rules: compile-time
// constant folding of the quotient component of DIVV (signed) and DIVVU
// (unsigned). Note the fold uses Go's native / operator, so a constant
// divisor of zero would panic in the compiler — presumably the frontend
// rejects constant division by zero before these rules can fire.
func rewriteValueMIPS64_OpSelect1_20(v *Value) bool {
	// match: (Select1 (DIVV (MOVVconst [c]) (MOVVconst [d])))
	// cond:
	// result: (MOVVconst [int64(c)/int64(d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64DIVV {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0_0.AuxInt
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0_1.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64(c) / int64(d)
		return true
	}
	// match: (Select1 (DIVVU (MOVVconst [c]) (MOVVconst [d])))
	// cond:
	// result: (MOVVconst [int64(uint64(c)/uint64(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64DIVVU {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0_0.AuxInt
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0_1.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64(uint64(c) / uint64(d))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpSignExt16to32_0 lowers the generic SignExt16to32 to the
// MIPS64 MOVHreg (sign-extending halfword move). Always matches.
func rewriteValueMIPS64_OpSignExt16to32_0(v *Value) bool {
	// match: (SignExt16to32 x)
	// cond:
	// result: (MOVHreg x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64MOVHreg)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpSignExt16to64_0 lowers SignExt16to64 to MOVHreg; the
// same instruction serves both 16->32 and 16->64 extension on a 64-bit
// register machine. Always matches.
func rewriteValueMIPS64_OpSignExt16to64_0(v *Value) bool {
	// match: (SignExt16to64 x)
	// cond:
	// result: (MOVHreg x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64MOVHreg)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpSignExt32to64_0 lowers SignExt32to64 to the MIPS64
// MOVWreg (sign-extending word move). Always matches.
func rewriteValueMIPS64_OpSignExt32to64_0(v *Value) bool {
	// match: (SignExt32to64 x)
	// cond:
	// result: (MOVWreg x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64MOVWreg)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpSignExt8to16_0 lowers SignExt8to16 to the MIPS64
// MOVBreg (sign-extending byte move). Always matches.
func rewriteValueMIPS64_OpSignExt8to16_0(v *Value) bool {
	// match: (SignExt8to16 x)
	// cond:
	// result: (MOVBreg x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64MOVBreg)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpSignExt8to32_0 lowers SignExt8to32 to MOVBreg; one
// sign-extending byte move serves all 8->N extensions. Always matches.
func rewriteValueMIPS64_OpSignExt8to32_0(v *Value) bool {
	// match: (SignExt8to32 x)
	// cond:
	// result: (MOVBreg x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64MOVBreg)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpSignExt8to64_0 lowers SignExt8to64 to the MIPS64
// MOVBreg (sign-extending byte move). Always matches.
func rewriteValueMIPS64_OpSignExt8to64_0(v *Value) bool {
	// match: (SignExt8to64 x)
	// cond:
	// result: (MOVBreg x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64MOVBreg)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpSlicemask_0 lowers Slicemask to (SRAVconst (NEGV x) [63]):
// negating x makes its sign bit 1 exactly when x > 0, and the arithmetic
// shift by 63 replicates that bit, giving all-ones for positive x and zero
// for x == 0. Always matches.
func rewriteValueMIPS64_OpSlicemask_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Slicemask <t> x)
	// cond:
	// result: (SRAVconst (NEGV <t> x) [63])
	for {
		t := v.Type
		x := v.Args[0]
		v.reset(OpMIPS64SRAVconst)
		v.AuxInt = 63
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpSqrt_0 lowers the generic Sqrt to the MIPS64 SQRTD
// (double-precision square root) instruction. Always matches.
func rewriteValueMIPS64_OpSqrt_0(v *Value) bool {
	// match: (Sqrt x)
	// cond:
	// result: (SQRTD x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64SQRTD)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpStaticCall_0 lowers a generic StaticCall to the MIPS64
// CALLstatic op, carrying over the argument width (AuxInt) and call target
// (Aux) unchanged. Always matches.
func rewriteValueMIPS64_OpStaticCall_0(v *Value) bool {
	// match: (StaticCall [argwid] {target} mem)
	// cond:
	// result: (CALLstatic [argwid] {target} mem)
	for {
		argwid := v.AuxInt
		target := v.Aux
		mem := v.Args[0]
		v.reset(OpMIPS64CALLstatic)
		v.AuxInt = argwid
		v.Aux = target
		v.AddArg(mem)
		return true
	}
}
// rewriteValueMIPS64_OpStore_0 lowers a generic Store to the width- and
// kind-appropriate MIPS64 store: MOVBstore/MOVHstore for 1/2 bytes,
// MOVWstore/MOVVstore for 4/8-byte integers, and MOVFstore/MOVDstore for
// 32/64-bit floats. The element type is carried in v.Aux; the float checks
// inspect the value's type, so a 4- or 8-byte store dispatches on whether
// the stored value is floating point. Returns false if no size matches.
func rewriteValueMIPS64_OpStore_0(v *Value) bool {
	// match: (Store {t} ptr val mem)
	// cond: t.(*types.Type).Size() == 1
	// result: (MOVBstore ptr val mem)
	for {
		t := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(t.(*types.Type).Size() == 1) {
			break
		}
		v.reset(OpMIPS64MOVBstore)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.(*types.Type).Size() == 2
	// result: (MOVHstore ptr val mem)
	for {
		t := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(t.(*types.Type).Size() == 2) {
			break
		}
		v.reset(OpMIPS64MOVHstore)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.(*types.Type).Size() == 4 && !is32BitFloat(val.Type)
	// result: (MOVWstore ptr val mem)
	for {
		t := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(t.(*types.Type).Size() == 4 && !is32BitFloat(val.Type)) {
			break
		}
		v.reset(OpMIPS64MOVWstore)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.(*types.Type).Size() == 8 && !is64BitFloat(val.Type)
	// result: (MOVVstore ptr val mem)
	for {
		t := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(t.(*types.Type).Size() == 8 && !is64BitFloat(val.Type)) {
			break
		}
		v.reset(OpMIPS64MOVVstore)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.(*types.Type).Size() == 4 && is32BitFloat(val.Type)
	// result: (MOVFstore ptr val mem)
	for {
		t := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(t.(*types.Type).Size() == 4 && is32BitFloat(val.Type)) {
			break
		}
		v.reset(OpMIPS64MOVFstore)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.(*types.Type).Size() == 8 && is64BitFloat(val.Type)
	// result: (MOVDstore ptr val mem)
	for {
		t := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(t.(*types.Type).Size() == 8 && is64BitFloat(val.Type)) {
			break
		}
		v.reset(OpMIPS64MOVDstore)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpSub16_0 lowers the generic Sub16 op to the MIPS64
// SUBV (64-bit subtract) instruction. The rewrite is unconditional and
// always returns true.
func rewriteValueMIPS64_OpSub16_0(v *Value) bool {
	// match: (Sub16 x y)
	// cond:
	// result: (SUBV x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SUBV)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpSub32_0 lowers the generic Sub32 op to the MIPS64
// SUBV (64-bit subtract) instruction. The rewrite is unconditional and
// always returns true.
func rewriteValueMIPS64_OpSub32_0(v *Value) bool {
	// match: (Sub32 x y)
	// cond:
	// result: (SUBV x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SUBV)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpSub32F_0 lowers the generic Sub32F op to the MIPS64
// SUBF (single-precision float subtract) instruction. The rewrite is
// unconditional and always returns true.
func rewriteValueMIPS64_OpSub32F_0(v *Value) bool {
	// match: (Sub32F x y)
	// cond:
	// result: (SUBF x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SUBF)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpSub64_0 lowers the generic Sub64 op to the MIPS64
// SUBV (64-bit subtract) instruction. The rewrite is unconditional and
// always returns true.
func rewriteValueMIPS64_OpSub64_0(v *Value) bool {
	// match: (Sub64 x y)
	// cond:
	// result: (SUBV x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SUBV)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpSub64F_0 lowers the generic Sub64F op to the MIPS64
// SUBD (double-precision float subtract) instruction. The rewrite is
// unconditional and always returns true.
func rewriteValueMIPS64_OpSub64F_0(v *Value) bool {
	// match: (Sub64F x y)
	// cond:
	// result: (SUBD x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SUBD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpSub8_0 lowers the generic Sub8 op to the MIPS64
// SUBV (64-bit subtract) instruction. The rewrite is unconditional and
// always returns true.
func rewriteValueMIPS64_OpSub8_0(v *Value) bool {
	// match: (Sub8 x y)
	// cond:
	// result: (SUBV x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SUBV)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpSubPtr_0 lowers the generic SubPtr op (pointer
// difference) to the MIPS64 SUBV instruction. The rewrite is unconditional
// and always returns true.
func rewriteValueMIPS64_OpSubPtr_0(v *Value) bool {
	// match: (SubPtr x y)
	// cond:
	// result: (SUBV x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SUBV)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpTrunc16to8_0 rewrites Trunc16to8 to a plain copy of
// its operand (keeping the operand's type): no machine instruction is
// needed for the truncation. Always returns true.
func rewriteValueMIPS64_OpTrunc16to8_0(v *Value) bool {
	// match: (Trunc16to8 x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpTrunc32to16_0 rewrites Trunc32to16 to a plain copy
// of its operand (keeping the operand's type): no machine instruction is
// needed for the truncation. Always returns true.
func rewriteValueMIPS64_OpTrunc32to16_0(v *Value) bool {
	// match: (Trunc32to16 x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpTrunc32to8_0 rewrites Trunc32to8 to a plain copy of
// its operand (keeping the operand's type): no machine instruction is
// needed for the truncation. Always returns true.
func rewriteValueMIPS64_OpTrunc32to8_0(v *Value) bool {
	// match: (Trunc32to8 x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpTrunc64to16_0 rewrites Trunc64to16 to a plain copy
// of its operand (keeping the operand's type): no machine instruction is
// needed for the truncation. Always returns true.
func rewriteValueMIPS64_OpTrunc64to16_0(v *Value) bool {
	// match: (Trunc64to16 x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpTrunc64to32_0 rewrites Trunc64to32 to a plain copy
// of its operand (keeping the operand's type): no machine instruction is
// needed for the truncation. Always returns true.
func rewriteValueMIPS64_OpTrunc64to32_0(v *Value) bool {
	// match: (Trunc64to32 x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpTrunc64to8_0 rewrites Trunc64to8 to a plain copy of
// its operand (keeping the operand's type): no machine instruction is
// needed for the truncation. Always returns true.
func rewriteValueMIPS64_OpTrunc64to8_0(v *Value) bool {
	// match: (Trunc64to8 x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpWB_0 lowers the generic write-barrier op WB to
// MIPS64 LoweredWB, carrying the barrier function symbol through in Aux
// and preserving the destination pointer, source pointer, and memory
// arguments. Always returns true.
func rewriteValueMIPS64_OpWB_0(v *Value) bool {
	// match: (WB {fn} destptr srcptr mem)
	// cond:
	// result: (LoweredWB {fn} destptr srcptr mem)
	for {
		fn := v.Aux
		_ = v.Args[2]
		destptr := v.Args[0]
		srcptr := v.Args[1]
		mem := v.Args[2]
		v.reset(OpMIPS64LoweredWB)
		v.Aux = fn
		v.AddArg(destptr)
		v.AddArg(srcptr)
		v.AddArg(mem)
		return true
	}
}
// rewriteValueMIPS64_OpXor16_0 lowers the generic Xor16 op to the MIPS64
// XOR instruction. The rewrite is unconditional and always returns true.
func rewriteValueMIPS64_OpXor16_0(v *Value) bool {
	// match: (Xor16 x y)
	// cond:
	// result: (XOR x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpXor32_0 lowers the generic Xor32 op to the MIPS64
// XOR instruction. The rewrite is unconditional and always returns true.
func rewriteValueMIPS64_OpXor32_0(v *Value) bool {
	// match: (Xor32 x y)
	// cond:
	// result: (XOR x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpXor64_0 lowers the generic Xor64 op to the MIPS64
// XOR instruction. The rewrite is unconditional and always returns true.
func rewriteValueMIPS64_OpXor64_0(v *Value) bool {
	// match: (Xor64 x y)
	// cond:
	// result: (XOR x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpXor8_0 lowers the generic Xor8 op to the MIPS64
// XOR instruction. The rewrite is unconditional and always returns true.
func rewriteValueMIPS64_OpXor8_0(v *Value) bool {
	// match: (Xor8 x y)
	// cond:
	// result: (XOR x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpZero_0 lowers generic Zero ops of small fixed sizes
// (0, 1, 2, 4, 8 bytes) to MIPS64 store instructions. Rules are tried in
// order: for each size, the widest store permitted by the pointed-to
// type's alignment is tried first, falling back to sequences of narrower
// stores for less-aligned cases. Returns false when no rule matches so the
// dispatcher can try rewriteValueMIPS64_OpZero_10.
func rewriteValueMIPS64_OpZero_0(v *Value) bool {
	b := v.Block
	_ = b // may be unused depending on which rule fires
	typ := &b.Func.Config.Types
	_ = typ // may be unused depending on which rule fires
	// match: (Zero [0] _ mem)
	// cond:
	// result: mem
	for {
		if v.AuxInt != 0 {
			break
		}
		_ = v.Args[1]
		mem := v.Args[1]
		v.reset(OpCopy)
		v.Type = mem.Type
		v.AddArg(mem)
		return true
	}
	// match: (Zero [1] ptr mem)
	// cond:
	// result: (MOVBstore ptr (MOVVconst [0]) mem)
	for {
		if v.AuxInt != 1 {
			break
		}
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		v.reset(OpMIPS64MOVBstore)
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	// match: (Zero [2] {t} ptr mem)
	// cond: t.(*types.Type).Alignment()%2 == 0
	// result: (MOVHstore ptr (MOVVconst [0]) mem)
	for {
		if v.AuxInt != 2 {
			break
		}
		t := v.Aux
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(t.(*types.Type).Alignment()%2 == 0) {
			break
		}
		v.reset(OpMIPS64MOVHstore)
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	// match: (Zero [2] ptr mem)
	// cond:
	// result: (MOVBstore [1] ptr (MOVVconst [0]) (MOVBstore [0] ptr (MOVVconst [0]) mem))
	for {
		if v.AuxInt != 2 {
			break
		}
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = 1
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
		v1.AuxInt = 0
		v1.AddArg(ptr)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 0
		v1.AddArg(v2)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	// match: (Zero [4] {t} ptr mem)
	// cond: t.(*types.Type).Alignment()%4 == 0
	// result: (MOVWstore ptr (MOVVconst [0]) mem)
	for {
		if v.AuxInt != 4 {
			break
		}
		t := v.Aux
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(t.(*types.Type).Alignment()%4 == 0) {
			break
		}
		v.reset(OpMIPS64MOVWstore)
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	// match: (Zero [4] {t} ptr mem)
	// cond: t.(*types.Type).Alignment()%2 == 0
	// result: (MOVHstore [2] ptr (MOVVconst [0]) (MOVHstore [0] ptr (MOVVconst [0]) mem))
	for {
		if v.AuxInt != 4 {
			break
		}
		t := v.Aux
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(t.(*types.Type).Alignment()%2 == 0) {
			break
		}
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = 2
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
		v1.AuxInt = 0
		v1.AddArg(ptr)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 0
		v1.AddArg(v2)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	// match: (Zero [4] ptr mem)
	// cond:
	// result: (MOVBstore [3] ptr (MOVVconst [0]) (MOVBstore [2] ptr (MOVVconst [0]) (MOVBstore [1] ptr (MOVVconst [0]) (MOVBstore [0] ptr (MOVVconst [0]) mem))))
	for {
		if v.AuxInt != 4 {
			break
		}
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = 3
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
		v1.AuxInt = 2
		v1.AddArg(ptr)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 0
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
		v3.AuxInt = 1
		v3.AddArg(ptr)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v4.AuxInt = 0
		v3.AddArg(v4)
		v5 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
		v5.AuxInt = 0
		v5.AddArg(ptr)
		v6 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v6.AuxInt = 0
		v5.AddArg(v6)
		v5.AddArg(mem)
		v3.AddArg(v5)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
	// match: (Zero [8] {t} ptr mem)
	// cond: t.(*types.Type).Alignment()%8 == 0
	// result: (MOVVstore ptr (MOVVconst [0]) mem)
	for {
		if v.AuxInt != 8 {
			break
		}
		t := v.Aux
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(t.(*types.Type).Alignment()%8 == 0) {
			break
		}
		v.reset(OpMIPS64MOVVstore)
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	// match: (Zero [8] {t} ptr mem)
	// cond: t.(*types.Type).Alignment()%4 == 0
	// result: (MOVWstore [4] ptr (MOVVconst [0]) (MOVWstore [0] ptr (MOVVconst [0]) mem))
	for {
		if v.AuxInt != 8 {
			break
		}
		t := v.Aux
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(t.(*types.Type).Alignment()%4 == 0) {
			break
		}
		v.reset(OpMIPS64MOVWstore)
		v.AuxInt = 4
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem)
		v1.AuxInt = 0
		v1.AddArg(ptr)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 0
		v1.AddArg(v2)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	// match: (Zero [8] {t} ptr mem)
	// cond: t.(*types.Type).Alignment()%2 == 0
	// result: (MOVHstore [6] ptr (MOVVconst [0]) (MOVHstore [4] ptr (MOVVconst [0]) (MOVHstore [2] ptr (MOVVconst [0]) (MOVHstore [0] ptr (MOVVconst [0]) mem))))
	for {
		if v.AuxInt != 8 {
			break
		}
		t := v.Aux
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(t.(*types.Type).Alignment()%2 == 0) {
			break
		}
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = 6
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
		v1.AuxInt = 4
		v1.AddArg(ptr)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 0
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
		v3.AuxInt = 2
		v3.AddArg(ptr)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v4.AuxInt = 0
		v3.AddArg(v4)
		v5 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
		v5.AuxInt = 0
		v5.AddArg(ptr)
		v6 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v6.AuxInt = 0
		v5.AddArg(v6)
		v5.AddArg(mem)
		v3.AddArg(v5)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpZero_10 continues lowering Zero ops not handled by
// rewriteValueMIPS64_OpZero_0: fixed sizes 3, 6, 12, 16 and 24 bytes become
// store sequences chosen by the pointed-to type's alignment; 8-byte-aligned
// sizes above 24 and up to 8*128 bytes use DUFFZERO (unless Duff's device
// is disabled via config.noDuffDevice); everything else falls back to the
// LoweredZero runtime loop. Returns false when no rule matches.
func rewriteValueMIPS64_OpZero_10(v *Value) bool {
	b := v.Block
	_ = b // may be unused depending on which rule fires
	config := b.Func.Config
	_ = config // may be unused depending on which rule fires
	typ := &b.Func.Config.Types
	_ = typ // may be unused depending on which rule fires
	// match: (Zero [3] ptr mem)
	// cond:
	// result: (MOVBstore [2] ptr (MOVVconst [0]) (MOVBstore [1] ptr (MOVVconst [0]) (MOVBstore [0] ptr (MOVVconst [0]) mem)))
	for {
		if v.AuxInt != 3 {
			break
		}
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = 2
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
		v1.AuxInt = 1
		v1.AddArg(ptr)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 0
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
		v3.AuxInt = 0
		v3.AddArg(ptr)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v4.AuxInt = 0
		v3.AddArg(v4)
		v3.AddArg(mem)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
	// match: (Zero [6] {t} ptr mem)
	// cond: t.(*types.Type).Alignment()%2 == 0
	// result: (MOVHstore [4] ptr (MOVVconst [0]) (MOVHstore [2] ptr (MOVVconst [0]) (MOVHstore [0] ptr (MOVVconst [0]) mem)))
	for {
		if v.AuxInt != 6 {
			break
		}
		t := v.Aux
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(t.(*types.Type).Alignment()%2 == 0) {
			break
		}
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = 4
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
		v1.AuxInt = 2
		v1.AddArg(ptr)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 0
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
		v3.AuxInt = 0
		v3.AddArg(ptr)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v4.AuxInt = 0
		v3.AddArg(v4)
		v3.AddArg(mem)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
	// match: (Zero [12] {t} ptr mem)
	// cond: t.(*types.Type).Alignment()%4 == 0
	// result: (MOVWstore [8] ptr (MOVVconst [0]) (MOVWstore [4] ptr (MOVVconst [0]) (MOVWstore [0] ptr (MOVVconst [0]) mem)))
	for {
		if v.AuxInt != 12 {
			break
		}
		t := v.Aux
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(t.(*types.Type).Alignment()%4 == 0) {
			break
		}
		v.reset(OpMIPS64MOVWstore)
		v.AuxInt = 8
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem)
		v1.AuxInt = 4
		v1.AddArg(ptr)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 0
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem)
		v3.AuxInt = 0
		v3.AddArg(ptr)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v4.AuxInt = 0
		v3.AddArg(v4)
		v3.AddArg(mem)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
	// match: (Zero [16] {t} ptr mem)
	// cond: t.(*types.Type).Alignment()%8 == 0
	// result: (MOVVstore [8] ptr (MOVVconst [0]) (MOVVstore [0] ptr (MOVVconst [0]) mem))
	for {
		if v.AuxInt != 16 {
			break
		}
		t := v.Aux
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(t.(*types.Type).Alignment()%8 == 0) {
			break
		}
		v.reset(OpMIPS64MOVVstore)
		v.AuxInt = 8
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem)
		v1.AuxInt = 0
		v1.AddArg(ptr)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 0
		v1.AddArg(v2)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	// match: (Zero [24] {t} ptr mem)
	// cond: t.(*types.Type).Alignment()%8 == 0
	// result: (MOVVstore [16] ptr (MOVVconst [0]) (MOVVstore [8] ptr (MOVVconst [0]) (MOVVstore [0] ptr (MOVVconst [0]) mem)))
	for {
		if v.AuxInt != 24 {
			break
		}
		t := v.Aux
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(t.(*types.Type).Alignment()%8 == 0) {
			break
		}
		v.reset(OpMIPS64MOVVstore)
		v.AuxInt = 16
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem)
		v1.AuxInt = 8
		v1.AddArg(ptr)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 0
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem)
		v3.AuxInt = 0
		v3.AddArg(ptr)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v4.AuxInt = 0
		v3.AddArg(v4)
		v3.AddArg(mem)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
	// match: (Zero [s] {t} ptr mem)
	// cond: s%8 == 0 && s > 24 && s <= 8*128 && t.(*types.Type).Alignment()%8 == 0 && !config.noDuffDevice
	// result: (DUFFZERO [8 * (128 - int64(s/8))] ptr mem)
	for {
		s := v.AuxInt
		t := v.Aux
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(s%8 == 0 && s > 24 && s <= 8*128 && t.(*types.Type).Alignment()%8 == 0 && !config.noDuffDevice) {
			break
		}
		v.reset(OpMIPS64DUFFZERO)
		// AuxInt is the byte offset to jump to inside the Duff's-device
		// routine: larger sizes enter earlier (smaller offset).
		v.AuxInt = 8 * (128 - int64(s/8))
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Zero [s] {t} ptr mem)
	// cond: (s > 8*128 || config.noDuffDevice) || t.(*types.Type).Alignment()%8 != 0
	// result: (LoweredZero [t.(*types.Type).Alignment()] ptr (ADDVconst <ptr.Type> ptr [s-moveSize(t.(*types.Type).Alignment(), config)]) mem)
	for {
		s := v.AuxInt
		t := v.Aux
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		if !((s > 8*128 || config.noDuffDevice) || t.(*types.Type).Alignment()%8 != 0) {
			break
		}
		v.reset(OpMIPS64LoweredZero)
		v.AuxInt = t.(*types.Type).Alignment()
		v.AddArg(ptr)
		// Second argument is the end pointer for the zeroing loop.
		v0 := b.NewValue0(v.Pos, OpMIPS64ADDVconst, ptr.Type)
		v0.AuxInt = s - moveSize(t.(*types.Type).Alignment(), config)
		v0.AddArg(ptr)
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpZeroExt16to32_0 lowers ZeroExt16to32 to the MIPS64
// MOVHUreg op. The rewrite is unconditional and always returns true.
func rewriteValueMIPS64_OpZeroExt16to32_0(v *Value) bool {
	// match: (ZeroExt16to32 x)
	// cond:
	// result: (MOVHUreg x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64MOVHUreg)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpZeroExt16to64_0 lowers ZeroExt16to64 to the MIPS64
// MOVHUreg op. The rewrite is unconditional and always returns true.
func rewriteValueMIPS64_OpZeroExt16to64_0(v *Value) bool {
	// match: (ZeroExt16to64 x)
	// cond:
	// result: (MOVHUreg x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64MOVHUreg)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpZeroExt32to64_0 lowers ZeroExt32to64 to the MIPS64
// MOVWUreg op. The rewrite is unconditional and always returns true.
func rewriteValueMIPS64_OpZeroExt32to64_0(v *Value) bool {
	// match: (ZeroExt32to64 x)
	// cond:
	// result: (MOVWUreg x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64MOVWUreg)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpZeroExt8to16_0 lowers ZeroExt8to16 to the MIPS64
// MOVBUreg op. The rewrite is unconditional and always returns true.
func rewriteValueMIPS64_OpZeroExt8to16_0(v *Value) bool {
	// match: (ZeroExt8to16 x)
	// cond:
	// result: (MOVBUreg x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64MOVBUreg)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpZeroExt8to32_0 lowers ZeroExt8to32 to the MIPS64
// MOVBUreg op. The rewrite is unconditional and always returns true.
func rewriteValueMIPS64_OpZeroExt8to32_0(v *Value) bool {
	// match: (ZeroExt8to32 x)
	// cond:
	// result: (MOVBUreg x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64MOVBUreg)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpZeroExt8to64_0 lowers ZeroExt8to64 to the MIPS64
// MOVBUreg op. The rewrite is unconditional and always returns true.
func rewriteValueMIPS64_OpZeroExt8to64_0(v *Value) bool {
	// match: (ZeroExt8to64 x)
	// cond:
	// result: (MOVBUreg x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64MOVBUreg)
		v.AddArg(x)
		return true
	}
}
 10528  func rewriteBlockMIPS64(b *Block) bool {
 10529  	config := b.Func.Config
 10530  	_ = config
 10531  	fe := b.Func.fe
 10532  	_ = fe
 10533  	typ := &config.Types
 10534  	_ = typ
 10535  	switch b.Kind {
 10536  	case BlockMIPS64EQ:
 10537  		// match: (EQ (FPFlagTrue cmp) yes no)
 10538  		// cond:
 10539  		// result: (FPF cmp yes no)
 10540  		for {
 10541  			v := b.Control
 10542  			if v.Op != OpMIPS64FPFlagTrue {
 10543  				break
 10544  			}
 10545  			cmp := v.Args[0]
 10546  			b.Kind = BlockMIPS64FPF
 10547  			b.SetControl(cmp)
 10548  			b.Aux = nil
 10549  			return true
 10550  		}
 10551  		// match: (EQ (FPFlagFalse cmp) yes no)
 10552  		// cond:
 10553  		// result: (FPT cmp yes no)
 10554  		for {
 10555  			v := b.Control
 10556  			if v.Op != OpMIPS64FPFlagFalse {
 10557  				break
 10558  			}
 10559  			cmp := v.Args[0]
 10560  			b.Kind = BlockMIPS64FPT
 10561  			b.SetControl(cmp)
 10562  			b.Aux = nil
 10563  			return true
 10564  		}
 10565  		// match: (EQ (XORconst [1] cmp:(SGT _ _)) yes no)
 10566  		// cond:
 10567  		// result: (NE cmp yes no)
 10568  		for {
 10569  			v := b.Control
 10570  			if v.Op != OpMIPS64XORconst {
 10571  				break
 10572  			}
 10573  			if v.AuxInt != 1 {
 10574  				break
 10575  			}
 10576  			cmp := v.Args[0]
 10577  			if cmp.Op != OpMIPS64SGT {
 10578  				break
 10579  			}
 10580  			_ = cmp.Args[1]
 10581  			b.Kind = BlockMIPS64NE
 10582  			b.SetControl(cmp)
 10583  			b.Aux = nil
 10584  			return true
 10585  		}
 10586  		// match: (EQ (XORconst [1] cmp:(SGTU _ _)) yes no)
 10587  		// cond:
 10588  		// result: (NE cmp yes no)
 10589  		for {
 10590  			v := b.Control
 10591  			if v.Op != OpMIPS64XORconst {
 10592  				break
 10593  			}
 10594  			if v.AuxInt != 1 {
 10595  				break
 10596  			}
 10597  			cmp := v.Args[0]
 10598  			if cmp.Op != OpMIPS64SGTU {
 10599  				break
 10600  			}
 10601  			_ = cmp.Args[1]
 10602  			b.Kind = BlockMIPS64NE
 10603  			b.SetControl(cmp)
 10604  			b.Aux = nil
 10605  			return true
 10606  		}
 10607  		// match: (EQ (XORconst [1] cmp:(SGTconst _)) yes no)
 10608  		// cond:
 10609  		// result: (NE cmp yes no)
 10610  		for {
 10611  			v := b.Control
 10612  			if v.Op != OpMIPS64XORconst {
 10613  				break
 10614  			}
 10615  			if v.AuxInt != 1 {
 10616  				break
 10617  			}
 10618  			cmp := v.Args[0]
 10619  			if cmp.Op != OpMIPS64SGTconst {
 10620  				break
 10621  			}
 10622  			b.Kind = BlockMIPS64NE
 10623  			b.SetControl(cmp)
 10624  			b.Aux = nil
 10625  			return true
 10626  		}
 10627  		// match: (EQ (XORconst [1] cmp:(SGTUconst _)) yes no)
 10628  		// cond:
 10629  		// result: (NE cmp yes no)
 10630  		for {
 10631  			v := b.Control
 10632  			if v.Op != OpMIPS64XORconst {
 10633  				break
 10634  			}
 10635  			if v.AuxInt != 1 {
 10636  				break
 10637  			}
 10638  			cmp := v.Args[0]
 10639  			if cmp.Op != OpMIPS64SGTUconst {
 10640  				break
 10641  			}
 10642  			b.Kind = BlockMIPS64NE
 10643  			b.SetControl(cmp)
 10644  			b.Aux = nil
 10645  			return true
 10646  		}
 10647  		// match: (EQ (SGTUconst [1] x) yes no)
 10648  		// cond:
 10649  		// result: (NE x yes no)
 10650  		for {
 10651  			v := b.Control
 10652  			if v.Op != OpMIPS64SGTUconst {
 10653  				break
 10654  			}
 10655  			if v.AuxInt != 1 {
 10656  				break
 10657  			}
 10658  			x := v.Args[0]
 10659  			b.Kind = BlockMIPS64NE
 10660  			b.SetControl(x)
 10661  			b.Aux = nil
 10662  			return true
 10663  		}
 10664  		// match: (EQ (SGTU x (MOVVconst [0])) yes no)
 10665  		// cond:
 10666  		// result: (EQ x yes no)
 10667  		for {
 10668  			v := b.Control
 10669  			if v.Op != OpMIPS64SGTU {
 10670  				break
 10671  			}
 10672  			_ = v.Args[1]
 10673  			x := v.Args[0]
 10674  			v_1 := v.Args[1]
 10675  			if v_1.Op != OpMIPS64MOVVconst {
 10676  				break
 10677  			}
 10678  			if v_1.AuxInt != 0 {
 10679  				break
 10680  			}
 10681  			b.Kind = BlockMIPS64EQ
 10682  			b.SetControl(x)
 10683  			b.Aux = nil
 10684  			return true
 10685  		}
 10686  		// match: (EQ (SGTconst [0] x) yes no)
 10687  		// cond:
 10688  		// result: (GEZ x yes no)
 10689  		for {
 10690  			v := b.Control
 10691  			if v.Op != OpMIPS64SGTconst {
 10692  				break
 10693  			}
 10694  			if v.AuxInt != 0 {
 10695  				break
 10696  			}
 10697  			x := v.Args[0]
 10698  			b.Kind = BlockMIPS64GEZ
 10699  			b.SetControl(x)
 10700  			b.Aux = nil
 10701  			return true
 10702  		}
 10703  		// match: (EQ (SGT x (MOVVconst [0])) yes no)
 10704  		// cond:
 10705  		// result: (LEZ x yes no)
 10706  		for {
 10707  			v := b.Control
 10708  			if v.Op != OpMIPS64SGT {
 10709  				break
 10710  			}
 10711  			_ = v.Args[1]
 10712  			x := v.Args[0]
 10713  			v_1 := v.Args[1]
 10714  			if v_1.Op != OpMIPS64MOVVconst {
 10715  				break
 10716  			}
 10717  			if v_1.AuxInt != 0 {
 10718  				break
 10719  			}
 10720  			b.Kind = BlockMIPS64LEZ
 10721  			b.SetControl(x)
 10722  			b.Aux = nil
 10723  			return true
 10724  		}
 10725  		// match: (EQ (MOVVconst [0]) yes no)
 10726  		// cond:
 10727  		// result: (First nil yes no)
 10728  		for {
 10729  			v := b.Control
 10730  			if v.Op != OpMIPS64MOVVconst {
 10731  				break
 10732  			}
 10733  			if v.AuxInt != 0 {
 10734  				break
 10735  			}
 10736  			b.Kind = BlockFirst
 10737  			b.SetControl(nil)
 10738  			b.Aux = nil
 10739  			return true
 10740  		}
 10741  		// match: (EQ (MOVVconst [c]) yes no)
 10742  		// cond: c != 0
 10743  		// result: (First nil no yes)
 10744  		for {
 10745  			v := b.Control
 10746  			if v.Op != OpMIPS64MOVVconst {
 10747  				break
 10748  			}
 10749  			c := v.AuxInt
 10750  			if !(c != 0) {
 10751  				break
 10752  			}
 10753  			b.Kind = BlockFirst
 10754  			b.SetControl(nil)
 10755  			b.Aux = nil
 10756  			b.swapSuccessors()
 10757  			return true
 10758  		}
 10759  	case BlockMIPS64GEZ:
 10760  		// match: (GEZ (MOVVconst [c]) yes no)
 10761  		// cond: c >= 0
 10762  		// result: (First nil yes no)
 10763  		for {
 10764  			v := b.Control
 10765  			if v.Op != OpMIPS64MOVVconst {
 10766  				break
 10767  			}
 10768  			c := v.AuxInt
 10769  			if !(c >= 0) {
 10770  				break
 10771  			}
 10772  			b.Kind = BlockFirst
 10773  			b.SetControl(nil)
 10774  			b.Aux = nil
 10775  			return true
 10776  		}
 10777  		// match: (GEZ (MOVVconst [c]) yes no)
 10778  		// cond: c < 0
 10779  		// result: (First nil no yes)
 10780  		for {
 10781  			v := b.Control
 10782  			if v.Op != OpMIPS64MOVVconst {
 10783  				break
 10784  			}
 10785  			c := v.AuxInt
 10786  			if !(c < 0) {
 10787  				break
 10788  			}
 10789  			b.Kind = BlockFirst
 10790  			b.SetControl(nil)
 10791  			b.Aux = nil
 10792  			b.swapSuccessors()
 10793  			return true
 10794  		}
 10795  	case BlockMIPS64GTZ:
 10796  		// match: (GTZ (MOVVconst [c]) yes no)
 10797  		// cond: c > 0
 10798  		// result: (First nil yes no)
 10799  		for {
 10800  			v := b.Control
 10801  			if v.Op != OpMIPS64MOVVconst {
 10802  				break
 10803  			}
 10804  			c := v.AuxInt
 10805  			if !(c > 0) {
 10806  				break
 10807  			}
 10808  			b.Kind = BlockFirst
 10809  			b.SetControl(nil)
 10810  			b.Aux = nil
 10811  			return true
 10812  		}
 10813  		// match: (GTZ (MOVVconst [c]) yes no)
 10814  		// cond: c <= 0
 10815  		// result: (First nil no yes)
 10816  		for {
 10817  			v := b.Control
 10818  			if v.Op != OpMIPS64MOVVconst {
 10819  				break
 10820  			}
 10821  			c := v.AuxInt
 10822  			if !(c <= 0) {
 10823  				break
 10824  			}
 10825  			b.Kind = BlockFirst
 10826  			b.SetControl(nil)
 10827  			b.Aux = nil
 10828  			b.swapSuccessors()
 10829  			return true
 10830  		}
 10831  	case BlockIf:
 10832  		// match: (If cond yes no)
 10833  		// cond:
 10834  		// result: (NE cond yes no)
 10835  		for {
 10836  			v := b.Control
 10837  			_ = v
 10838  			cond := b.Control
 10839  			b.Kind = BlockMIPS64NE
 10840  			b.SetControl(cond)
 10841  			b.Aux = nil
 10842  			return true
 10843  		}
 10844  	case BlockMIPS64LEZ:
 10845  		// match: (LEZ (MOVVconst [c]) yes no)
 10846  		// cond: c <= 0
 10847  		// result: (First nil yes no)
 10848  		for {
 10849  			v := b.Control
 10850  			if v.Op != OpMIPS64MOVVconst {
 10851  				break
 10852  			}
 10853  			c := v.AuxInt
 10854  			if !(c <= 0) {
 10855  				break
 10856  			}
 10857  			b.Kind = BlockFirst
 10858  			b.SetControl(nil)
 10859  			b.Aux = nil
 10860  			return true
 10861  		}
 10862  		// match: (LEZ (MOVVconst [c]) yes no)
 10863  		// cond: c > 0
 10864  		// result: (First nil no yes)
 10865  		for {
 10866  			v := b.Control
 10867  			if v.Op != OpMIPS64MOVVconst {
 10868  				break
 10869  			}
 10870  			c := v.AuxInt
 10871  			if !(c > 0) {
 10872  				break
 10873  			}
 10874  			b.Kind = BlockFirst
 10875  			b.SetControl(nil)
 10876  			b.Aux = nil
 10877  			b.swapSuccessors()
 10878  			return true
 10879  		}
 10880  	case BlockMIPS64LTZ:
 10881  		// match: (LTZ (MOVVconst [c]) yes no)
 10882  		// cond: c < 0
 10883  		// result: (First nil yes no)
 10884  		for {
 10885  			v := b.Control
 10886  			if v.Op != OpMIPS64MOVVconst {
 10887  				break
 10888  			}
 10889  			c := v.AuxInt
 10890  			if !(c < 0) {
 10891  				break
 10892  			}
 10893  			b.Kind = BlockFirst
 10894  			b.SetControl(nil)
 10895  			b.Aux = nil
 10896  			return true
 10897  		}
 10898  		// match: (LTZ (MOVVconst [c]) yes no)
 10899  		// cond: c >= 0
 10900  		// result: (First nil no yes)
 10901  		for {
 10902  			v := b.Control
 10903  			if v.Op != OpMIPS64MOVVconst {
 10904  				break
 10905  			}
 10906  			c := v.AuxInt
 10907  			if !(c >= 0) {
 10908  				break
 10909  			}
 10910  			b.Kind = BlockFirst
 10911  			b.SetControl(nil)
 10912  			b.Aux = nil
 10913  			b.swapSuccessors()
 10914  			return true
 10915  		}
 10916  	case BlockMIPS64NE:
 10917  		// match: (NE (FPFlagTrue cmp) yes no)
 10918  		// cond:
 10919  		// result: (FPT cmp yes no)
 10920  		for {
 10921  			v := b.Control
 10922  			if v.Op != OpMIPS64FPFlagTrue {
 10923  				break
 10924  			}
 10925  			cmp := v.Args[0]
 10926  			b.Kind = BlockMIPS64FPT
 10927  			b.SetControl(cmp)
 10928  			b.Aux = nil
 10929  			return true
 10930  		}
 10931  		// match: (NE (FPFlagFalse cmp) yes no)
 10932  		// cond:
 10933  		// result: (FPF cmp yes no)
 10934  		for {
 10935  			v := b.Control
 10936  			if v.Op != OpMIPS64FPFlagFalse {
 10937  				break
 10938  			}
 10939  			cmp := v.Args[0]
 10940  			b.Kind = BlockMIPS64FPF
 10941  			b.SetControl(cmp)
 10942  			b.Aux = nil
 10943  			return true
 10944  		}
 10945  		// match: (NE (XORconst [1] cmp:(SGT _ _)) yes no)
 10946  		// cond:
 10947  		// result: (EQ cmp yes no)
 10948  		for {
 10949  			v := b.Control
 10950  			if v.Op != OpMIPS64XORconst {
 10951  				break
 10952  			}
 10953  			if v.AuxInt != 1 {
 10954  				break
 10955  			}
 10956  			cmp := v.Args[0]
 10957  			if cmp.Op != OpMIPS64SGT {
 10958  				break
 10959  			}
 10960  			_ = cmp.Args[1]
 10961  			b.Kind = BlockMIPS64EQ
 10962  			b.SetControl(cmp)
 10963  			b.Aux = nil
 10964  			return true
 10965  		}
 10966  		// match: (NE (XORconst [1] cmp:(SGTU _ _)) yes no)
 10967  		// cond:
 10968  		// result: (EQ cmp yes no)
 10969  		for {
 10970  			v := b.Control
 10971  			if v.Op != OpMIPS64XORconst {
 10972  				break
 10973  			}
 10974  			if v.AuxInt != 1 {
 10975  				break
 10976  			}
 10977  			cmp := v.Args[0]
 10978  			if cmp.Op != OpMIPS64SGTU {
 10979  				break
 10980  			}
 10981  			_ = cmp.Args[1]
 10982  			b.Kind = BlockMIPS64EQ
 10983  			b.SetControl(cmp)
 10984  			b.Aux = nil
 10985  			return true
 10986  		}
 10987  		// match: (NE (XORconst [1] cmp:(SGTconst _)) yes no)
 10988  		// cond:
 10989  		// result: (EQ cmp yes no)
 10990  		for {
 10991  			v := b.Control
 10992  			if v.Op != OpMIPS64XORconst {
 10993  				break
 10994  			}
 10995  			if v.AuxInt != 1 {
 10996  				break
 10997  			}
 10998  			cmp := v.Args[0]
 10999  			if cmp.Op != OpMIPS64SGTconst {
 11000  				break
 11001  			}
 11002  			b.Kind = BlockMIPS64EQ
 11003  			b.SetControl(cmp)
 11004  			b.Aux = nil
 11005  			return true
 11006  		}
 11007  		// match: (NE (XORconst [1] cmp:(SGTUconst _)) yes no)
 11008  		// cond:
 11009  		// result: (EQ cmp yes no)
 11010  		for {
 11011  			v := b.Control
 11012  			if v.Op != OpMIPS64XORconst {
 11013  				break
 11014  			}
 11015  			if v.AuxInt != 1 {
 11016  				break
 11017  			}
 11018  			cmp := v.Args[0]
 11019  			if cmp.Op != OpMIPS64SGTUconst {
 11020  				break
 11021  			}
 11022  			b.Kind = BlockMIPS64EQ
 11023  			b.SetControl(cmp)
 11024  			b.Aux = nil
 11025  			return true
 11026  		}
 11027  		// match: (NE (SGTUconst [1] x) yes no)
 11028  		// cond:
 11029  		// result: (EQ x yes no)
 11030  		for {
 11031  			v := b.Control
 11032  			if v.Op != OpMIPS64SGTUconst {
 11033  				break
 11034  			}
 11035  			if v.AuxInt != 1 {
 11036  				break
 11037  			}
 11038  			x := v.Args[0]
 11039  			b.Kind = BlockMIPS64EQ
 11040  			b.SetControl(x)
 11041  			b.Aux = nil
 11042  			return true
 11043  		}
 11044  		// match: (NE (SGTU x (MOVVconst [0])) yes no)
 11045  		// cond:
 11046  		// result: (NE x yes no)
 11047  		for {
 11048  			v := b.Control
 11049  			if v.Op != OpMIPS64SGTU {
 11050  				break
 11051  			}
 11052  			_ = v.Args[1]
 11053  			x := v.Args[0]
 11054  			v_1 := v.Args[1]
 11055  			if v_1.Op != OpMIPS64MOVVconst {
 11056  				break
 11057  			}
 11058  			if v_1.AuxInt != 0 {
 11059  				break
 11060  			}
 11061  			b.Kind = BlockMIPS64NE
 11062  			b.SetControl(x)
 11063  			b.Aux = nil
 11064  			return true
 11065  		}
 11066  		// match: (NE (SGTconst [0] x) yes no)
 11067  		// cond:
 11068  		// result: (LTZ x yes no)
 11069  		for {
 11070  			v := b.Control
 11071  			if v.Op != OpMIPS64SGTconst {
 11072  				break
 11073  			}
 11074  			if v.AuxInt != 0 {
 11075  				break
 11076  			}
 11077  			x := v.Args[0]
 11078  			b.Kind = BlockMIPS64LTZ
 11079  			b.SetControl(x)
 11080  			b.Aux = nil
 11081  			return true
 11082  		}
 11083  		// match: (NE (SGT x (MOVVconst [0])) yes no)
 11084  		// cond:
 11085  		// result: (GTZ x yes no)
 11086  		for {
 11087  			v := b.Control
 11088  			if v.Op != OpMIPS64SGT {
 11089  				break
 11090  			}
 11091  			_ = v.Args[1]
 11092  			x := v.Args[0]
 11093  			v_1 := v.Args[1]
 11094  			if v_1.Op != OpMIPS64MOVVconst {
 11095  				break
 11096  			}
 11097  			if v_1.AuxInt != 0 {
 11098  				break
 11099  			}
 11100  			b.Kind = BlockMIPS64GTZ
 11101  			b.SetControl(x)
 11102  			b.Aux = nil
 11103  			return true
 11104  		}
 11105  		// match: (NE (MOVVconst [0]) yes no)
 11106  		// cond:
 11107  		// result: (First nil no yes)
 11108  		for {
 11109  			v := b.Control
 11110  			if v.Op != OpMIPS64MOVVconst {
 11111  				break
 11112  			}
 11113  			if v.AuxInt != 0 {
 11114  				break
 11115  			}
 11116  			b.Kind = BlockFirst
 11117  			b.SetControl(nil)
 11118  			b.Aux = nil
 11119  			b.swapSuccessors()
 11120  			return true
 11121  		}
 11122  		// match: (NE (MOVVconst [c]) yes no)
 11123  		// cond: c != 0
 11124  		// result: (First nil yes no)
 11125  		for {
 11126  			v := b.Control
 11127  			if v.Op != OpMIPS64MOVVconst {
 11128  				break
 11129  			}
 11130  			c := v.AuxInt
 11131  			if !(c != 0) {
 11132  				break
 11133  			}
 11134  			b.Kind = BlockFirst
 11135  			b.SetControl(nil)
 11136  			b.Aux = nil
 11137  			return true
 11138  		}
 11139  	}
 11140  	return false
 11141  }