github.com/goproxy0/go@v0.0.0-20171111080102-49cc0c489d2c/src/cmd/compile/internal/ssa/rewriteMIPS64.go (about)

     1  // Code generated from gen/MIPS64.rules; DO NOT EDIT.
     2  // generated with: cd gen; go run *.go
     3  
     4  package ssa
     5  
     6  import "math"
     7  import "cmd/internal/obj"
     8  import "cmd/internal/objabi"
     9  import "cmd/compile/internal/types"
    10  
    11  var _ = math.MinInt8  // in case not otherwise used
    12  var _ = obj.ANOP      // in case not otherwise used
    13  var _ = objabi.GOROOT // in case not otherwise used
    14  var _ = types.TypeMem // in case not otherwise used
    15  
// rewriteValueMIPS64 dispatches v to the rewrite helper(s) for its opcode
// and reports whether v was rewritten in place. Opcodes whose rules are
// split across several generated helpers (_0, _10, _20, ...) try each
// helper left to right until one fires.
func rewriteValueMIPS64(v *Value) bool {
	switch v.Op {
	case OpAdd16:
		return rewriteValueMIPS64_OpAdd16_0(v)
	case OpAdd32:
		return rewriteValueMIPS64_OpAdd32_0(v)
	case OpAdd32F:
		return rewriteValueMIPS64_OpAdd32F_0(v)
	case OpAdd64:
		return rewriteValueMIPS64_OpAdd64_0(v)
	case OpAdd64F:
		return rewriteValueMIPS64_OpAdd64F_0(v)
	case OpAdd8:
		return rewriteValueMIPS64_OpAdd8_0(v)
	case OpAddPtr:
		return rewriteValueMIPS64_OpAddPtr_0(v)
	case OpAddr:
		return rewriteValueMIPS64_OpAddr_0(v)
	case OpAnd16:
		return rewriteValueMIPS64_OpAnd16_0(v)
	case OpAnd32:
		return rewriteValueMIPS64_OpAnd32_0(v)
	case OpAnd64:
		return rewriteValueMIPS64_OpAnd64_0(v)
	case OpAnd8:
		return rewriteValueMIPS64_OpAnd8_0(v)
	case OpAndB:
		return rewriteValueMIPS64_OpAndB_0(v)
	case OpAtomicAdd32:
		return rewriteValueMIPS64_OpAtomicAdd32_0(v)
	case OpAtomicAdd64:
		return rewriteValueMIPS64_OpAtomicAdd64_0(v)
	case OpAtomicCompareAndSwap32:
		return rewriteValueMIPS64_OpAtomicCompareAndSwap32_0(v)
	case OpAtomicCompareAndSwap64:
		return rewriteValueMIPS64_OpAtomicCompareAndSwap64_0(v)
	case OpAtomicExchange32:
		return rewriteValueMIPS64_OpAtomicExchange32_0(v)
	case OpAtomicExchange64:
		return rewriteValueMIPS64_OpAtomicExchange64_0(v)
	case OpAtomicLoad32:
		return rewriteValueMIPS64_OpAtomicLoad32_0(v)
	case OpAtomicLoad64:
		return rewriteValueMIPS64_OpAtomicLoad64_0(v)
	case OpAtomicLoadPtr:
		return rewriteValueMIPS64_OpAtomicLoadPtr_0(v)
	case OpAtomicStore32:
		return rewriteValueMIPS64_OpAtomicStore32_0(v)
	case OpAtomicStore64:
		return rewriteValueMIPS64_OpAtomicStore64_0(v)
	case OpAtomicStorePtrNoWB:
		return rewriteValueMIPS64_OpAtomicStorePtrNoWB_0(v)
	case OpAvg64u:
		return rewriteValueMIPS64_OpAvg64u_0(v)
	case OpClosureCall:
		return rewriteValueMIPS64_OpClosureCall_0(v)
	case OpCom16:
		return rewriteValueMIPS64_OpCom16_0(v)
	case OpCom32:
		return rewriteValueMIPS64_OpCom32_0(v)
	case OpCom64:
		return rewriteValueMIPS64_OpCom64_0(v)
	case OpCom8:
		return rewriteValueMIPS64_OpCom8_0(v)
	case OpConst16:
		return rewriteValueMIPS64_OpConst16_0(v)
	case OpConst32:
		return rewriteValueMIPS64_OpConst32_0(v)
	case OpConst32F:
		return rewriteValueMIPS64_OpConst32F_0(v)
	case OpConst64:
		return rewriteValueMIPS64_OpConst64_0(v)
	case OpConst64F:
		return rewriteValueMIPS64_OpConst64F_0(v)
	case OpConst8:
		return rewriteValueMIPS64_OpConst8_0(v)
	case OpConstBool:
		return rewriteValueMIPS64_OpConstBool_0(v)
	case OpConstNil:
		return rewriteValueMIPS64_OpConstNil_0(v)
	case OpConvert:
		return rewriteValueMIPS64_OpConvert_0(v)
	case OpCvt32Fto32:
		return rewriteValueMIPS64_OpCvt32Fto32_0(v)
	case OpCvt32Fto64:
		return rewriteValueMIPS64_OpCvt32Fto64_0(v)
	case OpCvt32Fto64F:
		return rewriteValueMIPS64_OpCvt32Fto64F_0(v)
	case OpCvt32to32F:
		return rewriteValueMIPS64_OpCvt32to32F_0(v)
	case OpCvt32to64F:
		return rewriteValueMIPS64_OpCvt32to64F_0(v)
	case OpCvt64Fto32:
		return rewriteValueMIPS64_OpCvt64Fto32_0(v)
	case OpCvt64Fto32F:
		return rewriteValueMIPS64_OpCvt64Fto32F_0(v)
	case OpCvt64Fto64:
		return rewriteValueMIPS64_OpCvt64Fto64_0(v)
	case OpCvt64to32F:
		return rewriteValueMIPS64_OpCvt64to32F_0(v)
	case OpCvt64to64F:
		return rewriteValueMIPS64_OpCvt64to64F_0(v)
	case OpDiv16:
		return rewriteValueMIPS64_OpDiv16_0(v)
	case OpDiv16u:
		return rewriteValueMIPS64_OpDiv16u_0(v)
	case OpDiv32:
		return rewriteValueMIPS64_OpDiv32_0(v)
	case OpDiv32F:
		return rewriteValueMIPS64_OpDiv32F_0(v)
	case OpDiv32u:
		return rewriteValueMIPS64_OpDiv32u_0(v)
	case OpDiv64:
		return rewriteValueMIPS64_OpDiv64_0(v)
	case OpDiv64F:
		return rewriteValueMIPS64_OpDiv64F_0(v)
	case OpDiv64u:
		return rewriteValueMIPS64_OpDiv64u_0(v)
	case OpDiv8:
		return rewriteValueMIPS64_OpDiv8_0(v)
	case OpDiv8u:
		return rewriteValueMIPS64_OpDiv8u_0(v)
	case OpEq16:
		return rewriteValueMIPS64_OpEq16_0(v)
	case OpEq32:
		return rewriteValueMIPS64_OpEq32_0(v)
	case OpEq32F:
		return rewriteValueMIPS64_OpEq32F_0(v)
	case OpEq64:
		return rewriteValueMIPS64_OpEq64_0(v)
	case OpEq64F:
		return rewriteValueMIPS64_OpEq64F_0(v)
	case OpEq8:
		return rewriteValueMIPS64_OpEq8_0(v)
	case OpEqB:
		return rewriteValueMIPS64_OpEqB_0(v)
	case OpEqPtr:
		return rewriteValueMIPS64_OpEqPtr_0(v)
	case OpGeq16:
		return rewriteValueMIPS64_OpGeq16_0(v)
	case OpGeq16U:
		return rewriteValueMIPS64_OpGeq16U_0(v)
	case OpGeq32:
		return rewriteValueMIPS64_OpGeq32_0(v)
	case OpGeq32F:
		return rewriteValueMIPS64_OpGeq32F_0(v)
	case OpGeq32U:
		return rewriteValueMIPS64_OpGeq32U_0(v)
	case OpGeq64:
		return rewriteValueMIPS64_OpGeq64_0(v)
	case OpGeq64F:
		return rewriteValueMIPS64_OpGeq64F_0(v)
	case OpGeq64U:
		return rewriteValueMIPS64_OpGeq64U_0(v)
	case OpGeq8:
		return rewriteValueMIPS64_OpGeq8_0(v)
	case OpGeq8U:
		return rewriteValueMIPS64_OpGeq8U_0(v)
	case OpGetCallerSP:
		return rewriteValueMIPS64_OpGetCallerSP_0(v)
	case OpGetClosurePtr:
		return rewriteValueMIPS64_OpGetClosurePtr_0(v)
	case OpGreater16:
		return rewriteValueMIPS64_OpGreater16_0(v)
	case OpGreater16U:
		return rewriteValueMIPS64_OpGreater16U_0(v)
	case OpGreater32:
		return rewriteValueMIPS64_OpGreater32_0(v)
	case OpGreater32F:
		return rewriteValueMIPS64_OpGreater32F_0(v)
	case OpGreater32U:
		return rewriteValueMIPS64_OpGreater32U_0(v)
	case OpGreater64:
		return rewriteValueMIPS64_OpGreater64_0(v)
	case OpGreater64F:
		return rewriteValueMIPS64_OpGreater64F_0(v)
	case OpGreater64U:
		return rewriteValueMIPS64_OpGreater64U_0(v)
	case OpGreater8:
		return rewriteValueMIPS64_OpGreater8_0(v)
	case OpGreater8U:
		return rewriteValueMIPS64_OpGreater8U_0(v)
	case OpHmul32:
		return rewriteValueMIPS64_OpHmul32_0(v)
	case OpHmul32u:
		return rewriteValueMIPS64_OpHmul32u_0(v)
	case OpHmul64:
		return rewriteValueMIPS64_OpHmul64_0(v)
	case OpHmul64u:
		return rewriteValueMIPS64_OpHmul64u_0(v)
	case OpInterCall:
		return rewriteValueMIPS64_OpInterCall_0(v)
	case OpIsInBounds:
		return rewriteValueMIPS64_OpIsInBounds_0(v)
	case OpIsNonNil:
		return rewriteValueMIPS64_OpIsNonNil_0(v)
	case OpIsSliceInBounds:
		return rewriteValueMIPS64_OpIsSliceInBounds_0(v)
	case OpLeq16:
		return rewriteValueMIPS64_OpLeq16_0(v)
	case OpLeq16U:
		return rewriteValueMIPS64_OpLeq16U_0(v)
	case OpLeq32:
		return rewriteValueMIPS64_OpLeq32_0(v)
	case OpLeq32F:
		return rewriteValueMIPS64_OpLeq32F_0(v)
	case OpLeq32U:
		return rewriteValueMIPS64_OpLeq32U_0(v)
	case OpLeq64:
		return rewriteValueMIPS64_OpLeq64_0(v)
	case OpLeq64F:
		return rewriteValueMIPS64_OpLeq64F_0(v)
	case OpLeq64U:
		return rewriteValueMIPS64_OpLeq64U_0(v)
	case OpLeq8:
		return rewriteValueMIPS64_OpLeq8_0(v)
	case OpLeq8U:
		return rewriteValueMIPS64_OpLeq8U_0(v)
	case OpLess16:
		return rewriteValueMIPS64_OpLess16_0(v)
	case OpLess16U:
		return rewriteValueMIPS64_OpLess16U_0(v)
	case OpLess32:
		return rewriteValueMIPS64_OpLess32_0(v)
	case OpLess32F:
		return rewriteValueMIPS64_OpLess32F_0(v)
	case OpLess32U:
		return rewriteValueMIPS64_OpLess32U_0(v)
	case OpLess64:
		return rewriteValueMIPS64_OpLess64_0(v)
	case OpLess64F:
		return rewriteValueMIPS64_OpLess64F_0(v)
	case OpLess64U:
		return rewriteValueMIPS64_OpLess64U_0(v)
	case OpLess8:
		return rewriteValueMIPS64_OpLess8_0(v)
	case OpLess8U:
		return rewriteValueMIPS64_OpLess8U_0(v)
	case OpLoad:
		return rewriteValueMIPS64_OpLoad_0(v)
	case OpLsh16x16:
		return rewriteValueMIPS64_OpLsh16x16_0(v)
	case OpLsh16x32:
		return rewriteValueMIPS64_OpLsh16x32_0(v)
	case OpLsh16x64:
		return rewriteValueMIPS64_OpLsh16x64_0(v)
	case OpLsh16x8:
		return rewriteValueMIPS64_OpLsh16x8_0(v)
	case OpLsh32x16:
		return rewriteValueMIPS64_OpLsh32x16_0(v)
	case OpLsh32x32:
		return rewriteValueMIPS64_OpLsh32x32_0(v)
	case OpLsh32x64:
		return rewriteValueMIPS64_OpLsh32x64_0(v)
	case OpLsh32x8:
		return rewriteValueMIPS64_OpLsh32x8_0(v)
	case OpLsh64x16:
		return rewriteValueMIPS64_OpLsh64x16_0(v)
	case OpLsh64x32:
		return rewriteValueMIPS64_OpLsh64x32_0(v)
	case OpLsh64x64:
		return rewriteValueMIPS64_OpLsh64x64_0(v)
	case OpLsh64x8:
		return rewriteValueMIPS64_OpLsh64x8_0(v)
	case OpLsh8x16:
		return rewriteValueMIPS64_OpLsh8x16_0(v)
	case OpLsh8x32:
		return rewriteValueMIPS64_OpLsh8x32_0(v)
	case OpLsh8x64:
		return rewriteValueMIPS64_OpLsh8x64_0(v)
	case OpLsh8x8:
		return rewriteValueMIPS64_OpLsh8x8_0(v)
	case OpMIPS64ADDV:
		return rewriteValueMIPS64_OpMIPS64ADDV_0(v)
	case OpMIPS64ADDVconst:
		return rewriteValueMIPS64_OpMIPS64ADDVconst_0(v)
	case OpMIPS64AND:
		return rewriteValueMIPS64_OpMIPS64AND_0(v)
	case OpMIPS64ANDconst:
		return rewriteValueMIPS64_OpMIPS64ANDconst_0(v)
	case OpMIPS64LoweredAtomicAdd32:
		return rewriteValueMIPS64_OpMIPS64LoweredAtomicAdd32_0(v)
	case OpMIPS64LoweredAtomicAdd64:
		return rewriteValueMIPS64_OpMIPS64LoweredAtomicAdd64_0(v)
	case OpMIPS64LoweredAtomicStore32:
		return rewriteValueMIPS64_OpMIPS64LoweredAtomicStore32_0(v)
	case OpMIPS64LoweredAtomicStore64:
		return rewriteValueMIPS64_OpMIPS64LoweredAtomicStore64_0(v)
	case OpMIPS64MOVBUload:
		return rewriteValueMIPS64_OpMIPS64MOVBUload_0(v)
	case OpMIPS64MOVBUreg:
		return rewriteValueMIPS64_OpMIPS64MOVBUreg_0(v)
	case OpMIPS64MOVBload:
		return rewriteValueMIPS64_OpMIPS64MOVBload_0(v)
	case OpMIPS64MOVBreg:
		return rewriteValueMIPS64_OpMIPS64MOVBreg_0(v)
	case OpMIPS64MOVBstore:
		return rewriteValueMIPS64_OpMIPS64MOVBstore_0(v)
	case OpMIPS64MOVBstorezero:
		return rewriteValueMIPS64_OpMIPS64MOVBstorezero_0(v)
	case OpMIPS64MOVDload:
		return rewriteValueMIPS64_OpMIPS64MOVDload_0(v)
	case OpMIPS64MOVDstore:
		return rewriteValueMIPS64_OpMIPS64MOVDstore_0(v)
	case OpMIPS64MOVFload:
		return rewriteValueMIPS64_OpMIPS64MOVFload_0(v)
	case OpMIPS64MOVFstore:
		return rewriteValueMIPS64_OpMIPS64MOVFstore_0(v)
	case OpMIPS64MOVHUload:
		return rewriteValueMIPS64_OpMIPS64MOVHUload_0(v)
	case OpMIPS64MOVHUreg:
		return rewriteValueMIPS64_OpMIPS64MOVHUreg_0(v)
	case OpMIPS64MOVHload:
		return rewriteValueMIPS64_OpMIPS64MOVHload_0(v)
	case OpMIPS64MOVHreg:
		return rewriteValueMIPS64_OpMIPS64MOVHreg_0(v)
	case OpMIPS64MOVHstore:
		return rewriteValueMIPS64_OpMIPS64MOVHstore_0(v)
	case OpMIPS64MOVHstorezero:
		return rewriteValueMIPS64_OpMIPS64MOVHstorezero_0(v)
	case OpMIPS64MOVVload:
		return rewriteValueMIPS64_OpMIPS64MOVVload_0(v)
	case OpMIPS64MOVVreg:
		return rewriteValueMIPS64_OpMIPS64MOVVreg_0(v)
	case OpMIPS64MOVVstore:
		return rewriteValueMIPS64_OpMIPS64MOVVstore_0(v)
	case OpMIPS64MOVVstorezero:
		return rewriteValueMIPS64_OpMIPS64MOVVstorezero_0(v)
	case OpMIPS64MOVWUload:
		return rewriteValueMIPS64_OpMIPS64MOVWUload_0(v)
	case OpMIPS64MOVWUreg:
		return rewriteValueMIPS64_OpMIPS64MOVWUreg_0(v)
	case OpMIPS64MOVWload:
		return rewriteValueMIPS64_OpMIPS64MOVWload_0(v)
	case OpMIPS64MOVWreg:
		return rewriteValueMIPS64_OpMIPS64MOVWreg_0(v) || rewriteValueMIPS64_OpMIPS64MOVWreg_10(v)
	case OpMIPS64MOVWstore:
		return rewriteValueMIPS64_OpMIPS64MOVWstore_0(v)
	case OpMIPS64MOVWstorezero:
		return rewriteValueMIPS64_OpMIPS64MOVWstorezero_0(v)
	case OpMIPS64NEGV:
		return rewriteValueMIPS64_OpMIPS64NEGV_0(v)
	case OpMIPS64NOR:
		return rewriteValueMIPS64_OpMIPS64NOR_0(v)
	case OpMIPS64NORconst:
		return rewriteValueMIPS64_OpMIPS64NORconst_0(v)
	case OpMIPS64OR:
		return rewriteValueMIPS64_OpMIPS64OR_0(v)
	case OpMIPS64ORconst:
		return rewriteValueMIPS64_OpMIPS64ORconst_0(v)
	case OpMIPS64SGT:
		return rewriteValueMIPS64_OpMIPS64SGT_0(v)
	case OpMIPS64SGTU:
		return rewriteValueMIPS64_OpMIPS64SGTU_0(v)
	case OpMIPS64SGTUconst:
		return rewriteValueMIPS64_OpMIPS64SGTUconst_0(v)
	case OpMIPS64SGTconst:
		return rewriteValueMIPS64_OpMIPS64SGTconst_0(v) || rewriteValueMIPS64_OpMIPS64SGTconst_10(v)
	case OpMIPS64SLLV:
		return rewriteValueMIPS64_OpMIPS64SLLV_0(v)
	case OpMIPS64SLLVconst:
		return rewriteValueMIPS64_OpMIPS64SLLVconst_0(v)
	case OpMIPS64SRAV:
		return rewriteValueMIPS64_OpMIPS64SRAV_0(v)
	case OpMIPS64SRAVconst:
		return rewriteValueMIPS64_OpMIPS64SRAVconst_0(v)
	case OpMIPS64SRLV:
		return rewriteValueMIPS64_OpMIPS64SRLV_0(v)
	case OpMIPS64SRLVconst:
		return rewriteValueMIPS64_OpMIPS64SRLVconst_0(v)
	case OpMIPS64SUBV:
		return rewriteValueMIPS64_OpMIPS64SUBV_0(v)
	case OpMIPS64SUBVconst:
		return rewriteValueMIPS64_OpMIPS64SUBVconst_0(v)
	case OpMIPS64XOR:
		return rewriteValueMIPS64_OpMIPS64XOR_0(v)
	case OpMIPS64XORconst:
		return rewriteValueMIPS64_OpMIPS64XORconst_0(v)
	case OpMod16:
		return rewriteValueMIPS64_OpMod16_0(v)
	case OpMod16u:
		return rewriteValueMIPS64_OpMod16u_0(v)
	case OpMod32:
		return rewriteValueMIPS64_OpMod32_0(v)
	case OpMod32u:
		return rewriteValueMIPS64_OpMod32u_0(v)
	case OpMod64:
		return rewriteValueMIPS64_OpMod64_0(v)
	case OpMod64u:
		return rewriteValueMIPS64_OpMod64u_0(v)
	case OpMod8:
		return rewriteValueMIPS64_OpMod8_0(v)
	case OpMod8u:
		return rewriteValueMIPS64_OpMod8u_0(v)
	case OpMove:
		return rewriteValueMIPS64_OpMove_0(v) || rewriteValueMIPS64_OpMove_10(v)
	case OpMul16:
		return rewriteValueMIPS64_OpMul16_0(v)
	case OpMul32:
		return rewriteValueMIPS64_OpMul32_0(v)
	case OpMul32F:
		return rewriteValueMIPS64_OpMul32F_0(v)
	case OpMul64:
		return rewriteValueMIPS64_OpMul64_0(v)
	case OpMul64F:
		return rewriteValueMIPS64_OpMul64F_0(v)
	case OpMul8:
		return rewriteValueMIPS64_OpMul8_0(v)
	case OpNeg16:
		return rewriteValueMIPS64_OpNeg16_0(v)
	case OpNeg32:
		return rewriteValueMIPS64_OpNeg32_0(v)
	case OpNeg32F:
		return rewriteValueMIPS64_OpNeg32F_0(v)
	case OpNeg64:
		return rewriteValueMIPS64_OpNeg64_0(v)
	case OpNeg64F:
		return rewriteValueMIPS64_OpNeg64F_0(v)
	case OpNeg8:
		return rewriteValueMIPS64_OpNeg8_0(v)
	case OpNeq16:
		return rewriteValueMIPS64_OpNeq16_0(v)
	case OpNeq32:
		return rewriteValueMIPS64_OpNeq32_0(v)
	case OpNeq32F:
		return rewriteValueMIPS64_OpNeq32F_0(v)
	case OpNeq64:
		return rewriteValueMIPS64_OpNeq64_0(v)
	case OpNeq64F:
		return rewriteValueMIPS64_OpNeq64F_0(v)
	case OpNeq8:
		return rewriteValueMIPS64_OpNeq8_0(v)
	case OpNeqB:
		return rewriteValueMIPS64_OpNeqB_0(v)
	case OpNeqPtr:
		return rewriteValueMIPS64_OpNeqPtr_0(v)
	case OpNilCheck:
		return rewriteValueMIPS64_OpNilCheck_0(v)
	case OpNot:
		return rewriteValueMIPS64_OpNot_0(v)
	case OpOffPtr:
		return rewriteValueMIPS64_OpOffPtr_0(v)
	case OpOr16:
		return rewriteValueMIPS64_OpOr16_0(v)
	case OpOr32:
		return rewriteValueMIPS64_OpOr32_0(v)
	case OpOr64:
		return rewriteValueMIPS64_OpOr64_0(v)
	case OpOr8:
		return rewriteValueMIPS64_OpOr8_0(v)
	case OpOrB:
		return rewriteValueMIPS64_OpOrB_0(v)
	case OpRound32F:
		return rewriteValueMIPS64_OpRound32F_0(v)
	case OpRound64F:
		return rewriteValueMIPS64_OpRound64F_0(v)
	case OpRsh16Ux16:
		return rewriteValueMIPS64_OpRsh16Ux16_0(v)
	case OpRsh16Ux32:
		return rewriteValueMIPS64_OpRsh16Ux32_0(v)
	case OpRsh16Ux64:
		return rewriteValueMIPS64_OpRsh16Ux64_0(v)
	case OpRsh16Ux8:
		return rewriteValueMIPS64_OpRsh16Ux8_0(v)
	case OpRsh16x16:
		return rewriteValueMIPS64_OpRsh16x16_0(v)
	case OpRsh16x32:
		return rewriteValueMIPS64_OpRsh16x32_0(v)
	case OpRsh16x64:
		return rewriteValueMIPS64_OpRsh16x64_0(v)
	case OpRsh16x8:
		return rewriteValueMIPS64_OpRsh16x8_0(v)
	case OpRsh32Ux16:
		return rewriteValueMIPS64_OpRsh32Ux16_0(v)
	case OpRsh32Ux32:
		return rewriteValueMIPS64_OpRsh32Ux32_0(v)
	case OpRsh32Ux64:
		return rewriteValueMIPS64_OpRsh32Ux64_0(v)
	case OpRsh32Ux8:
		return rewriteValueMIPS64_OpRsh32Ux8_0(v)
	case OpRsh32x16:
		return rewriteValueMIPS64_OpRsh32x16_0(v)
	case OpRsh32x32:
		return rewriteValueMIPS64_OpRsh32x32_0(v)
	case OpRsh32x64:
		return rewriteValueMIPS64_OpRsh32x64_0(v)
	case OpRsh32x8:
		return rewriteValueMIPS64_OpRsh32x8_0(v)
	case OpRsh64Ux16:
		return rewriteValueMIPS64_OpRsh64Ux16_0(v)
	case OpRsh64Ux32:
		return rewriteValueMIPS64_OpRsh64Ux32_0(v)
	case OpRsh64Ux64:
		return rewriteValueMIPS64_OpRsh64Ux64_0(v)
	case OpRsh64Ux8:
		return rewriteValueMIPS64_OpRsh64Ux8_0(v)
	case OpRsh64x16:
		return rewriteValueMIPS64_OpRsh64x16_0(v)
	case OpRsh64x32:
		return rewriteValueMIPS64_OpRsh64x32_0(v)
	case OpRsh64x64:
		return rewriteValueMIPS64_OpRsh64x64_0(v)
	case OpRsh64x8:
		return rewriteValueMIPS64_OpRsh64x8_0(v)
	case OpRsh8Ux16:
		return rewriteValueMIPS64_OpRsh8Ux16_0(v)
	case OpRsh8Ux32:
		return rewriteValueMIPS64_OpRsh8Ux32_0(v)
	case OpRsh8Ux64:
		return rewriteValueMIPS64_OpRsh8Ux64_0(v)
	case OpRsh8Ux8:
		return rewriteValueMIPS64_OpRsh8Ux8_0(v)
	case OpRsh8x16:
		return rewriteValueMIPS64_OpRsh8x16_0(v)
	case OpRsh8x32:
		return rewriteValueMIPS64_OpRsh8x32_0(v)
	case OpRsh8x64:
		return rewriteValueMIPS64_OpRsh8x64_0(v)
	case OpRsh8x8:
		return rewriteValueMIPS64_OpRsh8x8_0(v)
	case OpSelect0:
		return rewriteValueMIPS64_OpSelect0_0(v)
	case OpSelect1:
		return rewriteValueMIPS64_OpSelect1_0(v) || rewriteValueMIPS64_OpSelect1_10(v) || rewriteValueMIPS64_OpSelect1_20(v)
	case OpSignExt16to32:
		return rewriteValueMIPS64_OpSignExt16to32_0(v)
	case OpSignExt16to64:
		return rewriteValueMIPS64_OpSignExt16to64_0(v)
	case OpSignExt32to64:
		return rewriteValueMIPS64_OpSignExt32to64_0(v)
	case OpSignExt8to16:
		return rewriteValueMIPS64_OpSignExt8to16_0(v)
	case OpSignExt8to32:
		return rewriteValueMIPS64_OpSignExt8to32_0(v)
	case OpSignExt8to64:
		return rewriteValueMIPS64_OpSignExt8to64_0(v)
	case OpSlicemask:
		return rewriteValueMIPS64_OpSlicemask_0(v)
	case OpStaticCall:
		return rewriteValueMIPS64_OpStaticCall_0(v)
	case OpStore:
		return rewriteValueMIPS64_OpStore_0(v)
	case OpSub16:
		return rewriteValueMIPS64_OpSub16_0(v)
	case OpSub32:
		return rewriteValueMIPS64_OpSub32_0(v)
	case OpSub32F:
		return rewriteValueMIPS64_OpSub32F_0(v)
	case OpSub64:
		return rewriteValueMIPS64_OpSub64_0(v)
	case OpSub64F:
		return rewriteValueMIPS64_OpSub64F_0(v)
	case OpSub8:
		return rewriteValueMIPS64_OpSub8_0(v)
	case OpSubPtr:
		return rewriteValueMIPS64_OpSubPtr_0(v)
	case OpTrunc16to8:
		return rewriteValueMIPS64_OpTrunc16to8_0(v)
	case OpTrunc32to16:
		return rewriteValueMIPS64_OpTrunc32to16_0(v)
	case OpTrunc32to8:
		return rewriteValueMIPS64_OpTrunc32to8_0(v)
	case OpTrunc64to16:
		return rewriteValueMIPS64_OpTrunc64to16_0(v)
	case OpTrunc64to32:
		return rewriteValueMIPS64_OpTrunc64to32_0(v)
	case OpTrunc64to8:
		return rewriteValueMIPS64_OpTrunc64to8_0(v)
	case OpXor16:
		return rewriteValueMIPS64_OpXor16_0(v)
	case OpXor32:
		return rewriteValueMIPS64_OpXor32_0(v)
	case OpXor64:
		return rewriteValueMIPS64_OpXor64_0(v)
	case OpXor8:
		return rewriteValueMIPS64_OpXor8_0(v)
	case OpZero:
		return rewriteValueMIPS64_OpZero_0(v) || rewriteValueMIPS64_OpZero_10(v)
	case OpZeroExt16to32:
		return rewriteValueMIPS64_OpZeroExt16to32_0(v)
	case OpZeroExt16to64:
		return rewriteValueMIPS64_OpZeroExt16to64_0(v)
	case OpZeroExt32to64:
		return rewriteValueMIPS64_OpZeroExt32to64_0(v)
	case OpZeroExt8to16:
		return rewriteValueMIPS64_OpZeroExt8to16_0(v)
	case OpZeroExt8to32:
		return rewriteValueMIPS64_OpZeroExt8to32_0(v)
	case OpZeroExt8to64:
		return rewriteValueMIPS64_OpZeroExt8to64_0(v)
	}
	// No rewrite rule exists for this opcode; leave v unchanged.
	return false
}
   609  func rewriteValueMIPS64_OpAdd16_0(v *Value) bool {
   610  	// match: (Add16 x y)
   611  	// cond:
   612  	// result: (ADDV x y)
   613  	for {
   614  		_ = v.Args[1]
   615  		x := v.Args[0]
   616  		y := v.Args[1]
   617  		v.reset(OpMIPS64ADDV)
   618  		v.AddArg(x)
   619  		v.AddArg(y)
   620  		return true
   621  	}
   622  }
   623  func rewriteValueMIPS64_OpAdd32_0(v *Value) bool {
   624  	// match: (Add32 x y)
   625  	// cond:
   626  	// result: (ADDV x y)
   627  	for {
   628  		_ = v.Args[1]
   629  		x := v.Args[0]
   630  		y := v.Args[1]
   631  		v.reset(OpMIPS64ADDV)
   632  		v.AddArg(x)
   633  		v.AddArg(y)
   634  		return true
   635  	}
   636  }
   637  func rewriteValueMIPS64_OpAdd32F_0(v *Value) bool {
   638  	// match: (Add32F x y)
   639  	// cond:
   640  	// result: (ADDF x y)
   641  	for {
   642  		_ = v.Args[1]
   643  		x := v.Args[0]
   644  		y := v.Args[1]
   645  		v.reset(OpMIPS64ADDF)
   646  		v.AddArg(x)
   647  		v.AddArg(y)
   648  		return true
   649  	}
   650  }
   651  func rewriteValueMIPS64_OpAdd64_0(v *Value) bool {
   652  	// match: (Add64 x y)
   653  	// cond:
   654  	// result: (ADDV x y)
   655  	for {
   656  		_ = v.Args[1]
   657  		x := v.Args[0]
   658  		y := v.Args[1]
   659  		v.reset(OpMIPS64ADDV)
   660  		v.AddArg(x)
   661  		v.AddArg(y)
   662  		return true
   663  	}
   664  }
   665  func rewriteValueMIPS64_OpAdd64F_0(v *Value) bool {
   666  	// match: (Add64F x y)
   667  	// cond:
   668  	// result: (ADDD x y)
   669  	for {
   670  		_ = v.Args[1]
   671  		x := v.Args[0]
   672  		y := v.Args[1]
   673  		v.reset(OpMIPS64ADDD)
   674  		v.AddArg(x)
   675  		v.AddArg(y)
   676  		return true
   677  	}
   678  }
   679  func rewriteValueMIPS64_OpAdd8_0(v *Value) bool {
   680  	// match: (Add8 x y)
   681  	// cond:
   682  	// result: (ADDV x y)
   683  	for {
   684  		_ = v.Args[1]
   685  		x := v.Args[0]
   686  		y := v.Args[1]
   687  		v.reset(OpMIPS64ADDV)
   688  		v.AddArg(x)
   689  		v.AddArg(y)
   690  		return true
   691  	}
   692  }
   693  func rewriteValueMIPS64_OpAddPtr_0(v *Value) bool {
   694  	// match: (AddPtr x y)
   695  	// cond:
   696  	// result: (ADDV x y)
   697  	for {
   698  		_ = v.Args[1]
   699  		x := v.Args[0]
   700  		y := v.Args[1]
   701  		v.reset(OpMIPS64ADDV)
   702  		v.AddArg(x)
   703  		v.AddArg(y)
   704  		return true
   705  	}
   706  }
   707  func rewriteValueMIPS64_OpAddr_0(v *Value) bool {
   708  	// match: (Addr {sym} base)
   709  	// cond:
   710  	// result: (MOVVaddr {sym} base)
   711  	for {
   712  		sym := v.Aux
   713  		base := v.Args[0]
   714  		v.reset(OpMIPS64MOVVaddr)
   715  		v.Aux = sym
   716  		v.AddArg(base)
   717  		return true
   718  	}
   719  }
   720  func rewriteValueMIPS64_OpAnd16_0(v *Value) bool {
   721  	// match: (And16 x y)
   722  	// cond:
   723  	// result: (AND x y)
   724  	for {
   725  		_ = v.Args[1]
   726  		x := v.Args[0]
   727  		y := v.Args[1]
   728  		v.reset(OpMIPS64AND)
   729  		v.AddArg(x)
   730  		v.AddArg(y)
   731  		return true
   732  	}
   733  }
   734  func rewriteValueMIPS64_OpAnd32_0(v *Value) bool {
   735  	// match: (And32 x y)
   736  	// cond:
   737  	// result: (AND x y)
   738  	for {
   739  		_ = v.Args[1]
   740  		x := v.Args[0]
   741  		y := v.Args[1]
   742  		v.reset(OpMIPS64AND)
   743  		v.AddArg(x)
   744  		v.AddArg(y)
   745  		return true
   746  	}
   747  }
   748  func rewriteValueMIPS64_OpAnd64_0(v *Value) bool {
   749  	// match: (And64 x y)
   750  	// cond:
   751  	// result: (AND x y)
   752  	for {
   753  		_ = v.Args[1]
   754  		x := v.Args[0]
   755  		y := v.Args[1]
   756  		v.reset(OpMIPS64AND)
   757  		v.AddArg(x)
   758  		v.AddArg(y)
   759  		return true
   760  	}
   761  }
   762  func rewriteValueMIPS64_OpAnd8_0(v *Value) bool {
   763  	// match: (And8 x y)
   764  	// cond:
   765  	// result: (AND x y)
   766  	for {
   767  		_ = v.Args[1]
   768  		x := v.Args[0]
   769  		y := v.Args[1]
   770  		v.reset(OpMIPS64AND)
   771  		v.AddArg(x)
   772  		v.AddArg(y)
   773  		return true
   774  	}
   775  }
   776  func rewriteValueMIPS64_OpAndB_0(v *Value) bool {
   777  	// match: (AndB x y)
   778  	// cond:
   779  	// result: (AND x y)
   780  	for {
   781  		_ = v.Args[1]
   782  		x := v.Args[0]
   783  		y := v.Args[1]
   784  		v.reset(OpMIPS64AND)
   785  		v.AddArg(x)
   786  		v.AddArg(y)
   787  		return true
   788  	}
   789  }
   790  func rewriteValueMIPS64_OpAtomicAdd32_0(v *Value) bool {
   791  	// match: (AtomicAdd32 ptr val mem)
   792  	// cond:
   793  	// result: (LoweredAtomicAdd32 ptr val mem)
   794  	for {
   795  		_ = v.Args[2]
   796  		ptr := v.Args[0]
   797  		val := v.Args[1]
   798  		mem := v.Args[2]
   799  		v.reset(OpMIPS64LoweredAtomicAdd32)
   800  		v.AddArg(ptr)
   801  		v.AddArg(val)
   802  		v.AddArg(mem)
   803  		return true
   804  	}
   805  }
   806  func rewriteValueMIPS64_OpAtomicAdd64_0(v *Value) bool {
   807  	// match: (AtomicAdd64 ptr val mem)
   808  	// cond:
   809  	// result: (LoweredAtomicAdd64 ptr val mem)
   810  	for {
   811  		_ = v.Args[2]
   812  		ptr := v.Args[0]
   813  		val := v.Args[1]
   814  		mem := v.Args[2]
   815  		v.reset(OpMIPS64LoweredAtomicAdd64)
   816  		v.AddArg(ptr)
   817  		v.AddArg(val)
   818  		v.AddArg(mem)
   819  		return true
   820  	}
   821  }
   822  func rewriteValueMIPS64_OpAtomicCompareAndSwap32_0(v *Value) bool {
   823  	// match: (AtomicCompareAndSwap32 ptr old new_ mem)
   824  	// cond:
   825  	// result: (LoweredAtomicCas32 ptr old new_ mem)
   826  	for {
   827  		_ = v.Args[3]
   828  		ptr := v.Args[0]
   829  		old := v.Args[1]
   830  		new_ := v.Args[2]
   831  		mem := v.Args[3]
   832  		v.reset(OpMIPS64LoweredAtomicCas32)
   833  		v.AddArg(ptr)
   834  		v.AddArg(old)
   835  		v.AddArg(new_)
   836  		v.AddArg(mem)
   837  		return true
   838  	}
   839  }
   840  func rewriteValueMIPS64_OpAtomicCompareAndSwap64_0(v *Value) bool {
   841  	// match: (AtomicCompareAndSwap64 ptr old new_ mem)
   842  	// cond:
   843  	// result: (LoweredAtomicCas64 ptr old new_ mem)
   844  	for {
   845  		_ = v.Args[3]
   846  		ptr := v.Args[0]
   847  		old := v.Args[1]
   848  		new_ := v.Args[2]
   849  		mem := v.Args[3]
   850  		v.reset(OpMIPS64LoweredAtomicCas64)
   851  		v.AddArg(ptr)
   852  		v.AddArg(old)
   853  		v.AddArg(new_)
   854  		v.AddArg(mem)
   855  		return true
   856  	}
   857  }
   858  func rewriteValueMIPS64_OpAtomicExchange32_0(v *Value) bool {
   859  	// match: (AtomicExchange32 ptr val mem)
   860  	// cond:
   861  	// result: (LoweredAtomicExchange32 ptr val mem)
   862  	for {
   863  		_ = v.Args[2]
   864  		ptr := v.Args[0]
   865  		val := v.Args[1]
   866  		mem := v.Args[2]
   867  		v.reset(OpMIPS64LoweredAtomicExchange32)
   868  		v.AddArg(ptr)
   869  		v.AddArg(val)
   870  		v.AddArg(mem)
   871  		return true
   872  	}
   873  }
   874  func rewriteValueMIPS64_OpAtomicExchange64_0(v *Value) bool {
   875  	// match: (AtomicExchange64 ptr val mem)
   876  	// cond:
   877  	// result: (LoweredAtomicExchange64 ptr val mem)
   878  	for {
   879  		_ = v.Args[2]
   880  		ptr := v.Args[0]
   881  		val := v.Args[1]
   882  		mem := v.Args[2]
   883  		v.reset(OpMIPS64LoweredAtomicExchange64)
   884  		v.AddArg(ptr)
   885  		v.AddArg(val)
   886  		v.AddArg(mem)
   887  		return true
   888  	}
   889  }
// rewriteValueMIPS64_OpAtomicLoad32_0 lowers the generic 32-bit atomic load
// to the MIPS64 LoweredAtomicLoad32 pseudo-op.
func rewriteValueMIPS64_OpAtomicLoad32_0(v *Value) bool {
	// match: (AtomicLoad32 ptr mem)
	// cond:
	// result: (LoweredAtomicLoad32 ptr mem)
	for {
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		v.reset(OpMIPS64LoweredAtomicLoad32)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
}
// rewriteValueMIPS64_OpAtomicLoad64_0 lowers the generic 64-bit atomic load
// to the MIPS64 LoweredAtomicLoad64 pseudo-op.
func rewriteValueMIPS64_OpAtomicLoad64_0(v *Value) bool {
	// match: (AtomicLoad64 ptr mem)
	// cond:
	// result: (LoweredAtomicLoad64 ptr mem)
	for {
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		v.reset(OpMIPS64LoweredAtomicLoad64)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
}
// rewriteValueMIPS64_OpAtomicLoadPtr_0 lowers the generic atomic pointer load
// to LoweredAtomicLoad64: pointers are word-sized (64-bit) on mips64, so the
// 64-bit load pseudo-op is reused.
func rewriteValueMIPS64_OpAtomicLoadPtr_0(v *Value) bool {
	// match: (AtomicLoadPtr ptr mem)
	// cond:
	// result: (LoweredAtomicLoad64 ptr mem)
	for {
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		v.reset(OpMIPS64LoweredAtomicLoad64)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
}
// rewriteValueMIPS64_OpAtomicStore32_0 lowers the generic 32-bit atomic store
// to the MIPS64 LoweredAtomicStore32 pseudo-op.
func rewriteValueMIPS64_OpAtomicStore32_0(v *Value) bool {
	// match: (AtomicStore32 ptr val mem)
	// cond:
	// result: (LoweredAtomicStore32 ptr val mem)
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		v.reset(OpMIPS64LoweredAtomicStore32)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
}
// rewriteValueMIPS64_OpAtomicStore64_0 lowers the generic 64-bit atomic store
// to the MIPS64 LoweredAtomicStore64 pseudo-op.
func rewriteValueMIPS64_OpAtomicStore64_0(v *Value) bool {
	// match: (AtomicStore64 ptr val mem)
	// cond:
	// result: (LoweredAtomicStore64 ptr val mem)
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		v.reset(OpMIPS64LoweredAtomicStore64)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
}
// rewriteValueMIPS64_OpAtomicStorePtrNoWB_0 lowers the atomic pointer store
// (no write barrier) to LoweredAtomicStore64, since pointers are 64-bit on
// mips64.
func rewriteValueMIPS64_OpAtomicStorePtrNoWB_0(v *Value) bool {
	// match: (AtomicStorePtrNoWB ptr val mem)
	// cond:
	// result: (LoweredAtomicStore64 ptr val mem)
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		v.reset(OpMIPS64LoweredAtomicStore64)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
}
// rewriteValueMIPS64_OpAvg64u_0 lowers the unsigned 64-bit average op.
// The result pattern computes ((x-y)>>1)+y rather than (x+y)>>1, which gives
// the same value without the intermediate sum x+y overflowing 64 bits.
func rewriteValueMIPS64_OpAvg64u_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Avg64u <t> x y)
	// cond:
	// result: (ADDV (SRLVconst <t> (SUBV <t> x y) [1]) y)
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64ADDV)
		v0 := b.NewValue0(v.Pos, OpMIPS64SRLVconst, t)
		v0.AuxInt = 1
		v1 := b.NewValue0(v.Pos, OpMIPS64SUBV, t)
		v1.AddArg(x)
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpClosureCall_0 lowers the generic closure call to the
// MIPS64 CALLclosure op, carrying the argument width through AuxInt.
func rewriteValueMIPS64_OpClosureCall_0(v *Value) bool {
	// match: (ClosureCall [argwid] entry closure mem)
	// cond:
	// result: (CALLclosure [argwid] entry closure mem)
	for {
		argwid := v.AuxInt
		_ = v.Args[2]
		entry := v.Args[0]
		closure := v.Args[1]
		mem := v.Args[2]
		v.reset(OpMIPS64CALLclosure)
		v.AuxInt = argwid
		v.AddArg(entry)
		v.AddArg(closure)
		v.AddArg(mem)
		return true
	}
}
// rewriteValueMIPS64_OpCom16_0 lowers 16-bit bitwise complement: MIPS64 has
// no NOT instruction, so NOR with a zero constant is used (NOR(0, x) == ^x).
func rewriteValueMIPS64_OpCom16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Com16 x)
	// cond:
	// result: (NOR (MOVVconst [0]) x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64NOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpCom32_0 lowers 32-bit bitwise complement via
// NOR(0, x) == ^x, as MIPS64 lacks a dedicated NOT instruction.
func rewriteValueMIPS64_OpCom32_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Com32 x)
	// cond:
	// result: (NOR (MOVVconst [0]) x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64NOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpCom64_0 lowers 64-bit bitwise complement via
// NOR(0, x) == ^x, as MIPS64 lacks a dedicated NOT instruction.
func rewriteValueMIPS64_OpCom64_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Com64 x)
	// cond:
	// result: (NOR (MOVVconst [0]) x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64NOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpCom8_0 lowers 8-bit bitwise complement via
// NOR(0, x) == ^x, as MIPS64 lacks a dedicated NOT instruction.
func rewriteValueMIPS64_OpCom8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Com8 x)
	// cond:
	// result: (NOR (MOVVconst [0]) x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64NOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpConst16_0 lowers a 16-bit integer constant to the
// 64-bit MOVVconst; narrower constants all use the full-width constant op.
func rewriteValueMIPS64_OpConst16_0(v *Value) bool {
	// match: (Const16 [val])
	// cond:
	// result: (MOVVconst [val])
	for {
		val := v.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = val
		return true
	}
}
// rewriteValueMIPS64_OpConst32_0 lowers a 32-bit integer constant to MOVVconst.
func rewriteValueMIPS64_OpConst32_0(v *Value) bool {
	// match: (Const32 [val])
	// cond:
	// result: (MOVVconst [val])
	for {
		val := v.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = val
		return true
	}
}
// rewriteValueMIPS64_OpConst32F_0 lowers a 32-bit float constant to the
// MIPS64 MOVFconst op.
func rewriteValueMIPS64_OpConst32F_0(v *Value) bool {
	// match: (Const32F [val])
	// cond:
	// result: (MOVFconst [val])
	for {
		val := v.AuxInt
		v.reset(OpMIPS64MOVFconst)
		v.AuxInt = val
		return true
	}
}
// rewriteValueMIPS64_OpConst64_0 lowers a 64-bit integer constant to MOVVconst.
func rewriteValueMIPS64_OpConst64_0(v *Value) bool {
	// match: (Const64 [val])
	// cond:
	// result: (MOVVconst [val])
	for {
		val := v.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = val
		return true
	}
}
// rewriteValueMIPS64_OpConst64F_0 lowers a 64-bit float constant to the
// MIPS64 MOVDconst op.
func rewriteValueMIPS64_OpConst64F_0(v *Value) bool {
	// match: (Const64F [val])
	// cond:
	// result: (MOVDconst [val])
	for {
		val := v.AuxInt
		v.reset(OpMIPS64MOVDconst)
		v.AuxInt = val
		return true
	}
}
// rewriteValueMIPS64_OpConst8_0 lowers an 8-bit integer constant to MOVVconst.
func rewriteValueMIPS64_OpConst8_0(v *Value) bool {
	// match: (Const8 [val])
	// cond:
	// result: (MOVVconst [val])
	for {
		val := v.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = val
		return true
	}
}
// rewriteValueMIPS64_OpConstBool_0 lowers a boolean constant (0 or 1 in
// AuxInt) to MOVVconst.
func rewriteValueMIPS64_OpConstBool_0(v *Value) bool {
	// match: (ConstBool [b])
	// cond:
	// result: (MOVVconst [b])
	for {
		b := v.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = b
		return true
	}
}
// rewriteValueMIPS64_OpConstNil_0 lowers the nil-pointer constant to a zero
// MOVVconst.
func rewriteValueMIPS64_OpConstNil_0(v *Value) bool {
	// match: (ConstNil)
	// cond:
	// result: (MOVVconst [0])
	for {
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
}
// rewriteValueMIPS64_OpConvert_0 lowers the generic pointer/uintptr Convert
// op to the MIPS64 MOVVconvert op.
func rewriteValueMIPS64_OpConvert_0(v *Value) bool {
	// match: (Convert x mem)
	// cond:
	// result: (MOVVconvert x mem)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		mem := v.Args[1]
		v.reset(OpMIPS64MOVVconvert)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
}
// rewriteValueMIPS64_OpCvt32Fto32_0 lowers float32-to-int32 conversion to the
// MIPS64 TRUNCFW instruction.
func rewriteValueMIPS64_OpCvt32Fto32_0(v *Value) bool {
	// match: (Cvt32Fto32 x)
	// cond:
	// result: (TRUNCFW x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64TRUNCFW)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpCvt32Fto64_0 lowers float32-to-int64 conversion to the
// MIPS64 TRUNCFV instruction.
func rewriteValueMIPS64_OpCvt32Fto64_0(v *Value) bool {
	// match: (Cvt32Fto64 x)
	// cond:
	// result: (TRUNCFV x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64TRUNCFV)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpCvt32Fto64F_0 lowers float32-to-float64 conversion to
// the MIPS64 MOVFD instruction.
func rewriteValueMIPS64_OpCvt32Fto64F_0(v *Value) bool {
	// match: (Cvt32Fto64F x)
	// cond:
	// result: (MOVFD x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64MOVFD)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpCvt32to32F_0 lowers int32-to-float32 conversion to the
// MIPS64 MOVWF instruction.
func rewriteValueMIPS64_OpCvt32to32F_0(v *Value) bool {
	// match: (Cvt32to32F x)
	// cond:
	// result: (MOVWF x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64MOVWF)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpCvt32to64F_0 lowers int32-to-float64 conversion to the
// MIPS64 MOVWD instruction.
func rewriteValueMIPS64_OpCvt32to64F_0(v *Value) bool {
	// match: (Cvt32to64F x)
	// cond:
	// result: (MOVWD x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64MOVWD)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpCvt64Fto32_0 lowers float64-to-int32 conversion to the
// MIPS64 TRUNCDW instruction.
func rewriteValueMIPS64_OpCvt64Fto32_0(v *Value) bool {
	// match: (Cvt64Fto32 x)
	// cond:
	// result: (TRUNCDW x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64TRUNCDW)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpCvt64Fto32F_0 lowers float64-to-float32 conversion to
// the MIPS64 MOVDF instruction.
func rewriteValueMIPS64_OpCvt64Fto32F_0(v *Value) bool {
	// match: (Cvt64Fto32F x)
	// cond:
	// result: (MOVDF x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64MOVDF)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpCvt64Fto64_0 lowers float64-to-int64 conversion to the
// MIPS64 TRUNCDV instruction.
func rewriteValueMIPS64_OpCvt64Fto64_0(v *Value) bool {
	// match: (Cvt64Fto64 x)
	// cond:
	// result: (TRUNCDV x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64TRUNCDV)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpCvt64to32F_0 lowers int64-to-float32 conversion to the
// MIPS64 MOVVF instruction.
func rewriteValueMIPS64_OpCvt64to32F_0(v *Value) bool {
	// match: (Cvt64to32F x)
	// cond:
	// result: (MOVVF x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64MOVVF)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpCvt64to64F_0 lowers int64-to-float64 conversion to the
// MIPS64 MOVVD instruction.
func rewriteValueMIPS64_OpCvt64to64F_0(v *Value) bool {
	// match: (Cvt64to64F x)
	// cond:
	// result: (MOVVD x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64MOVVD)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpDiv16_0 lowers signed 16-bit division: both operands
// are sign-extended to 64 bits, divided with DIVV (which yields a
// (mod, quotient) tuple), and Select1 picks out the quotient.
func rewriteValueMIPS64_OpDiv16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Div16 x y)
	// cond:
	// result: (Select1 (DIVV (SignExt16to64 x) (SignExt16to64 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
		v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpDiv16u_0 lowers unsigned 16-bit division: operands are
// zero-extended to 64 bits, divided with DIVVU (a tuple-producing op), and
// Select1 extracts the quotient.
func rewriteValueMIPS64_OpDiv16u_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Div16u x y)
	// cond:
	// result: (Select1 (DIVVU (ZeroExt16to64 x) (ZeroExt16to64 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpDiv32_0 lowers signed 32-bit division: operands are
// sign-extended to 64 bits, divided with DIVV, and Select1 extracts the
// quotient from the resulting tuple.
func rewriteValueMIPS64_OpDiv32_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Div32 x y)
	// cond:
	// result: (Select1 (DIVV (SignExt32to64 x) (SignExt32to64 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
		v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpDiv32F_0 lowers 32-bit float division directly to the
// MIPS64 DIVF instruction.
func rewriteValueMIPS64_OpDiv32F_0(v *Value) bool {
	// match: (Div32F x y)
	// cond:
	// result: (DIVF x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64DIVF)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpDiv32u_0 lowers unsigned 32-bit division: operands are
// zero-extended to 64 bits, divided with DIVVU, and Select1 extracts the
// quotient from the resulting tuple.
func rewriteValueMIPS64_OpDiv32u_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Div32u x y)
	// cond:
	// result: (Select1 (DIVVU (ZeroExt32to64 x) (ZeroExt32to64 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpDiv64_0 lowers signed 64-bit division: no extension is
// needed, so DIVV is applied directly and Select1 extracts the quotient.
func rewriteValueMIPS64_OpDiv64_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Div64 x y)
	// cond:
	// result: (Select1 (DIVV x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpDiv64F_0 lowers 64-bit float division directly to the
// MIPS64 DIVD instruction.
func rewriteValueMIPS64_OpDiv64F_0(v *Value) bool {
	// match: (Div64F x y)
	// cond:
	// result: (DIVD x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64DIVD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpDiv64u_0 lowers unsigned 64-bit division: DIVVU is
// applied directly and Select1 extracts the quotient from the tuple.
func rewriteValueMIPS64_OpDiv64u_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Div64u x y)
	// cond:
	// result: (Select1 (DIVVU x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpDiv8_0 lowers signed 8-bit division: operands are
// sign-extended to 64 bits, divided with DIVV, and Select1 extracts the
// quotient from the resulting tuple.
func rewriteValueMIPS64_OpDiv8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Div8 x y)
	// cond:
	// result: (Select1 (DIVV (SignExt8to64 x) (SignExt8to64 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
		v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpDiv8u_0 lowers unsigned 8-bit division: operands are
// zero-extended to 64 bits, divided with DIVVU, and Select1 extracts the
// quotient from the resulting tuple.
func rewriteValueMIPS64_OpDiv8u_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Div8u x y)
	// cond:
	// result: (Select1 (DIVVU (ZeroExt8to64 x) (ZeroExt8to64 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpEq16_0 lowers 16-bit equality. The operands are
// zero-extended and XORed; SGTU(1, x^y) yields 1 exactly when x^y == 0,
// i.e. when x == y.
func rewriteValueMIPS64_OpEq16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Eq16 x y)
	// cond:
	// result: (SGTU (MOVVconst [1]) (XOR (ZeroExt16to64 x) (ZeroExt16to64 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v2.AddArg(x)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpEq32_0 lowers 32-bit equality. The operands are
// zero-extended and XORed; SGTU(1, x^y) yields 1 exactly when x^y == 0,
// i.e. when x == y.
func rewriteValueMIPS64_OpEq32_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Eq32 x y)
	// cond:
	// result: (SGTU (MOVVconst [1]) (XOR (ZeroExt32to64 x) (ZeroExt32to64 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(x)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpEq32F_0 lowers 32-bit float equality to the CMPEQF
// flag-setting compare followed by FPFlagTrue to materialize the boolean.
func rewriteValueMIPS64_OpEq32F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Eq32F x y)
	// cond:
	// result: (FPFlagTrue (CMPEQF x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPEQF, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpEq64_0 lowers 64-bit equality: SGTU(1, x^y) yields 1
// exactly when x^y == 0, i.e. when x == y. No extension is needed at full
// width.
func rewriteValueMIPS64_OpEq64_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Eq64 x y)
	// cond:
	// result: (SGTU (MOVVconst [1]) (XOR x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
		v1.AddArg(x)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpEq64F_0 lowers 64-bit float equality to the CMPEQD
// flag-setting compare followed by FPFlagTrue to materialize the boolean.
func rewriteValueMIPS64_OpEq64F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Eq64F x y)
	// cond:
	// result: (FPFlagTrue (CMPEQD x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPEQD, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpEq8_0 lowers 8-bit equality. The operands are
// zero-extended and XORed; SGTU(1, x^y) yields 1 exactly when x^y == 0,
// i.e. when x == y.
func rewriteValueMIPS64_OpEq8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Eq8 x y)
	// cond:
	// result: (SGTU (MOVVconst [1]) (XOR (ZeroExt8to64 x) (ZeroExt8to64 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v2.AddArg(x)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpEqB_0 lowers boolean equality: for 0/1 values,
// x == y is equivalent to 1 ^ (x ^ y), so two XORs suffice with no compare.
func rewriteValueMIPS64_OpEqB_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (EqB x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (XOR <typ.Bool> x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.Bool)
		v1.AddArg(x)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpEqPtr_0 lowers pointer equality identically to Eq64:
// SGTU(1, x^y) is 1 exactly when the two pointers are bitwise equal.
func rewriteValueMIPS64_OpEqPtr_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (EqPtr x y)
	// cond:
	// result: (SGTU (MOVVconst [1]) (XOR x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
		v1.AddArg(x)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpGeq16_0 lowers signed 16-bit >=: operands are
// sign-extended and x >= y is computed as !(y > x), i.e. 1 XOR SGT(y, x).
func rewriteValueMIPS64_OpGeq16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Geq16 x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGT (SignExt16to64 y) (SignExt16to64 x)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v3.AddArg(x)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpGeq16U_0 lowers unsigned 16-bit >=: operands are
// zero-extended and x >= y is computed as !(y > x), i.e. 1 XOR SGTU(y, x).
func rewriteValueMIPS64_OpGeq16U_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Geq16U x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt16to64 y) (ZeroExt16to64 x)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(x)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpGeq32_0 lowers signed 32-bit >=: operands are
// sign-extended and x >= y is computed as !(y > x), i.e. 1 XOR SGT(y, x).
func rewriteValueMIPS64_OpGeq32_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Geq32 x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGT (SignExt32to64 y) (SignExt32to64 x)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v3.AddArg(x)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpGeq32F_0 lowers 32-bit float >= to the CMPGEF
// flag-setting compare followed by FPFlagTrue to materialize the boolean.
func rewriteValueMIPS64_OpGeq32F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Geq32F x y)
	// cond:
	// result: (FPFlagTrue (CMPGEF x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPGEF, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpGeq32U_0 lowers unsigned 32-bit >=: operands are
// zero-extended and x >= y is computed as !(y > x), i.e. 1 XOR SGTU(y, x).
func rewriteValueMIPS64_OpGeq32U_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Geq32U x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt32to64 y) (ZeroExt32to64 x)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(x)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpGeq64_0 lowers signed 64-bit >=: x >= y is computed
// as !(y > x), i.e. 1 XOR SGT(y, x). No extension is needed at full width.
func rewriteValueMIPS64_OpGeq64_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Geq64 x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGT y x))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool)
		v1.AddArg(y)
		v1.AddArg(x)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpGeq64F_0 lowers 64-bit float >= to the CMPGED
// flag-setting compare followed by FPFlagTrue to materialize the boolean.
func rewriteValueMIPS64_OpGeq64F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Geq64F x y)
	// cond:
	// result: (FPFlagTrue (CMPGED x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPGED, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpGeq64U_0 lowers unsigned 64-bit >=: x >= y is computed
// as !(y > x), i.e. 1 XOR SGTU(y, x).
func rewriteValueMIPS64_OpGeq64U_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Geq64U x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGTU y x))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v1.AddArg(y)
		v1.AddArg(x)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpGeq8_0 lowers signed 8-bit >=: operands are
// sign-extended and x >= y is computed as !(y > x), i.e. 1 XOR SGT(y, x).
func rewriteValueMIPS64_OpGeq8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Geq8 x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGT (SignExt8to64 y) (SignExt8to64 x)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v3.AddArg(x)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpGeq8U_0 lowers unsigned 8-bit >=: operands are
// zero-extended and x >= y is computed as !(y > x), i.e. 1 XOR SGTU(y, x).
func rewriteValueMIPS64_OpGeq8U_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Geq8U x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt8to64 y) (ZeroExt8to64 x)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(x)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpGetCallerSP_0 lowers GetCallerSP to the MIPS64
// LoweredGetCallerSP pseudo-op.
func rewriteValueMIPS64_OpGetCallerSP_0(v *Value) bool {
	// match: (GetCallerSP)
	// cond:
	// result: (LoweredGetCallerSP)
	for {
		v.reset(OpMIPS64LoweredGetCallerSP)
		return true
	}
}
// rewriteValueMIPS64_OpGetClosurePtr_0 lowers GetClosurePtr to the MIPS64
// LoweredGetClosurePtr pseudo-op.
func rewriteValueMIPS64_OpGetClosurePtr_0(v *Value) bool {
	// match: (GetClosurePtr)
	// cond:
	// result: (LoweredGetClosurePtr)
	for {
		v.reset(OpMIPS64LoweredGetClosurePtr)
		return true
	}
}
// rewriteValueMIPS64_OpGreater16_0 lowers signed 16-bit >: both operands are
// sign-extended to 64 bits and compared with SGT.
func rewriteValueMIPS64_OpGreater16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Greater16 x y)
	// cond:
	// result: (SGT (SignExt16to64 x) (SignExt16to64 y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGT)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpGreater16U_0 lowers unsigned 16-bit >: both operands
// are zero-extended to 64 bits and compared with SGTU.
func rewriteValueMIPS64_OpGreater16U_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Greater16U x y)
	// cond:
	// result: (SGTU (ZeroExt16to64 x) (ZeroExt16to64 y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
  2008  func rewriteValueMIPS64_OpGreater32_0(v *Value) bool {
  2009  	b := v.Block
  2010  	_ = b
  2011  	typ := &b.Func.Config.Types
  2012  	_ = typ
  2013  	// match: (Greater32 x y)
  2014  	// cond:
  2015  	// result: (SGT (SignExt32to64 x) (SignExt32to64 y))
  2016  	for {
  2017  		_ = v.Args[1]
  2018  		x := v.Args[0]
  2019  		y := v.Args[1]
  2020  		v.reset(OpMIPS64SGT)
  2021  		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
  2022  		v0.AddArg(x)
  2023  		v.AddArg(v0)
  2024  		v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
  2025  		v1.AddArg(y)
  2026  		v.AddArg(v1)
  2027  		return true
  2028  	}
  2029  }
  2030  func rewriteValueMIPS64_OpGreater32F_0(v *Value) bool {
  2031  	b := v.Block
  2032  	_ = b
  2033  	// match: (Greater32F x y)
  2034  	// cond:
  2035  	// result: (FPFlagTrue (CMPGTF x y))
  2036  	for {
  2037  		_ = v.Args[1]
  2038  		x := v.Args[0]
  2039  		y := v.Args[1]
  2040  		v.reset(OpMIPS64FPFlagTrue)
  2041  		v0 := b.NewValue0(v.Pos, OpMIPS64CMPGTF, types.TypeFlags)
  2042  		v0.AddArg(x)
  2043  		v0.AddArg(y)
  2044  		v.AddArg(v0)
  2045  		return true
  2046  	}
  2047  }
  2048  func rewriteValueMIPS64_OpGreater32U_0(v *Value) bool {
  2049  	b := v.Block
  2050  	_ = b
  2051  	typ := &b.Func.Config.Types
  2052  	_ = typ
  2053  	// match: (Greater32U x y)
  2054  	// cond:
  2055  	// result: (SGTU (ZeroExt32to64 x) (ZeroExt32to64 y))
  2056  	for {
  2057  		_ = v.Args[1]
  2058  		x := v.Args[0]
  2059  		y := v.Args[1]
  2060  		v.reset(OpMIPS64SGTU)
  2061  		v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  2062  		v0.AddArg(x)
  2063  		v.AddArg(v0)
  2064  		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  2065  		v1.AddArg(y)
  2066  		v.AddArg(v1)
  2067  		return true
  2068  	}
  2069  }
  2070  func rewriteValueMIPS64_OpGreater64_0(v *Value) bool {
  2071  	// match: (Greater64 x y)
  2072  	// cond:
  2073  	// result: (SGT x y)
  2074  	for {
  2075  		_ = v.Args[1]
  2076  		x := v.Args[0]
  2077  		y := v.Args[1]
  2078  		v.reset(OpMIPS64SGT)
  2079  		v.AddArg(x)
  2080  		v.AddArg(y)
  2081  		return true
  2082  	}
  2083  }
  2084  func rewriteValueMIPS64_OpGreater64F_0(v *Value) bool {
  2085  	b := v.Block
  2086  	_ = b
  2087  	// match: (Greater64F x y)
  2088  	// cond:
  2089  	// result: (FPFlagTrue (CMPGTD x y))
  2090  	for {
  2091  		_ = v.Args[1]
  2092  		x := v.Args[0]
  2093  		y := v.Args[1]
  2094  		v.reset(OpMIPS64FPFlagTrue)
  2095  		v0 := b.NewValue0(v.Pos, OpMIPS64CMPGTD, types.TypeFlags)
  2096  		v0.AddArg(x)
  2097  		v0.AddArg(y)
  2098  		v.AddArg(v0)
  2099  		return true
  2100  	}
  2101  }
  2102  func rewriteValueMIPS64_OpGreater64U_0(v *Value) bool {
  2103  	// match: (Greater64U x y)
  2104  	// cond:
  2105  	// result: (SGTU x y)
  2106  	for {
  2107  		_ = v.Args[1]
  2108  		x := v.Args[0]
  2109  		y := v.Args[1]
  2110  		v.reset(OpMIPS64SGTU)
  2111  		v.AddArg(x)
  2112  		v.AddArg(y)
  2113  		return true
  2114  	}
  2115  }
  2116  func rewriteValueMIPS64_OpGreater8_0(v *Value) bool {
  2117  	b := v.Block
  2118  	_ = b
  2119  	typ := &b.Func.Config.Types
  2120  	_ = typ
  2121  	// match: (Greater8 x y)
  2122  	// cond:
  2123  	// result: (SGT (SignExt8to64 x) (SignExt8to64 y))
  2124  	for {
  2125  		_ = v.Args[1]
  2126  		x := v.Args[0]
  2127  		y := v.Args[1]
  2128  		v.reset(OpMIPS64SGT)
  2129  		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
  2130  		v0.AddArg(x)
  2131  		v.AddArg(v0)
  2132  		v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
  2133  		v1.AddArg(y)
  2134  		v.AddArg(v1)
  2135  		return true
  2136  	}
  2137  }
  2138  func rewriteValueMIPS64_OpGreater8U_0(v *Value) bool {
  2139  	b := v.Block
  2140  	_ = b
  2141  	typ := &b.Func.Config.Types
  2142  	_ = typ
  2143  	// match: (Greater8U x y)
  2144  	// cond:
  2145  	// result: (SGTU (ZeroExt8to64 x) (ZeroExt8to64 y))
  2146  	for {
  2147  		_ = v.Args[1]
  2148  		x := v.Args[0]
  2149  		y := v.Args[1]
  2150  		v.reset(OpMIPS64SGTU)
  2151  		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  2152  		v0.AddArg(x)
  2153  		v.AddArg(v0)
  2154  		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  2155  		v1.AddArg(y)
  2156  		v.AddArg(v1)
  2157  		return true
  2158  	}
  2159  }
// rewriteValueMIPS64_OpHmul32_0 lowers Hmul32 (high 32 bits of a signed
// 32x32 multiply): sign-extend both operands, take the low word of the
// 64-bit MULV product via Select1, and arithmetic-shift it right by 32.
func rewriteValueMIPS64_OpHmul32_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Hmul32 x y)
	// cond:
	// result: (SRAVconst (Select1 <typ.Int64> (MULV (SignExt32to64 x) (SignExt32to64 y))) [32])
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAVconst)
		v.AuxInt = 32
		v0 := b.NewValue0(v.Pos, OpSelect1, typ.Int64)
		v1 := b.NewValue0(v.Pos, OpMIPS64MULV, types.NewTuple(typ.Int64, typ.Int64))
		v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v2.AddArg(x)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueMIPS64_OpHmul32u_0 lowers Hmul32u (high 32 bits of an unsigned
// 32x32 multiply): zero-extend both operands, take the low word of the
// 64-bit MULVU product via Select1, and logical-shift it right by 32.
func rewriteValueMIPS64_OpHmul32u_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Hmul32u x y)
	// cond:
	// result: (SRLVconst (Select1 <typ.UInt64> (MULVU (ZeroExt32to64 x) (ZeroExt32to64 y))) [32])
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRLVconst)
		v.AuxInt = 32
		v0 := b.NewValue0(v.Pos, OpSelect1, typ.UInt64)
		v1 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(x)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueMIPS64_OpHmul64_0 lowers Hmul64 (high 64 bits of a signed
// 64x64 multiply) to Select0 of the two-result MULV tuple.
func rewriteValueMIPS64_OpHmul64_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Hmul64 x y)
	// cond:
	// result: (Select0 (MULV x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPS64MULV, types.NewTuple(typ.Int64, typ.Int64))
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueMIPS64_OpHmul64u_0 lowers Hmul64u (high 64 bits of an unsigned
// 64x64 multiply) to Select0 of the two-result MULVU tuple.
func rewriteValueMIPS64_OpHmul64u_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Hmul64u x y)
	// cond:
	// result: (Select0 (MULVU x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpInterCall_0 lowers an interface-method call to the
// MIPS64 CALLinter op, carrying the argument width through AuxInt and
// preserving the entry-point and memory arguments.
func rewriteValueMIPS64_OpInterCall_0(v *Value) bool {
	// match: (InterCall [argwid] entry mem)
	// cond:
	// result: (CALLinter [argwid] entry mem)
	for {
		argwid := v.AuxInt
		_ = v.Args[1]
		entry := v.Args[0]
		mem := v.Args[1]
		v.reset(OpMIPS64CALLinter)
		v.AuxInt = argwid
		v.AddArg(entry)
		v.AddArg(mem)
		return true
	}
}

// rewriteValueMIPS64_OpIsInBounds_0 lowers the bounds check idx < len to
// SGTU with the operands swapped: len >u idx.
func rewriteValueMIPS64_OpIsInBounds_0(v *Value) bool {
	// match: (IsInBounds idx len)
	// cond:
	// result: (SGTU len idx)
	for {
		_ = v.Args[1]
		idx := v.Args[0]
		len := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v.AddArg(len)
		v.AddArg(idx)
		return true
	}
}

// rewriteValueMIPS64_OpIsNonNil_0 lowers the nil check to an unsigned
// compare of the pointer against the constant 0: ptr >u 0.
func rewriteValueMIPS64_OpIsNonNil_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (IsNonNil ptr)
	// cond:
	// result: (SGTU ptr (MOVVconst [0]))
	for {
		ptr := v.Args[0]
		v.reset(OpMIPS64SGTU)
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		return true
	}
}

// rewriteValueMIPS64_OpIsSliceInBounds_0 lowers the slice bounds check
// idx <= len. MIPS64 has no less-or-equal compare here, so it computes
// !(idx >u len) by XORing the 0/1 SGTU result with the constant 1.
func rewriteValueMIPS64_OpIsSliceInBounds_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (IsSliceInBounds idx len)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGTU idx len))
	for {
		_ = v.Args[1]
		idx := v.Args[0]
		len := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v1.AddArg(idx)
		v1.AddArg(len)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpLeq16_0 lowers Leq16 as !(x > y): the 0/1 result of a
// signed SGT on the sign-extended operands is XORed with the constant 1.
func rewriteValueMIPS64_OpLeq16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Leq16 x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGT (SignExt16to64 x) (SignExt16to64 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v2.AddArg(x)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}

// rewriteValueMIPS64_OpLeq16U_0 lowers Leq16U as !(x >u y): the 0/1 result of
// an unsigned SGTU on the zero-extended operands is XORed with 1.
func rewriteValueMIPS64_OpLeq16U_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Leq16U x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt16to64 x) (ZeroExt16to64 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v2.AddArg(x)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}

// rewriteValueMIPS64_OpLeq32_0 lowers Leq32 as !(x > y) via SGT on the
// sign-extended operands, negated by XOR with 1.
func rewriteValueMIPS64_OpLeq32_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Leq32 x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGT (SignExt32to64 x) (SignExt32to64 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v2.AddArg(x)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}

// rewriteValueMIPS64_OpLeq32F_0 lowers Leq32F by swapping operands into a
// greater-or-equal FP compare: x <= y becomes CMPGEF y x, read via FPFlagTrue.
func rewriteValueMIPS64_OpLeq32F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Leq32F x y)
	// cond:
	// result: (FPFlagTrue (CMPGEF y x))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPGEF, types.TypeFlags)
		v0.AddArg(y)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueMIPS64_OpLeq32U_0 lowers Leq32U as !(x >u y) via SGTU on the
// zero-extended operands, negated by XOR with 1.
func rewriteValueMIPS64_OpLeq32U_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Leq32U x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt32to64 x) (ZeroExt32to64 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(x)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}

// rewriteValueMIPS64_OpLeq64_0 lowers Leq64 as !(x > y); operands are
// full-width so no extension is needed.
func rewriteValueMIPS64_OpLeq64_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Leq64 x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGT x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool)
		v1.AddArg(x)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}

// rewriteValueMIPS64_OpLeq64F_0 lowers Leq64F by swapping operands into a
// greater-or-equal FP compare: x <= y becomes CMPGED y x, read via FPFlagTrue.
func rewriteValueMIPS64_OpLeq64F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Leq64F x y)
	// cond:
	// result: (FPFlagTrue (CMPGED y x))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPGED, types.TypeFlags)
		v0.AddArg(y)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueMIPS64_OpLeq64U_0 lowers Leq64U as !(x >u y); operands are
// full-width so no extension is needed.
func rewriteValueMIPS64_OpLeq64U_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Leq64U x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGTU x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v1.AddArg(x)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}

// rewriteValueMIPS64_OpLeq8_0 lowers Leq8 as !(x > y) via SGT on the
// sign-extended operands, negated by XOR with 1.
func rewriteValueMIPS64_OpLeq8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Leq8 x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGT (SignExt8to64 x) (SignExt8to64 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v2.AddArg(x)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}

// rewriteValueMIPS64_OpLeq8U_0 lowers Leq8U as !(x >u y) via SGTU on the
// zero-extended operands, negated by XOR with 1.
func rewriteValueMIPS64_OpLeq8U_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Leq8U x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt8to64 x) (ZeroExt8to64 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v2.AddArg(x)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpLess16_0 lowers Less16 by swapping operands into a
// greater-than: x < y becomes SGT on the sign-extended y and x.
func rewriteValueMIPS64_OpLess16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Less16 x y)
	// cond:
	// result: (SGT (SignExt16to64 y) (SignExt16to64 x))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGT)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(y)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v1.AddArg(x)
		v.AddArg(v1)
		return true
	}
}

// rewriteValueMIPS64_OpLess16U_0 lowers Less16U by swapping operands into an
// unsigned greater-than: x <u y becomes SGTU on the zero-extended y and x.
func rewriteValueMIPS64_OpLess16U_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Less16U x y)
	// cond:
	// result: (SGTU (ZeroExt16to64 y) (ZeroExt16to64 x))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v0.AddArg(y)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v.AddArg(v1)
		return true
	}
}

// rewriteValueMIPS64_OpLess32_0 lowers Less32 by swapping operands into SGT
// on the sign-extended operands.
func rewriteValueMIPS64_OpLess32_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Less32 x y)
	// cond:
	// result: (SGT (SignExt32to64 y) (SignExt32to64 x))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGT)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(y)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v1.AddArg(x)
		v.AddArg(v1)
		return true
	}
}

// rewriteValueMIPS64_OpLess32F_0 lowers Less32F by swapping operands into a
// single-precision FP greater-than: x < y becomes CMPGTF y x via FPFlagTrue.
func rewriteValueMIPS64_OpLess32F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Less32F x y)
	// cond:
	// result: (FPFlagTrue (CMPGTF y x))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPGTF, types.TypeFlags)
		v0.AddArg(y)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueMIPS64_OpLess32U_0 lowers Less32U by swapping operands into
// SGTU on the zero-extended operands.
func rewriteValueMIPS64_OpLess32U_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Less32U x y)
	// cond:
	// result: (SGTU (ZeroExt32to64 y) (ZeroExt32to64 x))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v0.AddArg(y)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(x)
		v.AddArg(v1)
		return true
	}
}

// rewriteValueMIPS64_OpLess64_0 lowers Less64 to SGT with the operands
// swapped: x < y becomes y > x.
func rewriteValueMIPS64_OpLess64_0(v *Value) bool {
	// match: (Less64 x y)
	// cond:
	// result: (SGT y x)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGT)
		v.AddArg(y)
		v.AddArg(x)
		return true
	}
}

// rewriteValueMIPS64_OpLess64F_0 lowers Less64F by swapping operands into a
// double-precision FP greater-than: x < y becomes CMPGTD y x via FPFlagTrue.
func rewriteValueMIPS64_OpLess64F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Less64F x y)
	// cond:
	// result: (FPFlagTrue (CMPGTD y x))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPGTD, types.TypeFlags)
		v0.AddArg(y)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueMIPS64_OpLess64U_0 lowers Less64U to SGTU with the operands
// swapped: x <u y becomes y >u x.
func rewriteValueMIPS64_OpLess64U_0(v *Value) bool {
	// match: (Less64U x y)
	// cond:
	// result: (SGTU y x)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v.AddArg(y)
		v.AddArg(x)
		return true
	}
}

// rewriteValueMIPS64_OpLess8_0 lowers Less8 by swapping operands into SGT on
// the sign-extended operands.
func rewriteValueMIPS64_OpLess8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Less8 x y)
	// cond:
	// result: (SGT (SignExt8to64 y) (SignExt8to64 x))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGT)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(y)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v1.AddArg(x)
		v.AddArg(v1)
		return true
	}
}

// rewriteValueMIPS64_OpLess8U_0 lowers Less8U by swapping operands into SGTU
// on the zero-extended operands.
func rewriteValueMIPS64_OpLess8U_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Less8U x y)
	// cond:
	// result: (SGTU (ZeroExt8to64 y) (ZeroExt8to64 x))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(y)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpLoad_0 lowers a generic typed Load to the MIPS64 load
// instruction matching the value's width, signedness, and kind (bool,
// integer, pointer, or float). The cases are tried in order; each tests the
// type and, on a match, rewrites in place and returns true. Returns false if
// no case matches the type.
func rewriteValueMIPS64_OpLoad_0(v *Value) bool {
	// match: (Load <t> ptr mem)
	// cond: t.IsBoolean()
	// result: (MOVBUload ptr mem)
	for {
		t := v.Type
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(t.IsBoolean()) {
			break
		}
		// Booleans load as an unsigned byte.
		v.reset(OpMIPS64MOVBUload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is8BitInt(t) && isSigned(t))
	// result: (MOVBload ptr mem)
	for {
		t := v.Type
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(is8BitInt(t) && isSigned(t)) {
			break
		}
		v.reset(OpMIPS64MOVBload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is8BitInt(t) && !isSigned(t))
	// result: (MOVBUload ptr mem)
	for {
		t := v.Type
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(is8BitInt(t) && !isSigned(t)) {
			break
		}
		v.reset(OpMIPS64MOVBUload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is16BitInt(t) && isSigned(t))
	// result: (MOVHload ptr mem)
	for {
		t := v.Type
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(is16BitInt(t) && isSigned(t)) {
			break
		}
		v.reset(OpMIPS64MOVHload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is16BitInt(t) && !isSigned(t))
	// result: (MOVHUload ptr mem)
	for {
		t := v.Type
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(is16BitInt(t) && !isSigned(t)) {
			break
		}
		v.reset(OpMIPS64MOVHUload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is32BitInt(t) && isSigned(t))
	// result: (MOVWload ptr mem)
	for {
		t := v.Type
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(is32BitInt(t) && isSigned(t)) {
			break
		}
		v.reset(OpMIPS64MOVWload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is32BitInt(t) && !isSigned(t))
	// result: (MOVWUload ptr mem)
	for {
		t := v.Type
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(is32BitInt(t) && !isSigned(t)) {
			break
		}
		v.reset(OpMIPS64MOVWUload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is64BitInt(t) || isPtr(t))
	// result: (MOVVload ptr mem)
	for {
		t := v.Type
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(is64BitInt(t) || isPtr(t)) {
			break
		}
		// 64-bit integers and pointers share the full-width load.
		v.reset(OpMIPS64MOVVload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: is32BitFloat(t)
	// result: (MOVFload ptr mem)
	for {
		t := v.Type
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(is32BitFloat(t)) {
			break
		}
		v.reset(OpMIPS64MOVFload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: is64BitFloat(t)
	// result: (MOVDload ptr mem)
	for {
		t := v.Type
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(is64BitFloat(t)) {
			break
		}
		v.reset(OpMIPS64MOVDload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
  2928  func rewriteValueMIPS64_OpLsh16x16_0(v *Value) bool {
  2929  	b := v.Block
  2930  	_ = b
  2931  	typ := &b.Func.Config.Types
  2932  	_ = typ
  2933  	// match: (Lsh16x16 <t> x y)
  2934  	// cond:
  2935  	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
  2936  	for {
  2937  		t := v.Type
  2938  		_ = v.Args[1]
  2939  		x := v.Args[0]
  2940  		y := v.Args[1]
  2941  		v.reset(OpMIPS64AND)
  2942  		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
  2943  		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
  2944  		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
  2945  		v2.AuxInt = 64
  2946  		v1.AddArg(v2)
  2947  		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  2948  		v3.AddArg(y)
  2949  		v1.AddArg(v3)
  2950  		v0.AddArg(v1)
  2951  		v.AddArg(v0)
  2952  		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
  2953  		v4.AddArg(x)
  2954  		v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  2955  		v5.AddArg(y)
  2956  		v4.AddArg(v5)
  2957  		v.AddArg(v4)
  2958  		return true
  2959  	}
  2960  }
  2961  func rewriteValueMIPS64_OpLsh16x32_0(v *Value) bool {
  2962  	b := v.Block
  2963  	_ = b
  2964  	typ := &b.Func.Config.Types
  2965  	_ = typ
  2966  	// match: (Lsh16x32 <t> x y)
  2967  	// cond:
  2968  	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
  2969  	for {
  2970  		t := v.Type
  2971  		_ = v.Args[1]
  2972  		x := v.Args[0]
  2973  		y := v.Args[1]
  2974  		v.reset(OpMIPS64AND)
  2975  		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
  2976  		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
  2977  		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
  2978  		v2.AuxInt = 64
  2979  		v1.AddArg(v2)
  2980  		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  2981  		v3.AddArg(y)
  2982  		v1.AddArg(v3)
  2983  		v0.AddArg(v1)
  2984  		v.AddArg(v0)
  2985  		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
  2986  		v4.AddArg(x)
  2987  		v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  2988  		v5.AddArg(y)
  2989  		v4.AddArg(v5)
  2990  		v.AddArg(v4)
  2991  		return true
  2992  	}
  2993  }
  2994  func rewriteValueMIPS64_OpLsh16x64_0(v *Value) bool {
  2995  	b := v.Block
  2996  	_ = b
  2997  	typ := &b.Func.Config.Types
  2998  	_ = typ
  2999  	// match: (Lsh16x64 <t> x y)
  3000  	// cond:
  3001  	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SLLV <t> x y))
  3002  	for {
  3003  		t := v.Type
  3004  		_ = v.Args[1]
  3005  		x := v.Args[0]
  3006  		y := v.Args[1]
  3007  		v.reset(OpMIPS64AND)
  3008  		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
  3009  		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
  3010  		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
  3011  		v2.AuxInt = 64
  3012  		v1.AddArg(v2)
  3013  		v1.AddArg(y)
  3014  		v0.AddArg(v1)
  3015  		v.AddArg(v0)
  3016  		v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
  3017  		v3.AddArg(x)
  3018  		v3.AddArg(y)
  3019  		v.AddArg(v3)
  3020  		return true
  3021  	}
  3022  }
  3023  func rewriteValueMIPS64_OpLsh16x8_0(v *Value) bool {
  3024  	b := v.Block
  3025  	_ = b
  3026  	typ := &b.Func.Config.Types
  3027  	_ = typ
  3028  	// match: (Lsh16x8 <t> x y)
  3029  	// cond:
  3030  	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64  y))) (SLLV <t> x (ZeroExt8to64  y)))
  3031  	for {
  3032  		t := v.Type
  3033  		_ = v.Args[1]
  3034  		x := v.Args[0]
  3035  		y := v.Args[1]
  3036  		v.reset(OpMIPS64AND)
  3037  		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
  3038  		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
  3039  		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
  3040  		v2.AuxInt = 64
  3041  		v1.AddArg(v2)
  3042  		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  3043  		v3.AddArg(y)
  3044  		v1.AddArg(v3)
  3045  		v0.AddArg(v1)
  3046  		v.AddArg(v0)
  3047  		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
  3048  		v4.AddArg(x)
  3049  		v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  3050  		v5.AddArg(y)
  3051  		v4.AddArg(v5)
  3052  		v.AddArg(v4)
  3053  		return true
  3054  	}
  3055  }
// rewriteValueMIPS64_OpLsh32x16_0 lowers the generic Lsh32x16 op (32-bit left
// shift by a 16-bit amount) into MIPS64 machine ops per the rule comment
// below: the SLLV result is ANDed with a NEGV(SGTU 64, zext(y)) mask, which
// presumably zeroes the result for shift amounts >= 64 — confirm against
// gen/MIPS64.rules. Generated code; do not hand-edit.
func rewriteValueMIPS64_OpLsh32x16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Lsh32x16 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		// Rewrite v in place into the AND at the root of the result tree;
		// v0..v5 build the operand subtrees bottom-up.
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh32x32_0 lowers the generic Lsh32x32 op (32-bit left
// shift by a 32-bit amount) into MIPS64 machine ops per the rule comment
// below: the SLLV result is ANDed with a NEGV(SGTU 64, zext(y)) mask, which
// presumably zeroes the result for shift amounts >= 64 — confirm against
// gen/MIPS64.rules. Generated code; do not hand-edit.
func rewriteValueMIPS64_OpLsh32x32_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Lsh32x32 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		// Rewrite v in place into the AND at the root of the result tree;
		// v0..v5 build the operand subtrees bottom-up.
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh32x64_0 lowers the generic Lsh32x64 op (32-bit left
// shift by a 64-bit amount) into MIPS64 machine ops per the rule comment
// below. The shift amount y is already 64-bit, so no zero-extension is
// needed; the SLLV result is ANDed with a NEGV(SGTU 64, y) mask, which
// presumably zeroes the result for shift amounts >= 64 — confirm against
// gen/MIPS64.rules. Generated code; do not hand-edit.
func rewriteValueMIPS64_OpLsh32x64_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Lsh32x64 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SLLV <t> x y))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		// Rewrite v in place into the AND at the root of the result tree.
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v3.AddArg(x)
		v3.AddArg(y)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueMIPS64_OpLsh32x8_0 lowers the generic Lsh32x8 op (32-bit left
// shift by an 8-bit amount) into MIPS64 machine ops per the rule comment
// below: the SLLV result is ANDed with a NEGV(SGTU 64, zext(y)) mask, which
// presumably zeroes the result for shift amounts >= 64 — confirm against
// gen/MIPS64.rules. Generated code; do not hand-edit.
func rewriteValueMIPS64_OpLsh32x8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Lsh32x8 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64  y))) (SLLV <t> x (ZeroExt8to64  y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		// Rewrite v in place into the AND at the root of the result tree;
		// v0..v5 build the operand subtrees bottom-up.
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh64x16_0 lowers the generic Lsh64x16 op (64-bit left
// shift by a 16-bit amount) into MIPS64 machine ops per the rule comment
// below: the SLLV result is ANDed with a NEGV(SGTU 64, zext(y)) mask, which
// presumably zeroes the result for shift amounts >= 64 — confirm against
// gen/MIPS64.rules. Generated code; do not hand-edit.
func rewriteValueMIPS64_OpLsh64x16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Lsh64x16 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		// Rewrite v in place into the AND at the root of the result tree;
		// v0..v5 build the operand subtrees bottom-up.
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh64x32_0 lowers the generic Lsh64x32 op (64-bit left
// shift by a 32-bit amount) into MIPS64 machine ops per the rule comment
// below: the SLLV result is ANDed with a NEGV(SGTU 64, zext(y)) mask, which
// presumably zeroes the result for shift amounts >= 64 — confirm against
// gen/MIPS64.rules. Generated code; do not hand-edit.
func rewriteValueMIPS64_OpLsh64x32_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Lsh64x32 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		// Rewrite v in place into the AND at the root of the result tree;
		// v0..v5 build the operand subtrees bottom-up.
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh64x64_0 lowers the generic Lsh64x64 op (64-bit left
// shift by a 64-bit amount) into MIPS64 machine ops per the rule comment
// below. The shift amount y is already 64-bit, so no zero-extension is
// needed; the SLLV result is ANDed with a NEGV(SGTU 64, y) mask, which
// presumably zeroes the result for shift amounts >= 64 — confirm against
// gen/MIPS64.rules. Generated code; do not hand-edit.
func rewriteValueMIPS64_OpLsh64x64_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Lsh64x64 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SLLV <t> x y))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		// Rewrite v in place into the AND at the root of the result tree.
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v3.AddArg(x)
		v3.AddArg(y)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueMIPS64_OpLsh64x8_0 lowers the generic Lsh64x8 op (64-bit left
// shift by an 8-bit amount) into MIPS64 machine ops per the rule comment
// below: the SLLV result is ANDed with a NEGV(SGTU 64, zext(y)) mask, which
// presumably zeroes the result for shift amounts >= 64 — confirm against
// gen/MIPS64.rules. Generated code; do not hand-edit.
func rewriteValueMIPS64_OpLsh64x8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Lsh64x8 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64  y))) (SLLV <t> x (ZeroExt8to64  y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		// Rewrite v in place into the AND at the root of the result tree;
		// v0..v5 build the operand subtrees bottom-up.
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh8x16_0 lowers the generic Lsh8x16 op (8-bit left
// shift by a 16-bit amount) into MIPS64 machine ops per the rule comment
// below: the SLLV result is ANDed with a NEGV(SGTU 64, zext(y)) mask, which
// presumably zeroes the result for shift amounts >= 64 — confirm against
// gen/MIPS64.rules. Generated code; do not hand-edit.
func rewriteValueMIPS64_OpLsh8x16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Lsh8x16 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		// Rewrite v in place into the AND at the root of the result tree;
		// v0..v5 build the operand subtrees bottom-up.
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh8x32_0 lowers the generic Lsh8x32 op (8-bit left
// shift by a 32-bit amount) into MIPS64 machine ops per the rule comment
// below: the SLLV result is ANDed with a NEGV(SGTU 64, zext(y)) mask, which
// presumably zeroes the result for shift amounts >= 64 — confirm against
// gen/MIPS64.rules. Generated code; do not hand-edit.
func rewriteValueMIPS64_OpLsh8x32_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Lsh8x32 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		// Rewrite v in place into the AND at the root of the result tree;
		// v0..v5 build the operand subtrees bottom-up.
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh8x64_0 lowers the generic Lsh8x64 op (8-bit left
// shift by a 64-bit amount) into MIPS64 machine ops per the rule comment
// below. The shift amount y is already 64-bit, so no zero-extension is
// needed; the SLLV result is ANDed with a NEGV(SGTU 64, y) mask, which
// presumably zeroes the result for shift amounts >= 64 — confirm against
// gen/MIPS64.rules. Generated code; do not hand-edit.
func rewriteValueMIPS64_OpLsh8x64_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Lsh8x64 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SLLV <t> x y))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		// Rewrite v in place into the AND at the root of the result tree.
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v3.AddArg(x)
		v3.AddArg(y)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueMIPS64_OpLsh8x8_0 lowers the generic Lsh8x8 op (8-bit left
// shift by an 8-bit amount) into MIPS64 machine ops per the rule comment
// below: the SLLV result is ANDed with a NEGV(SGTU 64, zext(y)) mask, which
// presumably zeroes the result for shift amounts >= 64 — confirm against
// gen/MIPS64.rules. Generated code; do not hand-edit.
func rewriteValueMIPS64_OpLsh8x8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Lsh8x8 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64  y))) (SLLV <t> x (ZeroExt8to64  y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		// Rewrite v in place into the AND at the root of the result tree;
		// v0..v5 build the operand subtrees bottom-up.
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpMIPS64ADDV_0 simplifies ADDV: an operand that is a
// MOVVconst fitting in 32 bits (in either operand order) is folded into an
// ADDVconst immediate form, and adding a NEGV operand (either order) becomes
// a SUBV. Returns true if a rewrite fired. Generated from gen/MIPS64.rules;
// do not hand-edit.
func rewriteValueMIPS64_OpMIPS64ADDV_0(v *Value) bool {
	// match: (ADDV x (MOVVconst [c]))
	// cond: is32Bit(c)
	// result: (ADDVconst [c] x)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_1.AuxInt
		// Guard: the immediate must fit the ADDVconst encoding.
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64ADDVconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (ADDV (MOVVconst [c]) x)
	// cond: is32Bit(c)
	// result: (ADDVconst [c] x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64ADDVconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (ADDV x (NEGV y))
	// cond:
	// result: (SUBV x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64NEGV {
			break
		}
		y := v_1.Args[0]
		v.reset(OpMIPS64SUBV)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (ADDV (NEGV y) x)
	// cond:
	// result: (SUBV x y)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64NEGV {
			break
		}
		y := v_0.Args[0]
		x := v.Args[1]
		v.reset(OpMIPS64SUBV)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64ADDVconst_0 simplifies ADDVconst: folds the
// offset into a MOVVaddr, eliminates add-of-zero, constant-folds against
// MOVVconst, and merges with a nested ADDVconst/SUBVconst when the combined
// immediate still fits in 32 bits. Returns true if a rewrite fired.
// Generated from gen/MIPS64.rules; do not hand-edit.
func rewriteValueMIPS64_OpMIPS64ADDVconst_0(v *Value) bool {
	// match: (ADDVconst [off1] (MOVVaddr [off2] {sym} ptr))
	// cond:
	// result: (MOVVaddr [off1+off2] {sym} ptr)
	for {
		off1 := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym := v_0.Aux
		ptr := v_0.Args[0]
		v.reset(OpMIPS64MOVVaddr)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		return true
	}
	// match: (ADDVconst [0] x)
	// cond:
	// result: x
	for {
		if v.AuxInt != 0 {
			break
		}
		x := v.Args[0]
		// Adding zero is the identity; replace v with a copy of x.
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (ADDVconst [c] (MOVVconst [d]))
	// cond:
	// result: (MOVVconst [c+d])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = c + d
		return true
	}
	// match: (ADDVconst [c] (ADDVconst [d] x))
	// cond: is32Bit(c+d)
	// result: (ADDVconst [c+d] x)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		d := v_0.AuxInt
		x := v_0.Args[0]
		// Guard: only merge when the combined immediate still encodes.
		if !(is32Bit(c + d)) {
			break
		}
		v.reset(OpMIPS64ADDVconst)
		v.AuxInt = c + d
		v.AddArg(x)
		return true
	}
	// match: (ADDVconst [c] (SUBVconst [d] x))
	// cond: is32Bit(c-d)
	// result: (ADDVconst [c-d] x)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64SUBVconst {
			break
		}
		d := v_0.AuxInt
		x := v_0.Args[0]
		if !(is32Bit(c - d)) {
			break
		}
		v.reset(OpMIPS64ADDVconst)
		v.AuxInt = c - d
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64AND_0 simplifies AND: an operand that is a
// MOVVconst fitting in 32 bits (in either operand order) is folded into an
// ANDconst immediate form, and (AND x x) collapses to x. Returns true if a
// rewrite fired. Generated from gen/MIPS64.rules; do not hand-edit.
func rewriteValueMIPS64_OpMIPS64AND_0(v *Value) bool {
	// match: (AND x (MOVVconst [c]))
	// cond: is32Bit(c)
	// result: (ANDconst [c] x)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_1.AuxInt
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64ANDconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (AND (MOVVconst [c]) x)
	// cond: is32Bit(c)
	// result: (ANDconst [c] x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64ANDconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (AND x x)
	// cond:
	// result: x
	for {
		_ = v.Args[1]
		x := v.Args[0]
		// Both args must be the same *Value, not merely equal values.
		if x != v.Args[1] {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64ANDconst_0 constant-folds ANDconst:
// mask 0 yields constant zero, mask -1 is the identity, a MOVVconst operand
// folds to c&d, and nested ANDconst masks merge into one. Returns true if a
// rewrite fired. Generated from gen/MIPS64.rules; do not hand-edit.
func rewriteValueMIPS64_OpMIPS64ANDconst_0(v *Value) bool {
	// match: (ANDconst [0] _)
	// cond:
	// result: (MOVVconst [0])
	for {
		if v.AuxInt != 0 {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	// match: (ANDconst [-1] x)
	// cond:
	// result: x
	for {
		if v.AuxInt != -1 {
			break
		}
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (ANDconst [c] (MOVVconst [d]))
	// cond:
	// result: (MOVVconst [c&d])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = c & d
		return true
	}
	// match: (ANDconst [c] (ANDconst [d] x))
	// cond:
	// result: (ANDconst [c&d] x)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ANDconst {
			break
		}
		d := v_0.AuxInt
		x := v_0.Args[0]
		v.reset(OpMIPS64ANDconst)
		v.AuxInt = c & d
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64LoweredAtomicAdd32_0 replaces an atomic 32-bit
// add whose addend is a MOVVconst fitting in 32 bits with the immediate form
// LoweredAtomicAddconst32. Returns true if the rewrite fired. Generated from
// gen/MIPS64.rules; do not hand-edit.
func rewriteValueMIPS64_OpMIPS64LoweredAtomicAdd32_0(v *Value) bool {
	// match: (LoweredAtomicAdd32 ptr (MOVVconst [c]) mem)
	// cond: is32Bit(c)
	// result: (LoweredAtomicAddconst32 [c] ptr mem)
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_1.AuxInt
		mem := v.Args[2]
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64LoweredAtomicAddconst32)
		v.AuxInt = c
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64LoweredAtomicAdd64_0 replaces an atomic 64-bit
// add whose addend is a MOVVconst fitting in 32 bits with the immediate form
// LoweredAtomicAddconst64. Returns true if the rewrite fired. Generated from
// gen/MIPS64.rules; do not hand-edit.
func rewriteValueMIPS64_OpMIPS64LoweredAtomicAdd64_0(v *Value) bool {
	// match: (LoweredAtomicAdd64 ptr (MOVVconst [c]) mem)
	// cond: is32Bit(c)
	// result: (LoweredAtomicAddconst64 [c] ptr mem)
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_1.AuxInt
		mem := v.Args[2]
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64LoweredAtomicAddconst64)
		v.AuxInt = c
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64LoweredAtomicStore32_0 replaces an atomic 32-bit
// store of constant zero with the dedicated LoweredAtomicStorezero32 form.
// Returns true if the rewrite fired. Generated from gen/MIPS64.rules; do not
// hand-edit.
func rewriteValueMIPS64_OpMIPS64LoweredAtomicStore32_0(v *Value) bool {
	// match: (LoweredAtomicStore32 ptr (MOVVconst [0]) mem)
	// cond:
	// result: (LoweredAtomicStorezero32 ptr mem)
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		// Only the zero constant qualifies for the storezero form.
		if v_1.AuxInt != 0 {
			break
		}
		mem := v.Args[2]
		v.reset(OpMIPS64LoweredAtomicStorezero32)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64LoweredAtomicStore64_0 replaces an atomic 64-bit
// store of constant zero with the dedicated LoweredAtomicStorezero64 form.
// Returns true if the rewrite fired. Generated from gen/MIPS64.rules; do not
// hand-edit.
func rewriteValueMIPS64_OpMIPS64LoweredAtomicStore64_0(v *Value) bool {
	// match: (LoweredAtomicStore64 ptr (MOVVconst [0]) mem)
	// cond:
	// result: (LoweredAtomicStorezero64 ptr mem)
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		// Only the zero constant qualifies for the storezero form.
		if v_1.AuxInt != 0 {
			break
		}
		mem := v.Args[2]
		v.reset(OpMIPS64LoweredAtomicStorezero64)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVBUload_0 folds addressing arithmetic into an
// unsigned byte load: an ADDVconst base merges its offset into the load's
// AuxInt, and a MOVVaddr base merges both offset and symbol (when the symbols
// are mergeable and the combined offset fits in 32 bits). Returns true if a
// rewrite fired. Generated from gen/MIPS64.rules; do not hand-edit.
func rewriteValueMIPS64_OpMIPS64MOVBUload_0(v *Value) bool {
	// match: (MOVBUload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVBUload [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVBUload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBUload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVBUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVBUload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVBUreg_0 removes redundant unsigned-byte
// extensions: a value that already comes from MOVBUload or a prior MOVBUreg
// needs no further extension (becomes a plain MOVVreg move), and extending a
// MOVVconst constant-folds to its low byte. Returns true if a rewrite fired.
// Generated from gen/MIPS64.rules; do not hand-edit.
func rewriteValueMIPS64_OpMIPS64MOVBUreg_0(v *Value) bool {
	// match: (MOVBUreg x:(MOVBUload _ _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVBUload {
			break
		}
		_ = x.Args[1]
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg x:(MOVBUreg _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVBUreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg (MOVVconst [c]))
	// cond:
	// result: (MOVVconst [int64(uint8(c))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		// Zero-extend the constant's low byte at compile time.
		v.AuxInt = int64(uint8(c))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVBload_0 folds addressing arithmetic into a
// signed byte load: an ADDVconst base merges its offset into the load's
// AuxInt, and a MOVVaddr base merges both offset and symbol (when the symbols
// are mergeable and the combined offset fits in 32 bits). Returns true if a
// rewrite fired. Generated from gen/MIPS64.rules; do not hand-edit.
func rewriteValueMIPS64_OpMIPS64MOVBload_0(v *Value) bool {
	// match: (MOVBload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVBload  [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVBload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVBload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVBload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVBreg_0 removes redundant signed-byte
// extensions: a value that already comes from MOVBload or a prior MOVBreg
// needs no further extension (becomes a plain MOVVreg move), and extending a
// MOVVconst constant-folds to its sign-extended low byte. Returns true if a
// rewrite fired. Generated from gen/MIPS64.rules; do not hand-edit.
func rewriteValueMIPS64_OpMIPS64MOVBreg_0(v *Value) bool {
	// match: (MOVBreg x:(MOVBload _ _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVBload {
			break
		}
		_ = x.Args[1]
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBreg x:(MOVBreg _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVBreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBreg (MOVVconst [c]))
	// cond:
	// result: (MOVVconst [int64(int8(c))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		// Sign-extend the constant's low byte at compile time.
		v.AuxInt = int64(int8(c))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVBstore_0 simplifies byte stores: folds an
// ADDVconst or MOVVaddr base into the store's AuxInt/Aux, turns a store of
// constant zero into MOVBstorezero, and strips MOVBreg/MOVBUreg/MOVHreg/
// MOVHUreg/MOVWreg/MOVWUreg extension ops off the stored value — presumably
// safe because only the low byte reaches memory; confirm against
// gen/MIPS64.rules. Returns true if a rewrite fired. Generated code; do not
// hand-edit.
func rewriteValueMIPS64_OpMIPS64MOVBstore_0(v *Value) bool {
	// match: (MOVBstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVBstore [off1+off2] {sym} ptr val mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVBstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVVconst [0]) mem)
	// cond:
	// result: (MOVBstorezero [off] {sym} ptr mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		mem := v.Args[2]
		v.reset(OpMIPS64MOVBstorezero)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVBreg x) mem)
	// cond:
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVBreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVBUreg x) mem)
	// cond:
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVBUreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVHreg x) mem)
	// cond:
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVHreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVHUreg x) mem)
	// cond:
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVHUreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVWreg x) mem)
	// cond:
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVWreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVWUreg x) mem)
	// cond:
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVWUreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVBstorezero_0 applies the generated rewrite
// rules for MOVBstorezero (rules live in gen/MIPS64.rules; do not hand-edit):
// it folds an ADDVconst offset, or a MOVVaddr symbol+offset, into the store's
// AuxInt/Aux when the combined offset still fits in 32 bits (and, for the
// symbol case, canMergeSym allows merging). Reports whether v was rewritten.
func rewriteValueMIPS64_OpMIPS64MOVBstorezero_0(v *Value) bool {
	// match: (MOVBstorezero [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVBstorezero [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		// Touch the last argument first: generated arg-length hint so later
		// v.Args accesses are known in-bounds.
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVBstorezero)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstorezero [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVBstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVBstorezero)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVDload_0 applies the generated rewrite rules
// for MOVDload (float64 load; rules in gen/MIPS64.rules, do not hand-edit):
// fold an ADDVconst offset, or a MOVVaddr symbol+offset, into the load's
// AuxInt/Aux when the combined offset fits in 32 bits (and the symbols can
// merge). Reports whether v was rewritten.
func rewriteValueMIPS64_OpMIPS64MOVDload_0(v *Value) bool {
	// match: (MOVDload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVDload  [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVDload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVDload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVDload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVDstore_0 applies the generated rewrite rules
// for MOVDstore (float64 store; rules in gen/MIPS64.rules, do not hand-edit):
// fold an ADDVconst offset, or a MOVVaddr symbol+offset, into the store's
// AuxInt/Aux when the combined offset fits in 32 bits (and the symbols can
// merge). Reports whether v was rewritten.
func rewriteValueMIPS64_OpMIPS64MOVDstore_0(v *Value) bool {
	// match: (MOVDstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVDstore [off1+off2] {sym} ptr val mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		// Generated arg-length hint: makes the later v.Args accesses
		// known in-bounds.
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVDstore)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVDstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVDstore)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVFload_0 applies the generated rewrite rules
// for MOVFload (float32 load; rules in gen/MIPS64.rules, do not hand-edit):
// fold an ADDVconst offset, or a MOVVaddr symbol+offset, into the load's
// AuxInt/Aux when the combined offset fits in 32 bits (and the symbols can
// merge). Reports whether v was rewritten.
func rewriteValueMIPS64_OpMIPS64MOVFload_0(v *Value) bool {
	// match: (MOVFload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVFload  [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVFload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVFload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVFload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVFload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVFstore_0 applies the generated rewrite rules
// for MOVFstore (float32 store; rules in gen/MIPS64.rules, do not hand-edit):
// fold an ADDVconst offset, or a MOVVaddr symbol+offset, into the store's
// AuxInt/Aux when the combined offset fits in 32 bits (and the symbols can
// merge). Reports whether v was rewritten.
func rewriteValueMIPS64_OpMIPS64MOVFstore_0(v *Value) bool {
	// match: (MOVFstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVFstore [off1+off2] {sym} ptr val mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVFstore)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVFstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVFstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVFstore)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVHUload_0 applies the generated rewrite rules
// for MOVHUload (zero-extending halfword load; rules in gen/MIPS64.rules,
// do not hand-edit): fold an ADDVconst offset, or a MOVVaddr symbol+offset,
// into the load's AuxInt/Aux when the combined offset fits in 32 bits (and
// the symbols can merge). Reports whether v was rewritten.
func rewriteValueMIPS64_OpMIPS64MOVHUload_0(v *Value) bool {
	// match: (MOVHUload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVHUload [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVHUload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHUload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVHUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVHUload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVHUreg_0 applies the generated rewrite rules
// for MOVHUreg, the 16-bit zero-extension (rules in gen/MIPS64.rules, do not
// hand-edit): when the operand is already zero-extended to at most 16 bits
// (an unsigned byte/halfword load, or a previous MOVBUreg/MOVHUreg) the
// extension is redundant and is replaced by MOVVreg; a MOVVconst operand is
// constant-folded to uint16(c). Reports whether v was rewritten.
func rewriteValueMIPS64_OpMIPS64MOVHUreg_0(v *Value) bool {
	// match: (MOVHUreg x:(MOVBUload _ _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVBUload {
			break
		}
		_ = x.Args[1]
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVHUload _ _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVHUload {
			break
		}
		_ = x.Args[1]
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVBUreg _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVBUreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVHUreg _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVHUreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg (MOVVconst [c]))
	// cond:
	// result: (MOVVconst [int64(uint16(c))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64(uint16(c))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVHload_0 applies the generated rewrite rules
// for MOVHload (sign-extending halfword load; rules in gen/MIPS64.rules,
// do not hand-edit): fold an ADDVconst offset, or a MOVVaddr symbol+offset,
// into the load's AuxInt/Aux when the combined offset fits in 32 bits (and
// the symbols can merge). Reports whether v was rewritten.
func rewriteValueMIPS64_OpMIPS64MOVHload_0(v *Value) bool {
	// match: (MOVHload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVHload  [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVHload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVHload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVHload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVHreg_0 applies the generated rewrite rules
// for MOVHreg, the 16-bit sign-extension (rules in gen/MIPS64.rules, do not
// hand-edit): when the operand is already correctly extended to at most
// 16 bits (a byte/halfword load, signed or unsigned byte extension, or a
// previous MOVHreg) the extension is redundant and is replaced by MOVVreg;
// a MOVVconst operand is constant-folded to int16(c). Reports whether v was
// rewritten.
func rewriteValueMIPS64_OpMIPS64MOVHreg_0(v *Value) bool {
	// match: (MOVHreg x:(MOVBload _ _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVBload {
			break
		}
		_ = x.Args[1]
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBUload _ _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVBUload {
			break
		}
		_ = x.Args[1]
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVHload _ _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVHload {
			break
		}
		_ = x.Args[1]
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBreg _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVBreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBUreg _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVBUreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVHreg _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVHreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg (MOVVconst [c]))
	// cond:
	// result: (MOVVconst [int64(int16(c))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64(int16(c))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVHstore_0 applies the generated rewrite rules
// for MOVHstore (rules in gen/MIPS64.rules, do not hand-edit):
//   - fold an ADDVconst offset, or a MOVVaddr symbol+offset, into
//     AuxInt/Aux when the combined offset fits in 32 bits;
//   - turn a store of constant zero into MOVHstorezero;
//   - drop a sign/zero extension (MOVHreg/MOVHUreg/MOVWreg/MOVWUreg) of the
//     stored value, since a halfword store only writes the low 16 bits.
// Reports whether v was rewritten.
func rewriteValueMIPS64_OpMIPS64MOVHstore_0(v *Value) bool {
	// match: (MOVHstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVHstore [off1+off2] {sym} ptr val mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVHstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVVconst [0]) mem)
	// cond:
	// result: (MOVHstorezero [off] {sym} ptr mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		mem := v.Args[2]
		v.reset(OpMIPS64MOVHstorezero)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVHreg x) mem)
	// cond:
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVHreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVHUreg x) mem)
	// cond:
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVHUreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVWreg x) mem)
	// cond:
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVWreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVWUreg x) mem)
	// cond:
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVWUreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVHstorezero_0 applies the generated rewrite
// rules for MOVHstorezero (rules in gen/MIPS64.rules, do not hand-edit):
// fold an ADDVconst offset, or a MOVVaddr symbol+offset, into the store's
// AuxInt/Aux when the combined offset fits in 32 bits (and the symbols can
// merge). Reports whether v was rewritten.
func rewriteValueMIPS64_OpMIPS64MOVHstorezero_0(v *Value) bool {
	// match: (MOVHstorezero [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVHstorezero [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVHstorezero)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHstorezero [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVHstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVHstorezero)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVVload_0 applies the generated rewrite rules
// for MOVVload (doubleword load; rules in gen/MIPS64.rules, do not
// hand-edit): fold an ADDVconst offset, or a MOVVaddr symbol+offset, into
// the load's AuxInt/Aux when the combined offset fits in 32 bits (and the
// symbols can merge). Reports whether v was rewritten.
func rewriteValueMIPS64_OpMIPS64MOVVload_0(v *Value) bool {
	// match: (MOVVload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVVload  [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVVload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVVload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVVload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVVload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVVreg_0 applies the generated rewrite rules
// for MOVVreg (rules in gen/MIPS64.rules, do not hand-edit): a MOVVreg of a
// value with exactly one use is downgraded to MOVVnop (no register move
// needed), and a MOVVreg of a constant is folded to the constant itself.
// Reports whether v was rewritten.
func rewriteValueMIPS64_OpMIPS64MOVVreg_0(v *Value) bool {
	// match: (MOVVreg x)
	// cond: x.Uses == 1
	// result: (MOVVnop x)
	for {
		x := v.Args[0]
		if !(x.Uses == 1) {
			break
		}
		v.reset(OpMIPS64MOVVnop)
		v.AddArg(x)
		return true
	}
	// match: (MOVVreg (MOVVconst [c]))
	// cond:
	// result: (MOVVconst [c])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = c
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVVstore_0 applies the generated rewrite rules
// for MOVVstore (doubleword store; rules in gen/MIPS64.rules, do not
// hand-edit): fold an ADDVconst offset, or a MOVVaddr symbol+offset, into
// AuxInt/Aux when the combined offset fits in 32 bits (and the symbols can
// merge), and turn a store of constant zero into MOVVstorezero. Reports
// whether v was rewritten.
func rewriteValueMIPS64_OpMIPS64MOVVstore_0(v *Value) bool {
	// match: (MOVVstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVVstore [off1+off2] {sym} ptr val mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVVstore)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVVstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVVstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVVstore)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVVstore [off] {sym} ptr (MOVVconst [0]) mem)
	// cond:
	// result: (MOVVstorezero [off] {sym} ptr mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		mem := v.Args[2]
		v.reset(OpMIPS64MOVVstorezero)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVVstorezero_0 applies the generated rewrite
// rules for MOVVstorezero (rules in gen/MIPS64.rules, do not hand-edit):
// fold an ADDVconst offset, or a MOVVaddr symbol+offset, into the store's
// AuxInt/Aux when the combined offset fits in 32 bits (and the symbols can
// merge). Reports whether v was rewritten.
func rewriteValueMIPS64_OpMIPS64MOVVstorezero_0(v *Value) bool {
	// match: (MOVVstorezero [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVVstorezero [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVVstorezero)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVVstorezero [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVVstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVVstorezero)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVWUload_0 applies the generated rewrite rules
// for MOVWUload (zero-extending word load; rules in gen/MIPS64.rules, do not
// hand-edit): fold an ADDVconst offset, or a MOVVaddr symbol+offset, into
// the load's AuxInt/Aux when the combined offset fits in 32 bits (and the
// symbols can merge). Reports whether v was rewritten.
func rewriteValueMIPS64_OpMIPS64MOVWUload_0(v *Value) bool {
	// match: (MOVWUload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVWUload [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVWUload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWUload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVWUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVWUload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVWUreg_0 applies the generated rewrite rules
// for MOVWUreg, the 32-bit zero-extension (rules in gen/MIPS64.rules, do not
// hand-edit): when the operand is already zero-extended to at most 32 bits
// (an unsigned byte/halfword/word load, or a previous
// MOVBUreg/MOVHUreg/MOVWUreg) the extension is redundant and is replaced by
// MOVVreg; a MOVVconst operand is constant-folded to uint32(c). Reports
// whether v was rewritten.
func rewriteValueMIPS64_OpMIPS64MOVWUreg_0(v *Value) bool {
	// match: (MOVWUreg x:(MOVBUload _ _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVBUload {
			break
		}
		_ = x.Args[1]
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVHUload _ _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVHUload {
			break
		}
		_ = x.Args[1]
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVWUload _ _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVWUload {
			break
		}
		_ = x.Args[1]
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVBUreg _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVBUreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVHUreg _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVHUreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVWUreg _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVWUreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg (MOVVconst [c]))
	// cond:
	// result: (MOVVconst [int64(uint32(c))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64(uint32(c))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVWload_0 applies the MOVWload rewrite rules:
// fold an ADDVconst offset, or a MOVVaddr offset+symbol pair, into the
// load's AuxInt/Aux. Rules are tried in order; the first match mutates v
// in place and returns true. Returns false if no rule applied.
// NOTE(review): generated code — permanent changes belong in gen/MIPS64.rules.
func rewriteValueMIPS64_OpMIPS64MOVWload_0(v *Value) bool {
	// match: (MOVWload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVWload  [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		// combined offset must still fit in a 32-bit immediate
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVWload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVWload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVWload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
  5354  func rewriteValueMIPS64_OpMIPS64MOVWreg_0(v *Value) bool {
  5355  	// match: (MOVWreg x:(MOVBload _ _))
  5356  	// cond:
  5357  	// result: (MOVVreg x)
  5358  	for {
  5359  		x := v.Args[0]
  5360  		if x.Op != OpMIPS64MOVBload {
  5361  			break
  5362  		}
  5363  		_ = x.Args[1]
  5364  		v.reset(OpMIPS64MOVVreg)
  5365  		v.AddArg(x)
  5366  		return true
  5367  	}
  5368  	// match: (MOVWreg x:(MOVBUload _ _))
  5369  	// cond:
  5370  	// result: (MOVVreg x)
  5371  	for {
  5372  		x := v.Args[0]
  5373  		if x.Op != OpMIPS64MOVBUload {
  5374  			break
  5375  		}
  5376  		_ = x.Args[1]
  5377  		v.reset(OpMIPS64MOVVreg)
  5378  		v.AddArg(x)
  5379  		return true
  5380  	}
  5381  	// match: (MOVWreg x:(MOVHload _ _))
  5382  	// cond:
  5383  	// result: (MOVVreg x)
  5384  	for {
  5385  		x := v.Args[0]
  5386  		if x.Op != OpMIPS64MOVHload {
  5387  			break
  5388  		}
  5389  		_ = x.Args[1]
  5390  		v.reset(OpMIPS64MOVVreg)
  5391  		v.AddArg(x)
  5392  		return true
  5393  	}
  5394  	// match: (MOVWreg x:(MOVHUload _ _))
  5395  	// cond:
  5396  	// result: (MOVVreg x)
  5397  	for {
  5398  		x := v.Args[0]
  5399  		if x.Op != OpMIPS64MOVHUload {
  5400  			break
  5401  		}
  5402  		_ = x.Args[1]
  5403  		v.reset(OpMIPS64MOVVreg)
  5404  		v.AddArg(x)
  5405  		return true
  5406  	}
  5407  	// match: (MOVWreg x:(MOVWload _ _))
  5408  	// cond:
  5409  	// result: (MOVVreg x)
  5410  	for {
  5411  		x := v.Args[0]
  5412  		if x.Op != OpMIPS64MOVWload {
  5413  			break
  5414  		}
  5415  		_ = x.Args[1]
  5416  		v.reset(OpMIPS64MOVVreg)
  5417  		v.AddArg(x)
  5418  		return true
  5419  	}
  5420  	// match: (MOVWreg x:(MOVBreg _))
  5421  	// cond:
  5422  	// result: (MOVVreg x)
  5423  	for {
  5424  		x := v.Args[0]
  5425  		if x.Op != OpMIPS64MOVBreg {
  5426  			break
  5427  		}
  5428  		v.reset(OpMIPS64MOVVreg)
  5429  		v.AddArg(x)
  5430  		return true
  5431  	}
  5432  	// match: (MOVWreg x:(MOVBUreg _))
  5433  	// cond:
  5434  	// result: (MOVVreg x)
  5435  	for {
  5436  		x := v.Args[0]
  5437  		if x.Op != OpMIPS64MOVBUreg {
  5438  			break
  5439  		}
  5440  		v.reset(OpMIPS64MOVVreg)
  5441  		v.AddArg(x)
  5442  		return true
  5443  	}
  5444  	// match: (MOVWreg x:(MOVHreg _))
  5445  	// cond:
  5446  	// result: (MOVVreg x)
  5447  	for {
  5448  		x := v.Args[0]
  5449  		if x.Op != OpMIPS64MOVHreg {
  5450  			break
  5451  		}
  5452  		v.reset(OpMIPS64MOVVreg)
  5453  		v.AddArg(x)
  5454  		return true
  5455  	}
  5456  	// match: (MOVWreg x:(MOVHreg _))
  5457  	// cond:
  5458  	// result: (MOVVreg x)
  5459  	for {
  5460  		x := v.Args[0]
  5461  		if x.Op != OpMIPS64MOVHreg {
  5462  			break
  5463  		}
  5464  		v.reset(OpMIPS64MOVVreg)
  5465  		v.AddArg(x)
  5466  		return true
  5467  	}
  5468  	// match: (MOVWreg x:(MOVWreg _))
  5469  	// cond:
  5470  	// result: (MOVVreg x)
  5471  	for {
  5472  		x := v.Args[0]
  5473  		if x.Op != OpMIPS64MOVWreg {
  5474  			break
  5475  		}
  5476  		v.reset(OpMIPS64MOVVreg)
  5477  		v.AddArg(x)
  5478  		return true
  5479  	}
  5480  	return false
  5481  }
// rewriteValueMIPS64_OpMIPS64MOVWreg_10 is the overflow of the MOVWreg rules
// (the generator splits rule sets into functions of at most ten rules):
// constant-fold MOVWreg of a constant by sign-extending its low 32 bits.
// NOTE(review): generated code — permanent changes belong in gen/MIPS64.rules.
func rewriteValueMIPS64_OpMIPS64MOVWreg_10(v *Value) bool {
	// match: (MOVWreg (MOVVconst [c]))
	// cond:
	// result: (MOVVconst [int64(int32(c))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		// int64(int32(c)) sign-extends the low 32 bits, mirroring MOVWreg.
		v.AuxInt = int64(int32(c))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVWstore_0 applies the MOVWstore rewrite rules:
// fold constant offsets / static symbols into the store, turn a store of
// constant zero into MOVWstorezero, and drop redundant sign/zero extensions
// of the stored value (a 32-bit store only keeps the low 32 bits anyway).
// Rules are tried in order; the first match mutates v in place and returns
// true. Returns false if no rule applied.
// NOTE(review): generated code — permanent changes belong in gen/MIPS64.rules.
func rewriteValueMIPS64_OpMIPS64MOVWstore_0(v *Value) bool {
	// match: (MOVWstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVWstore [off1+off2] {sym} ptr val mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVWstore)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVWstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVWstore)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWstore [off] {sym} ptr (MOVVconst [0]) mem)
	// cond:
	// result: (MOVWstorezero [off] {sym} ptr mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		mem := v.Args[2]
		v.reset(OpMIPS64MOVWstorezero)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWstore [off] {sym} ptr (MOVWreg x) mem)
	// cond:
	// result: (MOVWstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVWreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpMIPS64MOVWstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWstore [off] {sym} ptr (MOVWUreg x) mem)
	// cond:
	// result: (MOVWstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVWUreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpMIPS64MOVWstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVWstorezero_0 applies the MOVWstorezero
// rewrite rules: fold an ADDVconst offset, or a MOVVaddr offset+symbol pair,
// into the zero-store's AuxInt/Aux. First match mutates v and returns true.
// NOTE(review): generated code — permanent changes belong in gen/MIPS64.rules.
func rewriteValueMIPS64_OpMIPS64MOVWstorezero_0(v *Value) bool {
	// match: (MOVWstorezero [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVWstorezero [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVWstorezero)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWstorezero [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVWstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVWstorezero)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64NEGV_0 constant-folds NEGV of a constant.
// Go's defined two's-complement wraparound on -c matches the hardware
// negation, including the MinInt64 case.
// NOTE(review): generated code — permanent changes belong in gen/MIPS64.rules.
func rewriteValueMIPS64_OpMIPS64NEGV_0(v *Value) bool {
	// match: (NEGV (MOVVconst [c]))
	// cond:
	// result: (MOVVconst [-c])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = -c
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64NOR_0 folds a small-constant operand of NOR
// (either side, since NOR is commutative) into a NORconst. The constant must
// fit in 32 bits so it can be materialized as an immediate.
// NOTE(review): generated code — permanent changes belong in gen/MIPS64.rules.
func rewriteValueMIPS64_OpMIPS64NOR_0(v *Value) bool {
	// match: (NOR x (MOVVconst [c]))
	// cond: is32Bit(c)
	// result: (NORconst [c] x)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_1.AuxInt
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64NORconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (NOR (MOVVconst [c]) x)
	// cond: is32Bit(c)
	// result: (NORconst [c] x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64NORconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64NORconst_0 constant-folds NORconst applied to a
// constant: NOR(c, d) == ^(c|d).
// NOTE(review): generated code — permanent changes belong in gen/MIPS64.rules.
func rewriteValueMIPS64_OpMIPS64NORconst_0(v *Value) bool {
	// match: (NORconst [c] (MOVVconst [d]))
	// cond:
	// result: (MOVVconst [^(c|d)])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = ^(c | d)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64OR_0 applies the OR rewrite rules: fold a
// 32-bit-representable constant operand (either side) into ORconst, and
// simplify the idempotent case OR(x, x) => x.
// NOTE(review): generated code — permanent changes belong in gen/MIPS64.rules.
func rewriteValueMIPS64_OpMIPS64OR_0(v *Value) bool {
	// match: (OR x (MOVVconst [c]))
	// cond: is32Bit(c)
	// result: (ORconst  [c] x)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_1.AuxInt
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64ORconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (OR (MOVVconst [c]) x)
	// cond: is32Bit(c)
	// result: (ORconst  [c] x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64ORconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (OR x x)
	// cond:
	// result: x
	for {
		_ = v.Args[1]
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64ORconst_0 applies the ORconst rewrite rules:
// identity (OR 0), saturation (OR -1 is all ones), constant folding, and
// collapsing nested ORconst ops when the merged constant still fits in an
// immediate.
// NOTE(review): generated code — permanent changes belong in gen/MIPS64.rules.
func rewriteValueMIPS64_OpMIPS64ORconst_0(v *Value) bool {
	// match: (ORconst [0] x)
	// cond:
	// result: x
	for {
		if v.AuxInt != 0 {
			break
		}
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (ORconst [-1] _)
	// cond:
	// result: (MOVVconst [-1])
	for {
		if v.AuxInt != -1 {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = -1
		return true
	}
	// match: (ORconst [c] (MOVVconst [d]))
	// cond:
	// result: (MOVVconst [c|d])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = c | d
		return true
	}
	// match: (ORconst [c] (ORconst [d] x))
	// cond: is32Bit(c|d)
	// result: (ORconst [c|d] x)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ORconst {
			break
		}
		d := v_0.AuxInt
		x := v_0.Args[0]
		if !(is32Bit(c | d)) {
			break
		}
		v.reset(OpMIPS64ORconst)
		v.AuxInt = c | d
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SGT_0 folds a small constant first operand of
// SGT (signed set-on-greater-than) into SGTconst. Note SGT is NOT
// commutative, so only the first-operand form is rewritten.
// NOTE(review): generated code — permanent changes belong in gen/MIPS64.rules.
func rewriteValueMIPS64_OpMIPS64SGT_0(v *Value) bool {
	// match: (SGT (MOVVconst [c]) x)
	// cond: is32Bit(c)
	// result: (SGTconst  [c] x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64SGTconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SGTU_0 folds a small constant first operand of
// SGTU (unsigned set-on-greater-than) into SGTUconst. Like SGT, this op is
// not commutative, so only the first-operand form is rewritten.
// NOTE(review): generated code — permanent changes belong in gen/MIPS64.rules.
func rewriteValueMIPS64_OpMIPS64SGTU_0(v *Value) bool {
	// match: (SGTU (MOVVconst [c]) x)
	// cond: is32Bit(c)
	// result: (SGTUconst [c] x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64SGTUconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SGTUconst_0 applies the SGTUconst rewrite rules
// (unsigned c > x): constant-fold against a constant operand, and decide the
// comparison statically when the operand's value range is provably bounded —
// by a zero-extension (MOVBUreg/MOVHUreg), a mask (ANDconst), or an
// unsigned right shift (SRLVconst).
// NOTE(review): generated code — permanent changes belong in gen/MIPS64.rules.
func rewriteValueMIPS64_OpMIPS64SGTUconst_0(v *Value) bool {
	// match: (SGTUconst [c] (MOVVconst [d]))
	// cond: uint64(c)>uint64(d)
	// result: (MOVVconst [1])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0.AuxInt
		if !(uint64(c) > uint64(d)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 1
		return true
	}
	// match: (SGTUconst [c] (MOVVconst [d]))
	// cond: uint64(c)<=uint64(d)
	// result: (MOVVconst [0])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0.AuxInt
		if !(uint64(c) <= uint64(d)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	// match: (SGTUconst [c] (MOVBUreg _))
	// cond: 0xff < uint64(c)
	// result: (MOVVconst [1])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVBUreg {
			break
		}
		// a zero-extended byte is at most 0xff
		if !(0xff < uint64(c)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 1
		return true
	}
	// match: (SGTUconst [c] (MOVHUreg _))
	// cond: 0xffff < uint64(c)
	// result: (MOVVconst [1])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVHUreg {
			break
		}
		// a zero-extended halfword is at most 0xffff
		if !(0xffff < uint64(c)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 1
		return true
	}
	// match: (SGTUconst [c] (ANDconst [m] _))
	// cond: uint64(m) < uint64(c)
	// result: (MOVVconst [1])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ANDconst {
			break
		}
		m := v_0.AuxInt
		// a value masked with m is at most m (unsigned)
		if !(uint64(m) < uint64(c)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 1
		return true
	}
	// match: (SGTUconst [c] (SRLVconst _ [d]))
	// cond: 0 < d && d <= 63 && 1<<uint64(64-d) <= uint64(c)
	// result: (MOVVconst [1])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64SRLVconst {
			break
		}
		d := v_0.AuxInt
		// after shifting right by d, the value is < 1<<(64-d)
		if !(0 < d && d <= 63 && 1<<uint64(64-d) <= uint64(c)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 1
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SGTconst_0 applies the first ten SGTconst
// rewrite rules (signed c > x): constant-fold against a constant operand,
// and decide the comparison statically when the operand's signed range is
// bounded by a sign- or zero-extension (MOVBreg/MOVBUreg/MOVHreg/MOVHUreg).
// Further rules continue in rewriteValueMIPS64_OpMIPS64SGTconst_10.
// NOTE(review): generated code — permanent changes belong in gen/MIPS64.rules.
func rewriteValueMIPS64_OpMIPS64SGTconst_0(v *Value) bool {
	// match: (SGTconst [c] (MOVVconst [d]))
	// cond: int64(c)>int64(d)
	// result: (MOVVconst [1])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0.AuxInt
		if !(int64(c) > int64(d)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 1
		return true
	}
	// match: (SGTconst [c] (MOVVconst [d]))
	// cond: int64(c)<=int64(d)
	// result: (MOVVconst [0])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0.AuxInt
		if !(int64(c) <= int64(d)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	// match: (SGTconst [c] (MOVBreg _))
	// cond: 0x7f < int64(c)
	// result: (MOVVconst [1])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVBreg {
			break
		}
		// a sign-extended byte is in [-0x80, 0x7f]
		if !(0x7f < int64(c)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 1
		return true
	}
	// match: (SGTconst [c] (MOVBreg _))
	// cond: int64(c) <= -0x80
	// result: (MOVVconst [0])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVBreg {
			break
		}
		if !(int64(c) <= -0x80) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	// match: (SGTconst [c] (MOVBUreg _))
	// cond: 0xff < int64(c)
	// result: (MOVVconst [1])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVBUreg {
			break
		}
		// a zero-extended byte is in [0, 0xff]
		if !(0xff < int64(c)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 1
		return true
	}
	// match: (SGTconst [c] (MOVBUreg _))
	// cond: int64(c) < 0
	// result: (MOVVconst [0])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVBUreg {
			break
		}
		if !(int64(c) < 0) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	// match: (SGTconst [c] (MOVHreg _))
	// cond: 0x7fff < int64(c)
	// result: (MOVVconst [1])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVHreg {
			break
		}
		// a sign-extended halfword is in [-0x8000, 0x7fff]
		if !(0x7fff < int64(c)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 1
		return true
	}
	// match: (SGTconst [c] (MOVHreg _))
	// cond: int64(c) <= -0x8000
	// result: (MOVVconst [0])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVHreg {
			break
		}
		if !(int64(c) <= -0x8000) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	// match: (SGTconst [c] (MOVHUreg _))
	// cond: 0xffff < int64(c)
	// result: (MOVVconst [1])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVHUreg {
			break
		}
		// a zero-extended halfword is in [0, 0xffff]
		if !(0xffff < int64(c)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 1
		return true
	}
	// match: (SGTconst [c] (MOVHUreg _))
	// cond: int64(c) < 0
	// result: (MOVVconst [0])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVHUreg {
			break
		}
		if !(int64(c) < 0) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SGTconst_10 continues the SGTconst rules from
// rewriteValueMIPS64_OpMIPS64SGTconst_0 (the generator splits rule sets into
// functions of at most ten rules): decide the signed comparison statically
// when the operand is bounded by MOVWUreg, ANDconst, or SRLVconst.
// NOTE(review): generated code — permanent changes belong in gen/MIPS64.rules.
func rewriteValueMIPS64_OpMIPS64SGTconst_10(v *Value) bool {
	// match: (SGTconst [c] (MOVWUreg _))
	// cond: int64(c) < 0
	// result: (MOVVconst [0])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVWUreg {
			break
		}
		// a zero-extended word is non-negative, so a negative c is never greater
		if !(int64(c) < 0) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	// match: (SGTconst [c] (ANDconst [m] _))
	// cond: 0 <= m && m < c
	// result: (MOVVconst [1])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ANDconst {
			break
		}
		m := v_0.AuxInt
		if !(0 <= m && m < c) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 1
		return true
	}
	// match: (SGTconst [c] (SRLVconst _ [d]))
	// cond: 0 <= c && 0 < d && d <= 63 && 1<<uint64(64-d) <= c
	// result: (MOVVconst [1])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64SRLVconst {
			break
		}
		d := v_0.AuxInt
		// after an unsigned shift right by d, the value is < 1<<(64-d)
		if !(0 <= c && 0 < d && d <= 63 && 1<<uint64(64-d) <= c) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 1
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SLLV_0 applies the SLLV (shift left) rules:
// a constant shift of 64 or more yields zero; otherwise fold the constant
// shift amount into SLLVconst. Rule order matters — the >=64 case must be
// checked first so SLLVconst only ever sees in-range shift amounts.
// NOTE(review): generated code — permanent changes belong in gen/MIPS64.rules.
func rewriteValueMIPS64_OpMIPS64SLLV_0(v *Value) bool {
	// match: (SLLV _ (MOVVconst [c]))
	// cond: uint64(c)>=64
	// result: (MOVVconst [0])
	for {
		_ = v.Args[1]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_1.AuxInt
		if !(uint64(c) >= 64) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	// match: (SLLV x (MOVVconst [c]))
	// cond:
	// result: (SLLVconst x [c])
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpMIPS64SLLVconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SLLVconst_0 constant-folds a left shift of a
// constant. The shift amount c is in [0,63] here because the SLLV rules
// above handle c >= 64 before producing an SLLVconst.
// NOTE(review): generated code — permanent changes belong in gen/MIPS64.rules.
func rewriteValueMIPS64_OpMIPS64SLLVconst_0(v *Value) bool {
	// match: (SLLVconst [c] (MOVVconst [d]))
	// cond:
	// result: (MOVVconst [int64(d)<<uint64(c)])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64(d) << uint64(c)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SRAV_0 applies the SRAV (arithmetic shift
// right) rules: a constant shift of 64 or more is clamped to 63 (which
// replicates the sign bit, the arithmetic-shift saturation result);
// otherwise fold the constant shift amount into SRAVconst. The >=64 rule
// must run first so SRAVconst only sees in-range amounts.
// NOTE(review): generated code — permanent changes belong in gen/MIPS64.rules.
func rewriteValueMIPS64_OpMIPS64SRAV_0(v *Value) bool {
	// match: (SRAV x (MOVVconst [c]))
	// cond: uint64(c)>=64
	// result: (SRAVconst x [63])
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_1.AuxInt
		if !(uint64(c) >= 64) {
			break
		}
		v.reset(OpMIPS64SRAVconst)
		v.AuxInt = 63
		v.AddArg(x)
		return true
	}
	// match: (SRAV x (MOVVconst [c]))
	// cond:
	// result: (SRAVconst x [c])
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpMIPS64SRAVconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SRAVconst_0 constant-folds an arithmetic right
// shift of a constant; Go's signed >> is an arithmetic shift, matching the
// instruction's semantics.
// NOTE(review): generated code — permanent changes belong in gen/MIPS64.rules.
func rewriteValueMIPS64_OpMIPS64SRAVconst_0(v *Value) bool {
	// match: (SRAVconst [c] (MOVVconst [d]))
	// cond:
	// result: (MOVVconst [int64(d)>>uint64(c)])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64(d) >> uint64(c)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SRLV_0 applies the SRLV (logical shift right)
// rules: a constant shift of 64 or more yields zero; otherwise fold the
// constant shift amount into SRLVconst. The >=64 rule must run first so
// SRLVconst only sees in-range amounts.
// NOTE(review): generated code — permanent changes belong in gen/MIPS64.rules.
func rewriteValueMIPS64_OpMIPS64SRLV_0(v *Value) bool {
	// match: (SRLV _ (MOVVconst [c]))
	// cond: uint64(c)>=64
	// result: (MOVVconst [0])
	for {
		_ = v.Args[1]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_1.AuxInt
		if !(uint64(c) >= 64) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	// match: (SRLV x (MOVVconst [c]))
	// cond:
	// result: (SRLVconst x [c])
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpMIPS64SRLVconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SRLVconst_0 constant-folds a logical right
// shift of a constant; the uint64 conversion of d makes Go's >> a logical
// (zero-filling) shift, matching the instruction's semantics.
// NOTE(review): generated code — permanent changes belong in gen/MIPS64.rules.
func rewriteValueMIPS64_OpMIPS64SRLVconst_0(v *Value) bool {
	// match: (SRLVconst [c] (MOVVconst [d]))
	// cond:
	// result: (MOVVconst [int64(uint64(d)>>uint64(c))])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64(uint64(d) >> uint64(c))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SUBV_0 applies the SUBV rewrite rules: fold a
// small constant subtrahend into SUBVconst, simplify x-x to 0, and turn
// 0-x into NEGV.
// NOTE(review): generated code — permanent changes belong in gen/MIPS64.rules.
func rewriteValueMIPS64_OpMIPS64SUBV_0(v *Value) bool {
	// match: (SUBV x (MOVVconst [c]))
	// cond: is32Bit(c)
	// result: (SUBVconst [c] x)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_1.AuxInt
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64SUBVconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (SUBV x x)
	// cond:
	// result: (MOVVconst [0])
	for {
		_ = v.Args[1]
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	// match: (SUBV (MOVVconst [0]) x)
	// cond:
	// result: (NEGV x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		x := v.Args[1]
		v.reset(OpMIPS64NEGV)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SUBVconst_0 applies the SUBVconst rewrite
// rules: identity (subtract 0), constant folding (d-c), and collapsing a
// SUBVconst of a SUBVconst/ADDVconst into a single ADDVconst when the
// combined immediate still fits in 32 bits.
// NOTE(review): generated code — permanent changes belong in gen/MIPS64.rules.
func rewriteValueMIPS64_OpMIPS64SUBVconst_0(v *Value) bool {
	// match: (SUBVconst [0] x)
	// cond:
	// result: x
	for {
		if v.AuxInt != 0 {
			break
		}
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (SUBVconst [c] (MOVVconst [d]))
	// cond:
	// result: (MOVVconst [d-c])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = d - c
		return true
	}
	// match: (SUBVconst [c] (SUBVconst [d] x))
	// cond: is32Bit(-c-d)
	// result: (ADDVconst [-c-d] x)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64SUBVconst {
			break
		}
		d := v_0.AuxInt
		x := v_0.Args[0]
		if !(is32Bit(-c - d)) {
			break
		}
		v.reset(OpMIPS64ADDVconst)
		v.AuxInt = -c - d
		v.AddArg(x)
		return true
	}
	// match: (SUBVconst [c] (ADDVconst [d] x))
	// cond: is32Bit(-c+d)
	// result: (ADDVconst [-c+d] x)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		d := v_0.AuxInt
		x := v_0.Args[0]
		if !(is32Bit(-c + d)) {
			break
		}
		v.reset(OpMIPS64ADDVconst)
		v.AuxInt = -c + d
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64XOR_0 simplifies a MIPS64 XOR: folding a
// constant operand (on either side) into an XORconst immediate form, and
// reducing x^x to the constant 0. It reports whether a rewrite fired.
func rewriteValueMIPS64_OpMIPS64XOR_0(v *Value) bool {
	// match: (XOR x (MOVVconst [c]))
	// cond: is32Bit(c)
	// result: (XORconst [c] x)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_1.AuxInt
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64XORconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (XOR (MOVVconst [c]) x)
	// cond: is32Bit(c)
	// result: (XORconst [c] x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64XORconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (XOR x x)
	// cond:
	// result: (MOVVconst [0])
	for {
		_ = v.Args[1]
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64XORconst_0 simplifies a MIPS64 XORconst:
// XOR by 0 is the identity, XOR by -1 becomes a NOR-based bitwise NOT,
// and constant operands or nested XORconsts are folded. It reports
// whether a rewrite fired.
func rewriteValueMIPS64_OpMIPS64XORconst_0(v *Value) bool {
	// match: (XORconst [0] x)
	// cond:
	// result: x
	for {
		if v.AuxInt != 0 {
			break
		}
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (XORconst [-1] x)
	// cond:
	// result: (NORconst [0] x)
	for {
		if v.AuxInt != -1 {
			break
		}
		x := v.Args[0]
		v.reset(OpMIPS64NORconst)
		v.AuxInt = 0
		v.AddArg(x)
		return true
	}
	// match: (XORconst [c] (MOVVconst [d]))
	// cond:
	// result: (MOVVconst [c^d])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = c ^ d
		return true
	}
	// match: (XORconst [c] (XORconst [d] x))
	// cond: is32Bit(c^d)
	// result: (XORconst [c^d] x)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64XORconst {
			break
		}
		d := v_0.AuxInt
		x := v_0.Args[0]
		// The merged immediate must still satisfy the 32-bit constraint.
		if !(is32Bit(c ^ d)) {
			break
		}
		v.reset(OpMIPS64XORconst)
		v.AuxInt = c ^ d
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMod16_0 lowers the generic Mod16 op to the Select0
// projection of a tuple-producing signed 64-bit divide (DIVV), after
// sign-extending both 16-bit operands to 64 bits. Always rewrites.
func rewriteValueMIPS64_OpMod16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Mod16 x y)
	// cond:
	// result: (Select0 (DIVV (SignExt16to64 x) (SignExt16to64 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
		v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpMod16u_0 lowers the generic Mod16u op to the Select0
// projection of an unsigned 64-bit divide (DIVVU), after zero-extending
// both 16-bit operands to 64 bits. Always rewrites.
func rewriteValueMIPS64_OpMod16u_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Mod16u x y)
	// cond:
	// result: (Select0 (DIVVU (ZeroExt16to64 x) (ZeroExt16to64 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpMod32_0 lowers the generic Mod32 op to the Select0
// projection of a signed 64-bit divide (DIVV), after sign-extending both
// 32-bit operands to 64 bits. Always rewrites.
func rewriteValueMIPS64_OpMod32_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Mod32 x y)
	// cond:
	// result: (Select0 (DIVV (SignExt32to64 x) (SignExt32to64 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
		v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpMod32u_0 lowers the generic Mod32u op to the Select0
// projection of an unsigned 64-bit divide (DIVVU), after zero-extending
// both 32-bit operands to 64 bits. Always rewrites.
func rewriteValueMIPS64_OpMod32u_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Mod32u x y)
	// cond:
	// result: (Select0 (DIVVU (ZeroExt32to64 x) (ZeroExt32to64 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpMod64_0 lowers the generic Mod64 op to the Select0
// projection of a signed 64-bit divide (DIVV); no extension is needed at
// full register width. Always rewrites.
func rewriteValueMIPS64_OpMod64_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Mod64 x y)
	// cond:
	// result: (Select0 (DIVV x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpMod64u_0 lowers the generic Mod64u op to the Select0
// projection of an unsigned 64-bit divide (DIVVU); no extension is needed
// at full register width. Always rewrites.
func rewriteValueMIPS64_OpMod64u_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Mod64u x y)
	// cond:
	// result: (Select0 (DIVVU x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpMod8_0 lowers the generic Mod8 op to the Select0
// projection of a signed 64-bit divide (DIVV), after sign-extending both
// 8-bit operands to 64 bits. Always rewrites.
func rewriteValueMIPS64_OpMod8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Mod8 x y)
	// cond:
	// result: (Select0 (DIVV (SignExt8to64 x) (SignExt8to64 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
		v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpMod8u_0 lowers the generic Mod8u op to the Select0
// projection of an unsigned 64-bit divide (DIVVU), after zero-extending
// both 8-bit operands to 64 bits. Always rewrites.
func rewriteValueMIPS64_OpMod8u_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Mod8u x y)
	// cond:
	// result: (Select0 (DIVVU (ZeroExt8to64 x) (ZeroExt8to64 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpMove_0 lowers small generic Move (memmove) ops into
// explicit MIPS64 load/store sequences. The copy size is v.AuxInt and the
// element type (whose alignment gates the wide-store rules) is v.Aux.
// Rules are ordered widest-store first for each size; when alignment is
// insufficient the later byte/halfword fallbacks apply. Nested stores are
// chained through the memory argument so the stores stay ordered. Sizes
// not handled here fall through (return false) to rewriteValueMIPS64_OpMove_10.
func rewriteValueMIPS64_OpMove_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Move [0] _ _ mem)
	// cond:
	// result: mem
	for {
		if v.AuxInt != 0 {
			break
		}
		_ = v.Args[2]
		mem := v.Args[2]
		v.reset(OpCopy)
		v.Type = mem.Type
		v.AddArg(mem)
		return true
	}
	// match: (Move [1] dst src mem)
	// cond:
	// result: (MOVBstore dst (MOVBload src mem) mem)
	for {
		if v.AuxInt != 1 {
			break
		}
		_ = v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		v.reset(OpMIPS64MOVBstore)
		v.AddArg(dst)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	// match: (Move [2] {t} dst src mem)
	// cond: t.(*types.Type).Alignment()%2 == 0
	// result: (MOVHstore dst (MOVHload src mem) mem)
	for {
		if v.AuxInt != 2 {
			break
		}
		t := v.Aux
		_ = v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		if !(t.(*types.Type).Alignment()%2 == 0) {
			break
		}
		v.reset(OpMIPS64MOVHstore)
		v.AddArg(dst)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	// match: (Move [2] dst src mem)
	// cond:
	// result: (MOVBstore [1] dst (MOVBload [1] src mem) 		(MOVBstore dst (MOVBload src mem) mem))
	for {
		if v.AuxInt != 2 {
			break
		}
		_ = v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = 1
		v.AddArg(dst)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
		v0.AuxInt = 1
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
		v1.AddArg(dst)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
		v2.AddArg(src)
		v2.AddArg(mem)
		v1.AddArg(v2)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	// match: (Move [4] {t} dst src mem)
	// cond: t.(*types.Type).Alignment()%4 == 0
	// result: (MOVWstore dst (MOVWload src mem) mem)
	for {
		if v.AuxInt != 4 {
			break
		}
		t := v.Aux
		_ = v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		if !(t.(*types.Type).Alignment()%4 == 0) {
			break
		}
		v.reset(OpMIPS64MOVWstore)
		v.AddArg(dst)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32)
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	// match: (Move [4] {t} dst src mem)
	// cond: t.(*types.Type).Alignment()%2 == 0
	// result: (MOVHstore [2] dst (MOVHload [2] src mem) 		(MOVHstore dst (MOVHload src mem) mem))
	for {
		if v.AuxInt != 4 {
			break
		}
		t := v.Aux
		_ = v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		if !(t.(*types.Type).Alignment()%2 == 0) {
			break
		}
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = 2
		v.AddArg(dst)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
		v0.AuxInt = 2
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
		v1.AddArg(dst)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
		v2.AddArg(src)
		v2.AddArg(mem)
		v1.AddArg(v2)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	// match: (Move [4] dst src mem)
	// cond:
	// result: (MOVBstore [3] dst (MOVBload [3] src mem) 		(MOVBstore [2] dst (MOVBload [2] src mem) 			(MOVBstore [1] dst (MOVBload [1] src mem) 				(MOVBstore dst (MOVBload src mem) mem))))
	for {
		if v.AuxInt != 4 {
			break
		}
		_ = v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = 3
		v.AddArg(dst)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
		v0.AuxInt = 3
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
		v1.AuxInt = 2
		v1.AddArg(dst)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
		v2.AuxInt = 2
		v2.AddArg(src)
		v2.AddArg(mem)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
		v3.AuxInt = 1
		v3.AddArg(dst)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
		v4.AuxInt = 1
		v4.AddArg(src)
		v4.AddArg(mem)
		v3.AddArg(v4)
		v5 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
		v5.AddArg(dst)
		v6 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
		v6.AddArg(src)
		v6.AddArg(mem)
		v5.AddArg(v6)
		v5.AddArg(mem)
		v3.AddArg(v5)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
	// match: (Move [8] {t} dst src mem)
	// cond: t.(*types.Type).Alignment()%8 == 0
	// result: (MOVVstore dst (MOVVload src mem) mem)
	for {
		if v.AuxInt != 8 {
			break
		}
		t := v.Aux
		_ = v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		if !(t.(*types.Type).Alignment()%8 == 0) {
			break
		}
		v.reset(OpMIPS64MOVVstore)
		v.AddArg(dst)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64)
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	// match: (Move [8] {t} dst src mem)
	// cond: t.(*types.Type).Alignment()%4 == 0
	// result: (MOVWstore [4] dst (MOVWload [4] src mem) 		(MOVWstore dst (MOVWload src mem) mem))
	for {
		if v.AuxInt != 8 {
			break
		}
		t := v.Aux
		_ = v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		if !(t.(*types.Type).Alignment()%4 == 0) {
			break
		}
		v.reset(OpMIPS64MOVWstore)
		v.AuxInt = 4
		v.AddArg(dst)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32)
		v0.AuxInt = 4
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem)
		v1.AddArg(dst)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32)
		v2.AddArg(src)
		v2.AddArg(mem)
		v1.AddArg(v2)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	// match: (Move [8] {t} dst src mem)
	// cond: t.(*types.Type).Alignment()%2 == 0
	// result: (MOVHstore [6] dst (MOVHload [6] src mem) 		(MOVHstore [4] dst (MOVHload [4] src mem) 			(MOVHstore [2] dst (MOVHload [2] src mem) 				(MOVHstore dst (MOVHload src mem) mem))))
	for {
		if v.AuxInt != 8 {
			break
		}
		t := v.Aux
		_ = v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		if !(t.(*types.Type).Alignment()%2 == 0) {
			break
		}
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = 6
		v.AddArg(dst)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
		v0.AuxInt = 6
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
		v1.AuxInt = 4
		v1.AddArg(dst)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
		v2.AuxInt = 4
		v2.AddArg(src)
		v2.AddArg(mem)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
		v3.AuxInt = 2
		v3.AddArg(dst)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
		v4.AuxInt = 2
		v4.AddArg(src)
		v4.AddArg(mem)
		v3.AddArg(v4)
		v5 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
		v5.AddArg(dst)
		v6 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
		v6.AddArg(src)
		v6.AddArg(mem)
		v5.AddArg(v6)
		v5.AddArg(mem)
		v3.AddArg(v5)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMove_10 continues the Move lowering started in
// rewriteValueMIPS64_OpMove_0: it handles the remaining fixed sizes
// (3, 6, 12, 16, 24 bytes) with load/store chains, and finally falls back
// to the runtime-loop LoweredMove pseudo-op for copies larger than 24
// bytes or with less than 8-byte alignment. It reports whether a rewrite fired.
func rewriteValueMIPS64_OpMove_10(v *Value) bool {
	b := v.Block
	_ = b
	config := b.Func.Config
	_ = config
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Move [3] dst src mem)
	// cond:
	// result: (MOVBstore [2] dst (MOVBload [2] src mem) 		(MOVBstore [1] dst (MOVBload [1] src mem) 			(MOVBstore dst (MOVBload src mem) mem)))
	for {
		if v.AuxInt != 3 {
			break
		}
		_ = v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = 2
		v.AddArg(dst)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
		v0.AuxInt = 2
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
		v1.AuxInt = 1
		v1.AddArg(dst)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
		v2.AuxInt = 1
		v2.AddArg(src)
		v2.AddArg(mem)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
		v3.AddArg(dst)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
		v4.AddArg(src)
		v4.AddArg(mem)
		v3.AddArg(v4)
		v3.AddArg(mem)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
	// match: (Move [6] {t} dst src mem)
	// cond: t.(*types.Type).Alignment()%2 == 0
	// result: (MOVHstore [4] dst (MOVHload [4] src mem) 		(MOVHstore [2] dst (MOVHload [2] src mem) 			(MOVHstore dst (MOVHload src mem) mem)))
	for {
		if v.AuxInt != 6 {
			break
		}
		t := v.Aux
		_ = v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		if !(t.(*types.Type).Alignment()%2 == 0) {
			break
		}
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = 4
		v.AddArg(dst)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
		v0.AuxInt = 4
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
		v1.AuxInt = 2
		v1.AddArg(dst)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
		v2.AuxInt = 2
		v2.AddArg(src)
		v2.AddArg(mem)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
		v3.AddArg(dst)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
		v4.AddArg(src)
		v4.AddArg(mem)
		v3.AddArg(v4)
		v3.AddArg(mem)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
	// match: (Move [12] {t} dst src mem)
	// cond: t.(*types.Type).Alignment()%4 == 0
	// result: (MOVWstore [8] dst (MOVWload [8] src mem) 		(MOVWstore [4] dst (MOVWload [4] src mem) 			(MOVWstore dst (MOVWload src mem) mem)))
	for {
		if v.AuxInt != 12 {
			break
		}
		t := v.Aux
		_ = v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		if !(t.(*types.Type).Alignment()%4 == 0) {
			break
		}
		v.reset(OpMIPS64MOVWstore)
		v.AuxInt = 8
		v.AddArg(dst)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32)
		v0.AuxInt = 8
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem)
		v1.AuxInt = 4
		v1.AddArg(dst)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32)
		v2.AuxInt = 4
		v2.AddArg(src)
		v2.AddArg(mem)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem)
		v3.AddArg(dst)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32)
		v4.AddArg(src)
		v4.AddArg(mem)
		v3.AddArg(v4)
		v3.AddArg(mem)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
	// match: (Move [16] {t} dst src mem)
	// cond: t.(*types.Type).Alignment()%8 == 0
	// result: (MOVVstore [8] dst (MOVVload [8] src mem) 		(MOVVstore dst (MOVVload src mem) mem))
	for {
		if v.AuxInt != 16 {
			break
		}
		t := v.Aux
		_ = v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		if !(t.(*types.Type).Alignment()%8 == 0) {
			break
		}
		v.reset(OpMIPS64MOVVstore)
		v.AuxInt = 8
		v.AddArg(dst)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64)
		v0.AuxInt = 8
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem)
		v1.AddArg(dst)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64)
		v2.AddArg(src)
		v2.AddArg(mem)
		v1.AddArg(v2)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	// match: (Move [24] {t} dst src mem)
	// cond: t.(*types.Type).Alignment()%8 == 0
	// result: (MOVVstore [16] dst (MOVVload [16] src mem) 		(MOVVstore [8] dst (MOVVload [8] src mem) 			(MOVVstore dst (MOVVload src mem) mem)))
	for {
		if v.AuxInt != 24 {
			break
		}
		t := v.Aux
		_ = v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		if !(t.(*types.Type).Alignment()%8 == 0) {
			break
		}
		v.reset(OpMIPS64MOVVstore)
		v.AuxInt = 16
		v.AddArg(dst)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64)
		v0.AuxInt = 16
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem)
		v1.AuxInt = 8
		v1.AddArg(dst)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64)
		v2.AuxInt = 8
		v2.AddArg(src)
		v2.AddArg(mem)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem)
		v3.AddArg(dst)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64)
		v4.AddArg(src)
		v4.AddArg(mem)
		v3.AddArg(v4)
		v3.AddArg(mem)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
	// match: (Move [s] {t} dst src mem)
	// cond: s > 24 || t.(*types.Type).Alignment()%8 != 0
	// result: (LoweredMove [t.(*types.Type).Alignment()] 		dst 		src 		(ADDVconst <src.Type> src [s-moveSize(t.(*types.Type).Alignment(), config)]) 		mem)
	for {
		s := v.AuxInt
		t := v.Aux
		_ = v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		if !(s > 24 || t.(*types.Type).Alignment()%8 != 0) {
			break
		}
		v.reset(OpMIPS64LoweredMove)
		v.AuxInt = t.(*types.Type).Alignment()
		v.AddArg(dst)
		v.AddArg(src)
		// Third argument is the end-of-source address the copy loop runs to.
		v0 := b.NewValue0(v.Pos, OpMIPS64ADDVconst, src.Type)
		v0.AuxInt = s - moveSize(t.(*types.Type).Alignment(), config)
		v0.AddArg(src)
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMul16_0 lowers the generic Mul16 op to the Select1
// projection of the tuple-producing unsigned multiply MULVU. Always rewrites.
func rewriteValueMIPS64_OpMul16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Mul16 x y)
	// cond:
	// result: (Select1 (MULVU x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpMul32_0 lowers the generic Mul32 op to the Select1
// projection of the tuple-producing unsigned multiply MULVU. Always rewrites.
func rewriteValueMIPS64_OpMul32_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Mul32 x y)
	// cond:
	// result: (Select1 (MULVU x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpMul32F_0 lowers the generic Mul32F op directly to
// the MIPS64 single-precision multiply MULF. Always rewrites.
func rewriteValueMIPS64_OpMul32F_0(v *Value) bool {
	// match: (Mul32F x y)
	// cond:
	// result: (MULF x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64MULF)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpMul64_0 lowers the generic Mul64 op to the Select1
// projection of the tuple-producing unsigned multiply MULVU. Always rewrites.
func rewriteValueMIPS64_OpMul64_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Mul64 x y)
	// cond:
	// result: (Select1 (MULVU x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpMul64F_0 lowers the generic Mul64F op directly to
// the MIPS64 double-precision multiply MULD. Always rewrites.
func rewriteValueMIPS64_OpMul64F_0(v *Value) bool {
	// match: (Mul64F x y)
	// cond:
	// result: (MULD x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64MULD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpMul8_0 lowers the generic Mul8 op to the Select1
// projection of the tuple-producing unsigned multiply MULVU. Always rewrites.
func rewriteValueMIPS64_OpMul8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Mul8 x y)
	// cond:
	// result: (Select1 (MULVU x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpNeg16_0 lowers the generic Neg16 op to the MIPS64
// integer negate NEGV. Always rewrites.
func rewriteValueMIPS64_OpNeg16_0(v *Value) bool {
	// match: (Neg16 x)
	// cond:
	// result: (NEGV x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64NEGV)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpNeg32_0 lowers the generic Neg32 op to the MIPS64
// integer negate NEGV. Always rewrites.
func rewriteValueMIPS64_OpNeg32_0(v *Value) bool {
	// match: (Neg32 x)
	// cond:
	// result: (NEGV x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64NEGV)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpNeg32F_0 lowers the generic Neg32F op to the MIPS64
// single-precision float negate NEGF. Always rewrites.
func rewriteValueMIPS64_OpNeg32F_0(v *Value) bool {
	// match: (Neg32F x)
	// cond:
	// result: (NEGF x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64NEGF)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpNeg64_0 lowers the generic Neg64 op to the MIPS64
// integer negate NEGV. Always rewrites.
func rewriteValueMIPS64_OpNeg64_0(v *Value) bool {
	// match: (Neg64 x)
	// cond:
	// result: (NEGV x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64NEGV)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpNeg64F_0 lowers the generic Neg64F op to the MIPS64
// double-precision float negate NEGD. Always rewrites.
func rewriteValueMIPS64_OpNeg64F_0(v *Value) bool {
	// match: (Neg64F x)
	// cond:
	// result: (NEGD x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64NEGD)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpNeg8_0 lowers the generic Neg8 op to the MIPS64
// integer negate NEGV. Always rewrites.
func rewriteValueMIPS64_OpNeg8_0(v *Value) bool {
	// match: (Neg8 x)
	// cond:
	// result: (NEGV x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64NEGV)
		v.AddArg(x)
		return true
	}
}
  7518  func rewriteValueMIPS64_OpNeq16_0(v *Value) bool {
  7519  	b := v.Block
  7520  	_ = b
  7521  	typ := &b.Func.Config.Types
  7522  	_ = typ
  7523  	// match: (Neq16 x y)
  7524  	// cond:
  7525  	// result: (SGTU (XOR (ZeroExt16to32 x) (ZeroExt16to64 y)) (MOVVconst [0]))
  7526  	for {
  7527  		_ = v.Args[1]
  7528  		x := v.Args[0]
  7529  		y := v.Args[1]
  7530  		v.reset(OpMIPS64SGTU)
  7531  		v0 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
  7532  		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  7533  		v1.AddArg(x)
  7534  		v0.AddArg(v1)
  7535  		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  7536  		v2.AddArg(y)
  7537  		v0.AddArg(v2)
  7538  		v.AddArg(v0)
  7539  		v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
  7540  		v3.AuxInt = 0
  7541  		v.AddArg(v3)
  7542  		return true
  7543  	}
  7544  }
// rewriteValueMIPS64_OpNeq32_0 lowers the generic Neq32 op: both 32-bit
// operands are zero-extended to 64 bits, XORed, and the result is compared
// against zero with the unsigned set-greater-than SGTU (nonzero XOR means
// the values differ). Always rewrites.
func rewriteValueMIPS64_OpNeq32_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Neq32 x y)
	// cond:
	// result: (SGTU (XOR (ZeroExt32to64 x) (ZeroExt32to64 y)) (MOVVconst [0]))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v3.AuxInt = 0
		v.AddArg(v3)
		return true
	}
}
// rewriteValueMIPS64_OpNeq32F_0 lowers Neq32F to FPFlagFalse over a CMPEQF
// float compare: not-equal is the negation of the FP equal flag.
func rewriteValueMIPS64_OpNeq32F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Neq32F x y)
	// cond:
	// result: (FPFlagFalse (CMPEQF x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64FPFlagFalse)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPEQF, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpNeq64_0 lowers Neq64: XOR the operands (non-zero iff
// unequal) and test with SGTU against the constant 0.
func rewriteValueMIPS64_OpNeq64_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Neq64 x y)
	// cond:
	// result: (SGTU (XOR x y) (MOVVconst [0]))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v1.AuxInt = 0
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpNeq64F_0 lowers Neq64F to FPFlagFalse over a CMPEQD
// double compare: not-equal is the negation of the FP equal flag.
func rewriteValueMIPS64_OpNeq64F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Neq64F x y)
	// cond:
	// result: (FPFlagFalse (CMPEQD x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64FPFlagFalse)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPEQD, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpNeq8_0 lowers Neq8: both operands are zero-extended to
// 64 bits, XORed (non-zero iff unequal), and tested with SGTU against zero.
func rewriteValueMIPS64_OpNeq8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Neq8 x y)
	// cond:
	// result: (SGTU (XOR (ZeroExt8to64 x) (ZeroExt8to64 y)) (MOVVconst [0]))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v3.AuxInt = 0
		v.AddArg(v3)
		return true
	}
}
// rewriteValueMIPS64_OpNeqB_0 lowers NeqB to a plain XOR: for 0/1 boolean
// values, x XOR y is exactly the not-equal result.
func rewriteValueMIPS64_OpNeqB_0(v *Value) bool {
	// match: (NeqB x y)
	// cond:
	// result: (XOR x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpNeqPtr_0 lowers NeqPtr the same way as Neq64 (pointers
// are 64-bit here): XOR the operands and test with SGTU against zero.
func rewriteValueMIPS64_OpNeqPtr_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (NeqPtr x y)
	// cond:
	// result: (SGTU (XOR x y) (MOVVconst [0]))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v1.AuxInt = 0
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpNilCheck_0 lowers the generic NilCheck to the
// architecture-specific LoweredNilCheck pseudo-op, keeping ptr and mem args.
func rewriteValueMIPS64_OpNilCheck_0(v *Value) bool {
	// match: (NilCheck ptr mem)
	// cond:
	// result: (LoweredNilCheck ptr mem)
	for {
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		v.reset(OpMIPS64LoweredNilCheck)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
}
// rewriteValueMIPS64_OpNot_0 lowers boolean Not to XORconst [1]: XOR with 1
// flips a 0/1 boolean value.
func rewriteValueMIPS64_OpNot_0(v *Value) bool {
	// match: (Not x)
	// cond:
	// result: (XORconst [1] x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64XORconst)
		v.AuxInt = 1
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpOffPtr_0 lowers OffPtr. Two rules, tried in order:
// an offset from SP becomes a MOVVaddr (address materialization), and the
// general case becomes an ADDVconst (pointer plus constant).
func rewriteValueMIPS64_OpOffPtr_0(v *Value) bool {
	// match: (OffPtr [off] ptr:(SP))
	// cond:
	// result: (MOVVaddr [off] ptr)
	for {
		off := v.AuxInt
		ptr := v.Args[0]
		if ptr.Op != OpSP {
			break
		}
		v.reset(OpMIPS64MOVVaddr)
		v.AuxInt = off
		v.AddArg(ptr)
		return true
	}
	// match: (OffPtr [off] ptr)
	// cond:
	// result: (ADDVconst [off] ptr)
	for {
		off := v.AuxInt
		ptr := v.Args[0]
		v.reset(OpMIPS64ADDVconst)
		v.AuxInt = off
		v.AddArg(ptr)
		return true
	}
}
// rewriteValueMIPS64_OpOr16_0 lowers Or16 to the 64-bit OR instruction
// (bitwise OR is width-agnostic, so no extension is needed).
func rewriteValueMIPS64_OpOr16_0(v *Value) bool {
	// match: (Or16 x y)
	// cond:
	// result: (OR x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64OR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpOr32_0 lowers Or32 to the 64-bit OR instruction.
func rewriteValueMIPS64_OpOr32_0(v *Value) bool {
	// match: (Or32 x y)
	// cond:
	// result: (OR x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64OR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpOr64_0 lowers Or64 to the 64-bit OR instruction.
func rewriteValueMIPS64_OpOr64_0(v *Value) bool {
	// match: (Or64 x y)
	// cond:
	// result: (OR x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64OR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpOr8_0 lowers Or8 to the 64-bit OR instruction.
func rewriteValueMIPS64_OpOr8_0(v *Value) bool {
	// match: (Or8 x y)
	// cond:
	// result: (OR x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64OR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpOrB_0 lowers boolean OrB to the OR instruction
// (for 0/1 values, bitwise OR is logical OR).
func rewriteValueMIPS64_OpOrB_0(v *Value) bool {
	// match: (OrB x y)
	// cond:
	// result: (OR x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64OR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpRound32F_0 lowers Round32F to a plain copy: the rule
// result is just x, so no instruction is emitted on this architecture.
func rewriteValueMIPS64_OpRound32F_0(v *Value) bool {
	// match: (Round32F x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpRound64F_0 lowers Round64F to a plain copy: the rule
// result is just x, so no instruction is emitted on this architecture.
func rewriteValueMIPS64_OpRound64F_0(v *Value) bool {
	// match: (Round64F x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpRsh16Ux16_0 lowers the unsigned 16-bit right shift with
// a 16-bit shift count. SGTU(64, count) is 1 when the count is in range;
// NEGV turns that 0/1 into an all-zeros/all-ones mask, which is ANDed with
// the SRLV result so that counts >= 64 yield 0 (Go shift semantics).
func rewriteValueMIPS64_OpRsh16Ux16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh16Ux16 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> (ZeroExt16to64 x) (ZeroExt16to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v5.AddArg(x)
		v4.AddArg(v5)
		v6 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v6.AddArg(y)
		v4.AddArg(v6)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh16Ux32_0 lowers the unsigned 16-bit right shift with
// a 32-bit count: same mask-and-shift pattern as Rsh16Ux16 (AND with
// NEGV(SGTU(64, count)) forces out-of-range counts to produce 0), with the
// count widened via ZeroExt32to64.
func rewriteValueMIPS64_OpRsh16Ux32_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh16Ux32 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> (ZeroExt16to64 x) (ZeroExt32to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v5.AddArg(x)
		v4.AddArg(v5)
		v6 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v6.AddArg(y)
		v4.AddArg(v6)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh16Ux64_0 lowers the unsigned 16-bit right shift with
// a 64-bit count: the count needs no widening, otherwise the same
// AND(NEGV(SGTU(64, count)), SRLV(...)) out-of-range masking applies.
func rewriteValueMIPS64_OpRsh16Ux64_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh16Ux64 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SRLV <t> (ZeroExt16to64 x) y))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(x)
		v3.AddArg(v4)
		v3.AddArg(y)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueMIPS64_OpRsh16Ux8_0 lowers the unsigned 16-bit right shift with
// an 8-bit count: same AND/NEGV/SGTU out-of-range masking as the other
// Rsh16Ux* variants, with the count widened via ZeroExt8to64.
func rewriteValueMIPS64_OpRsh16Ux8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh16Ux8 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64  y))) (SRLV <t> (ZeroExt16to64 x) (ZeroExt8to64  y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v5.AddArg(x)
		v4.AddArg(v5)
		v6 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v6.AddArg(y)
		v4.AddArg(v6)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh16x16_0 lowers the signed 16-bit right shift with a
// 16-bit count. The count is ORed with NEGV(SGTU(count, 63)): when the count
// exceeds 63 the OR saturates it to all ones, so SRAV effectively shifts by
// the maximum and fills with the sign bit, matching Go semantics.
func rewriteValueMIPS64_OpRsh16x16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh16x16 <t> x y)
	// cond:
	// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v5.AuxInt = 63
		v3.AddArg(v5)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v6 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v6.AddArg(y)
		v1.AddArg(v6)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh16x32_0 lowers the signed 16-bit right shift with a
// 32-bit count: same saturate-then-SRAV pattern as Rsh16x16 (OR with
// NEGV(SGTU(count, 63)) clamps oversized counts), count widened to 64 bits.
func rewriteValueMIPS64_OpRsh16x32_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh16x32 <t> x y)
	// cond:
	// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v5.AuxInt = 63
		v3.AddArg(v5)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v6 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v6.AddArg(y)
		v1.AddArg(v6)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh16x64_0 lowers the signed 16-bit right shift with a
// 64-bit count: the count needs no widening; OR with NEGV(SGTU(count, 63))
// clamps oversized counts before the SRAV arithmetic shift.
func rewriteValueMIPS64_OpRsh16x64_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh16x64 <t> x y)
	// cond:
	// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v3.AddArg(y)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v4.AuxInt = 63
		v3.AddArg(v4)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh16x8_0 lowers the signed 16-bit right shift with an
// 8-bit count: same saturate-then-SRAV pattern as the other Rsh16x* variants,
// count widened via ZeroExt8to64.
func rewriteValueMIPS64_OpRsh16x8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh16x8 <t> x y)
	// cond:
	// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64  y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64  y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v5.AuxInt = 63
		v3.AddArg(v5)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v6 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v6.AddArg(y)
		v1.AddArg(v6)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh32Ux16_0 lowers the unsigned 32-bit right shift with
// a 16-bit count: AND with NEGV(SGTU(64, count)) zeroes the result for
// counts >= 64; operand and count are zero-extended to 64 bits for SRLV.
func rewriteValueMIPS64_OpRsh32Ux16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh32Ux16 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> (ZeroExt32to64 x) (ZeroExt16to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v5.AddArg(x)
		v4.AddArg(v5)
		v6 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v6.AddArg(y)
		v4.AddArg(v6)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh32Ux32_0 lowers the unsigned 32-bit right shift with
// a 32-bit count: same AND/NEGV/SGTU out-of-range masking, both operand and
// count widened via ZeroExt32to64.
func rewriteValueMIPS64_OpRsh32Ux32_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh32Ux32 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> (ZeroExt32to64 x) (ZeroExt32to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v5.AddArg(x)
		v4.AddArg(v5)
		v6 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v6.AddArg(y)
		v4.AddArg(v6)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh32Ux64_0 lowers the unsigned 32-bit right shift with
// a 64-bit count: count used as-is; AND with NEGV(SGTU(64, count)) forces
// counts >= 64 to produce 0.
func rewriteValueMIPS64_OpRsh32Ux64_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh32Ux64 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SRLV <t> (ZeroExt32to64 x) y))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(x)
		v3.AddArg(v4)
		v3.AddArg(y)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueMIPS64_OpRsh32Ux8_0 lowers the unsigned 32-bit right shift with
// an 8-bit count: same AND/NEGV/SGTU out-of-range masking, count widened via
// ZeroExt8to64.
func rewriteValueMIPS64_OpRsh32Ux8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh32Ux8 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64  y))) (SRLV <t> (ZeroExt32to64 x) (ZeroExt8to64  y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v5.AddArg(x)
		v4.AddArg(v5)
		v6 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v6.AddArg(y)
		v4.AddArg(v6)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh32x16_0 lowers the signed 32-bit right shift with a
// 16-bit count: the operand is sign-extended and the count is ORed with
// NEGV(SGTU(count, 63)), saturating oversized counts so SRAV fills with the
// sign bit.
func rewriteValueMIPS64_OpRsh32x16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh32x16 <t> x y)
	// cond:
	// result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v5.AuxInt = 63
		v3.AddArg(v5)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v6 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v6.AddArg(y)
		v1.AddArg(v6)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh32x32_0 lowers the signed 32-bit right shift with a
// 32-bit count: same saturate-then-SRAV pattern as Rsh32x16, count widened
// via ZeroExt32to64.
func rewriteValueMIPS64_OpRsh32x32_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh32x32 <t> x y)
	// cond:
	// result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v5.AuxInt = 63
		v3.AddArg(v5)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v6 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v6.AddArg(y)
		v1.AddArg(v6)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh32x64_0 lowers the signed 32-bit right shift with a
// 64-bit count: count used as-is; OR with NEGV(SGTU(count, 63)) clamps
// oversized counts before the SRAV arithmetic shift.
func rewriteValueMIPS64_OpRsh32x64_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh32x64 <t> x y)
	// cond:
	// result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v3.AddArg(y)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v4.AuxInt = 63
		v3.AddArg(v4)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh32x8_0 lowers the signed 32-bit right shift with an
// 8-bit count: same saturate-then-SRAV pattern as the other Rsh32x* variants,
// count widened via ZeroExt8to64.
func rewriteValueMIPS64_OpRsh32x8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh32x8 <t> x y)
	// cond:
	// result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64  y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64  y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v5.AuxInt = 63
		v3.AddArg(v5)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v6 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v6.AddArg(y)
		v1.AddArg(v6)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh64Ux16_0 lowers the unsigned 64-bit right shift with
// a 16-bit count: the operand needs no widening; AND with NEGV(SGTU(64, count))
// zeroes the result for counts >= 64.
func rewriteValueMIPS64_OpRsh64Ux16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh64Ux16 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> x (ZeroExt16to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh64Ux32_0 lowers the unsigned 64-bit right shift with
// a 32-bit count: same AND/NEGV/SGTU out-of-range masking, count widened via
// ZeroExt32to64.
func rewriteValueMIPS64_OpRsh64Ux32_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh64Ux32 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> x (ZeroExt32to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh64Ux64_0 lowers the unsigned 64-bit right shift with
// a 64-bit count: no widening at all; AND with NEGV(SGTU(64, count)) forces
// counts >= 64 to produce 0.
func rewriteValueMIPS64_OpRsh64Ux64_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh64Ux64 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SRLV <t> x y))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v3.AddArg(x)
		v3.AddArg(y)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueMIPS64_OpRsh64Ux8_0 lowers the unsigned 64-bit right shift with
// an 8-bit count: same AND/NEGV/SGTU out-of-range masking, count widened via
// ZeroExt8to64.
func rewriteValueMIPS64_OpRsh64Ux8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh64Ux8 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64  y))) (SRLV <t> x (ZeroExt8to64  y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh64x16_0 lowers the signed 64-bit right shift with a
// 16-bit count: the operand needs no extension; the count is ORed with
// NEGV(SGTU(count, 63)), saturating oversized counts so SRAV fills with the
// sign bit.
func rewriteValueMIPS64_OpRsh64x16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh64x16 <t> x y)
	// cond:
	// result: (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		v.AddArg(x)
		v0 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v4.AuxInt = 63
		v2.AddArg(v4)
		v1.AddArg(v2)
		v0.AddArg(v1)
		v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v5.AddArg(y)
		v0.AddArg(v5)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpRsh64x32_0 lowers the signed 64-bit right shift with a
// 32-bit count: same saturate-then-SRAV pattern as Rsh64x16, count widened
// via ZeroExt32to64.
func rewriteValueMIPS64_OpRsh64x32_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh64x32 <t> x y)
	// cond:
	// result: (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		v.AddArg(x)
		v0 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v4.AuxInt = 63
		v2.AddArg(v4)
		v1.AddArg(v2)
		v0.AddArg(v1)
		v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v5.AddArg(y)
		v0.AddArg(v5)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpRsh64x64_0 lowers Rsh64x64 (arithmetic right shift of
// a 64-bit value by an unsigned 64-bit count).  No extension of y is needed;
// counts above 63 are saturated to >= 63 via NEGV(SGTU y 63) | y.
func rewriteValueMIPS64_OpRsh64x64_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh64x64 <t> x y)
	// cond:
	// result: (SRAV x (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		v.AddArg(x)
		v0 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2.AddArg(y)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v3.AuxInt = 63
		v2.AddArg(v3)
		v1.AddArg(v2)
		v0.AddArg(v1)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpRsh64x8_0 lowers Rsh64x8 (arithmetic right shift of a
// 64-bit value by an unsigned 8-bit count).  Same saturation scheme as the
// other Rsh64x* variants, with the count zero-extended from 8 bits.
func rewriteValueMIPS64_OpRsh64x8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh64x8 <t> x y)
	// cond:
	// result: (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt8to64  y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64  y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		v.AddArg(x)
		v0 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v4.AuxInt = 63
		v2.AddArg(v4)
		v1.AddArg(v2)
		v0.AddArg(v1)
		v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v5.AddArg(y)
		v0.AddArg(v5)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpRsh8Ux16_0 lowers Rsh8Ux16 (logical right shift of an
// 8-bit value by an unsigned 16-bit count).  x is zero-extended before the
// SRLV; the AND with NEGV(SGTU 64 zext(y)) masks the result to zero when the
// count is >= 64 (SGTU yields 0 then, and NEGV of 0 is 0).
func rewriteValueMIPS64_OpRsh8Ux16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh8Ux16 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> (ZeroExt8to64 x) (ZeroExt16to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v5.AddArg(x)
		v4.AddArg(v5)
		v6 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v6.AddArg(y)
		v4.AddArg(v6)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh8Ux32_0 lowers Rsh8Ux32 (logical right shift of an
// 8-bit value by an unsigned 32-bit count).  Same mask-to-zero scheme as the
// 16-bit variant, with the count zero-extended from 32 bits.
func rewriteValueMIPS64_OpRsh8Ux32_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh8Ux32 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> (ZeroExt8to64 x) (ZeroExt32to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v5.AddArg(x)
		v4.AddArg(v5)
		v6 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v6.AddArg(y)
		v4.AddArg(v6)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh8Ux64_0 lowers Rsh8Ux64 (logical right shift of an
// 8-bit value by an unsigned 64-bit count).  The count needs no extension;
// the AND/NEGV/SGTU mask zeroes the result for counts >= 64.
func rewriteValueMIPS64_OpRsh8Ux64_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh8Ux64 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SRLV <t> (ZeroExt8to64 x) y))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(x)
		v3.AddArg(v4)
		v3.AddArg(y)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueMIPS64_OpRsh8Ux8_0 lowers Rsh8Ux8 (logical right shift of an
// 8-bit value by an unsigned 8-bit count).  Same mask-to-zero scheme as the
// other Rsh8Ux* variants, with the count zero-extended from 8 bits.
func rewriteValueMIPS64_OpRsh8Ux8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh8Ux8 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64  y))) (SRLV <t> (ZeroExt8to64 x) (ZeroExt8to64  y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v5.AddArg(x)
		v4.AddArg(v5)
		v6 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v6.AddArg(y)
		v4.AddArg(v6)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh8x16_0 lowers Rsh8x16 (arithmetic right shift of an
// 8-bit value by an unsigned 16-bit count).  x is sign-extended to 64 bits
// first; the count is saturated to >= 63 via NEGV(SGTU zext(y) 63) | zext(y)
// so large counts fill the result with the sign bit.
func rewriteValueMIPS64_OpRsh8x16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh8x16 <t> x y)
	// cond:
	// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v5.AuxInt = 63
		v3.AddArg(v5)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v6 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v6.AddArg(y)
		v1.AddArg(v6)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh8x32_0 lowers Rsh8x32 (arithmetic right shift of an
// 8-bit value by an unsigned 32-bit count).  Same scheme as Rsh8x16, with the
// count zero-extended from 32 bits.
func rewriteValueMIPS64_OpRsh8x32_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh8x32 <t> x y)
	// cond:
	// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v5.AuxInt = 63
		v3.AddArg(v5)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v6 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v6.AddArg(y)
		v1.AddArg(v6)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh8x64_0 lowers Rsh8x64 (arithmetic right shift of an
// 8-bit value by an unsigned 64-bit count).  The count needs no extension;
// it is saturated to >= 63 via NEGV(SGTU y 63) | y.
func rewriteValueMIPS64_OpRsh8x64_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh8x64 <t> x y)
	// cond:
	// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v3.AddArg(y)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v4.AuxInt = 63
		v3.AddArg(v4)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh8x8_0 lowers Rsh8x8 (arithmetic right shift of an
// 8-bit value by an unsigned 8-bit count).  Same scheme as the other Rsh8x*
// variants, with the count zero-extended from 8 bits.
func rewriteValueMIPS64_OpRsh8x8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh8x8 <t> x y)
	// cond:
	// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64  y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64  y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v5.AuxInt = 63
		v3.AddArg(v5)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v6 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v6.AddArg(y)
		v1.AddArg(v6)
		v.AddArg(v1)
		return true
	}
}
  8914  func rewriteValueMIPS64_OpSelect0_0(v *Value) bool {
  8915  	// match: (Select0 (DIVVU _ (MOVVconst [1])))
  8916  	// cond:
  8917  	// result: (MOVVconst [0])
  8918  	for {
  8919  		v_0 := v.Args[0]
  8920  		if v_0.Op != OpMIPS64DIVVU {
  8921  			break
  8922  		}
  8923  		_ = v_0.Args[1]
  8924  		v_0_1 := v_0.Args[1]
  8925  		if v_0_1.Op != OpMIPS64MOVVconst {
  8926  			break
  8927  		}
  8928  		if v_0_1.AuxInt != 1 {
  8929  			break
  8930  		}
  8931  		v.reset(OpMIPS64MOVVconst)
  8932  		v.AuxInt = 0
  8933  		return true
  8934  	}
  8935  	// match: (Select0 (DIVVU x (MOVVconst [c])))
  8936  	// cond: isPowerOfTwo(c)
  8937  	// result: (ANDconst [c-1] x)
  8938  	for {
  8939  		v_0 := v.Args[0]
  8940  		if v_0.Op != OpMIPS64DIVVU {
  8941  			break
  8942  		}
  8943  		_ = v_0.Args[1]
  8944  		x := v_0.Args[0]
  8945  		v_0_1 := v_0.Args[1]
  8946  		if v_0_1.Op != OpMIPS64MOVVconst {
  8947  			break
  8948  		}
  8949  		c := v_0_1.AuxInt
  8950  		if !(isPowerOfTwo(c)) {
  8951  			break
  8952  		}
  8953  		v.reset(OpMIPS64ANDconst)
  8954  		v.AuxInt = c - 1
  8955  		v.AddArg(x)
  8956  		return true
  8957  	}
  8958  	// match: (Select0 (DIVV (MOVVconst [c]) (MOVVconst [d])))
  8959  	// cond:
  8960  	// result: (MOVVconst [int64(c)%int64(d)])
  8961  	for {
  8962  		v_0 := v.Args[0]
  8963  		if v_0.Op != OpMIPS64DIVV {
  8964  			break
  8965  		}
  8966  		_ = v_0.Args[1]
  8967  		v_0_0 := v_0.Args[0]
  8968  		if v_0_0.Op != OpMIPS64MOVVconst {
  8969  			break
  8970  		}
  8971  		c := v_0_0.AuxInt
  8972  		v_0_1 := v_0.Args[1]
  8973  		if v_0_1.Op != OpMIPS64MOVVconst {
  8974  			break
  8975  		}
  8976  		d := v_0_1.AuxInt
  8977  		v.reset(OpMIPS64MOVVconst)
  8978  		v.AuxInt = int64(c) % int64(d)
  8979  		return true
  8980  	}
  8981  	// match: (Select0 (DIVVU (MOVVconst [c]) (MOVVconst [d])))
  8982  	// cond:
  8983  	// result: (MOVVconst [int64(uint64(c)%uint64(d))])
  8984  	for {
  8985  		v_0 := v.Args[0]
  8986  		if v_0.Op != OpMIPS64DIVVU {
  8987  			break
  8988  		}
  8989  		_ = v_0.Args[1]
  8990  		v_0_0 := v_0.Args[0]
  8991  		if v_0_0.Op != OpMIPS64MOVVconst {
  8992  			break
  8993  		}
  8994  		c := v_0_0.AuxInt
  8995  		v_0_1 := v_0.Args[1]
  8996  		if v_0_1.Op != OpMIPS64MOVVconst {
  8997  			break
  8998  		}
  8999  		d := v_0_1.AuxInt
  9000  		v.reset(OpMIPS64MOVVconst)
  9001  		v.AuxInt = int64(uint64(c) % uint64(d))
  9002  		return true
  9003  	}
  9004  	return false
  9005  }
// rewriteValueMIPS64_OpSelect1_0 is the first batch of Select1 rules.  On
// MIPS64, Select1 of MULVU is the low 64 bits of the product and Select1 of
// DIVV/DIVVU is the quotient; these rules strength-reduce multiplication by
// -1, 0, 1, and powers of two (in both operand orders).
//
// NOTE(review): the generator emitted the (MULVU ... [-1]) pair of rules a
// second time at the end of this function; anything they would match is
// already rewritten by the first pair, so the later copies appear
// unreachable.  Harmless, but fix it in gen/MIPS64.rules, not here.
func rewriteValueMIPS64_OpSelect1_0(v *Value) bool {
	// match: (Select1 (MULVU x (MOVVconst [-1])))
	// cond:
	// result: (NEGV x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MULVU {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		if v_0_1.AuxInt != -1 {
			break
		}
		v.reset(OpMIPS64NEGV)
		v.AddArg(x)
		return true
	}
	// match: (Select1 (MULVU (MOVVconst [-1]) x))
	// cond:
	// result: (NEGV x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MULVU {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpMIPS64MOVVconst {
			break
		}
		if v_0_0.AuxInt != -1 {
			break
		}
		x := v_0.Args[1]
		v.reset(OpMIPS64NEGV)
		v.AddArg(x)
		return true
	}
	// match: (Select1 (MULVU _ (MOVVconst [0])))
	// cond:
	// result: (MOVVconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MULVU {
			break
		}
		_ = v_0.Args[1]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		if v_0_1.AuxInt != 0 {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	// match: (Select1 (MULVU (MOVVconst [0]) _))
	// cond:
	// result: (MOVVconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MULVU {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpMIPS64MOVVconst {
			break
		}
		if v_0_0.AuxInt != 0 {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	// match: (Select1 (MULVU x (MOVVconst [1])))
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MULVU {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		if v_0_1.AuxInt != 1 {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Select1 (MULVU (MOVVconst [1]) x))
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MULVU {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpMIPS64MOVVconst {
			break
		}
		if v_0_0.AuxInt != 1 {
			break
		}
		x := v_0.Args[1]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Select1 (MULVU x (MOVVconst [c])))
	// cond: isPowerOfTwo(c)
	// result: (SLLVconst [log2(c)] x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MULVU {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0_1.AuxInt
		if !(isPowerOfTwo(c)) {
			break
		}
		v.reset(OpMIPS64SLLVconst)
		v.AuxInt = log2(c)
		v.AddArg(x)
		return true
	}
	// match: (Select1 (MULVU (MOVVconst [c]) x))
	// cond: isPowerOfTwo(c)
	// result: (SLLVconst [log2(c)] x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MULVU {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0_0.AuxInt
		x := v_0.Args[1]
		if !(isPowerOfTwo(c)) {
			break
		}
		v.reset(OpMIPS64SLLVconst)
		v.AuxInt = log2(c)
		v.AddArg(x)
		return true
	}
	// match: (Select1 (MULVU (MOVVconst [-1]) x))
	// cond:
	// result: (NEGV x)
	// NOTE(review): duplicate of the second rule above; unreachable.
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MULVU {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpMIPS64MOVVconst {
			break
		}
		if v_0_0.AuxInt != -1 {
			break
		}
		x := v_0.Args[1]
		v.reset(OpMIPS64NEGV)
		v.AddArg(x)
		return true
	}
	// match: (Select1 (MULVU x (MOVVconst [-1])))
	// cond:
	// result: (NEGV x)
	// NOTE(review): duplicate of the first rule above; unreachable.
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MULVU {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		if v_0_1.AuxInt != -1 {
			break
		}
		v.reset(OpMIPS64NEGV)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpSelect1_10 is the second batch of Select1 rules,
// tried after rewriteValueMIPS64_OpSelect1_0 fails.  It strength-reduces
// unsigned division by 1 and by powers of two (SRLVconst) and constant-folds
// MULVU products of two constants.
//
// NOTE(review): the leading MULVU rules (0, 1, power-of-two, in both operand
// orders) repeat rules already present in rewriteValueMIPS64_OpSelect1_0 and
// so appear unreachable; harmless generator redundancy.
func rewriteValueMIPS64_OpSelect1_10(v *Value) bool {
	// match: (Select1 (MULVU (MOVVconst [0]) _))
	// cond:
	// result: (MOVVconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MULVU {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpMIPS64MOVVconst {
			break
		}
		if v_0_0.AuxInt != 0 {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	// match: (Select1 (MULVU _ (MOVVconst [0])))
	// cond:
	// result: (MOVVconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MULVU {
			break
		}
		_ = v_0.Args[1]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		if v_0_1.AuxInt != 0 {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	// match: (Select1 (MULVU (MOVVconst [1]) x))
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MULVU {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpMIPS64MOVVconst {
			break
		}
		if v_0_0.AuxInt != 1 {
			break
		}
		x := v_0.Args[1]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Select1 (MULVU x (MOVVconst [1])))
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MULVU {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		if v_0_1.AuxInt != 1 {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Select1 (MULVU (MOVVconst [c]) x))
	// cond: isPowerOfTwo(c)
	// result: (SLLVconst [log2(c)] x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MULVU {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0_0.AuxInt
		x := v_0.Args[1]
		if !(isPowerOfTwo(c)) {
			break
		}
		v.reset(OpMIPS64SLLVconst)
		v.AuxInt = log2(c)
		v.AddArg(x)
		return true
	}
	// match: (Select1 (MULVU x (MOVVconst [c])))
	// cond: isPowerOfTwo(c)
	// result: (SLLVconst [log2(c)] x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MULVU {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0_1.AuxInt
		if !(isPowerOfTwo(c)) {
			break
		}
		v.reset(OpMIPS64SLLVconst)
		v.AuxInt = log2(c)
		v.AddArg(x)
		return true
	}
	// match: (Select1 (DIVVU x (MOVVconst [1])))
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64DIVVU {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		if v_0_1.AuxInt != 1 {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Select1 (DIVVU x (MOVVconst [c])))
	// cond: isPowerOfTwo(c)
	// result: (SRLVconst [log2(c)] x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64DIVVU {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0_1.AuxInt
		if !(isPowerOfTwo(c)) {
			break
		}
		v.reset(OpMIPS64SRLVconst)
		v.AuxInt = log2(c)
		v.AddArg(x)
		return true
	}
	// match: (Select1 (MULVU (MOVVconst [c]) (MOVVconst [d])))
	// cond:
	// result: (MOVVconst [c*d])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MULVU {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0_0.AuxInt
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0_1.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = c * d
		return true
	}
	// match: (Select1 (MULVU (MOVVconst [d]) (MOVVconst [c])))
	// cond:
	// result: (MOVVconst [c*d])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MULVU {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0_0.AuxInt
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0_1.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = c * d
		return true
	}
	return false
}
  9447  func rewriteValueMIPS64_OpSelect1_20(v *Value) bool {
  9448  	// match: (Select1 (DIVV (MOVVconst [c]) (MOVVconst [d])))
  9449  	// cond:
  9450  	// result: (MOVVconst [int64(c)/int64(d)])
  9451  	for {
  9452  		v_0 := v.Args[0]
  9453  		if v_0.Op != OpMIPS64DIVV {
  9454  			break
  9455  		}
  9456  		_ = v_0.Args[1]
  9457  		v_0_0 := v_0.Args[0]
  9458  		if v_0_0.Op != OpMIPS64MOVVconst {
  9459  			break
  9460  		}
  9461  		c := v_0_0.AuxInt
  9462  		v_0_1 := v_0.Args[1]
  9463  		if v_0_1.Op != OpMIPS64MOVVconst {
  9464  			break
  9465  		}
  9466  		d := v_0_1.AuxInt
  9467  		v.reset(OpMIPS64MOVVconst)
  9468  		v.AuxInt = int64(c) / int64(d)
  9469  		return true
  9470  	}
  9471  	// match: (Select1 (DIVVU (MOVVconst [c]) (MOVVconst [d])))
  9472  	// cond:
  9473  	// result: (MOVVconst [int64(uint64(c)/uint64(d))])
  9474  	for {
  9475  		v_0 := v.Args[0]
  9476  		if v_0.Op != OpMIPS64DIVVU {
  9477  			break
  9478  		}
  9479  		_ = v_0.Args[1]
  9480  		v_0_0 := v_0.Args[0]
  9481  		if v_0_0.Op != OpMIPS64MOVVconst {
  9482  			break
  9483  		}
  9484  		c := v_0_0.AuxInt
  9485  		v_0_1 := v_0.Args[1]
  9486  		if v_0_1.Op != OpMIPS64MOVVconst {
  9487  			break
  9488  		}
  9489  		d := v_0_1.AuxInt
  9490  		v.reset(OpMIPS64MOVVconst)
  9491  		v.AuxInt = int64(uint64(c) / uint64(d))
  9492  		return true
  9493  	}
  9494  	return false
  9495  }
// rewriteValueMIPS64_OpSignExt16to32_0 lowers SignExt16to32 to the MIPS64
// MOVHreg op (sign-extending 16-bit register move); always fires.
func rewriteValueMIPS64_OpSignExt16to32_0(v *Value) bool {
	// match: (SignExt16to32 x)
	// cond:
	// result: (MOVHreg x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64MOVHreg)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpSignExt16to64_0 lowers SignExt16to64 to MOVHreg;
// always fires.
func rewriteValueMIPS64_OpSignExt16to64_0(v *Value) bool {
	// match: (SignExt16to64 x)
	// cond:
	// result: (MOVHreg x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64MOVHreg)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpSignExt32to64_0 lowers SignExt32to64 to MOVWreg
// (sign-extending 32-bit register move); always fires.
func rewriteValueMIPS64_OpSignExt32to64_0(v *Value) bool {
	// match: (SignExt32to64 x)
	// cond:
	// result: (MOVWreg x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64MOVWreg)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpSignExt8to16_0 lowers SignExt8to16 to MOVBreg
// (sign-extending 8-bit register move); always fires.
func rewriteValueMIPS64_OpSignExt8to16_0(v *Value) bool {
	// match: (SignExt8to16 x)
	// cond:
	// result: (MOVBreg x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64MOVBreg)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpSignExt8to32_0 lowers SignExt8to32 to MOVBreg;
// always fires.
func rewriteValueMIPS64_OpSignExt8to32_0(v *Value) bool {
	// match: (SignExt8to32 x)
	// cond:
	// result: (MOVBreg x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64MOVBreg)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpSignExt8to64_0 lowers SignExt8to64 to MOVBreg;
// always fires.
func rewriteValueMIPS64_OpSignExt8to64_0(v *Value) bool {
	// match: (SignExt8to64 x)
	// cond:
	// result: (MOVBreg x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64MOVBreg)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpSlicemask_0 lowers Slicemask: the arithmetic shift of
// -x by 63 yields all ones when x > 0 and all zeros when x == 0 (assuming x
// is a non-negative length, which is Slicemask's documented input domain).
func rewriteValueMIPS64_OpSlicemask_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Slicemask <t> x)
	// cond:
	// result: (SRAVconst (NEGV <t> x) [63])
	for {
		t := v.Type
		x := v.Args[0]
		v.reset(OpMIPS64SRAVconst)
		v.AuxInt = 63
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpStaticCall_0 lowers a generic StaticCall to the
// MIPS64 CALLstatic op, carrying over the argument width (AuxInt), the call
// target (Aux), and the memory argument unchanged; always fires.
func rewriteValueMIPS64_OpStaticCall_0(v *Value) bool {
	// match: (StaticCall [argwid] {target} mem)
	// cond:
	// result: (CALLstatic [argwid] {target} mem)
	for {
		argwid := v.AuxInt
		target := v.Aux
		mem := v.Args[0]
		v.reset(OpMIPS64CALLstatic)
		v.AuxInt = argwid
		v.Aux = target
		v.AddArg(mem)
		return true
	}
}
// rewriteValueMIPS64_OpStore_0 lowers a generic Store to the MIPS64 store op
// selected by the stored type's size, distinguishing integer from float
// stores at sizes 4 and 8: MOVBstore (1), MOVHstore (2), MOVWstore/MOVFstore
// (4, int/float), MOVVstore/MOVDstore (8, int/float).  Returns false for any
// other size, leaving the Store for a later pass to reject.
func rewriteValueMIPS64_OpStore_0(v *Value) bool {
	// match: (Store {t} ptr val mem)
	// cond: t.(*types.Type).Size() == 1
	// result: (MOVBstore ptr val mem)
	for {
		t := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(t.(*types.Type).Size() == 1) {
			break
		}
		v.reset(OpMIPS64MOVBstore)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.(*types.Type).Size() == 2
	// result: (MOVHstore ptr val mem)
	for {
		t := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(t.(*types.Type).Size() == 2) {
			break
		}
		v.reset(OpMIPS64MOVHstore)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.(*types.Type).Size() == 4 && !is32BitFloat(val.Type)
	// result: (MOVWstore ptr val mem)
	for {
		t := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(t.(*types.Type).Size() == 4 && !is32BitFloat(val.Type)) {
			break
		}
		v.reset(OpMIPS64MOVWstore)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.(*types.Type).Size() == 8 && !is64BitFloat(val.Type)
	// result: (MOVVstore ptr val mem)
	for {
		t := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(t.(*types.Type).Size() == 8 && !is64BitFloat(val.Type)) {
			break
		}
		v.reset(OpMIPS64MOVVstore)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.(*types.Type).Size() == 4 && is32BitFloat(val.Type)
	// result: (MOVFstore ptr val mem)
	for {
		t := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(t.(*types.Type).Size() == 4 && is32BitFloat(val.Type)) {
			break
		}
		v.reset(OpMIPS64MOVFstore)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.(*types.Type).Size() == 8 && is64BitFloat(val.Type)
	// result: (MOVDstore ptr val mem)
	for {
		t := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(t.(*types.Type).Size() == 8 && is64BitFloat(val.Type)) {
			break
		}
		v.reset(OpMIPS64MOVDstore)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	return false
}
  9705  func rewriteValueMIPS64_OpSub16_0(v *Value) bool {
  9706  	// match: (Sub16 x y)
  9707  	// cond:
  9708  	// result: (SUBV x y)
  9709  	for {
  9710  		_ = v.Args[1]
  9711  		x := v.Args[0]
  9712  		y := v.Args[1]
  9713  		v.reset(OpMIPS64SUBV)
  9714  		v.AddArg(x)
  9715  		v.AddArg(y)
  9716  		return true
  9717  	}
  9718  }
  9719  func rewriteValueMIPS64_OpSub32_0(v *Value) bool {
  9720  	// match: (Sub32 x y)
  9721  	// cond:
  9722  	// result: (SUBV x y)
  9723  	for {
  9724  		_ = v.Args[1]
  9725  		x := v.Args[0]
  9726  		y := v.Args[1]
  9727  		v.reset(OpMIPS64SUBV)
  9728  		v.AddArg(x)
  9729  		v.AddArg(y)
  9730  		return true
  9731  	}
  9732  }
  9733  func rewriteValueMIPS64_OpSub32F_0(v *Value) bool {
  9734  	// match: (Sub32F x y)
  9735  	// cond:
  9736  	// result: (SUBF x y)
  9737  	for {
  9738  		_ = v.Args[1]
  9739  		x := v.Args[0]
  9740  		y := v.Args[1]
  9741  		v.reset(OpMIPS64SUBF)
  9742  		v.AddArg(x)
  9743  		v.AddArg(y)
  9744  		return true
  9745  	}
  9746  }
  9747  func rewriteValueMIPS64_OpSub64_0(v *Value) bool {
  9748  	// match: (Sub64 x y)
  9749  	// cond:
  9750  	// result: (SUBV x y)
  9751  	for {
  9752  		_ = v.Args[1]
  9753  		x := v.Args[0]
  9754  		y := v.Args[1]
  9755  		v.reset(OpMIPS64SUBV)
  9756  		v.AddArg(x)
  9757  		v.AddArg(y)
  9758  		return true
  9759  	}
  9760  }
  9761  func rewriteValueMIPS64_OpSub64F_0(v *Value) bool {
  9762  	// match: (Sub64F x y)
  9763  	// cond:
  9764  	// result: (SUBD x y)
  9765  	for {
  9766  		_ = v.Args[1]
  9767  		x := v.Args[0]
  9768  		y := v.Args[1]
  9769  		v.reset(OpMIPS64SUBD)
  9770  		v.AddArg(x)
  9771  		v.AddArg(y)
  9772  		return true
  9773  	}
  9774  }
  9775  func rewriteValueMIPS64_OpSub8_0(v *Value) bool {
  9776  	// match: (Sub8 x y)
  9777  	// cond:
  9778  	// result: (SUBV x y)
  9779  	for {
  9780  		_ = v.Args[1]
  9781  		x := v.Args[0]
  9782  		y := v.Args[1]
  9783  		v.reset(OpMIPS64SUBV)
  9784  		v.AddArg(x)
  9785  		v.AddArg(y)
  9786  		return true
  9787  	}
  9788  }
  9789  func rewriteValueMIPS64_OpSubPtr_0(v *Value) bool {
  9790  	// match: (SubPtr x y)
  9791  	// cond:
  9792  	// result: (SUBV x y)
  9793  	for {
  9794  		_ = v.Args[1]
  9795  		x := v.Args[0]
  9796  		y := v.Args[1]
  9797  		v.reset(OpMIPS64SUBV)
  9798  		v.AddArg(x)
  9799  		v.AddArg(y)
  9800  		return true
  9801  	}
  9802  }
  9803  func rewriteValueMIPS64_OpTrunc16to8_0(v *Value) bool {
  9804  	// match: (Trunc16to8 x)
  9805  	// cond:
  9806  	// result: x
  9807  	for {
  9808  		x := v.Args[0]
  9809  		v.reset(OpCopy)
  9810  		v.Type = x.Type
  9811  		v.AddArg(x)
  9812  		return true
  9813  	}
  9814  }
  9815  func rewriteValueMIPS64_OpTrunc32to16_0(v *Value) bool {
  9816  	// match: (Trunc32to16 x)
  9817  	// cond:
  9818  	// result: x
  9819  	for {
  9820  		x := v.Args[0]
  9821  		v.reset(OpCopy)
  9822  		v.Type = x.Type
  9823  		v.AddArg(x)
  9824  		return true
  9825  	}
  9826  }
  9827  func rewriteValueMIPS64_OpTrunc32to8_0(v *Value) bool {
  9828  	// match: (Trunc32to8 x)
  9829  	// cond:
  9830  	// result: x
  9831  	for {
  9832  		x := v.Args[0]
  9833  		v.reset(OpCopy)
  9834  		v.Type = x.Type
  9835  		v.AddArg(x)
  9836  		return true
  9837  	}
  9838  }
  9839  func rewriteValueMIPS64_OpTrunc64to16_0(v *Value) bool {
  9840  	// match: (Trunc64to16 x)
  9841  	// cond:
  9842  	// result: x
  9843  	for {
  9844  		x := v.Args[0]
  9845  		v.reset(OpCopy)
  9846  		v.Type = x.Type
  9847  		v.AddArg(x)
  9848  		return true
  9849  	}
  9850  }
  9851  func rewriteValueMIPS64_OpTrunc64to32_0(v *Value) bool {
  9852  	// match: (Trunc64to32 x)
  9853  	// cond:
  9854  	// result: x
  9855  	for {
  9856  		x := v.Args[0]
  9857  		v.reset(OpCopy)
  9858  		v.Type = x.Type
  9859  		v.AddArg(x)
  9860  		return true
  9861  	}
  9862  }
  9863  func rewriteValueMIPS64_OpTrunc64to8_0(v *Value) bool {
  9864  	// match: (Trunc64to8 x)
  9865  	// cond:
  9866  	// result: x
  9867  	for {
  9868  		x := v.Args[0]
  9869  		v.reset(OpCopy)
  9870  		v.Type = x.Type
  9871  		v.AddArg(x)
  9872  		return true
  9873  	}
  9874  }
  9875  func rewriteValueMIPS64_OpXor16_0(v *Value) bool {
  9876  	// match: (Xor16 x y)
  9877  	// cond:
  9878  	// result: (XOR x y)
  9879  	for {
  9880  		_ = v.Args[1]
  9881  		x := v.Args[0]
  9882  		y := v.Args[1]
  9883  		v.reset(OpMIPS64XOR)
  9884  		v.AddArg(x)
  9885  		v.AddArg(y)
  9886  		return true
  9887  	}
  9888  }
  9889  func rewriteValueMIPS64_OpXor32_0(v *Value) bool {
  9890  	// match: (Xor32 x y)
  9891  	// cond:
  9892  	// result: (XOR x y)
  9893  	for {
  9894  		_ = v.Args[1]
  9895  		x := v.Args[0]
  9896  		y := v.Args[1]
  9897  		v.reset(OpMIPS64XOR)
  9898  		v.AddArg(x)
  9899  		v.AddArg(y)
  9900  		return true
  9901  	}
  9902  }
  9903  func rewriteValueMIPS64_OpXor64_0(v *Value) bool {
  9904  	// match: (Xor64 x y)
  9905  	// cond:
  9906  	// result: (XOR x y)
  9907  	for {
  9908  		_ = v.Args[1]
  9909  		x := v.Args[0]
  9910  		y := v.Args[1]
  9911  		v.reset(OpMIPS64XOR)
  9912  		v.AddArg(x)
  9913  		v.AddArg(y)
  9914  		return true
  9915  	}
  9916  }
  9917  func rewriteValueMIPS64_OpXor8_0(v *Value) bool {
  9918  	// match: (Xor8 x y)
  9919  	// cond:
  9920  	// result: (XOR x y)
  9921  	for {
  9922  		_ = v.Args[1]
  9923  		x := v.Args[0]
  9924  		y := v.Args[1]
  9925  		v.reset(OpMIPS64XOR)
  9926  		v.AddArg(x)
  9927  		v.AddArg(y)
  9928  		return true
  9929  	}
  9930  }
// rewriteValueMIPS64_OpZero_0 lowers generic (Zero [size] {type} ptr mem) ops
// for sizes 0, 1, 2, 4 and 8 bytes into MIPS64 store instructions, preferring
// the widest store the type's alignment allows and falling back to chains of
// narrower stores otherwise. Each `for { ... }` block is one rewrite rule
// (generated from gen/MIPS64.rules); rules are tried in order and the first
// match rewrites v in place and returns true. Returns false when no rule in
// this group applies (rewriteValueMIPS64_OpZero_10 holds the remaining rules).
func rewriteValueMIPS64_OpZero_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Zero [0] _ mem)
	// cond:
	// result: mem
	for {
		if v.AuxInt != 0 {
			break
		}
		_ = v.Args[1]
		mem := v.Args[1]
		// Zero of width 0 stores nothing; reduce to the incoming memory.
		v.reset(OpCopy)
		v.Type = mem.Type
		v.AddArg(mem)
		return true
	}
	// match: (Zero [1] ptr mem)
	// cond:
	// result: (MOVBstore ptr (MOVVconst [0]) mem)
	for {
		if v.AuxInt != 1 {
			break
		}
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		// Args must be captured before reset, which clears v's argument list.
		v.reset(OpMIPS64MOVBstore)
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	// match: (Zero [2] {t} ptr mem)
	// cond: t.(*types.Type).Alignment()%2 == 0
	// result: (MOVHstore ptr (MOVVconst [0]) mem)
	for {
		if v.AuxInt != 2 {
			break
		}
		t := v.Aux
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(t.(*types.Type).Alignment()%2 == 0) {
			break
		}
		v.reset(OpMIPS64MOVHstore)
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	// match: (Zero [2] ptr mem)
	// cond:
	// result: (MOVBstore [1] ptr (MOVVconst [0]) 		(MOVBstore [0] ptr (MOVVconst [0]) mem))
	for {
		if v.AuxInt != 2 {
			break
		}
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		// Under-aligned 2-byte zero: two byte stores threaded through memory,
		// with v reused as the outermost store of the chain.
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = 1
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
		v1.AuxInt = 0
		v1.AddArg(ptr)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 0
		v1.AddArg(v2)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	// match: (Zero [4] {t} ptr mem)
	// cond: t.(*types.Type).Alignment()%4 == 0
	// result: (MOVWstore ptr (MOVVconst [0]) mem)
	for {
		if v.AuxInt != 4 {
			break
		}
		t := v.Aux
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(t.(*types.Type).Alignment()%4 == 0) {
			break
		}
		v.reset(OpMIPS64MOVWstore)
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	// match: (Zero [4] {t} ptr mem)
	// cond: t.(*types.Type).Alignment()%2 == 0
	// result: (MOVHstore [2] ptr (MOVVconst [0]) 		(MOVHstore [0] ptr (MOVVconst [0]) mem))
	for {
		if v.AuxInt != 4 {
			break
		}
		t := v.Aux
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(t.(*types.Type).Alignment()%2 == 0) {
			break
		}
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = 2
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
		v1.AuxInt = 0
		v1.AddArg(ptr)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 0
		v1.AddArg(v2)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	// match: (Zero [4] ptr mem)
	// cond:
	// result: (MOVBstore [3] ptr (MOVVconst [0]) 		(MOVBstore [2] ptr (MOVVconst [0]) 			(MOVBstore [1] ptr (MOVVconst [0]) 				(MOVBstore [0] ptr (MOVVconst [0]) mem))))
	for {
		if v.AuxInt != 4 {
			break
		}
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		// Under-aligned 4-byte zero: four byte stores at offsets 3,2,1,0.
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = 3
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
		v1.AuxInt = 2
		v1.AddArg(ptr)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 0
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
		v3.AuxInt = 1
		v3.AddArg(ptr)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v4.AuxInt = 0
		v3.AddArg(v4)
		v5 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
		v5.AuxInt = 0
		v5.AddArg(ptr)
		v6 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v6.AuxInt = 0
		v5.AddArg(v6)
		v5.AddArg(mem)
		v3.AddArg(v5)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
	// match: (Zero [8] {t} ptr mem)
	// cond: t.(*types.Type).Alignment()%8 == 0
	// result: (MOVVstore ptr (MOVVconst [0]) mem)
	for {
		if v.AuxInt != 8 {
			break
		}
		t := v.Aux
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(t.(*types.Type).Alignment()%8 == 0) {
			break
		}
		v.reset(OpMIPS64MOVVstore)
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	// match: (Zero [8] {t} ptr mem)
	// cond: t.(*types.Type).Alignment()%4 == 0
	// result: (MOVWstore [4] ptr (MOVVconst [0]) 		(MOVWstore [0] ptr (MOVVconst [0]) mem))
	for {
		if v.AuxInt != 8 {
			break
		}
		t := v.Aux
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(t.(*types.Type).Alignment()%4 == 0) {
			break
		}
		v.reset(OpMIPS64MOVWstore)
		v.AuxInt = 4
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem)
		v1.AuxInt = 0
		v1.AddArg(ptr)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 0
		v1.AddArg(v2)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	// match: (Zero [8] {t} ptr mem)
	// cond: t.(*types.Type).Alignment()%2 == 0
	// result: (MOVHstore [6] ptr (MOVVconst [0]) 		(MOVHstore [4] ptr (MOVVconst [0]) 			(MOVHstore [2] ptr (MOVVconst [0]) 				(MOVHstore [0] ptr (MOVVconst [0]) mem))))
	for {
		if v.AuxInt != 8 {
			break
		}
		t := v.Aux
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(t.(*types.Type).Alignment()%2 == 0) {
			break
		}
		// Halfword-aligned 8-byte zero: four halfword stores at 6,4,2,0.
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = 6
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
		v1.AuxInt = 4
		v1.AddArg(ptr)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 0
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
		v3.AuxInt = 2
		v3.AddArg(ptr)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v4.AuxInt = 0
		v3.AddArg(v4)
		v5 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
		v5.AuxInt = 0
		v5.AddArg(ptr)
		v6 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v6.AuxInt = 0
		v5.AddArg(v6)
		v5.AddArg(mem)
		v3.AddArg(v5)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpZero_10 holds the (Zero ...) rewrite rules that did not
// fit in rewriteValueMIPS64_OpZero_0 (the generator splits rule groups): fixed
// sizes 3, 6, 12, 16 and 24 bytes, a Duff's-device fallback for medium
// 8-aligned sizes, and a LoweredZero runtime-loop fallback for everything
// else. Rules are tried in order; the first match rewrites v in place and
// returns true. Generated from gen/MIPS64.rules.
func rewriteValueMIPS64_OpZero_10(v *Value) bool {
	b := v.Block
	_ = b
	config := b.Func.Config
	_ = config
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Zero [3] ptr mem)
	// cond:
	// result: (MOVBstore [2] ptr (MOVVconst [0]) 		(MOVBstore [1] ptr (MOVVconst [0]) 			(MOVBstore [0] ptr (MOVVconst [0]) mem)))
	for {
		if v.AuxInt != 3 {
			break
		}
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		// Args must be captured before reset, which clears v's argument list;
		// v is reused as the outermost store of the chain.
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = 2
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
		v1.AuxInt = 1
		v1.AddArg(ptr)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 0
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
		v3.AuxInt = 0
		v3.AddArg(ptr)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v4.AuxInt = 0
		v3.AddArg(v4)
		v3.AddArg(mem)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
	// match: (Zero [6] {t} ptr mem)
	// cond: t.(*types.Type).Alignment()%2 == 0
	// result: (MOVHstore [4] ptr (MOVVconst [0]) 		(MOVHstore [2] ptr (MOVVconst [0]) 			(MOVHstore [0] ptr (MOVVconst [0]) mem)))
	for {
		if v.AuxInt != 6 {
			break
		}
		t := v.Aux
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(t.(*types.Type).Alignment()%2 == 0) {
			break
		}
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = 4
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
		v1.AuxInt = 2
		v1.AddArg(ptr)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 0
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
		v3.AuxInt = 0
		v3.AddArg(ptr)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v4.AuxInt = 0
		v3.AddArg(v4)
		v3.AddArg(mem)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
	// match: (Zero [12] {t} ptr mem)
	// cond: t.(*types.Type).Alignment()%4 == 0
	// result: (MOVWstore [8] ptr (MOVVconst [0]) 		(MOVWstore [4] ptr (MOVVconst [0]) 			(MOVWstore [0] ptr (MOVVconst [0]) mem)))
	for {
		if v.AuxInt != 12 {
			break
		}
		t := v.Aux
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(t.(*types.Type).Alignment()%4 == 0) {
			break
		}
		v.reset(OpMIPS64MOVWstore)
		v.AuxInt = 8
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem)
		v1.AuxInt = 4
		v1.AddArg(ptr)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 0
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem)
		v3.AuxInt = 0
		v3.AddArg(ptr)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v4.AuxInt = 0
		v3.AddArg(v4)
		v3.AddArg(mem)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
	// match: (Zero [16] {t} ptr mem)
	// cond: t.(*types.Type).Alignment()%8 == 0
	// result: (MOVVstore [8] ptr (MOVVconst [0]) 		(MOVVstore [0] ptr (MOVVconst [0]) mem))
	for {
		if v.AuxInt != 16 {
			break
		}
		t := v.Aux
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(t.(*types.Type).Alignment()%8 == 0) {
			break
		}
		v.reset(OpMIPS64MOVVstore)
		v.AuxInt = 8
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem)
		v1.AuxInt = 0
		v1.AddArg(ptr)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 0
		v1.AddArg(v2)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	// match: (Zero [24] {t} ptr mem)
	// cond: t.(*types.Type).Alignment()%8 == 0
	// result: (MOVVstore [16] ptr (MOVVconst [0]) 		(MOVVstore [8] ptr (MOVVconst [0]) 			(MOVVstore [0] ptr (MOVVconst [0]) mem)))
	for {
		if v.AuxInt != 24 {
			break
		}
		t := v.Aux
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(t.(*types.Type).Alignment()%8 == 0) {
			break
		}
		v.reset(OpMIPS64MOVVstore)
		v.AuxInt = 16
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem)
		v1.AuxInt = 8
		v1.AddArg(ptr)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 0
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem)
		v3.AuxInt = 0
		v3.AddArg(ptr)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v4.AuxInt = 0
		v3.AddArg(v4)
		v3.AddArg(mem)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
	// match: (Zero [s] {t} ptr mem)
	// cond: s%8 == 0 && s > 24 && s <= 8*128 	&& t.(*types.Type).Alignment()%8 == 0 && !config.noDuffDevice
	// result: (DUFFZERO [8 * (128 - int64(s/8))] ptr mem)
	for {
		s := v.AuxInt
		t := v.Aux
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(s%8 == 0 && s > 24 && s <= 8*128 && t.(*types.Type).Alignment()%8 == 0 && !config.noDuffDevice) {
			break
		}
		v.reset(OpMIPS64DUFFZERO)
		// AuxInt is the byte offset to jump to inside the 128-slot Duff's
		// device so that exactly s/8 doublewords get zeroed.
		v.AuxInt = 8 * (128 - int64(s/8))
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Zero [s] {t} ptr mem)
	// cond: (s > 8*128 || config.noDuffDevice) || t.(*types.Type).Alignment()%8 != 0
	// result: (LoweredZero [t.(*types.Type).Alignment()] 		ptr 		(ADDVconst <ptr.Type> ptr [s-moveSize(t.(*types.Type).Alignment(), config)]) 		mem)
	for {
		s := v.AuxInt
		t := v.Aux
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		if !((s > 8*128 || config.noDuffDevice) || t.(*types.Type).Alignment()%8 != 0) {
			break
		}
		// General fallback: a runtime zeroing loop. The second argument is
		// the address of the last element to clear.
		v.reset(OpMIPS64LoweredZero)
		v.AuxInt = t.(*types.Type).Alignment()
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpMIPS64ADDVconst, ptr.Type)
		v0.AuxInt = s - moveSize(t.(*types.Type).Alignment(), config)
		v0.AddArg(ptr)
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	return false
}
 10429  func rewriteValueMIPS64_OpZeroExt16to32_0(v *Value) bool {
 10430  	// match: (ZeroExt16to32 x)
 10431  	// cond:
 10432  	// result: (MOVHUreg x)
 10433  	for {
 10434  		x := v.Args[0]
 10435  		v.reset(OpMIPS64MOVHUreg)
 10436  		v.AddArg(x)
 10437  		return true
 10438  	}
 10439  }
 10440  func rewriteValueMIPS64_OpZeroExt16to64_0(v *Value) bool {
 10441  	// match: (ZeroExt16to64 x)
 10442  	// cond:
 10443  	// result: (MOVHUreg x)
 10444  	for {
 10445  		x := v.Args[0]
 10446  		v.reset(OpMIPS64MOVHUreg)
 10447  		v.AddArg(x)
 10448  		return true
 10449  	}
 10450  }
 10451  func rewriteValueMIPS64_OpZeroExt32to64_0(v *Value) bool {
 10452  	// match: (ZeroExt32to64 x)
 10453  	// cond:
 10454  	// result: (MOVWUreg x)
 10455  	for {
 10456  		x := v.Args[0]
 10457  		v.reset(OpMIPS64MOVWUreg)
 10458  		v.AddArg(x)
 10459  		return true
 10460  	}
 10461  }
 10462  func rewriteValueMIPS64_OpZeroExt8to16_0(v *Value) bool {
 10463  	// match: (ZeroExt8to16 x)
 10464  	// cond:
 10465  	// result: (MOVBUreg x)
 10466  	for {
 10467  		x := v.Args[0]
 10468  		v.reset(OpMIPS64MOVBUreg)
 10469  		v.AddArg(x)
 10470  		return true
 10471  	}
 10472  }
 10473  func rewriteValueMIPS64_OpZeroExt8to32_0(v *Value) bool {
 10474  	// match: (ZeroExt8to32 x)
 10475  	// cond:
 10476  	// result: (MOVBUreg x)
 10477  	for {
 10478  		x := v.Args[0]
 10479  		v.reset(OpMIPS64MOVBUreg)
 10480  		v.AddArg(x)
 10481  		return true
 10482  	}
 10483  }
 10484  func rewriteValueMIPS64_OpZeroExt8to64_0(v *Value) bool {
 10485  	// match: (ZeroExt8to64 x)
 10486  	// cond:
 10487  	// result: (MOVBUreg x)
 10488  	for {
 10489  		x := v.Args[0]
 10490  		v.reset(OpMIPS64MOVBUreg)
 10491  		v.AddArg(x)
 10492  		return true
 10493  	}
 10494  }
 10495  func rewriteBlockMIPS64(b *Block) bool {
 10496  	config := b.Func.Config
 10497  	_ = config
 10498  	fe := b.Func.fe
 10499  	_ = fe
 10500  	typ := &config.Types
 10501  	_ = typ
 10502  	switch b.Kind {
 10503  	case BlockMIPS64EQ:
 10504  		// match: (EQ (FPFlagTrue cmp) yes no)
 10505  		// cond:
 10506  		// result: (FPF cmp yes no)
 10507  		for {
 10508  			v := b.Control
 10509  			if v.Op != OpMIPS64FPFlagTrue {
 10510  				break
 10511  			}
 10512  			cmp := v.Args[0]
 10513  			b.Kind = BlockMIPS64FPF
 10514  			b.SetControl(cmp)
 10515  			b.Aux = nil
 10516  			return true
 10517  		}
 10518  		// match: (EQ (FPFlagFalse cmp) yes no)
 10519  		// cond:
 10520  		// result: (FPT cmp yes no)
 10521  		for {
 10522  			v := b.Control
 10523  			if v.Op != OpMIPS64FPFlagFalse {
 10524  				break
 10525  			}
 10526  			cmp := v.Args[0]
 10527  			b.Kind = BlockMIPS64FPT
 10528  			b.SetControl(cmp)
 10529  			b.Aux = nil
 10530  			return true
 10531  		}
 10532  		// match: (EQ (XORconst [1] cmp:(SGT _ _)) yes no)
 10533  		// cond:
 10534  		// result: (NE cmp yes no)
 10535  		for {
 10536  			v := b.Control
 10537  			if v.Op != OpMIPS64XORconst {
 10538  				break
 10539  			}
 10540  			if v.AuxInt != 1 {
 10541  				break
 10542  			}
 10543  			cmp := v.Args[0]
 10544  			if cmp.Op != OpMIPS64SGT {
 10545  				break
 10546  			}
 10547  			_ = cmp.Args[1]
 10548  			b.Kind = BlockMIPS64NE
 10549  			b.SetControl(cmp)
 10550  			b.Aux = nil
 10551  			return true
 10552  		}
 10553  		// match: (EQ (XORconst [1] cmp:(SGTU _ _)) yes no)
 10554  		// cond:
 10555  		// result: (NE cmp yes no)
 10556  		for {
 10557  			v := b.Control
 10558  			if v.Op != OpMIPS64XORconst {
 10559  				break
 10560  			}
 10561  			if v.AuxInt != 1 {
 10562  				break
 10563  			}
 10564  			cmp := v.Args[0]
 10565  			if cmp.Op != OpMIPS64SGTU {
 10566  				break
 10567  			}
 10568  			_ = cmp.Args[1]
 10569  			b.Kind = BlockMIPS64NE
 10570  			b.SetControl(cmp)
 10571  			b.Aux = nil
 10572  			return true
 10573  		}
 10574  		// match: (EQ (XORconst [1] cmp:(SGTconst _)) yes no)
 10575  		// cond:
 10576  		// result: (NE cmp yes no)
 10577  		for {
 10578  			v := b.Control
 10579  			if v.Op != OpMIPS64XORconst {
 10580  				break
 10581  			}
 10582  			if v.AuxInt != 1 {
 10583  				break
 10584  			}
 10585  			cmp := v.Args[0]
 10586  			if cmp.Op != OpMIPS64SGTconst {
 10587  				break
 10588  			}
 10589  			b.Kind = BlockMIPS64NE
 10590  			b.SetControl(cmp)
 10591  			b.Aux = nil
 10592  			return true
 10593  		}
 10594  		// match: (EQ (XORconst [1] cmp:(SGTUconst _)) yes no)
 10595  		// cond:
 10596  		// result: (NE cmp yes no)
 10597  		for {
 10598  			v := b.Control
 10599  			if v.Op != OpMIPS64XORconst {
 10600  				break
 10601  			}
 10602  			if v.AuxInt != 1 {
 10603  				break
 10604  			}
 10605  			cmp := v.Args[0]
 10606  			if cmp.Op != OpMIPS64SGTUconst {
 10607  				break
 10608  			}
 10609  			b.Kind = BlockMIPS64NE
 10610  			b.SetControl(cmp)
 10611  			b.Aux = nil
 10612  			return true
 10613  		}
 10614  		// match: (EQ (SGTUconst [1] x) yes no)
 10615  		// cond:
 10616  		// result: (NE x yes no)
 10617  		for {
 10618  			v := b.Control
 10619  			if v.Op != OpMIPS64SGTUconst {
 10620  				break
 10621  			}
 10622  			if v.AuxInt != 1 {
 10623  				break
 10624  			}
 10625  			x := v.Args[0]
 10626  			b.Kind = BlockMIPS64NE
 10627  			b.SetControl(x)
 10628  			b.Aux = nil
 10629  			return true
 10630  		}
 10631  		// match: (EQ (SGTU x (MOVVconst [0])) yes no)
 10632  		// cond:
 10633  		// result: (EQ x yes no)
 10634  		for {
 10635  			v := b.Control
 10636  			if v.Op != OpMIPS64SGTU {
 10637  				break
 10638  			}
 10639  			_ = v.Args[1]
 10640  			x := v.Args[0]
 10641  			v_1 := v.Args[1]
 10642  			if v_1.Op != OpMIPS64MOVVconst {
 10643  				break
 10644  			}
 10645  			if v_1.AuxInt != 0 {
 10646  				break
 10647  			}
 10648  			b.Kind = BlockMIPS64EQ
 10649  			b.SetControl(x)
 10650  			b.Aux = nil
 10651  			return true
 10652  		}
 10653  		// match: (EQ (SGTconst [0] x) yes no)
 10654  		// cond:
 10655  		// result: (GEZ x yes no)
 10656  		for {
 10657  			v := b.Control
 10658  			if v.Op != OpMIPS64SGTconst {
 10659  				break
 10660  			}
 10661  			if v.AuxInt != 0 {
 10662  				break
 10663  			}
 10664  			x := v.Args[0]
 10665  			b.Kind = BlockMIPS64GEZ
 10666  			b.SetControl(x)
 10667  			b.Aux = nil
 10668  			return true
 10669  		}
 10670  		// match: (EQ (SGT x (MOVVconst [0])) yes no)
 10671  		// cond:
 10672  		// result: (LEZ x yes no)
 10673  		for {
 10674  			v := b.Control
 10675  			if v.Op != OpMIPS64SGT {
 10676  				break
 10677  			}
 10678  			_ = v.Args[1]
 10679  			x := v.Args[0]
 10680  			v_1 := v.Args[1]
 10681  			if v_1.Op != OpMIPS64MOVVconst {
 10682  				break
 10683  			}
 10684  			if v_1.AuxInt != 0 {
 10685  				break
 10686  			}
 10687  			b.Kind = BlockMIPS64LEZ
 10688  			b.SetControl(x)
 10689  			b.Aux = nil
 10690  			return true
 10691  		}
 10692  		// match: (EQ (MOVVconst [0]) yes no)
 10693  		// cond:
 10694  		// result: (First nil yes no)
 10695  		for {
 10696  			v := b.Control
 10697  			if v.Op != OpMIPS64MOVVconst {
 10698  				break
 10699  			}
 10700  			if v.AuxInt != 0 {
 10701  				break
 10702  			}
 10703  			b.Kind = BlockFirst
 10704  			b.SetControl(nil)
 10705  			b.Aux = nil
 10706  			return true
 10707  		}
 10708  		// match: (EQ (MOVVconst [c]) yes no)
 10709  		// cond: c != 0
 10710  		// result: (First nil no yes)
 10711  		for {
 10712  			v := b.Control
 10713  			if v.Op != OpMIPS64MOVVconst {
 10714  				break
 10715  			}
 10716  			c := v.AuxInt
 10717  			if !(c != 0) {
 10718  				break
 10719  			}
 10720  			b.Kind = BlockFirst
 10721  			b.SetControl(nil)
 10722  			b.Aux = nil
 10723  			b.swapSuccessors()
 10724  			return true
 10725  		}
 10726  	case BlockMIPS64GEZ:
 10727  		// match: (GEZ (MOVVconst [c]) yes no)
 10728  		// cond: c >= 0
 10729  		// result: (First nil yes no)
 10730  		for {
 10731  			v := b.Control
 10732  			if v.Op != OpMIPS64MOVVconst {
 10733  				break
 10734  			}
 10735  			c := v.AuxInt
 10736  			if !(c >= 0) {
 10737  				break
 10738  			}
 10739  			b.Kind = BlockFirst
 10740  			b.SetControl(nil)
 10741  			b.Aux = nil
 10742  			return true
 10743  		}
 10744  		// match: (GEZ (MOVVconst [c]) yes no)
 10745  		// cond: c <  0
 10746  		// result: (First nil no yes)
 10747  		for {
 10748  			v := b.Control
 10749  			if v.Op != OpMIPS64MOVVconst {
 10750  				break
 10751  			}
 10752  			c := v.AuxInt
 10753  			if !(c < 0) {
 10754  				break
 10755  			}
 10756  			b.Kind = BlockFirst
 10757  			b.SetControl(nil)
 10758  			b.Aux = nil
 10759  			b.swapSuccessors()
 10760  			return true
 10761  		}
 10762  	case BlockMIPS64GTZ:
 10763  		// match: (GTZ (MOVVconst [c]) yes no)
 10764  		// cond: c >  0
 10765  		// result: (First nil yes no)
 10766  		for {
 10767  			v := b.Control
 10768  			if v.Op != OpMIPS64MOVVconst {
 10769  				break
 10770  			}
 10771  			c := v.AuxInt
 10772  			if !(c > 0) {
 10773  				break
 10774  			}
 10775  			b.Kind = BlockFirst
 10776  			b.SetControl(nil)
 10777  			b.Aux = nil
 10778  			return true
 10779  		}
 10780  		// match: (GTZ (MOVVconst [c]) yes no)
 10781  		// cond: c <= 0
 10782  		// result: (First nil no yes)
 10783  		for {
 10784  			v := b.Control
 10785  			if v.Op != OpMIPS64MOVVconst {
 10786  				break
 10787  			}
 10788  			c := v.AuxInt
 10789  			if !(c <= 0) {
 10790  				break
 10791  			}
 10792  			b.Kind = BlockFirst
 10793  			b.SetControl(nil)
 10794  			b.Aux = nil
 10795  			b.swapSuccessors()
 10796  			return true
 10797  		}
 10798  	case BlockIf:
 10799  		// match: (If cond yes no)
 10800  		// cond:
 10801  		// result: (NE cond yes no)
 10802  		for {
 10803  			v := b.Control
 10804  			_ = v
 10805  			cond := b.Control
 10806  			b.Kind = BlockMIPS64NE
 10807  			b.SetControl(cond)
 10808  			b.Aux = nil
 10809  			return true
 10810  		}
 10811  	case BlockMIPS64LEZ:
 10812  		// match: (LEZ (MOVVconst [c]) yes no)
 10813  		// cond: c <= 0
 10814  		// result: (First nil yes no)
 10815  		for {
 10816  			v := b.Control
 10817  			if v.Op != OpMIPS64MOVVconst {
 10818  				break
 10819  			}
 10820  			c := v.AuxInt
 10821  			if !(c <= 0) {
 10822  				break
 10823  			}
 10824  			b.Kind = BlockFirst
 10825  			b.SetControl(nil)
 10826  			b.Aux = nil
 10827  			return true
 10828  		}
 10829  		// match: (LEZ (MOVVconst [c]) yes no)
 10830  		// cond: c >  0
 10831  		// result: (First nil no yes)
 10832  		for {
 10833  			v := b.Control
 10834  			if v.Op != OpMIPS64MOVVconst {
 10835  				break
 10836  			}
 10837  			c := v.AuxInt
 10838  			if !(c > 0) {
 10839  				break
 10840  			}
 10841  			b.Kind = BlockFirst
 10842  			b.SetControl(nil)
 10843  			b.Aux = nil
 10844  			b.swapSuccessors()
 10845  			return true
 10846  		}
 10847  	case BlockMIPS64LTZ:
 10848  		// match: (LTZ (MOVVconst [c]) yes no)
 10849  		// cond: c <  0
 10850  		// result: (First nil yes no)
 10851  		for {
 10852  			v := b.Control
 10853  			if v.Op != OpMIPS64MOVVconst {
 10854  				break
 10855  			}
 10856  			c := v.AuxInt
 10857  			if !(c < 0) {
 10858  				break
 10859  			}
 10860  			b.Kind = BlockFirst
 10861  			b.SetControl(nil)
 10862  			b.Aux = nil
 10863  			return true
 10864  		}
 10865  		// match: (LTZ (MOVVconst [c]) yes no)
 10866  		// cond: c >= 0
 10867  		// result: (First nil no yes)
 10868  		for {
 10869  			v := b.Control
 10870  			if v.Op != OpMIPS64MOVVconst {
 10871  				break
 10872  			}
 10873  			c := v.AuxInt
 10874  			if !(c >= 0) {
 10875  				break
 10876  			}
 10877  			b.Kind = BlockFirst
 10878  			b.SetControl(nil)
 10879  			b.Aux = nil
 10880  			b.swapSuccessors()
 10881  			return true
 10882  		}
 10883  	case BlockMIPS64NE:
 10884  		// match: (NE (FPFlagTrue cmp) yes no)
 10885  		// cond:
 10886  		// result: (FPT cmp yes no)
 10887  		for {
 10888  			v := b.Control
 10889  			if v.Op != OpMIPS64FPFlagTrue {
 10890  				break
 10891  			}
 10892  			cmp := v.Args[0]
 10893  			b.Kind = BlockMIPS64FPT
 10894  			b.SetControl(cmp)
 10895  			b.Aux = nil
 10896  			return true
 10897  		}
 10898  		// match: (NE (FPFlagFalse cmp) yes no)
 10899  		// cond:
 10900  		// result: (FPF cmp yes no)
 10901  		for {
 10902  			v := b.Control
 10903  			if v.Op != OpMIPS64FPFlagFalse {
 10904  				break
 10905  			}
 10906  			cmp := v.Args[0]
 10907  			b.Kind = BlockMIPS64FPF
 10908  			b.SetControl(cmp)
 10909  			b.Aux = nil
 10910  			return true
 10911  		}
 10912  		// match: (NE (XORconst [1] cmp:(SGT _ _)) yes no)
 10913  		// cond:
 10914  		// result: (EQ cmp yes no)
 10915  		for {
 10916  			v := b.Control
 10917  			if v.Op != OpMIPS64XORconst {
 10918  				break
 10919  			}
 10920  			if v.AuxInt != 1 {
 10921  				break
 10922  			}
 10923  			cmp := v.Args[0]
 10924  			if cmp.Op != OpMIPS64SGT {
 10925  				break
 10926  			}
 10927  			_ = cmp.Args[1]
 10928  			b.Kind = BlockMIPS64EQ
 10929  			b.SetControl(cmp)
 10930  			b.Aux = nil
 10931  			return true
 10932  		}
 10933  		// match: (NE (XORconst [1] cmp:(SGTU _ _)) yes no)
 10934  		// cond:
 10935  		// result: (EQ cmp yes no)
 10936  		for {
 10937  			v := b.Control
 10938  			if v.Op != OpMIPS64XORconst {
 10939  				break
 10940  			}
 10941  			if v.AuxInt != 1 {
 10942  				break
 10943  			}
 10944  			cmp := v.Args[0]
 10945  			if cmp.Op != OpMIPS64SGTU {
 10946  				break
 10947  			}
 10948  			_ = cmp.Args[1]
 10949  			b.Kind = BlockMIPS64EQ
 10950  			b.SetControl(cmp)
 10951  			b.Aux = nil
 10952  			return true
 10953  		}
 10954  		// match: (NE (XORconst [1] cmp:(SGTconst _)) yes no)
 10955  		// cond:
 10956  		// result: (EQ cmp yes no)
 10957  		for {
 10958  			v := b.Control
 10959  			if v.Op != OpMIPS64XORconst {
 10960  				break
 10961  			}
 10962  			if v.AuxInt != 1 {
 10963  				break
 10964  			}
 10965  			cmp := v.Args[0]
 10966  			if cmp.Op != OpMIPS64SGTconst {
 10967  				break
 10968  			}
 10969  			b.Kind = BlockMIPS64EQ
 10970  			b.SetControl(cmp)
 10971  			b.Aux = nil
 10972  			return true
 10973  		}
 10974  		// match: (NE (XORconst [1] cmp:(SGTUconst _)) yes no)
 10975  		// cond:
 10976  		// result: (EQ cmp yes no)
 10977  		for {
 10978  			v := b.Control
 10979  			if v.Op != OpMIPS64XORconst {
 10980  				break
 10981  			}
 10982  			if v.AuxInt != 1 {
 10983  				break
 10984  			}
 10985  			cmp := v.Args[0]
 10986  			if cmp.Op != OpMIPS64SGTUconst {
 10987  				break
 10988  			}
 10989  			b.Kind = BlockMIPS64EQ
 10990  			b.SetControl(cmp)
 10991  			b.Aux = nil
 10992  			return true
 10993  		}
 10994  		// match: (NE (SGTUconst [1] x) yes no)
 10995  		// cond:
 10996  		// result: (EQ x yes no)
 10997  		for {
 10998  			v := b.Control
 10999  			if v.Op != OpMIPS64SGTUconst {
 11000  				break
 11001  			}
 11002  			if v.AuxInt != 1 {
 11003  				break
 11004  			}
 11005  			x := v.Args[0]
 11006  			b.Kind = BlockMIPS64EQ
 11007  			b.SetControl(x)
 11008  			b.Aux = nil
 11009  			return true
 11010  		}
 11011  		// match: (NE (SGTU x (MOVVconst [0])) yes no)
 11012  		// cond:
 11013  		// result: (NE x yes no)
 11014  		for {
 11015  			v := b.Control
 11016  			if v.Op != OpMIPS64SGTU {
 11017  				break
 11018  			}
 11019  			_ = v.Args[1]
 11020  			x := v.Args[0]
 11021  			v_1 := v.Args[1]
 11022  			if v_1.Op != OpMIPS64MOVVconst {
 11023  				break
 11024  			}
 11025  			if v_1.AuxInt != 0 {
 11026  				break
 11027  			}
 11028  			b.Kind = BlockMIPS64NE
 11029  			b.SetControl(x)
 11030  			b.Aux = nil
 11031  			return true
 11032  		}
 11033  		// match: (NE (SGTconst [0] x) yes no)
 11034  		// cond:
 11035  		// result: (LTZ x yes no)
 11036  		for {
 11037  			v := b.Control
 11038  			if v.Op != OpMIPS64SGTconst {
 11039  				break
 11040  			}
 11041  			if v.AuxInt != 0 {
 11042  				break
 11043  			}
 11044  			x := v.Args[0]
 11045  			b.Kind = BlockMIPS64LTZ
 11046  			b.SetControl(x)
 11047  			b.Aux = nil
 11048  			return true
 11049  		}
 11050  		// match: (NE (SGT x (MOVVconst [0])) yes no)
 11051  		// cond:
 11052  		// result: (GTZ x yes no)
 11053  		for {
 11054  			v := b.Control
 11055  			if v.Op != OpMIPS64SGT {
 11056  				break
 11057  			}
 11058  			_ = v.Args[1]
 11059  			x := v.Args[0]
 11060  			v_1 := v.Args[1]
 11061  			if v_1.Op != OpMIPS64MOVVconst {
 11062  				break
 11063  			}
 11064  			if v_1.AuxInt != 0 {
 11065  				break
 11066  			}
 11067  			b.Kind = BlockMIPS64GTZ
 11068  			b.SetControl(x)
 11069  			b.Aux = nil
 11070  			return true
 11071  		}
 11072  		// match: (NE (MOVVconst [0]) yes no)
 11073  		// cond:
 11074  		// result: (First nil no yes)
 11075  		for {
 11076  			v := b.Control
 11077  			if v.Op != OpMIPS64MOVVconst {
 11078  				break
 11079  			}
 11080  			if v.AuxInt != 0 {
 11081  				break
 11082  			}
 11083  			b.Kind = BlockFirst
 11084  			b.SetControl(nil)
 11085  			b.Aux = nil
 11086  			b.swapSuccessors()
 11087  			return true
 11088  		}
 11089  		// match: (NE (MOVVconst [c]) yes no)
 11090  		// cond: c != 0
 11091  		// result: (First nil yes no)
 11092  		for {
 11093  			v := b.Control
 11094  			if v.Op != OpMIPS64MOVVconst {
 11095  				break
 11096  			}
 11097  			c := v.AuxInt
 11098  			if !(c != 0) {
 11099  				break
 11100  			}
 11101  			b.Kind = BlockFirst
 11102  			b.SetControl(nil)
 11103  			b.Aux = nil
 11104  			return true
 11105  		}
 11106  	}
 11107  	return false
 11108  }