github.com/slayercat/go@v0.0.0-20170428012452-c51559813f61/src/cmd/compile/internal/ssa/rewriteMIPS64.go (about)

     1  // Code generated from gen/MIPS64.rules; DO NOT EDIT.
     2  // generated with: cd gen; go run *.go
     3  
     4  package ssa
     5  
     6  import "math"
     7  import "cmd/internal/obj"
     8  import "cmd/internal/objabi"
     9  
    10  var _ = math.MinInt8  // in case not otherwise used
    11  var _ = obj.ANOP      // in case not otherwise used
    12  var _ = objabi.GOROOT // in case not otherwise used
    13  
// rewriteValueMIPS64 dispatches v to the rewrite helper(s) generated for its
// opcode. It reports whether any rewrite rule fired and mutated v in place.
// NOTE: generated code (gen/MIPS64.rules) — the case list mirrors the rules
// file one-to-one; opcodes with more than ten rules chain _0/_10/... helpers.
func rewriteValueMIPS64(v *Value) bool {
	switch v.Op {
	case OpAdd16:
		return rewriteValueMIPS64_OpAdd16_0(v)
	case OpAdd32:
		return rewriteValueMIPS64_OpAdd32_0(v)
	case OpAdd32F:
		return rewriteValueMIPS64_OpAdd32F_0(v)
	case OpAdd64:
		return rewriteValueMIPS64_OpAdd64_0(v)
	case OpAdd64F:
		return rewriteValueMIPS64_OpAdd64F_0(v)
	case OpAdd8:
		return rewriteValueMIPS64_OpAdd8_0(v)
	case OpAddPtr:
		return rewriteValueMIPS64_OpAddPtr_0(v)
	case OpAddr:
		return rewriteValueMIPS64_OpAddr_0(v)
	case OpAnd16:
		return rewriteValueMIPS64_OpAnd16_0(v)
	case OpAnd32:
		return rewriteValueMIPS64_OpAnd32_0(v)
	case OpAnd64:
		return rewriteValueMIPS64_OpAnd64_0(v)
	case OpAnd8:
		return rewriteValueMIPS64_OpAnd8_0(v)
	case OpAndB:
		return rewriteValueMIPS64_OpAndB_0(v)
	case OpAvg64u:
		return rewriteValueMIPS64_OpAvg64u_0(v)
	case OpClosureCall:
		return rewriteValueMIPS64_OpClosureCall_0(v)
	case OpCom16:
		return rewriteValueMIPS64_OpCom16_0(v)
	case OpCom32:
		return rewriteValueMIPS64_OpCom32_0(v)
	case OpCom64:
		return rewriteValueMIPS64_OpCom64_0(v)
	case OpCom8:
		return rewriteValueMIPS64_OpCom8_0(v)
	case OpConst16:
		return rewriteValueMIPS64_OpConst16_0(v)
	case OpConst32:
		return rewriteValueMIPS64_OpConst32_0(v)
	case OpConst32F:
		return rewriteValueMIPS64_OpConst32F_0(v)
	case OpConst64:
		return rewriteValueMIPS64_OpConst64_0(v)
	case OpConst64F:
		return rewriteValueMIPS64_OpConst64F_0(v)
	case OpConst8:
		return rewriteValueMIPS64_OpConst8_0(v)
	case OpConstBool:
		return rewriteValueMIPS64_OpConstBool_0(v)
	case OpConstNil:
		return rewriteValueMIPS64_OpConstNil_0(v)
	case OpConvert:
		return rewriteValueMIPS64_OpConvert_0(v)
	case OpCvt32Fto32:
		return rewriteValueMIPS64_OpCvt32Fto32_0(v)
	case OpCvt32Fto64:
		return rewriteValueMIPS64_OpCvt32Fto64_0(v)
	case OpCvt32Fto64F:
		return rewriteValueMIPS64_OpCvt32Fto64F_0(v)
	case OpCvt32to32F:
		return rewriteValueMIPS64_OpCvt32to32F_0(v)
	case OpCvt32to64F:
		return rewriteValueMIPS64_OpCvt32to64F_0(v)
	case OpCvt64Fto32:
		return rewriteValueMIPS64_OpCvt64Fto32_0(v)
	case OpCvt64Fto32F:
		return rewriteValueMIPS64_OpCvt64Fto32F_0(v)
	case OpCvt64Fto64:
		return rewriteValueMIPS64_OpCvt64Fto64_0(v)
	case OpCvt64to32F:
		return rewriteValueMIPS64_OpCvt64to32F_0(v)
	case OpCvt64to64F:
		return rewriteValueMIPS64_OpCvt64to64F_0(v)
	case OpDiv16:
		return rewriteValueMIPS64_OpDiv16_0(v)
	case OpDiv16u:
		return rewriteValueMIPS64_OpDiv16u_0(v)
	case OpDiv32:
		return rewriteValueMIPS64_OpDiv32_0(v)
	case OpDiv32F:
		return rewriteValueMIPS64_OpDiv32F_0(v)
	case OpDiv32u:
		return rewriteValueMIPS64_OpDiv32u_0(v)
	case OpDiv64:
		return rewriteValueMIPS64_OpDiv64_0(v)
	case OpDiv64F:
		return rewriteValueMIPS64_OpDiv64F_0(v)
	case OpDiv64u:
		return rewriteValueMIPS64_OpDiv64u_0(v)
	case OpDiv8:
		return rewriteValueMIPS64_OpDiv8_0(v)
	case OpDiv8u:
		return rewriteValueMIPS64_OpDiv8u_0(v)
	case OpEq16:
		return rewriteValueMIPS64_OpEq16_0(v)
	case OpEq32:
		return rewriteValueMIPS64_OpEq32_0(v)
	case OpEq32F:
		return rewriteValueMIPS64_OpEq32F_0(v)
	case OpEq64:
		return rewriteValueMIPS64_OpEq64_0(v)
	case OpEq64F:
		return rewriteValueMIPS64_OpEq64F_0(v)
	case OpEq8:
		return rewriteValueMIPS64_OpEq8_0(v)
	case OpEqB:
		return rewriteValueMIPS64_OpEqB_0(v)
	case OpEqPtr:
		return rewriteValueMIPS64_OpEqPtr_0(v)
	case OpGeq16:
		return rewriteValueMIPS64_OpGeq16_0(v)
	case OpGeq16U:
		return rewriteValueMIPS64_OpGeq16U_0(v)
	case OpGeq32:
		return rewriteValueMIPS64_OpGeq32_0(v)
	case OpGeq32F:
		return rewriteValueMIPS64_OpGeq32F_0(v)
	case OpGeq32U:
		return rewriteValueMIPS64_OpGeq32U_0(v)
	case OpGeq64:
		return rewriteValueMIPS64_OpGeq64_0(v)
	case OpGeq64F:
		return rewriteValueMIPS64_OpGeq64F_0(v)
	case OpGeq64U:
		return rewriteValueMIPS64_OpGeq64U_0(v)
	case OpGeq8:
		return rewriteValueMIPS64_OpGeq8_0(v)
	case OpGeq8U:
		return rewriteValueMIPS64_OpGeq8U_0(v)
	case OpGetClosurePtr:
		return rewriteValueMIPS64_OpGetClosurePtr_0(v)
	case OpGreater16:
		return rewriteValueMIPS64_OpGreater16_0(v)
	case OpGreater16U:
		return rewriteValueMIPS64_OpGreater16U_0(v)
	case OpGreater32:
		return rewriteValueMIPS64_OpGreater32_0(v)
	case OpGreater32F:
		return rewriteValueMIPS64_OpGreater32F_0(v)
	case OpGreater32U:
		return rewriteValueMIPS64_OpGreater32U_0(v)
	case OpGreater64:
		return rewriteValueMIPS64_OpGreater64_0(v)
	case OpGreater64F:
		return rewriteValueMIPS64_OpGreater64F_0(v)
	case OpGreater64U:
		return rewriteValueMIPS64_OpGreater64U_0(v)
	case OpGreater8:
		return rewriteValueMIPS64_OpGreater8_0(v)
	case OpGreater8U:
		return rewriteValueMIPS64_OpGreater8U_0(v)
	case OpHmul32:
		return rewriteValueMIPS64_OpHmul32_0(v)
	case OpHmul32u:
		return rewriteValueMIPS64_OpHmul32u_0(v)
	case OpHmul64:
		return rewriteValueMIPS64_OpHmul64_0(v)
	case OpHmul64u:
		return rewriteValueMIPS64_OpHmul64u_0(v)
	case OpInterCall:
		return rewriteValueMIPS64_OpInterCall_0(v)
	case OpIsInBounds:
		return rewriteValueMIPS64_OpIsInBounds_0(v)
	case OpIsNonNil:
		return rewriteValueMIPS64_OpIsNonNil_0(v)
	case OpIsSliceInBounds:
		return rewriteValueMIPS64_OpIsSliceInBounds_0(v)
	case OpLeq16:
		return rewriteValueMIPS64_OpLeq16_0(v)
	case OpLeq16U:
		return rewriteValueMIPS64_OpLeq16U_0(v)
	case OpLeq32:
		return rewriteValueMIPS64_OpLeq32_0(v)
	case OpLeq32F:
		return rewriteValueMIPS64_OpLeq32F_0(v)
	case OpLeq32U:
		return rewriteValueMIPS64_OpLeq32U_0(v)
	case OpLeq64:
		return rewriteValueMIPS64_OpLeq64_0(v)
	case OpLeq64F:
		return rewriteValueMIPS64_OpLeq64F_0(v)
	case OpLeq64U:
		return rewriteValueMIPS64_OpLeq64U_0(v)
	case OpLeq8:
		return rewriteValueMIPS64_OpLeq8_0(v)
	case OpLeq8U:
		return rewriteValueMIPS64_OpLeq8U_0(v)
	case OpLess16:
		return rewriteValueMIPS64_OpLess16_0(v)
	case OpLess16U:
		return rewriteValueMIPS64_OpLess16U_0(v)
	case OpLess32:
		return rewriteValueMIPS64_OpLess32_0(v)
	case OpLess32F:
		return rewriteValueMIPS64_OpLess32F_0(v)
	case OpLess32U:
		return rewriteValueMIPS64_OpLess32U_0(v)
	case OpLess64:
		return rewriteValueMIPS64_OpLess64_0(v)
	case OpLess64F:
		return rewriteValueMIPS64_OpLess64F_0(v)
	case OpLess64U:
		return rewriteValueMIPS64_OpLess64U_0(v)
	case OpLess8:
		return rewriteValueMIPS64_OpLess8_0(v)
	case OpLess8U:
		return rewriteValueMIPS64_OpLess8U_0(v)
	case OpLoad:
		return rewriteValueMIPS64_OpLoad_0(v)
	case OpLsh16x16:
		return rewriteValueMIPS64_OpLsh16x16_0(v)
	case OpLsh16x32:
		return rewriteValueMIPS64_OpLsh16x32_0(v)
	case OpLsh16x64:
		return rewriteValueMIPS64_OpLsh16x64_0(v)
	case OpLsh16x8:
		return rewriteValueMIPS64_OpLsh16x8_0(v)
	case OpLsh32x16:
		return rewriteValueMIPS64_OpLsh32x16_0(v)
	case OpLsh32x32:
		return rewriteValueMIPS64_OpLsh32x32_0(v)
	case OpLsh32x64:
		return rewriteValueMIPS64_OpLsh32x64_0(v)
	case OpLsh32x8:
		return rewriteValueMIPS64_OpLsh32x8_0(v)
	case OpLsh64x16:
		return rewriteValueMIPS64_OpLsh64x16_0(v)
	case OpLsh64x32:
		return rewriteValueMIPS64_OpLsh64x32_0(v)
	case OpLsh64x64:
		return rewriteValueMIPS64_OpLsh64x64_0(v)
	case OpLsh64x8:
		return rewriteValueMIPS64_OpLsh64x8_0(v)
	case OpLsh8x16:
		return rewriteValueMIPS64_OpLsh8x16_0(v)
	case OpLsh8x32:
		return rewriteValueMIPS64_OpLsh8x32_0(v)
	case OpLsh8x64:
		return rewriteValueMIPS64_OpLsh8x64_0(v)
	case OpLsh8x8:
		return rewriteValueMIPS64_OpLsh8x8_0(v)
	case OpMIPS64ADDV:
		return rewriteValueMIPS64_OpMIPS64ADDV_0(v)
	case OpMIPS64ADDVconst:
		return rewriteValueMIPS64_OpMIPS64ADDVconst_0(v)
	case OpMIPS64AND:
		return rewriteValueMIPS64_OpMIPS64AND_0(v)
	case OpMIPS64ANDconst:
		return rewriteValueMIPS64_OpMIPS64ANDconst_0(v)
	case OpMIPS64MOVBUload:
		return rewriteValueMIPS64_OpMIPS64MOVBUload_0(v)
	case OpMIPS64MOVBUreg:
		return rewriteValueMIPS64_OpMIPS64MOVBUreg_0(v)
	case OpMIPS64MOVBload:
		return rewriteValueMIPS64_OpMIPS64MOVBload_0(v)
	case OpMIPS64MOVBreg:
		return rewriteValueMIPS64_OpMIPS64MOVBreg_0(v)
	case OpMIPS64MOVBstore:
		return rewriteValueMIPS64_OpMIPS64MOVBstore_0(v)
	case OpMIPS64MOVBstorezero:
		return rewriteValueMIPS64_OpMIPS64MOVBstorezero_0(v)
	case OpMIPS64MOVDload:
		return rewriteValueMIPS64_OpMIPS64MOVDload_0(v)
	case OpMIPS64MOVDstore:
		return rewriteValueMIPS64_OpMIPS64MOVDstore_0(v)
	case OpMIPS64MOVFload:
		return rewriteValueMIPS64_OpMIPS64MOVFload_0(v)
	case OpMIPS64MOVFstore:
		return rewriteValueMIPS64_OpMIPS64MOVFstore_0(v)
	case OpMIPS64MOVHUload:
		return rewriteValueMIPS64_OpMIPS64MOVHUload_0(v)
	case OpMIPS64MOVHUreg:
		return rewriteValueMIPS64_OpMIPS64MOVHUreg_0(v)
	case OpMIPS64MOVHload:
		return rewriteValueMIPS64_OpMIPS64MOVHload_0(v)
	case OpMIPS64MOVHreg:
		return rewriteValueMIPS64_OpMIPS64MOVHreg_0(v)
	case OpMIPS64MOVHstore:
		return rewriteValueMIPS64_OpMIPS64MOVHstore_0(v)
	case OpMIPS64MOVHstorezero:
		return rewriteValueMIPS64_OpMIPS64MOVHstorezero_0(v)
	case OpMIPS64MOVVload:
		return rewriteValueMIPS64_OpMIPS64MOVVload_0(v)
	case OpMIPS64MOVVreg:
		return rewriteValueMIPS64_OpMIPS64MOVVreg_0(v)
	case OpMIPS64MOVVstore:
		return rewriteValueMIPS64_OpMIPS64MOVVstore_0(v)
	case OpMIPS64MOVVstorezero:
		return rewriteValueMIPS64_OpMIPS64MOVVstorezero_0(v)
	case OpMIPS64MOVWUload:
		return rewriteValueMIPS64_OpMIPS64MOVWUload_0(v)
	case OpMIPS64MOVWUreg:
		return rewriteValueMIPS64_OpMIPS64MOVWUreg_0(v)
	case OpMIPS64MOVWload:
		return rewriteValueMIPS64_OpMIPS64MOVWload_0(v)
	case OpMIPS64MOVWreg:
		// MOVWreg has more than ten rules; helpers are chained.
		return rewriteValueMIPS64_OpMIPS64MOVWreg_0(v) || rewriteValueMIPS64_OpMIPS64MOVWreg_10(v)
	case OpMIPS64MOVWstore:
		return rewriteValueMIPS64_OpMIPS64MOVWstore_0(v)
	case OpMIPS64MOVWstorezero:
		return rewriteValueMIPS64_OpMIPS64MOVWstorezero_0(v)
	case OpMIPS64NEGV:
		return rewriteValueMIPS64_OpMIPS64NEGV_0(v)
	case OpMIPS64NOR:
		return rewriteValueMIPS64_OpMIPS64NOR_0(v)
	case OpMIPS64NORconst:
		return rewriteValueMIPS64_OpMIPS64NORconst_0(v)
	case OpMIPS64OR:
		return rewriteValueMIPS64_OpMIPS64OR_0(v)
	case OpMIPS64ORconst:
		return rewriteValueMIPS64_OpMIPS64ORconst_0(v)
	case OpMIPS64SGT:
		return rewriteValueMIPS64_OpMIPS64SGT_0(v)
	case OpMIPS64SGTU:
		return rewriteValueMIPS64_OpMIPS64SGTU_0(v)
	case OpMIPS64SGTUconst:
		return rewriteValueMIPS64_OpMIPS64SGTUconst_0(v)
	case OpMIPS64SGTconst:
		return rewriteValueMIPS64_OpMIPS64SGTconst_0(v) || rewriteValueMIPS64_OpMIPS64SGTconst_10(v)
	case OpMIPS64SLLV:
		return rewriteValueMIPS64_OpMIPS64SLLV_0(v)
	case OpMIPS64SLLVconst:
		return rewriteValueMIPS64_OpMIPS64SLLVconst_0(v)
	case OpMIPS64SRAV:
		return rewriteValueMIPS64_OpMIPS64SRAV_0(v)
	case OpMIPS64SRAVconst:
		return rewriteValueMIPS64_OpMIPS64SRAVconst_0(v)
	case OpMIPS64SRLV:
		return rewriteValueMIPS64_OpMIPS64SRLV_0(v)
	case OpMIPS64SRLVconst:
		return rewriteValueMIPS64_OpMIPS64SRLVconst_0(v)
	case OpMIPS64SUBV:
		return rewriteValueMIPS64_OpMIPS64SUBV_0(v)
	case OpMIPS64SUBVconst:
		return rewriteValueMIPS64_OpMIPS64SUBVconst_0(v)
	case OpMIPS64XOR:
		return rewriteValueMIPS64_OpMIPS64XOR_0(v)
	case OpMIPS64XORconst:
		return rewriteValueMIPS64_OpMIPS64XORconst_0(v)
	case OpMod16:
		return rewriteValueMIPS64_OpMod16_0(v)
	case OpMod16u:
		return rewriteValueMIPS64_OpMod16u_0(v)
	case OpMod32:
		return rewriteValueMIPS64_OpMod32_0(v)
	case OpMod32u:
		return rewriteValueMIPS64_OpMod32u_0(v)
	case OpMod64:
		return rewriteValueMIPS64_OpMod64_0(v)
	case OpMod64u:
		return rewriteValueMIPS64_OpMod64u_0(v)
	case OpMod8:
		return rewriteValueMIPS64_OpMod8_0(v)
	case OpMod8u:
		return rewriteValueMIPS64_OpMod8u_0(v)
	case OpMove:
		return rewriteValueMIPS64_OpMove_0(v) || rewriteValueMIPS64_OpMove_10(v)
	case OpMul16:
		return rewriteValueMIPS64_OpMul16_0(v)
	case OpMul32:
		return rewriteValueMIPS64_OpMul32_0(v)
	case OpMul32F:
		return rewriteValueMIPS64_OpMul32F_0(v)
	case OpMul64:
		return rewriteValueMIPS64_OpMul64_0(v)
	case OpMul64F:
		return rewriteValueMIPS64_OpMul64F_0(v)
	case OpMul8:
		return rewriteValueMIPS64_OpMul8_0(v)
	case OpNeg16:
		return rewriteValueMIPS64_OpNeg16_0(v)
	case OpNeg32:
		return rewriteValueMIPS64_OpNeg32_0(v)
	case OpNeg32F:
		return rewriteValueMIPS64_OpNeg32F_0(v)
	case OpNeg64:
		return rewriteValueMIPS64_OpNeg64_0(v)
	case OpNeg64F:
		return rewriteValueMIPS64_OpNeg64F_0(v)
	case OpNeg8:
		return rewriteValueMIPS64_OpNeg8_0(v)
	case OpNeq16:
		return rewriteValueMIPS64_OpNeq16_0(v)
	case OpNeq32:
		return rewriteValueMIPS64_OpNeq32_0(v)
	case OpNeq32F:
		return rewriteValueMIPS64_OpNeq32F_0(v)
	case OpNeq64:
		return rewriteValueMIPS64_OpNeq64_0(v)
	case OpNeq64F:
		return rewriteValueMIPS64_OpNeq64F_0(v)
	case OpNeq8:
		return rewriteValueMIPS64_OpNeq8_0(v)
	case OpNeqB:
		return rewriteValueMIPS64_OpNeqB_0(v)
	case OpNeqPtr:
		return rewriteValueMIPS64_OpNeqPtr_0(v)
	case OpNilCheck:
		return rewriteValueMIPS64_OpNilCheck_0(v)
	case OpNot:
		return rewriteValueMIPS64_OpNot_0(v)
	case OpOffPtr:
		return rewriteValueMIPS64_OpOffPtr_0(v)
	case OpOr16:
		return rewriteValueMIPS64_OpOr16_0(v)
	case OpOr32:
		return rewriteValueMIPS64_OpOr32_0(v)
	case OpOr64:
		return rewriteValueMIPS64_OpOr64_0(v)
	case OpOr8:
		return rewriteValueMIPS64_OpOr8_0(v)
	case OpOrB:
		return rewriteValueMIPS64_OpOrB_0(v)
	case OpRound32F:
		return rewriteValueMIPS64_OpRound32F_0(v)
	case OpRound64F:
		return rewriteValueMIPS64_OpRound64F_0(v)
	case OpRsh16Ux16:
		return rewriteValueMIPS64_OpRsh16Ux16_0(v)
	case OpRsh16Ux32:
		return rewriteValueMIPS64_OpRsh16Ux32_0(v)
	case OpRsh16Ux64:
		return rewriteValueMIPS64_OpRsh16Ux64_0(v)
	case OpRsh16Ux8:
		return rewriteValueMIPS64_OpRsh16Ux8_0(v)
	case OpRsh16x16:
		return rewriteValueMIPS64_OpRsh16x16_0(v)
	case OpRsh16x32:
		return rewriteValueMIPS64_OpRsh16x32_0(v)
	case OpRsh16x64:
		return rewriteValueMIPS64_OpRsh16x64_0(v)
	case OpRsh16x8:
		return rewriteValueMIPS64_OpRsh16x8_0(v)
	case OpRsh32Ux16:
		return rewriteValueMIPS64_OpRsh32Ux16_0(v)
	case OpRsh32Ux32:
		return rewriteValueMIPS64_OpRsh32Ux32_0(v)
	case OpRsh32Ux64:
		return rewriteValueMIPS64_OpRsh32Ux64_0(v)
	case OpRsh32Ux8:
		return rewriteValueMIPS64_OpRsh32Ux8_0(v)
	case OpRsh32x16:
		return rewriteValueMIPS64_OpRsh32x16_0(v)
	case OpRsh32x32:
		return rewriteValueMIPS64_OpRsh32x32_0(v)
	case OpRsh32x64:
		return rewriteValueMIPS64_OpRsh32x64_0(v)
	case OpRsh32x8:
		return rewriteValueMIPS64_OpRsh32x8_0(v)
	case OpRsh64Ux16:
		return rewriteValueMIPS64_OpRsh64Ux16_0(v)
	case OpRsh64Ux32:
		return rewriteValueMIPS64_OpRsh64Ux32_0(v)
	case OpRsh64Ux64:
		return rewriteValueMIPS64_OpRsh64Ux64_0(v)
	case OpRsh64Ux8:
		return rewriteValueMIPS64_OpRsh64Ux8_0(v)
	case OpRsh64x16:
		return rewriteValueMIPS64_OpRsh64x16_0(v)
	case OpRsh64x32:
		return rewriteValueMIPS64_OpRsh64x32_0(v)
	case OpRsh64x64:
		return rewriteValueMIPS64_OpRsh64x64_0(v)
	case OpRsh64x8:
		return rewriteValueMIPS64_OpRsh64x8_0(v)
	case OpRsh8Ux16:
		return rewriteValueMIPS64_OpRsh8Ux16_0(v)
	case OpRsh8Ux32:
		return rewriteValueMIPS64_OpRsh8Ux32_0(v)
	case OpRsh8Ux64:
		return rewriteValueMIPS64_OpRsh8Ux64_0(v)
	case OpRsh8Ux8:
		return rewriteValueMIPS64_OpRsh8Ux8_0(v)
	case OpRsh8x16:
		return rewriteValueMIPS64_OpRsh8x16_0(v)
	case OpRsh8x32:
		return rewriteValueMIPS64_OpRsh8x32_0(v)
	case OpRsh8x64:
		return rewriteValueMIPS64_OpRsh8x64_0(v)
	case OpRsh8x8:
		return rewriteValueMIPS64_OpRsh8x8_0(v)
	case OpSelect0:
		return rewriteValueMIPS64_OpSelect0_0(v)
	case OpSelect1:
		return rewriteValueMIPS64_OpSelect1_0(v) || rewriteValueMIPS64_OpSelect1_10(v) || rewriteValueMIPS64_OpSelect1_20(v)
	case OpSignExt16to32:
		return rewriteValueMIPS64_OpSignExt16to32_0(v)
	case OpSignExt16to64:
		return rewriteValueMIPS64_OpSignExt16to64_0(v)
	case OpSignExt32to64:
		return rewriteValueMIPS64_OpSignExt32to64_0(v)
	case OpSignExt8to16:
		return rewriteValueMIPS64_OpSignExt8to16_0(v)
	case OpSignExt8to32:
		return rewriteValueMIPS64_OpSignExt8to32_0(v)
	case OpSignExt8to64:
		return rewriteValueMIPS64_OpSignExt8to64_0(v)
	case OpSlicemask:
		return rewriteValueMIPS64_OpSlicemask_0(v)
	case OpStaticCall:
		return rewriteValueMIPS64_OpStaticCall_0(v)
	case OpStore:
		return rewriteValueMIPS64_OpStore_0(v)
	case OpSub16:
		return rewriteValueMIPS64_OpSub16_0(v)
	case OpSub32:
		return rewriteValueMIPS64_OpSub32_0(v)
	case OpSub32F:
		return rewriteValueMIPS64_OpSub32F_0(v)
	case OpSub64:
		return rewriteValueMIPS64_OpSub64_0(v)
	case OpSub64F:
		return rewriteValueMIPS64_OpSub64F_0(v)
	case OpSub8:
		return rewriteValueMIPS64_OpSub8_0(v)
	case OpSubPtr:
		return rewriteValueMIPS64_OpSubPtr_0(v)
	case OpTrunc16to8:
		return rewriteValueMIPS64_OpTrunc16to8_0(v)
	case OpTrunc32to16:
		return rewriteValueMIPS64_OpTrunc32to16_0(v)
	case OpTrunc32to8:
		return rewriteValueMIPS64_OpTrunc32to8_0(v)
	case OpTrunc64to16:
		return rewriteValueMIPS64_OpTrunc64to16_0(v)
	case OpTrunc64to32:
		return rewriteValueMIPS64_OpTrunc64to32_0(v)
	case OpTrunc64to8:
		return rewriteValueMIPS64_OpTrunc64to8_0(v)
	case OpXor16:
		return rewriteValueMIPS64_OpXor16_0(v)
	case OpXor32:
		return rewriteValueMIPS64_OpXor32_0(v)
	case OpXor64:
		return rewriteValueMIPS64_OpXor64_0(v)
	case OpXor8:
		return rewriteValueMIPS64_OpXor8_0(v)
	case OpZero:
		return rewriteValueMIPS64_OpZero_0(v) || rewriteValueMIPS64_OpZero_10(v)
	case OpZeroExt16to32:
		return rewriteValueMIPS64_OpZeroExt16to32_0(v)
	case OpZeroExt16to64:
		return rewriteValueMIPS64_OpZeroExt16to64_0(v)
	case OpZeroExt32to64:
		return rewriteValueMIPS64_OpZeroExt32to64_0(v)
	case OpZeroExt8to16:
		return rewriteValueMIPS64_OpZeroExt8to16_0(v)
	case OpZeroExt8to32:
		return rewriteValueMIPS64_OpZeroExt8to32_0(v)
	case OpZeroExt8to64:
		return rewriteValueMIPS64_OpZeroExt8to64_0(v)
	}
	// No rule matched for this opcode; v is left untouched.
	return false
}
   573  func rewriteValueMIPS64_OpAdd16_0(v *Value) bool {
   574  	// match: (Add16 x y)
   575  	// cond:
   576  	// result: (ADDV x y)
   577  	for {
   578  		x := v.Args[0]
   579  		y := v.Args[1]
   580  		v.reset(OpMIPS64ADDV)
   581  		v.AddArg(x)
   582  		v.AddArg(y)
   583  		return true
   584  	}
   585  }
   586  func rewriteValueMIPS64_OpAdd32_0(v *Value) bool {
   587  	// match: (Add32 x y)
   588  	// cond:
   589  	// result: (ADDV x y)
   590  	for {
   591  		x := v.Args[0]
   592  		y := v.Args[1]
   593  		v.reset(OpMIPS64ADDV)
   594  		v.AddArg(x)
   595  		v.AddArg(y)
   596  		return true
   597  	}
   598  }
   599  func rewriteValueMIPS64_OpAdd32F_0(v *Value) bool {
   600  	// match: (Add32F x y)
   601  	// cond:
   602  	// result: (ADDF x y)
   603  	for {
   604  		x := v.Args[0]
   605  		y := v.Args[1]
   606  		v.reset(OpMIPS64ADDF)
   607  		v.AddArg(x)
   608  		v.AddArg(y)
   609  		return true
   610  	}
   611  }
   612  func rewriteValueMIPS64_OpAdd64_0(v *Value) bool {
   613  	// match: (Add64 x y)
   614  	// cond:
   615  	// result: (ADDV x y)
   616  	for {
   617  		x := v.Args[0]
   618  		y := v.Args[1]
   619  		v.reset(OpMIPS64ADDV)
   620  		v.AddArg(x)
   621  		v.AddArg(y)
   622  		return true
   623  	}
   624  }
   625  func rewriteValueMIPS64_OpAdd64F_0(v *Value) bool {
   626  	// match: (Add64F x y)
   627  	// cond:
   628  	// result: (ADDD x y)
   629  	for {
   630  		x := v.Args[0]
   631  		y := v.Args[1]
   632  		v.reset(OpMIPS64ADDD)
   633  		v.AddArg(x)
   634  		v.AddArg(y)
   635  		return true
   636  	}
   637  }
   638  func rewriteValueMIPS64_OpAdd8_0(v *Value) bool {
   639  	// match: (Add8 x y)
   640  	// cond:
   641  	// result: (ADDV x y)
   642  	for {
   643  		x := v.Args[0]
   644  		y := v.Args[1]
   645  		v.reset(OpMIPS64ADDV)
   646  		v.AddArg(x)
   647  		v.AddArg(y)
   648  		return true
   649  	}
   650  }
   651  func rewriteValueMIPS64_OpAddPtr_0(v *Value) bool {
   652  	// match: (AddPtr x y)
   653  	// cond:
   654  	// result: (ADDV x y)
   655  	for {
   656  		x := v.Args[0]
   657  		y := v.Args[1]
   658  		v.reset(OpMIPS64ADDV)
   659  		v.AddArg(x)
   660  		v.AddArg(y)
   661  		return true
   662  	}
   663  }
   664  func rewriteValueMIPS64_OpAddr_0(v *Value) bool {
   665  	// match: (Addr {sym} base)
   666  	// cond:
   667  	// result: (MOVVaddr {sym} base)
   668  	for {
   669  		sym := v.Aux
   670  		base := v.Args[0]
   671  		v.reset(OpMIPS64MOVVaddr)
   672  		v.Aux = sym
   673  		v.AddArg(base)
   674  		return true
   675  	}
   676  }
   677  func rewriteValueMIPS64_OpAnd16_0(v *Value) bool {
   678  	// match: (And16 x y)
   679  	// cond:
   680  	// result: (AND x y)
   681  	for {
   682  		x := v.Args[0]
   683  		y := v.Args[1]
   684  		v.reset(OpMIPS64AND)
   685  		v.AddArg(x)
   686  		v.AddArg(y)
   687  		return true
   688  	}
   689  }
   690  func rewriteValueMIPS64_OpAnd32_0(v *Value) bool {
   691  	// match: (And32 x y)
   692  	// cond:
   693  	// result: (AND x y)
   694  	for {
   695  		x := v.Args[0]
   696  		y := v.Args[1]
   697  		v.reset(OpMIPS64AND)
   698  		v.AddArg(x)
   699  		v.AddArg(y)
   700  		return true
   701  	}
   702  }
   703  func rewriteValueMIPS64_OpAnd64_0(v *Value) bool {
   704  	// match: (And64 x y)
   705  	// cond:
   706  	// result: (AND x y)
   707  	for {
   708  		x := v.Args[0]
   709  		y := v.Args[1]
   710  		v.reset(OpMIPS64AND)
   711  		v.AddArg(x)
   712  		v.AddArg(y)
   713  		return true
   714  	}
   715  }
   716  func rewriteValueMIPS64_OpAnd8_0(v *Value) bool {
   717  	// match: (And8 x y)
   718  	// cond:
   719  	// result: (AND x y)
   720  	for {
   721  		x := v.Args[0]
   722  		y := v.Args[1]
   723  		v.reset(OpMIPS64AND)
   724  		v.AddArg(x)
   725  		v.AddArg(y)
   726  		return true
   727  	}
   728  }
   729  func rewriteValueMIPS64_OpAndB_0(v *Value) bool {
   730  	// match: (AndB x y)
   731  	// cond:
   732  	// result: (AND x y)
   733  	for {
   734  		x := v.Args[0]
   735  		y := v.Args[1]
   736  		v.reset(OpMIPS64AND)
   737  		v.AddArg(x)
   738  		v.AddArg(y)
   739  		return true
   740  	}
   741  }
   742  func rewriteValueMIPS64_OpAvg64u_0(v *Value) bool {
   743  	b := v.Block
   744  	_ = b
   745  	// match: (Avg64u <t> x y)
   746  	// cond:
   747  	// result: (ADDV (SRLVconst <t> (SUBV <t> x y) [1]) y)
   748  	for {
   749  		t := v.Type
   750  		x := v.Args[0]
   751  		y := v.Args[1]
   752  		v.reset(OpMIPS64ADDV)
   753  		v0 := b.NewValue0(v.Pos, OpMIPS64SRLVconst, t)
   754  		v0.AuxInt = 1
   755  		v1 := b.NewValue0(v.Pos, OpMIPS64SUBV, t)
   756  		v1.AddArg(x)
   757  		v1.AddArg(y)
   758  		v0.AddArg(v1)
   759  		v.AddArg(v0)
   760  		v.AddArg(y)
   761  		return true
   762  	}
   763  }
   764  func rewriteValueMIPS64_OpClosureCall_0(v *Value) bool {
   765  	// match: (ClosureCall [argwid] entry closure mem)
   766  	// cond:
   767  	// result: (CALLclosure [argwid] entry closure mem)
   768  	for {
   769  		argwid := v.AuxInt
   770  		entry := v.Args[0]
   771  		closure := v.Args[1]
   772  		mem := v.Args[2]
   773  		v.reset(OpMIPS64CALLclosure)
   774  		v.AuxInt = argwid
   775  		v.AddArg(entry)
   776  		v.AddArg(closure)
   777  		v.AddArg(mem)
   778  		return true
   779  	}
   780  }
   781  func rewriteValueMIPS64_OpCom16_0(v *Value) bool {
   782  	b := v.Block
   783  	_ = b
   784  	types := &b.Func.Config.Types
   785  	_ = types
   786  	// match: (Com16 x)
   787  	// cond:
   788  	// result: (NOR (MOVVconst [0]) x)
   789  	for {
   790  		x := v.Args[0]
   791  		v.reset(OpMIPS64NOR)
   792  		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
   793  		v0.AuxInt = 0
   794  		v.AddArg(v0)
   795  		v.AddArg(x)
   796  		return true
   797  	}
   798  }
   799  func rewriteValueMIPS64_OpCom32_0(v *Value) bool {
   800  	b := v.Block
   801  	_ = b
   802  	types := &b.Func.Config.Types
   803  	_ = types
   804  	// match: (Com32 x)
   805  	// cond:
   806  	// result: (NOR (MOVVconst [0]) x)
   807  	for {
   808  		x := v.Args[0]
   809  		v.reset(OpMIPS64NOR)
   810  		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
   811  		v0.AuxInt = 0
   812  		v.AddArg(v0)
   813  		v.AddArg(x)
   814  		return true
   815  	}
   816  }
   817  func rewriteValueMIPS64_OpCom64_0(v *Value) bool {
   818  	b := v.Block
   819  	_ = b
   820  	types := &b.Func.Config.Types
   821  	_ = types
   822  	// match: (Com64 x)
   823  	// cond:
   824  	// result: (NOR (MOVVconst [0]) x)
   825  	for {
   826  		x := v.Args[0]
   827  		v.reset(OpMIPS64NOR)
   828  		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
   829  		v0.AuxInt = 0
   830  		v.AddArg(v0)
   831  		v.AddArg(x)
   832  		return true
   833  	}
   834  }
   835  func rewriteValueMIPS64_OpCom8_0(v *Value) bool {
   836  	b := v.Block
   837  	_ = b
   838  	types := &b.Func.Config.Types
   839  	_ = types
   840  	// match: (Com8 x)
   841  	// cond:
   842  	// result: (NOR (MOVVconst [0]) x)
   843  	for {
   844  		x := v.Args[0]
   845  		v.reset(OpMIPS64NOR)
   846  		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
   847  		v0.AuxInt = 0
   848  		v.AddArg(v0)
   849  		v.AddArg(x)
   850  		return true
   851  	}
   852  }
   853  func rewriteValueMIPS64_OpConst16_0(v *Value) bool {
   854  	// match: (Const16 [val])
   855  	// cond:
   856  	// result: (MOVVconst [val])
   857  	for {
   858  		val := v.AuxInt
   859  		v.reset(OpMIPS64MOVVconst)
   860  		v.AuxInt = val
   861  		return true
   862  	}
   863  }
// rewriteValueMIPS64_OpConst32_0 lowers (Const32 [val]) to (MOVVconst [val]).
// The one-iteration for loop is generator boilerplate: the pattern is
// unconditional, so the loop body always fires and returns true.
func rewriteValueMIPS64_OpConst32_0(v *Value) bool {
	// match: (Const32 [val])
	// cond:
	// result: (MOVVconst [val])
	for {
		val := v.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = val
		return true
	}
}

// rewriteValueMIPS64_OpConst32F_0 lowers (Const32F [val]) to (MOVFconst [val]);
// the float bit pattern rides along unchanged in AuxInt.
func rewriteValueMIPS64_OpConst32F_0(v *Value) bool {
	// match: (Const32F [val])
	// cond:
	// result: (MOVFconst [val])
	for {
		val := v.AuxInt
		v.reset(OpMIPS64MOVFconst)
		v.AuxInt = val
		return true
	}
}

// rewriteValueMIPS64_OpConst64_0 lowers (Const64 [val]) to (MOVVconst [val]).
func rewriteValueMIPS64_OpConst64_0(v *Value) bool {
	// match: (Const64 [val])
	// cond:
	// result: (MOVVconst [val])
	for {
		val := v.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = val
		return true
	}
}

// rewriteValueMIPS64_OpConst64F_0 lowers (Const64F [val]) to (MOVDconst [val]).
func rewriteValueMIPS64_OpConst64F_0(v *Value) bool {
	// match: (Const64F [val])
	// cond:
	// result: (MOVDconst [val])
	for {
		val := v.AuxInt
		v.reset(OpMIPS64MOVDconst)
		v.AuxInt = val
		return true
	}
}

// rewriteValueMIPS64_OpConst8_0 lowers (Const8 [val]) to (MOVVconst [val]).
func rewriteValueMIPS64_OpConst8_0(v *Value) bool {
	// match: (Const8 [val])
	// cond:
	// result: (MOVVconst [val])
	for {
		val := v.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = val
		return true
	}
}

// rewriteValueMIPS64_OpConstBool_0 lowers (ConstBool [b]) to (MOVVconst [b]);
// booleans are materialized as 0/1 integer constants.
func rewriteValueMIPS64_OpConstBool_0(v *Value) bool {
	// match: (ConstBool [b])
	// cond:
	// result: (MOVVconst [b])
	for {
		b := v.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = b
		return true
	}
}

// rewriteValueMIPS64_OpConstNil_0 lowers (ConstNil) to (MOVVconst [0]):
// nil pointers are the zero constant.
func rewriteValueMIPS64_OpConstNil_0(v *Value) bool {
	// match: (ConstNil)
	// cond:
	// result: (MOVVconst [0])
	for {
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
}
// rewriteValueMIPS64_OpConvert_0 lowers (Convert x mem) to (MOVVconvert x mem).
// The one-iteration for loop is generator boilerplate for an unconditional rule.
func rewriteValueMIPS64_OpConvert_0(v *Value) bool {
	// match: (Convert x mem)
	// cond:
	// result: (MOVVconvert x mem)
	for {
		x := v.Args[0]
		mem := v.Args[1]
		v.reset(OpMIPS64MOVVconvert)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
}

// rewriteValueMIPS64_OpCvt32Fto32_0 lowers (Cvt32Fto32 x) to (TRUNCFW x).
func rewriteValueMIPS64_OpCvt32Fto32_0(v *Value) bool {
	// match: (Cvt32Fto32 x)
	// cond:
	// result: (TRUNCFW x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64TRUNCFW)
		v.AddArg(x)
		return true
	}
}

// rewriteValueMIPS64_OpCvt32Fto64_0 lowers (Cvt32Fto64 x) to (TRUNCFV x).
func rewriteValueMIPS64_OpCvt32Fto64_0(v *Value) bool {
	// match: (Cvt32Fto64 x)
	// cond:
	// result: (TRUNCFV x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64TRUNCFV)
		v.AddArg(x)
		return true
	}
}

// rewriteValueMIPS64_OpCvt32Fto64F_0 lowers (Cvt32Fto64F x) to (MOVFD x).
func rewriteValueMIPS64_OpCvt32Fto64F_0(v *Value) bool {
	// match: (Cvt32Fto64F x)
	// cond:
	// result: (MOVFD x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64MOVFD)
		v.AddArg(x)
		return true
	}
}

// rewriteValueMIPS64_OpCvt32to32F_0 lowers (Cvt32to32F x) to (MOVWF x).
func rewriteValueMIPS64_OpCvt32to32F_0(v *Value) bool {
	// match: (Cvt32to32F x)
	// cond:
	// result: (MOVWF x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64MOVWF)
		v.AddArg(x)
		return true
	}
}

// rewriteValueMIPS64_OpCvt32to64F_0 lowers (Cvt32to64F x) to (MOVWD x).
func rewriteValueMIPS64_OpCvt32to64F_0(v *Value) bool {
	// match: (Cvt32to64F x)
	// cond:
	// result: (MOVWD x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64MOVWD)
		v.AddArg(x)
		return true
	}
}

// rewriteValueMIPS64_OpCvt64Fto32_0 lowers (Cvt64Fto32 x) to (TRUNCDW x).
func rewriteValueMIPS64_OpCvt64Fto32_0(v *Value) bool {
	// match: (Cvt64Fto32 x)
	// cond:
	// result: (TRUNCDW x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64TRUNCDW)
		v.AddArg(x)
		return true
	}
}

// rewriteValueMIPS64_OpCvt64Fto32F_0 lowers (Cvt64Fto32F x) to (MOVDF x).
func rewriteValueMIPS64_OpCvt64Fto32F_0(v *Value) bool {
	// match: (Cvt64Fto32F x)
	// cond:
	// result: (MOVDF x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64MOVDF)
		v.AddArg(x)
		return true
	}
}

// rewriteValueMIPS64_OpCvt64Fto64_0 lowers (Cvt64Fto64 x) to (TRUNCDV x).
func rewriteValueMIPS64_OpCvt64Fto64_0(v *Value) bool {
	// match: (Cvt64Fto64 x)
	// cond:
	// result: (TRUNCDV x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64TRUNCDV)
		v.AddArg(x)
		return true
	}
}

// rewriteValueMIPS64_OpCvt64to32F_0 lowers (Cvt64to32F x) to (MOVVF x).
func rewriteValueMIPS64_OpCvt64to32F_0(v *Value) bool {
	// match: (Cvt64to32F x)
	// cond:
	// result: (MOVVF x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64MOVVF)
		v.AddArg(x)
		return true
	}
}

// rewriteValueMIPS64_OpCvt64to64F_0 lowers (Cvt64to64F x) to (MOVVD x).
func rewriteValueMIPS64_OpCvt64to64F_0(v *Value) bool {
	// match: (Cvt64to64F x)
	// cond:
	// result: (MOVVD x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64MOVVD)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpDiv16_0 lowers (Div16 x y) to
// (Select1 (DIVV (SignExt16to64 x) (SignExt16to64 y))): narrow signed division
// is done by sign-extending both operands to 64 bits and taking the quotient
// half (Select1) of the DIVV tuple result.
func rewriteValueMIPS64_OpDiv16_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Div16 x y)
	// cond:
	// result: (Select1 (DIVV (SignExt16to64 x) (SignExt16to64 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, MakeTuple(types.Int64, types.Int64))
		v1 := b.NewValue0(v.Pos, OpSignExt16to64, types.Int64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpSignExt16to64, types.Int64)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueMIPS64_OpDiv16u_0 lowers (Div16u x y) to
// (Select1 (DIVVU (ZeroExt16to64 x) (ZeroExt16to64 y))): unsigned variant,
// zero-extending instead of sign-extending.
func rewriteValueMIPS64_OpDiv16u_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Div16u x y)
	// cond:
	// result: (Select1 (DIVVU (ZeroExt16to64 x) (ZeroExt16to64 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, MakeTuple(types.UInt64, types.UInt64))
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueMIPS64_OpDiv32_0 lowers (Div32 x y) to
// (Select1 (DIVV (SignExt32to64 x) (SignExt32to64 y))).
func rewriteValueMIPS64_OpDiv32_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Div32 x y)
	// cond:
	// result: (Select1 (DIVV (SignExt32to64 x) (SignExt32to64 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, MakeTuple(types.Int64, types.Int64))
		v1 := b.NewValue0(v.Pos, OpSignExt32to64, types.Int64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpSignExt32to64, types.Int64)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueMIPS64_OpDiv32F_0 lowers (Div32F x y) to (DIVF x y).
func rewriteValueMIPS64_OpDiv32F_0(v *Value) bool {
	// match: (Div32F x y)
	// cond:
	// result: (DIVF x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64DIVF)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueMIPS64_OpDiv32u_0 lowers (Div32u x y) to
// (Select1 (DIVVU (ZeroExt32to64 x) (ZeroExt32to64 y))).
func rewriteValueMIPS64_OpDiv32u_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Div32u x y)
	// cond:
	// result: (Select1 (DIVVU (ZeroExt32to64 x) (ZeroExt32to64 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, MakeTuple(types.UInt64, types.UInt64))
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueMIPS64_OpDiv64_0 lowers (Div64 x y) to (Select1 (DIVV x y));
// operands are already 64-bit, so no extension is needed.
func rewriteValueMIPS64_OpDiv64_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Div64 x y)
	// cond:
	// result: (Select1 (DIVV x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, MakeTuple(types.Int64, types.Int64))
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueMIPS64_OpDiv64F_0 lowers (Div64F x y) to (DIVD x y).
func rewriteValueMIPS64_OpDiv64F_0(v *Value) bool {
	// match: (Div64F x y)
	// cond:
	// result: (DIVD x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64DIVD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueMIPS64_OpDiv64u_0 lowers (Div64u x y) to (Select1 (DIVVU x y)).
func rewriteValueMIPS64_OpDiv64u_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Div64u x y)
	// cond:
	// result: (Select1 (DIVVU x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, MakeTuple(types.UInt64, types.UInt64))
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueMIPS64_OpDiv8_0 lowers (Div8 x y) to
// (Select1 (DIVV (SignExt8to64 x) (SignExt8to64 y))).
func rewriteValueMIPS64_OpDiv8_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Div8 x y)
	// cond:
	// result: (Select1 (DIVV (SignExt8to64 x) (SignExt8to64 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, MakeTuple(types.Int64, types.Int64))
		v1 := b.NewValue0(v.Pos, OpSignExt8to64, types.Int64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpSignExt8to64, types.Int64)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueMIPS64_OpDiv8u_0 lowers (Div8u x y) to
// (Select1 (DIVVU (ZeroExt8to64 x) (ZeroExt8to64 y))).
func rewriteValueMIPS64_OpDiv8u_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Div8u x y)
	// cond:
	// result: (Select1 (DIVVU (ZeroExt8to64 x) (ZeroExt8to64 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, MakeTuple(types.UInt64, types.UInt64))
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpEq16_0 lowers (Eq16 x y) to
// (SGTU (MOVVconst [1]) (XOR (ZeroExt16to64 x) (ZeroExt16to64 y))):
// x == y iff the XOR of the zero-extended operands is zero, i.e. the XOR is
// unsigned-less-than 1. MIPS64 has no direct compare-equal, so SGTU with a
// constant 1 does the job.
func rewriteValueMIPS64_OpEq16_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Eq16 x y)
	// cond:
	// result: (SGTU (MOVVconst [1]) (XOR (ZeroExt16to64 x) (ZeroExt16to64 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64XOR, types.UInt64)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
		v2.AddArg(x)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}

// rewriteValueMIPS64_OpEq32_0 lowers (Eq32 x y) to
// (SGTU (MOVVconst [1]) (XOR (ZeroExt32to64 x) (ZeroExt32to64 y))):
// same XOR-is-zero trick as Eq16, with 32-bit zero extension.
func rewriteValueMIPS64_OpEq32_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Eq32 x y)
	// cond:
	// result: (SGTU (MOVVconst [1]) (XOR (ZeroExt32to64 x) (ZeroExt32to64 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64XOR, types.UInt64)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v2.AddArg(x)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}

// rewriteValueMIPS64_OpEq32F_0 lowers (Eq32F x y) to
// (FPFlagTrue (CMPEQF x y)): FP equality reads the FP condition flag.
func rewriteValueMIPS64_OpEq32F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Eq32F x y)
	// cond:
	// result: (FPFlagTrue (CMPEQF x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPEQF, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueMIPS64_OpEq64_0 lowers (Eq64 x y) to
// (SGTU (MOVVconst [1]) (XOR x y)): full-width operands need no extension.
func rewriteValueMIPS64_OpEq64_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Eq64 x y)
	// cond:
	// result: (SGTU (MOVVconst [1]) (XOR x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64XOR, types.UInt64)
		v1.AddArg(x)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}

// rewriteValueMIPS64_OpEq64F_0 lowers (Eq64F x y) to (FPFlagTrue (CMPEQD x y)).
func rewriteValueMIPS64_OpEq64F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Eq64F x y)
	// cond:
	// result: (FPFlagTrue (CMPEQD x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPEQD, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueMIPS64_OpEq8_0 lowers (Eq8 x y) to
// (SGTU (MOVVconst [1]) (XOR (ZeroExt8to64 x) (ZeroExt8to64 y))).
func rewriteValueMIPS64_OpEq8_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Eq8 x y)
	// cond:
	// result: (SGTU (MOVVconst [1]) (XOR (ZeroExt8to64 x) (ZeroExt8to64 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64XOR, types.UInt64)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v2.AddArg(x)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}

// rewriteValueMIPS64_OpEqB_0 lowers (EqB x y) to
// (XOR (MOVVconst [1]) (XOR <types.Bool> x y)): for 0/1 booleans,
// equality is the complement of XOR, computed as 1 ^ (x ^ y).
func rewriteValueMIPS64_OpEqB_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (EqB x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (XOR <types.Bool> x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64XOR, types.Bool)
		v1.AddArg(x)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}

// rewriteValueMIPS64_OpEqPtr_0 lowers (EqPtr x y) to
// (SGTU (MOVVconst [1]) (XOR x y)): pointer equality uses the same
// full-width XOR-is-zero pattern as Eq64.
func rewriteValueMIPS64_OpEqPtr_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (EqPtr x y)
	// cond:
	// result: (SGTU (MOVVconst [1]) (XOR x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64XOR, types.UInt64)
		v1.AddArg(x)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpGeq16_0 lowers (Geq16 x y) to
// (XOR (MOVVconst [1]) (SGT (SignExt16to64 y) (SignExt16to64 x))):
// x >= y is computed as !(y > x), with the negation done by XOR-ing the
// 0/1 SGT result with 1. Note the swapped operand order in the SGT.
func rewriteValueMIPS64_OpGeq16_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Geq16 x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGT (SignExt16to64 y) (SignExt16to64 x)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGT, types.Bool)
		v2 := b.NewValue0(v.Pos, OpSignExt16to64, types.Int64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpSignExt16to64, types.Int64)
		v3.AddArg(x)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}

// rewriteValueMIPS64_OpGeq16U_0 lowers (Geq16U x y) to
// (XOR (MOVVconst [1]) (SGTU (ZeroExt16to64 y) (ZeroExt16to64 x))):
// unsigned variant of Geq16, i.e. !(y >u x).
func rewriteValueMIPS64_OpGeq16U_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Geq16U x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt16to64 y) (ZeroExt16to64 x)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
		v3.AddArg(x)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}

// rewriteValueMIPS64_OpGeq32_0 lowers (Geq32 x y) to
// (XOR (MOVVconst [1]) (SGT (SignExt32to64 y) (SignExt32to64 x))).
func rewriteValueMIPS64_OpGeq32_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Geq32 x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGT (SignExt32to64 y) (SignExt32to64 x)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGT, types.Bool)
		v2 := b.NewValue0(v.Pos, OpSignExt32to64, types.Int64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpSignExt32to64, types.Int64)
		v3.AddArg(x)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}

// rewriteValueMIPS64_OpGeq32F_0 lowers (Geq32F x y) to
// (FPFlagTrue (CMPGEF x y)): FP >= reads the FP condition flag directly,
// so no boolean negation is needed.
func rewriteValueMIPS64_OpGeq32F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Geq32F x y)
	// cond:
	// result: (FPFlagTrue (CMPGEF x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPGEF, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueMIPS64_OpGeq32U_0 lowers (Geq32U x y) to
// (XOR (MOVVconst [1]) (SGTU (ZeroExt32to64 y) (ZeroExt32to64 x))).
func rewriteValueMIPS64_OpGeq32U_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Geq32U x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt32to64 y) (ZeroExt32to64 x)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v3.AddArg(x)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}

// rewriteValueMIPS64_OpGeq64_0 lowers (Geq64 x y) to
// (XOR (MOVVconst [1]) (SGT y x)): !(y > x) at full width.
func rewriteValueMIPS64_OpGeq64_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Geq64 x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGT y x))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGT, types.Bool)
		v1.AddArg(y)
		v1.AddArg(x)
		v.AddArg(v1)
		return true
	}
}

// rewriteValueMIPS64_OpGeq64F_0 lowers (Geq64F x y) to (FPFlagTrue (CMPGED x y)).
func rewriteValueMIPS64_OpGeq64F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Geq64F x y)
	// cond:
	// result: (FPFlagTrue (CMPGED x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPGED, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueMIPS64_OpGeq64U_0 lowers (Geq64U x y) to
// (XOR (MOVVconst [1]) (SGTU y x)): !(y >u x) at full width.
func rewriteValueMIPS64_OpGeq64U_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Geq64U x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGTU y x))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v1.AddArg(y)
		v1.AddArg(x)
		v.AddArg(v1)
		return true
	}
}

// rewriteValueMIPS64_OpGeq8_0 lowers (Geq8 x y) to
// (XOR (MOVVconst [1]) (SGT (SignExt8to64 y) (SignExt8to64 x))).
func rewriteValueMIPS64_OpGeq8_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Geq8 x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGT (SignExt8to64 y) (SignExt8to64 x)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGT, types.Bool)
		v2 := b.NewValue0(v.Pos, OpSignExt8to64, types.Int64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpSignExt8to64, types.Int64)
		v3.AddArg(x)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}

// rewriteValueMIPS64_OpGeq8U_0 lowers (Geq8U x y) to
// (XOR (MOVVconst [1]) (SGTU (ZeroExt8to64 y) (ZeroExt8to64 x))).
func rewriteValueMIPS64_OpGeq8U_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Geq8U x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt8to64 y) (ZeroExt8to64 x)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v3.AddArg(x)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpGetClosurePtr_0 lowers (GetClosurePtr) to the
// MIPS64-specific (LoweredGetClosurePtr) pseudo-op.
func rewriteValueMIPS64_OpGetClosurePtr_0(v *Value) bool {
	// match: (GetClosurePtr)
	// cond:
	// result: (LoweredGetClosurePtr)
	for {
		v.reset(OpMIPS64LoweredGetClosurePtr)
		return true
	}
}
// rewriteValueMIPS64_OpGreater16_0 lowers (Greater16 x y) to
// (SGT (SignExt16to64 x) (SignExt16to64 y)): signed set-on-greater-than
// over the sign-extended operands. The one-iteration for loop is generator
// boilerplate for an unconditional rule.
func rewriteValueMIPS64_OpGreater16_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Greater16 x y)
	// cond:
	// result: (SGT (SignExt16to64 x) (SignExt16to64 y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGT)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, types.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpSignExt16to64, types.Int64)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}

// rewriteValueMIPS64_OpGreater16U_0 lowers (Greater16U x y) to
// (SGTU (ZeroExt16to64 x) (ZeroExt16to64 y)): unsigned variant.
func rewriteValueMIPS64_OpGreater16U_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Greater16U x y)
	// cond:
	// result: (SGTU (ZeroExt16to64 x) (ZeroExt16to64 y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}

// rewriteValueMIPS64_OpGreater32_0 lowers (Greater32 x y) to
// (SGT (SignExt32to64 x) (SignExt32to64 y)).
func rewriteValueMIPS64_OpGreater32_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Greater32 x y)
	// cond:
	// result: (SGT (SignExt32to64 x) (SignExt32to64 y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGT)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, types.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpSignExt32to64, types.Int64)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}

// rewriteValueMIPS64_OpGreater32F_0 lowers (Greater32F x y) to
// (FPFlagTrue (CMPGTF x y)): FP compare sets the FP condition flag.
func rewriteValueMIPS64_OpGreater32F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Greater32F x y)
	// cond:
	// result: (FPFlagTrue (CMPGTF x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPGTF, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueMIPS64_OpGreater32U_0 lowers (Greater32U x y) to
// (SGTU (ZeroExt32to64 x) (ZeroExt32to64 y)).
func rewriteValueMIPS64_OpGreater32U_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Greater32U x y)
	// cond:
	// result: (SGTU (ZeroExt32to64 x) (ZeroExt32to64 y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}

// rewriteValueMIPS64_OpGreater64_0 lowers (Greater64 x y) to (SGT x y);
// full-width operands need no extension.
func rewriteValueMIPS64_OpGreater64_0(v *Value) bool {
	// match: (Greater64 x y)
	// cond:
	// result: (SGT x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGT)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueMIPS64_OpGreater64F_0 lowers (Greater64F x y) to
// (FPFlagTrue (CMPGTD x y)).
func rewriteValueMIPS64_OpGreater64F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Greater64F x y)
	// cond:
	// result: (FPFlagTrue (CMPGTD x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPGTD, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueMIPS64_OpGreater64U_0 lowers (Greater64U x y) to (SGTU x y).
func rewriteValueMIPS64_OpGreater64U_0(v *Value) bool {
	// match: (Greater64U x y)
	// cond:
	// result: (SGTU x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueMIPS64_OpGreater8_0 lowers (Greater8 x y) to
// (SGT (SignExt8to64 x) (SignExt8to64 y)).
func rewriteValueMIPS64_OpGreater8_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Greater8 x y)
	// cond:
	// result: (SGT (SignExt8to64 x) (SignExt8to64 y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGT)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, types.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpSignExt8to64, types.Int64)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}

// rewriteValueMIPS64_OpGreater8U_0 lowers (Greater8U x y) to
// (SGTU (ZeroExt8to64 x) (ZeroExt8to64 y)).
func rewriteValueMIPS64_OpGreater8U_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Greater8U x y)
	// cond:
	// result: (SGTU (ZeroExt8to64 x) (ZeroExt8to64 y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpHmul32_0 lowers (Hmul32 x y) to
// (SRAVconst (Select1 <types.Int64> (MULV (SignExt32to64 x) (SignExt32to64 y))) [32]):
// the high 32 bits of a 32x32 signed multiply are the low half (Select1) of
// the 64-bit product, arithmetic-shifted right by 32.
func rewriteValueMIPS64_OpHmul32_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Hmul32 x y)
	// cond:
	// result: (SRAVconst (Select1 <types.Int64> (MULV (SignExt32to64 x) (SignExt32to64 y))) [32])
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAVconst)
		v.AuxInt = 32
		v0 := b.NewValue0(v.Pos, OpSelect1, types.Int64)
		v1 := b.NewValue0(v.Pos, OpMIPS64MULV, MakeTuple(types.Int64, types.Int64))
		v2 := b.NewValue0(v.Pos, OpSignExt32to64, types.Int64)
		v2.AddArg(x)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpSignExt32to64, types.Int64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueMIPS64_OpHmul32u_0 lowers (Hmul32u x y) to
// (SRLVconst (Select1 <types.UInt64> (MULVU (ZeroExt32to64 x) (ZeroExt32to64 y))) [32]):
// unsigned variant — logical shift and zero extension.
func rewriteValueMIPS64_OpHmul32u_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Hmul32u x y)
	// cond:
	// result: (SRLVconst (Select1 <types.UInt64> (MULVU (ZeroExt32to64 x) (ZeroExt32to64 y))) [32])
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRLVconst)
		v.AuxInt = 32
		v0 := b.NewValue0(v.Pos, OpSelect1, types.UInt64)
		v1 := b.NewValue0(v.Pos, OpMIPS64MULVU, MakeTuple(types.UInt64, types.UInt64))
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v2.AddArg(x)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueMIPS64_OpHmul64_0 lowers (Hmul64 x y) to (Select0 (MULV x y)):
// the high half of the 128-bit signed product is element 0 of the MULV tuple.
func rewriteValueMIPS64_OpHmul64_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Hmul64 x y)
	// cond:
	// result: (Select0 (MULV x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPS64MULV, MakeTuple(types.Int64, types.Int64))
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueMIPS64_OpHmul64u_0 lowers (Hmul64u x y) to (Select0 (MULVU x y)).
func rewriteValueMIPS64_OpHmul64u_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Hmul64u x y)
	// cond:
	// result: (Select0 (MULVU x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, MakeTuple(types.UInt64, types.UInt64))
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
  1962  func rewriteValueMIPS64_OpInterCall_0(v *Value) bool {
  1963  	// match: (InterCall [argwid] entry mem)
  1964  	// cond:
  1965  	// result: (CALLinter [argwid] entry mem)
  1966  	for {
  1967  		argwid := v.AuxInt
  1968  		entry := v.Args[0]
  1969  		mem := v.Args[1]
  1970  		v.reset(OpMIPS64CALLinter)
  1971  		v.AuxInt = argwid
  1972  		v.AddArg(entry)
  1973  		v.AddArg(mem)
  1974  		return true
  1975  	}
  1976  }
  1977  func rewriteValueMIPS64_OpIsInBounds_0(v *Value) bool {
  1978  	// match: (IsInBounds idx len)
  1979  	// cond:
  1980  	// result: (SGTU len idx)
  1981  	for {
  1982  		idx := v.Args[0]
  1983  		len := v.Args[1]
  1984  		v.reset(OpMIPS64SGTU)
  1985  		v.AddArg(len)
  1986  		v.AddArg(idx)
  1987  		return true
  1988  	}
  1989  }
// rewriteValueMIPS64_OpIsNonNil_0 lowers a nil check to an unsigned compare
// against a zero constant: ptr is non-nil iff ptr >u 0. Always fires.
func rewriteValueMIPS64_OpIsNonNil_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (IsNonNil ptr)
	// cond:
	// result: (SGTU ptr (MOVVconst [0]))
	for {
		ptr := v.Args[0]
		v.reset(OpMIPS64SGTU)
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpIsSliceInBounds_0 lowers the slice bounds check
// (idx <= len, unsigned) as the boolean negation of idx >u len: MIPS64 has no
// "less or equal" compare, so the rule computes (SGTU idx len) and flips the
// 0/1 result by XORing with constant 1.
func rewriteValueMIPS64_OpIsSliceInBounds_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (IsSliceInBounds idx len)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGTU idx len))
	for {
		idx := v.Args[0]
		len := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v1.AddArg(idx)
		v1.AddArg(len)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpLeq16_0 lowers signed 16-bit x <= y. Both operands are
// sign-extended to 64 bits, compared with SGT (x > y), and the 0/1 result is
// negated by XOR with constant 1, since MIPS64 only has "greater than" compares.
func rewriteValueMIPS64_OpLeq16_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Leq16 x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGT (SignExt16to64 x) (SignExt16to64 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGT, types.Bool)
		v2 := b.NewValue0(v.Pos, OpSignExt16to64, types.Int64)
		v2.AddArg(x)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpSignExt16to64, types.Int64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpLeq16U_0 lowers unsigned 16-bit x <= y. Both operands
// are zero-extended to 64 bits, compared with SGTU (x >u y), and the result
// is negated via XOR with 1 (x <= y  ==  !(x > y)).
func rewriteValueMIPS64_OpLeq16U_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Leq16U x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt16to64 x) (ZeroExt16to64 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
		v2.AddArg(x)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpLeq32_0 lowers signed 32-bit x <= y: sign-extend both
// operands to 64 bits, compute (SGT x y), and negate the 0/1 result with XOR 1.
func rewriteValueMIPS64_OpLeq32_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Leq32 x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGT (SignExt32to64 x) (SignExt32to64 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGT, types.Bool)
		v2 := b.NewValue0(v.Pos, OpSignExt32to64, types.Int64)
		v2.AddArg(x)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpSignExt32to64, types.Int64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpLeq32F_0 lowers single-precision float x <= y by
// swapping operands into a "greater or equal" compare (x <= y iff y >= x):
// CMPGEF sets the FP condition flag, read back by FPFlagTrue.
func rewriteValueMIPS64_OpLeq32F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Leq32F x y)
	// cond:
	// result: (FPFlagTrue (CMPGEF y x))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPGEF, TypeFlags)
		v0.AddArg(y)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpLeq32U_0 lowers unsigned 32-bit x <= y: zero-extend
// both operands to 64 bits, compute (SGTU x y), and negate the 0/1 result
// with XOR 1.
func rewriteValueMIPS64_OpLeq32U_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Leq32U x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt32to64 x) (ZeroExt32to64 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v2.AddArg(x)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpLeq64_0 lowers signed 64-bit x <= y. Operands are
// already full-width, so no extension is needed: the result is the negation
// (XOR with 1) of (SGT x y).
func rewriteValueMIPS64_OpLeq64_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Leq64 x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGT x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGT, types.Bool)
		v1.AddArg(x)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpLeq64F_0 lowers double-precision float x <= y by
// swapping operands into CMPGED (x <= y iff y >= x); FPFlagTrue reads the
// resulting FP condition flag.
func rewriteValueMIPS64_OpLeq64F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Leq64F x y)
	// cond:
	// result: (FPFlagTrue (CMPGED y x))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPGED, TypeFlags)
		v0.AddArg(y)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpLeq64U_0 lowers unsigned 64-bit x <= y as the negation
// (XOR with 1) of (SGTU x y); operands are already full-width.
func rewriteValueMIPS64_OpLeq64U_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Leq64U x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGTU x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v1.AddArg(x)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpLeq8_0 lowers signed 8-bit x <= y: sign-extend both
// operands to 64 bits, compute (SGT x y), and negate the 0/1 result with XOR 1.
func rewriteValueMIPS64_OpLeq8_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Leq8 x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGT (SignExt8to64 x) (SignExt8to64 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGT, types.Bool)
		v2 := b.NewValue0(v.Pos, OpSignExt8to64, types.Int64)
		v2.AddArg(x)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpSignExt8to64, types.Int64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpLeq8U_0 lowers unsigned 8-bit x <= y: zero-extend both
// operands to 64 bits, compute (SGTU x y), and negate the 0/1 result with XOR 1.
func rewriteValueMIPS64_OpLeq8U_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Leq8U x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt8to64 x) (ZeroExt8to64 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v2.AddArg(x)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpLess16_0 lowers signed 16-bit x < y by sign-extending
// both operands and swapping them into SGT: x < y iff y > x. MIPS64 only has
// a "greater than" compare, hence the operand swap.
func rewriteValueMIPS64_OpLess16_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Less16 x y)
	// cond:
	// result: (SGT (SignExt16to64 y) (SignExt16to64 x))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGT)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, types.Int64)
		v0.AddArg(y)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpSignExt16to64, types.Int64)
		v1.AddArg(x)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpLess16U_0 lowers unsigned 16-bit x < y by zero-extending
// both operands and swapping them into SGTU: x <u y iff y >u x.
func rewriteValueMIPS64_OpLess16U_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Less16U x y)
	// cond:
	// result: (SGTU (ZeroExt16to64 y) (ZeroExt16to64 x))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
		v0.AddArg(y)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
		v1.AddArg(x)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpLess32_0 lowers signed 32-bit x < y by sign-extending
// both operands and swapping them into SGT: x < y iff y > x.
func rewriteValueMIPS64_OpLess32_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Less32 x y)
	// cond:
	// result: (SGT (SignExt32to64 y) (SignExt32to64 x))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGT)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, types.Int64)
		v0.AddArg(y)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpSignExt32to64, types.Int64)
		v1.AddArg(x)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpLess32F_0 lowers single-precision float x < y by
// swapping operands into CMPGTF (x < y iff y > x); FPFlagTrue reads the
// resulting FP condition flag.
func rewriteValueMIPS64_OpLess32F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Less32F x y)
	// cond:
	// result: (FPFlagTrue (CMPGTF y x))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPGTF, TypeFlags)
		v0.AddArg(y)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpLess32U_0 lowers unsigned 32-bit x < y by zero-extending
// both operands and swapping them into SGTU: x <u y iff y >u x.
func rewriteValueMIPS64_OpLess32U_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Less32U x y)
	// cond:
	// result: (SGTU (ZeroExt32to64 y) (ZeroExt32to64 x))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v0.AddArg(y)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v1.AddArg(x)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpLess64_0 lowers signed 64-bit x < y to (SGT y x):
// operands are already full-width, so only the swap is needed (x < y iff y > x).
func rewriteValueMIPS64_OpLess64_0(v *Value) bool {
	// match: (Less64 x y)
	// cond:
	// result: (SGT y x)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGT)
		v.AddArg(y)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpLess64F_0 lowers double-precision float x < y by
// swapping operands into CMPGTD (x < y iff y > x); FPFlagTrue reads the
// resulting FP condition flag.
func rewriteValueMIPS64_OpLess64F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Less64F x y)
	// cond:
	// result: (FPFlagTrue (CMPGTD y x))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPGTD, TypeFlags)
		v0.AddArg(y)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpLess64U_0 lowers unsigned 64-bit x < y to (SGTU y x):
// operands are already full-width, so only the swap is needed.
func rewriteValueMIPS64_OpLess64U_0(v *Value) bool {
	// match: (Less64U x y)
	// cond:
	// result: (SGTU y x)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v.AddArg(y)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpLess8_0 lowers signed 8-bit x < y by sign-extending
// both operands and swapping them into SGT: x < y iff y > x.
func rewriteValueMIPS64_OpLess8_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Less8 x y)
	// cond:
	// result: (SGT (SignExt8to64 y) (SignExt8to64 x))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGT)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, types.Int64)
		v0.AddArg(y)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpSignExt8to64, types.Int64)
		v1.AddArg(x)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpLess8U_0 lowers unsigned 8-bit x < y by zero-extending
// both operands and swapping them into SGTU: x <u y iff y >u x.
func rewriteValueMIPS64_OpLess8U_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Less8U x y)
	// cond:
	// result: (SGTU (ZeroExt8to64 y) (ZeroExt8to64 x))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v0.AddArg(y)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v1.AddArg(x)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpLoad_0 selects the machine load instruction for a
// generic (Load <t> ptr mem) based on the loaded type t:
//
//	bool, unsigned 8-bit  -> MOVBUload (zero-extending byte load)
//	signed 8-bit          -> MOVBload  (sign-extending byte load)
//	16-bit signed/unsigned-> MOVHload / MOVHUload
//	32-bit signed/unsigned-> MOVWload / MOVWUload
//	64-bit int or pointer -> MOVVload
//	float32 / float64     -> MOVFload / MOVDload
//
// The rules are tried in order; the first matching type predicate wins.
// Returns false (no rewrite) when t matches none of the predicates.
func rewriteValueMIPS64_OpLoad_0(v *Value) bool {
	// match: (Load <t> ptr mem)
	// cond: t.IsBoolean()
	// result: (MOVBUload ptr mem)
	for {
		t := v.Type
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(t.IsBoolean()) {
			break
		}
		v.reset(OpMIPS64MOVBUload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is8BitInt(t) && isSigned(t))
	// result: (MOVBload ptr mem)
	for {
		t := v.Type
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(is8BitInt(t) && isSigned(t)) {
			break
		}
		v.reset(OpMIPS64MOVBload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is8BitInt(t) && !isSigned(t))
	// result: (MOVBUload ptr mem)
	for {
		t := v.Type
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(is8BitInt(t) && !isSigned(t)) {
			break
		}
		v.reset(OpMIPS64MOVBUload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is16BitInt(t) && isSigned(t))
	// result: (MOVHload ptr mem)
	for {
		t := v.Type
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(is16BitInt(t) && isSigned(t)) {
			break
		}
		v.reset(OpMIPS64MOVHload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is16BitInt(t) && !isSigned(t))
	// result: (MOVHUload ptr mem)
	for {
		t := v.Type
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(is16BitInt(t) && !isSigned(t)) {
			break
		}
		v.reset(OpMIPS64MOVHUload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is32BitInt(t) && isSigned(t))
	// result: (MOVWload ptr mem)
	for {
		t := v.Type
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(is32BitInt(t) && isSigned(t)) {
			break
		}
		v.reset(OpMIPS64MOVWload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is32BitInt(t) && !isSigned(t))
	// result: (MOVWUload ptr mem)
	for {
		t := v.Type
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(is32BitInt(t) && !isSigned(t)) {
			break
		}
		v.reset(OpMIPS64MOVWUload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is64BitInt(t) || isPtr(t))
	// result: (MOVVload ptr mem)
	for {
		t := v.Type
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(is64BitInt(t) || isPtr(t)) {
			break
		}
		v.reset(OpMIPS64MOVVload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: is32BitFloat(t)
	// result: (MOVFload ptr mem)
	for {
		t := v.Type
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(is32BitFloat(t)) {
			break
		}
		v.reset(OpMIPS64MOVFload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: is64BitFloat(t)
	// result: (MOVDload ptr mem)
	for {
		t := v.Type
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(is64BitFloat(t)) {
			break
		}
		v.reset(OpMIPS64MOVDload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpLsh16x16_0 lowers a 16-bit left shift by a 16-bit
// count. Go requires shifts by >= the operand width to yield 0, while SLLV
// only uses the low bits of the count, so the rule masks the shift result:
// (NEGV (SGTU 64 count)) is all-ones when count < 64 and 0 otherwise, and is
// ANDed with (SLLV x count). The count is zero-extended to 64 bits.
func rewriteValueMIPS64_OpLsh16x16_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Lsh16x16 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <types.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v2 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh16x32_0 lowers a 16-bit left shift by a 32-bit
// count: (SLLV x count) masked by (NEGV (SGTU 64 count)) so counts >= 64
// yield 0, per Go shift semantics. The count is zero-extended to 64 bits.
func rewriteValueMIPS64_OpLsh16x32_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Lsh16x32 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <types.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v2 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh16x64_0 lowers a 16-bit left shift by a 64-bit
// count: (SLLV x y) masked by (NEGV (SGTU 64 y)) so counts >= 64 yield 0.
// The count is already 64-bit, so no extension is emitted.
func rewriteValueMIPS64_OpLsh16x64_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Lsh16x64 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <types.UInt64> [64]) y)) (SLLV <t> x y))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v2 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v3.AddArg(x)
		v3.AddArg(y)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueMIPS64_OpLsh16x8_0 lowers a 16-bit left shift by an 8-bit
// count: (SLLV x count) masked by (NEGV (SGTU 64 count)) so counts >= 64
// yield 0. The count is zero-extended to 64 bits.
func rewriteValueMIPS64_OpLsh16x8_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Lsh16x8 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <types.UInt64> [64]) (ZeroExt8to64  y))) (SLLV <t> x (ZeroExt8to64  y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v2 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh32x16_0 lowers a 32-bit left shift by a 16-bit
// count: (SLLV x count) masked by (NEGV (SGTU 64 count)) so counts >= 64
// yield 0, per Go shift semantics. The count is zero-extended to 64 bits.
func rewriteValueMIPS64_OpLsh32x16_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Lsh32x16 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <types.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v2 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh32x32_0 lowers a 32-bit left shift by a 32-bit
// count: (SLLV x count) masked by (NEGV (SGTU 64 count)) so counts >= 64
// yield 0. The count is zero-extended to 64 bits.
func rewriteValueMIPS64_OpLsh32x32_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Lsh32x32 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <types.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v2 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh32x64_0 lowers a 32-bit left shift by a 64-bit
// count: (SLLV x y) masked by (NEGV (SGTU 64 y)) so counts >= 64 yield 0.
// The count is already 64-bit, so no extension is emitted.
func rewriteValueMIPS64_OpLsh32x64_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Lsh32x64 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <types.UInt64> [64]) y)) (SLLV <t> x y))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v2 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v3.AddArg(x)
		v3.AddArg(y)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueMIPS64_OpLsh32x8_0 lowers a 32-bit left shift by an 8-bit
// count: (SLLV x count) masked by (NEGV (SGTU 64 count)) so counts >= 64
// yield 0. The count is zero-extended to 64 bits.
func rewriteValueMIPS64_OpLsh32x8_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Lsh32x8 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <types.UInt64> [64]) (ZeroExt8to64  y))) (SLLV <t> x (ZeroExt8to64  y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v2 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh64x16_0 lowers a 64-bit left shift by a 16-bit
// count: (SLLV x count) masked by (NEGV (SGTU 64 count)) so counts >= 64
// yield 0, per Go shift semantics. The count is zero-extended to 64 bits.
func rewriteValueMIPS64_OpLsh64x16_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Lsh64x16 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <types.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v2 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh64x32_0 lowers a 64-bit left shift by a 32-bit
// count: (SLLV x count) masked by (NEGV (SGTU 64 count)) so counts >= 64
// yield 0. The count is zero-extended to 64 bits.
func rewriteValueMIPS64_OpLsh64x32_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Lsh64x32 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <types.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v2 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh64x64_0 lowers a 64-bit left shift by a 64-bit
// count: (SLLV x y) masked by (NEGV (SGTU 64 y)) so counts >= 64 yield 0.
// No extensions are needed since both operands are already 64-bit.
func rewriteValueMIPS64_OpLsh64x64_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Lsh64x64 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <types.UInt64> [64]) y)) (SLLV <t> x y))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v2 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v3.AddArg(x)
		v3.AddArg(y)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueMIPS64_OpLsh64x8_0 lowers a 64-bit left shift by an 8-bit
// count: (SLLV x count) masked by (NEGV (SGTU 64 count)) so counts >= 64
// yield 0. The count is zero-extended to 64 bits.
func rewriteValueMIPS64_OpLsh64x8_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Lsh64x8 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <types.UInt64> [64]) (ZeroExt8to64  y))) (SLLV <t> x (ZeroExt8to64  y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v2 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh8x16_0 lowers an 8-bit left shift by a 16-bit
// count: (SLLV x count) masked by (NEGV (SGTU 64 count)) so counts >= 64
// yield 0, per Go shift semantics. The count is zero-extended to 64 bits.
func rewriteValueMIPS64_OpLsh8x16_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Lsh8x16 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <types.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v2 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh8x32_0 lowers an 8-bit left shift by a 32-bit
// count: (SLLV x count) masked by (NEGV (SGTU 64 count)) so counts >= 64
// yield 0. The count is zero-extended to 64 bits.
func rewriteValueMIPS64_OpLsh8x32_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Lsh8x32 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <types.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v2 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh8x64_0 lowers the generic Lsh8x64 op (8-bit left
// shift by a 64-bit shift amount) to MIPS64 machine ops. The shift amount is
// already 64-bit, so no zero-extension is emitted; the AND/NEGV/SGTU mask
// still zeroes the result when y >= 64. Always rewrites; returns true.
func rewriteValueMIPS64_OpLsh8x64_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Lsh8x64 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <types.UInt64> [64]) y)) (SLLV <t> x y))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v2 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v3.AddArg(x)
		v3.AddArg(y)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueMIPS64_OpLsh8x8_0 lowers the generic Lsh8x8 op (8-bit left
// shift by an 8-bit shift amount) to MIPS64 machine ops, zero-extending the
// shift amount to 64 bits and masking the result to zero when the extended
// amount is >= 64 (via AND with NEGV(SGTU(64, zext(y)))). Always rewrites.
func rewriteValueMIPS64_OpLsh8x8_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Lsh8x8 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <types.UInt64> [64]) (ZeroExt8to64  y))) (SLLV <t> x (ZeroExt8to64  y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v2 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpMIPS64ADDV_0 optimizes the MIPS64 ADDV op:
//   - folds a MOVVconst operand (either side) into ADDVconst when the
//     constant fits in 32 bits (required for the immediate form);
//   - turns an add of a negation, x + (-y) or (-y) + x, into SUBV x y.
// Rules are tried in order; returns true if one fired, false otherwise.
func rewriteValueMIPS64_OpMIPS64ADDV_0(v *Value) bool {
	// match: (ADDV x (MOVVconst [c]))
	// cond: is32Bit(c)
	// result: (ADDVconst [c] x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_1.AuxInt
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64ADDVconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (ADDV (MOVVconst [c]) x)
	// cond: is32Bit(c)
	// result: (ADDVconst [c] x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64ADDVconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (ADDV x (NEGV y))
	// cond:
	// result: (SUBV x y)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64NEGV {
			break
		}
		y := v_1.Args[0]
		v.reset(OpMIPS64SUBV)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (ADDV (NEGV y) x)
	// cond:
	// result: (SUBV x y)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64NEGV {
			break
		}
		y := v_0.Args[0]
		x := v.Args[1]
		v.reset(OpMIPS64SUBV)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64ADDVconst_0 optimizes the MIPS64 ADDVconst op:
//   - folds the offset into a MOVVaddr address computation;
//   - drops an add of zero (replaced by a Copy of the operand);
//   - constant-folds ADDVconst of MOVVconst into a single MOVVconst;
//   - collapses nested ADDVconst/SUBVconst chains into one ADDVconst when
//     the combined constant still fits in 32 bits.
// Returns true if a rule fired, false otherwise.
func rewriteValueMIPS64_OpMIPS64ADDVconst_0(v *Value) bool {
	// match: (ADDVconst [off1] (MOVVaddr [off2] {sym} ptr))
	// cond:
	// result: (MOVVaddr [off1+off2] {sym} ptr)
	for {
		off1 := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym := v_0.Aux
		ptr := v_0.Args[0]
		v.reset(OpMIPS64MOVVaddr)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		return true
	}
	// match: (ADDVconst [0] x)
	// cond:
	// result: x
	for {
		if v.AuxInt != 0 {
			break
		}
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (ADDVconst [c] (MOVVconst [d]))
	// cond:
	// result: (MOVVconst [c+d])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = c + d
		return true
	}
	// match: (ADDVconst [c] (ADDVconst [d] x))
	// cond: is32Bit(c+d)
	// result: (ADDVconst [c+d] x)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		d := v_0.AuxInt
		x := v_0.Args[0]
		if !(is32Bit(c + d)) {
			break
		}
		v.reset(OpMIPS64ADDVconst)
		v.AuxInt = c + d
		v.AddArg(x)
		return true
	}
	// match: (ADDVconst [c] (SUBVconst [d] x))
	// cond: is32Bit(c-d)
	// result: (ADDVconst [c-d] x)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64SUBVconst {
			break
		}
		d := v_0.AuxInt
		x := v_0.Args[0]
		if !(is32Bit(c - d)) {
			break
		}
		v.reset(OpMIPS64ADDVconst)
		v.AuxInt = c - d
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64AND_0 optimizes the MIPS64 AND op:
//   - folds a MOVVconst operand (either side) into ANDconst when the
//     constant fits in 32 bits;
//   - simplifies x AND x to x (idempotence), via a Copy.
// Returns true if a rule fired, false otherwise.
func rewriteValueMIPS64_OpMIPS64AND_0(v *Value) bool {
	// match: (AND x (MOVVconst [c]))
	// cond: is32Bit(c)
	// result: (ANDconst [c] x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_1.AuxInt
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64ANDconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (AND (MOVVconst [c]) x)
	// cond: is32Bit(c)
	// result: (ANDconst [c] x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64ANDconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (AND x x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64ANDconst_0 optimizes the MIPS64 ANDconst op:
//   - AND with 0 is the constant 0;
//   - AND with -1 (all ones) is the identity, replaced by a Copy;
//   - constant-folds ANDconst of MOVVconst into MOVVconst [c&d];
//   - merges nested ANDconst into one with the intersected mask c&d.
// Returns true if a rule fired, false otherwise.
func rewriteValueMIPS64_OpMIPS64ANDconst_0(v *Value) bool {
	// match: (ANDconst [0] _)
	// cond:
	// result: (MOVVconst [0])
	for {
		if v.AuxInt != 0 {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	// match: (ANDconst [-1] x)
	// cond:
	// result: x
	for {
		if v.AuxInt != -1 {
			break
		}
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (ANDconst [c] (MOVVconst [d]))
	// cond:
	// result: (MOVVconst [c&d])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = c & d
		return true
	}
	// match: (ANDconst [c] (ANDconst [d] x))
	// cond:
	// result: (ANDconst [c&d] x)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ANDconst {
			break
		}
		d := v_0.AuxInt
		x := v_0.Args[0]
		v.reset(OpMIPS64ANDconst)
		v.AuxInt = c & d
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVBUload_0 optimizes the MIPS64 MOVBUload
// (unsigned byte load) op by folding address arithmetic into the load:
//   - an ADDVconst base has its constant merged into the load offset when
//     the sum fits in 32 bits;
//   - a MOVVaddr base has its offset and symbol merged when the symbols can
//     be combined and the summed offset fits in 32 bits.
// Returns true if a rule fired, false otherwise.
func rewriteValueMIPS64_OpMIPS64MOVBUload_0(v *Value) bool {
	// match: (MOVBUload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVBUload [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVBUload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBUload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVBUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVBUload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVBUreg_0 optimizes the MIPS64 MOVBUreg
// (zero-extend byte) op:
//   - drops the extension when the operand is already an unsigned byte load
//     or a prior MOVBUreg (replaced by MOVVreg, a plain register move);
//   - constant-folds the extension to MOVVconst [int64(uint8(c))].
// Returns true if a rule fired, false otherwise.
func rewriteValueMIPS64_OpMIPS64MOVBUreg_0(v *Value) bool {
	// match: (MOVBUreg x:(MOVBUload _ _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVBUload {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg x:(MOVBUreg _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVBUreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg (MOVVconst [c]))
	// cond:
	// result: (MOVVconst [int64(uint8(c))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64(uint8(c))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVBload_0 optimizes the MIPS64 MOVBload
// (signed byte load) op by folding address arithmetic into the load:
// ADDVconst offsets and MOVVaddr offset/symbol pairs are merged into the
// load's AuxInt/Aux when the combined offset fits in 32 bits (and, for
// MOVVaddr, the symbols can be merged). Returns true if a rule fired.
func rewriteValueMIPS64_OpMIPS64MOVBload_0(v *Value) bool {
	// match: (MOVBload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVBload  [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVBload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVBload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVBload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVBreg_0 optimizes the MIPS64 MOVBreg
// (sign-extend byte) op:
//   - drops the extension when the operand is already a signed byte load or
//     a prior MOVBreg (replaced by MOVVreg, a plain register move);
//   - constant-folds the extension to MOVVconst [int64(int8(c))].
// Returns true if a rule fired, false otherwise.
func rewriteValueMIPS64_OpMIPS64MOVBreg_0(v *Value) bool {
	// match: (MOVBreg x:(MOVBload _ _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVBload {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBreg x:(MOVBreg _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVBreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBreg (MOVVconst [c]))
	// cond:
	// result: (MOVVconst [int64(int8(c))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64(int8(c))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVBstore_0 optimizes the MIPS64 MOVBstore
// (byte store) op:
//   - folds ADDVconst offsets and MOVVaddr offset/symbol pairs into the
//     store's AuxInt/Aux (same conditions as the load rewrites);
//   - turns a store of constant zero into MOVBstorezero;
//   - strips a sign- or zero-extension (MOVBreg/MOVBUreg/MOVHreg/MOVHUreg/
//     MOVWreg/MOVWUreg) from the stored value — a byte store only writes the
//     low 8 bits, so the extension is irrelevant.
// Returns true if a rule fired, false otherwise.
func rewriteValueMIPS64_OpMIPS64MOVBstore_0(v *Value) bool {
	// match: (MOVBstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVBstore [off1+off2] {sym} ptr val mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVBstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVVconst [0]) mem)
	// cond:
	// result: (MOVBstorezero [off] {sym} ptr mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		mem := v.Args[2]
		v.reset(OpMIPS64MOVBstorezero)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVBreg x) mem)
	// cond:
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVBreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVBUreg x) mem)
	// cond:
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVBUreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVHreg x) mem)
	// cond:
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVHreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVHUreg x) mem)
	// cond:
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVHUreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVWreg x) mem)
	// cond:
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVWreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVWUreg x) mem)
	// cond:
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVWUreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVBstorezero_0 optimizes the MIPS64
// MOVBstorezero (store of a zero byte) op by folding address arithmetic into
// the store: ADDVconst offsets and MOVVaddr offset/symbol pairs are merged
// when the combined offset fits in 32 bits (and symbols are mergeable).
// Returns true if a rule fired, false otherwise.
func rewriteValueMIPS64_OpMIPS64MOVBstorezero_0(v *Value) bool {
	// match: (MOVBstorezero [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVBstorezero [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVBstorezero)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstorezero [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVBstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVBstorezero)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVDload_0 optimizes the MIPS64 MOVDload
// (double-precision float load) op by folding address arithmetic into the
// load: ADDVconst offsets and MOVVaddr offset/symbol pairs are merged when
// the combined offset fits in 32 bits (and symbols are mergeable).
// Returns true if a rule fired, false otherwise.
func rewriteValueMIPS64_OpMIPS64MOVDload_0(v *Value) bool {
	// match: (MOVDload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVDload  [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVDload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVDload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVDload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVDstore_0 optimizes the MIPS64 MOVDstore
// (double-precision float store) op by folding address arithmetic into the
// store: ADDVconst offsets and MOVVaddr offset/symbol pairs are merged when
// the combined offset fits in 32 bits (and symbols are mergeable).
// Returns true if a rule fired, false otherwise.
func rewriteValueMIPS64_OpMIPS64MOVDstore_0(v *Value) bool {
	// match: (MOVDstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVDstore [off1+off2] {sym} ptr val mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVDstore)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVDstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVDstore)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVFload_0 optimizes the MIPS64 MOVFload
// (single-precision float load) op by folding address arithmetic into the
// load: ADDVconst offsets and MOVVaddr offset/symbol pairs are merged when
// the combined offset fits in 32 bits (and symbols are mergeable).
// Returns true if a rule fired, false otherwise.
func rewriteValueMIPS64_OpMIPS64MOVFload_0(v *Value) bool {
	// match: (MOVFload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVFload  [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVFload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVFload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVFload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVFload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVFstore_0 optimizes the MIPS64 MOVFstore
// (single-precision float store) op by folding address arithmetic into the
// store: ADDVconst offsets and MOVVaddr offset/symbol pairs are merged when
// the combined offset fits in 32 bits (and symbols are mergeable).
// Returns true if a rule fired, false otherwise.
func rewriteValueMIPS64_OpMIPS64MOVFstore_0(v *Value) bool {
	// match: (MOVFstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVFstore [off1+off2] {sym} ptr val mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVFstore)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVFstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVFstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVFstore)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVHUload_0 optimizes the MIPS64 MOVHUload
// (unsigned halfword load) op by folding address arithmetic into the load:
// ADDVconst offsets and MOVVaddr offset/symbol pairs are merged when the
// combined offset fits in 32 bits (and symbols are mergeable).
// Returns true if a rule fired, false otherwise.
func rewriteValueMIPS64_OpMIPS64MOVHUload_0(v *Value) bool {
	// match: (MOVHUload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVHUload [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVHUload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHUload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVHUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVHUload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVHUreg_0 optimizes the MIPS64 MOVHUreg
// (zero-extend halfword) op:
//   - drops the extension when the operand is already zero-extended to at
//     most 16 bits (an unsigned byte/halfword load, or a prior
//     MOVBUreg/MOVHUreg), replacing it with MOVVreg (a plain register move);
//   - constant-folds the extension to MOVVconst [int64(uint16(c))].
// Returns true if a rule fired, false otherwise.
func rewriteValueMIPS64_OpMIPS64MOVHUreg_0(v *Value) bool {
	// match: (MOVHUreg x:(MOVBUload _ _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVBUload {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVHUload _ _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVHUload {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVBUreg _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVBUreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVHUreg _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVHUreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg (MOVVconst [c]))
	// cond:
	// result: (MOVVconst [int64(uint16(c))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64(uint16(c))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVHload_0 optimizes the MIPS64 MOVHload
// (signed halfword load) op by folding address arithmetic into the load:
// ADDVconst offsets and MOVVaddr offset/symbol pairs are merged when the
// combined offset fits in 32 bits (and symbols are mergeable).
// Returns true if a rule fired, false otherwise.
func rewriteValueMIPS64_OpMIPS64MOVHload_0(v *Value) bool {
	// match: (MOVHload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVHload  [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVHload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVHload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVHload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVHreg_0 eliminates redundant sign-extensions:
// when the argument is already narrower than (or equal to) a signed halfword
// (narrow loads or prior extensions), MOVHreg reduces to a plain register
// move (MOVVreg). A constant argument is sign-extended at compile time.
// Reports whether v was rewritten.
func rewriteValueMIPS64_OpMIPS64MOVHreg_0(v *Value) bool {
	// match: (MOVHreg x:(MOVBload _ _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVBload {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBUload _ _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVBUload {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVHload _ _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVHload {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBreg _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVBreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBUreg _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVBUreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVHreg _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVHreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg (MOVVconst [c]))
	// cond:
	// result: (MOVVconst [int64(int16(c))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64(int16(c))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVHstore_0 simplifies halfword stores:
// folds constant/symbolic address arithmetic into the store's offset/symbol,
// rewrites a store of constant zero into MOVHstorezero, and drops extension
// ops on the stored value (only the low 16 bits are written, so any
// MOVH*/MOVW* extension of the source is irrelevant). Reports whether v was
// rewritten.
func rewriteValueMIPS64_OpMIPS64MOVHstore_0(v *Value) bool {
	// match: (MOVHstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVHstore [off1+off2] {sym} ptr val mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVHstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVVconst [0]) mem)
	// cond:
	// result: (MOVHstorezero [off] {sym} ptr mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		mem := v.Args[2]
		v.reset(OpMIPS64MOVHstorezero)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVHreg x) mem)
	// cond:
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVHreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVHUreg x) mem)
	// cond:
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVHUreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVWreg x) mem)
	// cond:
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVWreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVWUreg x) mem)
	// cond:
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVWUreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVHstorezero_0 folds constant-offset and
// symbolic address arithmetic into the MOVHstorezero's offset/symbol.
// Reports whether v was rewritten.
func rewriteValueMIPS64_OpMIPS64MOVHstorezero_0(v *Value) bool {
	// match: (MOVHstorezero [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVHstorezero [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVHstorezero)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHstorezero [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVHstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVHstorezero)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVVload_0 folds constant-offset and symbolic
// address arithmetic into the MOVVload's offset/symbol. Reports whether v
// was rewritten.
func rewriteValueMIPS64_OpMIPS64MOVVload_0(v *Value) bool {
	// match: (MOVVload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVVload  [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVVload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVVload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVVload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVVload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVVreg_0 simplifies full-width register moves:
// a single-use move becomes a no-op marker (MOVVnop), and a move of a
// constant becomes the constant itself. Reports whether v was rewritten.
func rewriteValueMIPS64_OpMIPS64MOVVreg_0(v *Value) bool {
	// match: (MOVVreg x)
	// cond: x.Uses == 1
	// result: (MOVVnop x)
	for {
		x := v.Args[0]
		if !(x.Uses == 1) {
			break
		}
		v.reset(OpMIPS64MOVVnop)
		v.AddArg(x)
		return true
	}
	// match: (MOVVreg (MOVVconst [c]))
	// cond:
	// result: (MOVVconst [c])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = c
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVVstore_0 folds constant/symbolic address
// arithmetic into the MOVVstore's offset/symbol, and rewrites a store of
// constant zero into MOVVstorezero. Reports whether v was rewritten.
func rewriteValueMIPS64_OpMIPS64MOVVstore_0(v *Value) bool {
	// match: (MOVVstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVVstore [off1+off2] {sym} ptr val mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVVstore)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVVstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVVstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVVstore)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVVstore [off] {sym} ptr (MOVVconst [0]) mem)
	// cond:
	// result: (MOVVstorezero [off] {sym} ptr mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		mem := v.Args[2]
		v.reset(OpMIPS64MOVVstorezero)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVVstorezero_0 folds constant-offset and
// symbolic address arithmetic into the MOVVstorezero's offset/symbol.
// Reports whether v was rewritten.
func rewriteValueMIPS64_OpMIPS64MOVVstorezero_0(v *Value) bool {
	// match: (MOVVstorezero [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVVstorezero [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVVstorezero)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVVstorezero [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVVstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVVstorezero)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVWUload_0 folds constant-offset and symbolic
// address arithmetic into the MOVWUload's offset/symbol. Reports whether v
// was rewritten.
func rewriteValueMIPS64_OpMIPS64MOVWUload_0(v *Value) bool {
	// match: (MOVWUload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVWUload [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVWUload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWUload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVWUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVWUload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVWUreg_0 eliminates redundant zero-extensions:
// when the argument is already zero-extended to at most 32 bits (unsigned
// narrow loads or prior unsigned extensions), MOVWUreg reduces to a plain
// register move (MOVVreg). A constant argument is zero-extended at compile
// time. Reports whether v was rewritten.
func rewriteValueMIPS64_OpMIPS64MOVWUreg_0(v *Value) bool {
	// match: (MOVWUreg x:(MOVBUload _ _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVBUload {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVHUload _ _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVHUload {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVWUload _ _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVWUload {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVBUreg _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVBUreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVHUreg _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVHUreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVWUreg _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVWUreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg (MOVVconst [c]))
	// cond:
	// result: (MOVVconst [int64(uint32(c))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64(uint32(c))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVWload_0 folds constant-offset and symbolic
// address arithmetic into the MOVWload's offset/symbol. Reports whether v
// was rewritten.
func rewriteValueMIPS64_OpMIPS64MOVWload_0(v *Value) bool {
	// match: (MOVWload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVWload  [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVWload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVWload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVWload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
  4857  func rewriteValueMIPS64_OpMIPS64MOVWreg_0(v *Value) bool {
  4858  	// match: (MOVWreg x:(MOVBload _ _))
  4859  	// cond:
  4860  	// result: (MOVVreg x)
  4861  	for {
  4862  		x := v.Args[0]
  4863  		if x.Op != OpMIPS64MOVBload {
  4864  			break
  4865  		}
  4866  		v.reset(OpMIPS64MOVVreg)
  4867  		v.AddArg(x)
  4868  		return true
  4869  	}
  4870  	// match: (MOVWreg x:(MOVBUload _ _))
  4871  	// cond:
  4872  	// result: (MOVVreg x)
  4873  	for {
  4874  		x := v.Args[0]
  4875  		if x.Op != OpMIPS64MOVBUload {
  4876  			break
  4877  		}
  4878  		v.reset(OpMIPS64MOVVreg)
  4879  		v.AddArg(x)
  4880  		return true
  4881  	}
  4882  	// match: (MOVWreg x:(MOVHload _ _))
  4883  	// cond:
  4884  	// result: (MOVVreg x)
  4885  	for {
  4886  		x := v.Args[0]
  4887  		if x.Op != OpMIPS64MOVHload {
  4888  			break
  4889  		}
  4890  		v.reset(OpMIPS64MOVVreg)
  4891  		v.AddArg(x)
  4892  		return true
  4893  	}
  4894  	// match: (MOVWreg x:(MOVHUload _ _))
  4895  	// cond:
  4896  	// result: (MOVVreg x)
  4897  	for {
  4898  		x := v.Args[0]
  4899  		if x.Op != OpMIPS64MOVHUload {
  4900  			break
  4901  		}
  4902  		v.reset(OpMIPS64MOVVreg)
  4903  		v.AddArg(x)
  4904  		return true
  4905  	}
  4906  	// match: (MOVWreg x:(MOVWload _ _))
  4907  	// cond:
  4908  	// result: (MOVVreg x)
  4909  	for {
  4910  		x := v.Args[0]
  4911  		if x.Op != OpMIPS64MOVWload {
  4912  			break
  4913  		}
  4914  		v.reset(OpMIPS64MOVVreg)
  4915  		v.AddArg(x)
  4916  		return true
  4917  	}
  4918  	// match: (MOVWreg x:(MOVBreg _))
  4919  	// cond:
  4920  	// result: (MOVVreg x)
  4921  	for {
  4922  		x := v.Args[0]
  4923  		if x.Op != OpMIPS64MOVBreg {
  4924  			break
  4925  		}
  4926  		v.reset(OpMIPS64MOVVreg)
  4927  		v.AddArg(x)
  4928  		return true
  4929  	}
  4930  	// match: (MOVWreg x:(MOVBUreg _))
  4931  	// cond:
  4932  	// result: (MOVVreg x)
  4933  	for {
  4934  		x := v.Args[0]
  4935  		if x.Op != OpMIPS64MOVBUreg {
  4936  			break
  4937  		}
  4938  		v.reset(OpMIPS64MOVVreg)
  4939  		v.AddArg(x)
  4940  		return true
  4941  	}
  4942  	// match: (MOVWreg x:(MOVHreg _))
  4943  	// cond:
  4944  	// result: (MOVVreg x)
  4945  	for {
  4946  		x := v.Args[0]
  4947  		if x.Op != OpMIPS64MOVHreg {
  4948  			break
  4949  		}
  4950  		v.reset(OpMIPS64MOVVreg)
  4951  		v.AddArg(x)
  4952  		return true
  4953  	}
  4954  	// match: (MOVWreg x:(MOVHreg _))
  4955  	// cond:
  4956  	// result: (MOVVreg x)
  4957  	for {
  4958  		x := v.Args[0]
  4959  		if x.Op != OpMIPS64MOVHreg {
  4960  			break
  4961  		}
  4962  		v.reset(OpMIPS64MOVVreg)
  4963  		v.AddArg(x)
  4964  		return true
  4965  	}
  4966  	// match: (MOVWreg x:(MOVWreg _))
  4967  	// cond:
  4968  	// result: (MOVVreg x)
  4969  	for {
  4970  		x := v.Args[0]
  4971  		if x.Op != OpMIPS64MOVWreg {
  4972  			break
  4973  		}
  4974  		v.reset(OpMIPS64MOVVreg)
  4975  		v.AddArg(x)
  4976  		return true
  4977  	}
  4978  	return false
  4979  }
// rewriteValueMIPS64_OpMIPS64MOVWreg_10 continues the MOVWreg rules:
// sign-extension of a constant is folded at compile time. Reports whether v
// was rewritten.
func rewriteValueMIPS64_OpMIPS64MOVWreg_10(v *Value) bool {
	// match: (MOVWreg (MOVVconst [c]))
	// cond:
	// result: (MOVVconst [int64(int32(c))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64(int32(c))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVWstore_0 simplifies word stores: folds
// constant/symbolic address arithmetic into the store's offset/symbol,
// rewrites a store of constant zero into MOVWstorezero, and drops word
// extension ops on the stored value (only the low 32 bits are written, so a
// MOVWreg/MOVWUreg of the source is irrelevant). Reports whether v was
// rewritten.
func rewriteValueMIPS64_OpMIPS64MOVWstore_0(v *Value) bool {
	// match: (MOVWstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVWstore [off1+off2] {sym} ptr val mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVWstore)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVWstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVWstore)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWstore [off] {sym} ptr (MOVVconst [0]) mem)
	// cond:
	// result: (MOVWstorezero [off] {sym} ptr mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		mem := v.Args[2]
		v.reset(OpMIPS64MOVWstorezero)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWstore [off] {sym} ptr (MOVWreg x) mem)
	// cond:
	// result: (MOVWstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVWreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpMIPS64MOVWstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWstore [off] {sym} ptr (MOVWUreg x) mem)
	// cond:
	// result: (MOVWstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVWUreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpMIPS64MOVWstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVWstorezero_0 folds constant-offset and
// symbolic address arithmetic into the MOVWstorezero's offset/symbol.
// Reports whether v was rewritten.
func rewriteValueMIPS64_OpMIPS64MOVWstorezero_0(v *Value) bool {
	// match: (MOVWstorezero [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVWstorezero [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVWstorezero)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWstorezero [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVWstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVWstorezero)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64NEGV_0 folds negation of a constant into the
// constant itself (two's-complement wraparound applies for MinInt64).
// Reports whether v was rewritten.
func rewriteValueMIPS64_OpMIPS64NEGV_0(v *Value) bool {
	// match: (NEGV (MOVVconst [c]))
	// cond:
	// result: (MOVVconst [-c])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = -c
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64NOR_0 canonicalizes NOR with a constant operand
// (on either side, since NOR is commutative) into NORconst, provided the
// constant fits in 32 bits. Reports whether v was rewritten.
func rewriteValueMIPS64_OpMIPS64NOR_0(v *Value) bool {
	// match: (NOR x (MOVVconst [c]))
	// cond: is32Bit(c)
	// result: (NORconst [c] x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_1.AuxInt
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64NORconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (NOR (MOVVconst [c]) x)
	// cond: is32Bit(c)
	// result: (NORconst [c] x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64NORconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64NORconst_0 folds NORconst applied to a constant
// into a single constant: ^(c|d). Reports whether v was rewritten.
func rewriteValueMIPS64_OpMIPS64NORconst_0(v *Value) bool {
	// match: (NORconst [c] (MOVVconst [d]))
	// cond:
	// result: (MOVVconst [^(c|d)])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = ^(c | d)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64OR_0 canonicalizes OR with a 32-bit constant
// operand (on either side; OR is commutative) into ORconst, and reduces
// (OR x x) to x. Reports whether v was rewritten.
func rewriteValueMIPS64_OpMIPS64OR_0(v *Value) bool {
	// match: (OR x (MOVVconst [c]))
	// cond: is32Bit(c)
	// result: (ORconst  [c] x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_1.AuxInt
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64ORconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (OR (MOVVconst [c]) x)
	// cond: is32Bit(c)
	// result: (ORconst  [c] x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64ORconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (OR x x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64ORconst_0 simplifies ORconst: identity with 0,
// absorption with -1 (all bits set), constant folding against MOVVconst, and
// merging of nested ORconst ops when the combined constant still fits in 32
// bits. Reports whether v was rewritten.
func rewriteValueMIPS64_OpMIPS64ORconst_0(v *Value) bool {
	// match: (ORconst [0] x)
	// cond:
	// result: x
	for {
		if v.AuxInt != 0 {
			break
		}
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (ORconst [-1] _)
	// cond:
	// result: (MOVVconst [-1])
	for {
		if v.AuxInt != -1 {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = -1
		return true
	}
	// match: (ORconst [c] (MOVVconst [d]))
	// cond:
	// result: (MOVVconst [c|d])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = c | d
		return true
	}
	// match: (ORconst [c] (ORconst [d] x))
	// cond: is32Bit(c|d)
	// result: (ORconst [c|d] x)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ORconst {
			break
		}
		d := v_0.AuxInt
		x := v_0.Args[0]
		if !(is32Bit(c | d)) {
			break
		}
		v.reset(OpMIPS64ORconst)
		v.AuxInt = c | d
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SGT_0 folds a 32-bit constant first operand of a
// signed set-greater-than into the immediate form SGTconst.
// It reports whether v was rewritten.
// NOTE: generated from gen/MIPS64.rules — do not hand-edit the logic.
func rewriteValueMIPS64_OpMIPS64SGT_0(v *Value) bool {
	// match: (SGT (MOVVconst [c]) x)
	// cond: is32Bit(c)
	// result: (SGTconst  [c] x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64SGTconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SGTU_0 folds a 32-bit constant first operand of
// an unsigned set-greater-than into the immediate form SGTUconst.
// It reports whether v was rewritten.
// NOTE: generated from gen/MIPS64.rules — do not hand-edit the logic.
func rewriteValueMIPS64_OpMIPS64SGTU_0(v *Value) bool {
	// match: (SGTU (MOVVconst [c]) x)
	// cond: is32Bit(c)
	// result: (SGTUconst [c] x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64SGTUconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SGTUconst_0 statically evaluates an unsigned
// compare against a constant. It folds a compare of two constants to 0 or 1,
// and proves the compare true when the operand's value range is known to be
// below the constant: a zero-extended byte (MOVBUreg, <= 0xff), a
// zero-extended halfword (MOVHUreg, <= 0xffff), a masked value
// (ANDconst [m], <= m), or a right-shifted value (SRLVconst [d],
// < 1<<(64-d)).
// It reports whether v was rewritten.
// NOTE: generated from gen/MIPS64.rules — do not hand-edit the logic.
func rewriteValueMIPS64_OpMIPS64SGTUconst_0(v *Value) bool {
	// match: (SGTUconst [c] (MOVVconst [d]))
	// cond: uint64(c)>uint64(d)
	// result: (MOVVconst [1])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0.AuxInt
		if !(uint64(c) > uint64(d)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 1
		return true
	}
	// match: (SGTUconst [c] (MOVVconst [d]))
	// cond: uint64(c)<=uint64(d)
	// result: (MOVVconst [0])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0.AuxInt
		if !(uint64(c) <= uint64(d)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	// match: (SGTUconst [c] (MOVBUreg _))
	// cond: 0xff < uint64(c)
	// result: (MOVVconst [1])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVBUreg {
			break
		}
		if !(0xff < uint64(c)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 1
		return true
	}
	// match: (SGTUconst [c] (MOVHUreg _))
	// cond: 0xffff < uint64(c)
	// result: (MOVVconst [1])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVHUreg {
			break
		}
		if !(0xffff < uint64(c)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 1
		return true
	}
	// match: (SGTUconst [c] (ANDconst [m] _))
	// cond: uint64(m) < uint64(c)
	// result: (MOVVconst [1])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ANDconst {
			break
		}
		m := v_0.AuxInt
		if !(uint64(m) < uint64(c)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 1
		return true
	}
	// match: (SGTUconst [c] (SRLVconst _ [d]))
	// cond: 0 < d && d <= 63 && 1<<uint64(64-d) <= uint64(c)
	// result: (MOVVconst [1])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64SRLVconst {
			break
		}
		d := v_0.AuxInt
		if !(0 < d && d <= 63 && 1<<uint64(64-d) <= uint64(c)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 1
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SGTconst_0 statically evaluates a signed compare
// against a constant. It folds a compare of two constants to 0 or 1, and
// decides the compare when the operand's range is known from a sign/zero
// extension: MOVBreg is in [-0x80, 0x7f], MOVBUreg in [0, 0xff], MOVHreg in
// [-0x8000, 0x7fff], MOVHUreg in [0, 0xffff]. Further rules continue in
// rewriteValueMIPS64_OpMIPS64SGTconst_10.
// It reports whether v was rewritten.
// NOTE: generated from gen/MIPS64.rules — do not hand-edit the logic.
func rewriteValueMIPS64_OpMIPS64SGTconst_0(v *Value) bool {
	// match: (SGTconst [c] (MOVVconst [d]))
	// cond: int64(c)>int64(d)
	// result: (MOVVconst [1])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0.AuxInt
		if !(int64(c) > int64(d)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 1
		return true
	}
	// match: (SGTconst [c] (MOVVconst [d]))
	// cond: int64(c)<=int64(d)
	// result: (MOVVconst [0])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0.AuxInt
		if !(int64(c) <= int64(d)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	// match: (SGTconst [c] (MOVBreg _))
	// cond: 0x7f < int64(c)
	// result: (MOVVconst [1])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVBreg {
			break
		}
		if !(0x7f < int64(c)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 1
		return true
	}
	// match: (SGTconst [c] (MOVBreg _))
	// cond: int64(c) <= -0x80
	// result: (MOVVconst [0])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVBreg {
			break
		}
		if !(int64(c) <= -0x80) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	// match: (SGTconst [c] (MOVBUreg _))
	// cond: 0xff < int64(c)
	// result: (MOVVconst [1])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVBUreg {
			break
		}
		if !(0xff < int64(c)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 1
		return true
	}
	// match: (SGTconst [c] (MOVBUreg _))
	// cond: int64(c) < 0
	// result: (MOVVconst [0])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVBUreg {
			break
		}
		if !(int64(c) < 0) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	// match: (SGTconst [c] (MOVHreg _))
	// cond: 0x7fff < int64(c)
	// result: (MOVVconst [1])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVHreg {
			break
		}
		if !(0x7fff < int64(c)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 1
		return true
	}
	// match: (SGTconst [c] (MOVHreg _))
	// cond: int64(c) <= -0x8000
	// result: (MOVVconst [0])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVHreg {
			break
		}
		if !(int64(c) <= -0x8000) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	// match: (SGTconst [c] (MOVHUreg _))
	// cond: 0xffff < int64(c)
	// result: (MOVVconst [1])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVHUreg {
			break
		}
		if !(0xffff < int64(c)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 1
		return true
	}
	// match: (SGTconst [c] (MOVHUreg _))
	// cond: int64(c) < 0
	// result: (MOVVconst [0])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVHUreg {
			break
		}
		if !(int64(c) < 0) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SGTconst_10 continues the SGTconst rules from
// rewriteValueMIPS64_OpMIPS64SGTconst_0: a zero-extended word (MOVWUreg) is
// never greater than a negative constant; a non-negative mask (ANDconst [m])
// bounds the operand by m; a right shift (SRLVconst [d]) bounds it by
// 1<<(64-d).
// It reports whether v was rewritten.
// NOTE: generated from gen/MIPS64.rules — do not hand-edit the logic.
func rewriteValueMIPS64_OpMIPS64SGTconst_10(v *Value) bool {
	// match: (SGTconst [c] (MOVWUreg _))
	// cond: int64(c) < 0
	// result: (MOVVconst [0])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVWUreg {
			break
		}
		if !(int64(c) < 0) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	// match: (SGTconst [c] (ANDconst [m] _))
	// cond: 0 <= m && m < c
	// result: (MOVVconst [1])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ANDconst {
			break
		}
		m := v_0.AuxInt
		if !(0 <= m && m < c) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 1
		return true
	}
	// match: (SGTconst [c] (SRLVconst _ [d]))
	// cond: 0 <= c && 0 < d && d <= 63 && 1<<uint64(64-d) <= c
	// result: (MOVVconst [1])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64SRLVconst {
			break
		}
		d := v_0.AuxInt
		if !(0 <= c && 0 < d && d <= 63 && 1<<uint64(64-d) <= c) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 1
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SLLV_0 simplifies a variable left shift: a shift
// by a constant >= 64 produces 0, and any other constant shift amount becomes
// the immediate form SLLVconst.
// It reports whether v was rewritten.
// NOTE: generated from gen/MIPS64.rules — do not hand-edit the logic.
func rewriteValueMIPS64_OpMIPS64SLLV_0(v *Value) bool {
	// match: (SLLV _ (MOVVconst [c]))
	// cond: uint64(c)>=64
	// result: (MOVVconst [0])
	for {
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_1.AuxInt
		if !(uint64(c) >= 64) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	// match: (SLLV x (MOVVconst [c]))
	// cond:
	// result: (SLLVconst x [c])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpMIPS64SLLVconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SLLVconst_0 constant-folds a left shift of a
// constant into a single MOVVconst.
// It reports whether v was rewritten.
// NOTE: generated from gen/MIPS64.rules — do not hand-edit the logic.
func rewriteValueMIPS64_OpMIPS64SLLVconst_0(v *Value) bool {
	// match: (SLLVconst [c] (MOVVconst [d]))
	// cond:
	// result: (MOVVconst [int64(d)<<uint64(c)])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64(d) << uint64(c)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SRAV_0 simplifies a variable arithmetic right
// shift: a constant shift amount >= 64 is clamped to 63 (which fully
// propagates the sign bit), and any other constant amount becomes the
// immediate form SRAVconst.
// It reports whether v was rewritten.
// NOTE: generated from gen/MIPS64.rules — do not hand-edit the logic.
func rewriteValueMIPS64_OpMIPS64SRAV_0(v *Value) bool {
	// match: (SRAV x (MOVVconst [c]))
	// cond: uint64(c)>=64
	// result: (SRAVconst x [63])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_1.AuxInt
		if !(uint64(c) >= 64) {
			break
		}
		v.reset(OpMIPS64SRAVconst)
		v.AuxInt = 63
		v.AddArg(x)
		return true
	}
	// match: (SRAV x (MOVVconst [c]))
	// cond:
	// result: (SRAVconst x [c])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpMIPS64SRAVconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SRAVconst_0 constant-folds an arithmetic right
// shift of a constant (Go's >> on int64 is arithmetic) into a MOVVconst.
// It reports whether v was rewritten.
// NOTE: generated from gen/MIPS64.rules — do not hand-edit the logic.
func rewriteValueMIPS64_OpMIPS64SRAVconst_0(v *Value) bool {
	// match: (SRAVconst [c] (MOVVconst [d]))
	// cond:
	// result: (MOVVconst [int64(d)>>uint64(c)])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64(d) >> uint64(c)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SRLV_0 simplifies a variable logical right
// shift: a shift by a constant >= 64 produces 0, and any other constant shift
// amount becomes the immediate form SRLVconst.
// It reports whether v was rewritten.
// NOTE: generated from gen/MIPS64.rules — do not hand-edit the logic.
func rewriteValueMIPS64_OpMIPS64SRLV_0(v *Value) bool {
	// match: (SRLV _ (MOVVconst [c]))
	// cond: uint64(c)>=64
	// result: (MOVVconst [0])
	for {
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_1.AuxInt
		if !(uint64(c) >= 64) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	// match: (SRLV x (MOVVconst [c]))
	// cond:
	// result: (SRLVconst x [c])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpMIPS64SRLVconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SRLVconst_0 constant-folds a logical right shift
// of a constant (the uint64 conversion makes Go's >> a logical shift) into a
// MOVVconst.
// It reports whether v was rewritten.
// NOTE: generated from gen/MIPS64.rules — do not hand-edit the logic.
func rewriteValueMIPS64_OpMIPS64SRLVconst_0(v *Value) bool {
	// match: (SRLVconst [c] (MOVVconst [d]))
	// cond:
	// result: (MOVVconst [int64(uint64(d)>>uint64(c))])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64(uint64(d) >> uint64(c))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SUBV_0 simplifies subtraction: a 32-bit constant
// subtrahend becomes the immediate form SUBVconst, (SUBV x x) folds to 0, and
// subtracting from zero becomes negation (NEGV).
// It reports whether v was rewritten.
// NOTE: generated from gen/MIPS64.rules — do not hand-edit the logic.
func rewriteValueMIPS64_OpMIPS64SUBV_0(v *Value) bool {
	// match: (SUBV x (MOVVconst [c]))
	// cond: is32Bit(c)
	// result: (SUBVconst [c] x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_1.AuxInt
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64SUBVconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (SUBV x x)
	// cond:
	// result: (MOVVconst [0])
	for {
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	// match: (SUBV (MOVVconst [0]) x)
	// cond:
	// result: (NEGV x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		x := v.Args[1]
		v.reset(OpMIPS64NEGV)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SUBVconst_0 simplifies subtract-immediate:
// subtracting 0 is the identity, subtracting from a constant folds to a
// MOVVconst, and a SUBVconst stacked on another SUBVconst or an ADDVconst
// merges into a single ADDVconst when the combined offset fits in 32 bits.
// It reports whether v was rewritten.
// NOTE: generated from gen/MIPS64.rules — do not hand-edit the logic.
func rewriteValueMIPS64_OpMIPS64SUBVconst_0(v *Value) bool {
	// match: (SUBVconst [0] x)
	// cond:
	// result: x
	for {
		if v.AuxInt != 0 {
			break
		}
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (SUBVconst [c] (MOVVconst [d]))
	// cond:
	// result: (MOVVconst [d-c])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = d - c
		return true
	}
	// match: (SUBVconst [c] (SUBVconst [d] x))
	// cond: is32Bit(-c-d)
	// result: (ADDVconst [-c-d] x)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64SUBVconst {
			break
		}
		d := v_0.AuxInt
		x := v_0.Args[0]
		if !(is32Bit(-c - d)) {
			break
		}
		v.reset(OpMIPS64ADDVconst)
		v.AuxInt = -c - d
		v.AddArg(x)
		return true
	}
	// match: (SUBVconst [c] (ADDVconst [d] x))
	// cond: is32Bit(-c+d)
	// result: (ADDVconst [-c+d] x)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		d := v_0.AuxInt
		x := v_0.Args[0]
		if !(is32Bit(-c + d)) {
			break
		}
		v.reset(OpMIPS64ADDVconst)
		v.AuxInt = -c + d
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64XOR_0 applies generated rewrite rules to an XOR
// value: it folds a constant operand that fits in 32 bits (on either side)
// into an XORconst, and simplifies (XOR x x) to 0.
// It reports whether v was rewritten.
// NOTE: generated from gen/MIPS64.rules — do not hand-edit the logic.
func rewriteValueMIPS64_OpMIPS64XOR_0(v *Value) bool {
	// match: (XOR x (MOVVconst [c]))
	// cond: is32Bit(c)
	// result: (XORconst [c] x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_1.AuxInt
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64XORconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (XOR (MOVVconst [c]) x)
	// cond: is32Bit(c)
	// result: (XORconst [c] x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64XORconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (XOR x x)
	// cond:
	// result: (MOVVconst [0])
	for {
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64XORconst_0 simplifies XOR-immediate: XOR with 0
// is the identity, XOR with -1 is bitwise complement (NORconst [0]), XOR of
// two constants folds into a MOVVconst, and nested XORconsts merge when the
// combined constant still fits in 32 bits.
// It reports whether v was rewritten.
// NOTE: generated from gen/MIPS64.rules — do not hand-edit the logic.
func rewriteValueMIPS64_OpMIPS64XORconst_0(v *Value) bool {
	// match: (XORconst [0] x)
	// cond:
	// result: x
	for {
		if v.AuxInt != 0 {
			break
		}
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (XORconst [-1] x)
	// cond:
	// result: (NORconst [0] x)
	for {
		if v.AuxInt != -1 {
			break
		}
		x := v.Args[0]
		v.reset(OpMIPS64NORconst)
		v.AuxInt = 0
		v.AddArg(x)
		return true
	}
	// match: (XORconst [c] (MOVVconst [d]))
	// cond:
	// result: (MOVVconst [c^d])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = c ^ d
		return true
	}
	// match: (XORconst [c] (XORconst [d] x))
	// cond: is32Bit(c^d)
	// result: (XORconst [c^d] x)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64XORconst {
			break
		}
		d := v_0.AuxInt
		x := v_0.Args[0]
		if !(is32Bit(c ^ d)) {
			break
		}
		v.reset(OpMIPS64XORconst)
		v.AuxInt = c ^ d
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMod16_0 lowers a signed 16-bit modulo to the remainder
// half (Select0) of a 64-bit DIVV whose operands are sign-extended to 64
// bits. Always rewrites, so it always returns true.
// NOTE: generated from gen/MIPS64.rules — do not hand-edit the logic.
func rewriteValueMIPS64_OpMod16_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Mod16 x y)
	// cond:
	// result: (Select0 (DIVV (SignExt16to64 x) (SignExt16to64 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, MakeTuple(types.Int64, types.Int64))
		v1 := b.NewValue0(v.Pos, OpSignExt16to64, types.Int64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpSignExt16to64, types.Int64)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpMod16u_0 lowers an unsigned 16-bit modulo to the
// remainder half (Select0) of a 64-bit DIVVU whose operands are zero-extended
// to 64 bits. Always rewrites, so it always returns true.
// NOTE: generated from gen/MIPS64.rules — do not hand-edit the logic.
func rewriteValueMIPS64_OpMod16u_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Mod16u x y)
	// cond:
	// result: (Select0 (DIVVU (ZeroExt16to64 x) (ZeroExt16to64 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, MakeTuple(types.UInt64, types.UInt64))
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpMod32_0 lowers a signed 32-bit modulo to the remainder
// half (Select0) of a 64-bit DIVV whose operands are sign-extended to 64
// bits. Always rewrites, so it always returns true.
// NOTE: generated from gen/MIPS64.rules — do not hand-edit the logic.
func rewriteValueMIPS64_OpMod32_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Mod32 x y)
	// cond:
	// result: (Select0 (DIVV (SignExt32to64 x) (SignExt32to64 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, MakeTuple(types.Int64, types.Int64))
		v1 := b.NewValue0(v.Pos, OpSignExt32to64, types.Int64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpSignExt32to64, types.Int64)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpMod32u_0 lowers an unsigned 32-bit modulo to the
// remainder half (Select0) of a 64-bit DIVVU whose operands are zero-extended
// to 64 bits. Always rewrites, so it always returns true.
// NOTE: generated from gen/MIPS64.rules — do not hand-edit the logic.
func rewriteValueMIPS64_OpMod32u_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Mod32u x y)
	// cond:
	// result: (Select0 (DIVVU (ZeroExt32to64 x) (ZeroExt32to64 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, MakeTuple(types.UInt64, types.UInt64))
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpMod64_0 lowers a signed 64-bit modulo to the remainder
// half (Select0) of DIVV; no extension is needed at full width. Always
// rewrites, so it always returns true.
// NOTE: generated from gen/MIPS64.rules — do not hand-edit the logic.
func rewriteValueMIPS64_OpMod64_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Mod64 x y)
	// cond:
	// result: (Select0 (DIVV x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, MakeTuple(types.Int64, types.Int64))
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpMod64u_0 lowers an unsigned 64-bit modulo to the
// remainder half (Select0) of DIVVU; no extension is needed at full width.
// Always rewrites, so it always returns true.
// NOTE: generated from gen/MIPS64.rules — do not hand-edit the logic.
func rewriteValueMIPS64_OpMod64u_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Mod64u x y)
	// cond:
	// result: (Select0 (DIVVU x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, MakeTuple(types.UInt64, types.UInt64))
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpMod8_0 lowers a signed 8-bit modulo to the remainder
// half (Select0) of a 64-bit DIVV whose operands are sign-extended to 64
// bits. Always rewrites, so it always returns true.
// NOTE: generated from gen/MIPS64.rules — do not hand-edit the logic.
func rewriteValueMIPS64_OpMod8_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Mod8 x y)
	// cond:
	// result: (Select0 (DIVV (SignExt8to64 x) (SignExt8to64 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, MakeTuple(types.Int64, types.Int64))
		v1 := b.NewValue0(v.Pos, OpSignExt8to64, types.Int64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpSignExt8to64, types.Int64)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpMod8u_0 lowers an unsigned 8-bit modulo to the
// remainder half (Select0) of a 64-bit DIVVU whose operands are zero-extended
// to 64 bits. Always rewrites, so it always returns true.
// NOTE: generated from gen/MIPS64.rules — do not hand-edit the logic.
func rewriteValueMIPS64_OpMod8u_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Mod8u x y)
	// cond:
	// result: (Select0 (DIVVU (ZeroExt8to64 x) (ZeroExt8to64 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, MakeTuple(types.UInt64, types.UInt64))
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
  6272  func rewriteValueMIPS64_OpMove_0(v *Value) bool {
  6273  	b := v.Block
  6274  	_ = b
  6275  	types := &b.Func.Config.Types
  6276  	_ = types
  6277  	// match: (Move [0] _ _ mem)
  6278  	// cond:
  6279  	// result: mem
  6280  	for {
  6281  		if v.AuxInt != 0 {
  6282  			break
  6283  		}
  6284  		mem := v.Args[2]
  6285  		v.reset(OpCopy)
  6286  		v.Type = mem.Type
  6287  		v.AddArg(mem)
  6288  		return true
  6289  	}
  6290  	// match: (Move [1] dst src mem)
  6291  	// cond:
  6292  	// result: (MOVBstore dst (MOVBload src mem) mem)
  6293  	for {
  6294  		if v.AuxInt != 1 {
  6295  			break
  6296  		}
  6297  		dst := v.Args[0]
  6298  		src := v.Args[1]
  6299  		mem := v.Args[2]
  6300  		v.reset(OpMIPS64MOVBstore)
  6301  		v.AddArg(dst)
  6302  		v0 := b.NewValue0(v.Pos, OpMIPS64MOVBload, types.Int8)
  6303  		v0.AddArg(src)
  6304  		v0.AddArg(mem)
  6305  		v.AddArg(v0)
  6306  		v.AddArg(mem)
  6307  		return true
  6308  	}
  6309  	// match: (Move [2] {t} dst src mem)
  6310  	// cond: t.(Type).Alignment()%2 == 0
  6311  	// result: (MOVHstore dst (MOVHload src mem) mem)
  6312  	for {
  6313  		if v.AuxInt != 2 {
  6314  			break
  6315  		}
  6316  		t := v.Aux
  6317  		dst := v.Args[0]
  6318  		src := v.Args[1]
  6319  		mem := v.Args[2]
  6320  		if !(t.(Type).Alignment()%2 == 0) {
  6321  			break
  6322  		}
  6323  		v.reset(OpMIPS64MOVHstore)
  6324  		v.AddArg(dst)
  6325  		v0 := b.NewValue0(v.Pos, OpMIPS64MOVHload, types.Int16)
  6326  		v0.AddArg(src)
  6327  		v0.AddArg(mem)
  6328  		v.AddArg(v0)
  6329  		v.AddArg(mem)
  6330  		return true
  6331  	}
  6332  	// match: (Move [2] dst src mem)
  6333  	// cond:
  6334  	// result: (MOVBstore [1] dst (MOVBload [1] src mem) 		(MOVBstore dst (MOVBload src mem) mem))
  6335  	for {
  6336  		if v.AuxInt != 2 {
  6337  			break
  6338  		}
  6339  		dst := v.Args[0]
  6340  		src := v.Args[1]
  6341  		mem := v.Args[2]
  6342  		v.reset(OpMIPS64MOVBstore)
  6343  		v.AuxInt = 1
  6344  		v.AddArg(dst)
  6345  		v0 := b.NewValue0(v.Pos, OpMIPS64MOVBload, types.Int8)
  6346  		v0.AuxInt = 1
  6347  		v0.AddArg(src)
  6348  		v0.AddArg(mem)
  6349  		v.AddArg(v0)
  6350  		v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, TypeMem)
  6351  		v1.AddArg(dst)
  6352  		v2 := b.NewValue0(v.Pos, OpMIPS64MOVBload, types.Int8)
  6353  		v2.AddArg(src)
  6354  		v2.AddArg(mem)
  6355  		v1.AddArg(v2)
  6356  		v1.AddArg(mem)
  6357  		v.AddArg(v1)
  6358  		return true
  6359  	}
  6360  	// match: (Move [4] {t} dst src mem)
  6361  	// cond: t.(Type).Alignment()%4 == 0
  6362  	// result: (MOVWstore dst (MOVWload src mem) mem)
  6363  	for {
  6364  		if v.AuxInt != 4 {
  6365  			break
  6366  		}
  6367  		t := v.Aux
  6368  		dst := v.Args[0]
  6369  		src := v.Args[1]
  6370  		mem := v.Args[2]
  6371  		if !(t.(Type).Alignment()%4 == 0) {
  6372  			break
  6373  		}
  6374  		v.reset(OpMIPS64MOVWstore)
  6375  		v.AddArg(dst)
  6376  		v0 := b.NewValue0(v.Pos, OpMIPS64MOVWload, types.Int32)
  6377  		v0.AddArg(src)
  6378  		v0.AddArg(mem)
  6379  		v.AddArg(v0)
  6380  		v.AddArg(mem)
  6381  		return true
  6382  	}
  6383  	// match: (Move [4] {t} dst src mem)
  6384  	// cond: t.(Type).Alignment()%2 == 0
  6385  	// result: (MOVHstore [2] dst (MOVHload [2] src mem) 		(MOVHstore dst (MOVHload src mem) mem))
  6386  	for {
  6387  		if v.AuxInt != 4 {
  6388  			break
  6389  		}
  6390  		t := v.Aux
  6391  		dst := v.Args[0]
  6392  		src := v.Args[1]
  6393  		mem := v.Args[2]
  6394  		if !(t.(Type).Alignment()%2 == 0) {
  6395  			break
  6396  		}
  6397  		v.reset(OpMIPS64MOVHstore)
  6398  		v.AuxInt = 2
  6399  		v.AddArg(dst)
  6400  		v0 := b.NewValue0(v.Pos, OpMIPS64MOVHload, types.Int16)
  6401  		v0.AuxInt = 2
  6402  		v0.AddArg(src)
  6403  		v0.AddArg(mem)
  6404  		v.AddArg(v0)
  6405  		v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, TypeMem)
  6406  		v1.AddArg(dst)
  6407  		v2 := b.NewValue0(v.Pos, OpMIPS64MOVHload, types.Int16)
  6408  		v2.AddArg(src)
  6409  		v2.AddArg(mem)
  6410  		v1.AddArg(v2)
  6411  		v1.AddArg(mem)
  6412  		v.AddArg(v1)
  6413  		return true
  6414  	}
  6415  	// match: (Move [4] dst src mem)
  6416  	// cond:
  6417  	// result: (MOVBstore [3] dst (MOVBload [3] src mem) 		(MOVBstore [2] dst (MOVBload [2] src mem) 			(MOVBstore [1] dst (MOVBload [1] src mem) 				(MOVBstore dst (MOVBload src mem) mem))))
  6418  	for {
  6419  		if v.AuxInt != 4 {
  6420  			break
  6421  		}
  6422  		dst := v.Args[0]
  6423  		src := v.Args[1]
  6424  		mem := v.Args[2]
  6425  		v.reset(OpMIPS64MOVBstore)
  6426  		v.AuxInt = 3
  6427  		v.AddArg(dst)
  6428  		v0 := b.NewValue0(v.Pos, OpMIPS64MOVBload, types.Int8)
  6429  		v0.AuxInt = 3
  6430  		v0.AddArg(src)
  6431  		v0.AddArg(mem)
  6432  		v.AddArg(v0)
  6433  		v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, TypeMem)
  6434  		v1.AuxInt = 2
  6435  		v1.AddArg(dst)
  6436  		v2 := b.NewValue0(v.Pos, OpMIPS64MOVBload, types.Int8)
  6437  		v2.AuxInt = 2
  6438  		v2.AddArg(src)
  6439  		v2.AddArg(mem)
  6440  		v1.AddArg(v2)
  6441  		v3 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, TypeMem)
  6442  		v3.AuxInt = 1
  6443  		v3.AddArg(dst)
  6444  		v4 := b.NewValue0(v.Pos, OpMIPS64MOVBload, types.Int8)
  6445  		v4.AuxInt = 1
  6446  		v4.AddArg(src)
  6447  		v4.AddArg(mem)
  6448  		v3.AddArg(v4)
  6449  		v5 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, TypeMem)
  6450  		v5.AddArg(dst)
  6451  		v6 := b.NewValue0(v.Pos, OpMIPS64MOVBload, types.Int8)
  6452  		v6.AddArg(src)
  6453  		v6.AddArg(mem)
  6454  		v5.AddArg(v6)
  6455  		v5.AddArg(mem)
  6456  		v3.AddArg(v5)
  6457  		v1.AddArg(v3)
  6458  		v.AddArg(v1)
  6459  		return true
  6460  	}
  6461  	// match: (Move [8] {t} dst src mem)
  6462  	// cond: t.(Type).Alignment()%8 == 0
  6463  	// result: (MOVVstore dst (MOVVload src mem) mem)
  6464  	for {
  6465  		if v.AuxInt != 8 {
  6466  			break
  6467  		}
  6468  		t := v.Aux
  6469  		dst := v.Args[0]
  6470  		src := v.Args[1]
  6471  		mem := v.Args[2]
  6472  		if !(t.(Type).Alignment()%8 == 0) {
  6473  			break
  6474  		}
  6475  		v.reset(OpMIPS64MOVVstore)
  6476  		v.AddArg(dst)
  6477  		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVload, types.UInt64)
  6478  		v0.AddArg(src)
  6479  		v0.AddArg(mem)
  6480  		v.AddArg(v0)
  6481  		v.AddArg(mem)
  6482  		return true
  6483  	}
  6484  	// match: (Move [8] {t} dst src mem)
  6485  	// cond: t.(Type).Alignment()%4 == 0
  6486  	// result: (MOVWstore [4] dst (MOVWload [4] src mem) 		(MOVWstore dst (MOVWload src mem) mem))
  6487  	for {
  6488  		if v.AuxInt != 8 {
  6489  			break
  6490  		}
  6491  		t := v.Aux
  6492  		dst := v.Args[0]
  6493  		src := v.Args[1]
  6494  		mem := v.Args[2]
  6495  		if !(t.(Type).Alignment()%4 == 0) {
  6496  			break
  6497  		}
  6498  		v.reset(OpMIPS64MOVWstore)
  6499  		v.AuxInt = 4
  6500  		v.AddArg(dst)
  6501  		v0 := b.NewValue0(v.Pos, OpMIPS64MOVWload, types.Int32)
  6502  		v0.AuxInt = 4
  6503  		v0.AddArg(src)
  6504  		v0.AddArg(mem)
  6505  		v.AddArg(v0)
  6506  		v1 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, TypeMem)
  6507  		v1.AddArg(dst)
  6508  		v2 := b.NewValue0(v.Pos, OpMIPS64MOVWload, types.Int32)
  6509  		v2.AddArg(src)
  6510  		v2.AddArg(mem)
  6511  		v1.AddArg(v2)
  6512  		v1.AddArg(mem)
  6513  		v.AddArg(v1)
  6514  		return true
  6515  	}
  6516  	// match: (Move [8] {t} dst src mem)
  6517  	// cond: t.(Type).Alignment()%2 == 0
  6518  	// result: (MOVHstore [6] dst (MOVHload [6] src mem) 		(MOVHstore [4] dst (MOVHload [4] src mem) 			(MOVHstore [2] dst (MOVHload [2] src mem) 				(MOVHstore dst (MOVHload src mem) mem))))
  6519  	for {
  6520  		if v.AuxInt != 8 {
  6521  			break
  6522  		}
  6523  		t := v.Aux
  6524  		dst := v.Args[0]
  6525  		src := v.Args[1]
  6526  		mem := v.Args[2]
  6527  		if !(t.(Type).Alignment()%2 == 0) {
  6528  			break
  6529  		}
  6530  		v.reset(OpMIPS64MOVHstore)
  6531  		v.AuxInt = 6
  6532  		v.AddArg(dst)
  6533  		v0 := b.NewValue0(v.Pos, OpMIPS64MOVHload, types.Int16)
  6534  		v0.AuxInt = 6
  6535  		v0.AddArg(src)
  6536  		v0.AddArg(mem)
  6537  		v.AddArg(v0)
  6538  		v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, TypeMem)
  6539  		v1.AuxInt = 4
  6540  		v1.AddArg(dst)
  6541  		v2 := b.NewValue0(v.Pos, OpMIPS64MOVHload, types.Int16)
  6542  		v2.AuxInt = 4
  6543  		v2.AddArg(src)
  6544  		v2.AddArg(mem)
  6545  		v1.AddArg(v2)
  6546  		v3 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, TypeMem)
  6547  		v3.AuxInt = 2
  6548  		v3.AddArg(dst)
  6549  		v4 := b.NewValue0(v.Pos, OpMIPS64MOVHload, types.Int16)
  6550  		v4.AuxInt = 2
  6551  		v4.AddArg(src)
  6552  		v4.AddArg(mem)
  6553  		v3.AddArg(v4)
  6554  		v5 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, TypeMem)
  6555  		v5.AddArg(dst)
  6556  		v6 := b.NewValue0(v.Pos, OpMIPS64MOVHload, types.Int16)
  6557  		v6.AddArg(src)
  6558  		v6.AddArg(mem)
  6559  		v5.AddArg(v6)
  6560  		v5.AddArg(mem)
  6561  		v3.AddArg(v5)
  6562  		v1.AddArg(v3)
  6563  		v.AddArg(v1)
  6564  		return true
  6565  	}
  6566  	return false
  6567  }
// rewriteValueMIPS64_OpMove_10 lowers generic Move ops (second batch:
// sizes 3, 6, 12, 16 and 24 bytes, plus the general fallback) into
// MIPS64 load/store sequences. The alignment of the moved type
// (carried in v.Aux) selects the widest safe access size; each store
// in a chain takes the next store as its memory argument, which fixes
// their relative order.
func rewriteValueMIPS64_OpMove_10(v *Value) bool {
	b := v.Block
	_ = b
	config := b.Func.Config
	_ = config
	types := &b.Func.Config.Types
	_ = types
	// match: (Move [3] dst src mem)
	// cond:
	// result: (MOVBstore [2] dst (MOVBload [2] src mem) 		(MOVBstore [1] dst (MOVBload [1] src mem) 			(MOVBstore dst (MOVBload src mem) mem)))
	for {
		if v.AuxInt != 3 {
			break
		}
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		// Three byte-sized copies; no alignment requirement.
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = 2
		v.AddArg(dst)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVBload, types.Int8)
		v0.AuxInt = 2
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, TypeMem)
		v1.AuxInt = 1
		v1.AddArg(dst)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVBload, types.Int8)
		v2.AuxInt = 1
		v2.AddArg(src)
		v2.AddArg(mem)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, TypeMem)
		v3.AddArg(dst)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVBload, types.Int8)
		v4.AddArg(src)
		v4.AddArg(mem)
		v3.AddArg(v4)
		v3.AddArg(mem)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
	// match: (Move [6] {t} dst src mem)
	// cond: t.(Type).Alignment()%2 == 0
	// result: (MOVHstore [4] dst (MOVHload [4] src mem) 		(MOVHstore [2] dst (MOVHload [2] src mem) 			(MOVHstore dst (MOVHload src mem) mem)))
	for {
		if v.AuxInt != 6 {
			break
		}
		t := v.Aux
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		if !(t.(Type).Alignment()%2 == 0) {
			break
		}
		// Three halfword copies, requires 2-byte alignment.
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = 4
		v.AddArg(dst)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVHload, types.Int16)
		v0.AuxInt = 4
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, TypeMem)
		v1.AuxInt = 2
		v1.AddArg(dst)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVHload, types.Int16)
		v2.AuxInt = 2
		v2.AddArg(src)
		v2.AddArg(mem)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, TypeMem)
		v3.AddArg(dst)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVHload, types.Int16)
		v4.AddArg(src)
		v4.AddArg(mem)
		v3.AddArg(v4)
		v3.AddArg(mem)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
	// match: (Move [12] {t} dst src mem)
	// cond: t.(Type).Alignment()%4 == 0
	// result: (MOVWstore [8] dst (MOVWload [8] src mem) 		(MOVWstore [4] dst (MOVWload [4] src mem) 			(MOVWstore dst (MOVWload src mem) mem)))
	for {
		if v.AuxInt != 12 {
			break
		}
		t := v.Aux
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		if !(t.(Type).Alignment()%4 == 0) {
			break
		}
		// Three word copies, requires 4-byte alignment.
		v.reset(OpMIPS64MOVWstore)
		v.AuxInt = 8
		v.AddArg(dst)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVWload, types.Int32)
		v0.AuxInt = 8
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, TypeMem)
		v1.AuxInt = 4
		v1.AddArg(dst)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVWload, types.Int32)
		v2.AuxInt = 4
		v2.AddArg(src)
		v2.AddArg(mem)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, TypeMem)
		v3.AddArg(dst)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVWload, types.Int32)
		v4.AddArg(src)
		v4.AddArg(mem)
		v3.AddArg(v4)
		v3.AddArg(mem)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
	// match: (Move [16] {t} dst src mem)
	// cond: t.(Type).Alignment()%8 == 0
	// result: (MOVVstore [8] dst (MOVVload [8] src mem) 		(MOVVstore dst (MOVVload src mem) mem))
	for {
		if v.AuxInt != 16 {
			break
		}
		t := v.Aux
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		if !(t.(Type).Alignment()%8 == 0) {
			break
		}
		// Two doubleword copies, requires 8-byte alignment.
		v.reset(OpMIPS64MOVVstore)
		v.AuxInt = 8
		v.AddArg(dst)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVload, types.UInt64)
		v0.AuxInt = 8
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, TypeMem)
		v1.AddArg(dst)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVload, types.UInt64)
		v2.AddArg(src)
		v2.AddArg(mem)
		v1.AddArg(v2)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	// match: (Move [24] {t} dst src mem)
	// cond: t.(Type).Alignment()%8 == 0
	// result: (MOVVstore [16] dst (MOVVload [16] src mem) 		(MOVVstore [8] dst (MOVVload [8] src mem) 			(MOVVstore dst (MOVVload src mem) mem)))
	for {
		if v.AuxInt != 24 {
			break
		}
		t := v.Aux
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		if !(t.(Type).Alignment()%8 == 0) {
			break
		}
		// Three doubleword copies, requires 8-byte alignment.
		v.reset(OpMIPS64MOVVstore)
		v.AuxInt = 16
		v.AddArg(dst)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVload, types.UInt64)
		v0.AuxInt = 16
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, TypeMem)
		v1.AuxInt = 8
		v1.AddArg(dst)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVload, types.UInt64)
		v2.AuxInt = 8
		v2.AddArg(src)
		v2.AddArg(mem)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, TypeMem)
		v3.AddArg(dst)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVVload, types.UInt64)
		v4.AddArg(src)
		v4.AddArg(mem)
		v3.AddArg(v4)
		v3.AddArg(mem)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
	// match: (Move [s] {t} dst src mem)
	// cond: s > 24 || t.(Type).Alignment()%8 != 0
	// result: (LoweredMove [t.(Type).Alignment()] 		dst 		src 		(ADDVconst <src.Type> src [s-moveSize(t.(Type).Alignment(), config)]) 		mem)
	for {
		s := v.AuxInt
		t := v.Aux
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		if !(s > 24 || t.(Type).Alignment()%8 != 0) {
			break
		}
		// Fallback: a runtime copy loop. The ADDVconst computes the
		// end-of-source address used by the LoweredMove expansion.
		v.reset(OpMIPS64LoweredMove)
		v.AuxInt = t.(Type).Alignment()
		v.AddArg(dst)
		v.AddArg(src)
		v0 := b.NewValue0(v.Pos, OpMIPS64ADDVconst, src.Type)
		v0.AuxInt = s - moveSize(t.(Type).Alignment(), config)
		v0.AddArg(src)
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	return false
}
  6792  func rewriteValueMIPS64_OpMul16_0(v *Value) bool {
  6793  	b := v.Block
  6794  	_ = b
  6795  	types := &b.Func.Config.Types
  6796  	_ = types
  6797  	// match: (Mul16 x y)
  6798  	// cond:
  6799  	// result: (Select1 (MULVU x y))
  6800  	for {
  6801  		x := v.Args[0]
  6802  		y := v.Args[1]
  6803  		v.reset(OpSelect1)
  6804  		v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, MakeTuple(types.UInt64, types.UInt64))
  6805  		v0.AddArg(x)
  6806  		v0.AddArg(y)
  6807  		v.AddArg(v0)
  6808  		return true
  6809  	}
  6810  }
  6811  func rewriteValueMIPS64_OpMul32_0(v *Value) bool {
  6812  	b := v.Block
  6813  	_ = b
  6814  	types := &b.Func.Config.Types
  6815  	_ = types
  6816  	// match: (Mul32 x y)
  6817  	// cond:
  6818  	// result: (Select1 (MULVU x y))
  6819  	for {
  6820  		x := v.Args[0]
  6821  		y := v.Args[1]
  6822  		v.reset(OpSelect1)
  6823  		v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, MakeTuple(types.UInt64, types.UInt64))
  6824  		v0.AddArg(x)
  6825  		v0.AddArg(y)
  6826  		v.AddArg(v0)
  6827  		return true
  6828  	}
  6829  }
  6830  func rewriteValueMIPS64_OpMul32F_0(v *Value) bool {
  6831  	// match: (Mul32F x y)
  6832  	// cond:
  6833  	// result: (MULF x y)
  6834  	for {
  6835  		x := v.Args[0]
  6836  		y := v.Args[1]
  6837  		v.reset(OpMIPS64MULF)
  6838  		v.AddArg(x)
  6839  		v.AddArg(y)
  6840  		return true
  6841  	}
  6842  }
  6843  func rewriteValueMIPS64_OpMul64_0(v *Value) bool {
  6844  	b := v.Block
  6845  	_ = b
  6846  	types := &b.Func.Config.Types
  6847  	_ = types
  6848  	// match: (Mul64 x y)
  6849  	// cond:
  6850  	// result: (Select1 (MULVU x y))
  6851  	for {
  6852  		x := v.Args[0]
  6853  		y := v.Args[1]
  6854  		v.reset(OpSelect1)
  6855  		v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, MakeTuple(types.UInt64, types.UInt64))
  6856  		v0.AddArg(x)
  6857  		v0.AddArg(y)
  6858  		v.AddArg(v0)
  6859  		return true
  6860  	}
  6861  }
  6862  func rewriteValueMIPS64_OpMul64F_0(v *Value) bool {
  6863  	// match: (Mul64F x y)
  6864  	// cond:
  6865  	// result: (MULD x y)
  6866  	for {
  6867  		x := v.Args[0]
  6868  		y := v.Args[1]
  6869  		v.reset(OpMIPS64MULD)
  6870  		v.AddArg(x)
  6871  		v.AddArg(y)
  6872  		return true
  6873  	}
  6874  }
  6875  func rewriteValueMIPS64_OpMul8_0(v *Value) bool {
  6876  	b := v.Block
  6877  	_ = b
  6878  	types := &b.Func.Config.Types
  6879  	_ = types
  6880  	// match: (Mul8 x y)
  6881  	// cond:
  6882  	// result: (Select1 (MULVU x y))
  6883  	for {
  6884  		x := v.Args[0]
  6885  		y := v.Args[1]
  6886  		v.reset(OpSelect1)
  6887  		v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, MakeTuple(types.UInt64, types.UInt64))
  6888  		v0.AddArg(x)
  6889  		v0.AddArg(y)
  6890  		v.AddArg(v0)
  6891  		return true
  6892  	}
  6893  }
  6894  func rewriteValueMIPS64_OpNeg16_0(v *Value) bool {
  6895  	// match: (Neg16 x)
  6896  	// cond:
  6897  	// result: (NEGV x)
  6898  	for {
  6899  		x := v.Args[0]
  6900  		v.reset(OpMIPS64NEGV)
  6901  		v.AddArg(x)
  6902  		return true
  6903  	}
  6904  }
  6905  func rewriteValueMIPS64_OpNeg32_0(v *Value) bool {
  6906  	// match: (Neg32 x)
  6907  	// cond:
  6908  	// result: (NEGV x)
  6909  	for {
  6910  		x := v.Args[0]
  6911  		v.reset(OpMIPS64NEGV)
  6912  		v.AddArg(x)
  6913  		return true
  6914  	}
  6915  }
  6916  func rewriteValueMIPS64_OpNeg32F_0(v *Value) bool {
  6917  	// match: (Neg32F x)
  6918  	// cond:
  6919  	// result: (NEGF x)
  6920  	for {
  6921  		x := v.Args[0]
  6922  		v.reset(OpMIPS64NEGF)
  6923  		v.AddArg(x)
  6924  		return true
  6925  	}
  6926  }
  6927  func rewriteValueMIPS64_OpNeg64_0(v *Value) bool {
  6928  	// match: (Neg64 x)
  6929  	// cond:
  6930  	// result: (NEGV x)
  6931  	for {
  6932  		x := v.Args[0]
  6933  		v.reset(OpMIPS64NEGV)
  6934  		v.AddArg(x)
  6935  		return true
  6936  	}
  6937  }
  6938  func rewriteValueMIPS64_OpNeg64F_0(v *Value) bool {
  6939  	// match: (Neg64F x)
  6940  	// cond:
  6941  	// result: (NEGD x)
  6942  	for {
  6943  		x := v.Args[0]
  6944  		v.reset(OpMIPS64NEGD)
  6945  		v.AddArg(x)
  6946  		return true
  6947  	}
  6948  }
  6949  func rewriteValueMIPS64_OpNeg8_0(v *Value) bool {
  6950  	// match: (Neg8 x)
  6951  	// cond:
  6952  	// result: (NEGV x)
  6953  	for {
  6954  		x := v.Args[0]
  6955  		v.reset(OpMIPS64NEGV)
  6956  		v.AddArg(x)
  6957  		return true
  6958  	}
  6959  }
// rewriteValueMIPS64_OpNeq16_0 lowers Neq16: zero-extend both operands,
// XOR them, and test the result against zero with an unsigned compare
// (SGTU nonzero-vs-0 yields the boolean).
func rewriteValueMIPS64_OpNeq16_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Neq16 x y)
	// cond:
	// result: (SGTU (XOR (ZeroExt16to32 x) (ZeroExt16to64 y)) (MOVVconst [0]))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64XOR, types.UInt64)
		// NOTE(review): x is widened with ZeroExt16to32 while y uses
		// ZeroExt16to64 (the rule above is written that way). Both zero
		// all bits above bit 15, so the XOR presumably sees the same
		// 64-bit values either way — confirm against gen/MIPS64.rules.
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, types.UInt32)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
		v3.AuxInt = 0
		v.AddArg(v3)
		return true
	}
}
// rewriteValueMIPS64_OpNeq32_0 lowers Neq32: zero-extend both operands
// to 64 bits, XOR them, and test the result against zero with an
// unsigned compare (SGTU nonzero-vs-0 yields the boolean).
func rewriteValueMIPS64_OpNeq32_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Neq32 x y)
	// cond:
	// result: (SGTU (XOR (ZeroExt32to64 x) (ZeroExt32to64 y)) (MOVVconst [0]))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64XOR, types.UInt64)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
		v3.AuxInt = 0
		v.AddArg(v3)
		return true
	}
}
  7012  func rewriteValueMIPS64_OpNeq32F_0(v *Value) bool {
  7013  	b := v.Block
  7014  	_ = b
  7015  	// match: (Neq32F x y)
  7016  	// cond:
  7017  	// result: (FPFlagFalse (CMPEQF x y))
  7018  	for {
  7019  		x := v.Args[0]
  7020  		y := v.Args[1]
  7021  		v.reset(OpMIPS64FPFlagFalse)
  7022  		v0 := b.NewValue0(v.Pos, OpMIPS64CMPEQF, TypeFlags)
  7023  		v0.AddArg(x)
  7024  		v0.AddArg(y)
  7025  		v.AddArg(v0)
  7026  		return true
  7027  	}
  7028  }
  7029  func rewriteValueMIPS64_OpNeq64_0(v *Value) bool {
  7030  	b := v.Block
  7031  	_ = b
  7032  	types := &b.Func.Config.Types
  7033  	_ = types
  7034  	// match: (Neq64 x y)
  7035  	// cond:
  7036  	// result: (SGTU (XOR x y) (MOVVconst [0]))
  7037  	for {
  7038  		x := v.Args[0]
  7039  		y := v.Args[1]
  7040  		v.reset(OpMIPS64SGTU)
  7041  		v0 := b.NewValue0(v.Pos, OpMIPS64XOR, types.UInt64)
  7042  		v0.AddArg(x)
  7043  		v0.AddArg(y)
  7044  		v.AddArg(v0)
  7045  		v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
  7046  		v1.AuxInt = 0
  7047  		v.AddArg(v1)
  7048  		return true
  7049  	}
  7050  }
  7051  func rewriteValueMIPS64_OpNeq64F_0(v *Value) bool {
  7052  	b := v.Block
  7053  	_ = b
  7054  	// match: (Neq64F x y)
  7055  	// cond:
  7056  	// result: (FPFlagFalse (CMPEQD x y))
  7057  	for {
  7058  		x := v.Args[0]
  7059  		y := v.Args[1]
  7060  		v.reset(OpMIPS64FPFlagFalse)
  7061  		v0 := b.NewValue0(v.Pos, OpMIPS64CMPEQD, TypeFlags)
  7062  		v0.AddArg(x)
  7063  		v0.AddArg(y)
  7064  		v.AddArg(v0)
  7065  		return true
  7066  	}
  7067  }
// rewriteValueMIPS64_OpNeq8_0 lowers Neq8: zero-extend both operands to
// 64 bits, XOR them, and test the result against zero with an unsigned
// compare (SGTU nonzero-vs-0 yields the boolean).
func rewriteValueMIPS64_OpNeq8_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Neq8 x y)
	// cond:
	// result: (SGTU (XOR (ZeroExt8to64 x) (ZeroExt8to64 y)) (MOVVconst [0]))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64XOR, types.UInt64)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
		v3.AuxInt = 0
		v.AddArg(v3)
		return true
	}
}
  7094  func rewriteValueMIPS64_OpNeqB_0(v *Value) bool {
  7095  	// match: (NeqB x y)
  7096  	// cond:
  7097  	// result: (XOR x y)
  7098  	for {
  7099  		x := v.Args[0]
  7100  		y := v.Args[1]
  7101  		v.reset(OpMIPS64XOR)
  7102  		v.AddArg(x)
  7103  		v.AddArg(y)
  7104  		return true
  7105  	}
  7106  }
  7107  func rewriteValueMIPS64_OpNeqPtr_0(v *Value) bool {
  7108  	b := v.Block
  7109  	_ = b
  7110  	types := &b.Func.Config.Types
  7111  	_ = types
  7112  	// match: (NeqPtr x y)
  7113  	// cond:
  7114  	// result: (SGTU (XOR x y) (MOVVconst [0]))
  7115  	for {
  7116  		x := v.Args[0]
  7117  		y := v.Args[1]
  7118  		v.reset(OpMIPS64SGTU)
  7119  		v0 := b.NewValue0(v.Pos, OpMIPS64XOR, types.UInt64)
  7120  		v0.AddArg(x)
  7121  		v0.AddArg(y)
  7122  		v.AddArg(v0)
  7123  		v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
  7124  		v1.AuxInt = 0
  7125  		v.AddArg(v1)
  7126  		return true
  7127  	}
  7128  }
  7129  func rewriteValueMIPS64_OpNilCheck_0(v *Value) bool {
  7130  	// match: (NilCheck ptr mem)
  7131  	// cond:
  7132  	// result: (LoweredNilCheck ptr mem)
  7133  	for {
  7134  		ptr := v.Args[0]
  7135  		mem := v.Args[1]
  7136  		v.reset(OpMIPS64LoweredNilCheck)
  7137  		v.AddArg(ptr)
  7138  		v.AddArg(mem)
  7139  		return true
  7140  	}
  7141  }
  7142  func rewriteValueMIPS64_OpNot_0(v *Value) bool {
  7143  	// match: (Not x)
  7144  	// cond:
  7145  	// result: (XORconst [1] x)
  7146  	for {
  7147  		x := v.Args[0]
  7148  		v.reset(OpMIPS64XORconst)
  7149  		v.AuxInt = 1
  7150  		v.AddArg(x)
  7151  		return true
  7152  	}
  7153  }
  7154  func rewriteValueMIPS64_OpOffPtr_0(v *Value) bool {
  7155  	// match: (OffPtr [off] ptr:(SP))
  7156  	// cond:
  7157  	// result: (MOVVaddr [off] ptr)
  7158  	for {
  7159  		off := v.AuxInt
  7160  		ptr := v.Args[0]
  7161  		if ptr.Op != OpSP {
  7162  			break
  7163  		}
  7164  		v.reset(OpMIPS64MOVVaddr)
  7165  		v.AuxInt = off
  7166  		v.AddArg(ptr)
  7167  		return true
  7168  	}
  7169  	// match: (OffPtr [off] ptr)
  7170  	// cond:
  7171  	// result: (ADDVconst [off] ptr)
  7172  	for {
  7173  		off := v.AuxInt
  7174  		ptr := v.Args[0]
  7175  		v.reset(OpMIPS64ADDVconst)
  7176  		v.AuxInt = off
  7177  		v.AddArg(ptr)
  7178  		return true
  7179  	}
  7180  }
  7181  func rewriteValueMIPS64_OpOr16_0(v *Value) bool {
  7182  	// match: (Or16 x y)
  7183  	// cond:
  7184  	// result: (OR x y)
  7185  	for {
  7186  		x := v.Args[0]
  7187  		y := v.Args[1]
  7188  		v.reset(OpMIPS64OR)
  7189  		v.AddArg(x)
  7190  		v.AddArg(y)
  7191  		return true
  7192  	}
  7193  }
  7194  func rewriteValueMIPS64_OpOr32_0(v *Value) bool {
  7195  	// match: (Or32 x y)
  7196  	// cond:
  7197  	// result: (OR x y)
  7198  	for {
  7199  		x := v.Args[0]
  7200  		y := v.Args[1]
  7201  		v.reset(OpMIPS64OR)
  7202  		v.AddArg(x)
  7203  		v.AddArg(y)
  7204  		return true
  7205  	}
  7206  }
  7207  func rewriteValueMIPS64_OpOr64_0(v *Value) bool {
  7208  	// match: (Or64 x y)
  7209  	// cond:
  7210  	// result: (OR x y)
  7211  	for {
  7212  		x := v.Args[0]
  7213  		y := v.Args[1]
  7214  		v.reset(OpMIPS64OR)
  7215  		v.AddArg(x)
  7216  		v.AddArg(y)
  7217  		return true
  7218  	}
  7219  }
  7220  func rewriteValueMIPS64_OpOr8_0(v *Value) bool {
  7221  	// match: (Or8 x y)
  7222  	// cond:
  7223  	// result: (OR x y)
  7224  	for {
  7225  		x := v.Args[0]
  7226  		y := v.Args[1]
  7227  		v.reset(OpMIPS64OR)
  7228  		v.AddArg(x)
  7229  		v.AddArg(y)
  7230  		return true
  7231  	}
  7232  }
  7233  func rewriteValueMIPS64_OpOrB_0(v *Value) bool {
  7234  	// match: (OrB x y)
  7235  	// cond:
  7236  	// result: (OR x y)
  7237  	for {
  7238  		x := v.Args[0]
  7239  		y := v.Args[1]
  7240  		v.reset(OpMIPS64OR)
  7241  		v.AddArg(x)
  7242  		v.AddArg(y)
  7243  		return true
  7244  	}
  7245  }
  7246  func rewriteValueMIPS64_OpRound32F_0(v *Value) bool {
  7247  	// match: (Round32F x)
  7248  	// cond:
  7249  	// result: x
  7250  	for {
  7251  		x := v.Args[0]
  7252  		v.reset(OpCopy)
  7253  		v.Type = x.Type
  7254  		v.AddArg(x)
  7255  		return true
  7256  	}
  7257  }
  7258  func rewriteValueMIPS64_OpRound64F_0(v *Value) bool {
  7259  	// match: (Round64F x)
  7260  	// cond:
  7261  	// result: x
  7262  	for {
  7263  		x := v.Args[0]
  7264  		v.reset(OpCopy)
  7265  		v.Type = x.Type
  7266  		v.AddArg(x)
  7267  		return true
  7268  	}
  7269  }
// rewriteValueMIPS64_OpRsh16Ux16_0 lowers an unsigned 16-bit right shift
// with a 16-bit count. The SRLV result is ANDed with a mask built from
// (NEGV (SGTU 64 count)): when count >= 64 the SGTU is 0 and the mask is
// 0, so oversized shifts yield 0 as Go requires.
func rewriteValueMIPS64_OpRsh16Ux16_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Rsh16Ux16 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <types.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> (ZeroExt16to64 x) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v2 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
		v5.AddArg(x)
		v4.AddArg(v5)
		v6 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
		v6.AddArg(y)
		v4.AddArg(v6)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh16Ux32_0 lowers an unsigned 16-bit right shift
// with a 32-bit count. The SRLV result is ANDed with a mask built from
// (NEGV (SGTU 64 count)): when count >= 64 the mask is 0, so oversized
// shifts yield 0 as Go requires.
func rewriteValueMIPS64_OpRsh16Ux32_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Rsh16Ux32 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <types.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> (ZeroExt16to64 x) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v2 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
		v5.AddArg(x)
		v4.AddArg(v5)
		v6 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v6.AddArg(y)
		v4.AddArg(v6)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh16Ux64_0 lowers an unsigned 16-bit right shift
// with a 64-bit count (no widening of y needed). The SRLV result is
// ANDed with a mask built from (NEGV (SGTU 64 y)): when y >= 64 the mask
// is 0, so oversized shifts yield 0 as Go requires.
func rewriteValueMIPS64_OpRsh16Ux64_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Rsh16Ux64 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <types.UInt64> [64]) y)) (SRLV <t> (ZeroExt16to64 x) y))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v2 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
		v4.AddArg(x)
		v3.AddArg(v4)
		v3.AddArg(y)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueMIPS64_OpRsh16Ux8_0 lowers an unsigned 16-bit right shift
// with an 8-bit count. The SRLV result is ANDed with a mask built from
// (NEGV (SGTU 64 count)): when count >= 64 the mask is 0, so oversized
// shifts yield 0 as Go requires.
func rewriteValueMIPS64_OpRsh16Ux8_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Rsh16Ux8 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <types.UInt64> [64]) (ZeroExt8to64  y))) (SRLV <t> (ZeroExt16to64 x) (ZeroExt8to64  y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v2 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
		v5.AddArg(x)
		v4.AddArg(v5)
		v6 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v6.AddArg(y)
		v4.AddArg(v6)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh16x16_0 lowers a signed 16-bit right shift with
// a 16-bit count. The count is ORed with (NEGV (SGTU count 63)): counts
// above 63 become an all-ones value, clamping the SRAV so it fills the
// result with the sign bit, as Go requires for oversized signed shifts.
func rewriteValueMIPS64_OpRsh16x16_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Rsh16x16 <t> x y)
	// cond:
	// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (Const64 <types.UInt64> [63]))) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, types.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v5 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v5.AuxInt = 63
		v3.AddArg(v5)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v6 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
		v6.AddArg(y)
		v1.AddArg(v6)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh16x32_0 lowers a signed 16-bit right shift with
// a 32-bit count. The count is ORed with (NEGV (SGTU count 63)): counts
// above 63 become an all-ones value, clamping the SRAV so it fills the
// result with the sign bit, as Go requires for oversized signed shifts.
func rewriteValueMIPS64_OpRsh16x32_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Rsh16x32 <t> x y)
	// cond:
	// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (Const64 <types.UInt64> [63]))) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, types.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v5 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v5.AuxInt = 63
		v3.AddArg(v5)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v6 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v6.AddArg(y)
		v1.AddArg(v6)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh16x64_0 lowers a signed 16-bit right shift with
// a 64-bit count (no widening of y needed). The count is ORed with
// (NEGV (SGTU y 63)): counts above 63 become an all-ones value, clamping
// the SRAV so it fills the result with the sign bit.
func rewriteValueMIPS64_OpRsh16x64_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Rsh16x64 <t> x y)
	// cond:
	// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU y (Const64 <types.UInt64> [63]))) y))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, types.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v3.AddArg(y)
		v4 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v4.AuxInt = 63
		v3.AddArg(v4)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh16x8_0 lowers a signed 16-bit right shift with
// an 8-bit count. The count is ORed with (NEGV (SGTU count 63)): counts
// above 63 become an all-ones value, clamping the SRAV so it fills the
// result with the sign bit, as Go requires for oversized signed shifts.
func rewriteValueMIPS64_OpRsh16x8_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Rsh16x8 <t> x y)
	// cond:
	// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64  y) (Const64 <types.UInt64> [63]))) (ZeroExt8to64  y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, types.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v5 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v5.AuxInt = 63
		v3.AddArg(v5)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v6 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v6.AddArg(y)
		v1.AddArg(v6)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh32Ux16_0 lowers Rsh32Ux16 (unsigned right shift of a
// 32-bit value by a 16-bit count). The SRLV result is ANDed with
// (NEGV (SGTU 64 count)): all ones when count < 64, zero otherwise, so
// over-wide shifts produce 0 as Go requires.
func rewriteValueMIPS64_OpRsh32Ux16_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Rsh32Ux16 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <types.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> (ZeroExt32to64 x) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v2 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v5.AddArg(x)
		v4.AddArg(v5)
		v6 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
		v6.AddArg(y)
		v4.AddArg(v6)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh32Ux32_0 lowers Rsh32Ux32 (unsigned right shift of a
// 32-bit value by a 32-bit count). Both operands are zero-extended to 64 bits;
// the SRLV result is masked with (NEGV (SGTU 64 count)) so counts >= 64
// yield 0, per Go shift semantics.
func rewriteValueMIPS64_OpRsh32Ux32_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Rsh32Ux32 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <types.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> (ZeroExt32to64 x) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v2 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v5.AddArg(x)
		v4.AddArg(v5)
		v6 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v6.AddArg(y)
		v4.AddArg(v6)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh32Ux64_0 lowers Rsh32Ux64 (unsigned right shift of a
// 32-bit value by a 64-bit count). The count y is already 64-bit so it is used
// directly; the SRLV result is masked with (NEGV (SGTU 64 y)) so counts >= 64
// yield 0, per Go shift semantics.
func rewriteValueMIPS64_OpRsh32Ux64_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Rsh32Ux64 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <types.UInt64> [64]) y)) (SRLV <t> (ZeroExt32to64 x) y))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v2 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v4.AddArg(x)
		v3.AddArg(v4)
		v3.AddArg(y)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueMIPS64_OpRsh32Ux8_0 lowers Rsh32Ux8 (unsigned right shift of a
// 32-bit value by an 8-bit count). Value and count are zero-extended to
// 64 bits; the SRLV result is masked with (NEGV (SGTU 64 count)) so counts
// >= 64 yield 0, per Go shift semantics.
func rewriteValueMIPS64_OpRsh32Ux8_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Rsh32Ux8 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <types.UInt64> [64]) (ZeroExt8to64  y))) (SRLV <t> (ZeroExt32to64 x) (ZeroExt8to64  y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v2 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v5.AddArg(x)
		v4.AddArg(v5)
		v6 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v6.AddArg(y)
		v4.AddArg(v6)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh32x16_0 lowers Rsh32x16 (signed right shift of a
// 32-bit value by a 16-bit count). The zero-extended count is OR'd with the
// all-ones result of (NEGV (SGTU count 63)) so counts above 63 saturate and
// SRAV yields the sign bit, per Go shift semantics.
func rewriteValueMIPS64_OpRsh32x16_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Rsh32x16 <t> x y)
	// cond:
	// result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (Const64 <types.UInt64> [63]))) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, types.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v5 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v5.AuxInt = 63
		v3.AddArg(v5)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v6 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
		v6.AddArg(y)
		v1.AddArg(v6)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh32x32_0 lowers Rsh32x32 (signed right shift of a
// 32-bit value by a 32-bit count). The zero-extended count is OR'd with the
// all-ones result of (NEGV (SGTU count 63)) so counts above 63 saturate and
// SRAV yields the sign bit, per Go shift semantics.
func rewriteValueMIPS64_OpRsh32x32_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Rsh32x32 <t> x y)
	// cond:
	// result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (Const64 <types.UInt64> [63]))) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, types.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v5 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v5.AuxInt = 63
		v3.AddArg(v5)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v6 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v6.AddArg(y)
		v1.AddArg(v6)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh32x64_0 lowers Rsh32x64 (signed right shift of a
// 32-bit value by a 64-bit count). The count y is used directly and OR'd with
// the all-ones result of (NEGV (SGTU y 63)) so counts above 63 saturate and
// SRAV yields the sign bit, per Go shift semantics.
func rewriteValueMIPS64_OpRsh32x64_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Rsh32x64 <t> x y)
	// cond:
	// result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU y (Const64 <types.UInt64> [63]))) y))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, types.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v3.AddArg(y)
		v4 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v4.AuxInt = 63
		v3.AddArg(v4)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh32x8_0 lowers Rsh32x8 (signed right shift of a
// 32-bit value by an 8-bit count). The zero-extended count is OR'd with the
// all-ones result of (NEGV (SGTU count 63)) so counts above 63 saturate and
// SRAV yields the sign bit, per Go shift semantics.
func rewriteValueMIPS64_OpRsh32x8_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Rsh32x8 <t> x y)
	// cond:
	// result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64  y) (Const64 <types.UInt64> [63]))) (ZeroExt8to64  y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, types.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v5 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v5.AuxInt = 63
		v3.AddArg(v5)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v6 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v6.AddArg(y)
		v1.AddArg(v6)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh64Ux16_0 lowers Rsh64Ux16 (unsigned right shift of a
// 64-bit value by a 16-bit count). x needs no extension; the SRLV result is
// masked with (NEGV (SGTU 64 count)) so counts >= 64 yield 0, per Go shift
// semantics.
func rewriteValueMIPS64_OpRsh64Ux16_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Rsh64Ux16 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <types.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> x (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v2 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh64Ux32_0 lowers Rsh64Ux32 (unsigned right shift of a
// 64-bit value by a 32-bit count). x needs no extension; the SRLV result is
// masked with (NEGV (SGTU 64 count)) so counts >= 64 yield 0, per Go shift
// semantics.
func rewriteValueMIPS64_OpRsh64Ux32_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Rsh64Ux32 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <types.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> x (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v2 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh64Ux64_0 lowers Rsh64Ux64 (unsigned right shift of a
// 64-bit value by a 64-bit count). Neither operand needs extension; the SRLV
// result is masked with (NEGV (SGTU 64 y)) so counts >= 64 yield 0, per Go
// shift semantics.
func rewriteValueMIPS64_OpRsh64Ux64_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Rsh64Ux64 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <types.UInt64> [64]) y)) (SRLV <t> x y))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v2 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v3.AddArg(x)
		v3.AddArg(y)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueMIPS64_OpRsh64Ux8_0 lowers Rsh64Ux8 (unsigned right shift of a
// 64-bit value by an 8-bit count). x needs no extension; the SRLV result is
// masked with (NEGV (SGTU 64 count)) so counts >= 64 yield 0, per Go shift
// semantics.
func rewriteValueMIPS64_OpRsh64Ux8_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Rsh64Ux8 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <types.UInt64> [64]) (ZeroExt8to64  y))) (SRLV <t> x (ZeroExt8to64  y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v2 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh64x16_0 lowers Rsh64x16 (signed right shift of a
// 64-bit value by a 16-bit count). x needs no extension; the zero-extended
// count is OR'd with the all-ones result of (NEGV (SGTU count 63)) so counts
// above 63 saturate and SRAV yields the sign bit, per Go shift semantics.
func rewriteValueMIPS64_OpRsh64x16_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Rsh64x16 <t> x y)
	// cond:
	// result: (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (Const64 <types.UInt64> [63]))) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		v.AddArg(x)
		v0 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v4 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v4.AuxInt = 63
		v2.AddArg(v4)
		v1.AddArg(v2)
		v0.AddArg(v1)
		v5 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
		v5.AddArg(y)
		v0.AddArg(v5)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpRsh64x32_0 lowers Rsh64x32 (signed right shift of a
// 64-bit value by a 32-bit count). x needs no extension; the zero-extended
// count is OR'd with the all-ones result of (NEGV (SGTU count 63)) so counts
// above 63 saturate and SRAV yields the sign bit, per Go shift semantics.
func rewriteValueMIPS64_OpRsh64x32_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Rsh64x32 <t> x y)
	// cond:
	// result: (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (Const64 <types.UInt64> [63]))) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		v.AddArg(x)
		v0 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v4 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v4.AuxInt = 63
		v2.AddArg(v4)
		v1.AddArg(v2)
		v0.AddArg(v1)
		v5 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v5.AddArg(y)
		v0.AddArg(v5)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpRsh64x64_0 lowers Rsh64x64 (signed right shift of a
// 64-bit value by a 64-bit count). Neither operand needs extension; the count
// y is OR'd with the all-ones result of (NEGV (SGTU y 63)) so counts above 63
// saturate and SRAV yields the sign bit, per Go shift semantics.
func rewriteValueMIPS64_OpRsh64x64_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Rsh64x64 <t> x y)
	// cond:
	// result: (SRAV x (OR <t> (NEGV <t> (SGTU y (Const64 <types.UInt64> [63]))) y))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		v.AddArg(x)
		v0 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v2.AddArg(y)
		v3 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v3.AuxInt = 63
		v2.AddArg(v3)
		v1.AddArg(v2)
		v0.AddArg(v1)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpRsh64x8_0 lowers Rsh64x8 (signed right shift of a
// 64-bit value by an 8-bit count). x needs no extension; the zero-extended
// count is OR'd with the all-ones result of (NEGV (SGTU count 63)) so counts
// above 63 saturate and SRAV yields the sign bit, per Go shift semantics.
func rewriteValueMIPS64_OpRsh64x8_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Rsh64x8 <t> x y)
	// cond:
	// result: (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt8to64  y) (Const64 <types.UInt64> [63]))) (ZeroExt8to64  y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		v.AddArg(x)
		v0 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v4 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v4.AuxInt = 63
		v2.AddArg(v4)
		v1.AddArg(v2)
		v0.AddArg(v1)
		v5 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v5.AddArg(y)
		v0.AddArg(v5)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpRsh8Ux16_0 lowers Rsh8Ux16 (unsigned right shift of an
// 8-bit value by a 16-bit count). Value and count are zero-extended to
// 64 bits; the SRLV result is masked with (NEGV (SGTU 64 count)) so counts
// >= 64 yield 0, per Go shift semantics.
func rewriteValueMIPS64_OpRsh8Ux16_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Rsh8Ux16 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <types.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> (ZeroExt8to64 x) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v2 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v5.AddArg(x)
		v4.AddArg(v5)
		v6 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
		v6.AddArg(y)
		v4.AddArg(v6)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh8Ux32_0 lowers Rsh8Ux32 (unsigned right shift of an
// 8-bit value by a 32-bit count). Value and count are zero-extended to
// 64 bits; the SRLV result is masked with (NEGV (SGTU 64 count)) so counts
// >= 64 yield 0, per Go shift semantics.
func rewriteValueMIPS64_OpRsh8Ux32_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Rsh8Ux32 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <types.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> (ZeroExt8to64 x) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v2 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v5.AddArg(x)
		v4.AddArg(v5)
		v6 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v6.AddArg(y)
		v4.AddArg(v6)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh8Ux64_0 lowers Rsh8Ux64 (unsigned right shift of an
// 8-bit value by a 64-bit count). The count y is already 64-bit so it is used
// directly; the SRLV result is masked with (NEGV (SGTU 64 y)) so counts >= 64
// yield 0, per Go shift semantics.
func rewriteValueMIPS64_OpRsh8Ux64_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Rsh8Ux64 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <types.UInt64> [64]) y)) (SRLV <t> (ZeroExt8to64 x) y))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v2 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v4.AddArg(x)
		v3.AddArg(v4)
		v3.AddArg(y)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueMIPS64_OpRsh8Ux8_0 lowers Rsh8Ux8 (unsigned right shift of an
// 8-bit value by an 8-bit count). Value and count are zero-extended to
// 64 bits; the SRLV result is masked with (NEGV (SGTU 64 count)) so counts
// >= 64 yield 0, per Go shift semantics.
func rewriteValueMIPS64_OpRsh8Ux8_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Rsh8Ux8 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (Const64 <types.UInt64> [64]) (ZeroExt8to64  y))) (SRLV <t> (ZeroExt8to64 x) (ZeroExt8to64  y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v2 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v5.AddArg(x)
		v4.AddArg(v5)
		v6 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v6.AddArg(y)
		v4.AddArg(v6)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh8x16_0 lowers Rsh8x16 (signed right shift of an
// 8-bit value by a 16-bit count). The zero-extended count is OR'd with the
// all-ones result of (NEGV (SGTU count 63)) so counts above 63 saturate and
// SRAV yields the sign bit, per Go shift semantics.
func rewriteValueMIPS64_OpRsh8x16_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Rsh8x16 <t> x y)
	// cond:
	// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (Const64 <types.UInt64> [63]))) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, types.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v5 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v5.AuxInt = 63
		v3.AddArg(v5)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v6 := b.NewValue0(v.Pos, OpZeroExt16to64, types.UInt64)
		v6.AddArg(y)
		v1.AddArg(v6)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh8x32_0 lowers Rsh8x32 (signed right shift of an
// 8-bit value by a 32-bit count). The zero-extended count is OR'd with the
// all-ones result of (NEGV (SGTU count 63)) so counts above 63 saturate and
// SRAV yields the sign bit, per Go shift semantics.
func rewriteValueMIPS64_OpRsh8x32_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Rsh8x32 <t> x y)
	// cond:
	// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (Const64 <types.UInt64> [63]))) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, types.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v5 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v5.AuxInt = 63
		v3.AddArg(v5)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v6 := b.NewValue0(v.Pos, OpZeroExt32to64, types.UInt64)
		v6.AddArg(y)
		v1.AddArg(v6)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh8x64_0 lowers Rsh8x64 (signed right shift of an
// 8-bit value by a 64-bit count). The count y is used directly and OR'd with
// the all-ones result of (NEGV (SGTU y 63)) so counts above 63 saturate and
// SRAV yields the sign bit, per Go shift semantics.
func rewriteValueMIPS64_OpRsh8x64_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Rsh8x64 <t> x y)
	// cond:
	// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU y (Const64 <types.UInt64> [63]))) y))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, types.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v3.AddArg(y)
		v4 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v4.AuxInt = 63
		v3.AddArg(v4)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh8x8_0 lowers Rsh8x8 (signed right shift of an
// 8-bit value by an 8-bit count). The zero-extended count is OR'd with the
// all-ones result of (NEGV (SGTU count 63)) so counts above 63 saturate and
// SRAV yields the sign bit, per Go shift semantics.
func rewriteValueMIPS64_OpRsh8x8_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Rsh8x8 <t> x y)
	// cond:
	// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64  y) (Const64 <types.UInt64> [63]))) (ZeroExt8to64  y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, types.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, types.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v5 := b.NewValue0(v.Pos, OpConst64, types.UInt64)
		v5.AuxInt = 63
		v3.AddArg(v5)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v6 := b.NewValue0(v.Pos, OpZeroExt8to64, types.UInt64)
		v6.AddArg(y)
		v1.AddArg(v6)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpSelect0_0 simplifies Select0 of a MIPS64 DIVV/DIVVU.
// The rules here treat Select0 as the remainder half of the divide (the first
// rule folds x%1 to 0): remainder by 1 is 0, unsigned remainder by a power of
// two becomes a mask (ANDconst [c-1]), and two constant operands fold to a
// constant remainder.
// NOTE(review): the constant folds compute c%d at rewrite time; this relies on
// the front end rejecting constant division by zero before SSA — confirm that
// d == 0 (and the MinInt64 % -1 case for signed DIVV) cannot reach here.
func rewriteValueMIPS64_OpSelect0_0(v *Value) bool {
	// match: (Select0 (DIVVU _ (MOVVconst [1])))
	// cond:
	// result: (MOVVconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64DIVVU {
			break
		}
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		if v_0_1.AuxInt != 1 {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	// match: (Select0 (DIVVU x (MOVVconst [c])))
	// cond: isPowerOfTwo(c)
	// result: (ANDconst [c-1] x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64DIVVU {
			break
		}
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0_1.AuxInt
		if !(isPowerOfTwo(c)) {
			break
		}
		v.reset(OpMIPS64ANDconst)
		v.AuxInt = c - 1
		v.AddArg(x)
		return true
	}
	// match: (Select0 (DIVV (MOVVconst [c]) (MOVVconst [d])))
	// cond:
	// result: (MOVVconst [int64(c)%int64(d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64DIVV {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0_0.AuxInt
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0_1.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64(c) % int64(d)
		return true
	}
	// match: (Select0 (DIVVU (MOVVconst [c]) (MOVVconst [d])))
	// cond:
	// result: (MOVVconst [int64(uint64(c)%uint64(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64DIVVU {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0_0.AuxInt
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0_1.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64(uint64(c) % uint64(d))
		return true
	}
	return false
}
  8398  func rewriteValueMIPS64_OpSelect1_0(v *Value) bool {
  8399  	// match: (Select1 (MULVU x (MOVVconst [-1])))
  8400  	// cond:
  8401  	// result: (NEGV x)
  8402  	for {
  8403  		v_0 := v.Args[0]
  8404  		if v_0.Op != OpMIPS64MULVU {
  8405  			break
  8406  		}
  8407  		x := v_0.Args[0]
  8408  		v_0_1 := v_0.Args[1]
  8409  		if v_0_1.Op != OpMIPS64MOVVconst {
  8410  			break
  8411  		}
  8412  		if v_0_1.AuxInt != -1 {
  8413  			break
  8414  		}
  8415  		v.reset(OpMIPS64NEGV)
  8416  		v.AddArg(x)
  8417  		return true
  8418  	}
  8419  	// match: (Select1 (MULVU (MOVVconst [-1]) x))
  8420  	// cond:
  8421  	// result: (NEGV x)
  8422  	for {
  8423  		v_0 := v.Args[0]
  8424  		if v_0.Op != OpMIPS64MULVU {
  8425  			break
  8426  		}
  8427  		v_0_0 := v_0.Args[0]
  8428  		if v_0_0.Op != OpMIPS64MOVVconst {
  8429  			break
  8430  		}
  8431  		if v_0_0.AuxInt != -1 {
  8432  			break
  8433  		}
  8434  		x := v_0.Args[1]
  8435  		v.reset(OpMIPS64NEGV)
  8436  		v.AddArg(x)
  8437  		return true
  8438  	}
  8439  	// match: (Select1 (MULVU _ (MOVVconst [0])))
  8440  	// cond:
  8441  	// result: (MOVVconst [0])
  8442  	for {
  8443  		v_0 := v.Args[0]
  8444  		if v_0.Op != OpMIPS64MULVU {
  8445  			break
  8446  		}
  8447  		v_0_1 := v_0.Args[1]
  8448  		if v_0_1.Op != OpMIPS64MOVVconst {
  8449  			break
  8450  		}
  8451  		if v_0_1.AuxInt != 0 {
  8452  			break
  8453  		}
  8454  		v.reset(OpMIPS64MOVVconst)
  8455  		v.AuxInt = 0
  8456  		return true
  8457  	}
  8458  	// match: (Select1 (MULVU (MOVVconst [0]) _))
  8459  	// cond:
  8460  	// result: (MOVVconst [0])
  8461  	for {
  8462  		v_0 := v.Args[0]
  8463  		if v_0.Op != OpMIPS64MULVU {
  8464  			break
  8465  		}
  8466  		v_0_0 := v_0.Args[0]
  8467  		if v_0_0.Op != OpMIPS64MOVVconst {
  8468  			break
  8469  		}
  8470  		if v_0_0.AuxInt != 0 {
  8471  			break
  8472  		}
  8473  		v.reset(OpMIPS64MOVVconst)
  8474  		v.AuxInt = 0
  8475  		return true
  8476  	}
  8477  	// match: (Select1 (MULVU x (MOVVconst [1])))
  8478  	// cond:
  8479  	// result: x
  8480  	for {
  8481  		v_0 := v.Args[0]
  8482  		if v_0.Op != OpMIPS64MULVU {
  8483  			break
  8484  		}
  8485  		x := v_0.Args[0]
  8486  		v_0_1 := v_0.Args[1]
  8487  		if v_0_1.Op != OpMIPS64MOVVconst {
  8488  			break
  8489  		}
  8490  		if v_0_1.AuxInt != 1 {
  8491  			break
  8492  		}
  8493  		v.reset(OpCopy)
  8494  		v.Type = x.Type
  8495  		v.AddArg(x)
  8496  		return true
  8497  	}
  8498  	// match: (Select1 (MULVU (MOVVconst [1]) x))
  8499  	// cond:
  8500  	// result: x
  8501  	for {
  8502  		v_0 := v.Args[0]
  8503  		if v_0.Op != OpMIPS64MULVU {
  8504  			break
  8505  		}
  8506  		v_0_0 := v_0.Args[0]
  8507  		if v_0_0.Op != OpMIPS64MOVVconst {
  8508  			break
  8509  		}
  8510  		if v_0_0.AuxInt != 1 {
  8511  			break
  8512  		}
  8513  		x := v_0.Args[1]
  8514  		v.reset(OpCopy)
  8515  		v.Type = x.Type
  8516  		v.AddArg(x)
  8517  		return true
  8518  	}
  8519  	// match: (Select1 (MULVU x (MOVVconst [c])))
  8520  	// cond: isPowerOfTwo(c)
  8521  	// result: (SLLVconst [log2(c)] x)
  8522  	for {
  8523  		v_0 := v.Args[0]
  8524  		if v_0.Op != OpMIPS64MULVU {
  8525  			break
  8526  		}
  8527  		x := v_0.Args[0]
  8528  		v_0_1 := v_0.Args[1]
  8529  		if v_0_1.Op != OpMIPS64MOVVconst {
  8530  			break
  8531  		}
  8532  		c := v_0_1.AuxInt
  8533  		if !(isPowerOfTwo(c)) {
  8534  			break
  8535  		}
  8536  		v.reset(OpMIPS64SLLVconst)
  8537  		v.AuxInt = log2(c)
  8538  		v.AddArg(x)
  8539  		return true
  8540  	}
  8541  	// match: (Select1 (MULVU (MOVVconst [c]) x))
  8542  	// cond: isPowerOfTwo(c)
  8543  	// result: (SLLVconst [log2(c)] x)
  8544  	for {
  8545  		v_0 := v.Args[0]
  8546  		if v_0.Op != OpMIPS64MULVU {
  8547  			break
  8548  		}
  8549  		v_0_0 := v_0.Args[0]
  8550  		if v_0_0.Op != OpMIPS64MOVVconst {
  8551  			break
  8552  		}
  8553  		c := v_0_0.AuxInt
  8554  		x := v_0.Args[1]
  8555  		if !(isPowerOfTwo(c)) {
  8556  			break
  8557  		}
  8558  		v.reset(OpMIPS64SLLVconst)
  8559  		v.AuxInt = log2(c)
  8560  		v.AddArg(x)
  8561  		return true
  8562  	}
  8563  	// match: (Select1 (MULVU (MOVVconst [-1]) x))
  8564  	// cond:
  8565  	// result: (NEGV x)
  8566  	for {
  8567  		v_0 := v.Args[0]
  8568  		if v_0.Op != OpMIPS64MULVU {
  8569  			break
  8570  		}
  8571  		v_0_0 := v_0.Args[0]
  8572  		if v_0_0.Op != OpMIPS64MOVVconst {
  8573  			break
  8574  		}
  8575  		if v_0_0.AuxInt != -1 {
  8576  			break
  8577  		}
  8578  		x := v_0.Args[1]
  8579  		v.reset(OpMIPS64NEGV)
  8580  		v.AddArg(x)
  8581  		return true
  8582  	}
  8583  	// match: (Select1 (MULVU x (MOVVconst [-1])))
  8584  	// cond:
  8585  	// result: (NEGV x)
  8586  	for {
  8587  		v_0 := v.Args[0]
  8588  		if v_0.Op != OpMIPS64MULVU {
  8589  			break
  8590  		}
  8591  		x := v_0.Args[0]
  8592  		v_0_1 := v_0.Args[1]
  8593  		if v_0_1.Op != OpMIPS64MOVVconst {
  8594  			break
  8595  		}
  8596  		if v_0_1.AuxInt != -1 {
  8597  			break
  8598  		}
  8599  		v.reset(OpMIPS64NEGV)
  8600  		v.AddArg(x)
  8601  		return true
  8602  	}
  8603  	return false
  8604  }
// rewriteValueMIPS64_OpSelect1_10 tries rewrite rules 10-19 for Select1.
// Select1 of a MULVU is the low 64 bits of the unsigned product, and
// Select1 of a DIVVU is the quotient, so constant-identity and
// strength-reduction rules apply (x*0, x*1, x*2^k, x/1, x/2^k, const*const).
// Rules are attempted in source order; the first match rewrites v in place
// and returns true. Machine-generated from gen/MIPS64.rules — change the
// rules file and regenerate rather than editing by hand.
func rewriteValueMIPS64_OpSelect1_10(v *Value) bool {
	// match: (Select1 (MULVU (MOVVconst [0]) _))
	// cond:
	// result: (MOVVconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MULVU {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpMIPS64MOVVconst {
			break
		}
		if v_0_0.AuxInt != 0 {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	// match: (Select1 (MULVU _ (MOVVconst [0])))
	// cond:
	// result: (MOVVconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MULVU {
			break
		}
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		if v_0_1.AuxInt != 0 {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	// match: (Select1 (MULVU (MOVVconst [1]) x))
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MULVU {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpMIPS64MOVVconst {
			break
		}
		if v_0_0.AuxInt != 1 {
			break
		}
		x := v_0.Args[1]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Select1 (MULVU x (MOVVconst [1])))
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MULVU {
			break
		}
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		if v_0_1.AuxInt != 1 {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Select1 (MULVU (MOVVconst [c]) x))
	// cond: isPowerOfTwo(c)
	// result: (SLLVconst [log2(c)] x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MULVU {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0_0.AuxInt
		x := v_0.Args[1]
		if !(isPowerOfTwo(c)) {
			break
		}
		v.reset(OpMIPS64SLLVconst)
		v.AuxInt = log2(c)
		v.AddArg(x)
		return true
	}
	// match: (Select1 (MULVU x (MOVVconst [c])))
	// cond: isPowerOfTwo(c)
	// result: (SLLVconst [log2(c)] x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MULVU {
			break
		}
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0_1.AuxInt
		if !(isPowerOfTwo(c)) {
			break
		}
		v.reset(OpMIPS64SLLVconst)
		v.AuxInt = log2(c)
		v.AddArg(x)
		return true
	}
	// match: (Select1 (DIVVU x (MOVVconst [1])))
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64DIVVU {
			break
		}
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		if v_0_1.AuxInt != 1 {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Select1 (DIVVU x (MOVVconst [c])))
	// cond: isPowerOfTwo(c)
	// result: (SRLVconst [log2(c)] x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64DIVVU {
			break
		}
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0_1.AuxInt
		if !(isPowerOfTwo(c)) {
			break
		}
		v.reset(OpMIPS64SRLVconst)
		v.AuxInt = log2(c)
		v.AddArg(x)
		return true
	}
	// match: (Select1 (MULVU (MOVVconst [c]) (MOVVconst [d])))
	// cond:
	// result: (MOVVconst [c*d])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MULVU {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0_0.AuxInt
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0_1.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = c * d
		return true
	}
	// match: (Select1 (MULVU (MOVVconst [d]) (MOVVconst [c])))
	// cond:
	// result: (MOVVconst [c*d])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MULVU {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0_0.AuxInt
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0_1.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = c * d
		return true
	}
	return false
}
  8819  func rewriteValueMIPS64_OpSelect1_20(v *Value) bool {
  8820  	// match: (Select1 (DIVV (MOVVconst [c]) (MOVVconst [d])))
  8821  	// cond:
  8822  	// result: (MOVVconst [int64(c)/int64(d)])
  8823  	for {
  8824  		v_0 := v.Args[0]
  8825  		if v_0.Op != OpMIPS64DIVV {
  8826  			break
  8827  		}
  8828  		v_0_0 := v_0.Args[0]
  8829  		if v_0_0.Op != OpMIPS64MOVVconst {
  8830  			break
  8831  		}
  8832  		c := v_0_0.AuxInt
  8833  		v_0_1 := v_0.Args[1]
  8834  		if v_0_1.Op != OpMIPS64MOVVconst {
  8835  			break
  8836  		}
  8837  		d := v_0_1.AuxInt
  8838  		v.reset(OpMIPS64MOVVconst)
  8839  		v.AuxInt = int64(c) / int64(d)
  8840  		return true
  8841  	}
  8842  	// match: (Select1 (DIVVU (MOVVconst [c]) (MOVVconst [d])))
  8843  	// cond:
  8844  	// result: (MOVVconst [int64(uint64(c)/uint64(d))])
  8845  	for {
  8846  		v_0 := v.Args[0]
  8847  		if v_0.Op != OpMIPS64DIVVU {
  8848  			break
  8849  		}
  8850  		v_0_0 := v_0.Args[0]
  8851  		if v_0_0.Op != OpMIPS64MOVVconst {
  8852  			break
  8853  		}
  8854  		c := v_0_0.AuxInt
  8855  		v_0_1 := v_0.Args[1]
  8856  		if v_0_1.Op != OpMIPS64MOVVconst {
  8857  			break
  8858  		}
  8859  		d := v_0_1.AuxInt
  8860  		v.reset(OpMIPS64MOVVconst)
  8861  		v.AuxInt = int64(uint64(c) / uint64(d))
  8862  		return true
  8863  	}
  8864  	return false
  8865  }
  8866  func rewriteValueMIPS64_OpSignExt16to32_0(v *Value) bool {
  8867  	// match: (SignExt16to32 x)
  8868  	// cond:
  8869  	// result: (MOVHreg x)
  8870  	for {
  8871  		x := v.Args[0]
  8872  		v.reset(OpMIPS64MOVHreg)
  8873  		v.AddArg(x)
  8874  		return true
  8875  	}
  8876  }
  8877  func rewriteValueMIPS64_OpSignExt16to64_0(v *Value) bool {
  8878  	// match: (SignExt16to64 x)
  8879  	// cond:
  8880  	// result: (MOVHreg x)
  8881  	for {
  8882  		x := v.Args[0]
  8883  		v.reset(OpMIPS64MOVHreg)
  8884  		v.AddArg(x)
  8885  		return true
  8886  	}
  8887  }
  8888  func rewriteValueMIPS64_OpSignExt32to64_0(v *Value) bool {
  8889  	// match: (SignExt32to64 x)
  8890  	// cond:
  8891  	// result: (MOVWreg x)
  8892  	for {
  8893  		x := v.Args[0]
  8894  		v.reset(OpMIPS64MOVWreg)
  8895  		v.AddArg(x)
  8896  		return true
  8897  	}
  8898  }
  8899  func rewriteValueMIPS64_OpSignExt8to16_0(v *Value) bool {
  8900  	// match: (SignExt8to16 x)
  8901  	// cond:
  8902  	// result: (MOVBreg x)
  8903  	for {
  8904  		x := v.Args[0]
  8905  		v.reset(OpMIPS64MOVBreg)
  8906  		v.AddArg(x)
  8907  		return true
  8908  	}
  8909  }
  8910  func rewriteValueMIPS64_OpSignExt8to32_0(v *Value) bool {
  8911  	// match: (SignExt8to32 x)
  8912  	// cond:
  8913  	// result: (MOVBreg x)
  8914  	for {
  8915  		x := v.Args[0]
  8916  		v.reset(OpMIPS64MOVBreg)
  8917  		v.AddArg(x)
  8918  		return true
  8919  	}
  8920  }
  8921  func rewriteValueMIPS64_OpSignExt8to64_0(v *Value) bool {
  8922  	// match: (SignExt8to64 x)
  8923  	// cond:
  8924  	// result: (MOVBreg x)
  8925  	for {
  8926  		x := v.Args[0]
  8927  		v.reset(OpMIPS64MOVBreg)
  8928  		v.AddArg(x)
  8929  		return true
  8930  	}
  8931  }
  8932  func rewriteValueMIPS64_OpSlicemask_0(v *Value) bool {
  8933  	b := v.Block
  8934  	_ = b
  8935  	// match: (Slicemask <t> x)
  8936  	// cond:
  8937  	// result: (SRAVconst (NEGV <t> x) [63])
  8938  	for {
  8939  		t := v.Type
  8940  		x := v.Args[0]
  8941  		v.reset(OpMIPS64SRAVconst)
  8942  		v.AuxInt = 63
  8943  		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
  8944  		v0.AddArg(x)
  8945  		v.AddArg(v0)
  8946  		return true
  8947  	}
  8948  }
  8949  func rewriteValueMIPS64_OpStaticCall_0(v *Value) bool {
  8950  	// match: (StaticCall [argwid] {target} mem)
  8951  	// cond:
  8952  	// result: (CALLstatic [argwid] {target} mem)
  8953  	for {
  8954  		argwid := v.AuxInt
  8955  		target := v.Aux
  8956  		mem := v.Args[0]
  8957  		v.reset(OpMIPS64CALLstatic)
  8958  		v.AuxInt = argwid
  8959  		v.Aux = target
  8960  		v.AddArg(mem)
  8961  		return true
  8962  	}
  8963  }
// rewriteValueMIPS64_OpStore_0 lowers a generic Store to the MIPS64 store
// of matching width, selected by the size of the store's type aux:
// 1 -> MOVBstore, 2 -> MOVHstore, 4 -> MOVWstore/MOVFstore, and
// 8 -> MOVVstore/MOVDstore, with the F/D float variants chosen when the
// stored value's type is a float. Rules are tried in order; the first
// match rewrites v and returns true. Machine-generated from
// gen/MIPS64.rules — regenerate rather than editing by hand.
func rewriteValueMIPS64_OpStore_0(v *Value) bool {
	// match: (Store {t} ptr val mem)
	// cond: t.(Type).Size() == 1
	// result: (MOVBstore ptr val mem)
	for {
		t := v.Aux
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(t.(Type).Size() == 1) {
			break
		}
		v.reset(OpMIPS64MOVBstore)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.(Type).Size() == 2
	// result: (MOVHstore ptr val mem)
	for {
		t := v.Aux
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(t.(Type).Size() == 2) {
			break
		}
		v.reset(OpMIPS64MOVHstore)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.(Type).Size() == 4 && !is32BitFloat(val.Type)
	// result: (MOVWstore ptr val mem)
	for {
		t := v.Aux
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(t.(Type).Size() == 4 && !is32BitFloat(val.Type)) {
			break
		}
		v.reset(OpMIPS64MOVWstore)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.(Type).Size() == 8 && !is64BitFloat(val.Type)
	// result: (MOVVstore ptr val mem)
	for {
		t := v.Aux
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(t.(Type).Size() == 8 && !is64BitFloat(val.Type)) {
			break
		}
		v.reset(OpMIPS64MOVVstore)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.(Type).Size() == 4 && is32BitFloat(val.Type)
	// result: (MOVFstore ptr val mem)
	for {
		t := v.Aux
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(t.(Type).Size() == 4 && is32BitFloat(val.Type)) {
			break
		}
		v.reset(OpMIPS64MOVFstore)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.(Type).Size() == 8 && is64BitFloat(val.Type)
	// result: (MOVDstore ptr val mem)
	for {
		t := v.Aux
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(t.(Type).Size() == 8 && is64BitFloat(val.Type)) {
			break
		}
		v.reset(OpMIPS64MOVDstore)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	return false
}
  9069  func rewriteValueMIPS64_OpSub16_0(v *Value) bool {
  9070  	// match: (Sub16 x y)
  9071  	// cond:
  9072  	// result: (SUBV x y)
  9073  	for {
  9074  		x := v.Args[0]
  9075  		y := v.Args[1]
  9076  		v.reset(OpMIPS64SUBV)
  9077  		v.AddArg(x)
  9078  		v.AddArg(y)
  9079  		return true
  9080  	}
  9081  }
  9082  func rewriteValueMIPS64_OpSub32_0(v *Value) bool {
  9083  	// match: (Sub32 x y)
  9084  	// cond:
  9085  	// result: (SUBV x y)
  9086  	for {
  9087  		x := v.Args[0]
  9088  		y := v.Args[1]
  9089  		v.reset(OpMIPS64SUBV)
  9090  		v.AddArg(x)
  9091  		v.AddArg(y)
  9092  		return true
  9093  	}
  9094  }
  9095  func rewriteValueMIPS64_OpSub32F_0(v *Value) bool {
  9096  	// match: (Sub32F x y)
  9097  	// cond:
  9098  	// result: (SUBF x y)
  9099  	for {
  9100  		x := v.Args[0]
  9101  		y := v.Args[1]
  9102  		v.reset(OpMIPS64SUBF)
  9103  		v.AddArg(x)
  9104  		v.AddArg(y)
  9105  		return true
  9106  	}
  9107  }
  9108  func rewriteValueMIPS64_OpSub64_0(v *Value) bool {
  9109  	// match: (Sub64 x y)
  9110  	// cond:
  9111  	// result: (SUBV x y)
  9112  	for {
  9113  		x := v.Args[0]
  9114  		y := v.Args[1]
  9115  		v.reset(OpMIPS64SUBV)
  9116  		v.AddArg(x)
  9117  		v.AddArg(y)
  9118  		return true
  9119  	}
  9120  }
  9121  func rewriteValueMIPS64_OpSub64F_0(v *Value) bool {
  9122  	// match: (Sub64F x y)
  9123  	// cond:
  9124  	// result: (SUBD x y)
  9125  	for {
  9126  		x := v.Args[0]
  9127  		y := v.Args[1]
  9128  		v.reset(OpMIPS64SUBD)
  9129  		v.AddArg(x)
  9130  		v.AddArg(y)
  9131  		return true
  9132  	}
  9133  }
  9134  func rewriteValueMIPS64_OpSub8_0(v *Value) bool {
  9135  	// match: (Sub8 x y)
  9136  	// cond:
  9137  	// result: (SUBV x y)
  9138  	for {
  9139  		x := v.Args[0]
  9140  		y := v.Args[1]
  9141  		v.reset(OpMIPS64SUBV)
  9142  		v.AddArg(x)
  9143  		v.AddArg(y)
  9144  		return true
  9145  	}
  9146  }
  9147  func rewriteValueMIPS64_OpSubPtr_0(v *Value) bool {
  9148  	// match: (SubPtr x y)
  9149  	// cond:
  9150  	// result: (SUBV x y)
  9151  	for {
  9152  		x := v.Args[0]
  9153  		y := v.Args[1]
  9154  		v.reset(OpMIPS64SUBV)
  9155  		v.AddArg(x)
  9156  		v.AddArg(y)
  9157  		return true
  9158  	}
  9159  }
  9160  func rewriteValueMIPS64_OpTrunc16to8_0(v *Value) bool {
  9161  	// match: (Trunc16to8 x)
  9162  	// cond:
  9163  	// result: x
  9164  	for {
  9165  		x := v.Args[0]
  9166  		v.reset(OpCopy)
  9167  		v.Type = x.Type
  9168  		v.AddArg(x)
  9169  		return true
  9170  	}
  9171  }
  9172  func rewriteValueMIPS64_OpTrunc32to16_0(v *Value) bool {
  9173  	// match: (Trunc32to16 x)
  9174  	// cond:
  9175  	// result: x
  9176  	for {
  9177  		x := v.Args[0]
  9178  		v.reset(OpCopy)
  9179  		v.Type = x.Type
  9180  		v.AddArg(x)
  9181  		return true
  9182  	}
  9183  }
  9184  func rewriteValueMIPS64_OpTrunc32to8_0(v *Value) bool {
  9185  	// match: (Trunc32to8 x)
  9186  	// cond:
  9187  	// result: x
  9188  	for {
  9189  		x := v.Args[0]
  9190  		v.reset(OpCopy)
  9191  		v.Type = x.Type
  9192  		v.AddArg(x)
  9193  		return true
  9194  	}
  9195  }
  9196  func rewriteValueMIPS64_OpTrunc64to16_0(v *Value) bool {
  9197  	// match: (Trunc64to16 x)
  9198  	// cond:
  9199  	// result: x
  9200  	for {
  9201  		x := v.Args[0]
  9202  		v.reset(OpCopy)
  9203  		v.Type = x.Type
  9204  		v.AddArg(x)
  9205  		return true
  9206  	}
  9207  }
  9208  func rewriteValueMIPS64_OpTrunc64to32_0(v *Value) bool {
  9209  	// match: (Trunc64to32 x)
  9210  	// cond:
  9211  	// result: x
  9212  	for {
  9213  		x := v.Args[0]
  9214  		v.reset(OpCopy)
  9215  		v.Type = x.Type
  9216  		v.AddArg(x)
  9217  		return true
  9218  	}
  9219  }
  9220  func rewriteValueMIPS64_OpTrunc64to8_0(v *Value) bool {
  9221  	// match: (Trunc64to8 x)
  9222  	// cond:
  9223  	// result: x
  9224  	for {
  9225  		x := v.Args[0]
  9226  		v.reset(OpCopy)
  9227  		v.Type = x.Type
  9228  		v.AddArg(x)
  9229  		return true
  9230  	}
  9231  }
  9232  func rewriteValueMIPS64_OpXor16_0(v *Value) bool {
  9233  	// match: (Xor16 x y)
  9234  	// cond:
  9235  	// result: (XOR x y)
  9236  	for {
  9237  		x := v.Args[0]
  9238  		y := v.Args[1]
  9239  		v.reset(OpMIPS64XOR)
  9240  		v.AddArg(x)
  9241  		v.AddArg(y)
  9242  		return true
  9243  	}
  9244  }
  9245  func rewriteValueMIPS64_OpXor32_0(v *Value) bool {
  9246  	// match: (Xor32 x y)
  9247  	// cond:
  9248  	// result: (XOR x y)
  9249  	for {
  9250  		x := v.Args[0]
  9251  		y := v.Args[1]
  9252  		v.reset(OpMIPS64XOR)
  9253  		v.AddArg(x)
  9254  		v.AddArg(y)
  9255  		return true
  9256  	}
  9257  }
  9258  func rewriteValueMIPS64_OpXor64_0(v *Value) bool {
  9259  	// match: (Xor64 x y)
  9260  	// cond:
  9261  	// result: (XOR x y)
  9262  	for {
  9263  		x := v.Args[0]
  9264  		y := v.Args[1]
  9265  		v.reset(OpMIPS64XOR)
  9266  		v.AddArg(x)
  9267  		v.AddArg(y)
  9268  		return true
  9269  	}
  9270  }
  9271  func rewriteValueMIPS64_OpXor8_0(v *Value) bool {
  9272  	// match: (Xor8 x y)
  9273  	// cond:
  9274  	// result: (XOR x y)
  9275  	for {
  9276  		x := v.Args[0]
  9277  		y := v.Args[1]
  9278  		v.reset(OpMIPS64XOR)
  9279  		v.AddArg(x)
  9280  		v.AddArg(y)
  9281  		return true
  9282  	}
  9283  }
// rewriteValueMIPS64_OpZero_0 lowers small Zero ops (byte counts 0-8) into
// chains of zero-constant stores, preferring the widest store width the
// type's alignment permits and falling back to narrower stores when it
// does not. Later stores in a chain thread through the earlier ones via
// the mem argument. Rules are tried in order; the first match rewrites v
// and returns true. Machine-generated from gen/MIPS64.rules — regenerate
// rather than editing by hand.
func rewriteValueMIPS64_OpZero_0(v *Value) bool {
	b := v.Block
	_ = b
	types := &b.Func.Config.Types
	_ = types
	// match: (Zero [0] _ mem)
	// cond:
	// result: mem
	for {
		if v.AuxInt != 0 {
			break
		}
		mem := v.Args[1]
		v.reset(OpCopy)
		v.Type = mem.Type
		v.AddArg(mem)
		return true
	}
	// match: (Zero [1] ptr mem)
	// cond:
	// result: (MOVBstore ptr (MOVVconst [0]) mem)
	for {
		if v.AuxInt != 1 {
			break
		}
		ptr := v.Args[0]
		mem := v.Args[1]
		v.reset(OpMIPS64MOVBstore)
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	// match: (Zero [2] {t} ptr mem)
	// cond: t.(Type).Alignment()%2 == 0
	// result: (MOVHstore ptr (MOVVconst [0]) mem)
	for {
		if v.AuxInt != 2 {
			break
		}
		t := v.Aux
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(t.(Type).Alignment()%2 == 0) {
			break
		}
		v.reset(OpMIPS64MOVHstore)
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	// match: (Zero [2] ptr mem)
	// cond:
	// result: (MOVBstore [1] ptr (MOVVconst [0]) 		(MOVBstore [0] ptr (MOVVconst [0]) mem))
	for {
		if v.AuxInt != 2 {
			break
		}
		ptr := v.Args[0]
		mem := v.Args[1]
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = 1
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, TypeMem)
		v1.AuxInt = 0
		v1.AddArg(ptr)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
		v2.AuxInt = 0
		v1.AddArg(v2)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	// match: (Zero [4] {t} ptr mem)
	// cond: t.(Type).Alignment()%4 == 0
	// result: (MOVWstore ptr (MOVVconst [0]) mem)
	for {
		if v.AuxInt != 4 {
			break
		}
		t := v.Aux
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(t.(Type).Alignment()%4 == 0) {
			break
		}
		v.reset(OpMIPS64MOVWstore)
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	// match: (Zero [4] {t} ptr mem)
	// cond: t.(Type).Alignment()%2 == 0
	// result: (MOVHstore [2] ptr (MOVVconst [0]) 		(MOVHstore [0] ptr (MOVVconst [0]) mem))
	for {
		if v.AuxInt != 4 {
			break
		}
		t := v.Aux
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(t.(Type).Alignment()%2 == 0) {
			break
		}
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = 2
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, TypeMem)
		v1.AuxInt = 0
		v1.AddArg(ptr)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
		v2.AuxInt = 0
		v1.AddArg(v2)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	// match: (Zero [4] ptr mem)
	// cond:
	// result: (MOVBstore [3] ptr (MOVVconst [0]) 		(MOVBstore [2] ptr (MOVVconst [0]) 			(MOVBstore [1] ptr (MOVVconst [0]) 				(MOVBstore [0] ptr (MOVVconst [0]) mem))))
	for {
		if v.AuxInt != 4 {
			break
		}
		ptr := v.Args[0]
		mem := v.Args[1]
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = 3
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, TypeMem)
		v1.AuxInt = 2
		v1.AddArg(ptr)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
		v2.AuxInt = 0
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, TypeMem)
		v3.AuxInt = 1
		v3.AddArg(ptr)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
		v4.AuxInt = 0
		v3.AddArg(v4)
		v5 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, TypeMem)
		v5.AuxInt = 0
		v5.AddArg(ptr)
		v6 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
		v6.AuxInt = 0
		v5.AddArg(v6)
		v5.AddArg(mem)
		v3.AddArg(v5)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
	// match: (Zero [8] {t} ptr mem)
	// cond: t.(Type).Alignment()%8 == 0
	// result: (MOVVstore ptr (MOVVconst [0]) mem)
	for {
		if v.AuxInt != 8 {
			break
		}
		t := v.Aux
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(t.(Type).Alignment()%8 == 0) {
			break
		}
		v.reset(OpMIPS64MOVVstore)
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	// match: (Zero [8] {t} ptr mem)
	// cond: t.(Type).Alignment()%4 == 0
	// result: (MOVWstore [4] ptr (MOVVconst [0]) 		(MOVWstore [0] ptr (MOVVconst [0]) mem))
	for {
		if v.AuxInt != 8 {
			break
		}
		t := v.Aux
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(t.(Type).Alignment()%4 == 0) {
			break
		}
		v.reset(OpMIPS64MOVWstore)
		v.AuxInt = 4
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, TypeMem)
		v1.AuxInt = 0
		v1.AddArg(ptr)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
		v2.AuxInt = 0
		v1.AddArg(v2)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	// match: (Zero [8] {t} ptr mem)
	// cond: t.(Type).Alignment()%2 == 0
	// result: (MOVHstore [6] ptr (MOVVconst [0]) 		(MOVHstore [4] ptr (MOVVconst [0]) 			(MOVHstore [2] ptr (MOVVconst [0]) 				(MOVHstore [0] ptr (MOVVconst [0]) mem))))
	for {
		if v.AuxInt != 8 {
			break
		}
		t := v.Aux
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(t.(Type).Alignment()%2 == 0) {
			break
		}
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = 6
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, TypeMem)
		v1.AuxInt = 4
		v1.AddArg(ptr)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
		v2.AuxInt = 0
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, TypeMem)
		v3.AuxInt = 2
		v3.AddArg(ptr)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
		v4.AuxInt = 0
		v3.AddArg(v4)
		v5 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, TypeMem)
		v5.AuxInt = 0
		v5.AddArg(ptr)
		v6 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
		v6.AuxInt = 0
		v5.AddArg(v6)
		v5.AddArg(mem)
		v3.AddArg(v5)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
	return false
}
  9549  func rewriteValueMIPS64_OpZero_10(v *Value) bool {
  9550  	b := v.Block
  9551  	_ = b
  9552  	config := b.Func.Config
  9553  	_ = config
  9554  	types := &b.Func.Config.Types
  9555  	_ = types
  9556  	// match: (Zero [3] ptr mem)
  9557  	// cond:
  9558  	// result: (MOVBstore [2] ptr (MOVVconst [0]) 		(MOVBstore [1] ptr (MOVVconst [0]) 			(MOVBstore [0] ptr (MOVVconst [0]) mem)))
  9559  	for {
  9560  		if v.AuxInt != 3 {
  9561  			break
  9562  		}
  9563  		ptr := v.Args[0]
  9564  		mem := v.Args[1]
  9565  		v.reset(OpMIPS64MOVBstore)
  9566  		v.AuxInt = 2
  9567  		v.AddArg(ptr)
  9568  		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
  9569  		v0.AuxInt = 0
  9570  		v.AddArg(v0)
  9571  		v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, TypeMem)
  9572  		v1.AuxInt = 1
  9573  		v1.AddArg(ptr)
  9574  		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
  9575  		v2.AuxInt = 0
  9576  		v1.AddArg(v2)
  9577  		v3 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, TypeMem)
  9578  		v3.AuxInt = 0
  9579  		v3.AddArg(ptr)
  9580  		v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
  9581  		v4.AuxInt = 0
  9582  		v3.AddArg(v4)
  9583  		v3.AddArg(mem)
  9584  		v1.AddArg(v3)
  9585  		v.AddArg(v1)
  9586  		return true
  9587  	}
  9588  	// match: (Zero [6] {t} ptr mem)
  9589  	// cond: t.(Type).Alignment()%2 == 0
  9590  	// result: (MOVHstore [4] ptr (MOVVconst [0]) 		(MOVHstore [2] ptr (MOVVconst [0]) 			(MOVHstore [0] ptr (MOVVconst [0]) mem)))
  9591  	for {
  9592  		if v.AuxInt != 6 {
  9593  			break
  9594  		}
  9595  		t := v.Aux
  9596  		ptr := v.Args[0]
  9597  		mem := v.Args[1]
  9598  		if !(t.(Type).Alignment()%2 == 0) {
  9599  			break
  9600  		}
  9601  		v.reset(OpMIPS64MOVHstore)
  9602  		v.AuxInt = 4
  9603  		v.AddArg(ptr)
  9604  		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
  9605  		v0.AuxInt = 0
  9606  		v.AddArg(v0)
  9607  		v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, TypeMem)
  9608  		v1.AuxInt = 2
  9609  		v1.AddArg(ptr)
  9610  		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
  9611  		v2.AuxInt = 0
  9612  		v1.AddArg(v2)
  9613  		v3 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, TypeMem)
  9614  		v3.AuxInt = 0
  9615  		v3.AddArg(ptr)
  9616  		v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
  9617  		v4.AuxInt = 0
  9618  		v3.AddArg(v4)
  9619  		v3.AddArg(mem)
  9620  		v1.AddArg(v3)
  9621  		v.AddArg(v1)
  9622  		return true
  9623  	}
  9624  	// match: (Zero [12] {t} ptr mem)
  9625  	// cond: t.(Type).Alignment()%4 == 0
  9626  	// result: (MOVWstore [8] ptr (MOVVconst [0]) 		(MOVWstore [4] ptr (MOVVconst [0]) 			(MOVWstore [0] ptr (MOVVconst [0]) mem)))
  9627  	for {
  9628  		if v.AuxInt != 12 {
  9629  			break
  9630  		}
  9631  		t := v.Aux
  9632  		ptr := v.Args[0]
  9633  		mem := v.Args[1]
  9634  		if !(t.(Type).Alignment()%4 == 0) {
  9635  			break
  9636  		}
  9637  		v.reset(OpMIPS64MOVWstore)
  9638  		v.AuxInt = 8
  9639  		v.AddArg(ptr)
  9640  		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
  9641  		v0.AuxInt = 0
  9642  		v.AddArg(v0)
  9643  		v1 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, TypeMem)
  9644  		v1.AuxInt = 4
  9645  		v1.AddArg(ptr)
  9646  		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
  9647  		v2.AuxInt = 0
  9648  		v1.AddArg(v2)
  9649  		v3 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, TypeMem)
  9650  		v3.AuxInt = 0
  9651  		v3.AddArg(ptr)
  9652  		v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
  9653  		v4.AuxInt = 0
  9654  		v3.AddArg(v4)
  9655  		v3.AddArg(mem)
  9656  		v1.AddArg(v3)
  9657  		v.AddArg(v1)
  9658  		return true
  9659  	}
  9660  	// match: (Zero [16] {t} ptr mem)
  9661  	// cond: t.(Type).Alignment()%8 == 0
  9662  	// result: (MOVVstore [8] ptr (MOVVconst [0]) 		(MOVVstore [0] ptr (MOVVconst [0]) mem))
  9663  	for {
  9664  		if v.AuxInt != 16 {
  9665  			break
  9666  		}
  9667  		t := v.Aux
  9668  		ptr := v.Args[0]
  9669  		mem := v.Args[1]
  9670  		if !(t.(Type).Alignment()%8 == 0) {
  9671  			break
  9672  		}
  9673  		v.reset(OpMIPS64MOVVstore)
  9674  		v.AuxInt = 8
  9675  		v.AddArg(ptr)
  9676  		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
  9677  		v0.AuxInt = 0
  9678  		v.AddArg(v0)
  9679  		v1 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, TypeMem)
  9680  		v1.AuxInt = 0
  9681  		v1.AddArg(ptr)
  9682  		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
  9683  		v2.AuxInt = 0
  9684  		v1.AddArg(v2)
  9685  		v1.AddArg(mem)
  9686  		v.AddArg(v1)
  9687  		return true
  9688  	}
  9689  	// match: (Zero [24] {t} ptr mem)
  9690  	// cond: t.(Type).Alignment()%8 == 0
  9691  	// result: (MOVVstore [16] ptr (MOVVconst [0]) 		(MOVVstore [8] ptr (MOVVconst [0]) 			(MOVVstore [0] ptr (MOVVconst [0]) mem)))
  9692  	for {
  9693  		if v.AuxInt != 24 {
  9694  			break
  9695  		}
  9696  		t := v.Aux
  9697  		ptr := v.Args[0]
  9698  		mem := v.Args[1]
  9699  		if !(t.(Type).Alignment()%8 == 0) {
  9700  			break
  9701  		}
  9702  		v.reset(OpMIPS64MOVVstore)
  9703  		v.AuxInt = 16
  9704  		v.AddArg(ptr)
  9705  		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
  9706  		v0.AuxInt = 0
  9707  		v.AddArg(v0)
  9708  		v1 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, TypeMem)
  9709  		v1.AuxInt = 8
  9710  		v1.AddArg(ptr)
  9711  		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
  9712  		v2.AuxInt = 0
  9713  		v1.AddArg(v2)
  9714  		v3 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, TypeMem)
  9715  		v3.AuxInt = 0
  9716  		v3.AddArg(ptr)
  9717  		v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, types.UInt64)
  9718  		v4.AuxInt = 0
  9719  		v3.AddArg(v4)
  9720  		v3.AddArg(mem)
  9721  		v1.AddArg(v3)
  9722  		v.AddArg(v1)
  9723  		return true
  9724  	}
  9725  	// match: (Zero [s] {t} ptr mem)
  9726  	// cond: s%8 == 0 && s > 24 && s <= 8*128 	&& t.(Type).Alignment()%8 == 0 && !config.noDuffDevice
  9727  	// result: (DUFFZERO [8 * (128 - int64(s/8))] ptr mem)
  9728  	for {
  9729  		s := v.AuxInt
  9730  		t := v.Aux
  9731  		ptr := v.Args[0]
  9732  		mem := v.Args[1]
  9733  		if !(s%8 == 0 && s > 24 && s <= 8*128 && t.(Type).Alignment()%8 == 0 && !config.noDuffDevice) {
  9734  			break
  9735  		}
  9736  		v.reset(OpMIPS64DUFFZERO)
  9737  		v.AuxInt = 8 * (128 - int64(s/8))
  9738  		v.AddArg(ptr)
  9739  		v.AddArg(mem)
  9740  		return true
  9741  	}
  9742  	// match: (Zero [s] {t} ptr mem)
  9743  	// cond: (s > 8*128 || config.noDuffDevice) || t.(Type).Alignment()%8 != 0
  9744  	// result: (LoweredZero [t.(Type).Alignment()] 		ptr 		(ADDVconst <ptr.Type> ptr [s-moveSize(t.(Type).Alignment(), config)]) 		mem)
  9745  	for {
  9746  		s := v.AuxInt
  9747  		t := v.Aux
  9748  		ptr := v.Args[0]
  9749  		mem := v.Args[1]
  9750  		if !((s > 8*128 || config.noDuffDevice) || t.(Type).Alignment()%8 != 0) {
  9751  			break
  9752  		}
  9753  		v.reset(OpMIPS64LoweredZero)
  9754  		v.AuxInt = t.(Type).Alignment()
  9755  		v.AddArg(ptr)
  9756  		v0 := b.NewValue0(v.Pos, OpMIPS64ADDVconst, ptr.Type)
  9757  		v0.AuxInt = s - moveSize(t.(Type).Alignment(), config)
  9758  		v0.AddArg(ptr)
  9759  		v.AddArg(v0)
  9760  		v.AddArg(mem)
  9761  		return true
  9762  	}
  9763  	return false
  9764  }
  9765  func rewriteValueMIPS64_OpZeroExt16to32_0(v *Value) bool {
  9766  	// match: (ZeroExt16to32 x)
  9767  	// cond:
  9768  	// result: (MOVHUreg x)
  9769  	for {
  9770  		x := v.Args[0]
  9771  		v.reset(OpMIPS64MOVHUreg)
  9772  		v.AddArg(x)
  9773  		return true
  9774  	}
  9775  }
  9776  func rewriteValueMIPS64_OpZeroExt16to64_0(v *Value) bool {
  9777  	// match: (ZeroExt16to64 x)
  9778  	// cond:
  9779  	// result: (MOVHUreg x)
  9780  	for {
  9781  		x := v.Args[0]
  9782  		v.reset(OpMIPS64MOVHUreg)
  9783  		v.AddArg(x)
  9784  		return true
  9785  	}
  9786  }
  9787  func rewriteValueMIPS64_OpZeroExt32to64_0(v *Value) bool {
  9788  	// match: (ZeroExt32to64 x)
  9789  	// cond:
  9790  	// result: (MOVWUreg x)
  9791  	for {
  9792  		x := v.Args[0]
  9793  		v.reset(OpMIPS64MOVWUreg)
  9794  		v.AddArg(x)
  9795  		return true
  9796  	}
  9797  }
  9798  func rewriteValueMIPS64_OpZeroExt8to16_0(v *Value) bool {
  9799  	// match: (ZeroExt8to16 x)
  9800  	// cond:
  9801  	// result: (MOVBUreg x)
  9802  	for {
  9803  		x := v.Args[0]
  9804  		v.reset(OpMIPS64MOVBUreg)
  9805  		v.AddArg(x)
  9806  		return true
  9807  	}
  9808  }
  9809  func rewriteValueMIPS64_OpZeroExt8to32_0(v *Value) bool {
  9810  	// match: (ZeroExt8to32 x)
  9811  	// cond:
  9812  	// result: (MOVBUreg x)
  9813  	for {
  9814  		x := v.Args[0]
  9815  		v.reset(OpMIPS64MOVBUreg)
  9816  		v.AddArg(x)
  9817  		return true
  9818  	}
  9819  }
  9820  func rewriteValueMIPS64_OpZeroExt8to64_0(v *Value) bool {
  9821  	// match: (ZeroExt8to64 x)
  9822  	// cond:
  9823  	// result: (MOVBUreg x)
  9824  	for {
  9825  		x := v.Args[0]
  9826  		v.reset(OpMIPS64MOVBUreg)
  9827  		v.AddArg(x)
  9828  		return true
  9829  	}
  9830  }
// rewriteBlockMIPS64 rewrites the kind and control value of block b according
// to the MIPS64 lowering rules (generated from gen/MIPS64.rules). It returns
// true if some rule matched and b was rewritten, false otherwise. Each rule
// below carries its match/cond/result triple from the rules file; within a
// block kind, rules are tried in order and the first match wins.
func rewriteBlockMIPS64(b *Block) bool {
	config := b.Func.Config
	_ = config // referenced only by some rules; blank-assign to avoid "unused"
	fe := b.Func.fe
	_ = fe
	types := &config.Types
	_ = types
	switch b.Kind {
	case BlockMIPS64EQ:
		// match: (EQ (FPFlagTrue cmp) yes no)
		// cond:
		// result: (FPF cmp yes no)
		for {
			v := b.Control
			if v.Op != OpMIPS64FPFlagTrue {
				break
			}
			cmp := v.Args[0]
			b.Kind = BlockMIPS64FPF
			b.SetControl(cmp)
			return true
		}
		// match: (EQ (FPFlagFalse cmp) yes no)
		// cond:
		// result: (FPT cmp yes no)
		for {
			v := b.Control
			if v.Op != OpMIPS64FPFlagFalse {
				break
			}
			cmp := v.Args[0]
			b.Kind = BlockMIPS64FPT
			b.SetControl(cmp)
			return true
		}
		// match: (EQ (XORconst [1] cmp:(SGT _ _)) yes no)
		// cond:
		// result: (NE cmp yes no)
		for {
			v := b.Control
			if v.Op != OpMIPS64XORconst {
				break
			}
			if v.AuxInt != 1 {
				break
			}
			cmp := v.Args[0]
			if cmp.Op != OpMIPS64SGT {
				break
			}
			b.Kind = BlockMIPS64NE
			b.SetControl(cmp)
			return true
		}
		// match: (EQ (XORconst [1] cmp:(SGTU _ _)) yes no)
		// cond:
		// result: (NE cmp yes no)
		for {
			v := b.Control
			if v.Op != OpMIPS64XORconst {
				break
			}
			if v.AuxInt != 1 {
				break
			}
			cmp := v.Args[0]
			if cmp.Op != OpMIPS64SGTU {
				break
			}
			b.Kind = BlockMIPS64NE
			b.SetControl(cmp)
			return true
		}
		// match: (EQ (XORconst [1] cmp:(SGTconst _)) yes no)
		// cond:
		// result: (NE cmp yes no)
		for {
			v := b.Control
			if v.Op != OpMIPS64XORconst {
				break
			}
			if v.AuxInt != 1 {
				break
			}
			cmp := v.Args[0]
			if cmp.Op != OpMIPS64SGTconst {
				break
			}
			b.Kind = BlockMIPS64NE
			b.SetControl(cmp)
			return true
		}
		// match: (EQ (XORconst [1] cmp:(SGTUconst _)) yes no)
		// cond:
		// result: (NE cmp yes no)
		for {
			v := b.Control
			if v.Op != OpMIPS64XORconst {
				break
			}
			if v.AuxInt != 1 {
				break
			}
			cmp := v.Args[0]
			if cmp.Op != OpMIPS64SGTUconst {
				break
			}
			b.Kind = BlockMIPS64NE
			b.SetControl(cmp)
			return true
		}
		// match: (EQ (SGTUconst [1] x) yes no)
		// cond:
		// result: (NE x yes no)
		for {
			v := b.Control
			if v.Op != OpMIPS64SGTUconst {
				break
			}
			if v.AuxInt != 1 {
				break
			}
			x := v.Args[0]
			b.Kind = BlockMIPS64NE
			b.SetControl(x)
			return true
		}
		// match: (EQ (SGTU x (MOVVconst [0])) yes no)
		// cond:
		// result: (EQ x yes no)
		for {
			v := b.Control
			if v.Op != OpMIPS64SGTU {
				break
			}
			x := v.Args[0]
			v_1 := v.Args[1]
			if v_1.Op != OpMIPS64MOVVconst {
				break
			}
			if v_1.AuxInt != 0 {
				break
			}
			b.Kind = BlockMIPS64EQ
			b.SetControl(x)
			return true
		}
		// match: (EQ (SGTconst [0] x) yes no)
		// cond:
		// result: (GEZ x yes no)
		for {
			v := b.Control
			if v.Op != OpMIPS64SGTconst {
				break
			}
			if v.AuxInt != 0 {
				break
			}
			x := v.Args[0]
			b.Kind = BlockMIPS64GEZ
			b.SetControl(x)
			return true
		}
		// match: (EQ (SGT x (MOVVconst [0])) yes no)
		// cond:
		// result: (LEZ x yes no)
		for {
			v := b.Control
			if v.Op != OpMIPS64SGT {
				break
			}
			x := v.Args[0]
			v_1 := v.Args[1]
			if v_1.Op != OpMIPS64MOVVconst {
				break
			}
			if v_1.AuxInt != 0 {
				break
			}
			b.Kind = BlockMIPS64LEZ
			b.SetControl(x)
			return true
		}
		// match: (EQ (MOVVconst [0]) yes no)
		// cond:
		// result: (First nil yes no)
		for {
			v := b.Control
			if v.Op != OpMIPS64MOVVconst {
				break
			}
			if v.AuxInt != 0 {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			return true
		}
		// match: (EQ (MOVVconst [c]) yes no)
		// cond: c != 0
		// result: (First nil no yes)
		for {
			v := b.Control
			if v.Op != OpMIPS64MOVVconst {
				break
			}
			c := v.AuxInt
			if !(c != 0) {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.swapSuccessors()
			return true
		}
	case BlockMIPS64GEZ:
		// match: (GEZ (MOVVconst [c]) yes no)
		// cond: c >= 0
		// result: (First nil yes no)
		for {
			v := b.Control
			if v.Op != OpMIPS64MOVVconst {
				break
			}
			c := v.AuxInt
			if !(c >= 0) {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			return true
		}
		// match: (GEZ (MOVVconst [c]) yes no)
		// cond: c <  0
		// result: (First nil no yes)
		for {
			v := b.Control
			if v.Op != OpMIPS64MOVVconst {
				break
			}
			c := v.AuxInt
			if !(c < 0) {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.swapSuccessors()
			return true
		}
	case BlockMIPS64GTZ:
		// match: (GTZ (MOVVconst [c]) yes no)
		// cond: c >  0
		// result: (First nil yes no)
		for {
			v := b.Control
			if v.Op != OpMIPS64MOVVconst {
				break
			}
			c := v.AuxInt
			if !(c > 0) {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			return true
		}
		// match: (GTZ (MOVVconst [c]) yes no)
		// cond: c <= 0
		// result: (First nil no yes)
		for {
			v := b.Control
			if v.Op != OpMIPS64MOVVconst {
				break
			}
			c := v.AuxInt
			if !(c <= 0) {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.swapSuccessors()
			return true
		}
	case BlockIf:
		// Generic If blocks are lowered to NE on the boolean control value.
		// match: (If cond yes no)
		// cond:
		// result: (NE cond yes no)
		for {
			v := b.Control
			_ = v
			cond := b.Control
			b.Kind = BlockMIPS64NE
			b.SetControl(cond)
			return true
		}
	case BlockMIPS64LEZ:
		// match: (LEZ (MOVVconst [c]) yes no)
		// cond: c <= 0
		// result: (First nil yes no)
		for {
			v := b.Control
			if v.Op != OpMIPS64MOVVconst {
				break
			}
			c := v.AuxInt
			if !(c <= 0) {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			return true
		}
		// match: (LEZ (MOVVconst [c]) yes no)
		// cond: c >  0
		// result: (First nil no yes)
		for {
			v := b.Control
			if v.Op != OpMIPS64MOVVconst {
				break
			}
			c := v.AuxInt
			if !(c > 0) {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.swapSuccessors()
			return true
		}
	case BlockMIPS64LTZ:
		// match: (LTZ (MOVVconst [c]) yes no)
		// cond: c <  0
		// result: (First nil yes no)
		for {
			v := b.Control
			if v.Op != OpMIPS64MOVVconst {
				break
			}
			c := v.AuxInt
			if !(c < 0) {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			return true
		}
		// match: (LTZ (MOVVconst [c]) yes no)
		// cond: c >= 0
		// result: (First nil no yes)
		for {
			v := b.Control
			if v.Op != OpMIPS64MOVVconst {
				break
			}
			c := v.AuxInt
			if !(c >= 0) {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.swapSuccessors()
			return true
		}
	case BlockMIPS64NE:
		// match: (NE (FPFlagTrue cmp) yes no)
		// cond:
		// result: (FPT cmp yes no)
		for {
			v := b.Control
			if v.Op != OpMIPS64FPFlagTrue {
				break
			}
			cmp := v.Args[0]
			b.Kind = BlockMIPS64FPT
			b.SetControl(cmp)
			return true
		}
		// match: (NE (FPFlagFalse cmp) yes no)
		// cond:
		// result: (FPF cmp yes no)
		for {
			v := b.Control
			if v.Op != OpMIPS64FPFlagFalse {
				break
			}
			cmp := v.Args[0]
			b.Kind = BlockMIPS64FPF
			b.SetControl(cmp)
			return true
		}
		// match: (NE (XORconst [1] cmp:(SGT _ _)) yes no)
		// cond:
		// result: (EQ cmp yes no)
		for {
			v := b.Control
			if v.Op != OpMIPS64XORconst {
				break
			}
			if v.AuxInt != 1 {
				break
			}
			cmp := v.Args[0]
			if cmp.Op != OpMIPS64SGT {
				break
			}
			b.Kind = BlockMIPS64EQ
			b.SetControl(cmp)
			return true
		}
		// match: (NE (XORconst [1] cmp:(SGTU _ _)) yes no)
		// cond:
		// result: (EQ cmp yes no)
		for {
			v := b.Control
			if v.Op != OpMIPS64XORconst {
				break
			}
			if v.AuxInt != 1 {
				break
			}
			cmp := v.Args[0]
			if cmp.Op != OpMIPS64SGTU {
				break
			}
			b.Kind = BlockMIPS64EQ
			b.SetControl(cmp)
			return true
		}
		// match: (NE (XORconst [1] cmp:(SGTconst _)) yes no)
		// cond:
		// result: (EQ cmp yes no)
		for {
			v := b.Control
			if v.Op != OpMIPS64XORconst {
				break
			}
			if v.AuxInt != 1 {
				break
			}
			cmp := v.Args[0]
			if cmp.Op != OpMIPS64SGTconst {
				break
			}
			b.Kind = BlockMIPS64EQ
			b.SetControl(cmp)
			return true
		}
		// match: (NE (XORconst [1] cmp:(SGTUconst _)) yes no)
		// cond:
		// result: (EQ cmp yes no)
		for {
			v := b.Control
			if v.Op != OpMIPS64XORconst {
				break
			}
			if v.AuxInt != 1 {
				break
			}
			cmp := v.Args[0]
			if cmp.Op != OpMIPS64SGTUconst {
				break
			}
			b.Kind = BlockMIPS64EQ
			b.SetControl(cmp)
			return true
		}
		// match: (NE (SGTUconst [1] x) yes no)
		// cond:
		// result: (EQ x yes no)
		for {
			v := b.Control
			if v.Op != OpMIPS64SGTUconst {
				break
			}
			if v.AuxInt != 1 {
				break
			}
			x := v.Args[0]
			b.Kind = BlockMIPS64EQ
			b.SetControl(x)
			return true
		}
		// match: (NE (SGTU x (MOVVconst [0])) yes no)
		// cond:
		// result: (NE x yes no)
		for {
			v := b.Control
			if v.Op != OpMIPS64SGTU {
				break
			}
			x := v.Args[0]
			v_1 := v.Args[1]
			if v_1.Op != OpMIPS64MOVVconst {
				break
			}
			if v_1.AuxInt != 0 {
				break
			}
			b.Kind = BlockMIPS64NE
			b.SetControl(x)
			return true
		}
		// match: (NE (SGTconst [0] x) yes no)
		// cond:
		// result: (LTZ x yes no)
		for {
			v := b.Control
			if v.Op != OpMIPS64SGTconst {
				break
			}
			if v.AuxInt != 0 {
				break
			}
			x := v.Args[0]
			b.Kind = BlockMIPS64LTZ
			b.SetControl(x)
			return true
		}
		// match: (NE (SGT x (MOVVconst [0])) yes no)
		// cond:
		// result: (GTZ x yes no)
		for {
			v := b.Control
			if v.Op != OpMIPS64SGT {
				break
			}
			x := v.Args[0]
			v_1 := v.Args[1]
			if v_1.Op != OpMIPS64MOVVconst {
				break
			}
			if v_1.AuxInt != 0 {
				break
			}
			b.Kind = BlockMIPS64GTZ
			b.SetControl(x)
			return true
		}
		// match: (NE (MOVVconst [0]) yes no)
		// cond:
		// result: (First nil no yes)
		for {
			v := b.Control
			if v.Op != OpMIPS64MOVVconst {
				break
			}
			if v.AuxInt != 0 {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.swapSuccessors()
			return true
		}
		// match: (NE (MOVVconst [c]) yes no)
		// cond: c != 0
		// result: (First nil yes no)
		for {
			v := b.Control
			if v.Op != OpMIPS64MOVVconst {
				break
			}
			c := v.AuxInt
			if !(c != 0) {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			return true
		}
	}
	return false
}