github.com/bir3/gocompiler@v0.3.205/src/cmd/compile/internal/ssa/rewriteMIPS64.go (about)

     1  // Code generated from _gen/MIPS64.rules; DO NOT EDIT.
     2  // generated with: cd _gen; go run .
     3  
     4  package ssa
     5  
     6  import "github.com/bir3/gocompiler/src/cmd/compile/internal/types"
     7  
// rewriteValueMIPS64 applies the MIPS64 lowering rules generated from
// _gen/MIPS64.rules to v and reports whether a rewrite was performed.
//
// Two shapes of rule appear below:
//   - trivial one-to-one lowerings, handled inline by retargeting v.Op
//     (the arguments are left untouched);
//   - rules that must inspect arguments or build new values, which are
//     delegated to a per-op rewriteValueMIPS64_Op* helper.
//
// NOTE(review): this file is machine-generated (DO NOT EDIT); any change
// belongs in _gen/MIPS64.rules, then regenerate.
func rewriteValueMIPS64(v *Value) bool {
	switch v.Op {
	case OpAdd16:
		v.Op = OpMIPS64ADDV
		return true
	case OpAdd32:
		v.Op = OpMIPS64ADDV
		return true
	case OpAdd32F:
		v.Op = OpMIPS64ADDF
		return true
	case OpAdd64:
		v.Op = OpMIPS64ADDV
		return true
	case OpAdd64F:
		v.Op = OpMIPS64ADDD
		return true
	case OpAdd8:
		v.Op = OpMIPS64ADDV
		return true
	case OpAddPtr:
		v.Op = OpMIPS64ADDV
		return true
	case OpAddr:
		return rewriteValueMIPS64_OpAddr(v)
	case OpAnd16:
		v.Op = OpMIPS64AND
		return true
	case OpAnd32:
		v.Op = OpMIPS64AND
		return true
	case OpAnd64:
		v.Op = OpMIPS64AND
		return true
	case OpAnd8:
		v.Op = OpMIPS64AND
		return true
	case OpAndB:
		v.Op = OpMIPS64AND
		return true
	case OpAtomicAdd32:
		v.Op = OpMIPS64LoweredAtomicAdd32
		return true
	case OpAtomicAdd64:
		v.Op = OpMIPS64LoweredAtomicAdd64
		return true
	case OpAtomicCompareAndSwap32:
		return rewriteValueMIPS64_OpAtomicCompareAndSwap32(v)
	case OpAtomicCompareAndSwap64:
		v.Op = OpMIPS64LoweredAtomicCas64
		return true
	case OpAtomicExchange32:
		v.Op = OpMIPS64LoweredAtomicExchange32
		return true
	case OpAtomicExchange64:
		v.Op = OpMIPS64LoweredAtomicExchange64
		return true
	case OpAtomicLoad32:
		v.Op = OpMIPS64LoweredAtomicLoad32
		return true
	case OpAtomicLoad64:
		v.Op = OpMIPS64LoweredAtomicLoad64
		return true
	case OpAtomicLoad8:
		v.Op = OpMIPS64LoweredAtomicLoad8
		return true
	// Pointer-sized atomics reuse the 64-bit lowered ops.
	case OpAtomicLoadPtr:
		v.Op = OpMIPS64LoweredAtomicLoad64
		return true
	case OpAtomicStore32:
		v.Op = OpMIPS64LoweredAtomicStore32
		return true
	case OpAtomicStore64:
		v.Op = OpMIPS64LoweredAtomicStore64
		return true
	case OpAtomicStore8:
		v.Op = OpMIPS64LoweredAtomicStore8
		return true
	case OpAtomicStorePtrNoWB:
		v.Op = OpMIPS64LoweredAtomicStore64
		return true
	case OpAvg64u:
		return rewriteValueMIPS64_OpAvg64u(v)
	case OpClosureCall:
		v.Op = OpMIPS64CALLclosure
		return true
	case OpCom16:
		return rewriteValueMIPS64_OpCom16(v)
	case OpCom32:
		return rewriteValueMIPS64_OpCom32(v)
	case OpCom64:
		return rewriteValueMIPS64_OpCom64(v)
	case OpCom8:
		return rewriteValueMIPS64_OpCom8(v)
	case OpConst16:
		return rewriteValueMIPS64_OpConst16(v)
	case OpConst32:
		return rewriteValueMIPS64_OpConst32(v)
	case OpConst32F:
		return rewriteValueMIPS64_OpConst32F(v)
	case OpConst64:
		return rewriteValueMIPS64_OpConst64(v)
	case OpConst64F:
		return rewriteValueMIPS64_OpConst64F(v)
	case OpConst8:
		return rewriteValueMIPS64_OpConst8(v)
	case OpConstBool:
		return rewriteValueMIPS64_OpConstBool(v)
	case OpConstNil:
		return rewriteValueMIPS64_OpConstNil(v)
	case OpCvt32Fto32:
		v.Op = OpMIPS64TRUNCFW
		return true
	case OpCvt32Fto64:
		v.Op = OpMIPS64TRUNCFV
		return true
	case OpCvt32Fto64F:
		v.Op = OpMIPS64MOVFD
		return true
	case OpCvt32to32F:
		v.Op = OpMIPS64MOVWF
		return true
	case OpCvt32to64F:
		v.Op = OpMIPS64MOVWD
		return true
	case OpCvt64Fto32:
		v.Op = OpMIPS64TRUNCDW
		return true
	case OpCvt64Fto32F:
		v.Op = OpMIPS64MOVDF
		return true
	case OpCvt64Fto64:
		v.Op = OpMIPS64TRUNCDV
		return true
	case OpCvt64to32F:
		v.Op = OpMIPS64MOVVF
		return true
	case OpCvt64to64F:
		v.Op = OpMIPS64MOVVD
		return true
	case OpCvtBoolToUint8:
		v.Op = OpCopy
		return true
	case OpDiv16:
		return rewriteValueMIPS64_OpDiv16(v)
	case OpDiv16u:
		return rewriteValueMIPS64_OpDiv16u(v)
	case OpDiv32:
		return rewriteValueMIPS64_OpDiv32(v)
	case OpDiv32F:
		v.Op = OpMIPS64DIVF
		return true
	case OpDiv32u:
		return rewriteValueMIPS64_OpDiv32u(v)
	case OpDiv64:
		return rewriteValueMIPS64_OpDiv64(v)
	case OpDiv64F:
		v.Op = OpMIPS64DIVD
		return true
	case OpDiv64u:
		return rewriteValueMIPS64_OpDiv64u(v)
	case OpDiv8:
		return rewriteValueMIPS64_OpDiv8(v)
	case OpDiv8u:
		return rewriteValueMIPS64_OpDiv8u(v)
	case OpEq16:
		return rewriteValueMIPS64_OpEq16(v)
	case OpEq32:
		return rewriteValueMIPS64_OpEq32(v)
	case OpEq32F:
		return rewriteValueMIPS64_OpEq32F(v)
	case OpEq64:
		return rewriteValueMIPS64_OpEq64(v)
	case OpEq64F:
		return rewriteValueMIPS64_OpEq64F(v)
	case OpEq8:
		return rewriteValueMIPS64_OpEq8(v)
	case OpEqB:
		return rewriteValueMIPS64_OpEqB(v)
	case OpEqPtr:
		return rewriteValueMIPS64_OpEqPtr(v)
	case OpGetCallerPC:
		v.Op = OpMIPS64LoweredGetCallerPC
		return true
	case OpGetCallerSP:
		v.Op = OpMIPS64LoweredGetCallerSP
		return true
	case OpGetClosurePtr:
		v.Op = OpMIPS64LoweredGetClosurePtr
		return true
	case OpHmul32:
		return rewriteValueMIPS64_OpHmul32(v)
	case OpHmul32u:
		return rewriteValueMIPS64_OpHmul32u(v)
	case OpHmul64:
		return rewriteValueMIPS64_OpHmul64(v)
	case OpHmul64u:
		return rewriteValueMIPS64_OpHmul64u(v)
	case OpInterCall:
		v.Op = OpMIPS64CALLinter
		return true
	case OpIsInBounds:
		return rewriteValueMIPS64_OpIsInBounds(v)
	case OpIsNonNil:
		return rewriteValueMIPS64_OpIsNonNil(v)
	case OpIsSliceInBounds:
		return rewriteValueMIPS64_OpIsSliceInBounds(v)
	case OpLeq16:
		return rewriteValueMIPS64_OpLeq16(v)
	case OpLeq16U:
		return rewriteValueMIPS64_OpLeq16U(v)
	case OpLeq32:
		return rewriteValueMIPS64_OpLeq32(v)
	case OpLeq32F:
		return rewriteValueMIPS64_OpLeq32F(v)
	case OpLeq32U:
		return rewriteValueMIPS64_OpLeq32U(v)
	case OpLeq64:
		return rewriteValueMIPS64_OpLeq64(v)
	case OpLeq64F:
		return rewriteValueMIPS64_OpLeq64F(v)
	case OpLeq64U:
		return rewriteValueMIPS64_OpLeq64U(v)
	case OpLeq8:
		return rewriteValueMIPS64_OpLeq8(v)
	case OpLeq8U:
		return rewriteValueMIPS64_OpLeq8U(v)
	case OpLess16:
		return rewriteValueMIPS64_OpLess16(v)
	case OpLess16U:
		return rewriteValueMIPS64_OpLess16U(v)
	case OpLess32:
		return rewriteValueMIPS64_OpLess32(v)
	case OpLess32F:
		return rewriteValueMIPS64_OpLess32F(v)
	case OpLess32U:
		return rewriteValueMIPS64_OpLess32U(v)
	case OpLess64:
		return rewriteValueMIPS64_OpLess64(v)
	case OpLess64F:
		return rewriteValueMIPS64_OpLess64F(v)
	case OpLess64U:
		return rewriteValueMIPS64_OpLess64U(v)
	case OpLess8:
		return rewriteValueMIPS64_OpLess8(v)
	case OpLess8U:
		return rewriteValueMIPS64_OpLess8U(v)
	case OpLoad:
		return rewriteValueMIPS64_OpLoad(v)
	case OpLocalAddr:
		return rewriteValueMIPS64_OpLocalAddr(v)
	case OpLsh16x16:
		return rewriteValueMIPS64_OpLsh16x16(v)
	case OpLsh16x32:
		return rewriteValueMIPS64_OpLsh16x32(v)
	case OpLsh16x64:
		return rewriteValueMIPS64_OpLsh16x64(v)
	case OpLsh16x8:
		return rewriteValueMIPS64_OpLsh16x8(v)
	case OpLsh32x16:
		return rewriteValueMIPS64_OpLsh32x16(v)
	case OpLsh32x32:
		return rewriteValueMIPS64_OpLsh32x32(v)
	case OpLsh32x64:
		return rewriteValueMIPS64_OpLsh32x64(v)
	case OpLsh32x8:
		return rewriteValueMIPS64_OpLsh32x8(v)
	case OpLsh64x16:
		return rewriteValueMIPS64_OpLsh64x16(v)
	case OpLsh64x32:
		return rewriteValueMIPS64_OpLsh64x32(v)
	case OpLsh64x64:
		return rewriteValueMIPS64_OpLsh64x64(v)
	case OpLsh64x8:
		return rewriteValueMIPS64_OpLsh64x8(v)
	case OpLsh8x16:
		return rewriteValueMIPS64_OpLsh8x16(v)
	case OpLsh8x32:
		return rewriteValueMIPS64_OpLsh8x32(v)
	case OpLsh8x64:
		return rewriteValueMIPS64_OpLsh8x64(v)
	case OpLsh8x8:
		return rewriteValueMIPS64_OpLsh8x8(v)
	// Machine-specific ops below are re-rewritten to fold constants,
	// combine addressing, etc., in their dedicated helpers.
	case OpMIPS64ADDV:
		return rewriteValueMIPS64_OpMIPS64ADDV(v)
	case OpMIPS64ADDVconst:
		return rewriteValueMIPS64_OpMIPS64ADDVconst(v)
	case OpMIPS64AND:
		return rewriteValueMIPS64_OpMIPS64AND(v)
	case OpMIPS64ANDconst:
		return rewriteValueMIPS64_OpMIPS64ANDconst(v)
	case OpMIPS64LoweredAtomicAdd32:
		return rewriteValueMIPS64_OpMIPS64LoweredAtomicAdd32(v)
	case OpMIPS64LoweredAtomicAdd64:
		return rewriteValueMIPS64_OpMIPS64LoweredAtomicAdd64(v)
	case OpMIPS64LoweredAtomicStore32:
		return rewriteValueMIPS64_OpMIPS64LoweredAtomicStore32(v)
	case OpMIPS64LoweredAtomicStore64:
		return rewriteValueMIPS64_OpMIPS64LoweredAtomicStore64(v)
	case OpMIPS64MOVBUload:
		return rewriteValueMIPS64_OpMIPS64MOVBUload(v)
	case OpMIPS64MOVBUreg:
		return rewriteValueMIPS64_OpMIPS64MOVBUreg(v)
	case OpMIPS64MOVBload:
		return rewriteValueMIPS64_OpMIPS64MOVBload(v)
	case OpMIPS64MOVBreg:
		return rewriteValueMIPS64_OpMIPS64MOVBreg(v)
	case OpMIPS64MOVBstore:
		return rewriteValueMIPS64_OpMIPS64MOVBstore(v)
	case OpMIPS64MOVBstorezero:
		return rewriteValueMIPS64_OpMIPS64MOVBstorezero(v)
	case OpMIPS64MOVDload:
		return rewriteValueMIPS64_OpMIPS64MOVDload(v)
	case OpMIPS64MOVDstore:
		return rewriteValueMIPS64_OpMIPS64MOVDstore(v)
	case OpMIPS64MOVFload:
		return rewriteValueMIPS64_OpMIPS64MOVFload(v)
	case OpMIPS64MOVFstore:
		return rewriteValueMIPS64_OpMIPS64MOVFstore(v)
	case OpMIPS64MOVHUload:
		return rewriteValueMIPS64_OpMIPS64MOVHUload(v)
	case OpMIPS64MOVHUreg:
		return rewriteValueMIPS64_OpMIPS64MOVHUreg(v)
	case OpMIPS64MOVHload:
		return rewriteValueMIPS64_OpMIPS64MOVHload(v)
	case OpMIPS64MOVHreg:
		return rewriteValueMIPS64_OpMIPS64MOVHreg(v)
	case OpMIPS64MOVHstore:
		return rewriteValueMIPS64_OpMIPS64MOVHstore(v)
	case OpMIPS64MOVHstorezero:
		return rewriteValueMIPS64_OpMIPS64MOVHstorezero(v)
	case OpMIPS64MOVVload:
		return rewriteValueMIPS64_OpMIPS64MOVVload(v)
	case OpMIPS64MOVVnop:
		return rewriteValueMIPS64_OpMIPS64MOVVnop(v)
	case OpMIPS64MOVVreg:
		return rewriteValueMIPS64_OpMIPS64MOVVreg(v)
	case OpMIPS64MOVVstore:
		return rewriteValueMIPS64_OpMIPS64MOVVstore(v)
	case OpMIPS64MOVVstorezero:
		return rewriteValueMIPS64_OpMIPS64MOVVstorezero(v)
	case OpMIPS64MOVWUload:
		return rewriteValueMIPS64_OpMIPS64MOVWUload(v)
	case OpMIPS64MOVWUreg:
		return rewriteValueMIPS64_OpMIPS64MOVWUreg(v)
	case OpMIPS64MOVWload:
		return rewriteValueMIPS64_OpMIPS64MOVWload(v)
	case OpMIPS64MOVWreg:
		return rewriteValueMIPS64_OpMIPS64MOVWreg(v)
	case OpMIPS64MOVWstore:
		return rewriteValueMIPS64_OpMIPS64MOVWstore(v)
	case OpMIPS64MOVWstorezero:
		return rewriteValueMIPS64_OpMIPS64MOVWstorezero(v)
	case OpMIPS64NEGV:
		return rewriteValueMIPS64_OpMIPS64NEGV(v)
	case OpMIPS64NOR:
		return rewriteValueMIPS64_OpMIPS64NOR(v)
	case OpMIPS64NORconst:
		return rewriteValueMIPS64_OpMIPS64NORconst(v)
	case OpMIPS64OR:
		return rewriteValueMIPS64_OpMIPS64OR(v)
	case OpMIPS64ORconst:
		return rewriteValueMIPS64_OpMIPS64ORconst(v)
	case OpMIPS64SGT:
		return rewriteValueMIPS64_OpMIPS64SGT(v)
	case OpMIPS64SGTU:
		return rewriteValueMIPS64_OpMIPS64SGTU(v)
	case OpMIPS64SGTUconst:
		return rewriteValueMIPS64_OpMIPS64SGTUconst(v)
	case OpMIPS64SGTconst:
		return rewriteValueMIPS64_OpMIPS64SGTconst(v)
	case OpMIPS64SLLV:
		return rewriteValueMIPS64_OpMIPS64SLLV(v)
	case OpMIPS64SLLVconst:
		return rewriteValueMIPS64_OpMIPS64SLLVconst(v)
	case OpMIPS64SRAV:
		return rewriteValueMIPS64_OpMIPS64SRAV(v)
	case OpMIPS64SRAVconst:
		return rewriteValueMIPS64_OpMIPS64SRAVconst(v)
	case OpMIPS64SRLV:
		return rewriteValueMIPS64_OpMIPS64SRLV(v)
	case OpMIPS64SRLVconst:
		return rewriteValueMIPS64_OpMIPS64SRLVconst(v)
	case OpMIPS64SUBV:
		return rewriteValueMIPS64_OpMIPS64SUBV(v)
	case OpMIPS64SUBVconst:
		return rewriteValueMIPS64_OpMIPS64SUBVconst(v)
	case OpMIPS64XOR:
		return rewriteValueMIPS64_OpMIPS64XOR(v)
	case OpMIPS64XORconst:
		return rewriteValueMIPS64_OpMIPS64XORconst(v)
	case OpMod16:
		return rewriteValueMIPS64_OpMod16(v)
	case OpMod16u:
		return rewriteValueMIPS64_OpMod16u(v)
	case OpMod32:
		return rewriteValueMIPS64_OpMod32(v)
	case OpMod32u:
		return rewriteValueMIPS64_OpMod32u(v)
	case OpMod64:
		return rewriteValueMIPS64_OpMod64(v)
	case OpMod64u:
		return rewriteValueMIPS64_OpMod64u(v)
	case OpMod8:
		return rewriteValueMIPS64_OpMod8(v)
	case OpMod8u:
		return rewriteValueMIPS64_OpMod8u(v)
	case OpMove:
		return rewriteValueMIPS64_OpMove(v)
	case OpMul16:
		return rewriteValueMIPS64_OpMul16(v)
	case OpMul32:
		return rewriteValueMIPS64_OpMul32(v)
	case OpMul32F:
		v.Op = OpMIPS64MULF
		return true
	case OpMul64:
		return rewriteValueMIPS64_OpMul64(v)
	case OpMul64F:
		v.Op = OpMIPS64MULD
		return true
	case OpMul64uhilo:
		v.Op = OpMIPS64MULVU
		return true
	case OpMul8:
		return rewriteValueMIPS64_OpMul8(v)
	case OpNeg16:
		v.Op = OpMIPS64NEGV
		return true
	case OpNeg32:
		v.Op = OpMIPS64NEGV
		return true
	case OpNeg32F:
		v.Op = OpMIPS64NEGF
		return true
	case OpNeg64:
		v.Op = OpMIPS64NEGV
		return true
	case OpNeg64F:
		v.Op = OpMIPS64NEGD
		return true
	case OpNeg8:
		v.Op = OpMIPS64NEGV
		return true
	case OpNeq16:
		return rewriteValueMIPS64_OpNeq16(v)
	case OpNeq32:
		return rewriteValueMIPS64_OpNeq32(v)
	case OpNeq32F:
		return rewriteValueMIPS64_OpNeq32F(v)
	case OpNeq64:
		return rewriteValueMIPS64_OpNeq64(v)
	case OpNeq64F:
		return rewriteValueMIPS64_OpNeq64F(v)
	case OpNeq8:
		return rewriteValueMIPS64_OpNeq8(v)
	case OpNeqB:
		v.Op = OpMIPS64XOR
		return true
	case OpNeqPtr:
		return rewriteValueMIPS64_OpNeqPtr(v)
	case OpNilCheck:
		v.Op = OpMIPS64LoweredNilCheck
		return true
	case OpNot:
		return rewriteValueMIPS64_OpNot(v)
	case OpOffPtr:
		return rewriteValueMIPS64_OpOffPtr(v)
	case OpOr16:
		v.Op = OpMIPS64OR
		return true
	case OpOr32:
		v.Op = OpMIPS64OR
		return true
	case OpOr64:
		v.Op = OpMIPS64OR
		return true
	case OpOr8:
		v.Op = OpMIPS64OR
		return true
	case OpOrB:
		v.Op = OpMIPS64OR
		return true
	case OpPanicBounds:
		return rewriteValueMIPS64_OpPanicBounds(v)
	case OpRotateLeft16:
		return rewriteValueMIPS64_OpRotateLeft16(v)
	case OpRotateLeft32:
		return rewriteValueMIPS64_OpRotateLeft32(v)
	case OpRotateLeft64:
		return rewriteValueMIPS64_OpRotateLeft64(v)
	case OpRotateLeft8:
		return rewriteValueMIPS64_OpRotateLeft8(v)
	case OpRound32F:
		v.Op = OpCopy
		return true
	case OpRound64F:
		v.Op = OpCopy
		return true
	case OpRsh16Ux16:
		return rewriteValueMIPS64_OpRsh16Ux16(v)
	case OpRsh16Ux32:
		return rewriteValueMIPS64_OpRsh16Ux32(v)
	case OpRsh16Ux64:
		return rewriteValueMIPS64_OpRsh16Ux64(v)
	case OpRsh16Ux8:
		return rewriteValueMIPS64_OpRsh16Ux8(v)
	case OpRsh16x16:
		return rewriteValueMIPS64_OpRsh16x16(v)
	case OpRsh16x32:
		return rewriteValueMIPS64_OpRsh16x32(v)
	case OpRsh16x64:
		return rewriteValueMIPS64_OpRsh16x64(v)
	case OpRsh16x8:
		return rewriteValueMIPS64_OpRsh16x8(v)
	case OpRsh32Ux16:
		return rewriteValueMIPS64_OpRsh32Ux16(v)
	case OpRsh32Ux32:
		return rewriteValueMIPS64_OpRsh32Ux32(v)
	case OpRsh32Ux64:
		return rewriteValueMIPS64_OpRsh32Ux64(v)
	case OpRsh32Ux8:
		return rewriteValueMIPS64_OpRsh32Ux8(v)
	case OpRsh32x16:
		return rewriteValueMIPS64_OpRsh32x16(v)
	case OpRsh32x32:
		return rewriteValueMIPS64_OpRsh32x32(v)
	case OpRsh32x64:
		return rewriteValueMIPS64_OpRsh32x64(v)
	case OpRsh32x8:
		return rewriteValueMIPS64_OpRsh32x8(v)
	case OpRsh64Ux16:
		return rewriteValueMIPS64_OpRsh64Ux16(v)
	case OpRsh64Ux32:
		return rewriteValueMIPS64_OpRsh64Ux32(v)
	case OpRsh64Ux64:
		return rewriteValueMIPS64_OpRsh64Ux64(v)
	case OpRsh64Ux8:
		return rewriteValueMIPS64_OpRsh64Ux8(v)
	case OpRsh64x16:
		return rewriteValueMIPS64_OpRsh64x16(v)
	case OpRsh64x32:
		return rewriteValueMIPS64_OpRsh64x32(v)
	case OpRsh64x64:
		return rewriteValueMIPS64_OpRsh64x64(v)
	case OpRsh64x8:
		return rewriteValueMIPS64_OpRsh64x8(v)
	case OpRsh8Ux16:
		return rewriteValueMIPS64_OpRsh8Ux16(v)
	case OpRsh8Ux32:
		return rewriteValueMIPS64_OpRsh8Ux32(v)
	case OpRsh8Ux64:
		return rewriteValueMIPS64_OpRsh8Ux64(v)
	case OpRsh8Ux8:
		return rewriteValueMIPS64_OpRsh8Ux8(v)
	case OpRsh8x16:
		return rewriteValueMIPS64_OpRsh8x16(v)
	case OpRsh8x32:
		return rewriteValueMIPS64_OpRsh8x32(v)
	case OpRsh8x64:
		return rewriteValueMIPS64_OpRsh8x64(v)
	case OpRsh8x8:
		return rewriteValueMIPS64_OpRsh8x8(v)
	case OpSelect0:
		return rewriteValueMIPS64_OpSelect0(v)
	case OpSelect1:
		return rewriteValueMIPS64_OpSelect1(v)
	case OpSignExt16to32:
		v.Op = OpMIPS64MOVHreg
		return true
	case OpSignExt16to64:
		v.Op = OpMIPS64MOVHreg
		return true
	case OpSignExt32to64:
		v.Op = OpMIPS64MOVWreg
		return true
	case OpSignExt8to16:
		v.Op = OpMIPS64MOVBreg
		return true
	case OpSignExt8to32:
		v.Op = OpMIPS64MOVBreg
		return true
	case OpSignExt8to64:
		v.Op = OpMIPS64MOVBreg
		return true
	case OpSlicemask:
		return rewriteValueMIPS64_OpSlicemask(v)
	case OpSqrt:
		v.Op = OpMIPS64SQRTD
		return true
	case OpSqrt32:
		v.Op = OpMIPS64SQRTF
		return true
	case OpStaticCall:
		v.Op = OpMIPS64CALLstatic
		return true
	case OpStore:
		return rewriteValueMIPS64_OpStore(v)
	case OpSub16:
		v.Op = OpMIPS64SUBV
		return true
	case OpSub32:
		v.Op = OpMIPS64SUBV
		return true
	case OpSub32F:
		v.Op = OpMIPS64SUBF
		return true
	case OpSub64:
		v.Op = OpMIPS64SUBV
		return true
	case OpSub64F:
		v.Op = OpMIPS64SUBD
		return true
	case OpSub8:
		v.Op = OpMIPS64SUBV
		return true
	case OpSubPtr:
		v.Op = OpMIPS64SUBV
		return true
	case OpTailCall:
		v.Op = OpMIPS64CALLtail
		return true
	// Integer truncations lower to Copy: no instruction is emitted.
	case OpTrunc16to8:
		v.Op = OpCopy
		return true
	case OpTrunc32to16:
		v.Op = OpCopy
		return true
	case OpTrunc32to8:
		v.Op = OpCopy
		return true
	case OpTrunc64to16:
		v.Op = OpCopy
		return true
	case OpTrunc64to32:
		v.Op = OpCopy
		return true
	case OpTrunc64to8:
		v.Op = OpCopy
		return true
	case OpWB:
		v.Op = OpMIPS64LoweredWB
		return true
	case OpXor16:
		v.Op = OpMIPS64XOR
		return true
	case OpXor32:
		v.Op = OpMIPS64XOR
		return true
	case OpXor64:
		v.Op = OpMIPS64XOR
		return true
	case OpXor8:
		v.Op = OpMIPS64XOR
		return true
	case OpZero:
		return rewriteValueMIPS64_OpZero(v)
	case OpZeroExt16to32:
		v.Op = OpMIPS64MOVHUreg
		return true
	case OpZeroExt16to64:
		v.Op = OpMIPS64MOVHUreg
		return true
	case OpZeroExt32to64:
		v.Op = OpMIPS64MOVWUreg
		return true
	case OpZeroExt8to16:
		v.Op = OpMIPS64MOVBUreg
		return true
	case OpZeroExt8to32:
		v.Op = OpMIPS64MOVBUreg
		return true
	case OpZeroExt8to64:
		v.Op = OpMIPS64MOVBUreg
		return true
	}
	// No rule matched: leave v unchanged.
	return false
}
// rewriteValueMIPS64_OpAddr lowers a symbolic address to the machine op:
// (Addr {sym} base) => (MOVVaddr {sym} base). The rule is unconditional,
// so the generated `for { ... return true }` loop always fires on its
// first pass.
func rewriteValueMIPS64_OpAddr(v *Value) bool {
	v_0 := v.Args[0]
	// match: (Addr {sym} base)
	// result: (MOVVaddr {sym} base)
	for {
		sym := auxToSym(v.Aux)
		base := v_0
		v.reset(OpMIPS64MOVVaddr)
		v.Aux = symToAux(sym)
		v.AddArg(base)
		return true
	}
}
// rewriteValueMIPS64_OpAtomicCompareAndSwap32 lowers
// (AtomicCompareAndSwap32 ptr old new mem) to
// (LoweredAtomicCas32 ptr (SignExt32to64 old) new mem).
// Only `old` is sign-extended here; the 64-bit CAS
// (AtomicCompareAndSwap64) is lowered directly in the dispatch switch.
func rewriteValueMIPS64_OpAtomicCompareAndSwap32(v *Value) bool {
	v_3 := v.Args[3]
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (AtomicCompareAndSwap32 ptr old new mem)
	// result: (LoweredAtomicCas32 ptr (SignExt32to64 old) new mem)
	for {
		ptr := v_0
		old := v_1
		new := v_2
		mem := v_3
		v.reset(OpMIPS64LoweredAtomicCas32)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(old)
		v.AddArg4(ptr, v0, new, mem)
		return true
	}
}
// rewriteValueMIPS64_OpAvg64u lowers the unsigned average:
// (Avg64u <t> x y) => (ADDV (SRLVconst <t> (SUBV <t> x y) [1]) y),
// i.e. ((x-y)>>1) + y, which avoids overflowing the x+y intermediate.
func rewriteValueMIPS64_OpAvg64u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Avg64u <t> x y)
	// result: (ADDV (SRLVconst <t> (SUBV <t> x y) [1]) y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64ADDV)
		v0 := b.NewValue0(v.Pos, OpMIPS64SRLVconst, t)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpMIPS64SUBV, t)
		v1.AddArg2(x, y)
		v0.AddArg(v1)
		v.AddArg2(v0, y)
		return true
	}
}
// rewriteValueMIPS64_OpCom16 lowers bitwise complement:
// (Com16 x) => (NOR (MOVVconst [0]) x), using ^x == NOR(0, x).
func rewriteValueMIPS64_OpCom16(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Com16 x)
	// result: (NOR (MOVVconst [0]) x)
	for {
		x := v_0
		v.reset(OpMIPS64NOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, x)
		return true
	}
}
// rewriteValueMIPS64_OpCom32 lowers bitwise complement:
// (Com32 x) => (NOR (MOVVconst [0]) x), using ^x == NOR(0, x).
func rewriteValueMIPS64_OpCom32(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Com32 x)
	// result: (NOR (MOVVconst [0]) x)
	for {
		x := v_0
		v.reset(OpMIPS64NOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, x)
		return true
	}
}
// rewriteValueMIPS64_OpCom64 lowers bitwise complement:
// (Com64 x) => (NOR (MOVVconst [0]) x), using ^x == NOR(0, x).
func rewriteValueMIPS64_OpCom64(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Com64 x)
	// result: (NOR (MOVVconst [0]) x)
	for {
		x := v_0
		v.reset(OpMIPS64NOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, x)
		return true
	}
}
// rewriteValueMIPS64_OpCom8 lowers bitwise complement:
// (Com8 x) => (NOR (MOVVconst [0]) x), using ^x == NOR(0, x).
func rewriteValueMIPS64_OpCom8(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Com8 x)
	// result: (NOR (MOVVconst [0]) x)
	for {
		x := v_0
		v.reset(OpMIPS64NOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, x)
		return true
	}
}
// rewriteValueMIPS64_OpConst16 widens a 16-bit constant to the 64-bit
// machine constant: (Const16 [val]) => (MOVVconst [int64(val)]).
// The int16->int64 conversion sign-extends the aux value.
func rewriteValueMIPS64_OpConst16(v *Value) bool {
	// match: (Const16 [val])
	// result: (MOVVconst [int64(val)])
	for {
		val := auxIntToInt16(v.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(val))
		return true
	}
}
// rewriteValueMIPS64_OpConst32 widens a 32-bit constant to the 64-bit
// machine constant: (Const32 [val]) => (MOVVconst [int64(val)]).
// The int32->int64 conversion sign-extends the aux value.
func rewriteValueMIPS64_OpConst32(v *Value) bool {
	// match: (Const32 [val])
	// result: (MOVVconst [int64(val)])
	for {
		val := auxIntToInt32(v.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(val))
		return true
	}
}
// rewriteValueMIPS64_OpConst32F lowers a float32 constant:
// (Const32F [val]) => (MOVFconst [float64(val)]); the aux is stored
// widened to float64 as all float aux values are.
func rewriteValueMIPS64_OpConst32F(v *Value) bool {
	// match: (Const32F [val])
	// result: (MOVFconst [float64(val)])
	for {
		val := auxIntToFloat32(v.AuxInt)
		v.reset(OpMIPS64MOVFconst)
		v.AuxInt = float64ToAuxInt(float64(val))
		return true
	}
}
// rewriteValueMIPS64_OpConst64 lowers a 64-bit constant directly:
// (Const64 [val]) => (MOVVconst [int64(val)]).
func rewriteValueMIPS64_OpConst64(v *Value) bool {
	// match: (Const64 [val])
	// result: (MOVVconst [int64(val)])
	for {
		val := auxIntToInt64(v.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(val))
		return true
	}
}
// rewriteValueMIPS64_OpConst64F lowers a float64 constant:
// (Const64F [val]) => (MOVDconst [float64(val)]).
func rewriteValueMIPS64_OpConst64F(v *Value) bool {
	// match: (Const64F [val])
	// result: (MOVDconst [float64(val)])
	for {
		val := auxIntToFloat64(v.AuxInt)
		v.reset(OpMIPS64MOVDconst)
		v.AuxInt = float64ToAuxInt(float64(val))
		return true
	}
}
// rewriteValueMIPS64_OpConst8 widens an 8-bit constant to the 64-bit
// machine constant: (Const8 [val]) => (MOVVconst [int64(val)]).
// The int8->int64 conversion sign-extends the aux value.
func rewriteValueMIPS64_OpConst8(v *Value) bool {
	// match: (Const8 [val])
	// result: (MOVVconst [int64(val)])
	for {
		val := auxIntToInt8(v.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(val))
		return true
	}
}
// rewriteValueMIPS64_OpConstBool materializes a boolean constant as
// 0 or 1: (ConstBool [t]) => (MOVVconst [int64(b2i(t))]).
func rewriteValueMIPS64_OpConstBool(v *Value) bool {
	// match: (ConstBool [t])
	// result: (MOVVconst [int64(b2i(t))])
	for {
		t := auxIntToBool(v.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(b2i(t)))
		return true
	}
}
// rewriteValueMIPS64_OpConstNil lowers the nil pointer constant:
// (ConstNil) => (MOVVconst [0]).
func rewriteValueMIPS64_OpConstNil(v *Value) bool {
	// match: (ConstNil)
	// result: (MOVVconst [0])
	for {
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
}
// rewriteValueMIPS64_OpDiv16 lowers signed 16-bit division:
// (Div16 x y) => (Select1 (DIVV (SignExt16to64 x) (SignExt16to64 y))).
// DIVV yields a (Int64, Int64) tuple; the quotient is element 1.
func rewriteValueMIPS64_OpDiv16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div16 x y)
	// result: (Select1 (DIVV (SignExt16to64 x) (SignExt16to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
		v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpDiv16u lowers unsigned 16-bit division:
// (Div16u x y) => (Select1 (DIVVU (ZeroExt16to64 x) (ZeroExt16to64 y))).
// DIVVU yields a (UInt64, UInt64) tuple; the quotient is element 1.
func rewriteValueMIPS64_OpDiv16u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div16u x y)
	// result: (Select1 (DIVVU (ZeroExt16to64 x) (ZeroExt16to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpDiv32 lowers signed 32-bit division:
// (Div32 x y) => (Select1 (DIVV (SignExt32to64 x) (SignExt32to64 y))).
// DIVV yields a (Int64, Int64) tuple; the quotient is element 1.
func rewriteValueMIPS64_OpDiv32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div32 x y)
	// result: (Select1 (DIVV (SignExt32to64 x) (SignExt32to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
		v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpDiv32u lowers unsigned 32-bit division:
// (Div32u x y) => (Select1 (DIVVU (ZeroExt32to64 x) (ZeroExt32to64 y))).
// DIVVU yields a (UInt64, UInt64) tuple; the quotient is element 1.
func rewriteValueMIPS64_OpDiv32u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div32u x y)
	// result: (Select1 (DIVVU (ZeroExt32to64 x) (ZeroExt32to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpDiv64 lowers the generic Div64 op directly to
// Select1 of DIVV (no extension needed at full width).
// Always matches; generated from _gen/MIPS64.rules.
func rewriteValueMIPS64_OpDiv64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div64 x y)
	// result: (Select1 (DIVV x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpDiv64u lowers the generic Div64u op directly to
// Select1 of DIVVU (no extension needed at full width).
// Always matches; generated from _gen/MIPS64.rules.
func rewriteValueMIPS64_OpDiv64u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div64u x y)
	// result: (Select1 (DIVVU x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpDiv8 lowers the generic Div8 op: both operands are
// sign-extended to 64 bits, divided with DIVV, and the quotient is taken
// via Select1. Always matches; generated from _gen/MIPS64.rules.
func rewriteValueMIPS64_OpDiv8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div8 x y)
	// result: (Select1 (DIVV (SignExt8to64 x) (SignExt8to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
		v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpDiv8u lowers the generic Div8u op: both operands
// are zero-extended to 64 bits, divided with DIVVU, and the quotient is
// taken via Select1. Always matches; generated from _gen/MIPS64.rules.
func rewriteValueMIPS64_OpDiv8u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div8u x y)
	// result: (Select1 (DIVVU (ZeroExt8to64 x) (ZeroExt8to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpEq16 lowers the generic Eq16 op. x == y is computed
// as (x^y) <u 1 — i.e. SGTU(1, XOR(x, y)) — after zero-extending both
// operands to 64 bits. Always matches; generated from _gen/MIPS64.rules.
func rewriteValueMIPS64_OpEq16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Eq16 x y)
	// result: (SGTU (MOVVconst [1]) (XOR (ZeroExt16to64 x) (ZeroExt16to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v2.AddArg(x)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpEq32 lowers the generic Eq32 op. x == y is computed
// as (x^y) <u 1 — i.e. SGTU(1, XOR(x, y)) — after zero-extending both
// operands to 64 bits. Always matches; generated from _gen/MIPS64.rules.
func rewriteValueMIPS64_OpEq32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Eq32 x y)
	// result: (SGTU (MOVVconst [1]) (XOR (ZeroExt32to64 x) (ZeroExt32to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(x)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpEq32F lowers the generic Eq32F op to a CMPEQF
// float compare whose condition flag is read back with FPFlagTrue.
// Always matches; generated from _gen/MIPS64.rules.
func rewriteValueMIPS64_OpEq32F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Eq32F x y)
	// result: (FPFlagTrue (CMPEQF x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPEQF, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpEq64 lowers the generic Eq64 op. x == y is computed
// as (x^y) <u 1 — i.e. SGTU(1, XOR(x, y)) — with no extension needed at
// full width. Always matches; generated from _gen/MIPS64.rules.
func rewriteValueMIPS64_OpEq64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Eq64 x y)
	// result: (SGTU (MOVVconst [1]) (XOR x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
		v1.AddArg2(x, y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpEq64F lowers the generic Eq64F op to a CMPEQD
// double compare whose condition flag is read back with FPFlagTrue.
// Always matches; generated from _gen/MIPS64.rules.
func rewriteValueMIPS64_OpEq64F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Eq64F x y)
	// result: (FPFlagTrue (CMPEQD x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPEQD, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpEq8 lowers the generic Eq8 op. x == y is computed
// as (x^y) <u 1 — i.e. SGTU(1, XOR(x, y)) — after zero-extending both
// operands to 64 bits. Always matches; generated from _gen/MIPS64.rules.
func rewriteValueMIPS64_OpEq8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Eq8 x y)
	// result: (SGTU (MOVVconst [1]) (XOR (ZeroExt8to64 x) (ZeroExt8to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v2.AddArg(x)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpEqB lowers the generic EqB (boolean equality) op:
// since booleans are 0 or 1, x == y is 1 ^ (x ^ y).
// Always matches; generated from _gen/MIPS64.rules.
func rewriteValueMIPS64_OpEqB(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (EqB x y)
	// result: (XOR (MOVVconst [1]) (XOR <typ.Bool> x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.Bool)
		v1.AddArg2(x, y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpEqPtr lowers the generic EqPtr op the same way as
// Eq64: pointers are equal iff (x^y) <u 1, i.e. SGTU(1, XOR(x, y)).
// Always matches; generated from _gen/MIPS64.rules.
func rewriteValueMIPS64_OpEqPtr(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (EqPtr x y)
	// result: (SGTU (MOVVconst [1]) (XOR x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
		v1.AddArg2(x, y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpHmul32 lowers the generic Hmul32 op (high 32 bits
// of a signed 32x32 multiply): sign-extend both operands, take the low
// word of MULV via Select1, and arithmetic-shift right by 32.
// Always matches; generated from _gen/MIPS64.rules.
func rewriteValueMIPS64_OpHmul32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Hmul32 x y)
	// result: (SRAVconst (Select1 <typ.Int64> (MULV (SignExt32to64 x) (SignExt32to64 y))) [32])
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64SRAVconst)
		v.AuxInt = int64ToAuxInt(32)
		v0 := b.NewValue0(v.Pos, OpSelect1, typ.Int64)
		v1 := b.NewValue0(v.Pos, OpMIPS64MULV, types.NewTuple(typ.Int64, typ.Int64))
		v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v2.AddArg(x)
		v3 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpHmul32u lowers the generic Hmul32u op (high 32 bits
// of an unsigned 32x32 multiply): zero-extend both operands, take the low
// word of MULVU via Select1, and logical-shift right by 32.
// Always matches; generated from _gen/MIPS64.rules.
func rewriteValueMIPS64_OpHmul32u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Hmul32u x y)
	// result: (SRLVconst (Select1 <typ.UInt64> (MULVU (ZeroExt32to64 x) (ZeroExt32to64 y))) [32])
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64SRLVconst)
		v.AuxInt = int64ToAuxInt(32)
		v0 := b.NewValue0(v.Pos, OpSelect1, typ.UInt64)
		v1 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(x)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpHmul64 lowers the generic Hmul64 op: the high half
// of the signed 64x64 product is the first element of the MULV tuple,
// taken via Select0. Always matches; generated from _gen/MIPS64.rules.
func rewriteValueMIPS64_OpHmul64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Hmul64 x y)
	// result: (Select0 (MULV x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPS64MULV, types.NewTuple(typ.Int64, typ.Int64))
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpHmul64u lowers the generic Hmul64u op: the high
// half of the unsigned 64x64 product is the first element of the MULVU
// tuple, taken via Select0. Always matches; generated from _gen/MIPS64.rules.
func rewriteValueMIPS64_OpHmul64u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Hmul64u x y)
	// result: (Select0 (MULVU x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpIsInBounds lowers the generic IsInBounds op:
// idx < len (unsigned) becomes SGTU(len, idx).
// Always matches; generated from _gen/MIPS64.rules.
func rewriteValueMIPS64_OpIsInBounds(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (IsInBounds idx len)
	// result: (SGTU len idx)
	for {
		idx := v_0
		len := v_1
		v.reset(OpMIPS64SGTU)
		v.AddArg2(len, idx)
		return true
	}
}
// rewriteValueMIPS64_OpIsNonNil lowers the generic IsNonNil op:
// ptr != nil becomes the unsigned comparison ptr >u 0.
// Always matches; generated from _gen/MIPS64.rules.
func rewriteValueMIPS64_OpIsNonNil(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (IsNonNil ptr)
	// result: (SGTU ptr (MOVVconst [0]))
	for {
		ptr := v_0
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg2(ptr, v0)
		return true
	}
}
// rewriteValueMIPS64_OpIsSliceInBounds lowers the generic IsSliceInBounds
// op: idx <= len (unsigned) is computed as !(idx >u len), i.e.
// 1 ^ SGTU(idx, len). Always matches; generated from _gen/MIPS64.rules.
func rewriteValueMIPS64_OpIsSliceInBounds(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (IsSliceInBounds idx len)
	// result: (XOR (MOVVconst [1]) (SGTU idx len))
	for {
		idx := v_0
		len := v_1
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v1.AddArg2(idx, len)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpLeq16 lowers the generic Leq16 op: x <= y is
// computed as !(x > y), i.e. 1 ^ SGT on the sign-extended operands.
// Always matches; generated from _gen/MIPS64.rules.
func rewriteValueMIPS64_OpLeq16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq16 x y)
	// result: (XOR (MOVVconst [1]) (SGT (SignExt16to64 x) (SignExt16to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v2.AddArg(x)
		v3 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpLeq16U lowers the generic Leq16U op: x <= y
// (unsigned) is computed as !(x >u y), i.e. 1 ^ SGTU on the zero-extended
// operands. Always matches; generated from _gen/MIPS64.rules.
func rewriteValueMIPS64_OpLeq16U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq16U x y)
	// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt16to64 x) (ZeroExt16to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v2.AddArg(x)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpLeq32 lowers the generic Leq32 op: x <= y is
// computed as !(x > y), i.e. 1 ^ SGT on the sign-extended operands.
// Always matches; generated from _gen/MIPS64.rules.
func rewriteValueMIPS64_OpLeq32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq32 x y)
	// result: (XOR (MOVVconst [1]) (SGT (SignExt32to64 x) (SignExt32to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v2.AddArg(x)
		v3 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpLeq32F lowers the generic Leq32F op: x <= y is
// expressed as CMPGEF(y, x) (operands swapped) with the flag read back
// via FPFlagTrue. Always matches; generated from _gen/MIPS64.rules.
func rewriteValueMIPS64_OpLeq32F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Leq32F x y)
	// result: (FPFlagTrue (CMPGEF y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPGEF, types.TypeFlags)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpLeq32U lowers the generic Leq32U op: x <= y
// (unsigned) is computed as !(x >u y), i.e. 1 ^ SGTU on the zero-extended
// operands. Always matches; generated from _gen/MIPS64.rules.
func rewriteValueMIPS64_OpLeq32U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq32U x y)
	// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt32to64 x) (ZeroExt32to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(x)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpLeq64 lowers the generic Leq64 op: x <= y is
// computed as !(x > y), i.e. 1 ^ SGT(x, y), with no extension needed at
// full width. Always matches; generated from _gen/MIPS64.rules.
func rewriteValueMIPS64_OpLeq64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq64 x y)
	// result: (XOR (MOVVconst [1]) (SGT x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool)
		v1.AddArg2(x, y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpLeq64F lowers the generic Leq64F op: x <= y is
// expressed as CMPGED(y, x) (operands swapped) with the flag read back
// via FPFlagTrue. Always matches; generated from _gen/MIPS64.rules.
func rewriteValueMIPS64_OpLeq64F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Leq64F x y)
	// result: (FPFlagTrue (CMPGED y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPGED, types.TypeFlags)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpLeq64U lowers the generic Leq64U op: x <= y
// (unsigned) is computed as !(x >u y), i.e. 1 ^ SGTU(x, y), with no
// extension needed at full width.
// Always matches; generated from _gen/MIPS64.rules.
func rewriteValueMIPS64_OpLeq64U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq64U x y)
	// result: (XOR (MOVVconst [1]) (SGTU x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v1.AddArg2(x, y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpLeq8 lowers the generic Leq8 op: x <= y is computed
// as !(x > y), i.e. 1 ^ SGT on the sign-extended operands.
// Always matches; generated from _gen/MIPS64.rules.
func rewriteValueMIPS64_OpLeq8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq8 x y)
	// result: (XOR (MOVVconst [1]) (SGT (SignExt8to64 x) (SignExt8to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v2.AddArg(x)
		v3 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpLeq8U lowers the generic Leq8U op: x <= y
// (unsigned) is computed as !(x >u y), i.e. 1 ^ SGTU on the zero-extended
// operands. Always matches; generated from _gen/MIPS64.rules.
func rewriteValueMIPS64_OpLeq8U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq8U x y)
	// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt8to64 x) (ZeroExt8to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v2.AddArg(x)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpLess16 lowers the generic Less16 op: x < y becomes
// SGT(y, x) (operands swapped) on the sign-extended values.
// Always matches; generated from _gen/MIPS64.rules.
func rewriteValueMIPS64_OpLess16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less16 x y)
	// result: (SGT (SignExt16to64 y) (SignExt16to64 x))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64SGT)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(y)
		v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v1.AddArg(x)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpLess16U lowers the generic Less16U op: x < y
// (unsigned) becomes SGTU(y, x) (operands swapped) on the zero-extended
// values. Always matches; generated from _gen/MIPS64.rules.
func rewriteValueMIPS64_OpLess16U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less16U x y)
	// result: (SGTU (ZeroExt16to64 y) (ZeroExt16to64 x))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v0.AddArg(y)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpLess32 lowers the generic Less32 op: x < y becomes
// SGT(y, x) (operands swapped) on the sign-extended values.
// Always matches; generated from _gen/MIPS64.rules.
func rewriteValueMIPS64_OpLess32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less32 x y)
	// result: (SGT (SignExt32to64 y) (SignExt32to64 x))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64SGT)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(y)
		v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v1.AddArg(x)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpLess32F lowers the generic Less32F op: x < y is
// expressed as CMPGTF(y, x) (operands swapped) with the flag read back
// via FPFlagTrue. Always matches; generated from _gen/MIPS64.rules.
func rewriteValueMIPS64_OpLess32F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Less32F x y)
	// result: (FPFlagTrue (CMPGTF y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPGTF, types.TypeFlags)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpLess32U lowers the generic Less32U op: x < y
// (unsigned) becomes SGTU(y, x) (operands swapped) on the zero-extended
// values. Always matches; generated from _gen/MIPS64.rules.
func rewriteValueMIPS64_OpLess32U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less32U x y)
	// result: (SGTU (ZeroExt32to64 y) (ZeroExt32to64 x))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v0.AddArg(y)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(x)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpLess64 lowers the generic Less64 op: x < y becomes
// SGT(y, x) (operands swapped), no extension needed at full width.
// Always matches; generated from _gen/MIPS64.rules.
func rewriteValueMIPS64_OpLess64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Less64 x y)
	// result: (SGT y x)
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64SGT)
		v.AddArg2(y, x)
		return true
	}
}
// rewriteValueMIPS64_OpLess64F lowers the generic Less64F op: x < y is
// expressed as CMPGTD(y, x) (operands swapped) with the flag read back
// via FPFlagTrue. Always matches; generated from _gen/MIPS64.rules.
func rewriteValueMIPS64_OpLess64F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Less64F x y)
	// result: (FPFlagTrue (CMPGTD y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPGTD, types.TypeFlags)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpLess64U lowers the generic Less64U op: x < y
// (unsigned) becomes SGTU(y, x) (operands swapped), no extension needed
// at full width. Always matches; generated from _gen/MIPS64.rules.
func rewriteValueMIPS64_OpLess64U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Less64U x y)
	// result: (SGTU y x)
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64SGTU)
		v.AddArg2(y, x)
		return true
	}
}
// rewriteValueMIPS64_OpLess8 lowers the generic Less8 op: x < y becomes
// SGT(y, x) (operands swapped) on the sign-extended values.
// Always matches; generated from _gen/MIPS64.rules.
func rewriteValueMIPS64_OpLess8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less8 x y)
	// result: (SGT (SignExt8to64 y) (SignExt8to64 x))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64SGT)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(y)
		v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v1.AddArg(x)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpLess8U lowers the generic Less8U op: x < y
// (unsigned) becomes SGTU(y, x) (operands swapped) on the zero-extended
// values. Always matches; generated from _gen/MIPS64.rules.
func rewriteValueMIPS64_OpLess8U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less8U x y)
	// result: (SGTU (ZeroExt8to64 y) (ZeroExt8to64 x))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(y)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpLoad lowers the generic Load op to the MIPS64 load
// matching the loaded type: boolean and unsigned 8-bit -> MOVBUload,
// signed 8-bit -> MOVBload, 16-bit -> MOVH(U)load, 32-bit -> MOVW(U)load,
// 64-bit int or pointer -> MOVVload, and 32/64-bit float -> MOVFload /
// MOVDload. Rules are tried in order; returns false if no rule applies
// (e.g. for types not covered here). Generated from _gen/MIPS64.rules.
func rewriteValueMIPS64_OpLoad(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Load <t> ptr mem)
	// cond: t.IsBoolean()
	// result: (MOVBUload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(t.IsBoolean()) {
			break
		}
		v.reset(OpMIPS64MOVBUload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is8BitInt(t) && isSigned(t))
	// result: (MOVBload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is8BitInt(t) && isSigned(t)) {
			break
		}
		v.reset(OpMIPS64MOVBload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is8BitInt(t) && !isSigned(t))
	// result: (MOVBUload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is8BitInt(t) && !isSigned(t)) {
			break
		}
		v.reset(OpMIPS64MOVBUload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is16BitInt(t) && isSigned(t))
	// result: (MOVHload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is16BitInt(t) && isSigned(t)) {
			break
		}
		v.reset(OpMIPS64MOVHload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is16BitInt(t) && !isSigned(t))
	// result: (MOVHUload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is16BitInt(t) && !isSigned(t)) {
			break
		}
		v.reset(OpMIPS64MOVHUload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is32BitInt(t) && isSigned(t))
	// result: (MOVWload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is32BitInt(t) && isSigned(t)) {
			break
		}
		v.reset(OpMIPS64MOVWload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is32BitInt(t) && !isSigned(t))
	// result: (MOVWUload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is32BitInt(t) && !isSigned(t)) {
			break
		}
		v.reset(OpMIPS64MOVWUload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is64BitInt(t) || isPtr(t))
	// result: (MOVVload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is64BitInt(t) || isPtr(t)) {
			break
		}
		v.reset(OpMIPS64MOVVload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: is32BitFloat(t)
	// result: (MOVFload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is32BitFloat(t)) {
			break
		}
		v.reset(OpMIPS64MOVFload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: is64BitFloat(t)
	// result: (MOVDload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is64BitFloat(t)) {
			break
		}
		v.reset(OpMIPS64MOVDload)
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpLocalAddr lowers the generic LocalAddr op to
// MOVVaddr, keeping the symbol aux and base but dropping the memory
// argument. Always matches; generated from _gen/MIPS64.rules.
func rewriteValueMIPS64_OpLocalAddr(v *Value) bool {
	v_0 := v.Args[0]
	// match: (LocalAddr {sym} base _)
	// result: (MOVVaddr {sym} base)
	for {
		sym := auxToSym(v.Aux)
		base := v_0
		v.reset(OpMIPS64MOVVaddr)
		v.Aux = symToAux(sym)
		v.AddArg(base)
		return true
	}
}
// rewriteValueMIPS64_OpLsh16x16 lowers the generic Lsh16x16 op. Go shifts
// by >= the word size must produce 0, so the SLLV result is ANDed with the
// negation (all-ones/all-zeros mask) of (shift <u 64); the 16-bit shift
// amount is zero-extended first. Note v3 (the extended shift amount) is
// shared by both the compare and the shift.
// Always matches; generated from _gen/MIPS64.rules.
func rewriteValueMIPS64_OpLsh16x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh16x16 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg2(x, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh16x32 lowers the generic Lsh16x32 op: SLLV
// masked by NEGV(shift <u 64) so shifts >= 64 yield 0; the 32-bit shift
// amount is zero-extended and shared between compare and shift.
// Always matches; generated from _gen/MIPS64.rules.
func rewriteValueMIPS64_OpLsh16x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh16x32 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg2(x, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh16x64 lowers the generic Lsh16x64 op: SLLV
// masked by NEGV(shift <u 64) so shifts >= 64 yield 0; the 64-bit shift
// amount needs no extension. Always matches; generated from _gen/MIPS64.rules.
func rewriteValueMIPS64_OpLsh16x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh16x64 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SLLV <t> x y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v1.AddArg2(v2, y)
		v0.AddArg(v1)
		v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v3.AddArg2(x, y)
		v.AddArg2(v0, v3)
		return true
	}
}
// rewriteValueMIPS64_OpLsh16x8 lowers the generic Lsh16x8 op: SLLV masked
// by NEGV(shift <u 64) so shifts >= 64 yield 0; the 8-bit shift amount is
// zero-extended and shared between compare and shift.
// Always matches; generated from _gen/MIPS64.rules.
func rewriteValueMIPS64_OpLsh16x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh16x8 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg2(x, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh32x16 lowers the generic Lsh32x16 op: SLLV
// masked by NEGV(shift <u 64) so shifts >= 64 yield 0; the 16-bit shift
// amount is zero-extended and shared between compare and shift.
// Always matches; generated from _gen/MIPS64.rules.
func rewriteValueMIPS64_OpLsh32x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh32x16 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg2(x, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh32x32 lowers Lsh32x32 (left shift of a 32-bit value
// by a 32-bit amount). The amount is zero-extended to 64 bits, and the SLLV
// result is masked to zero via NEGV(SGTU(64, amount)) when the amount is >= 64.
func rewriteValueMIPS64_OpLsh32x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh32x32 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg2(x, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh32x64 lowers Lsh32x64 (left shift of a 32-bit value
// by a 64-bit amount). No extension is needed; the SLLV result is masked to
// zero via NEGV(SGTU(64, y)) when y >= 64.
func rewriteValueMIPS64_OpLsh32x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh32x64 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SLLV <t> x y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v1.AddArg2(v2, y)
		v0.AddArg(v1)
		v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v3.AddArg2(x, y)
		v.AddArg2(v0, v3)
		return true
	}
}
// rewriteValueMIPS64_OpLsh32x8 lowers Lsh32x8 (left shift of a 32-bit value by
// an 8-bit amount). The amount is zero-extended to 64 bits, and the SLLV
// result is masked to zero via NEGV(SGTU(64, amount)) when the amount is >= 64.
func rewriteValueMIPS64_OpLsh32x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh32x8 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg2(x, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh64x16 lowers Lsh64x16 (left shift of a 64-bit value
// by a 16-bit amount). The amount is zero-extended to 64 bits, and the SLLV
// result is masked to zero via NEGV(SGTU(64, amount)) when the amount is >= 64.
func rewriteValueMIPS64_OpLsh64x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh64x16 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg2(x, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh64x32 lowers Lsh64x32 (left shift of a 64-bit value
// by a 32-bit amount). The amount is zero-extended to 64 bits, and the SLLV
// result is masked to zero via NEGV(SGTU(64, amount)) when the amount is >= 64.
func rewriteValueMIPS64_OpLsh64x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh64x32 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg2(x, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh64x64 lowers Lsh64x64 (left shift of a 64-bit value
// by a 64-bit amount). No extension is needed; the SLLV result is masked to
// zero via NEGV(SGTU(64, y)) when y >= 64.
func rewriteValueMIPS64_OpLsh64x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh64x64 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SLLV <t> x y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v1.AddArg2(v2, y)
		v0.AddArg(v1)
		v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v3.AddArg2(x, y)
		v.AddArg2(v0, v3)
		return true
	}
}
// rewriteValueMIPS64_OpLsh64x8 lowers Lsh64x8 (left shift of a 64-bit value by
// an 8-bit amount). The amount is zero-extended to 64 bits, and the SLLV
// result is masked to zero via NEGV(SGTU(64, amount)) when the amount is >= 64.
func rewriteValueMIPS64_OpLsh64x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh64x8 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg2(x, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh8x16 lowers Lsh8x16 (left shift of an 8-bit value by
// a 16-bit amount). The amount is zero-extended to 64 bits, and the SLLV
// result is masked to zero via NEGV(SGTU(64, amount)) when the amount is >= 64.
func rewriteValueMIPS64_OpLsh8x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh8x16 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg2(x, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh8x32 lowers Lsh8x32 (left shift of an 8-bit value by
// a 32-bit amount). The amount is zero-extended to 64 bits, and the SLLV
// result is masked to zero via NEGV(SGTU(64, amount)) when the amount is >= 64.
func rewriteValueMIPS64_OpLsh8x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh8x32 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg2(x, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh8x64 lowers Lsh8x64 (left shift of an 8-bit value by
// a 64-bit amount). No extension is needed; the SLLV result is masked to zero
// via NEGV(SGTU(64, y)) when y >= 64.
func rewriteValueMIPS64_OpLsh8x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh8x64 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SLLV <t> x y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v1.AddArg2(v2, y)
		v0.AddArg(v1)
		v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v3.AddArg2(x, y)
		v.AddArg2(v0, v3)
		return true
	}
}
// rewriteValueMIPS64_OpLsh8x8 lowers Lsh8x8 (left shift of an 8-bit value by
// an 8-bit amount). The amount is zero-extended to 64 bits, and the SLLV
// result is masked to zero via NEGV(SGTU(64, amount)) when the amount is >= 64.
func rewriteValueMIPS64_OpLsh8x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh8x8 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg2(x, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpMIPS64ADDV applies strength-reducing rewrites to ADDV:
//   - ADDV x (MOVVconst [c])  -> ADDVconst [c] x, when c fits in 32 bits
//     (the is32Bit guard presumably reflects the immediate range the
//     assembler can materialize for ADDVconst);
//   - ADDV x (NEGV y)         -> SUBV x y.
//
// Because ADDV is commutative, each match loops twice with the arguments
// swapped (_i0 = 0, 1). Returns true if the value was rewritten.
func rewriteValueMIPS64_OpMIPS64ADDV(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (ADDV x (MOVVconst [c]))
	// cond: is32Bit(c)
	// result: (ADDVconst [c] x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpMIPS64MOVVconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(is32Bit(c)) {
				continue
			}
			v.reset(OpMIPS64ADDVconst)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (ADDV x (NEGV y))
	// result: (SUBV x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpMIPS64NEGV {
				continue
			}
			y := v_1.Args[0]
			v.reset(OpMIPS64SUBV)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64ADDVconst simplifies ADDVconst:
//   - folds the offset into a MOVVaddr when the combined offset stays 32-bit;
//   - ADDVconst [0] x            -> x (identity);
//   - constant-folds ADDVconst of MOVVconst;
//   - collapses chained ADDVconst/SUBVconst into a single ADDVconst, guarded
//     by is32Bit on the combined constant to keep it encodable.
//
// Returns true if the value was rewritten.
func rewriteValueMIPS64_OpMIPS64ADDVconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (ADDVconst [off1] (MOVVaddr [off2] {sym} ptr))
	// cond: is32Bit(off1+int64(off2))
	// result: (MOVVaddr [int32(off1)+int32(off2)] {sym} ptr)
	for {
		off1 := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		if !(is32Bit(off1 + int64(off2))) {
			break
		}
		v.reset(OpMIPS64MOVVaddr)
		v.AuxInt = int32ToAuxInt(int32(off1) + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg(ptr)
		return true
	}
	// match: (ADDVconst [0] x)
	// result: x
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		x := v_0
		v.copyOf(x)
		return true
	}
	// match: (ADDVconst [c] (MOVVconst [d]))
	// result: (MOVVconst [c+d])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(c + d)
		return true
	}
	// match: (ADDVconst [c] (ADDVconst [d] x))
	// cond: is32Bit(c+d)
	// result: (ADDVconst [c+d] x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(is32Bit(c + d)) {
			break
		}
		v.reset(OpMIPS64ADDVconst)
		v.AuxInt = int64ToAuxInt(c + d)
		v.AddArg(x)
		return true
	}
	// match: (ADDVconst [c] (SUBVconst [d] x))
	// cond: is32Bit(c-d)
	// result: (ADDVconst [c-d] x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64SUBVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(is32Bit(c - d)) {
			break
		}
		v.reset(OpMIPS64ADDVconst)
		v.AuxInt = int64ToAuxInt(c - d)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64AND simplifies AND:
//   - AND x (MOVVconst [c]) -> ANDconst [c] x when c fits in 32 bits
//     (tried in both argument orders, since AND is commutative);
//   - AND x x               -> x (idempotent).
//
// Returns true if the value was rewritten.
func rewriteValueMIPS64_OpMIPS64AND(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (AND x (MOVVconst [c]))
	// cond: is32Bit(c)
	// result: (ANDconst [c] x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpMIPS64MOVVconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(is32Bit(c)) {
				continue
			}
			v.reset(OpMIPS64ANDconst)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (AND x x)
	// result: x
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.copyOf(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64ANDconst simplifies ANDconst:
//   - ANDconst [0] _              -> MOVVconst [0] (annihilator);
//   - ANDconst [-1] x             -> x (identity, all-ones mask);
//   - constant-folds ANDconst of MOVVconst;
//   - merges nested ANDconst masks into one (c & d).
//
// Returns true if the value was rewritten.
func rewriteValueMIPS64_OpMIPS64ANDconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (ANDconst [0] _)
	// result: (MOVVconst [0])
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (ANDconst [-1] x)
	// result: x
	for {
		if auxIntToInt64(v.AuxInt) != -1 {
			break
		}
		x := v_0
		v.copyOf(x)
		return true
	}
	// match: (ANDconst [c] (MOVVconst [d]))
	// result: (MOVVconst [c&d])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(c & d)
		return true
	}
	// match: (ANDconst [c] (ANDconst [d] x))
	// result: (ANDconst [c&d] x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64ANDconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		v.reset(OpMIPS64ANDconst)
		v.AuxInt = int64ToAuxInt(c & d)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64LoweredAtomicAdd32 rewrites an atomic 32-bit add
// of a constant into the immediate form LoweredAtomicAddconst32, when the
// constant fits in 32 bits. Returns true if the value was rewritten.
func rewriteValueMIPS64_OpMIPS64LoweredAtomicAdd32(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (LoweredAtomicAdd32 ptr (MOVVconst [c]) mem)
	// cond: is32Bit(c)
	// result: (LoweredAtomicAddconst32 [int32(c)] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64LoweredAtomicAddconst32)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64LoweredAtomicAdd64 rewrites an atomic 64-bit add
// of a constant into the immediate form LoweredAtomicAddconst64, when the
// constant fits in 32 bits (the AuxInt stays int64 here, unlike the 32-bit
// variant which narrows it). Returns true if the value was rewritten.
func rewriteValueMIPS64_OpMIPS64LoweredAtomicAdd64(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (LoweredAtomicAdd64 ptr (MOVVconst [c]) mem)
	// cond: is32Bit(c)
	// result: (LoweredAtomicAddconst64 [c] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64LoweredAtomicAddconst64)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64LoweredAtomicStore32 rewrites an atomic 32-bit
// store of the constant 0 into LoweredAtomicStorezero32, which needs no value
// register. Returns true if the value was rewritten.
func rewriteValueMIPS64_OpMIPS64LoweredAtomicStore32(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (LoweredAtomicStore32 ptr (MOVVconst [0]) mem)
	// result: (LoweredAtomicStorezero32 ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		mem := v_2
		v.reset(OpMIPS64LoweredAtomicStorezero32)
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64LoweredAtomicStore64 rewrites an atomic 64-bit
// store of the constant 0 into LoweredAtomicStorezero64, which needs no value
// register. Returns true if the value was rewritten.
func rewriteValueMIPS64_OpMIPS64LoweredAtomicStore64(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (LoweredAtomicStore64 ptr (MOVVconst [0]) mem)
	// result: (LoweredAtomicStorezero64 ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		mem := v_2
		v.reset(OpMIPS64LoweredAtomicStorezero64)
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVBUload folds addressing arithmetic into an
// unsigned byte load:
//   - absorbs an ADDVconst base into the load offset (combined offset must
//     stay 32-bit);
//   - merges a MOVVaddr base's symbol and offset into the load's aux fields
//     when the symbols are mergeable.
//
// Returns true if the value was rewritten.
func rewriteValueMIPS64_OpMIPS64MOVBUload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVBUload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVBUload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpMIPS64MOVBUload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVBUload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2))
	// result: (MOVBUload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
			break
		}
		v.reset(OpMIPS64MOVBUload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVBUreg elides redundant zero-extensions of a
// byte value:
//   - the input of MOVBUload or a prior MOVBUreg is already zero-extended,
//     so the op degrades to a plain register move (MOVVreg);
//   - a constant input is folded by truncating to uint8.
//
// Returns true if the value was rewritten.
func rewriteValueMIPS64_OpMIPS64MOVBUreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVBUreg x:(MOVBUload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVBUload {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg x:(MOVBUreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVBUreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg (MOVVconst [c]))
	// result: (MOVVconst [int64(uint8(c))])
	for {
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(uint8(c)))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVBload folds addressing arithmetic into a
// signed byte load, and constant-folds loads from read-only data:
//   - absorbs an ADDVconst base into the load offset (combined offset must
//     stay 32-bit);
//   - merges a MOVVaddr base's symbol and offset into the load's aux fields;
//   - a load from a read-only symbol (base is SB, symIsRO) is replaced by
//     the byte value read at compile time via read8.
//
// Returns true if the value was rewritten.
func rewriteValueMIPS64_OpMIPS64MOVBload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVBload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVBload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpMIPS64MOVBload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVBload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2))
	// result: (MOVBload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
			break
		}
		v.reset(OpMIPS64MOVBload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVBload [off] {sym} (SB) _)
	// cond: symIsRO(sym)
	// result: (MOVVconst [int64(read8(sym, int64(off)))])
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpSB || !(symIsRO(sym)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(read8(sym, int64(off))))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVBreg elides redundant sign-extensions of a
// byte value:
//   - the input of MOVBload or a prior MOVBreg is already sign-extended,
//     so the op degrades to a plain register move (MOVVreg);
//   - a constant input is folded by truncating through int8.
//
// Returns true if the value was rewritten.
func rewriteValueMIPS64_OpMIPS64MOVBreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVBreg x:(MOVBload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVBload {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBreg x:(MOVBreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVBreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBreg (MOVVconst [c]))
	// result: (MOVVconst [int64(int8(c))])
	for {
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(int8(c)))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVBstore simplifies a byte store:
//   - absorbs an ADDVconst base into the store offset (combined offset must
//     stay 32-bit);
//   - merges a MOVVaddr base's symbol and offset into the store's aux fields;
//   - a store of constant 0 becomes MOVBstorezero (no value register needed);
//   - drops any MOV{B,BU,H,HU,W,WU}reg extension wrapped around the stored
//     value: only the low byte is written, so the extension is irrelevant.
//
// Returns true if the value was rewritten.
func rewriteValueMIPS64_OpMIPS64MOVBstore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVBstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVBstore [off1+int32(off2)] {sym} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVBstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2))
	// result: (MOVBstore [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
			break
		}
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVVconst [0]) mem)
	// result: (MOVBstorezero [off] {sym} ptr mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		mem := v_2
		v.reset(OpMIPS64MOVBstorezero)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVBreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpMIPS64MOVBreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVBUreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpMIPS64MOVBUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVHreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpMIPS64MOVHreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVHUreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpMIPS64MOVHUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVWreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpMIPS64MOVWreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVWUreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpMIPS64MOVWUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVBstorezero folds addressing arithmetic into a
// zero byte store:
//   - absorbs an ADDVconst base into the store offset (combined offset must
//     stay 32-bit);
//   - merges a MOVVaddr base's symbol and offset into the store's aux fields.
//
// Returns true if the value was rewritten.
func rewriteValueMIPS64_OpMIPS64MOVBstorezero(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVBstorezero [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVBstorezero [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpMIPS64MOVBstorezero)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVBstorezero [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2))
	// result: (MOVBstorezero [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
			break
		}
		v.reset(OpMIPS64MOVBstorezero)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
  2963  func rewriteValueMIPS64_OpMIPS64MOVDload(v *Value) bool {
  2964  	v_1 := v.Args[1]
  2965  	v_0 := v.Args[0]
  2966  	// match: (MOVDload [off1] {sym} (ADDVconst [off2] ptr) mem)
  2967  	// cond: is32Bit(int64(off1)+off2)
  2968  	// result: (MOVDload [off1+int32(off2)] {sym} ptr mem)
  2969  	for {
  2970  		off1 := auxIntToInt32(v.AuxInt)
  2971  		sym := auxToSym(v.Aux)
  2972  		if v_0.Op != OpMIPS64ADDVconst {
  2973  			break
  2974  		}
  2975  		off2 := auxIntToInt64(v_0.AuxInt)
  2976  		ptr := v_0.Args[0]
  2977  		mem := v_1
  2978  		if !(is32Bit(int64(off1) + off2)) {
  2979  			break
  2980  		}
  2981  		v.reset(OpMIPS64MOVDload)
  2982  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  2983  		v.Aux = symToAux(sym)
  2984  		v.AddArg2(ptr, mem)
  2985  		return true
  2986  	}
  2987  	// match: (MOVDload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
  2988  	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2))
  2989  	// result: (MOVDload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
  2990  	for {
  2991  		off1 := auxIntToInt32(v.AuxInt)
  2992  		sym1 := auxToSym(v.Aux)
  2993  		if v_0.Op != OpMIPS64MOVVaddr {
  2994  			break
  2995  		}
  2996  		off2 := auxIntToInt32(v_0.AuxInt)
  2997  		sym2 := auxToSym(v_0.Aux)
  2998  		ptr := v_0.Args[0]
  2999  		mem := v_1
  3000  		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
  3001  			break
  3002  		}
  3003  		v.reset(OpMIPS64MOVDload)
  3004  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  3005  		v.Aux = symToAux(mergeSym(sym1, sym2))
  3006  		v.AddArg2(ptr, mem)
  3007  		return true
  3008  	}
  3009  	return false
  3010  }
  3011  func rewriteValueMIPS64_OpMIPS64MOVDstore(v *Value) bool {
  3012  	v_2 := v.Args[2]
  3013  	v_1 := v.Args[1]
  3014  	v_0 := v.Args[0]
  3015  	// match: (MOVDstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
  3016  	// cond: is32Bit(int64(off1)+off2)
  3017  	// result: (MOVDstore [off1+int32(off2)] {sym} ptr val mem)
  3018  	for {
  3019  		off1 := auxIntToInt32(v.AuxInt)
  3020  		sym := auxToSym(v.Aux)
  3021  		if v_0.Op != OpMIPS64ADDVconst {
  3022  			break
  3023  		}
  3024  		off2 := auxIntToInt64(v_0.AuxInt)
  3025  		ptr := v_0.Args[0]
  3026  		val := v_1
  3027  		mem := v_2
  3028  		if !(is32Bit(int64(off1) + off2)) {
  3029  			break
  3030  		}
  3031  		v.reset(OpMIPS64MOVDstore)
  3032  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  3033  		v.Aux = symToAux(sym)
  3034  		v.AddArg3(ptr, val, mem)
  3035  		return true
  3036  	}
  3037  	// match: (MOVDstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
  3038  	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2))
  3039  	// result: (MOVDstore [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr val mem)
  3040  	for {
  3041  		off1 := auxIntToInt32(v.AuxInt)
  3042  		sym1 := auxToSym(v.Aux)
  3043  		if v_0.Op != OpMIPS64MOVVaddr {
  3044  			break
  3045  		}
  3046  		off2 := auxIntToInt32(v_0.AuxInt)
  3047  		sym2 := auxToSym(v_0.Aux)
  3048  		ptr := v_0.Args[0]
  3049  		val := v_1
  3050  		mem := v_2
  3051  		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
  3052  			break
  3053  		}
  3054  		v.reset(OpMIPS64MOVDstore)
  3055  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  3056  		v.Aux = symToAux(mergeSym(sym1, sym2))
  3057  		v.AddArg3(ptr, val, mem)
  3058  		return true
  3059  	}
  3060  	return false
  3061  }
  3062  func rewriteValueMIPS64_OpMIPS64MOVFload(v *Value) bool {
  3063  	v_1 := v.Args[1]
  3064  	v_0 := v.Args[0]
  3065  	// match: (MOVFload [off1] {sym} (ADDVconst [off2] ptr) mem)
  3066  	// cond: is32Bit(int64(off1)+off2)
  3067  	// result: (MOVFload [off1+int32(off2)] {sym} ptr mem)
  3068  	for {
  3069  		off1 := auxIntToInt32(v.AuxInt)
  3070  		sym := auxToSym(v.Aux)
  3071  		if v_0.Op != OpMIPS64ADDVconst {
  3072  			break
  3073  		}
  3074  		off2 := auxIntToInt64(v_0.AuxInt)
  3075  		ptr := v_0.Args[0]
  3076  		mem := v_1
  3077  		if !(is32Bit(int64(off1) + off2)) {
  3078  			break
  3079  		}
  3080  		v.reset(OpMIPS64MOVFload)
  3081  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  3082  		v.Aux = symToAux(sym)
  3083  		v.AddArg2(ptr, mem)
  3084  		return true
  3085  	}
  3086  	// match: (MOVFload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
  3087  	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2))
  3088  	// result: (MOVFload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
  3089  	for {
  3090  		off1 := auxIntToInt32(v.AuxInt)
  3091  		sym1 := auxToSym(v.Aux)
  3092  		if v_0.Op != OpMIPS64MOVVaddr {
  3093  			break
  3094  		}
  3095  		off2 := auxIntToInt32(v_0.AuxInt)
  3096  		sym2 := auxToSym(v_0.Aux)
  3097  		ptr := v_0.Args[0]
  3098  		mem := v_1
  3099  		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
  3100  			break
  3101  		}
  3102  		v.reset(OpMIPS64MOVFload)
  3103  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  3104  		v.Aux = symToAux(mergeSym(sym1, sym2))
  3105  		v.AddArg2(ptr, mem)
  3106  		return true
  3107  	}
  3108  	return false
  3109  }
  3110  func rewriteValueMIPS64_OpMIPS64MOVFstore(v *Value) bool {
  3111  	v_2 := v.Args[2]
  3112  	v_1 := v.Args[1]
  3113  	v_0 := v.Args[0]
  3114  	// match: (MOVFstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
  3115  	// cond: is32Bit(int64(off1)+off2)
  3116  	// result: (MOVFstore [off1+int32(off2)] {sym} ptr val mem)
  3117  	for {
  3118  		off1 := auxIntToInt32(v.AuxInt)
  3119  		sym := auxToSym(v.Aux)
  3120  		if v_0.Op != OpMIPS64ADDVconst {
  3121  			break
  3122  		}
  3123  		off2 := auxIntToInt64(v_0.AuxInt)
  3124  		ptr := v_0.Args[0]
  3125  		val := v_1
  3126  		mem := v_2
  3127  		if !(is32Bit(int64(off1) + off2)) {
  3128  			break
  3129  		}
  3130  		v.reset(OpMIPS64MOVFstore)
  3131  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  3132  		v.Aux = symToAux(sym)
  3133  		v.AddArg3(ptr, val, mem)
  3134  		return true
  3135  	}
  3136  	// match: (MOVFstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
  3137  	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2))
  3138  	// result: (MOVFstore [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr val mem)
  3139  	for {
  3140  		off1 := auxIntToInt32(v.AuxInt)
  3141  		sym1 := auxToSym(v.Aux)
  3142  		if v_0.Op != OpMIPS64MOVVaddr {
  3143  			break
  3144  		}
  3145  		off2 := auxIntToInt32(v_0.AuxInt)
  3146  		sym2 := auxToSym(v_0.Aux)
  3147  		ptr := v_0.Args[0]
  3148  		val := v_1
  3149  		mem := v_2
  3150  		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
  3151  			break
  3152  		}
  3153  		v.reset(OpMIPS64MOVFstore)
  3154  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  3155  		v.Aux = symToAux(mergeSym(sym1, sym2))
  3156  		v.AddArg3(ptr, val, mem)
  3157  		return true
  3158  	}
  3159  	return false
  3160  }
  3161  func rewriteValueMIPS64_OpMIPS64MOVHUload(v *Value) bool {
  3162  	v_1 := v.Args[1]
  3163  	v_0 := v.Args[0]
  3164  	// match: (MOVHUload [off1] {sym} (ADDVconst [off2] ptr) mem)
  3165  	// cond: is32Bit(int64(off1)+off2)
  3166  	// result: (MOVHUload [off1+int32(off2)] {sym} ptr mem)
  3167  	for {
  3168  		off1 := auxIntToInt32(v.AuxInt)
  3169  		sym := auxToSym(v.Aux)
  3170  		if v_0.Op != OpMIPS64ADDVconst {
  3171  			break
  3172  		}
  3173  		off2 := auxIntToInt64(v_0.AuxInt)
  3174  		ptr := v_0.Args[0]
  3175  		mem := v_1
  3176  		if !(is32Bit(int64(off1) + off2)) {
  3177  			break
  3178  		}
  3179  		v.reset(OpMIPS64MOVHUload)
  3180  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  3181  		v.Aux = symToAux(sym)
  3182  		v.AddArg2(ptr, mem)
  3183  		return true
  3184  	}
  3185  	// match: (MOVHUload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
  3186  	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2))
  3187  	// result: (MOVHUload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
  3188  	for {
  3189  		off1 := auxIntToInt32(v.AuxInt)
  3190  		sym1 := auxToSym(v.Aux)
  3191  		if v_0.Op != OpMIPS64MOVVaddr {
  3192  			break
  3193  		}
  3194  		off2 := auxIntToInt32(v_0.AuxInt)
  3195  		sym2 := auxToSym(v_0.Aux)
  3196  		ptr := v_0.Args[0]
  3197  		mem := v_1
  3198  		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
  3199  			break
  3200  		}
  3201  		v.reset(OpMIPS64MOVHUload)
  3202  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  3203  		v.Aux = symToAux(mergeSym(sym1, sym2))
  3204  		v.AddArg2(ptr, mem)
  3205  		return true
  3206  	}
  3207  	return false
  3208  }
  3209  func rewriteValueMIPS64_OpMIPS64MOVHUreg(v *Value) bool {
  3210  	v_0 := v.Args[0]
  3211  	// match: (MOVHUreg x:(MOVBUload _ _))
  3212  	// result: (MOVVreg x)
  3213  	for {
  3214  		x := v_0
  3215  		if x.Op != OpMIPS64MOVBUload {
  3216  			break
  3217  		}
  3218  		v.reset(OpMIPS64MOVVreg)
  3219  		v.AddArg(x)
  3220  		return true
  3221  	}
  3222  	// match: (MOVHUreg x:(MOVHUload _ _))
  3223  	// result: (MOVVreg x)
  3224  	for {
  3225  		x := v_0
  3226  		if x.Op != OpMIPS64MOVHUload {
  3227  			break
  3228  		}
  3229  		v.reset(OpMIPS64MOVVreg)
  3230  		v.AddArg(x)
  3231  		return true
  3232  	}
  3233  	// match: (MOVHUreg x:(MOVBUreg _))
  3234  	// result: (MOVVreg x)
  3235  	for {
  3236  		x := v_0
  3237  		if x.Op != OpMIPS64MOVBUreg {
  3238  			break
  3239  		}
  3240  		v.reset(OpMIPS64MOVVreg)
  3241  		v.AddArg(x)
  3242  		return true
  3243  	}
  3244  	// match: (MOVHUreg x:(MOVHUreg _))
  3245  	// result: (MOVVreg x)
  3246  	for {
  3247  		x := v_0
  3248  		if x.Op != OpMIPS64MOVHUreg {
  3249  			break
  3250  		}
  3251  		v.reset(OpMIPS64MOVVreg)
  3252  		v.AddArg(x)
  3253  		return true
  3254  	}
  3255  	// match: (MOVHUreg (MOVVconst [c]))
  3256  	// result: (MOVVconst [int64(uint16(c))])
  3257  	for {
  3258  		if v_0.Op != OpMIPS64MOVVconst {
  3259  			break
  3260  		}
  3261  		c := auxIntToInt64(v_0.AuxInt)
  3262  		v.reset(OpMIPS64MOVVconst)
  3263  		v.AuxInt = int64ToAuxInt(int64(uint16(c)))
  3264  		return true
  3265  	}
  3266  	return false
  3267  }
  3268  func rewriteValueMIPS64_OpMIPS64MOVHload(v *Value) bool {
  3269  	v_1 := v.Args[1]
  3270  	v_0 := v.Args[0]
  3271  	b := v.Block
  3272  	config := b.Func.Config
  3273  	// match: (MOVHload [off1] {sym} (ADDVconst [off2] ptr) mem)
  3274  	// cond: is32Bit(int64(off1)+off2)
  3275  	// result: (MOVHload [off1+int32(off2)] {sym} ptr mem)
  3276  	for {
  3277  		off1 := auxIntToInt32(v.AuxInt)
  3278  		sym := auxToSym(v.Aux)
  3279  		if v_0.Op != OpMIPS64ADDVconst {
  3280  			break
  3281  		}
  3282  		off2 := auxIntToInt64(v_0.AuxInt)
  3283  		ptr := v_0.Args[0]
  3284  		mem := v_1
  3285  		if !(is32Bit(int64(off1) + off2)) {
  3286  			break
  3287  		}
  3288  		v.reset(OpMIPS64MOVHload)
  3289  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  3290  		v.Aux = symToAux(sym)
  3291  		v.AddArg2(ptr, mem)
  3292  		return true
  3293  	}
  3294  	// match: (MOVHload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
  3295  	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2))
  3296  	// result: (MOVHload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
  3297  	for {
  3298  		off1 := auxIntToInt32(v.AuxInt)
  3299  		sym1 := auxToSym(v.Aux)
  3300  		if v_0.Op != OpMIPS64MOVVaddr {
  3301  			break
  3302  		}
  3303  		off2 := auxIntToInt32(v_0.AuxInt)
  3304  		sym2 := auxToSym(v_0.Aux)
  3305  		ptr := v_0.Args[0]
  3306  		mem := v_1
  3307  		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
  3308  			break
  3309  		}
  3310  		v.reset(OpMIPS64MOVHload)
  3311  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  3312  		v.Aux = symToAux(mergeSym(sym1, sym2))
  3313  		v.AddArg2(ptr, mem)
  3314  		return true
  3315  	}
  3316  	// match: (MOVHload [off] {sym} (SB) _)
  3317  	// cond: symIsRO(sym)
  3318  	// result: (MOVVconst [int64(read16(sym, int64(off), config.ctxt.Arch.ByteOrder))])
  3319  	for {
  3320  		off := auxIntToInt32(v.AuxInt)
  3321  		sym := auxToSym(v.Aux)
  3322  		if v_0.Op != OpSB || !(symIsRO(sym)) {
  3323  			break
  3324  		}
  3325  		v.reset(OpMIPS64MOVVconst)
  3326  		v.AuxInt = int64ToAuxInt(int64(read16(sym, int64(off), config.ctxt.Arch.ByteOrder)))
  3327  		return true
  3328  	}
  3329  	return false
  3330  }
  3331  func rewriteValueMIPS64_OpMIPS64MOVHreg(v *Value) bool {
  3332  	v_0 := v.Args[0]
  3333  	// match: (MOVHreg x:(MOVBload _ _))
  3334  	// result: (MOVVreg x)
  3335  	for {
  3336  		x := v_0
  3337  		if x.Op != OpMIPS64MOVBload {
  3338  			break
  3339  		}
  3340  		v.reset(OpMIPS64MOVVreg)
  3341  		v.AddArg(x)
  3342  		return true
  3343  	}
  3344  	// match: (MOVHreg x:(MOVBUload _ _))
  3345  	// result: (MOVVreg x)
  3346  	for {
  3347  		x := v_0
  3348  		if x.Op != OpMIPS64MOVBUload {
  3349  			break
  3350  		}
  3351  		v.reset(OpMIPS64MOVVreg)
  3352  		v.AddArg(x)
  3353  		return true
  3354  	}
  3355  	// match: (MOVHreg x:(MOVHload _ _))
  3356  	// result: (MOVVreg x)
  3357  	for {
  3358  		x := v_0
  3359  		if x.Op != OpMIPS64MOVHload {
  3360  			break
  3361  		}
  3362  		v.reset(OpMIPS64MOVVreg)
  3363  		v.AddArg(x)
  3364  		return true
  3365  	}
  3366  	// match: (MOVHreg x:(MOVBreg _))
  3367  	// result: (MOVVreg x)
  3368  	for {
  3369  		x := v_0
  3370  		if x.Op != OpMIPS64MOVBreg {
  3371  			break
  3372  		}
  3373  		v.reset(OpMIPS64MOVVreg)
  3374  		v.AddArg(x)
  3375  		return true
  3376  	}
  3377  	// match: (MOVHreg x:(MOVBUreg _))
  3378  	// result: (MOVVreg x)
  3379  	for {
  3380  		x := v_0
  3381  		if x.Op != OpMIPS64MOVBUreg {
  3382  			break
  3383  		}
  3384  		v.reset(OpMIPS64MOVVreg)
  3385  		v.AddArg(x)
  3386  		return true
  3387  	}
  3388  	// match: (MOVHreg x:(MOVHreg _))
  3389  	// result: (MOVVreg x)
  3390  	for {
  3391  		x := v_0
  3392  		if x.Op != OpMIPS64MOVHreg {
  3393  			break
  3394  		}
  3395  		v.reset(OpMIPS64MOVVreg)
  3396  		v.AddArg(x)
  3397  		return true
  3398  	}
  3399  	// match: (MOVHreg (MOVVconst [c]))
  3400  	// result: (MOVVconst [int64(int16(c))])
  3401  	for {
  3402  		if v_0.Op != OpMIPS64MOVVconst {
  3403  			break
  3404  		}
  3405  		c := auxIntToInt64(v_0.AuxInt)
  3406  		v.reset(OpMIPS64MOVVconst)
  3407  		v.AuxInt = int64ToAuxInt(int64(int16(c)))
  3408  		return true
  3409  	}
  3410  	return false
  3411  }
  3412  func rewriteValueMIPS64_OpMIPS64MOVHstore(v *Value) bool {
  3413  	v_2 := v.Args[2]
  3414  	v_1 := v.Args[1]
  3415  	v_0 := v.Args[0]
  3416  	// match: (MOVHstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
  3417  	// cond: is32Bit(int64(off1)+off2)
  3418  	// result: (MOVHstore [off1+int32(off2)] {sym} ptr val mem)
  3419  	for {
  3420  		off1 := auxIntToInt32(v.AuxInt)
  3421  		sym := auxToSym(v.Aux)
  3422  		if v_0.Op != OpMIPS64ADDVconst {
  3423  			break
  3424  		}
  3425  		off2 := auxIntToInt64(v_0.AuxInt)
  3426  		ptr := v_0.Args[0]
  3427  		val := v_1
  3428  		mem := v_2
  3429  		if !(is32Bit(int64(off1) + off2)) {
  3430  			break
  3431  		}
  3432  		v.reset(OpMIPS64MOVHstore)
  3433  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  3434  		v.Aux = symToAux(sym)
  3435  		v.AddArg3(ptr, val, mem)
  3436  		return true
  3437  	}
  3438  	// match: (MOVHstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
  3439  	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2))
  3440  	// result: (MOVHstore [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr val mem)
  3441  	for {
  3442  		off1 := auxIntToInt32(v.AuxInt)
  3443  		sym1 := auxToSym(v.Aux)
  3444  		if v_0.Op != OpMIPS64MOVVaddr {
  3445  			break
  3446  		}
  3447  		off2 := auxIntToInt32(v_0.AuxInt)
  3448  		sym2 := auxToSym(v_0.Aux)
  3449  		ptr := v_0.Args[0]
  3450  		val := v_1
  3451  		mem := v_2
  3452  		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
  3453  			break
  3454  		}
  3455  		v.reset(OpMIPS64MOVHstore)
  3456  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  3457  		v.Aux = symToAux(mergeSym(sym1, sym2))
  3458  		v.AddArg3(ptr, val, mem)
  3459  		return true
  3460  	}
  3461  	// match: (MOVHstore [off] {sym} ptr (MOVVconst [0]) mem)
  3462  	// result: (MOVHstorezero [off] {sym} ptr mem)
  3463  	for {
  3464  		off := auxIntToInt32(v.AuxInt)
  3465  		sym := auxToSym(v.Aux)
  3466  		ptr := v_0
  3467  		if v_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_1.AuxInt) != 0 {
  3468  			break
  3469  		}
  3470  		mem := v_2
  3471  		v.reset(OpMIPS64MOVHstorezero)
  3472  		v.AuxInt = int32ToAuxInt(off)
  3473  		v.Aux = symToAux(sym)
  3474  		v.AddArg2(ptr, mem)
  3475  		return true
  3476  	}
  3477  	// match: (MOVHstore [off] {sym} ptr (MOVHreg x) mem)
  3478  	// result: (MOVHstore [off] {sym} ptr x mem)
  3479  	for {
  3480  		off := auxIntToInt32(v.AuxInt)
  3481  		sym := auxToSym(v.Aux)
  3482  		ptr := v_0
  3483  		if v_1.Op != OpMIPS64MOVHreg {
  3484  			break
  3485  		}
  3486  		x := v_1.Args[0]
  3487  		mem := v_2
  3488  		v.reset(OpMIPS64MOVHstore)
  3489  		v.AuxInt = int32ToAuxInt(off)
  3490  		v.Aux = symToAux(sym)
  3491  		v.AddArg3(ptr, x, mem)
  3492  		return true
  3493  	}
  3494  	// match: (MOVHstore [off] {sym} ptr (MOVHUreg x) mem)
  3495  	// result: (MOVHstore [off] {sym} ptr x mem)
  3496  	for {
  3497  		off := auxIntToInt32(v.AuxInt)
  3498  		sym := auxToSym(v.Aux)
  3499  		ptr := v_0
  3500  		if v_1.Op != OpMIPS64MOVHUreg {
  3501  			break
  3502  		}
  3503  		x := v_1.Args[0]
  3504  		mem := v_2
  3505  		v.reset(OpMIPS64MOVHstore)
  3506  		v.AuxInt = int32ToAuxInt(off)
  3507  		v.Aux = symToAux(sym)
  3508  		v.AddArg3(ptr, x, mem)
  3509  		return true
  3510  	}
  3511  	// match: (MOVHstore [off] {sym} ptr (MOVWreg x) mem)
  3512  	// result: (MOVHstore [off] {sym} ptr x mem)
  3513  	for {
  3514  		off := auxIntToInt32(v.AuxInt)
  3515  		sym := auxToSym(v.Aux)
  3516  		ptr := v_0
  3517  		if v_1.Op != OpMIPS64MOVWreg {
  3518  			break
  3519  		}
  3520  		x := v_1.Args[0]
  3521  		mem := v_2
  3522  		v.reset(OpMIPS64MOVHstore)
  3523  		v.AuxInt = int32ToAuxInt(off)
  3524  		v.Aux = symToAux(sym)
  3525  		v.AddArg3(ptr, x, mem)
  3526  		return true
  3527  	}
  3528  	// match: (MOVHstore [off] {sym} ptr (MOVWUreg x) mem)
  3529  	// result: (MOVHstore [off] {sym} ptr x mem)
  3530  	for {
  3531  		off := auxIntToInt32(v.AuxInt)
  3532  		sym := auxToSym(v.Aux)
  3533  		ptr := v_0
  3534  		if v_1.Op != OpMIPS64MOVWUreg {
  3535  			break
  3536  		}
  3537  		x := v_1.Args[0]
  3538  		mem := v_2
  3539  		v.reset(OpMIPS64MOVHstore)
  3540  		v.AuxInt = int32ToAuxInt(off)
  3541  		v.Aux = symToAux(sym)
  3542  		v.AddArg3(ptr, x, mem)
  3543  		return true
  3544  	}
  3545  	return false
  3546  }
  3547  func rewriteValueMIPS64_OpMIPS64MOVHstorezero(v *Value) bool {
  3548  	v_1 := v.Args[1]
  3549  	v_0 := v.Args[0]
  3550  	// match: (MOVHstorezero [off1] {sym} (ADDVconst [off2] ptr) mem)
  3551  	// cond: is32Bit(int64(off1)+off2)
  3552  	// result: (MOVHstorezero [off1+int32(off2)] {sym} ptr mem)
  3553  	for {
  3554  		off1 := auxIntToInt32(v.AuxInt)
  3555  		sym := auxToSym(v.Aux)
  3556  		if v_0.Op != OpMIPS64ADDVconst {
  3557  			break
  3558  		}
  3559  		off2 := auxIntToInt64(v_0.AuxInt)
  3560  		ptr := v_0.Args[0]
  3561  		mem := v_1
  3562  		if !(is32Bit(int64(off1) + off2)) {
  3563  			break
  3564  		}
  3565  		v.reset(OpMIPS64MOVHstorezero)
  3566  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  3567  		v.Aux = symToAux(sym)
  3568  		v.AddArg2(ptr, mem)
  3569  		return true
  3570  	}
  3571  	// match: (MOVHstorezero [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
  3572  	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2))
  3573  	// result: (MOVHstorezero [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
  3574  	for {
  3575  		off1 := auxIntToInt32(v.AuxInt)
  3576  		sym1 := auxToSym(v.Aux)
  3577  		if v_0.Op != OpMIPS64MOVVaddr {
  3578  			break
  3579  		}
  3580  		off2 := auxIntToInt32(v_0.AuxInt)
  3581  		sym2 := auxToSym(v_0.Aux)
  3582  		ptr := v_0.Args[0]
  3583  		mem := v_1
  3584  		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
  3585  			break
  3586  		}
  3587  		v.reset(OpMIPS64MOVHstorezero)
  3588  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  3589  		v.Aux = symToAux(mergeSym(sym1, sym2))
  3590  		v.AddArg2(ptr, mem)
  3591  		return true
  3592  	}
  3593  	return false
  3594  }
  3595  func rewriteValueMIPS64_OpMIPS64MOVVload(v *Value) bool {
  3596  	v_1 := v.Args[1]
  3597  	v_0 := v.Args[0]
  3598  	b := v.Block
  3599  	config := b.Func.Config
  3600  	// match: (MOVVload [off1] {sym} (ADDVconst [off2] ptr) mem)
  3601  	// cond: is32Bit(int64(off1)+off2)
  3602  	// result: (MOVVload [off1+int32(off2)] {sym} ptr mem)
  3603  	for {
  3604  		off1 := auxIntToInt32(v.AuxInt)
  3605  		sym := auxToSym(v.Aux)
  3606  		if v_0.Op != OpMIPS64ADDVconst {
  3607  			break
  3608  		}
  3609  		off2 := auxIntToInt64(v_0.AuxInt)
  3610  		ptr := v_0.Args[0]
  3611  		mem := v_1
  3612  		if !(is32Bit(int64(off1) + off2)) {
  3613  			break
  3614  		}
  3615  		v.reset(OpMIPS64MOVVload)
  3616  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  3617  		v.Aux = symToAux(sym)
  3618  		v.AddArg2(ptr, mem)
  3619  		return true
  3620  	}
  3621  	// match: (MOVVload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
  3622  	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2))
  3623  	// result: (MOVVload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
  3624  	for {
  3625  		off1 := auxIntToInt32(v.AuxInt)
  3626  		sym1 := auxToSym(v.Aux)
  3627  		if v_0.Op != OpMIPS64MOVVaddr {
  3628  			break
  3629  		}
  3630  		off2 := auxIntToInt32(v_0.AuxInt)
  3631  		sym2 := auxToSym(v_0.Aux)
  3632  		ptr := v_0.Args[0]
  3633  		mem := v_1
  3634  		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
  3635  			break
  3636  		}
  3637  		v.reset(OpMIPS64MOVVload)
  3638  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  3639  		v.Aux = symToAux(mergeSym(sym1, sym2))
  3640  		v.AddArg2(ptr, mem)
  3641  		return true
  3642  	}
  3643  	// match: (MOVVload [off] {sym} (SB) _)
  3644  	// cond: symIsRO(sym)
  3645  	// result: (MOVVconst [int64(read64(sym, int64(off), config.ctxt.Arch.ByteOrder))])
  3646  	for {
  3647  		off := auxIntToInt32(v.AuxInt)
  3648  		sym := auxToSym(v.Aux)
  3649  		if v_0.Op != OpSB || !(symIsRO(sym)) {
  3650  			break
  3651  		}
  3652  		v.reset(OpMIPS64MOVVconst)
  3653  		v.AuxInt = int64ToAuxInt(int64(read64(sym, int64(off), config.ctxt.Arch.ByteOrder)))
  3654  		return true
  3655  	}
  3656  	return false
  3657  }
  3658  func rewriteValueMIPS64_OpMIPS64MOVVnop(v *Value) bool {
  3659  	v_0 := v.Args[0]
  3660  	// match: (MOVVnop (MOVVconst [c]))
  3661  	// result: (MOVVconst [c])
  3662  	for {
  3663  		if v_0.Op != OpMIPS64MOVVconst {
  3664  			break
  3665  		}
  3666  		c := auxIntToInt64(v_0.AuxInt)
  3667  		v.reset(OpMIPS64MOVVconst)
  3668  		v.AuxInt = int64ToAuxInt(c)
  3669  		return true
  3670  	}
  3671  	return false
  3672  }
  3673  func rewriteValueMIPS64_OpMIPS64MOVVreg(v *Value) bool {
  3674  	v_0 := v.Args[0]
  3675  	// match: (MOVVreg x)
  3676  	// cond: x.Uses == 1
  3677  	// result: (MOVVnop x)
  3678  	for {
  3679  		x := v_0
  3680  		if !(x.Uses == 1) {
  3681  			break
  3682  		}
  3683  		v.reset(OpMIPS64MOVVnop)
  3684  		v.AddArg(x)
  3685  		return true
  3686  	}
  3687  	// match: (MOVVreg (MOVVconst [c]))
  3688  	// result: (MOVVconst [c])
  3689  	for {
  3690  		if v_0.Op != OpMIPS64MOVVconst {
  3691  			break
  3692  		}
  3693  		c := auxIntToInt64(v_0.AuxInt)
  3694  		v.reset(OpMIPS64MOVVconst)
  3695  		v.AuxInt = int64ToAuxInt(c)
  3696  		return true
  3697  	}
  3698  	return false
  3699  }
  3700  func rewriteValueMIPS64_OpMIPS64MOVVstore(v *Value) bool {
  3701  	v_2 := v.Args[2]
  3702  	v_1 := v.Args[1]
  3703  	v_0 := v.Args[0]
  3704  	// match: (MOVVstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
  3705  	// cond: is32Bit(int64(off1)+off2)
  3706  	// result: (MOVVstore [off1+int32(off2)] {sym} ptr val mem)
  3707  	for {
  3708  		off1 := auxIntToInt32(v.AuxInt)
  3709  		sym := auxToSym(v.Aux)
  3710  		if v_0.Op != OpMIPS64ADDVconst {
  3711  			break
  3712  		}
  3713  		off2 := auxIntToInt64(v_0.AuxInt)
  3714  		ptr := v_0.Args[0]
  3715  		val := v_1
  3716  		mem := v_2
  3717  		if !(is32Bit(int64(off1) + off2)) {
  3718  			break
  3719  		}
  3720  		v.reset(OpMIPS64MOVVstore)
  3721  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  3722  		v.Aux = symToAux(sym)
  3723  		v.AddArg3(ptr, val, mem)
  3724  		return true
  3725  	}
  3726  	// match: (MOVVstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
  3727  	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2))
  3728  	// result: (MOVVstore [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr val mem)
  3729  	for {
  3730  		off1 := auxIntToInt32(v.AuxInt)
  3731  		sym1 := auxToSym(v.Aux)
  3732  		if v_0.Op != OpMIPS64MOVVaddr {
  3733  			break
  3734  		}
  3735  		off2 := auxIntToInt32(v_0.AuxInt)
  3736  		sym2 := auxToSym(v_0.Aux)
  3737  		ptr := v_0.Args[0]
  3738  		val := v_1
  3739  		mem := v_2
  3740  		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
  3741  			break
  3742  		}
  3743  		v.reset(OpMIPS64MOVVstore)
  3744  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  3745  		v.Aux = symToAux(mergeSym(sym1, sym2))
  3746  		v.AddArg3(ptr, val, mem)
  3747  		return true
  3748  	}
  3749  	// match: (MOVVstore [off] {sym} ptr (MOVVconst [0]) mem)
  3750  	// result: (MOVVstorezero [off] {sym} ptr mem)
  3751  	for {
  3752  		off := auxIntToInt32(v.AuxInt)
  3753  		sym := auxToSym(v.Aux)
  3754  		ptr := v_0
  3755  		if v_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_1.AuxInt) != 0 {
  3756  			break
  3757  		}
  3758  		mem := v_2
  3759  		v.reset(OpMIPS64MOVVstorezero)
  3760  		v.AuxInt = int32ToAuxInt(off)
  3761  		v.Aux = symToAux(sym)
  3762  		v.AddArg2(ptr, mem)
  3763  		return true
  3764  	}
  3765  	return false
  3766  }
  3767  func rewriteValueMIPS64_OpMIPS64MOVVstorezero(v *Value) bool {
  3768  	v_1 := v.Args[1]
  3769  	v_0 := v.Args[0]
  3770  	// match: (MOVVstorezero [off1] {sym} (ADDVconst [off2] ptr) mem)
  3771  	// cond: is32Bit(int64(off1)+off2)
  3772  	// result: (MOVVstorezero [off1+int32(off2)] {sym} ptr mem)
  3773  	for {
  3774  		off1 := auxIntToInt32(v.AuxInt)
  3775  		sym := auxToSym(v.Aux)
  3776  		if v_0.Op != OpMIPS64ADDVconst {
  3777  			break
  3778  		}
  3779  		off2 := auxIntToInt64(v_0.AuxInt)
  3780  		ptr := v_0.Args[0]
  3781  		mem := v_1
  3782  		if !(is32Bit(int64(off1) + off2)) {
  3783  			break
  3784  		}
  3785  		v.reset(OpMIPS64MOVVstorezero)
  3786  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  3787  		v.Aux = symToAux(sym)
  3788  		v.AddArg2(ptr, mem)
  3789  		return true
  3790  	}
  3791  	// match: (MOVVstorezero [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
  3792  	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2))
  3793  	// result: (MOVVstorezero [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
  3794  	for {
  3795  		off1 := auxIntToInt32(v.AuxInt)
  3796  		sym1 := auxToSym(v.Aux)
  3797  		if v_0.Op != OpMIPS64MOVVaddr {
  3798  			break
  3799  		}
  3800  		off2 := auxIntToInt32(v_0.AuxInt)
  3801  		sym2 := auxToSym(v_0.Aux)
  3802  		ptr := v_0.Args[0]
  3803  		mem := v_1
  3804  		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
  3805  			break
  3806  		}
  3807  		v.reset(OpMIPS64MOVVstorezero)
  3808  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  3809  		v.Aux = symToAux(mergeSym(sym1, sym2))
  3810  		v.AddArg2(ptr, mem)
  3811  		return true
  3812  	}
  3813  	return false
  3814  }
  3815  func rewriteValueMIPS64_OpMIPS64MOVWUload(v *Value) bool {
  3816  	v_1 := v.Args[1]
  3817  	v_0 := v.Args[0]
  3818  	// match: (MOVWUload [off1] {sym} (ADDVconst [off2] ptr) mem)
  3819  	// cond: is32Bit(int64(off1)+off2)
  3820  	// result: (MOVWUload [off1+int32(off2)] {sym} ptr mem)
  3821  	for {
  3822  		off1 := auxIntToInt32(v.AuxInt)
  3823  		sym := auxToSym(v.Aux)
  3824  		if v_0.Op != OpMIPS64ADDVconst {
  3825  			break
  3826  		}
  3827  		off2 := auxIntToInt64(v_0.AuxInt)
  3828  		ptr := v_0.Args[0]
  3829  		mem := v_1
  3830  		if !(is32Bit(int64(off1) + off2)) {
  3831  			break
  3832  		}
  3833  		v.reset(OpMIPS64MOVWUload)
  3834  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  3835  		v.Aux = symToAux(sym)
  3836  		v.AddArg2(ptr, mem)
  3837  		return true
  3838  	}
  3839  	// match: (MOVWUload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
  3840  	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2))
  3841  	// result: (MOVWUload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
  3842  	for {
  3843  		off1 := auxIntToInt32(v.AuxInt)
  3844  		sym1 := auxToSym(v.Aux)
  3845  		if v_0.Op != OpMIPS64MOVVaddr {
  3846  			break
  3847  		}
  3848  		off2 := auxIntToInt32(v_0.AuxInt)
  3849  		sym2 := auxToSym(v_0.Aux)
  3850  		ptr := v_0.Args[0]
  3851  		mem := v_1
  3852  		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
  3853  			break
  3854  		}
  3855  		v.reset(OpMIPS64MOVWUload)
  3856  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  3857  		v.Aux = symToAux(mergeSym(sym1, sym2))
  3858  		v.AddArg2(ptr, mem)
  3859  		return true
  3860  	}
  3861  	return false
  3862  }
  3863  func rewriteValueMIPS64_OpMIPS64MOVWUreg(v *Value) bool {
  3864  	v_0 := v.Args[0]
  3865  	// match: (MOVWUreg x:(MOVBUload _ _))
  3866  	// result: (MOVVreg x)
  3867  	for {
  3868  		x := v_0
  3869  		if x.Op != OpMIPS64MOVBUload {
  3870  			break
  3871  		}
  3872  		v.reset(OpMIPS64MOVVreg)
  3873  		v.AddArg(x)
  3874  		return true
  3875  	}
  3876  	// match: (MOVWUreg x:(MOVHUload _ _))
  3877  	// result: (MOVVreg x)
  3878  	for {
  3879  		x := v_0
  3880  		if x.Op != OpMIPS64MOVHUload {
  3881  			break
  3882  		}
  3883  		v.reset(OpMIPS64MOVVreg)
  3884  		v.AddArg(x)
  3885  		return true
  3886  	}
  3887  	// match: (MOVWUreg x:(MOVWUload _ _))
  3888  	// result: (MOVVreg x)
  3889  	for {
  3890  		x := v_0
  3891  		if x.Op != OpMIPS64MOVWUload {
  3892  			break
  3893  		}
  3894  		v.reset(OpMIPS64MOVVreg)
  3895  		v.AddArg(x)
  3896  		return true
  3897  	}
  3898  	// match: (MOVWUreg x:(MOVBUreg _))
  3899  	// result: (MOVVreg x)
  3900  	for {
  3901  		x := v_0
  3902  		if x.Op != OpMIPS64MOVBUreg {
  3903  			break
  3904  		}
  3905  		v.reset(OpMIPS64MOVVreg)
  3906  		v.AddArg(x)
  3907  		return true
  3908  	}
  3909  	// match: (MOVWUreg x:(MOVHUreg _))
  3910  	// result: (MOVVreg x)
  3911  	for {
  3912  		x := v_0
  3913  		if x.Op != OpMIPS64MOVHUreg {
  3914  			break
  3915  		}
  3916  		v.reset(OpMIPS64MOVVreg)
  3917  		v.AddArg(x)
  3918  		return true
  3919  	}
  3920  	// match: (MOVWUreg x:(MOVWUreg _))
  3921  	// result: (MOVVreg x)
  3922  	for {
  3923  		x := v_0
  3924  		if x.Op != OpMIPS64MOVWUreg {
  3925  			break
  3926  		}
  3927  		v.reset(OpMIPS64MOVVreg)
  3928  		v.AddArg(x)
  3929  		return true
  3930  	}
  3931  	// match: (MOVWUreg (MOVVconst [c]))
  3932  	// result: (MOVVconst [int64(uint32(c))])
  3933  	for {
  3934  		if v_0.Op != OpMIPS64MOVVconst {
  3935  			break
  3936  		}
  3937  		c := auxIntToInt64(v_0.AuxInt)
  3938  		v.reset(OpMIPS64MOVVconst)
  3939  		v.AuxInt = int64ToAuxInt(int64(uint32(c)))
  3940  		return true
  3941  	}
  3942  	return false
  3943  }
// rewriteValueMIPS64_OpMIPS64MOVWload applies the generated rewrite rules for
// MOVWload: fold an ADDVconst base into the load's offset, merge a MOVVaddr
// base's offset/symbol into the load, and constant-fold a load from a
// read-only symbol into a MOVVconst. Reports whether a rewrite fired.
// NOTE(review): generated code — change _gen/MIPS64.rules, not this file.
func rewriteValueMIPS64_OpMIPS64MOVWload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVWload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVWload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpMIPS64MOVWload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVWload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2))
	// result: (MOVWload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
			break
		}
		v.reset(OpMIPS64MOVWload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVWload [off] {sym} (SB) _)
	// cond: symIsRO(sym)
	// result: (MOVVconst [int64(read32(sym, int64(off), config.ctxt.Arch.ByteOrder))])
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpSB || !(symIsRO(sym)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(read32(sym, int64(off), config.ctxt.Arch.ByteOrder)))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVWreg applies the generated rewrite rules for
// MOVWreg (sign-extend word): when the argument is already a narrower load or
// extension whose result fits in 32 signed bits, the extension is redundant
// and collapses to a plain MOVVreg; a constant argument folds to
// MOVVconst truncated through int32. Reports whether a rewrite fired.
// NOTE(review): generated code — change _gen/MIPS64.rules, not this file.
func rewriteValueMIPS64_OpMIPS64MOVWreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVWreg x:(MOVBload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVBload {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVBUload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVBUload {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVHload {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHUload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVHUload {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVWload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVWload {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVBreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVBreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVBUreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVBUreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVHreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVWreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVWreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg (MOVVconst [c]))
	// result: (MOVVconst [int64(int32(c))])
	for {
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(int32(c)))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVWstore applies the generated rewrite rules for
// MOVWstore: fold ADDVconst/MOVVaddr address arithmetic into the store's
// offset/symbol, turn a stored zero constant into MOVWstorezero, and drop a
// redundant MOVWreg/MOVWUreg extension on the stored value (only the low 32
// bits are written anyway). Reports whether a rewrite fired.
// NOTE(review): generated code — change _gen/MIPS64.rules, not this file.
func rewriteValueMIPS64_OpMIPS64MOVWstore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVWstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVWstore [off1+int32(off2)] {sym} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpMIPS64MOVWstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVWstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2))
	// result: (MOVWstore [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
			break
		}
		v.reset(OpMIPS64MOVWstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVWstore [off] {sym} ptr (MOVVconst [0]) mem)
	// result: (MOVWstorezero [off] {sym} ptr mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		mem := v_2
		v.reset(OpMIPS64MOVWstorezero)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVWstore [off] {sym} ptr (MOVWreg x) mem)
	// result: (MOVWstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpMIPS64MOVWreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpMIPS64MOVWstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVWstore [off] {sym} ptr (MOVWUreg x) mem)
	// result: (MOVWstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpMIPS64MOVWUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpMIPS64MOVWstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVWstorezero applies the generated rewrite
// rules for MOVWstorezero: fold an ADDVconst base into the offset, and merge
// a MOVVaddr base's offset/symbol into the store. Reports whether a rewrite
// fired.
// NOTE(review): generated code — change _gen/MIPS64.rules, not this file.
func rewriteValueMIPS64_OpMIPS64MOVWstorezero(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVWstorezero [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVWstorezero [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpMIPS64MOVWstorezero)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVWstorezero [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2))
	// result: (MOVWstorezero [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
			break
		}
		v.reset(OpMIPS64MOVWstorezero)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64NEGV constant-folds negation of a MOVVconst.
// Reports whether a rewrite fired.
// NOTE(review): generated code — change _gen/MIPS64.rules, not this file.
func rewriteValueMIPS64_OpMIPS64NEGV(v *Value) bool {
	v_0 := v.Args[0]
	// match: (NEGV (MOVVconst [c]))
	// result: (MOVVconst [-c])
	for {
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(-c)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64NOR rewrites NOR with a 32-bit constant operand
// (checked in either argument order, since NOR is commutative) into NORconst.
// Reports whether a rewrite fired.
// NOTE(review): generated code — change _gen/MIPS64.rules, not this file.
func rewriteValueMIPS64_OpMIPS64NOR(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (NOR x (MOVVconst [c]))
	// cond: is32Bit(c)
	// result: (NORconst [c] x)
	for {
		// The inner loop tries both operand orders by swapping v_0/v_1.
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpMIPS64MOVVconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(is32Bit(c)) {
				continue
			}
			v.reset(OpMIPS64NORconst)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg(x)
			return true
		}
		break
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64NORconst constant-folds NORconst of a MOVVconst
// into MOVVconst [^(c|d)]. Reports whether a rewrite fired.
// NOTE(review): generated code — change _gen/MIPS64.rules, not this file.
func rewriteValueMIPS64_OpMIPS64NORconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (NORconst [c] (MOVVconst [d]))
	// result: (MOVVconst [^(c|d)])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(^(c | d))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64OR rewrites OR with a 32-bit constant operand
// (either argument order) into ORconst, and simplifies the idempotent case
// x|x to x. Reports whether a rewrite fired.
// NOTE(review): generated code — change _gen/MIPS64.rules, not this file.
func rewriteValueMIPS64_OpMIPS64OR(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (OR x (MOVVconst [c]))
	// cond: is32Bit(c)
	// result: (ORconst [c] x)
	for {
		// The inner loop tries both operand orders by swapping v_0/v_1.
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpMIPS64MOVVconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(is32Bit(c)) {
				continue
			}
			v.reset(OpMIPS64ORconst)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (OR x x)
	// result: x
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.copyOf(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64ORconst simplifies ORconst: OR with 0 is the
// identity, OR with -1 saturates to the constant -1, OR of a constant folds,
// and nested ORconst values merge when the combined constant still fits in
// 32 bits. Reports whether a rewrite fired.
// NOTE(review): generated code — change _gen/MIPS64.rules, not this file.
func rewriteValueMIPS64_OpMIPS64ORconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (ORconst [0] x)
	// result: x
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		x := v_0
		v.copyOf(x)
		return true
	}
	// match: (ORconst [-1] _)
	// result: (MOVVconst [-1])
	for {
		if auxIntToInt64(v.AuxInt) != -1 {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(-1)
		return true
	}
	// match: (ORconst [c] (MOVVconst [d]))
	// result: (MOVVconst [c|d])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(c | d)
		return true
	}
	// match: (ORconst [c] (ORconst [d] x))
	// cond: is32Bit(c|d)
	// result: (ORconst [c|d] x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64ORconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(is32Bit(c | d)) {
			break
		}
		v.reset(OpMIPS64ORconst)
		v.AuxInt = int64ToAuxInt(c | d)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SGT rewrites a signed set-on-greater-than whose
// first operand is a 32-bit constant into SGTconst. Reports whether a
// rewrite fired.
// NOTE(review): generated code — change _gen/MIPS64.rules, not this file.
func rewriteValueMIPS64_OpMIPS64SGT(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SGT (MOVVconst [c]) x)
	// cond: is32Bit(c)
	// result: (SGTconst [c] x)
	for {
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_1
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64SGTconst)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SGTU rewrites an unsigned set-on-greater-than
// whose first operand is a 32-bit constant into SGTUconst. Reports whether a
// rewrite fired.
// NOTE(review): generated code — change _gen/MIPS64.rules, not this file.
func rewriteValueMIPS64_OpMIPS64SGTU(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SGTU (MOVVconst [c]) x)
	// cond: is32Bit(c)
	// result: (SGTUconst [c] x)
	for {
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_1
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64SGTUconst)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SGTUconst resolves SGTUconst (unsigned c > x) at
// compile time when the outcome is known: against another constant, or
// against an operand whose unsigned range is bounded by its producing op
// (zero-extensions, ANDconst masks, right shifts). The result becomes
// MOVVconst 1 or 0. Reports whether a rewrite fired.
// NOTE(review): generated code — change _gen/MIPS64.rules, not this file.
func rewriteValueMIPS64_OpMIPS64SGTUconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SGTUconst [c] (MOVVconst [d]))
	// cond: uint64(c)>uint64(d)
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		if !(uint64(c) > uint64(d)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// match: (SGTUconst [c] (MOVVconst [d]))
	// cond: uint64(c)<=uint64(d)
	// result: (MOVVconst [0])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		if !(uint64(c) <= uint64(d)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SGTUconst [c] (MOVBUreg _))
	// cond: 0xff < uint64(c)
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVBUreg || !(0xff < uint64(c)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// match: (SGTUconst [c] (MOVHUreg _))
	// cond: 0xffff < uint64(c)
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVHUreg || !(0xffff < uint64(c)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// match: (SGTUconst [c] (ANDconst [m] _))
	// cond: uint64(m) < uint64(c)
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64ANDconst {
			break
		}
		m := auxIntToInt64(v_0.AuxInt)
		if !(uint64(m) < uint64(c)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// match: (SGTUconst [c] (SRLVconst _ [d]))
	// cond: 0 < d && d <= 63 && 0xffffffffffffffff>>uint64(d) < uint64(c)
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64SRLVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		if !(0 < d && d <= 63 && 0xffffffffffffffff>>uint64(d) < uint64(c)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SGTconst resolves SGTconst (signed c > x) at
// compile time when the outcome is known: against another constant, or
// against an operand whose signed range is bounded by its producing op
// (sign/zero extensions, ANDconst masks, right shifts). The result becomes
// MOVVconst 1 or 0. Reports whether a rewrite fired.
// NOTE(review): generated code — change _gen/MIPS64.rules, not this file.
func rewriteValueMIPS64_OpMIPS64SGTconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SGTconst [c] (MOVVconst [d]))
	// cond: c>d
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		if !(c > d) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// match: (SGTconst [c] (MOVVconst [d]))
	// cond: c<=d
	// result: (MOVVconst [0])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		if !(c <= d) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SGTconst [c] (MOVBreg _))
	// cond: 0x7f < c
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVBreg || !(0x7f < c) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// match: (SGTconst [c] (MOVBreg _))
	// cond: c <= -0x80
	// result: (MOVVconst [0])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVBreg || !(c <= -0x80) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SGTconst [c] (MOVBUreg _))
	// cond: 0xff < c
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVBUreg || !(0xff < c) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// match: (SGTconst [c] (MOVBUreg _))
	// cond: c < 0
	// result: (MOVVconst [0])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVBUreg || !(c < 0) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SGTconst [c] (MOVHreg _))
	// cond: 0x7fff < c
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVHreg || !(0x7fff < c) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// match: (SGTconst [c] (MOVHreg _))
	// cond: c <= -0x8000
	// result: (MOVVconst [0])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVHreg || !(c <= -0x8000) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SGTconst [c] (MOVHUreg _))
	// cond: 0xffff < c
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVHUreg || !(0xffff < c) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// match: (SGTconst [c] (MOVHUreg _))
	// cond: c < 0
	// result: (MOVVconst [0])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVHUreg || !(c < 0) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SGTconst [c] (MOVWUreg _))
	// cond: c < 0
	// result: (MOVVconst [0])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVWUreg || !(c < 0) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SGTconst [c] (ANDconst [m] _))
	// cond: 0 <= m && m < c
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64ANDconst {
			break
		}
		m := auxIntToInt64(v_0.AuxInt)
		if !(0 <= m && m < c) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// match: (SGTconst [c] (SRLVconst _ [d]))
	// cond: 0 <= c && 0 < d && d <= 63 && 0xffffffffffffffff>>uint64(d) < uint64(c)
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64SRLVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		if !(0 <= c && 0 < d && d <= 63 && 0xffffffffffffffff>>uint64(d) < uint64(c)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SLLV rewrites a variable left shift with a
// constant count: counts >= 64 produce constant 0, otherwise the shift
// becomes SLLVconst. Reports whether a rewrite fired.
// NOTE(review): generated code — change _gen/MIPS64.rules, not this file.
func rewriteValueMIPS64_OpMIPS64SLLV(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SLLV _ (MOVVconst [c]))
	// cond: uint64(c)>=64
	// result: (MOVVconst [0])
	for {
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(uint64(c) >= 64) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SLLV x (MOVVconst [c]))
	// result: (SLLVconst x [c])
	for {
		x := v_0
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpMIPS64SLLVconst)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SLLVconst constant-folds a left shift of a
// MOVVconst. Reports whether a rewrite fired.
// NOTE(review): generated code — change _gen/MIPS64.rules, not this file.
func rewriteValueMIPS64_OpMIPS64SLLVconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SLLVconst [c] (MOVVconst [d]))
	// result: (MOVVconst [d<<uint64(c)])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(d << uint64(c))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SRAV rewrites a variable arithmetic right shift
// with a constant count: counts >= 64 clamp to a shift by 63 (replicating the
// sign bit), otherwise the shift becomes SRAVconst. Reports whether a rewrite
// fired.
// NOTE(review): generated code — change _gen/MIPS64.rules, not this file.
func rewriteValueMIPS64_OpMIPS64SRAV(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SRAV x (MOVVconst [c]))
	// cond: uint64(c)>=64
	// result: (SRAVconst x [63])
	for {
		x := v_0
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(uint64(c) >= 64) {
			break
		}
		v.reset(OpMIPS64SRAVconst)
		v.AuxInt = int64ToAuxInt(63)
		v.AddArg(x)
		return true
	}
	// match: (SRAV x (MOVVconst [c]))
	// result: (SRAVconst x [c])
	for {
		x := v_0
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpMIPS64SRAVconst)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SRAVconst constant-folds an arithmetic right
// shift of a MOVVconst (Go's signed >> on int64 d). Reports whether a
// rewrite fired.
// NOTE(review): generated code — change _gen/MIPS64.rules, not this file.
func rewriteValueMIPS64_OpMIPS64SRAVconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SRAVconst [c] (MOVVconst [d]))
	// result: (MOVVconst [d>>uint64(c)])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(d >> uint64(c))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SRLV rewrites a variable logical right shift
// with a constant count: counts >= 64 produce constant 0, otherwise the
// shift becomes SRLVconst. Reports whether a rewrite fired.
// NOTE(review): generated code — change _gen/MIPS64.rules, not this file.
func rewriteValueMIPS64_OpMIPS64SRLV(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SRLV _ (MOVVconst [c]))
	// cond: uint64(c)>=64
	// result: (MOVVconst [0])
	for {
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(uint64(c) >= 64) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SRLV x (MOVVconst [c]))
	// result: (SRLVconst x [c])
	for {
		x := v_0
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpMIPS64SRLVconst)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SRLVconst constant-folds a logical right shift
// of a MOVVconst; the uint64 conversion of d makes the shift zero-filling.
// Reports whether a rewrite fired.
// NOTE(review): generated code — change _gen/MIPS64.rules, not this file.
func rewriteValueMIPS64_OpMIPS64SRLVconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SRLVconst [c] (MOVVconst [d]))
	// result: (MOVVconst [int64(uint64(d)>>uint64(c))])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(uint64(d) >> uint64(c)))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SUBV simplifies 64-bit subtraction: a 32-bit
// constant subtrahend becomes SUBVconst, x-x folds to constant 0, and
// 0-x becomes NEGV. Reports whether a rewrite fired.
// NOTE(review): generated code — change _gen/MIPS64.rules, not this file.
func rewriteValueMIPS64_OpMIPS64SUBV(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SUBV x (MOVVconst [c]))
	// cond: is32Bit(c)
	// result: (SUBVconst [c] x)
	for {
		x := v_0
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64SUBVconst)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(x)
		return true
	}
	// match: (SUBV x x)
	// result: (MOVVconst [0])
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SUBV (MOVVconst [0]) x)
	// result: (NEGV x)
	for {
		if v_0.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		x := v_1
		v.reset(OpMIPS64NEGV)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SUBVconst simplifies SUBVconst: subtracting 0 is
// the identity, a constant argument folds to d-c, and chained
// SUBVconst/ADDVconst collapse into a single ADDVconst when the combined
// constant still fits in 32 bits. Reports whether a rewrite fired.
// NOTE(review): generated code — change _gen/MIPS64.rules, not this file.
func rewriteValueMIPS64_OpMIPS64SUBVconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SUBVconst [0] x)
	// result: x
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		x := v_0
		v.copyOf(x)
		return true
	}
	// match: (SUBVconst [c] (MOVVconst [d]))
	// result: (MOVVconst [d-c])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(d - c)
		return true
	}
	// match: (SUBVconst [c] (SUBVconst [d] x))
	// cond: is32Bit(-c-d)
	// result: (ADDVconst [-c-d] x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64SUBVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(is32Bit(-c - d)) {
			break
		}
		v.reset(OpMIPS64ADDVconst)
		v.AuxInt = int64ToAuxInt(-c - d)
		v.AddArg(x)
		return true
	}
	// match: (SUBVconst [c] (ADDVconst [d] x))
	// cond: is32Bit(-c+d)
	// result: (ADDVconst [-c+d] x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(is32Bit(-c + d)) {
			break
		}
		v.reset(OpMIPS64ADDVconst)
		v.AuxInt = int64ToAuxInt(-c + d)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64XOR rewrites XOR with a 32-bit constant operand
// (either argument order) into XORconst, and folds the self-inverse case
// x^x to constant 0. Reports whether a rewrite fired.
// NOTE(review): generated code — change _gen/MIPS64.rules, not this file.
func rewriteValueMIPS64_OpMIPS64XOR(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (XOR x (MOVVconst [c]))
	// cond: is32Bit(c)
	// result: (XORconst [c] x)
	for {
		// The inner loop tries both operand orders by swapping v_0/v_1.
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpMIPS64MOVVconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(is32Bit(c)) {
				continue
			}
			v.reset(OpMIPS64XORconst)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (XOR x x)
	// result: (MOVVconst [0])
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64XORconst simplifies XORconst: XOR with 0 is the
// identity, XOR with -1 is bitwise NOT (lowered as NORconst [0]), a constant
// argument folds, and nested XORconst values merge when the combined
// constant still fits in 32 bits. Reports whether a rewrite fired.
// NOTE(review): generated code — change _gen/MIPS64.rules, not this file.
func rewriteValueMIPS64_OpMIPS64XORconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (XORconst [0] x)
	// result: x
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		x := v_0
		v.copyOf(x)
		return true
	}
	// match: (XORconst [-1] x)
	// result: (NORconst [0] x)
	for {
		if auxIntToInt64(v.AuxInt) != -1 {
			break
		}
		x := v_0
		v.reset(OpMIPS64NORconst)
		v.AuxInt = int64ToAuxInt(0)
		v.AddArg(x)
		return true
	}
	// match: (XORconst [c] (MOVVconst [d]))
	// result: (MOVVconst [c^d])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(c ^ d)
		return true
	}
	// match: (XORconst [c] (XORconst [d] x))
	// cond: is32Bit(c^d)
	// result: (XORconst [c^d] x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64XORconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(is32Bit(c ^ d)) {
			break
		}
		v.reset(OpMIPS64XORconst)
		v.AuxInt = int64ToAuxInt(c ^ d)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMod16 lowers the generic signed 16-bit modulo:
// sign-extend both operands to 64 bits and take the remainder half
// (Select0) of the signed DIVV instruction pair. Always rewrites.
func rewriteValueMIPS64_OpMod16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod16 x y)
	// result: (Select0 (DIVV (SignExt16to64 x) (SignExt16to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
		v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpMod16u lowers the generic unsigned 16-bit modulo:
// zero-extend both operands to 64 bits and take the remainder half
// (Select0) of the unsigned DIVVU instruction pair. Always rewrites.
func rewriteValueMIPS64_OpMod16u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod16u x y)
	// result: (Select0 (DIVVU (ZeroExt16to64 x) (ZeroExt16to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpMod32 lowers the generic signed 32-bit modulo:
// sign-extend both operands to 64 bits and take the remainder half
// (Select0) of the signed DIVV instruction pair. Always rewrites.
func rewriteValueMIPS64_OpMod32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod32 x y)
	// result: (Select0 (DIVV (SignExt32to64 x) (SignExt32to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
		v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpMod32u lowers the generic unsigned 32-bit modulo:
// zero-extend both operands to 64 bits and take the remainder half
// (Select0) of the unsigned DIVVU instruction pair. Always rewrites.
func rewriteValueMIPS64_OpMod32u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod32u x y)
	// result: (Select0 (DIVVU (ZeroExt32to64 x) (ZeroExt32to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpMod64 lowers the generic signed 64-bit modulo to the
// remainder half (Select0) of the signed DIVV instruction pair; no operand
// extension is needed at full width. Always rewrites.
func rewriteValueMIPS64_OpMod64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod64 x y)
	// result: (Select0 (DIVV x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpMod64u lowers the generic unsigned 64-bit modulo to
// the remainder half (Select0) of the unsigned DIVVU instruction pair; no
// operand extension is needed at full width. Always rewrites.
func rewriteValueMIPS64_OpMod64u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod64u x y)
	// result: (Select0 (DIVVU x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpMod8 lowers the generic signed 8-bit modulo:
// sign-extend both operands to 64 bits and take the remainder half
// (Select0) of the signed DIVV instruction pair. Always rewrites.
func rewriteValueMIPS64_OpMod8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod8 x y)
	// result: (Select0 (DIVV (SignExt8to64 x) (SignExt8to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
		v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpMod8u lowers the generic unsigned 8-bit modulo:
// zero-extend both operands to 64 bits and take the remainder half
// (Select0) of the unsigned DIVVU instruction pair. Always rewrites.
func rewriteValueMIPS64_OpMod8u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod8u x y)
	// result: (Select0 (DIVVU (ZeroExt8to64 x) (ZeroExt8to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpMove lowers a generic Move (memmove of a compile-time
// constant size, with the element type's alignment in v.Aux) into MIPS64
// load/store sequences. Rules are tried in order and the first match wins:
// small fixed sizes expand to explicit MOVB/MOVH/MOVW/MOVV load+store chains
// sized by the available alignment; mid-size 8-aligned copies use the Duff's
// device copier; everything else falls back to the generic LoweredMove loop.
// The store chains are threaded through memory innermost-first so each store
// takes the previous store as its memory argument. Returns true if v was
// rewritten.
func rewriteValueMIPS64_OpMove(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	typ := &b.Func.Config.Types
	// match: (Move [0] _ _ mem)
	// result: mem
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		mem := v_2
		// Zero-length copy is a no-op; forward the memory state.
		v.copyOf(mem)
		return true
	}
	// match: (Move [1] dst src mem)
	// result: (MOVBstore dst (MOVBload src mem) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 1 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpMIPS64MOVBstore)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
		v0.AddArg2(src, mem)
		v.AddArg3(dst, v0, mem)
		return true
	}
	// match: (Move [2] {t} dst src mem)
	// cond: t.Alignment()%2 == 0
	// result: (MOVHstore dst (MOVHload src mem) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 2 {
			break
		}
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(t.Alignment()%2 == 0) {
			break
		}
		v.reset(OpMIPS64MOVHstore)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
		v0.AddArg2(src, mem)
		v.AddArg3(dst, v0, mem)
		return true
	}
	// match: (Move [2] dst src mem)
	// result: (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 2 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		// Unaligned 2-byte copy: two byte moves, offsets 1 then 0.
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = int32ToAuxInt(1)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
		v0.AuxInt = int32ToAuxInt(1)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [4] {t} dst src mem)
	// cond: t.Alignment()%4 == 0
	// result: (MOVWstore dst (MOVWload src mem) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 4 {
			break
		}
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(t.Alignment()%4 == 0) {
			break
		}
		v.reset(OpMIPS64MOVWstore)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32)
		v0.AddArg2(src, mem)
		v.AddArg3(dst, v0, mem)
		return true
	}
	// match: (Move [4] {t} dst src mem)
	// cond: t.Alignment()%2 == 0
	// result: (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 4 {
			break
		}
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(t.Alignment()%2 == 0) {
			break
		}
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = int32ToAuxInt(2)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
		v0.AuxInt = int32ToAuxInt(2)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [4] dst src mem)
	// result: (MOVBstore [3] dst (MOVBload [3] src mem) (MOVBstore [2] dst (MOVBload [2] src mem) (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem))))
	for {
		if auxIntToInt64(v.AuxInt) != 4 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		// Unaligned 4-byte copy: four byte moves at offsets 3,2,1,0.
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = int32ToAuxInt(3)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
		v0.AuxInt = int32ToAuxInt(3)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(2)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
		v2.AuxInt = int32ToAuxInt(2)
		v2.AddArg2(src, mem)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
		v3.AuxInt = int32ToAuxInt(1)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
		v4.AuxInt = int32ToAuxInt(1)
		v4.AddArg2(src, mem)
		v5 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
		v6 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
		v6.AddArg2(src, mem)
		v5.AddArg3(dst, v6, mem)
		v3.AddArg3(dst, v4, v5)
		v1.AddArg3(dst, v2, v3)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [8] {t} dst src mem)
	// cond: t.Alignment()%8 == 0
	// result: (MOVVstore dst (MOVVload src mem) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 8 {
			break
		}
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(t.Alignment()%8 == 0) {
			break
		}
		v.reset(OpMIPS64MOVVstore)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64)
		v0.AddArg2(src, mem)
		v.AddArg3(dst, v0, mem)
		return true
	}
	// match: (Move [8] {t} dst src mem)
	// cond: t.Alignment()%4 == 0
	// result: (MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore dst (MOVWload src mem) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 8 {
			break
		}
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(t.Alignment()%4 == 0) {
			break
		}
		v.reset(OpMIPS64MOVWstore)
		v.AuxInt = int32ToAuxInt(4)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32)
		v0.AuxInt = int32ToAuxInt(4)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [8] {t} dst src mem)
	// cond: t.Alignment()%2 == 0
	// result: (MOVHstore [6] dst (MOVHload [6] src mem) (MOVHstore [4] dst (MOVHload [4] src mem) (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem))))
	for {
		if auxIntToInt64(v.AuxInt) != 8 {
			break
		}
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(t.Alignment()%2 == 0) {
			break
		}
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = int32ToAuxInt(6)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
		v0.AuxInt = int32ToAuxInt(6)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(4)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
		v2.AuxInt = int32ToAuxInt(4)
		v2.AddArg2(src, mem)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
		v3.AuxInt = int32ToAuxInt(2)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
		v4.AuxInt = int32ToAuxInt(2)
		v4.AddArg2(src, mem)
		v5 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
		v6 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
		v6.AddArg2(src, mem)
		v5.AddArg3(dst, v6, mem)
		v3.AddArg3(dst, v4, v5)
		v1.AddArg3(dst, v2, v3)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [3] dst src mem)
	// result: (MOVBstore [2] dst (MOVBload [2] src mem) (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem)))
	for {
		if auxIntToInt64(v.AuxInt) != 3 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = int32ToAuxInt(2)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
		v0.AuxInt = int32ToAuxInt(2)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(1)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
		v2.AuxInt = int32ToAuxInt(1)
		v2.AddArg2(src, mem)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
		v4.AddArg2(src, mem)
		v3.AddArg3(dst, v4, mem)
		v1.AddArg3(dst, v2, v3)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [6] {t} dst src mem)
	// cond: t.Alignment()%2 == 0
	// result: (MOVHstore [4] dst (MOVHload [4] src mem) (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem)))
	for {
		if auxIntToInt64(v.AuxInt) != 6 {
			break
		}
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(t.Alignment()%2 == 0) {
			break
		}
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = int32ToAuxInt(4)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
		v0.AuxInt = int32ToAuxInt(4)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(2)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
		v2.AuxInt = int32ToAuxInt(2)
		v2.AddArg2(src, mem)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
		v4.AddArg2(src, mem)
		v3.AddArg3(dst, v4, mem)
		v1.AddArg3(dst, v2, v3)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [12] {t} dst src mem)
	// cond: t.Alignment()%4 == 0
	// result: (MOVWstore [8] dst (MOVWload [8] src mem) (MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore dst (MOVWload src mem) mem)))
	for {
		if auxIntToInt64(v.AuxInt) != 12 {
			break
		}
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(t.Alignment()%4 == 0) {
			break
		}
		v.reset(OpMIPS64MOVWstore)
		v.AuxInt = int32ToAuxInt(8)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32)
		v0.AuxInt = int32ToAuxInt(8)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(4)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32)
		v2.AuxInt = int32ToAuxInt(4)
		v2.AddArg2(src, mem)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32)
		v4.AddArg2(src, mem)
		v3.AddArg3(dst, v4, mem)
		v1.AddArg3(dst, v2, v3)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [16] {t} dst src mem)
	// cond: t.Alignment()%8 == 0
	// result: (MOVVstore [8] dst (MOVVload [8] src mem) (MOVVstore dst (MOVVload src mem) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 16 {
			break
		}
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(t.Alignment()%8 == 0) {
			break
		}
		v.reset(OpMIPS64MOVVstore)
		v.AuxInt = int32ToAuxInt(8)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64)
		v0.AuxInt = int32ToAuxInt(8)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [24] {t} dst src mem)
	// cond: t.Alignment()%8 == 0
	// result: (MOVVstore [16] dst (MOVVload [16] src mem) (MOVVstore [8] dst (MOVVload [8] src mem) (MOVVstore dst (MOVVload src mem) mem)))
	for {
		if auxIntToInt64(v.AuxInt) != 24 {
			break
		}
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(t.Alignment()%8 == 0) {
			break
		}
		v.reset(OpMIPS64MOVVstore)
		v.AuxInt = int32ToAuxInt(16)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64)
		v0.AuxInt = int32ToAuxInt(16)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(8)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64)
		v2.AuxInt = int32ToAuxInt(8)
		v2.AddArg2(src, mem)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64)
		v4.AddArg2(src, mem)
		v3.AddArg3(dst, v4, mem)
		v1.AddArg3(dst, v2, v3)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [s] {t} dst src mem)
	// cond: s%8 == 0 && s >= 24 && s <= 8*128 && t.Alignment()%8 == 0 && !config.noDuffDevice && logLargeCopy(v, s)
	// result: (DUFFCOPY [16 * (128 - s/8)] dst src mem)
	for {
		s := auxIntToInt64(v.AuxInt)
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(s%8 == 0 && s >= 24 && s <= 8*128 && t.Alignment()%8 == 0 && !config.noDuffDevice && logLargeCopy(v, s)) {
			break
		}
		v.reset(OpMIPS64DUFFCOPY)
		// AuxInt is the byte offset to jump to inside the Duff's device
		// routine: 16 bytes of code per 8-byte word, entered so that
		// exactly s/8 words are copied.
		v.AuxInt = int64ToAuxInt(16 * (128 - s/8))
		v.AddArg3(dst, src, mem)
		return true
	}
	// match: (Move [s] {t} dst src mem)
	// cond: s > 24 && logLargeCopy(v, s) || t.Alignment()%8 != 0
	// result: (LoweredMove [t.Alignment()] dst src (ADDVconst <src.Type> src [s-moveSize(t.Alignment(), config)]) mem)
	for {
		s := auxIntToInt64(v.AuxInt)
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(s > 24 && logLargeCopy(v, s) || t.Alignment()%8 != 0) {
			break
		}
		// Generic fallback: runtime copy loop; the third argument is the
		// address of the last element to copy.
		v.reset(OpMIPS64LoweredMove)
		v.AuxInt = int64ToAuxInt(t.Alignment())
		v0 := b.NewValue0(v.Pos, OpMIPS64ADDVconst, src.Type)
		v0.AuxInt = int64ToAuxInt(s - moveSize(t.Alignment(), config))
		v0.AddArg(src)
		v.AddArg4(dst, src, v0, mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMul16 lowers a generic 16-bit multiply to the low
// half (Select1) of the 64-bit unsigned MULVU pair; the low bits are the
// same for signed and unsigned multiplication. Always rewrites.
func rewriteValueMIPS64_OpMul16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mul16 x y)
	// result: (Select1 (MULVU x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpMul32 lowers a generic 32-bit multiply to the low
// half (Select1) of the 64-bit unsigned MULVU pair; the low bits are the
// same for signed and unsigned multiplication. Always rewrites.
func rewriteValueMIPS64_OpMul32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mul32 x y)
	// result: (Select1 (MULVU x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpMul64 lowers a generic 64-bit multiply to the low
// half (Select1) of the 64-bit unsigned MULVU pair; the low bits are the
// same for signed and unsigned multiplication. Always rewrites.
func rewriteValueMIPS64_OpMul64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mul64 x y)
	// result: (Select1 (MULVU x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpMul8 lowers a generic 8-bit multiply to the low
// half (Select1) of the 64-bit unsigned MULVU pair; the low bits are the
// same for signed and unsigned multiplication. Always rewrites.
func rewriteValueMIPS64_OpMul8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mul8 x y)
	// result: (Select1 (MULVU x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpNeq16 lowers a 16-bit inequality test: XOR the
// zero-extended operands and check the result is (unsigned) greater than 0.
// Always rewrites.
//
// NOTE(review): x is extended with ZeroExt16to32 while y uses ZeroExt16to64.
// This asymmetry is inherited from _gen/MIPS64.rules; it appears harmless
// (the upper bits are zero either way) — confirm against the rules file
// before regenerating.
func rewriteValueMIPS64_OpNeq16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Neq16 x y)
	// result: (SGTU (XOR (ZeroExt16to32 x) (ZeroExt16to64 y)) (MOVVconst [0]))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, v3)
		return true
	}
}
// rewriteValueMIPS64_OpNeq32 lowers a 32-bit inequality test: XOR the
// zero-extended operands and check the result is (unsigned) greater than 0.
// Always rewrites.
func rewriteValueMIPS64_OpNeq32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Neq32 x y)
	// result: (SGTU (XOR (ZeroExt32to64 x) (ZeroExt32to64 y)) (MOVVconst [0]))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, v3)
		return true
	}
}
// rewriteValueMIPS64_OpNeq32F lowers a single-precision float inequality:
// compare with CMPEQF and read the FP flag inverted (FPFlagFalse).
// Always rewrites.
func rewriteValueMIPS64_OpNeq32F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Neq32F x y)
	// result: (FPFlagFalse (CMPEQF x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64FPFlagFalse)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPEQF, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpNeq64 lowers a 64-bit inequality test: XOR the
// operands and check the result is (unsigned) greater than 0.
// Always rewrites.
func rewriteValueMIPS64_OpNeq64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Neq64 x y)
	// result: (SGTU (XOR x y) (MOVVconst [0]))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v1.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpNeq64F lowers a double-precision float inequality:
// compare with CMPEQD and read the FP flag inverted (FPFlagFalse).
// Always rewrites.
func rewriteValueMIPS64_OpNeq64F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Neq64F x y)
	// result: (FPFlagFalse (CMPEQD x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64FPFlagFalse)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPEQD, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpNeq8 lowers an 8-bit inequality test: XOR the
// zero-extended operands and check the result is (unsigned) greater than 0.
// Always rewrites.
func rewriteValueMIPS64_OpNeq8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Neq8 x y)
	// result: (SGTU (XOR (ZeroExt8to64 x) (ZeroExt8to64 y)) (MOVVconst [0]))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, v3)
		return true
	}
}
// rewriteValueMIPS64_OpNeqPtr lowers a pointer inequality test exactly like
// Neq64: XOR the operands and check the result is (unsigned) greater than 0.
// Always rewrites.
func rewriteValueMIPS64_OpNeqPtr(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (NeqPtr x y)
	// result: (SGTU (XOR x y) (MOVVconst [0]))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v1.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpNot lowers a boolean NOT to XOR with 1 (booleans are
// 0 or 1, so flipping the low bit negates them). Always rewrites.
func rewriteValueMIPS64_OpNot(v *Value) bool {
	v_0 := v.Args[0]
	// match: (Not x)
	// result: (XORconst [1] x)
	for {
		x := v_0
		v.reset(OpMIPS64XORconst)
		v.AuxInt = int64ToAuxInt(1)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpOffPtr lowers a pointer-offset op: an SP-relative
// offset that fits 32 bits becomes a MOVVaddr (so it can fold into
// addressing modes); everything else becomes a plain ADDVconst.
// Always rewrites.
func rewriteValueMIPS64_OpOffPtr(v *Value) bool {
	v_0 := v.Args[0]
	// match: (OffPtr [off] ptr:(SP))
	// cond: is32Bit(off)
	// result: (MOVVaddr [int32(off)] ptr)
	for {
		off := auxIntToInt64(v.AuxInt)
		ptr := v_0
		if ptr.Op != OpSP || !(is32Bit(off)) {
			break
		}
		v.reset(OpMIPS64MOVVaddr)
		v.AuxInt = int32ToAuxInt(int32(off))
		v.AddArg(ptr)
		return true
	}
	// match: (OffPtr [off] ptr)
	// result: (ADDVconst [off] ptr)
	for {
		off := auxIntToInt64(v.AuxInt)
		ptr := v_0
		v.reset(OpMIPS64ADDVconst)
		v.AuxInt = int64ToAuxInt(off)
		v.AddArg(ptr)
		return true
	}
}
// rewriteValueMIPS64_OpPanicBounds lowers a generic bounds-check panic to
// the architecture-specific variant (A/B/C) selected by boundsABI(kind),
// which determines which registers hold the index and length arguments.
// Returns true if one of the three ABI cases matched.
func rewriteValueMIPS64_OpPanicBounds(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (PanicBounds [kind] x y mem)
	// cond: boundsABI(kind) == 0
	// result: (LoweredPanicBoundsA [kind] x y mem)
	for {
		kind := auxIntToInt64(v.AuxInt)
		x := v_0
		y := v_1
		mem := v_2
		if !(boundsABI(kind) == 0) {
			break
		}
		v.reset(OpMIPS64LoweredPanicBoundsA)
		v.AuxInt = int64ToAuxInt(kind)
		v.AddArg3(x, y, mem)
		return true
	}
	// match: (PanicBounds [kind] x y mem)
	// cond: boundsABI(kind) == 1
	// result: (LoweredPanicBoundsB [kind] x y mem)
	for {
		kind := auxIntToInt64(v.AuxInt)
		x := v_0
		y := v_1
		mem := v_2
		if !(boundsABI(kind) == 1) {
			break
		}
		v.reset(OpMIPS64LoweredPanicBoundsB)
		v.AuxInt = int64ToAuxInt(kind)
		v.AddArg3(x, y, mem)
		return true
	}
	// match: (PanicBounds [kind] x y mem)
	// cond: boundsABI(kind) == 2
	// result: (LoweredPanicBoundsC [kind] x y mem)
	for {
		kind := auxIntToInt64(v.AuxInt)
		x := v_0
		y := v_1
		mem := v_2
		if !(boundsABI(kind) == 2) {
			break
		}
		v.reset(OpMIPS64LoweredPanicBoundsC)
		v.AuxInt = int64ToAuxInt(kind)
		v.AddArg3(x, y, mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpRotateLeft16 lowers a 16-bit rotate by a constant to
// the classic shift pair: (x << (c&15)) | (x >> (-c&15)). Only the
// constant-shift form is handled; variable rotates are left for other rules.
// Returns true if v was rewritten.
func rewriteValueMIPS64_OpRotateLeft16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (RotateLeft16 <t> x (MOVVconst [c]))
	// result: (Or16 (Lsh16x64 <t> x (MOVVconst [c&15])) (Rsh16Ux64 <t> x (MOVVconst [-c&15])))
	for {
		t := v.Type
		x := v_0
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpOr16)
		v0 := b.NewValue0(v.Pos, OpLsh16x64, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v1.AuxInt = int64ToAuxInt(c & 15)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpRsh16Ux64, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		// -c&15 is the complementary shift amount (16-c mod 16).
		v3.AuxInt = int64ToAuxInt(-c & 15)
		v2.AddArg2(x, v3)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpRotateLeft32 lowers a generic 32-bit rotate-left with a
// constant amount to shift ops, since MIPS64 has no rotate instruction:
// (x << (c&31)) | (x >> (-c&31)). Non-constant amounts are left unrewritten.
func rewriteValueMIPS64_OpRotateLeft32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (RotateLeft32 <t> x (MOVVconst [c]))
	// result: (Or32 (Lsh32x64 <t> x (MOVVconst [c&31])) (Rsh32Ux64 <t> x (MOVVconst [-c&31])))
	for {
		t := v.Type
		x := v_0
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpOr32)
		v0 := b.NewValue0(v.Pos, OpLsh32x64, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v1.AuxInt = int64ToAuxInt(c & 31)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpRsh32Ux64, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(-c & 31)
		v2.AddArg2(x, v3)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpRotateLeft64 lowers a generic 64-bit rotate-left with a
// constant amount to shift ops, since MIPS64 has no rotate instruction:
// (x << (c&63)) | (x >> (-c&63)). Non-constant amounts are left unrewritten.
func rewriteValueMIPS64_OpRotateLeft64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (RotateLeft64 <t> x (MOVVconst [c]))
	// result: (Or64 (Lsh64x64 <t> x (MOVVconst [c&63])) (Rsh64Ux64 <t> x (MOVVconst [-c&63])))
	for {
		t := v.Type
		x := v_0
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpOr64)
		v0 := b.NewValue0(v.Pos, OpLsh64x64, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v1.AuxInt = int64ToAuxInt(c & 63)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpRsh64Ux64, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(-c & 63)
		v2.AddArg2(x, v3)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpRotateLeft8 lowers a generic 8-bit rotate-left with a
// constant amount to shift ops, since MIPS64 has no rotate instruction:
// (x << (c&7)) | (x >> (-c&7)). Non-constant amounts are left unrewritten.
func rewriteValueMIPS64_OpRotateLeft8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (RotateLeft8 <t> x (MOVVconst [c]))
	// result: (Or8 (Lsh8x64 <t> x (MOVVconst [c&7])) (Rsh8Ux64 <t> x (MOVVconst [-c&7])))
	for {
		t := v.Type
		x := v_0
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpOr8)
		v0 := b.NewValue0(v.Pos, OpLsh8x64, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v1.AuxInt = int64ToAuxInt(c & 7)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpRsh8Ux64, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(-c & 7)
		v2.AddArg2(x, v3)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpRsh16Ux16 lowers an unsigned 16-bit right shift by a
// 16-bit amount. Both operands are zero-extended to 64 bits; the SRLV result
// is ANDed with NEGV(SGTU(64, y)), a mask that is all ones when y < 64 and
// zero otherwise, so an over-wide shift yields 0 as Go's spec requires.
func rewriteValueMIPS64_OpRsh16Ux16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16Ux16 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> (ZeroExt16to64 x) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v5.AddArg(x)
		v4.AddArg2(v5, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh16Ux32 lowers an unsigned 16-bit right shift by a
// 32-bit amount. Both operands are zero-extended to 64 bits; the SRLV result
// is ANDed with NEGV(SGTU(64, y)), a mask that is all ones when y < 64 and
// zero otherwise, so an over-wide shift yields 0 as Go's spec requires.
func rewriteValueMIPS64_OpRsh16Ux32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16Ux32 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> (ZeroExt16to64 x) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v5.AddArg(x)
		v4.AddArg2(v5, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh16Ux64 lowers an unsigned 16-bit right shift by a
// 64-bit amount (y needs no extension). The SRLV result is ANDed with
// NEGV(SGTU(64, y)), a mask that is all ones when y < 64 and zero otherwise,
// so an over-wide shift yields 0 as Go's spec requires.
func rewriteValueMIPS64_OpRsh16Ux64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16Ux64 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SRLV <t> (ZeroExt16to64 x) y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v1.AddArg2(v2, y)
		v0.AddArg(v1)
		v3 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(x)
		v3.AddArg2(v4, y)
		v.AddArg2(v0, v3)
		return true
	}
}
// rewriteValueMIPS64_OpRsh16Ux8 lowers an unsigned 16-bit right shift by an
// 8-bit amount. Both operands are zero-extended to 64 bits; the SRLV result
// is ANDed with NEGV(SGTU(64, y)), a mask that is all ones when y < 64 and
// zero otherwise, so an over-wide shift yields 0 as Go's spec requires.
func rewriteValueMIPS64_OpRsh16Ux8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16Ux8 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> (ZeroExt16to64 x) (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v5.AddArg(x)
		v4.AddArg2(v5, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh16x16 lowers a signed 16-bit right shift by a 16-bit
// amount. x is sign-extended and y zero-extended to 64 bits. The shift amount
// is ORed with NEGV(SGTU(y, 63)), which is all ones when y > 63, clamping the
// effective SRAV amount so over-wide shifts fill with the sign bit per Go's
// spec.
func rewriteValueMIPS64_OpRsh16x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16x16 <t> x y)
	// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v5.AuxInt = int64ToAuxInt(63)
		v3.AddArg2(v4, v5)
		v2.AddArg(v3)
		v1.AddArg2(v2, v4)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh16x32 lowers a signed 16-bit right shift by a 32-bit
// amount. x is sign-extended and y zero-extended to 64 bits. The shift amount
// is ORed with NEGV(SGTU(y, 63)), which is all ones when y > 63, clamping the
// effective SRAV amount so over-wide shifts fill with the sign bit per Go's
// spec.
func rewriteValueMIPS64_OpRsh16x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16x32 <t> x y)
	// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v5.AuxInt = int64ToAuxInt(63)
		v3.AddArg2(v4, v5)
		v2.AddArg(v3)
		v1.AddArg2(v2, v4)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh16x64 lowers a signed 16-bit right shift by a 64-bit
// amount (y needs no extension). x is sign-extended to 64 bits; the shift
// amount is ORed with NEGV(SGTU(y, 63)), which is all ones when y > 63,
// clamping the effective SRAV amount so over-wide shifts fill with the sign
// bit per Go's spec.
func rewriteValueMIPS64_OpRsh16x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16x64 <t> x y)
	// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(63)
		v3.AddArg2(y, v4)
		v2.AddArg(v3)
		v1.AddArg2(v2, y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh16x8 lowers a signed 16-bit right shift by an 8-bit
// amount. x is sign-extended and y zero-extended to 64 bits. The shift amount
// is ORed with NEGV(SGTU(y, 63)), which is all ones when y > 63, clamping the
// effective SRAV amount so over-wide shifts fill with the sign bit per Go's
// spec.
func rewriteValueMIPS64_OpRsh16x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16x8 <t> x y)
	// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v5.AuxInt = int64ToAuxInt(63)
		v3.AddArg2(v4, v5)
		v2.AddArg(v3)
		v1.AddArg2(v2, v4)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh32Ux16 lowers an unsigned 32-bit right shift by a
// 16-bit amount. Both operands are zero-extended to 64 bits; the SRLV result
// is ANDed with NEGV(SGTU(64, y)), a mask that is all ones when y < 64 and
// zero otherwise, so an over-wide shift yields 0 as Go's spec requires.
func rewriteValueMIPS64_OpRsh32Ux16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32Ux16 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> (ZeroExt32to64 x) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v5.AddArg(x)
		v4.AddArg2(v5, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh32Ux32 lowers an unsigned 32-bit right shift by a
// 32-bit amount. Both operands are zero-extended to 64 bits; the SRLV result
// is ANDed with NEGV(SGTU(64, y)), a mask that is all ones when y < 64 and
// zero otherwise, so an over-wide shift yields 0 as Go's spec requires.
func rewriteValueMIPS64_OpRsh32Ux32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32Ux32 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> (ZeroExt32to64 x) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v5.AddArg(x)
		v4.AddArg2(v5, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh32Ux64 lowers an unsigned 32-bit right shift by a
// 64-bit amount (y needs no extension). The SRLV result is ANDed with
// NEGV(SGTU(64, y)), a mask that is all ones when y < 64 and zero otherwise,
// so an over-wide shift yields 0 as Go's spec requires.
func rewriteValueMIPS64_OpRsh32Ux64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32Ux64 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SRLV <t> (ZeroExt32to64 x) y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v1.AddArg2(v2, y)
		v0.AddArg(v1)
		v3 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(x)
		v3.AddArg2(v4, y)
		v.AddArg2(v0, v3)
		return true
	}
}
// rewriteValueMIPS64_OpRsh32Ux8 lowers an unsigned 32-bit right shift by an
// 8-bit amount. Both operands are zero-extended to 64 bits; the SRLV result
// is ANDed with NEGV(SGTU(64, y)), a mask that is all ones when y < 64 and
// zero otherwise, so an over-wide shift yields 0 as Go's spec requires.
func rewriteValueMIPS64_OpRsh32Ux8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32Ux8 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> (ZeroExt32to64 x) (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v5.AddArg(x)
		v4.AddArg2(v5, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh32x16 lowers a signed 32-bit right shift by a 16-bit
// amount. x is sign-extended and y zero-extended to 64 bits. The shift amount
// is ORed with NEGV(SGTU(y, 63)), which is all ones when y > 63, clamping the
// effective SRAV amount so over-wide shifts fill with the sign bit per Go's
// spec.
func rewriteValueMIPS64_OpRsh32x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32x16 <t> x y)
	// result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v5.AuxInt = int64ToAuxInt(63)
		v3.AddArg2(v4, v5)
		v2.AddArg(v3)
		v1.AddArg2(v2, v4)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh32x32 lowers a signed 32-bit right shift by a 32-bit
// amount. x is sign-extended and y zero-extended to 64 bits. The shift amount
// is ORed with NEGV(SGTU(y, 63)), which is all ones when y > 63, clamping the
// effective SRAV amount so over-wide shifts fill with the sign bit per Go's
// spec.
func rewriteValueMIPS64_OpRsh32x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32x32 <t> x y)
	// result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v5.AuxInt = int64ToAuxInt(63)
		v3.AddArg2(v4, v5)
		v2.AddArg(v3)
		v1.AddArg2(v2, v4)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh32x64 lowers a signed 32-bit right shift by a 64-bit
// amount (y needs no extension). x is sign-extended to 64 bits; the shift
// amount is ORed with NEGV(SGTU(y, 63)), which is all ones when y > 63,
// clamping the effective SRAV amount so over-wide shifts fill with the sign
// bit per Go's spec.
func rewriteValueMIPS64_OpRsh32x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32x64 <t> x y)
	// result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(63)
		v3.AddArg2(y, v4)
		v2.AddArg(v3)
		v1.AddArg2(v2, y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh32x8 lowers a signed 32-bit right shift by an 8-bit
// amount. x is sign-extended and y zero-extended to 64 bits. The shift amount
// is ORed with NEGV(SGTU(y, 63)), which is all ones when y > 63, clamping the
// effective SRAV amount so over-wide shifts fill with the sign bit per Go's
// spec.
func rewriteValueMIPS64_OpRsh32x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32x8 <t> x y)
	// result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v5.AuxInt = int64ToAuxInt(63)
		v3.AddArg2(v4, v5)
		v2.AddArg(v3)
		v1.AddArg2(v2, v4)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh64Ux16 lowers an unsigned 64-bit right shift by a
// 16-bit amount (x is already full width; only y is zero-extended). The SRLV
// result is ANDed with NEGV(SGTU(64, y)), a mask that is all ones when y < 64
// and zero otherwise, so an over-wide shift yields 0 as Go's spec requires.
func rewriteValueMIPS64_OpRsh64Ux16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64Ux16 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> x (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v4.AddArg2(x, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh64Ux32 lowers an unsigned 64-bit right shift by a
// 32-bit amount (x is already full width; only y is zero-extended). The SRLV
// result is ANDed with NEGV(SGTU(64, y)), a mask that is all ones when y < 64
// and zero otherwise, so an over-wide shift yields 0 as Go's spec requires.
func rewriteValueMIPS64_OpRsh64Ux32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64Ux32 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> x (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v4.AddArg2(x, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh64Ux64 lowers an unsigned 64-bit right shift by a
// 64-bit amount (no extensions needed). The SRLV result is ANDed with
// NEGV(SGTU(64, y)), a mask that is all ones when y < 64 and zero otherwise,
// so an over-wide shift yields 0 as Go's spec requires.
func rewriteValueMIPS64_OpRsh64Ux64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64Ux64 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SRLV <t> x y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v1.AddArg2(v2, y)
		v0.AddArg(v1)
		v3 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v3.AddArg2(x, y)
		v.AddArg2(v0, v3)
		return true
	}
}
// rewriteValueMIPS64_OpRsh64Ux8 lowers an unsigned 64-bit right shift by an
// 8-bit amount (x is already full width; only y is zero-extended). The SRLV
// result is ANDed with NEGV(SGTU(64, y)), a mask that is all ones when y < 64
// and zero otherwise, so an over-wide shift yields 0 as Go's spec requires.
func rewriteValueMIPS64_OpRsh64Ux8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64Ux8 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> x (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v4.AddArg2(x, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh64x16 lowers a signed 64-bit right shift by a 16-bit
// amount (x is already full width; only y is zero-extended). The shift amount
// is ORed with NEGV(SGTU(y, 63)), which is all ones when y > 63, clamping the
// effective SRAV amount so over-wide shifts fill with the sign bit per Go's
// spec.
func rewriteValueMIPS64_OpRsh64x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64x16 <t> x y)
	// result: (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(63)
		v2.AddArg2(v3, v4)
		v1.AddArg(v2)
		v0.AddArg2(v1, v3)
		v.AddArg2(x, v0)
		return true
	}
}
// rewriteValueMIPS64_OpRsh64x32 lowers a signed 64-bit right shift by a 32-bit
// amount (x is already full width; only y is zero-extended). The shift amount
// is ORed with NEGV(SGTU(y, 63)), which is all ones when y > 63, clamping the
// effective SRAV amount so over-wide shifts fill with the sign bit per Go's
// spec.
func rewriteValueMIPS64_OpRsh64x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64x32 <t> x y)
	// result: (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(63)
		v2.AddArg2(v3, v4)
		v1.AddArg(v2)
		v0.AddArg2(v1, v3)
		v.AddArg2(x, v0)
		return true
	}
}
// rewriteValueMIPS64_OpRsh64x64 lowers a signed 64-bit right shift by a 64-bit
// amount (no extensions needed). The shift amount is ORed with
// NEGV(SGTU(y, 63)), which is all ones when y > 63, clamping the effective
// SRAV amount so over-wide shifts fill with the sign bit per Go's spec.
func rewriteValueMIPS64_OpRsh64x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64x64 <t> x y)
	// result: (SRAV x (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(63)
		v2.AddArg2(y, v3)
		v1.AddArg(v2)
		v0.AddArg2(v1, y)
		v.AddArg2(x, v0)
		return true
	}
}
// rewriteValueMIPS64_OpRsh64x8 lowers a signed 64-bit right shift by an 8-bit
// amount (x is already full width; only y is zero-extended). The shift amount
// is ORed with NEGV(SGTU(y, 63)), which is all ones when y > 63, clamping the
// effective SRAV amount so over-wide shifts fill with the sign bit per Go's
// spec.
func rewriteValueMIPS64_OpRsh64x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64x8 <t> x y)
	// result: (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(63)
		v2.AddArg2(v3, v4)
		v1.AddArg(v2)
		v0.AddArg2(v1, v3)
		v.AddArg2(x, v0)
		return true
	}
}
// rewriteValueMIPS64_OpRsh8Ux16 lowers an unsigned 8-bit right shift by a
// 16-bit amount. Both operands are zero-extended to 64 bits; the SRLV result
// is ANDed with NEGV(SGTU(64, y)), a mask that is all ones when y < 64 and
// zero otherwise, so an over-wide shift yields 0 as Go's spec requires.
func rewriteValueMIPS64_OpRsh8Ux16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8Ux16 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> (ZeroExt8to64 x) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v5.AddArg(x)
		v4.AddArg2(v5, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh8Ux32 lowers an unsigned 8-bit right shift by a
// 32-bit amount. Both operands are zero-extended to 64 bits; the SRLV result
// is ANDed with NEGV(SGTU(64, y)), a mask that is all ones when y < 64 and
// zero otherwise, so an over-wide shift yields 0 as Go's spec requires.
func rewriteValueMIPS64_OpRsh8Ux32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8Ux32 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> (ZeroExt8to64 x) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v5.AddArg(x)
		v4.AddArg2(v5, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh8Ux64 lowers an unsigned 8-bit right shift by a
// 64-bit amount (y needs no extension). The SRLV result is ANDed with
// NEGV(SGTU(64, y)), a mask that is all ones when y < 64 and zero otherwise,
// so an over-wide shift yields 0 as Go's spec requires.
func rewriteValueMIPS64_OpRsh8Ux64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8Ux64 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SRLV <t> (ZeroExt8to64 x) y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v1.AddArg2(v2, y)
		v0.AddArg(v1)
		v3 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(x)
		v3.AddArg2(v4, y)
		v.AddArg2(v0, v3)
		return true
	}
}
// rewriteValueMIPS64_OpRsh8Ux8 lowers an unsigned 8-bit right shift by an
// 8-bit amount. Both operands are zero-extended to 64 bits; the SRLV result
// is ANDed with NEGV(SGTU(64, y)), a mask that is all ones when y < 64 and
// zero otherwise, so an over-wide shift yields 0 as Go's spec requires.
func rewriteValueMIPS64_OpRsh8Ux8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8Ux8 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> (ZeroExt8to64 x) (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v5.AddArg(x)
		v4.AddArg2(v5, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh8x16 lowers a signed 8-bit right shift by a 16-bit
// amount. x is sign-extended and y zero-extended to 64 bits. The shift amount
// is ORed with NEGV(SGTU(y, 63)), which is all ones when y > 63, clamping the
// effective SRAV amount so over-wide shifts fill with the sign bit per Go's
// spec.
func rewriteValueMIPS64_OpRsh8x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8x16 <t> x y)
	// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v5.AuxInt = int64ToAuxInt(63)
		v3.AddArg2(v4, v5)
		v2.AddArg(v3)
		v1.AddArg2(v2, v4)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh8x32 lowers a signed 8-bit right shift by a 32-bit
// amount. x is sign-extended and y zero-extended to 64 bits. The shift amount
// is ORed with NEGV(SGTU(y, 63)), which is all ones when y > 63, clamping the
// effective SRAV amount so over-wide shifts fill with the sign bit per Go's
// spec.
func rewriteValueMIPS64_OpRsh8x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8x32 <t> x y)
	// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v5.AuxInt = int64ToAuxInt(63)
		v3.AddArg2(v4, v5)
		v2.AddArg(v3)
		v1.AddArg2(v2, v4)
		v.AddArg2(v0, v1)
		return true
	}
}
  6880  func rewriteValueMIPS64_OpRsh8x64(v *Value) bool {
  6881  	v_1 := v.Args[1]
  6882  	v_0 := v.Args[0]
  6883  	b := v.Block
  6884  	typ := &b.Func.Config.Types
  6885  	// match: (Rsh8x64 <t> x y)
  6886  	// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y))
  6887  	for {
  6888  		t := v.Type
  6889  		x := v_0
  6890  		y := v_1
  6891  		v.reset(OpMIPS64SRAV)
  6892  		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
  6893  		v0.AddArg(x)
  6894  		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
  6895  		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
  6896  		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
  6897  		v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
  6898  		v4.AuxInt = int64ToAuxInt(63)
  6899  		v3.AddArg2(y, v4)
  6900  		v2.AddArg(v3)
  6901  		v1.AddArg2(v2, y)
  6902  		v.AddArg2(v0, v1)
  6903  		return true
  6904  	}
  6905  }
  6906  func rewriteValueMIPS64_OpRsh8x8(v *Value) bool {
  6907  	v_1 := v.Args[1]
  6908  	v_0 := v.Args[0]
  6909  	b := v.Block
  6910  	typ := &b.Func.Config.Types
  6911  	// match: (Rsh8x8 <t> x y)
  6912  	// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y)))
  6913  	for {
  6914  		t := v.Type
  6915  		x := v_0
  6916  		y := v_1
  6917  		v.reset(OpMIPS64SRAV)
  6918  		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
  6919  		v0.AddArg(x)
  6920  		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
  6921  		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
  6922  		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
  6923  		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  6924  		v4.AddArg(y)
  6925  		v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
  6926  		v5.AuxInt = int64ToAuxInt(63)
  6927  		v3.AddArg2(v4, v5)
  6928  		v2.AddArg(v3)
  6929  		v1.AddArg2(v2, v4)
  6930  		v.AddArg2(v0, v1)
  6931  		return true
  6932  	}
  6933  }
// rewriteValueMIPS64_OpSelect0 rewrites Select0 values (the first result of
// a two-result op) for MIPS64. The constant-folding rules below show that
// Select0 of DIVV/DIVVU carries the remainder; Select0 of Mul64uover (the
// unsigned product) is lowered to Select1 of MULVU. Rule order matters:
// specific constant cases precede the general constant fold.
// It reports whether v was rewritten.
func rewriteValueMIPS64_OpSelect0(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Select0 (Mul64uover x y))
	// result: (Select1 <typ.UInt64> (MULVU x y))
	for {
		if v_0.Op != OpMul64uover {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		v.reset(OpSelect1)
		v.Type = typ.UInt64
		v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
	// match: (Select0 (DIVVU _ (MOVVconst [1])))
	// result: (MOVVconst [0])
	// Anything mod 1 is 0.
	for {
		if v_0.Op != OpMIPS64DIVVU {
			break
		}
		_ = v_0.Args[1] // (generated) touch Args[1] before indexing into it
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 1 {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (Select0 (DIVVU x (MOVVconst [c])))
	// cond: isPowerOfTwo64(c)
	// result: (ANDconst [c-1] x)
	// Unsigned x % 2^k == x & (2^k - 1).
	for {
		if v_0.Op != OpMIPS64DIVVU {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0_1.AuxInt)
		if !(isPowerOfTwo64(c)) {
			break
		}
		v.reset(OpMIPS64ANDconst)
		v.AuxInt = int64ToAuxInt(c - 1)
		v.AddArg(x)
		return true
	}
	// match: (Select0 (DIVV (MOVVconst [c]) (MOVVconst [d])))
	// cond: d != 0
	// result: (MOVVconst [c%d])
	// Fold a signed remainder of two constants; d != 0 avoids dividing by zero
	// at compile time.
	for {
		if v_0.Op != OpMIPS64DIVV {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0_0.AuxInt)
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0_1.AuxInt)
		if !(d != 0) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(c % d)
		return true
	}
	// match: (Select0 (DIVVU (MOVVconst [c]) (MOVVconst [d])))
	// cond: d != 0
	// result: (MOVVconst [int64(uint64(c)%uint64(d))])
	// Same fold for the unsigned remainder.
	for {
		if v_0.Op != OpMIPS64DIVVU {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0_0.AuxInt)
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0_1.AuxInt)
		if !(d != 0) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(uint64(c) % uint64(d)))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpSelect1 rewrites Select1 values (the second result
// of a two-result op) for MIPS64. The constant-folding rules below show
// that Select1 of DIVV/DIVVU carries the quotient and Select1 of MULVU the
// (truncated) product; Mul64uover's overflow flag becomes a comparison of
// the MULVU's other (Select0) word against zero. Strength reductions for
// multiply/divide by powers of two are applied before general constant
// folds, so rule order matters. It reports whether v was rewritten.
func rewriteValueMIPS64_OpSelect1(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Select1 (Mul64uover x y))
	// result: (SGTU <typ.Bool> (Select0 <typ.UInt64> (MULVU x y)) (MOVVconst <typ.UInt64> [0]))
	for {
		if v_0.Op != OpMul64uover {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		v.reset(OpMIPS64SGTU)
		v.Type = typ.Bool
		v0 := b.NewValue0(v.Pos, OpSelect0, typ.UInt64)
		v1 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v1.AddArg2(x, y)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, v2)
		return true
	}
	// match: (Select1 (MULVU x (MOVVconst [-1])))
	// result: (NEGV x)
	for {
		if v_0.Op != OpMIPS64MULVU {
			break
		}
		_ = v_0.Args[1] // (generated) touch Args[1] before indexing into it
		v_0_0 := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		// MULVU is commutative: try both operand orders.
		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
			x := v_0_0
			if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != -1 {
				continue
			}
			v.reset(OpMIPS64NEGV)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (Select1 (MULVU _ (MOVVconst [0])))
	// result: (MOVVconst [0])
	for {
		if v_0.Op != OpMIPS64MULVU {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
			if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 0 {
				continue
			}
			v.reset(OpMIPS64MOVVconst)
			v.AuxInt = int64ToAuxInt(0)
			return true
		}
		break
	}
	// match: (Select1 (MULVU x (MOVVconst [1])))
	// result: x
	for {
		if v_0.Op != OpMIPS64MULVU {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
			x := v_0_0
			if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 1 {
				continue
			}
			v.copyOf(x)
			return true
		}
		break
	}
	// match: (Select1 (MULVU x (MOVVconst [c])))
	// cond: isPowerOfTwo64(c)
	// result: (SLLVconst [log64(c)] x)
	// Multiply by 2^k becomes a left shift by k.
	for {
		if v_0.Op != OpMIPS64MULVU {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
			x := v_0_0
			if v_0_1.Op != OpMIPS64MOVVconst {
				continue
			}
			c := auxIntToInt64(v_0_1.AuxInt)
			if !(isPowerOfTwo64(c)) {
				continue
			}
			v.reset(OpMIPS64SLLVconst)
			v.AuxInt = int64ToAuxInt(log64(c))
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (Select1 (DIVVU x (MOVVconst [1])))
	// result: x
	for {
		if v_0.Op != OpMIPS64DIVVU {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 1 {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (Select1 (DIVVU x (MOVVconst [c])))
	// cond: isPowerOfTwo64(c)
	// result: (SRLVconst [log64(c)] x)
	// Unsigned divide by 2^k becomes a logical right shift by k.
	for {
		if v_0.Op != OpMIPS64DIVVU {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0_1.AuxInt)
		if !(isPowerOfTwo64(c)) {
			break
		}
		v.reset(OpMIPS64SRLVconst)
		v.AuxInt = int64ToAuxInt(log64(c))
		v.AddArg(x)
		return true
	}
	// match: (Select1 (MULVU (MOVVconst [c]) (MOVVconst [d])))
	// result: (MOVVconst [c*d])
	for {
		if v_0.Op != OpMIPS64MULVU {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
			if v_0_0.Op != OpMIPS64MOVVconst {
				continue
			}
			c := auxIntToInt64(v_0_0.AuxInt)
			if v_0_1.Op != OpMIPS64MOVVconst {
				continue
			}
			d := auxIntToInt64(v_0_1.AuxInt)
			v.reset(OpMIPS64MOVVconst)
			v.AuxInt = int64ToAuxInt(c * d)
			return true
		}
		break
	}
	// match: (Select1 (DIVV (MOVVconst [c]) (MOVVconst [d])))
	// cond: d != 0
	// result: (MOVVconst [c/d])
	// Fold a signed quotient of two constants; d != 0 avoids dividing by zero
	// at compile time.
	for {
		if v_0.Op != OpMIPS64DIVV {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0_0.AuxInt)
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0_1.AuxInt)
		if !(d != 0) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(c / d)
		return true
	}
	// match: (Select1 (DIVVU (MOVVconst [c]) (MOVVconst [d])))
	// cond: d != 0
	// result: (MOVVconst [int64(uint64(c)/uint64(d))])
	// Same fold for the unsigned quotient.
	for {
		if v_0.Op != OpMIPS64DIVVU {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0_0.AuxInt)
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0_1.AuxInt)
		if !(d != 0) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(uint64(c) / uint64(d)))
		return true
	}
	return false
}
  7262  func rewriteValueMIPS64_OpSlicemask(v *Value) bool {
  7263  	v_0 := v.Args[0]
  7264  	b := v.Block
  7265  	// match: (Slicemask <t> x)
  7266  	// result: (SRAVconst (NEGV <t> x) [63])
  7267  	for {
  7268  		t := v.Type
  7269  		x := v_0
  7270  		v.reset(OpMIPS64SRAVconst)
  7271  		v.AuxInt = int64ToAuxInt(63)
  7272  		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
  7273  		v0.AddArg(x)
  7274  		v.AddArg(v0)
  7275  		return true
  7276  	}
  7277  }
// rewriteValueMIPS64_OpStore lowers the generic Store op to a size-specific
// MIPS64 store instruction: MOVB/MOVH/MOVW/MOVV for 1/2/4/8-byte integer
// stores, MOVF/MOVD for 4/8-byte float stores. The choice is driven by the
// stored type's size (from v.Aux) and whether the stored value is a float.
// It reports whether v was rewritten.
func rewriteValueMIPS64_OpStore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 1
	// result: (MOVBstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 1) {
			break
		}
		v.reset(OpMIPS64MOVBstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 2
	// result: (MOVHstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 2) {
			break
		}
		v.reset(OpMIPS64MOVHstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 4 && !is32BitFloat(val.Type)
	// result: (MOVWstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 4 && !is32BitFloat(val.Type)) {
			break
		}
		v.reset(OpMIPS64MOVWstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 8 && !is64BitFloat(val.Type)
	// result: (MOVVstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 8 && !is64BitFloat(val.Type)) {
			break
		}
		v.reset(OpMIPS64MOVVstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 4 && is32BitFloat(val.Type)
	// result: (MOVFstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 4 && is32BitFloat(val.Type)) {
			break
		}
		v.reset(OpMIPS64MOVFstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 8 && is64BitFloat(val.Type)
	// result: (MOVDstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 8 && is64BitFloat(val.Type)) {
			break
		}
		v.reset(OpMIPS64MOVDstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpZero lowers the generic Zero op (clear AuxInt bytes
// at ptr) to MIPS64 code. Small and well-aligned sizes become explicit
// chains of zero-constant stores; 8-byte-aligned sizes from >24 up to 8*128
// bytes use the Duff's-device DUFFZERO (when enabled); everything else falls
// back to the LoweredZero runtime loop. Rules for the same size are ordered
// from most- to least-aligned, so the first matching alignment wins.
// It reports whether v was rewritten.
func rewriteValueMIPS64_OpZero(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	typ := &b.Func.Config.Types
	// match: (Zero [0] _ mem)
	// result: mem
	// Zeroing nothing is a no-op: forward the memory state.
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		mem := v_1
		v.copyOf(mem)
		return true
	}
	// match: (Zero [1] ptr mem)
	// result: (MOVBstore ptr (MOVVconst [0]) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 1 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpMIPS64MOVBstore)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg3(ptr, v0, mem)
		return true
	}
	// match: (Zero [2] {t} ptr mem)
	// cond: t.Alignment()%2 == 0
	// result: (MOVHstore ptr (MOVVconst [0]) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 2 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%2 == 0) {
			break
		}
		v.reset(OpMIPS64MOVHstore)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg3(ptr, v0, mem)
		return true
	}
	// match: (Zero [2] ptr mem)
	// result: (MOVBstore [1] ptr (MOVVconst [0]) (MOVBstore [0] ptr (MOVVconst [0]) mem))
	// Unaligned 2 bytes: two byte stores, chained through memory.
	for {
		if auxIntToInt64(v.AuxInt) != 2 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = int32ToAuxInt(1)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(0)
		v1.AddArg3(ptr, v0, mem)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [4] {t} ptr mem)
	// cond: t.Alignment()%4 == 0
	// result: (MOVWstore ptr (MOVVconst [0]) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 4 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%4 == 0) {
			break
		}
		v.reset(OpMIPS64MOVWstore)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg3(ptr, v0, mem)
		return true
	}
	// match: (Zero [4] {t} ptr mem)
	// cond: t.Alignment()%2 == 0
	// result: (MOVHstore [2] ptr (MOVVconst [0]) (MOVHstore [0] ptr (MOVVconst [0]) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 4 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%2 == 0) {
			break
		}
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = int32ToAuxInt(2)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(0)
		v1.AddArg3(ptr, v0, mem)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [4] ptr mem)
	// result: (MOVBstore [3] ptr (MOVVconst [0]) (MOVBstore [2] ptr (MOVVconst [0]) (MOVBstore [1] ptr (MOVVconst [0]) (MOVBstore [0] ptr (MOVVconst [0]) mem))))
	// Unaligned 4 bytes: four byte stores, chained through memory.
	for {
		if auxIntToInt64(v.AuxInt) != 4 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = int32ToAuxInt(3)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(2)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
		v2.AuxInt = int32ToAuxInt(1)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
		v3.AuxInt = int32ToAuxInt(0)
		v3.AddArg3(ptr, v0, mem)
		v2.AddArg3(ptr, v0, v3)
		v1.AddArg3(ptr, v0, v2)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [8] {t} ptr mem)
	// cond: t.Alignment()%8 == 0
	// result: (MOVVstore ptr (MOVVconst [0]) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 8 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%8 == 0) {
			break
		}
		v.reset(OpMIPS64MOVVstore)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg3(ptr, v0, mem)
		return true
	}
	// match: (Zero [8] {t} ptr mem)
	// cond: t.Alignment()%4 == 0
	// result: (MOVWstore [4] ptr (MOVVconst [0]) (MOVWstore [0] ptr (MOVVconst [0]) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 8 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%4 == 0) {
			break
		}
		v.reset(OpMIPS64MOVWstore)
		v.AuxInt = int32ToAuxInt(4)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(0)
		v1.AddArg3(ptr, v0, mem)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [8] {t} ptr mem)
	// cond: t.Alignment()%2 == 0
	// result: (MOVHstore [6] ptr (MOVVconst [0]) (MOVHstore [4] ptr (MOVVconst [0]) (MOVHstore [2] ptr (MOVVconst [0]) (MOVHstore [0] ptr (MOVVconst [0]) mem))))
	for {
		if auxIntToInt64(v.AuxInt) != 8 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%2 == 0) {
			break
		}
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = int32ToAuxInt(6)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(4)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
		v2.AuxInt = int32ToAuxInt(2)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
		v3.AuxInt = int32ToAuxInt(0)
		v3.AddArg3(ptr, v0, mem)
		v2.AddArg3(ptr, v0, v3)
		v1.AddArg3(ptr, v0, v2)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [3] ptr mem)
	// result: (MOVBstore [2] ptr (MOVVconst [0]) (MOVBstore [1] ptr (MOVVconst [0]) (MOVBstore [0] ptr (MOVVconst [0]) mem)))
	for {
		if auxIntToInt64(v.AuxInt) != 3 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = int32ToAuxInt(2)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(1)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
		v2.AuxInt = int32ToAuxInt(0)
		v2.AddArg3(ptr, v0, mem)
		v1.AddArg3(ptr, v0, v2)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [6] {t} ptr mem)
	// cond: t.Alignment()%2 == 0
	// result: (MOVHstore [4] ptr (MOVVconst [0]) (MOVHstore [2] ptr (MOVVconst [0]) (MOVHstore [0] ptr (MOVVconst [0]) mem)))
	for {
		if auxIntToInt64(v.AuxInt) != 6 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%2 == 0) {
			break
		}
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = int32ToAuxInt(4)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(2)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
		v2.AuxInt = int32ToAuxInt(0)
		v2.AddArg3(ptr, v0, mem)
		v1.AddArg3(ptr, v0, v2)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [12] {t} ptr mem)
	// cond: t.Alignment()%4 == 0
	// result: (MOVWstore [8] ptr (MOVVconst [0]) (MOVWstore [4] ptr (MOVVconst [0]) (MOVWstore [0] ptr (MOVVconst [0]) mem)))
	for {
		if auxIntToInt64(v.AuxInt) != 12 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%4 == 0) {
			break
		}
		v.reset(OpMIPS64MOVWstore)
		v.AuxInt = int32ToAuxInt(8)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(4)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem)
		v2.AuxInt = int32ToAuxInt(0)
		v2.AddArg3(ptr, v0, mem)
		v1.AddArg3(ptr, v0, v2)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [16] {t} ptr mem)
	// cond: t.Alignment()%8 == 0
	// result: (MOVVstore [8] ptr (MOVVconst [0]) (MOVVstore [0] ptr (MOVVconst [0]) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 16 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%8 == 0) {
			break
		}
		v.reset(OpMIPS64MOVVstore)
		v.AuxInt = int32ToAuxInt(8)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(0)
		v1.AddArg3(ptr, v0, mem)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [24] {t} ptr mem)
	// cond: t.Alignment()%8 == 0
	// result: (MOVVstore [16] ptr (MOVVconst [0]) (MOVVstore [8] ptr (MOVVconst [0]) (MOVVstore [0] ptr (MOVVconst [0]) mem)))
	for {
		if auxIntToInt64(v.AuxInt) != 24 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%8 == 0) {
			break
		}
		v.reset(OpMIPS64MOVVstore)
		v.AuxInt = int32ToAuxInt(16)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(8)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem)
		v2.AuxInt = int32ToAuxInt(0)
		v2.AddArg3(ptr, v0, mem)
		v1.AddArg3(ptr, v0, v2)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [s] {t} ptr mem)
	// cond: s%8 == 0 && s > 24 && s <= 8*128 && t.Alignment()%8 == 0 && !config.noDuffDevice
	// result: (DUFFZERO [8 * (128 - s/8)] ptr mem)
	// The AuxInt selects the entry point into the duffzero routine so that
	// exactly s bytes get cleared (larger s jumps in earlier).
	for {
		s := auxIntToInt64(v.AuxInt)
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(s%8 == 0 && s > 24 && s <= 8*128 && t.Alignment()%8 == 0 && !config.noDuffDevice) {
			break
		}
		v.reset(OpMIPS64DUFFZERO)
		v.AuxInt = int64ToAuxInt(8 * (128 - s/8))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Zero [s] {t} ptr mem)
	// cond: (s > 8*128 || config.noDuffDevice) || t.Alignment()%8 != 0
	// result: (LoweredZero [t.Alignment()] ptr (ADDVconst <ptr.Type> ptr [s-moveSize(t.Alignment(), config)]) mem)
	// Generic loop fallback: AuxInt carries the alignment and the second
	// operand is ptr advanced by s-moveSize bytes — presumably the address of
	// the last chunk to clear, used as the loop bound (see LoweredZero's op
	// definition for the exact contract).
	for {
		s := auxIntToInt64(v.AuxInt)
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !((s > 8*128 || config.noDuffDevice) || t.Alignment()%8 != 0) {
			break
		}
		v.reset(OpMIPS64LoweredZero)
		v.AuxInt = int64ToAuxInt(t.Alignment())
		v0 := b.NewValue0(v.Pos, OpMIPS64ADDVconst, ptr.Type)
		v0.AuxInt = int64ToAuxInt(s - moveSize(t.Alignment(), config))
		v0.AddArg(ptr)
		v.AddArg3(ptr, v0, mem)
		return true
	}
	return false
}
  7737  func rewriteBlockMIPS64(b *Block) bool {
  7738  	switch b.Kind {
  7739  	case BlockMIPS64EQ:
  7740  		// match: (EQ (FPFlagTrue cmp) yes no)
  7741  		// result: (FPF cmp yes no)
  7742  		for b.Controls[0].Op == OpMIPS64FPFlagTrue {
  7743  			v_0 := b.Controls[0]
  7744  			cmp := v_0.Args[0]
  7745  			b.resetWithControl(BlockMIPS64FPF, cmp)
  7746  			return true
  7747  		}
  7748  		// match: (EQ (FPFlagFalse cmp) yes no)
  7749  		// result: (FPT cmp yes no)
  7750  		for b.Controls[0].Op == OpMIPS64FPFlagFalse {
  7751  			v_0 := b.Controls[0]
  7752  			cmp := v_0.Args[0]
  7753  			b.resetWithControl(BlockMIPS64FPT, cmp)
  7754  			return true
  7755  		}
  7756  		// match: (EQ (XORconst [1] cmp:(SGT _ _)) yes no)
  7757  		// result: (NE cmp yes no)
  7758  		for b.Controls[0].Op == OpMIPS64XORconst {
  7759  			v_0 := b.Controls[0]
  7760  			if auxIntToInt64(v_0.AuxInt) != 1 {
  7761  				break
  7762  			}
  7763  			cmp := v_0.Args[0]
  7764  			if cmp.Op != OpMIPS64SGT {
  7765  				break
  7766  			}
  7767  			b.resetWithControl(BlockMIPS64NE, cmp)
  7768  			return true
  7769  		}
  7770  		// match: (EQ (XORconst [1] cmp:(SGTU _ _)) yes no)
  7771  		// result: (NE cmp yes no)
  7772  		for b.Controls[0].Op == OpMIPS64XORconst {
  7773  			v_0 := b.Controls[0]
  7774  			if auxIntToInt64(v_0.AuxInt) != 1 {
  7775  				break
  7776  			}
  7777  			cmp := v_0.Args[0]
  7778  			if cmp.Op != OpMIPS64SGTU {
  7779  				break
  7780  			}
  7781  			b.resetWithControl(BlockMIPS64NE, cmp)
  7782  			return true
  7783  		}
  7784  		// match: (EQ (XORconst [1] cmp:(SGTconst _)) yes no)
  7785  		// result: (NE cmp yes no)
  7786  		for b.Controls[0].Op == OpMIPS64XORconst {
  7787  			v_0 := b.Controls[0]
  7788  			if auxIntToInt64(v_0.AuxInt) != 1 {
  7789  				break
  7790  			}
  7791  			cmp := v_0.Args[0]
  7792  			if cmp.Op != OpMIPS64SGTconst {
  7793  				break
  7794  			}
  7795  			b.resetWithControl(BlockMIPS64NE, cmp)
  7796  			return true
  7797  		}
  7798  		// match: (EQ (XORconst [1] cmp:(SGTUconst _)) yes no)
  7799  		// result: (NE cmp yes no)
  7800  		for b.Controls[0].Op == OpMIPS64XORconst {
  7801  			v_0 := b.Controls[0]
  7802  			if auxIntToInt64(v_0.AuxInt) != 1 {
  7803  				break
  7804  			}
  7805  			cmp := v_0.Args[0]
  7806  			if cmp.Op != OpMIPS64SGTUconst {
  7807  				break
  7808  			}
  7809  			b.resetWithControl(BlockMIPS64NE, cmp)
  7810  			return true
  7811  		}
  7812  		// match: (EQ (SGTUconst [1] x) yes no)
  7813  		// result: (NE x yes no)
  7814  		for b.Controls[0].Op == OpMIPS64SGTUconst {
  7815  			v_0 := b.Controls[0]
  7816  			if auxIntToInt64(v_0.AuxInt) != 1 {
  7817  				break
  7818  			}
  7819  			x := v_0.Args[0]
  7820  			b.resetWithControl(BlockMIPS64NE, x)
  7821  			return true
  7822  		}
  7823  		// match: (EQ (SGTU x (MOVVconst [0])) yes no)
  7824  		// result: (EQ x yes no)
  7825  		for b.Controls[0].Op == OpMIPS64SGTU {
  7826  			v_0 := b.Controls[0]
  7827  			_ = v_0.Args[1]
  7828  			x := v_0.Args[0]
  7829  			v_0_1 := v_0.Args[1]
  7830  			if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 0 {
  7831  				break
  7832  			}
  7833  			b.resetWithControl(BlockMIPS64EQ, x)
  7834  			return true
  7835  		}
  7836  		// match: (EQ (SGTconst [0] x) yes no)
  7837  		// result: (GEZ x yes no)
  7838  		for b.Controls[0].Op == OpMIPS64SGTconst {
  7839  			v_0 := b.Controls[0]
  7840  			if auxIntToInt64(v_0.AuxInt) != 0 {
  7841  				break
  7842  			}
  7843  			x := v_0.Args[0]
  7844  			b.resetWithControl(BlockMIPS64GEZ, x)
  7845  			return true
  7846  		}
  7847  		// match: (EQ (SGT x (MOVVconst [0])) yes no)
  7848  		// result: (LEZ x yes no)
  7849  		for b.Controls[0].Op == OpMIPS64SGT {
  7850  			v_0 := b.Controls[0]
  7851  			_ = v_0.Args[1]
  7852  			x := v_0.Args[0]
  7853  			v_0_1 := v_0.Args[1]
  7854  			if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 0 {
  7855  				break
  7856  			}
  7857  			b.resetWithControl(BlockMIPS64LEZ, x)
  7858  			return true
  7859  		}
  7860  		// match: (EQ (MOVVconst [0]) yes no)
  7861  		// result: (First yes no)
  7862  		for b.Controls[0].Op == OpMIPS64MOVVconst {
  7863  			v_0 := b.Controls[0]
  7864  			if auxIntToInt64(v_0.AuxInt) != 0 {
  7865  				break
  7866  			}
  7867  			b.Reset(BlockFirst)
  7868  			return true
  7869  		}
  7870  		// match: (EQ (MOVVconst [c]) yes no)
  7871  		// cond: c != 0
  7872  		// result: (First no yes)
  7873  		for b.Controls[0].Op == OpMIPS64MOVVconst {
  7874  			v_0 := b.Controls[0]
  7875  			c := auxIntToInt64(v_0.AuxInt)
  7876  			if !(c != 0) {
  7877  				break
  7878  			}
  7879  			b.Reset(BlockFirst)
  7880  			b.swapSuccessors()
  7881  			return true
  7882  		}
  7883  	case BlockMIPS64GEZ:
  7884  		// match: (GEZ (MOVVconst [c]) yes no)
  7885  		// cond: c >= 0
  7886  		// result: (First yes no)
  7887  		for b.Controls[0].Op == OpMIPS64MOVVconst {
  7888  			v_0 := b.Controls[0]
  7889  			c := auxIntToInt64(v_0.AuxInt)
  7890  			if !(c >= 0) {
  7891  				break
  7892  			}
  7893  			b.Reset(BlockFirst)
  7894  			return true
  7895  		}
  7896  		// match: (GEZ (MOVVconst [c]) yes no)
  7897  		// cond: c < 0
  7898  		// result: (First no yes)
  7899  		for b.Controls[0].Op == OpMIPS64MOVVconst {
  7900  			v_0 := b.Controls[0]
  7901  			c := auxIntToInt64(v_0.AuxInt)
  7902  			if !(c < 0) {
  7903  				break
  7904  			}
  7905  			b.Reset(BlockFirst)
  7906  			b.swapSuccessors()
  7907  			return true
  7908  		}
  7909  	case BlockMIPS64GTZ:
  7910  		// match: (GTZ (MOVVconst [c]) yes no)
  7911  		// cond: c > 0
  7912  		// result: (First yes no)
  7913  		for b.Controls[0].Op == OpMIPS64MOVVconst {
  7914  			v_0 := b.Controls[0]
  7915  			c := auxIntToInt64(v_0.AuxInt)
  7916  			if !(c > 0) {
  7917  				break
  7918  			}
  7919  			b.Reset(BlockFirst)
  7920  			return true
  7921  		}
  7922  		// match: (GTZ (MOVVconst [c]) yes no)
  7923  		// cond: c <= 0
  7924  		// result: (First no yes)
  7925  		for b.Controls[0].Op == OpMIPS64MOVVconst {
  7926  			v_0 := b.Controls[0]
  7927  			c := auxIntToInt64(v_0.AuxInt)
  7928  			if !(c <= 0) {
  7929  				break
  7930  			}
  7931  			b.Reset(BlockFirst)
  7932  			b.swapSuccessors()
  7933  			return true
  7934  		}
  7935  	case BlockIf:
  7936  		// match: (If cond yes no)
  7937  		// result: (NE cond yes no)
  7938  		for {
  7939  			cond := b.Controls[0]
  7940  			b.resetWithControl(BlockMIPS64NE, cond)
  7941  			return true
  7942  		}
  7943  	case BlockMIPS64LEZ:
  7944  		// match: (LEZ (MOVVconst [c]) yes no)
  7945  		// cond: c <= 0
  7946  		// result: (First yes no)
  7947  		for b.Controls[0].Op == OpMIPS64MOVVconst {
  7948  			v_0 := b.Controls[0]
  7949  			c := auxIntToInt64(v_0.AuxInt)
  7950  			if !(c <= 0) {
  7951  				break
  7952  			}
  7953  			b.Reset(BlockFirst)
  7954  			return true
  7955  		}
  7956  		// match: (LEZ (MOVVconst [c]) yes no)
  7957  		// cond: c > 0
  7958  		// result: (First no yes)
  7959  		for b.Controls[0].Op == OpMIPS64MOVVconst {
  7960  			v_0 := b.Controls[0]
  7961  			c := auxIntToInt64(v_0.AuxInt)
  7962  			if !(c > 0) {
  7963  				break
  7964  			}
  7965  			b.Reset(BlockFirst)
  7966  			b.swapSuccessors()
  7967  			return true
  7968  		}
  7969  	case BlockMIPS64LTZ:
  7970  		// match: (LTZ (MOVVconst [c]) yes no)
  7971  		// cond: c < 0
  7972  		// result: (First yes no)
  7973  		for b.Controls[0].Op == OpMIPS64MOVVconst {
  7974  			v_0 := b.Controls[0]
  7975  			c := auxIntToInt64(v_0.AuxInt)
  7976  			if !(c < 0) {
  7977  				break
  7978  			}
  7979  			b.Reset(BlockFirst)
  7980  			return true
  7981  		}
  7982  		// match: (LTZ (MOVVconst [c]) yes no)
  7983  		// cond: c >= 0
  7984  		// result: (First no yes)
  7985  		for b.Controls[0].Op == OpMIPS64MOVVconst {
  7986  			v_0 := b.Controls[0]
  7987  			c := auxIntToInt64(v_0.AuxInt)
  7988  			if !(c >= 0) {
  7989  				break
  7990  			}
  7991  			b.Reset(BlockFirst)
  7992  			b.swapSuccessors()
  7993  			return true
  7994  		}
  7995  	case BlockMIPS64NE:
  7996  		// match: (NE (FPFlagTrue cmp) yes no)
  7997  		// result: (FPT cmp yes no)
  7998  		for b.Controls[0].Op == OpMIPS64FPFlagTrue {
  7999  			v_0 := b.Controls[0]
  8000  			cmp := v_0.Args[0]
  8001  			b.resetWithControl(BlockMIPS64FPT, cmp)
  8002  			return true
  8003  		}
  8004  		// match: (NE (FPFlagFalse cmp) yes no)
  8005  		// result: (FPF cmp yes no)
  8006  		for b.Controls[0].Op == OpMIPS64FPFlagFalse {
  8007  			v_0 := b.Controls[0]
  8008  			cmp := v_0.Args[0]
  8009  			b.resetWithControl(BlockMIPS64FPF, cmp)
  8010  			return true
  8011  		}
  8012  		// match: (NE (XORconst [1] cmp:(SGT _ _)) yes no)
  8013  		// result: (EQ cmp yes no)
  8014  		for b.Controls[0].Op == OpMIPS64XORconst {
  8015  			v_0 := b.Controls[0]
  8016  			if auxIntToInt64(v_0.AuxInt) != 1 {
  8017  				break
  8018  			}
  8019  			cmp := v_0.Args[0]
  8020  			if cmp.Op != OpMIPS64SGT {
  8021  				break
  8022  			}
  8023  			b.resetWithControl(BlockMIPS64EQ, cmp)
  8024  			return true
  8025  		}
  8026  		// match: (NE (XORconst [1] cmp:(SGTU _ _)) yes no)
  8027  		// result: (EQ cmp yes no)
  8028  		for b.Controls[0].Op == OpMIPS64XORconst {
  8029  			v_0 := b.Controls[0]
  8030  			if auxIntToInt64(v_0.AuxInt) != 1 {
  8031  				break
  8032  			}
  8033  			cmp := v_0.Args[0]
  8034  			if cmp.Op != OpMIPS64SGTU {
  8035  				break
  8036  			}
  8037  			b.resetWithControl(BlockMIPS64EQ, cmp)
  8038  			return true
  8039  		}
  8040  		// match: (NE (XORconst [1] cmp:(SGTconst _)) yes no)
  8041  		// result: (EQ cmp yes no)
  8042  		for b.Controls[0].Op == OpMIPS64XORconst {
  8043  			v_0 := b.Controls[0]
  8044  			if auxIntToInt64(v_0.AuxInt) != 1 {
  8045  				break
  8046  			}
  8047  			cmp := v_0.Args[0]
  8048  			if cmp.Op != OpMIPS64SGTconst {
  8049  				break
  8050  			}
  8051  			b.resetWithControl(BlockMIPS64EQ, cmp)
  8052  			return true
  8053  		}
  8054  		// match: (NE (XORconst [1] cmp:(SGTUconst _)) yes no)
  8055  		// result: (EQ cmp yes no)
  8056  		for b.Controls[0].Op == OpMIPS64XORconst {
  8057  			v_0 := b.Controls[0]
  8058  			if auxIntToInt64(v_0.AuxInt) != 1 {
  8059  				break
  8060  			}
  8061  			cmp := v_0.Args[0]
  8062  			if cmp.Op != OpMIPS64SGTUconst {
  8063  				break
  8064  			}
  8065  			b.resetWithControl(BlockMIPS64EQ, cmp)
  8066  			return true
  8067  		}
  8068  		// match: (NE (SGTUconst [1] x) yes no)
  8069  		// result: (EQ x yes no)
  8070  		for b.Controls[0].Op == OpMIPS64SGTUconst {
  8071  			v_0 := b.Controls[0]
  8072  			if auxIntToInt64(v_0.AuxInt) != 1 {
  8073  				break
  8074  			}
  8075  			x := v_0.Args[0]
  8076  			b.resetWithControl(BlockMIPS64EQ, x)
  8077  			return true
  8078  		}
  8079  		// match: (NE (SGTU x (MOVVconst [0])) yes no)
  8080  		// result: (NE x yes no)
  8081  		for b.Controls[0].Op == OpMIPS64SGTU {
  8082  			v_0 := b.Controls[0]
  8083  			_ = v_0.Args[1]
  8084  			x := v_0.Args[0]
  8085  			v_0_1 := v_0.Args[1]
  8086  			if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 0 {
  8087  				break
  8088  			}
  8089  			b.resetWithControl(BlockMIPS64NE, x)
  8090  			return true
  8091  		}
  8092  		// match: (NE (SGTconst [0] x) yes no)
  8093  		// result: (LTZ x yes no)
  8094  		for b.Controls[0].Op == OpMIPS64SGTconst {
  8095  			v_0 := b.Controls[0]
  8096  			if auxIntToInt64(v_0.AuxInt) != 0 {
  8097  				break
  8098  			}
  8099  			x := v_0.Args[0]
  8100  			b.resetWithControl(BlockMIPS64LTZ, x)
  8101  			return true
  8102  		}
  8103  		// match: (NE (SGT x (MOVVconst [0])) yes no)
  8104  		// result: (GTZ x yes no)
  8105  		for b.Controls[0].Op == OpMIPS64SGT {
  8106  			v_0 := b.Controls[0]
  8107  			_ = v_0.Args[1]
  8108  			x := v_0.Args[0]
  8109  			v_0_1 := v_0.Args[1]
  8110  			if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 0 {
  8111  				break
  8112  			}
  8113  			b.resetWithControl(BlockMIPS64GTZ, x)
  8114  			return true
  8115  		}
  8116  		// match: (NE (MOVVconst [0]) yes no)
  8117  		// result: (First no yes)
  8118  		for b.Controls[0].Op == OpMIPS64MOVVconst {
  8119  			v_0 := b.Controls[0]
  8120  			if auxIntToInt64(v_0.AuxInt) != 0 {
  8121  				break
  8122  			}
  8123  			b.Reset(BlockFirst)
  8124  			b.swapSuccessors()
  8125  			return true
  8126  		}
  8127  		// match: (NE (MOVVconst [c]) yes no)
  8128  		// cond: c != 0
  8129  		// result: (First yes no)
  8130  		for b.Controls[0].Op == OpMIPS64MOVVconst {
  8131  			v_0 := b.Controls[0]
  8132  			c := auxIntToInt64(v_0.AuxInt)
  8133  			if !(c != 0) {
  8134  				break
  8135  			}
  8136  			b.Reset(BlockFirst)
  8137  			return true
  8138  		}
  8139  	}
  8140  	return false
  8141  }