github.com/sbinet/go@v0.0.0-20160827155028-54d7de7dd62b/src/cmd/compile/internal/ssa/rewriteARM64.go (about)

     1  // autogenerated from gen/ARM64.rules: do not edit!
     2  // generated with: cd gen; go run *.go
     3  
     4  package ssa
     5  
     6  import "math"
     7  
     8  var _ = math.MinInt8 // in case not otherwise used
// rewriteValueARM64 is the top-level value-rewrite dispatcher for the ARM64
// backend. It switches on v.Op and forwards to the per-opcode rewrite
// routine generated from gen/ARM64.rules. It reports whether v was rewritten;
// opcodes without rewrite rules fall through and return false.
//
// NOTE: this function is autogenerated — change gen/ARM64.rules and
// regenerate rather than editing the case list by hand.
func rewriteValueARM64(v *Value, config *Config) bool {
	switch v.Op {
	// Machine-specific (OpARM64*) opcodes.
	case OpARM64ADD:
		return rewriteValueARM64_OpARM64ADD(v, config)
	case OpARM64ADDconst:
		return rewriteValueARM64_OpARM64ADDconst(v, config)
	case OpARM64ADDshiftLL:
		return rewriteValueARM64_OpARM64ADDshiftLL(v, config)
	case OpARM64ADDshiftRA:
		return rewriteValueARM64_OpARM64ADDshiftRA(v, config)
	case OpARM64ADDshiftRL:
		return rewriteValueARM64_OpARM64ADDshiftRL(v, config)
	case OpARM64AND:
		return rewriteValueARM64_OpARM64AND(v, config)
	case OpARM64ANDconst:
		return rewriteValueARM64_OpARM64ANDconst(v, config)
	case OpARM64ANDshiftLL:
		return rewriteValueARM64_OpARM64ANDshiftLL(v, config)
	case OpARM64ANDshiftRA:
		return rewriteValueARM64_OpARM64ANDshiftRA(v, config)
	case OpARM64ANDshiftRL:
		return rewriteValueARM64_OpARM64ANDshiftRL(v, config)
	case OpARM64BIC:
		return rewriteValueARM64_OpARM64BIC(v, config)
	case OpARM64BICconst:
		return rewriteValueARM64_OpARM64BICconst(v, config)
	case OpARM64BICshiftLL:
		return rewriteValueARM64_OpARM64BICshiftLL(v, config)
	case OpARM64BICshiftRA:
		return rewriteValueARM64_OpARM64BICshiftRA(v, config)
	case OpARM64BICshiftRL:
		return rewriteValueARM64_OpARM64BICshiftRL(v, config)
	case OpARM64CMP:
		return rewriteValueARM64_OpARM64CMP(v, config)
	case OpARM64CMPW:
		return rewriteValueARM64_OpARM64CMPW(v, config)
	case OpARM64CMPWconst:
		return rewriteValueARM64_OpARM64CMPWconst(v, config)
	case OpARM64CMPconst:
		return rewriteValueARM64_OpARM64CMPconst(v, config)
	case OpARM64CMPshiftLL:
		return rewriteValueARM64_OpARM64CMPshiftLL(v, config)
	case OpARM64CMPshiftRA:
		return rewriteValueARM64_OpARM64CMPshiftRA(v, config)
	case OpARM64CMPshiftRL:
		return rewriteValueARM64_OpARM64CMPshiftRL(v, config)
	case OpARM64CSELULT:
		return rewriteValueARM64_OpARM64CSELULT(v, config)
	case OpARM64CSELULT0:
		return rewriteValueARM64_OpARM64CSELULT0(v, config)
	case OpARM64DIV:
		return rewriteValueARM64_OpARM64DIV(v, config)
	case OpARM64DIVW:
		return rewriteValueARM64_OpARM64DIVW(v, config)
	case OpARM64Equal:
		return rewriteValueARM64_OpARM64Equal(v, config)
	case OpARM64FMOVDload:
		return rewriteValueARM64_OpARM64FMOVDload(v, config)
	case OpARM64FMOVDstore:
		return rewriteValueARM64_OpARM64FMOVDstore(v, config)
	case OpARM64FMOVSload:
		return rewriteValueARM64_OpARM64FMOVSload(v, config)
	case OpARM64FMOVSstore:
		return rewriteValueARM64_OpARM64FMOVSstore(v, config)
	case OpARM64GreaterEqual:
		return rewriteValueARM64_OpARM64GreaterEqual(v, config)
	case OpARM64GreaterEqualU:
		return rewriteValueARM64_OpARM64GreaterEqualU(v, config)
	case OpARM64GreaterThan:
		return rewriteValueARM64_OpARM64GreaterThan(v, config)
	case OpARM64GreaterThanU:
		return rewriteValueARM64_OpARM64GreaterThanU(v, config)
	case OpARM64LessEqual:
		return rewriteValueARM64_OpARM64LessEqual(v, config)
	case OpARM64LessEqualU:
		return rewriteValueARM64_OpARM64LessEqualU(v, config)
	case OpARM64LessThan:
		return rewriteValueARM64_OpARM64LessThan(v, config)
	case OpARM64LessThanU:
		return rewriteValueARM64_OpARM64LessThanU(v, config)
	case OpARM64MOD:
		return rewriteValueARM64_OpARM64MOD(v, config)
	case OpARM64MODW:
		return rewriteValueARM64_OpARM64MODW(v, config)
	case OpARM64MOVBUload:
		return rewriteValueARM64_OpARM64MOVBUload(v, config)
	case OpARM64MOVBUreg:
		return rewriteValueARM64_OpARM64MOVBUreg(v, config)
	case OpARM64MOVBload:
		return rewriteValueARM64_OpARM64MOVBload(v, config)
	case OpARM64MOVBreg:
		return rewriteValueARM64_OpARM64MOVBreg(v, config)
	case OpARM64MOVBstore:
		return rewriteValueARM64_OpARM64MOVBstore(v, config)
	case OpARM64MOVBstorezero:
		return rewriteValueARM64_OpARM64MOVBstorezero(v, config)
	case OpARM64MOVDload:
		return rewriteValueARM64_OpARM64MOVDload(v, config)
	case OpARM64MOVDreg:
		return rewriteValueARM64_OpARM64MOVDreg(v, config)
	case OpARM64MOVDstore:
		return rewriteValueARM64_OpARM64MOVDstore(v, config)
	case OpARM64MOVDstorezero:
		return rewriteValueARM64_OpARM64MOVDstorezero(v, config)
	case OpARM64MOVHUload:
		return rewriteValueARM64_OpARM64MOVHUload(v, config)
	case OpARM64MOVHUreg:
		return rewriteValueARM64_OpARM64MOVHUreg(v, config)
	case OpARM64MOVHload:
		return rewriteValueARM64_OpARM64MOVHload(v, config)
	case OpARM64MOVHreg:
		return rewriteValueARM64_OpARM64MOVHreg(v, config)
	case OpARM64MOVHstore:
		return rewriteValueARM64_OpARM64MOVHstore(v, config)
	case OpARM64MOVHstorezero:
		return rewriteValueARM64_OpARM64MOVHstorezero(v, config)
	case OpARM64MOVWUload:
		return rewriteValueARM64_OpARM64MOVWUload(v, config)
	case OpARM64MOVWUreg:
		return rewriteValueARM64_OpARM64MOVWUreg(v, config)
	case OpARM64MOVWload:
		return rewriteValueARM64_OpARM64MOVWload(v, config)
	case OpARM64MOVWreg:
		return rewriteValueARM64_OpARM64MOVWreg(v, config)
	case OpARM64MOVWstore:
		return rewriteValueARM64_OpARM64MOVWstore(v, config)
	case OpARM64MOVWstorezero:
		return rewriteValueARM64_OpARM64MOVWstorezero(v, config)
	case OpARM64MUL:
		return rewriteValueARM64_OpARM64MUL(v, config)
	case OpARM64MULW:
		return rewriteValueARM64_OpARM64MULW(v, config)
	case OpARM64MVN:
		return rewriteValueARM64_OpARM64MVN(v, config)
	case OpARM64NEG:
		return rewriteValueARM64_OpARM64NEG(v, config)
	case OpARM64NotEqual:
		return rewriteValueARM64_OpARM64NotEqual(v, config)
	case OpARM64OR:
		return rewriteValueARM64_OpARM64OR(v, config)
	case OpARM64ORconst:
		return rewriteValueARM64_OpARM64ORconst(v, config)
	case OpARM64ORshiftLL:
		return rewriteValueARM64_OpARM64ORshiftLL(v, config)
	case OpARM64ORshiftRA:
		return rewriteValueARM64_OpARM64ORshiftRA(v, config)
	case OpARM64ORshiftRL:
		return rewriteValueARM64_OpARM64ORshiftRL(v, config)
	case OpARM64SLL:
		return rewriteValueARM64_OpARM64SLL(v, config)
	case OpARM64SLLconst:
		return rewriteValueARM64_OpARM64SLLconst(v, config)
	case OpARM64SRA:
		return rewriteValueARM64_OpARM64SRA(v, config)
	case OpARM64SRAconst:
		return rewriteValueARM64_OpARM64SRAconst(v, config)
	case OpARM64SRL:
		return rewriteValueARM64_OpARM64SRL(v, config)
	case OpARM64SRLconst:
		return rewriteValueARM64_OpARM64SRLconst(v, config)
	case OpARM64SUB:
		return rewriteValueARM64_OpARM64SUB(v, config)
	case OpARM64SUBconst:
		return rewriteValueARM64_OpARM64SUBconst(v, config)
	case OpARM64SUBshiftLL:
		return rewriteValueARM64_OpARM64SUBshiftLL(v, config)
	case OpARM64SUBshiftRA:
		return rewriteValueARM64_OpARM64SUBshiftRA(v, config)
	case OpARM64SUBshiftRL:
		return rewriteValueARM64_OpARM64SUBshiftRL(v, config)
	case OpARM64UDIV:
		return rewriteValueARM64_OpARM64UDIV(v, config)
	case OpARM64UDIVW:
		return rewriteValueARM64_OpARM64UDIVW(v, config)
	case OpARM64UMOD:
		return rewriteValueARM64_OpARM64UMOD(v, config)
	case OpARM64UMODW:
		return rewriteValueARM64_OpARM64UMODW(v, config)
	case OpARM64XOR:
		return rewriteValueARM64_OpARM64XOR(v, config)
	case OpARM64XORconst:
		return rewriteValueARM64_OpARM64XORconst(v, config)
	case OpARM64XORshiftLL:
		return rewriteValueARM64_OpARM64XORshiftLL(v, config)
	case OpARM64XORshiftRA:
		return rewriteValueARM64_OpARM64XORshiftRA(v, config)
	case OpARM64XORshiftRL:
		return rewriteValueARM64_OpARM64XORshiftRL(v, config)
	// Generic (architecture-independent) opcodes, lowered to ARM64 ops.
	case OpAdd16:
		return rewriteValueARM64_OpAdd16(v, config)
	case OpAdd32:
		return rewriteValueARM64_OpAdd32(v, config)
	case OpAdd32F:
		return rewriteValueARM64_OpAdd32F(v, config)
	case OpAdd64:
		return rewriteValueARM64_OpAdd64(v, config)
	case OpAdd64F:
		return rewriteValueARM64_OpAdd64F(v, config)
	case OpAdd8:
		return rewriteValueARM64_OpAdd8(v, config)
	case OpAddPtr:
		return rewriteValueARM64_OpAddPtr(v, config)
	case OpAddr:
		return rewriteValueARM64_OpAddr(v, config)
	case OpAnd16:
		return rewriteValueARM64_OpAnd16(v, config)
	case OpAnd32:
		return rewriteValueARM64_OpAnd32(v, config)
	case OpAnd64:
		return rewriteValueARM64_OpAnd64(v, config)
	case OpAnd8:
		return rewriteValueARM64_OpAnd8(v, config)
	case OpAndB:
		return rewriteValueARM64_OpAndB(v, config)
	case OpAvg64u:
		return rewriteValueARM64_OpAvg64u(v, config)
	case OpClosureCall:
		return rewriteValueARM64_OpClosureCall(v, config)
	case OpCom16:
		return rewriteValueARM64_OpCom16(v, config)
	case OpCom32:
		return rewriteValueARM64_OpCom32(v, config)
	case OpCom64:
		return rewriteValueARM64_OpCom64(v, config)
	case OpCom8:
		return rewriteValueARM64_OpCom8(v, config)
	case OpConst16:
		return rewriteValueARM64_OpConst16(v, config)
	case OpConst32:
		return rewriteValueARM64_OpConst32(v, config)
	case OpConst32F:
		return rewriteValueARM64_OpConst32F(v, config)
	case OpConst64:
		return rewriteValueARM64_OpConst64(v, config)
	case OpConst64F:
		return rewriteValueARM64_OpConst64F(v, config)
	case OpConst8:
		return rewriteValueARM64_OpConst8(v, config)
	case OpConstBool:
		return rewriteValueARM64_OpConstBool(v, config)
	case OpConstNil:
		return rewriteValueARM64_OpConstNil(v, config)
	case OpConvert:
		return rewriteValueARM64_OpConvert(v, config)
	case OpCvt32Fto32:
		return rewriteValueARM64_OpCvt32Fto32(v, config)
	case OpCvt32Fto32U:
		return rewriteValueARM64_OpCvt32Fto32U(v, config)
	case OpCvt32Fto64:
		return rewriteValueARM64_OpCvt32Fto64(v, config)
	case OpCvt32Fto64F:
		return rewriteValueARM64_OpCvt32Fto64F(v, config)
	case OpCvt32Fto64U:
		return rewriteValueARM64_OpCvt32Fto64U(v, config)
	case OpCvt32Uto32F:
		return rewriteValueARM64_OpCvt32Uto32F(v, config)
	case OpCvt32Uto64F:
		return rewriteValueARM64_OpCvt32Uto64F(v, config)
	case OpCvt32to32F:
		return rewriteValueARM64_OpCvt32to32F(v, config)
	case OpCvt32to64F:
		return rewriteValueARM64_OpCvt32to64F(v, config)
	case OpCvt64Fto32:
		return rewriteValueARM64_OpCvt64Fto32(v, config)
	case OpCvt64Fto32F:
		return rewriteValueARM64_OpCvt64Fto32F(v, config)
	case OpCvt64Fto32U:
		return rewriteValueARM64_OpCvt64Fto32U(v, config)
	case OpCvt64Fto64:
		return rewriteValueARM64_OpCvt64Fto64(v, config)
	case OpCvt64Fto64U:
		return rewriteValueARM64_OpCvt64Fto64U(v, config)
	case OpCvt64Uto32F:
		return rewriteValueARM64_OpCvt64Uto32F(v, config)
	case OpCvt64Uto64F:
		return rewriteValueARM64_OpCvt64Uto64F(v, config)
	case OpCvt64to32F:
		return rewriteValueARM64_OpCvt64to32F(v, config)
	case OpCvt64to64F:
		return rewriteValueARM64_OpCvt64to64F(v, config)
	case OpDeferCall:
		return rewriteValueARM64_OpDeferCall(v, config)
	case OpDiv16:
		return rewriteValueARM64_OpDiv16(v, config)
	case OpDiv16u:
		return rewriteValueARM64_OpDiv16u(v, config)
	case OpDiv32:
		return rewriteValueARM64_OpDiv32(v, config)
	case OpDiv32F:
		return rewriteValueARM64_OpDiv32F(v, config)
	case OpDiv32u:
		return rewriteValueARM64_OpDiv32u(v, config)
	case OpDiv64:
		return rewriteValueARM64_OpDiv64(v, config)
	case OpDiv64F:
		return rewriteValueARM64_OpDiv64F(v, config)
	case OpDiv64u:
		return rewriteValueARM64_OpDiv64u(v, config)
	case OpDiv8:
		return rewriteValueARM64_OpDiv8(v, config)
	case OpDiv8u:
		return rewriteValueARM64_OpDiv8u(v, config)
	case OpEq16:
		return rewriteValueARM64_OpEq16(v, config)
	case OpEq32:
		return rewriteValueARM64_OpEq32(v, config)
	case OpEq32F:
		return rewriteValueARM64_OpEq32F(v, config)
	case OpEq64:
		return rewriteValueARM64_OpEq64(v, config)
	case OpEq64F:
		return rewriteValueARM64_OpEq64F(v, config)
	case OpEq8:
		return rewriteValueARM64_OpEq8(v, config)
	case OpEqB:
		return rewriteValueARM64_OpEqB(v, config)
	case OpEqPtr:
		return rewriteValueARM64_OpEqPtr(v, config)
	case OpGeq16:
		return rewriteValueARM64_OpGeq16(v, config)
	case OpGeq16U:
		return rewriteValueARM64_OpGeq16U(v, config)
	case OpGeq32:
		return rewriteValueARM64_OpGeq32(v, config)
	case OpGeq32F:
		return rewriteValueARM64_OpGeq32F(v, config)
	case OpGeq32U:
		return rewriteValueARM64_OpGeq32U(v, config)
	case OpGeq64:
		return rewriteValueARM64_OpGeq64(v, config)
	case OpGeq64F:
		return rewriteValueARM64_OpGeq64F(v, config)
	case OpGeq64U:
		return rewriteValueARM64_OpGeq64U(v, config)
	case OpGeq8:
		return rewriteValueARM64_OpGeq8(v, config)
	case OpGeq8U:
		return rewriteValueARM64_OpGeq8U(v, config)
	case OpGetClosurePtr:
		return rewriteValueARM64_OpGetClosurePtr(v, config)
	case OpGoCall:
		return rewriteValueARM64_OpGoCall(v, config)
	case OpGreater16:
		return rewriteValueARM64_OpGreater16(v, config)
	case OpGreater16U:
		return rewriteValueARM64_OpGreater16U(v, config)
	case OpGreater32:
		return rewriteValueARM64_OpGreater32(v, config)
	case OpGreater32F:
		return rewriteValueARM64_OpGreater32F(v, config)
	case OpGreater32U:
		return rewriteValueARM64_OpGreater32U(v, config)
	case OpGreater64:
		return rewriteValueARM64_OpGreater64(v, config)
	case OpGreater64F:
		return rewriteValueARM64_OpGreater64F(v, config)
	case OpGreater64U:
		return rewriteValueARM64_OpGreater64U(v, config)
	case OpGreater8:
		return rewriteValueARM64_OpGreater8(v, config)
	case OpGreater8U:
		return rewriteValueARM64_OpGreater8U(v, config)
	case OpHmul16:
		return rewriteValueARM64_OpHmul16(v, config)
	case OpHmul16u:
		return rewriteValueARM64_OpHmul16u(v, config)
	case OpHmul32:
		return rewriteValueARM64_OpHmul32(v, config)
	case OpHmul32u:
		return rewriteValueARM64_OpHmul32u(v, config)
	case OpHmul64:
		return rewriteValueARM64_OpHmul64(v, config)
	case OpHmul64u:
		return rewriteValueARM64_OpHmul64u(v, config)
	case OpHmul8:
		return rewriteValueARM64_OpHmul8(v, config)
	case OpHmul8u:
		return rewriteValueARM64_OpHmul8u(v, config)
	case OpInterCall:
		return rewriteValueARM64_OpInterCall(v, config)
	case OpIsInBounds:
		return rewriteValueARM64_OpIsInBounds(v, config)
	case OpIsNonNil:
		return rewriteValueARM64_OpIsNonNil(v, config)
	case OpIsSliceInBounds:
		return rewriteValueARM64_OpIsSliceInBounds(v, config)
	case OpLeq16:
		return rewriteValueARM64_OpLeq16(v, config)
	case OpLeq16U:
		return rewriteValueARM64_OpLeq16U(v, config)
	case OpLeq32:
		return rewriteValueARM64_OpLeq32(v, config)
	case OpLeq32F:
		return rewriteValueARM64_OpLeq32F(v, config)
	case OpLeq32U:
		return rewriteValueARM64_OpLeq32U(v, config)
	case OpLeq64:
		return rewriteValueARM64_OpLeq64(v, config)
	case OpLeq64F:
		return rewriteValueARM64_OpLeq64F(v, config)
	case OpLeq64U:
		return rewriteValueARM64_OpLeq64U(v, config)
	case OpLeq8:
		return rewriteValueARM64_OpLeq8(v, config)
	case OpLeq8U:
		return rewriteValueARM64_OpLeq8U(v, config)
	case OpLess16:
		return rewriteValueARM64_OpLess16(v, config)
	case OpLess16U:
		return rewriteValueARM64_OpLess16U(v, config)
	case OpLess32:
		return rewriteValueARM64_OpLess32(v, config)
	case OpLess32F:
		return rewriteValueARM64_OpLess32F(v, config)
	case OpLess32U:
		return rewriteValueARM64_OpLess32U(v, config)
	case OpLess64:
		return rewriteValueARM64_OpLess64(v, config)
	case OpLess64F:
		return rewriteValueARM64_OpLess64F(v, config)
	case OpLess64U:
		return rewriteValueARM64_OpLess64U(v, config)
	case OpLess8:
		return rewriteValueARM64_OpLess8(v, config)
	case OpLess8U:
		return rewriteValueARM64_OpLess8U(v, config)
	case OpLoad:
		return rewriteValueARM64_OpLoad(v, config)
	case OpLrot16:
		return rewriteValueARM64_OpLrot16(v, config)
	case OpLrot32:
		return rewriteValueARM64_OpLrot32(v, config)
	case OpLrot64:
		return rewriteValueARM64_OpLrot64(v, config)
	case OpLrot8:
		return rewriteValueARM64_OpLrot8(v, config)
	case OpLsh16x16:
		return rewriteValueARM64_OpLsh16x16(v, config)
	case OpLsh16x32:
		return rewriteValueARM64_OpLsh16x32(v, config)
	case OpLsh16x64:
		return rewriteValueARM64_OpLsh16x64(v, config)
	case OpLsh16x8:
		return rewriteValueARM64_OpLsh16x8(v, config)
	case OpLsh32x16:
		return rewriteValueARM64_OpLsh32x16(v, config)
	case OpLsh32x32:
		return rewriteValueARM64_OpLsh32x32(v, config)
	case OpLsh32x64:
		return rewriteValueARM64_OpLsh32x64(v, config)
	case OpLsh32x8:
		return rewriteValueARM64_OpLsh32x8(v, config)
	case OpLsh64x16:
		return rewriteValueARM64_OpLsh64x16(v, config)
	case OpLsh64x32:
		return rewriteValueARM64_OpLsh64x32(v, config)
	case OpLsh64x64:
		return rewriteValueARM64_OpLsh64x64(v, config)
	case OpLsh64x8:
		return rewriteValueARM64_OpLsh64x8(v, config)
	case OpLsh8x16:
		return rewriteValueARM64_OpLsh8x16(v, config)
	case OpLsh8x32:
		return rewriteValueARM64_OpLsh8x32(v, config)
	case OpLsh8x64:
		return rewriteValueARM64_OpLsh8x64(v, config)
	case OpLsh8x8:
		return rewriteValueARM64_OpLsh8x8(v, config)
	case OpMod16:
		return rewriteValueARM64_OpMod16(v, config)
	case OpMod16u:
		return rewriteValueARM64_OpMod16u(v, config)
	case OpMod32:
		return rewriteValueARM64_OpMod32(v, config)
	case OpMod32u:
		return rewriteValueARM64_OpMod32u(v, config)
	case OpMod64:
		return rewriteValueARM64_OpMod64(v, config)
	case OpMod64u:
		return rewriteValueARM64_OpMod64u(v, config)
	case OpMod8:
		return rewriteValueARM64_OpMod8(v, config)
	case OpMod8u:
		return rewriteValueARM64_OpMod8u(v, config)
	case OpMove:
		return rewriteValueARM64_OpMove(v, config)
	case OpMul16:
		return rewriteValueARM64_OpMul16(v, config)
	case OpMul32:
		return rewriteValueARM64_OpMul32(v, config)
	case OpMul32F:
		return rewriteValueARM64_OpMul32F(v, config)
	case OpMul64:
		return rewriteValueARM64_OpMul64(v, config)
	case OpMul64F:
		return rewriteValueARM64_OpMul64F(v, config)
	case OpMul8:
		return rewriteValueARM64_OpMul8(v, config)
	case OpNeg16:
		return rewriteValueARM64_OpNeg16(v, config)
	case OpNeg32:
		return rewriteValueARM64_OpNeg32(v, config)
	case OpNeg32F:
		return rewriteValueARM64_OpNeg32F(v, config)
	case OpNeg64:
		return rewriteValueARM64_OpNeg64(v, config)
	case OpNeg64F:
		return rewriteValueARM64_OpNeg64F(v, config)
	case OpNeg8:
		return rewriteValueARM64_OpNeg8(v, config)
	case OpNeq16:
		return rewriteValueARM64_OpNeq16(v, config)
	case OpNeq32:
		return rewriteValueARM64_OpNeq32(v, config)
	case OpNeq32F:
		return rewriteValueARM64_OpNeq32F(v, config)
	case OpNeq64:
		return rewriteValueARM64_OpNeq64(v, config)
	case OpNeq64F:
		return rewriteValueARM64_OpNeq64F(v, config)
	case OpNeq8:
		return rewriteValueARM64_OpNeq8(v, config)
	case OpNeqB:
		return rewriteValueARM64_OpNeqB(v, config)
	case OpNeqPtr:
		return rewriteValueARM64_OpNeqPtr(v, config)
	case OpNilCheck:
		return rewriteValueARM64_OpNilCheck(v, config)
	case OpNot:
		return rewriteValueARM64_OpNot(v, config)
	case OpOffPtr:
		return rewriteValueARM64_OpOffPtr(v, config)
	case OpOr16:
		return rewriteValueARM64_OpOr16(v, config)
	case OpOr32:
		return rewriteValueARM64_OpOr32(v, config)
	case OpOr64:
		return rewriteValueARM64_OpOr64(v, config)
	case OpOr8:
		return rewriteValueARM64_OpOr8(v, config)
	case OpOrB:
		return rewriteValueARM64_OpOrB(v, config)
	case OpRsh16Ux16:
		return rewriteValueARM64_OpRsh16Ux16(v, config)
	case OpRsh16Ux32:
		return rewriteValueARM64_OpRsh16Ux32(v, config)
	case OpRsh16Ux64:
		return rewriteValueARM64_OpRsh16Ux64(v, config)
	case OpRsh16Ux8:
		return rewriteValueARM64_OpRsh16Ux8(v, config)
	case OpRsh16x16:
		return rewriteValueARM64_OpRsh16x16(v, config)
	case OpRsh16x32:
		return rewriteValueARM64_OpRsh16x32(v, config)
	case OpRsh16x64:
		return rewriteValueARM64_OpRsh16x64(v, config)
	case OpRsh16x8:
		return rewriteValueARM64_OpRsh16x8(v, config)
	case OpRsh32Ux16:
		return rewriteValueARM64_OpRsh32Ux16(v, config)
	case OpRsh32Ux32:
		return rewriteValueARM64_OpRsh32Ux32(v, config)
	case OpRsh32Ux64:
		return rewriteValueARM64_OpRsh32Ux64(v, config)
	case OpRsh32Ux8:
		return rewriteValueARM64_OpRsh32Ux8(v, config)
	case OpRsh32x16:
		return rewriteValueARM64_OpRsh32x16(v, config)
	case OpRsh32x32:
		return rewriteValueARM64_OpRsh32x32(v, config)
	case OpRsh32x64:
		return rewriteValueARM64_OpRsh32x64(v, config)
	case OpRsh32x8:
		return rewriteValueARM64_OpRsh32x8(v, config)
	case OpRsh64Ux16:
		return rewriteValueARM64_OpRsh64Ux16(v, config)
	case OpRsh64Ux32:
		return rewriteValueARM64_OpRsh64Ux32(v, config)
	case OpRsh64Ux64:
		return rewriteValueARM64_OpRsh64Ux64(v, config)
	case OpRsh64Ux8:
		return rewriteValueARM64_OpRsh64Ux8(v, config)
	case OpRsh64x16:
		return rewriteValueARM64_OpRsh64x16(v, config)
	case OpRsh64x32:
		return rewriteValueARM64_OpRsh64x32(v, config)
	case OpRsh64x64:
		return rewriteValueARM64_OpRsh64x64(v, config)
	case OpRsh64x8:
		return rewriteValueARM64_OpRsh64x8(v, config)
	case OpRsh8Ux16:
		return rewriteValueARM64_OpRsh8Ux16(v, config)
	case OpRsh8Ux32:
		return rewriteValueARM64_OpRsh8Ux32(v, config)
	case OpRsh8Ux64:
		return rewriteValueARM64_OpRsh8Ux64(v, config)
	case OpRsh8Ux8:
		return rewriteValueARM64_OpRsh8Ux8(v, config)
	case OpRsh8x16:
		return rewriteValueARM64_OpRsh8x16(v, config)
	case OpRsh8x32:
		return rewriteValueARM64_OpRsh8x32(v, config)
	case OpRsh8x64:
		return rewriteValueARM64_OpRsh8x64(v, config)
	case OpRsh8x8:
		return rewriteValueARM64_OpRsh8x8(v, config)
	case OpSignExt16to32:
		return rewriteValueARM64_OpSignExt16to32(v, config)
	case OpSignExt16to64:
		return rewriteValueARM64_OpSignExt16to64(v, config)
	case OpSignExt32to64:
		return rewriteValueARM64_OpSignExt32to64(v, config)
	case OpSignExt8to16:
		return rewriteValueARM64_OpSignExt8to16(v, config)
	case OpSignExt8to32:
		return rewriteValueARM64_OpSignExt8to32(v, config)
	case OpSignExt8to64:
		return rewriteValueARM64_OpSignExt8to64(v, config)
	case OpSqrt:
		return rewriteValueARM64_OpSqrt(v, config)
	case OpStaticCall:
		return rewriteValueARM64_OpStaticCall(v, config)
	case OpStore:
		return rewriteValueARM64_OpStore(v, config)
	case OpSub16:
		return rewriteValueARM64_OpSub16(v, config)
	case OpSub32:
		return rewriteValueARM64_OpSub32(v, config)
	case OpSub32F:
		return rewriteValueARM64_OpSub32F(v, config)
	case OpSub64:
		return rewriteValueARM64_OpSub64(v, config)
	case OpSub64F:
		return rewriteValueARM64_OpSub64F(v, config)
	case OpSub8:
		return rewriteValueARM64_OpSub8(v, config)
	case OpSubPtr:
		return rewriteValueARM64_OpSubPtr(v, config)
	case OpTrunc16to8:
		return rewriteValueARM64_OpTrunc16to8(v, config)
	case OpTrunc32to16:
		return rewriteValueARM64_OpTrunc32to16(v, config)
	case OpTrunc32to8:
		return rewriteValueARM64_OpTrunc32to8(v, config)
	case OpTrunc64to16:
		return rewriteValueARM64_OpTrunc64to16(v, config)
	case OpTrunc64to32:
		return rewriteValueARM64_OpTrunc64to32(v, config)
	case OpTrunc64to8:
		return rewriteValueARM64_OpTrunc64to8(v, config)
	case OpXor16:
		return rewriteValueARM64_OpXor16(v, config)
	case OpXor32:
		return rewriteValueARM64_OpXor32(v, config)
	case OpXor64:
		return rewriteValueARM64_OpXor64(v, config)
	case OpXor8:
		return rewriteValueARM64_OpXor8(v, config)
	case OpZero:
		return rewriteValueARM64_OpZero(v, config)
	case OpZeroExt16to32:
		return rewriteValueARM64_OpZeroExt16to32(v, config)
	case OpZeroExt16to64:
		return rewriteValueARM64_OpZeroExt16to64(v, config)
	case OpZeroExt32to64:
		return rewriteValueARM64_OpZeroExt32to64(v, config)
	case OpZeroExt8to16:
		return rewriteValueARM64_OpZeroExt8to16(v, config)
	case OpZeroExt8to32:
		return rewriteValueARM64_OpZeroExt8to32(v, config)
	case OpZeroExt8to64:
		return rewriteValueARM64_OpZeroExt8to64(v, config)
	}
	// No rewrite rule exists for this opcode.
	return false
}
// rewriteValueARM64_OpARM64ADD applies the ADD rewrite rules from
// gen/ARM64.rules: fold a MOVDconst operand into ADDconst, turn an ADD of a
// negated value into SUB, and merge a constant-shifted operand
// (SLLconst/SRLconst/SRAconst) into the fused ADDshift* form. Each rule is
// tried in order; the first match rewrites v in place and returns true.
// Rule order matters when both operands could match (the earlier rule wins),
// so do not reorder by hand — regenerate from the rules file instead.
func rewriteValueARM64_OpARM64ADD(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (ADD (MOVDconst [c]) x)
	// cond:
	// result: (ADDconst [c] x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		v.reset(OpARM64ADDconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (ADD x (MOVDconst [c]))
	// cond:
	// result: (ADDconst [c] x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64ADDconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (ADD x (NEG y))
	// cond:
	// result: (SUB x y)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64NEG {
			break
		}
		y := v_1.Args[0]
		v.reset(OpARM64SUB)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (ADD (NEG y) x)
	// cond:
	// result: (SUB x y)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64NEG {
			break
		}
		y := v_0.Args[0]
		x := v.Args[1]
		v.reset(OpARM64SUB)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (ADD x (SLLconst [c] y))
	// cond:
	// result: (ADDshiftLL x y [c])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SLLconst {
			break
		}
		c := v_1.AuxInt
		y := v_1.Args[0]
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (ADD (SLLconst [c] y) x)
	// cond:
	// result: (ADDshiftLL x y [c])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SLLconst {
			break
		}
		c := v_0.AuxInt
		y := v_0.Args[0]
		x := v.Args[1]
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (ADD x (SRLconst [c] y))
	// cond:
	// result: (ADDshiftRL x y [c])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRLconst {
			break
		}
		c := v_1.AuxInt
		y := v_1.Args[0]
		v.reset(OpARM64ADDshiftRL)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (ADD (SRLconst [c] y) x)
	// cond:
	// result: (ADDshiftRL x y [c])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SRLconst {
			break
		}
		c := v_0.AuxInt
		y := v_0.Args[0]
		x := v.Args[1]
		v.reset(OpARM64ADDshiftRL)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (ADD x (SRAconst [c] y))
	// cond:
	// result: (ADDshiftRA x y [c])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRAconst {
			break
		}
		c := v_1.AuxInt
		y := v_1.Args[0]
		v.reset(OpARM64ADDshiftRA)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (ADD (SRAconst [c] y) x)
	// cond:
	// result: (ADDshiftRA x y [c])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SRAconst {
			break
		}
		c := v_0.AuxInt
		y := v_0.Args[0]
		x := v.Args[1]
		v.reset(OpARM64ADDshiftRA)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// No rule matched; leave v unchanged.
	return false
}
// rewriteValueARM64_OpARM64ADDconst applies the generated rewrite rules for
// OpARM64ADDconst: fold the addend into a MOVDaddr offset, elide add of 0,
// and combine with constant or ADDconst/SUBconst operands. Each for-block is
// one rule tried in order; on a match v is rewritten in place and true is
// returned. Returns false when no rule applies.
//
// Generated from gen/ARM64.rules — change the rules file and regenerate
// rather than editing here.
func rewriteValueARM64_OpARM64ADDconst(v *Value, config *Config) bool {
	b := v.Block
	_ = b // b is only needed by rules that allocate new values; keep it referenced either way
	// match: (ADDconst [off1] (MOVDaddr [off2] {sym} ptr))
	// cond:
	// result: (MOVDaddr [off1+off2] {sym} ptr)
	for {
		off1 := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym := v_0.Aux
		ptr := v_0.Args[0]
		v.reset(OpARM64MOVDaddr)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		return true
	}
	// match: (ADDconst [0]  x)
	// cond:
	// result: x
	for {
		if v.AuxInt != 0 {
			break
		}
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (ADDconst [c] (MOVDconst [d]))
	// cond:
	// result: (MOVDconst [c+d])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = c + d
		return true
	}
	// match: (ADDconst [c] (ADDconst [d] x))
	// cond:
	// result: (ADDconst [c+d] x)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		d := v_0.AuxInt
		x := v_0.Args[0]
		v.reset(OpARM64ADDconst)
		v.AuxInt = c + d
		v.AddArg(x)
		return true
	}
	// match: (ADDconst [c] (SUBconst [d] x))
	// cond:
	// result: (ADDconst [c-d] x)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SUBconst {
			break
		}
		d := v_0.AuxInt
		x := v_0.Args[0]
		v.reset(OpARM64ADDconst)
		v.AuxInt = c - d
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64ADDshiftLL applies the generated rewrite rules for
// OpARM64ADDshiftLL (which computes arg0 + (arg1 << AuxInt)). When either
// operand is a constant the shift is folded away into an ADDconst. Rules are
// tried in order; the first match rewrites v in place and returns true.
//
// Generated from gen/ARM64.rules — change the rules file and regenerate
// rather than editing here.
func rewriteValueARM64_OpARM64ADDshiftLL(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (ADDshiftLL (MOVDconst [c]) x [d])
	// cond:
	// result: (ADDconst [c] (SLLconst <x.Type> x [d]))
	for {
		d := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		v.reset(OpARM64ADDconst)
		v.AuxInt = c
		v0 := b.NewValue0(v.Line, OpARM64SLLconst, x.Type)
		v0.AuxInt = d
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (ADDshiftLL x (MOVDconst [c]) [d])
	// cond:
	// result: (ADDconst x [int64(uint64(c)<<uint64(d))])
	for {
		d := v.AuxInt
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64ADDconst)
		// The shift of the constant is done unsigned (logical shift left).
		v.AuxInt = int64(uint64(c) << uint64(d))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64ADDshiftRA applies the generated rewrite rules for
// OpARM64ADDshiftRA (arg0 + (arg1 >> AuxInt), arithmetic shift). Constant
// operands are folded into an ADDconst. Rules are tried in order; the first
// match rewrites v in place and returns true.
//
// Generated from gen/ARM64.rules — change the rules file and regenerate
// rather than editing here.
func rewriteValueARM64_OpARM64ADDshiftRA(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (ADDshiftRA (MOVDconst [c]) x [d])
	// cond:
	// result: (ADDconst [c] (SRAconst <x.Type> x [d]))
	for {
		d := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		v.reset(OpARM64ADDconst)
		v.AuxInt = c
		v0 := b.NewValue0(v.Line, OpARM64SRAconst, x.Type)
		v0.AuxInt = d
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (ADDshiftRA x (MOVDconst [c]) [d])
	// cond:
	// result: (ADDconst x [int64(int64(c)>>uint64(d))])
	for {
		d := v.AuxInt
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64ADDconst)
		// Signed shift of the constant mirrors SRA's arithmetic semantics.
		v.AuxInt = int64(int64(c) >> uint64(d))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64ADDshiftRL applies the generated rewrite rules for
// OpARM64ADDshiftRL (arg0 + (arg1 >> AuxInt), logical shift). Constant
// operands are folded into an ADDconst. Rules are tried in order; the first
// match rewrites v in place and returns true.
//
// Generated from gen/ARM64.rules — change the rules file and regenerate
// rather than editing here.
func rewriteValueARM64_OpARM64ADDshiftRL(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (ADDshiftRL (MOVDconst [c]) x [d])
	// cond:
	// result: (ADDconst [c] (SRLconst <x.Type> x [d]))
	for {
		d := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		v.reset(OpARM64ADDconst)
		v.AuxInt = c
		v0 := b.NewValue0(v.Line, OpARM64SRLconst, x.Type)
		v0.AuxInt = d
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (ADDshiftRL x (MOVDconst [c]) [d])
	// cond:
	// result: (ADDconst x [int64(uint64(c)>>uint64(d))])
	for {
		d := v.AuxInt
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64ADDconst)
		// Unsigned shift of the constant mirrors SRL's logical semantics.
		v.AuxInt = int64(uint64(c) >> uint64(d))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64AND applies the generated rewrite rules for
// OpARM64AND: fold a constant operand into ANDconst, collapse (AND x x) to x,
// turn (AND x (MVN y)) into BIC, and absorb a constant-shifted operand into
// the corresponding ANDshift* op. Commutative rules appear twice, once per
// operand order. Rules are tried in order; the first match rewrites v in
// place and returns true.
//
// Generated from gen/ARM64.rules — change the rules file and regenerate
// rather than editing here.
func rewriteValueARM64_OpARM64AND(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (AND (MOVDconst [c]) x)
	// cond:
	// result: (ANDconst [c] x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		v.reset(OpARM64ANDconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (AND x (MOVDconst [c]))
	// cond:
	// result: (ANDconst [c] x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64ANDconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (AND x x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (AND x (MVN y))
	// cond:
	// result: (BIC x y)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MVN {
			break
		}
		y := v_1.Args[0]
		v.reset(OpARM64BIC)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (AND x (SLLconst [c] y))
	// cond:
	// result: (ANDshiftLL x y [c])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SLLconst {
			break
		}
		c := v_1.AuxInt
		y := v_1.Args[0]
		v.reset(OpARM64ANDshiftLL)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (AND (SLLconst [c] y) x)
	// cond:
	// result: (ANDshiftLL x y [c])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SLLconst {
			break
		}
		c := v_0.AuxInt
		y := v_0.Args[0]
		x := v.Args[1]
		v.reset(OpARM64ANDshiftLL)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (AND x (SRLconst [c] y))
	// cond:
	// result: (ANDshiftRL x y [c])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRLconst {
			break
		}
		c := v_1.AuxInt
		y := v_1.Args[0]
		v.reset(OpARM64ANDshiftRL)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (AND (SRLconst [c] y) x)
	// cond:
	// result: (ANDshiftRL x y [c])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SRLconst {
			break
		}
		c := v_0.AuxInt
		y := v_0.Args[0]
		x := v.Args[1]
		v.reset(OpARM64ANDshiftRL)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (AND x (SRAconst [c] y))
	// cond:
	// result: (ANDshiftRA x y [c])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRAconst {
			break
		}
		c := v_1.AuxInt
		y := v_1.Args[0]
		v.reset(OpARM64ANDshiftRA)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (AND (SRAconst [c] y) x)
	// cond:
	// result: (ANDshiftRA x y [c])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SRAconst {
			break
		}
		c := v_0.AuxInt
		y := v_0.Args[0]
		x := v.Args[1]
		v.reset(OpARM64ANDshiftRA)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64ANDconst applies the generated rewrite rules for
// OpARM64ANDconst: and-with-0 is 0, and-with-(-1) is the identity, and
// constant operands or nested ANDconsts are folded. Rules are tried in
// order; the first match rewrites v in place and returns true.
//
// Generated from gen/ARM64.rules — change the rules file and regenerate
// rather than editing here.
func rewriteValueARM64_OpARM64ANDconst(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (ANDconst [0]  _)
	// cond:
	// result: (MOVDconst [0])
	for {
		if v.AuxInt != 0 {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (ANDconst [-1] x)
	// cond:
	// result: x
	for {
		if v.AuxInt != -1 {
			break
		}
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (ANDconst [c] (MOVDconst [d]))
	// cond:
	// result: (MOVDconst [c&d])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = c & d
		return true
	}
	// match: (ANDconst [c] (ANDconst [d] x))
	// cond:
	// result: (ANDconst [c&d] x)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ANDconst {
			break
		}
		d := v_0.AuxInt
		x := v_0.Args[0]
		v.reset(OpARM64ANDconst)
		v.AuxInt = c & d
		v.AddArg(x)
		return true
	}
	return false
}
  1277  func rewriteValueARM64_OpARM64ANDshiftLL(v *Value, config *Config) bool {
  1278  	b := v.Block
  1279  	_ = b
  1280  	// match: (ANDshiftLL (MOVDconst [c]) x [d])
  1281  	// cond:
  1282  	// result: (ANDconst [c] (SLLconst <x.Type> x [d]))
  1283  	for {
  1284  		d := v.AuxInt
  1285  		v_0 := v.Args[0]
  1286  		if v_0.Op != OpARM64MOVDconst {
  1287  			break
  1288  		}
  1289  		c := v_0.AuxInt
  1290  		x := v.Args[1]
  1291  		v.reset(OpARM64ANDconst)
  1292  		v.AuxInt = c
  1293  		v0 := b.NewValue0(v.Line, OpARM64SLLconst, x.Type)
  1294  		v0.AuxInt = d
  1295  		v0.AddArg(x)
  1296  		v.AddArg(v0)
  1297  		return true
  1298  	}
  1299  	// match: (ANDshiftLL x (MOVDconst [c]) [d])
  1300  	// cond:
  1301  	// result: (ANDconst x [int64(uint64(c)<<uint64(d))])
  1302  	for {
  1303  		d := v.AuxInt
  1304  		x := v.Args[0]
  1305  		v_1 := v.Args[1]
  1306  		if v_1.Op != OpARM64MOVDconst {
  1307  			break
  1308  		}
  1309  		c := v_1.AuxInt
  1310  		v.reset(OpARM64ANDconst)
  1311  		v.AuxInt = int64(uint64(c) << uint64(d))
  1312  		v.AddArg(x)
  1313  		return true
  1314  	}
  1315  	// match: (ANDshiftLL x y:(SLLconst x [c]) [d])
  1316  	// cond: c==d
  1317  	// result: y
  1318  	for {
  1319  		d := v.AuxInt
  1320  		x := v.Args[0]
  1321  		y := v.Args[1]
  1322  		if y.Op != OpARM64SLLconst {
  1323  			break
  1324  		}
  1325  		c := y.AuxInt
  1326  		if x != y.Args[0] {
  1327  			break
  1328  		}
  1329  		if !(c == d) {
  1330  			break
  1331  		}
  1332  		v.reset(OpCopy)
  1333  		v.Type = y.Type
  1334  		v.AddArg(y)
  1335  		return true
  1336  	}
  1337  	return false
  1338  }
  1339  func rewriteValueARM64_OpARM64ANDshiftRA(v *Value, config *Config) bool {
  1340  	b := v.Block
  1341  	_ = b
  1342  	// match: (ANDshiftRA (MOVDconst [c]) x [d])
  1343  	// cond:
  1344  	// result: (ANDconst [c] (SRAconst <x.Type> x [d]))
  1345  	for {
  1346  		d := v.AuxInt
  1347  		v_0 := v.Args[0]
  1348  		if v_0.Op != OpARM64MOVDconst {
  1349  			break
  1350  		}
  1351  		c := v_0.AuxInt
  1352  		x := v.Args[1]
  1353  		v.reset(OpARM64ANDconst)
  1354  		v.AuxInt = c
  1355  		v0 := b.NewValue0(v.Line, OpARM64SRAconst, x.Type)
  1356  		v0.AuxInt = d
  1357  		v0.AddArg(x)
  1358  		v.AddArg(v0)
  1359  		return true
  1360  	}
  1361  	// match: (ANDshiftRA x (MOVDconst [c]) [d])
  1362  	// cond:
  1363  	// result: (ANDconst x [int64(int64(c)>>uint64(d))])
  1364  	for {
  1365  		d := v.AuxInt
  1366  		x := v.Args[0]
  1367  		v_1 := v.Args[1]
  1368  		if v_1.Op != OpARM64MOVDconst {
  1369  			break
  1370  		}
  1371  		c := v_1.AuxInt
  1372  		v.reset(OpARM64ANDconst)
  1373  		v.AuxInt = int64(int64(c) >> uint64(d))
  1374  		v.AddArg(x)
  1375  		return true
  1376  	}
  1377  	// match: (ANDshiftRA x y:(SRAconst x [c]) [d])
  1378  	// cond: c==d
  1379  	// result: y
  1380  	for {
  1381  		d := v.AuxInt
  1382  		x := v.Args[0]
  1383  		y := v.Args[1]
  1384  		if y.Op != OpARM64SRAconst {
  1385  			break
  1386  		}
  1387  		c := y.AuxInt
  1388  		if x != y.Args[0] {
  1389  			break
  1390  		}
  1391  		if !(c == d) {
  1392  			break
  1393  		}
  1394  		v.reset(OpCopy)
  1395  		v.Type = y.Type
  1396  		v.AddArg(y)
  1397  		return true
  1398  	}
  1399  	return false
  1400  }
  1401  func rewriteValueARM64_OpARM64ANDshiftRL(v *Value, config *Config) bool {
  1402  	b := v.Block
  1403  	_ = b
  1404  	// match: (ANDshiftRL (MOVDconst [c]) x [d])
  1405  	// cond:
  1406  	// result: (ANDconst [c] (SRLconst <x.Type> x [d]))
  1407  	for {
  1408  		d := v.AuxInt
  1409  		v_0 := v.Args[0]
  1410  		if v_0.Op != OpARM64MOVDconst {
  1411  			break
  1412  		}
  1413  		c := v_0.AuxInt
  1414  		x := v.Args[1]
  1415  		v.reset(OpARM64ANDconst)
  1416  		v.AuxInt = c
  1417  		v0 := b.NewValue0(v.Line, OpARM64SRLconst, x.Type)
  1418  		v0.AuxInt = d
  1419  		v0.AddArg(x)
  1420  		v.AddArg(v0)
  1421  		return true
  1422  	}
  1423  	// match: (ANDshiftRL x (MOVDconst [c]) [d])
  1424  	// cond:
  1425  	// result: (ANDconst x [int64(uint64(c)>>uint64(d))])
  1426  	for {
  1427  		d := v.AuxInt
  1428  		x := v.Args[0]
  1429  		v_1 := v.Args[1]
  1430  		if v_1.Op != OpARM64MOVDconst {
  1431  			break
  1432  		}
  1433  		c := v_1.AuxInt
  1434  		v.reset(OpARM64ANDconst)
  1435  		v.AuxInt = int64(uint64(c) >> uint64(d))
  1436  		v.AddArg(x)
  1437  		return true
  1438  	}
  1439  	// match: (ANDshiftRL x y:(SRLconst x [c]) [d])
  1440  	// cond: c==d
  1441  	// result: y
  1442  	for {
  1443  		d := v.AuxInt
  1444  		x := v.Args[0]
  1445  		y := v.Args[1]
  1446  		if y.Op != OpARM64SRLconst {
  1447  			break
  1448  		}
  1449  		c := y.AuxInt
  1450  		if x != y.Args[0] {
  1451  			break
  1452  		}
  1453  		if !(c == d) {
  1454  			break
  1455  		}
  1456  		v.reset(OpCopy)
  1457  		v.Type = y.Type
  1458  		v.AddArg(y)
  1459  		return true
  1460  	}
  1461  	return false
  1462  }
// rewriteValueARM64_OpARM64BIC applies the generated rewrite rules for
// OpARM64BIC (bit clear, arg0 &^ arg1): fold a constant second operand into
// BICconst, collapse (BIC x x) to 0, and absorb a constant-shifted second
// operand into the corresponding BICshift* op. BIC is not commutative, so
// unlike AND there are no mirrored operand-order rules. Rules are tried in
// order; the first match rewrites v in place and returns true.
//
// Generated from gen/ARM64.rules — change the rules file and regenerate
// rather than editing here.
func rewriteValueARM64_OpARM64BIC(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (BIC x (MOVDconst [c]))
	// cond:
	// result: (BICconst [c] x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64BICconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (BIC x x)
	// cond:
	// result: (MOVDconst [0])
	for {
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (BIC x (SLLconst [c] y))
	// cond:
	// result: (BICshiftLL x y [c])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SLLconst {
			break
		}
		c := v_1.AuxInt
		y := v_1.Args[0]
		v.reset(OpARM64BICshiftLL)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (BIC x (SRLconst [c] y))
	// cond:
	// result: (BICshiftRL x y [c])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRLconst {
			break
		}
		c := v_1.AuxInt
		y := v_1.Args[0]
		v.reset(OpARM64BICshiftRL)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (BIC x (SRAconst [c] y))
	// cond:
	// result: (BICshiftRA x y [c])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRAconst {
			break
		}
		c := v_1.AuxInt
		y := v_1.Args[0]
		v.reset(OpARM64BICshiftRA)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64BICconst applies the generated rewrite rules for
// OpARM64BICconst (arg0 &^ AuxInt): clearing no bits is the identity,
// clearing all bits yields 0, and a constant operand is folded. Rules are
// tried in order; the first match rewrites v in place and returns true.
//
// Generated from gen/ARM64.rules — change the rules file and regenerate
// rather than editing here.
func rewriteValueARM64_OpARM64BICconst(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (BICconst [0]  x)
	// cond:
	// result: x
	for {
		if v.AuxInt != 0 {
			break
		}
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (BICconst [-1] _)
	// cond:
	// result: (MOVDconst [0])
	for {
		if v.AuxInt != -1 {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (BICconst [c] (MOVDconst [d]))
	// cond:
	// result: (MOVDconst [d&^c])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = d &^ c
		return true
	}
	return false
}
  1589  func rewriteValueARM64_OpARM64BICshiftLL(v *Value, config *Config) bool {
  1590  	b := v.Block
  1591  	_ = b
  1592  	// match: (BICshiftLL x (MOVDconst [c]) [d])
  1593  	// cond:
  1594  	// result: (BICconst x [int64(uint64(c)<<uint64(d))])
  1595  	for {
  1596  		d := v.AuxInt
  1597  		x := v.Args[0]
  1598  		v_1 := v.Args[1]
  1599  		if v_1.Op != OpARM64MOVDconst {
  1600  			break
  1601  		}
  1602  		c := v_1.AuxInt
  1603  		v.reset(OpARM64BICconst)
  1604  		v.AuxInt = int64(uint64(c) << uint64(d))
  1605  		v.AddArg(x)
  1606  		return true
  1607  	}
  1608  	// match: (BICshiftLL x (SLLconst x [c]) [d])
  1609  	// cond: c==d
  1610  	// result: (MOVDconst [0])
  1611  	for {
  1612  		d := v.AuxInt
  1613  		x := v.Args[0]
  1614  		v_1 := v.Args[1]
  1615  		if v_1.Op != OpARM64SLLconst {
  1616  			break
  1617  		}
  1618  		c := v_1.AuxInt
  1619  		if x != v_1.Args[0] {
  1620  			break
  1621  		}
  1622  		if !(c == d) {
  1623  			break
  1624  		}
  1625  		v.reset(OpARM64MOVDconst)
  1626  		v.AuxInt = 0
  1627  		return true
  1628  	}
  1629  	return false
  1630  }
  1631  func rewriteValueARM64_OpARM64BICshiftRA(v *Value, config *Config) bool {
  1632  	b := v.Block
  1633  	_ = b
  1634  	// match: (BICshiftRA x (MOVDconst [c]) [d])
  1635  	// cond:
  1636  	// result: (BICconst x [int64(int64(c)>>uint64(d))])
  1637  	for {
  1638  		d := v.AuxInt
  1639  		x := v.Args[0]
  1640  		v_1 := v.Args[1]
  1641  		if v_1.Op != OpARM64MOVDconst {
  1642  			break
  1643  		}
  1644  		c := v_1.AuxInt
  1645  		v.reset(OpARM64BICconst)
  1646  		v.AuxInt = int64(int64(c) >> uint64(d))
  1647  		v.AddArg(x)
  1648  		return true
  1649  	}
  1650  	// match: (BICshiftRA x (SRAconst x [c]) [d])
  1651  	// cond: c==d
  1652  	// result: (MOVDconst [0])
  1653  	for {
  1654  		d := v.AuxInt
  1655  		x := v.Args[0]
  1656  		v_1 := v.Args[1]
  1657  		if v_1.Op != OpARM64SRAconst {
  1658  			break
  1659  		}
  1660  		c := v_1.AuxInt
  1661  		if x != v_1.Args[0] {
  1662  			break
  1663  		}
  1664  		if !(c == d) {
  1665  			break
  1666  		}
  1667  		v.reset(OpARM64MOVDconst)
  1668  		v.AuxInt = 0
  1669  		return true
  1670  	}
  1671  	return false
  1672  }
  1673  func rewriteValueARM64_OpARM64BICshiftRL(v *Value, config *Config) bool {
  1674  	b := v.Block
  1675  	_ = b
  1676  	// match: (BICshiftRL x (MOVDconst [c]) [d])
  1677  	// cond:
  1678  	// result: (BICconst x [int64(uint64(c)>>uint64(d))])
  1679  	for {
  1680  		d := v.AuxInt
  1681  		x := v.Args[0]
  1682  		v_1 := v.Args[1]
  1683  		if v_1.Op != OpARM64MOVDconst {
  1684  			break
  1685  		}
  1686  		c := v_1.AuxInt
  1687  		v.reset(OpARM64BICconst)
  1688  		v.AuxInt = int64(uint64(c) >> uint64(d))
  1689  		v.AddArg(x)
  1690  		return true
  1691  	}
  1692  	// match: (BICshiftRL x (SRLconst x [c]) [d])
  1693  	// cond: c==d
  1694  	// result: (MOVDconst [0])
  1695  	for {
  1696  		d := v.AuxInt
  1697  		x := v.Args[0]
  1698  		v_1 := v.Args[1]
  1699  		if v_1.Op != OpARM64SRLconst {
  1700  			break
  1701  		}
  1702  		c := v_1.AuxInt
  1703  		if x != v_1.Args[0] {
  1704  			break
  1705  		}
  1706  		if !(c == d) {
  1707  			break
  1708  		}
  1709  		v.reset(OpARM64MOVDconst)
  1710  		v.AuxInt = 0
  1711  		return true
  1712  	}
  1713  	return false
  1714  }
// rewriteValueARM64_OpARM64CMP applies the generated rewrite rules for
// OpARM64CMP: fold a constant or constant-shifted operand into CMPconst /
// CMPshift* ops. CMP is not commutative, so when the folded operand is on
// the left the result is wrapped in InvertFlags to compensate for the
// swapped comparison. Rules are tried in order; the first match rewrites v
// in place and returns true.
//
// Generated from gen/ARM64.rules — change the rules file and regenerate
// rather than editing here.
func rewriteValueARM64_OpARM64CMP(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (CMP x (MOVDconst [c]))
	// cond:
	// result: (CMPconst [c] x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64CMPconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (CMP (MOVDconst [c]) x)
	// cond:
	// result: (InvertFlags (CMPconst [c] x))
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		v.reset(OpARM64InvertFlags)
		v0 := b.NewValue0(v.Line, OpARM64CMPconst, TypeFlags)
		v0.AuxInt = c
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (CMP x (SLLconst [c] y))
	// cond:
	// result: (CMPshiftLL x y [c])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SLLconst {
			break
		}
		c := v_1.AuxInt
		y := v_1.Args[0]
		v.reset(OpARM64CMPshiftLL)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (CMP (SLLconst [c] y) x)
	// cond:
	// result: (InvertFlags (CMPshiftLL x y [c]))
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SLLconst {
			break
		}
		c := v_0.AuxInt
		y := v_0.Args[0]
		x := v.Args[1]
		v.reset(OpARM64InvertFlags)
		v0 := b.NewValue0(v.Line, OpARM64CMPshiftLL, TypeFlags)
		v0.AuxInt = c
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	// match: (CMP x (SRLconst [c] y))
	// cond:
	// result: (CMPshiftRL x y [c])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRLconst {
			break
		}
		c := v_1.AuxInt
		y := v_1.Args[0]
		v.reset(OpARM64CMPshiftRL)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (CMP (SRLconst [c] y) x)
	// cond:
	// result: (InvertFlags (CMPshiftRL x y [c]))
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SRLconst {
			break
		}
		c := v_0.AuxInt
		y := v_0.Args[0]
		x := v.Args[1]
		v.reset(OpARM64InvertFlags)
		v0 := b.NewValue0(v.Line, OpARM64CMPshiftRL, TypeFlags)
		v0.AuxInt = c
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	// match: (CMP x (SRAconst [c] y))
	// cond:
	// result: (CMPshiftRA x y [c])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRAconst {
			break
		}
		c := v_1.AuxInt
		y := v_1.Args[0]
		v.reset(OpARM64CMPshiftRA)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (CMP (SRAconst [c] y) x)
	// cond:
	// result: (InvertFlags (CMPshiftRA x y [c]))
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SRAconst {
			break
		}
		c := v_0.AuxInt
		y := v_0.Args[0]
		x := v.Args[1]
		v.reset(OpARM64InvertFlags)
		v0 := b.NewValue0(v.Line, OpARM64CMPshiftRA, TypeFlags)
		v0.AuxInt = c
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64CMPW applies the generated rewrite rules for
// OpARM64CMPW (32-bit compare): fold a constant operand into CMPWconst,
// truncating the constant to 32 bits via int64(int32(c)). When the constant
// is the left operand the result is wrapped in InvertFlags to compensate for
// the swapped comparison. Rules are tried in order; the first match rewrites
// v in place and returns true.
//
// Generated from gen/ARM64.rules — change the rules file and regenerate
// rather than editing here.
func rewriteValueARM64_OpARM64CMPW(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (CMPW x (MOVDconst [c]))
	// cond:
	// result: (CMPWconst [int64(int32(c))] x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64CMPWconst)
		v.AuxInt = int64(int32(c))
		v.AddArg(x)
		return true
	}
	// match: (CMPW (MOVDconst [c]) x)
	// cond:
	// result: (InvertFlags (CMPWconst [int64(int32(c))] x))
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		v.reset(OpARM64InvertFlags)
		v0 := b.NewValue0(v.Line, OpARM64CMPWconst, TypeFlags)
		v0.AuxInt = int64(int32(c))
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64CMPWconst applies the generated rewrite rules for
// OpARM64CMPWconst (32-bit compare against AuxInt): when the operand is a
// constant, the comparison is evaluated at compile time to one of the Flag*
// pseudo-values, which encode both the signed (LT/GT) and unsigned (ULT/UGT)
// outcomes. The final two rules exploit zero-extension: a MOVBUreg/MOVHUreg
// result fits in 8/16 bits, so it is known less-than any larger constant.
// Rules are tried in order; the first match rewrites v in place and returns
// true.
//
// Generated from gen/ARM64.rules — change the rules file and regenerate
// rather than editing here.
func rewriteValueARM64_OpARM64CMPWconst(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (CMPWconst (MOVDconst [x]) [y])
	// cond: int32(x)==int32(y)
	// result: (FlagEQ)
	for {
		y := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := v_0.AuxInt
		if !(int32(x) == int32(y)) {
			break
		}
		v.reset(OpARM64FlagEQ)
		return true
	}
	// match: (CMPWconst (MOVDconst [x]) [y])
	// cond: int32(x)<int32(y) && uint32(x)<uint32(y)
	// result: (FlagLT_ULT)
	for {
		y := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := v_0.AuxInt
		if !(int32(x) < int32(y) && uint32(x) < uint32(y)) {
			break
		}
		v.reset(OpARM64FlagLT_ULT)
		return true
	}
	// match: (CMPWconst (MOVDconst [x]) [y])
	// cond: int32(x)<int32(y) && uint32(x)>uint32(y)
	// result: (FlagLT_UGT)
	for {
		y := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := v_0.AuxInt
		if !(int32(x) < int32(y) && uint32(x) > uint32(y)) {
			break
		}
		v.reset(OpARM64FlagLT_UGT)
		return true
	}
	// match: (CMPWconst (MOVDconst [x]) [y])
	// cond: int32(x)>int32(y) && uint32(x)<uint32(y)
	// result: (FlagGT_ULT)
	for {
		y := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := v_0.AuxInt
		if !(int32(x) > int32(y) && uint32(x) < uint32(y)) {
			break
		}
		v.reset(OpARM64FlagGT_ULT)
		return true
	}
	// match: (CMPWconst (MOVDconst [x]) [y])
	// cond: int32(x)>int32(y) && uint32(x)>uint32(y)
	// result: (FlagGT_UGT)
	for {
		y := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := v_0.AuxInt
		if !(int32(x) > int32(y) && uint32(x) > uint32(y)) {
			break
		}
		v.reset(OpARM64FlagGT_UGT)
		return true
	}
	// match: (CMPWconst (MOVBUreg _) [c])
	// cond: 0xff < int32(c)
	// result: (FlagLT_ULT)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVBUreg {
			break
		}
		if !(0xff < int32(c)) {
			break
		}
		v.reset(OpARM64FlagLT_ULT)
		return true
	}
	// match: (CMPWconst (MOVHUreg _) [c])
	// cond: 0xffff < int32(c)
	// result: (FlagLT_ULT)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVHUreg {
			break
		}
		if !(0xffff < int32(c)) {
			break
		}
		v.reset(OpARM64FlagLT_ULT)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64CMPconst rewrites a 64-bit compare-with-constant
// (CMPconst; the constant lives in v.AuxInt) into a concrete flag-constant op
// whenever the outcome of the comparison is statically known: either because
// the compared operand is itself a MOVDconst, or because its op bounds its
// possible value range (zero-extending loads, masked values, shifted values).
// Returns true if v was rewritten.
func rewriteValueARM64_OpARM64CMPconst(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (CMPconst  (MOVDconst [x]) [y])
	// cond: x==y
	// result: (FlagEQ)
	for {
		y := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := v_0.AuxInt
		if !(x == y) {
			break
		}
		v.reset(OpARM64FlagEQ)
		return true
	}
	// The next four cases classify x vs y under both signed and unsigned
	// interpretation, since ARM64 flag results encode both orderings.
	// match: (CMPconst  (MOVDconst [x]) [y])
	// cond: int64(x)<int64(y) && uint64(x)<uint64(y)
	// result: (FlagLT_ULT)
	for {
		y := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := v_0.AuxInt
		if !(int64(x) < int64(y) && uint64(x) < uint64(y)) {
			break
		}
		v.reset(OpARM64FlagLT_ULT)
		return true
	}
	// match: (CMPconst  (MOVDconst [x]) [y])
	// cond: int64(x)<int64(y) && uint64(x)>uint64(y)
	// result: (FlagLT_UGT)
	for {
		y := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := v_0.AuxInt
		if !(int64(x) < int64(y) && uint64(x) > uint64(y)) {
			break
		}
		v.reset(OpARM64FlagLT_UGT)
		return true
	}
	// match: (CMPconst  (MOVDconst [x]) [y])
	// cond: int64(x)>int64(y) && uint64(x)<uint64(y)
	// result: (FlagGT_ULT)
	for {
		y := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := v_0.AuxInt
		if !(int64(x) > int64(y) && uint64(x) < uint64(y)) {
			break
		}
		v.reset(OpARM64FlagGT_ULT)
		return true
	}
	// match: (CMPconst  (MOVDconst [x]) [y])
	// cond: int64(x)>int64(y) && uint64(x)>uint64(y)
	// result: (FlagGT_UGT)
	for {
		y := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := v_0.AuxInt
		if !(int64(x) > int64(y) && uint64(x) > uint64(y)) {
			break
		}
		v.reset(OpARM64FlagGT_UGT)
		return true
	}
	// A MOVBUreg result fits in 8 bits, so comparing it against a constant
	// larger than 0xff is statically "less than" (both orderings: FlagLT_ULT).
	// match: (CMPconst (MOVBUreg _) [c])
	// cond: 0xff < c
	// result: (FlagLT_ULT)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVBUreg {
			break
		}
		if !(0xff < c) {
			break
		}
		v.reset(OpARM64FlagLT_ULT)
		return true
	}
	// Same idea for a 16-bit zero-extended value vs a constant above 0xffff.
	// match: (CMPconst (MOVHUreg _) [c])
	// cond: 0xffff < c
	// result: (FlagLT_ULT)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVHUreg {
			break
		}
		if !(0xffff < c) {
			break
		}
		v.reset(OpARM64FlagLT_ULT)
		return true
	}
	// Same idea for a 32-bit zero-extended value vs a constant above 0xffffffff.
	// match: (CMPconst (MOVWUreg _) [c])
	// cond: 0xffffffff < c
	// result: (FlagLT_ULT)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVWUreg {
			break
		}
		if !(0xffffffff < c) {
			break
		}
		v.reset(OpARM64FlagLT_ULT)
		return true
	}
	// An AND with a non-negative mask m bounds the value to [0, m],
	// so it is always less than any n > m.
	// match: (CMPconst (ANDconst _ [m]) [n])
	// cond: 0 <= m && m < n
	// result: (FlagLT_ULT)
	for {
		n := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ANDconst {
			break
		}
		m := v_0.AuxInt
		if !(0 <= m && m < n) {
			break
		}
		v.reset(OpARM64FlagLT_ULT)
		return true
	}
	// A logical right shift by c leaves at most 64-c significant bits, so the
	// result is below 1<<(64-c); if n is at least that bound, the compare is
	// statically "less than".
	// match: (CMPconst (SRLconst _ [c]) [n])
	// cond: 0 <= n && 0 < c && c <= 63 && (1<<uint64(64-c)) <= uint64(n)
	// result: (FlagLT_ULT)
	for {
		n := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SRLconst {
			break
		}
		c := v_0.AuxInt
		if !(0 <= n && 0 < c && c <= 63 && (1<<uint64(64-c)) <= uint64(n)) {
			break
		}
		v.reset(OpARM64FlagLT_ULT)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64CMPshiftLL simplifies a compare whose second
// operand carries a left-shift-by-constant (AuxInt d). If the first operand
// is a constant, the compare is flipped into InvertFlags(CMPconst ...) so the
// constant becomes the immediate; if the shifted operand is a constant, the
// shift is folded into the immediate. Returns true if v was rewritten.
func rewriteValueARM64_OpARM64CMPshiftLL(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (CMPshiftLL (MOVDconst [c]) x [d])
	// cond:
	// result: (InvertFlags (CMPconst [c] (SLLconst <x.Type> x [d])))
	for {
		d := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		v.reset(OpARM64InvertFlags)
		v0 := b.NewValue0(v.Line, OpARM64CMPconst, TypeFlags)
		v0.AuxInt = c
		v1 := b.NewValue0(v.Line, OpARM64SLLconst, x.Type)
		v1.AuxInt = d
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// match: (CMPshiftLL x (MOVDconst [c]) [d])
	// cond:
	// result: (CMPconst x [int64(uint64(c)<<uint64(d))])
	for {
		d := v.AuxInt
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64CMPconst)
		v.AuxInt = int64(uint64(c) << uint64(d))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64CMPshiftRA simplifies a compare whose second
// operand carries an arithmetic-right-shift-by-constant (AuxInt d): a
// constant first operand flips into InvertFlags(CMPconst ...), and a
// constant shifted operand folds the (sign-preserving) shift into the
// immediate. Returns true if v was rewritten.
func rewriteValueARM64_OpARM64CMPshiftRA(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (CMPshiftRA (MOVDconst [c]) x [d])
	// cond:
	// result: (InvertFlags (CMPconst [c] (SRAconst <x.Type> x [d])))
	for {
		d := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		v.reset(OpARM64InvertFlags)
		v0 := b.NewValue0(v.Line, OpARM64CMPconst, TypeFlags)
		v0.AuxInt = c
		v1 := b.NewValue0(v.Line, OpARM64SRAconst, x.Type)
		v1.AuxInt = d
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// match: (CMPshiftRA x (MOVDconst [c]) [d])
	// cond:
	// result: (CMPconst x [int64(int64(c)>>uint64(d))])
	for {
		d := v.AuxInt
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64CMPconst)
		// Signed shift here (int64), matching the arithmetic-shift semantics.
		v.AuxInt = int64(int64(c) >> uint64(d))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64CMPshiftRL simplifies a compare whose second
// operand carries a logical-right-shift-by-constant (AuxInt d): a constant
// first operand flips into InvertFlags(CMPconst ...), and a constant shifted
// operand folds the (zero-filling) shift into the immediate. Returns true if
// v was rewritten.
func rewriteValueARM64_OpARM64CMPshiftRL(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (CMPshiftRL (MOVDconst [c]) x [d])
	// cond:
	// result: (InvertFlags (CMPconst [c] (SRLconst <x.Type> x [d])))
	for {
		d := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		v.reset(OpARM64InvertFlags)
		v0 := b.NewValue0(v.Line, OpARM64CMPconst, TypeFlags)
		v0.AuxInt = c
		v1 := b.NewValue0(v.Line, OpARM64SRLconst, x.Type)
		v1.AuxInt = d
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// match: (CMPshiftRL x (MOVDconst [c]) [d])
	// cond:
	// result: (CMPconst x [int64(uint64(c)>>uint64(d))])
	for {
		d := v.AuxInt
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64CMPconst)
		// Unsigned shift here (uint64), matching the logical-shift semantics.
		v.AuxInt = int64(uint64(c) >> uint64(d))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64CSELULT simplifies a conditional select on the
// unsigned-less-than condition (CSELULT x y flag: picks x when ULT holds,
// y otherwise, as shown by the flag-constant cases below). A zero second
// operand becomes the cheaper CSELULT0 form, and a statically known flag
// collapses the select to one of its operands. Returns true if v was
// rewritten.
func rewriteValueARM64_OpARM64CSELULT(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (CSELULT x (MOVDconst [0]) flag)
	// cond:
	// result: (CSELULT0 x flag)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		flag := v.Args[2]
		v.reset(OpARM64CSELULT0)
		v.AddArg(x)
		v.AddArg(flag)
		return true
	}
	// match: (CSELULT _ y (FlagEQ))
	// cond:
	// result: y
	for {
		y := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64FlagEQ {
			break
		}
		v.reset(OpCopy)
		v.Type = y.Type
		v.AddArg(y)
		return true
	}
	// match: (CSELULT x _ (FlagLT_ULT))
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64FlagLT_ULT {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (CSELULT _ y (FlagLT_UGT))
	// cond:
	// result: y
	for {
		y := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64FlagLT_UGT {
			break
		}
		v.reset(OpCopy)
		v.Type = y.Type
		v.AddArg(y)
		return true
	}
	// match: (CSELULT x _ (FlagGT_ULT))
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64FlagGT_ULT {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (CSELULT _ y (FlagGT_UGT))
	// cond:
	// result: y
	for {
		y := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64FlagGT_UGT {
			break
		}
		v.reset(OpCopy)
		v.Type = y.Type
		v.AddArg(y)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64CSELULT0 simplifies the select-or-zero form
// (CSELULT0 x flag: x when ULT holds, zero otherwise, as shown by the
// flag-constant cases below) when the flag argument is statically known:
// ULT flags collapse to x, the others to MOVDconst [0]. Returns true if v
// was rewritten.
func rewriteValueARM64_OpARM64CSELULT0(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (CSELULT0 _ (FlagEQ))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_1 := v.Args[1]
		if v_1.Op != OpARM64FlagEQ {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (CSELULT0 x (FlagLT_ULT))
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64FlagLT_ULT {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (CSELULT0 _ (FlagLT_UGT))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_1 := v.Args[1]
		if v_1.Op != OpARM64FlagLT_UGT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (CSELULT0 x (FlagGT_ULT))
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64FlagGT_ULT {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (CSELULT0 _ (FlagGT_UGT))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_1 := v.Args[1]
		if v_1.Op != OpARM64FlagGT_UGT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	return false
}
  2462  func rewriteValueARM64_OpARM64DIV(v *Value, config *Config) bool {
  2463  	b := v.Block
  2464  	_ = b
  2465  	// match: (DIV   (MOVDconst [c]) (MOVDconst [d]))
  2466  	// cond:
  2467  	// result: (MOVDconst [int64(c)/int64(d)])
  2468  	for {
  2469  		v_0 := v.Args[0]
  2470  		if v_0.Op != OpARM64MOVDconst {
  2471  			break
  2472  		}
  2473  		c := v_0.AuxInt
  2474  		v_1 := v.Args[1]
  2475  		if v_1.Op != OpARM64MOVDconst {
  2476  			break
  2477  		}
  2478  		d := v_1.AuxInt
  2479  		v.reset(OpARM64MOVDconst)
  2480  		v.AuxInt = int64(c) / int64(d)
  2481  		return true
  2482  	}
  2483  	return false
  2484  }
  2485  func rewriteValueARM64_OpARM64DIVW(v *Value, config *Config) bool {
  2486  	b := v.Block
  2487  	_ = b
  2488  	// match: (DIVW  (MOVDconst [c]) (MOVDconst [d]))
  2489  	// cond:
  2490  	// result: (MOVDconst [int64(int32(c)/int32(d))])
  2491  	for {
  2492  		v_0 := v.Args[0]
  2493  		if v_0.Op != OpARM64MOVDconst {
  2494  			break
  2495  		}
  2496  		c := v_0.AuxInt
  2497  		v_1 := v.Args[1]
  2498  		if v_1.Op != OpARM64MOVDconst {
  2499  			break
  2500  		}
  2501  		d := v_1.AuxInt
  2502  		v.reset(OpARM64MOVDconst)
  2503  		v.AuxInt = int64(int32(c) / int32(d))
  2504  		return true
  2505  	}
  2506  	return false
  2507  }
// rewriteValueARM64_OpARM64Equal lowers the Equal pseudo-op (flags -> 0/1)
// to a constant when its flags argument is a known flag constant (1 only for
// FlagEQ), and propagates through InvertFlags unchanged since equality is
// symmetric under operand swap. Returns true if v was rewritten.
func rewriteValueARM64_OpARM64Equal(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Equal (FlagEQ))
	// cond:
	// result: (MOVDconst [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagEQ {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	}
	// match: (Equal (FlagLT_ULT))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagLT_ULT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (Equal (FlagLT_UGT))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagLT_UGT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (Equal (FlagGT_ULT))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagGT_ULT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (Equal (FlagGT_UGT))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagGT_UGT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (Equal (InvertFlags x))
	// cond:
	// result: (Equal x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64InvertFlags {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64Equal)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64FMOVDload folds address arithmetic into the
// 64-bit FP load: an ADDconst or MOVDaddr feeding the address is absorbed
// into the load's offset (and merged symbol), provided the combined offset
// is 8-byte aligned or fits the +/-255 unscaled-offset range — presumably
// mirroring the ARM64 addressing-mode encodings; args/autos are excluded
// from the unscaled case. Returns true if v was rewritten.
func rewriteValueARM64_OpARM64FMOVDload(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (FMOVDload [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: (off1+off2)%8==0 || off1+off2<256 && off1+off2>-256 && !isArg(sym) && !isAuto(sym)
	// result: (FMOVDload [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !((off1+off2)%8 == 0 || off1+off2 < 256 && off1+off2 > -256 && !isArg(sym) && !isAuto(sym)) {
			break
		}
		v.reset(OpARM64FMOVDload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (FMOVDload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) 	&& ((off1+off2)%8==0 || off1+off2<256 && off1+off2>-256 && !isArg(sym1) && !isAuto(sym1))
	// result: (FMOVDload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && ((off1+off2)%8 == 0 || off1+off2 < 256 && off1+off2 > -256 && !isArg(sym1) && !isAuto(sym1))) {
			break
		}
		v.reset(OpARM64FMOVDload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64FMOVDstore folds address arithmetic into the
// 64-bit FP store: an ADDconst or MOVDaddr feeding the address is absorbed
// into the store's offset (and merged symbol) under the same alignment /
// +/-255 unscaled-range condition as FMOVDload. Returns true if v was
// rewritten.
func rewriteValueARM64_OpARM64FMOVDstore(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (FMOVDstore [off1] {sym} (ADDconst [off2] ptr) val mem)
	// cond: (off1+off2)%8==0 || off1+off2<256 && off1+off2>-256 && !isArg(sym) && !isAuto(sym)
	// result: (FMOVDstore [off1+off2] {sym} ptr val mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !((off1+off2)%8 == 0 || off1+off2 < 256 && off1+off2 > -256 && !isArg(sym) && !isAuto(sym)) {
			break
		}
		v.reset(OpARM64FMOVDstore)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (FMOVDstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) 	&& ((off1+off2)%8==0 || off1+off2<256 && off1+off2>-256 && !isArg(sym1) && !isAuto(sym1))
	// result: (FMOVDstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(canMergeSym(sym1, sym2) && ((off1+off2)%8 == 0 || off1+off2 < 256 && off1+off2 > -256 && !isArg(sym1) && !isAuto(sym1))) {
			break
		}
		v.reset(OpARM64FMOVDstore)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64FMOVSload folds address arithmetic into the
// 32-bit FP load: an ADDconst or MOVDaddr feeding the address is absorbed
// into the load's offset (and merged symbol), with 4-byte alignment (the
// single-precision element size) replacing FMOVDload's 8-byte condition.
// Returns true if v was rewritten.
func rewriteValueARM64_OpARM64FMOVSload(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (FMOVSload [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: (off1+off2)%4==0 || off1+off2<256 && off1+off2>-256 && !isArg(sym) && !isAuto(sym)
	// result: (FMOVSload [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !((off1+off2)%4 == 0 || off1+off2 < 256 && off1+off2 > -256 && !isArg(sym) && !isAuto(sym)) {
			break
		}
		v.reset(OpARM64FMOVSload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (FMOVSload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) 	&& ((off1+off2)%4==0 || off1+off2<256 && off1+off2>-256 && !isArg(sym1) && !isAuto(sym1))
	// result: (FMOVSload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && ((off1+off2)%4 == 0 || off1+off2 < 256 && off1+off2 > -256 && !isArg(sym1) && !isAuto(sym1))) {
			break
		}
		v.reset(OpARM64FMOVSload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64FMOVSstore folds address arithmetic into the
// 32-bit FP store: an ADDconst or MOVDaddr feeding the address is absorbed
// into the store's offset (and merged symbol) under the same 4-byte
// alignment / +/-255 unscaled-range condition as FMOVSload. Returns true if
// v was rewritten.
func rewriteValueARM64_OpARM64FMOVSstore(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (FMOVSstore [off1] {sym} (ADDconst [off2] ptr) val mem)
	// cond: (off1+off2)%4==0 || off1+off2<256 && off1+off2>-256 && !isArg(sym) && !isAuto(sym)
	// result: (FMOVSstore [off1+off2] {sym} ptr val mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !((off1+off2)%4 == 0 || off1+off2 < 256 && off1+off2 > -256 && !isArg(sym) && !isAuto(sym)) {
			break
		}
		v.reset(OpARM64FMOVSstore)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (FMOVSstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) 	&& ((off1+off2)%4==0 || off1+off2<256 && off1+off2>-256 && !isArg(sym1) && !isAuto(sym1))
	// result: (FMOVSstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(canMergeSym(sym1, sym2) && ((off1+off2)%4 == 0 || off1+off2 < 256 && off1+off2 > -256 && !isArg(sym1) && !isAuto(sym1))) {
			break
		}
		v.reset(OpARM64FMOVSstore)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64GreaterEqual lowers the signed >= pseudo-op
// (flags -> 0/1) to a constant for known flags (1 for FlagEQ and the
// signed-GT flags, 0 for the signed-LT flags) and flips to LessEqual
// through InvertFlags, since swapping compare operands reverses the
// ordering. Returns true if v was rewritten.
func rewriteValueARM64_OpARM64GreaterEqual(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (GreaterEqual (FlagEQ))
	// cond:
	// result: (MOVDconst [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagEQ {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	}
	// match: (GreaterEqual (FlagLT_ULT))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagLT_ULT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (GreaterEqual (FlagLT_UGT))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagLT_UGT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (GreaterEqual (FlagGT_ULT))
	// cond:
	// result: (MOVDconst [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagGT_ULT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	}
	// match: (GreaterEqual (FlagGT_UGT))
	// cond:
	// result: (MOVDconst [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagGT_UGT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	}
	// match: (GreaterEqual (InvertFlags x))
	// cond:
	// result: (LessEqual x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64InvertFlags {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64LessEqual)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64GreaterEqualU lowers the unsigned >= pseudo-op
// (flags -> 0/1) to a constant for known flags — it keys on the U half of
// the flag name (1 for FlagEQ and *_UGT, 0 for *_ULT) — and flips to
// LessEqualU through InvertFlags. Returns true if v was rewritten.
func rewriteValueARM64_OpARM64GreaterEqualU(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (GreaterEqualU (FlagEQ))
	// cond:
	// result: (MOVDconst [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagEQ {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	}
	// match: (GreaterEqualU (FlagLT_ULT))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagLT_ULT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (GreaterEqualU (FlagLT_UGT))
	// cond:
	// result: (MOVDconst [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagLT_UGT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	}
	// match: (GreaterEqualU (FlagGT_ULT))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagGT_ULT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (GreaterEqualU (FlagGT_UGT))
	// cond:
	// result: (MOVDconst [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagGT_UGT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	}
	// match: (GreaterEqualU (InvertFlags x))
	// cond:
	// result: (LessEqualU x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64InvertFlags {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64LessEqualU)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64GreaterThan lowers the signed > pseudo-op
// (flags -> 0/1) to a constant for known flags (1 only for the signed-GT
// flags) and flips to LessThan through InvertFlags. Returns true if v was
// rewritten.
func rewriteValueARM64_OpARM64GreaterThan(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (GreaterThan (FlagEQ))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagEQ {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (GreaterThan (FlagLT_ULT))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagLT_ULT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (GreaterThan (FlagLT_UGT))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagLT_UGT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (GreaterThan (FlagGT_ULT))
	// cond:
	// result: (MOVDconst [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagGT_ULT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	}
	// match: (GreaterThan (FlagGT_UGT))
	// cond:
	// result: (MOVDconst [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagGT_UGT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	}
	// match: (GreaterThan (InvertFlags x))
	// cond:
	// result: (LessThan x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64InvertFlags {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64LessThan)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64GreaterThanU lowers the unsigned > pseudo-op
// (flags -> 0/1) to a constant for known flags (1 only for the *_UGT
// flags) and flips to LessThanU through InvertFlags. Returns true if v was
// rewritten.
func rewriteValueARM64_OpARM64GreaterThanU(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (GreaterThanU (FlagEQ))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagEQ {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (GreaterThanU (FlagLT_ULT))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagLT_ULT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (GreaterThanU (FlagLT_UGT))
	// cond:
	// result: (MOVDconst [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagLT_UGT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	}
	// match: (GreaterThanU (FlagGT_ULT))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagGT_ULT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (GreaterThanU (FlagGT_UGT))
	// cond:
	// result: (MOVDconst [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagGT_UGT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	}
	// match: (GreaterThanU (InvertFlags x))
	// cond:
	// result: (LessThanU x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64InvertFlags {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64LessThanU)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64LessEqual lowers the signed <= pseudo-op
// (flags -> 0/1) to a constant for known flags (1 for FlagEQ and the
// signed-LT flags, 0 for the signed-GT flags) and flips to GreaterEqual
// through InvertFlags. Returns true if v was rewritten.
func rewriteValueARM64_OpARM64LessEqual(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (LessEqual (FlagEQ))
	// cond:
	// result: (MOVDconst [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagEQ {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	}
	// match: (LessEqual (FlagLT_ULT))
	// cond:
	// result: (MOVDconst [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagLT_ULT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	}
	// match: (LessEqual (FlagLT_UGT))
	// cond:
	// result: (MOVDconst [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagLT_UGT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	}
	// match: (LessEqual (FlagGT_ULT))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagGT_ULT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (LessEqual (FlagGT_UGT))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagGT_UGT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (LessEqual (InvertFlags x))
	// cond:
	// result: (GreaterEqual x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64InvertFlags {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64GreaterEqual)
		v.AddArg(x)
		return true
	}
	return false
}
  3192  func rewriteValueARM64_OpARM64LessEqualU(v *Value, config *Config) bool {
  3193  	b := v.Block
  3194  	_ = b
  3195  	// match: (LessEqualU (FlagEQ))
  3196  	// cond:
  3197  	// result: (MOVDconst [1])
  3198  	for {
  3199  		v_0 := v.Args[0]
  3200  		if v_0.Op != OpARM64FlagEQ {
  3201  			break
  3202  		}
  3203  		v.reset(OpARM64MOVDconst)
  3204  		v.AuxInt = 1
  3205  		return true
  3206  	}
  3207  	// match: (LessEqualU (FlagLT_ULT))
  3208  	// cond:
  3209  	// result: (MOVDconst [1])
  3210  	for {
  3211  		v_0 := v.Args[0]
  3212  		if v_0.Op != OpARM64FlagLT_ULT {
  3213  			break
  3214  		}
  3215  		v.reset(OpARM64MOVDconst)
  3216  		v.AuxInt = 1
  3217  		return true
  3218  	}
  3219  	// match: (LessEqualU (FlagLT_UGT))
  3220  	// cond:
  3221  	// result: (MOVDconst [0])
  3222  	for {
  3223  		v_0 := v.Args[0]
  3224  		if v_0.Op != OpARM64FlagLT_UGT {
  3225  			break
  3226  		}
  3227  		v.reset(OpARM64MOVDconst)
  3228  		v.AuxInt = 0
  3229  		return true
  3230  	}
  3231  	// match: (LessEqualU (FlagGT_ULT))
  3232  	// cond:
  3233  	// result: (MOVDconst [1])
  3234  	for {
  3235  		v_0 := v.Args[0]
  3236  		if v_0.Op != OpARM64FlagGT_ULT {
  3237  			break
  3238  		}
  3239  		v.reset(OpARM64MOVDconst)
  3240  		v.AuxInt = 1
  3241  		return true
  3242  	}
  3243  	// match: (LessEqualU (FlagGT_UGT))
  3244  	// cond:
  3245  	// result: (MOVDconst [0])
  3246  	for {
  3247  		v_0 := v.Args[0]
  3248  		if v_0.Op != OpARM64FlagGT_UGT {
  3249  			break
  3250  		}
  3251  		v.reset(OpARM64MOVDconst)
  3252  		v.AuxInt = 0
  3253  		return true
  3254  	}
  3255  	// match: (LessEqualU (InvertFlags x))
  3256  	// cond:
  3257  	// result: (GreaterEqualU x)
  3258  	for {
  3259  		v_0 := v.Args[0]
  3260  		if v_0.Op != OpARM64InvertFlags {
  3261  			break
  3262  		}
  3263  		x := v_0.Args[0]
  3264  		v.reset(OpARM64GreaterEqualU)
  3265  		v.AddArg(x)
  3266  		return true
  3267  	}
  3268  	return false
  3269  }
  3270  func rewriteValueARM64_OpARM64LessThan(v *Value, config *Config) bool {
  3271  	b := v.Block
  3272  	_ = b
  3273  	// match: (LessThan (FlagEQ))
  3274  	// cond:
  3275  	// result: (MOVDconst [0])
  3276  	for {
  3277  		v_0 := v.Args[0]
  3278  		if v_0.Op != OpARM64FlagEQ {
  3279  			break
  3280  		}
  3281  		v.reset(OpARM64MOVDconst)
  3282  		v.AuxInt = 0
  3283  		return true
  3284  	}
  3285  	// match: (LessThan (FlagLT_ULT))
  3286  	// cond:
  3287  	// result: (MOVDconst [1])
  3288  	for {
  3289  		v_0 := v.Args[0]
  3290  		if v_0.Op != OpARM64FlagLT_ULT {
  3291  			break
  3292  		}
  3293  		v.reset(OpARM64MOVDconst)
  3294  		v.AuxInt = 1
  3295  		return true
  3296  	}
  3297  	// match: (LessThan (FlagLT_UGT))
  3298  	// cond:
  3299  	// result: (MOVDconst [1])
  3300  	for {
  3301  		v_0 := v.Args[0]
  3302  		if v_0.Op != OpARM64FlagLT_UGT {
  3303  			break
  3304  		}
  3305  		v.reset(OpARM64MOVDconst)
  3306  		v.AuxInt = 1
  3307  		return true
  3308  	}
  3309  	// match: (LessThan (FlagGT_ULT))
  3310  	// cond:
  3311  	// result: (MOVDconst [0])
  3312  	for {
  3313  		v_0 := v.Args[0]
  3314  		if v_0.Op != OpARM64FlagGT_ULT {
  3315  			break
  3316  		}
  3317  		v.reset(OpARM64MOVDconst)
  3318  		v.AuxInt = 0
  3319  		return true
  3320  	}
  3321  	// match: (LessThan (FlagGT_UGT))
  3322  	// cond:
  3323  	// result: (MOVDconst [0])
  3324  	for {
  3325  		v_0 := v.Args[0]
  3326  		if v_0.Op != OpARM64FlagGT_UGT {
  3327  			break
  3328  		}
  3329  		v.reset(OpARM64MOVDconst)
  3330  		v.AuxInt = 0
  3331  		return true
  3332  	}
  3333  	// match: (LessThan (InvertFlags x))
  3334  	// cond:
  3335  	// result: (GreaterThan x)
  3336  	for {
  3337  		v_0 := v.Args[0]
  3338  		if v_0.Op != OpARM64InvertFlags {
  3339  			break
  3340  		}
  3341  		x := v_0.Args[0]
  3342  		v.reset(OpARM64GreaterThan)
  3343  		v.AddArg(x)
  3344  		return true
  3345  	}
  3346  	return false
  3347  }
  3348  func rewriteValueARM64_OpARM64LessThanU(v *Value, config *Config) bool {
  3349  	b := v.Block
  3350  	_ = b
  3351  	// match: (LessThanU (FlagEQ))
  3352  	// cond:
  3353  	// result: (MOVDconst [0])
  3354  	for {
  3355  		v_0 := v.Args[0]
  3356  		if v_0.Op != OpARM64FlagEQ {
  3357  			break
  3358  		}
  3359  		v.reset(OpARM64MOVDconst)
  3360  		v.AuxInt = 0
  3361  		return true
  3362  	}
  3363  	// match: (LessThanU (FlagLT_ULT))
  3364  	// cond:
  3365  	// result: (MOVDconst [1])
  3366  	for {
  3367  		v_0 := v.Args[0]
  3368  		if v_0.Op != OpARM64FlagLT_ULT {
  3369  			break
  3370  		}
  3371  		v.reset(OpARM64MOVDconst)
  3372  		v.AuxInt = 1
  3373  		return true
  3374  	}
  3375  	// match: (LessThanU (FlagLT_UGT))
  3376  	// cond:
  3377  	// result: (MOVDconst [0])
  3378  	for {
  3379  		v_0 := v.Args[0]
  3380  		if v_0.Op != OpARM64FlagLT_UGT {
  3381  			break
  3382  		}
  3383  		v.reset(OpARM64MOVDconst)
  3384  		v.AuxInt = 0
  3385  		return true
  3386  	}
  3387  	// match: (LessThanU (FlagGT_ULT))
  3388  	// cond:
  3389  	// result: (MOVDconst [1])
  3390  	for {
  3391  		v_0 := v.Args[0]
  3392  		if v_0.Op != OpARM64FlagGT_ULT {
  3393  			break
  3394  		}
  3395  		v.reset(OpARM64MOVDconst)
  3396  		v.AuxInt = 1
  3397  		return true
  3398  	}
  3399  	// match: (LessThanU (FlagGT_UGT))
  3400  	// cond:
  3401  	// result: (MOVDconst [0])
  3402  	for {
  3403  		v_0 := v.Args[0]
  3404  		if v_0.Op != OpARM64FlagGT_UGT {
  3405  			break
  3406  		}
  3407  		v.reset(OpARM64MOVDconst)
  3408  		v.AuxInt = 0
  3409  		return true
  3410  	}
  3411  	// match: (LessThanU (InvertFlags x))
  3412  	// cond:
  3413  	// result: (GreaterThanU x)
  3414  	for {
  3415  		v_0 := v.Args[0]
  3416  		if v_0.Op != OpARM64InvertFlags {
  3417  			break
  3418  		}
  3419  		x := v_0.Args[0]
  3420  		v.reset(OpARM64GreaterThanU)
  3421  		v.AddArg(x)
  3422  		return true
  3423  	}
  3424  	return false
  3425  }
  3426  func rewriteValueARM64_OpARM64MOD(v *Value, config *Config) bool {
  3427  	b := v.Block
  3428  	_ = b
  3429  	// match: (MOD   (MOVDconst [c]) (MOVDconst [d]))
  3430  	// cond:
  3431  	// result: (MOVDconst [int64(c)%int64(d)])
  3432  	for {
  3433  		v_0 := v.Args[0]
  3434  		if v_0.Op != OpARM64MOVDconst {
  3435  			break
  3436  		}
  3437  		c := v_0.AuxInt
  3438  		v_1 := v.Args[1]
  3439  		if v_1.Op != OpARM64MOVDconst {
  3440  			break
  3441  		}
  3442  		d := v_1.AuxInt
  3443  		v.reset(OpARM64MOVDconst)
  3444  		v.AuxInt = int64(c) % int64(d)
  3445  		return true
  3446  	}
  3447  	return false
  3448  }
  3449  func rewriteValueARM64_OpARM64MODW(v *Value, config *Config) bool {
  3450  	b := v.Block
  3451  	_ = b
  3452  	// match: (MODW  (MOVDconst [c]) (MOVDconst [d]))
  3453  	// cond:
  3454  	// result: (MOVDconst [int64(int32(c)%int32(d))])
  3455  	for {
  3456  		v_0 := v.Args[0]
  3457  		if v_0.Op != OpARM64MOVDconst {
  3458  			break
  3459  		}
  3460  		c := v_0.AuxInt
  3461  		v_1 := v.Args[1]
  3462  		if v_1.Op != OpARM64MOVDconst {
  3463  			break
  3464  		}
  3465  		d := v_1.AuxInt
  3466  		v.reset(OpARM64MOVDconst)
  3467  		v.AuxInt = int64(int32(c) % int32(d))
  3468  		return true
  3469  	}
  3470  	return false
  3471  }
  3472  func rewriteValueARM64_OpARM64MOVBUload(v *Value, config *Config) bool {
  3473  	b := v.Block
  3474  	_ = b
  3475  	// match: (MOVBUload [off1] {sym} (ADDconst [off2] ptr) mem)
  3476  	// cond:
  3477  	// result: (MOVBUload [off1+off2] {sym} ptr mem)
  3478  	for {
  3479  		off1 := v.AuxInt
  3480  		sym := v.Aux
  3481  		v_0 := v.Args[0]
  3482  		if v_0.Op != OpARM64ADDconst {
  3483  			break
  3484  		}
  3485  		off2 := v_0.AuxInt
  3486  		ptr := v_0.Args[0]
  3487  		mem := v.Args[1]
  3488  		v.reset(OpARM64MOVBUload)
  3489  		v.AuxInt = off1 + off2
  3490  		v.Aux = sym
  3491  		v.AddArg(ptr)
  3492  		v.AddArg(mem)
  3493  		return true
  3494  	}
  3495  	// match: (MOVBUload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
  3496  	// cond: canMergeSym(sym1,sym2)
  3497  	// result: (MOVBUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
  3498  	for {
  3499  		off1 := v.AuxInt
  3500  		sym1 := v.Aux
  3501  		v_0 := v.Args[0]
  3502  		if v_0.Op != OpARM64MOVDaddr {
  3503  			break
  3504  		}
  3505  		off2 := v_0.AuxInt
  3506  		sym2 := v_0.Aux
  3507  		ptr := v_0.Args[0]
  3508  		mem := v.Args[1]
  3509  		if !(canMergeSym(sym1, sym2)) {
  3510  			break
  3511  		}
  3512  		v.reset(OpARM64MOVBUload)
  3513  		v.AuxInt = off1 + off2
  3514  		v.Aux = mergeSym(sym1, sym2)
  3515  		v.AddArg(ptr)
  3516  		v.AddArg(mem)
  3517  		return true
  3518  	}
  3519  	// match: (MOVBUload [off] {sym} ptr (MOVBstorezero [off2] {sym2} ptr2 _))
  3520  	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
  3521  	// result: (MOVDconst [0])
  3522  	for {
  3523  		off := v.AuxInt
  3524  		sym := v.Aux
  3525  		ptr := v.Args[0]
  3526  		v_1 := v.Args[1]
  3527  		if v_1.Op != OpARM64MOVBstorezero {
  3528  			break
  3529  		}
  3530  		off2 := v_1.AuxInt
  3531  		sym2 := v_1.Aux
  3532  		ptr2 := v_1.Args[0]
  3533  		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
  3534  			break
  3535  		}
  3536  		v.reset(OpARM64MOVDconst)
  3537  		v.AuxInt = 0
  3538  		return true
  3539  	}
  3540  	return false
  3541  }
  3542  func rewriteValueARM64_OpARM64MOVBUreg(v *Value, config *Config) bool {
  3543  	b := v.Block
  3544  	_ = b
  3545  	// match: (MOVBUreg x:(MOVBUload _ _))
  3546  	// cond:
  3547  	// result: (MOVDreg x)
  3548  	for {
  3549  		x := v.Args[0]
  3550  		if x.Op != OpARM64MOVBUload {
  3551  			break
  3552  		}
  3553  		v.reset(OpARM64MOVDreg)
  3554  		v.AddArg(x)
  3555  		return true
  3556  	}
  3557  	// match: (MOVBUreg x:(MOVBUreg _))
  3558  	// cond:
  3559  	// result: (MOVDreg x)
  3560  	for {
  3561  		x := v.Args[0]
  3562  		if x.Op != OpARM64MOVBUreg {
  3563  			break
  3564  		}
  3565  		v.reset(OpARM64MOVDreg)
  3566  		v.AddArg(x)
  3567  		return true
  3568  	}
  3569  	// match: (MOVBUreg (MOVDconst [c]))
  3570  	// cond:
  3571  	// result: (MOVDconst [int64(uint8(c))])
  3572  	for {
  3573  		v_0 := v.Args[0]
  3574  		if v_0.Op != OpARM64MOVDconst {
  3575  			break
  3576  		}
  3577  		c := v_0.AuxInt
  3578  		v.reset(OpARM64MOVDconst)
  3579  		v.AuxInt = int64(uint8(c))
  3580  		return true
  3581  	}
  3582  	return false
  3583  }
  3584  func rewriteValueARM64_OpARM64MOVBload(v *Value, config *Config) bool {
  3585  	b := v.Block
  3586  	_ = b
  3587  	// match: (MOVBload [off1] {sym} (ADDconst [off2] ptr) mem)
  3588  	// cond:
  3589  	// result: (MOVBload [off1+off2] {sym} ptr mem)
  3590  	for {
  3591  		off1 := v.AuxInt
  3592  		sym := v.Aux
  3593  		v_0 := v.Args[0]
  3594  		if v_0.Op != OpARM64ADDconst {
  3595  			break
  3596  		}
  3597  		off2 := v_0.AuxInt
  3598  		ptr := v_0.Args[0]
  3599  		mem := v.Args[1]
  3600  		v.reset(OpARM64MOVBload)
  3601  		v.AuxInt = off1 + off2
  3602  		v.Aux = sym
  3603  		v.AddArg(ptr)
  3604  		v.AddArg(mem)
  3605  		return true
  3606  	}
  3607  	// match: (MOVBload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
  3608  	// cond: canMergeSym(sym1,sym2)
  3609  	// result: (MOVBload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
  3610  	for {
  3611  		off1 := v.AuxInt
  3612  		sym1 := v.Aux
  3613  		v_0 := v.Args[0]
  3614  		if v_0.Op != OpARM64MOVDaddr {
  3615  			break
  3616  		}
  3617  		off2 := v_0.AuxInt
  3618  		sym2 := v_0.Aux
  3619  		ptr := v_0.Args[0]
  3620  		mem := v.Args[1]
  3621  		if !(canMergeSym(sym1, sym2)) {
  3622  			break
  3623  		}
  3624  		v.reset(OpARM64MOVBload)
  3625  		v.AuxInt = off1 + off2
  3626  		v.Aux = mergeSym(sym1, sym2)
  3627  		v.AddArg(ptr)
  3628  		v.AddArg(mem)
  3629  		return true
  3630  	}
  3631  	// match: (MOVBload [off] {sym} ptr (MOVBstorezero [off2] {sym2} ptr2 _))
  3632  	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
  3633  	// result: (MOVDconst [0])
  3634  	for {
  3635  		off := v.AuxInt
  3636  		sym := v.Aux
  3637  		ptr := v.Args[0]
  3638  		v_1 := v.Args[1]
  3639  		if v_1.Op != OpARM64MOVBstorezero {
  3640  			break
  3641  		}
  3642  		off2 := v_1.AuxInt
  3643  		sym2 := v_1.Aux
  3644  		ptr2 := v_1.Args[0]
  3645  		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
  3646  			break
  3647  		}
  3648  		v.reset(OpARM64MOVDconst)
  3649  		v.AuxInt = 0
  3650  		return true
  3651  	}
  3652  	return false
  3653  }
  3654  func rewriteValueARM64_OpARM64MOVBreg(v *Value, config *Config) bool {
  3655  	b := v.Block
  3656  	_ = b
  3657  	// match: (MOVBreg x:(MOVBload _ _))
  3658  	// cond:
  3659  	// result: (MOVDreg x)
  3660  	for {
  3661  		x := v.Args[0]
  3662  		if x.Op != OpARM64MOVBload {
  3663  			break
  3664  		}
  3665  		v.reset(OpARM64MOVDreg)
  3666  		v.AddArg(x)
  3667  		return true
  3668  	}
  3669  	// match: (MOVBreg x:(MOVBreg _))
  3670  	// cond:
  3671  	// result: (MOVDreg x)
  3672  	for {
  3673  		x := v.Args[0]
  3674  		if x.Op != OpARM64MOVBreg {
  3675  			break
  3676  		}
  3677  		v.reset(OpARM64MOVDreg)
  3678  		v.AddArg(x)
  3679  		return true
  3680  	}
  3681  	// match: (MOVBreg  (MOVDconst [c]))
  3682  	// cond:
  3683  	// result: (MOVDconst [int64(int8(c))])
  3684  	for {
  3685  		v_0 := v.Args[0]
  3686  		if v_0.Op != OpARM64MOVDconst {
  3687  			break
  3688  		}
  3689  		c := v_0.AuxInt
  3690  		v.reset(OpARM64MOVDconst)
  3691  		v.AuxInt = int64(int8(c))
  3692  		return true
  3693  	}
  3694  	return false
  3695  }
  3696  func rewriteValueARM64_OpARM64MOVBstore(v *Value, config *Config) bool {
  3697  	b := v.Block
  3698  	_ = b
  3699  	// match: (MOVBstore [off1] {sym} (ADDconst [off2] ptr) val mem)
  3700  	// cond:
  3701  	// result: (MOVBstore [off1+off2] {sym} ptr val mem)
  3702  	for {
  3703  		off1 := v.AuxInt
  3704  		sym := v.Aux
  3705  		v_0 := v.Args[0]
  3706  		if v_0.Op != OpARM64ADDconst {
  3707  			break
  3708  		}
  3709  		off2 := v_0.AuxInt
  3710  		ptr := v_0.Args[0]
  3711  		val := v.Args[1]
  3712  		mem := v.Args[2]
  3713  		v.reset(OpARM64MOVBstore)
  3714  		v.AuxInt = off1 + off2
  3715  		v.Aux = sym
  3716  		v.AddArg(ptr)
  3717  		v.AddArg(val)
  3718  		v.AddArg(mem)
  3719  		return true
  3720  	}
  3721  	// match: (MOVBstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem)
  3722  	// cond: canMergeSym(sym1,sym2)
  3723  	// result: (MOVBstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
  3724  	for {
  3725  		off1 := v.AuxInt
  3726  		sym1 := v.Aux
  3727  		v_0 := v.Args[0]
  3728  		if v_0.Op != OpARM64MOVDaddr {
  3729  			break
  3730  		}
  3731  		off2 := v_0.AuxInt
  3732  		sym2 := v_0.Aux
  3733  		ptr := v_0.Args[0]
  3734  		val := v.Args[1]
  3735  		mem := v.Args[2]
  3736  		if !(canMergeSym(sym1, sym2)) {
  3737  			break
  3738  		}
  3739  		v.reset(OpARM64MOVBstore)
  3740  		v.AuxInt = off1 + off2
  3741  		v.Aux = mergeSym(sym1, sym2)
  3742  		v.AddArg(ptr)
  3743  		v.AddArg(val)
  3744  		v.AddArg(mem)
  3745  		return true
  3746  	}
  3747  	// match: (MOVBstore [off] {sym} ptr (MOVDconst [0]) mem)
  3748  	// cond:
  3749  	// result: (MOVBstorezero [off] {sym} ptr mem)
  3750  	for {
  3751  		off := v.AuxInt
  3752  		sym := v.Aux
  3753  		ptr := v.Args[0]
  3754  		v_1 := v.Args[1]
  3755  		if v_1.Op != OpARM64MOVDconst {
  3756  			break
  3757  		}
  3758  		if v_1.AuxInt != 0 {
  3759  			break
  3760  		}
  3761  		mem := v.Args[2]
  3762  		v.reset(OpARM64MOVBstorezero)
  3763  		v.AuxInt = off
  3764  		v.Aux = sym
  3765  		v.AddArg(ptr)
  3766  		v.AddArg(mem)
  3767  		return true
  3768  	}
  3769  	// match: (MOVBstore [off] {sym} ptr (MOVBreg x) mem)
  3770  	// cond:
  3771  	// result: (MOVBstore [off] {sym} ptr x mem)
  3772  	for {
  3773  		off := v.AuxInt
  3774  		sym := v.Aux
  3775  		ptr := v.Args[0]
  3776  		v_1 := v.Args[1]
  3777  		if v_1.Op != OpARM64MOVBreg {
  3778  			break
  3779  		}
  3780  		x := v_1.Args[0]
  3781  		mem := v.Args[2]
  3782  		v.reset(OpARM64MOVBstore)
  3783  		v.AuxInt = off
  3784  		v.Aux = sym
  3785  		v.AddArg(ptr)
  3786  		v.AddArg(x)
  3787  		v.AddArg(mem)
  3788  		return true
  3789  	}
  3790  	// match: (MOVBstore [off] {sym} ptr (MOVBUreg x) mem)
  3791  	// cond:
  3792  	// result: (MOVBstore [off] {sym} ptr x mem)
  3793  	for {
  3794  		off := v.AuxInt
  3795  		sym := v.Aux
  3796  		ptr := v.Args[0]
  3797  		v_1 := v.Args[1]
  3798  		if v_1.Op != OpARM64MOVBUreg {
  3799  			break
  3800  		}
  3801  		x := v_1.Args[0]
  3802  		mem := v.Args[2]
  3803  		v.reset(OpARM64MOVBstore)
  3804  		v.AuxInt = off
  3805  		v.Aux = sym
  3806  		v.AddArg(ptr)
  3807  		v.AddArg(x)
  3808  		v.AddArg(mem)
  3809  		return true
  3810  	}
  3811  	// match: (MOVBstore [off] {sym} ptr (MOVHreg x) mem)
  3812  	// cond:
  3813  	// result: (MOVBstore [off] {sym} ptr x mem)
  3814  	for {
  3815  		off := v.AuxInt
  3816  		sym := v.Aux
  3817  		ptr := v.Args[0]
  3818  		v_1 := v.Args[1]
  3819  		if v_1.Op != OpARM64MOVHreg {
  3820  			break
  3821  		}
  3822  		x := v_1.Args[0]
  3823  		mem := v.Args[2]
  3824  		v.reset(OpARM64MOVBstore)
  3825  		v.AuxInt = off
  3826  		v.Aux = sym
  3827  		v.AddArg(ptr)
  3828  		v.AddArg(x)
  3829  		v.AddArg(mem)
  3830  		return true
  3831  	}
  3832  	// match: (MOVBstore [off] {sym} ptr (MOVHUreg x) mem)
  3833  	// cond:
  3834  	// result: (MOVBstore [off] {sym} ptr x mem)
  3835  	for {
  3836  		off := v.AuxInt
  3837  		sym := v.Aux
  3838  		ptr := v.Args[0]
  3839  		v_1 := v.Args[1]
  3840  		if v_1.Op != OpARM64MOVHUreg {
  3841  			break
  3842  		}
  3843  		x := v_1.Args[0]
  3844  		mem := v.Args[2]
  3845  		v.reset(OpARM64MOVBstore)
  3846  		v.AuxInt = off
  3847  		v.Aux = sym
  3848  		v.AddArg(ptr)
  3849  		v.AddArg(x)
  3850  		v.AddArg(mem)
  3851  		return true
  3852  	}
  3853  	// match: (MOVBstore [off] {sym} ptr (MOVWreg x) mem)
  3854  	// cond:
  3855  	// result: (MOVBstore [off] {sym} ptr x mem)
  3856  	for {
  3857  		off := v.AuxInt
  3858  		sym := v.Aux
  3859  		ptr := v.Args[0]
  3860  		v_1 := v.Args[1]
  3861  		if v_1.Op != OpARM64MOVWreg {
  3862  			break
  3863  		}
  3864  		x := v_1.Args[0]
  3865  		mem := v.Args[2]
  3866  		v.reset(OpARM64MOVBstore)
  3867  		v.AuxInt = off
  3868  		v.Aux = sym
  3869  		v.AddArg(ptr)
  3870  		v.AddArg(x)
  3871  		v.AddArg(mem)
  3872  		return true
  3873  	}
  3874  	// match: (MOVBstore [off] {sym} ptr (MOVWUreg x) mem)
  3875  	// cond:
  3876  	// result: (MOVBstore [off] {sym} ptr x mem)
  3877  	for {
  3878  		off := v.AuxInt
  3879  		sym := v.Aux
  3880  		ptr := v.Args[0]
  3881  		v_1 := v.Args[1]
  3882  		if v_1.Op != OpARM64MOVWUreg {
  3883  			break
  3884  		}
  3885  		x := v_1.Args[0]
  3886  		mem := v.Args[2]
  3887  		v.reset(OpARM64MOVBstore)
  3888  		v.AuxInt = off
  3889  		v.Aux = sym
  3890  		v.AddArg(ptr)
  3891  		v.AddArg(x)
  3892  		v.AddArg(mem)
  3893  		return true
  3894  	}
  3895  	return false
  3896  }
  3897  func rewriteValueARM64_OpARM64MOVBstorezero(v *Value, config *Config) bool {
  3898  	b := v.Block
  3899  	_ = b
  3900  	// match: (MOVBstorezero [off1] {sym} (ADDconst [off2] ptr) mem)
  3901  	// cond:
  3902  	// result: (MOVBstorezero [off1+off2] {sym} ptr mem)
  3903  	for {
  3904  		off1 := v.AuxInt
  3905  		sym := v.Aux
  3906  		v_0 := v.Args[0]
  3907  		if v_0.Op != OpARM64ADDconst {
  3908  			break
  3909  		}
  3910  		off2 := v_0.AuxInt
  3911  		ptr := v_0.Args[0]
  3912  		mem := v.Args[1]
  3913  		v.reset(OpARM64MOVBstorezero)
  3914  		v.AuxInt = off1 + off2
  3915  		v.Aux = sym
  3916  		v.AddArg(ptr)
  3917  		v.AddArg(mem)
  3918  		return true
  3919  	}
  3920  	// match: (MOVBstorezero [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
  3921  	// cond: canMergeSym(sym1,sym2)
  3922  	// result: (MOVBstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
  3923  	for {
  3924  		off1 := v.AuxInt
  3925  		sym1 := v.Aux
  3926  		v_0 := v.Args[0]
  3927  		if v_0.Op != OpARM64MOVDaddr {
  3928  			break
  3929  		}
  3930  		off2 := v_0.AuxInt
  3931  		sym2 := v_0.Aux
  3932  		ptr := v_0.Args[0]
  3933  		mem := v.Args[1]
  3934  		if !(canMergeSym(sym1, sym2)) {
  3935  			break
  3936  		}
  3937  		v.reset(OpARM64MOVBstorezero)
  3938  		v.AuxInt = off1 + off2
  3939  		v.Aux = mergeSym(sym1, sym2)
  3940  		v.AddArg(ptr)
  3941  		v.AddArg(mem)
  3942  		return true
  3943  	}
  3944  	return false
  3945  }
  3946  func rewriteValueARM64_OpARM64MOVDload(v *Value, config *Config) bool {
  3947  	b := v.Block
  3948  	_ = b
  3949  	// match: (MOVDload [off1] {sym} (ADDconst [off2] ptr) mem)
  3950  	// cond: (off1+off2)%8==0 || off1+off2<256 && off1+off2>-256 && !isArg(sym) && !isAuto(sym)
  3951  	// result: (MOVDload [off1+off2] {sym} ptr mem)
  3952  	for {
  3953  		off1 := v.AuxInt
  3954  		sym := v.Aux
  3955  		v_0 := v.Args[0]
  3956  		if v_0.Op != OpARM64ADDconst {
  3957  			break
  3958  		}
  3959  		off2 := v_0.AuxInt
  3960  		ptr := v_0.Args[0]
  3961  		mem := v.Args[1]
  3962  		if !((off1+off2)%8 == 0 || off1+off2 < 256 && off1+off2 > -256 && !isArg(sym) && !isAuto(sym)) {
  3963  			break
  3964  		}
  3965  		v.reset(OpARM64MOVDload)
  3966  		v.AuxInt = off1 + off2
  3967  		v.Aux = sym
  3968  		v.AddArg(ptr)
  3969  		v.AddArg(mem)
  3970  		return true
  3971  	}
  3972  	// match: (MOVDload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
  3973  	// cond: canMergeSym(sym1,sym2) 	&& ((off1+off2)%8==0 || off1+off2<256 && off1+off2>-256 && !isArg(sym1) && !isAuto(sym1))
  3974  	// result: (MOVDload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
  3975  	for {
  3976  		off1 := v.AuxInt
  3977  		sym1 := v.Aux
  3978  		v_0 := v.Args[0]
  3979  		if v_0.Op != OpARM64MOVDaddr {
  3980  			break
  3981  		}
  3982  		off2 := v_0.AuxInt
  3983  		sym2 := v_0.Aux
  3984  		ptr := v_0.Args[0]
  3985  		mem := v.Args[1]
  3986  		if !(canMergeSym(sym1, sym2) && ((off1+off2)%8 == 0 || off1+off2 < 256 && off1+off2 > -256 && !isArg(sym1) && !isAuto(sym1))) {
  3987  			break
  3988  		}
  3989  		v.reset(OpARM64MOVDload)
  3990  		v.AuxInt = off1 + off2
  3991  		v.Aux = mergeSym(sym1, sym2)
  3992  		v.AddArg(ptr)
  3993  		v.AddArg(mem)
  3994  		return true
  3995  	}
  3996  	// match: (MOVDload [off] {sym} ptr (MOVDstorezero [off2] {sym2} ptr2 _))
  3997  	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
  3998  	// result: (MOVDconst [0])
  3999  	for {
  4000  		off := v.AuxInt
  4001  		sym := v.Aux
  4002  		ptr := v.Args[0]
  4003  		v_1 := v.Args[1]
  4004  		if v_1.Op != OpARM64MOVDstorezero {
  4005  			break
  4006  		}
  4007  		off2 := v_1.AuxInt
  4008  		sym2 := v_1.Aux
  4009  		ptr2 := v_1.Args[0]
  4010  		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
  4011  			break
  4012  		}
  4013  		v.reset(OpARM64MOVDconst)
  4014  		v.AuxInt = 0
  4015  		return true
  4016  	}
  4017  	return false
  4018  }
  4019  func rewriteValueARM64_OpARM64MOVDreg(v *Value, config *Config) bool {
  4020  	b := v.Block
  4021  	_ = b
  4022  	// match: (MOVDreg x)
  4023  	// cond: x.Uses == 1
  4024  	// result: (MOVDnop x)
  4025  	for {
  4026  		x := v.Args[0]
  4027  		if !(x.Uses == 1) {
  4028  			break
  4029  		}
  4030  		v.reset(OpARM64MOVDnop)
  4031  		v.AddArg(x)
  4032  		return true
  4033  	}
  4034  	// match: (MOVDreg  (MOVDconst [c]))
  4035  	// cond:
  4036  	// result: (MOVDconst [c])
  4037  	for {
  4038  		v_0 := v.Args[0]
  4039  		if v_0.Op != OpARM64MOVDconst {
  4040  			break
  4041  		}
  4042  		c := v_0.AuxInt
  4043  		v.reset(OpARM64MOVDconst)
  4044  		v.AuxInt = c
  4045  		return true
  4046  	}
  4047  	return false
  4048  }
  4049  func rewriteValueARM64_OpARM64MOVDstore(v *Value, config *Config) bool {
  4050  	b := v.Block
  4051  	_ = b
  4052  	// match: (MOVDstore [off1] {sym} (ADDconst [off2] ptr) val mem)
  4053  	// cond: (off1+off2)%8==0 || off1+off2<256 && off1+off2>-256 && !isArg(sym) && !isAuto(sym)
  4054  	// result: (MOVDstore [off1+off2] {sym} ptr val mem)
  4055  	for {
  4056  		off1 := v.AuxInt
  4057  		sym := v.Aux
  4058  		v_0 := v.Args[0]
  4059  		if v_0.Op != OpARM64ADDconst {
  4060  			break
  4061  		}
  4062  		off2 := v_0.AuxInt
  4063  		ptr := v_0.Args[0]
  4064  		val := v.Args[1]
  4065  		mem := v.Args[2]
  4066  		if !((off1+off2)%8 == 0 || off1+off2 < 256 && off1+off2 > -256 && !isArg(sym) && !isAuto(sym)) {
  4067  			break
  4068  		}
  4069  		v.reset(OpARM64MOVDstore)
  4070  		v.AuxInt = off1 + off2
  4071  		v.Aux = sym
  4072  		v.AddArg(ptr)
  4073  		v.AddArg(val)
  4074  		v.AddArg(mem)
  4075  		return true
  4076  	}
  4077  	// match: (MOVDstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem)
  4078  	// cond: canMergeSym(sym1,sym2) 	&& ((off1+off2)%8==0 || off1+off2<256 && off1+off2>-256 && !isArg(sym1) && !isAuto(sym1))
  4079  	// result: (MOVDstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
  4080  	for {
  4081  		off1 := v.AuxInt
  4082  		sym1 := v.Aux
  4083  		v_0 := v.Args[0]
  4084  		if v_0.Op != OpARM64MOVDaddr {
  4085  			break
  4086  		}
  4087  		off2 := v_0.AuxInt
  4088  		sym2 := v_0.Aux
  4089  		ptr := v_0.Args[0]
  4090  		val := v.Args[1]
  4091  		mem := v.Args[2]
  4092  		if !(canMergeSym(sym1, sym2) && ((off1+off2)%8 == 0 || off1+off2 < 256 && off1+off2 > -256 && !isArg(sym1) && !isAuto(sym1))) {
  4093  			break
  4094  		}
  4095  		v.reset(OpARM64MOVDstore)
  4096  		v.AuxInt = off1 + off2
  4097  		v.Aux = mergeSym(sym1, sym2)
  4098  		v.AddArg(ptr)
  4099  		v.AddArg(val)
  4100  		v.AddArg(mem)
  4101  		return true
  4102  	}
  4103  	// match: (MOVDstore [off] {sym} ptr (MOVDconst [0]) mem)
  4104  	// cond:
  4105  	// result: (MOVDstorezero [off] {sym} ptr mem)
  4106  	for {
  4107  		off := v.AuxInt
  4108  		sym := v.Aux
  4109  		ptr := v.Args[0]
  4110  		v_1 := v.Args[1]
  4111  		if v_1.Op != OpARM64MOVDconst {
  4112  			break
  4113  		}
  4114  		if v_1.AuxInt != 0 {
  4115  			break
  4116  		}
  4117  		mem := v.Args[2]
  4118  		v.reset(OpARM64MOVDstorezero)
  4119  		v.AuxInt = off
  4120  		v.Aux = sym
  4121  		v.AddArg(ptr)
  4122  		v.AddArg(mem)
  4123  		return true
  4124  	}
  4125  	return false
  4126  }
  4127  func rewriteValueARM64_OpARM64MOVDstorezero(v *Value, config *Config) bool {
  4128  	b := v.Block
  4129  	_ = b
  4130  	// match: (MOVDstorezero [off1] {sym} (ADDconst [off2] ptr) mem)
  4131  	// cond: (off1+off2)%2==8 || off1+off2<256 && off1+off2>-256 && !isArg(sym) && !isAuto(sym)
  4132  	// result: (MOVDstorezero [off1+off2] {sym} ptr mem)
  4133  	for {
  4134  		off1 := v.AuxInt
  4135  		sym := v.Aux
  4136  		v_0 := v.Args[0]
  4137  		if v_0.Op != OpARM64ADDconst {
  4138  			break
  4139  		}
  4140  		off2 := v_0.AuxInt
  4141  		ptr := v_0.Args[0]
  4142  		mem := v.Args[1]
  4143  		if !((off1+off2)%2 == 8 || off1+off2 < 256 && off1+off2 > -256 && !isArg(sym) && !isAuto(sym)) {
  4144  			break
  4145  		}
  4146  		v.reset(OpARM64MOVDstorezero)
  4147  		v.AuxInt = off1 + off2
  4148  		v.Aux = sym
  4149  		v.AddArg(ptr)
  4150  		v.AddArg(mem)
  4151  		return true
  4152  	}
  4153  	// match: (MOVDstorezero [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
  4154  	// cond: canMergeSym(sym1,sym2) 	&& ((off1+off2)%8==0 || off1+off2<256 && off1+off2>-256 && !isArg(sym1) && !isAuto(sym1))
  4155  	// result: (MOVDstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
  4156  	for {
  4157  		off1 := v.AuxInt
  4158  		sym1 := v.Aux
  4159  		v_0 := v.Args[0]
  4160  		if v_0.Op != OpARM64MOVDaddr {
  4161  			break
  4162  		}
  4163  		off2 := v_0.AuxInt
  4164  		sym2 := v_0.Aux
  4165  		ptr := v_0.Args[0]
  4166  		mem := v.Args[1]
  4167  		if !(canMergeSym(sym1, sym2) && ((off1+off2)%8 == 0 || off1+off2 < 256 && off1+off2 > -256 && !isArg(sym1) && !isAuto(sym1))) {
  4168  			break
  4169  		}
  4170  		v.reset(OpARM64MOVDstorezero)
  4171  		v.AuxInt = off1 + off2
  4172  		v.Aux = mergeSym(sym1, sym2)
  4173  		v.AddArg(ptr)
  4174  		v.AddArg(mem)
  4175  		return true
  4176  	}
  4177  	return false
  4178  }
  4179  func rewriteValueARM64_OpARM64MOVHUload(v *Value, config *Config) bool {
  4180  	b := v.Block
  4181  	_ = b
  4182  	// match: (MOVHUload [off1] {sym} (ADDconst [off2] ptr) mem)
  4183  	// cond: (off1+off2)%2==0 || off1+off2<256 && off1+off2>-256 && !isArg(sym) && !isAuto(sym)
  4184  	// result: (MOVHUload [off1+off2] {sym} ptr mem)
  4185  	for {
  4186  		off1 := v.AuxInt
  4187  		sym := v.Aux
  4188  		v_0 := v.Args[0]
  4189  		if v_0.Op != OpARM64ADDconst {
  4190  			break
  4191  		}
  4192  		off2 := v_0.AuxInt
  4193  		ptr := v_0.Args[0]
  4194  		mem := v.Args[1]
  4195  		if !((off1+off2)%2 == 0 || off1+off2 < 256 && off1+off2 > -256 && !isArg(sym) && !isAuto(sym)) {
  4196  			break
  4197  		}
  4198  		v.reset(OpARM64MOVHUload)
  4199  		v.AuxInt = off1 + off2
  4200  		v.Aux = sym
  4201  		v.AddArg(ptr)
  4202  		v.AddArg(mem)
  4203  		return true
  4204  	}
  4205  	// match: (MOVHUload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
  4206  	// cond: canMergeSym(sym1,sym2) 	&& ((off1+off2)%2==0 || off1+off2<256 && off1+off2>-256 && !isArg(sym1) && !isAuto(sym1))
  4207  	// result: (MOVHUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
  4208  	for {
  4209  		off1 := v.AuxInt
  4210  		sym1 := v.Aux
  4211  		v_0 := v.Args[0]
  4212  		if v_0.Op != OpARM64MOVDaddr {
  4213  			break
  4214  		}
  4215  		off2 := v_0.AuxInt
  4216  		sym2 := v_0.Aux
  4217  		ptr := v_0.Args[0]
  4218  		mem := v.Args[1]
  4219  		if !(canMergeSym(sym1, sym2) && ((off1+off2)%2 == 0 || off1+off2 < 256 && off1+off2 > -256 && !isArg(sym1) && !isAuto(sym1))) {
  4220  			break
  4221  		}
  4222  		v.reset(OpARM64MOVHUload)
  4223  		v.AuxInt = off1 + off2
  4224  		v.Aux = mergeSym(sym1, sym2)
  4225  		v.AddArg(ptr)
  4226  		v.AddArg(mem)
  4227  		return true
  4228  	}
  4229  	// match: (MOVHUload [off] {sym} ptr (MOVHstorezero [off2] {sym2} ptr2 _))
  4230  	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
  4231  	// result: (MOVDconst [0])
  4232  	for {
  4233  		off := v.AuxInt
  4234  		sym := v.Aux
  4235  		ptr := v.Args[0]
  4236  		v_1 := v.Args[1]
  4237  		if v_1.Op != OpARM64MOVHstorezero {
  4238  			break
  4239  		}
  4240  		off2 := v_1.AuxInt
  4241  		sym2 := v_1.Aux
  4242  		ptr2 := v_1.Args[0]
  4243  		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
  4244  			break
  4245  		}
  4246  		v.reset(OpARM64MOVDconst)
  4247  		v.AuxInt = 0
  4248  		return true
  4249  	}
  4250  	return false
  4251  }
// rewriteValueARM64_OpARM64MOVHUreg rewrites MOVHUreg (zero-extend 16->64 bits)
// values: the extension is replaced by a plain MOVDreg when the argument is
// already zero-extended to at most 16 bits (an unsigned byte/halfword load or
// a prior unsigned extension), and is constant-folded for MOVDconst arguments.
// It reports whether a rewrite was applied.
func rewriteValueARM64_OpARM64MOVHUreg(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (MOVHUreg x:(MOVBUload _ _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVBUload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVHUload _ _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVHUload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVBUreg _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVBUreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVHUreg _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVHUreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg (MOVDconst [c]))
	// cond:
	// result: (MOVDconst [int64(uint16(c))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64(uint16(c))
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVHload rewrites MOVHload (signed halfword load)
// values: it folds constant offsets from ADDconst and MOVDaddr addresses into
// the load's AuxInt/Aux (when the combined offset stays encodable: 2-byte
// aligned, or within the +/-256 unscaled range for non-arg/auto symbols), and
// replaces a load from an address just zeroed by MOVHstorezero with constant 0.
// It reports whether a rewrite was applied.
func rewriteValueARM64_OpARM64MOVHload(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (MOVHload [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: (off1+off2)%2==0 || off1+off2<256 && off1+off2>-256 && !isArg(sym) && !isAuto(sym)
	// result: (MOVHload [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !((off1+off2)%2 == 0 || off1+off2 < 256 && off1+off2 > -256 && !isArg(sym) && !isAuto(sym)) {
			break
		}
		v.reset(OpARM64MOVHload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) 	&& ((off1+off2)%2==0 || off1+off2<256 && off1+off2>-256 && !isArg(sym1) && !isAuto(sym1))
	// result: (MOVHload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && ((off1+off2)%2 == 0 || off1+off2 < 256 && off1+off2 > -256 && !isArg(sym1) && !isAuto(sym1))) {
			break
		}
		v.reset(OpARM64MOVHload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHload [off] {sym} ptr (MOVHstorezero [off2] {sym2} ptr2 _))
	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
	// result: (MOVDconst [0])
	for {
		off := v.AuxInt
		sym := v.Aux
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVHstorezero {
			break
		}
		off2 := v_1.AuxInt
		sym2 := v_1.Aux
		ptr2 := v_1.Args[0]
		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVHreg rewrites MOVHreg (sign-extend 16->64 bits)
// values: the extension is replaced by a plain MOVDreg when the argument is
// already extended to at most 16 bits (a byte/halfword load or a prior
// narrower extension), and is constant-folded for MOVDconst arguments.
// It reports whether a rewrite was applied.
func rewriteValueARM64_OpARM64MOVHreg(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (MOVHreg x:(MOVBload _ _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVBload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBUload _ _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVBUload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVHload _ _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVHload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBreg _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVBreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBUreg _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVBUreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVHreg _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVHreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg  (MOVDconst [c]))
	// cond:
	// result: (MOVDconst [int64(int16(c))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64(int16(c))
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVHstore rewrites MOVHstore (halfword store)
// values: it folds constant offsets from ADDconst/MOVDaddr addresses into the
// store (subject to the same encodability condition as the loads), turns a
// store of constant zero into MOVHstorezero, and drops redundant sign/zero
// extensions of the stored value (only the low 16 bits are written, so any
// MOVHreg/MOVHUreg/MOVWreg/MOVWUreg wrapper is a no-op here).
// It reports whether a rewrite was applied.
func rewriteValueARM64_OpARM64MOVHstore(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (MOVHstore [off1] {sym} (ADDconst [off2] ptr) val mem)
	// cond: (off1+off2)%2==0 || off1+off2<256 && off1+off2>-256 && !isArg(sym) && !isAuto(sym)
	// result: (MOVHstore [off1+off2] {sym} ptr val mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !((off1+off2)%2 == 0 || off1+off2 < 256 && off1+off2 > -256 && !isArg(sym) && !isAuto(sym)) {
			break
		}
		v.reset(OpARM64MOVHstore)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) 	&& ((off1+off2)%2==0 || off1+off2<256 && off1+off2>-256 && !isArg(sym1) && !isAuto(sym1))
	// result: (MOVHstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(canMergeSym(sym1, sym2) && ((off1+off2)%2 == 0 || off1+off2 < 256 && off1+off2 > -256 && !isArg(sym1) && !isAuto(sym1))) {
			break
		}
		v.reset(OpARM64MOVHstore)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVDconst [0]) mem)
	// cond:
	// result: (MOVHstorezero [off] {sym} ptr mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		mem := v.Args[2]
		v.reset(OpARM64MOVHstorezero)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVHreg x) mem)
	// cond:
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVHreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpARM64MOVHstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVHUreg x) mem)
	// cond:
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVHUreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpARM64MOVHstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVWreg x) mem)
	// cond:
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVWreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpARM64MOVHstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVWUreg x) mem)
	// cond:
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVWUreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpARM64MOVHstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVHstorezero rewrites MOVHstorezero (store of a
// zero halfword) values by folding constant offsets from ADDconst and
// MOVDaddr addresses into the store's AuxInt/Aux, under the same
// encodability condition used for halfword loads and stores.
// It reports whether a rewrite was applied.
func rewriteValueARM64_OpARM64MOVHstorezero(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (MOVHstorezero [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: (off1+off2)%2==0 || off1+off2<256 && off1+off2>-256 && !isArg(sym) && !isAuto(sym)
	// result: (MOVHstorezero [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !((off1+off2)%2 == 0 || off1+off2 < 256 && off1+off2 > -256 && !isArg(sym) && !isAuto(sym)) {
			break
		}
		v.reset(OpARM64MOVHstorezero)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHstorezero [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) 	&& ((off1+off2)%2==0 || off1+off2<256 && off1+off2>-256 && !isArg(sym1) && !isAuto(sym1))
	// result: (MOVHstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && ((off1+off2)%2 == 0 || off1+off2 < 256 && off1+off2 > -256 && !isArg(sym1) && !isAuto(sym1))) {
			break
		}
		v.reset(OpARM64MOVHstorezero)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVWUload rewrites MOVWUload (unsigned word load)
// values: it folds constant offsets from ADDconst and MOVDaddr addresses into
// the load's AuxInt/Aux (when the combined offset stays encodable: 4-byte
// aligned, or within the +/-256 unscaled range for non-arg/auto symbols), and
// replaces a load from an address just zeroed by MOVWstorezero with constant 0.
// It reports whether a rewrite was applied.
func rewriteValueARM64_OpARM64MOVWUload(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (MOVWUload [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: (off1+off2)%4==0 || off1+off2<256 && off1+off2>-256 && !isArg(sym) && !isAuto(sym)
	// result: (MOVWUload [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !((off1+off2)%4 == 0 || off1+off2 < 256 && off1+off2 > -256 && !isArg(sym) && !isAuto(sym)) {
			break
		}
		v.reset(OpARM64MOVWUload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWUload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) 	&& ((off1+off2)%4==0 || off1+off2<256 && off1+off2>-256 && !isArg(sym1) && !isAuto(sym1))
	// result: (MOVWUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && ((off1+off2)%4 == 0 || off1+off2 < 256 && off1+off2 > -256 && !isArg(sym1) && !isAuto(sym1))) {
			break
		}
		v.reset(OpARM64MOVWUload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWUload [off] {sym} ptr (MOVWstorezero [off2] {sym2} ptr2 _))
	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
	// result: (MOVDconst [0])
	for {
		off := v.AuxInt
		sym := v.Aux
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVWstorezero {
			break
		}
		off2 := v_1.AuxInt
		sym2 := v_1.Aux
		ptr2 := v_1.Args[0]
		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVWUreg rewrites MOVWUreg (zero-extend 32->64
// bits) values: the extension is replaced by a plain MOVDreg when the
// argument is already zero-extended to at most 32 bits (an unsigned
// byte/halfword/word load or a prior unsigned extension), and is
// constant-folded for MOVDconst arguments.
// It reports whether a rewrite was applied.
func rewriteValueARM64_OpARM64MOVWUreg(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (MOVWUreg x:(MOVBUload _ _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVBUload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVHUload _ _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVHUload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVWUload _ _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVWUload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVBUreg _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVBUreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVHUreg _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVHUreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVWUreg _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVWUreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg (MOVDconst [c]))
	// cond:
	// result: (MOVDconst [int64(uint32(c))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64(uint32(c))
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVWload rewrites MOVWload (signed word load)
// values: it folds constant offsets from ADDconst and MOVDaddr addresses into
// the load's AuxInt/Aux (when the combined offset stays encodable: 4-byte
// aligned, or within the +/-256 unscaled range for non-arg/auto symbols), and
// replaces a load from an address just zeroed by MOVWstorezero with constant 0.
// It reports whether a rewrite was applied.
func rewriteValueARM64_OpARM64MOVWload(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (MOVWload [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: (off1+off2)%4==0 || off1+off2<256 && off1+off2>-256 && !isArg(sym) && !isAuto(sym)
	// result: (MOVWload [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !((off1+off2)%4 == 0 || off1+off2 < 256 && off1+off2 > -256 && !isArg(sym) && !isAuto(sym)) {
			break
		}
		v.reset(OpARM64MOVWload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) 	&& ((off1+off2)%4==0 || off1+off2<256 && off1+off2>-256 && !isArg(sym1) && !isAuto(sym1))
	// result: (MOVWload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && ((off1+off2)%4 == 0 || off1+off2 < 256 && off1+off2 > -256 && !isArg(sym1) && !isAuto(sym1))) {
			break
		}
		v.reset(OpARM64MOVWload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWload [off] {sym} ptr (MOVWstorezero [off2] {sym2} ptr2 _))
	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
	// result: (MOVDconst [0])
	for {
		off := v.AuxInt
		sym := v.Aux
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVWstorezero {
			break
		}
		off2 := v_1.AuxInt
		sym2 := v_1.Aux
		ptr2 := v_1.Args[0]
		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	return false
}
  4931  func rewriteValueARM64_OpARM64MOVWreg(v *Value, config *Config) bool {
  4932  	b := v.Block
  4933  	_ = b
  4934  	// match: (MOVWreg x:(MOVBload _ _))
  4935  	// cond:
  4936  	// result: (MOVDreg x)
  4937  	for {
  4938  		x := v.Args[0]
  4939  		if x.Op != OpARM64MOVBload {
  4940  			break
  4941  		}
  4942  		v.reset(OpARM64MOVDreg)
  4943  		v.AddArg(x)
  4944  		return true
  4945  	}
  4946  	// match: (MOVWreg x:(MOVBUload _ _))
  4947  	// cond:
  4948  	// result: (MOVDreg x)
  4949  	for {
  4950  		x := v.Args[0]
  4951  		if x.Op != OpARM64MOVBUload {
  4952  			break
  4953  		}
  4954  		v.reset(OpARM64MOVDreg)
  4955  		v.AddArg(x)
  4956  		return true
  4957  	}
  4958  	// match: (MOVWreg x:(MOVHload _ _))
  4959  	// cond:
  4960  	// result: (MOVDreg x)
  4961  	for {
  4962  		x := v.Args[0]
  4963  		if x.Op != OpARM64MOVHload {
  4964  			break
  4965  		}
  4966  		v.reset(OpARM64MOVDreg)
  4967  		v.AddArg(x)
  4968  		return true
  4969  	}
  4970  	// match: (MOVWreg x:(MOVHUload _ _))
  4971  	// cond:
  4972  	// result: (MOVDreg x)
  4973  	for {
  4974  		x := v.Args[0]
  4975  		if x.Op != OpARM64MOVHUload {
  4976  			break
  4977  		}
  4978  		v.reset(OpARM64MOVDreg)
  4979  		v.AddArg(x)
  4980  		return true
  4981  	}
  4982  	// match: (MOVWreg x:(MOVWload _ _))
  4983  	// cond:
  4984  	// result: (MOVDreg x)
  4985  	for {
  4986  		x := v.Args[0]
  4987  		if x.Op != OpARM64MOVWload {
  4988  			break
  4989  		}
  4990  		v.reset(OpARM64MOVDreg)
  4991  		v.AddArg(x)
  4992  		return true
  4993  	}
  4994  	// match: (MOVWreg x:(MOVBreg _))
  4995  	// cond:
  4996  	// result: (MOVDreg x)
  4997  	for {
  4998  		x := v.Args[0]
  4999  		if x.Op != OpARM64MOVBreg {
  5000  			break
  5001  		}
  5002  		v.reset(OpARM64MOVDreg)
  5003  		v.AddArg(x)
  5004  		return true
  5005  	}
  5006  	// match: (MOVWreg x:(MOVBUreg _))
  5007  	// cond:
  5008  	// result: (MOVDreg x)
  5009  	for {
  5010  		x := v.Args[0]
  5011  		if x.Op != OpARM64MOVBUreg {
  5012  			break
  5013  		}
  5014  		v.reset(OpARM64MOVDreg)
  5015  		v.AddArg(x)
  5016  		return true
  5017  	}
  5018  	// match: (MOVWreg x:(MOVHreg _))
  5019  	// cond:
  5020  	// result: (MOVDreg x)
  5021  	for {
  5022  		x := v.Args[0]
  5023  		if x.Op != OpARM64MOVHreg {
  5024  			break
  5025  		}
  5026  		v.reset(OpARM64MOVDreg)
  5027  		v.AddArg(x)
  5028  		return true
  5029  	}
  5030  	// match: (MOVWreg x:(MOVHreg _))
  5031  	// cond:
  5032  	// result: (MOVDreg x)
  5033  	for {
  5034  		x := v.Args[0]
  5035  		if x.Op != OpARM64MOVHreg {
  5036  			break
  5037  		}
  5038  		v.reset(OpARM64MOVDreg)
  5039  		v.AddArg(x)
  5040  		return true
  5041  	}
  5042  	// match: (MOVWreg x:(MOVWreg _))
  5043  	// cond:
  5044  	// result: (MOVDreg x)
  5045  	for {
  5046  		x := v.Args[0]
  5047  		if x.Op != OpARM64MOVWreg {
  5048  			break
  5049  		}
  5050  		v.reset(OpARM64MOVDreg)
  5051  		v.AddArg(x)
  5052  		return true
  5053  	}
  5054  	// match: (MOVWreg  (MOVDconst [c]))
  5055  	// cond:
  5056  	// result: (MOVDconst [int64(int32(c))])
  5057  	for {
  5058  		v_0 := v.Args[0]
  5059  		if v_0.Op != OpARM64MOVDconst {
  5060  			break
  5061  		}
  5062  		c := v_0.AuxInt
  5063  		v.reset(OpARM64MOVDconst)
  5064  		v.AuxInt = int64(int32(c))
  5065  		return true
  5066  	}
  5067  	return false
  5068  }
// rewriteValueARM64_OpARM64MOVWstore rewrites MOVWstore (word store) values:
// it folds constant offsets from ADDconst/MOVDaddr addresses into the store
// (subject to the same encodability condition as the word loads), turns a
// store of constant zero into MOVWstorezero, and drops redundant 32-bit
// sign/zero extensions of the stored value (only the low 32 bits are written,
// so a MOVWreg/MOVWUreg wrapper is a no-op here).
// It reports whether a rewrite was applied.
func rewriteValueARM64_OpARM64MOVWstore(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (MOVWstore [off1] {sym} (ADDconst [off2] ptr) val mem)
	// cond: (off1+off2)%4==0 || off1+off2<256 && off1+off2>-256 && !isArg(sym) && !isAuto(sym)
	// result: (MOVWstore [off1+off2] {sym} ptr val mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !((off1+off2)%4 == 0 || off1+off2 < 256 && off1+off2 > -256 && !isArg(sym) && !isAuto(sym)) {
			break
		}
		v.reset(OpARM64MOVWstore)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) 	&& ((off1+off2)%4==0 || off1+off2<256 && off1+off2>-256 && !isArg(sym1) && !isAuto(sym1))
	// result: (MOVWstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(canMergeSym(sym1, sym2) && ((off1+off2)%4 == 0 || off1+off2 < 256 && off1+off2 > -256 && !isArg(sym1) && !isAuto(sym1))) {
			break
		}
		v.reset(OpARM64MOVWstore)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWstore [off] {sym} ptr (MOVDconst [0]) mem)
	// cond:
	// result: (MOVWstorezero [off] {sym} ptr mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		mem := v.Args[2]
		v.reset(OpARM64MOVWstorezero)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWstore [off] {sym} ptr (MOVWreg x) mem)
	// cond:
	// result: (MOVWstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVWreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpARM64MOVWstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWstore [off] {sym} ptr (MOVWUreg x) mem)
	// cond:
	// result: (MOVWstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVWUreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpARM64MOVWstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVWstorezero rewrites MOVWstorezero (store of a
// zero word) values by folding constant offsets from ADDconst and MOVDaddr
// addresses into the store's AuxInt/Aux, under the same encodability
// condition used for word loads and stores.
// It reports whether a rewrite was applied.
func rewriteValueARM64_OpARM64MOVWstorezero(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (MOVWstorezero [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: (off1+off2)%4==0 || off1+off2<256 && off1+off2>-256 && !isArg(sym) && !isAuto(sym)
	// result: (MOVWstorezero [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !((off1+off2)%4 == 0 || off1+off2 < 256 && off1+off2 > -256 && !isArg(sym) && !isAuto(sym)) {
			break
		}
		v.reset(OpARM64MOVWstorezero)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWstorezero [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) 	&& ((off1+off2)%4==0 || off1+off2<256 && off1+off2>-256 && !isArg(sym1) && !isAuto(sym1))
	// result: (MOVWstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && ((off1+off2)%4 == 0 || off1+off2 < 256 && off1+off2 > -256 && !isArg(sym1) && !isAuto(sym1))) {
			break
		}
		v.reset(OpARM64MOVWstorezero)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
  5241  func rewriteValueARM64_OpARM64MUL(v *Value, config *Config) bool {
  5242  	b := v.Block
  5243  	_ = b
  5244  	// match: (MUL x (MOVDconst [-1]))
  5245  	// cond:
  5246  	// result: (NEG x)
  5247  	for {
  5248  		x := v.Args[0]
  5249  		v_1 := v.Args[1]
  5250  		if v_1.Op != OpARM64MOVDconst {
  5251  			break
  5252  		}
  5253  		if v_1.AuxInt != -1 {
  5254  			break
  5255  		}
  5256  		v.reset(OpARM64NEG)
  5257  		v.AddArg(x)
  5258  		return true
  5259  	}
  5260  	// match: (MUL _ (MOVDconst [0]))
  5261  	// cond:
  5262  	// result: (MOVDconst [0])
  5263  	for {
  5264  		v_1 := v.Args[1]
  5265  		if v_1.Op != OpARM64MOVDconst {
  5266  			break
  5267  		}
  5268  		if v_1.AuxInt != 0 {
  5269  			break
  5270  		}
  5271  		v.reset(OpARM64MOVDconst)
  5272  		v.AuxInt = 0
  5273  		return true
  5274  	}
  5275  	// match: (MUL x (MOVDconst [1]))
  5276  	// cond:
  5277  	// result: x
  5278  	for {
  5279  		x := v.Args[0]
  5280  		v_1 := v.Args[1]
  5281  		if v_1.Op != OpARM64MOVDconst {
  5282  			break
  5283  		}
  5284  		if v_1.AuxInt != 1 {
  5285  			break
  5286  		}
  5287  		v.reset(OpCopy)
  5288  		v.Type = x.Type
  5289  		v.AddArg(x)
  5290  		return true
  5291  	}
  5292  	// match: (MUL x (MOVDconst [c]))
  5293  	// cond: isPowerOfTwo(c)
  5294  	// result: (SLLconst [log2(c)] x)
  5295  	for {
  5296  		x := v.Args[0]
  5297  		v_1 := v.Args[1]
  5298  		if v_1.Op != OpARM64MOVDconst {
  5299  			break
  5300  		}
  5301  		c := v_1.AuxInt
  5302  		if !(isPowerOfTwo(c)) {
  5303  			break
  5304  		}
  5305  		v.reset(OpARM64SLLconst)
  5306  		v.AuxInt = log2(c)
  5307  		v.AddArg(x)
  5308  		return true
  5309  	}
  5310  	// match: (MUL x (MOVDconst [c]))
  5311  	// cond: isPowerOfTwo(c-1) && c >= 3
  5312  	// result: (ADDshiftLL x x [log2(c-1)])
  5313  	for {
  5314  		x := v.Args[0]
  5315  		v_1 := v.Args[1]
  5316  		if v_1.Op != OpARM64MOVDconst {
  5317  			break
  5318  		}
  5319  		c := v_1.AuxInt
  5320  		if !(isPowerOfTwo(c-1) && c >= 3) {
  5321  			break
  5322  		}
  5323  		v.reset(OpARM64ADDshiftLL)
  5324  		v.AuxInt = log2(c - 1)
  5325  		v.AddArg(x)
  5326  		v.AddArg(x)
  5327  		return true
  5328  	}
  5329  	// match: (MUL x (MOVDconst [c]))
  5330  	// cond: isPowerOfTwo(c+1) && c >= 7
  5331  	// result: (ADDshiftLL (NEG <x.Type> x) x [log2(c+1)])
  5332  	for {
  5333  		x := v.Args[0]
  5334  		v_1 := v.Args[1]
  5335  		if v_1.Op != OpARM64MOVDconst {
  5336  			break
  5337  		}
  5338  		c := v_1.AuxInt
  5339  		if !(isPowerOfTwo(c+1) && c >= 7) {
  5340  			break
  5341  		}
  5342  		v.reset(OpARM64ADDshiftLL)
  5343  		v.AuxInt = log2(c + 1)
  5344  		v0 := b.NewValue0(v.Line, OpARM64NEG, x.Type)
  5345  		v0.AddArg(x)
  5346  		v.AddArg(v0)
  5347  		v.AddArg(x)
  5348  		return true
  5349  	}
  5350  	// match: (MUL x (MOVDconst [c]))
  5351  	// cond: c%3 == 0 && isPowerOfTwo(c/3)
  5352  	// result: (SLLconst [log2(c/3)] (ADDshiftLL <x.Type> x x [1]))
  5353  	for {
  5354  		x := v.Args[0]
  5355  		v_1 := v.Args[1]
  5356  		if v_1.Op != OpARM64MOVDconst {
  5357  			break
  5358  		}
  5359  		c := v_1.AuxInt
  5360  		if !(c%3 == 0 && isPowerOfTwo(c/3)) {
  5361  			break
  5362  		}
  5363  		v.reset(OpARM64SLLconst)
  5364  		v.AuxInt = log2(c / 3)
  5365  		v0 := b.NewValue0(v.Line, OpARM64ADDshiftLL, x.Type)
  5366  		v0.AuxInt = 1
  5367  		v0.AddArg(x)
  5368  		v0.AddArg(x)
  5369  		v.AddArg(v0)
  5370  		return true
  5371  	}
  5372  	// match: (MUL x (MOVDconst [c]))
  5373  	// cond: c%5 == 0 && isPowerOfTwo(c/5)
  5374  	// result: (SLLconst [log2(c/5)] (ADDshiftLL <x.Type> x x [2]))
  5375  	for {
  5376  		x := v.Args[0]
  5377  		v_1 := v.Args[1]
  5378  		if v_1.Op != OpARM64MOVDconst {
  5379  			break
  5380  		}
  5381  		c := v_1.AuxInt
  5382  		if !(c%5 == 0 && isPowerOfTwo(c/5)) {
  5383  			break
  5384  		}
  5385  		v.reset(OpARM64SLLconst)
  5386  		v.AuxInt = log2(c / 5)
  5387  		v0 := b.NewValue0(v.Line, OpARM64ADDshiftLL, x.Type)
  5388  		v0.AuxInt = 2
  5389  		v0.AddArg(x)
  5390  		v0.AddArg(x)
  5391  		v.AddArg(v0)
  5392  		return true
  5393  	}
  5394  	// match: (MUL x (MOVDconst [c]))
  5395  	// cond: c%7 == 0 && isPowerOfTwo(c/7)
  5396  	// result: (SLLconst [log2(c/7)] (ADDshiftLL <x.Type> (NEG <x.Type> x) x [3]))
  5397  	for {
  5398  		x := v.Args[0]
  5399  		v_1 := v.Args[1]
  5400  		if v_1.Op != OpARM64MOVDconst {
  5401  			break
  5402  		}
  5403  		c := v_1.AuxInt
  5404  		if !(c%7 == 0 && isPowerOfTwo(c/7)) {
  5405  			break
  5406  		}
  5407  		v.reset(OpARM64SLLconst)
  5408  		v.AuxInt = log2(c / 7)
  5409  		v0 := b.NewValue0(v.Line, OpARM64ADDshiftLL, x.Type)
  5410  		v0.AuxInt = 3
  5411  		v1 := b.NewValue0(v.Line, OpARM64NEG, x.Type)
  5412  		v1.AddArg(x)
  5413  		v0.AddArg(v1)
  5414  		v0.AddArg(x)
  5415  		v.AddArg(v0)
  5416  		return true
  5417  	}
  5418  	// match: (MUL x (MOVDconst [c]))
  5419  	// cond: c%9 == 0 && isPowerOfTwo(c/9)
  5420  	// result: (SLLconst [log2(c/9)] (ADDshiftLL <x.Type> x x [3]))
  5421  	for {
  5422  		x := v.Args[0]
  5423  		v_1 := v.Args[1]
  5424  		if v_1.Op != OpARM64MOVDconst {
  5425  			break
  5426  		}
  5427  		c := v_1.AuxInt
  5428  		if !(c%9 == 0 && isPowerOfTwo(c/9)) {
  5429  			break
  5430  		}
  5431  		v.reset(OpARM64SLLconst)
  5432  		v.AuxInt = log2(c / 9)
  5433  		v0 := b.NewValue0(v.Line, OpARM64ADDshiftLL, x.Type)
  5434  		v0.AuxInt = 3
  5435  		v0.AddArg(x)
  5436  		v0.AddArg(x)
  5437  		v.AddArg(v0)
  5438  		return true
  5439  	}
  5440  	// match: (MUL (MOVDconst [-1]) x)
  5441  	// cond:
  5442  	// result: (NEG x)
  5443  	for {
  5444  		v_0 := v.Args[0]
  5445  		if v_0.Op != OpARM64MOVDconst {
  5446  			break
  5447  		}
  5448  		if v_0.AuxInt != -1 {
  5449  			break
  5450  		}
  5451  		x := v.Args[1]
  5452  		v.reset(OpARM64NEG)
  5453  		v.AddArg(x)
  5454  		return true
  5455  	}
  5456  	// match: (MUL (MOVDconst [0]) _)
  5457  	// cond:
  5458  	// result: (MOVDconst [0])
  5459  	for {
  5460  		v_0 := v.Args[0]
  5461  		if v_0.Op != OpARM64MOVDconst {
  5462  			break
  5463  		}
  5464  		if v_0.AuxInt != 0 {
  5465  			break
  5466  		}
  5467  		v.reset(OpARM64MOVDconst)
  5468  		v.AuxInt = 0
  5469  		return true
  5470  	}
  5471  	// match: (MUL (MOVDconst [1]) x)
  5472  	// cond:
  5473  	// result: x
  5474  	for {
  5475  		v_0 := v.Args[0]
  5476  		if v_0.Op != OpARM64MOVDconst {
  5477  			break
  5478  		}
  5479  		if v_0.AuxInt != 1 {
  5480  			break
  5481  		}
  5482  		x := v.Args[1]
  5483  		v.reset(OpCopy)
  5484  		v.Type = x.Type
  5485  		v.AddArg(x)
  5486  		return true
  5487  	}
  5488  	// match: (MUL (MOVDconst [c]) x)
  5489  	// cond: isPowerOfTwo(c)
  5490  	// result: (SLLconst [log2(c)] x)
  5491  	for {
  5492  		v_0 := v.Args[0]
  5493  		if v_0.Op != OpARM64MOVDconst {
  5494  			break
  5495  		}
  5496  		c := v_0.AuxInt
  5497  		x := v.Args[1]
  5498  		if !(isPowerOfTwo(c)) {
  5499  			break
  5500  		}
  5501  		v.reset(OpARM64SLLconst)
  5502  		v.AuxInt = log2(c)
  5503  		v.AddArg(x)
  5504  		return true
  5505  	}
  5506  	// match: (MUL (MOVDconst [c]) x)
  5507  	// cond: isPowerOfTwo(c)
  5508  	// result: (SLLconst [log2(c)] x)
  5509  	for {
  5510  		v_0 := v.Args[0]
  5511  		if v_0.Op != OpARM64MOVDconst {
  5512  			break
  5513  		}
  5514  		c := v_0.AuxInt
  5515  		x := v.Args[1]
  5516  		if !(isPowerOfTwo(c)) {
  5517  			break
  5518  		}
  5519  		v.reset(OpARM64SLLconst)
  5520  		v.AuxInt = log2(c)
  5521  		v.AddArg(x)
  5522  		return true
  5523  	}
  5524  	// match: (MUL (MOVDconst [c]) x)
  5525  	// cond: isPowerOfTwo(c-1) && c >= 3
  5526  	// result: (ADDshiftLL x x [log2(c-1)])
  5527  	for {
  5528  		v_0 := v.Args[0]
  5529  		if v_0.Op != OpARM64MOVDconst {
  5530  			break
  5531  		}
  5532  		c := v_0.AuxInt
  5533  		x := v.Args[1]
  5534  		if !(isPowerOfTwo(c-1) && c >= 3) {
  5535  			break
  5536  		}
  5537  		v.reset(OpARM64ADDshiftLL)
  5538  		v.AuxInt = log2(c - 1)
  5539  		v.AddArg(x)
  5540  		v.AddArg(x)
  5541  		return true
  5542  	}
  5543  	// match: (MUL (MOVDconst [c]) x)
  5544  	// cond: isPowerOfTwo(c+1) && c >= 7
  5545  	// result: (ADDshiftLL (NEG <x.Type> x) x [log2(c+1)])
  5546  	for {
  5547  		v_0 := v.Args[0]
  5548  		if v_0.Op != OpARM64MOVDconst {
  5549  			break
  5550  		}
  5551  		c := v_0.AuxInt
  5552  		x := v.Args[1]
  5553  		if !(isPowerOfTwo(c+1) && c >= 7) {
  5554  			break
  5555  		}
  5556  		v.reset(OpARM64ADDshiftLL)
  5557  		v.AuxInt = log2(c + 1)
  5558  		v0 := b.NewValue0(v.Line, OpARM64NEG, x.Type)
  5559  		v0.AddArg(x)
  5560  		v.AddArg(v0)
  5561  		v.AddArg(x)
  5562  		return true
  5563  	}
  5564  	// match: (MUL (MOVDconst [c]) x)
  5565  	// cond: c%3 == 0 && isPowerOfTwo(c/3)
  5566  	// result: (SLLconst [log2(c/3)] (ADDshiftLL <x.Type> x x [1]))
  5567  	for {
  5568  		v_0 := v.Args[0]
  5569  		if v_0.Op != OpARM64MOVDconst {
  5570  			break
  5571  		}
  5572  		c := v_0.AuxInt
  5573  		x := v.Args[1]
  5574  		if !(c%3 == 0 && isPowerOfTwo(c/3)) {
  5575  			break
  5576  		}
  5577  		v.reset(OpARM64SLLconst)
  5578  		v.AuxInt = log2(c / 3)
  5579  		v0 := b.NewValue0(v.Line, OpARM64ADDshiftLL, x.Type)
  5580  		v0.AuxInt = 1
  5581  		v0.AddArg(x)
  5582  		v0.AddArg(x)
  5583  		v.AddArg(v0)
  5584  		return true
  5585  	}
  5586  	// match: (MUL (MOVDconst [c]) x)
  5587  	// cond: c%5 == 0 && isPowerOfTwo(c/5)
  5588  	// result: (SLLconst [log2(c/5)] (ADDshiftLL <x.Type> x x [2]))
  5589  	for {
  5590  		v_0 := v.Args[0]
  5591  		if v_0.Op != OpARM64MOVDconst {
  5592  			break
  5593  		}
  5594  		c := v_0.AuxInt
  5595  		x := v.Args[1]
  5596  		if !(c%5 == 0 && isPowerOfTwo(c/5)) {
  5597  			break
  5598  		}
  5599  		v.reset(OpARM64SLLconst)
  5600  		v.AuxInt = log2(c / 5)
  5601  		v0 := b.NewValue0(v.Line, OpARM64ADDshiftLL, x.Type)
  5602  		v0.AuxInt = 2
  5603  		v0.AddArg(x)
  5604  		v0.AddArg(x)
  5605  		v.AddArg(v0)
  5606  		return true
  5607  	}
  5608  	// match: (MUL (MOVDconst [c]) x)
  5609  	// cond: c%7 == 0 && isPowerOfTwo(c/7)
  5610  	// result: (SLLconst [log2(c/7)] (ADDshiftLL <x.Type> (NEG <x.Type> x) x [3]))
  5611  	for {
  5612  		v_0 := v.Args[0]
  5613  		if v_0.Op != OpARM64MOVDconst {
  5614  			break
  5615  		}
  5616  		c := v_0.AuxInt
  5617  		x := v.Args[1]
  5618  		if !(c%7 == 0 && isPowerOfTwo(c/7)) {
  5619  			break
  5620  		}
  5621  		v.reset(OpARM64SLLconst)
  5622  		v.AuxInt = log2(c / 7)
  5623  		v0 := b.NewValue0(v.Line, OpARM64ADDshiftLL, x.Type)
  5624  		v0.AuxInt = 3
  5625  		v1 := b.NewValue0(v.Line, OpARM64NEG, x.Type)
  5626  		v1.AddArg(x)
  5627  		v0.AddArg(v1)
  5628  		v0.AddArg(x)
  5629  		v.AddArg(v0)
  5630  		return true
  5631  	}
  5632  	// match: (MUL (MOVDconst [c]) x)
  5633  	// cond: c%9 == 0 && isPowerOfTwo(c/9)
  5634  	// result: (SLLconst [log2(c/9)] (ADDshiftLL <x.Type> x x [3]))
  5635  	for {
  5636  		v_0 := v.Args[0]
  5637  		if v_0.Op != OpARM64MOVDconst {
  5638  			break
  5639  		}
  5640  		c := v_0.AuxInt
  5641  		x := v.Args[1]
  5642  		if !(c%9 == 0 && isPowerOfTwo(c/9)) {
  5643  			break
  5644  		}
  5645  		v.reset(OpARM64SLLconst)
  5646  		v.AuxInt = log2(c / 9)
  5647  		v0 := b.NewValue0(v.Line, OpARM64ADDshiftLL, x.Type)
  5648  		v0.AuxInt = 3
  5649  		v0.AddArg(x)
  5650  		v0.AddArg(x)
  5651  		v.AddArg(v0)
  5652  		return true
  5653  	}
  5654  	// match: (MUL   (MOVDconst [c]) (MOVDconst [d]))
  5655  	// cond:
  5656  	// result: (MOVDconst [c*d])
  5657  	for {
  5658  		v_0 := v.Args[0]
  5659  		if v_0.Op != OpARM64MOVDconst {
  5660  			break
  5661  		}
  5662  		c := v_0.AuxInt
  5663  		v_1 := v.Args[1]
  5664  		if v_1.Op != OpARM64MOVDconst {
  5665  			break
  5666  		}
  5667  		d := v_1.AuxInt
  5668  		v.reset(OpARM64MOVDconst)
  5669  		v.AuxInt = c * d
  5670  		return true
  5671  	}
  5672  	return false
  5673  }
  5674  func rewriteValueARM64_OpARM64MULW(v *Value, config *Config) bool {
  5675  	b := v.Block
  5676  	_ = b
  5677  	// match: (MULW x (MOVDconst [c]))
  5678  	// cond: int32(c)==-1
  5679  	// result: (NEG x)
  5680  	for {
  5681  		x := v.Args[0]
  5682  		v_1 := v.Args[1]
  5683  		if v_1.Op != OpARM64MOVDconst {
  5684  			break
  5685  		}
  5686  		c := v_1.AuxInt
  5687  		if !(int32(c) == -1) {
  5688  			break
  5689  		}
  5690  		v.reset(OpARM64NEG)
  5691  		v.AddArg(x)
  5692  		return true
  5693  	}
  5694  	// match: (MULW _ (MOVDconst [c]))
  5695  	// cond: int32(c)==0
  5696  	// result: (MOVDconst [0])
  5697  	for {
  5698  		v_1 := v.Args[1]
  5699  		if v_1.Op != OpARM64MOVDconst {
  5700  			break
  5701  		}
  5702  		c := v_1.AuxInt
  5703  		if !(int32(c) == 0) {
  5704  			break
  5705  		}
  5706  		v.reset(OpARM64MOVDconst)
  5707  		v.AuxInt = 0
  5708  		return true
  5709  	}
  5710  	// match: (MULW x (MOVDconst [c]))
  5711  	// cond: int32(c)==1
  5712  	// result: x
  5713  	for {
  5714  		x := v.Args[0]
  5715  		v_1 := v.Args[1]
  5716  		if v_1.Op != OpARM64MOVDconst {
  5717  			break
  5718  		}
  5719  		c := v_1.AuxInt
  5720  		if !(int32(c) == 1) {
  5721  			break
  5722  		}
  5723  		v.reset(OpCopy)
  5724  		v.Type = x.Type
  5725  		v.AddArg(x)
  5726  		return true
  5727  	}
  5728  	// match: (MULW x (MOVDconst [c]))
  5729  	// cond: isPowerOfTwo(c)
  5730  	// result: (SLLconst [log2(c)] x)
  5731  	for {
  5732  		x := v.Args[0]
  5733  		v_1 := v.Args[1]
  5734  		if v_1.Op != OpARM64MOVDconst {
  5735  			break
  5736  		}
  5737  		c := v_1.AuxInt
  5738  		if !(isPowerOfTwo(c)) {
  5739  			break
  5740  		}
  5741  		v.reset(OpARM64SLLconst)
  5742  		v.AuxInt = log2(c)
  5743  		v.AddArg(x)
  5744  		return true
  5745  	}
  5746  	// match: (MULW x (MOVDconst [c]))
  5747  	// cond: isPowerOfTwo(c-1) && int32(c) >= 3
  5748  	// result: (ADDshiftLL x x [log2(c-1)])
  5749  	for {
  5750  		x := v.Args[0]
  5751  		v_1 := v.Args[1]
  5752  		if v_1.Op != OpARM64MOVDconst {
  5753  			break
  5754  		}
  5755  		c := v_1.AuxInt
  5756  		if !(isPowerOfTwo(c-1) && int32(c) >= 3) {
  5757  			break
  5758  		}
  5759  		v.reset(OpARM64ADDshiftLL)
  5760  		v.AuxInt = log2(c - 1)
  5761  		v.AddArg(x)
  5762  		v.AddArg(x)
  5763  		return true
  5764  	}
  5765  	// match: (MULW x (MOVDconst [c]))
  5766  	// cond: isPowerOfTwo(c+1) && int32(c) >= 7
  5767  	// result: (ADDshiftLL (NEG <x.Type> x) x [log2(c+1)])
  5768  	for {
  5769  		x := v.Args[0]
  5770  		v_1 := v.Args[1]
  5771  		if v_1.Op != OpARM64MOVDconst {
  5772  			break
  5773  		}
  5774  		c := v_1.AuxInt
  5775  		if !(isPowerOfTwo(c+1) && int32(c) >= 7) {
  5776  			break
  5777  		}
  5778  		v.reset(OpARM64ADDshiftLL)
  5779  		v.AuxInt = log2(c + 1)
  5780  		v0 := b.NewValue0(v.Line, OpARM64NEG, x.Type)
  5781  		v0.AddArg(x)
  5782  		v.AddArg(v0)
  5783  		v.AddArg(x)
  5784  		return true
  5785  	}
  5786  	// match: (MULW x (MOVDconst [c]))
  5787  	// cond: c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c)
  5788  	// result: (SLLconst [log2(c/3)] (ADDshiftLL <x.Type> x x [1]))
  5789  	for {
  5790  		x := v.Args[0]
  5791  		v_1 := v.Args[1]
  5792  		if v_1.Op != OpARM64MOVDconst {
  5793  			break
  5794  		}
  5795  		c := v_1.AuxInt
  5796  		if !(c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c)) {
  5797  			break
  5798  		}
  5799  		v.reset(OpARM64SLLconst)
  5800  		v.AuxInt = log2(c / 3)
  5801  		v0 := b.NewValue0(v.Line, OpARM64ADDshiftLL, x.Type)
  5802  		v0.AuxInt = 1
  5803  		v0.AddArg(x)
  5804  		v0.AddArg(x)
  5805  		v.AddArg(v0)
  5806  		return true
  5807  	}
  5808  	// match: (MULW x (MOVDconst [c]))
  5809  	// cond: c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c)
  5810  	// result: (SLLconst [log2(c/5)] (ADDshiftLL <x.Type> x x [2]))
  5811  	for {
  5812  		x := v.Args[0]
  5813  		v_1 := v.Args[1]
  5814  		if v_1.Op != OpARM64MOVDconst {
  5815  			break
  5816  		}
  5817  		c := v_1.AuxInt
  5818  		if !(c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c)) {
  5819  			break
  5820  		}
  5821  		v.reset(OpARM64SLLconst)
  5822  		v.AuxInt = log2(c / 5)
  5823  		v0 := b.NewValue0(v.Line, OpARM64ADDshiftLL, x.Type)
  5824  		v0.AuxInt = 2
  5825  		v0.AddArg(x)
  5826  		v0.AddArg(x)
  5827  		v.AddArg(v0)
  5828  		return true
  5829  	}
  5830  	// match: (MULW x (MOVDconst [c]))
  5831  	// cond: c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c)
  5832  	// result: (SLLconst [log2(c/7)] (ADDshiftLL <x.Type> (NEG <x.Type> x) x [3]))
  5833  	for {
  5834  		x := v.Args[0]
  5835  		v_1 := v.Args[1]
  5836  		if v_1.Op != OpARM64MOVDconst {
  5837  			break
  5838  		}
  5839  		c := v_1.AuxInt
  5840  		if !(c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c)) {
  5841  			break
  5842  		}
  5843  		v.reset(OpARM64SLLconst)
  5844  		v.AuxInt = log2(c / 7)
  5845  		v0 := b.NewValue0(v.Line, OpARM64ADDshiftLL, x.Type)
  5846  		v0.AuxInt = 3
  5847  		v1 := b.NewValue0(v.Line, OpARM64NEG, x.Type)
  5848  		v1.AddArg(x)
  5849  		v0.AddArg(v1)
  5850  		v0.AddArg(x)
  5851  		v.AddArg(v0)
  5852  		return true
  5853  	}
  5854  	// match: (MULW x (MOVDconst [c]))
  5855  	// cond: c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c)
  5856  	// result: (SLLconst [log2(c/9)] (ADDshiftLL <x.Type> x x [3]))
  5857  	for {
  5858  		x := v.Args[0]
  5859  		v_1 := v.Args[1]
  5860  		if v_1.Op != OpARM64MOVDconst {
  5861  			break
  5862  		}
  5863  		c := v_1.AuxInt
  5864  		if !(c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c)) {
  5865  			break
  5866  		}
  5867  		v.reset(OpARM64SLLconst)
  5868  		v.AuxInt = log2(c / 9)
  5869  		v0 := b.NewValue0(v.Line, OpARM64ADDshiftLL, x.Type)
  5870  		v0.AuxInt = 3
  5871  		v0.AddArg(x)
  5872  		v0.AddArg(x)
  5873  		v.AddArg(v0)
  5874  		return true
  5875  	}
  5876  	// match: (MULW (MOVDconst [c]) x)
  5877  	// cond: int32(c)==-1
  5878  	// result: (NEG x)
  5879  	for {
  5880  		v_0 := v.Args[0]
  5881  		if v_0.Op != OpARM64MOVDconst {
  5882  			break
  5883  		}
  5884  		c := v_0.AuxInt
  5885  		x := v.Args[1]
  5886  		if !(int32(c) == -1) {
  5887  			break
  5888  		}
  5889  		v.reset(OpARM64NEG)
  5890  		v.AddArg(x)
  5891  		return true
  5892  	}
  5893  	// match: (MULW (MOVDconst [c]) _)
  5894  	// cond: int32(c)==0
  5895  	// result: (MOVDconst [0])
  5896  	for {
  5897  		v_0 := v.Args[0]
  5898  		if v_0.Op != OpARM64MOVDconst {
  5899  			break
  5900  		}
  5901  		c := v_0.AuxInt
  5902  		if !(int32(c) == 0) {
  5903  			break
  5904  		}
  5905  		v.reset(OpARM64MOVDconst)
  5906  		v.AuxInt = 0
  5907  		return true
  5908  	}
  5909  	// match: (MULW (MOVDconst [c]) x)
  5910  	// cond: int32(c)==1
  5911  	// result: x
  5912  	for {
  5913  		v_0 := v.Args[0]
  5914  		if v_0.Op != OpARM64MOVDconst {
  5915  			break
  5916  		}
  5917  		c := v_0.AuxInt
  5918  		x := v.Args[1]
  5919  		if !(int32(c) == 1) {
  5920  			break
  5921  		}
  5922  		v.reset(OpCopy)
  5923  		v.Type = x.Type
  5924  		v.AddArg(x)
  5925  		return true
  5926  	}
  5927  	// match: (MULW (MOVDconst [c]) x)
  5928  	// cond: isPowerOfTwo(c)
  5929  	// result: (SLLconst [log2(c)] x)
  5930  	for {
  5931  		v_0 := v.Args[0]
  5932  		if v_0.Op != OpARM64MOVDconst {
  5933  			break
  5934  		}
  5935  		c := v_0.AuxInt
  5936  		x := v.Args[1]
  5937  		if !(isPowerOfTwo(c)) {
  5938  			break
  5939  		}
  5940  		v.reset(OpARM64SLLconst)
  5941  		v.AuxInt = log2(c)
  5942  		v.AddArg(x)
  5943  		return true
  5944  	}
  5945  	// match: (MULW (MOVDconst [c]) x)
  5946  	// cond: isPowerOfTwo(c-1) && int32(c) >= 3
  5947  	// result: (ADDshiftLL x x [log2(c-1)])
  5948  	for {
  5949  		v_0 := v.Args[0]
  5950  		if v_0.Op != OpARM64MOVDconst {
  5951  			break
  5952  		}
  5953  		c := v_0.AuxInt
  5954  		x := v.Args[1]
  5955  		if !(isPowerOfTwo(c-1) && int32(c) >= 3) {
  5956  			break
  5957  		}
  5958  		v.reset(OpARM64ADDshiftLL)
  5959  		v.AuxInt = log2(c - 1)
  5960  		v.AddArg(x)
  5961  		v.AddArg(x)
  5962  		return true
  5963  	}
  5964  	// match: (MULW (MOVDconst [c]) x)
  5965  	// cond: isPowerOfTwo(c+1) && int32(c) >= 7
  5966  	// result: (ADDshiftLL (NEG <x.Type> x) x [log2(c+1)])
  5967  	for {
  5968  		v_0 := v.Args[0]
  5969  		if v_0.Op != OpARM64MOVDconst {
  5970  			break
  5971  		}
  5972  		c := v_0.AuxInt
  5973  		x := v.Args[1]
  5974  		if !(isPowerOfTwo(c+1) && int32(c) >= 7) {
  5975  			break
  5976  		}
  5977  		v.reset(OpARM64ADDshiftLL)
  5978  		v.AuxInt = log2(c + 1)
  5979  		v0 := b.NewValue0(v.Line, OpARM64NEG, x.Type)
  5980  		v0.AddArg(x)
  5981  		v.AddArg(v0)
  5982  		v.AddArg(x)
  5983  		return true
  5984  	}
  5985  	// match: (MULW (MOVDconst [c]) x)
  5986  	// cond: c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c)
  5987  	// result: (SLLconst [log2(c/3)] (ADDshiftLL <x.Type> x x [1]))
  5988  	for {
  5989  		v_0 := v.Args[0]
  5990  		if v_0.Op != OpARM64MOVDconst {
  5991  			break
  5992  		}
  5993  		c := v_0.AuxInt
  5994  		x := v.Args[1]
  5995  		if !(c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c)) {
  5996  			break
  5997  		}
  5998  		v.reset(OpARM64SLLconst)
  5999  		v.AuxInt = log2(c / 3)
  6000  		v0 := b.NewValue0(v.Line, OpARM64ADDshiftLL, x.Type)
  6001  		v0.AuxInt = 1
  6002  		v0.AddArg(x)
  6003  		v0.AddArg(x)
  6004  		v.AddArg(v0)
  6005  		return true
  6006  	}
  6007  	// match: (MULW (MOVDconst [c]) x)
  6008  	// cond: c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c)
  6009  	// result: (SLLconst [log2(c/5)] (ADDshiftLL <x.Type> x x [2]))
  6010  	for {
  6011  		v_0 := v.Args[0]
  6012  		if v_0.Op != OpARM64MOVDconst {
  6013  			break
  6014  		}
  6015  		c := v_0.AuxInt
  6016  		x := v.Args[1]
  6017  		if !(c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c)) {
  6018  			break
  6019  		}
  6020  		v.reset(OpARM64SLLconst)
  6021  		v.AuxInt = log2(c / 5)
  6022  		v0 := b.NewValue0(v.Line, OpARM64ADDshiftLL, x.Type)
  6023  		v0.AuxInt = 2
  6024  		v0.AddArg(x)
  6025  		v0.AddArg(x)
  6026  		v.AddArg(v0)
  6027  		return true
  6028  	}
  6029  	// match: (MULW (MOVDconst [c]) x)
  6030  	// cond: c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c)
  6031  	// result: (SLLconst [log2(c/7)] (ADDshiftLL <x.Type> (NEG <x.Type> x) x [3]))
  6032  	for {
  6033  		v_0 := v.Args[0]
  6034  		if v_0.Op != OpARM64MOVDconst {
  6035  			break
  6036  		}
  6037  		c := v_0.AuxInt
  6038  		x := v.Args[1]
  6039  		if !(c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c)) {
  6040  			break
  6041  		}
  6042  		v.reset(OpARM64SLLconst)
  6043  		v.AuxInt = log2(c / 7)
  6044  		v0 := b.NewValue0(v.Line, OpARM64ADDshiftLL, x.Type)
  6045  		v0.AuxInt = 3
  6046  		v1 := b.NewValue0(v.Line, OpARM64NEG, x.Type)
  6047  		v1.AddArg(x)
  6048  		v0.AddArg(v1)
  6049  		v0.AddArg(x)
  6050  		v.AddArg(v0)
  6051  		return true
  6052  	}
  6053  	// match: (MULW (MOVDconst [c]) x)
  6054  	// cond: c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c)
  6055  	// result: (SLLconst [log2(c/9)] (ADDshiftLL <x.Type> x x [3]))
  6056  	for {
  6057  		v_0 := v.Args[0]
  6058  		if v_0.Op != OpARM64MOVDconst {
  6059  			break
  6060  		}
  6061  		c := v_0.AuxInt
  6062  		x := v.Args[1]
  6063  		if !(c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c)) {
  6064  			break
  6065  		}
  6066  		v.reset(OpARM64SLLconst)
  6067  		v.AuxInt = log2(c / 9)
  6068  		v0 := b.NewValue0(v.Line, OpARM64ADDshiftLL, x.Type)
  6069  		v0.AuxInt = 3
  6070  		v0.AddArg(x)
  6071  		v0.AddArg(x)
  6072  		v.AddArg(v0)
  6073  		return true
  6074  	}
  6075  	// match: (MULW  (MOVDconst [c]) (MOVDconst [d]))
  6076  	// cond:
  6077  	// result: (MOVDconst [int64(int32(c)*int32(d))])
  6078  	for {
  6079  		v_0 := v.Args[0]
  6080  		if v_0.Op != OpARM64MOVDconst {
  6081  			break
  6082  		}
  6083  		c := v_0.AuxInt
  6084  		v_1 := v.Args[1]
  6085  		if v_1.Op != OpARM64MOVDconst {
  6086  			break
  6087  		}
  6088  		d := v_1.AuxInt
  6089  		v.reset(OpARM64MOVDconst)
  6090  		v.AuxInt = int64(int32(c) * int32(d))
  6091  		return true
  6092  	}
  6093  	return false
  6094  }
  6095  func rewriteValueARM64_OpARM64MVN(v *Value, config *Config) bool {
  6096  	b := v.Block
  6097  	_ = b
  6098  	// match: (MVN (MOVDconst [c]))
  6099  	// cond:
  6100  	// result: (MOVDconst [^c])
  6101  	for {
  6102  		v_0 := v.Args[0]
  6103  		if v_0.Op != OpARM64MOVDconst {
  6104  			break
  6105  		}
  6106  		c := v_0.AuxInt
  6107  		v.reset(OpARM64MOVDconst)
  6108  		v.AuxInt = ^c
  6109  		return true
  6110  	}
  6111  	return false
  6112  }
  6113  func rewriteValueARM64_OpARM64NEG(v *Value, config *Config) bool {
  6114  	b := v.Block
  6115  	_ = b
  6116  	// match: (NEG (MOVDconst [c]))
  6117  	// cond:
  6118  	// result: (MOVDconst [-c])
  6119  	for {
  6120  		v_0 := v.Args[0]
  6121  		if v_0.Op != OpARM64MOVDconst {
  6122  			break
  6123  		}
  6124  		c := v_0.AuxInt
  6125  		v.reset(OpARM64MOVDconst)
  6126  		v.AuxInt = -c
  6127  		return true
  6128  	}
  6129  	return false
  6130  }
  6131  func rewriteValueARM64_OpARM64NotEqual(v *Value, config *Config) bool {
  6132  	b := v.Block
  6133  	_ = b
  6134  	// match: (NotEqual (FlagEQ))
  6135  	// cond:
  6136  	// result: (MOVDconst [0])
  6137  	for {
  6138  		v_0 := v.Args[0]
  6139  		if v_0.Op != OpARM64FlagEQ {
  6140  			break
  6141  		}
  6142  		v.reset(OpARM64MOVDconst)
  6143  		v.AuxInt = 0
  6144  		return true
  6145  	}
  6146  	// match: (NotEqual (FlagLT_ULT))
  6147  	// cond:
  6148  	// result: (MOVDconst [1])
  6149  	for {
  6150  		v_0 := v.Args[0]
  6151  		if v_0.Op != OpARM64FlagLT_ULT {
  6152  			break
  6153  		}
  6154  		v.reset(OpARM64MOVDconst)
  6155  		v.AuxInt = 1
  6156  		return true
  6157  	}
  6158  	// match: (NotEqual (FlagLT_UGT))
  6159  	// cond:
  6160  	// result: (MOVDconst [1])
  6161  	for {
  6162  		v_0 := v.Args[0]
  6163  		if v_0.Op != OpARM64FlagLT_UGT {
  6164  			break
  6165  		}
  6166  		v.reset(OpARM64MOVDconst)
  6167  		v.AuxInt = 1
  6168  		return true
  6169  	}
  6170  	// match: (NotEqual (FlagGT_ULT))
  6171  	// cond:
  6172  	// result: (MOVDconst [1])
  6173  	for {
  6174  		v_0 := v.Args[0]
  6175  		if v_0.Op != OpARM64FlagGT_ULT {
  6176  			break
  6177  		}
  6178  		v.reset(OpARM64MOVDconst)
  6179  		v.AuxInt = 1
  6180  		return true
  6181  	}
  6182  	// match: (NotEqual (FlagGT_UGT))
  6183  	// cond:
  6184  	// result: (MOVDconst [1])
  6185  	for {
  6186  		v_0 := v.Args[0]
  6187  		if v_0.Op != OpARM64FlagGT_UGT {
  6188  			break
  6189  		}
  6190  		v.reset(OpARM64MOVDconst)
  6191  		v.AuxInt = 1
  6192  		return true
  6193  	}
  6194  	// match: (NotEqual (InvertFlags x))
  6195  	// cond:
  6196  	// result: (NotEqual x)
  6197  	for {
  6198  		v_0 := v.Args[0]
  6199  		if v_0.Op != OpARM64InvertFlags {
  6200  			break
  6201  		}
  6202  		x := v_0.Args[0]
  6203  		v.reset(OpARM64NotEqual)
  6204  		v.AddArg(x)
  6205  		return true
  6206  	}
  6207  	return false
  6208  }
  6209  func rewriteValueARM64_OpARM64OR(v *Value, config *Config) bool {
  6210  	b := v.Block
  6211  	_ = b
  6212  	// match: (OR  (MOVDconst [c]) x)
  6213  	// cond:
  6214  	// result: (ORconst  [c] x)
  6215  	for {
  6216  		v_0 := v.Args[0]
  6217  		if v_0.Op != OpARM64MOVDconst {
  6218  			break
  6219  		}
  6220  		c := v_0.AuxInt
  6221  		x := v.Args[1]
  6222  		v.reset(OpARM64ORconst)
  6223  		v.AuxInt = c
  6224  		v.AddArg(x)
  6225  		return true
  6226  	}
  6227  	// match: (OR  x (MOVDconst [c]))
  6228  	// cond:
  6229  	// result: (ORconst  [c] x)
  6230  	for {
  6231  		x := v.Args[0]
  6232  		v_1 := v.Args[1]
  6233  		if v_1.Op != OpARM64MOVDconst {
  6234  			break
  6235  		}
  6236  		c := v_1.AuxInt
  6237  		v.reset(OpARM64ORconst)
  6238  		v.AuxInt = c
  6239  		v.AddArg(x)
  6240  		return true
  6241  	}
  6242  	// match: (OR  x x)
  6243  	// cond:
  6244  	// result: x
  6245  	for {
  6246  		x := v.Args[0]
  6247  		if x != v.Args[1] {
  6248  			break
  6249  		}
  6250  		v.reset(OpCopy)
  6251  		v.Type = x.Type
  6252  		v.AddArg(x)
  6253  		return true
  6254  	}
  6255  	// match: (OR  x s:(SLLconst [c] y))
  6256  	// cond: s.Uses == 1 && clobber(s)
  6257  	// result: (ORshiftLL  x y [c])
  6258  	for {
  6259  		x := v.Args[0]
  6260  		s := v.Args[1]
  6261  		if s.Op != OpARM64SLLconst {
  6262  			break
  6263  		}
  6264  		c := s.AuxInt
  6265  		y := s.Args[0]
  6266  		if !(s.Uses == 1 && clobber(s)) {
  6267  			break
  6268  		}
  6269  		v.reset(OpARM64ORshiftLL)
  6270  		v.AuxInt = c
  6271  		v.AddArg(x)
  6272  		v.AddArg(y)
  6273  		return true
  6274  	}
  6275  	// match: (OR  s:(SLLconst [c] y) x)
  6276  	// cond: s.Uses == 1 && clobber(s)
  6277  	// result: (ORshiftLL  x y [c])
  6278  	for {
  6279  		s := v.Args[0]
  6280  		if s.Op != OpARM64SLLconst {
  6281  			break
  6282  		}
  6283  		c := s.AuxInt
  6284  		y := s.Args[0]
  6285  		x := v.Args[1]
  6286  		if !(s.Uses == 1 && clobber(s)) {
  6287  			break
  6288  		}
  6289  		v.reset(OpARM64ORshiftLL)
  6290  		v.AuxInt = c
  6291  		v.AddArg(x)
  6292  		v.AddArg(y)
  6293  		return true
  6294  	}
  6295  	// match: (OR  x (SLLconst [c] y))
  6296  	// cond:
  6297  	// result: (ORshiftLL  x y [c])
  6298  	for {
  6299  		x := v.Args[0]
  6300  		v_1 := v.Args[1]
  6301  		if v_1.Op != OpARM64SLLconst {
  6302  			break
  6303  		}
  6304  		c := v_1.AuxInt
  6305  		y := v_1.Args[0]
  6306  		v.reset(OpARM64ORshiftLL)
  6307  		v.AuxInt = c
  6308  		v.AddArg(x)
  6309  		v.AddArg(y)
  6310  		return true
  6311  	}
  6312  	// match: (OR  (SLLconst [c] y) x)
  6313  	// cond:
  6314  	// result: (ORshiftLL  x y [c])
  6315  	for {
  6316  		v_0 := v.Args[0]
  6317  		if v_0.Op != OpARM64SLLconst {
  6318  			break
  6319  		}
  6320  		c := v_0.AuxInt
  6321  		y := v_0.Args[0]
  6322  		x := v.Args[1]
  6323  		v.reset(OpARM64ORshiftLL)
  6324  		v.AuxInt = c
  6325  		v.AddArg(x)
  6326  		v.AddArg(y)
  6327  		return true
  6328  	}
  6329  	// match: (OR  x (SRLconst [c] y))
  6330  	// cond:
  6331  	// result: (ORshiftRL  x y [c])
  6332  	for {
  6333  		x := v.Args[0]
  6334  		v_1 := v.Args[1]
  6335  		if v_1.Op != OpARM64SRLconst {
  6336  			break
  6337  		}
  6338  		c := v_1.AuxInt
  6339  		y := v_1.Args[0]
  6340  		v.reset(OpARM64ORshiftRL)
  6341  		v.AuxInt = c
  6342  		v.AddArg(x)
  6343  		v.AddArg(y)
  6344  		return true
  6345  	}
  6346  	// match: (OR  (SRLconst [c] y) x)
  6347  	// cond:
  6348  	// result: (ORshiftRL  x y [c])
  6349  	for {
  6350  		v_0 := v.Args[0]
  6351  		if v_0.Op != OpARM64SRLconst {
  6352  			break
  6353  		}
  6354  		c := v_0.AuxInt
  6355  		y := v_0.Args[0]
  6356  		x := v.Args[1]
  6357  		v.reset(OpARM64ORshiftRL)
  6358  		v.AuxInt = c
  6359  		v.AddArg(x)
  6360  		v.AddArg(y)
  6361  		return true
  6362  	}
  6363  	// match: (OR  x (SRAconst [c] y))
  6364  	// cond:
  6365  	// result: (ORshiftRA  x y [c])
  6366  	for {
  6367  		x := v.Args[0]
  6368  		v_1 := v.Args[1]
  6369  		if v_1.Op != OpARM64SRAconst {
  6370  			break
  6371  		}
  6372  		c := v_1.AuxInt
  6373  		y := v_1.Args[0]
  6374  		v.reset(OpARM64ORshiftRA)
  6375  		v.AuxInt = c
  6376  		v.AddArg(x)
  6377  		v.AddArg(y)
  6378  		return true
  6379  	}
  6380  	// match: (OR  (SRAconst [c] y) x)
  6381  	// cond:
  6382  	// result: (ORshiftRA  x y [c])
  6383  	for {
  6384  		v_0 := v.Args[0]
  6385  		if v_0.Op != OpARM64SRAconst {
  6386  			break
  6387  		}
  6388  		c := v_0.AuxInt
  6389  		y := v_0.Args[0]
  6390  		x := v.Args[1]
  6391  		v.reset(OpARM64ORshiftRA)
  6392  		v.AuxInt = c
  6393  		v.AddArg(x)
  6394  		v.AddArg(y)
  6395  		return true
  6396  	}
  6397  	// match: (OR <t> o0:(ORshiftLL [8] o1:(ORshiftLL [16] s0:(SLLconst [24] 	y0:(MOVDnop x0:(MOVBUload [i]   {s} p mem))) 	y1:(MOVDnop x1:(MOVBUload [i-1] {s} p mem))) 	y2:(MOVDnop x2:(MOVBUload [i-2] {s} p mem))) 	y3:(MOVDnop x3:(MOVBUload [i-3] {s} p mem)))
  6398  	// cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 	&& y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 	&& o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 	&& mergePoint(b,x0,x1,x2,x3) != nil 	&& clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) 	&& clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) 	&& clobber(o0) && clobber(o1) && clobber(s0)
  6399  	// result: @mergePoint(b,x0,x1,x2,x3) (MOVWUload <t> {s} (OffPtr <p.Type> [i-3] p) mem)
  6400  	for {
  6401  		t := v.Type
  6402  		o0 := v.Args[0]
  6403  		if o0.Op != OpARM64ORshiftLL {
  6404  			break
  6405  		}
  6406  		if o0.AuxInt != 8 {
  6407  			break
  6408  		}
  6409  		o1 := o0.Args[0]
  6410  		if o1.Op != OpARM64ORshiftLL {
  6411  			break
  6412  		}
  6413  		if o1.AuxInt != 16 {
  6414  			break
  6415  		}
  6416  		s0 := o1.Args[0]
  6417  		if s0.Op != OpARM64SLLconst {
  6418  			break
  6419  		}
  6420  		if s0.AuxInt != 24 {
  6421  			break
  6422  		}
  6423  		y0 := s0.Args[0]
  6424  		if y0.Op != OpARM64MOVDnop {
  6425  			break
  6426  		}
  6427  		x0 := y0.Args[0]
  6428  		if x0.Op != OpARM64MOVBUload {
  6429  			break
  6430  		}
  6431  		i := x0.AuxInt
  6432  		s := x0.Aux
  6433  		p := x0.Args[0]
  6434  		mem := x0.Args[1]
  6435  		y1 := o1.Args[1]
  6436  		if y1.Op != OpARM64MOVDnop {
  6437  			break
  6438  		}
  6439  		x1 := y1.Args[0]
  6440  		if x1.Op != OpARM64MOVBUload {
  6441  			break
  6442  		}
  6443  		if x1.AuxInt != i-1 {
  6444  			break
  6445  		}
  6446  		if x1.Aux != s {
  6447  			break
  6448  		}
  6449  		if p != x1.Args[0] {
  6450  			break
  6451  		}
  6452  		if mem != x1.Args[1] {
  6453  			break
  6454  		}
  6455  		y2 := o0.Args[1]
  6456  		if y2.Op != OpARM64MOVDnop {
  6457  			break
  6458  		}
  6459  		x2 := y2.Args[0]
  6460  		if x2.Op != OpARM64MOVBUload {
  6461  			break
  6462  		}
  6463  		if x2.AuxInt != i-2 {
  6464  			break
  6465  		}
  6466  		if x2.Aux != s {
  6467  			break
  6468  		}
  6469  		if p != x2.Args[0] {
  6470  			break
  6471  		}
  6472  		if mem != x2.Args[1] {
  6473  			break
  6474  		}
  6475  		y3 := v.Args[1]
  6476  		if y3.Op != OpARM64MOVDnop {
  6477  			break
  6478  		}
  6479  		x3 := y3.Args[0]
  6480  		if x3.Op != OpARM64MOVBUload {
  6481  			break
  6482  		}
  6483  		if x3.AuxInt != i-3 {
  6484  			break
  6485  		}
  6486  		if x3.Aux != s {
  6487  			break
  6488  		}
  6489  		if p != x3.Args[0] {
  6490  			break
  6491  		}
  6492  		if mem != x3.Args[1] {
  6493  			break
  6494  		}
  6495  		if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0)) {
  6496  			break
  6497  		}
  6498  		b = mergePoint(b, x0, x1, x2, x3)
  6499  		v0 := b.NewValue0(v.Line, OpARM64MOVWUload, t)
  6500  		v.reset(OpCopy)
  6501  		v.AddArg(v0)
  6502  		v0.Aux = s
  6503  		v1 := b.NewValue0(v.Line, OpOffPtr, p.Type)
  6504  		v1.AuxInt = i - 3
  6505  		v1.AddArg(p)
  6506  		v0.AddArg(v1)
  6507  		v0.AddArg(mem)
  6508  		return true
  6509  	}
  6510  	// match: (OR <t> o0:(ORshiftLL [8] o1:(ORshiftLL [16] o2:(ORshiftLL [24] o3:(ORshiftLL [32] o4:(ORshiftLL [40] o5:(ORshiftLL [48] s0:(SLLconst [56] 	y0:(MOVDnop x0:(MOVBUload [i]   {s} p mem))) 	y1:(MOVDnop x1:(MOVBUload [i-1] {s} p mem))) 	y2:(MOVDnop x2:(MOVBUload [i-2] {s} p mem))) 	y3:(MOVDnop x3:(MOVBUload [i-3] {s} p mem))) 	y4:(MOVDnop x4:(MOVBUload [i-4] {s} p mem))) 	y5:(MOVDnop x5:(MOVBUload [i-5] {s} p mem))) 	y6:(MOVDnop x6:(MOVBUload [i-6] {s} p mem))) 	y7:(MOVDnop x7:(MOVBUload [i-7] {s} p mem)))
  6511  	// cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 	&& x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 	&& y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 	&& y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 	&& o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 	&& o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 	&& mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) != nil 	&& clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) 	&& clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) 	&& clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) 	&& clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) 	&& clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) 	&& clobber(o4) && clobber(o5) && clobber(s0)
  6512  	// result: @mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) (REV <t> (MOVDload <t> {s} (OffPtr <p.Type> [i-7] p) mem))
  6513  	for {
  6514  		t := v.Type
  6515  		o0 := v.Args[0]
  6516  		if o0.Op != OpARM64ORshiftLL {
  6517  			break
  6518  		}
  6519  		if o0.AuxInt != 8 {
  6520  			break
  6521  		}
  6522  		o1 := o0.Args[0]
  6523  		if o1.Op != OpARM64ORshiftLL {
  6524  			break
  6525  		}
  6526  		if o1.AuxInt != 16 {
  6527  			break
  6528  		}
  6529  		o2 := o1.Args[0]
  6530  		if o2.Op != OpARM64ORshiftLL {
  6531  			break
  6532  		}
  6533  		if o2.AuxInt != 24 {
  6534  			break
  6535  		}
  6536  		o3 := o2.Args[0]
  6537  		if o3.Op != OpARM64ORshiftLL {
  6538  			break
  6539  		}
  6540  		if o3.AuxInt != 32 {
  6541  			break
  6542  		}
  6543  		o4 := o3.Args[0]
  6544  		if o4.Op != OpARM64ORshiftLL {
  6545  			break
  6546  		}
  6547  		if o4.AuxInt != 40 {
  6548  			break
  6549  		}
  6550  		o5 := o4.Args[0]
  6551  		if o5.Op != OpARM64ORshiftLL {
  6552  			break
  6553  		}
  6554  		if o5.AuxInt != 48 {
  6555  			break
  6556  		}
  6557  		s0 := o5.Args[0]
  6558  		if s0.Op != OpARM64SLLconst {
  6559  			break
  6560  		}
  6561  		if s0.AuxInt != 56 {
  6562  			break
  6563  		}
  6564  		y0 := s0.Args[0]
  6565  		if y0.Op != OpARM64MOVDnop {
  6566  			break
  6567  		}
  6568  		x0 := y0.Args[0]
  6569  		if x0.Op != OpARM64MOVBUload {
  6570  			break
  6571  		}
  6572  		i := x0.AuxInt
  6573  		s := x0.Aux
  6574  		p := x0.Args[0]
  6575  		mem := x0.Args[1]
  6576  		y1 := o5.Args[1]
  6577  		if y1.Op != OpARM64MOVDnop {
  6578  			break
  6579  		}
  6580  		x1 := y1.Args[0]
  6581  		if x1.Op != OpARM64MOVBUload {
  6582  			break
  6583  		}
  6584  		if x1.AuxInt != i-1 {
  6585  			break
  6586  		}
  6587  		if x1.Aux != s {
  6588  			break
  6589  		}
  6590  		if p != x1.Args[0] {
  6591  			break
  6592  		}
  6593  		if mem != x1.Args[1] {
  6594  			break
  6595  		}
  6596  		y2 := o4.Args[1]
  6597  		if y2.Op != OpARM64MOVDnop {
  6598  			break
  6599  		}
  6600  		x2 := y2.Args[0]
  6601  		if x2.Op != OpARM64MOVBUload {
  6602  			break
  6603  		}
  6604  		if x2.AuxInt != i-2 {
  6605  			break
  6606  		}
  6607  		if x2.Aux != s {
  6608  			break
  6609  		}
  6610  		if p != x2.Args[0] {
  6611  			break
  6612  		}
  6613  		if mem != x2.Args[1] {
  6614  			break
  6615  		}
  6616  		y3 := o3.Args[1]
  6617  		if y3.Op != OpARM64MOVDnop {
  6618  			break
  6619  		}
  6620  		x3 := y3.Args[0]
  6621  		if x3.Op != OpARM64MOVBUload {
  6622  			break
  6623  		}
  6624  		if x3.AuxInt != i-3 {
  6625  			break
  6626  		}
  6627  		if x3.Aux != s {
  6628  			break
  6629  		}
  6630  		if p != x3.Args[0] {
  6631  			break
  6632  		}
  6633  		if mem != x3.Args[1] {
  6634  			break
  6635  		}
  6636  		y4 := o2.Args[1]
  6637  		if y4.Op != OpARM64MOVDnop {
  6638  			break
  6639  		}
  6640  		x4 := y4.Args[0]
  6641  		if x4.Op != OpARM64MOVBUload {
  6642  			break
  6643  		}
  6644  		if x4.AuxInt != i-4 {
  6645  			break
  6646  		}
  6647  		if x4.Aux != s {
  6648  			break
  6649  		}
  6650  		if p != x4.Args[0] {
  6651  			break
  6652  		}
  6653  		if mem != x4.Args[1] {
  6654  			break
  6655  		}
  6656  		y5 := o1.Args[1]
  6657  		if y5.Op != OpARM64MOVDnop {
  6658  			break
  6659  		}
  6660  		x5 := y5.Args[0]
  6661  		if x5.Op != OpARM64MOVBUload {
  6662  			break
  6663  		}
  6664  		if x5.AuxInt != i-5 {
  6665  			break
  6666  		}
  6667  		if x5.Aux != s {
  6668  			break
  6669  		}
  6670  		if p != x5.Args[0] {
  6671  			break
  6672  		}
  6673  		if mem != x5.Args[1] {
  6674  			break
  6675  		}
  6676  		y6 := o0.Args[1]
  6677  		if y6.Op != OpARM64MOVDnop {
  6678  			break
  6679  		}
  6680  		x6 := y6.Args[0]
  6681  		if x6.Op != OpARM64MOVBUload {
  6682  			break
  6683  		}
  6684  		if x6.AuxInt != i-6 {
  6685  			break
  6686  		}
  6687  		if x6.Aux != s {
  6688  			break
  6689  		}
  6690  		if p != x6.Args[0] {
  6691  			break
  6692  		}
  6693  		if mem != x6.Args[1] {
  6694  			break
  6695  		}
  6696  		y7 := v.Args[1]
  6697  		if y7.Op != OpARM64MOVDnop {
  6698  			break
  6699  		}
  6700  		x7 := y7.Args[0]
  6701  		if x7.Op != OpARM64MOVBUload {
  6702  			break
  6703  		}
  6704  		if x7.AuxInt != i-7 {
  6705  			break
  6706  		}
  6707  		if x7.Aux != s {
  6708  			break
  6709  		}
  6710  		if p != x7.Args[0] {
  6711  			break
  6712  		}
  6713  		if mem != x7.Args[1] {
  6714  			break
  6715  		}
  6716  		if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0)) {
  6717  			break
  6718  		}
  6719  		b = mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7)
  6720  		v0 := b.NewValue0(v.Line, OpARM64REV, t)
  6721  		v.reset(OpCopy)
  6722  		v.AddArg(v0)
  6723  		v1 := b.NewValue0(v.Line, OpARM64MOVDload, t)
  6724  		v1.Aux = s
  6725  		v2 := b.NewValue0(v.Line, OpOffPtr, p.Type)
  6726  		v2.AuxInt = i - 7
  6727  		v2.AddArg(p)
  6728  		v1.AddArg(v2)
  6729  		v1.AddArg(mem)
  6730  		v0.AddArg(v1)
  6731  		return true
  6732  	}
  6733  	// match: (OR <t> o0:(ORshiftLL [8] o1:(ORshiftLL [16] s0:(SLLconst [24] 	y0:(MOVDnop x0:(MOVBUload [i]   {s} p mem))) 	y1:(MOVDnop x1:(MOVBUload [i+1] {s} p mem))) 	y2:(MOVDnop x2:(MOVBUload [i+2] {s} p mem))) 	y3:(MOVDnop x3:(MOVBUload [i+3] {s} p mem)))
  6734  	// cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 	&& y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 	&& o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 	&& mergePoint(b,x0,x1,x2,x3) != nil 	&& clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) 	&& clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) 	&& clobber(o0) && clobber(o1) && clobber(s0)
  6735  	// result: @mergePoint(b,x0,x1,x2,x3) (REVW <t> (MOVWUload <t> {s} (OffPtr <p.Type> [i] p) mem))
  6736  	for {
  6737  		t := v.Type
  6738  		o0 := v.Args[0]
  6739  		if o0.Op != OpARM64ORshiftLL {
  6740  			break
  6741  		}
  6742  		if o0.AuxInt != 8 {
  6743  			break
  6744  		}
  6745  		o1 := o0.Args[0]
  6746  		if o1.Op != OpARM64ORshiftLL {
  6747  			break
  6748  		}
  6749  		if o1.AuxInt != 16 {
  6750  			break
  6751  		}
  6752  		s0 := o1.Args[0]
  6753  		if s0.Op != OpARM64SLLconst {
  6754  			break
  6755  		}
  6756  		if s0.AuxInt != 24 {
  6757  			break
  6758  		}
  6759  		y0 := s0.Args[0]
  6760  		if y0.Op != OpARM64MOVDnop {
  6761  			break
  6762  		}
  6763  		x0 := y0.Args[0]
  6764  		if x0.Op != OpARM64MOVBUload {
  6765  			break
  6766  		}
  6767  		i := x0.AuxInt
  6768  		s := x0.Aux
  6769  		p := x0.Args[0]
  6770  		mem := x0.Args[1]
  6771  		y1 := o1.Args[1]
  6772  		if y1.Op != OpARM64MOVDnop {
  6773  			break
  6774  		}
  6775  		x1 := y1.Args[0]
  6776  		if x1.Op != OpARM64MOVBUload {
  6777  			break
  6778  		}
  6779  		if x1.AuxInt != i+1 {
  6780  			break
  6781  		}
  6782  		if x1.Aux != s {
  6783  			break
  6784  		}
  6785  		if p != x1.Args[0] {
  6786  			break
  6787  		}
  6788  		if mem != x1.Args[1] {
  6789  			break
  6790  		}
  6791  		y2 := o0.Args[1]
  6792  		if y2.Op != OpARM64MOVDnop {
  6793  			break
  6794  		}
  6795  		x2 := y2.Args[0]
  6796  		if x2.Op != OpARM64MOVBUload {
  6797  			break
  6798  		}
  6799  		if x2.AuxInt != i+2 {
  6800  			break
  6801  		}
  6802  		if x2.Aux != s {
  6803  			break
  6804  		}
  6805  		if p != x2.Args[0] {
  6806  			break
  6807  		}
  6808  		if mem != x2.Args[1] {
  6809  			break
  6810  		}
  6811  		y3 := v.Args[1]
  6812  		if y3.Op != OpARM64MOVDnop {
  6813  			break
  6814  		}
  6815  		x3 := y3.Args[0]
  6816  		if x3.Op != OpARM64MOVBUload {
  6817  			break
  6818  		}
  6819  		if x3.AuxInt != i+3 {
  6820  			break
  6821  		}
  6822  		if x3.Aux != s {
  6823  			break
  6824  		}
  6825  		if p != x3.Args[0] {
  6826  			break
  6827  		}
  6828  		if mem != x3.Args[1] {
  6829  			break
  6830  		}
  6831  		if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0)) {
  6832  			break
  6833  		}
  6834  		b = mergePoint(b, x0, x1, x2, x3)
  6835  		v0 := b.NewValue0(v.Line, OpARM64REVW, t)
  6836  		v.reset(OpCopy)
  6837  		v.AddArg(v0)
  6838  		v1 := b.NewValue0(v.Line, OpARM64MOVWUload, t)
  6839  		v1.Aux = s
  6840  		v2 := b.NewValue0(v.Line, OpOffPtr, p.Type)
  6841  		v2.AuxInt = i
  6842  		v2.AddArg(p)
  6843  		v1.AddArg(v2)
  6844  		v1.AddArg(mem)
  6845  		v0.AddArg(v1)
  6846  		return true
  6847  	}
  6848  	// match: (OR <t> o0:(ORshiftLL [8] o1:(ORshiftLL [16] o2:(ORshiftLL [24] o3:(ORshiftLL [32] o4:(ORshiftLL [40] o5:(ORshiftLL [48] s0:(SLLconst [56] 	y0:(MOVDnop x0:(MOVBUload [i]   {s} p mem))) 	y1:(MOVDnop x1:(MOVBUload [i+1] {s} p mem))) 	y2:(MOVDnop x2:(MOVBUload [i+2] {s} p mem))) 	y3:(MOVDnop x3:(MOVBUload [i+3] {s} p mem))) 	y4:(MOVDnop x4:(MOVBUload [i+4] {s} p mem))) 	y5:(MOVDnop x5:(MOVBUload [i+5] {s} p mem))) 	y6:(MOVDnop x6:(MOVBUload [i+6] {s} p mem))) 	y7:(MOVDnop x7:(MOVBUload [i+7] {s} p mem)))
  6849  	// cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 	&& x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 	&& y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 	&& y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 	&& o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 	&& o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 	&& mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) != nil 	&& clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) 	&& clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) 	&& clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) 	&& clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) 	&& clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) 	&& clobber(o4) && clobber(o5) && clobber(s0)
  6850  	// result: @mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) (REV <t> (MOVDload <t> {s} (OffPtr <p.Type> [i] p) mem))
  6851  	for {
  6852  		t := v.Type
  6853  		o0 := v.Args[0]
  6854  		if o0.Op != OpARM64ORshiftLL {
  6855  			break
  6856  		}
  6857  		if o0.AuxInt != 8 {
  6858  			break
  6859  		}
  6860  		o1 := o0.Args[0]
  6861  		if o1.Op != OpARM64ORshiftLL {
  6862  			break
  6863  		}
  6864  		if o1.AuxInt != 16 {
  6865  			break
  6866  		}
  6867  		o2 := o1.Args[0]
  6868  		if o2.Op != OpARM64ORshiftLL {
  6869  			break
  6870  		}
  6871  		if o2.AuxInt != 24 {
  6872  			break
  6873  		}
  6874  		o3 := o2.Args[0]
  6875  		if o3.Op != OpARM64ORshiftLL {
  6876  			break
  6877  		}
  6878  		if o3.AuxInt != 32 {
  6879  			break
  6880  		}
  6881  		o4 := o3.Args[0]
  6882  		if o4.Op != OpARM64ORshiftLL {
  6883  			break
  6884  		}
  6885  		if o4.AuxInt != 40 {
  6886  			break
  6887  		}
  6888  		o5 := o4.Args[0]
  6889  		if o5.Op != OpARM64ORshiftLL {
  6890  			break
  6891  		}
  6892  		if o5.AuxInt != 48 {
  6893  			break
  6894  		}
  6895  		s0 := o5.Args[0]
  6896  		if s0.Op != OpARM64SLLconst {
  6897  			break
  6898  		}
  6899  		if s0.AuxInt != 56 {
  6900  			break
  6901  		}
  6902  		y0 := s0.Args[0]
  6903  		if y0.Op != OpARM64MOVDnop {
  6904  			break
  6905  		}
  6906  		x0 := y0.Args[0]
  6907  		if x0.Op != OpARM64MOVBUload {
  6908  			break
  6909  		}
  6910  		i := x0.AuxInt
  6911  		s := x0.Aux
  6912  		p := x0.Args[0]
  6913  		mem := x0.Args[1]
  6914  		y1 := o5.Args[1]
  6915  		if y1.Op != OpARM64MOVDnop {
  6916  			break
  6917  		}
  6918  		x1 := y1.Args[0]
  6919  		if x1.Op != OpARM64MOVBUload {
  6920  			break
  6921  		}
  6922  		if x1.AuxInt != i+1 {
  6923  			break
  6924  		}
  6925  		if x1.Aux != s {
  6926  			break
  6927  		}
  6928  		if p != x1.Args[0] {
  6929  			break
  6930  		}
  6931  		if mem != x1.Args[1] {
  6932  			break
  6933  		}
  6934  		y2 := o4.Args[1]
  6935  		if y2.Op != OpARM64MOVDnop {
  6936  			break
  6937  		}
  6938  		x2 := y2.Args[0]
  6939  		if x2.Op != OpARM64MOVBUload {
  6940  			break
  6941  		}
  6942  		if x2.AuxInt != i+2 {
  6943  			break
  6944  		}
  6945  		if x2.Aux != s {
  6946  			break
  6947  		}
  6948  		if p != x2.Args[0] {
  6949  			break
  6950  		}
  6951  		if mem != x2.Args[1] {
  6952  			break
  6953  		}
  6954  		y3 := o3.Args[1]
  6955  		if y3.Op != OpARM64MOVDnop {
  6956  			break
  6957  		}
  6958  		x3 := y3.Args[0]
  6959  		if x3.Op != OpARM64MOVBUload {
  6960  			break
  6961  		}
  6962  		if x3.AuxInt != i+3 {
  6963  			break
  6964  		}
  6965  		if x3.Aux != s {
  6966  			break
  6967  		}
  6968  		if p != x3.Args[0] {
  6969  			break
  6970  		}
  6971  		if mem != x3.Args[1] {
  6972  			break
  6973  		}
  6974  		y4 := o2.Args[1]
  6975  		if y4.Op != OpARM64MOVDnop {
  6976  			break
  6977  		}
  6978  		x4 := y4.Args[0]
  6979  		if x4.Op != OpARM64MOVBUload {
  6980  			break
  6981  		}
  6982  		if x4.AuxInt != i+4 {
  6983  			break
  6984  		}
  6985  		if x4.Aux != s {
  6986  			break
  6987  		}
  6988  		if p != x4.Args[0] {
  6989  			break
  6990  		}
  6991  		if mem != x4.Args[1] {
  6992  			break
  6993  		}
  6994  		y5 := o1.Args[1]
  6995  		if y5.Op != OpARM64MOVDnop {
  6996  			break
  6997  		}
  6998  		x5 := y5.Args[0]
  6999  		if x5.Op != OpARM64MOVBUload {
  7000  			break
  7001  		}
  7002  		if x5.AuxInt != i+5 {
  7003  			break
  7004  		}
  7005  		if x5.Aux != s {
  7006  			break
  7007  		}
  7008  		if p != x5.Args[0] {
  7009  			break
  7010  		}
  7011  		if mem != x5.Args[1] {
  7012  			break
  7013  		}
  7014  		y6 := o0.Args[1]
  7015  		if y6.Op != OpARM64MOVDnop {
  7016  			break
  7017  		}
  7018  		x6 := y6.Args[0]
  7019  		if x6.Op != OpARM64MOVBUload {
  7020  			break
  7021  		}
  7022  		if x6.AuxInt != i+6 {
  7023  			break
  7024  		}
  7025  		if x6.Aux != s {
  7026  			break
  7027  		}
  7028  		if p != x6.Args[0] {
  7029  			break
  7030  		}
  7031  		if mem != x6.Args[1] {
  7032  			break
  7033  		}
  7034  		y7 := v.Args[1]
  7035  		if y7.Op != OpARM64MOVDnop {
  7036  			break
  7037  		}
  7038  		x7 := y7.Args[0]
  7039  		if x7.Op != OpARM64MOVBUload {
  7040  			break
  7041  		}
  7042  		if x7.AuxInt != i+7 {
  7043  			break
  7044  		}
  7045  		if x7.Aux != s {
  7046  			break
  7047  		}
  7048  		if p != x7.Args[0] {
  7049  			break
  7050  		}
  7051  		if mem != x7.Args[1] {
  7052  			break
  7053  		}
  7054  		if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0)) {
  7055  			break
  7056  		}
  7057  		b = mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7)
  7058  		v0 := b.NewValue0(v.Line, OpARM64REV, t)
  7059  		v.reset(OpCopy)
  7060  		v.AddArg(v0)
  7061  		v1 := b.NewValue0(v.Line, OpARM64MOVDload, t)
  7062  		v1.Aux = s
  7063  		v2 := b.NewValue0(v.Line, OpOffPtr, p.Type)
  7064  		v2.AuxInt = i
  7065  		v2.AddArg(p)
  7066  		v1.AddArg(v2)
  7067  		v1.AddArg(mem)
  7068  		v0.AddArg(v1)
  7069  		return true
  7070  	}
  7071  	return false
  7072  }
  7073  func rewriteValueARM64_OpARM64ORconst(v *Value, config *Config) bool {
  7074  	b := v.Block
  7075  	_ = b
  7076  	// match: (ORconst  [0]  x)
  7077  	// cond:
  7078  	// result: x
  7079  	for {
  7080  		if v.AuxInt != 0 {
  7081  			break
  7082  		}
  7083  		x := v.Args[0]
  7084  		v.reset(OpCopy)
  7085  		v.Type = x.Type
  7086  		v.AddArg(x)
  7087  		return true
  7088  	}
  7089  	// match: (ORconst  [-1] _)
  7090  	// cond:
  7091  	// result: (MOVDconst [-1])
  7092  	for {
  7093  		if v.AuxInt != -1 {
  7094  			break
  7095  		}
  7096  		v.reset(OpARM64MOVDconst)
  7097  		v.AuxInt = -1
  7098  		return true
  7099  	}
  7100  	// match: (ORconst  [c] (MOVDconst [d]))
  7101  	// cond:
  7102  	// result: (MOVDconst [c|d])
  7103  	for {
  7104  		c := v.AuxInt
  7105  		v_0 := v.Args[0]
  7106  		if v_0.Op != OpARM64MOVDconst {
  7107  			break
  7108  		}
  7109  		d := v_0.AuxInt
  7110  		v.reset(OpARM64MOVDconst)
  7111  		v.AuxInt = c | d
  7112  		return true
  7113  	}
  7114  	// match: (ORconst  [c] (ORconst [d] x))
  7115  	// cond:
  7116  	// result: (ORconst [c|d] x)
  7117  	for {
  7118  		c := v.AuxInt
  7119  		v_0 := v.Args[0]
  7120  		if v_0.Op != OpARM64ORconst {
  7121  			break
  7122  		}
  7123  		d := v_0.AuxInt
  7124  		x := v_0.Args[0]
  7125  		v.reset(OpARM64ORconst)
  7126  		v.AuxInt = c | d
  7127  		v.AddArg(x)
  7128  		return true
  7129  	}
  7130  	return false
  7131  }
  7132  func rewriteValueARM64_OpARM64ORshiftLL(v *Value, config *Config) bool {
  7133  	b := v.Block
  7134  	_ = b
  7135  	// match: (ORshiftLL  (MOVDconst [c]) x [d])
  7136  	// cond:
  7137  	// result: (ORconst  [c] (SLLconst <x.Type> x [d]))
  7138  	for {
  7139  		d := v.AuxInt
  7140  		v_0 := v.Args[0]
  7141  		if v_0.Op != OpARM64MOVDconst {
  7142  			break
  7143  		}
  7144  		c := v_0.AuxInt
  7145  		x := v.Args[1]
  7146  		v.reset(OpARM64ORconst)
  7147  		v.AuxInt = c
  7148  		v0 := b.NewValue0(v.Line, OpARM64SLLconst, x.Type)
  7149  		v0.AuxInt = d
  7150  		v0.AddArg(x)
  7151  		v.AddArg(v0)
  7152  		return true
  7153  	}
  7154  	// match: (ORshiftLL  x (MOVDconst [c]) [d])
  7155  	// cond:
  7156  	// result: (ORconst  x [int64(uint64(c)<<uint64(d))])
  7157  	for {
  7158  		d := v.AuxInt
  7159  		x := v.Args[0]
  7160  		v_1 := v.Args[1]
  7161  		if v_1.Op != OpARM64MOVDconst {
  7162  			break
  7163  		}
  7164  		c := v_1.AuxInt
  7165  		v.reset(OpARM64ORconst)
  7166  		v.AuxInt = int64(uint64(c) << uint64(d))
  7167  		v.AddArg(x)
  7168  		return true
  7169  	}
  7170  	// match: (ORshiftLL  x y:(SLLconst x [c]) [d])
  7171  	// cond: c==d
  7172  	// result: y
  7173  	for {
  7174  		d := v.AuxInt
  7175  		x := v.Args[0]
  7176  		y := v.Args[1]
  7177  		if y.Op != OpARM64SLLconst {
  7178  			break
  7179  		}
  7180  		c := y.AuxInt
  7181  		if x != y.Args[0] {
  7182  			break
  7183  		}
  7184  		if !(c == d) {
  7185  			break
  7186  		}
  7187  		v.reset(OpCopy)
  7188  		v.Type = y.Type
  7189  		v.AddArg(y)
  7190  		return true
  7191  	}
  7192  	// match: (ORshiftLL <t> [8] 	y0:(MOVDnop x0:(MOVBUload [i]   {s} p mem)) 	y1:(MOVDnop x1:(MOVBUload [i+1] {s} p mem)))
  7193  	// cond: x0.Uses == 1 && x1.Uses == 1 	&& y0.Uses == 1 && y1.Uses == 1 	&& mergePoint(b,x0,x1) != nil 	&& clobber(x0) && clobber(x1) 	&& clobber(y0) && clobber(y1)
  7194  	// result: @mergePoint(b,x0,x1) (MOVHUload <t> {s} (OffPtr <p.Type> [i] p) mem)
  7195  	for {
  7196  		t := v.Type
  7197  		if v.AuxInt != 8 {
  7198  			break
  7199  		}
  7200  		y0 := v.Args[0]
  7201  		if y0.Op != OpARM64MOVDnop {
  7202  			break
  7203  		}
  7204  		x0 := y0.Args[0]
  7205  		if x0.Op != OpARM64MOVBUload {
  7206  			break
  7207  		}
  7208  		i := x0.AuxInt
  7209  		s := x0.Aux
  7210  		p := x0.Args[0]
  7211  		mem := x0.Args[1]
  7212  		y1 := v.Args[1]
  7213  		if y1.Op != OpARM64MOVDnop {
  7214  			break
  7215  		}
  7216  		x1 := y1.Args[0]
  7217  		if x1.Op != OpARM64MOVBUload {
  7218  			break
  7219  		}
  7220  		if x1.AuxInt != i+1 {
  7221  			break
  7222  		}
  7223  		if x1.Aux != s {
  7224  			break
  7225  		}
  7226  		if p != x1.Args[0] {
  7227  			break
  7228  		}
  7229  		if mem != x1.Args[1] {
  7230  			break
  7231  		}
  7232  		if !(x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b, x0, x1) != nil && clobber(x0) && clobber(x1) && clobber(y0) && clobber(y1)) {
  7233  			break
  7234  		}
  7235  		b = mergePoint(b, x0, x1)
  7236  		v0 := b.NewValue0(v.Line, OpARM64MOVHUload, t)
  7237  		v.reset(OpCopy)
  7238  		v.AddArg(v0)
  7239  		v0.Aux = s
  7240  		v1 := b.NewValue0(v.Line, OpOffPtr, p.Type)
  7241  		v1.AuxInt = i
  7242  		v1.AddArg(p)
  7243  		v0.AddArg(v1)
  7244  		v0.AddArg(mem)
  7245  		return true
  7246  	}
  7247  	// match: (ORshiftLL <t> [24] o0:(ORshiftLL [16] 	            x0:(MOVHUload [i]   {s} p mem) 	y1:(MOVDnop x1:(MOVBUload [i+2] {s} p mem))) 	y2:(MOVDnop x2:(MOVBUload [i+3] {s} p mem)))
  7248  	// cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 	&& y1.Uses == 1 && y2.Uses == 1 	&& o0.Uses == 1 	&& mergePoint(b,x0,x1,x2) != nil 	&& clobber(x0) && clobber(x1) && clobber(x2) 	&& clobber(y1) && clobber(y2) 	&& clobber(o0)
  7249  	// result: @mergePoint(b,x0,x1,x2) (MOVWUload <t> {s} (OffPtr <p.Type> [i] p) mem)
  7250  	for {
  7251  		t := v.Type
  7252  		if v.AuxInt != 24 {
  7253  			break
  7254  		}
  7255  		o0 := v.Args[0]
  7256  		if o0.Op != OpARM64ORshiftLL {
  7257  			break
  7258  		}
  7259  		if o0.AuxInt != 16 {
  7260  			break
  7261  		}
  7262  		x0 := o0.Args[0]
  7263  		if x0.Op != OpARM64MOVHUload {
  7264  			break
  7265  		}
  7266  		i := x0.AuxInt
  7267  		s := x0.Aux
  7268  		p := x0.Args[0]
  7269  		mem := x0.Args[1]
  7270  		y1 := o0.Args[1]
  7271  		if y1.Op != OpARM64MOVDnop {
  7272  			break
  7273  		}
  7274  		x1 := y1.Args[0]
  7275  		if x1.Op != OpARM64MOVBUload {
  7276  			break
  7277  		}
  7278  		if x1.AuxInt != i+2 {
  7279  			break
  7280  		}
  7281  		if x1.Aux != s {
  7282  			break
  7283  		}
  7284  		if p != x1.Args[0] {
  7285  			break
  7286  		}
  7287  		if mem != x1.Args[1] {
  7288  			break
  7289  		}
  7290  		y2 := v.Args[1]
  7291  		if y2.Op != OpARM64MOVDnop {
  7292  			break
  7293  		}
  7294  		x2 := y2.Args[0]
  7295  		if x2.Op != OpARM64MOVBUload {
  7296  			break
  7297  		}
  7298  		if x2.AuxInt != i+3 {
  7299  			break
  7300  		}
  7301  		if x2.Aux != s {
  7302  			break
  7303  		}
  7304  		if p != x2.Args[0] {
  7305  			break
  7306  		}
  7307  		if mem != x2.Args[1] {
  7308  			break
  7309  		}
  7310  		if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b, x0, x1, x2) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(y1) && clobber(y2) && clobber(o0)) {
  7311  			break
  7312  		}
  7313  		b = mergePoint(b, x0, x1, x2)
  7314  		v0 := b.NewValue0(v.Line, OpARM64MOVWUload, t)
  7315  		v.reset(OpCopy)
  7316  		v.AddArg(v0)
  7317  		v0.Aux = s
  7318  		v1 := b.NewValue0(v.Line, OpOffPtr, p.Type)
  7319  		v1.AuxInt = i
  7320  		v1.AddArg(p)
  7321  		v0.AddArg(v1)
  7322  		v0.AddArg(mem)
  7323  		return true
  7324  	}
  7325  	// match: (ORshiftLL <t> [56] o0:(ORshiftLL [48] o1:(ORshiftLL [40] o2:(ORshiftLL [32] 	            x0:(MOVWUload [i]   {s} p mem) 	y1:(MOVDnop x1:(MOVBUload [i+4] {s} p mem))) 	y2:(MOVDnop x2:(MOVBUload [i+5] {s} p mem))) 	y3:(MOVDnop x3:(MOVBUload [i+6] {s} p mem))) 	y4:(MOVDnop x4:(MOVBUload [i+7] {s} p mem)))
  7326  	// cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 	&& y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 	&& o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 	&& mergePoint(b,x0,x1,x2,x3,x4) != nil 	&& clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) 	&& clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) 	&& clobber(o0) && clobber(o1) && clobber(o2)
  7327  	// result: @mergePoint(b,x0,x1,x2,x3,x4) (MOVDload <t> {s} (OffPtr <p.Type> [i] p) mem)
  7328  	for {
  7329  		t := v.Type
  7330  		if v.AuxInt != 56 {
  7331  			break
  7332  		}
  7333  		o0 := v.Args[0]
  7334  		if o0.Op != OpARM64ORshiftLL {
  7335  			break
  7336  		}
  7337  		if o0.AuxInt != 48 {
  7338  			break
  7339  		}
  7340  		o1 := o0.Args[0]
  7341  		if o1.Op != OpARM64ORshiftLL {
  7342  			break
  7343  		}
  7344  		if o1.AuxInt != 40 {
  7345  			break
  7346  		}
  7347  		o2 := o1.Args[0]
  7348  		if o2.Op != OpARM64ORshiftLL {
  7349  			break
  7350  		}
  7351  		if o2.AuxInt != 32 {
  7352  			break
  7353  		}
  7354  		x0 := o2.Args[0]
  7355  		if x0.Op != OpARM64MOVWUload {
  7356  			break
  7357  		}
  7358  		i := x0.AuxInt
  7359  		s := x0.Aux
  7360  		p := x0.Args[0]
  7361  		mem := x0.Args[1]
  7362  		y1 := o2.Args[1]
  7363  		if y1.Op != OpARM64MOVDnop {
  7364  			break
  7365  		}
  7366  		x1 := y1.Args[0]
  7367  		if x1.Op != OpARM64MOVBUload {
  7368  			break
  7369  		}
  7370  		if x1.AuxInt != i+4 {
  7371  			break
  7372  		}
  7373  		if x1.Aux != s {
  7374  			break
  7375  		}
  7376  		if p != x1.Args[0] {
  7377  			break
  7378  		}
  7379  		if mem != x1.Args[1] {
  7380  			break
  7381  		}
  7382  		y2 := o1.Args[1]
  7383  		if y2.Op != OpARM64MOVDnop {
  7384  			break
  7385  		}
  7386  		x2 := y2.Args[0]
  7387  		if x2.Op != OpARM64MOVBUload {
  7388  			break
  7389  		}
  7390  		if x2.AuxInt != i+5 {
  7391  			break
  7392  		}
  7393  		if x2.Aux != s {
  7394  			break
  7395  		}
  7396  		if p != x2.Args[0] {
  7397  			break
  7398  		}
  7399  		if mem != x2.Args[1] {
  7400  			break
  7401  		}
  7402  		y3 := o0.Args[1]
  7403  		if y3.Op != OpARM64MOVDnop {
  7404  			break
  7405  		}
  7406  		x3 := y3.Args[0]
  7407  		if x3.Op != OpARM64MOVBUload {
  7408  			break
  7409  		}
  7410  		if x3.AuxInt != i+6 {
  7411  			break
  7412  		}
  7413  		if x3.Aux != s {
  7414  			break
  7415  		}
  7416  		if p != x3.Args[0] {
  7417  			break
  7418  		}
  7419  		if mem != x3.Args[1] {
  7420  			break
  7421  		}
  7422  		y4 := v.Args[1]
  7423  		if y4.Op != OpARM64MOVDnop {
  7424  			break
  7425  		}
  7426  		x4 := y4.Args[0]
  7427  		if x4.Op != OpARM64MOVBUload {
  7428  			break
  7429  		}
  7430  		if x4.AuxInt != i+7 {
  7431  			break
  7432  		}
  7433  		if x4.Aux != s {
  7434  			break
  7435  		}
  7436  		if p != x4.Args[0] {
  7437  			break
  7438  		}
  7439  		if mem != x4.Args[1] {
  7440  			break
  7441  		}
  7442  		if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(o0) && clobber(o1) && clobber(o2)) {
  7443  			break
  7444  		}
  7445  		b = mergePoint(b, x0, x1, x2, x3, x4)
  7446  		v0 := b.NewValue0(v.Line, OpARM64MOVDload, t)
  7447  		v.reset(OpCopy)
  7448  		v.AddArg(v0)
  7449  		v0.Aux = s
  7450  		v1 := b.NewValue0(v.Line, OpOffPtr, p.Type)
  7451  		v1.AuxInt = i
  7452  		v1.AddArg(p)
  7453  		v0.AddArg(v1)
  7454  		v0.AddArg(mem)
  7455  		return true
  7456  	}
  7457  	// match: (ORshiftLL <t> [8] 	y0:(MOVDnop x0:(MOVBUload [i]   {s} p mem)) 	y1:(MOVDnop x1:(MOVBUload [i-1] {s} p mem)))
  7458  	// cond: ((i-1)%2 == 0 || i-1<256 && i-1>-256 && !isArg(s) && !isAuto(s)) 	&& x0.Uses == 1 && x1.Uses == 1 	&& y0.Uses == 1 && y1.Uses == 1 	&& mergePoint(b,x0,x1) != nil 	&& clobber(x0) && clobber(x1) 	&& clobber(y0) && clobber(y1)
  7459  	// result: @mergePoint(b,x0,x1) (REV16W <t> (MOVHUload <t> [i-1] {s} p mem))
  7460  	for {
  7461  		t := v.Type
  7462  		if v.AuxInt != 8 {
  7463  			break
  7464  		}
  7465  		y0 := v.Args[0]
  7466  		if y0.Op != OpARM64MOVDnop {
  7467  			break
  7468  		}
  7469  		x0 := y0.Args[0]
  7470  		if x0.Op != OpARM64MOVBUload {
  7471  			break
  7472  		}
  7473  		i := x0.AuxInt
  7474  		s := x0.Aux
  7475  		p := x0.Args[0]
  7476  		mem := x0.Args[1]
  7477  		y1 := v.Args[1]
  7478  		if y1.Op != OpARM64MOVDnop {
  7479  			break
  7480  		}
  7481  		x1 := y1.Args[0]
  7482  		if x1.Op != OpARM64MOVBUload {
  7483  			break
  7484  		}
  7485  		if x1.AuxInt != i-1 {
  7486  			break
  7487  		}
  7488  		if x1.Aux != s {
  7489  			break
  7490  		}
  7491  		if p != x1.Args[0] {
  7492  			break
  7493  		}
  7494  		if mem != x1.Args[1] {
  7495  			break
  7496  		}
  7497  		if !(((i-1)%2 == 0 || i-1 < 256 && i-1 > -256 && !isArg(s) && !isAuto(s)) && x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b, x0, x1) != nil && clobber(x0) && clobber(x1) && clobber(y0) && clobber(y1)) {
  7498  			break
  7499  		}
  7500  		b = mergePoint(b, x0, x1)
  7501  		v0 := b.NewValue0(v.Line, OpARM64REV16W, t)
  7502  		v.reset(OpCopy)
  7503  		v.AddArg(v0)
  7504  		v1 := b.NewValue0(v.Line, OpARM64MOVHUload, t)
  7505  		v1.AuxInt = i - 1
  7506  		v1.Aux = s
  7507  		v1.AddArg(p)
  7508  		v1.AddArg(mem)
  7509  		v0.AddArg(v1)
  7510  		return true
  7511  	}
  7512  	// match: (ORshiftLL <t> [24] o0:(ORshiftLL [16] 	y0:(REV16W  x0:(MOVHUload [i]   {s} p mem)) 	y1:(MOVDnop x1:(MOVBUload [i-1] {s} p mem))) 	y2:(MOVDnop x2:(MOVBUload [i-2] {s} p mem)))
  7513  	// cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 	&& y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 	&& o0.Uses == 1 	&& mergePoint(b,x0,x1,x2) != nil 	&& clobber(x0) && clobber(x1) && clobber(x2) 	&& clobber(y0) && clobber(y1) && clobber(y2) 	&& clobber(o0)
  7514  	// result: @mergePoint(b,x0,x1,x2) (REVW <t> (MOVWUload <t> {s} (OffPtr <p.Type> [i-2] p) mem))
  7515  	for {
  7516  		t := v.Type
  7517  		if v.AuxInt != 24 {
  7518  			break
  7519  		}
  7520  		o0 := v.Args[0]
  7521  		if o0.Op != OpARM64ORshiftLL {
  7522  			break
  7523  		}
  7524  		if o0.AuxInt != 16 {
  7525  			break
  7526  		}
  7527  		y0 := o0.Args[0]
  7528  		if y0.Op != OpARM64REV16W {
  7529  			break
  7530  		}
  7531  		x0 := y0.Args[0]
  7532  		if x0.Op != OpARM64MOVHUload {
  7533  			break
  7534  		}
  7535  		i := x0.AuxInt
  7536  		s := x0.Aux
  7537  		p := x0.Args[0]
  7538  		mem := x0.Args[1]
  7539  		y1 := o0.Args[1]
  7540  		if y1.Op != OpARM64MOVDnop {
  7541  			break
  7542  		}
  7543  		x1 := y1.Args[0]
  7544  		if x1.Op != OpARM64MOVBUload {
  7545  			break
  7546  		}
  7547  		if x1.AuxInt != i-1 {
  7548  			break
  7549  		}
  7550  		if x1.Aux != s {
  7551  			break
  7552  		}
  7553  		if p != x1.Args[0] {
  7554  			break
  7555  		}
  7556  		if mem != x1.Args[1] {
  7557  			break
  7558  		}
  7559  		y2 := v.Args[1]
  7560  		if y2.Op != OpARM64MOVDnop {
  7561  			break
  7562  		}
  7563  		x2 := y2.Args[0]
  7564  		if x2.Op != OpARM64MOVBUload {
  7565  			break
  7566  		}
  7567  		if x2.AuxInt != i-2 {
  7568  			break
  7569  		}
  7570  		if x2.Aux != s {
  7571  			break
  7572  		}
  7573  		if p != x2.Args[0] {
  7574  			break
  7575  		}
  7576  		if mem != x2.Args[1] {
  7577  			break
  7578  		}
  7579  		if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b, x0, x1, x2) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(o0)) {
  7580  			break
  7581  		}
  7582  		b = mergePoint(b, x0, x1, x2)
  7583  		v0 := b.NewValue0(v.Line, OpARM64REVW, t)
  7584  		v.reset(OpCopy)
  7585  		v.AddArg(v0)
  7586  		v1 := b.NewValue0(v.Line, OpARM64MOVWUload, t)
  7587  		v1.Aux = s
  7588  		v2 := b.NewValue0(v.Line, OpOffPtr, p.Type)
  7589  		v2.AuxInt = i - 2
  7590  		v2.AddArg(p)
  7591  		v1.AddArg(v2)
  7592  		v1.AddArg(mem)
  7593  		v0.AddArg(v1)
  7594  		return true
  7595  	}
  7596  	// match: (ORshiftLL <t> [56] o0:(ORshiftLL [48] o1:(ORshiftLL [40] o2:(ORshiftLL [32] 	y0:(REVW    x0:(MOVWUload [i]   {s} p mem)) 	y1:(MOVDnop x1:(MOVBUload [i-1] {s} p mem))) 	y2:(MOVDnop x2:(MOVBUload [i-2] {s} p mem))) 	y3:(MOVDnop x3:(MOVBUload [i-3] {s} p mem))) 	y4:(MOVDnop x4:(MOVBUload [i-4] {s} p mem)))
  7597  	// cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 	&& y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 	&& o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 	&& mergePoint(b,x0,x1,x2,x3,x4) != nil 	&& clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) 	&& clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) 	&& clobber(o0) && clobber(o1) && clobber(o2)
  7598  	// result: @mergePoint(b,x0,x1,x2,x3,x4) (REV <t> (MOVDload <t> {s} (OffPtr <p.Type> [i-4] p) mem))
  7599  	for {
  7600  		t := v.Type
  7601  		if v.AuxInt != 56 {
  7602  			break
  7603  		}
  7604  		o0 := v.Args[0]
  7605  		if o0.Op != OpARM64ORshiftLL {
  7606  			break
  7607  		}
  7608  		if o0.AuxInt != 48 {
  7609  			break
  7610  		}
  7611  		o1 := o0.Args[0]
  7612  		if o1.Op != OpARM64ORshiftLL {
  7613  			break
  7614  		}
  7615  		if o1.AuxInt != 40 {
  7616  			break
  7617  		}
  7618  		o2 := o1.Args[0]
  7619  		if o2.Op != OpARM64ORshiftLL {
  7620  			break
  7621  		}
  7622  		if o2.AuxInt != 32 {
  7623  			break
  7624  		}
  7625  		y0 := o2.Args[0]
  7626  		if y0.Op != OpARM64REVW {
  7627  			break
  7628  		}
  7629  		x0 := y0.Args[0]
  7630  		if x0.Op != OpARM64MOVWUload {
  7631  			break
  7632  		}
  7633  		i := x0.AuxInt
  7634  		s := x0.Aux
  7635  		p := x0.Args[0]
  7636  		mem := x0.Args[1]
  7637  		y1 := o2.Args[1]
  7638  		if y1.Op != OpARM64MOVDnop {
  7639  			break
  7640  		}
  7641  		x1 := y1.Args[0]
  7642  		if x1.Op != OpARM64MOVBUload {
  7643  			break
  7644  		}
  7645  		if x1.AuxInt != i-1 {
  7646  			break
  7647  		}
  7648  		if x1.Aux != s {
  7649  			break
  7650  		}
  7651  		if p != x1.Args[0] {
  7652  			break
  7653  		}
  7654  		if mem != x1.Args[1] {
  7655  			break
  7656  		}
  7657  		y2 := o1.Args[1]
  7658  		if y2.Op != OpARM64MOVDnop {
  7659  			break
  7660  		}
  7661  		x2 := y2.Args[0]
  7662  		if x2.Op != OpARM64MOVBUload {
  7663  			break
  7664  		}
  7665  		if x2.AuxInt != i-2 {
  7666  			break
  7667  		}
  7668  		if x2.Aux != s {
  7669  			break
  7670  		}
  7671  		if p != x2.Args[0] {
  7672  			break
  7673  		}
  7674  		if mem != x2.Args[1] {
  7675  			break
  7676  		}
  7677  		y3 := o0.Args[1]
  7678  		if y3.Op != OpARM64MOVDnop {
  7679  			break
  7680  		}
  7681  		x3 := y3.Args[0]
  7682  		if x3.Op != OpARM64MOVBUload {
  7683  			break
  7684  		}
  7685  		if x3.AuxInt != i-3 {
  7686  			break
  7687  		}
  7688  		if x3.Aux != s {
  7689  			break
  7690  		}
  7691  		if p != x3.Args[0] {
  7692  			break
  7693  		}
  7694  		if mem != x3.Args[1] {
  7695  			break
  7696  		}
  7697  		y4 := v.Args[1]
  7698  		if y4.Op != OpARM64MOVDnop {
  7699  			break
  7700  		}
  7701  		x4 := y4.Args[0]
  7702  		if x4.Op != OpARM64MOVBUload {
  7703  			break
  7704  		}
  7705  		if x4.AuxInt != i-4 {
  7706  			break
  7707  		}
  7708  		if x4.Aux != s {
  7709  			break
  7710  		}
  7711  		if p != x4.Args[0] {
  7712  			break
  7713  		}
  7714  		if mem != x4.Args[1] {
  7715  			break
  7716  		}
  7717  		if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(o0) && clobber(o1) && clobber(o2)) {
  7718  			break
  7719  		}
  7720  		b = mergePoint(b, x0, x1, x2, x3, x4)
  7721  		v0 := b.NewValue0(v.Line, OpARM64REV, t)
  7722  		v.reset(OpCopy)
  7723  		v.AddArg(v0)
  7724  		v1 := b.NewValue0(v.Line, OpARM64MOVDload, t)
  7725  		v1.Aux = s
  7726  		v2 := b.NewValue0(v.Line, OpOffPtr, p.Type)
  7727  		v2.AuxInt = i - 4
  7728  		v2.AddArg(p)
  7729  		v1.AddArg(v2)
  7730  		v1.AddArg(mem)
  7731  		v0.AddArg(v1)
  7732  		return true
  7733  	}
  7734  	return false
  7735  }
// rewriteValueARM64_OpARM64ORshiftRA applies the generated rewrite rules for
// ORshiftRA (OR with an arithmetically-right-shifted second operand), mutating
// v in place. It reports whether any rule fired.
// Autogenerated from gen/ARM64.rules — change the rules file, not this code.
func rewriteValueARM64_OpARM64ORshiftRA(v *Value, config *Config) bool {
	b := v.Block
	_ = b // b may go unused depending on which rules need to allocate new values
	// Each `for { ... break }` block attempts one rule; `break` abandons it.
	// match: (ORshiftRA  (MOVDconst [c]) x [d])
	// cond:
	// result: (ORconst  [c] (SRAconst <x.Type> x [d]))
	for {
		d := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		v.reset(OpARM64ORconst)
		v.AuxInt = c
		v0 := b.NewValue0(v.Line, OpARM64SRAconst, x.Type)
		v0.AuxInt = d
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (ORshiftRA  x (MOVDconst [c]) [d])
	// cond:
	// result: (ORconst  x [int64(int64(c)>>uint64(d))])
	for {
		d := v.AuxInt
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64ORconst)
		// Fold the shift into the constant: arithmetic (sign-extending) shift.
		v.AuxInt = int64(int64(c) >> uint64(d))
		v.AddArg(x)
		return true
	}
	// match: (ORshiftRA  x y:(SRAconst x [c]) [d])
	// cond: c==d
	// result: y
	for {
		d := v.AuxInt
		x := v.Args[0]
		y := v.Args[1]
		if y.Op != OpARM64SRAconst {
			break
		}
		c := y.AuxInt
		if x != y.Args[0] {
			break
		}
		if !(c == d) {
			break
		}
		// x|x == x (both operands are the same shifted value), so reuse y.
		v.reset(OpCopy)
		v.Type = y.Type
		v.AddArg(y)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64ORshiftRL applies the generated rewrite rules for
// ORshiftRL (OR with a logically-right-shifted second operand), mutating v in
// place. It reports whether any rule fired.
// Autogenerated from gen/ARM64.rules — change the rules file, not this code.
func rewriteValueARM64_OpARM64ORshiftRL(v *Value, config *Config) bool {
	b := v.Block
	_ = b // b may go unused depending on which rules need to allocate new values
	// Each `for { ... break }` block attempts one rule; `break` abandons it.
	// match: (ORshiftRL  (MOVDconst [c]) x [d])
	// cond:
	// result: (ORconst  [c] (SRLconst <x.Type> x [d]))
	for {
		d := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		v.reset(OpARM64ORconst)
		v.AuxInt = c
		v0 := b.NewValue0(v.Line, OpARM64SRLconst, x.Type)
		v0.AuxInt = d
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (ORshiftRL  x (MOVDconst [c]) [d])
	// cond:
	// result: (ORconst  x [int64(uint64(c)>>uint64(d))])
	for {
		d := v.AuxInt
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64ORconst)
		// Fold the shift into the constant: logical (zero-filling) shift.
		v.AuxInt = int64(uint64(c) >> uint64(d))
		v.AddArg(x)
		return true
	}
	// match: (ORshiftRL  x y:(SRLconst x [c]) [d])
	// cond: c==d
	// result: y
	for {
		d := v.AuxInt
		x := v.Args[0]
		y := v.Args[1]
		if y.Op != OpARM64SRLconst {
			break
		}
		c := y.AuxInt
		if x != y.Args[0] {
			break
		}
		if !(c == d) {
			break
		}
		// x|x == x (both operands are the same shifted value), so reuse y.
		v.reset(OpCopy)
		v.Type = y.Type
		v.AddArg(y)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64SLL applies the generated rewrite rules for SLL
// (variable left shift), mutating v in place. It reports whether a rule fired.
// Autogenerated from gen/ARM64.rules — change the rules file, not this code.
func rewriteValueARM64_OpARM64SLL(v *Value, config *Config) bool {
	b := v.Block
	_ = b // b unused by this rule set; kept by the generator for uniformity
	// match: (SLL x (MOVDconst [c]))
	// cond:
	// result: (SLLconst x [c&63])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64SLLconst)
		// Shift amount is taken mod 64, matching the ARM64 register-shift semantics.
		v.AuxInt = c & 63
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64SLLconst applies the generated rewrite rules for
// SLLconst (left shift by constant), mutating v in place. It reports whether
// a rule fired.
// Autogenerated from gen/ARM64.rules — change the rules file, not this code.
func rewriteValueARM64_OpARM64SLLconst(v *Value, config *Config) bool {
	b := v.Block
	_ = b // b unused by this rule set; kept by the generator for uniformity
	// match: (SLLconst [c] (MOVDconst [d]))
	// cond:
	// result: (MOVDconst [int64(d)<<uint64(c)])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		d := v_0.AuxInt
		// Constant-fold the shift at compile time.
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64(d) << uint64(c)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64SRA applies the generated rewrite rules for SRA
// (variable arithmetic right shift), mutating v in place. It reports whether
// a rule fired.
// Autogenerated from gen/ARM64.rules — change the rules file, not this code.
func rewriteValueARM64_OpARM64SRA(v *Value, config *Config) bool {
	b := v.Block
	_ = b // b unused by this rule set; kept by the generator for uniformity
	// match: (SRA x (MOVDconst [c]))
	// cond:
	// result: (SRAconst x [c&63])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64SRAconst)
		// Shift amount is taken mod 64, matching the ARM64 register-shift semantics.
		v.AuxInt = c & 63
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64SRAconst applies the generated rewrite rules for
// SRAconst (arithmetic right shift by constant), mutating v in place. It
// reports whether a rule fired.
// Autogenerated from gen/ARM64.rules — change the rules file, not this code.
func rewriteValueARM64_OpARM64SRAconst(v *Value, config *Config) bool {
	b := v.Block
	_ = b // b unused by this rule set; kept by the generator for uniformity
	// match: (SRAconst [c] (MOVDconst [d]))
	// cond:
	// result: (MOVDconst [int64(d)>>uint64(c)])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		d := v_0.AuxInt
		// Constant-fold: signed >> in Go is arithmetic, matching SRA semantics.
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64(d) >> uint64(c)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64SRL applies the generated rewrite rules for SRL
// (variable logical right shift), mutating v in place. It reports whether a
// rule fired.
// Autogenerated from gen/ARM64.rules — change the rules file, not this code.
func rewriteValueARM64_OpARM64SRL(v *Value, config *Config) bool {
	b := v.Block
	_ = b // b unused by this rule set; kept by the generator for uniformity
	// match: (SRL x (MOVDconst [c]))
	// cond:
	// result: (SRLconst x [c&63])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64SRLconst)
		// Shift amount is taken mod 64, matching the ARM64 register-shift semantics.
		v.AuxInt = c & 63
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64SRLconst applies the generated rewrite rules for
// SRLconst (logical right shift by constant), mutating v in place. It reports
// whether a rule fired.
// Autogenerated from gen/ARM64.rules — change the rules file, not this code.
func rewriteValueARM64_OpARM64SRLconst(v *Value, config *Config) bool {
	b := v.Block
	_ = b // b unused by this rule set; kept by the generator for uniformity
	// match: (SRLconst [c] (MOVDconst [d]))
	// cond:
	// result: (MOVDconst [int64(uint64(d)>>uint64(c))])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		d := v_0.AuxInt
		// Constant-fold: shift as unsigned so the fill bits are zero (logical shift).
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64(uint64(d) >> uint64(c))
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64SUB applies the generated rewrite rules for SUB,
// mutating v in place. It reports whether any rule fired. Rules fold constant
// subtrahends, recognize x-x==0, and merge a shifted subtrahend into the
// shifted-operand SUB forms.
// Autogenerated from gen/ARM64.rules — change the rules file, not this code.
func rewriteValueARM64_OpARM64SUB(v *Value, config *Config) bool {
	b := v.Block
	_ = b // b unused by this rule set; kept by the generator for uniformity
	// Each `for { ... break }` block attempts one rule; `break` abandons it.
	// match: (SUB x (MOVDconst [c]))
	// cond:
	// result: (SUBconst [c] x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64SUBconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (SUB x x)
	// cond:
	// result: (MOVDconst [0])
	for {
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		// x - x is always zero.
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (SUB x (SLLconst [c] y))
	// cond:
	// result: (SUBshiftLL x y [c])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SLLconst {
			break
		}
		c := v_1.AuxInt
		y := v_1.Args[0]
		v.reset(OpARM64SUBshiftLL)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (SUB x (SRLconst [c] y))
	// cond:
	// result: (SUBshiftRL x y [c])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRLconst {
			break
		}
		c := v_1.AuxInt
		y := v_1.Args[0]
		v.reset(OpARM64SUBshiftRL)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (SUB x (SRAconst [c] y))
	// cond:
	// result: (SUBshiftRA x y [c])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRAconst {
			break
		}
		c := v_1.AuxInt
		y := v_1.Args[0]
		v.reset(OpARM64SUBshiftRA)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64SUBconst applies the generated rewrite rules for
// SUBconst (subtract a constant), mutating v in place. It reports whether any
// rule fired. Rules drop the identity SUBconst[0], constant-fold, and collapse
// stacked SUBconst/ADDconst into a single ADDconst.
// Autogenerated from gen/ARM64.rules — change the rules file, not this code.
func rewriteValueARM64_OpARM64SUBconst(v *Value, config *Config) bool {
	b := v.Block
	_ = b // b unused by this rule set; kept by the generator for uniformity
	// match: (SUBconst [0]  x)
	// cond:
	// result: x
	for {
		if v.AuxInt != 0 {
			break
		}
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (SUBconst [c] (MOVDconst [d]))
	// cond:
	// result: (MOVDconst [d-c])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = d - c
		return true
	}
	// match: (SUBconst [c] (SUBconst [d] x))
	// cond:
	// result: (ADDconst [-c-d] x)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SUBconst {
			break
		}
		d := v_0.AuxInt
		x := v_0.Args[0]
		// (x-d)-c == x + (-c-d): fold two subtractions into one addition.
		v.reset(OpARM64ADDconst)
		v.AuxInt = -c - d
		v.AddArg(x)
		return true
	}
	// match: (SUBconst [c] (ADDconst [d] x))
	// cond:
	// result: (ADDconst [-c+d] x)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		d := v_0.AuxInt
		x := v_0.Args[0]
		// (x+d)-c == x + (d-c).
		v.reset(OpARM64ADDconst)
		v.AuxInt = -c + d
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64SUBshiftLL applies the generated rewrite rules for
// SUBshiftLL (subtract a left-shifted operand), mutating v in place. It
// reports whether a rule fired.
// Autogenerated from gen/ARM64.rules — change the rules file, not this code.
func rewriteValueARM64_OpARM64SUBshiftLL(v *Value, config *Config) bool {
	b := v.Block
	_ = b // b unused by this rule set; kept by the generator for uniformity
	// match: (SUBshiftLL x (MOVDconst [c]) [d])
	// cond:
	// result: (SUBconst x [int64(uint64(c)<<uint64(d))])
	for {
		d := v.AuxInt
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64SUBconst)
		// Fold the shift into the constant subtrahend.
		v.AuxInt = int64(uint64(c) << uint64(d))
		v.AddArg(x)
		return true
	}
	// match: (SUBshiftLL x (SLLconst x [c]) [d])
	// cond: c==d
	// result: (MOVDconst [0])
	for {
		d := v.AuxInt
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SLLconst {
			break
		}
		c := v_1.AuxInt
		if x != v_1.Args[0] {
			break
		}
		if !(c == d) {
			break
		}
		// Both operands are the same shifted value, so the difference is zero.
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64SUBshiftRA applies the generated rewrite rules for
// SUBshiftRA (subtract an arithmetically-right-shifted operand), mutating v in
// place. It reports whether a rule fired.
// Autogenerated from gen/ARM64.rules — change the rules file, not this code.
func rewriteValueARM64_OpARM64SUBshiftRA(v *Value, config *Config) bool {
	b := v.Block
	_ = b // b unused by this rule set; kept by the generator for uniformity
	// match: (SUBshiftRA x (MOVDconst [c]) [d])
	// cond:
	// result: (SUBconst x [int64(int64(c)>>uint64(d))])
	for {
		d := v.AuxInt
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64SUBconst)
		// Fold the arithmetic (sign-extending) shift into the constant subtrahend.
		v.AuxInt = int64(int64(c) >> uint64(d))
		v.AddArg(x)
		return true
	}
	// match: (SUBshiftRA x (SRAconst x [c]) [d])
	// cond: c==d
	// result: (MOVDconst [0])
	for {
		d := v.AuxInt
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRAconst {
			break
		}
		c := v_1.AuxInt
		if x != v_1.Args[0] {
			break
		}
		if !(c == d) {
			break
		}
		// Both operands are the same shifted value, so the difference is zero.
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64SUBshiftRL applies the generated rewrite rules for
// SUBshiftRL (subtract a logically-right-shifted operand), mutating v in
// place. It reports whether a rule fired.
// Autogenerated from gen/ARM64.rules — change the rules file, not this code.
func rewriteValueARM64_OpARM64SUBshiftRL(v *Value, config *Config) bool {
	b := v.Block
	_ = b // b unused by this rule set; kept by the generator for uniformity
	// match: (SUBshiftRL x (MOVDconst [c]) [d])
	// cond:
	// result: (SUBconst x [int64(uint64(c)>>uint64(d))])
	for {
		d := v.AuxInt
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64SUBconst)
		// Fold the logical (zero-filling) shift into the constant subtrahend.
		v.AuxInt = int64(uint64(c) >> uint64(d))
		v.AddArg(x)
		return true
	}
	// match: (SUBshiftRL x (SRLconst x [c]) [d])
	// cond: c==d
	// result: (MOVDconst [0])
	for {
		d := v.AuxInt
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRLconst {
			break
		}
		c := v_1.AuxInt
		if x != v_1.Args[0] {
			break
		}
		if !(c == d) {
			break
		}
		// Both operands are the same shifted value, so the difference is zero.
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64UDIV applies the generated rewrite rules for UDIV
// (unsigned 64-bit divide), mutating v in place. It reports whether any rule
// fired: divide by 1 is the identity, divide by a power of two becomes a
// logical right shift, and two constants are folded.
// Autogenerated from gen/ARM64.rules — change the rules file, not this code.
func rewriteValueARM64_OpARM64UDIV(v *Value, config *Config) bool {
	b := v.Block
	_ = b // b unused by this rule set; kept by the generator for uniformity
	// match: (UDIV x (MOVDconst [1]))
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		if v_1.AuxInt != 1 {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (UDIV x (MOVDconst [c]))
	// cond: isPowerOfTwo(c)
	// result: (SRLconst [log2(c)] x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(isPowerOfTwo(c)) {
			break
		}
		// Unsigned division by 2^k is a logical right shift by k.
		v.reset(OpARM64SRLconst)
		v.AuxInt = log2(c)
		v.AddArg(x)
		return true
	}
	// match: (UDIV  (MOVDconst [c]) (MOVDconst [d]))
	// cond:
	// result: (MOVDconst [int64(uint64(c)/uint64(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		d := v_1.AuxInt
		// NOTE(review): assumes d != 0 here; presumably a constant divide-by-zero
		// is rejected earlier in the compiler — confirm against the frontend.
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64(uint64(c) / uint64(d))
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64UDIVW applies the generated rewrite rules for
// UDIVW (unsigned 32-bit divide), mutating v in place. It reports whether any
// rule fired. Conditions compare via uint32 truncation since only the low 32
// bits of the constant are meaningful.
// Autogenerated from gen/ARM64.rules — change the rules file, not this code.
func rewriteValueARM64_OpARM64UDIVW(v *Value, config *Config) bool {
	b := v.Block
	_ = b // b unused by this rule set; kept by the generator for uniformity
	// match: (UDIVW x (MOVDconst [c]))
	// cond: uint32(c)==1
	// result: x
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(uint32(c) == 1) {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (UDIVW x (MOVDconst [c]))
	// cond: isPowerOfTwo(c) && is32Bit(c)
	// result: (SRLconst [log2(c)] x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(isPowerOfTwo(c) && is32Bit(c)) {
			break
		}
		// Unsigned division by 2^k is a logical right shift by k.
		v.reset(OpARM64SRLconst)
		v.AuxInt = log2(c)
		v.AddArg(x)
		return true
	}
	// match: (UDIVW (MOVDconst [c]) (MOVDconst [d]))
	// cond:
	// result: (MOVDconst [int64(uint32(c)/uint32(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		d := v_1.AuxInt
		// NOTE(review): assumes uint32(d) != 0 here; presumably a constant
		// divide-by-zero is rejected earlier in the compiler — confirm.
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64(uint32(c) / uint32(d))
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64UMOD applies the generated rewrite rules for UMOD
// (unsigned 64-bit modulus), mutating v in place. It reports whether any rule
// fired: mod 1 is 0, mod a power of two becomes a bit mask, and two constants
// are folded.
// Autogenerated from gen/ARM64.rules — change the rules file, not this code.
func rewriteValueARM64_OpARM64UMOD(v *Value, config *Config) bool {
	b := v.Block
	_ = b // b unused by this rule set; kept by the generator for uniformity
	// match: (UMOD _ (MOVDconst [1]))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		if v_1.AuxInt != 1 {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (UMOD x (MOVDconst [c]))
	// cond: isPowerOfTwo(c)
	// result: (ANDconst [c-1] x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(isPowerOfTwo(c)) {
			break
		}
		// x mod 2^k keeps only the low k bits: mask with 2^k - 1.
		v.reset(OpARM64ANDconst)
		v.AuxInt = c - 1
		v.AddArg(x)
		return true
	}
	// match: (UMOD  (MOVDconst [c]) (MOVDconst [d]))
	// cond:
	// result: (MOVDconst [int64(uint64(c)%uint64(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		d := v_1.AuxInt
		// NOTE(review): assumes d != 0 here; presumably a constant mod-by-zero
		// is rejected earlier in the compiler — confirm against the frontend.
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64(uint64(c) % uint64(d))
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64UMODW applies the generated rewrite rules for
// UMODW (unsigned 32-bit modulus), mutating v in place. It reports whether
// any rule fired. Conditions compare via uint32 truncation since only the low
// 32 bits of the constant are meaningful.
// Autogenerated from gen/ARM64.rules — change the rules file, not this code.
func rewriteValueARM64_OpARM64UMODW(v *Value, config *Config) bool {
	b := v.Block
	_ = b // b unused by this rule set; kept by the generator for uniformity
	// match: (UMODW _ (MOVDconst [c]))
	// cond: uint32(c)==1
	// result: (MOVDconst [0])
	for {
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(uint32(c) == 1) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (UMODW x (MOVDconst [c]))
	// cond: isPowerOfTwo(c) && is32Bit(c)
	// result: (ANDconst [c-1] x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(isPowerOfTwo(c) && is32Bit(c)) {
			break
		}
		// x mod 2^k keeps only the low k bits: mask with 2^k - 1.
		v.reset(OpARM64ANDconst)
		v.AuxInt = c - 1
		v.AddArg(x)
		return true
	}
	// match: (UMODW (MOVDconst [c]) (MOVDconst [d]))
	// cond:
	// result: (MOVDconst [int64(uint32(c)%uint32(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		d := v_1.AuxInt
		// NOTE(review): assumes uint32(d) != 0 here; presumably a constant
		// mod-by-zero is rejected earlier in the compiler — confirm.
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64(uint32(c) % uint32(d))
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64XOR applies the generated rewrite rules for XOR,
// mutating v in place. It reports whether any rule fired. The constant and
// shifted-operand rules appear in both argument orders because XOR is
// commutative; x^x folds to zero.
// Autogenerated from gen/ARM64.rules — change the rules file, not this code.
func rewriteValueARM64_OpARM64XOR(v *Value, config *Config) bool {
	b := v.Block
	_ = b // b unused by this rule set; kept by the generator for uniformity
	// Each `for { ... break }` block attempts one rule; `break` abandons it.
	// match: (XOR (MOVDconst [c]) x)
	// cond:
	// result: (XORconst [c] x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		v.reset(OpARM64XORconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (XOR x (MOVDconst [c]))
	// cond:
	// result: (XORconst [c] x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64XORconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (XOR x x)
	// cond:
	// result: (MOVDconst [0])
	for {
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		// x ^ x is always zero.
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (XOR x (SLLconst [c] y))
	// cond:
	// result: (XORshiftLL x y [c])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SLLconst {
			break
		}
		c := v_1.AuxInt
		y := v_1.Args[0]
		v.reset(OpARM64XORshiftLL)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (XOR (SLLconst [c] y) x)
	// cond:
	// result: (XORshiftLL x y [c])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SLLconst {
			break
		}
		c := v_0.AuxInt
		y := v_0.Args[0]
		x := v.Args[1]
		v.reset(OpARM64XORshiftLL)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (XOR x (SRLconst [c] y))
	// cond:
	// result: (XORshiftRL x y [c])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRLconst {
			break
		}
		c := v_1.AuxInt
		y := v_1.Args[0]
		v.reset(OpARM64XORshiftRL)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (XOR (SRLconst [c] y) x)
	// cond:
	// result: (XORshiftRL x y [c])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SRLconst {
			break
		}
		c := v_0.AuxInt
		y := v_0.Args[0]
		x := v.Args[1]
		v.reset(OpARM64XORshiftRL)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (XOR x (SRAconst [c] y))
	// cond:
	// result: (XORshiftRA x y [c])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRAconst {
			break
		}
		c := v_1.AuxInt
		y := v_1.Args[0]
		v.reset(OpARM64XORshiftRA)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (XOR (SRAconst [c] y) x)
	// cond:
	// result: (XORshiftRA x y [c])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SRAconst {
			break
		}
		c := v_0.AuxInt
		y := v_0.Args[0]
		x := v.Args[1]
		v.reset(OpARM64XORshiftRA)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64XORconst applies the generated rewrite rules for
// XORconst, mutating v in place. It reports whether any rule fired: XOR with
// 0 is the identity, XOR with -1 is bitwise NOT (MVN), and constants and
// stacked XORconsts are folded.
// Autogenerated from gen/ARM64.rules — change the rules file, not this code.
func rewriteValueARM64_OpARM64XORconst(v *Value, config *Config) bool {
	b := v.Block
	_ = b // b unused by this rule set; kept by the generator for uniformity
	// match: (XORconst [0]  x)
	// cond:
	// result: x
	for {
		if v.AuxInt != 0 {
			break
		}
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (XORconst [-1] x)
	// cond:
	// result: (MVN x)
	for {
		if v.AuxInt != -1 {
			break
		}
		x := v.Args[0]
		// x ^ -1 flips every bit: use the dedicated MVN instruction.
		v.reset(OpARM64MVN)
		v.AddArg(x)
		return true
	}
	// match: (XORconst [c] (MOVDconst [d]))
	// cond:
	// result: (MOVDconst [c^d])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = c ^ d
		return true
	}
	// match: (XORconst [c] (XORconst [d] x))
	// cond:
	// result: (XORconst [c^d] x)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64XORconst {
			break
		}
		d := v_0.AuxInt
		x := v_0.Args[0]
		// (x^d)^c == x^(c^d): collapse two XORs into one.
		v.reset(OpARM64XORconst)
		v.AuxInt = c ^ d
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64XORshiftLL applies the generated rewrite rules for
// XORshiftLL (XOR with a left-shifted second operand), mutating v in place.
// It reports whether any rule fired.
// Autogenerated from gen/ARM64.rules — change the rules file, not this code.
func rewriteValueARM64_OpARM64XORshiftLL(v *Value, config *Config) bool {
	b := v.Block
	_ = b // b may go unused depending on which rules need to allocate new values
	// match: (XORshiftLL (MOVDconst [c]) x [d])
	// cond:
	// result: (XORconst [c] (SLLconst <x.Type> x [d]))
	for {
		d := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		v.reset(OpARM64XORconst)
		v.AuxInt = c
		v0 := b.NewValue0(v.Line, OpARM64SLLconst, x.Type)
		v0.AuxInt = d
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (XORshiftLL x (MOVDconst [c]) [d])
	// cond:
	// result: (XORconst x [int64(uint64(c)<<uint64(d))])
	for {
		d := v.AuxInt
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64XORconst)
		// Fold the shift into the constant operand.
		v.AuxInt = int64(uint64(c) << uint64(d))
		v.AddArg(x)
		return true
	}
	// match: (XORshiftLL x (SLLconst x [c]) [d])
	// cond: c==d
	// result: (MOVDconst [0])
	for {
		d := v.AuxInt
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SLLconst {
			break
		}
		c := v_1.AuxInt
		if x != v_1.Args[0] {
			break
		}
		if !(c == d) {
			break
		}
		// Both operands are the same shifted value; x^x == 0.
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	return false
}
  8750  func rewriteValueARM64_OpARM64XORshiftRA(v *Value, config *Config) bool {
  8751  	b := v.Block
  8752  	_ = b
  8753  	// match: (XORshiftRA (MOVDconst [c]) x [d])
  8754  	// cond:
  8755  	// result: (XORconst [c] (SRAconst <x.Type> x [d]))
  8756  	for {
  8757  		d := v.AuxInt
  8758  		v_0 := v.Args[0]
  8759  		if v_0.Op != OpARM64MOVDconst {
  8760  			break
  8761  		}
  8762  		c := v_0.AuxInt
  8763  		x := v.Args[1]
  8764  		v.reset(OpARM64XORconst)
  8765  		v.AuxInt = c
  8766  		v0 := b.NewValue0(v.Line, OpARM64SRAconst, x.Type)
  8767  		v0.AuxInt = d
  8768  		v0.AddArg(x)
  8769  		v.AddArg(v0)
  8770  		return true
  8771  	}
  8772  	// match: (XORshiftRA x (MOVDconst [c]) [d])
  8773  	// cond:
  8774  	// result: (XORconst x [int64(int64(c)>>uint64(d))])
  8775  	for {
  8776  		d := v.AuxInt
  8777  		x := v.Args[0]
  8778  		v_1 := v.Args[1]
  8779  		if v_1.Op != OpARM64MOVDconst {
  8780  			break
  8781  		}
  8782  		c := v_1.AuxInt
  8783  		v.reset(OpARM64XORconst)
  8784  		v.AuxInt = int64(int64(c) >> uint64(d))
  8785  		v.AddArg(x)
  8786  		return true
  8787  	}
  8788  	// match: (XORshiftRA x (SRAconst x [c]) [d])
  8789  	// cond: c==d
  8790  	// result: (MOVDconst [0])
  8791  	for {
  8792  		d := v.AuxInt
  8793  		x := v.Args[0]
  8794  		v_1 := v.Args[1]
  8795  		if v_1.Op != OpARM64SRAconst {
  8796  			break
  8797  		}
  8798  		c := v_1.AuxInt
  8799  		if x != v_1.Args[0] {
  8800  			break
  8801  		}
  8802  		if !(c == d) {
  8803  			break
  8804  		}
  8805  		v.reset(OpARM64MOVDconst)
  8806  		v.AuxInt = 0
  8807  		return true
  8808  	}
  8809  	return false
  8810  }
  8811  func rewriteValueARM64_OpARM64XORshiftRL(v *Value, config *Config) bool {
  8812  	b := v.Block
  8813  	_ = b
  8814  	// match: (XORshiftRL (MOVDconst [c]) x [d])
  8815  	// cond:
  8816  	// result: (XORconst [c] (SRLconst <x.Type> x [d]))
  8817  	for {
  8818  		d := v.AuxInt
  8819  		v_0 := v.Args[0]
  8820  		if v_0.Op != OpARM64MOVDconst {
  8821  			break
  8822  		}
  8823  		c := v_0.AuxInt
  8824  		x := v.Args[1]
  8825  		v.reset(OpARM64XORconst)
  8826  		v.AuxInt = c
  8827  		v0 := b.NewValue0(v.Line, OpARM64SRLconst, x.Type)
  8828  		v0.AuxInt = d
  8829  		v0.AddArg(x)
  8830  		v.AddArg(v0)
  8831  		return true
  8832  	}
  8833  	// match: (XORshiftRL x (MOVDconst [c]) [d])
  8834  	// cond:
  8835  	// result: (XORconst x [int64(uint64(c)>>uint64(d))])
  8836  	for {
  8837  		d := v.AuxInt
  8838  		x := v.Args[0]
  8839  		v_1 := v.Args[1]
  8840  		if v_1.Op != OpARM64MOVDconst {
  8841  			break
  8842  		}
  8843  		c := v_1.AuxInt
  8844  		v.reset(OpARM64XORconst)
  8845  		v.AuxInt = int64(uint64(c) >> uint64(d))
  8846  		v.AddArg(x)
  8847  		return true
  8848  	}
  8849  	// match: (XORshiftRL x (SRLconst x [c]) [d])
  8850  	// cond: c==d
  8851  	// result: (MOVDconst [0])
  8852  	for {
  8853  		d := v.AuxInt
  8854  		x := v.Args[0]
  8855  		v_1 := v.Args[1]
  8856  		if v_1.Op != OpARM64SRLconst {
  8857  			break
  8858  		}
  8859  		c := v_1.AuxInt
  8860  		if x != v_1.Args[0] {
  8861  			break
  8862  		}
  8863  		if !(c == d) {
  8864  			break
  8865  		}
  8866  		v.reset(OpARM64MOVDconst)
  8867  		v.AuxInt = 0
  8868  		return true
  8869  	}
  8870  	return false
  8871  }
// rewriteValueARM64_OpAdd16 lowers the generic Add16 op to ARM64 ADD; the rule
// always matches, so this always returns true.
func rewriteValueARM64_OpAdd16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Add16 x y)
	// cond:
	// result: (ADD x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64ADD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueARM64_OpAdd32 lowers the generic Add32 op to ARM64 ADD; always
// matches.
func rewriteValueARM64_OpAdd32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Add32 x y)
	// cond:
	// result: (ADD x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64ADD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueARM64_OpAdd32F lowers the generic Add32F op to ARM64 FADDS;
// always matches.
func rewriteValueARM64_OpAdd32F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Add32F x y)
	// cond:
	// result: (FADDS x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64FADDS)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueARM64_OpAdd64 lowers the generic Add64 op to ARM64 ADD; always
// matches.
func rewriteValueARM64_OpAdd64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Add64 x y)
	// cond:
	// result: (ADD x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64ADD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueARM64_OpAdd64F lowers the generic Add64F op to ARM64 FADDD;
// always matches.
func rewriteValueARM64_OpAdd64F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Add64F x y)
	// cond:
	// result: (FADDD x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64FADDD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueARM64_OpAdd8 lowers the generic Add8 op to ARM64 ADD; always
// matches.
func rewriteValueARM64_OpAdd8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Add8 x y)
	// cond:
	// result: (ADD x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64ADD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueARM64_OpAddPtr lowers the generic AddPtr op to ARM64 ADD; always
// matches.
func rewriteValueARM64_OpAddPtr(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (AddPtr x y)
	// cond:
	// result: (ADD x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64ADD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueARM64_OpAddr lowers the generic Addr op to ARM64 MOVDaddr,
// carrying the symbol over in Aux; always matches.
func rewriteValueARM64_OpAddr(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Addr {sym} base)
	// cond:
	// result: (MOVDaddr {sym} base)
	for {
		sym := v.Aux
		base := v.Args[0]
		v.reset(OpARM64MOVDaddr)
		v.Aux = sym
		v.AddArg(base)
		return true
	}
}
// rewriteValueARM64_OpAnd16 lowers the generic And16 op to ARM64 AND; always
// matches.
func rewriteValueARM64_OpAnd16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (And16 x y)
	// cond:
	// result: (AND x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64AND)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueARM64_OpAnd32 lowers the generic And32 op to ARM64 AND; always
// matches.
func rewriteValueARM64_OpAnd32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (And32 x y)
	// cond:
	// result: (AND x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64AND)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueARM64_OpAnd64 lowers the generic And64 op to ARM64 AND; always
// matches.
func rewriteValueARM64_OpAnd64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (And64 x y)
	// cond:
	// result: (AND x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64AND)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueARM64_OpAnd8 lowers the generic And8 op to ARM64 AND; always
// matches.
func rewriteValueARM64_OpAnd8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (And8 x y)
	// cond:
	// result: (AND x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64AND)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueARM64_OpAndB lowers the generic AndB (boolean and) op to ARM64
// AND; always matches.
func rewriteValueARM64_OpAndB(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (AndB x y)
	// cond:
	// result: (AND x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64AND)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueARM64_OpAvg64u lowers the generic Avg64u op, expanding the
// unsigned average as (x>>1 + y>>1) + (x & y & 1) so the sum never needs more
// than 64 bits; always matches.
func rewriteValueARM64_OpAvg64u(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Avg64u <t> x y)
	// cond:
	// result: (ADD (ADD <t> (SRLconst <t> x [1]) (SRLconst <t> y [1])) (AND <t> (AND <t> x y) (MOVDconst [1])))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64ADD)
		v0 := b.NewValue0(v.Line, OpARM64ADD, t)
		v1 := b.NewValue0(v.Line, OpARM64SRLconst, t)
		v1.AuxInt = 1
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpARM64SRLconst, t)
		v2.AuxInt = 1
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Line, OpARM64AND, t)
		v4 := b.NewValue0(v.Line, OpARM64AND, t)
		v4.AddArg(x)
		v4.AddArg(y)
		v3.AddArg(v4)
		v5 := b.NewValue0(v.Line, OpARM64MOVDconst, config.fe.TypeUInt64())
		v5.AuxInt = 1
		v3.AddArg(v5)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueARM64_OpClosureCall lowers the generic ClosureCall op to ARM64
// CALLclosure, preserving the argument width in AuxInt; always matches.
func rewriteValueARM64_OpClosureCall(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (ClosureCall [argwid] entry closure mem)
	// cond:
	// result: (CALLclosure [argwid] entry closure mem)
	for {
		argwid := v.AuxInt
		entry := v.Args[0]
		closure := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64CALLclosure)
		v.AuxInt = argwid
		v.AddArg(entry)
		v.AddArg(closure)
		v.AddArg(mem)
		return true
	}
}
// rewriteValueARM64_OpCom16 lowers the generic Com16 (bitwise complement) op
// to ARM64 MVN; always matches.
func rewriteValueARM64_OpCom16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Com16 x)
	// cond:
	// result: (MVN x)
	for {
		x := v.Args[0]
		v.reset(OpARM64MVN)
		v.AddArg(x)
		return true
	}
}
// rewriteValueARM64_OpCom32 lowers the generic Com32 (bitwise complement) op
// to ARM64 MVN; always matches.
func rewriteValueARM64_OpCom32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Com32 x)
	// cond:
	// result: (MVN x)
	for {
		x := v.Args[0]
		v.reset(OpARM64MVN)
		v.AddArg(x)
		return true
	}
}
// rewriteValueARM64_OpCom64 lowers the generic Com64 (bitwise complement) op
// to ARM64 MVN; always matches.
func rewriteValueARM64_OpCom64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Com64 x)
	// cond:
	// result: (MVN x)
	for {
		x := v.Args[0]
		v.reset(OpARM64MVN)
		v.AddArg(x)
		return true
	}
}
// rewriteValueARM64_OpCom8 lowers the generic Com8 (bitwise complement) op to
// ARM64 MVN; always matches.
func rewriteValueARM64_OpCom8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Com8 x)
	// cond:
	// result: (MVN x)
	for {
		x := v.Args[0]
		v.reset(OpARM64MVN)
		v.AddArg(x)
		return true
	}
}
// rewriteValueARM64_OpConst16 lowers the generic Const16 op to ARM64
// MOVDconst; always matches.
func rewriteValueARM64_OpConst16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Const16 [val])
	// cond:
	// result: (MOVDconst [val])
	for {
		val := v.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = val
		return true
	}
}
// rewriteValueARM64_OpConst32 lowers the generic Const32 op to ARM64
// MOVDconst; always matches.
func rewriteValueARM64_OpConst32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Const32 [val])
	// cond:
	// result: (MOVDconst [val])
	for {
		val := v.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = val
		return true
	}
}
// rewriteValueARM64_OpConst32F lowers the generic Const32F op to ARM64
// FMOVSconst; always matches.
func rewriteValueARM64_OpConst32F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Const32F [val])
	// cond:
	// result: (FMOVSconst [val])
	for {
		val := v.AuxInt
		v.reset(OpARM64FMOVSconst)
		v.AuxInt = val
		return true
	}
}
// rewriteValueARM64_OpConst64 lowers the generic Const64 op to ARM64
// MOVDconst; always matches.
func rewriteValueARM64_OpConst64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Const64 [val])
	// cond:
	// result: (MOVDconst [val])
	for {
		val := v.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = val
		return true
	}
}
// rewriteValueARM64_OpConst64F lowers the generic Const64F op to ARM64
// FMOVDconst; always matches.
func rewriteValueARM64_OpConst64F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Const64F [val])
	// cond:
	// result: (FMOVDconst [val])
	for {
		val := v.AuxInt
		v.reset(OpARM64FMOVDconst)
		v.AuxInt = val
		return true
	}
}
// rewriteValueARM64_OpConst8 lowers the generic Const8 op to ARM64 MOVDconst;
// always matches.
func rewriteValueARM64_OpConst8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Const8 [val])
	// cond:
	// result: (MOVDconst [val])
	for {
		val := v.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = val
		return true
	}
}
// rewriteValueARM64_OpConstBool lowers the generic ConstBool op to ARM64
// MOVDconst; always matches. (The local b here shadows the block variable — it
// is the boolean AuxInt from the generated rule.)
func rewriteValueARM64_OpConstBool(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (ConstBool [b])
	// cond:
	// result: (MOVDconst [b])
	for {
		b := v.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = b
		return true
	}
}
// rewriteValueARM64_OpConstNil lowers the generic ConstNil op to ARM64
// (MOVDconst [0]); always matches.
func rewriteValueARM64_OpConstNil(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (ConstNil)
	// cond:
	// result: (MOVDconst [0])
	for {
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
}
// rewriteValueARM64_OpConvert lowers the generic Convert op to ARM64
// MOVDconvert; always matches.
func rewriteValueARM64_OpConvert(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Convert x mem)
	// cond:
	// result: (MOVDconvert x mem)
	for {
		x := v.Args[0]
		mem := v.Args[1]
		v.reset(OpARM64MOVDconvert)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
}
// rewriteValueARM64_OpCvt32Fto32 lowers Cvt32Fto32 to the ARM64 FCVTZSSW
// instruction; always matches.
func rewriteValueARM64_OpCvt32Fto32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Cvt32Fto32 x)
	// cond:
	// result: (FCVTZSSW x)
	for {
		x := v.Args[0]
		v.reset(OpARM64FCVTZSSW)
		v.AddArg(x)
		return true
	}
}
// rewriteValueARM64_OpCvt32Fto32U lowers Cvt32Fto32U to the ARM64 FCVTZUSW
// instruction; always matches.
func rewriteValueARM64_OpCvt32Fto32U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Cvt32Fto32U x)
	// cond:
	// result: (FCVTZUSW x)
	for {
		x := v.Args[0]
		v.reset(OpARM64FCVTZUSW)
		v.AddArg(x)
		return true
	}
}
// rewriteValueARM64_OpCvt32Fto64 lowers Cvt32Fto64 to the ARM64 FCVTZSS
// instruction; always matches.
func rewriteValueARM64_OpCvt32Fto64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Cvt32Fto64 x)
	// cond:
	// result: (FCVTZSS x)
	for {
		x := v.Args[0]
		v.reset(OpARM64FCVTZSS)
		v.AddArg(x)
		return true
	}
}
// rewriteValueARM64_OpCvt32Fto64F lowers Cvt32Fto64F to the ARM64 FCVTSD
// instruction; always matches.
func rewriteValueARM64_OpCvt32Fto64F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Cvt32Fto64F x)
	// cond:
	// result: (FCVTSD x)
	for {
		x := v.Args[0]
		v.reset(OpARM64FCVTSD)
		v.AddArg(x)
		return true
	}
}
// rewriteValueARM64_OpCvt32Fto64U lowers Cvt32Fto64U to the ARM64 FCVTZUS
// instruction; always matches.
func rewriteValueARM64_OpCvt32Fto64U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Cvt32Fto64U x)
	// cond:
	// result: (FCVTZUS x)
	for {
		x := v.Args[0]
		v.reset(OpARM64FCVTZUS)
		v.AddArg(x)
		return true
	}
}
// rewriteValueARM64_OpCvt32Uto32F lowers Cvt32Uto32F to the ARM64 UCVTFWS
// instruction; always matches.
func rewriteValueARM64_OpCvt32Uto32F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Cvt32Uto32F x)
	// cond:
	// result: (UCVTFWS x)
	for {
		x := v.Args[0]
		v.reset(OpARM64UCVTFWS)
		v.AddArg(x)
		return true
	}
}
// rewriteValueARM64_OpCvt32Uto64F lowers Cvt32Uto64F to the ARM64 UCVTFWD
// instruction; always matches.
func rewriteValueARM64_OpCvt32Uto64F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Cvt32Uto64F x)
	// cond:
	// result: (UCVTFWD x)
	for {
		x := v.Args[0]
		v.reset(OpARM64UCVTFWD)
		v.AddArg(x)
		return true
	}
}
// rewriteValueARM64_OpCvt32to32F lowers Cvt32to32F to the ARM64 SCVTFWS
// instruction; always matches.
func rewriteValueARM64_OpCvt32to32F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Cvt32to32F x)
	// cond:
	// result: (SCVTFWS x)
	for {
		x := v.Args[0]
		v.reset(OpARM64SCVTFWS)
		v.AddArg(x)
		return true
	}
}
// rewriteValueARM64_OpCvt32to64F lowers Cvt32to64F to the ARM64 SCVTFWD
// instruction; always matches.
func rewriteValueARM64_OpCvt32to64F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Cvt32to64F x)
	// cond:
	// result: (SCVTFWD x)
	for {
		x := v.Args[0]
		v.reset(OpARM64SCVTFWD)
		v.AddArg(x)
		return true
	}
}
// rewriteValueARM64_OpCvt64Fto32 lowers Cvt64Fto32 to the ARM64 FCVTZSDW
// instruction; always matches.
func rewriteValueARM64_OpCvt64Fto32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Cvt64Fto32 x)
	// cond:
	// result: (FCVTZSDW x)
	for {
		x := v.Args[0]
		v.reset(OpARM64FCVTZSDW)
		v.AddArg(x)
		return true
	}
}
// rewriteValueARM64_OpCvt64Fto32F lowers Cvt64Fto32F to the ARM64 FCVTDS
// instruction; always matches.
func rewriteValueARM64_OpCvt64Fto32F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Cvt64Fto32F x)
	// cond:
	// result: (FCVTDS x)
	for {
		x := v.Args[0]
		v.reset(OpARM64FCVTDS)
		v.AddArg(x)
		return true
	}
}
// rewriteValueARM64_OpCvt64Fto32U lowers Cvt64Fto32U to the ARM64 FCVTZUDW
// instruction; always matches.
func rewriteValueARM64_OpCvt64Fto32U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Cvt64Fto32U x)
	// cond:
	// result: (FCVTZUDW x)
	for {
		x := v.Args[0]
		v.reset(OpARM64FCVTZUDW)
		v.AddArg(x)
		return true
	}
}
// rewriteValueARM64_OpCvt64Fto64 lowers Cvt64Fto64 to the ARM64 FCVTZSD
// instruction; always matches.
func rewriteValueARM64_OpCvt64Fto64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Cvt64Fto64 x)
	// cond:
	// result: (FCVTZSD x)
	for {
		x := v.Args[0]
		v.reset(OpARM64FCVTZSD)
		v.AddArg(x)
		return true
	}
}
// rewriteValueARM64_OpCvt64Fto64U lowers Cvt64Fto64U to the ARM64 FCVTZUD
// instruction; always matches.
func rewriteValueARM64_OpCvt64Fto64U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Cvt64Fto64U x)
	// cond:
	// result: (FCVTZUD x)
	for {
		x := v.Args[0]
		v.reset(OpARM64FCVTZUD)
		v.AddArg(x)
		return true
	}
}
// rewriteValueARM64_OpCvt64Uto32F lowers Cvt64Uto32F to the ARM64 UCVTFS
// instruction; always matches.
func rewriteValueARM64_OpCvt64Uto32F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Cvt64Uto32F x)
	// cond:
	// result: (UCVTFS x)
	for {
		x := v.Args[0]
		v.reset(OpARM64UCVTFS)
		v.AddArg(x)
		return true
	}
}
// rewriteValueARM64_OpCvt64Uto64F lowers Cvt64Uto64F to the ARM64 UCVTFD
// instruction; always matches.
func rewriteValueARM64_OpCvt64Uto64F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Cvt64Uto64F x)
	// cond:
	// result: (UCVTFD x)
	for {
		x := v.Args[0]
		v.reset(OpARM64UCVTFD)
		v.AddArg(x)
		return true
	}
}
// rewriteValueARM64_OpCvt64to32F lowers Cvt64to32F to the ARM64 SCVTFS
// instruction; always matches.
func rewriteValueARM64_OpCvt64to32F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Cvt64to32F x)
	// cond:
	// result: (SCVTFS x)
	for {
		x := v.Args[0]
		v.reset(OpARM64SCVTFS)
		v.AddArg(x)
		return true
	}
}
// rewriteValueARM64_OpCvt64to64F lowers Cvt64to64F to the ARM64 SCVTFD
// instruction; always matches.
func rewriteValueARM64_OpCvt64to64F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Cvt64to64F x)
	// cond:
	// result: (SCVTFD x)
	for {
		x := v.Args[0]
		v.reset(OpARM64SCVTFD)
		v.AddArg(x)
		return true
	}
}
// rewriteValueARM64_OpDeferCall lowers the generic DeferCall op to ARM64
// CALLdefer, preserving the argument width in AuxInt; always matches.
func rewriteValueARM64_OpDeferCall(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (DeferCall [argwid] mem)
	// cond:
	// result: (CALLdefer [argwid] mem)
	for {
		argwid := v.AuxInt
		mem := v.Args[0]
		v.reset(OpARM64CALLdefer)
		v.AuxInt = argwid
		v.AddArg(mem)
		return true
	}
}
// rewriteValueARM64_OpDiv16 lowers Div16 to a 32-bit DIVW of the two operands
// sign-extended from 16 to 32 bits; always matches.
func rewriteValueARM64_OpDiv16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Div16 x y)
	// cond:
	// result: (DIVW (SignExt16to32 x) (SignExt16to32 y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64DIVW)
		v0 := b.NewValue0(v.Line, OpSignExt16to32, config.fe.TypeInt32())
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpSignExt16to32, config.fe.TypeInt32())
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueARM64_OpDiv16u lowers Div16u to a 32-bit UDIVW of the two
// operands zero-extended from 16 to 32 bits; always matches.
func rewriteValueARM64_OpDiv16u(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Div16u x y)
	// cond:
	// result: (UDIVW (ZeroExt16to32 x) (ZeroExt16to32 y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64UDIVW)
		v0 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueARM64_OpDiv32 lowers the generic Div32 op to ARM64 DIVW; always
// matches.
func rewriteValueARM64_OpDiv32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Div32 x y)
	// cond:
	// result: (DIVW x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64DIVW)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueARM64_OpDiv32F lowers the generic Div32F op to ARM64 FDIVS;
// always matches.
func rewriteValueARM64_OpDiv32F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Div32F x y)
	// cond:
	// result: (FDIVS x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64FDIVS)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueARM64_OpDiv32u lowers the generic Div32u op to ARM64 UDIVW;
// always matches.
func rewriteValueARM64_OpDiv32u(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Div32u x y)
	// cond:
	// result: (UDIVW x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64UDIVW)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueARM64_OpDiv64 lowers the generic Div64 op to ARM64 DIV; always
// matches.
func rewriteValueARM64_OpDiv64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Div64 x y)
	// cond:
	// result: (DIV x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64DIV)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueARM64_OpDiv64F lowers the generic Div64F op to ARM64 FDIVD;
// always matches.
func rewriteValueARM64_OpDiv64F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Div64F x y)
	// cond:
	// result: (FDIVD x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64FDIVD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueARM64_OpDiv64u lowers the generic Div64u op to ARM64 UDIV;
// always matches.
func rewriteValueARM64_OpDiv64u(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Div64u x y)
	// cond:
	// result: (UDIV x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64UDIV)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueARM64_OpDiv8 lowers Div8 to a 32-bit DIVW of the two operands
// sign-extended from 8 to 32 bits; always matches.
func rewriteValueARM64_OpDiv8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Div8 x y)
	// cond:
	// result: (DIVW (SignExt8to32 x) (SignExt8to32 y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64DIVW)
		v0 := b.NewValue0(v.Line, OpSignExt8to32, config.fe.TypeInt32())
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpSignExt8to32, config.fe.TypeInt32())
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueARM64_OpDiv8u lowers Div8u to a 32-bit UDIVW of the two operands
// zero-extended from 8 to 32 bits; always matches.
func rewriteValueARM64_OpDiv8u(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Div8u x y)
	// cond:
	// result: (UDIVW (ZeroExt8to32 x) (ZeroExt8to32 y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64UDIVW)
		v0 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueARM64_OpEq16 lowers Eq16 to an Equal flag test of a 32-bit CMPW
// of the zero-extended operands; always matches.
func rewriteValueARM64_OpEq16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Eq16 x y)
	// cond:
	// result: (Equal (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Line, OpARM64CMPW, TypeFlags)
		v1 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpEq32 lowers Eq32 to an Equal flag test of CMPW; always
// matches.
func rewriteValueARM64_OpEq32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Eq32 x y)
	// cond:
	// result: (Equal (CMPW x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Line, OpARM64CMPW, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpEq32F lowers Eq32F to an Equal flag test of FCMPS;
// always matches.
func rewriteValueARM64_OpEq32F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Eq32F x y)
	// cond:
	// result: (Equal (FCMPS x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Line, OpARM64FCMPS, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpEq64 lowers Eq64 to an Equal flag test of CMP; always
// matches.
func rewriteValueARM64_OpEq64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Eq64 x y)
	// cond:
	// result: (Equal (CMP x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Line, OpARM64CMP, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpEq64F lowers Eq64F to an Equal flag test of FCMPD;
// always matches.
func rewriteValueARM64_OpEq64F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Eq64F x y)
	// cond:
	// result: (Equal (FCMPD x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Line, OpARM64FCMPD, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpEq8 lowers Eq8 to an Equal flag test of a 32-bit CMPW of
// the zero-extended operands; always matches.
func rewriteValueARM64_OpEq8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Eq8 x y)
	// cond:
	// result: (Equal (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Line, OpARM64CMPW, TypeFlags)
		v1 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpEqB lowers EqB (boolean equality) as 1 ^ (x ^ y); always
// matches.
func rewriteValueARM64_OpEqB(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (EqB x y)
	// cond:
	// result: (XOR (MOVDconst [1]) (XOR <config.fe.TypeBool()> x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64XOR)
		v0 := b.NewValue0(v.Line, OpARM64MOVDconst, config.fe.TypeUInt64())
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpARM64XOR, config.fe.TypeBool())
		v1.AddArg(x)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueARM64_OpEqPtr lowers EqPtr to an Equal flag test of a full-width
// CMP; always matches.
func rewriteValueARM64_OpEqPtr(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (EqPtr x y)
	// cond:
	// result: (Equal (CMP x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Line, OpARM64CMP, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpGeq16 lowers Geq16 to a GreaterEqual flag test of a
// 32-bit CMPW of the sign-extended operands; always matches.
func rewriteValueARM64_OpGeq16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Geq16 x y)
	// cond:
	// result: (GreaterEqual (CMPW (SignExt16to32 x) (SignExt16to32 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterEqual)
		v0 := b.NewValue0(v.Line, OpARM64CMPW, TypeFlags)
		v1 := b.NewValue0(v.Line, OpSignExt16to32, config.fe.TypeInt32())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpSignExt16to32, config.fe.TypeInt32())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpGeq16U lowers Geq16U to a GreaterEqualU flag test of a
// 32-bit CMPW of the zero-extended operands; always matches.
func rewriteValueARM64_OpGeq16U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Geq16U x y)
	// cond:
	// result: (GreaterEqualU (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterEqualU)
		v0 := b.NewValue0(v.Line, OpARM64CMPW, TypeFlags)
		v1 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
  9893  func rewriteValueARM64_OpGeq32(v *Value, config *Config) bool {
  9894  	b := v.Block
  9895  	_ = b
  9896  	// match: (Geq32 x y)
  9897  	// cond:
  9898  	// result: (GreaterEqual (CMPW x y))
  9899  	for {
  9900  		x := v.Args[0]
  9901  		y := v.Args[1]
  9902  		v.reset(OpARM64GreaterEqual)
  9903  		v0 := b.NewValue0(v.Line, OpARM64CMPW, TypeFlags)
  9904  		v0.AddArg(x)
  9905  		v0.AddArg(y)
  9906  		v.AddArg(v0)
  9907  		return true
  9908  	}
  9909  }
  9910  func rewriteValueARM64_OpGeq32F(v *Value, config *Config) bool {
  9911  	b := v.Block
  9912  	_ = b
  9913  	// match: (Geq32F x y)
  9914  	// cond:
  9915  	// result: (GreaterEqual (FCMPS x y))
  9916  	for {
  9917  		x := v.Args[0]
  9918  		y := v.Args[1]
  9919  		v.reset(OpARM64GreaterEqual)
  9920  		v0 := b.NewValue0(v.Line, OpARM64FCMPS, TypeFlags)
  9921  		v0.AddArg(x)
  9922  		v0.AddArg(y)
  9923  		v.AddArg(v0)
  9924  		return true
  9925  	}
  9926  }
  9927  func rewriteValueARM64_OpGeq32U(v *Value, config *Config) bool {
  9928  	b := v.Block
  9929  	_ = b
  9930  	// match: (Geq32U x y)
  9931  	// cond:
  9932  	// result: (GreaterEqualU (CMPW x y))
  9933  	for {
  9934  		x := v.Args[0]
  9935  		y := v.Args[1]
  9936  		v.reset(OpARM64GreaterEqualU)
  9937  		v0 := b.NewValue0(v.Line, OpARM64CMPW, TypeFlags)
  9938  		v0.AddArg(x)
  9939  		v0.AddArg(y)
  9940  		v.AddArg(v0)
  9941  		return true
  9942  	}
  9943  }
  9944  func rewriteValueARM64_OpGeq64(v *Value, config *Config) bool {
  9945  	b := v.Block
  9946  	_ = b
  9947  	// match: (Geq64 x y)
  9948  	// cond:
  9949  	// result: (GreaterEqual (CMP x y))
  9950  	for {
  9951  		x := v.Args[0]
  9952  		y := v.Args[1]
  9953  		v.reset(OpARM64GreaterEqual)
  9954  		v0 := b.NewValue0(v.Line, OpARM64CMP, TypeFlags)
  9955  		v0.AddArg(x)
  9956  		v0.AddArg(y)
  9957  		v.AddArg(v0)
  9958  		return true
  9959  	}
  9960  }
  9961  func rewriteValueARM64_OpGeq64F(v *Value, config *Config) bool {
  9962  	b := v.Block
  9963  	_ = b
  9964  	// match: (Geq64F x y)
  9965  	// cond:
  9966  	// result: (GreaterEqual (FCMPD x y))
  9967  	for {
  9968  		x := v.Args[0]
  9969  		y := v.Args[1]
  9970  		v.reset(OpARM64GreaterEqual)
  9971  		v0 := b.NewValue0(v.Line, OpARM64FCMPD, TypeFlags)
  9972  		v0.AddArg(x)
  9973  		v0.AddArg(y)
  9974  		v.AddArg(v0)
  9975  		return true
  9976  	}
  9977  }
  9978  func rewriteValueARM64_OpGeq64U(v *Value, config *Config) bool {
  9979  	b := v.Block
  9980  	_ = b
  9981  	// match: (Geq64U x y)
  9982  	// cond:
  9983  	// result: (GreaterEqualU (CMP x y))
  9984  	for {
  9985  		x := v.Args[0]
  9986  		y := v.Args[1]
  9987  		v.reset(OpARM64GreaterEqualU)
  9988  		v0 := b.NewValue0(v.Line, OpARM64CMP, TypeFlags)
  9989  		v0.AddArg(x)
  9990  		v0.AddArg(y)
  9991  		v.AddArg(v0)
  9992  		return true
  9993  	}
  9994  }
  9995  func rewriteValueARM64_OpGeq8(v *Value, config *Config) bool {
  9996  	b := v.Block
  9997  	_ = b
  9998  	// match: (Geq8 x y)
  9999  	// cond:
 10000  	// result: (GreaterEqual (CMPW (SignExt8to32 x) (SignExt8to32 y)))
 10001  	for {
 10002  		x := v.Args[0]
 10003  		y := v.Args[1]
 10004  		v.reset(OpARM64GreaterEqual)
 10005  		v0 := b.NewValue0(v.Line, OpARM64CMPW, TypeFlags)
 10006  		v1 := b.NewValue0(v.Line, OpSignExt8to32, config.fe.TypeInt32())
 10007  		v1.AddArg(x)
 10008  		v0.AddArg(v1)
 10009  		v2 := b.NewValue0(v.Line, OpSignExt8to32, config.fe.TypeInt32())
 10010  		v2.AddArg(y)
 10011  		v0.AddArg(v2)
 10012  		v.AddArg(v0)
 10013  		return true
 10014  	}
 10015  }
 10016  func rewriteValueARM64_OpGeq8U(v *Value, config *Config) bool {
 10017  	b := v.Block
 10018  	_ = b
 10019  	// match: (Geq8U x y)
 10020  	// cond:
 10021  	// result: (GreaterEqualU (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y)))
 10022  	for {
 10023  		x := v.Args[0]
 10024  		y := v.Args[1]
 10025  		v.reset(OpARM64GreaterEqualU)
 10026  		v0 := b.NewValue0(v.Line, OpARM64CMPW, TypeFlags)
 10027  		v1 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
 10028  		v1.AddArg(x)
 10029  		v0.AddArg(v1)
 10030  		v2 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
 10031  		v2.AddArg(y)
 10032  		v0.AddArg(v2)
 10033  		v.AddArg(v0)
 10034  		return true
 10035  	}
 10036  }
// rewriteValueARM64_OpGetClosurePtr lowers the generic GetClosurePtr op to
// the ARM64 LoweredGetClosurePtr pseudo-op (no operands; always matches).
func rewriteValueARM64_OpGetClosurePtr(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (GetClosurePtr)
	// cond:
	// result: (LoweredGetClosurePtr)
	for {
		v.reset(OpARM64LoweredGetClosurePtr)
		return true
	}
}

// rewriteValueARM64_OpGoCall lowers a GoCall (the call half of a `go`
// statement) to CALLgo, carrying the argument width in AuxInt and the
// memory operand through unchanged.
func rewriteValueARM64_OpGoCall(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (GoCall [argwid] mem)
	// cond:
	// result: (CALLgo [argwid] mem)
	for {
		argwid := v.AuxInt
		mem := v.Args[0]
		v.reset(OpARM64CALLgo)
		v.AuxInt = argwid
		v.AddArg(mem)
		return true
	}
}
// rewriteValueARM64_OpGreater16 lowers signed 16-bit >: sign-extend both
// operands to 32 bits, CMPW, then the signed GreaterThan condition.
func rewriteValueARM64_OpGreater16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Greater16 x y)
	// cond:
	// result: (GreaterThan (CMPW (SignExt16to32 x) (SignExt16to32 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterThan)
		v0 := b.NewValue0(v.Line, OpARM64CMPW, TypeFlags)
		v1 := b.NewValue0(v.Line, OpSignExt16to32, config.fe.TypeInt32())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpSignExt16to32, config.fe.TypeInt32())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpGreater16U lowers unsigned 16-bit >: zero-extend both
// operands, CMPW, then the unsigned GreaterThanU condition.
func rewriteValueARM64_OpGreater16U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Greater16U x y)
	// cond:
	// result: (GreaterThanU (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterThanU)
		v0 := b.NewValue0(v.Line, OpARM64CMPW, TypeFlags)
		v1 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpGreater32 lowers signed 32-bit > (CMPW + GreaterThan).
func rewriteValueARM64_OpGreater32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Greater32 x y)
	// cond:
	// result: (GreaterThan (CMPW x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterThan)
		v0 := b.NewValue0(v.Line, OpARM64CMPW, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpGreater32F lowers float32 > (FCMPS + GreaterThan).
func rewriteValueARM64_OpGreater32F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Greater32F x y)
	// cond:
	// result: (GreaterThan (FCMPS x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterThan)
		v0 := b.NewValue0(v.Line, OpARM64FCMPS, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpGreater32U lowers unsigned 32-bit > (CMPW + GreaterThanU).
func rewriteValueARM64_OpGreater32U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Greater32U x y)
	// cond:
	// result: (GreaterThanU (CMPW x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterThanU)
		v0 := b.NewValue0(v.Line, OpARM64CMPW, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpGreater64 lowers signed 64-bit > (CMP + GreaterThan).
func rewriteValueARM64_OpGreater64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Greater64 x y)
	// cond:
	// result: (GreaterThan (CMP x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterThan)
		v0 := b.NewValue0(v.Line, OpARM64CMP, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpGreater64F lowers float64 > (FCMPD + GreaterThan).
func rewriteValueARM64_OpGreater64F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Greater64F x y)
	// cond:
	// result: (GreaterThan (FCMPD x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterThan)
		v0 := b.NewValue0(v.Line, OpARM64FCMPD, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpGreater64U lowers unsigned 64-bit > (CMP + GreaterThanU).
func rewriteValueARM64_OpGreater64U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Greater64U x y)
	// cond:
	// result: (GreaterThanU (CMP x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterThanU)
		v0 := b.NewValue0(v.Line, OpARM64CMP, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpGreater8 lowers signed 8-bit >: sign-extend both
// operands to 32 bits before the CMPW.
func rewriteValueARM64_OpGreater8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Greater8 x y)
	// cond:
	// result: (GreaterThan (CMPW (SignExt8to32 x) (SignExt8to32 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterThan)
		v0 := b.NewValue0(v.Line, OpARM64CMPW, TypeFlags)
		v1 := b.NewValue0(v.Line, OpSignExt8to32, config.fe.TypeInt32())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpSignExt8to32, config.fe.TypeInt32())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpGreater8U lowers unsigned 8-bit >: zero-extend both
// operands to 32 bits before the CMPW.
func rewriteValueARM64_OpGreater8U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Greater8U x y)
	// cond:
	// result: (GreaterThanU (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterThanU)
		v0 := b.NewValue0(v.Line, OpARM64CMPW, TypeFlags)
		v1 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpHmul16 lowers signed 16-bit high-multiply: the full
// 32-bit product of the sign-extended operands is formed with MULW, then
// arithmetic-shifted right by 16 to keep the high half.
func rewriteValueARM64_OpHmul16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Hmul16 x y)
	// cond:
	// result: (SRAconst (MULW <config.fe.TypeInt32()> (SignExt16to32 x) (SignExt16to32 y)) [16])
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SRAconst)
		v.AuxInt = 16
		v0 := b.NewValue0(v.Line, OpARM64MULW, config.fe.TypeInt32())
		v1 := b.NewValue0(v.Line, OpSignExt16to32, config.fe.TypeInt32())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpSignExt16to32, config.fe.TypeInt32())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpHmul16u lowers unsigned 16-bit high-multiply: full
// product of zero-extended operands via MUL, then a logical shift right
// by 16 to keep the high half.
func rewriteValueARM64_OpHmul16u(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Hmul16u x y)
	// cond:
	// result: (SRLconst (MUL <config.fe.TypeUInt32()> (ZeroExt16to32 x) (ZeroExt16to32 y)) [16])
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SRLconst)
		v.AuxInt = 16
		v0 := b.NewValue0(v.Line, OpARM64MUL, config.fe.TypeUInt32())
		v1 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpHmul32 lowers signed 32-bit high-multiply: MULL
// produces the 64-bit product; shifting right by 32 yields the high word.
func rewriteValueARM64_OpHmul32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Hmul32 x y)
	// cond:
	// result: (SRAconst (MULL <config.fe.TypeInt64()> x y) [32])
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SRAconst)
		v.AuxInt = 32
		v0 := b.NewValue0(v.Line, OpARM64MULL, config.fe.TypeInt64())
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpHmul32u lowers unsigned 32-bit high-multiply: UMULL
// produces the 64-bit unsigned product; shifting right by 32 yields the
// high word (SRAconst and SRLconst agree here since the UMULL result's
// top bit is the product's bit 63, which is discarded by the 32-bit use).
func rewriteValueARM64_OpHmul32u(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Hmul32u x y)
	// cond:
	// result: (SRAconst (UMULL <config.fe.TypeUInt64()> x y) [32])
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SRAconst)
		v.AuxInt = 32
		v0 := b.NewValue0(v.Line, OpARM64UMULL, config.fe.TypeUInt64())
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpHmul64 lowers signed 64-bit high-multiply directly to
// the MULH instruction.
func rewriteValueARM64_OpHmul64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Hmul64 x y)
	// cond:
	// result: (MULH x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64MULH)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueARM64_OpHmul64u lowers unsigned 64-bit high-multiply directly
// to the UMULH instruction.
func rewriteValueARM64_OpHmul64u(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Hmul64u x y)
	// cond:
	// result: (UMULH x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64UMULH)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueARM64_OpHmul8 lowers signed 8-bit high-multiply: sign-extend,
// MULW (result typed as int16 — the 16-bit product is all that matters),
// then arithmetic shift right by 8 for the high byte.
func rewriteValueARM64_OpHmul8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Hmul8 x y)
	// cond:
	// result: (SRAconst (MULW <config.fe.TypeInt16()> (SignExt8to32 x) (SignExt8to32 y)) [8])
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SRAconst)
		v.AuxInt = 8
		v0 := b.NewValue0(v.Line, OpARM64MULW, config.fe.TypeInt16())
		v1 := b.NewValue0(v.Line, OpSignExt8to32, config.fe.TypeInt32())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpSignExt8to32, config.fe.TypeInt32())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpHmul8u lowers unsigned 8-bit high-multiply:
// zero-extend, MUL, then logical shift right by 8 for the high byte.
func rewriteValueARM64_OpHmul8u(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Hmul8u x y)
	// cond:
	// result: (SRLconst (MUL <config.fe.TypeUInt16()> (ZeroExt8to32 x) (ZeroExt8to32 y)) [8])
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SRLconst)
		v.AuxInt = 8
		v0 := b.NewValue0(v.Line, OpARM64MUL, config.fe.TypeUInt16())
		v1 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpInterCall lowers an interface-method call to
// CALLinter, preserving the argument width (AuxInt), the call target
// (entry), and the memory operand.
func rewriteValueARM64_OpInterCall(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (InterCall [argwid] entry mem)
	// cond:
	// result: (CALLinter [argwid] entry mem)
	for {
		argwid := v.AuxInt
		entry := v.Args[0]
		mem := v.Args[1]
		v.reset(OpARM64CALLinter)
		v.AuxInt = argwid
		v.AddArg(entry)
		v.AddArg(mem)
		return true
	}
}

// rewriteValueARM64_OpIsInBounds lowers an index bounds check: idx is in
// bounds iff idx < len as an unsigned compare (catches negative idx too).
func rewriteValueARM64_OpIsInBounds(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (IsInBounds idx len)
	// cond:
	// result: (LessThanU (CMP idx len))
	for {
		idx := v.Args[0]
		len := v.Args[1]
		v.reset(OpARM64LessThanU)
		v0 := b.NewValue0(v.Line, OpARM64CMP, TypeFlags)
		v0.AddArg(idx)
		v0.AddArg(len)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpIsNonNil lowers a nil check: ptr is non-nil iff it
// compares not-equal to zero.
func rewriteValueARM64_OpIsNonNil(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (IsNonNil ptr)
	// cond:
	// result: (NotEqual (CMPconst [0] ptr))
	for {
		ptr := v.Args[0]
		v.reset(OpARM64NotEqual)
		v0 := b.NewValue0(v.Line, OpARM64CMPconst, TypeFlags)
		v0.AuxInt = 0
		v0.AddArg(ptr)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpIsSliceInBounds lowers a slice bounds check: unlike
// index checks, idx == len is allowed, so the unsigned condition is <=.
func rewriteValueARM64_OpIsSliceInBounds(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (IsSliceInBounds idx len)
	// cond:
	// result: (LessEqualU (CMP idx len))
	for {
		idx := v.Args[0]
		len := v.Args[1]
		v.reset(OpARM64LessEqualU)
		v0 := b.NewValue0(v.Line, OpARM64CMP, TypeFlags)
		v0.AddArg(idx)
		v0.AddArg(len)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpLeq16 lowers signed 16-bit <=: sign-extend both
// operands to 32 bits, CMPW, then the signed LessEqual condition.
func rewriteValueARM64_OpLeq16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Leq16 x y)
	// cond:
	// result: (LessEqual (CMPW (SignExt16to32 x) (SignExt16to32 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64LessEqual)
		v0 := b.NewValue0(v.Line, OpARM64CMPW, TypeFlags)
		v1 := b.NewValue0(v.Line, OpSignExt16to32, config.fe.TypeInt32())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpSignExt16to32, config.fe.TypeInt32())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpLeq16U lowers unsigned 16-bit <=: zero-extend both
// operands, CMPW, then the unsigned LessEqualU condition.
func rewriteValueARM64_OpLeq16U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Leq16U x y)
	// cond:
	// result: (LessEqualU (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64LessEqualU)
		v0 := b.NewValue0(v.Line, OpARM64CMPW, TypeFlags)
		v1 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpLeq32 lowers signed 32-bit <= (CMPW + LessEqual).
func rewriteValueARM64_OpLeq32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Leq32 x y)
	// cond:
	// result: (LessEqual (CMPW x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64LessEqual)
		v0 := b.NewValue0(v.Line, OpARM64CMPW, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpLeq32F lowers float32 <= by swapping the operands:
// x <= y is emitted as y >= x (FCMPS y x + GreaterEqual), per the rule.
func rewriteValueARM64_OpLeq32F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Leq32F x y)
	// cond:
	// result: (GreaterEqual (FCMPS y x))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterEqual)
		v0 := b.NewValue0(v.Line, OpARM64FCMPS, TypeFlags)
		v0.AddArg(y)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpLeq32U lowers unsigned 32-bit <= (CMPW + LessEqualU).
func rewriteValueARM64_OpLeq32U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Leq32U x y)
	// cond:
	// result: (LessEqualU (CMPW x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64LessEqualU)
		v0 := b.NewValue0(v.Line, OpARM64CMPW, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpLeq64 lowers signed 64-bit <= (CMP + LessEqual).
func rewriteValueARM64_OpLeq64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Leq64 x y)
	// cond:
	// result: (LessEqual (CMP x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64LessEqual)
		v0 := b.NewValue0(v.Line, OpARM64CMP, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpLeq64F lowers float64 <= by swapping the operands:
// x <= y is emitted as y >= x (FCMPD y x + GreaterEqual), per the rule.
func rewriteValueARM64_OpLeq64F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Leq64F x y)
	// cond:
	// result: (GreaterEqual (FCMPD y x))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterEqual)
		v0 := b.NewValue0(v.Line, OpARM64FCMPD, TypeFlags)
		v0.AddArg(y)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpLeq64U lowers unsigned 64-bit <= (CMP + LessEqualU).
func rewriteValueARM64_OpLeq64U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Leq64U x y)
	// cond:
	// result: (LessEqualU (CMP x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64LessEqualU)
		v0 := b.NewValue0(v.Line, OpARM64CMP, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpLeq8 lowers signed 8-bit <=: sign-extend both
// operands to 32 bits before the CMPW.
func rewriteValueARM64_OpLeq8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Leq8 x y)
	// cond:
	// result: (LessEqual (CMPW (SignExt8to32 x) (SignExt8to32 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64LessEqual)
		v0 := b.NewValue0(v.Line, OpARM64CMPW, TypeFlags)
		v1 := b.NewValue0(v.Line, OpSignExt8to32, config.fe.TypeInt32())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpSignExt8to32, config.fe.TypeInt32())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpLeq8U lowers unsigned 8-bit <=: zero-extend both
// operands to 32 bits before the CMPW.
func rewriteValueARM64_OpLeq8U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Leq8U x y)
	// cond:
	// result: (LessEqualU (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64LessEqualU)
		v0 := b.NewValue0(v.Line, OpARM64CMPW, TypeFlags)
		v1 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpLess16 lowers signed 16-bit <: sign-extend both
// operands to 32 bits, CMPW, then the signed LessThan condition.
func rewriteValueARM64_OpLess16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Less16 x y)
	// cond:
	// result: (LessThan (CMPW (SignExt16to32 x) (SignExt16to32 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64LessThan)
		v0 := b.NewValue0(v.Line, OpARM64CMPW, TypeFlags)
		v1 := b.NewValue0(v.Line, OpSignExt16to32, config.fe.TypeInt32())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpSignExt16to32, config.fe.TypeInt32())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpLess16U lowers unsigned 16-bit <: zero-extend both
// operands, CMPW, then the unsigned LessThanU condition.
func rewriteValueARM64_OpLess16U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Less16U x y)
	// cond:
	// result: (LessThanU (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64LessThanU)
		v0 := b.NewValue0(v.Line, OpARM64CMPW, TypeFlags)
		v1 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpLess32 lowers signed 32-bit < (CMPW + LessThan).
func rewriteValueARM64_OpLess32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Less32 x y)
	// cond:
	// result: (LessThan (CMPW x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64LessThan)
		v0 := b.NewValue0(v.Line, OpARM64CMPW, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpLess32F lowers float32 < by swapping the operands:
// x < y is emitted as y > x (FCMPS y x + GreaterThan), per the rule.
func rewriteValueARM64_OpLess32F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Less32F x y)
	// cond:
	// result: (GreaterThan (FCMPS y x))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterThan)
		v0 := b.NewValue0(v.Line, OpARM64FCMPS, TypeFlags)
		v0.AddArg(y)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpLess32U lowers unsigned 32-bit < (CMPW + LessThanU).
func rewriteValueARM64_OpLess32U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Less32U x y)
	// cond:
	// result: (LessThanU (CMPW x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64LessThanU)
		v0 := b.NewValue0(v.Line, OpARM64CMPW, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpLess64 lowers signed 64-bit < (CMP + LessThan).
func rewriteValueARM64_OpLess64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Less64 x y)
	// cond:
	// result: (LessThan (CMP x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64LessThan)
		v0 := b.NewValue0(v.Line, OpARM64CMP, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpLess64F lowers float64 < by swapping the operands:
// x < y is emitted as y > x (FCMPD y x + GreaterThan), per the rule.
func rewriteValueARM64_OpLess64F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Less64F x y)
	// cond:
	// result: (GreaterThan (FCMPD y x))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterThan)
		v0 := b.NewValue0(v.Line, OpARM64FCMPD, TypeFlags)
		v0.AddArg(y)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpLess64U lowers unsigned 64-bit < (CMP + LessThanU).
func rewriteValueARM64_OpLess64U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Less64U x y)
	// cond:
	// result: (LessThanU (CMP x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64LessThanU)
		v0 := b.NewValue0(v.Line, OpARM64CMP, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpLess8 lowers signed 8-bit <: sign-extend both
// operands to 32 bits before the CMPW.
func rewriteValueARM64_OpLess8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Less8 x y)
	// cond:
	// result: (LessThan (CMPW (SignExt8to32 x) (SignExt8to32 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64LessThan)
		v0 := b.NewValue0(v.Line, OpARM64CMPW, TypeFlags)
		v1 := b.NewValue0(v.Line, OpSignExt8to32, config.fe.TypeInt32())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpSignExt8to32, config.fe.TypeInt32())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpLess8U lowers unsigned 8-bit <: zero-extend both
// operands to 32 bits before the CMPW.
func rewriteValueARM64_OpLess8U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Less8U x y)
	// cond:
	// result: (LessThanU (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64LessThanU)
		v0 := b.NewValue0(v.Line, OpARM64CMPW, TypeFlags)
		v1 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpLoad selects the width- and signedness-appropriate
// ARM64 load for a generic Load, dispatching on the load's result type:
// bool and unsigned ints get zero-extending loads (MOVBUload/MOVHUload/
// MOVWUload), signed ints get sign-extending loads (MOVBload/MOVHload/
// MOVWload), 64-bit ints and pointers get MOVDload, and floats get
// FMOVSload/FMOVDload. Returns false if no case matches.
func rewriteValueARM64_OpLoad(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Load <t> ptr mem)
	// cond: t.IsBoolean()
	// result: (MOVBUload ptr mem)
	for {
		t := v.Type
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(t.IsBoolean()) {
			break
		}
		v.reset(OpARM64MOVBUload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is8BitInt(t) && isSigned(t))
	// result: (MOVBload ptr mem)
	for {
		t := v.Type
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(is8BitInt(t) && isSigned(t)) {
			break
		}
		v.reset(OpARM64MOVBload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is8BitInt(t) && !isSigned(t))
	// result: (MOVBUload ptr mem)
	for {
		t := v.Type
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(is8BitInt(t) && !isSigned(t)) {
			break
		}
		v.reset(OpARM64MOVBUload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is16BitInt(t) && isSigned(t))
	// result: (MOVHload ptr mem)
	for {
		t := v.Type
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(is16BitInt(t) && isSigned(t)) {
			break
		}
		v.reset(OpARM64MOVHload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is16BitInt(t) && !isSigned(t))
	// result: (MOVHUload ptr mem)
	for {
		t := v.Type
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(is16BitInt(t) && !isSigned(t)) {
			break
		}
		v.reset(OpARM64MOVHUload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is32BitInt(t) && isSigned(t))
	// result: (MOVWload ptr mem)
	for {
		t := v.Type
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(is32BitInt(t) && isSigned(t)) {
			break
		}
		v.reset(OpARM64MOVWload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is32BitInt(t) && !isSigned(t))
	// result: (MOVWUload ptr mem)
	for {
		t := v.Type
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(is32BitInt(t) && !isSigned(t)) {
			break
		}
		v.reset(OpARM64MOVWUload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is64BitInt(t) || isPtr(t))
	// result: (MOVDload ptr mem)
	for {
		t := v.Type
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(is64BitInt(t) || isPtr(t)) {
			break
		}
		v.reset(OpARM64MOVDload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: is32BitFloat(t)
	// result: (FMOVSload ptr mem)
	for {
		t := v.Type
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(is32BitFloat(t)) {
			break
		}
		v.reset(OpARM64FMOVSload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: is64BitFloat(t)
	// result: (FMOVDload ptr mem)
	for {
		t := v.Type
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(is64BitFloat(t)) {
			break
		}
		v.reset(OpARM64FMOVDload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpLrot16 lowers the generic 16-bit rotate-left-by-constant
// Lrot16 into an OR of a left shift by c&15 and a right shift of the
// zero-extended value by 16-c&15 (see result comment below). The rule is
// unconditional, so this always rewrites and returns true.
func rewriteValueARM64_OpLrot16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Lrot16 <t> x [c])
	// cond:
	// result: (OR (SLLconst <t> x [c&15]) (SRLconst <t> (ZeroExt16to64 x) [16-c&15]))
	for {
		t := v.Type
		c := v.AuxInt
		x := v.Args[0]
		v.reset(OpARM64OR)
		v0 := b.NewValue0(v.Line, OpARM64SLLconst, t)
		v0.AuxInt = c & 15
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpARM64SRLconst, t)
		// Note: Go precedence makes this 16 - (c&15).
		v1.AuxInt = 16 - c&15
		v2 := b.NewValue0(v.Line, OpZeroExt16to64, config.fe.TypeUInt64())
		v2.AddArg(x)
		v1.AddArg(v2)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueARM64_OpLrot32 lowers Lrot32 [c] to (RORWconst x [32-c&31]):
// a 32-bit rotate right by the complementary amount. Unconditional; always
// rewrites and returns true.
func rewriteValueARM64_OpLrot32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Lrot32 x [c])
	// cond:
	// result: (RORWconst x [32-c&31])
	for {
		c := v.AuxInt
		x := v.Args[0]
		v.reset(OpARM64RORWconst)
		// Note: Go precedence makes this 32 - (c&31).
		v.AuxInt = 32 - c&31
		v.AddArg(x)
		return true
	}
}
// rewriteValueARM64_OpLrot64 lowers Lrot64 [c] to (RORconst x [64-c&63]):
// a 64-bit rotate right by the complementary amount. Unconditional; always
// rewrites and returns true.
func rewriteValueARM64_OpLrot64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Lrot64 x [c])
	// cond:
	// result: (RORconst  x [64-c&63])
	for {
		c := v.AuxInt
		x := v.Args[0]
		v.reset(OpARM64RORconst)
		// Note: Go precedence makes this 64 - (c&63).
		v.AuxInt = 64 - c&63
		v.AddArg(x)
		return true
	}
}
// rewriteValueARM64_OpLrot8 lowers the generic 8-bit rotate-left-by-constant
// Lrot8 into an OR of a left shift by c&7 and a right shift of the
// zero-extended value by 8-c&7 (see result comment below). The rule is
// unconditional, so this always rewrites and returns true.
func rewriteValueARM64_OpLrot8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Lrot8  <t> x [c])
	// cond:
	// result: (OR (SLLconst <t> x [c&7])  (SRLconst <t> (ZeroExt8to64  x) [8-c&7]))
	for {
		t := v.Type
		c := v.AuxInt
		x := v.Args[0]
		v.reset(OpARM64OR)
		v0 := b.NewValue0(v.Line, OpARM64SLLconst, t)
		v0.AuxInt = c & 7
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpARM64SRLconst, t)
		// Note: Go precedence makes this 8 - (c&7).
		v1.AuxInt = 8 - c&7
		v2 := b.NewValue0(v.Line, OpZeroExt8to64, config.fe.TypeUInt64())
		v2.AddArg(x)
		v1.AddArg(v2)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueARM64_OpLsh16x16 lowers a 16-bit left shift by a 16-bit amount
// to a conditional select: the SLL result guarded by (CMPconst [64] amount),
// with a zero constant as the alternative, so over-wide shifts yield 0 as the
// Go spec requires. The shift amount is zero-extended to 64 bits for both the
// shift and the comparison. Unconditional; always rewrites and returns true.
func rewriteValueARM64_OpLsh16x16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Lsh16x16 <t> x y)
	// cond:
	// result: (CSELULT (SLL <t> x (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSELULT)
		v0 := b.NewValue0(v.Line, OpARM64SLL, t)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Line, OpZeroExt16to64, config.fe.TypeUInt64())
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Line, OpConst64, t)
		v2.AuxInt = 0
		v.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpARM64CMPconst, TypeFlags)
		v3.AuxInt = 64
		v4 := b.NewValue0(v.Line, OpZeroExt16to64, config.fe.TypeUInt64())
		v4.AddArg(y)
		v3.AddArg(v4)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueARM64_OpLsh16x32 lowers a 16-bit left shift by a 32-bit amount
// to a conditional select: the SLL result guarded by (CMPconst [64] amount),
// with a zero constant as the alternative, so over-wide shifts yield 0 as the
// Go spec requires. The shift amount is zero-extended to 64 bits for both the
// shift and the comparison. Unconditional; always rewrites and returns true.
func rewriteValueARM64_OpLsh16x32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Lsh16x32 <t> x y)
	// cond:
	// result: (CSELULT (SLL <t> x (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSELULT)
		v0 := b.NewValue0(v.Line, OpARM64SLL, t)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Line, OpZeroExt32to64, config.fe.TypeUInt64())
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Line, OpConst64, t)
		v2.AuxInt = 0
		v.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpARM64CMPconst, TypeFlags)
		v3.AuxInt = 64
		v4 := b.NewValue0(v.Line, OpZeroExt32to64, config.fe.TypeUInt64())
		v4.AddArg(y)
		v3.AddArg(v4)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueARM64_OpLsh16x64 lowers a 16-bit left shift by a 64-bit amount.
// Three rules, tried in order: a constant amount < 16 becomes SLLconst; a
// constant amount >= 16 becomes the constant 0 (over-wide shift); a variable
// amount becomes a CSELULT guarded by (CMPconst [64] y) so shifts of 64 or
// more produce 0. The final rule is unconditional, so this always returns true.
func rewriteValueARM64_OpLsh16x64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Lsh16x64  x (MOVDconst [c]))
	// cond: uint64(c) < 16
	// result: (SLLconst x [c])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(uint64(c) < 16) {
			break
		}
		v.reset(OpARM64SLLconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (Lsh16x64  _ (MOVDconst [c]))
	// cond: uint64(c) >= 16
	// result: (MOVDconst [0])
	for {
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(uint64(c) >= 16) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (Lsh16x64 <t> x y)
	// cond:
	// result: (CSELULT (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] y))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSELULT)
		v0 := b.NewValue0(v.Line, OpARM64SLL, t)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpConst64, t)
		v1.AuxInt = 0
		v.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpARM64CMPconst, TypeFlags)
		v2.AuxInt = 64
		v2.AddArg(y)
		v.AddArg(v2)
		return true
	}
}
// rewriteValueARM64_OpLsh16x8 lowers a 16-bit left shift by an 8-bit amount
// to a conditional select: the SLL result guarded by (CMPconst [64] amount),
// with a zero constant as the alternative, so over-wide shifts yield 0 as the
// Go spec requires. The shift amount is zero-extended to 64 bits for both the
// shift and the comparison. Unconditional; always rewrites and returns true.
func rewriteValueARM64_OpLsh16x8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Lsh16x8  <t> x y)
	// cond:
	// result: (CSELULT (SLL <t> x (ZeroExt8to64  y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64  y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSELULT)
		v0 := b.NewValue0(v.Line, OpARM64SLL, t)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Line, OpZeroExt8to64, config.fe.TypeUInt64())
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Line, OpConst64, t)
		v2.AuxInt = 0
		v.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpARM64CMPconst, TypeFlags)
		v3.AuxInt = 64
		v4 := b.NewValue0(v.Line, OpZeroExt8to64, config.fe.TypeUInt64())
		v4.AddArg(y)
		v3.AddArg(v4)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueARM64_OpLsh32x16 lowers a 32-bit left shift by a 16-bit amount
// to a conditional select: the SLL result guarded by (CMPconst [64] amount),
// with a zero constant as the alternative, so over-wide shifts yield 0 as the
// Go spec requires. The shift amount is zero-extended to 64 bits for both the
// shift and the comparison. Unconditional; always rewrites and returns true.
func rewriteValueARM64_OpLsh32x16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Lsh32x16 <t> x y)
	// cond:
	// result: (CSELULT (SLL <t> x (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSELULT)
		v0 := b.NewValue0(v.Line, OpARM64SLL, t)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Line, OpZeroExt16to64, config.fe.TypeUInt64())
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Line, OpConst64, t)
		v2.AuxInt = 0
		v.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpARM64CMPconst, TypeFlags)
		v3.AuxInt = 64
		v4 := b.NewValue0(v.Line, OpZeroExt16to64, config.fe.TypeUInt64())
		v4.AddArg(y)
		v3.AddArg(v4)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueARM64_OpLsh32x32 lowers a 32-bit left shift by a 32-bit amount
// to a conditional select: the SLL result guarded by (CMPconst [64] amount),
// with a zero constant as the alternative, so over-wide shifts yield 0 as the
// Go spec requires. The shift amount is zero-extended to 64 bits for both the
// shift and the comparison. Unconditional; always rewrites and returns true.
func rewriteValueARM64_OpLsh32x32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Lsh32x32 <t> x y)
	// cond:
	// result: (CSELULT (SLL <t> x (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSELULT)
		v0 := b.NewValue0(v.Line, OpARM64SLL, t)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Line, OpZeroExt32to64, config.fe.TypeUInt64())
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Line, OpConst64, t)
		v2.AuxInt = 0
		v.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpARM64CMPconst, TypeFlags)
		v3.AuxInt = 64
		v4 := b.NewValue0(v.Line, OpZeroExt32to64, config.fe.TypeUInt64())
		v4.AddArg(y)
		v3.AddArg(v4)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueARM64_OpLsh32x64 lowers a 32-bit left shift by a 64-bit amount.
// Three rules, tried in order: a constant amount < 32 becomes SLLconst; a
// constant amount >= 32 becomes the constant 0 (over-wide shift); a variable
// amount becomes a CSELULT guarded by (CMPconst [64] y) so shifts of 64 or
// more produce 0. The final rule is unconditional, so this always returns true.
func rewriteValueARM64_OpLsh32x64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Lsh32x64  x (MOVDconst [c]))
	// cond: uint64(c) < 32
	// result: (SLLconst x [c])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(uint64(c) < 32) {
			break
		}
		v.reset(OpARM64SLLconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (Lsh32x64  _ (MOVDconst [c]))
	// cond: uint64(c) >= 32
	// result: (MOVDconst [0])
	for {
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(uint64(c) >= 32) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (Lsh32x64 <t> x y)
	// cond:
	// result: (CSELULT (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] y))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSELULT)
		v0 := b.NewValue0(v.Line, OpARM64SLL, t)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpConst64, t)
		v1.AuxInt = 0
		v.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpARM64CMPconst, TypeFlags)
		v2.AuxInt = 64
		v2.AddArg(y)
		v.AddArg(v2)
		return true
	}
}
// rewriteValueARM64_OpLsh32x8 lowers a 32-bit left shift by an 8-bit amount
// to a conditional select: the SLL result guarded by (CMPconst [64] amount),
// with a zero constant as the alternative, so over-wide shifts yield 0 as the
// Go spec requires. The shift amount is zero-extended to 64 bits for both the
// shift and the comparison. Unconditional; always rewrites and returns true.
func rewriteValueARM64_OpLsh32x8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Lsh32x8  <t> x y)
	// cond:
	// result: (CSELULT (SLL <t> x (ZeroExt8to64  y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64  y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSELULT)
		v0 := b.NewValue0(v.Line, OpARM64SLL, t)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Line, OpZeroExt8to64, config.fe.TypeUInt64())
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Line, OpConst64, t)
		v2.AuxInt = 0
		v.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpARM64CMPconst, TypeFlags)
		v3.AuxInt = 64
		v4 := b.NewValue0(v.Line, OpZeroExt8to64, config.fe.TypeUInt64())
		v4.AddArg(y)
		v3.AddArg(v4)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueARM64_OpLsh64x16 lowers a 64-bit left shift by a 16-bit amount
// to a conditional select: the SLL result guarded by (CMPconst [64] amount),
// with a zero constant as the alternative, so over-wide shifts yield 0 as the
// Go spec requires. The shift amount is zero-extended to 64 bits for both the
// shift and the comparison. Unconditional; always rewrites and returns true.
func rewriteValueARM64_OpLsh64x16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Lsh64x16 <t> x y)
	// cond:
	// result: (CSELULT (SLL <t> x (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSELULT)
		v0 := b.NewValue0(v.Line, OpARM64SLL, t)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Line, OpZeroExt16to64, config.fe.TypeUInt64())
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Line, OpConst64, t)
		v2.AuxInt = 0
		v.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpARM64CMPconst, TypeFlags)
		v3.AuxInt = 64
		v4 := b.NewValue0(v.Line, OpZeroExt16to64, config.fe.TypeUInt64())
		v4.AddArg(y)
		v3.AddArg(v4)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueARM64_OpLsh64x32 lowers a 64-bit left shift by a 32-bit amount
// to a conditional select: the SLL result guarded by (CMPconst [64] amount),
// with a zero constant as the alternative, so over-wide shifts yield 0 as the
// Go spec requires. The shift amount is zero-extended to 64 bits for both the
// shift and the comparison. Unconditional; always rewrites and returns true.
func rewriteValueARM64_OpLsh64x32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Lsh64x32 <t> x y)
	// cond:
	// result: (CSELULT (SLL <t> x (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSELULT)
		v0 := b.NewValue0(v.Line, OpARM64SLL, t)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Line, OpZeroExt32to64, config.fe.TypeUInt64())
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Line, OpConst64, t)
		v2.AuxInt = 0
		v.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpARM64CMPconst, TypeFlags)
		v3.AuxInt = 64
		v4 := b.NewValue0(v.Line, OpZeroExt32to64, config.fe.TypeUInt64())
		v4.AddArg(y)
		v3.AddArg(v4)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueARM64_OpLsh64x64 lowers a 64-bit left shift by a 64-bit amount.
// Three rules, tried in order: a constant amount < 64 becomes SLLconst; a
// constant amount >= 64 becomes the constant 0 (over-wide shift); a variable
// amount becomes a CSELULT guarded by (CMPconst [64] y) so shifts of 64 or
// more produce 0. The final rule is unconditional, so this always returns true.
func rewriteValueARM64_OpLsh64x64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Lsh64x64  x (MOVDconst [c]))
	// cond: uint64(c) < 64
	// result: (SLLconst x [c])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(uint64(c) < 64) {
			break
		}
		v.reset(OpARM64SLLconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (Lsh64x64  _ (MOVDconst [c]))
	// cond: uint64(c) >= 64
	// result: (MOVDconst [0])
	for {
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(uint64(c) >= 64) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (Lsh64x64 <t> x y)
	// cond:
	// result: (CSELULT (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] y))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSELULT)
		v0 := b.NewValue0(v.Line, OpARM64SLL, t)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpConst64, t)
		v1.AuxInt = 0
		v.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpARM64CMPconst, TypeFlags)
		v2.AuxInt = 64
		v2.AddArg(y)
		v.AddArg(v2)
		return true
	}
}
// rewriteValueARM64_OpLsh64x8 lowers a 64-bit left shift by an 8-bit amount
// to a conditional select: the SLL result guarded by (CMPconst [64] amount),
// with a zero constant as the alternative, so over-wide shifts yield 0 as the
// Go spec requires. The shift amount is zero-extended to 64 bits for both the
// shift and the comparison. Unconditional; always rewrites and returns true.
func rewriteValueARM64_OpLsh64x8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Lsh64x8  <t> x y)
	// cond:
	// result: (CSELULT (SLL <t> x (ZeroExt8to64  y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64  y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSELULT)
		v0 := b.NewValue0(v.Line, OpARM64SLL, t)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Line, OpZeroExt8to64, config.fe.TypeUInt64())
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Line, OpConst64, t)
		v2.AuxInt = 0
		v.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpARM64CMPconst, TypeFlags)
		v3.AuxInt = 64
		v4 := b.NewValue0(v.Line, OpZeroExt8to64, config.fe.TypeUInt64())
		v4.AddArg(y)
		v3.AddArg(v4)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueARM64_OpLsh8x16 lowers an 8-bit left shift by a 16-bit amount
// to a conditional select: the SLL result guarded by (CMPconst [64] amount),
// with a zero constant as the alternative, so over-wide shifts yield 0 as the
// Go spec requires. The shift amount is zero-extended to 64 bits for both the
// shift and the comparison. Unconditional; always rewrites and returns true.
func rewriteValueARM64_OpLsh8x16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Lsh8x16 <t> x y)
	// cond:
	// result: (CSELULT (SLL <t> x (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSELULT)
		v0 := b.NewValue0(v.Line, OpARM64SLL, t)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Line, OpZeroExt16to64, config.fe.TypeUInt64())
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Line, OpConst64, t)
		v2.AuxInt = 0
		v.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpARM64CMPconst, TypeFlags)
		v3.AuxInt = 64
		v4 := b.NewValue0(v.Line, OpZeroExt16to64, config.fe.TypeUInt64())
		v4.AddArg(y)
		v3.AddArg(v4)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueARM64_OpLsh8x32 lowers an 8-bit left shift by a 32-bit amount
// to a conditional select: the SLL result guarded by (CMPconst [64] amount),
// with a zero constant as the alternative, so over-wide shifts yield 0 as the
// Go spec requires. The shift amount is zero-extended to 64 bits for both the
// shift and the comparison. Unconditional; always rewrites and returns true.
func rewriteValueARM64_OpLsh8x32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Lsh8x32 <t> x y)
	// cond:
	// result: (CSELULT (SLL <t> x (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSELULT)
		v0 := b.NewValue0(v.Line, OpARM64SLL, t)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Line, OpZeroExt32to64, config.fe.TypeUInt64())
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Line, OpConst64, t)
		v2.AuxInt = 0
		v.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpARM64CMPconst, TypeFlags)
		v3.AuxInt = 64
		v4 := b.NewValue0(v.Line, OpZeroExt32to64, config.fe.TypeUInt64())
		v4.AddArg(y)
		v3.AddArg(v4)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueARM64_OpLsh8x64 lowers an 8-bit left shift by a 64-bit amount.
// Three rules, tried in order: a constant amount < 8 becomes SLLconst; a
// constant amount >= 8 becomes the constant 0 (over-wide shift); a variable
// amount becomes a CSELULT guarded by (CMPconst [64] y) so shifts of 64 or
// more produce 0. The final rule is unconditional, so this always returns true.
func rewriteValueARM64_OpLsh8x64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Lsh8x64   x (MOVDconst [c]))
	// cond: uint64(c) < 8
	// result: (SLLconst x [c])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(uint64(c) < 8) {
			break
		}
		v.reset(OpARM64SLLconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (Lsh8x64   _ (MOVDconst [c]))
	// cond: uint64(c) >= 8
	// result: (MOVDconst [0])
	for {
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(uint64(c) >= 8) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (Lsh8x64 <t> x y)
	// cond:
	// result: (CSELULT (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] y))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSELULT)
		v0 := b.NewValue0(v.Line, OpARM64SLL, t)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpConst64, t)
		v1.AuxInt = 0
		v.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpARM64CMPconst, TypeFlags)
		v2.AuxInt = 64
		v2.AddArg(y)
		v.AddArg(v2)
		return true
	}
}
// rewriteValueARM64_OpLsh8x8 lowers an 8-bit left shift by an 8-bit amount
// to a conditional select: the SLL result guarded by (CMPconst [64] amount),
// with a zero constant as the alternative, so over-wide shifts yield 0 as the
// Go spec requires. The shift amount is zero-extended to 64 bits for both the
// shift and the comparison. Unconditional; always rewrites and returns true.
func rewriteValueARM64_OpLsh8x8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Lsh8x8  <t> x y)
	// cond:
	// result: (CSELULT (SLL <t> x (ZeroExt8to64  y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64  y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSELULT)
		v0 := b.NewValue0(v.Line, OpARM64SLL, t)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Line, OpZeroExt8to64, config.fe.TypeUInt64())
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Line, OpConst64, t)
		v2.AuxInt = 0
		v.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpARM64CMPconst, TypeFlags)
		v3.AuxInt = 64
		v4 := b.NewValue0(v.Line, OpZeroExt8to64, config.fe.TypeUInt64())
		v4.AddArg(y)
		v3.AddArg(v4)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueARM64_OpMod16 lowers the signed 16-bit remainder to a 32-bit
// MODW of the sign-extended operands. Unconditional; always rewrites and
// returns true.
func rewriteValueARM64_OpMod16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Mod16 x y)
	// cond:
	// result: (MODW (SignExt16to32 x) (SignExt16to32 y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64MODW)
		v0 := b.NewValue0(v.Line, OpSignExt16to32, config.fe.TypeInt32())
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpSignExt16to32, config.fe.TypeInt32())
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueARM64_OpMod16u lowers the unsigned 16-bit remainder to a 32-bit
// UMODW of the zero-extended operands. Unconditional; always rewrites and
// returns true.
func rewriteValueARM64_OpMod16u(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Mod16u x y)
	// cond:
	// result: (UMODW (ZeroExt16to32 x) (ZeroExt16to32 y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64UMODW)
		v0 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueARM64_OpMod32 lowers the signed 32-bit remainder directly to
// the ARM64 MODW op. Unconditional; always rewrites and returns true.
func rewriteValueARM64_OpMod32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Mod32 x y)
	// cond:
	// result: (MODW x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64MODW)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueARM64_OpMod32u lowers the unsigned 32-bit remainder directly to
// the ARM64 UMODW op. Unconditional; always rewrites and returns true.
func rewriteValueARM64_OpMod32u(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Mod32u x y)
	// cond:
	// result: (UMODW x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64UMODW)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueARM64_OpMod64 lowers the signed 64-bit remainder directly to
// the ARM64 MOD op. Unconditional; always rewrites and returns true.
func rewriteValueARM64_OpMod64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Mod64 x y)
	// cond:
	// result: (MOD x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64MOD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueARM64_OpMod64u lowers the unsigned 64-bit remainder directly to
// the ARM64 UMOD op. Unconditional; always rewrites and returns true.
func rewriteValueARM64_OpMod64u(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Mod64u x y)
	// cond:
	// result: (UMOD x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64UMOD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueARM64_OpMod8 lowers the signed 8-bit remainder to a 32-bit
// MODW of the sign-extended operands. Unconditional; always rewrites and
// returns true.
func rewriteValueARM64_OpMod8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Mod8 x y)
	// cond:
	// result: (MODW (SignExt8to32 x) (SignExt8to32 y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64MODW)
		v0 := b.NewValue0(v.Line, OpSignExt8to32, config.fe.TypeInt32())
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpSignExt8to32, config.fe.TypeInt32())
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueARM64_OpMod8u lowers the unsigned 8-bit remainder to a 32-bit
// UMODW of the zero-extended operands. Unconditional; always rewrites and
// returns true.
func rewriteValueARM64_OpMod8u(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Mod8u x y)
	// cond:
	// result: (UMODW (ZeroExt8to32 x) (ZeroExt8to32 y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64UMODW)
		v0 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
 11795  func rewriteValueARM64_OpMove(v *Value, config *Config) bool {
 11796  	b := v.Block
 11797  	_ = b
 11798  	// match: (Move [s] _ _ mem)
 11799  	// cond: SizeAndAlign(s).Size() == 0
 11800  	// result: mem
 11801  	for {
 11802  		s := v.AuxInt
 11803  		mem := v.Args[2]
 11804  		if !(SizeAndAlign(s).Size() == 0) {
 11805  			break
 11806  		}
 11807  		v.reset(OpCopy)
 11808  		v.Type = mem.Type
 11809  		v.AddArg(mem)
 11810  		return true
 11811  	}
 11812  	// match: (Move [s] dst src mem)
 11813  	// cond: SizeAndAlign(s).Size() == 1
 11814  	// result: (MOVBstore dst (MOVBUload src mem) mem)
 11815  	for {
 11816  		s := v.AuxInt
 11817  		dst := v.Args[0]
 11818  		src := v.Args[1]
 11819  		mem := v.Args[2]
 11820  		if !(SizeAndAlign(s).Size() == 1) {
 11821  			break
 11822  		}
 11823  		v.reset(OpARM64MOVBstore)
 11824  		v.AddArg(dst)
 11825  		v0 := b.NewValue0(v.Line, OpARM64MOVBUload, config.fe.TypeUInt8())
 11826  		v0.AddArg(src)
 11827  		v0.AddArg(mem)
 11828  		v.AddArg(v0)
 11829  		v.AddArg(mem)
 11830  		return true
 11831  	}
 11832  	// match: (Move [s] dst src mem)
 11833  	// cond: SizeAndAlign(s).Size() == 2
 11834  	// result: (MOVHstore dst (MOVHUload src mem) mem)
 11835  	for {
 11836  		s := v.AuxInt
 11837  		dst := v.Args[0]
 11838  		src := v.Args[1]
 11839  		mem := v.Args[2]
 11840  		if !(SizeAndAlign(s).Size() == 2) {
 11841  			break
 11842  		}
 11843  		v.reset(OpARM64MOVHstore)
 11844  		v.AddArg(dst)
 11845  		v0 := b.NewValue0(v.Line, OpARM64MOVHUload, config.fe.TypeUInt16())
 11846  		v0.AddArg(src)
 11847  		v0.AddArg(mem)
 11848  		v.AddArg(v0)
 11849  		v.AddArg(mem)
 11850  		return true
 11851  	}
 11852  	// match: (Move [s] dst src mem)
 11853  	// cond: SizeAndAlign(s).Size() == 4
 11854  	// result: (MOVWstore dst (MOVWUload src mem) mem)
 11855  	for {
 11856  		s := v.AuxInt
 11857  		dst := v.Args[0]
 11858  		src := v.Args[1]
 11859  		mem := v.Args[2]
 11860  		if !(SizeAndAlign(s).Size() == 4) {
 11861  			break
 11862  		}
 11863  		v.reset(OpARM64MOVWstore)
 11864  		v.AddArg(dst)
 11865  		v0 := b.NewValue0(v.Line, OpARM64MOVWUload, config.fe.TypeUInt32())
 11866  		v0.AddArg(src)
 11867  		v0.AddArg(mem)
 11868  		v.AddArg(v0)
 11869  		v.AddArg(mem)
 11870  		return true
 11871  	}
 11872  	// match: (Move [s] dst src mem)
 11873  	// cond: SizeAndAlign(s).Size() == 8
 11874  	// result: (MOVDstore dst (MOVDload src mem) mem)
 11875  	for {
 11876  		s := v.AuxInt
 11877  		dst := v.Args[0]
 11878  		src := v.Args[1]
 11879  		mem := v.Args[2]
 11880  		if !(SizeAndAlign(s).Size() == 8) {
 11881  			break
 11882  		}
 11883  		v.reset(OpARM64MOVDstore)
 11884  		v.AddArg(dst)
 11885  		v0 := b.NewValue0(v.Line, OpARM64MOVDload, config.fe.TypeUInt64())
 11886  		v0.AddArg(src)
 11887  		v0.AddArg(mem)
 11888  		v.AddArg(v0)
 11889  		v.AddArg(mem)
 11890  		return true
 11891  	}
 11892  	// match: (Move [s] dst src mem)
 11893  	// cond: SizeAndAlign(s).Size() == 3
 11894  	// result: (MOVBstore [2] dst (MOVBUload [2] src mem) 		(MOVHstore dst (MOVHUload src mem) mem))
 11895  	for {
 11896  		s := v.AuxInt
 11897  		dst := v.Args[0]
 11898  		src := v.Args[1]
 11899  		mem := v.Args[2]
 11900  		if !(SizeAndAlign(s).Size() == 3) {
 11901  			break
 11902  		}
 11903  		v.reset(OpARM64MOVBstore)
 11904  		v.AuxInt = 2
 11905  		v.AddArg(dst)
 11906  		v0 := b.NewValue0(v.Line, OpARM64MOVBUload, config.fe.TypeUInt8())
 11907  		v0.AuxInt = 2
 11908  		v0.AddArg(src)
 11909  		v0.AddArg(mem)
 11910  		v.AddArg(v0)
 11911  		v1 := b.NewValue0(v.Line, OpARM64MOVHstore, TypeMem)
 11912  		v1.AddArg(dst)
 11913  		v2 := b.NewValue0(v.Line, OpARM64MOVHUload, config.fe.TypeUInt16())
 11914  		v2.AddArg(src)
 11915  		v2.AddArg(mem)
 11916  		v1.AddArg(v2)
 11917  		v1.AddArg(mem)
 11918  		v.AddArg(v1)
 11919  		return true
 11920  	}
 11921  	// match: (Move [s] dst src mem)
 11922  	// cond: SizeAndAlign(s).Size() == 5
 11923  	// result: (MOVBstore [4] dst (MOVBUload [4] src mem) 		(MOVWstore dst (MOVWUload src mem) mem))
 11924  	for {
 11925  		s := v.AuxInt
 11926  		dst := v.Args[0]
 11927  		src := v.Args[1]
 11928  		mem := v.Args[2]
 11929  		if !(SizeAndAlign(s).Size() == 5) {
 11930  			break
 11931  		}
 11932  		v.reset(OpARM64MOVBstore)
 11933  		v.AuxInt = 4
 11934  		v.AddArg(dst)
 11935  		v0 := b.NewValue0(v.Line, OpARM64MOVBUload, config.fe.TypeUInt8())
 11936  		v0.AuxInt = 4
 11937  		v0.AddArg(src)
 11938  		v0.AddArg(mem)
 11939  		v.AddArg(v0)
 11940  		v1 := b.NewValue0(v.Line, OpARM64MOVWstore, TypeMem)
 11941  		v1.AddArg(dst)
 11942  		v2 := b.NewValue0(v.Line, OpARM64MOVWUload, config.fe.TypeUInt32())
 11943  		v2.AddArg(src)
 11944  		v2.AddArg(mem)
 11945  		v1.AddArg(v2)
 11946  		v1.AddArg(mem)
 11947  		v.AddArg(v1)
 11948  		return true
 11949  	}
 11950  	// match: (Move [s] dst src mem)
 11951  	// cond: SizeAndAlign(s).Size() == 6
 11952  	// result: (MOVHstore [4] dst (MOVHUload [4] src mem) 		(MOVWstore dst (MOVWUload src mem) mem))
 11953  	for {
 11954  		s := v.AuxInt
 11955  		dst := v.Args[0]
 11956  		src := v.Args[1]
 11957  		mem := v.Args[2]
 11958  		if !(SizeAndAlign(s).Size() == 6) {
 11959  			break
 11960  		}
 11961  		v.reset(OpARM64MOVHstore)
 11962  		v.AuxInt = 4
 11963  		v.AddArg(dst)
 11964  		v0 := b.NewValue0(v.Line, OpARM64MOVHUload, config.fe.TypeUInt16())
 11965  		v0.AuxInt = 4
 11966  		v0.AddArg(src)
 11967  		v0.AddArg(mem)
 11968  		v.AddArg(v0)
 11969  		v1 := b.NewValue0(v.Line, OpARM64MOVWstore, TypeMem)
 11970  		v1.AddArg(dst)
 11971  		v2 := b.NewValue0(v.Line, OpARM64MOVWUload, config.fe.TypeUInt32())
 11972  		v2.AddArg(src)
 11973  		v2.AddArg(mem)
 11974  		v1.AddArg(v2)
 11975  		v1.AddArg(mem)
 11976  		v.AddArg(v1)
 11977  		return true
 11978  	}
 11979  	// match: (Move [s] dst src mem)
 11980  	// cond: SizeAndAlign(s).Size() == 7
 11981  	// result: (MOVBstore [6] dst (MOVBUload [6] src mem) 		(MOVHstore [4] dst (MOVHUload [4] src mem) 			(MOVWstore dst (MOVWUload src mem) mem)))
 11982  	for {
 11983  		s := v.AuxInt
 11984  		dst := v.Args[0]
 11985  		src := v.Args[1]
 11986  		mem := v.Args[2]
 11987  		if !(SizeAndAlign(s).Size() == 7) {
 11988  			break
 11989  		}
 11990  		v.reset(OpARM64MOVBstore)
 11991  		v.AuxInt = 6
 11992  		v.AddArg(dst)
 11993  		v0 := b.NewValue0(v.Line, OpARM64MOVBUload, config.fe.TypeUInt8())
 11994  		v0.AuxInt = 6
 11995  		v0.AddArg(src)
 11996  		v0.AddArg(mem)
 11997  		v.AddArg(v0)
 11998  		v1 := b.NewValue0(v.Line, OpARM64MOVHstore, TypeMem)
 11999  		v1.AuxInt = 4
 12000  		v1.AddArg(dst)
 12001  		v2 := b.NewValue0(v.Line, OpARM64MOVHUload, config.fe.TypeUInt16())
 12002  		v2.AuxInt = 4
 12003  		v2.AddArg(src)
 12004  		v2.AddArg(mem)
 12005  		v1.AddArg(v2)
 12006  		v3 := b.NewValue0(v.Line, OpARM64MOVWstore, TypeMem)
 12007  		v3.AddArg(dst)
 12008  		v4 := b.NewValue0(v.Line, OpARM64MOVWUload, config.fe.TypeUInt32())
 12009  		v4.AddArg(src)
 12010  		v4.AddArg(mem)
 12011  		v3.AddArg(v4)
 12012  		v3.AddArg(mem)
 12013  		v1.AddArg(v3)
 12014  		v.AddArg(v1)
 12015  		return true
 12016  	}
 12017  	// match: (Move [s] dst src mem)
 12018  	// cond: SizeAndAlign(s).Size() == 12
 12019  	// result: (MOVWstore [8] dst (MOVWUload [8] src mem) 		(MOVDstore dst (MOVDload src mem) mem))
 12020  	for {
 12021  		s := v.AuxInt
 12022  		dst := v.Args[0]
 12023  		src := v.Args[1]
 12024  		mem := v.Args[2]
 12025  		if !(SizeAndAlign(s).Size() == 12) {
 12026  			break
 12027  		}
 12028  		v.reset(OpARM64MOVWstore)
 12029  		v.AuxInt = 8
 12030  		v.AddArg(dst)
 12031  		v0 := b.NewValue0(v.Line, OpARM64MOVWUload, config.fe.TypeUInt32())
 12032  		v0.AuxInt = 8
 12033  		v0.AddArg(src)
 12034  		v0.AddArg(mem)
 12035  		v.AddArg(v0)
 12036  		v1 := b.NewValue0(v.Line, OpARM64MOVDstore, TypeMem)
 12037  		v1.AddArg(dst)
 12038  		v2 := b.NewValue0(v.Line, OpARM64MOVDload, config.fe.TypeUInt64())
 12039  		v2.AddArg(src)
 12040  		v2.AddArg(mem)
 12041  		v1.AddArg(v2)
 12042  		v1.AddArg(mem)
 12043  		v.AddArg(v1)
 12044  		return true
 12045  	}
 12046  	// match: (Move [s] dst src mem)
 12047  	// cond: SizeAndAlign(s).Size() == 16
 12048  	// result: (MOVDstore [8] dst (MOVDload [8] src mem) 		(MOVDstore dst (MOVDload src mem) mem))
 12049  	for {
 12050  		s := v.AuxInt
 12051  		dst := v.Args[0]
 12052  		src := v.Args[1]
 12053  		mem := v.Args[2]
 12054  		if !(SizeAndAlign(s).Size() == 16) {
 12055  			break
 12056  		}
 12057  		v.reset(OpARM64MOVDstore)
 12058  		v.AuxInt = 8
 12059  		v.AddArg(dst)
 12060  		v0 := b.NewValue0(v.Line, OpARM64MOVDload, config.fe.TypeUInt64())
 12061  		v0.AuxInt = 8
 12062  		v0.AddArg(src)
 12063  		v0.AddArg(mem)
 12064  		v.AddArg(v0)
 12065  		v1 := b.NewValue0(v.Line, OpARM64MOVDstore, TypeMem)
 12066  		v1.AddArg(dst)
 12067  		v2 := b.NewValue0(v.Line, OpARM64MOVDload, config.fe.TypeUInt64())
 12068  		v2.AddArg(src)
 12069  		v2.AddArg(mem)
 12070  		v1.AddArg(v2)
 12071  		v1.AddArg(mem)
 12072  		v.AddArg(v1)
 12073  		return true
 12074  	}
 12075  	// match: (Move [s] dst src mem)
 12076  	// cond: SizeAndAlign(s).Size() == 24
 12077  	// result: (MOVDstore [16] dst (MOVDload [16] src mem) 		(MOVDstore [8] dst (MOVDload [8] src mem) 			(MOVDstore dst (MOVDload src mem) mem)))
 12078  	for {
 12079  		s := v.AuxInt
 12080  		dst := v.Args[0]
 12081  		src := v.Args[1]
 12082  		mem := v.Args[2]
 12083  		if !(SizeAndAlign(s).Size() == 24) {
 12084  			break
 12085  		}
 12086  		v.reset(OpARM64MOVDstore)
 12087  		v.AuxInt = 16
 12088  		v.AddArg(dst)
 12089  		v0 := b.NewValue0(v.Line, OpARM64MOVDload, config.fe.TypeUInt64())
 12090  		v0.AuxInt = 16
 12091  		v0.AddArg(src)
 12092  		v0.AddArg(mem)
 12093  		v.AddArg(v0)
 12094  		v1 := b.NewValue0(v.Line, OpARM64MOVDstore, TypeMem)
 12095  		v1.AuxInt = 8
 12096  		v1.AddArg(dst)
 12097  		v2 := b.NewValue0(v.Line, OpARM64MOVDload, config.fe.TypeUInt64())
 12098  		v2.AuxInt = 8
 12099  		v2.AddArg(src)
 12100  		v2.AddArg(mem)
 12101  		v1.AddArg(v2)
 12102  		v3 := b.NewValue0(v.Line, OpARM64MOVDstore, TypeMem)
 12103  		v3.AddArg(dst)
 12104  		v4 := b.NewValue0(v.Line, OpARM64MOVDload, config.fe.TypeUInt64())
 12105  		v4.AddArg(src)
 12106  		v4.AddArg(mem)
 12107  		v3.AddArg(v4)
 12108  		v3.AddArg(mem)
 12109  		v1.AddArg(v3)
 12110  		v.AddArg(v1)
 12111  		return true
 12112  	}
 12113  	// match: (Move [s] dst src mem)
 12114  	// cond: SizeAndAlign(s).Size()%8 != 0 && SizeAndAlign(s).Size() > 8
 12115  	// result: (Move [MakeSizeAndAlign(SizeAndAlign(s).Size()%8, 1).Int64()] 		(OffPtr <dst.Type> dst [SizeAndAlign(s).Size()-SizeAndAlign(s).Size()%8]) 		(OffPtr <src.Type> src [SizeAndAlign(s).Size()-SizeAndAlign(s).Size()%8]) 		(Move [MakeSizeAndAlign(SizeAndAlign(s).Size()-SizeAndAlign(s).Size()%8, 1).Int64()] dst src mem))
 12116  	for {
 12117  		s := v.AuxInt
 12118  		dst := v.Args[0]
 12119  		src := v.Args[1]
 12120  		mem := v.Args[2]
 12121  		if !(SizeAndAlign(s).Size()%8 != 0 && SizeAndAlign(s).Size() > 8) {
 12122  			break
 12123  		}
 12124  		v.reset(OpMove)
 12125  		v.AuxInt = MakeSizeAndAlign(SizeAndAlign(s).Size()%8, 1).Int64()
 12126  		v0 := b.NewValue0(v.Line, OpOffPtr, dst.Type)
 12127  		v0.AuxInt = SizeAndAlign(s).Size() - SizeAndAlign(s).Size()%8
 12128  		v0.AddArg(dst)
 12129  		v.AddArg(v0)
 12130  		v1 := b.NewValue0(v.Line, OpOffPtr, src.Type)
 12131  		v1.AuxInt = SizeAndAlign(s).Size() - SizeAndAlign(s).Size()%8
 12132  		v1.AddArg(src)
 12133  		v.AddArg(v1)
 12134  		v2 := b.NewValue0(v.Line, OpMove, TypeMem)
 12135  		v2.AuxInt = MakeSizeAndAlign(SizeAndAlign(s).Size()-SizeAndAlign(s).Size()%8, 1).Int64()
 12136  		v2.AddArg(dst)
 12137  		v2.AddArg(src)
 12138  		v2.AddArg(mem)
 12139  		v.AddArg(v2)
 12140  		return true
 12141  	}
 12142  	// match: (Move [s] dst src mem)
 12143  	// cond: SizeAndAlign(s).Size() > 24 && SizeAndAlign(s).Size()%8 == 0
 12144  	// result: (LoweredMove 		dst 		src 		(ADDconst <src.Type> src [SizeAndAlign(s).Size()-moveSize(SizeAndAlign(s).Align(), config)]) 		mem)
 12145  	for {
 12146  		s := v.AuxInt
 12147  		dst := v.Args[0]
 12148  		src := v.Args[1]
 12149  		mem := v.Args[2]
 12150  		if !(SizeAndAlign(s).Size() > 24 && SizeAndAlign(s).Size()%8 == 0) {
 12151  			break
 12152  		}
 12153  		v.reset(OpARM64LoweredMove)
 12154  		v.AddArg(dst)
 12155  		v.AddArg(src)
 12156  		v0 := b.NewValue0(v.Line, OpARM64ADDconst, src.Type)
 12157  		v0.AuxInt = SizeAndAlign(s).Size() - moveSize(SizeAndAlign(s).Align(), config)
 12158  		v0.AddArg(src)
 12159  		v.AddArg(v0)
 12160  		v.AddArg(mem)
 12161  		return true
 12162  	}
 12163  	return false
 12164  }
 12165  func rewriteValueARM64_OpMul16(v *Value, config *Config) bool {
 12166  	b := v.Block
 12167  	_ = b
 12168  	// match: (Mul16 x y)
 12169  	// cond:
 12170  	// result: (MULW x y)
 12171  	for {
 12172  		x := v.Args[0]
 12173  		y := v.Args[1]
 12174  		v.reset(OpARM64MULW)
 12175  		v.AddArg(x)
 12176  		v.AddArg(y)
 12177  		return true
 12178  	}
 12179  }
 12180  func rewriteValueARM64_OpMul32(v *Value, config *Config) bool {
 12181  	b := v.Block
 12182  	_ = b
 12183  	// match: (Mul32 x y)
 12184  	// cond:
 12185  	// result: (MULW x y)
 12186  	for {
 12187  		x := v.Args[0]
 12188  		y := v.Args[1]
 12189  		v.reset(OpARM64MULW)
 12190  		v.AddArg(x)
 12191  		v.AddArg(y)
 12192  		return true
 12193  	}
 12194  }
 12195  func rewriteValueARM64_OpMul32F(v *Value, config *Config) bool {
 12196  	b := v.Block
 12197  	_ = b
 12198  	// match: (Mul32F x y)
 12199  	// cond:
 12200  	// result: (FMULS x y)
 12201  	for {
 12202  		x := v.Args[0]
 12203  		y := v.Args[1]
 12204  		v.reset(OpARM64FMULS)
 12205  		v.AddArg(x)
 12206  		v.AddArg(y)
 12207  		return true
 12208  	}
 12209  }
 12210  func rewriteValueARM64_OpMul64(v *Value, config *Config) bool {
 12211  	b := v.Block
 12212  	_ = b
 12213  	// match: (Mul64 x y)
 12214  	// cond:
 12215  	// result: (MUL x y)
 12216  	for {
 12217  		x := v.Args[0]
 12218  		y := v.Args[1]
 12219  		v.reset(OpARM64MUL)
 12220  		v.AddArg(x)
 12221  		v.AddArg(y)
 12222  		return true
 12223  	}
 12224  }
 12225  func rewriteValueARM64_OpMul64F(v *Value, config *Config) bool {
 12226  	b := v.Block
 12227  	_ = b
 12228  	// match: (Mul64F x y)
 12229  	// cond:
 12230  	// result: (FMULD x y)
 12231  	for {
 12232  		x := v.Args[0]
 12233  		y := v.Args[1]
 12234  		v.reset(OpARM64FMULD)
 12235  		v.AddArg(x)
 12236  		v.AddArg(y)
 12237  		return true
 12238  	}
 12239  }
 12240  func rewriteValueARM64_OpMul8(v *Value, config *Config) bool {
 12241  	b := v.Block
 12242  	_ = b
 12243  	// match: (Mul8 x y)
 12244  	// cond:
 12245  	// result: (MULW x y)
 12246  	for {
 12247  		x := v.Args[0]
 12248  		y := v.Args[1]
 12249  		v.reset(OpARM64MULW)
 12250  		v.AddArg(x)
 12251  		v.AddArg(y)
 12252  		return true
 12253  	}
 12254  }
 12255  func rewriteValueARM64_OpNeg16(v *Value, config *Config) bool {
 12256  	b := v.Block
 12257  	_ = b
 12258  	// match: (Neg16 x)
 12259  	// cond:
 12260  	// result: (NEG x)
 12261  	for {
 12262  		x := v.Args[0]
 12263  		v.reset(OpARM64NEG)
 12264  		v.AddArg(x)
 12265  		return true
 12266  	}
 12267  }
 12268  func rewriteValueARM64_OpNeg32(v *Value, config *Config) bool {
 12269  	b := v.Block
 12270  	_ = b
 12271  	// match: (Neg32 x)
 12272  	// cond:
 12273  	// result: (NEG x)
 12274  	for {
 12275  		x := v.Args[0]
 12276  		v.reset(OpARM64NEG)
 12277  		v.AddArg(x)
 12278  		return true
 12279  	}
 12280  }
 12281  func rewriteValueARM64_OpNeg32F(v *Value, config *Config) bool {
 12282  	b := v.Block
 12283  	_ = b
 12284  	// match: (Neg32F x)
 12285  	// cond:
 12286  	// result: (FNEGS x)
 12287  	for {
 12288  		x := v.Args[0]
 12289  		v.reset(OpARM64FNEGS)
 12290  		v.AddArg(x)
 12291  		return true
 12292  	}
 12293  }
 12294  func rewriteValueARM64_OpNeg64(v *Value, config *Config) bool {
 12295  	b := v.Block
 12296  	_ = b
 12297  	// match: (Neg64 x)
 12298  	// cond:
 12299  	// result: (NEG x)
 12300  	for {
 12301  		x := v.Args[0]
 12302  		v.reset(OpARM64NEG)
 12303  		v.AddArg(x)
 12304  		return true
 12305  	}
 12306  }
 12307  func rewriteValueARM64_OpNeg64F(v *Value, config *Config) bool {
 12308  	b := v.Block
 12309  	_ = b
 12310  	// match: (Neg64F x)
 12311  	// cond:
 12312  	// result: (FNEGD x)
 12313  	for {
 12314  		x := v.Args[0]
 12315  		v.reset(OpARM64FNEGD)
 12316  		v.AddArg(x)
 12317  		return true
 12318  	}
 12319  }
 12320  func rewriteValueARM64_OpNeg8(v *Value, config *Config) bool {
 12321  	b := v.Block
 12322  	_ = b
 12323  	// match: (Neg8 x)
 12324  	// cond:
 12325  	// result: (NEG x)
 12326  	for {
 12327  		x := v.Args[0]
 12328  		v.reset(OpARM64NEG)
 12329  		v.AddArg(x)
 12330  		return true
 12331  	}
 12332  }
// rewriteValueARM64_OpNeq16 lowers Neq16: both 16-bit operands are
// zero-extended to 32 bits, compared with CMPW, and the inequality flag is
// materialized via NotEqual. Generated code: the AddArg call sequence fixes
// SSA operand order — do not reorder statements.
func rewriteValueARM64_OpNeq16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Neq16 x y)
	// cond:
	// result: (NotEqual (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64NotEqual)
		v0 := b.NewValue0(v.Line, OpARM64CMPW, TypeFlags)
		v1 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpNeq32 lowers Neq32 to a 32-bit compare plus flag
// materialization: (Neq32 x y) -> (NotEqual (CMPW x y)). Generated code:
// statement order builds the SSA value graph — do not reorder.
func rewriteValueARM64_OpNeq32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Neq32 x y)
	// cond:
	// result: (NotEqual (CMPW x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64NotEqual)
		v0 := b.NewValue0(v.Line, OpARM64CMPW, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpNeq32F lowers Neq32F to a single-precision FP compare
// plus flag materialization: (Neq32F x y) -> (NotEqual (FCMPS x y)).
// Generated code: statement order builds the SSA value graph — do not reorder.
func rewriteValueARM64_OpNeq32F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Neq32F x y)
	// cond:
	// result: (NotEqual (FCMPS x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64NotEqual)
		v0 := b.NewValue0(v.Line, OpARM64FCMPS, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpNeq64 lowers Neq64 to a 64-bit compare plus flag
// materialization: (Neq64 x y) -> (NotEqual (CMP x y)). Generated code:
// statement order builds the SSA value graph — do not reorder.
func rewriteValueARM64_OpNeq64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Neq64 x y)
	// cond:
	// result: (NotEqual (CMP x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64NotEqual)
		v0 := b.NewValue0(v.Line, OpARM64CMP, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpNeq64F lowers Neq64F to a double-precision FP compare
// plus flag materialization: (Neq64F x y) -> (NotEqual (FCMPD x y)).
// Generated code: statement order builds the SSA value graph — do not reorder.
func rewriteValueARM64_OpNeq64F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Neq64F x y)
	// cond:
	// result: (NotEqual (FCMPD x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64NotEqual)
		v0 := b.NewValue0(v.Line, OpARM64FCMPD, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpNeq8 lowers Neq8: both 8-bit operands are zero-extended
// to 32 bits, compared with CMPW, and the inequality flag is materialized via
// NotEqual. Generated code: the AddArg call sequence fixes SSA operand
// order — do not reorder statements.
func rewriteValueARM64_OpNeq8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Neq8 x y)
	// cond:
	// result: (NotEqual (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64NotEqual)
		v0 := b.NewValue0(v.Line, OpARM64CMPW, TypeFlags)
		v1 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
 12443  func rewriteValueARM64_OpNeqB(v *Value, config *Config) bool {
 12444  	b := v.Block
 12445  	_ = b
 12446  	// match: (NeqB x y)
 12447  	// cond:
 12448  	// result: (XOR x y)
 12449  	for {
 12450  		x := v.Args[0]
 12451  		y := v.Args[1]
 12452  		v.reset(OpARM64XOR)
 12453  		v.AddArg(x)
 12454  		v.AddArg(y)
 12455  		return true
 12456  	}
 12457  }
// rewriteValueARM64_OpNeqPtr lowers pointer inequality to a 64-bit compare
// plus flag materialization: (NeqPtr x y) -> (NotEqual (CMP x y)).
// Generated code: statement order builds the SSA value graph — do not reorder.
func rewriteValueARM64_OpNeqPtr(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (NeqPtr x y)
	// cond:
	// result: (NotEqual (CMP x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64NotEqual)
		v0 := b.NewValue0(v.Line, OpARM64CMP, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
 12475  func rewriteValueARM64_OpNilCheck(v *Value, config *Config) bool {
 12476  	b := v.Block
 12477  	_ = b
 12478  	// match: (NilCheck ptr mem)
 12479  	// cond:
 12480  	// result: (LoweredNilCheck ptr mem)
 12481  	for {
 12482  		ptr := v.Args[0]
 12483  		mem := v.Args[1]
 12484  		v.reset(OpARM64LoweredNilCheck)
 12485  		v.AddArg(ptr)
 12486  		v.AddArg(mem)
 12487  		return true
 12488  	}
 12489  }
// rewriteValueARM64_OpNot lowers boolean NOT by XOR-ing with the constant 1:
// (Not x) -> (XOR (MOVDconst [1]) x). Generated code: the AddArg call
// sequence fixes SSA operand order — do not reorder statements.
func rewriteValueARM64_OpNot(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Not x)
	// cond:
	// result: (XOR (MOVDconst [1]) x)
	for {
		x := v.Args[0]
		v.reset(OpARM64XOR)
		v0 := b.NewValue0(v.Line, OpARM64MOVDconst, config.fe.TypeUInt64())
		v0.AuxInt = 1
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
}
// rewriteValueARM64_OpOffPtr lowers OffPtr. Two ordered cases: an offset from
// the stack pointer becomes a MOVDaddr address computation; any other base
// pointer becomes an ADDconst. Generated code: case order matters, since the
// first case is more specific — do not reorder.
func rewriteValueARM64_OpOffPtr(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (OffPtr [off] ptr:(SP))
	// cond:
	// result: (MOVDaddr [off] ptr)
	for {
		off := v.AuxInt
		ptr := v.Args[0]
		if ptr.Op != OpSP {
			break
		}
		v.reset(OpARM64MOVDaddr)
		v.AuxInt = off
		v.AddArg(ptr)
		return true
	}
	// match: (OffPtr [off] ptr)
	// cond:
	// result: (ADDconst [off] ptr)
	for {
		off := v.AuxInt
		ptr := v.Args[0]
		v.reset(OpARM64ADDconst)
		v.AuxInt = off
		v.AddArg(ptr)
		return true
	}
}
 12535  func rewriteValueARM64_OpOr16(v *Value, config *Config) bool {
 12536  	b := v.Block
 12537  	_ = b
 12538  	// match: (Or16 x y)
 12539  	// cond:
 12540  	// result: (OR x y)
 12541  	for {
 12542  		x := v.Args[0]
 12543  		y := v.Args[1]
 12544  		v.reset(OpARM64OR)
 12545  		v.AddArg(x)
 12546  		v.AddArg(y)
 12547  		return true
 12548  	}
 12549  }
 12550  func rewriteValueARM64_OpOr32(v *Value, config *Config) bool {
 12551  	b := v.Block
 12552  	_ = b
 12553  	// match: (Or32 x y)
 12554  	// cond:
 12555  	// result: (OR x y)
 12556  	for {
 12557  		x := v.Args[0]
 12558  		y := v.Args[1]
 12559  		v.reset(OpARM64OR)
 12560  		v.AddArg(x)
 12561  		v.AddArg(y)
 12562  		return true
 12563  	}
 12564  }
 12565  func rewriteValueARM64_OpOr64(v *Value, config *Config) bool {
 12566  	b := v.Block
 12567  	_ = b
 12568  	// match: (Or64 x y)
 12569  	// cond:
 12570  	// result: (OR x y)
 12571  	for {
 12572  		x := v.Args[0]
 12573  		y := v.Args[1]
 12574  		v.reset(OpARM64OR)
 12575  		v.AddArg(x)
 12576  		v.AddArg(y)
 12577  		return true
 12578  	}
 12579  }
 12580  func rewriteValueARM64_OpOr8(v *Value, config *Config) bool {
 12581  	b := v.Block
 12582  	_ = b
 12583  	// match: (Or8 x y)
 12584  	// cond:
 12585  	// result: (OR x y)
 12586  	for {
 12587  		x := v.Args[0]
 12588  		y := v.Args[1]
 12589  		v.reset(OpARM64OR)
 12590  		v.AddArg(x)
 12591  		v.AddArg(y)
 12592  		return true
 12593  	}
 12594  }
 12595  func rewriteValueARM64_OpOrB(v *Value, config *Config) bool {
 12596  	b := v.Block
 12597  	_ = b
 12598  	// match: (OrB x y)
 12599  	// cond:
 12600  	// result: (OR x y)
 12601  	for {
 12602  		x := v.Args[0]
 12603  		y := v.Args[1]
 12604  		v.reset(OpARM64OR)
 12605  		v.AddArg(x)
 12606  		v.AddArg(y)
 12607  		return true
 12608  	}
 12609  }
// rewriteValueARM64_OpRsh16Ux16 lowers an unsigned 16-bit right shift with a
// 16-bit shift count. Both operands are zero-extended to 64 bits; CSELULT
// selects the SRL result when the count is below 64 (per CMPconst [64]) and
// the constant 0 otherwise, giving Go's shift semantics for oversized counts.
// Generated code: the AddArg call sequence fixes SSA operand order — do not
// reorder statements.
func rewriteValueARM64_OpRsh16Ux16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh16Ux16 <t> x y)
	// cond:
	// result: (CSELULT (SRL <t> (ZeroExt16to64 x) (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSELULT)
		v0 := b.NewValue0(v.Line, OpARM64SRL, t)
		v1 := b.NewValue0(v.Line, OpZeroExt16to64, config.fe.TypeUInt64())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpZeroExt16to64, config.fe.TypeUInt64())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Line, OpConst64, t)
		v3.AuxInt = 0
		v.AddArg(v3)
		v4 := b.NewValue0(v.Line, OpARM64CMPconst, TypeFlags)
		v4.AuxInt = 64
		v5 := b.NewValue0(v.Line, OpZeroExt16to64, config.fe.TypeUInt64())
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueARM64_OpRsh16Ux32 lowers an unsigned 16-bit right shift with a
// 32-bit shift count: same CSELULT pattern as Rsh16Ux16 but the count is
// zero-extended from 32 bits. Generated code: the AddArg call sequence fixes
// SSA operand order — do not reorder statements.
func rewriteValueARM64_OpRsh16Ux32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh16Ux32 <t> x y)
	// cond:
	// result: (CSELULT (SRL <t> (ZeroExt16to64 x) (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSELULT)
		v0 := b.NewValue0(v.Line, OpARM64SRL, t)
		v1 := b.NewValue0(v.Line, OpZeroExt16to64, config.fe.TypeUInt64())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpZeroExt32to64, config.fe.TypeUInt64())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Line, OpConst64, t)
		v3.AuxInt = 0
		v.AddArg(v3)
		v4 := b.NewValue0(v.Line, OpARM64CMPconst, TypeFlags)
		v4.AuxInt = 64
		v5 := b.NewValue0(v.Line, OpZeroExt32to64, config.fe.TypeUInt64())
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueARM64_OpRsh16Ux64 lowers an unsigned 16-bit right shift with a
// 64-bit count. Three ordered cases: a constant count below 16 becomes a
// plain SRLconst; a constant count of 16 or more yields the constant 0 (all
// bits shifted out); otherwise the generic CSELULT select-on-count pattern is
// used. Generated code: case order and AddArg call order are significant —
// do not reorder.
func rewriteValueARM64_OpRsh16Ux64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh16Ux64 x (MOVDconst [c]))
	// cond: uint64(c) < 16
	// result: (SRLconst (ZeroExt16to64 x) [c])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(uint64(c) < 16) {
			break
		}
		v.reset(OpARM64SRLconst)
		v.AuxInt = c
		v0 := b.NewValue0(v.Line, OpZeroExt16to64, config.fe.TypeUInt64())
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (Rsh16Ux64 _ (MOVDconst [c]))
	// cond: uint64(c) >= 16
	// result: (MOVDconst [0])
	for {
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(uint64(c) >= 16) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (Rsh16Ux64 <t> x y)
	// cond:
	// result: (CSELULT (SRL <t> (ZeroExt16to64 x) y) (Const64 <t> [0]) (CMPconst [64] y))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSELULT)
		v0 := b.NewValue0(v.Line, OpARM64SRL, t)
		v1 := b.NewValue0(v.Line, OpZeroExt16to64, config.fe.TypeUInt64())
		v1.AddArg(x)
		v0.AddArg(v1)
		v0.AddArg(y)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Line, OpConst64, t)
		v2.AuxInt = 0
		v.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpARM64CMPconst, TypeFlags)
		v3.AuxInt = 64
		v3.AddArg(y)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueARM64_OpRsh16Ux8 lowers an unsigned 16-bit right shift with an
// 8-bit shift count: same CSELULT pattern as Rsh16Ux16 but the count is
// zero-extended from 8 bits. Generated code: the AddArg call sequence fixes
// SSA operand order — do not reorder statements.
func rewriteValueARM64_OpRsh16Ux8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh16Ux8  <t> x y)
	// cond:
	// result: (CSELULT (SRL <t> (ZeroExt16to64 x) (ZeroExt8to64  y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64  y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSELULT)
		v0 := b.NewValue0(v.Line, OpARM64SRL, t)
		v1 := b.NewValue0(v.Line, OpZeroExt16to64, config.fe.TypeUInt64())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpZeroExt8to64, config.fe.TypeUInt64())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Line, OpConst64, t)
		v3.AuxInt = 0
		v.AddArg(v3)
		v4 := b.NewValue0(v.Line, OpARM64CMPconst, TypeFlags)
		v4.AuxInt = 64
		v5 := b.NewValue0(v.Line, OpZeroExt8to64, config.fe.TypeUInt64())
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueARM64_OpRsh16x16 lowers a signed 16-bit right shift with a
// 16-bit count: the value is sign-extended to 64 bits and arithmetically
// shifted; the CSELULT clamps the count to 63 when it is 64 or more, so
// oversized shifts propagate the sign bit as Go requires. Generated code:
// the AddArg call sequence fixes SSA operand order — do not reorder.
func rewriteValueARM64_OpRsh16x16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh16x16 x y)
	// cond:
	// result: (SRA (SignExt16to64 x) (CSELULT <y.Type> (ZeroExt16to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y))))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SRA)
		v0 := b.NewValue0(v.Line, OpSignExt16to64, config.fe.TypeInt64())
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpARM64CSELULT, y.Type)
		v2 := b.NewValue0(v.Line, OpZeroExt16to64, config.fe.TypeUInt64())
		v2.AddArg(y)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpConst64, y.Type)
		v3.AuxInt = 63
		v1.AddArg(v3)
		v4 := b.NewValue0(v.Line, OpARM64CMPconst, TypeFlags)
		v4.AuxInt = 64
		v5 := b.NewValue0(v.Line, OpZeroExt16to64, config.fe.TypeUInt64())
		v5.AddArg(y)
		v4.AddArg(v5)
		v1.AddArg(v4)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueARM64_OpRsh16x32 lowers a signed 16-bit right shift with a
// 32-bit count: same sign-extend/SRA/clamp-to-63 pattern as Rsh16x16 but the
// count is zero-extended from 32 bits. Generated code: the AddArg call
// sequence fixes SSA operand order — do not reorder statements.
func rewriteValueARM64_OpRsh16x32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh16x32 x y)
	// cond:
	// result: (SRA (SignExt16to64 x) (CSELULT <y.Type> (ZeroExt32to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y))))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SRA)
		v0 := b.NewValue0(v.Line, OpSignExt16to64, config.fe.TypeInt64())
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpARM64CSELULT, y.Type)
		v2 := b.NewValue0(v.Line, OpZeroExt32to64, config.fe.TypeUInt64())
		v2.AddArg(y)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpConst64, y.Type)
		v3.AuxInt = 63
		v1.AddArg(v3)
		v4 := b.NewValue0(v.Line, OpARM64CMPconst, TypeFlags)
		v4.AuxInt = 64
		v5 := b.NewValue0(v.Line, OpZeroExt32to64, config.fe.TypeUInt64())
		v5.AddArg(y)
		v4.AddArg(v5)
		v1.AddArg(v4)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueARM64_OpRsh16x64 lowers a signed 16-bit right shift with a
// 64-bit count. Three ordered cases: a constant count below 16 becomes a
// plain SRAconst; a constant count of 16 or more becomes SRAconst by 63
// (replicating the sign bit, per Go's signed-shift semantics); otherwise the
// generic CSELULT clamp-to-63 pattern is used. Generated code: case order
// and AddArg call order are significant — do not reorder.
func rewriteValueARM64_OpRsh16x64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh16x64  x (MOVDconst [c]))
	// cond: uint64(c) < 16
	// result: (SRAconst (SignExt16to64 x) [c])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(uint64(c) < 16) {
			break
		}
		v.reset(OpARM64SRAconst)
		v.AuxInt = c
		v0 := b.NewValue0(v.Line, OpSignExt16to64, config.fe.TypeInt64())
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (Rsh16x64 x (MOVDconst [c]))
	// cond: uint64(c) >= 16
	// result: (SRAconst (SignExt16to64 x) [63])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(uint64(c) >= 16) {
			break
		}
		v.reset(OpARM64SRAconst)
		v.AuxInt = 63
		v0 := b.NewValue0(v.Line, OpSignExt16to64, config.fe.TypeInt64())
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (Rsh16x64 x y)
	// cond:
	// result: (SRA (SignExt16to64 x) (CSELULT <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SRA)
		v0 := b.NewValue0(v.Line, OpSignExt16to64, config.fe.TypeInt64())
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpARM64CSELULT, y.Type)
		v1.AddArg(y)
		v2 := b.NewValue0(v.Line, OpConst64, y.Type)
		v2.AuxInt = 63
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpARM64CMPconst, TypeFlags)
		v3.AuxInt = 64
		v3.AddArg(y)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueARM64_OpRsh16x8 lowers a signed 16-bit right shift with an
// 8-bit count: same sign-extend/SRA/clamp-to-63 pattern as Rsh16x16 but the
// count is zero-extended from 8 bits. Generated code: the AddArg call
// sequence fixes SSA operand order — do not reorder statements.
func rewriteValueARM64_OpRsh16x8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh16x8  x y)
	// cond:
	// result: (SRA (SignExt16to64 x) (CSELULT <y.Type> (ZeroExt8to64  y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt8to64  y))))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SRA)
		v0 := b.NewValue0(v.Line, OpSignExt16to64, config.fe.TypeInt64())
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpARM64CSELULT, y.Type)
		v2 := b.NewValue0(v.Line, OpZeroExt8to64, config.fe.TypeUInt64())
		v2.AddArg(y)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpConst64, y.Type)
		v3.AuxInt = 63
		v1.AddArg(v3)
		v4 := b.NewValue0(v.Line, OpARM64CMPconst, TypeFlags)
		v4.AuxInt = 64
		v5 := b.NewValue0(v.Line, OpZeroExt8to64, config.fe.TypeUInt64())
		v5.AddArg(y)
		v4.AddArg(v5)
		v1.AddArg(v4)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueARM64_OpRsh32Ux16 lowers an unsigned 32-bit right shift with a
// 16-bit count: both operands are zero-extended to 64 bits and CSELULT
// selects the SRL result when the count is below 64, else the constant 0.
// Generated code: the AddArg call sequence fixes SSA operand order — do not
// reorder statements.
func rewriteValueARM64_OpRsh32Ux16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh32Ux16 <t> x y)
	// cond:
	// result: (CSELULT (SRL <t> (ZeroExt32to64 x) (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSELULT)
		v0 := b.NewValue0(v.Line, OpARM64SRL, t)
		v1 := b.NewValue0(v.Line, OpZeroExt32to64, config.fe.TypeUInt64())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpZeroExt16to64, config.fe.TypeUInt64())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Line, OpConst64, t)
		v3.AuxInt = 0
		v.AddArg(v3)
		v4 := b.NewValue0(v.Line, OpARM64CMPconst, TypeFlags)
		v4.AuxInt = 64
		v5 := b.NewValue0(v.Line, OpZeroExt16to64, config.fe.TypeUInt64())
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueARM64_OpRsh32Ux32 lowers an unsigned 32-bit right shift with a
// 32-bit count: same CSELULT pattern as Rsh32Ux16 but the count is
// zero-extended from 32 bits. Generated code: the AddArg call sequence fixes
// SSA operand order — do not reorder statements.
func rewriteValueARM64_OpRsh32Ux32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh32Ux32 <t> x y)
	// cond:
	// result: (CSELULT (SRL <t> (ZeroExt32to64 x) (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSELULT)
		v0 := b.NewValue0(v.Line, OpARM64SRL, t)
		v1 := b.NewValue0(v.Line, OpZeroExt32to64, config.fe.TypeUInt64())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpZeroExt32to64, config.fe.TypeUInt64())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Line, OpConst64, t)
		v3.AuxInt = 0
		v.AddArg(v3)
		v4 := b.NewValue0(v.Line, OpARM64CMPconst, TypeFlags)
		v4.AuxInt = 64
		v5 := b.NewValue0(v.Line, OpZeroExt32to64, config.fe.TypeUInt64())
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueARM64_OpRsh32Ux64 lowers an unsigned 32-bit right shift with a
// 64-bit count. Three ordered cases: a constant count below 32 becomes a
// plain SRLconst; a constant count of 32 or more yields the constant 0 (all
// bits shifted out); otherwise the generic CSELULT select-on-count pattern is
// used. Generated code: case order and AddArg call order are significant —
// do not reorder.
func rewriteValueARM64_OpRsh32Ux64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh32Ux64 x (MOVDconst [c]))
	// cond: uint64(c) < 32
	// result: (SRLconst (ZeroExt32to64 x) [c])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(uint64(c) < 32) {
			break
		}
		v.reset(OpARM64SRLconst)
		v.AuxInt = c
		v0 := b.NewValue0(v.Line, OpZeroExt32to64, config.fe.TypeUInt64())
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (Rsh32Ux64 _ (MOVDconst [c]))
	// cond: uint64(c) >= 32
	// result: (MOVDconst [0])
	for {
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(uint64(c) >= 32) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (Rsh32Ux64 <t> x y)
	// cond:
	// result: (CSELULT (SRL <t> (ZeroExt32to64 x) y) (Const64 <t> [0]) (CMPconst [64] y))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSELULT)
		v0 := b.NewValue0(v.Line, OpARM64SRL, t)
		v1 := b.NewValue0(v.Line, OpZeroExt32to64, config.fe.TypeUInt64())
		v1.AddArg(x)
		v0.AddArg(v1)
		v0.AddArg(y)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Line, OpConst64, t)
		v2.AuxInt = 0
		v.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpARM64CMPconst, TypeFlags)
		v3.AuxInt = 64
		v3.AddArg(y)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueARM64_OpRsh32Ux8 lowers an unsigned 32-bit right shift with an
// 8-bit shift amount: both operands are zero-extended to 64 bits, and CSELULT
// selects the constant 0 instead of the SRL result based on
// (CMPconst [64] (ZeroExt8to64 y)).
func rewriteValueARM64_OpRsh32Ux8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh32Ux8  <t> x y)
	// cond:
	// result: (CSELULT (SRL <t> (ZeroExt32to64 x) (ZeroExt8to64  y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64  y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSELULT)
		v0 := b.NewValue0(v.Line, OpARM64SRL, t)
		v1 := b.NewValue0(v.Line, OpZeroExt32to64, config.fe.TypeUInt64())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpZeroExt8to64, config.fe.TypeUInt64())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Line, OpConst64, t)
		v3.AuxInt = 0
		v.AddArg(v3)
		v4 := b.NewValue0(v.Line, OpARM64CMPconst, TypeFlags)
		v4.AuxInt = 64
		v5 := b.NewValue0(v.Line, OpZeroExt8to64, config.fe.TypeUInt64())
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueARM64_OpRsh32x16 lowers a signed 32-bit right shift with a
// 16-bit shift amount: x is sign-extended to 64 bits, and the shift amount
// fed to SRA is chosen by CSELULT between (ZeroExt16to64 y) and the constant
// 63, based on (CMPconst [64] (ZeroExt16to64 y)).
func rewriteValueARM64_OpRsh32x16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh32x16 x y)
	// cond:
	// result: (SRA (SignExt32to64 x) (CSELULT <y.Type> (ZeroExt16to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y))))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SRA)
		v0 := b.NewValue0(v.Line, OpSignExt32to64, config.fe.TypeInt64())
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpARM64CSELULT, y.Type)
		v2 := b.NewValue0(v.Line, OpZeroExt16to64, config.fe.TypeUInt64())
		v2.AddArg(y)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpConst64, y.Type)
		v3.AuxInt = 63
		v1.AddArg(v3)
		v4 := b.NewValue0(v.Line, OpARM64CMPconst, TypeFlags)
		v4.AuxInt = 64
		v5 := b.NewValue0(v.Line, OpZeroExt16to64, config.fe.TypeUInt64())
		v5.AddArg(y)
		v4.AddArg(v5)
		v1.AddArg(v4)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueARM64_OpRsh32x32 lowers a signed 32-bit right shift with a
// 32-bit shift amount: x is sign-extended to 64 bits, and the SRA shift count
// is chosen by CSELULT between (ZeroExt32to64 y) and the constant 63, based
// on (CMPconst [64] (ZeroExt32to64 y)).
func rewriteValueARM64_OpRsh32x32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh32x32 x y)
	// cond:
	// result: (SRA (SignExt32to64 x) (CSELULT <y.Type> (ZeroExt32to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y))))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SRA)
		v0 := b.NewValue0(v.Line, OpSignExt32to64, config.fe.TypeInt64())
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpARM64CSELULT, y.Type)
		v2 := b.NewValue0(v.Line, OpZeroExt32to64, config.fe.TypeUInt64())
		v2.AddArg(y)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpConst64, y.Type)
		v3.AuxInt = 63
		v1.AddArg(v3)
		v4 := b.NewValue0(v.Line, OpARM64CMPconst, TypeFlags)
		v4.AuxInt = 64
		v5 := b.NewValue0(v.Line, OpZeroExt32to64, config.fe.TypeUInt64())
		v5.AddArg(y)
		v4.AddArg(v5)
		v1.AddArg(v4)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueARM64_OpRsh32x64 lowers a signed 32-bit right shift with a
// 64-bit shift amount.  Constant in-range shifts become SRAconst; constant
// out-of-range shifts become SRAconst by 63 (arithmetic shift saturates to
// the sign); the variable case feeds SRA a count selected by CSELULT between
// y and the constant 63, based on (CMPconst [64] y).
func rewriteValueARM64_OpRsh32x64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh32x64  x (MOVDconst [c]))
	// cond: uint64(c) < 32
	// result: (SRAconst (SignExt32to64 x) [c])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(uint64(c) < 32) {
			break
		}
		v.reset(OpARM64SRAconst)
		v.AuxInt = c
		v0 := b.NewValue0(v.Line, OpSignExt32to64, config.fe.TypeInt64())
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (Rsh32x64 x (MOVDconst [c]))
	// cond: uint64(c) >= 32
	// result: (SRAconst (SignExt32to64 x) [63])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(uint64(c) >= 32) {
			break
		}
		v.reset(OpARM64SRAconst)
		v.AuxInt = 63
		v0 := b.NewValue0(v.Line, OpSignExt32to64, config.fe.TypeInt64())
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (Rsh32x64 x y)
	// cond:
	// result: (SRA (SignExt32to64 x) (CSELULT <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SRA)
		v0 := b.NewValue0(v.Line, OpSignExt32to64, config.fe.TypeInt64())
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpARM64CSELULT, y.Type)
		v1.AddArg(y)
		v2 := b.NewValue0(v.Line, OpConst64, y.Type)
		v2.AuxInt = 63
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpARM64CMPconst, TypeFlags)
		v3.AuxInt = 64
		v3.AddArg(y)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueARM64_OpRsh32x8 lowers a signed 32-bit right shift with an
// 8-bit shift amount: x is sign-extended to 64 bits, and the SRA shift count
// is chosen by CSELULT between (ZeroExt8to64 y) and the constant 63, based on
// (CMPconst [64] (ZeroExt8to64 y)).
func rewriteValueARM64_OpRsh32x8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh32x8  x y)
	// cond:
	// result: (SRA (SignExt32to64 x) (CSELULT <y.Type> (ZeroExt8to64  y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt8to64  y))))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SRA)
		v0 := b.NewValue0(v.Line, OpSignExt32to64, config.fe.TypeInt64())
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpARM64CSELULT, y.Type)
		v2 := b.NewValue0(v.Line, OpZeroExt8to64, config.fe.TypeUInt64())
		v2.AddArg(y)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpConst64, y.Type)
		v3.AuxInt = 63
		v1.AddArg(v3)
		v4 := b.NewValue0(v.Line, OpARM64CMPconst, TypeFlags)
		v4.AuxInt = 64
		v5 := b.NewValue0(v.Line, OpZeroExt8to64, config.fe.TypeUInt64())
		v5.AddArg(y)
		v4.AddArg(v5)
		v1.AddArg(v4)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueARM64_OpRsh64Ux16 lowers an unsigned 64-bit right shift with a
// 16-bit shift amount: the shift amount is zero-extended to 64 bits (x needs
// no extension), and CSELULT selects the constant 0 instead of the SRL result
// based on (CMPconst [64] (ZeroExt16to64 y)).
func rewriteValueARM64_OpRsh64Ux16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh64Ux16 <t> x y)
	// cond:
	// result: (CSELULT (SRL <t> x (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSELULT)
		v0 := b.NewValue0(v.Line, OpARM64SRL, t)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Line, OpZeroExt16to64, config.fe.TypeUInt64())
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Line, OpConst64, t)
		v2.AuxInt = 0
		v.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpARM64CMPconst, TypeFlags)
		v3.AuxInt = 64
		v4 := b.NewValue0(v.Line, OpZeroExt16to64, config.fe.TypeUInt64())
		v4.AddArg(y)
		v3.AddArg(v4)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueARM64_OpRsh64Ux32 lowers an unsigned 64-bit right shift with a
// 32-bit shift amount: the shift amount is zero-extended to 64 bits, and
// CSELULT selects the constant 0 instead of the SRL result based on
// (CMPconst [64] (ZeroExt32to64 y)).
func rewriteValueARM64_OpRsh64Ux32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh64Ux32 <t> x y)
	// cond:
	// result: (CSELULT (SRL <t> x (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSELULT)
		v0 := b.NewValue0(v.Line, OpARM64SRL, t)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Line, OpZeroExt32to64, config.fe.TypeUInt64())
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Line, OpConst64, t)
		v2.AuxInt = 0
		v.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpARM64CMPconst, TypeFlags)
		v3.AuxInt = 64
		v4 := b.NewValue0(v.Line, OpZeroExt32to64, config.fe.TypeUInt64())
		v4.AddArg(y)
		v3.AddArg(v4)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueARM64_OpRsh64Ux64 lowers an unsigned 64-bit right shift with a
// 64-bit shift amount.  Constant in-range shifts become SRLconst, constant
// out-of-range shifts fold to 0, and the variable case uses CSELULT with
// (CMPconst [64] y) to select 0 when the amount is out of range.
func rewriteValueARM64_OpRsh64Ux64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh64Ux64 x (MOVDconst [c]))
	// cond: uint64(c) < 64
	// result: (SRLconst x [c])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(uint64(c) < 64) {
			break
		}
		v.reset(OpARM64SRLconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (Rsh64Ux64 _ (MOVDconst [c]))
	// cond: uint64(c) >= 64
	// result: (MOVDconst [0])
	for {
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(uint64(c) >= 64) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (Rsh64Ux64 <t> x y)
	// cond:
	// result: (CSELULT (SRL <t> x y) (Const64 <t> [0]) (CMPconst [64] y))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSELULT)
		v0 := b.NewValue0(v.Line, OpARM64SRL, t)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpConst64, t)
		v1.AuxInt = 0
		v.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpARM64CMPconst, TypeFlags)
		v2.AuxInt = 64
		v2.AddArg(y)
		v.AddArg(v2)
		return true
	}
}
// rewriteValueARM64_OpRsh64Ux8 lowers an unsigned 64-bit right shift with an
// 8-bit shift amount: the shift amount is zero-extended to 64 bits, and
// CSELULT selects the constant 0 instead of the SRL result based on
// (CMPconst [64] (ZeroExt8to64 y)).
func rewriteValueARM64_OpRsh64Ux8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh64Ux8  <t> x y)
	// cond:
	// result: (CSELULT (SRL <t> x (ZeroExt8to64  y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64  y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSELULT)
		v0 := b.NewValue0(v.Line, OpARM64SRL, t)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Line, OpZeroExt8to64, config.fe.TypeUInt64())
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Line, OpConst64, t)
		v2.AuxInt = 0
		v.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpARM64CMPconst, TypeFlags)
		v3.AuxInt = 64
		v4 := b.NewValue0(v.Line, OpZeroExt8to64, config.fe.TypeUInt64())
		v4.AddArg(y)
		v3.AddArg(v4)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueARM64_OpRsh64x16 lowers a signed 64-bit right shift with a
// 16-bit shift amount: x needs no extension, and the SRA shift count is
// chosen by CSELULT between (ZeroExt16to64 y) and the constant 63, based on
// (CMPconst [64] (ZeroExt16to64 y)).
func rewriteValueARM64_OpRsh64x16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh64x16 x y)
	// cond:
	// result: (SRA x (CSELULT <y.Type> (ZeroExt16to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y))))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SRA)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpARM64CSELULT, y.Type)
		v1 := b.NewValue0(v.Line, OpZeroExt16to64, config.fe.TypeUInt64())
		v1.AddArg(y)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpConst64, y.Type)
		v2.AuxInt = 63
		v0.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpARM64CMPconst, TypeFlags)
		v3.AuxInt = 64
		v4 := b.NewValue0(v.Line, OpZeroExt16to64, config.fe.TypeUInt64())
		v4.AddArg(y)
		v3.AddArg(v4)
		v0.AddArg(v3)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpRsh64x32 lowers a signed 64-bit right shift with a
// 32-bit shift amount: the SRA shift count is chosen by CSELULT between
// (ZeroExt32to64 y) and the constant 63, based on
// (CMPconst [64] (ZeroExt32to64 y)).
func rewriteValueARM64_OpRsh64x32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh64x32 x y)
	// cond:
	// result: (SRA x (CSELULT <y.Type> (ZeroExt32to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y))))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SRA)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpARM64CSELULT, y.Type)
		v1 := b.NewValue0(v.Line, OpZeroExt32to64, config.fe.TypeUInt64())
		v1.AddArg(y)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpConst64, y.Type)
		v2.AuxInt = 63
		v0.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpARM64CMPconst, TypeFlags)
		v3.AuxInt = 64
		v4 := b.NewValue0(v.Line, OpZeroExt32to64, config.fe.TypeUInt64())
		v4.AddArg(y)
		v3.AddArg(v4)
		v0.AddArg(v3)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpRsh64x64 lowers a signed 64-bit right shift with a
// 64-bit shift amount.  Constant in-range shifts become SRAconst; constant
// out-of-range shifts become SRAconst by 63; the variable case feeds SRA a
// count selected by CSELULT between y and the constant 63, based on
// (CMPconst [64] y).
func rewriteValueARM64_OpRsh64x64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh64x64  x (MOVDconst [c]))
	// cond: uint64(c) < 64
	// result: (SRAconst x [c])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(uint64(c) < 64) {
			break
		}
		v.reset(OpARM64SRAconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (Rsh64x64 x (MOVDconst [c]))
	// cond: uint64(c) >= 64
	// result: (SRAconst x [63])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(uint64(c) >= 64) {
			break
		}
		v.reset(OpARM64SRAconst)
		v.AuxInt = 63
		v.AddArg(x)
		return true
	}
	// match: (Rsh64x64 x y)
	// cond:
	// result: (SRA x (CSELULT <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SRA)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpARM64CSELULT, y.Type)
		v0.AddArg(y)
		v1 := b.NewValue0(v.Line, OpConst64, y.Type)
		v1.AuxInt = 63
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpARM64CMPconst, TypeFlags)
		v2.AuxInt = 64
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpRsh64x8 lowers a signed 64-bit right shift with an
// 8-bit shift amount: the SRA shift count is chosen by CSELULT between
// (ZeroExt8to64 y) and the constant 63, based on
// (CMPconst [64] (ZeroExt8to64 y)).
func rewriteValueARM64_OpRsh64x8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh64x8  x y)
	// cond:
	// result: (SRA x (CSELULT <y.Type> (ZeroExt8to64  y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt8to64  y))))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SRA)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpARM64CSELULT, y.Type)
		v1 := b.NewValue0(v.Line, OpZeroExt8to64, config.fe.TypeUInt64())
		v1.AddArg(y)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpConst64, y.Type)
		v2.AuxInt = 63
		v0.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpARM64CMPconst, TypeFlags)
		v3.AuxInt = 64
		v4 := b.NewValue0(v.Line, OpZeroExt8to64, config.fe.TypeUInt64())
		v4.AddArg(y)
		v3.AddArg(v4)
		v0.AddArg(v3)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpRsh8Ux16 lowers an unsigned 8-bit right shift with a
// 16-bit shift amount: both operands are zero-extended to 64 bits, and
// CSELULT selects the constant 0 instead of the SRL result based on
// (CMPconst [64] (ZeroExt16to64 y)).
func rewriteValueARM64_OpRsh8Ux16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh8Ux16 <t> x y)
	// cond:
	// result: (CSELULT (SRL <t> (ZeroExt8to64 x) (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSELULT)
		v0 := b.NewValue0(v.Line, OpARM64SRL, t)
		v1 := b.NewValue0(v.Line, OpZeroExt8to64, config.fe.TypeUInt64())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpZeroExt16to64, config.fe.TypeUInt64())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Line, OpConst64, t)
		v3.AuxInt = 0
		v.AddArg(v3)
		v4 := b.NewValue0(v.Line, OpARM64CMPconst, TypeFlags)
		v4.AuxInt = 64
		v5 := b.NewValue0(v.Line, OpZeroExt16to64, config.fe.TypeUInt64())
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueARM64_OpRsh8Ux32 lowers an unsigned 8-bit right shift with a
// 32-bit shift amount: both operands are zero-extended to 64 bits, and
// CSELULT selects the constant 0 instead of the SRL result based on
// (CMPconst [64] (ZeroExt32to64 y)).
func rewriteValueARM64_OpRsh8Ux32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh8Ux32 <t> x y)
	// cond:
	// result: (CSELULT (SRL <t> (ZeroExt8to64 x) (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSELULT)
		v0 := b.NewValue0(v.Line, OpARM64SRL, t)
		v1 := b.NewValue0(v.Line, OpZeroExt8to64, config.fe.TypeUInt64())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpZeroExt32to64, config.fe.TypeUInt64())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Line, OpConst64, t)
		v3.AuxInt = 0
		v.AddArg(v3)
		v4 := b.NewValue0(v.Line, OpARM64CMPconst, TypeFlags)
		v4.AuxInt = 64
		v5 := b.NewValue0(v.Line, OpZeroExt32to64, config.fe.TypeUInt64())
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueARM64_OpRsh8Ux64 lowers an unsigned 8-bit right shift with a
// 64-bit shift amount.  Constant in-range shifts become SRLconst, constant
// out-of-range shifts fold to 0, and the variable case uses CSELULT with
// (CMPconst [64] y) to select 0 when the amount is out of range.
func rewriteValueARM64_OpRsh8Ux64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh8Ux64  x (MOVDconst [c]))
	// cond: uint64(c) < 8
	// result: (SRLconst (ZeroExt8to64  x) [c])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(uint64(c) < 8) {
			break
		}
		v.reset(OpARM64SRLconst)
		v.AuxInt = c
		v0 := b.NewValue0(v.Line, OpZeroExt8to64, config.fe.TypeUInt64())
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (Rsh8Ux64  _ (MOVDconst [c]))
	// cond: uint64(c) >= 8
	// result: (MOVDconst [0])
	for {
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(uint64(c) >= 8) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (Rsh8Ux64 <t> x y)
	// cond:
	// result: (CSELULT (SRL <t> (ZeroExt8to64 x) y) (Const64 <t> [0]) (CMPconst [64] y))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSELULT)
		v0 := b.NewValue0(v.Line, OpARM64SRL, t)
		v1 := b.NewValue0(v.Line, OpZeroExt8to64, config.fe.TypeUInt64())
		v1.AddArg(x)
		v0.AddArg(v1)
		v0.AddArg(y)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Line, OpConst64, t)
		v2.AuxInt = 0
		v.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpARM64CMPconst, TypeFlags)
		v3.AuxInt = 64
		v3.AddArg(y)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueARM64_OpRsh8Ux8 lowers an unsigned 8-bit right shift with an
// 8-bit shift amount: both operands are zero-extended to 64 bits, and CSELULT
// selects the constant 0 instead of the SRL result based on
// (CMPconst [64] (ZeroExt8to64 y)).
func rewriteValueARM64_OpRsh8Ux8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh8Ux8  <t> x y)
	// cond:
	// result: (CSELULT (SRL <t> (ZeroExt8to64 x) (ZeroExt8to64  y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64  y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSELULT)
		v0 := b.NewValue0(v.Line, OpARM64SRL, t)
		v1 := b.NewValue0(v.Line, OpZeroExt8to64, config.fe.TypeUInt64())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpZeroExt8to64, config.fe.TypeUInt64())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Line, OpConst64, t)
		v3.AuxInt = 0
		v.AddArg(v3)
		v4 := b.NewValue0(v.Line, OpARM64CMPconst, TypeFlags)
		v4.AuxInt = 64
		v5 := b.NewValue0(v.Line, OpZeroExt8to64, config.fe.TypeUInt64())
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueARM64_OpRsh8x16 lowers a signed 8-bit right shift with a 16-bit
// shift amount: x is sign-extended to 64 bits, and the SRA shift count is
// chosen by CSELULT between (ZeroExt16to64 y) and the constant 63, based on
// (CMPconst [64] (ZeroExt16to64 y)).
func rewriteValueARM64_OpRsh8x16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh8x16 x y)
	// cond:
	// result: (SRA (SignExt8to64 x) (CSELULT <y.Type> (ZeroExt16to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y))))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SRA)
		v0 := b.NewValue0(v.Line, OpSignExt8to64, config.fe.TypeInt64())
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpARM64CSELULT, y.Type)
		v2 := b.NewValue0(v.Line, OpZeroExt16to64, config.fe.TypeUInt64())
		v2.AddArg(y)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpConst64, y.Type)
		v3.AuxInt = 63
		v1.AddArg(v3)
		v4 := b.NewValue0(v.Line, OpARM64CMPconst, TypeFlags)
		v4.AuxInt = 64
		v5 := b.NewValue0(v.Line, OpZeroExt16to64, config.fe.TypeUInt64())
		v5.AddArg(y)
		v4.AddArg(v5)
		v1.AddArg(v4)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueARM64_OpRsh8x32 lowers a signed 8-bit right shift with a 32-bit
// shift amount: x is sign-extended to 64 bits, and the SRA shift count is
// chosen by CSELULT between (ZeroExt32to64 y) and the constant 63, based on
// (CMPconst [64] (ZeroExt32to64 y)).
func rewriteValueARM64_OpRsh8x32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh8x32 x y)
	// cond:
	// result: (SRA (SignExt8to64 x) (CSELULT <y.Type> (ZeroExt32to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y))))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SRA)
		v0 := b.NewValue0(v.Line, OpSignExt8to64, config.fe.TypeInt64())
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpARM64CSELULT, y.Type)
		v2 := b.NewValue0(v.Line, OpZeroExt32to64, config.fe.TypeUInt64())
		v2.AddArg(y)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpConst64, y.Type)
		v3.AuxInt = 63
		v1.AddArg(v3)
		v4 := b.NewValue0(v.Line, OpARM64CMPconst, TypeFlags)
		v4.AuxInt = 64
		v5 := b.NewValue0(v.Line, OpZeroExt32to64, config.fe.TypeUInt64())
		v5.AddArg(y)
		v4.AddArg(v5)
		v1.AddArg(v4)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueARM64_OpRsh8x64 lowers a signed 8-bit right shift with a 64-bit
// shift amount.  Constant in-range shifts become SRAconst; constant
// out-of-range shifts become SRAconst by 63; the variable case feeds SRA a
// count selected by CSELULT between y and the constant 63, based on
// (CMPconst [64] y).
func rewriteValueARM64_OpRsh8x64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh8x64   x (MOVDconst [c]))
	// cond: uint64(c) < 8
	// result: (SRAconst (SignExt8to64  x) [c])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(uint64(c) < 8) {
			break
		}
		v.reset(OpARM64SRAconst)
		v.AuxInt = c
		v0 := b.NewValue0(v.Line, OpSignExt8to64, config.fe.TypeInt64())
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (Rsh8x64  x (MOVDconst [c]))
	// cond: uint64(c) >= 8
	// result: (SRAconst (SignExt8to64  x) [63])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(uint64(c) >= 8) {
			break
		}
		v.reset(OpARM64SRAconst)
		v.AuxInt = 63
		v0 := b.NewValue0(v.Line, OpSignExt8to64, config.fe.TypeInt64())
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (Rsh8x64 x y)
	// cond:
	// result: (SRA (SignExt8to64 x) (CSELULT <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SRA)
		v0 := b.NewValue0(v.Line, OpSignExt8to64, config.fe.TypeInt64())
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpARM64CSELULT, y.Type)
		v1.AddArg(y)
		v2 := b.NewValue0(v.Line, OpConst64, y.Type)
		v2.AuxInt = 63
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpARM64CMPconst, TypeFlags)
		v3.AuxInt = 64
		v3.AddArg(y)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueARM64_OpRsh8x8 lowers a signed 8-bit right shift with an 8-bit
// shift amount: x is sign-extended to 64 bits, and the SRA shift count is
// chosen by CSELULT between (ZeroExt8to64 y) and the constant 63, based on
// (CMPconst [64] (ZeroExt8to64 y)).
func rewriteValueARM64_OpRsh8x8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh8x8  x y)
	// cond:
	// result: (SRA (SignExt8to64 x) (CSELULT <y.Type> (ZeroExt8to64  y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt8to64  y))))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SRA)
		v0 := b.NewValue0(v.Line, OpSignExt8to64, config.fe.TypeInt64())
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpARM64CSELULT, y.Type)
		v2 := b.NewValue0(v.Line, OpZeroExt8to64, config.fe.TypeUInt64())
		v2.AddArg(y)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpConst64, y.Type)
		v3.AuxInt = 63
		v1.AddArg(v3)
		v4 := b.NewValue0(v.Line, OpARM64CMPconst, TypeFlags)
		v4.AuxInt = 64
		v5 := b.NewValue0(v.Line, OpZeroExt8to64, config.fe.TypeUInt64())
		v5.AddArg(y)
		v4.AddArg(v5)
		v1.AddArg(v4)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueARM64_OpSignExt16to32 lowers a 16-to-32-bit sign extension to
// the ARM64 MOVHreg op.  Always matches.
func rewriteValueARM64_OpSignExt16to32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (SignExt16to32 x)
	// cond:
	// result: (MOVHreg x)
	for {
		x := v.Args[0]
		v.reset(OpARM64MOVHreg)
		v.AddArg(x)
		return true
	}
}
// rewriteValueARM64_OpSignExt16to64 lowers a 16-to-64-bit sign extension to
// the ARM64 MOVHreg op.  Always matches.
func rewriteValueARM64_OpSignExt16to64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (SignExt16to64 x)
	// cond:
	// result: (MOVHreg x)
	for {
		x := v.Args[0]
		v.reset(OpARM64MOVHreg)
		v.AddArg(x)
		return true
	}
}
// rewriteValueARM64_OpSignExt32to64 lowers a 32-to-64-bit sign extension to
// the ARM64 MOVWreg op.  Always matches.
func rewriteValueARM64_OpSignExt32to64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (SignExt32to64 x)
	// cond:
	// result: (MOVWreg x)
	for {
		x := v.Args[0]
		v.reset(OpARM64MOVWreg)
		v.AddArg(x)
		return true
	}
}
// rewriteValueARM64_OpSignExt8to16 lowers an 8-to-16-bit sign extension to
// the ARM64 MOVBreg op.  Always matches.
func rewriteValueARM64_OpSignExt8to16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (SignExt8to16 x)
	// cond:
	// result: (MOVBreg x)
	for {
		x := v.Args[0]
		v.reset(OpARM64MOVBreg)
		v.AddArg(x)
		return true
	}
}
// rewriteValueARM64_OpSignExt8to32 lowers an 8-to-32-bit sign extension to
// the ARM64 MOVBreg op.  Always matches.
func rewriteValueARM64_OpSignExt8to32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (SignExt8to32 x)
	// cond:
	// result: (MOVBreg x)
	for {
		x := v.Args[0]
		v.reset(OpARM64MOVBreg)
		v.AddArg(x)
		return true
	}
}
// rewriteValueARM64_OpSignExt8to64 lowers an 8-to-64-bit sign extension to
// the ARM64 MOVBreg op.  Always matches.
func rewriteValueARM64_OpSignExt8to64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (SignExt8to64 x)
	// cond:
	// result: (MOVBreg x)
	for {
		x := v.Args[0]
		v.reset(OpARM64MOVBreg)
		v.AddArg(x)
		return true
	}
}
// rewriteValueARM64_OpSqrt lowers the generic Sqrt op to the ARM64 FSQRTD
// (double-precision square root) op.  Always matches.
func rewriteValueARM64_OpSqrt(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Sqrt x)
	// cond:
	// result: (FSQRTD x)
	for {
		x := v.Args[0]
		v.reset(OpARM64FSQRTD)
		v.AddArg(x)
		return true
	}
}
// rewriteValueARM64_OpStaticCall lowers a generic static call to the ARM64
// CALLstatic op, carrying over the argument width (AuxInt), target (Aux) and
// memory argument unchanged.  Always matches.
func rewriteValueARM64_OpStaticCall(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (StaticCall [argwid] {target} mem)
	// cond:
	// result: (CALLstatic [argwid] {target} mem)
	for {
		argwid := v.AuxInt
		target := v.Aux
		mem := v.Args[0]
		v.reset(OpARM64CALLstatic)
		v.AuxInt = argwid
		v.Aux = target
		v.AddArg(mem)
		return true
	}
}
// rewriteValueARM64_OpStore lowers a generic Store to the ARM64 store op
// matching the store width (AuxInt, in bytes) and whether the value is a
// float: MOVB/MOVH/MOVW/MOVDstore for integers, FMOVS/FMOVDstore for 4- and
// 8-byte floats.  Returns false when no rule matches (unsupported width).
func rewriteValueARM64_OpStore(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Store [1] ptr val mem)
	// cond:
	// result: (MOVBstore ptr val mem)
	for {
		if v.AuxInt != 1 {
			break
		}
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64MOVBstore)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (Store [2] ptr val mem)
	// cond:
	// result: (MOVHstore ptr val mem)
	for {
		if v.AuxInt != 2 {
			break
		}
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64MOVHstore)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (Store [4] ptr val mem)
	// cond: !is32BitFloat(val.Type)
	// result: (MOVWstore ptr val mem)
	for {
		if v.AuxInt != 4 {
			break
		}
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(!is32BitFloat(val.Type)) {
			break
		}
		v.reset(OpARM64MOVWstore)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (Store [8] ptr val mem)
	// cond: !is64BitFloat(val.Type)
	// result: (MOVDstore ptr val mem)
	for {
		if v.AuxInt != 8 {
			break
		}
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(!is64BitFloat(val.Type)) {
			break
		}
		v.reset(OpARM64MOVDstore)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (Store [4] ptr val mem)
	// cond: is32BitFloat(val.Type)
	// result: (FMOVSstore ptr val mem)
	for {
		if v.AuxInt != 4 {
			break
		}
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(is32BitFloat(val.Type)) {
			break
		}
		v.reset(OpARM64FMOVSstore)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (Store [8] ptr val mem)
	// cond: is64BitFloat(val.Type)
	// result: (FMOVDstore ptr val mem)
	for {
		if v.AuxInt != 8 {
			break
		}
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(is64BitFloat(val.Type)) {
			break
		}
		v.reset(OpARM64FMOVDstore)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpSub16 lowers the generic 16-bit subtraction to the
// ARM64 64-bit SUB; the low 16 bits of the result are what callers use.
// Always matches; returns true.
// Autogenerated from gen/ARM64.rules — change the rules file, not this code.
func rewriteValueARM64_OpSub16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Sub16 x y)
	// cond:
	// result: (SUB x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SUB)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueARM64_OpSub32 lowers the generic 32-bit subtraction to the
// ARM64 SUB. Always matches; returns true.
// Autogenerated from gen/ARM64.rules — change the rules file, not this code.
func rewriteValueARM64_OpSub32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Sub32 x y)
	// cond:
	// result: (SUB x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SUB)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueARM64_OpSub32F lowers the generic 32-bit float subtraction to
// the ARM64 FSUBS (single-precision FP subtract). Always matches; returns true.
// Autogenerated from gen/ARM64.rules — change the rules file, not this code.
func rewriteValueARM64_OpSub32F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Sub32F x y)
	// cond:
	// result: (FSUBS x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64FSUBS)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueARM64_OpSub64 lowers the generic 64-bit subtraction to the
// ARM64 SUB. Always matches; returns true.
// Autogenerated from gen/ARM64.rules — change the rules file, not this code.
func rewriteValueARM64_OpSub64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Sub64 x y)
	// cond:
	// result: (SUB x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SUB)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueARM64_OpSub64F lowers the generic 64-bit float subtraction to
// the ARM64 FSUBD (double-precision FP subtract). Always matches; returns true.
// Autogenerated from gen/ARM64.rules — change the rules file, not this code.
func rewriteValueARM64_OpSub64F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Sub64F x y)
	// cond:
	// result: (FSUBD x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64FSUBD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueARM64_OpSub8 lowers the generic 8-bit subtraction to the
// ARM64 64-bit SUB; the low 8 bits of the result are what callers use.
// Always matches; returns true.
// Autogenerated from gen/ARM64.rules — change the rules file, not this code.
func rewriteValueARM64_OpSub8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Sub8 x y)
	// cond:
	// result: (SUB x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SUB)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueARM64_OpSubPtr lowers the generic pointer subtraction to the
// ARM64 SUB (pointers are 64-bit on arm64). Always matches; returns true.
// Autogenerated from gen/ARM64.rules — change the rules file, not this code.
func rewriteValueARM64_OpSubPtr(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (SubPtr x y)
	// cond:
	// result: (SUB x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SUB)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueARM64_OpTrunc16to8 rewrites a 16-to-8-bit truncation as a
// plain copy: on arm64 narrow values live in full-width registers, so
// truncation needs no instruction. Always matches; returns true.
// Autogenerated from gen/ARM64.rules — change the rules file, not this code.
func rewriteValueARM64_OpTrunc16to8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Trunc16to8 x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
}
// rewriteValueARM64_OpTrunc32to16 rewrites a 32-to-16-bit truncation as a
// plain copy (no instruction needed on arm64). Always matches; returns true.
// Autogenerated from gen/ARM64.rules — change the rules file, not this code.
func rewriteValueARM64_OpTrunc32to16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Trunc32to16 x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
}
// rewriteValueARM64_OpTrunc32to8 rewrites a 32-to-8-bit truncation as a
// plain copy (no instruction needed on arm64). Always matches; returns true.
// Autogenerated from gen/ARM64.rules — change the rules file, not this code.
func rewriteValueARM64_OpTrunc32to8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Trunc32to8 x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
}
// rewriteValueARM64_OpTrunc64to16 rewrites a 64-to-16-bit truncation as a
// plain copy (no instruction needed on arm64). Always matches; returns true.
// Autogenerated from gen/ARM64.rules — change the rules file, not this code.
func rewriteValueARM64_OpTrunc64to16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Trunc64to16 x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
}
// rewriteValueARM64_OpTrunc64to32 rewrites a 64-to-32-bit truncation as a
// plain copy (no instruction needed on arm64). Always matches; returns true.
// Autogenerated from gen/ARM64.rules — change the rules file, not this code.
func rewriteValueARM64_OpTrunc64to32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Trunc64to32 x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
}
// rewriteValueARM64_OpTrunc64to8 rewrites a 64-to-8-bit truncation as a
// plain copy (no instruction needed on arm64). Always matches; returns true.
// Autogenerated from gen/ARM64.rules — change the rules file, not this code.
func rewriteValueARM64_OpTrunc64to8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Trunc64to8 x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
}
// rewriteValueARM64_OpXor16 lowers the generic 16-bit XOR to the ARM64
// 64-bit XOR; the low 16 bits of the result are what callers use.
// Always matches; returns true.
// Autogenerated from gen/ARM64.rules — change the rules file, not this code.
func rewriteValueARM64_OpXor16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Xor16 x y)
	// cond:
	// result: (XOR x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64XOR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueARM64_OpXor32 lowers the generic 32-bit XOR to the ARM64 XOR.
// Always matches; returns true.
// Autogenerated from gen/ARM64.rules — change the rules file, not this code.
func rewriteValueARM64_OpXor32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Xor32 x y)
	// cond:
	// result: (XOR x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64XOR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueARM64_OpXor64 lowers the generic 64-bit XOR to the ARM64 XOR.
// Always matches; returns true.
// Autogenerated from gen/ARM64.rules — change the rules file, not this code.
func rewriteValueARM64_OpXor64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Xor64 x y)
	// cond:
	// result: (XOR x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64XOR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueARM64_OpXor8 lowers the generic 8-bit XOR to the ARM64
// 64-bit XOR; the low 8 bits of the result are what callers use.
// Always matches; returns true.
// Autogenerated from gen/ARM64.rules — change the rules file, not this code.
func rewriteValueARM64_OpXor8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Xor8 x y)
	// cond:
	// result: (XOR x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64XOR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueARM64_OpZero lowers the generic Zero op (zero a block of
// memory) into ARM64-specific stores. The AuxInt packs size and alignment
// (decoded via SizeAndAlign). Rules are tried in order, so rule order is
// load-bearing:
//   - size 0: no-op, just forward the memory state;
//   - sizes 1, 2, 4, 8: one store of matching width with a zero constant;
//   - sizes 3, 5, 6, 7, 12, 16, 24: a short chain of stores covering the
//     block, widest-first, threaded through the memory argument;
//   - size%8 != 0 and size > 8: peel the unaligned tail into a recursive
//     smaller Zero over the 8-byte-aligned head;
//   - size%8 == 0, 24 < size <= 1024, Duff's device allowed: DUFFZERO with
//     an entry offset into the duffzero routine;
//   - size%8 == 0 and (size > 1024 or Duff disabled): LoweredZero loop,
//     given the pointer to the last element to clear.
// Returns true if a rule fired, false otherwise.
// Autogenerated from gen/ARM64.rules — change the rules file, not this code.
func rewriteValueARM64_OpZero(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Zero [s] _ mem)
	// cond: SizeAndAlign(s).Size() == 0
	// result: mem
	for {
		s := v.AuxInt
		mem := v.Args[1]
		if !(SizeAndAlign(s).Size() == 0) {
			break
		}
		v.reset(OpCopy)
		v.Type = mem.Type
		v.AddArg(mem)
		return true
	}
	// match: (Zero [s] ptr mem)
	// cond: SizeAndAlign(s).Size() == 1
	// result: (MOVBstore ptr (MOVDconst [0]) mem)
	for {
		s := v.AuxInt
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(SizeAndAlign(s).Size() == 1) {
			break
		}
		v.reset(OpARM64MOVBstore)
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Line, OpARM64MOVDconst, config.fe.TypeUInt64())
		v0.AuxInt = 0
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	// match: (Zero [s] ptr mem)
	// cond: SizeAndAlign(s).Size() == 2
	// result: (MOVHstore ptr (MOVDconst [0]) mem)
	for {
		s := v.AuxInt
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(SizeAndAlign(s).Size() == 2) {
			break
		}
		v.reset(OpARM64MOVHstore)
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Line, OpARM64MOVDconst, config.fe.TypeUInt64())
		v0.AuxInt = 0
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	// match: (Zero [s] ptr mem)
	// cond: SizeAndAlign(s).Size() == 4
	// result: (MOVWstore ptr (MOVDconst [0]) mem)
	for {
		s := v.AuxInt
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(SizeAndAlign(s).Size() == 4) {
			break
		}
		v.reset(OpARM64MOVWstore)
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Line, OpARM64MOVDconst, config.fe.TypeUInt64())
		v0.AuxInt = 0
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	// match: (Zero [s] ptr mem)
	// cond: SizeAndAlign(s).Size() == 8
	// result: (MOVDstore ptr (MOVDconst [0]) mem)
	for {
		s := v.AuxInt
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(SizeAndAlign(s).Size() == 8) {
			break
		}
		v.reset(OpARM64MOVDstore)
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Line, OpARM64MOVDconst, config.fe.TypeUInt64())
		v0.AuxInt = 0
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	// match: (Zero [s] ptr mem)
	// cond: SizeAndAlign(s).Size() == 3
	// result: (MOVBstore [2] ptr (MOVDconst [0]) 		(MOVHstore ptr (MOVDconst [0]) mem))
	for {
		s := v.AuxInt
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(SizeAndAlign(s).Size() == 3) {
			break
		}
		v.reset(OpARM64MOVBstore)
		v.AuxInt = 2
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Line, OpARM64MOVDconst, config.fe.TypeUInt64())
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpARM64MOVHstore, TypeMem)
		v1.AddArg(ptr)
		v2 := b.NewValue0(v.Line, OpARM64MOVDconst, config.fe.TypeUInt64())
		v2.AuxInt = 0
		v1.AddArg(v2)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	// match: (Zero [s] ptr mem)
	// cond: SizeAndAlign(s).Size() == 5
	// result: (MOVBstore [4] ptr (MOVDconst [0]) 		(MOVWstore ptr (MOVDconst [0]) mem))
	for {
		s := v.AuxInt
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(SizeAndAlign(s).Size() == 5) {
			break
		}
		v.reset(OpARM64MOVBstore)
		v.AuxInt = 4
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Line, OpARM64MOVDconst, config.fe.TypeUInt64())
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpARM64MOVWstore, TypeMem)
		v1.AddArg(ptr)
		v2 := b.NewValue0(v.Line, OpARM64MOVDconst, config.fe.TypeUInt64())
		v2.AuxInt = 0
		v1.AddArg(v2)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	// match: (Zero [s] ptr mem)
	// cond: SizeAndAlign(s).Size() == 6
	// result: (MOVHstore [4] ptr (MOVDconst [0]) 		(MOVWstore ptr (MOVDconst [0]) mem))
	for {
		s := v.AuxInt
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(SizeAndAlign(s).Size() == 6) {
			break
		}
		v.reset(OpARM64MOVHstore)
		v.AuxInt = 4
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Line, OpARM64MOVDconst, config.fe.TypeUInt64())
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpARM64MOVWstore, TypeMem)
		v1.AddArg(ptr)
		v2 := b.NewValue0(v.Line, OpARM64MOVDconst, config.fe.TypeUInt64())
		v2.AuxInt = 0
		v1.AddArg(v2)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	// match: (Zero [s] ptr mem)
	// cond: SizeAndAlign(s).Size() == 7
	// result: (MOVBstore [6] ptr (MOVDconst [0]) 		(MOVHstore [4] ptr (MOVDconst [0]) 			(MOVWstore ptr (MOVDconst [0]) mem)))
	for {
		s := v.AuxInt
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(SizeAndAlign(s).Size() == 7) {
			break
		}
		v.reset(OpARM64MOVBstore)
		v.AuxInt = 6
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Line, OpARM64MOVDconst, config.fe.TypeUInt64())
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpARM64MOVHstore, TypeMem)
		v1.AuxInt = 4
		v1.AddArg(ptr)
		v2 := b.NewValue0(v.Line, OpARM64MOVDconst, config.fe.TypeUInt64())
		v2.AuxInt = 0
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpARM64MOVWstore, TypeMem)
		v3.AddArg(ptr)
		v4 := b.NewValue0(v.Line, OpARM64MOVDconst, config.fe.TypeUInt64())
		v4.AuxInt = 0
		v3.AddArg(v4)
		v3.AddArg(mem)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
	// match: (Zero [s] ptr mem)
	// cond: SizeAndAlign(s).Size() == 12
	// result: (MOVWstore [8] ptr (MOVDconst [0]) 		(MOVDstore ptr (MOVDconst [0]) mem))
	for {
		s := v.AuxInt
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(SizeAndAlign(s).Size() == 12) {
			break
		}
		v.reset(OpARM64MOVWstore)
		v.AuxInt = 8
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Line, OpARM64MOVDconst, config.fe.TypeUInt64())
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpARM64MOVDstore, TypeMem)
		v1.AddArg(ptr)
		v2 := b.NewValue0(v.Line, OpARM64MOVDconst, config.fe.TypeUInt64())
		v2.AuxInt = 0
		v1.AddArg(v2)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	// match: (Zero [s] ptr mem)
	// cond: SizeAndAlign(s).Size() == 16
	// result: (MOVDstore [8] ptr (MOVDconst [0]) 		(MOVDstore ptr (MOVDconst [0]) mem))
	for {
		s := v.AuxInt
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(SizeAndAlign(s).Size() == 16) {
			break
		}
		v.reset(OpARM64MOVDstore)
		v.AuxInt = 8
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Line, OpARM64MOVDconst, config.fe.TypeUInt64())
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpARM64MOVDstore, TypeMem)
		v1.AddArg(ptr)
		v2 := b.NewValue0(v.Line, OpARM64MOVDconst, config.fe.TypeUInt64())
		v2.AuxInt = 0
		v1.AddArg(v2)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	// match: (Zero [s] ptr mem)
	// cond: SizeAndAlign(s).Size() == 24
	// result: (MOVDstore [16] ptr (MOVDconst [0]) 		(MOVDstore [8] ptr (MOVDconst [0]) 			(MOVDstore ptr (MOVDconst [0]) mem)))
	for {
		s := v.AuxInt
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(SizeAndAlign(s).Size() == 24) {
			break
		}
		v.reset(OpARM64MOVDstore)
		v.AuxInt = 16
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Line, OpARM64MOVDconst, config.fe.TypeUInt64())
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpARM64MOVDstore, TypeMem)
		v1.AuxInt = 8
		v1.AddArg(ptr)
		v2 := b.NewValue0(v.Line, OpARM64MOVDconst, config.fe.TypeUInt64())
		v2.AuxInt = 0
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpARM64MOVDstore, TypeMem)
		v3.AddArg(ptr)
		v4 := b.NewValue0(v.Line, OpARM64MOVDconst, config.fe.TypeUInt64())
		v4.AuxInt = 0
		v3.AddArg(v4)
		v3.AddArg(mem)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
	// match: (Zero [s] ptr mem)
	// cond: SizeAndAlign(s).Size()%8 != 0 && SizeAndAlign(s).Size() > 8
	// result: (Zero [MakeSizeAndAlign(SizeAndAlign(s).Size()%8, 1).Int64()] 		(OffPtr <ptr.Type> ptr [SizeAndAlign(s).Size()-SizeAndAlign(s).Size()%8]) 		(Zero [MakeSizeAndAlign(SizeAndAlign(s).Size()-SizeAndAlign(s).Size()%8, 1).Int64()] ptr mem))
	for {
		s := v.AuxInt
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(SizeAndAlign(s).Size()%8 != 0 && SizeAndAlign(s).Size() > 8) {
			break
		}
		v.reset(OpZero)
		v.AuxInt = MakeSizeAndAlign(SizeAndAlign(s).Size()%8, 1).Int64()
		v0 := b.NewValue0(v.Line, OpOffPtr, ptr.Type)
		v0.AuxInt = SizeAndAlign(s).Size() - SizeAndAlign(s).Size()%8
		v0.AddArg(ptr)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpZero, TypeMem)
		v1.AuxInt = MakeSizeAndAlign(SizeAndAlign(s).Size()-SizeAndAlign(s).Size()%8, 1).Int64()
		v1.AddArg(ptr)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	// match: (Zero [s] ptr mem)
	// cond: SizeAndAlign(s).Size()%8 == 0 && SizeAndAlign(s).Size() > 24 && SizeAndAlign(s).Size() <= 8*128 	&& !config.noDuffDevice
	// result: (DUFFZERO [4 * (128 - int64(SizeAndAlign(s).Size()/8))] ptr mem)
	for {
		s := v.AuxInt
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(SizeAndAlign(s).Size()%8 == 0 && SizeAndAlign(s).Size() > 24 && SizeAndAlign(s).Size() <= 8*128 && !config.noDuffDevice) {
			break
		}
		v.reset(OpARM64DUFFZERO)
		v.AuxInt = 4 * (128 - int64(SizeAndAlign(s).Size()/8))
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Zero [s] ptr mem)
	// cond: SizeAndAlign(s).Size()%8 == 0 && (SizeAndAlign(s).Size() > 8*128 || config.noDuffDevice)
	// result: (LoweredZero 		ptr 		(ADDconst <ptr.Type> [SizeAndAlign(s).Size()-moveSize(SizeAndAlign(s).Align(), config)] ptr) 		mem)
	for {
		s := v.AuxInt
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(SizeAndAlign(s).Size()%8 == 0 && (SizeAndAlign(s).Size() > 8*128 || config.noDuffDevice)) {
			break
		}
		v.reset(OpARM64LoweredZero)
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Line, OpARM64ADDconst, ptr.Type)
		v0.AuxInt = SizeAndAlign(s).Size() - moveSize(SizeAndAlign(s).Align(), config)
		v0.AddArg(ptr)
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpZeroExt16to32 lowers the generic 16-to-32-bit zero
// extension to the ARM64 MOVHUreg (unsigned halfword move). Always matches;
// returns true.
// Autogenerated from gen/ARM64.rules — change the rules file, not this code.
func rewriteValueARM64_OpZeroExt16to32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (ZeroExt16to32 x)
	// cond:
	// result: (MOVHUreg x)
	for {
		x := v.Args[0]
		v.reset(OpARM64MOVHUreg)
		v.AddArg(x)
		return true
	}
}
// rewriteValueARM64_OpZeroExt16to64 lowers the generic 16-to-64-bit zero
// extension to the ARM64 MOVHUreg (unsigned halfword move). Always matches;
// returns true.
// Autogenerated from gen/ARM64.rules — change the rules file, not this code.
func rewriteValueARM64_OpZeroExt16to64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (ZeroExt16to64 x)
	// cond:
	// result: (MOVHUreg x)
	for {
		x := v.Args[0]
		v.reset(OpARM64MOVHUreg)
		v.AddArg(x)
		return true
	}
}
// rewriteValueARM64_OpZeroExt32to64 lowers the generic 32-to-64-bit zero
// extension to the ARM64 MOVWUreg (unsigned word move). Always matches;
// returns true.
// Autogenerated from gen/ARM64.rules — change the rules file, not this code.
func rewriteValueARM64_OpZeroExt32to64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (ZeroExt32to64 x)
	// cond:
	// result: (MOVWUreg x)
	for {
		x := v.Args[0]
		v.reset(OpARM64MOVWUreg)
		v.AddArg(x)
		return true
	}
}
// rewriteValueARM64_OpZeroExt8to16 lowers the generic 8-to-16-bit zero
// extension to the ARM64 MOVBUreg (unsigned byte move). Always matches;
// returns true.
// Autogenerated from gen/ARM64.rules — change the rules file, not this code.
func rewriteValueARM64_OpZeroExt8to16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (ZeroExt8to16 x)
	// cond:
	// result: (MOVBUreg x)
	for {
		x := v.Args[0]
		v.reset(OpARM64MOVBUreg)
		v.AddArg(x)
		return true
	}
}
// rewriteValueARM64_OpZeroExt8to32 lowers the generic 8-to-32-bit zero
// extension to the ARM64 MOVBUreg (unsigned byte move). Always matches;
// returns true.
// Autogenerated from gen/ARM64.rules — change the rules file, not this code.
func rewriteValueARM64_OpZeroExt8to32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (ZeroExt8to32 x)
	// cond:
	// result: (MOVBUreg x)
	for {
		x := v.Args[0]
		v.reset(OpARM64MOVBUreg)
		v.AddArg(x)
		return true
	}
}
// rewriteValueARM64_OpZeroExt8to64 lowers the generic 8-to-64-bit zero
// extension to the ARM64 MOVBUreg (unsigned byte move). Always matches;
// returns true.
// Autogenerated from gen/ARM64.rules — change the rules file, not this code.
func rewriteValueARM64_OpZeroExt8to64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (ZeroExt8to64 x)
	// cond:
	// result: (MOVBUreg x)
	for {
		x := v.Args[0]
		v.reset(OpARM64MOVBUreg)
		v.AddArg(x)
		return true
	}
}
 14722  func rewriteBlockARM64(b *Block) bool {
 14723  	switch b.Kind {
 14724  	case BlockARM64EQ:
 14725  		// match: (EQ (FlagEQ) yes no)
 14726  		// cond:
 14727  		// result: (First nil yes no)
 14728  		for {
 14729  			v := b.Control
 14730  			if v.Op != OpARM64FlagEQ {
 14731  				break
 14732  			}
 14733  			yes := b.Succs[0]
 14734  			no := b.Succs[1]
 14735  			b.Kind = BlockFirst
 14736  			b.SetControl(nil)
 14737  			_ = yes
 14738  			_ = no
 14739  			return true
 14740  		}
 14741  		// match: (EQ (FlagLT_ULT) yes no)
 14742  		// cond:
 14743  		// result: (First nil no yes)
 14744  		for {
 14745  			v := b.Control
 14746  			if v.Op != OpARM64FlagLT_ULT {
 14747  				break
 14748  			}
 14749  			yes := b.Succs[0]
 14750  			no := b.Succs[1]
 14751  			b.Kind = BlockFirst
 14752  			b.SetControl(nil)
 14753  			b.swapSuccessors()
 14754  			_ = no
 14755  			_ = yes
 14756  			return true
 14757  		}
 14758  		// match: (EQ (FlagLT_UGT) yes no)
 14759  		// cond:
 14760  		// result: (First nil no yes)
 14761  		for {
 14762  			v := b.Control
 14763  			if v.Op != OpARM64FlagLT_UGT {
 14764  				break
 14765  			}
 14766  			yes := b.Succs[0]
 14767  			no := b.Succs[1]
 14768  			b.Kind = BlockFirst
 14769  			b.SetControl(nil)
 14770  			b.swapSuccessors()
 14771  			_ = no
 14772  			_ = yes
 14773  			return true
 14774  		}
 14775  		// match: (EQ (FlagGT_ULT) yes no)
 14776  		// cond:
 14777  		// result: (First nil no yes)
 14778  		for {
 14779  			v := b.Control
 14780  			if v.Op != OpARM64FlagGT_ULT {
 14781  				break
 14782  			}
 14783  			yes := b.Succs[0]
 14784  			no := b.Succs[1]
 14785  			b.Kind = BlockFirst
 14786  			b.SetControl(nil)
 14787  			b.swapSuccessors()
 14788  			_ = no
 14789  			_ = yes
 14790  			return true
 14791  		}
 14792  		// match: (EQ (FlagGT_UGT) yes no)
 14793  		// cond:
 14794  		// result: (First nil no yes)
 14795  		for {
 14796  			v := b.Control
 14797  			if v.Op != OpARM64FlagGT_UGT {
 14798  				break
 14799  			}
 14800  			yes := b.Succs[0]
 14801  			no := b.Succs[1]
 14802  			b.Kind = BlockFirst
 14803  			b.SetControl(nil)
 14804  			b.swapSuccessors()
 14805  			_ = no
 14806  			_ = yes
 14807  			return true
 14808  		}
 14809  		// match: (EQ (InvertFlags cmp) yes no)
 14810  		// cond:
 14811  		// result: (EQ cmp yes no)
 14812  		for {
 14813  			v := b.Control
 14814  			if v.Op != OpARM64InvertFlags {
 14815  				break
 14816  			}
 14817  			cmp := v.Args[0]
 14818  			yes := b.Succs[0]
 14819  			no := b.Succs[1]
 14820  			b.Kind = BlockARM64EQ
 14821  			b.SetControl(cmp)
 14822  			_ = yes
 14823  			_ = no
 14824  			return true
 14825  		}
 14826  	case BlockARM64GE:
 14827  		// match: (GE (FlagEQ) yes no)
 14828  		// cond:
 14829  		// result: (First nil yes no)
 14830  		for {
 14831  			v := b.Control
 14832  			if v.Op != OpARM64FlagEQ {
 14833  				break
 14834  			}
 14835  			yes := b.Succs[0]
 14836  			no := b.Succs[1]
 14837  			b.Kind = BlockFirst
 14838  			b.SetControl(nil)
 14839  			_ = yes
 14840  			_ = no
 14841  			return true
 14842  		}
 14843  		// match: (GE (FlagLT_ULT) yes no)
 14844  		// cond:
 14845  		// result: (First nil no yes)
 14846  		for {
 14847  			v := b.Control
 14848  			if v.Op != OpARM64FlagLT_ULT {
 14849  				break
 14850  			}
 14851  			yes := b.Succs[0]
 14852  			no := b.Succs[1]
 14853  			b.Kind = BlockFirst
 14854  			b.SetControl(nil)
 14855  			b.swapSuccessors()
 14856  			_ = no
 14857  			_ = yes
 14858  			return true
 14859  		}
 14860  		// match: (GE (FlagLT_UGT) yes no)
 14861  		// cond:
 14862  		// result: (First nil no yes)
 14863  		for {
 14864  			v := b.Control
 14865  			if v.Op != OpARM64FlagLT_UGT {
 14866  				break
 14867  			}
 14868  			yes := b.Succs[0]
 14869  			no := b.Succs[1]
 14870  			b.Kind = BlockFirst
 14871  			b.SetControl(nil)
 14872  			b.swapSuccessors()
 14873  			_ = no
 14874  			_ = yes
 14875  			return true
 14876  		}
 14877  		// match: (GE (FlagGT_ULT) yes no)
 14878  		// cond:
 14879  		// result: (First nil yes no)
 14880  		for {
 14881  			v := b.Control
 14882  			if v.Op != OpARM64FlagGT_ULT {
 14883  				break
 14884  			}
 14885  			yes := b.Succs[0]
 14886  			no := b.Succs[1]
 14887  			b.Kind = BlockFirst
 14888  			b.SetControl(nil)
 14889  			_ = yes
 14890  			_ = no
 14891  			return true
 14892  		}
 14893  		// match: (GE (FlagGT_UGT) yes no)
 14894  		// cond:
 14895  		// result: (First nil yes no)
 14896  		for {
 14897  			v := b.Control
 14898  			if v.Op != OpARM64FlagGT_UGT {
 14899  				break
 14900  			}
 14901  			yes := b.Succs[0]
 14902  			no := b.Succs[1]
 14903  			b.Kind = BlockFirst
 14904  			b.SetControl(nil)
 14905  			_ = yes
 14906  			_ = no
 14907  			return true
 14908  		}
 14909  		// match: (GE (InvertFlags cmp) yes no)
 14910  		// cond:
 14911  		// result: (LE cmp yes no)
 14912  		for {
 14913  			v := b.Control
 14914  			if v.Op != OpARM64InvertFlags {
 14915  				break
 14916  			}
 14917  			cmp := v.Args[0]
 14918  			yes := b.Succs[0]
 14919  			no := b.Succs[1]
 14920  			b.Kind = BlockARM64LE
 14921  			b.SetControl(cmp)
 14922  			_ = yes
 14923  			_ = no
 14924  			return true
 14925  		}
 14926  	case BlockARM64GT:
 14927  		// match: (GT (FlagEQ) yes no)
 14928  		// cond:
 14929  		// result: (First nil no yes)
 14930  		for {
 14931  			v := b.Control
 14932  			if v.Op != OpARM64FlagEQ {
 14933  				break
 14934  			}
 14935  			yes := b.Succs[0]
 14936  			no := b.Succs[1]
 14937  			b.Kind = BlockFirst
 14938  			b.SetControl(nil)
 14939  			b.swapSuccessors()
 14940  			_ = no
 14941  			_ = yes
 14942  			return true
 14943  		}
 14944  		// match: (GT (FlagLT_ULT) yes no)
 14945  		// cond:
 14946  		// result: (First nil no yes)
 14947  		for {
 14948  			v := b.Control
 14949  			if v.Op != OpARM64FlagLT_ULT {
 14950  				break
 14951  			}
 14952  			yes := b.Succs[0]
 14953  			no := b.Succs[1]
 14954  			b.Kind = BlockFirst
 14955  			b.SetControl(nil)
 14956  			b.swapSuccessors()
 14957  			_ = no
 14958  			_ = yes
 14959  			return true
 14960  		}
 14961  		// match: (GT (FlagLT_UGT) yes no)
 14962  		// cond:
 14963  		// result: (First nil no yes)
 14964  		for {
 14965  			v := b.Control
 14966  			if v.Op != OpARM64FlagLT_UGT {
 14967  				break
 14968  			}
 14969  			yes := b.Succs[0]
 14970  			no := b.Succs[1]
 14971  			b.Kind = BlockFirst
 14972  			b.SetControl(nil)
 14973  			b.swapSuccessors()
 14974  			_ = no
 14975  			_ = yes
 14976  			return true
 14977  		}
 14978  		// match: (GT (FlagGT_ULT) yes no)
 14979  		// cond:
 14980  		// result: (First nil yes no)
 14981  		for {
 14982  			v := b.Control
 14983  			if v.Op != OpARM64FlagGT_ULT {
 14984  				break
 14985  			}
 14986  			yes := b.Succs[0]
 14987  			no := b.Succs[1]
 14988  			b.Kind = BlockFirst
 14989  			b.SetControl(nil)
 14990  			_ = yes
 14991  			_ = no
 14992  			return true
 14993  		}
 14994  		// match: (GT (FlagGT_UGT) yes no)
 14995  		// cond:
 14996  		// result: (First nil yes no)
 14997  		for {
 14998  			v := b.Control
 14999  			if v.Op != OpARM64FlagGT_UGT {
 15000  				break
 15001  			}
 15002  			yes := b.Succs[0]
 15003  			no := b.Succs[1]
 15004  			b.Kind = BlockFirst
 15005  			b.SetControl(nil)
 15006  			_ = yes
 15007  			_ = no
 15008  			return true
 15009  		}
 15010  		// match: (GT (InvertFlags cmp) yes no)
 15011  		// cond:
 15012  		// result: (LT cmp yes no)
 15013  		for {
 15014  			v := b.Control
 15015  			if v.Op != OpARM64InvertFlags {
 15016  				break
 15017  			}
 15018  			cmp := v.Args[0]
 15019  			yes := b.Succs[0]
 15020  			no := b.Succs[1]
 15021  			b.Kind = BlockARM64LT
 15022  			b.SetControl(cmp)
 15023  			_ = yes
 15024  			_ = no
 15025  			return true
 15026  		}
 15027  	case BlockIf:
 15028  		// match: (If (Equal cc) yes no)
 15029  		// cond:
 15030  		// result: (EQ cc yes no)
 15031  		for {
 15032  			v := b.Control
 15033  			if v.Op != OpARM64Equal {
 15034  				break
 15035  			}
 15036  			cc := v.Args[0]
 15037  			yes := b.Succs[0]
 15038  			no := b.Succs[1]
 15039  			b.Kind = BlockARM64EQ
 15040  			b.SetControl(cc)
 15041  			_ = yes
 15042  			_ = no
 15043  			return true
 15044  		}
 15045  		// match: (If (NotEqual cc) yes no)
 15046  		// cond:
 15047  		// result: (NE cc yes no)
 15048  		for {
 15049  			v := b.Control
 15050  			if v.Op != OpARM64NotEqual {
 15051  				break
 15052  			}
 15053  			cc := v.Args[0]
 15054  			yes := b.Succs[0]
 15055  			no := b.Succs[1]
 15056  			b.Kind = BlockARM64NE
 15057  			b.SetControl(cc)
 15058  			_ = yes
 15059  			_ = no
 15060  			return true
 15061  		}
 15062  		// match: (If (LessThan cc) yes no)
 15063  		// cond:
 15064  		// result: (LT cc yes no)
 15065  		for {
 15066  			v := b.Control
 15067  			if v.Op != OpARM64LessThan {
 15068  				break
 15069  			}
 15070  			cc := v.Args[0]
 15071  			yes := b.Succs[0]
 15072  			no := b.Succs[1]
 15073  			b.Kind = BlockARM64LT
 15074  			b.SetControl(cc)
 15075  			_ = yes
 15076  			_ = no
 15077  			return true
 15078  		}
 15079  		// match: (If (LessThanU cc) yes no)
 15080  		// cond:
 15081  		// result: (ULT cc yes no)
 15082  		for {
 15083  			v := b.Control
 15084  			if v.Op != OpARM64LessThanU {
 15085  				break
 15086  			}
 15087  			cc := v.Args[0]
 15088  			yes := b.Succs[0]
 15089  			no := b.Succs[1]
 15090  			b.Kind = BlockARM64ULT
 15091  			b.SetControl(cc)
 15092  			_ = yes
 15093  			_ = no
 15094  			return true
 15095  		}
 15096  		// match: (If (LessEqual cc) yes no)
 15097  		// cond:
 15098  		// result: (LE cc yes no)
 15099  		for {
 15100  			v := b.Control
 15101  			if v.Op != OpARM64LessEqual {
 15102  				break
 15103  			}
 15104  			cc := v.Args[0]
 15105  			yes := b.Succs[0]
 15106  			no := b.Succs[1]
 15107  			b.Kind = BlockARM64LE
 15108  			b.SetControl(cc)
 15109  			_ = yes
 15110  			_ = no
 15111  			return true
 15112  		}
 15113  		// match: (If (LessEqualU cc) yes no)
 15114  		// cond:
 15115  		// result: (ULE cc yes no)
 15116  		for {
 15117  			v := b.Control
 15118  			if v.Op != OpARM64LessEqualU {
 15119  				break
 15120  			}
 15121  			cc := v.Args[0]
 15122  			yes := b.Succs[0]
 15123  			no := b.Succs[1]
 15124  			b.Kind = BlockARM64ULE
 15125  			b.SetControl(cc)
 15126  			_ = yes
 15127  			_ = no
 15128  			return true
 15129  		}
 15130  		// match: (If (GreaterThan cc) yes no)
 15131  		// cond:
 15132  		// result: (GT cc yes no)
 15133  		for {
 15134  			v := b.Control
 15135  			if v.Op != OpARM64GreaterThan {
 15136  				break
 15137  			}
 15138  			cc := v.Args[0]
 15139  			yes := b.Succs[0]
 15140  			no := b.Succs[1]
 15141  			b.Kind = BlockARM64GT
 15142  			b.SetControl(cc)
 15143  			_ = yes
 15144  			_ = no
 15145  			return true
 15146  		}
 15147  		// match: (If (GreaterThanU cc) yes no)
 15148  		// cond:
 15149  		// result: (UGT cc yes no)
 15150  		for {
 15151  			v := b.Control
 15152  			if v.Op != OpARM64GreaterThanU {
 15153  				break
 15154  			}
 15155  			cc := v.Args[0]
 15156  			yes := b.Succs[0]
 15157  			no := b.Succs[1]
 15158  			b.Kind = BlockARM64UGT
 15159  			b.SetControl(cc)
 15160  			_ = yes
 15161  			_ = no
 15162  			return true
 15163  		}
 15164  		// match: (If (GreaterEqual cc) yes no)
 15165  		// cond:
 15166  		// result: (GE cc yes no)
 15167  		for {
 15168  			v := b.Control
 15169  			if v.Op != OpARM64GreaterEqual {
 15170  				break
 15171  			}
 15172  			cc := v.Args[0]
 15173  			yes := b.Succs[0]
 15174  			no := b.Succs[1]
 15175  			b.Kind = BlockARM64GE
 15176  			b.SetControl(cc)
 15177  			_ = yes
 15178  			_ = no
 15179  			return true
 15180  		}
 15181  		// match: (If (GreaterEqualU cc) yes no)
 15182  		// cond:
 15183  		// result: (UGE cc yes no)
 15184  		for {
 15185  			v := b.Control
 15186  			if v.Op != OpARM64GreaterEqualU {
 15187  				break
 15188  			}
 15189  			cc := v.Args[0]
 15190  			yes := b.Succs[0]
 15191  			no := b.Succs[1]
 15192  			b.Kind = BlockARM64UGE
 15193  			b.SetControl(cc)
 15194  			_ = yes
 15195  			_ = no
 15196  			return true
 15197  		}
 15198  		// match: (If cond yes no)
 15199  		// cond:
 15200  		// result: (NE (CMPconst [0] cond) yes no)
 15201  		for {
 15202  			v := b.Control
 15203  			_ = v
 15204  			cond := b.Control
 15205  			yes := b.Succs[0]
 15206  			no := b.Succs[1]
 15207  			b.Kind = BlockARM64NE
 15208  			v0 := b.NewValue0(v.Line, OpARM64CMPconst, TypeFlags)
 15209  			v0.AuxInt = 0
 15210  			v0.AddArg(cond)
 15211  			b.SetControl(v0)
 15212  			_ = yes
 15213  			_ = no
 15214  			return true
 15215  		}
 15216  	case BlockARM64LE:
 15217  		// match: (LE (FlagEQ) yes no)
 15218  		// cond:
 15219  		// result: (First nil yes no)
 15220  		for {
 15221  			v := b.Control
 15222  			if v.Op != OpARM64FlagEQ {
 15223  				break
 15224  			}
 15225  			yes := b.Succs[0]
 15226  			no := b.Succs[1]
 15227  			b.Kind = BlockFirst
 15228  			b.SetControl(nil)
 15229  			_ = yes
 15230  			_ = no
 15231  			return true
 15232  		}
 15233  		// match: (LE (FlagLT_ULT) yes no)
 15234  		// cond:
 15235  		// result: (First nil yes no)
 15236  		for {
 15237  			v := b.Control
 15238  			if v.Op != OpARM64FlagLT_ULT {
 15239  				break
 15240  			}
 15241  			yes := b.Succs[0]
 15242  			no := b.Succs[1]
 15243  			b.Kind = BlockFirst
 15244  			b.SetControl(nil)
 15245  			_ = yes
 15246  			_ = no
 15247  			return true
 15248  		}
 15249  		// match: (LE (FlagLT_UGT) yes no)
 15250  		// cond:
 15251  		// result: (First nil yes no)
 15252  		for {
 15253  			v := b.Control
 15254  			if v.Op != OpARM64FlagLT_UGT {
 15255  				break
 15256  			}
 15257  			yes := b.Succs[0]
 15258  			no := b.Succs[1]
 15259  			b.Kind = BlockFirst
 15260  			b.SetControl(nil)
 15261  			_ = yes
 15262  			_ = no
 15263  			return true
 15264  		}
 15265  		// match: (LE (FlagGT_ULT) yes no)
 15266  		// cond:
 15267  		// result: (First nil no yes)
 15268  		for {
 15269  			v := b.Control
 15270  			if v.Op != OpARM64FlagGT_ULT {
 15271  				break
 15272  			}
 15273  			yes := b.Succs[0]
 15274  			no := b.Succs[1]
 15275  			b.Kind = BlockFirst
 15276  			b.SetControl(nil)
 15277  			b.swapSuccessors()
 15278  			_ = no
 15279  			_ = yes
 15280  			return true
 15281  		}
 15282  		// match: (LE (FlagGT_UGT) yes no)
 15283  		// cond:
 15284  		// result: (First nil no yes)
 15285  		for {
 15286  			v := b.Control
 15287  			if v.Op != OpARM64FlagGT_UGT {
 15288  				break
 15289  			}
 15290  			yes := b.Succs[0]
 15291  			no := b.Succs[1]
 15292  			b.Kind = BlockFirst
 15293  			b.SetControl(nil)
 15294  			b.swapSuccessors()
 15295  			_ = no
 15296  			_ = yes
 15297  			return true
 15298  		}
 15299  		// match: (LE (InvertFlags cmp) yes no)
 15300  		// cond:
 15301  		// result: (GE cmp yes no)
 15302  		for {
 15303  			v := b.Control
 15304  			if v.Op != OpARM64InvertFlags {
 15305  				break
 15306  			}
 15307  			cmp := v.Args[0]
 15308  			yes := b.Succs[0]
 15309  			no := b.Succs[1]
 15310  			b.Kind = BlockARM64GE
 15311  			b.SetControl(cmp)
 15312  			_ = yes
 15313  			_ = no
 15314  			return true
 15315  		}
 15316  	case BlockARM64LT:
 15317  		// match: (LT (FlagEQ) yes no)
 15318  		// cond:
 15319  		// result: (First nil no yes)
 15320  		for {
 15321  			v := b.Control
 15322  			if v.Op != OpARM64FlagEQ {
 15323  				break
 15324  			}
 15325  			yes := b.Succs[0]
 15326  			no := b.Succs[1]
 15327  			b.Kind = BlockFirst
 15328  			b.SetControl(nil)
 15329  			b.swapSuccessors()
 15330  			_ = no
 15331  			_ = yes
 15332  			return true
 15333  		}
 15334  		// match: (LT (FlagLT_ULT) yes no)
 15335  		// cond:
 15336  		// result: (First nil yes no)
 15337  		for {
 15338  			v := b.Control
 15339  			if v.Op != OpARM64FlagLT_ULT {
 15340  				break
 15341  			}
 15342  			yes := b.Succs[0]
 15343  			no := b.Succs[1]
 15344  			b.Kind = BlockFirst
 15345  			b.SetControl(nil)
 15346  			_ = yes
 15347  			_ = no
 15348  			return true
 15349  		}
 15350  		// match: (LT (FlagLT_UGT) yes no)
 15351  		// cond:
 15352  		// result: (First nil yes no)
 15353  		for {
 15354  			v := b.Control
 15355  			if v.Op != OpARM64FlagLT_UGT {
 15356  				break
 15357  			}
 15358  			yes := b.Succs[0]
 15359  			no := b.Succs[1]
 15360  			b.Kind = BlockFirst
 15361  			b.SetControl(nil)
 15362  			_ = yes
 15363  			_ = no
 15364  			return true
 15365  		}
 15366  		// match: (LT (FlagGT_ULT) yes no)
 15367  		// cond:
 15368  		// result: (First nil no yes)
 15369  		for {
 15370  			v := b.Control
 15371  			if v.Op != OpARM64FlagGT_ULT {
 15372  				break
 15373  			}
 15374  			yes := b.Succs[0]
 15375  			no := b.Succs[1]
 15376  			b.Kind = BlockFirst
 15377  			b.SetControl(nil)
 15378  			b.swapSuccessors()
 15379  			_ = no
 15380  			_ = yes
 15381  			return true
 15382  		}
 15383  		// match: (LT (FlagGT_UGT) yes no)
 15384  		// cond:
 15385  		// result: (First nil no yes)
 15386  		for {
 15387  			v := b.Control
 15388  			if v.Op != OpARM64FlagGT_UGT {
 15389  				break
 15390  			}
 15391  			yes := b.Succs[0]
 15392  			no := b.Succs[1]
 15393  			b.Kind = BlockFirst
 15394  			b.SetControl(nil)
 15395  			b.swapSuccessors()
 15396  			_ = no
 15397  			_ = yes
 15398  			return true
 15399  		}
 15400  		// match: (LT (InvertFlags cmp) yes no)
 15401  		// cond:
 15402  		// result: (GT cmp yes no)
 15403  		for {
 15404  			v := b.Control
 15405  			if v.Op != OpARM64InvertFlags {
 15406  				break
 15407  			}
 15408  			cmp := v.Args[0]
 15409  			yes := b.Succs[0]
 15410  			no := b.Succs[1]
 15411  			b.Kind = BlockARM64GT
 15412  			b.SetControl(cmp)
 15413  			_ = yes
 15414  			_ = no
 15415  			return true
 15416  		}
 15417  	case BlockARM64NE:
 15418  		// match: (NE (CMPconst [0] (Equal cc)) yes no)
 15419  		// cond:
 15420  		// result: (EQ cc yes no)
 15421  		for {
 15422  			v := b.Control
 15423  			if v.Op != OpARM64CMPconst {
 15424  				break
 15425  			}
 15426  			if v.AuxInt != 0 {
 15427  				break
 15428  			}
 15429  			v_0 := v.Args[0]
 15430  			if v_0.Op != OpARM64Equal {
 15431  				break
 15432  			}
 15433  			cc := v_0.Args[0]
 15434  			yes := b.Succs[0]
 15435  			no := b.Succs[1]
 15436  			b.Kind = BlockARM64EQ
 15437  			b.SetControl(cc)
 15438  			_ = yes
 15439  			_ = no
 15440  			return true
 15441  		}
 15442  		// match: (NE (CMPconst [0] (NotEqual cc)) yes no)
 15443  		// cond:
 15444  		// result: (NE cc yes no)
 15445  		for {
 15446  			v := b.Control
 15447  			if v.Op != OpARM64CMPconst {
 15448  				break
 15449  			}
 15450  			if v.AuxInt != 0 {
 15451  				break
 15452  			}
 15453  			v_0 := v.Args[0]
 15454  			if v_0.Op != OpARM64NotEqual {
 15455  				break
 15456  			}
 15457  			cc := v_0.Args[0]
 15458  			yes := b.Succs[0]
 15459  			no := b.Succs[1]
 15460  			b.Kind = BlockARM64NE
 15461  			b.SetControl(cc)
 15462  			_ = yes
 15463  			_ = no
 15464  			return true
 15465  		}
 15466  		// match: (NE (CMPconst [0] (LessThan cc)) yes no)
 15467  		// cond:
 15468  		// result: (LT cc yes no)
 15469  		for {
 15470  			v := b.Control
 15471  			if v.Op != OpARM64CMPconst {
 15472  				break
 15473  			}
 15474  			if v.AuxInt != 0 {
 15475  				break
 15476  			}
 15477  			v_0 := v.Args[0]
 15478  			if v_0.Op != OpARM64LessThan {
 15479  				break
 15480  			}
 15481  			cc := v_0.Args[0]
 15482  			yes := b.Succs[0]
 15483  			no := b.Succs[1]
 15484  			b.Kind = BlockARM64LT
 15485  			b.SetControl(cc)
 15486  			_ = yes
 15487  			_ = no
 15488  			return true
 15489  		}
 15490  		// match: (NE (CMPconst [0] (LessThanU cc)) yes no)
 15491  		// cond:
 15492  		// result: (ULT cc yes no)
 15493  		for {
 15494  			v := b.Control
 15495  			if v.Op != OpARM64CMPconst {
 15496  				break
 15497  			}
 15498  			if v.AuxInt != 0 {
 15499  				break
 15500  			}
 15501  			v_0 := v.Args[0]
 15502  			if v_0.Op != OpARM64LessThanU {
 15503  				break
 15504  			}
 15505  			cc := v_0.Args[0]
 15506  			yes := b.Succs[0]
 15507  			no := b.Succs[1]
 15508  			b.Kind = BlockARM64ULT
 15509  			b.SetControl(cc)
 15510  			_ = yes
 15511  			_ = no
 15512  			return true
 15513  		}
 15514  		// match: (NE (CMPconst [0] (LessEqual cc)) yes no)
 15515  		// cond:
 15516  		// result: (LE cc yes no)
 15517  		for {
 15518  			v := b.Control
 15519  			if v.Op != OpARM64CMPconst {
 15520  				break
 15521  			}
 15522  			if v.AuxInt != 0 {
 15523  				break
 15524  			}
 15525  			v_0 := v.Args[0]
 15526  			if v_0.Op != OpARM64LessEqual {
 15527  				break
 15528  			}
 15529  			cc := v_0.Args[0]
 15530  			yes := b.Succs[0]
 15531  			no := b.Succs[1]
 15532  			b.Kind = BlockARM64LE
 15533  			b.SetControl(cc)
 15534  			_ = yes
 15535  			_ = no
 15536  			return true
 15537  		}
 15538  		// match: (NE (CMPconst [0] (LessEqualU cc)) yes no)
 15539  		// cond:
 15540  		// result: (ULE cc yes no)
 15541  		for {
 15542  			v := b.Control
 15543  			if v.Op != OpARM64CMPconst {
 15544  				break
 15545  			}
 15546  			if v.AuxInt != 0 {
 15547  				break
 15548  			}
 15549  			v_0 := v.Args[0]
 15550  			if v_0.Op != OpARM64LessEqualU {
 15551  				break
 15552  			}
 15553  			cc := v_0.Args[0]
 15554  			yes := b.Succs[0]
 15555  			no := b.Succs[1]
 15556  			b.Kind = BlockARM64ULE
 15557  			b.SetControl(cc)
 15558  			_ = yes
 15559  			_ = no
 15560  			return true
 15561  		}
 15562  		// match: (NE (CMPconst [0] (GreaterThan cc)) yes no)
 15563  		// cond:
 15564  		// result: (GT cc yes no)
 15565  		for {
 15566  			v := b.Control
 15567  			if v.Op != OpARM64CMPconst {
 15568  				break
 15569  			}
 15570  			if v.AuxInt != 0 {
 15571  				break
 15572  			}
 15573  			v_0 := v.Args[0]
 15574  			if v_0.Op != OpARM64GreaterThan {
 15575  				break
 15576  			}
 15577  			cc := v_0.Args[0]
 15578  			yes := b.Succs[0]
 15579  			no := b.Succs[1]
 15580  			b.Kind = BlockARM64GT
 15581  			b.SetControl(cc)
 15582  			_ = yes
 15583  			_ = no
 15584  			return true
 15585  		}
 15586  		// match: (NE (CMPconst [0] (GreaterThanU cc)) yes no)
 15587  		// cond:
 15588  		// result: (UGT cc yes no)
 15589  		for {
 15590  			v := b.Control
 15591  			if v.Op != OpARM64CMPconst {
 15592  				break
 15593  			}
 15594  			if v.AuxInt != 0 {
 15595  				break
 15596  			}
 15597  			v_0 := v.Args[0]
 15598  			if v_0.Op != OpARM64GreaterThanU {
 15599  				break
 15600  			}
 15601  			cc := v_0.Args[0]
 15602  			yes := b.Succs[0]
 15603  			no := b.Succs[1]
 15604  			b.Kind = BlockARM64UGT
 15605  			b.SetControl(cc)
 15606  			_ = yes
 15607  			_ = no
 15608  			return true
 15609  		}
 15610  		// match: (NE (CMPconst [0] (GreaterEqual cc)) yes no)
 15611  		// cond:
 15612  		// result: (GE cc yes no)
 15613  		for {
 15614  			v := b.Control
 15615  			if v.Op != OpARM64CMPconst {
 15616  				break
 15617  			}
 15618  			if v.AuxInt != 0 {
 15619  				break
 15620  			}
 15621  			v_0 := v.Args[0]
 15622  			if v_0.Op != OpARM64GreaterEqual {
 15623  				break
 15624  			}
 15625  			cc := v_0.Args[0]
 15626  			yes := b.Succs[0]
 15627  			no := b.Succs[1]
 15628  			b.Kind = BlockARM64GE
 15629  			b.SetControl(cc)
 15630  			_ = yes
 15631  			_ = no
 15632  			return true
 15633  		}
 15634  		// match: (NE (CMPconst [0] (GreaterEqualU cc)) yes no)
 15635  		// cond:
 15636  		// result: (UGE cc yes no)
 15637  		for {
 15638  			v := b.Control
 15639  			if v.Op != OpARM64CMPconst {
 15640  				break
 15641  			}
 15642  			if v.AuxInt != 0 {
 15643  				break
 15644  			}
 15645  			v_0 := v.Args[0]
 15646  			if v_0.Op != OpARM64GreaterEqualU {
 15647  				break
 15648  			}
 15649  			cc := v_0.Args[0]
 15650  			yes := b.Succs[0]
 15651  			no := b.Succs[1]
 15652  			b.Kind = BlockARM64UGE
 15653  			b.SetControl(cc)
 15654  			_ = yes
 15655  			_ = no
 15656  			return true
 15657  		}
 15658  		// match: (NE (FlagEQ) yes no)
 15659  		// cond:
 15660  		// result: (First nil no yes)
 15661  		for {
 15662  			v := b.Control
 15663  			if v.Op != OpARM64FlagEQ {
 15664  				break
 15665  			}
 15666  			yes := b.Succs[0]
 15667  			no := b.Succs[1]
 15668  			b.Kind = BlockFirst
 15669  			b.SetControl(nil)
 15670  			b.swapSuccessors()
 15671  			_ = no
 15672  			_ = yes
 15673  			return true
 15674  		}
 15675  		// match: (NE (FlagLT_ULT) yes no)
 15676  		// cond:
 15677  		// result: (First nil yes no)
 15678  		for {
 15679  			v := b.Control
 15680  			if v.Op != OpARM64FlagLT_ULT {
 15681  				break
 15682  			}
 15683  			yes := b.Succs[0]
 15684  			no := b.Succs[1]
 15685  			b.Kind = BlockFirst
 15686  			b.SetControl(nil)
 15687  			_ = yes
 15688  			_ = no
 15689  			return true
 15690  		}
 15691  		// match: (NE (FlagLT_UGT) yes no)
 15692  		// cond:
 15693  		// result: (First nil yes no)
 15694  		for {
 15695  			v := b.Control
 15696  			if v.Op != OpARM64FlagLT_UGT {
 15697  				break
 15698  			}
 15699  			yes := b.Succs[0]
 15700  			no := b.Succs[1]
 15701  			b.Kind = BlockFirst
 15702  			b.SetControl(nil)
 15703  			_ = yes
 15704  			_ = no
 15705  			return true
 15706  		}
 15707  		// match: (NE (FlagGT_ULT) yes no)
 15708  		// cond:
 15709  		// result: (First nil yes no)
 15710  		for {
 15711  			v := b.Control
 15712  			if v.Op != OpARM64FlagGT_ULT {
 15713  				break
 15714  			}
 15715  			yes := b.Succs[0]
 15716  			no := b.Succs[1]
 15717  			b.Kind = BlockFirst
 15718  			b.SetControl(nil)
 15719  			_ = yes
 15720  			_ = no
 15721  			return true
 15722  		}
 15723  		// match: (NE (FlagGT_UGT) yes no)
 15724  		// cond:
 15725  		// result: (First nil yes no)
 15726  		for {
 15727  			v := b.Control
 15728  			if v.Op != OpARM64FlagGT_UGT {
 15729  				break
 15730  			}
 15731  			yes := b.Succs[0]
 15732  			no := b.Succs[1]
 15733  			b.Kind = BlockFirst
 15734  			b.SetControl(nil)
 15735  			_ = yes
 15736  			_ = no
 15737  			return true
 15738  		}
 15739  		// match: (NE (InvertFlags cmp) yes no)
 15740  		// cond:
 15741  		// result: (NE cmp yes no)
 15742  		for {
 15743  			v := b.Control
 15744  			if v.Op != OpARM64InvertFlags {
 15745  				break
 15746  			}
 15747  			cmp := v.Args[0]
 15748  			yes := b.Succs[0]
 15749  			no := b.Succs[1]
 15750  			b.Kind = BlockARM64NE
 15751  			b.SetControl(cmp)
 15752  			_ = yes
 15753  			_ = no
 15754  			return true
 15755  		}
 15756  	case BlockARM64UGE:
 15757  		// match: (UGE (FlagEQ) yes no)
 15758  		// cond:
 15759  		// result: (First nil yes no)
 15760  		for {
 15761  			v := b.Control
 15762  			if v.Op != OpARM64FlagEQ {
 15763  				break
 15764  			}
 15765  			yes := b.Succs[0]
 15766  			no := b.Succs[1]
 15767  			b.Kind = BlockFirst
 15768  			b.SetControl(nil)
 15769  			_ = yes
 15770  			_ = no
 15771  			return true
 15772  		}
 15773  		// match: (UGE (FlagLT_ULT) yes no)
 15774  		// cond:
 15775  		// result: (First nil no yes)
 15776  		for {
 15777  			v := b.Control
 15778  			if v.Op != OpARM64FlagLT_ULT {
 15779  				break
 15780  			}
 15781  			yes := b.Succs[0]
 15782  			no := b.Succs[1]
 15783  			b.Kind = BlockFirst
 15784  			b.SetControl(nil)
 15785  			b.swapSuccessors()
 15786  			_ = no
 15787  			_ = yes
 15788  			return true
 15789  		}
 15790  		// match: (UGE (FlagLT_UGT) yes no)
 15791  		// cond:
 15792  		// result: (First nil yes no)
 15793  		for {
 15794  			v := b.Control
 15795  			if v.Op != OpARM64FlagLT_UGT {
 15796  				break
 15797  			}
 15798  			yes := b.Succs[0]
 15799  			no := b.Succs[1]
 15800  			b.Kind = BlockFirst
 15801  			b.SetControl(nil)
 15802  			_ = yes
 15803  			_ = no
 15804  			return true
 15805  		}
 15806  		// match: (UGE (FlagGT_ULT) yes no)
 15807  		// cond:
 15808  		// result: (First nil no yes)
 15809  		for {
 15810  			v := b.Control
 15811  			if v.Op != OpARM64FlagGT_ULT {
 15812  				break
 15813  			}
 15814  			yes := b.Succs[0]
 15815  			no := b.Succs[1]
 15816  			b.Kind = BlockFirst
 15817  			b.SetControl(nil)
 15818  			b.swapSuccessors()
 15819  			_ = no
 15820  			_ = yes
 15821  			return true
 15822  		}
 15823  		// match: (UGE (FlagGT_UGT) yes no)
 15824  		// cond:
 15825  		// result: (First nil yes no)
 15826  		for {
 15827  			v := b.Control
 15828  			if v.Op != OpARM64FlagGT_UGT {
 15829  				break
 15830  			}
 15831  			yes := b.Succs[0]
 15832  			no := b.Succs[1]
 15833  			b.Kind = BlockFirst
 15834  			b.SetControl(nil)
 15835  			_ = yes
 15836  			_ = no
 15837  			return true
 15838  		}
 15839  		// match: (UGE (InvertFlags cmp) yes no)
 15840  		// cond:
 15841  		// result: (ULE cmp yes no)
 15842  		for {
 15843  			v := b.Control
 15844  			if v.Op != OpARM64InvertFlags {
 15845  				break
 15846  			}
 15847  			cmp := v.Args[0]
 15848  			yes := b.Succs[0]
 15849  			no := b.Succs[1]
 15850  			b.Kind = BlockARM64ULE
 15851  			b.SetControl(cmp)
 15852  			_ = yes
 15853  			_ = no
 15854  			return true
 15855  		}
 15856  	case BlockARM64UGT:
 15857  		// match: (UGT (FlagEQ) yes no)
 15858  		// cond:
 15859  		// result: (First nil no yes)
 15860  		for {
 15861  			v := b.Control
 15862  			if v.Op != OpARM64FlagEQ {
 15863  				break
 15864  			}
 15865  			yes := b.Succs[0]
 15866  			no := b.Succs[1]
 15867  			b.Kind = BlockFirst
 15868  			b.SetControl(nil)
 15869  			b.swapSuccessors()
 15870  			_ = no
 15871  			_ = yes
 15872  			return true
 15873  		}
 15874  		// match: (UGT (FlagLT_ULT) yes no)
 15875  		// cond:
 15876  		// result: (First nil no yes)
 15877  		for {
 15878  			v := b.Control
 15879  			if v.Op != OpARM64FlagLT_ULT {
 15880  				break
 15881  			}
 15882  			yes := b.Succs[0]
 15883  			no := b.Succs[1]
 15884  			b.Kind = BlockFirst
 15885  			b.SetControl(nil)
 15886  			b.swapSuccessors()
 15887  			_ = no
 15888  			_ = yes
 15889  			return true
 15890  		}
 15891  		// match: (UGT (FlagLT_UGT) yes no)
 15892  		// cond:
 15893  		// result: (First nil yes no)
 15894  		for {
 15895  			v := b.Control
 15896  			if v.Op != OpARM64FlagLT_UGT {
 15897  				break
 15898  			}
 15899  			yes := b.Succs[0]
 15900  			no := b.Succs[1]
 15901  			b.Kind = BlockFirst
 15902  			b.SetControl(nil)
 15903  			_ = yes
 15904  			_ = no
 15905  			return true
 15906  		}
 15907  		// match: (UGT (FlagGT_ULT) yes no)
 15908  		// cond:
 15909  		// result: (First nil no yes)
 15910  		for {
 15911  			v := b.Control
 15912  			if v.Op != OpARM64FlagGT_ULT {
 15913  				break
 15914  			}
 15915  			yes := b.Succs[0]
 15916  			no := b.Succs[1]
 15917  			b.Kind = BlockFirst
 15918  			b.SetControl(nil)
 15919  			b.swapSuccessors()
 15920  			_ = no
 15921  			_ = yes
 15922  			return true
 15923  		}
 15924  		// match: (UGT (FlagGT_UGT) yes no)
 15925  		// cond:
 15926  		// result: (First nil yes no)
 15927  		for {
 15928  			v := b.Control
 15929  			if v.Op != OpARM64FlagGT_UGT {
 15930  				break
 15931  			}
 15932  			yes := b.Succs[0]
 15933  			no := b.Succs[1]
 15934  			b.Kind = BlockFirst
 15935  			b.SetControl(nil)
 15936  			_ = yes
 15937  			_ = no
 15938  			return true
 15939  		}
 15940  		// match: (UGT (InvertFlags cmp) yes no)
 15941  		// cond:
 15942  		// result: (ULT cmp yes no)
 15943  		for {
 15944  			v := b.Control
 15945  			if v.Op != OpARM64InvertFlags {
 15946  				break
 15947  			}
 15948  			cmp := v.Args[0]
 15949  			yes := b.Succs[0]
 15950  			no := b.Succs[1]
 15951  			b.Kind = BlockARM64ULT
 15952  			b.SetControl(cmp)
 15953  			_ = yes
 15954  			_ = no
 15955  			return true
 15956  		}
 15957  	case BlockARM64ULE:
 15958  		// match: (ULE (FlagEQ) yes no)
 15959  		// cond:
 15960  		// result: (First nil yes no)
 15961  		for {
 15962  			v := b.Control
 15963  			if v.Op != OpARM64FlagEQ {
 15964  				break
 15965  			}
 15966  			yes := b.Succs[0]
 15967  			no := b.Succs[1]
 15968  			b.Kind = BlockFirst
 15969  			b.SetControl(nil)
 15970  			_ = yes
 15971  			_ = no
 15972  			return true
 15973  		}
 15974  		// match: (ULE (FlagLT_ULT) yes no)
 15975  		// cond:
 15976  		// result: (First nil yes no)
 15977  		for {
 15978  			v := b.Control
 15979  			if v.Op != OpARM64FlagLT_ULT {
 15980  				break
 15981  			}
 15982  			yes := b.Succs[0]
 15983  			no := b.Succs[1]
 15984  			b.Kind = BlockFirst
 15985  			b.SetControl(nil)
 15986  			_ = yes
 15987  			_ = no
 15988  			return true
 15989  		}
 15990  		// match: (ULE (FlagLT_UGT) yes no)
 15991  		// cond:
 15992  		// result: (First nil no yes)
 15993  		for {
 15994  			v := b.Control
 15995  			if v.Op != OpARM64FlagLT_UGT {
 15996  				break
 15997  			}
 15998  			yes := b.Succs[0]
 15999  			no := b.Succs[1]
 16000  			b.Kind = BlockFirst
 16001  			b.SetControl(nil)
 16002  			b.swapSuccessors()
 16003  			_ = no
 16004  			_ = yes
 16005  			return true
 16006  		}
 16007  		// match: (ULE (FlagGT_ULT) yes no)
 16008  		// cond:
 16009  		// result: (First nil yes no)
 16010  		for {
 16011  			v := b.Control
 16012  			if v.Op != OpARM64FlagGT_ULT {
 16013  				break
 16014  			}
 16015  			yes := b.Succs[0]
 16016  			no := b.Succs[1]
 16017  			b.Kind = BlockFirst
 16018  			b.SetControl(nil)
 16019  			_ = yes
 16020  			_ = no
 16021  			return true
 16022  		}
 16023  		// match: (ULE (FlagGT_UGT) yes no)
 16024  		// cond:
 16025  		// result: (First nil no yes)
 16026  		for {
 16027  			v := b.Control
 16028  			if v.Op != OpARM64FlagGT_UGT {
 16029  				break
 16030  			}
 16031  			yes := b.Succs[0]
 16032  			no := b.Succs[1]
 16033  			b.Kind = BlockFirst
 16034  			b.SetControl(nil)
 16035  			b.swapSuccessors()
 16036  			_ = no
 16037  			_ = yes
 16038  			return true
 16039  		}
 16040  		// match: (ULE (InvertFlags cmp) yes no)
 16041  		// cond:
 16042  		// result: (UGE cmp yes no)
 16043  		for {
 16044  			v := b.Control
 16045  			if v.Op != OpARM64InvertFlags {
 16046  				break
 16047  			}
 16048  			cmp := v.Args[0]
 16049  			yes := b.Succs[0]
 16050  			no := b.Succs[1]
 16051  			b.Kind = BlockARM64UGE
 16052  			b.SetControl(cmp)
 16053  			_ = yes
 16054  			_ = no
 16055  			return true
 16056  		}
 16057  	case BlockARM64ULT:
 16058  		// match: (ULT (FlagEQ) yes no)
 16059  		// cond:
 16060  		// result: (First nil no yes)
 16061  		for {
 16062  			v := b.Control
 16063  			if v.Op != OpARM64FlagEQ {
 16064  				break
 16065  			}
 16066  			yes := b.Succs[0]
 16067  			no := b.Succs[1]
 16068  			b.Kind = BlockFirst
 16069  			b.SetControl(nil)
 16070  			b.swapSuccessors()
 16071  			_ = no
 16072  			_ = yes
 16073  			return true
 16074  		}
 16075  		// match: (ULT (FlagLT_ULT) yes no)
 16076  		// cond:
 16077  		// result: (First nil yes no)
 16078  		for {
 16079  			v := b.Control
 16080  			if v.Op != OpARM64FlagLT_ULT {
 16081  				break
 16082  			}
 16083  			yes := b.Succs[0]
 16084  			no := b.Succs[1]
 16085  			b.Kind = BlockFirst
 16086  			b.SetControl(nil)
 16087  			_ = yes
 16088  			_ = no
 16089  			return true
 16090  		}
 16091  		// match: (ULT (FlagLT_UGT) yes no)
 16092  		// cond:
 16093  		// result: (First nil no yes)
 16094  		for {
 16095  			v := b.Control
 16096  			if v.Op != OpARM64FlagLT_UGT {
 16097  				break
 16098  			}
 16099  			yes := b.Succs[0]
 16100  			no := b.Succs[1]
 16101  			b.Kind = BlockFirst
 16102  			b.SetControl(nil)
 16103  			b.swapSuccessors()
 16104  			_ = no
 16105  			_ = yes
 16106  			return true
 16107  		}
 16108  		// match: (ULT (FlagGT_ULT) yes no)
 16109  		// cond:
 16110  		// result: (First nil yes no)
 16111  		for {
 16112  			v := b.Control
 16113  			if v.Op != OpARM64FlagGT_ULT {
 16114  				break
 16115  			}
 16116  			yes := b.Succs[0]
 16117  			no := b.Succs[1]
 16118  			b.Kind = BlockFirst
 16119  			b.SetControl(nil)
 16120  			_ = yes
 16121  			_ = no
 16122  			return true
 16123  		}
 16124  		// match: (ULT (FlagGT_UGT) yes no)
 16125  		// cond:
 16126  		// result: (First nil no yes)
 16127  		for {
 16128  			v := b.Control
 16129  			if v.Op != OpARM64FlagGT_UGT {
 16130  				break
 16131  			}
 16132  			yes := b.Succs[0]
 16133  			no := b.Succs[1]
 16134  			b.Kind = BlockFirst
 16135  			b.SetControl(nil)
 16136  			b.swapSuccessors()
 16137  			_ = no
 16138  			_ = yes
 16139  			return true
 16140  		}
 16141  		// match: (ULT (InvertFlags cmp) yes no)
 16142  		// cond:
 16143  		// result: (UGT cmp yes no)
 16144  		for {
 16145  			v := b.Control
 16146  			if v.Op != OpARM64InvertFlags {
 16147  				break
 16148  			}
 16149  			cmp := v.Args[0]
 16150  			yes := b.Succs[0]
 16151  			no := b.Succs[1]
 16152  			b.Kind = BlockARM64UGT
 16153  			b.SetControl(cmp)
 16154  			_ = yes
 16155  			_ = no
 16156  			return true
 16157  		}
 16158  	}
 16159  	return false
 16160  }