github.com/sanprasirt/go@v0.0.0-20170607001320-a027466e4b6d/src/cmd/compile/internal/ssa/rewriteARM64.go (about)

     1  // Code generated from gen/ARM64.rules; DO NOT EDIT.
     2  // generated with: cd gen; go run *.go
     3  
     4  package ssa
     5  
     6  import "math"
     7  import "cmd/internal/obj"
     8  import "cmd/internal/objabi"
     9  import "cmd/compile/internal/types"
    10  
    11  var _ = math.MinInt8  // in case not otherwise used
    12  var _ = obj.ANOP      // in case not otherwise used
    13  var _ = objabi.GOROOT // in case not otherwise used
    14  var _ = types.TypeMem // in case not otherwise used
    15  
// rewriteValueARM64 dispatches v to the rewrite helper(s) generated for
// its opcode from gen/ARM64.rules. It reports whether some rule matched
// and rewrote v in place; callers re-run the rewriter until no rule fires.
//
// Opcodes with many rules are split across numbered helpers (_0, _10,
// _20, ...); the || chains short-circuit, so a later helper runs only
// when the earlier ones made no change.
func rewriteValueARM64(v *Value) bool {
	switch v.Op {
	case OpARM64ADD:
		return rewriteValueARM64_OpARM64ADD_0(v)
	case OpARM64ADDconst:
		return rewriteValueARM64_OpARM64ADDconst_0(v)
	case OpARM64ADDshiftLL:
		return rewriteValueARM64_OpARM64ADDshiftLL_0(v)
	case OpARM64ADDshiftRA:
		return rewriteValueARM64_OpARM64ADDshiftRA_0(v)
	case OpARM64ADDshiftRL:
		return rewriteValueARM64_OpARM64ADDshiftRL_0(v)
	case OpARM64AND:
		return rewriteValueARM64_OpARM64AND_0(v) || rewriteValueARM64_OpARM64AND_10(v)
	case OpARM64ANDconst:
		return rewriteValueARM64_OpARM64ANDconst_0(v)
	case OpARM64ANDshiftLL:
		return rewriteValueARM64_OpARM64ANDshiftLL_0(v)
	case OpARM64ANDshiftRA:
		return rewriteValueARM64_OpARM64ANDshiftRA_0(v)
	case OpARM64ANDshiftRL:
		return rewriteValueARM64_OpARM64ANDshiftRL_0(v)
	case OpARM64BIC:
		return rewriteValueARM64_OpARM64BIC_0(v)
	case OpARM64BICconst:
		return rewriteValueARM64_OpARM64BICconst_0(v)
	case OpARM64BICshiftLL:
		return rewriteValueARM64_OpARM64BICshiftLL_0(v)
	case OpARM64BICshiftRA:
		return rewriteValueARM64_OpARM64BICshiftRA_0(v)
	case OpARM64BICshiftRL:
		return rewriteValueARM64_OpARM64BICshiftRL_0(v)
	case OpARM64CMP:
		return rewriteValueARM64_OpARM64CMP_0(v)
	case OpARM64CMPW:
		return rewriteValueARM64_OpARM64CMPW_0(v)
	case OpARM64CMPWconst:
		return rewriteValueARM64_OpARM64CMPWconst_0(v)
	case OpARM64CMPconst:
		return rewriteValueARM64_OpARM64CMPconst_0(v)
	case OpARM64CMPshiftLL:
		return rewriteValueARM64_OpARM64CMPshiftLL_0(v)
	case OpARM64CMPshiftRA:
		return rewriteValueARM64_OpARM64CMPshiftRA_0(v)
	case OpARM64CMPshiftRL:
		return rewriteValueARM64_OpARM64CMPshiftRL_0(v)
	case OpARM64CSELULT:
		return rewriteValueARM64_OpARM64CSELULT_0(v)
	case OpARM64CSELULT0:
		return rewriteValueARM64_OpARM64CSELULT0_0(v)
	case OpARM64DIV:
		return rewriteValueARM64_OpARM64DIV_0(v)
	case OpARM64DIVW:
		return rewriteValueARM64_OpARM64DIVW_0(v)
	case OpARM64Equal:
		return rewriteValueARM64_OpARM64Equal_0(v)
	case OpARM64FMOVDload:
		return rewriteValueARM64_OpARM64FMOVDload_0(v)
	case OpARM64FMOVDstore:
		return rewriteValueARM64_OpARM64FMOVDstore_0(v)
	case OpARM64FMOVSload:
		return rewriteValueARM64_OpARM64FMOVSload_0(v)
	case OpARM64FMOVSstore:
		return rewriteValueARM64_OpARM64FMOVSstore_0(v)
	case OpARM64GreaterEqual:
		return rewriteValueARM64_OpARM64GreaterEqual_0(v)
	case OpARM64GreaterEqualU:
		return rewriteValueARM64_OpARM64GreaterEqualU_0(v)
	case OpARM64GreaterThan:
		return rewriteValueARM64_OpARM64GreaterThan_0(v)
	case OpARM64GreaterThanU:
		return rewriteValueARM64_OpARM64GreaterThanU_0(v)
	case OpARM64LessEqual:
		return rewriteValueARM64_OpARM64LessEqual_0(v)
	case OpARM64LessEqualU:
		return rewriteValueARM64_OpARM64LessEqualU_0(v)
	case OpARM64LessThan:
		return rewriteValueARM64_OpARM64LessThan_0(v)
	case OpARM64LessThanU:
		return rewriteValueARM64_OpARM64LessThanU_0(v)
	case OpARM64MOD:
		return rewriteValueARM64_OpARM64MOD_0(v)
	case OpARM64MODW:
		return rewriteValueARM64_OpARM64MODW_0(v)
	case OpARM64MOVBUload:
		return rewriteValueARM64_OpARM64MOVBUload_0(v)
	case OpARM64MOVBUreg:
		return rewriteValueARM64_OpARM64MOVBUreg_0(v)
	case OpARM64MOVBload:
		return rewriteValueARM64_OpARM64MOVBload_0(v)
	case OpARM64MOVBreg:
		return rewriteValueARM64_OpARM64MOVBreg_0(v)
	case OpARM64MOVBstore:
		return rewriteValueARM64_OpARM64MOVBstore_0(v)
	case OpARM64MOVBstorezero:
		return rewriteValueARM64_OpARM64MOVBstorezero_0(v)
	case OpARM64MOVDload:
		return rewriteValueARM64_OpARM64MOVDload_0(v)
	case OpARM64MOVDreg:
		return rewriteValueARM64_OpARM64MOVDreg_0(v)
	case OpARM64MOVDstore:
		return rewriteValueARM64_OpARM64MOVDstore_0(v)
	case OpARM64MOVDstorezero:
		return rewriteValueARM64_OpARM64MOVDstorezero_0(v)
	case OpARM64MOVHUload:
		return rewriteValueARM64_OpARM64MOVHUload_0(v)
	case OpARM64MOVHUreg:
		return rewriteValueARM64_OpARM64MOVHUreg_0(v)
	case OpARM64MOVHload:
		return rewriteValueARM64_OpARM64MOVHload_0(v)
	case OpARM64MOVHreg:
		return rewriteValueARM64_OpARM64MOVHreg_0(v)
	case OpARM64MOVHstore:
		return rewriteValueARM64_OpARM64MOVHstore_0(v)
	case OpARM64MOVHstorezero:
		return rewriteValueARM64_OpARM64MOVHstorezero_0(v)
	case OpARM64MOVWUload:
		return rewriteValueARM64_OpARM64MOVWUload_0(v)
	case OpARM64MOVWUreg:
		return rewriteValueARM64_OpARM64MOVWUreg_0(v)
	case OpARM64MOVWload:
		return rewriteValueARM64_OpARM64MOVWload_0(v)
	case OpARM64MOVWreg:
		return rewriteValueARM64_OpARM64MOVWreg_0(v) || rewriteValueARM64_OpARM64MOVWreg_10(v)
	case OpARM64MOVWstore:
		return rewriteValueARM64_OpARM64MOVWstore_0(v)
	case OpARM64MOVWstorezero:
		return rewriteValueARM64_OpARM64MOVWstorezero_0(v)
	case OpARM64MUL:
		return rewriteValueARM64_OpARM64MUL_0(v) || rewriteValueARM64_OpARM64MUL_10(v) || rewriteValueARM64_OpARM64MUL_20(v)
	case OpARM64MULW:
		return rewriteValueARM64_OpARM64MULW_0(v) || rewriteValueARM64_OpARM64MULW_10(v) || rewriteValueARM64_OpARM64MULW_20(v)
	case OpARM64MVN:
		return rewriteValueARM64_OpARM64MVN_0(v)
	case OpARM64NEG:
		return rewriteValueARM64_OpARM64NEG_0(v)
	case OpARM64NotEqual:
		return rewriteValueARM64_OpARM64NotEqual_0(v)
	case OpARM64OR:
		return rewriteValueARM64_OpARM64OR_0(v) || rewriteValueARM64_OpARM64OR_10(v)
	case OpARM64ORconst:
		return rewriteValueARM64_OpARM64ORconst_0(v)
	case OpARM64ORshiftLL:
		return rewriteValueARM64_OpARM64ORshiftLL_0(v) || rewriteValueARM64_OpARM64ORshiftLL_10(v)
	case OpARM64ORshiftRA:
		return rewriteValueARM64_OpARM64ORshiftRA_0(v)
	case OpARM64ORshiftRL:
		return rewriteValueARM64_OpARM64ORshiftRL_0(v)
	case OpARM64SLL:
		return rewriteValueARM64_OpARM64SLL_0(v)
	case OpARM64SLLconst:
		return rewriteValueARM64_OpARM64SLLconst_0(v)
	case OpARM64SRA:
		return rewriteValueARM64_OpARM64SRA_0(v)
	case OpARM64SRAconst:
		return rewriteValueARM64_OpARM64SRAconst_0(v)
	case OpARM64SRL:
		return rewriteValueARM64_OpARM64SRL_0(v)
	case OpARM64SRLconst:
		return rewriteValueARM64_OpARM64SRLconst_0(v)
	case OpARM64SUB:
		return rewriteValueARM64_OpARM64SUB_0(v)
	case OpARM64SUBconst:
		return rewriteValueARM64_OpARM64SUBconst_0(v)
	case OpARM64SUBshiftLL:
		return rewriteValueARM64_OpARM64SUBshiftLL_0(v)
	case OpARM64SUBshiftRA:
		return rewriteValueARM64_OpARM64SUBshiftRA_0(v)
	case OpARM64SUBshiftRL:
		return rewriteValueARM64_OpARM64SUBshiftRL_0(v)
	case OpARM64UDIV:
		return rewriteValueARM64_OpARM64UDIV_0(v)
	case OpARM64UDIVW:
		return rewriteValueARM64_OpARM64UDIVW_0(v)
	case OpARM64UMOD:
		return rewriteValueARM64_OpARM64UMOD_0(v)
	case OpARM64UMODW:
		return rewriteValueARM64_OpARM64UMODW_0(v)
	case OpARM64XOR:
		return rewriteValueARM64_OpARM64XOR_0(v)
	case OpARM64XORconst:
		return rewriteValueARM64_OpARM64XORconst_0(v)
	case OpARM64XORshiftLL:
		return rewriteValueARM64_OpARM64XORshiftLL_0(v)
	case OpARM64XORshiftRA:
		return rewriteValueARM64_OpARM64XORshiftRA_0(v)
	case OpARM64XORshiftRL:
		return rewriteValueARM64_OpARM64XORshiftRL_0(v)
	case OpAdd16:
		return rewriteValueARM64_OpAdd16_0(v)
	case OpAdd32:
		return rewriteValueARM64_OpAdd32_0(v)
	case OpAdd32F:
		return rewriteValueARM64_OpAdd32F_0(v)
	case OpAdd64:
		return rewriteValueARM64_OpAdd64_0(v)
	case OpAdd64F:
		return rewriteValueARM64_OpAdd64F_0(v)
	case OpAdd8:
		return rewriteValueARM64_OpAdd8_0(v)
	case OpAddPtr:
		return rewriteValueARM64_OpAddPtr_0(v)
	case OpAddr:
		return rewriteValueARM64_OpAddr_0(v)
	case OpAnd16:
		return rewriteValueARM64_OpAnd16_0(v)
	case OpAnd32:
		return rewriteValueARM64_OpAnd32_0(v)
	case OpAnd64:
		return rewriteValueARM64_OpAnd64_0(v)
	case OpAnd8:
		return rewriteValueARM64_OpAnd8_0(v)
	case OpAndB:
		return rewriteValueARM64_OpAndB_0(v)
	case OpAtomicAdd32:
		return rewriteValueARM64_OpAtomicAdd32_0(v)
	case OpAtomicAdd64:
		return rewriteValueARM64_OpAtomicAdd64_0(v)
	case OpAtomicAnd8:
		return rewriteValueARM64_OpAtomicAnd8_0(v)
	case OpAtomicCompareAndSwap32:
		return rewriteValueARM64_OpAtomicCompareAndSwap32_0(v)
	case OpAtomicCompareAndSwap64:
		return rewriteValueARM64_OpAtomicCompareAndSwap64_0(v)
	case OpAtomicExchange32:
		return rewriteValueARM64_OpAtomicExchange32_0(v)
	case OpAtomicExchange64:
		return rewriteValueARM64_OpAtomicExchange64_0(v)
	case OpAtomicLoad32:
		return rewriteValueARM64_OpAtomicLoad32_0(v)
	case OpAtomicLoad64:
		return rewriteValueARM64_OpAtomicLoad64_0(v)
	case OpAtomicLoadPtr:
		return rewriteValueARM64_OpAtomicLoadPtr_0(v)
	case OpAtomicOr8:
		return rewriteValueARM64_OpAtomicOr8_0(v)
	case OpAtomicStore32:
		return rewriteValueARM64_OpAtomicStore32_0(v)
	case OpAtomicStore64:
		return rewriteValueARM64_OpAtomicStore64_0(v)
	case OpAtomicStorePtrNoWB:
		return rewriteValueARM64_OpAtomicStorePtrNoWB_0(v)
	case OpAvg64u:
		return rewriteValueARM64_OpAvg64u_0(v)
	case OpBitLen64:
		return rewriteValueARM64_OpBitLen64_0(v)
	case OpBitRev16:
		return rewriteValueARM64_OpBitRev16_0(v)
	case OpBitRev32:
		return rewriteValueARM64_OpBitRev32_0(v)
	case OpBitRev64:
		return rewriteValueARM64_OpBitRev64_0(v)
	case OpBitRev8:
		return rewriteValueARM64_OpBitRev8_0(v)
	case OpBswap32:
		return rewriteValueARM64_OpBswap32_0(v)
	case OpBswap64:
		return rewriteValueARM64_OpBswap64_0(v)
	case OpClosureCall:
		return rewriteValueARM64_OpClosureCall_0(v)
	case OpCom16:
		return rewriteValueARM64_OpCom16_0(v)
	case OpCom32:
		return rewriteValueARM64_OpCom32_0(v)
	case OpCom64:
		return rewriteValueARM64_OpCom64_0(v)
	case OpCom8:
		return rewriteValueARM64_OpCom8_0(v)
	case OpConst16:
		return rewriteValueARM64_OpConst16_0(v)
	case OpConst32:
		return rewriteValueARM64_OpConst32_0(v)
	case OpConst32F:
		return rewriteValueARM64_OpConst32F_0(v)
	case OpConst64:
		return rewriteValueARM64_OpConst64_0(v)
	case OpConst64F:
		return rewriteValueARM64_OpConst64F_0(v)
	case OpConst8:
		return rewriteValueARM64_OpConst8_0(v)
	case OpConstBool:
		return rewriteValueARM64_OpConstBool_0(v)
	case OpConstNil:
		return rewriteValueARM64_OpConstNil_0(v)
	case OpConvert:
		return rewriteValueARM64_OpConvert_0(v)
	case OpCtz32:
		return rewriteValueARM64_OpCtz32_0(v)
	case OpCtz64:
		return rewriteValueARM64_OpCtz64_0(v)
	case OpCvt32Fto32:
		return rewriteValueARM64_OpCvt32Fto32_0(v)
	case OpCvt32Fto32U:
		return rewriteValueARM64_OpCvt32Fto32U_0(v)
	case OpCvt32Fto64:
		return rewriteValueARM64_OpCvt32Fto64_0(v)
	case OpCvt32Fto64F:
		return rewriteValueARM64_OpCvt32Fto64F_0(v)
	case OpCvt32Fto64U:
		return rewriteValueARM64_OpCvt32Fto64U_0(v)
	case OpCvt32Uto32F:
		return rewriteValueARM64_OpCvt32Uto32F_0(v)
	case OpCvt32Uto64F:
		return rewriteValueARM64_OpCvt32Uto64F_0(v)
	case OpCvt32to32F:
		return rewriteValueARM64_OpCvt32to32F_0(v)
	case OpCvt32to64F:
		return rewriteValueARM64_OpCvt32to64F_0(v)
	case OpCvt64Fto32:
		return rewriteValueARM64_OpCvt64Fto32_0(v)
	case OpCvt64Fto32F:
		return rewriteValueARM64_OpCvt64Fto32F_0(v)
	case OpCvt64Fto32U:
		return rewriteValueARM64_OpCvt64Fto32U_0(v)
	case OpCvt64Fto64:
		return rewriteValueARM64_OpCvt64Fto64_0(v)
	case OpCvt64Fto64U:
		return rewriteValueARM64_OpCvt64Fto64U_0(v)
	case OpCvt64Uto32F:
		return rewriteValueARM64_OpCvt64Uto32F_0(v)
	case OpCvt64Uto64F:
		return rewriteValueARM64_OpCvt64Uto64F_0(v)
	case OpCvt64to32F:
		return rewriteValueARM64_OpCvt64to32F_0(v)
	case OpCvt64to64F:
		return rewriteValueARM64_OpCvt64to64F_0(v)
	case OpDiv16:
		return rewriteValueARM64_OpDiv16_0(v)
	case OpDiv16u:
		return rewriteValueARM64_OpDiv16u_0(v)
	case OpDiv32:
		return rewriteValueARM64_OpDiv32_0(v)
	case OpDiv32F:
		return rewriteValueARM64_OpDiv32F_0(v)
	case OpDiv32u:
		return rewriteValueARM64_OpDiv32u_0(v)
	case OpDiv64:
		return rewriteValueARM64_OpDiv64_0(v)
	case OpDiv64F:
		return rewriteValueARM64_OpDiv64F_0(v)
	case OpDiv64u:
		return rewriteValueARM64_OpDiv64u_0(v)
	case OpDiv8:
		return rewriteValueARM64_OpDiv8_0(v)
	case OpDiv8u:
		return rewriteValueARM64_OpDiv8u_0(v)
	case OpEq16:
		return rewriteValueARM64_OpEq16_0(v)
	case OpEq32:
		return rewriteValueARM64_OpEq32_0(v)
	case OpEq32F:
		return rewriteValueARM64_OpEq32F_0(v)
	case OpEq64:
		return rewriteValueARM64_OpEq64_0(v)
	case OpEq64F:
		return rewriteValueARM64_OpEq64F_0(v)
	case OpEq8:
		return rewriteValueARM64_OpEq8_0(v)
	case OpEqB:
		return rewriteValueARM64_OpEqB_0(v)
	case OpEqPtr:
		return rewriteValueARM64_OpEqPtr_0(v)
	case OpGeq16:
		return rewriteValueARM64_OpGeq16_0(v)
	case OpGeq16U:
		return rewriteValueARM64_OpGeq16U_0(v)
	case OpGeq32:
		return rewriteValueARM64_OpGeq32_0(v)
	case OpGeq32F:
		return rewriteValueARM64_OpGeq32F_0(v)
	case OpGeq32U:
		return rewriteValueARM64_OpGeq32U_0(v)
	case OpGeq64:
		return rewriteValueARM64_OpGeq64_0(v)
	case OpGeq64F:
		return rewriteValueARM64_OpGeq64F_0(v)
	case OpGeq64U:
		return rewriteValueARM64_OpGeq64U_0(v)
	case OpGeq8:
		return rewriteValueARM64_OpGeq8_0(v)
	case OpGeq8U:
		return rewriteValueARM64_OpGeq8U_0(v)
	case OpGetClosurePtr:
		return rewriteValueARM64_OpGetClosurePtr_0(v)
	case OpGreater16:
		return rewriteValueARM64_OpGreater16_0(v)
	case OpGreater16U:
		return rewriteValueARM64_OpGreater16U_0(v)
	case OpGreater32:
		return rewriteValueARM64_OpGreater32_0(v)
	case OpGreater32F:
		return rewriteValueARM64_OpGreater32F_0(v)
	case OpGreater32U:
		return rewriteValueARM64_OpGreater32U_0(v)
	case OpGreater64:
		return rewriteValueARM64_OpGreater64_0(v)
	case OpGreater64F:
		return rewriteValueARM64_OpGreater64F_0(v)
	case OpGreater64U:
		return rewriteValueARM64_OpGreater64U_0(v)
	case OpGreater8:
		return rewriteValueARM64_OpGreater8_0(v)
	case OpGreater8U:
		return rewriteValueARM64_OpGreater8U_0(v)
	case OpHmul32:
		return rewriteValueARM64_OpHmul32_0(v)
	case OpHmul32u:
		return rewriteValueARM64_OpHmul32u_0(v)
	case OpHmul64:
		return rewriteValueARM64_OpHmul64_0(v)
	case OpHmul64u:
		return rewriteValueARM64_OpHmul64u_0(v)
	case OpInterCall:
		return rewriteValueARM64_OpInterCall_0(v)
	case OpIsInBounds:
		return rewriteValueARM64_OpIsInBounds_0(v)
	case OpIsNonNil:
		return rewriteValueARM64_OpIsNonNil_0(v)
	case OpIsSliceInBounds:
		return rewriteValueARM64_OpIsSliceInBounds_0(v)
	case OpLeq16:
		return rewriteValueARM64_OpLeq16_0(v)
	case OpLeq16U:
		return rewriteValueARM64_OpLeq16U_0(v)
	case OpLeq32:
		return rewriteValueARM64_OpLeq32_0(v)
	case OpLeq32F:
		return rewriteValueARM64_OpLeq32F_0(v)
	case OpLeq32U:
		return rewriteValueARM64_OpLeq32U_0(v)
	case OpLeq64:
		return rewriteValueARM64_OpLeq64_0(v)
	case OpLeq64F:
		return rewriteValueARM64_OpLeq64F_0(v)
	case OpLeq64U:
		return rewriteValueARM64_OpLeq64U_0(v)
	case OpLeq8:
		return rewriteValueARM64_OpLeq8_0(v)
	case OpLeq8U:
		return rewriteValueARM64_OpLeq8U_0(v)
	case OpLess16:
		return rewriteValueARM64_OpLess16_0(v)
	case OpLess16U:
		return rewriteValueARM64_OpLess16U_0(v)
	case OpLess32:
		return rewriteValueARM64_OpLess32_0(v)
	case OpLess32F:
		return rewriteValueARM64_OpLess32F_0(v)
	case OpLess32U:
		return rewriteValueARM64_OpLess32U_0(v)
	case OpLess64:
		return rewriteValueARM64_OpLess64_0(v)
	case OpLess64F:
		return rewriteValueARM64_OpLess64F_0(v)
	case OpLess64U:
		return rewriteValueARM64_OpLess64U_0(v)
	case OpLess8:
		return rewriteValueARM64_OpLess8_0(v)
	case OpLess8U:
		return rewriteValueARM64_OpLess8U_0(v)
	case OpLoad:
		return rewriteValueARM64_OpLoad_0(v)
	case OpLsh16x16:
		return rewriteValueARM64_OpLsh16x16_0(v)
	case OpLsh16x32:
		return rewriteValueARM64_OpLsh16x32_0(v)
	case OpLsh16x64:
		return rewriteValueARM64_OpLsh16x64_0(v)
	case OpLsh16x8:
		return rewriteValueARM64_OpLsh16x8_0(v)
	case OpLsh32x16:
		return rewriteValueARM64_OpLsh32x16_0(v)
	case OpLsh32x32:
		return rewriteValueARM64_OpLsh32x32_0(v)
	case OpLsh32x64:
		return rewriteValueARM64_OpLsh32x64_0(v)
	case OpLsh32x8:
		return rewriteValueARM64_OpLsh32x8_0(v)
	case OpLsh64x16:
		return rewriteValueARM64_OpLsh64x16_0(v)
	case OpLsh64x32:
		return rewriteValueARM64_OpLsh64x32_0(v)
	case OpLsh64x64:
		return rewriteValueARM64_OpLsh64x64_0(v)
	case OpLsh64x8:
		return rewriteValueARM64_OpLsh64x8_0(v)
	case OpLsh8x16:
		return rewriteValueARM64_OpLsh8x16_0(v)
	case OpLsh8x32:
		return rewriteValueARM64_OpLsh8x32_0(v)
	case OpLsh8x64:
		return rewriteValueARM64_OpLsh8x64_0(v)
	case OpLsh8x8:
		return rewriteValueARM64_OpLsh8x8_0(v)
	case OpMod16:
		return rewriteValueARM64_OpMod16_0(v)
	case OpMod16u:
		return rewriteValueARM64_OpMod16u_0(v)
	case OpMod32:
		return rewriteValueARM64_OpMod32_0(v)
	case OpMod32u:
		return rewriteValueARM64_OpMod32u_0(v)
	case OpMod64:
		return rewriteValueARM64_OpMod64_0(v)
	case OpMod64u:
		return rewriteValueARM64_OpMod64u_0(v)
	case OpMod8:
		return rewriteValueARM64_OpMod8_0(v)
	case OpMod8u:
		return rewriteValueARM64_OpMod8u_0(v)
	case OpMove:
		return rewriteValueARM64_OpMove_0(v) || rewriteValueARM64_OpMove_10(v)
	case OpMul16:
		return rewriteValueARM64_OpMul16_0(v)
	case OpMul32:
		return rewriteValueARM64_OpMul32_0(v)
	case OpMul32F:
		return rewriteValueARM64_OpMul32F_0(v)
	case OpMul64:
		return rewriteValueARM64_OpMul64_0(v)
	case OpMul64F:
		return rewriteValueARM64_OpMul64F_0(v)
	case OpMul8:
		return rewriteValueARM64_OpMul8_0(v)
	case OpNeg16:
		return rewriteValueARM64_OpNeg16_0(v)
	case OpNeg32:
		return rewriteValueARM64_OpNeg32_0(v)
	case OpNeg32F:
		return rewriteValueARM64_OpNeg32F_0(v)
	case OpNeg64:
		return rewriteValueARM64_OpNeg64_0(v)
	case OpNeg64F:
		return rewriteValueARM64_OpNeg64F_0(v)
	case OpNeg8:
		return rewriteValueARM64_OpNeg8_0(v)
	case OpNeq16:
		return rewriteValueARM64_OpNeq16_0(v)
	case OpNeq32:
		return rewriteValueARM64_OpNeq32_0(v)
	case OpNeq32F:
		return rewriteValueARM64_OpNeq32F_0(v)
	case OpNeq64:
		return rewriteValueARM64_OpNeq64_0(v)
	case OpNeq64F:
		return rewriteValueARM64_OpNeq64F_0(v)
	case OpNeq8:
		return rewriteValueARM64_OpNeq8_0(v)
	case OpNeqB:
		return rewriteValueARM64_OpNeqB_0(v)
	case OpNeqPtr:
		return rewriteValueARM64_OpNeqPtr_0(v)
	case OpNilCheck:
		return rewriteValueARM64_OpNilCheck_0(v)
	case OpNot:
		return rewriteValueARM64_OpNot_0(v)
	case OpOffPtr:
		return rewriteValueARM64_OpOffPtr_0(v)
	case OpOr16:
		return rewriteValueARM64_OpOr16_0(v)
	case OpOr32:
		return rewriteValueARM64_OpOr32_0(v)
	case OpOr64:
		return rewriteValueARM64_OpOr64_0(v)
	case OpOr8:
		return rewriteValueARM64_OpOr8_0(v)
	case OpOrB:
		return rewriteValueARM64_OpOrB_0(v)
	case OpRound32F:
		return rewriteValueARM64_OpRound32F_0(v)
	case OpRound64F:
		return rewriteValueARM64_OpRound64F_0(v)
	case OpRsh16Ux16:
		return rewriteValueARM64_OpRsh16Ux16_0(v)
	case OpRsh16Ux32:
		return rewriteValueARM64_OpRsh16Ux32_0(v)
	case OpRsh16Ux64:
		return rewriteValueARM64_OpRsh16Ux64_0(v)
	case OpRsh16Ux8:
		return rewriteValueARM64_OpRsh16Ux8_0(v)
	case OpRsh16x16:
		return rewriteValueARM64_OpRsh16x16_0(v)
	case OpRsh16x32:
		return rewriteValueARM64_OpRsh16x32_0(v)
	case OpRsh16x64:
		return rewriteValueARM64_OpRsh16x64_0(v)
	case OpRsh16x8:
		return rewriteValueARM64_OpRsh16x8_0(v)
	case OpRsh32Ux16:
		return rewriteValueARM64_OpRsh32Ux16_0(v)
	case OpRsh32Ux32:
		return rewriteValueARM64_OpRsh32Ux32_0(v)
	case OpRsh32Ux64:
		return rewriteValueARM64_OpRsh32Ux64_0(v)
	case OpRsh32Ux8:
		return rewriteValueARM64_OpRsh32Ux8_0(v)
	case OpRsh32x16:
		return rewriteValueARM64_OpRsh32x16_0(v)
	case OpRsh32x32:
		return rewriteValueARM64_OpRsh32x32_0(v)
	case OpRsh32x64:
		return rewriteValueARM64_OpRsh32x64_0(v)
	case OpRsh32x8:
		return rewriteValueARM64_OpRsh32x8_0(v)
	case OpRsh64Ux16:
		return rewriteValueARM64_OpRsh64Ux16_0(v)
	case OpRsh64Ux32:
		return rewriteValueARM64_OpRsh64Ux32_0(v)
	case OpRsh64Ux64:
		return rewriteValueARM64_OpRsh64Ux64_0(v)
	case OpRsh64Ux8:
		return rewriteValueARM64_OpRsh64Ux8_0(v)
	case OpRsh64x16:
		return rewriteValueARM64_OpRsh64x16_0(v)
	case OpRsh64x32:
		return rewriteValueARM64_OpRsh64x32_0(v)
	case OpRsh64x64:
		return rewriteValueARM64_OpRsh64x64_0(v)
	case OpRsh64x8:
		return rewriteValueARM64_OpRsh64x8_0(v)
	case OpRsh8Ux16:
		return rewriteValueARM64_OpRsh8Ux16_0(v)
	case OpRsh8Ux32:
		return rewriteValueARM64_OpRsh8Ux32_0(v)
	case OpRsh8Ux64:
		return rewriteValueARM64_OpRsh8Ux64_0(v)
	case OpRsh8Ux8:
		return rewriteValueARM64_OpRsh8Ux8_0(v)
	case OpRsh8x16:
		return rewriteValueARM64_OpRsh8x16_0(v)
	case OpRsh8x32:
		return rewriteValueARM64_OpRsh8x32_0(v)
	case OpRsh8x64:
		return rewriteValueARM64_OpRsh8x64_0(v)
	case OpRsh8x8:
		return rewriteValueARM64_OpRsh8x8_0(v)
	case OpSignExt16to32:
		return rewriteValueARM64_OpSignExt16to32_0(v)
	case OpSignExt16to64:
		return rewriteValueARM64_OpSignExt16to64_0(v)
	case OpSignExt32to64:
		return rewriteValueARM64_OpSignExt32to64_0(v)
	case OpSignExt8to16:
		return rewriteValueARM64_OpSignExt8to16_0(v)
	case OpSignExt8to32:
		return rewriteValueARM64_OpSignExt8to32_0(v)
	case OpSignExt8to64:
		return rewriteValueARM64_OpSignExt8to64_0(v)
	case OpSlicemask:
		return rewriteValueARM64_OpSlicemask_0(v)
	case OpSqrt:
		return rewriteValueARM64_OpSqrt_0(v)
	case OpStaticCall:
		return rewriteValueARM64_OpStaticCall_0(v)
	case OpStore:
		return rewriteValueARM64_OpStore_0(v)
	case OpSub16:
		return rewriteValueARM64_OpSub16_0(v)
	case OpSub32:
		return rewriteValueARM64_OpSub32_0(v)
	case OpSub32F:
		return rewriteValueARM64_OpSub32F_0(v)
	case OpSub64:
		return rewriteValueARM64_OpSub64_0(v)
	case OpSub64F:
		return rewriteValueARM64_OpSub64F_0(v)
	case OpSub8:
		return rewriteValueARM64_OpSub8_0(v)
	case OpSubPtr:
		return rewriteValueARM64_OpSubPtr_0(v)
	case OpTrunc16to8:
		return rewriteValueARM64_OpTrunc16to8_0(v)
	case OpTrunc32to16:
		return rewriteValueARM64_OpTrunc32to16_0(v)
	case OpTrunc32to8:
		return rewriteValueARM64_OpTrunc32to8_0(v)
	case OpTrunc64to16:
		return rewriteValueARM64_OpTrunc64to16_0(v)
	case OpTrunc64to32:
		return rewriteValueARM64_OpTrunc64to32_0(v)
	case OpTrunc64to8:
		return rewriteValueARM64_OpTrunc64to8_0(v)
	case OpXor16:
		return rewriteValueARM64_OpXor16_0(v)
	case OpXor32:
		return rewriteValueARM64_OpXor32_0(v)
	case OpXor64:
		return rewriteValueARM64_OpXor64_0(v)
	case OpXor8:
		return rewriteValueARM64_OpXor8_0(v)
	case OpZero:
		return rewriteValueARM64_OpZero_0(v) || rewriteValueARM64_OpZero_10(v)
	case OpZeroExt16to32:
		return rewriteValueARM64_OpZeroExt16to32_0(v)
	case OpZeroExt16to64:
		return rewriteValueARM64_OpZeroExt16to64_0(v)
	case OpZeroExt32to64:
		return rewriteValueARM64_OpZeroExt32to64_0(v)
	case OpZeroExt8to16:
		return rewriteValueARM64_OpZeroExt8to16_0(v)
	case OpZeroExt8to32:
		return rewriteValueARM64_OpZeroExt8to32_0(v)
	case OpZeroExt8to64:
		return rewriteValueARM64_OpZeroExt8to64_0(v)
	}
	// No rewrite rules are defined for this opcode.
	return false
}
// rewriteValueARM64_OpARM64ADD_0 applies the generated rewrite rules for
// (ADD x y): fold a MOVDconst operand into ADDconst, turn an ADD of a NEG
// into SUB, and merge a constant-shift operand (SLLconst/SRLconst/SRAconst)
// into the fused ADDshiftLL/ADDshiftRL/ADDshiftRA forms. ADD is commutative,
// so each rule appears twice, once per operand order. On a match, v is
// rewritten in place and the function reports true.
func rewriteValueARM64_OpARM64ADD_0(v *Value) bool {
	// match: (ADD x (MOVDconst [c]))
	// cond:
	// result: (ADDconst [c] x)
	// Each rule body is a one-iteration for loop: "break" abandons this
	// rule and falls through to try the next one.
	for {
		_ = v.Args[1] // touch Args[1] up front: panics unless v has both operands
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64ADDconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (ADD (MOVDconst [c]) x)
	// cond:
	// result: (ADDconst [c] x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		v.reset(OpARM64ADDconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (ADD x (NEG y))
	// cond:
	// result: (SUB x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64NEG {
			break
		}
		y := v_1.Args[0]
		v.reset(OpARM64SUB)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (ADD (NEG y) x)
	// cond:
	// result: (SUB x y)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64NEG {
			break
		}
		y := v_0.Args[0]
		x := v.Args[1]
		v.reset(OpARM64SUB)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (ADD x (SLLconst [c] y))
	// cond:
	// result: (ADDshiftLL x y [c])
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SLLconst {
			break
		}
		c := v_1.AuxInt
		y := v_1.Args[0]
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (ADD (SLLconst [c] y) x)
	// cond:
	// result: (ADDshiftLL x y [c])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SLLconst {
			break
		}
		c := v_0.AuxInt
		y := v_0.Args[0]
		x := v.Args[1]
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (ADD x (SRLconst [c] y))
	// cond:
	// result: (ADDshiftRL x y [c])
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRLconst {
			break
		}
		c := v_1.AuxInt
		y := v_1.Args[0]
		v.reset(OpARM64ADDshiftRL)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (ADD (SRLconst [c] y) x)
	// cond:
	// result: (ADDshiftRL x y [c])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SRLconst {
			break
		}
		c := v_0.AuxInt
		y := v_0.Args[0]
		x := v.Args[1]
		v.reset(OpARM64ADDshiftRL)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (ADD x (SRAconst [c] y))
	// cond:
	// result: (ADDshiftRA x y [c])
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRAconst {
			break
		}
		c := v_1.AuxInt
		y := v_1.Args[0]
		v.reset(OpARM64ADDshiftRA)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (ADD (SRAconst [c] y) x)
	// cond:
	// result: (ADDshiftRA x y [c])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SRAconst {
			break
		}
		c := v_0.AuxInt
		y := v_0.Args[0]
		x := v.Args[1]
		v.reset(OpARM64ADDshiftRA)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// No rule matched; leave v unchanged.
	return false
}
// rewriteValueARM64_OpARM64ADDconst_0 applies the generated rewrite rules for
// ADDconst: folding the constant into a MOVDaddr offset, removing an add of
// zero, and combining with MOVDconst/ADDconst/SUBconst operands. Each rule is
// documented by its match/cond/result header below. It reports whether v was
// rewritten. NOTE(review): generated from gen/ARM64.rules — change the rules,
// not this code.
func rewriteValueARM64_OpARM64ADDconst_0(v *Value) bool {
	// match: (ADDconst [off1] (MOVDaddr [off2] {sym} ptr))
	// cond:
	// result: (MOVDaddr [off1+off2] {sym} ptr)
	for {
		off1 := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym := v_0.Aux
		ptr := v_0.Args[0]
		v.reset(OpARM64MOVDaddr)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		return true
	}
	// match: (ADDconst [0] x)
	// cond:
	// result: x
	for {
		if v.AuxInt != 0 {
			break
		}
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (ADDconst [c] (MOVDconst [d]))
	// cond:
	// result: (MOVDconst [c+d])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = c + d
		return true
	}
	// match: (ADDconst [c] (ADDconst [d] x))
	// cond:
	// result: (ADDconst [c+d] x)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		d := v_0.AuxInt
		x := v_0.Args[0]
		v.reset(OpARM64ADDconst)
		v.AuxInt = c + d
		v.AddArg(x)
		return true
	}
	// match: (ADDconst [c] (SUBconst [d] x))
	// cond:
	// result: (ADDconst [c-d] x)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SUBconst {
			break
		}
		d := v_0.AuxInt
		x := v_0.Args[0]
		v.reset(OpARM64ADDconst)
		v.AuxInt = c - d
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64ADDshiftLL_0 applies the generated rewrite rules
// for ADDshiftLL (add with a left-shifted second operand): it folds constant
// operands, and turns a matching SRL/shift-add pair into a rotate (RORconst,
// or RORWconst for 32-bit values). It reports whether v was rewritten.
// NOTE(review): generated from gen/ARM64.rules — change the rules, not this code.
func rewriteValueARM64_OpARM64ADDshiftLL_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (ADDshiftLL (MOVDconst [c]) x [d])
	// cond:
	// result: (ADDconst [c] (SLLconst <x.Type> x [d]))
	for {
		d := v.AuxInt
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		v.reset(OpARM64ADDconst)
		v.AuxInt = c
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
		v0.AuxInt = d
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (ADDshiftLL x (MOVDconst [c]) [d])
	// cond:
	// result: (ADDconst x [int64(uint64(c)<<uint64(d))])
	for {
		d := v.AuxInt
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64ADDconst)
		v.AuxInt = int64(uint64(c) << uint64(d))
		v.AddArg(x)
		return true
	}
	// match: (ADDshiftLL [c] (SRLconst x [64-c]) x)
	// cond:
	// result: (RORconst [64-c] x)
	for {
		c := v.AuxInt
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SRLconst {
			break
		}
		if v_0.AuxInt != 64-c {
			break
		}
		x := v_0.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpARM64RORconst)
		v.AuxInt = 64 - c
		v.AddArg(x)
		return true
	}
	// match: (ADDshiftLL <t> [c] (SRLconst (MOVWUreg x) [32-c]) x)
	// cond: c < 32 && t.Size() == 4
	// result: (RORWconst [32-c] x)
	for {
		t := v.Type
		c := v.AuxInt
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SRLconst {
			break
		}
		if v_0.AuxInt != 32-c {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpARM64MOVWUreg {
			break
		}
		x := v_0_0.Args[0]
		if x != v.Args[1] {
			break
		}
		if !(c < 32 && t.Size() == 4) {
			break
		}
		v.reset(OpARM64RORWconst)
		v.AuxInt = 32 - c
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64ADDshiftRA_0 applies the generated rewrite rules
// for ADDshiftRA (add with an arithmetically right-shifted second operand):
// a constant operand on either side is folded into an ADDconst. It reports
// whether v was rewritten. NOTE(review): generated from gen/ARM64.rules —
// change the rules, not this code.
func rewriteValueARM64_OpARM64ADDshiftRA_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (ADDshiftRA (MOVDconst [c]) x [d])
	// cond:
	// result: (ADDconst [c] (SRAconst <x.Type> x [d]))
	for {
		d := v.AuxInt
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		v.reset(OpARM64ADDconst)
		v.AuxInt = c
		v0 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type)
		v0.AuxInt = d
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (ADDshiftRA x (MOVDconst [c]) [d])
	// cond:
	// result: (ADDconst x [int64(int64(c)>>uint64(d))])
	for {
		d := v.AuxInt
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64ADDconst)
		v.AuxInt = int64(int64(c) >> uint64(d))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64ADDshiftRL_0 applies the generated rewrite rules
// for ADDshiftRL (add with a logically right-shifted second operand): it folds
// constant operands and turns matching SLL/shift-add pairs into rotates
// (RORconst, or RORWconst for 32-bit values). It reports whether v was
// rewritten. NOTE(review): generated from gen/ARM64.rules — change the rules,
// not this code.
func rewriteValueARM64_OpARM64ADDshiftRL_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (ADDshiftRL (MOVDconst [c]) x [d])
	// cond:
	// result: (ADDconst [c] (SRLconst <x.Type> x [d]))
	for {
		d := v.AuxInt
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		v.reset(OpARM64ADDconst)
		v.AuxInt = c
		v0 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type)
		v0.AuxInt = d
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (ADDshiftRL x (MOVDconst [c]) [d])
	// cond:
	// result: (ADDconst x [int64(uint64(c)>>uint64(d))])
	for {
		d := v.AuxInt
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64ADDconst)
		v.AuxInt = int64(uint64(c) >> uint64(d))
		v.AddArg(x)
		return true
	}
	// match: (ADDshiftRL [c] (SLLconst x [64-c]) x)
	// cond:
	// result: (RORconst [   c] x)
	for {
		c := v.AuxInt
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SLLconst {
			break
		}
		if v_0.AuxInt != 64-c {
			break
		}
		x := v_0.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpARM64RORconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (ADDshiftRL <t> [c] (SLLconst x [32-c]) (MOVWUreg x))
	// cond: c < 32 && t.Size() == 4
	// result: (RORWconst [   c] x)
	for {
		t := v.Type
		c := v.AuxInt
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SLLconst {
			break
		}
		if v_0.AuxInt != 32-c {
			break
		}
		x := v_0.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVWUreg {
			break
		}
		if x != v_1.Args[0] {
			break
		}
		if !(c < 32 && t.Size() == 4) {
			break
		}
		v.reset(OpARM64RORWconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64AND_0 applies the first batch of generated rewrite
// rules for AND: a constant operand becomes ANDconst, (AND x x) collapses to
// x, AND with an MVN operand becomes BIC, and a shifted operand (SLLconst /
// SRLconst / SRAconst) is merged into the corresponding ANDshift* op. Rules
// beyond the tenth continue in rewriteValueARM64_OpARM64AND_10. It reports
// whether v was rewritten. NOTE(review): generated from gen/ARM64.rules —
// change the rules, not this code.
func rewriteValueARM64_OpARM64AND_0(v *Value) bool {
	// match: (AND x (MOVDconst [c]))
	// cond:
	// result: (ANDconst [c] x)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64ANDconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (AND (MOVDconst [c]) x)
	// cond:
	// result: (ANDconst [c] x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		v.reset(OpARM64ANDconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (AND x x)
	// cond:
	// result: x
	for {
		_ = v.Args[1]
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (AND x (MVN y))
	// cond:
	// result: (BIC x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MVN {
			break
		}
		y := v_1.Args[0]
		v.reset(OpARM64BIC)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (AND (MVN y) x)
	// cond:
	// result: (BIC x y)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MVN {
			break
		}
		y := v_0.Args[0]
		x := v.Args[1]
		v.reset(OpARM64BIC)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (AND x (SLLconst [c] y))
	// cond:
	// result: (ANDshiftLL x y [c])
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SLLconst {
			break
		}
		c := v_1.AuxInt
		y := v_1.Args[0]
		v.reset(OpARM64ANDshiftLL)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (AND (SLLconst [c] y) x)
	// cond:
	// result: (ANDshiftLL x y [c])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SLLconst {
			break
		}
		c := v_0.AuxInt
		y := v_0.Args[0]
		x := v.Args[1]
		v.reset(OpARM64ANDshiftLL)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (AND x (SRLconst [c] y))
	// cond:
	// result: (ANDshiftRL x y [c])
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRLconst {
			break
		}
		c := v_1.AuxInt
		y := v_1.Args[0]
		v.reset(OpARM64ANDshiftRL)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (AND (SRLconst [c] y) x)
	// cond:
	// result: (ANDshiftRL x y [c])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SRLconst {
			break
		}
		c := v_0.AuxInt
		y := v_0.Args[0]
		x := v.Args[1]
		v.reset(OpARM64ANDshiftRL)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (AND x (SRAconst [c] y))
	// cond:
	// result: (ANDshiftRA x y [c])
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRAconst {
			break
		}
		c := v_1.AuxInt
		y := v_1.Args[0]
		v.reset(OpARM64ANDshiftRA)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64AND_10 continues the generated AND rules from
// rewriteValueARM64_OpARM64AND_0 (the generator splits long rule lists into
// groups of ten): it merges a left-hand SRAconst operand into ANDshiftRA.
// It reports whether v was rewritten. NOTE(review): generated from
// gen/ARM64.rules — change the rules, not this code.
func rewriteValueARM64_OpARM64AND_10(v *Value) bool {
	// match: (AND (SRAconst [c] y) x)
	// cond:
	// result: (ANDshiftRA x y [c])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SRAconst {
			break
		}
		c := v_0.AuxInt
		y := v_0.Args[0]
		x := v.Args[1]
		v.reset(OpARM64ANDshiftRA)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64ANDconst_0 applies the generated rewrite rules for
// ANDconst: AND with 0 yields the zero constant, AND with -1 yields the
// operand unchanged, and constant operands or nested ANDconsts are folded.
// It reports whether v was rewritten. NOTE(review): generated from
// gen/ARM64.rules — change the rules, not this code.
func rewriteValueARM64_OpARM64ANDconst_0(v *Value) bool {
	// match: (ANDconst [0] _)
	// cond:
	// result: (MOVDconst [0])
	for {
		if v.AuxInt != 0 {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (ANDconst [-1] x)
	// cond:
	// result: x
	for {
		if v.AuxInt != -1 {
			break
		}
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (ANDconst [c] (MOVDconst [d]))
	// cond:
	// result: (MOVDconst [c&d])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = c & d
		return true
	}
	// match: (ANDconst [c] (ANDconst [d] x))
	// cond:
	// result: (ANDconst [c&d] x)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ANDconst {
			break
		}
		d := v_0.AuxInt
		x := v_0.Args[0]
		v.reset(OpARM64ANDconst)
		v.AuxInt = c & d
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64ANDshiftLL_0 applies the generated rewrite rules
// for ANDshiftLL: a constant operand on either side is folded into ANDconst,
// and (ANDshiftLL x y:(SLLconst x [c]) [d]) with c==d collapses to y.
// It reports whether v was rewritten. NOTE(review): generated from
// gen/ARM64.rules — change the rules, not this code.
func rewriteValueARM64_OpARM64ANDshiftLL_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (ANDshiftLL (MOVDconst [c]) x [d])
	// cond:
	// result: (ANDconst [c] (SLLconst <x.Type> x [d]))
	for {
		d := v.AuxInt
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		v.reset(OpARM64ANDconst)
		v.AuxInt = c
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
		v0.AuxInt = d
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (ANDshiftLL x (MOVDconst [c]) [d])
	// cond:
	// result: (ANDconst x [int64(uint64(c)<<uint64(d))])
	for {
		d := v.AuxInt
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64ANDconst)
		v.AuxInt = int64(uint64(c) << uint64(d))
		v.AddArg(x)
		return true
	}
	// match: (ANDshiftLL x y:(SLLconst x [c]) [d])
	// cond: c==d
	// result: y
	for {
		d := v.AuxInt
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		if y.Op != OpARM64SLLconst {
			break
		}
		c := y.AuxInt
		if x != y.Args[0] {
			break
		}
		if !(c == d) {
			break
		}
		v.reset(OpCopy)
		v.Type = y.Type
		v.AddArg(y)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64ANDshiftRA_0 applies the generated rewrite rules
// for ANDshiftRA: a constant operand on either side is folded into ANDconst,
// and (ANDshiftRA x y:(SRAconst x [c]) [d]) with c==d collapses to y.
// It reports whether v was rewritten. NOTE(review): generated from
// gen/ARM64.rules — change the rules, not this code.
func rewriteValueARM64_OpARM64ANDshiftRA_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (ANDshiftRA (MOVDconst [c]) x [d])
	// cond:
	// result: (ANDconst [c] (SRAconst <x.Type> x [d]))
	for {
		d := v.AuxInt
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		v.reset(OpARM64ANDconst)
		v.AuxInt = c
		v0 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type)
		v0.AuxInt = d
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (ANDshiftRA x (MOVDconst [c]) [d])
	// cond:
	// result: (ANDconst x [int64(int64(c)>>uint64(d))])
	for {
		d := v.AuxInt
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64ANDconst)
		v.AuxInt = int64(int64(c) >> uint64(d))
		v.AddArg(x)
		return true
	}
	// match: (ANDshiftRA x y:(SRAconst x [c]) [d])
	// cond: c==d
	// result: y
	for {
		d := v.AuxInt
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		if y.Op != OpARM64SRAconst {
			break
		}
		c := y.AuxInt
		if x != y.Args[0] {
			break
		}
		if !(c == d) {
			break
		}
		v.reset(OpCopy)
		v.Type = y.Type
		v.AddArg(y)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64ANDshiftRL_0 applies the generated rewrite rules
// for ANDshiftRL: a constant operand on either side is folded into ANDconst,
// and (ANDshiftRL x y:(SRLconst x [c]) [d]) with c==d collapses to y.
// It reports whether v was rewritten. NOTE(review): generated from
// gen/ARM64.rules — change the rules, not this code.
func rewriteValueARM64_OpARM64ANDshiftRL_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (ANDshiftRL (MOVDconst [c]) x [d])
	// cond:
	// result: (ANDconst [c] (SRLconst <x.Type> x [d]))
	for {
		d := v.AuxInt
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		v.reset(OpARM64ANDconst)
		v.AuxInt = c
		v0 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type)
		v0.AuxInt = d
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (ANDshiftRL x (MOVDconst [c]) [d])
	// cond:
	// result: (ANDconst x [int64(uint64(c)>>uint64(d))])
	for {
		d := v.AuxInt
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64ANDconst)
		v.AuxInt = int64(uint64(c) >> uint64(d))
		v.AddArg(x)
		return true
	}
	// match: (ANDshiftRL x y:(SRLconst x [c]) [d])
	// cond: c==d
	// result: y
	for {
		d := v.AuxInt
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		if y.Op != OpARM64SRLconst {
			break
		}
		c := y.AuxInt
		if x != y.Args[0] {
			break
		}
		if !(c == d) {
			break
		}
		v.reset(OpCopy)
		v.Type = y.Type
		v.AddArg(y)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64BIC_0 applies the generated rewrite rules for BIC
// (bit clear, x &^ y): a constant second operand becomes BICconst, (BIC x x)
// becomes the zero constant, and a shifted second operand (SLLconst /
// SRLconst / SRAconst) is merged into the corresponding BICshift* op.
// It reports whether v was rewritten. NOTE(review): generated from
// gen/ARM64.rules — change the rules, not this code.
func rewriteValueARM64_OpARM64BIC_0(v *Value) bool {
	// match: (BIC x (MOVDconst [c]))
	// cond:
	// result: (BICconst [c] x)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64BICconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (BIC x x)
	// cond:
	// result: (MOVDconst [0])
	for {
		_ = v.Args[1]
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (BIC x (SLLconst [c] y))
	// cond:
	// result: (BICshiftLL x y [c])
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SLLconst {
			break
		}
		c := v_1.AuxInt
		y := v_1.Args[0]
		v.reset(OpARM64BICshiftLL)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (BIC x (SRLconst [c] y))
	// cond:
	// result: (BICshiftRL x y [c])
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRLconst {
			break
		}
		c := v_1.AuxInt
		y := v_1.Args[0]
		v.reset(OpARM64BICshiftRL)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (BIC x (SRAconst [c] y))
	// cond:
	// result: (BICshiftRA x y [c])
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRAconst {
			break
		}
		c := v_1.AuxInt
		y := v_1.Args[0]
		v.reset(OpARM64BICshiftRA)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64BICconst_0 applies the generated rewrite rules for
// BICconst: clearing no bits ([0]) yields the operand, clearing all bits
// ([-1]) yields the zero constant, and a constant operand is folded to d&^c.
// It reports whether v was rewritten. NOTE(review): generated from
// gen/ARM64.rules — change the rules, not this code.
func rewriteValueARM64_OpARM64BICconst_0(v *Value) bool {
	// match: (BICconst [0] x)
	// cond:
	// result: x
	for {
		if v.AuxInt != 0 {
			break
		}
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (BICconst [-1] _)
	// cond:
	// result: (MOVDconst [0])
	for {
		if v.AuxInt != -1 {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (BICconst [c] (MOVDconst [d]))
	// cond:
	// result: (MOVDconst [d&^c])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = d &^ c
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64BICshiftLL_0 applies the generated rewrite rules
// for BICshiftLL: a constant second operand is folded into BICconst, and
// (BICshiftLL x (SLLconst x [c]) [d]) with c==d becomes the zero constant.
// It reports whether v was rewritten. NOTE(review): generated from
// gen/ARM64.rules — change the rules, not this code.
func rewriteValueARM64_OpARM64BICshiftLL_0(v *Value) bool {
	// match: (BICshiftLL x (MOVDconst [c]) [d])
	// cond:
	// result: (BICconst x [int64(uint64(c)<<uint64(d))])
	for {
		d := v.AuxInt
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64BICconst)
		v.AuxInt = int64(uint64(c) << uint64(d))
		v.AddArg(x)
		return true
	}
	// match: (BICshiftLL x (SLLconst x [c]) [d])
	// cond: c==d
	// result: (MOVDconst [0])
	for {
		d := v.AuxInt
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SLLconst {
			break
		}
		c := v_1.AuxInt
		if x != v_1.Args[0] {
			break
		}
		if !(c == d) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64BICshiftRA_0 applies the generated rewrite rules
// for BICshiftRA: a constant second operand is folded into BICconst, and
// (BICshiftRA x (SRAconst x [c]) [d]) with c==d becomes the zero constant.
// It reports whether v was rewritten. NOTE(review): generated from
// gen/ARM64.rules — change the rules, not this code.
func rewriteValueARM64_OpARM64BICshiftRA_0(v *Value) bool {
	// match: (BICshiftRA x (MOVDconst [c]) [d])
	// cond:
	// result: (BICconst x [int64(int64(c)>>uint64(d))])
	for {
		d := v.AuxInt
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64BICconst)
		v.AuxInt = int64(int64(c) >> uint64(d))
		v.AddArg(x)
		return true
	}
	// match: (BICshiftRA x (SRAconst x [c]) [d])
	// cond: c==d
	// result: (MOVDconst [0])
	for {
		d := v.AuxInt
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRAconst {
			break
		}
		c := v_1.AuxInt
		if x != v_1.Args[0] {
			break
		}
		if !(c == d) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64BICshiftRL_0 applies the generated rewrite rules
// for BICshiftRL: a constant second operand is folded into BICconst, and
// (BICshiftRL x (SRLconst x [c]) [d]) with c==d becomes the zero constant.
// It reports whether v was rewritten. NOTE(review): generated from
// gen/ARM64.rules — change the rules, not this code.
func rewriteValueARM64_OpARM64BICshiftRL_0(v *Value) bool {
	// match: (BICshiftRL x (MOVDconst [c]) [d])
	// cond:
	// result: (BICconst x [int64(uint64(c)>>uint64(d))])
	for {
		d := v.AuxInt
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64BICconst)
		v.AuxInt = int64(uint64(c) >> uint64(d))
		v.AddArg(x)
		return true
	}
	// match: (BICshiftRL x (SRLconst x [c]) [d])
	// cond: c==d
	// result: (MOVDconst [0])
	for {
		d := v.AuxInt
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRLconst {
			break
		}
		c := v_1.AuxInt
		if x != v_1.Args[0] {
			break
		}
		if !(c == d) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64CMP_0 applies the generated rewrite rules for CMP:
// a constant or shifted operand on the right becomes CMPconst / CMPshift*,
// and the same forms on the left become the flipped comparison wrapped in
// InvertFlags (since the operands swap sides, the flag result must be
// reinterpreted by the consumer). It reports whether v was rewritten.
// NOTE(review): generated from gen/ARM64.rules — change the rules, not this code.
func rewriteValueARM64_OpARM64CMP_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (CMP x (MOVDconst [c]))
	// cond:
	// result: (CMPconst [c] x)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64CMPconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (CMP (MOVDconst [c]) x)
	// cond:
	// result: (InvertFlags (CMPconst [c] x))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		v.reset(OpARM64InvertFlags)
		v0 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v0.AuxInt = c
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (CMP x (SLLconst [c] y))
	// cond:
	// result: (CMPshiftLL x y [c])
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SLLconst {
			break
		}
		c := v_1.AuxInt
		y := v_1.Args[0]
		v.reset(OpARM64CMPshiftLL)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (CMP (SLLconst [c] y) x)
	// cond:
	// result: (InvertFlags (CMPshiftLL x y [c]))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SLLconst {
			break
		}
		c := v_0.AuxInt
		y := v_0.Args[0]
		x := v.Args[1]
		v.reset(OpARM64InvertFlags)
		v0 := b.NewValue0(v.Pos, OpARM64CMPshiftLL, types.TypeFlags)
		v0.AuxInt = c
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	// match: (CMP x (SRLconst [c] y))
	// cond:
	// result: (CMPshiftRL x y [c])
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRLconst {
			break
		}
		c := v_1.AuxInt
		y := v_1.Args[0]
		v.reset(OpARM64CMPshiftRL)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (CMP (SRLconst [c] y) x)
	// cond:
	// result: (InvertFlags (CMPshiftRL x y [c]))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SRLconst {
			break
		}
		c := v_0.AuxInt
		y := v_0.Args[0]
		x := v.Args[1]
		v.reset(OpARM64InvertFlags)
		v0 := b.NewValue0(v.Pos, OpARM64CMPshiftRL, types.TypeFlags)
		v0.AuxInt = c
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	// match: (CMP x (SRAconst [c] y))
	// cond:
	// result: (CMPshiftRA x y [c])
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRAconst {
			break
		}
		c := v_1.AuxInt
		y := v_1.Args[0]
		v.reset(OpARM64CMPshiftRA)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (CMP (SRAconst [c] y) x)
	// cond:
	// result: (InvertFlags (CMPshiftRA x y [c]))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SRAconst {
			break
		}
		c := v_0.AuxInt
		y := v_0.Args[0]
		x := v.Args[1]
		v.reset(OpARM64InvertFlags)
		v0 := b.NewValue0(v.Pos, OpARM64CMPshiftRA, types.TypeFlags)
		v0.AuxInt = c
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64CMPW_0 applies the generated rewrite rules for
// CMPW (32-bit compare): a constant on the right becomes CMPWconst (the
// constant is truncated via int64(int32(c))), and a constant on the left
// becomes CMPWconst wrapped in InvertFlags to account for the swapped
// operands. It reports whether v was rewritten. NOTE(review): generated from
// gen/ARM64.rules — change the rules, not this code.
func rewriteValueARM64_OpARM64CMPW_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (CMPW x (MOVDconst [c]))
	// cond:
	// result: (CMPWconst [int64(int32(c))] x)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64CMPWconst)
		v.AuxInt = int64(int32(c))
		v.AddArg(x)
		return true
	}
	// match: (CMPW (MOVDconst [c]) x)
	// cond:
	// result: (InvertFlags (CMPWconst [int64(int32(c))] x))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		v.reset(OpARM64InvertFlags)
		v0 := b.NewValue0(v.Pos, OpARM64CMPWconst, types.TypeFlags)
		v0.AuxInt = int64(int32(c))
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64CMPWconst_0 folds a 32-bit compare-with-constant
// into a concrete flags value: when the operand is itself a constant the
// comparison is evaluated at compile time (both signed and unsigned outcomes
// are encoded in the Flag op chosen), and when the operand is a zero-extended
// byte/halfword (MOVBUreg/MOVHUreg) that is provably below the constant the
// result is known to be less-than. It reports whether a rewrite was applied.
//
// NOTE: generated from gen/ARM64.rules — change the rules file, not this code.
func rewriteValueARM64_OpARM64CMPWconst_0(v *Value) bool {
	// match: (CMPWconst (MOVDconst [x]) [y])
	// cond: int32(x)==int32(y)
	// result: (FlagEQ)
	for {
		y := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := v_0.AuxInt
		if !(int32(x) == int32(y)) {
			break
		}
		v.reset(OpARM64FlagEQ)
		return true
	}
	// match: (CMPWconst (MOVDconst [x]) [y])
	// cond: int32(x)<int32(y) && uint32(x)<uint32(y)
	// result: (FlagLT_ULT)
	for {
		y := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := v_0.AuxInt
		if !(int32(x) < int32(y) && uint32(x) < uint32(y)) {
			break
		}
		v.reset(OpARM64FlagLT_ULT)
		return true
	}
	// match: (CMPWconst (MOVDconst [x]) [y])
	// cond: int32(x)<int32(y) && uint32(x)>uint32(y)
	// result: (FlagLT_UGT)
	for {
		y := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := v_0.AuxInt
		if !(int32(x) < int32(y) && uint32(x) > uint32(y)) {
			break
		}
		v.reset(OpARM64FlagLT_UGT)
		return true
	}
	// match: (CMPWconst (MOVDconst [x]) [y])
	// cond: int32(x)>int32(y) && uint32(x)<uint32(y)
	// result: (FlagGT_ULT)
	for {
		y := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := v_0.AuxInt
		if !(int32(x) > int32(y) && uint32(x) < uint32(y)) {
			break
		}
		v.reset(OpARM64FlagGT_ULT)
		return true
	}
	// match: (CMPWconst (MOVDconst [x]) [y])
	// cond: int32(x)>int32(y) && uint32(x)>uint32(y)
	// result: (FlagGT_UGT)
	for {
		y := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := v_0.AuxInt
		if !(int32(x) > int32(y) && uint32(x) > uint32(y)) {
			break
		}
		v.reset(OpARM64FlagGT_UGT)
		return true
	}
	// match: (CMPWconst (MOVBUreg _) [c])
	// cond: 0xff < int32(c)
	// result: (FlagLT_ULT)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVBUreg {
			break
		}
		// A zero-extended byte is at most 0xff, so it is below c both
		// signed and unsigned.
		if !(0xff < int32(c)) {
			break
		}
		v.reset(OpARM64FlagLT_ULT)
		return true
	}
	// match: (CMPWconst (MOVHUreg _) [c])
	// cond: 0xffff < int32(c)
	// result: (FlagLT_ULT)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVHUreg {
			break
		}
		if !(0xffff < int32(c)) {
			break
		}
		v.reset(OpARM64FlagLT_ULT)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64CMPconst_0 folds a 64-bit compare-with-constant
// into a concrete flags value. Constant operands are compared at compile time
// (choosing the Flag op that encodes both the signed and unsigned outcome);
// zero-extended narrow operands (MOVBUreg/MOVHUreg/MOVWUreg), masked operands
// (ANDconst) and right-shifted operands (SRLconst) with provable upper bounds
// below the constant fold to FlagLT_ULT. It reports whether a rewrite was
// applied.
//
// NOTE: generated from gen/ARM64.rules — change the rules file, not this code.
func rewriteValueARM64_OpARM64CMPconst_0(v *Value) bool {
	// match: (CMPconst (MOVDconst [x]) [y])
	// cond: x==y
	// result: (FlagEQ)
	for {
		y := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := v_0.AuxInt
		if !(x == y) {
			break
		}
		v.reset(OpARM64FlagEQ)
		return true
	}
	// match: (CMPconst (MOVDconst [x]) [y])
	// cond: int64(x)<int64(y) && uint64(x)<uint64(y)
	// result: (FlagLT_ULT)
	for {
		y := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := v_0.AuxInt
		if !(int64(x) < int64(y) && uint64(x) < uint64(y)) {
			break
		}
		v.reset(OpARM64FlagLT_ULT)
		return true
	}
	// match: (CMPconst (MOVDconst [x]) [y])
	// cond: int64(x)<int64(y) && uint64(x)>uint64(y)
	// result: (FlagLT_UGT)
	for {
		y := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := v_0.AuxInt
		if !(int64(x) < int64(y) && uint64(x) > uint64(y)) {
			break
		}
		v.reset(OpARM64FlagLT_UGT)
		return true
	}
	// match: (CMPconst (MOVDconst [x]) [y])
	// cond: int64(x)>int64(y) && uint64(x)<uint64(y)
	// result: (FlagGT_ULT)
	for {
		y := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := v_0.AuxInt
		if !(int64(x) > int64(y) && uint64(x) < uint64(y)) {
			break
		}
		v.reset(OpARM64FlagGT_ULT)
		return true
	}
	// match: (CMPconst (MOVDconst [x]) [y])
	// cond: int64(x)>int64(y) && uint64(x)>uint64(y)
	// result: (FlagGT_UGT)
	for {
		y := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := v_0.AuxInt
		if !(int64(x) > int64(y) && uint64(x) > uint64(y)) {
			break
		}
		v.reset(OpARM64FlagGT_UGT)
		return true
	}
	// match: (CMPconst (MOVBUreg _) [c])
	// cond: 0xff < c
	// result: (FlagLT_ULT)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVBUreg {
			break
		}
		if !(0xff < c) {
			break
		}
		v.reset(OpARM64FlagLT_ULT)
		return true
	}
	// match: (CMPconst (MOVHUreg _) [c])
	// cond: 0xffff < c
	// result: (FlagLT_ULT)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVHUreg {
			break
		}
		if !(0xffff < c) {
			break
		}
		v.reset(OpARM64FlagLT_ULT)
		return true
	}
	// match: (CMPconst (MOVWUreg _) [c])
	// cond: 0xffffffff < c
	// result: (FlagLT_ULT)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVWUreg {
			break
		}
		if !(0xffffffff < c) {
			break
		}
		v.reset(OpARM64FlagLT_ULT)
		return true
	}
	// match: (CMPconst (ANDconst _ [m]) [n])
	// cond: 0 <= m && m < n
	// result: (FlagLT_ULT)
	for {
		n := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ANDconst {
			break
		}
		m := v_0.AuxInt
		// The masked value is at most m, so it is strictly below n.
		if !(0 <= m && m < n) {
			break
		}
		v.reset(OpARM64FlagLT_ULT)
		return true
	}
	// match: (CMPconst (SRLconst _ [c]) [n])
	// cond: 0 <= n && 0 < c && c <= 63 && (1<<uint64(64-c)) <= uint64(n)
	// result: (FlagLT_ULT)
	for {
		n := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SRLconst {
			break
		}
		c := v_0.AuxInt
		// A logical right shift by c bounds the value below 1<<(64-c).
		if !(0 <= n && 0 < c && c <= 63 && (1<<uint64(64-c)) <= uint64(n)) {
			break
		}
		v.reset(OpARM64FlagLT_ULT)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64CMPshiftLL_0 simplifies a compare whose second
// operand is left-shifted by a constant d: a constant left operand forces the
// shift to be materialized and the flags inverted (operand order swaps), while
// a constant right operand is shifted at compile time, yielding a plain
// CMPconst. It reports whether a rewrite was applied.
//
// NOTE: generated from gen/ARM64.rules — change the rules file, not this code.
func rewriteValueARM64_OpARM64CMPshiftLL_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (CMPshiftLL (MOVDconst [c]) x [d])
	// cond:
	// result: (InvertFlags (CMPconst [c] (SLLconst <x.Type> x [d])))
	for {
		d := v.AuxInt
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		v.reset(OpARM64InvertFlags)
		v0 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v0.AuxInt = c
		v1 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
		v1.AuxInt = d
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// match: (CMPshiftLL x (MOVDconst [c]) [d])
	// cond:
	// result: (CMPconst x [int64(uint64(c)<<uint64(d))])
	for {
		d := v.AuxInt
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64CMPconst)
		v.AuxInt = int64(uint64(c) << uint64(d))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64CMPshiftRA_0 simplifies a compare whose second
// operand is arithmetically right-shifted by a constant d: a constant left
// operand materializes the shift and inverts the flags; a constant right
// operand is shifted (sign-preserving, int64 >>) at compile time into a plain
// CMPconst. It reports whether a rewrite was applied.
//
// NOTE: generated from gen/ARM64.rules — change the rules file, not this code.
func rewriteValueARM64_OpARM64CMPshiftRA_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (CMPshiftRA (MOVDconst [c]) x [d])
	// cond:
	// result: (InvertFlags (CMPconst [c] (SRAconst <x.Type> x [d])))
	for {
		d := v.AuxInt
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		v.reset(OpARM64InvertFlags)
		v0 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v0.AuxInt = c
		v1 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type)
		v1.AuxInt = d
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// match: (CMPshiftRA x (MOVDconst [c]) [d])
	// cond:
	// result: (CMPconst x [int64(int64(c)>>uint64(d))])
	for {
		d := v.AuxInt
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64CMPconst)
		v.AuxInt = int64(int64(c) >> uint64(d))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64CMPshiftRL_0 simplifies a compare whose second
// operand is logically right-shifted by a constant d: a constant left operand
// materializes the shift and inverts the flags; a constant right operand is
// shifted (zero-filling, uint64 >>) at compile time into a plain CMPconst.
// It reports whether a rewrite was applied.
//
// NOTE: generated from gen/ARM64.rules — change the rules file, not this code.
func rewriteValueARM64_OpARM64CMPshiftRL_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (CMPshiftRL (MOVDconst [c]) x [d])
	// cond:
	// result: (InvertFlags (CMPconst [c] (SRLconst <x.Type> x [d])))
	for {
		d := v.AuxInt
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		v.reset(OpARM64InvertFlags)
		v0 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v0.AuxInt = c
		v1 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type)
		v1.AuxInt = d
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// match: (CMPshiftRL x (MOVDconst [c]) [d])
	// cond:
	// result: (CMPconst x [int64(uint64(c)>>uint64(d))])
	for {
		d := v.AuxInt
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64CMPconst)
		v.AuxInt = int64(uint64(c) >> uint64(d))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64CSELULT_0 simplifies a conditional select on
// unsigned-less-than: a constant-zero second operand becomes the cheaper
// CSELULT0 form, and a statically known flags argument (FlagEQ, FlagLT_*,
// FlagGT_*) selects one operand outright — the first when the unsigned
// outcome is ULT, the second otherwise. It reports whether a rewrite was
// applied.
//
// NOTE: generated from gen/ARM64.rules — change the rules file, not this code.
func rewriteValueARM64_OpARM64CSELULT_0(v *Value) bool {
	// match: (CSELULT x (MOVDconst [0]) flag)
	// cond:
	// result: (CSELULT0 x flag)
	for {
		_ = v.Args[2]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		flag := v.Args[2]
		v.reset(OpARM64CSELULT0)
		v.AddArg(x)
		v.AddArg(flag)
		return true
	}
	// match: (CSELULT _ y (FlagEQ))
	// cond:
	// result: y
	for {
		_ = v.Args[2]
		y := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64FlagEQ {
			break
		}
		v.reset(OpCopy)
		v.Type = y.Type
		v.AddArg(y)
		return true
	}
	// match: (CSELULT x _ (FlagLT_ULT))
	// cond:
	// result: x
	for {
		_ = v.Args[2]
		x := v.Args[0]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64FlagLT_ULT {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (CSELULT _ y (FlagLT_UGT))
	// cond:
	// result: y
	for {
		_ = v.Args[2]
		y := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64FlagLT_UGT {
			break
		}
		v.reset(OpCopy)
		v.Type = y.Type
		v.AddArg(y)
		return true
	}
	// match: (CSELULT x _ (FlagGT_ULT))
	// cond:
	// result: x
	for {
		_ = v.Args[2]
		x := v.Args[0]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64FlagGT_ULT {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (CSELULT _ y (FlagGT_UGT))
	// cond:
	// result: y
	for {
		_ = v.Args[2]
		y := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64FlagGT_UGT {
			break
		}
		v.reset(OpCopy)
		v.Type = y.Type
		v.AddArg(y)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64CSELULT0_0 simplifies a conditional select whose
// "else" value is an implicit zero: a statically known flags argument selects
// either the first operand (when the unsigned outcome is ULT) or the constant
// zero. It reports whether a rewrite was applied.
//
// NOTE: generated from gen/ARM64.rules — change the rules file, not this code.
func rewriteValueARM64_OpARM64CSELULT0_0(v *Value) bool {
	// match: (CSELULT0 _ (FlagEQ))
	// cond:
	// result: (MOVDconst [0])
	for {
		_ = v.Args[1]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64FlagEQ {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (CSELULT0 x (FlagLT_ULT))
	// cond:
	// result: x
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64FlagLT_ULT {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (CSELULT0 _ (FlagLT_UGT))
	// cond:
	// result: (MOVDconst [0])
	for {
		_ = v.Args[1]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64FlagLT_UGT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (CSELULT0 x (FlagGT_ULT))
	// cond:
	// result: x
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64FlagGT_ULT {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (CSELULT0 _ (FlagGT_UGT))
	// cond:
	// result: (MOVDconst [0])
	for {
		_ = v.Args[1]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64FlagGT_UGT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	return false
}
  2671  func rewriteValueARM64_OpARM64DIV_0(v *Value) bool {
  2672  	// match: (DIV (MOVDconst [c]) (MOVDconst [d]))
  2673  	// cond:
  2674  	// result: (MOVDconst [int64(c)/int64(d)])
  2675  	for {
  2676  		_ = v.Args[1]
  2677  		v_0 := v.Args[0]
  2678  		if v_0.Op != OpARM64MOVDconst {
  2679  			break
  2680  		}
  2681  		c := v_0.AuxInt
  2682  		v_1 := v.Args[1]
  2683  		if v_1.Op != OpARM64MOVDconst {
  2684  			break
  2685  		}
  2686  		d := v_1.AuxInt
  2687  		v.reset(OpARM64MOVDconst)
  2688  		v.AuxInt = int64(c) / int64(d)
  2689  		return true
  2690  	}
  2691  	return false
  2692  }
  2693  func rewriteValueARM64_OpARM64DIVW_0(v *Value) bool {
  2694  	// match: (DIVW (MOVDconst [c]) (MOVDconst [d]))
  2695  	// cond:
  2696  	// result: (MOVDconst [int64(int32(c)/int32(d))])
  2697  	for {
  2698  		_ = v.Args[1]
  2699  		v_0 := v.Args[0]
  2700  		if v_0.Op != OpARM64MOVDconst {
  2701  			break
  2702  		}
  2703  		c := v_0.AuxInt
  2704  		v_1 := v.Args[1]
  2705  		if v_1.Op != OpARM64MOVDconst {
  2706  			break
  2707  		}
  2708  		d := v_1.AuxInt
  2709  		v.reset(OpARM64MOVDconst)
  2710  		v.AuxInt = int64(int32(c) / int32(d))
  2711  		return true
  2712  	}
  2713  	return false
  2714  }
// rewriteValueARM64_OpARM64Equal_0 folds an Equal of statically known flags
// to the boolean constant 1 (FlagEQ) or 0 (any LT/GT flag combination), and
// drops InvertFlags wrappers — equality is unaffected by swapping comparison
// operands. It reports whether a rewrite was applied.
//
// NOTE: generated from gen/ARM64.rules — change the rules file, not this code.
func rewriteValueARM64_OpARM64Equal_0(v *Value) bool {
	// match: (Equal (FlagEQ))
	// cond:
	// result: (MOVDconst [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagEQ {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	}
	// match: (Equal (FlagLT_ULT))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagLT_ULT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (Equal (FlagLT_UGT))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagLT_UGT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (Equal (FlagGT_ULT))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagGT_ULT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (Equal (FlagGT_UGT))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagGT_UGT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (Equal (InvertFlags x))
	// cond:
	// result: (Equal x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64InvertFlags {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64Equal)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64FMOVDload_0 folds address arithmetic into a
// double-precision FP load: an ADDconst base merges its offset into the
// load's AuxInt, and a MOVDaddr base merges both offset and symbol (when the
// symbols are mergeable). Both folds require the combined offset to fit in 32
// bits, and skip SB-relative addressing in shared (PIC) mode. It reports
// whether a rewrite was applied.
//
// NOTE: generated from gen/ARM64.rules — change the rules file, not this code.
func rewriteValueARM64_OpARM64FMOVDload_0(v *Value) bool {
	b := v.Block
	_ = b
	config := b.Func.Config
	_ = config
	// match: (FMOVDload [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2) 	&& (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (FMOVDload [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64FMOVDload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (FMOVDload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) 	&& (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (FMOVDload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64FMOVDload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64FMOVDstore_0 folds address arithmetic into a
// double-precision FP store: an ADDconst base merges its offset into the
// store's AuxInt, and a MOVDaddr base merges both offset and symbol (when the
// symbols are mergeable). Both folds require the combined offset to fit in 32
// bits, and skip SB-relative addressing in shared (PIC) mode. It reports
// whether a rewrite was applied.
//
// NOTE: generated from gen/ARM64.rules — change the rules file, not this code.
func rewriteValueARM64_OpARM64FMOVDstore_0(v *Value) bool {
	b := v.Block
	_ = b
	config := b.Func.Config
	_ = config
	// match: (FMOVDstore [off1] {sym} (ADDconst [off2] ptr) val mem)
	// cond: is32Bit(off1+off2) 	&& (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (FMOVDstore [off1+off2] {sym} ptr val mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64FMOVDstore)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (FMOVDstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) 	&& (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (FMOVDstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64FMOVDstore)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64FMOVSload_0 folds address arithmetic into a
// single-precision FP load: an ADDconst base merges its offset into the
// load's AuxInt, and a MOVDaddr base merges both offset and symbol (when the
// symbols are mergeable). Both folds require the combined offset to fit in 32
// bits, and skip SB-relative addressing in shared (PIC) mode. It reports
// whether a rewrite was applied.
//
// NOTE: generated from gen/ARM64.rules — change the rules file, not this code.
func rewriteValueARM64_OpARM64FMOVSload_0(v *Value) bool {
	b := v.Block
	_ = b
	config := b.Func.Config
	_ = config
	// match: (FMOVSload [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2) 	&& (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (FMOVSload [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64FMOVSload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (FMOVSload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) 	&& (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (FMOVSload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64FMOVSload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64FMOVSstore_0 folds address arithmetic into a
// single-precision FP store: an ADDconst base merges its offset into the
// store's AuxInt, and a MOVDaddr base merges both offset and symbol (when the
// symbols are mergeable). Both folds require the combined offset to fit in 32
// bits, and skip SB-relative addressing in shared (PIC) mode. It reports
// whether a rewrite was applied.
//
// NOTE: generated from gen/ARM64.rules — change the rules file, not this code.
func rewriteValueARM64_OpARM64FMOVSstore_0(v *Value) bool {
	b := v.Block
	_ = b
	config := b.Func.Config
	_ = config
	// match: (FMOVSstore [off1] {sym} (ADDconst [off2] ptr) val mem)
	// cond: is32Bit(off1+off2) 	&& (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (FMOVSstore [off1+off2] {sym} ptr val mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64FMOVSstore)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (FMOVSstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) 	&& (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (FMOVSstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64FMOVSstore)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64GreaterEqual_0 folds a signed >= test of
// statically known flags to the boolean constant 1 (FlagEQ, FlagGT_*) or 0
// (FlagLT_*), and turns GreaterEqual of InvertFlags into LessEqual — the
// comparison operands were swapped, so the predicate flips. It reports
// whether a rewrite was applied.
//
// NOTE: generated from gen/ARM64.rules — change the rules file, not this code.
func rewriteValueARM64_OpARM64GreaterEqual_0(v *Value) bool {
	// match: (GreaterEqual (FlagEQ))
	// cond:
	// result: (MOVDconst [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagEQ {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	}
	// match: (GreaterEqual (FlagLT_ULT))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagLT_ULT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (GreaterEqual (FlagLT_UGT))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagLT_UGT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (GreaterEqual (FlagGT_ULT))
	// cond:
	// result: (MOVDconst [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagGT_ULT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	}
	// match: (GreaterEqual (FlagGT_UGT))
	// cond:
	// result: (MOVDconst [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagGT_UGT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	}
	// match: (GreaterEqual (InvertFlags x))
	// cond:
	// result: (LessEqual x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64InvertFlags {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64LessEqual)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64GreaterEqualU_0 folds an unsigned >= test of
// statically known flags to the boolean constant 1 (FlagEQ, *_UGT) or 0
// (*_ULT) — only the unsigned half of the flag encoding matters here — and
// turns GreaterEqualU of InvertFlags into LessEqualU (swapped operands flip
// the predicate). It reports whether a rewrite was applied.
//
// NOTE: generated from gen/ARM64.rules — change the rules file, not this code.
func rewriteValueARM64_OpARM64GreaterEqualU_0(v *Value) bool {
	// match: (GreaterEqualU (FlagEQ))
	// cond:
	// result: (MOVDconst [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagEQ {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	}
	// match: (GreaterEqualU (FlagLT_ULT))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagLT_ULT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (GreaterEqualU (FlagLT_UGT))
	// cond:
	// result: (MOVDconst [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagLT_UGT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	}
	// match: (GreaterEqualU (FlagGT_ULT))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagGT_ULT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (GreaterEqualU (FlagGT_UGT))
	// cond:
	// result: (MOVDconst [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagGT_UGT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	}
	// match: (GreaterEqualU (InvertFlags x))
	// cond:
	// result: (LessEqualU x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64InvertFlags {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64LessEqualU)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64GreaterThan_0 folds a signed > test of statically
// known flags to the boolean constant 1 (FlagGT_*) or 0 (FlagEQ, FlagLT_*),
// and turns GreaterThan of InvertFlags into LessThan (swapped operands flip
// the predicate). It reports whether a rewrite was applied.
//
// NOTE: generated from gen/ARM64.rules — change the rules file, not this code.
func rewriteValueARM64_OpARM64GreaterThan_0(v *Value) bool {
	// match: (GreaterThan (FlagEQ))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagEQ {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (GreaterThan (FlagLT_ULT))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagLT_ULT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (GreaterThan (FlagLT_UGT))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagLT_UGT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (GreaterThan (FlagGT_ULT))
	// cond:
	// result: (MOVDconst [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagGT_ULT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	}
	// match: (GreaterThan (FlagGT_UGT))
	// cond:
	// result: (MOVDconst [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagGT_UGT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	}
	// match: (GreaterThan (InvertFlags x))
	// cond:
	// result: (LessThan x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64InvertFlags {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64LessThan)
		v.AddArg(x)
		return true
	}
	return false
}
  3251  func rewriteValueARM64_OpARM64GreaterThanU_0(v *Value) bool {
  3252  	// match: (GreaterThanU (FlagEQ))
  3253  	// cond:
  3254  	// result: (MOVDconst [0])
  3255  	for {
  3256  		v_0 := v.Args[0]
  3257  		if v_0.Op != OpARM64FlagEQ {
  3258  			break
  3259  		}
  3260  		v.reset(OpARM64MOVDconst)
  3261  		v.AuxInt = 0
  3262  		return true
  3263  	}
  3264  	// match: (GreaterThanU (FlagLT_ULT))
  3265  	// cond:
  3266  	// result: (MOVDconst [0])
  3267  	for {
  3268  		v_0 := v.Args[0]
  3269  		if v_0.Op != OpARM64FlagLT_ULT {
  3270  			break
  3271  		}
  3272  		v.reset(OpARM64MOVDconst)
  3273  		v.AuxInt = 0
  3274  		return true
  3275  	}
  3276  	// match: (GreaterThanU (FlagLT_UGT))
  3277  	// cond:
  3278  	// result: (MOVDconst [1])
  3279  	for {
  3280  		v_0 := v.Args[0]
  3281  		if v_0.Op != OpARM64FlagLT_UGT {
  3282  			break
  3283  		}
  3284  		v.reset(OpARM64MOVDconst)
  3285  		v.AuxInt = 1
  3286  		return true
  3287  	}
  3288  	// match: (GreaterThanU (FlagGT_ULT))
  3289  	// cond:
  3290  	// result: (MOVDconst [0])
  3291  	for {
  3292  		v_0 := v.Args[0]
  3293  		if v_0.Op != OpARM64FlagGT_ULT {
  3294  			break
  3295  		}
  3296  		v.reset(OpARM64MOVDconst)
  3297  		v.AuxInt = 0
  3298  		return true
  3299  	}
  3300  	// match: (GreaterThanU (FlagGT_UGT))
  3301  	// cond:
  3302  	// result: (MOVDconst [1])
  3303  	for {
  3304  		v_0 := v.Args[0]
  3305  		if v_0.Op != OpARM64FlagGT_UGT {
  3306  			break
  3307  		}
  3308  		v.reset(OpARM64MOVDconst)
  3309  		v.AuxInt = 1
  3310  		return true
  3311  	}
  3312  	// match: (GreaterThanU (InvertFlags x))
  3313  	// cond:
  3314  	// result: (LessThanU x)
  3315  	for {
  3316  		v_0 := v.Args[0]
  3317  		if v_0.Op != OpARM64InvertFlags {
  3318  			break
  3319  		}
  3320  		x := v_0.Args[0]
  3321  		v.reset(OpARM64LessThanU)
  3322  		v.AddArg(x)
  3323  		return true
  3324  	}
  3325  	return false
  3326  }
  3327  func rewriteValueARM64_OpARM64LessEqual_0(v *Value) bool {
  3328  	// match: (LessEqual (FlagEQ))
  3329  	// cond:
  3330  	// result: (MOVDconst [1])
  3331  	for {
  3332  		v_0 := v.Args[0]
  3333  		if v_0.Op != OpARM64FlagEQ {
  3334  			break
  3335  		}
  3336  		v.reset(OpARM64MOVDconst)
  3337  		v.AuxInt = 1
  3338  		return true
  3339  	}
  3340  	// match: (LessEqual (FlagLT_ULT))
  3341  	// cond:
  3342  	// result: (MOVDconst [1])
  3343  	for {
  3344  		v_0 := v.Args[0]
  3345  		if v_0.Op != OpARM64FlagLT_ULT {
  3346  			break
  3347  		}
  3348  		v.reset(OpARM64MOVDconst)
  3349  		v.AuxInt = 1
  3350  		return true
  3351  	}
  3352  	// match: (LessEqual (FlagLT_UGT))
  3353  	// cond:
  3354  	// result: (MOVDconst [1])
  3355  	for {
  3356  		v_0 := v.Args[0]
  3357  		if v_0.Op != OpARM64FlagLT_UGT {
  3358  			break
  3359  		}
  3360  		v.reset(OpARM64MOVDconst)
  3361  		v.AuxInt = 1
  3362  		return true
  3363  	}
  3364  	// match: (LessEqual (FlagGT_ULT))
  3365  	// cond:
  3366  	// result: (MOVDconst [0])
  3367  	for {
  3368  		v_0 := v.Args[0]
  3369  		if v_0.Op != OpARM64FlagGT_ULT {
  3370  			break
  3371  		}
  3372  		v.reset(OpARM64MOVDconst)
  3373  		v.AuxInt = 0
  3374  		return true
  3375  	}
  3376  	// match: (LessEqual (FlagGT_UGT))
  3377  	// cond:
  3378  	// result: (MOVDconst [0])
  3379  	for {
  3380  		v_0 := v.Args[0]
  3381  		if v_0.Op != OpARM64FlagGT_UGT {
  3382  			break
  3383  		}
  3384  		v.reset(OpARM64MOVDconst)
  3385  		v.AuxInt = 0
  3386  		return true
  3387  	}
  3388  	// match: (LessEqual (InvertFlags x))
  3389  	// cond:
  3390  	// result: (GreaterEqual x)
  3391  	for {
  3392  		v_0 := v.Args[0]
  3393  		if v_0.Op != OpARM64InvertFlags {
  3394  			break
  3395  		}
  3396  		x := v_0.Args[0]
  3397  		v.reset(OpARM64GreaterEqual)
  3398  		v.AddArg(x)
  3399  		return true
  3400  	}
  3401  	return false
  3402  }
  3403  func rewriteValueARM64_OpARM64LessEqualU_0(v *Value) bool {
  3404  	// match: (LessEqualU (FlagEQ))
  3405  	// cond:
  3406  	// result: (MOVDconst [1])
  3407  	for {
  3408  		v_0 := v.Args[0]
  3409  		if v_0.Op != OpARM64FlagEQ {
  3410  			break
  3411  		}
  3412  		v.reset(OpARM64MOVDconst)
  3413  		v.AuxInt = 1
  3414  		return true
  3415  	}
  3416  	// match: (LessEqualU (FlagLT_ULT))
  3417  	// cond:
  3418  	// result: (MOVDconst [1])
  3419  	for {
  3420  		v_0 := v.Args[0]
  3421  		if v_0.Op != OpARM64FlagLT_ULT {
  3422  			break
  3423  		}
  3424  		v.reset(OpARM64MOVDconst)
  3425  		v.AuxInt = 1
  3426  		return true
  3427  	}
  3428  	// match: (LessEqualU (FlagLT_UGT))
  3429  	// cond:
  3430  	// result: (MOVDconst [0])
  3431  	for {
  3432  		v_0 := v.Args[0]
  3433  		if v_0.Op != OpARM64FlagLT_UGT {
  3434  			break
  3435  		}
  3436  		v.reset(OpARM64MOVDconst)
  3437  		v.AuxInt = 0
  3438  		return true
  3439  	}
  3440  	// match: (LessEqualU (FlagGT_ULT))
  3441  	// cond:
  3442  	// result: (MOVDconst [1])
  3443  	for {
  3444  		v_0 := v.Args[0]
  3445  		if v_0.Op != OpARM64FlagGT_ULT {
  3446  			break
  3447  		}
  3448  		v.reset(OpARM64MOVDconst)
  3449  		v.AuxInt = 1
  3450  		return true
  3451  	}
  3452  	// match: (LessEqualU (FlagGT_UGT))
  3453  	// cond:
  3454  	// result: (MOVDconst [0])
  3455  	for {
  3456  		v_0 := v.Args[0]
  3457  		if v_0.Op != OpARM64FlagGT_UGT {
  3458  			break
  3459  		}
  3460  		v.reset(OpARM64MOVDconst)
  3461  		v.AuxInt = 0
  3462  		return true
  3463  	}
  3464  	// match: (LessEqualU (InvertFlags x))
  3465  	// cond:
  3466  	// result: (GreaterEqualU x)
  3467  	for {
  3468  		v_0 := v.Args[0]
  3469  		if v_0.Op != OpARM64InvertFlags {
  3470  			break
  3471  		}
  3472  		x := v_0.Args[0]
  3473  		v.reset(OpARM64GreaterEqualU)
  3474  		v.AddArg(x)
  3475  		return true
  3476  	}
  3477  	return false
  3478  }
  3479  func rewriteValueARM64_OpARM64LessThan_0(v *Value) bool {
  3480  	// match: (LessThan (FlagEQ))
  3481  	// cond:
  3482  	// result: (MOVDconst [0])
  3483  	for {
  3484  		v_0 := v.Args[0]
  3485  		if v_0.Op != OpARM64FlagEQ {
  3486  			break
  3487  		}
  3488  		v.reset(OpARM64MOVDconst)
  3489  		v.AuxInt = 0
  3490  		return true
  3491  	}
  3492  	// match: (LessThan (FlagLT_ULT))
  3493  	// cond:
  3494  	// result: (MOVDconst [1])
  3495  	for {
  3496  		v_0 := v.Args[0]
  3497  		if v_0.Op != OpARM64FlagLT_ULT {
  3498  			break
  3499  		}
  3500  		v.reset(OpARM64MOVDconst)
  3501  		v.AuxInt = 1
  3502  		return true
  3503  	}
  3504  	// match: (LessThan (FlagLT_UGT))
  3505  	// cond:
  3506  	// result: (MOVDconst [1])
  3507  	for {
  3508  		v_0 := v.Args[0]
  3509  		if v_0.Op != OpARM64FlagLT_UGT {
  3510  			break
  3511  		}
  3512  		v.reset(OpARM64MOVDconst)
  3513  		v.AuxInt = 1
  3514  		return true
  3515  	}
  3516  	// match: (LessThan (FlagGT_ULT))
  3517  	// cond:
  3518  	// result: (MOVDconst [0])
  3519  	for {
  3520  		v_0 := v.Args[0]
  3521  		if v_0.Op != OpARM64FlagGT_ULT {
  3522  			break
  3523  		}
  3524  		v.reset(OpARM64MOVDconst)
  3525  		v.AuxInt = 0
  3526  		return true
  3527  	}
  3528  	// match: (LessThan (FlagGT_UGT))
  3529  	// cond:
  3530  	// result: (MOVDconst [0])
  3531  	for {
  3532  		v_0 := v.Args[0]
  3533  		if v_0.Op != OpARM64FlagGT_UGT {
  3534  			break
  3535  		}
  3536  		v.reset(OpARM64MOVDconst)
  3537  		v.AuxInt = 0
  3538  		return true
  3539  	}
  3540  	// match: (LessThan (InvertFlags x))
  3541  	// cond:
  3542  	// result: (GreaterThan x)
  3543  	for {
  3544  		v_0 := v.Args[0]
  3545  		if v_0.Op != OpARM64InvertFlags {
  3546  			break
  3547  		}
  3548  		x := v_0.Args[0]
  3549  		v.reset(OpARM64GreaterThan)
  3550  		v.AddArg(x)
  3551  		return true
  3552  	}
  3553  	return false
  3554  }
  3555  func rewriteValueARM64_OpARM64LessThanU_0(v *Value) bool {
  3556  	// match: (LessThanU (FlagEQ))
  3557  	// cond:
  3558  	// result: (MOVDconst [0])
  3559  	for {
  3560  		v_0 := v.Args[0]
  3561  		if v_0.Op != OpARM64FlagEQ {
  3562  			break
  3563  		}
  3564  		v.reset(OpARM64MOVDconst)
  3565  		v.AuxInt = 0
  3566  		return true
  3567  	}
  3568  	// match: (LessThanU (FlagLT_ULT))
  3569  	// cond:
  3570  	// result: (MOVDconst [1])
  3571  	for {
  3572  		v_0 := v.Args[0]
  3573  		if v_0.Op != OpARM64FlagLT_ULT {
  3574  			break
  3575  		}
  3576  		v.reset(OpARM64MOVDconst)
  3577  		v.AuxInt = 1
  3578  		return true
  3579  	}
  3580  	// match: (LessThanU (FlagLT_UGT))
  3581  	// cond:
  3582  	// result: (MOVDconst [0])
  3583  	for {
  3584  		v_0 := v.Args[0]
  3585  		if v_0.Op != OpARM64FlagLT_UGT {
  3586  			break
  3587  		}
  3588  		v.reset(OpARM64MOVDconst)
  3589  		v.AuxInt = 0
  3590  		return true
  3591  	}
  3592  	// match: (LessThanU (FlagGT_ULT))
  3593  	// cond:
  3594  	// result: (MOVDconst [1])
  3595  	for {
  3596  		v_0 := v.Args[0]
  3597  		if v_0.Op != OpARM64FlagGT_ULT {
  3598  			break
  3599  		}
  3600  		v.reset(OpARM64MOVDconst)
  3601  		v.AuxInt = 1
  3602  		return true
  3603  	}
  3604  	// match: (LessThanU (FlagGT_UGT))
  3605  	// cond:
  3606  	// result: (MOVDconst [0])
  3607  	for {
  3608  		v_0 := v.Args[0]
  3609  		if v_0.Op != OpARM64FlagGT_UGT {
  3610  			break
  3611  		}
  3612  		v.reset(OpARM64MOVDconst)
  3613  		v.AuxInt = 0
  3614  		return true
  3615  	}
  3616  	// match: (LessThanU (InvertFlags x))
  3617  	// cond:
  3618  	// result: (GreaterThanU x)
  3619  	for {
  3620  		v_0 := v.Args[0]
  3621  		if v_0.Op != OpARM64InvertFlags {
  3622  			break
  3623  		}
  3624  		x := v_0.Args[0]
  3625  		v.reset(OpARM64GreaterThanU)
  3626  		v.AddArg(x)
  3627  		return true
  3628  	}
  3629  	return false
  3630  }
  3631  func rewriteValueARM64_OpARM64MOD_0(v *Value) bool {
  3632  	// match: (MOD (MOVDconst [c]) (MOVDconst [d]))
  3633  	// cond:
  3634  	// result: (MOVDconst [int64(c)%int64(d)])
  3635  	for {
  3636  		_ = v.Args[1]
  3637  		v_0 := v.Args[0]
  3638  		if v_0.Op != OpARM64MOVDconst {
  3639  			break
  3640  		}
  3641  		c := v_0.AuxInt
  3642  		v_1 := v.Args[1]
  3643  		if v_1.Op != OpARM64MOVDconst {
  3644  			break
  3645  		}
  3646  		d := v_1.AuxInt
  3647  		v.reset(OpARM64MOVDconst)
  3648  		v.AuxInt = int64(c) % int64(d)
  3649  		return true
  3650  	}
  3651  	return false
  3652  }
  3653  func rewriteValueARM64_OpARM64MODW_0(v *Value) bool {
  3654  	// match: (MODW (MOVDconst [c]) (MOVDconst [d]))
  3655  	// cond:
  3656  	// result: (MOVDconst [int64(int32(c)%int32(d))])
  3657  	for {
  3658  		_ = v.Args[1]
  3659  		v_0 := v.Args[0]
  3660  		if v_0.Op != OpARM64MOVDconst {
  3661  			break
  3662  		}
  3663  		c := v_0.AuxInt
  3664  		v_1 := v.Args[1]
  3665  		if v_1.Op != OpARM64MOVDconst {
  3666  			break
  3667  		}
  3668  		d := v_1.AuxInt
  3669  		v.reset(OpARM64MOVDconst)
  3670  		v.AuxInt = int64(int32(c) % int32(d))
  3671  		return true
  3672  	}
  3673  	return false
  3674  }
  3675  func rewriteValueARM64_OpARM64MOVBUload_0(v *Value) bool {
  3676  	b := v.Block
  3677  	_ = b
  3678  	config := b.Func.Config
  3679  	_ = config
  3680  	// match: (MOVBUload [off1] {sym} (ADDconst [off2] ptr) mem)
  3681  	// cond: is32Bit(off1+off2) 	&& (ptr.Op != OpSB || !config.ctxt.Flag_shared)
  3682  	// result: (MOVBUload [off1+off2] {sym} ptr mem)
  3683  	for {
  3684  		off1 := v.AuxInt
  3685  		sym := v.Aux
  3686  		_ = v.Args[1]
  3687  		v_0 := v.Args[0]
  3688  		if v_0.Op != OpARM64ADDconst {
  3689  			break
  3690  		}
  3691  		off2 := v_0.AuxInt
  3692  		ptr := v_0.Args[0]
  3693  		mem := v.Args[1]
  3694  		if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
  3695  			break
  3696  		}
  3697  		v.reset(OpARM64MOVBUload)
  3698  		v.AuxInt = off1 + off2
  3699  		v.Aux = sym
  3700  		v.AddArg(ptr)
  3701  		v.AddArg(mem)
  3702  		return true
  3703  	}
  3704  	// match: (MOVBUload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
  3705  	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) 	&& (ptr.Op != OpSB || !config.ctxt.Flag_shared)
  3706  	// result: (MOVBUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
  3707  	for {
  3708  		off1 := v.AuxInt
  3709  		sym1 := v.Aux
  3710  		_ = v.Args[1]
  3711  		v_0 := v.Args[0]
  3712  		if v_0.Op != OpARM64MOVDaddr {
  3713  			break
  3714  		}
  3715  		off2 := v_0.AuxInt
  3716  		sym2 := v_0.Aux
  3717  		ptr := v_0.Args[0]
  3718  		mem := v.Args[1]
  3719  		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
  3720  			break
  3721  		}
  3722  		v.reset(OpARM64MOVBUload)
  3723  		v.AuxInt = off1 + off2
  3724  		v.Aux = mergeSym(sym1, sym2)
  3725  		v.AddArg(ptr)
  3726  		v.AddArg(mem)
  3727  		return true
  3728  	}
  3729  	// match: (MOVBUload [off] {sym} ptr (MOVBstorezero [off2] {sym2} ptr2 _))
  3730  	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
  3731  	// result: (MOVDconst [0])
  3732  	for {
  3733  		off := v.AuxInt
  3734  		sym := v.Aux
  3735  		_ = v.Args[1]
  3736  		ptr := v.Args[0]
  3737  		v_1 := v.Args[1]
  3738  		if v_1.Op != OpARM64MOVBstorezero {
  3739  			break
  3740  		}
  3741  		off2 := v_1.AuxInt
  3742  		sym2 := v_1.Aux
  3743  		_ = v_1.Args[1]
  3744  		ptr2 := v_1.Args[0]
  3745  		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
  3746  			break
  3747  		}
  3748  		v.reset(OpARM64MOVDconst)
  3749  		v.AuxInt = 0
  3750  		return true
  3751  	}
  3752  	return false
  3753  }
  3754  func rewriteValueARM64_OpARM64MOVBUreg_0(v *Value) bool {
  3755  	// match: (MOVBUreg x:(MOVBUload _ _))
  3756  	// cond:
  3757  	// result: (MOVDreg x)
  3758  	for {
  3759  		x := v.Args[0]
  3760  		if x.Op != OpARM64MOVBUload {
  3761  			break
  3762  		}
  3763  		_ = x.Args[1]
  3764  		v.reset(OpARM64MOVDreg)
  3765  		v.AddArg(x)
  3766  		return true
  3767  	}
  3768  	// match: (MOVBUreg x:(MOVBUreg _))
  3769  	// cond:
  3770  	// result: (MOVDreg x)
  3771  	for {
  3772  		x := v.Args[0]
  3773  		if x.Op != OpARM64MOVBUreg {
  3774  			break
  3775  		}
  3776  		v.reset(OpARM64MOVDreg)
  3777  		v.AddArg(x)
  3778  		return true
  3779  	}
  3780  	// match: (MOVBUreg (MOVDconst [c]))
  3781  	// cond:
  3782  	// result: (MOVDconst [int64(uint8(c))])
  3783  	for {
  3784  		v_0 := v.Args[0]
  3785  		if v_0.Op != OpARM64MOVDconst {
  3786  			break
  3787  		}
  3788  		c := v_0.AuxInt
  3789  		v.reset(OpARM64MOVDconst)
  3790  		v.AuxInt = int64(uint8(c))
  3791  		return true
  3792  	}
  3793  	return false
  3794  }
  3795  func rewriteValueARM64_OpARM64MOVBload_0(v *Value) bool {
  3796  	b := v.Block
  3797  	_ = b
  3798  	config := b.Func.Config
  3799  	_ = config
  3800  	// match: (MOVBload [off1] {sym} (ADDconst [off2] ptr) mem)
  3801  	// cond: is32Bit(off1+off2) 	&& (ptr.Op != OpSB || !config.ctxt.Flag_shared)
  3802  	// result: (MOVBload [off1+off2] {sym} ptr mem)
  3803  	for {
  3804  		off1 := v.AuxInt
  3805  		sym := v.Aux
  3806  		_ = v.Args[1]
  3807  		v_0 := v.Args[0]
  3808  		if v_0.Op != OpARM64ADDconst {
  3809  			break
  3810  		}
  3811  		off2 := v_0.AuxInt
  3812  		ptr := v_0.Args[0]
  3813  		mem := v.Args[1]
  3814  		if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
  3815  			break
  3816  		}
  3817  		v.reset(OpARM64MOVBload)
  3818  		v.AuxInt = off1 + off2
  3819  		v.Aux = sym
  3820  		v.AddArg(ptr)
  3821  		v.AddArg(mem)
  3822  		return true
  3823  	}
  3824  	// match: (MOVBload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
  3825  	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) 	&& (ptr.Op != OpSB || !config.ctxt.Flag_shared)
  3826  	// result: (MOVBload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
  3827  	for {
  3828  		off1 := v.AuxInt
  3829  		sym1 := v.Aux
  3830  		_ = v.Args[1]
  3831  		v_0 := v.Args[0]
  3832  		if v_0.Op != OpARM64MOVDaddr {
  3833  			break
  3834  		}
  3835  		off2 := v_0.AuxInt
  3836  		sym2 := v_0.Aux
  3837  		ptr := v_0.Args[0]
  3838  		mem := v.Args[1]
  3839  		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
  3840  			break
  3841  		}
  3842  		v.reset(OpARM64MOVBload)
  3843  		v.AuxInt = off1 + off2
  3844  		v.Aux = mergeSym(sym1, sym2)
  3845  		v.AddArg(ptr)
  3846  		v.AddArg(mem)
  3847  		return true
  3848  	}
  3849  	// match: (MOVBload [off] {sym} ptr (MOVBstorezero [off2] {sym2} ptr2 _))
  3850  	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
  3851  	// result: (MOVDconst [0])
  3852  	for {
  3853  		off := v.AuxInt
  3854  		sym := v.Aux
  3855  		_ = v.Args[1]
  3856  		ptr := v.Args[0]
  3857  		v_1 := v.Args[1]
  3858  		if v_1.Op != OpARM64MOVBstorezero {
  3859  			break
  3860  		}
  3861  		off2 := v_1.AuxInt
  3862  		sym2 := v_1.Aux
  3863  		_ = v_1.Args[1]
  3864  		ptr2 := v_1.Args[0]
  3865  		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
  3866  			break
  3867  		}
  3868  		v.reset(OpARM64MOVDconst)
  3869  		v.AuxInt = 0
  3870  		return true
  3871  	}
  3872  	return false
  3873  }
  3874  func rewriteValueARM64_OpARM64MOVBreg_0(v *Value) bool {
  3875  	// match: (MOVBreg x:(MOVBload _ _))
  3876  	// cond:
  3877  	// result: (MOVDreg x)
  3878  	for {
  3879  		x := v.Args[0]
  3880  		if x.Op != OpARM64MOVBload {
  3881  			break
  3882  		}
  3883  		_ = x.Args[1]
  3884  		v.reset(OpARM64MOVDreg)
  3885  		v.AddArg(x)
  3886  		return true
  3887  	}
  3888  	// match: (MOVBreg x:(MOVBreg _))
  3889  	// cond:
  3890  	// result: (MOVDreg x)
  3891  	for {
  3892  		x := v.Args[0]
  3893  		if x.Op != OpARM64MOVBreg {
  3894  			break
  3895  		}
  3896  		v.reset(OpARM64MOVDreg)
  3897  		v.AddArg(x)
  3898  		return true
  3899  	}
  3900  	// match: (MOVBreg (MOVDconst [c]))
  3901  	// cond:
  3902  	// result: (MOVDconst [int64(int8(c))])
  3903  	for {
  3904  		v_0 := v.Args[0]
  3905  		if v_0.Op != OpARM64MOVDconst {
  3906  			break
  3907  		}
  3908  		c := v_0.AuxInt
  3909  		v.reset(OpARM64MOVDconst)
  3910  		v.AuxInt = int64(int8(c))
  3911  		return true
  3912  	}
  3913  	return false
  3914  }
  3915  func rewriteValueARM64_OpARM64MOVBstore_0(v *Value) bool {
  3916  	b := v.Block
  3917  	_ = b
  3918  	config := b.Func.Config
  3919  	_ = config
  3920  	// match: (MOVBstore [off1] {sym} (ADDconst [off2] ptr) val mem)
  3921  	// cond: is32Bit(off1+off2) 	&& (ptr.Op != OpSB || !config.ctxt.Flag_shared)
  3922  	// result: (MOVBstore [off1+off2] {sym} ptr val mem)
  3923  	for {
  3924  		off1 := v.AuxInt
  3925  		sym := v.Aux
  3926  		_ = v.Args[2]
  3927  		v_0 := v.Args[0]
  3928  		if v_0.Op != OpARM64ADDconst {
  3929  			break
  3930  		}
  3931  		off2 := v_0.AuxInt
  3932  		ptr := v_0.Args[0]
  3933  		val := v.Args[1]
  3934  		mem := v.Args[2]
  3935  		if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
  3936  			break
  3937  		}
  3938  		v.reset(OpARM64MOVBstore)
  3939  		v.AuxInt = off1 + off2
  3940  		v.Aux = sym
  3941  		v.AddArg(ptr)
  3942  		v.AddArg(val)
  3943  		v.AddArg(mem)
  3944  		return true
  3945  	}
  3946  	// match: (MOVBstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem)
  3947  	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) 	&& (ptr.Op != OpSB || !config.ctxt.Flag_shared)
  3948  	// result: (MOVBstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
  3949  	for {
  3950  		off1 := v.AuxInt
  3951  		sym1 := v.Aux
  3952  		_ = v.Args[2]
  3953  		v_0 := v.Args[0]
  3954  		if v_0.Op != OpARM64MOVDaddr {
  3955  			break
  3956  		}
  3957  		off2 := v_0.AuxInt
  3958  		sym2 := v_0.Aux
  3959  		ptr := v_0.Args[0]
  3960  		val := v.Args[1]
  3961  		mem := v.Args[2]
  3962  		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
  3963  			break
  3964  		}
  3965  		v.reset(OpARM64MOVBstore)
  3966  		v.AuxInt = off1 + off2
  3967  		v.Aux = mergeSym(sym1, sym2)
  3968  		v.AddArg(ptr)
  3969  		v.AddArg(val)
  3970  		v.AddArg(mem)
  3971  		return true
  3972  	}
  3973  	// match: (MOVBstore [off] {sym} ptr (MOVDconst [0]) mem)
  3974  	// cond:
  3975  	// result: (MOVBstorezero [off] {sym} ptr mem)
  3976  	for {
  3977  		off := v.AuxInt
  3978  		sym := v.Aux
  3979  		_ = v.Args[2]
  3980  		ptr := v.Args[0]
  3981  		v_1 := v.Args[1]
  3982  		if v_1.Op != OpARM64MOVDconst {
  3983  			break
  3984  		}
  3985  		if v_1.AuxInt != 0 {
  3986  			break
  3987  		}
  3988  		mem := v.Args[2]
  3989  		v.reset(OpARM64MOVBstorezero)
  3990  		v.AuxInt = off
  3991  		v.Aux = sym
  3992  		v.AddArg(ptr)
  3993  		v.AddArg(mem)
  3994  		return true
  3995  	}
  3996  	// match: (MOVBstore [off] {sym} ptr (MOVBreg x) mem)
  3997  	// cond:
  3998  	// result: (MOVBstore [off] {sym} ptr x mem)
  3999  	for {
  4000  		off := v.AuxInt
  4001  		sym := v.Aux
  4002  		_ = v.Args[2]
  4003  		ptr := v.Args[0]
  4004  		v_1 := v.Args[1]
  4005  		if v_1.Op != OpARM64MOVBreg {
  4006  			break
  4007  		}
  4008  		x := v_1.Args[0]
  4009  		mem := v.Args[2]
  4010  		v.reset(OpARM64MOVBstore)
  4011  		v.AuxInt = off
  4012  		v.Aux = sym
  4013  		v.AddArg(ptr)
  4014  		v.AddArg(x)
  4015  		v.AddArg(mem)
  4016  		return true
  4017  	}
  4018  	// match: (MOVBstore [off] {sym} ptr (MOVBUreg x) mem)
  4019  	// cond:
  4020  	// result: (MOVBstore [off] {sym} ptr x mem)
  4021  	for {
  4022  		off := v.AuxInt
  4023  		sym := v.Aux
  4024  		_ = v.Args[2]
  4025  		ptr := v.Args[0]
  4026  		v_1 := v.Args[1]
  4027  		if v_1.Op != OpARM64MOVBUreg {
  4028  			break
  4029  		}
  4030  		x := v_1.Args[0]
  4031  		mem := v.Args[2]
  4032  		v.reset(OpARM64MOVBstore)
  4033  		v.AuxInt = off
  4034  		v.Aux = sym
  4035  		v.AddArg(ptr)
  4036  		v.AddArg(x)
  4037  		v.AddArg(mem)
  4038  		return true
  4039  	}
  4040  	// match: (MOVBstore [off] {sym} ptr (MOVHreg x) mem)
  4041  	// cond:
  4042  	// result: (MOVBstore [off] {sym} ptr x mem)
  4043  	for {
  4044  		off := v.AuxInt
  4045  		sym := v.Aux
  4046  		_ = v.Args[2]
  4047  		ptr := v.Args[0]
  4048  		v_1 := v.Args[1]
  4049  		if v_1.Op != OpARM64MOVHreg {
  4050  			break
  4051  		}
  4052  		x := v_1.Args[0]
  4053  		mem := v.Args[2]
  4054  		v.reset(OpARM64MOVBstore)
  4055  		v.AuxInt = off
  4056  		v.Aux = sym
  4057  		v.AddArg(ptr)
  4058  		v.AddArg(x)
  4059  		v.AddArg(mem)
  4060  		return true
  4061  	}
  4062  	// match: (MOVBstore [off] {sym} ptr (MOVHUreg x) mem)
  4063  	// cond:
  4064  	// result: (MOVBstore [off] {sym} ptr x mem)
  4065  	for {
  4066  		off := v.AuxInt
  4067  		sym := v.Aux
  4068  		_ = v.Args[2]
  4069  		ptr := v.Args[0]
  4070  		v_1 := v.Args[1]
  4071  		if v_1.Op != OpARM64MOVHUreg {
  4072  			break
  4073  		}
  4074  		x := v_1.Args[0]
  4075  		mem := v.Args[2]
  4076  		v.reset(OpARM64MOVBstore)
  4077  		v.AuxInt = off
  4078  		v.Aux = sym
  4079  		v.AddArg(ptr)
  4080  		v.AddArg(x)
  4081  		v.AddArg(mem)
  4082  		return true
  4083  	}
  4084  	// match: (MOVBstore [off] {sym} ptr (MOVWreg x) mem)
  4085  	// cond:
  4086  	// result: (MOVBstore [off] {sym} ptr x mem)
  4087  	for {
  4088  		off := v.AuxInt
  4089  		sym := v.Aux
  4090  		_ = v.Args[2]
  4091  		ptr := v.Args[0]
  4092  		v_1 := v.Args[1]
  4093  		if v_1.Op != OpARM64MOVWreg {
  4094  			break
  4095  		}
  4096  		x := v_1.Args[0]
  4097  		mem := v.Args[2]
  4098  		v.reset(OpARM64MOVBstore)
  4099  		v.AuxInt = off
  4100  		v.Aux = sym
  4101  		v.AddArg(ptr)
  4102  		v.AddArg(x)
  4103  		v.AddArg(mem)
  4104  		return true
  4105  	}
  4106  	// match: (MOVBstore [off] {sym} ptr (MOVWUreg x) mem)
  4107  	// cond:
  4108  	// result: (MOVBstore [off] {sym} ptr x mem)
  4109  	for {
  4110  		off := v.AuxInt
  4111  		sym := v.Aux
  4112  		_ = v.Args[2]
  4113  		ptr := v.Args[0]
  4114  		v_1 := v.Args[1]
  4115  		if v_1.Op != OpARM64MOVWUreg {
  4116  			break
  4117  		}
  4118  		x := v_1.Args[0]
  4119  		mem := v.Args[2]
  4120  		v.reset(OpARM64MOVBstore)
  4121  		v.AuxInt = off
  4122  		v.Aux = sym
  4123  		v.AddArg(ptr)
  4124  		v.AddArg(x)
  4125  		v.AddArg(mem)
  4126  		return true
  4127  	}
  4128  	return false
  4129  }
  4130  func rewriteValueARM64_OpARM64MOVBstorezero_0(v *Value) bool {
  4131  	b := v.Block
  4132  	_ = b
  4133  	config := b.Func.Config
  4134  	_ = config
  4135  	// match: (MOVBstorezero [off1] {sym} (ADDconst [off2] ptr) mem)
  4136  	// cond: is32Bit(off1+off2) 	&& (ptr.Op != OpSB || !config.ctxt.Flag_shared)
  4137  	// result: (MOVBstorezero [off1+off2] {sym} ptr mem)
  4138  	for {
  4139  		off1 := v.AuxInt
  4140  		sym := v.Aux
  4141  		_ = v.Args[1]
  4142  		v_0 := v.Args[0]
  4143  		if v_0.Op != OpARM64ADDconst {
  4144  			break
  4145  		}
  4146  		off2 := v_0.AuxInt
  4147  		ptr := v_0.Args[0]
  4148  		mem := v.Args[1]
  4149  		if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
  4150  			break
  4151  		}
  4152  		v.reset(OpARM64MOVBstorezero)
  4153  		v.AuxInt = off1 + off2
  4154  		v.Aux = sym
  4155  		v.AddArg(ptr)
  4156  		v.AddArg(mem)
  4157  		return true
  4158  	}
  4159  	// match: (MOVBstorezero [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
  4160  	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) 	&& (ptr.Op != OpSB || !config.ctxt.Flag_shared)
  4161  	// result: (MOVBstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
  4162  	for {
  4163  		off1 := v.AuxInt
  4164  		sym1 := v.Aux
  4165  		_ = v.Args[1]
  4166  		v_0 := v.Args[0]
  4167  		if v_0.Op != OpARM64MOVDaddr {
  4168  			break
  4169  		}
  4170  		off2 := v_0.AuxInt
  4171  		sym2 := v_0.Aux
  4172  		ptr := v_0.Args[0]
  4173  		mem := v.Args[1]
  4174  		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
  4175  			break
  4176  		}
  4177  		v.reset(OpARM64MOVBstorezero)
  4178  		v.AuxInt = off1 + off2
  4179  		v.Aux = mergeSym(sym1, sym2)
  4180  		v.AddArg(ptr)
  4181  		v.AddArg(mem)
  4182  		return true
  4183  	}
  4184  	return false
  4185  }
  4186  func rewriteValueARM64_OpARM64MOVDload_0(v *Value) bool {
  4187  	b := v.Block
  4188  	_ = b
  4189  	config := b.Func.Config
  4190  	_ = config
  4191  	// match: (MOVDload [off1] {sym} (ADDconst [off2] ptr) mem)
  4192  	// cond: is32Bit(off1+off2) 	&& (ptr.Op != OpSB || !config.ctxt.Flag_shared)
  4193  	// result: (MOVDload [off1+off2] {sym} ptr mem)
  4194  	for {
  4195  		off1 := v.AuxInt
  4196  		sym := v.Aux
  4197  		_ = v.Args[1]
  4198  		v_0 := v.Args[0]
  4199  		if v_0.Op != OpARM64ADDconst {
  4200  			break
  4201  		}
  4202  		off2 := v_0.AuxInt
  4203  		ptr := v_0.Args[0]
  4204  		mem := v.Args[1]
  4205  		if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
  4206  			break
  4207  		}
  4208  		v.reset(OpARM64MOVDload)
  4209  		v.AuxInt = off1 + off2
  4210  		v.Aux = sym
  4211  		v.AddArg(ptr)
  4212  		v.AddArg(mem)
  4213  		return true
  4214  	}
  4215  	// match: (MOVDload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
  4216  	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) 	&& (ptr.Op != OpSB || !config.ctxt.Flag_shared)
  4217  	// result: (MOVDload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
  4218  	for {
  4219  		off1 := v.AuxInt
  4220  		sym1 := v.Aux
  4221  		_ = v.Args[1]
  4222  		v_0 := v.Args[0]
  4223  		if v_0.Op != OpARM64MOVDaddr {
  4224  			break
  4225  		}
  4226  		off2 := v_0.AuxInt
  4227  		sym2 := v_0.Aux
  4228  		ptr := v_0.Args[0]
  4229  		mem := v.Args[1]
  4230  		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
  4231  			break
  4232  		}
  4233  		v.reset(OpARM64MOVDload)
  4234  		v.AuxInt = off1 + off2
  4235  		v.Aux = mergeSym(sym1, sym2)
  4236  		v.AddArg(ptr)
  4237  		v.AddArg(mem)
  4238  		return true
  4239  	}
  4240  	// match: (MOVDload [off] {sym} ptr (MOVDstorezero [off2] {sym2} ptr2 _))
  4241  	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
  4242  	// result: (MOVDconst [0])
  4243  	for {
  4244  		off := v.AuxInt
  4245  		sym := v.Aux
  4246  		_ = v.Args[1]
  4247  		ptr := v.Args[0]
  4248  		v_1 := v.Args[1]
  4249  		if v_1.Op != OpARM64MOVDstorezero {
  4250  			break
  4251  		}
  4252  		off2 := v_1.AuxInt
  4253  		sym2 := v_1.Aux
  4254  		_ = v_1.Args[1]
  4255  		ptr2 := v_1.Args[0]
  4256  		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
  4257  			break
  4258  		}
  4259  		v.reset(OpARM64MOVDconst)
  4260  		v.AuxInt = 0
  4261  		return true
  4262  	}
  4263  	return false
  4264  }
  4265  func rewriteValueARM64_OpARM64MOVDreg_0(v *Value) bool {
  4266  	// match: (MOVDreg x)
  4267  	// cond: x.Uses == 1
  4268  	// result: (MOVDnop x)
  4269  	for {
  4270  		x := v.Args[0]
  4271  		if !(x.Uses == 1) {
  4272  			break
  4273  		}
  4274  		v.reset(OpARM64MOVDnop)
  4275  		v.AddArg(x)
  4276  		return true
  4277  	}
  4278  	// match: (MOVDreg (MOVDconst [c]))
  4279  	// cond:
  4280  	// result: (MOVDconst [c])
  4281  	for {
  4282  		v_0 := v.Args[0]
  4283  		if v_0.Op != OpARM64MOVDconst {
  4284  			break
  4285  		}
  4286  		c := v_0.AuxInt
  4287  		v.reset(OpARM64MOVDconst)
  4288  		v.AuxInt = c
  4289  		return true
  4290  	}
  4291  	return false
  4292  }
// rewriteValueARM64_OpARM64MOVDstore_0 applies rewrite rules to a MOVDstore:
// it folds ADDconst/MOVDaddr address arithmetic into the store's offset/symbol
// and turns a store of constant zero into MOVDstorezero. Reports whether a
// rewrite fired. (Generated code; comments here will not survive regeneration.)
func rewriteValueARM64_OpARM64MOVDstore_0(v *Value) bool {
	b := v.Block
	_ = b
	config := b.Func.Config
	_ = config
	// match: (MOVDstore [off1] {sym} (ADDconst [off2] ptr) val mem)
	// cond: is32Bit(off1+off2) 	&& (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVDstore [off1+off2] {sym} ptr val mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		// Combined offset must stay addressable; SB-relative addressing is
		// disallowed under -shared.
		if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVDstore)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) 	&& (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVDstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVDstore)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDstore [off] {sym} ptr (MOVDconst [0]) mem)
	// cond:
	// result: (MOVDstorezero [off] {sym} ptr mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		mem := v.Args[2]
		// Storing zero can use the zero register; drop the const operand.
		v.reset(OpARM64MOVDstorezero)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVDstorezero_0 folds ADDconst/MOVDaddr address
// arithmetic into a MOVDstorezero's offset/symbol. Reports whether a rewrite
// fired.
func rewriteValueARM64_OpARM64MOVDstorezero_0(v *Value) bool {
	b := v.Block
	_ = b
	config := b.Func.Config
	_ = config
	// match: (MOVDstorezero [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2) 	&& (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVDstorezero [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVDstorezero)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDstorezero [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) 	&& (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVDstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVDstorezero)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVHUload_0 applies rewrite rules to an unsigned
// halfword load: folds ADDconst/MOVDaddr into the offset/symbol, and replaces
// a load that reads memory just zeroed by a MOVHstorezero at the same address
// with constant 0. Reports whether a rewrite fired.
func rewriteValueARM64_OpARM64MOVHUload_0(v *Value) bool {
	b := v.Block
	_ = b
	config := b.Func.Config
	_ = config
	// match: (MOVHUload [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2) 	&& (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVHUload [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVHUload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHUload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) 	&& (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVHUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVHUload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHUload [off] {sym} ptr (MOVHstorezero [off2] {sym2} ptr2 _))
	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
	// result: (MOVDconst [0])
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVHstorezero {
			break
		}
		off2 := v_1.AuxInt
		sym2 := v_1.Aux
		_ = v_1.Args[1]
		ptr2 := v_1.Args[0]
		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVHUreg_0 applies rewrite rules to a 16-bit
// zero-extension: when the argument is already zero-extended to at most 16
// bits (by a narrower unsigned load or extension), the op is a no-op and
// becomes MOVDreg; a constant argument is truncated at compile time.
// Reports whether a rewrite fired.
func rewriteValueARM64_OpARM64MOVHUreg_0(v *Value) bool {
	// match: (MOVHUreg x:(MOVBUload _ _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVBUload {
			break
		}
		_ = x.Args[1]
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVHUload _ _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVHUload {
			break
		}
		_ = x.Args[1]
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVBUreg _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVBUreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVHUreg _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVHUreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg (MOVDconst [c]))
	// cond:
	// result: (MOVDconst [int64(uint16(c))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64(uint16(c))
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVHload_0 applies rewrite rules to a signed
// halfword load: folds ADDconst/MOVDaddr into the offset/symbol, and replaces
// a load of memory just zeroed by a MOVHstorezero at the same address with
// constant 0. Reports whether a rewrite fired.
func rewriteValueARM64_OpARM64MOVHload_0(v *Value) bool {
	b := v.Block
	_ = b
	config := b.Func.Config
	_ = config
	// match: (MOVHload [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2) 	&& (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVHload [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVHload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) 	&& (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVHload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVHload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHload [off] {sym} ptr (MOVHstorezero [off2] {sym2} ptr2 _))
	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
	// result: (MOVDconst [0])
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVHstorezero {
			break
		}
		off2 := v_1.AuxInt
		sym2 := v_1.Aux
		_ = v_1.Args[1]
		ptr2 := v_1.Args[0]
		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVHreg_0 applies rewrite rules to a 16-bit sign
// extension: when the argument already fits in 16 bits with the correct sign
// (narrower loads or extensions), the op is a no-op and becomes MOVDreg; a
// constant argument is sign-extended at compile time. Reports whether a
// rewrite fired.
func rewriteValueARM64_OpARM64MOVHreg_0(v *Value) bool {
	// match: (MOVHreg x:(MOVBload _ _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVBload {
			break
		}
		_ = x.Args[1]
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBUload _ _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVBUload {
			break
		}
		_ = x.Args[1]
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVHload _ _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVHload {
			break
		}
		_ = x.Args[1]
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBreg _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVBreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBUreg _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVBUreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVHreg _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVHreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg (MOVDconst [c]))
	// cond:
	// result: (MOVDconst [int64(int16(c))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64(int16(c))
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVHstore_0 applies rewrite rules to a halfword
// store: folds ADDconst/MOVDaddr into the offset/symbol, turns a store of
// constant zero into MOVHstorezero, and drops redundant sign/zero extensions
// of the stored value (only the low 16 bits are written, so the extension is
// irrelevant). Reports whether a rewrite fired.
func rewriteValueARM64_OpARM64MOVHstore_0(v *Value) bool {
	b := v.Block
	_ = b
	config := b.Func.Config
	_ = config
	// match: (MOVHstore [off1] {sym} (ADDconst [off2] ptr) val mem)
	// cond: is32Bit(off1+off2) 	&& (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVHstore [off1+off2] {sym} ptr val mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVHstore)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) 	&& (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVHstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVHstore)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVDconst [0]) mem)
	// cond:
	// result: (MOVHstorezero [off] {sym} ptr mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		mem := v.Args[2]
		v.reset(OpARM64MOVHstorezero)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVHreg x) mem)
	// cond:
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVHreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpARM64MOVHstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVHUreg x) mem)
	// cond:
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVHUreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpARM64MOVHstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVWreg x) mem)
	// cond:
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVWreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpARM64MOVHstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVWUreg x) mem)
	// cond:
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVWUreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpARM64MOVHstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVHstorezero_0 folds ADDconst/MOVDaddr address
// arithmetic into a MOVHstorezero's offset/symbol. Reports whether a rewrite
// fired.
func rewriteValueARM64_OpARM64MOVHstorezero_0(v *Value) bool {
	b := v.Block
	_ = b
	config := b.Func.Config
	_ = config
	// match: (MOVHstorezero [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2) 	&& (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVHstorezero [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVHstorezero)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHstorezero [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) 	&& (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVHstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVHstorezero)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVWUload_0 applies rewrite rules to an unsigned
// word load: folds ADDconst/MOVDaddr into the offset/symbol, and replaces a
// load of memory just zeroed by a MOVWstorezero at the same address with
// constant 0. Reports whether a rewrite fired.
func rewriteValueARM64_OpARM64MOVWUload_0(v *Value) bool {
	b := v.Block
	_ = b
	config := b.Func.Config
	_ = config
	// match: (MOVWUload [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2) 	&& (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVWUload [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVWUload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWUload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) 	&& (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVWUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVWUload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWUload [off] {sym} ptr (MOVWstorezero [off2] {sym2} ptr2 _))
	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
	// result: (MOVDconst [0])
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVWstorezero {
			break
		}
		off2 := v_1.AuxInt
		sym2 := v_1.Aux
		_ = v_1.Args[1]
		ptr2 := v_1.Args[0]
		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVWUreg_0 applies rewrite rules to a 32-bit
// zero-extension: when the argument is already zero-extended to at most 32
// bits (narrower unsigned loads or extensions), the op is a no-op and becomes
// MOVDreg; a constant argument is truncated at compile time. Reports whether
// a rewrite fired.
func rewriteValueARM64_OpARM64MOVWUreg_0(v *Value) bool {
	// match: (MOVWUreg x:(MOVBUload _ _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVBUload {
			break
		}
		_ = x.Args[1]
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVHUload _ _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVHUload {
			break
		}
		_ = x.Args[1]
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVWUload _ _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVWUload {
			break
		}
		_ = x.Args[1]
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVBUreg _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVBUreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVHUreg _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVHUreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVWUreg _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVWUreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg (MOVDconst [c]))
	// cond:
	// result: (MOVDconst [int64(uint32(c))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64(uint32(c))
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVWload_0 applies rewrite rules to a signed word
// load: folds ADDconst/MOVDaddr into the offset/symbol, and replaces a load
// of memory just zeroed by a MOVWstorezero at the same address with constant
// 0. Reports whether a rewrite fired.
func rewriteValueARM64_OpARM64MOVWload_0(v *Value) bool {
	b := v.Block
	_ = b
	config := b.Func.Config
	_ = config
	// match: (MOVWload [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2) 	&& (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVWload [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVWload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) 	&& (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVWload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVWload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWload [off] {sym} ptr (MOVWstorezero [off2] {sym2} ptr2 _))
	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
	// result: (MOVDconst [0])
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVWstorezero {
			break
		}
		off2 := v_1.AuxInt
		sym2 := v_1.Aux
		_ = v_1.Args[1]
		ptr2 := v_1.Args[0]
		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	return false
}
  5223  func rewriteValueARM64_OpARM64MOVWreg_0(v *Value) bool {
  5224  	// match: (MOVWreg x:(MOVBload _ _))
  5225  	// cond:
  5226  	// result: (MOVDreg x)
  5227  	for {
  5228  		x := v.Args[0]
  5229  		if x.Op != OpARM64MOVBload {
  5230  			break
  5231  		}
  5232  		_ = x.Args[1]
  5233  		v.reset(OpARM64MOVDreg)
  5234  		v.AddArg(x)
  5235  		return true
  5236  	}
  5237  	// match: (MOVWreg x:(MOVBUload _ _))
  5238  	// cond:
  5239  	// result: (MOVDreg x)
  5240  	for {
  5241  		x := v.Args[0]
  5242  		if x.Op != OpARM64MOVBUload {
  5243  			break
  5244  		}
  5245  		_ = x.Args[1]
  5246  		v.reset(OpARM64MOVDreg)
  5247  		v.AddArg(x)
  5248  		return true
  5249  	}
  5250  	// match: (MOVWreg x:(MOVHload _ _))
  5251  	// cond:
  5252  	// result: (MOVDreg x)
  5253  	for {
  5254  		x := v.Args[0]
  5255  		if x.Op != OpARM64MOVHload {
  5256  			break
  5257  		}
  5258  		_ = x.Args[1]
  5259  		v.reset(OpARM64MOVDreg)
  5260  		v.AddArg(x)
  5261  		return true
  5262  	}
  5263  	// match: (MOVWreg x:(MOVHUload _ _))
  5264  	// cond:
  5265  	// result: (MOVDreg x)
  5266  	for {
  5267  		x := v.Args[0]
  5268  		if x.Op != OpARM64MOVHUload {
  5269  			break
  5270  		}
  5271  		_ = x.Args[1]
  5272  		v.reset(OpARM64MOVDreg)
  5273  		v.AddArg(x)
  5274  		return true
  5275  	}
  5276  	// match: (MOVWreg x:(MOVWload _ _))
  5277  	// cond:
  5278  	// result: (MOVDreg x)
  5279  	for {
  5280  		x := v.Args[0]
  5281  		if x.Op != OpARM64MOVWload {
  5282  			break
  5283  		}
  5284  		_ = x.Args[1]
  5285  		v.reset(OpARM64MOVDreg)
  5286  		v.AddArg(x)
  5287  		return true
  5288  	}
  5289  	// match: (MOVWreg x:(MOVBreg _))
  5290  	// cond:
  5291  	// result: (MOVDreg x)
  5292  	for {
  5293  		x := v.Args[0]
  5294  		if x.Op != OpARM64MOVBreg {
  5295  			break
  5296  		}
  5297  		v.reset(OpARM64MOVDreg)
  5298  		v.AddArg(x)
  5299  		return true
  5300  	}
  5301  	// match: (MOVWreg x:(MOVBUreg _))
  5302  	// cond:
  5303  	// result: (MOVDreg x)
  5304  	for {
  5305  		x := v.Args[0]
  5306  		if x.Op != OpARM64MOVBUreg {
  5307  			break
  5308  		}
  5309  		v.reset(OpARM64MOVDreg)
  5310  		v.AddArg(x)
  5311  		return true
  5312  	}
  5313  	// match: (MOVWreg x:(MOVHreg _))
  5314  	// cond:
  5315  	// result: (MOVDreg x)
  5316  	for {
  5317  		x := v.Args[0]
  5318  		if x.Op != OpARM64MOVHreg {
  5319  			break
  5320  		}
  5321  		v.reset(OpARM64MOVDreg)
  5322  		v.AddArg(x)
  5323  		return true
  5324  	}
  5325  	// match: (MOVWreg x:(MOVHreg _))
  5326  	// cond:
  5327  	// result: (MOVDreg x)
  5328  	for {
  5329  		x := v.Args[0]
  5330  		if x.Op != OpARM64MOVHreg {
  5331  			break
  5332  		}
  5333  		v.reset(OpARM64MOVDreg)
  5334  		v.AddArg(x)
  5335  		return true
  5336  	}
  5337  	// match: (MOVWreg x:(MOVWreg _))
  5338  	// cond:
  5339  	// result: (MOVDreg x)
  5340  	for {
  5341  		x := v.Args[0]
  5342  		if x.Op != OpARM64MOVWreg {
  5343  			break
  5344  		}
  5345  		v.reset(OpARM64MOVDreg)
  5346  		v.AddArg(x)
  5347  		return true
  5348  	}
  5349  	return false
  5350  }
// rewriteValueARM64_OpARM64MOVWreg_10 is the generated continuation of the
// MOVWreg rule set: it folds a constant argument by sign-extending its low 32
// bits at compile time. Reports whether a rewrite fired.
func rewriteValueARM64_OpARM64MOVWreg_10(v *Value) bool {
	// match: (MOVWreg (MOVDconst [c]))
	// cond:
	// result: (MOVDconst [int64(int32(c))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64(int32(c))
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVWstore_0 applies rewrite rules to a word store:
// folds ADDconst/MOVDaddr into the offset/symbol, turns a store of constant
// zero into MOVWstorezero, and drops redundant 32-bit sign/zero extensions of
// the stored value (only the low 32 bits are written). Reports whether a
// rewrite fired.
func rewriteValueARM64_OpARM64MOVWstore_0(v *Value) bool {
	b := v.Block
	_ = b
	config := b.Func.Config
	_ = config
	// match: (MOVWstore [off1] {sym} (ADDconst [off2] ptr) val mem)
	// cond: is32Bit(off1+off2) 	&& (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVWstore [off1+off2] {sym} ptr val mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVWstore)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) 	&& (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVWstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVWstore)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWstore [off] {sym} ptr (MOVDconst [0]) mem)
	// cond:
	// result: (MOVWstorezero [off] {sym} ptr mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		mem := v.Args[2]
		v.reset(OpARM64MOVWstorezero)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWstore [off] {sym} ptr (MOVWreg x) mem)
	// cond:
	// result: (MOVWstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVWreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpARM64MOVWstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWstore [off] {sym} ptr (MOVWUreg x) mem)
	// cond:
	// result: (MOVWstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVWUreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpARM64MOVWstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVWstorezero_0 applies the generated rewrite
// rules for OpARM64MOVWstorezero. Both rules fold an address computation
// (ADDconst or MOVDaddr) into the store's offset/symbol aux fields, provided
// the combined offset fits in 32 bits and the base pointer is not SB when
// building in shared mode (config.ctxt.Flag_shared). Returns true if a rule
// fired and v was rewritten in place.
func rewriteValueARM64_OpARM64MOVWstorezero_0(v *Value) bool {
	b := v.Block
	_ = b
	config := b.Func.Config
	_ = config
	// match: (MOVWstorezero [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2) 	&& (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVWstorezero [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVWstorezero)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWstorezero [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) 	&& (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVWstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVWstorezero)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MUL_0 applies the first batch of generated rewrite
// rules for OpARM64MUL (64-bit multiply): strength reduction of a multiply by
// a constant into cheaper ops — NEG for -1, MOVDconst 0 for 0, identity for 1,
// SLLconst for powers of two, and ADDshiftLL (x + x<<k) for constants of the
// form 2^k+1. Each rule appears twice, once per operand order, since MUL is
// commutative. Returns true if a rule fired; further MUL rules live in
// rewriteValueARM64_OpARM64MUL_10 and _20 below.
func rewriteValueARM64_OpARM64MUL_0(v *Value) bool {
	// match: (MUL x (MOVDconst [-1]))
	// cond:
	// result: (NEG x)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		if v_1.AuxInt != -1 {
			break
		}
		v.reset(OpARM64NEG)
		v.AddArg(x)
		return true
	}
	// match: (MUL (MOVDconst [-1]) x)
	// cond:
	// result: (NEG x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		if v_0.AuxInt != -1 {
			break
		}
		x := v.Args[1]
		v.reset(OpARM64NEG)
		v.AddArg(x)
		return true
	}
	// match: (MUL _ (MOVDconst [0]))
	// cond:
	// result: (MOVDconst [0])
	for {
		_ = v.Args[1]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (MUL (MOVDconst [0]) _)
	// cond:
	// result: (MOVDconst [0])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (MUL x (MOVDconst [1]))
	// cond:
	// result: x
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		if v_1.AuxInt != 1 {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (MUL (MOVDconst [1]) x)
	// cond:
	// result: x
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		if v_0.AuxInt != 1 {
			break
		}
		x := v.Args[1]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (MUL x (MOVDconst [c]))
	// cond: isPowerOfTwo(c)
	// result: (SLLconst [log2(c)] x)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(isPowerOfTwo(c)) {
			break
		}
		v.reset(OpARM64SLLconst)
		v.AuxInt = log2(c)
		v.AddArg(x)
		return true
	}
	// match: (MUL (MOVDconst [c]) x)
	// cond: isPowerOfTwo(c)
	// result: (SLLconst [log2(c)] x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(isPowerOfTwo(c)) {
			break
		}
		v.reset(OpARM64SLLconst)
		v.AuxInt = log2(c)
		v.AddArg(x)
		return true
	}
	// match: (MUL x (MOVDconst [c]))
	// cond: isPowerOfTwo(c-1) && c >= 3
	// result: (ADDshiftLL x x [log2(c-1)])
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(isPowerOfTwo(c-1) && c >= 3) {
			break
		}
		// x*(2^k+1) => x + x<<k, a single add-with-shifted-register.
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = log2(c - 1)
		v.AddArg(x)
		v.AddArg(x)
		return true
	}
	// match: (MUL (MOVDconst [c]) x)
	// cond: isPowerOfTwo(c-1) && c >= 3
	// result: (ADDshiftLL x x [log2(c-1)])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(isPowerOfTwo(c-1) && c >= 3) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = log2(c - 1)
		v.AddArg(x)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MUL_10 applies the second batch of generated
// rewrite rules for OpARM64MUL: strength reduction for constants of the form
// 2^k-1 (c >= 7, via NEG + ADDshiftLL) and for constants that are 3, 5, 7, or
// 9 times a power of two (via an ADDshiftLL for the small factor followed by
// an SLLconst for the power of two). Each rule appears twice, once per
// operand order. Returns true if a rule fired.
func rewriteValueARM64_OpARM64MUL_10(v *Value) bool {
	b := v.Block
	_ = b
	// match: (MUL x (MOVDconst [c]))
	// cond: isPowerOfTwo(c+1) && c >= 7
	// result: (ADDshiftLL (NEG <x.Type> x) x [log2(c+1)])
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(isPowerOfTwo(c+1) && c >= 7) {
			break
		}
		// x*(2^k-1) => -x + x<<k.
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = log2(c + 1)
		v0 := b.NewValue0(v.Pos, OpARM64NEG, x.Type)
		v0.AddArg(x)
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (MUL (MOVDconst [c]) x)
	// cond: isPowerOfTwo(c+1) && c >= 7
	// result: (ADDshiftLL (NEG <x.Type> x) x [log2(c+1)])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(isPowerOfTwo(c+1) && c >= 7) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = log2(c + 1)
		v0 := b.NewValue0(v.Pos, OpARM64NEG, x.Type)
		v0.AddArg(x)
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (MUL x (MOVDconst [c]))
	// cond: c%3 == 0 && isPowerOfTwo(c/3)
	// result: (SLLconst [log2(c/3)] (ADDshiftLL <x.Type> x x [1]))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(c%3 == 0 && isPowerOfTwo(c/3)) {
			break
		}
		// x*3*2^k => (x + x<<1) << k.
		v.reset(OpARM64SLLconst)
		v.AuxInt = log2(c / 3)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = 1
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MUL (MOVDconst [c]) x)
	// cond: c%3 == 0 && isPowerOfTwo(c/3)
	// result: (SLLconst [log2(c/3)] (ADDshiftLL <x.Type> x x [1]))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(c%3 == 0 && isPowerOfTwo(c/3)) {
			break
		}
		v.reset(OpARM64SLLconst)
		v.AuxInt = log2(c / 3)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = 1
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MUL x (MOVDconst [c]))
	// cond: c%5 == 0 && isPowerOfTwo(c/5)
	// result: (SLLconst [log2(c/5)] (ADDshiftLL <x.Type> x x [2]))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(c%5 == 0 && isPowerOfTwo(c/5)) {
			break
		}
		// x*5*2^k => (x + x<<2) << k.
		v.reset(OpARM64SLLconst)
		v.AuxInt = log2(c / 5)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = 2
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MUL (MOVDconst [c]) x)
	// cond: c%5 == 0 && isPowerOfTwo(c/5)
	// result: (SLLconst [log2(c/5)] (ADDshiftLL <x.Type> x x [2]))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(c%5 == 0 && isPowerOfTwo(c/5)) {
			break
		}
		v.reset(OpARM64SLLconst)
		v.AuxInt = log2(c / 5)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = 2
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MUL x (MOVDconst [c]))
	// cond: c%7 == 0 && isPowerOfTwo(c/7)
	// result: (SLLconst [log2(c/7)] (ADDshiftLL <x.Type> (NEG <x.Type> x) x [3]))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(c%7 == 0 && isPowerOfTwo(c/7)) {
			break
		}
		// x*7*2^k => (-x + x<<3) << k.
		v.reset(OpARM64SLLconst)
		v.AuxInt = log2(c / 7)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = 3
		v1 := b.NewValue0(v.Pos, OpARM64NEG, x.Type)
		v1.AddArg(x)
		v0.AddArg(v1)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MUL (MOVDconst [c]) x)
	// cond: c%7 == 0 && isPowerOfTwo(c/7)
	// result: (SLLconst [log2(c/7)] (ADDshiftLL <x.Type> (NEG <x.Type> x) x [3]))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(c%7 == 0 && isPowerOfTwo(c/7)) {
			break
		}
		v.reset(OpARM64SLLconst)
		v.AuxInt = log2(c / 7)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = 3
		v1 := b.NewValue0(v.Pos, OpARM64NEG, x.Type)
		v1.AddArg(x)
		v0.AddArg(v1)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MUL x (MOVDconst [c]))
	// cond: c%9 == 0 && isPowerOfTwo(c/9)
	// result: (SLLconst [log2(c/9)] (ADDshiftLL <x.Type> x x [3]))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(c%9 == 0 && isPowerOfTwo(c/9)) {
			break
		}
		// x*9*2^k => (x + x<<3) << k.
		v.reset(OpARM64SLLconst)
		v.AuxInt = log2(c / 9)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = 3
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MUL (MOVDconst [c]) x)
	// cond: c%9 == 0 && isPowerOfTwo(c/9)
	// result: (SLLconst [log2(c/9)] (ADDshiftLL <x.Type> x x [3]))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(c%9 == 0 && isPowerOfTwo(c/9)) {
			break
		}
		v.reset(OpARM64SLLconst)
		v.AuxInt = log2(c / 9)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = 3
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MUL_20 applies the final batch of generated
// rewrite rules for OpARM64MUL: constant folding when both operands are
// MOVDconst, replacing the multiply with a single MOVDconst [c*d]. Both
// operand orders are matched. Returns true if a rule fired.
func rewriteValueARM64_OpARM64MUL_20(v *Value) bool {
	// match: (MUL (MOVDconst [c]) (MOVDconst [d]))
	// cond:
	// result: (MOVDconst [c*d])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		d := v_1.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = c * d
		return true
	}
	// match: (MUL (MOVDconst [d]) (MOVDconst [c]))
	// cond:
	// result: (MOVDconst [c*d])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		d := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = c * d
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MULW_0 applies the first batch of generated
// rewrite rules for OpARM64MULW (32-bit multiply). It mirrors the MUL rules
// but compares the constant through int32(c), since only the low 32 bits of
// the operands are significant: NEG for -1, MOVDconst 0 for 0, identity for
// 1, SLLconst for powers of two, and ADDshiftLL for 2^k+1 constants. Each
// rule appears twice, once per operand order. Returns true if a rule fired;
// further MULW rules live in rewriteValueARM64_OpARM64MULW_10 and _20 below.
func rewriteValueARM64_OpARM64MULW_0(v *Value) bool {
	// match: (MULW x (MOVDconst [c]))
	// cond: int32(c)==-1
	// result: (NEG x)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(int32(c) == -1) {
			break
		}
		v.reset(OpARM64NEG)
		v.AddArg(x)
		return true
	}
	// match: (MULW (MOVDconst [c]) x)
	// cond: int32(c)==-1
	// result: (NEG x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(int32(c) == -1) {
			break
		}
		v.reset(OpARM64NEG)
		v.AddArg(x)
		return true
	}
	// match: (MULW _ (MOVDconst [c]))
	// cond: int32(c)==0
	// result: (MOVDconst [0])
	for {
		_ = v.Args[1]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(int32(c) == 0) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (MULW (MOVDconst [c]) _)
	// cond: int32(c)==0
	// result: (MOVDconst [0])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		if !(int32(c) == 0) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (MULW x (MOVDconst [c]))
	// cond: int32(c)==1
	// result: x
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(int32(c) == 1) {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (MULW (MOVDconst [c]) x)
	// cond: int32(c)==1
	// result: x
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(int32(c) == 1) {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (MULW x (MOVDconst [c]))
	// cond: isPowerOfTwo(c)
	// result: (SLLconst [log2(c)] x)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(isPowerOfTwo(c)) {
			break
		}
		v.reset(OpARM64SLLconst)
		v.AuxInt = log2(c)
		v.AddArg(x)
		return true
	}
	// match: (MULW (MOVDconst [c]) x)
	// cond: isPowerOfTwo(c)
	// result: (SLLconst [log2(c)] x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(isPowerOfTwo(c)) {
			break
		}
		v.reset(OpARM64SLLconst)
		v.AuxInt = log2(c)
		v.AddArg(x)
		return true
	}
	// match: (MULW x (MOVDconst [c]))
	// cond: isPowerOfTwo(c-1) && int32(c) >= 3
	// result: (ADDshiftLL x x [log2(c-1)])
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(isPowerOfTwo(c-1) && int32(c) >= 3) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = log2(c - 1)
		v.AddArg(x)
		v.AddArg(x)
		return true
	}
	// match: (MULW (MOVDconst [c]) x)
	// cond: isPowerOfTwo(c-1) && int32(c) >= 3
	// result: (ADDshiftLL x x [log2(c-1)])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(isPowerOfTwo(c-1) && int32(c) >= 3) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = log2(c - 1)
		v.AddArg(x)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MULW_10 applies the second batch of generated
// rewrite rules for OpARM64MULW: strength reduction for constants of the
// form 2^k-1 (int32(c) >= 7, via NEG + ADDshiftLL) and for constants that
// are 3, 5, 7, or 9 times a power of two (ADDshiftLL followed by SLLconst).
// Unlike the 64-bit MUL rules, the 3/5/7/9 rules additionally require
// is32Bit(c) so the folded shift count stays valid for a 32-bit result.
// Each rule appears twice, once per operand order. Returns true if a rule
// fired.
func rewriteValueARM64_OpARM64MULW_10(v *Value) bool {
	b := v.Block
	_ = b
	// match: (MULW x (MOVDconst [c]))
	// cond: isPowerOfTwo(c+1) && int32(c) >= 7
	// result: (ADDshiftLL (NEG <x.Type> x) x [log2(c+1)])
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(isPowerOfTwo(c+1) && int32(c) >= 7) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = log2(c + 1)
		v0 := b.NewValue0(v.Pos, OpARM64NEG, x.Type)
		v0.AddArg(x)
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (MULW (MOVDconst [c]) x)
	// cond: isPowerOfTwo(c+1) && int32(c) >= 7
	// result: (ADDshiftLL (NEG <x.Type> x) x [log2(c+1)])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(isPowerOfTwo(c+1) && int32(c) >= 7) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = log2(c + 1)
		v0 := b.NewValue0(v.Pos, OpARM64NEG, x.Type)
		v0.AddArg(x)
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (MULW x (MOVDconst [c]))
	// cond: c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c)
	// result: (SLLconst [log2(c/3)] (ADDshiftLL <x.Type> x x [1]))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64SLLconst)
		v.AuxInt = log2(c / 3)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = 1
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MULW (MOVDconst [c]) x)
	// cond: c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c)
	// result: (SLLconst [log2(c/3)] (ADDshiftLL <x.Type> x x [1]))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64SLLconst)
		v.AuxInt = log2(c / 3)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = 1
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MULW x (MOVDconst [c]))
	// cond: c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c)
	// result: (SLLconst [log2(c/5)] (ADDshiftLL <x.Type> x x [2]))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64SLLconst)
		v.AuxInt = log2(c / 5)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = 2
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MULW (MOVDconst [c]) x)
	// cond: c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c)
	// result: (SLLconst [log2(c/5)] (ADDshiftLL <x.Type> x x [2]))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64SLLconst)
		v.AuxInt = log2(c / 5)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = 2
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MULW x (MOVDconst [c]))
	// cond: c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c)
	// result: (SLLconst [log2(c/7)] (ADDshiftLL <x.Type> (NEG <x.Type> x) x [3]))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64SLLconst)
		v.AuxInt = log2(c / 7)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = 3
		v1 := b.NewValue0(v.Pos, OpARM64NEG, x.Type)
		v1.AddArg(x)
		v0.AddArg(v1)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MULW (MOVDconst [c]) x)
	// cond: c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c)
	// result: (SLLconst [log2(c/7)] (ADDshiftLL <x.Type> (NEG <x.Type> x) x [3]))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64SLLconst)
		v.AuxInt = log2(c / 7)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = 3
		v1 := b.NewValue0(v.Pos, OpARM64NEG, x.Type)
		v1.AddArg(x)
		v0.AddArg(v1)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MULW x (MOVDconst [c]))
	// cond: c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c)
	// result: (SLLconst [log2(c/9)] (ADDshiftLL <x.Type> x x [3]))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64SLLconst)
		v.AuxInt = log2(c / 9)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = 3
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MULW (MOVDconst [c]) x)
	// cond: c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c)
	// result: (SLLconst [log2(c/9)] (ADDshiftLL <x.Type> x x [3]))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64SLLconst)
		v.AuxInt = log2(c / 9)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = 3
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MULW_20 applies the final batch of generated
// rewrite rules for OpARM64MULW: constant folding when both operands are
// MOVDconst. The product is computed in 32-bit arithmetic and sign-extended
// back to 64 bits (int64(int32(c)*int32(d))), matching MULW semantics.
// Both operand orders are matched. Returns true if a rule fired.
func rewriteValueARM64_OpARM64MULW_20(v *Value) bool {
	// match: (MULW (MOVDconst [c]) (MOVDconst [d]))
	// cond:
	// result: (MOVDconst [int64(int32(c)*int32(d))])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		d := v_1.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64(int32(c) * int32(d))
		return true
	}
	// match: (MULW (MOVDconst [d]) (MOVDconst [c]))
	// cond:
	// result: (MOVDconst [int64(int32(c)*int32(d))])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		d := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64(int32(c) * int32(d))
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MVN_0 applies the generated rewrite rule for
// OpARM64MVN (bitwise NOT): folds MVN of a constant into MOVDconst [^c].
// Returns true if the rule fired.
func rewriteValueARM64_OpARM64MVN_0(v *Value) bool {
	// match: (MVN (MOVDconst [c]))
	// cond:
	// result: (MOVDconst [^c])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = ^c
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64NEG_0 applies the generated rewrite rule for
// OpARM64NEG (arithmetic negation): folds NEG of a constant into
// MOVDconst [-c]. Returns true if the rule fired.
func rewriteValueARM64_OpARM64NEG_0(v *Value) bool {
	// match: (NEG (MOVDconst [c]))
	// cond:
	// result: (MOVDconst [-c])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = -c
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64NotEqual_0 applies the generated rewrite rules
// for OpARM64NotEqual: when the flags argument is statically known
// (FlagEQ / FlagLT_* / FlagGT_*), the boolean result is materialized
// directly as MOVDconst 0 or 1; an InvertFlags wrapper is dropped, since
// swapping comparison operands does not change (in)equality. Returns true
// if a rule fired.
func rewriteValueARM64_OpARM64NotEqual_0(v *Value) bool {
	// match: (NotEqual (FlagEQ))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagEQ {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (NotEqual (FlagLT_ULT))
	// cond:
	// result: (MOVDconst [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagLT_ULT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	}
	// match: (NotEqual (FlagLT_UGT))
	// cond:
	// result: (MOVDconst [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagLT_UGT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	}
	// match: (NotEqual (FlagGT_ULT))
	// cond:
	// result: (MOVDconst [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagGT_ULT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	}
	// match: (NotEqual (FlagGT_UGT))
	// cond:
	// result: (MOVDconst [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagGT_UGT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	}
	// match: (NotEqual (InvertFlags x))
	// cond:
	// result: (NotEqual x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64InvertFlags {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64NotEqual)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64OR_0 applies the first batch of generated rewrite
// rules for the ARM64 OR op: folding a constant operand into ORconst,
// eliminating (OR x x), folding constant shifts into the fused ORshiftLL /
// ORshiftRL / ORshiftRA ops, and one load-merging pattern. Rules are tried in
// source order; the first one that matches rewrites v and returns true.
func rewriteValueARM64_OpARM64OR_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (OR x (MOVDconst [c]))
	// cond:
	// result: (ORconst  [c] x)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64ORconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// Mirror of the previous rule with the constant as the first operand
	// (OR is commutative, so the generator emits both argument orders).
	// match: (OR (MOVDconst [c]) x)
	// cond:
	// result: (ORconst  [c] x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		v.reset(OpARM64ORconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// x|x == x, so replace the OR with a copy of its operand.
	// match: (OR x x)
	// cond:
	// result: x
	for {
		_ = v.Args[1]
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// The next six rules fold a constant shift of one operand into the
	// ARM64 "shifted register" form of OR (one instruction instead of two),
	// again in both argument orders.
	// match: (OR x (SLLconst [c] y))
	// cond:
	// result: (ORshiftLL  x y [c])
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SLLconst {
			break
		}
		c := v_1.AuxInt
		y := v_1.Args[0]
		v.reset(OpARM64ORshiftLL)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (OR (SLLconst [c] y) x)
	// cond:
	// result: (ORshiftLL  x y [c])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SLLconst {
			break
		}
		c := v_0.AuxInt
		y := v_0.Args[0]
		x := v.Args[1]
		v.reset(OpARM64ORshiftLL)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (OR x (SRLconst [c] y))
	// cond:
	// result: (ORshiftRL  x y [c])
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRLconst {
			break
		}
		c := v_1.AuxInt
		y := v_1.Args[0]
		v.reset(OpARM64ORshiftRL)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (OR (SRLconst [c] y) x)
	// cond:
	// result: (ORshiftRL  x y [c])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SRLconst {
			break
		}
		c := v_0.AuxInt
		y := v_0.Args[0]
		x := v.Args[1]
		v.reset(OpARM64ORshiftRL)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (OR x (SRAconst [c] y))
	// cond:
	// result: (ORshiftRA  x y [c])
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRAconst {
			break
		}
		c := v_1.AuxInt
		y := v_1.Args[0]
		v.reset(OpARM64ORshiftRA)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (OR (SRAconst [c] y) x)
	// cond:
	// result: (ORshiftRA  x y [c])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SRAconst {
			break
		}
		c := v_0.AuxInt
		y := v_0.Args[0]
		x := v.Args[1]
		v.reset(OpARM64ORshiftRA)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// Load-merging rule: four byte loads from consecutive offsets i0..i0+3
	// of the same pointer p and memory state mem, combined little-endian via
	// SLLconst/ORshiftLL, are replaced by a single 32-bit MOVWUload. The
	// cond checks that every intermediate value has exactly one use (so it
	// can be clobbered) and that a common merge point for the loads exists.
	// match: (OR <t> o0:(ORshiftLL [8] o1:(ORshiftLL [16] s0:(SLLconst [24] y0:(MOVDnop x0:(MOVBUload [i3] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [i2] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i1] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [i0] {s} p mem)))
	// cond: i1 == i0+1 	&& i2 == i0+2 	&& i3 == i0+3 	&& x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 	&& y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 	&& o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 	&& mergePoint(b,x0,x1,x2,x3) != nil 	&& clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) 	&& clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) 	&& clobber(o0) && clobber(o1) && clobber(s0)
	// result: @mergePoint(b,x0,x1,x2,x3) (MOVWUload <t> {s} (OffPtr <p.Type> [i0] p) mem)
	for {
		t := v.Type
		_ = v.Args[1]
		o0 := v.Args[0]
		if o0.Op != OpARM64ORshiftLL {
			break
		}
		if o0.AuxInt != 8 {
			break
		}
		_ = o0.Args[1]
		o1 := o0.Args[0]
		if o1.Op != OpARM64ORshiftLL {
			break
		}
		if o1.AuxInt != 16 {
			break
		}
		_ = o1.Args[1]
		s0 := o1.Args[0]
		if s0.Op != OpARM64SLLconst {
			break
		}
		if s0.AuxInt != 24 {
			break
		}
		y0 := s0.Args[0]
		if y0.Op != OpARM64MOVDnop {
			break
		}
		x0 := y0.Args[0]
		if x0.Op != OpARM64MOVBUload {
			break
		}
		// First load binds the symbol, pointer and memory; the remaining
		// three loads must match these exactly.
		i3 := x0.AuxInt
		s := x0.Aux
		_ = x0.Args[1]
		p := x0.Args[0]
		mem := x0.Args[1]
		y1 := o1.Args[1]
		if y1.Op != OpARM64MOVDnop {
			break
		}
		x1 := y1.Args[0]
		if x1.Op != OpARM64MOVBUload {
			break
		}
		i2 := x1.AuxInt
		if x1.Aux != s {
			break
		}
		_ = x1.Args[1]
		if p != x1.Args[0] {
			break
		}
		if mem != x1.Args[1] {
			break
		}
		y2 := o0.Args[1]
		if y2.Op != OpARM64MOVDnop {
			break
		}
		x2 := y2.Args[0]
		if x2.Op != OpARM64MOVBUload {
			break
		}
		i1 := x2.AuxInt
		if x2.Aux != s {
			break
		}
		_ = x2.Args[1]
		if p != x2.Args[0] {
			break
		}
		if mem != x2.Args[1] {
			break
		}
		y3 := v.Args[1]
		if y3.Op != OpARM64MOVDnop {
			break
		}
		x3 := y3.Args[0]
		if x3.Op != OpARM64MOVBUload {
			break
		}
		i0 := x3.AuxInt
		if x3.Aux != s {
			break
		}
		_ = x3.Args[1]
		if p != x3.Args[0] {
			break
		}
		if mem != x3.Args[1] {
			break
		}
		if !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0)) {
			break
		}
		// Build the wide load at the loads' merge point; v itself becomes
		// a copy of the new value (the "@mergePoint" result form).
		b = mergePoint(b, x0, x1, x2, x3)
		v0 := b.NewValue0(v.Pos, OpARM64MOVWUload, t)
		v.reset(OpCopy)
		v.AddArg(v0)
		v0.Aux = s
		v1 := b.NewValue0(v.Pos, OpOffPtr, p.Type)
		v1.AuxInt = i0
		v1.AddArg(p)
		v0.AddArg(v1)
		v0.AddArg(mem)
		return true
	}
	return false
}
  6859  func rewriteValueARM64_OpARM64OR_10(v *Value) bool {
  6860  	b := v.Block
  6861  	_ = b
  6862  	// match: (OR <t> y3:(MOVDnop x3:(MOVBUload [i0] {s} p mem)) o0:(ORshiftLL [8] o1:(ORshiftLL [16] s0:(SLLconst [24] y0:(MOVDnop x0:(MOVBUload [i3] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [i2] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i1] {s} p mem))))
  6863  	// cond: i1 == i0+1 	&& i2 == i0+2 	&& i3 == i0+3 	&& x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 	&& y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 	&& o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 	&& mergePoint(b,x0,x1,x2,x3) != nil 	&& clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) 	&& clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) 	&& clobber(o0) && clobber(o1) && clobber(s0)
  6864  	// result: @mergePoint(b,x0,x1,x2,x3) (MOVWUload <t> {s} (OffPtr <p.Type> [i0] p) mem)
  6865  	for {
  6866  		t := v.Type
  6867  		_ = v.Args[1]
  6868  		y3 := v.Args[0]
  6869  		if y3.Op != OpARM64MOVDnop {
  6870  			break
  6871  		}
  6872  		x3 := y3.Args[0]
  6873  		if x3.Op != OpARM64MOVBUload {
  6874  			break
  6875  		}
  6876  		i0 := x3.AuxInt
  6877  		s := x3.Aux
  6878  		_ = x3.Args[1]
  6879  		p := x3.Args[0]
  6880  		mem := x3.Args[1]
  6881  		o0 := v.Args[1]
  6882  		if o0.Op != OpARM64ORshiftLL {
  6883  			break
  6884  		}
  6885  		if o0.AuxInt != 8 {
  6886  			break
  6887  		}
  6888  		_ = o0.Args[1]
  6889  		o1 := o0.Args[0]
  6890  		if o1.Op != OpARM64ORshiftLL {
  6891  			break
  6892  		}
  6893  		if o1.AuxInt != 16 {
  6894  			break
  6895  		}
  6896  		_ = o1.Args[1]
  6897  		s0 := o1.Args[0]
  6898  		if s0.Op != OpARM64SLLconst {
  6899  			break
  6900  		}
  6901  		if s0.AuxInt != 24 {
  6902  			break
  6903  		}
  6904  		y0 := s0.Args[0]
  6905  		if y0.Op != OpARM64MOVDnop {
  6906  			break
  6907  		}
  6908  		x0 := y0.Args[0]
  6909  		if x0.Op != OpARM64MOVBUload {
  6910  			break
  6911  		}
  6912  		i3 := x0.AuxInt
  6913  		if x0.Aux != s {
  6914  			break
  6915  		}
  6916  		_ = x0.Args[1]
  6917  		if p != x0.Args[0] {
  6918  			break
  6919  		}
  6920  		if mem != x0.Args[1] {
  6921  			break
  6922  		}
  6923  		y1 := o1.Args[1]
  6924  		if y1.Op != OpARM64MOVDnop {
  6925  			break
  6926  		}
  6927  		x1 := y1.Args[0]
  6928  		if x1.Op != OpARM64MOVBUload {
  6929  			break
  6930  		}
  6931  		i2 := x1.AuxInt
  6932  		if x1.Aux != s {
  6933  			break
  6934  		}
  6935  		_ = x1.Args[1]
  6936  		if p != x1.Args[0] {
  6937  			break
  6938  		}
  6939  		if mem != x1.Args[1] {
  6940  			break
  6941  		}
  6942  		y2 := o0.Args[1]
  6943  		if y2.Op != OpARM64MOVDnop {
  6944  			break
  6945  		}
  6946  		x2 := y2.Args[0]
  6947  		if x2.Op != OpARM64MOVBUload {
  6948  			break
  6949  		}
  6950  		i1 := x2.AuxInt
  6951  		if x2.Aux != s {
  6952  			break
  6953  		}
  6954  		_ = x2.Args[1]
  6955  		if p != x2.Args[0] {
  6956  			break
  6957  		}
  6958  		if mem != x2.Args[1] {
  6959  			break
  6960  		}
  6961  		if !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0)) {
  6962  			break
  6963  		}
  6964  		b = mergePoint(b, x0, x1, x2, x3)
  6965  		v0 := b.NewValue0(v.Pos, OpARM64MOVWUload, t)
  6966  		v.reset(OpCopy)
  6967  		v.AddArg(v0)
  6968  		v0.Aux = s
  6969  		v1 := b.NewValue0(v.Pos, OpOffPtr, p.Type)
  6970  		v1.AuxInt = i0
  6971  		v1.AddArg(p)
  6972  		v0.AddArg(v1)
  6973  		v0.AddArg(mem)
  6974  		return true
  6975  	}
  6976  	// match: (OR <t> o0:(ORshiftLL [8] o1:(ORshiftLL [16] o2:(ORshiftLL [24] o3:(ORshiftLL [32] o4:(ORshiftLL [40] o5:(ORshiftLL [48] s0:(SLLconst [56] y0:(MOVDnop x0:(MOVBUload [i7] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [i6] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i5] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [i4] {s} p mem))) y4:(MOVDnop x4:(MOVBUload [i3] {s} p mem))) y5:(MOVDnop x5:(MOVBUload [i2] {s} p mem))) y6:(MOVDnop x6:(MOVBUload [i1] {s} p mem))) y7:(MOVDnop x7:(MOVBUload [i0] {s} p mem)))
  6977  	// cond: i1 == i0+1 	&& i2 == i0+2 	&& i3 == i0+3 	&& i4 == i0+4 	&& i5 == i0+5 	&& i6 == i0+6 	&& i7 == i0+7 	&& x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 	&& x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 	&& y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 	&& y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 	&& o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 	&& o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 	&& mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) != nil 	&& clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) 	&& clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) 	&& clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) 	&& clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) 	&& clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) 	&& clobber(o4) && clobber(o5) && clobber(s0)
  6978  	// result: @mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) (REV <t> (MOVDload <t> {s} (OffPtr <p.Type> [i0] p) mem))
  6979  	for {
  6980  		t := v.Type
  6981  		_ = v.Args[1]
  6982  		o0 := v.Args[0]
  6983  		if o0.Op != OpARM64ORshiftLL {
  6984  			break
  6985  		}
  6986  		if o0.AuxInt != 8 {
  6987  			break
  6988  		}
  6989  		_ = o0.Args[1]
  6990  		o1 := o0.Args[0]
  6991  		if o1.Op != OpARM64ORshiftLL {
  6992  			break
  6993  		}
  6994  		if o1.AuxInt != 16 {
  6995  			break
  6996  		}
  6997  		_ = o1.Args[1]
  6998  		o2 := o1.Args[0]
  6999  		if o2.Op != OpARM64ORshiftLL {
  7000  			break
  7001  		}
  7002  		if o2.AuxInt != 24 {
  7003  			break
  7004  		}
  7005  		_ = o2.Args[1]
  7006  		o3 := o2.Args[0]
  7007  		if o3.Op != OpARM64ORshiftLL {
  7008  			break
  7009  		}
  7010  		if o3.AuxInt != 32 {
  7011  			break
  7012  		}
  7013  		_ = o3.Args[1]
  7014  		o4 := o3.Args[0]
  7015  		if o4.Op != OpARM64ORshiftLL {
  7016  			break
  7017  		}
  7018  		if o4.AuxInt != 40 {
  7019  			break
  7020  		}
  7021  		_ = o4.Args[1]
  7022  		o5 := o4.Args[0]
  7023  		if o5.Op != OpARM64ORshiftLL {
  7024  			break
  7025  		}
  7026  		if o5.AuxInt != 48 {
  7027  			break
  7028  		}
  7029  		_ = o5.Args[1]
  7030  		s0 := o5.Args[0]
  7031  		if s0.Op != OpARM64SLLconst {
  7032  			break
  7033  		}
  7034  		if s0.AuxInt != 56 {
  7035  			break
  7036  		}
  7037  		y0 := s0.Args[0]
  7038  		if y0.Op != OpARM64MOVDnop {
  7039  			break
  7040  		}
  7041  		x0 := y0.Args[0]
  7042  		if x0.Op != OpARM64MOVBUload {
  7043  			break
  7044  		}
  7045  		i7 := x0.AuxInt
  7046  		s := x0.Aux
  7047  		_ = x0.Args[1]
  7048  		p := x0.Args[0]
  7049  		mem := x0.Args[1]
  7050  		y1 := o5.Args[1]
  7051  		if y1.Op != OpARM64MOVDnop {
  7052  			break
  7053  		}
  7054  		x1 := y1.Args[0]
  7055  		if x1.Op != OpARM64MOVBUload {
  7056  			break
  7057  		}
  7058  		i6 := x1.AuxInt
  7059  		if x1.Aux != s {
  7060  			break
  7061  		}
  7062  		_ = x1.Args[1]
  7063  		if p != x1.Args[0] {
  7064  			break
  7065  		}
  7066  		if mem != x1.Args[1] {
  7067  			break
  7068  		}
  7069  		y2 := o4.Args[1]
  7070  		if y2.Op != OpARM64MOVDnop {
  7071  			break
  7072  		}
  7073  		x2 := y2.Args[0]
  7074  		if x2.Op != OpARM64MOVBUload {
  7075  			break
  7076  		}
  7077  		i5 := x2.AuxInt
  7078  		if x2.Aux != s {
  7079  			break
  7080  		}
  7081  		_ = x2.Args[1]
  7082  		if p != x2.Args[0] {
  7083  			break
  7084  		}
  7085  		if mem != x2.Args[1] {
  7086  			break
  7087  		}
  7088  		y3 := o3.Args[1]
  7089  		if y3.Op != OpARM64MOVDnop {
  7090  			break
  7091  		}
  7092  		x3 := y3.Args[0]
  7093  		if x3.Op != OpARM64MOVBUload {
  7094  			break
  7095  		}
  7096  		i4 := x3.AuxInt
  7097  		if x3.Aux != s {
  7098  			break
  7099  		}
  7100  		_ = x3.Args[1]
  7101  		if p != x3.Args[0] {
  7102  			break
  7103  		}
  7104  		if mem != x3.Args[1] {
  7105  			break
  7106  		}
  7107  		y4 := o2.Args[1]
  7108  		if y4.Op != OpARM64MOVDnop {
  7109  			break
  7110  		}
  7111  		x4 := y4.Args[0]
  7112  		if x4.Op != OpARM64MOVBUload {
  7113  			break
  7114  		}
  7115  		i3 := x4.AuxInt
  7116  		if x4.Aux != s {
  7117  			break
  7118  		}
  7119  		_ = x4.Args[1]
  7120  		if p != x4.Args[0] {
  7121  			break
  7122  		}
  7123  		if mem != x4.Args[1] {
  7124  			break
  7125  		}
  7126  		y5 := o1.Args[1]
  7127  		if y5.Op != OpARM64MOVDnop {
  7128  			break
  7129  		}
  7130  		x5 := y5.Args[0]
  7131  		if x5.Op != OpARM64MOVBUload {
  7132  			break
  7133  		}
  7134  		i2 := x5.AuxInt
  7135  		if x5.Aux != s {
  7136  			break
  7137  		}
  7138  		_ = x5.Args[1]
  7139  		if p != x5.Args[0] {
  7140  			break
  7141  		}
  7142  		if mem != x5.Args[1] {
  7143  			break
  7144  		}
  7145  		y6 := o0.Args[1]
  7146  		if y6.Op != OpARM64MOVDnop {
  7147  			break
  7148  		}
  7149  		x6 := y6.Args[0]
  7150  		if x6.Op != OpARM64MOVBUload {
  7151  			break
  7152  		}
  7153  		i1 := x6.AuxInt
  7154  		if x6.Aux != s {
  7155  			break
  7156  		}
  7157  		_ = x6.Args[1]
  7158  		if p != x6.Args[0] {
  7159  			break
  7160  		}
  7161  		if mem != x6.Args[1] {
  7162  			break
  7163  		}
  7164  		y7 := v.Args[1]
  7165  		if y7.Op != OpARM64MOVDnop {
  7166  			break
  7167  		}
  7168  		x7 := y7.Args[0]
  7169  		if x7.Op != OpARM64MOVBUload {
  7170  			break
  7171  		}
  7172  		i0 := x7.AuxInt
  7173  		if x7.Aux != s {
  7174  			break
  7175  		}
  7176  		_ = x7.Args[1]
  7177  		if p != x7.Args[0] {
  7178  			break
  7179  		}
  7180  		if mem != x7.Args[1] {
  7181  			break
  7182  		}
  7183  		if !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0)) {
  7184  			break
  7185  		}
  7186  		b = mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7)
  7187  		v0 := b.NewValue0(v.Pos, OpARM64REV, t)
  7188  		v.reset(OpCopy)
  7189  		v.AddArg(v0)
  7190  		v1 := b.NewValue0(v.Pos, OpARM64MOVDload, t)
  7191  		v1.Aux = s
  7192  		v2 := b.NewValue0(v.Pos, OpOffPtr, p.Type)
  7193  		v2.AuxInt = i0
  7194  		v2.AddArg(p)
  7195  		v1.AddArg(v2)
  7196  		v1.AddArg(mem)
  7197  		v0.AddArg(v1)
  7198  		return true
  7199  	}
  7200  	// match: (OR <t> y7:(MOVDnop x7:(MOVBUload [i0] {s} p mem)) o0:(ORshiftLL [8] o1:(ORshiftLL [16] o2:(ORshiftLL [24] o3:(ORshiftLL [32] o4:(ORshiftLL [40] o5:(ORshiftLL [48] s0:(SLLconst [56] y0:(MOVDnop x0:(MOVBUload [i7] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [i6] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i5] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [i4] {s} p mem))) y4:(MOVDnop x4:(MOVBUload [i3] {s} p mem))) y5:(MOVDnop x5:(MOVBUload [i2] {s} p mem))) y6:(MOVDnop x6:(MOVBUload [i1] {s} p mem))))
  7201  	// cond: i1 == i0+1 	&& i2 == i0+2 	&& i3 == i0+3 	&& i4 == i0+4 	&& i5 == i0+5 	&& i6 == i0+6 	&& i7 == i0+7 	&& x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 	&& x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 	&& y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 	&& y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 	&& o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 	&& o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 	&& mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) != nil 	&& clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) 	&& clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) 	&& clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) 	&& clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) 	&& clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) 	&& clobber(o4) && clobber(o5) && clobber(s0)
  7202  	// result: @mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) (REV <t> (MOVDload <t> {s} (OffPtr <p.Type> [i0] p) mem))
  7203  	for {
  7204  		t := v.Type
  7205  		_ = v.Args[1]
  7206  		y7 := v.Args[0]
  7207  		if y7.Op != OpARM64MOVDnop {
  7208  			break
  7209  		}
  7210  		x7 := y7.Args[0]
  7211  		if x7.Op != OpARM64MOVBUload {
  7212  			break
  7213  		}
  7214  		i0 := x7.AuxInt
  7215  		s := x7.Aux
  7216  		_ = x7.Args[1]
  7217  		p := x7.Args[0]
  7218  		mem := x7.Args[1]
  7219  		o0 := v.Args[1]
  7220  		if o0.Op != OpARM64ORshiftLL {
  7221  			break
  7222  		}
  7223  		if o0.AuxInt != 8 {
  7224  			break
  7225  		}
  7226  		_ = o0.Args[1]
  7227  		o1 := o0.Args[0]
  7228  		if o1.Op != OpARM64ORshiftLL {
  7229  			break
  7230  		}
  7231  		if o1.AuxInt != 16 {
  7232  			break
  7233  		}
  7234  		_ = o1.Args[1]
  7235  		o2 := o1.Args[0]
  7236  		if o2.Op != OpARM64ORshiftLL {
  7237  			break
  7238  		}
  7239  		if o2.AuxInt != 24 {
  7240  			break
  7241  		}
  7242  		_ = o2.Args[1]
  7243  		o3 := o2.Args[0]
  7244  		if o3.Op != OpARM64ORshiftLL {
  7245  			break
  7246  		}
  7247  		if o3.AuxInt != 32 {
  7248  			break
  7249  		}
  7250  		_ = o3.Args[1]
  7251  		o4 := o3.Args[0]
  7252  		if o4.Op != OpARM64ORshiftLL {
  7253  			break
  7254  		}
  7255  		if o4.AuxInt != 40 {
  7256  			break
  7257  		}
  7258  		_ = o4.Args[1]
  7259  		o5 := o4.Args[0]
  7260  		if o5.Op != OpARM64ORshiftLL {
  7261  			break
  7262  		}
  7263  		if o5.AuxInt != 48 {
  7264  			break
  7265  		}
  7266  		_ = o5.Args[1]
  7267  		s0 := o5.Args[0]
  7268  		if s0.Op != OpARM64SLLconst {
  7269  			break
  7270  		}
  7271  		if s0.AuxInt != 56 {
  7272  			break
  7273  		}
  7274  		y0 := s0.Args[0]
  7275  		if y0.Op != OpARM64MOVDnop {
  7276  			break
  7277  		}
  7278  		x0 := y0.Args[0]
  7279  		if x0.Op != OpARM64MOVBUload {
  7280  			break
  7281  		}
  7282  		i7 := x0.AuxInt
  7283  		if x0.Aux != s {
  7284  			break
  7285  		}
  7286  		_ = x0.Args[1]
  7287  		if p != x0.Args[0] {
  7288  			break
  7289  		}
  7290  		if mem != x0.Args[1] {
  7291  			break
  7292  		}
  7293  		y1 := o5.Args[1]
  7294  		if y1.Op != OpARM64MOVDnop {
  7295  			break
  7296  		}
  7297  		x1 := y1.Args[0]
  7298  		if x1.Op != OpARM64MOVBUload {
  7299  			break
  7300  		}
  7301  		i6 := x1.AuxInt
  7302  		if x1.Aux != s {
  7303  			break
  7304  		}
  7305  		_ = x1.Args[1]
  7306  		if p != x1.Args[0] {
  7307  			break
  7308  		}
  7309  		if mem != x1.Args[1] {
  7310  			break
  7311  		}
  7312  		y2 := o4.Args[1]
  7313  		if y2.Op != OpARM64MOVDnop {
  7314  			break
  7315  		}
  7316  		x2 := y2.Args[0]
  7317  		if x2.Op != OpARM64MOVBUload {
  7318  			break
  7319  		}
  7320  		i5 := x2.AuxInt
  7321  		if x2.Aux != s {
  7322  			break
  7323  		}
  7324  		_ = x2.Args[1]
  7325  		if p != x2.Args[0] {
  7326  			break
  7327  		}
  7328  		if mem != x2.Args[1] {
  7329  			break
  7330  		}
  7331  		y3 := o3.Args[1]
  7332  		if y3.Op != OpARM64MOVDnop {
  7333  			break
  7334  		}
  7335  		x3 := y3.Args[0]
  7336  		if x3.Op != OpARM64MOVBUload {
  7337  			break
  7338  		}
  7339  		i4 := x3.AuxInt
  7340  		if x3.Aux != s {
  7341  			break
  7342  		}
  7343  		_ = x3.Args[1]
  7344  		if p != x3.Args[0] {
  7345  			break
  7346  		}
  7347  		if mem != x3.Args[1] {
  7348  			break
  7349  		}
  7350  		y4 := o2.Args[1]
  7351  		if y4.Op != OpARM64MOVDnop {
  7352  			break
  7353  		}
  7354  		x4 := y4.Args[0]
  7355  		if x4.Op != OpARM64MOVBUload {
  7356  			break
  7357  		}
  7358  		i3 := x4.AuxInt
  7359  		if x4.Aux != s {
  7360  			break
  7361  		}
  7362  		_ = x4.Args[1]
  7363  		if p != x4.Args[0] {
  7364  			break
  7365  		}
  7366  		if mem != x4.Args[1] {
  7367  			break
  7368  		}
  7369  		y5 := o1.Args[1]
  7370  		if y5.Op != OpARM64MOVDnop {
  7371  			break
  7372  		}
  7373  		x5 := y5.Args[0]
  7374  		if x5.Op != OpARM64MOVBUload {
  7375  			break
  7376  		}
  7377  		i2 := x5.AuxInt
  7378  		if x5.Aux != s {
  7379  			break
  7380  		}
  7381  		_ = x5.Args[1]
  7382  		if p != x5.Args[0] {
  7383  			break
  7384  		}
  7385  		if mem != x5.Args[1] {
  7386  			break
  7387  		}
  7388  		y6 := o0.Args[1]
  7389  		if y6.Op != OpARM64MOVDnop {
  7390  			break
  7391  		}
  7392  		x6 := y6.Args[0]
  7393  		if x6.Op != OpARM64MOVBUload {
  7394  			break
  7395  		}
  7396  		i1 := x6.AuxInt
  7397  		if x6.Aux != s {
  7398  			break
  7399  		}
  7400  		_ = x6.Args[1]
  7401  		if p != x6.Args[0] {
  7402  			break
  7403  		}
  7404  		if mem != x6.Args[1] {
  7405  			break
  7406  		}
  7407  		if !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0)) {
  7408  			break
  7409  		}
  7410  		b = mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7)
  7411  		v0 := b.NewValue0(v.Pos, OpARM64REV, t)
  7412  		v.reset(OpCopy)
  7413  		v.AddArg(v0)
  7414  		v1 := b.NewValue0(v.Pos, OpARM64MOVDload, t)
  7415  		v1.Aux = s
  7416  		v2 := b.NewValue0(v.Pos, OpOffPtr, p.Type)
  7417  		v2.AuxInt = i0
  7418  		v2.AddArg(p)
  7419  		v1.AddArg(v2)
  7420  		v1.AddArg(mem)
  7421  		v0.AddArg(v1)
  7422  		return true
  7423  	}
  7424  	// match: (OR <t> o0:(ORshiftLL [8] o1:(ORshiftLL [16] s0:(SLLconst [24] y0:(MOVDnop x0:(MOVBUload [i0] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [i1] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i2] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [i3] {s} p mem)))
  7425  	// cond: i1 == i0+1 	&& i2 == i0+2 	&& i3 == i0+3 	&& x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 	&& y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 	&& o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 	&& mergePoint(b,x0,x1,x2,x3) != nil 	&& clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) 	&& clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) 	&& clobber(o0) && clobber(o1) && clobber(s0)
  7426  	// result: @mergePoint(b,x0,x1,x2,x3) (REVW <t> (MOVWUload <t> {s} (OffPtr <p.Type> [i0] p) mem))
  7427  	for {
  7428  		t := v.Type
  7429  		_ = v.Args[1]
  7430  		o0 := v.Args[0]
  7431  		if o0.Op != OpARM64ORshiftLL {
  7432  			break
  7433  		}
  7434  		if o0.AuxInt != 8 {
  7435  			break
  7436  		}
  7437  		_ = o0.Args[1]
  7438  		o1 := o0.Args[0]
  7439  		if o1.Op != OpARM64ORshiftLL {
  7440  			break
  7441  		}
  7442  		if o1.AuxInt != 16 {
  7443  			break
  7444  		}
  7445  		_ = o1.Args[1]
  7446  		s0 := o1.Args[0]
  7447  		if s0.Op != OpARM64SLLconst {
  7448  			break
  7449  		}
  7450  		if s0.AuxInt != 24 {
  7451  			break
  7452  		}
  7453  		y0 := s0.Args[0]
  7454  		if y0.Op != OpARM64MOVDnop {
  7455  			break
  7456  		}
  7457  		x0 := y0.Args[0]
  7458  		if x0.Op != OpARM64MOVBUload {
  7459  			break
  7460  		}
  7461  		i0 := x0.AuxInt
  7462  		s := x0.Aux
  7463  		_ = x0.Args[1]
  7464  		p := x0.Args[0]
  7465  		mem := x0.Args[1]
  7466  		y1 := o1.Args[1]
  7467  		if y1.Op != OpARM64MOVDnop {
  7468  			break
  7469  		}
  7470  		x1 := y1.Args[0]
  7471  		if x1.Op != OpARM64MOVBUload {
  7472  			break
  7473  		}
  7474  		i1 := x1.AuxInt
  7475  		if x1.Aux != s {
  7476  			break
  7477  		}
  7478  		_ = x1.Args[1]
  7479  		if p != x1.Args[0] {
  7480  			break
  7481  		}
  7482  		if mem != x1.Args[1] {
  7483  			break
  7484  		}
  7485  		y2 := o0.Args[1]
  7486  		if y2.Op != OpARM64MOVDnop {
  7487  			break
  7488  		}
  7489  		x2 := y2.Args[0]
  7490  		if x2.Op != OpARM64MOVBUload {
  7491  			break
  7492  		}
  7493  		i2 := x2.AuxInt
  7494  		if x2.Aux != s {
  7495  			break
  7496  		}
  7497  		_ = x2.Args[1]
  7498  		if p != x2.Args[0] {
  7499  			break
  7500  		}
  7501  		if mem != x2.Args[1] {
  7502  			break
  7503  		}
  7504  		y3 := v.Args[1]
  7505  		if y3.Op != OpARM64MOVDnop {
  7506  			break
  7507  		}
  7508  		x3 := y3.Args[0]
  7509  		if x3.Op != OpARM64MOVBUload {
  7510  			break
  7511  		}
  7512  		i3 := x3.AuxInt
  7513  		if x3.Aux != s {
  7514  			break
  7515  		}
  7516  		_ = x3.Args[1]
  7517  		if p != x3.Args[0] {
  7518  			break
  7519  		}
  7520  		if mem != x3.Args[1] {
  7521  			break
  7522  		}
  7523  		if !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0)) {
  7524  			break
  7525  		}
  7526  		b = mergePoint(b, x0, x1, x2, x3)
  7527  		v0 := b.NewValue0(v.Pos, OpARM64REVW, t)
  7528  		v.reset(OpCopy)
  7529  		v.AddArg(v0)
  7530  		v1 := b.NewValue0(v.Pos, OpARM64MOVWUload, t)
  7531  		v1.Aux = s
  7532  		v2 := b.NewValue0(v.Pos, OpOffPtr, p.Type)
  7533  		v2.AuxInt = i0
  7534  		v2.AddArg(p)
  7535  		v1.AddArg(v2)
  7536  		v1.AddArg(mem)
  7537  		v0.AddArg(v1)
  7538  		return true
  7539  	}
  7540  	// match: (OR <t> y3:(MOVDnop x3:(MOVBUload [i3] {s} p mem)) o0:(ORshiftLL [8] o1:(ORshiftLL [16] s0:(SLLconst [24] y0:(MOVDnop x0:(MOVBUload [i0] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [i1] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i2] {s} p mem))))
  7541  	// cond: i1 == i0+1 	&& i2 == i0+2 	&& i3 == i0+3 	&& x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 	&& y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 	&& o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 	&& mergePoint(b,x0,x1,x2,x3) != nil 	&& clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) 	&& clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) 	&& clobber(o0) && clobber(o1) && clobber(s0)
  7542  	// result: @mergePoint(b,x0,x1,x2,x3) (REVW <t> (MOVWUload <t> {s} (OffPtr <p.Type> [i0] p) mem))
  7543  	for {
  7544  		t := v.Type
  7545  		_ = v.Args[1]
  7546  		y3 := v.Args[0]
  7547  		if y3.Op != OpARM64MOVDnop {
  7548  			break
  7549  		}
  7550  		x3 := y3.Args[0]
  7551  		if x3.Op != OpARM64MOVBUload {
  7552  			break
  7553  		}
  7554  		i3 := x3.AuxInt
  7555  		s := x3.Aux
  7556  		_ = x3.Args[1]
  7557  		p := x3.Args[0]
  7558  		mem := x3.Args[1]
  7559  		o0 := v.Args[1]
  7560  		if o0.Op != OpARM64ORshiftLL {
  7561  			break
  7562  		}
  7563  		if o0.AuxInt != 8 {
  7564  			break
  7565  		}
  7566  		_ = o0.Args[1]
  7567  		o1 := o0.Args[0]
  7568  		if o1.Op != OpARM64ORshiftLL {
  7569  			break
  7570  		}
  7571  		if o1.AuxInt != 16 {
  7572  			break
  7573  		}
  7574  		_ = o1.Args[1]
  7575  		s0 := o1.Args[0]
  7576  		if s0.Op != OpARM64SLLconst {
  7577  			break
  7578  		}
  7579  		if s0.AuxInt != 24 {
  7580  			break
  7581  		}
  7582  		y0 := s0.Args[0]
  7583  		if y0.Op != OpARM64MOVDnop {
  7584  			break
  7585  		}
  7586  		x0 := y0.Args[0]
  7587  		if x0.Op != OpARM64MOVBUload {
  7588  			break
  7589  		}
  7590  		i0 := x0.AuxInt
  7591  		if x0.Aux != s {
  7592  			break
  7593  		}
  7594  		_ = x0.Args[1]
  7595  		if p != x0.Args[0] {
  7596  			break
  7597  		}
  7598  		if mem != x0.Args[1] {
  7599  			break
  7600  		}
  7601  		y1 := o1.Args[1]
  7602  		if y1.Op != OpARM64MOVDnop {
  7603  			break
  7604  		}
  7605  		x1 := y1.Args[0]
  7606  		if x1.Op != OpARM64MOVBUload {
  7607  			break
  7608  		}
  7609  		i1 := x1.AuxInt
  7610  		if x1.Aux != s {
  7611  			break
  7612  		}
  7613  		_ = x1.Args[1]
  7614  		if p != x1.Args[0] {
  7615  			break
  7616  		}
  7617  		if mem != x1.Args[1] {
  7618  			break
  7619  		}
  7620  		y2 := o0.Args[1]
  7621  		if y2.Op != OpARM64MOVDnop {
  7622  			break
  7623  		}
  7624  		x2 := y2.Args[0]
  7625  		if x2.Op != OpARM64MOVBUload {
  7626  			break
  7627  		}
  7628  		i2 := x2.AuxInt
  7629  		if x2.Aux != s {
  7630  			break
  7631  		}
  7632  		_ = x2.Args[1]
  7633  		if p != x2.Args[0] {
  7634  			break
  7635  		}
  7636  		if mem != x2.Args[1] {
  7637  			break
  7638  		}
  7639  		if !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0)) {
  7640  			break
  7641  		}
  7642  		b = mergePoint(b, x0, x1, x2, x3)
  7643  		v0 := b.NewValue0(v.Pos, OpARM64REVW, t)
  7644  		v.reset(OpCopy)
  7645  		v.AddArg(v0)
  7646  		v1 := b.NewValue0(v.Pos, OpARM64MOVWUload, t)
  7647  		v1.Aux = s
  7648  		v2 := b.NewValue0(v.Pos, OpOffPtr, p.Type)
  7649  		v2.AuxInt = i0
  7650  		v2.AddArg(p)
  7651  		v1.AddArg(v2)
  7652  		v1.AddArg(mem)
  7653  		v0.AddArg(v1)
  7654  		return true
  7655  	}
  7656  	// match: (OR <t> o0:(ORshiftLL [8] o1:(ORshiftLL [16] o2:(ORshiftLL [24] o3:(ORshiftLL [32] o4:(ORshiftLL [40] o5:(ORshiftLL [48] s0:(SLLconst [56] y0:(MOVDnop x0:(MOVBUload [i0] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [i1] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i2] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [i3] {s} p mem))) y4:(MOVDnop x4:(MOVBUload [i4] {s} p mem))) y5:(MOVDnop x5:(MOVBUload [i5] {s} p mem))) y6:(MOVDnop x6:(MOVBUload [i6] {s} p mem))) y7:(MOVDnop x7:(MOVBUload [i7] {s} p mem)))
  7657  	// cond: i1 == i0+1 	&& i2 == i0+2 	&& i3 == i0+3 	&& i4 == i0+4 	&& i5 == i0+5 	&& i6 == i0+6 	&& i7 == i0+7 	&& x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 	&& x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 	&& y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 	&& y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 	&& o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 	&& o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 	&& mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) != nil 	&& clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) 	&& clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) 	&& clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) 	&& clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) 	&& clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) 	&& clobber(o4) && clobber(o5) && clobber(s0)
  7658  	// result: @mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) (REV <t> (MOVDload <t> {s} (OffPtr <p.Type> [i0] p) mem))
  7659  	for {
  7660  		t := v.Type
  7661  		_ = v.Args[1]
  7662  		o0 := v.Args[0]
  7663  		if o0.Op != OpARM64ORshiftLL {
  7664  			break
  7665  		}
  7666  		if o0.AuxInt != 8 {
  7667  			break
  7668  		}
  7669  		_ = o0.Args[1]
  7670  		o1 := o0.Args[0]
  7671  		if o1.Op != OpARM64ORshiftLL {
  7672  			break
  7673  		}
  7674  		if o1.AuxInt != 16 {
  7675  			break
  7676  		}
  7677  		_ = o1.Args[1]
  7678  		o2 := o1.Args[0]
  7679  		if o2.Op != OpARM64ORshiftLL {
  7680  			break
  7681  		}
  7682  		if o2.AuxInt != 24 {
  7683  			break
  7684  		}
  7685  		_ = o2.Args[1]
  7686  		o3 := o2.Args[0]
  7687  		if o3.Op != OpARM64ORshiftLL {
  7688  			break
  7689  		}
  7690  		if o3.AuxInt != 32 {
  7691  			break
  7692  		}
  7693  		_ = o3.Args[1]
  7694  		o4 := o3.Args[0]
  7695  		if o4.Op != OpARM64ORshiftLL {
  7696  			break
  7697  		}
  7698  		if o4.AuxInt != 40 {
  7699  			break
  7700  		}
  7701  		_ = o4.Args[1]
  7702  		o5 := o4.Args[0]
  7703  		if o5.Op != OpARM64ORshiftLL {
  7704  			break
  7705  		}
  7706  		if o5.AuxInt != 48 {
  7707  			break
  7708  		}
  7709  		_ = o5.Args[1]
  7710  		s0 := o5.Args[0]
  7711  		if s0.Op != OpARM64SLLconst {
  7712  			break
  7713  		}
  7714  		if s0.AuxInt != 56 {
  7715  			break
  7716  		}
  7717  		y0 := s0.Args[0]
  7718  		if y0.Op != OpARM64MOVDnop {
  7719  			break
  7720  		}
  7721  		x0 := y0.Args[0]
  7722  		if x0.Op != OpARM64MOVBUload {
  7723  			break
  7724  		}
  7725  		i0 := x0.AuxInt
  7726  		s := x0.Aux
  7727  		_ = x0.Args[1]
  7728  		p := x0.Args[0]
  7729  		mem := x0.Args[1]
  7730  		y1 := o5.Args[1]
  7731  		if y1.Op != OpARM64MOVDnop {
  7732  			break
  7733  		}
  7734  		x1 := y1.Args[0]
  7735  		if x1.Op != OpARM64MOVBUload {
  7736  			break
  7737  		}
  7738  		i1 := x1.AuxInt
  7739  		if x1.Aux != s {
  7740  			break
  7741  		}
  7742  		_ = x1.Args[1]
  7743  		if p != x1.Args[0] {
  7744  			break
  7745  		}
  7746  		if mem != x1.Args[1] {
  7747  			break
  7748  		}
  7749  		y2 := o4.Args[1]
  7750  		if y2.Op != OpARM64MOVDnop {
  7751  			break
  7752  		}
  7753  		x2 := y2.Args[0]
  7754  		if x2.Op != OpARM64MOVBUload {
  7755  			break
  7756  		}
  7757  		i2 := x2.AuxInt
  7758  		if x2.Aux != s {
  7759  			break
  7760  		}
  7761  		_ = x2.Args[1]
  7762  		if p != x2.Args[0] {
  7763  			break
  7764  		}
  7765  		if mem != x2.Args[1] {
  7766  			break
  7767  		}
  7768  		y3 := o3.Args[1]
  7769  		if y3.Op != OpARM64MOVDnop {
  7770  			break
  7771  		}
  7772  		x3 := y3.Args[0]
  7773  		if x3.Op != OpARM64MOVBUload {
  7774  			break
  7775  		}
  7776  		i3 := x3.AuxInt
  7777  		if x3.Aux != s {
  7778  			break
  7779  		}
  7780  		_ = x3.Args[1]
  7781  		if p != x3.Args[0] {
  7782  			break
  7783  		}
  7784  		if mem != x3.Args[1] {
  7785  			break
  7786  		}
  7787  		y4 := o2.Args[1]
  7788  		if y4.Op != OpARM64MOVDnop {
  7789  			break
  7790  		}
  7791  		x4 := y4.Args[0]
  7792  		if x4.Op != OpARM64MOVBUload {
  7793  			break
  7794  		}
  7795  		i4 := x4.AuxInt
  7796  		if x4.Aux != s {
  7797  			break
  7798  		}
  7799  		_ = x4.Args[1]
  7800  		if p != x4.Args[0] {
  7801  			break
  7802  		}
  7803  		if mem != x4.Args[1] {
  7804  			break
  7805  		}
  7806  		y5 := o1.Args[1]
  7807  		if y5.Op != OpARM64MOVDnop {
  7808  			break
  7809  		}
  7810  		x5 := y5.Args[0]
  7811  		if x5.Op != OpARM64MOVBUload {
  7812  			break
  7813  		}
  7814  		i5 := x5.AuxInt
  7815  		if x5.Aux != s {
  7816  			break
  7817  		}
  7818  		_ = x5.Args[1]
  7819  		if p != x5.Args[0] {
  7820  			break
  7821  		}
  7822  		if mem != x5.Args[1] {
  7823  			break
  7824  		}
  7825  		y6 := o0.Args[1]
  7826  		if y6.Op != OpARM64MOVDnop {
  7827  			break
  7828  		}
  7829  		x6 := y6.Args[0]
  7830  		if x6.Op != OpARM64MOVBUload {
  7831  			break
  7832  		}
  7833  		i6 := x6.AuxInt
  7834  		if x6.Aux != s {
  7835  			break
  7836  		}
  7837  		_ = x6.Args[1]
  7838  		if p != x6.Args[0] {
  7839  			break
  7840  		}
  7841  		if mem != x6.Args[1] {
  7842  			break
  7843  		}
  7844  		y7 := v.Args[1]
  7845  		if y7.Op != OpARM64MOVDnop {
  7846  			break
  7847  		}
  7848  		x7 := y7.Args[0]
  7849  		if x7.Op != OpARM64MOVBUload {
  7850  			break
  7851  		}
  7852  		i7 := x7.AuxInt
  7853  		if x7.Aux != s {
  7854  			break
  7855  		}
  7856  		_ = x7.Args[1]
  7857  		if p != x7.Args[0] {
  7858  			break
  7859  		}
  7860  		if mem != x7.Args[1] {
  7861  			break
  7862  		}
  7863  		if !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0)) {
  7864  			break
  7865  		}
  7866  		b = mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7)
  7867  		v0 := b.NewValue0(v.Pos, OpARM64REV, t)
  7868  		v.reset(OpCopy)
  7869  		v.AddArg(v0)
  7870  		v1 := b.NewValue0(v.Pos, OpARM64MOVDload, t)
  7871  		v1.Aux = s
  7872  		v2 := b.NewValue0(v.Pos, OpOffPtr, p.Type)
  7873  		v2.AuxInt = i0
  7874  		v2.AddArg(p)
  7875  		v1.AddArg(v2)
  7876  		v1.AddArg(mem)
  7877  		v0.AddArg(v1)
  7878  		return true
  7879  	}
  7880  	// match: (OR <t> y7:(MOVDnop x7:(MOVBUload [i7] {s} p mem)) o0:(ORshiftLL [8] o1:(ORshiftLL [16] o2:(ORshiftLL [24] o3:(ORshiftLL [32] o4:(ORshiftLL [40] o5:(ORshiftLL [48] s0:(SLLconst [56] y0:(MOVDnop x0:(MOVBUload [i0] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [i1] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i2] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [i3] {s} p mem))) y4:(MOVDnop x4:(MOVBUload [i4] {s} p mem))) y5:(MOVDnop x5:(MOVBUload [i5] {s} p mem))) y6:(MOVDnop x6:(MOVBUload [i6] {s} p mem))))
  7881  	// cond: i1 == i0+1 	&& i2 == i0+2 	&& i3 == i0+3 	&& i4 == i0+4 	&& i5 == i0+5 	&& i6 == i0+6 	&& i7 == i0+7 	&& x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 	&& x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 	&& y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 	&& y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 	&& o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 	&& o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 	&& mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) != nil 	&& clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) 	&& clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) 	&& clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) 	&& clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) 	&& clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) 	&& clobber(o4) && clobber(o5) && clobber(s0)
  7882  	// result: @mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) (REV <t> (MOVDload <t> {s} (OffPtr <p.Type> [i0] p) mem))
  7883  	for {
  7884  		t := v.Type
  7885  		_ = v.Args[1]
  7886  		y7 := v.Args[0]
  7887  		if y7.Op != OpARM64MOVDnop {
  7888  			break
  7889  		}
  7890  		x7 := y7.Args[0]
  7891  		if x7.Op != OpARM64MOVBUload {
  7892  			break
  7893  		}
  7894  		i7 := x7.AuxInt
  7895  		s := x7.Aux
  7896  		_ = x7.Args[1]
  7897  		p := x7.Args[0]
  7898  		mem := x7.Args[1]
  7899  		o0 := v.Args[1]
  7900  		if o0.Op != OpARM64ORshiftLL {
  7901  			break
  7902  		}
  7903  		if o0.AuxInt != 8 {
  7904  			break
  7905  		}
  7906  		_ = o0.Args[1]
  7907  		o1 := o0.Args[0]
  7908  		if o1.Op != OpARM64ORshiftLL {
  7909  			break
  7910  		}
  7911  		if o1.AuxInt != 16 {
  7912  			break
  7913  		}
  7914  		_ = o1.Args[1]
  7915  		o2 := o1.Args[0]
  7916  		if o2.Op != OpARM64ORshiftLL {
  7917  			break
  7918  		}
  7919  		if o2.AuxInt != 24 {
  7920  			break
  7921  		}
  7922  		_ = o2.Args[1]
  7923  		o3 := o2.Args[0]
  7924  		if o3.Op != OpARM64ORshiftLL {
  7925  			break
  7926  		}
  7927  		if o3.AuxInt != 32 {
  7928  			break
  7929  		}
  7930  		_ = o3.Args[1]
  7931  		o4 := o3.Args[0]
  7932  		if o4.Op != OpARM64ORshiftLL {
  7933  			break
  7934  		}
  7935  		if o4.AuxInt != 40 {
  7936  			break
  7937  		}
  7938  		_ = o4.Args[1]
  7939  		o5 := o4.Args[0]
  7940  		if o5.Op != OpARM64ORshiftLL {
  7941  			break
  7942  		}
  7943  		if o5.AuxInt != 48 {
  7944  			break
  7945  		}
  7946  		_ = o5.Args[1]
  7947  		s0 := o5.Args[0]
  7948  		if s0.Op != OpARM64SLLconst {
  7949  			break
  7950  		}
  7951  		if s0.AuxInt != 56 {
  7952  			break
  7953  		}
  7954  		y0 := s0.Args[0]
  7955  		if y0.Op != OpARM64MOVDnop {
  7956  			break
  7957  		}
  7958  		x0 := y0.Args[0]
  7959  		if x0.Op != OpARM64MOVBUload {
  7960  			break
  7961  		}
  7962  		i0 := x0.AuxInt
  7963  		if x0.Aux != s {
  7964  			break
  7965  		}
  7966  		_ = x0.Args[1]
  7967  		if p != x0.Args[0] {
  7968  			break
  7969  		}
  7970  		if mem != x0.Args[1] {
  7971  			break
  7972  		}
  7973  		y1 := o5.Args[1]
  7974  		if y1.Op != OpARM64MOVDnop {
  7975  			break
  7976  		}
  7977  		x1 := y1.Args[0]
  7978  		if x1.Op != OpARM64MOVBUload {
  7979  			break
  7980  		}
  7981  		i1 := x1.AuxInt
  7982  		if x1.Aux != s {
  7983  			break
  7984  		}
  7985  		_ = x1.Args[1]
  7986  		if p != x1.Args[0] {
  7987  			break
  7988  		}
  7989  		if mem != x1.Args[1] {
  7990  			break
  7991  		}
  7992  		y2 := o4.Args[1]
  7993  		if y2.Op != OpARM64MOVDnop {
  7994  			break
  7995  		}
  7996  		x2 := y2.Args[0]
  7997  		if x2.Op != OpARM64MOVBUload {
  7998  			break
  7999  		}
  8000  		i2 := x2.AuxInt
  8001  		if x2.Aux != s {
  8002  			break
  8003  		}
  8004  		_ = x2.Args[1]
  8005  		if p != x2.Args[0] {
  8006  			break
  8007  		}
  8008  		if mem != x2.Args[1] {
  8009  			break
  8010  		}
  8011  		y3 := o3.Args[1]
  8012  		if y3.Op != OpARM64MOVDnop {
  8013  			break
  8014  		}
  8015  		x3 := y3.Args[0]
  8016  		if x3.Op != OpARM64MOVBUload {
  8017  			break
  8018  		}
  8019  		i3 := x3.AuxInt
  8020  		if x3.Aux != s {
  8021  			break
  8022  		}
  8023  		_ = x3.Args[1]
  8024  		if p != x3.Args[0] {
  8025  			break
  8026  		}
  8027  		if mem != x3.Args[1] {
  8028  			break
  8029  		}
  8030  		y4 := o2.Args[1]
  8031  		if y4.Op != OpARM64MOVDnop {
  8032  			break
  8033  		}
  8034  		x4 := y4.Args[0]
  8035  		if x4.Op != OpARM64MOVBUload {
  8036  			break
  8037  		}
  8038  		i4 := x4.AuxInt
  8039  		if x4.Aux != s {
  8040  			break
  8041  		}
  8042  		_ = x4.Args[1]
  8043  		if p != x4.Args[0] {
  8044  			break
  8045  		}
  8046  		if mem != x4.Args[1] {
  8047  			break
  8048  		}
  8049  		y5 := o1.Args[1]
  8050  		if y5.Op != OpARM64MOVDnop {
  8051  			break
  8052  		}
  8053  		x5 := y5.Args[0]
  8054  		if x5.Op != OpARM64MOVBUload {
  8055  			break
  8056  		}
  8057  		i5 := x5.AuxInt
  8058  		if x5.Aux != s {
  8059  			break
  8060  		}
  8061  		_ = x5.Args[1]
  8062  		if p != x5.Args[0] {
  8063  			break
  8064  		}
  8065  		if mem != x5.Args[1] {
  8066  			break
  8067  		}
  8068  		y6 := o0.Args[1]
  8069  		if y6.Op != OpARM64MOVDnop {
  8070  			break
  8071  		}
  8072  		x6 := y6.Args[0]
  8073  		if x6.Op != OpARM64MOVBUload {
  8074  			break
  8075  		}
  8076  		i6 := x6.AuxInt
  8077  		if x6.Aux != s {
  8078  			break
  8079  		}
  8080  		_ = x6.Args[1]
  8081  		if p != x6.Args[0] {
  8082  			break
  8083  		}
  8084  		if mem != x6.Args[1] {
  8085  			break
  8086  		}
  8087  		if !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0)) {
  8088  			break
  8089  		}
  8090  		b = mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7)
  8091  		v0 := b.NewValue0(v.Pos, OpARM64REV, t)
  8092  		v.reset(OpCopy)
  8093  		v.AddArg(v0)
  8094  		v1 := b.NewValue0(v.Pos, OpARM64MOVDload, t)
  8095  		v1.Aux = s
  8096  		v2 := b.NewValue0(v.Pos, OpOffPtr, p.Type)
  8097  		v2.AuxInt = i0
  8098  		v2.AddArg(p)
  8099  		v1.AddArg(v2)
  8100  		v1.AddArg(mem)
  8101  		v0.AddArg(v1)
  8102  		return true
  8103  	}
  8104  	return false
  8105  }
  8106  func rewriteValueARM64_OpARM64ORconst_0(v *Value) bool {
  8107  	// match: (ORconst [0] x)
  8108  	// cond:
  8109  	// result: x
  8110  	for {
  8111  		if v.AuxInt != 0 {
  8112  			break
  8113  		}
  8114  		x := v.Args[0]
  8115  		v.reset(OpCopy)
  8116  		v.Type = x.Type
  8117  		v.AddArg(x)
  8118  		return true
  8119  	}
  8120  	// match: (ORconst [-1] _)
  8121  	// cond:
  8122  	// result: (MOVDconst [-1])
  8123  	for {
  8124  		if v.AuxInt != -1 {
  8125  			break
  8126  		}
  8127  		v.reset(OpARM64MOVDconst)
  8128  		v.AuxInt = -1
  8129  		return true
  8130  	}
  8131  	// match: (ORconst [c] (MOVDconst [d]))
  8132  	// cond:
  8133  	// result: (MOVDconst [c|d])
  8134  	for {
  8135  		c := v.AuxInt
  8136  		v_0 := v.Args[0]
  8137  		if v_0.Op != OpARM64MOVDconst {
  8138  			break
  8139  		}
  8140  		d := v_0.AuxInt
  8141  		v.reset(OpARM64MOVDconst)
  8142  		v.AuxInt = c | d
  8143  		return true
  8144  	}
  8145  	// match: (ORconst [c] (ORconst [d] x))
  8146  	// cond:
  8147  	// result: (ORconst [c|d] x)
  8148  	for {
  8149  		c := v.AuxInt
  8150  		v_0 := v.Args[0]
  8151  		if v_0.Op != OpARM64ORconst {
  8152  			break
  8153  		}
  8154  		d := v_0.AuxInt
  8155  		x := v_0.Args[0]
  8156  		v.reset(OpARM64ORconst)
  8157  		v.AuxInt = c | d
  8158  		v.AddArg(x)
  8159  		return true
  8160  	}
  8161  	return false
  8162  }
  8163  func rewriteValueARM64_OpARM64ORshiftLL_0(v *Value) bool {
  8164  	b := v.Block
  8165  	_ = b
  8166  	// match: (ORshiftLL (MOVDconst [c]) x [d])
  8167  	// cond:
  8168  	// result: (ORconst  [c] (SLLconst <x.Type> x [d]))
  8169  	for {
  8170  		d := v.AuxInt
  8171  		_ = v.Args[1]
  8172  		v_0 := v.Args[0]
  8173  		if v_0.Op != OpARM64MOVDconst {
  8174  			break
  8175  		}
  8176  		c := v_0.AuxInt
  8177  		x := v.Args[1]
  8178  		v.reset(OpARM64ORconst)
  8179  		v.AuxInt = c
  8180  		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
  8181  		v0.AuxInt = d
  8182  		v0.AddArg(x)
  8183  		v.AddArg(v0)
  8184  		return true
  8185  	}
  8186  	// match: (ORshiftLL x (MOVDconst [c]) [d])
  8187  	// cond:
  8188  	// result: (ORconst  x [int64(uint64(c)<<uint64(d))])
  8189  	for {
  8190  		d := v.AuxInt
  8191  		_ = v.Args[1]
  8192  		x := v.Args[0]
  8193  		v_1 := v.Args[1]
  8194  		if v_1.Op != OpARM64MOVDconst {
  8195  			break
  8196  		}
  8197  		c := v_1.AuxInt
  8198  		v.reset(OpARM64ORconst)
  8199  		v.AuxInt = int64(uint64(c) << uint64(d))
  8200  		v.AddArg(x)
  8201  		return true
  8202  	}
  8203  	// match: (ORshiftLL x y:(SLLconst x [c]) [d])
  8204  	// cond: c==d
  8205  	// result: y
  8206  	for {
  8207  		d := v.AuxInt
  8208  		_ = v.Args[1]
  8209  		x := v.Args[0]
  8210  		y := v.Args[1]
  8211  		if y.Op != OpARM64SLLconst {
  8212  			break
  8213  		}
  8214  		c := y.AuxInt
  8215  		if x != y.Args[0] {
  8216  			break
  8217  		}
  8218  		if !(c == d) {
  8219  			break
  8220  		}
  8221  		v.reset(OpCopy)
  8222  		v.Type = y.Type
  8223  		v.AddArg(y)
  8224  		return true
  8225  	}
  8226  	// match: (ORshiftLL [c] (SRLconst x [64-c]) x)
  8227  	// cond:
  8228  	// result: (RORconst [64-c] x)
  8229  	for {
  8230  		c := v.AuxInt
  8231  		_ = v.Args[1]
  8232  		v_0 := v.Args[0]
  8233  		if v_0.Op != OpARM64SRLconst {
  8234  			break
  8235  		}
  8236  		if v_0.AuxInt != 64-c {
  8237  			break
  8238  		}
  8239  		x := v_0.Args[0]
  8240  		if x != v.Args[1] {
  8241  			break
  8242  		}
  8243  		v.reset(OpARM64RORconst)
  8244  		v.AuxInt = 64 - c
  8245  		v.AddArg(x)
  8246  		return true
  8247  	}
  8248  	// match: (ORshiftLL <t> [c] (SRLconst (MOVWUreg x) [32-c]) x)
  8249  	// cond: c < 32 && t.Size() == 4
  8250  	// result: (RORWconst [32-c] x)
  8251  	for {
  8252  		t := v.Type
  8253  		c := v.AuxInt
  8254  		_ = v.Args[1]
  8255  		v_0 := v.Args[0]
  8256  		if v_0.Op != OpARM64SRLconst {
  8257  			break
  8258  		}
  8259  		if v_0.AuxInt != 32-c {
  8260  			break
  8261  		}
  8262  		v_0_0 := v_0.Args[0]
  8263  		if v_0_0.Op != OpARM64MOVWUreg {
  8264  			break
  8265  		}
  8266  		x := v_0_0.Args[0]
  8267  		if x != v.Args[1] {
  8268  			break
  8269  		}
  8270  		if !(c < 32 && t.Size() == 4) {
  8271  			break
  8272  		}
  8273  		v.reset(OpARM64RORWconst)
  8274  		v.AuxInt = 32 - c
  8275  		v.AddArg(x)
  8276  		return true
  8277  	}
  8278  	// match: (ORshiftLL <t> [8] y0:(MOVDnop x0:(MOVBUload [i0] {s} p mem)) y1:(MOVDnop x1:(MOVBUload [i1] {s} p mem)))
  8279  	// cond: i1 == i0+1 	&& x0.Uses == 1 && x1.Uses == 1 	&& y0.Uses == 1 && y1.Uses == 1 	&& mergePoint(b,x0,x1) != nil 	&& clobber(x0) && clobber(x1) 	&& clobber(y0) && clobber(y1)
  8280  	// result: @mergePoint(b,x0,x1) (MOVHUload <t> {s} (OffPtr <p.Type> [i0] p) mem)
  8281  	for {
  8282  		t := v.Type
  8283  		if v.AuxInt != 8 {
  8284  			break
  8285  		}
  8286  		_ = v.Args[1]
  8287  		y0 := v.Args[0]
  8288  		if y0.Op != OpARM64MOVDnop {
  8289  			break
  8290  		}
  8291  		x0 := y0.Args[0]
  8292  		if x0.Op != OpARM64MOVBUload {
  8293  			break
  8294  		}
  8295  		i0 := x0.AuxInt
  8296  		s := x0.Aux
  8297  		_ = x0.Args[1]
  8298  		p := x0.Args[0]
  8299  		mem := x0.Args[1]
  8300  		y1 := v.Args[1]
  8301  		if y1.Op != OpARM64MOVDnop {
  8302  			break
  8303  		}
  8304  		x1 := y1.Args[0]
  8305  		if x1.Op != OpARM64MOVBUload {
  8306  			break
  8307  		}
  8308  		i1 := x1.AuxInt
  8309  		if x1.Aux != s {
  8310  			break
  8311  		}
  8312  		_ = x1.Args[1]
  8313  		if p != x1.Args[0] {
  8314  			break
  8315  		}
  8316  		if mem != x1.Args[1] {
  8317  			break
  8318  		}
  8319  		if !(i1 == i0+1 && x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b, x0, x1) != nil && clobber(x0) && clobber(x1) && clobber(y0) && clobber(y1)) {
  8320  			break
  8321  		}
  8322  		b = mergePoint(b, x0, x1)
  8323  		v0 := b.NewValue0(v.Pos, OpARM64MOVHUload, t)
  8324  		v.reset(OpCopy)
  8325  		v.AddArg(v0)
  8326  		v0.Aux = s
  8327  		v1 := b.NewValue0(v.Pos, OpOffPtr, p.Type)
  8328  		v1.AuxInt = i0
  8329  		v1.AddArg(p)
  8330  		v0.AddArg(v1)
  8331  		v0.AddArg(mem)
  8332  		return true
  8333  	}
  8334  	// match: (ORshiftLL <t> [24] o0:(ORshiftLL [16] x0:(MOVHUload [i0] {s} p mem) y1:(MOVDnop x1:(MOVBUload [i2] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i3] {s} p mem)))
  8335  	// cond: i2 == i0+2 	&& i3 == i0+3 	&& x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 	&& y1.Uses == 1 && y2.Uses == 1 	&& o0.Uses == 1 	&& mergePoint(b,x0,x1,x2) != nil 	&& clobber(x0) && clobber(x1) && clobber(x2) 	&& clobber(y1) && clobber(y2) 	&& clobber(o0)
  8336  	// result: @mergePoint(b,x0,x1,x2) (MOVWUload <t> {s} (OffPtr <p.Type> [i0] p) mem)
  8337  	for {
  8338  		t := v.Type
  8339  		if v.AuxInt != 24 {
  8340  			break
  8341  		}
  8342  		_ = v.Args[1]
  8343  		o0 := v.Args[0]
  8344  		if o0.Op != OpARM64ORshiftLL {
  8345  			break
  8346  		}
  8347  		if o0.AuxInt != 16 {
  8348  			break
  8349  		}
  8350  		_ = o0.Args[1]
  8351  		x0 := o0.Args[0]
  8352  		if x0.Op != OpARM64MOVHUload {
  8353  			break
  8354  		}
  8355  		i0 := x0.AuxInt
  8356  		s := x0.Aux
  8357  		_ = x0.Args[1]
  8358  		p := x0.Args[0]
  8359  		mem := x0.Args[1]
  8360  		y1 := o0.Args[1]
  8361  		if y1.Op != OpARM64MOVDnop {
  8362  			break
  8363  		}
  8364  		x1 := y1.Args[0]
  8365  		if x1.Op != OpARM64MOVBUload {
  8366  			break
  8367  		}
  8368  		i2 := x1.AuxInt
  8369  		if x1.Aux != s {
  8370  			break
  8371  		}
  8372  		_ = x1.Args[1]
  8373  		if p != x1.Args[0] {
  8374  			break
  8375  		}
  8376  		if mem != x1.Args[1] {
  8377  			break
  8378  		}
  8379  		y2 := v.Args[1]
  8380  		if y2.Op != OpARM64MOVDnop {
  8381  			break
  8382  		}
  8383  		x2 := y2.Args[0]
  8384  		if x2.Op != OpARM64MOVBUload {
  8385  			break
  8386  		}
  8387  		i3 := x2.AuxInt
  8388  		if x2.Aux != s {
  8389  			break
  8390  		}
  8391  		_ = x2.Args[1]
  8392  		if p != x2.Args[0] {
  8393  			break
  8394  		}
  8395  		if mem != x2.Args[1] {
  8396  			break
  8397  		}
  8398  		if !(i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b, x0, x1, x2) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(y1) && clobber(y2) && clobber(o0)) {
  8399  			break
  8400  		}
  8401  		b = mergePoint(b, x0, x1, x2)
  8402  		v0 := b.NewValue0(v.Pos, OpARM64MOVWUload, t)
  8403  		v.reset(OpCopy)
  8404  		v.AddArg(v0)
  8405  		v0.Aux = s
  8406  		v1 := b.NewValue0(v.Pos, OpOffPtr, p.Type)
  8407  		v1.AuxInt = i0
  8408  		v1.AddArg(p)
  8409  		v0.AddArg(v1)
  8410  		v0.AddArg(mem)
  8411  		return true
  8412  	}
  8413  	// match: (ORshiftLL <t> [56] o0:(ORshiftLL [48] o1:(ORshiftLL [40] o2:(ORshiftLL [32] x0:(MOVWUload [i0] {s} p mem) y1:(MOVDnop x1:(MOVBUload [i4] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i5] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [i6] {s} p mem))) y4:(MOVDnop x4:(MOVBUload [i7] {s} p mem)))
  8414  	// cond: i4 == i0+4 	&& i5 == i0+5 	&& i6 == i0+6 	&& i7 == i0+7 	&& x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 	&& y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 	&& o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 	&& mergePoint(b,x0,x1,x2,x3,x4) != nil 	&& clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) 	&& clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) 	&& clobber(o0) && clobber(o1) && clobber(o2)
  8415  	// result: @mergePoint(b,x0,x1,x2,x3,x4) (MOVDload <t> {s} (OffPtr <p.Type> [i0] p) mem)
  8416  	for {
  8417  		t := v.Type
  8418  		if v.AuxInt != 56 {
  8419  			break
  8420  		}
  8421  		_ = v.Args[1]
  8422  		o0 := v.Args[0]
  8423  		if o0.Op != OpARM64ORshiftLL {
  8424  			break
  8425  		}
  8426  		if o0.AuxInt != 48 {
  8427  			break
  8428  		}
  8429  		_ = o0.Args[1]
  8430  		o1 := o0.Args[0]
  8431  		if o1.Op != OpARM64ORshiftLL {
  8432  			break
  8433  		}
  8434  		if o1.AuxInt != 40 {
  8435  			break
  8436  		}
  8437  		_ = o1.Args[1]
  8438  		o2 := o1.Args[0]
  8439  		if o2.Op != OpARM64ORshiftLL {
  8440  			break
  8441  		}
  8442  		if o2.AuxInt != 32 {
  8443  			break
  8444  		}
  8445  		_ = o2.Args[1]
  8446  		x0 := o2.Args[0]
  8447  		if x0.Op != OpARM64MOVWUload {
  8448  			break
  8449  		}
  8450  		i0 := x0.AuxInt
  8451  		s := x0.Aux
  8452  		_ = x0.Args[1]
  8453  		p := x0.Args[0]
  8454  		mem := x0.Args[1]
  8455  		y1 := o2.Args[1]
  8456  		if y1.Op != OpARM64MOVDnop {
  8457  			break
  8458  		}
  8459  		x1 := y1.Args[0]
  8460  		if x1.Op != OpARM64MOVBUload {
  8461  			break
  8462  		}
  8463  		i4 := x1.AuxInt
  8464  		if x1.Aux != s {
  8465  			break
  8466  		}
  8467  		_ = x1.Args[1]
  8468  		if p != x1.Args[0] {
  8469  			break
  8470  		}
  8471  		if mem != x1.Args[1] {
  8472  			break
  8473  		}
  8474  		y2 := o1.Args[1]
  8475  		if y2.Op != OpARM64MOVDnop {
  8476  			break
  8477  		}
  8478  		x2 := y2.Args[0]
  8479  		if x2.Op != OpARM64MOVBUload {
  8480  			break
  8481  		}
  8482  		i5 := x2.AuxInt
  8483  		if x2.Aux != s {
  8484  			break
  8485  		}
  8486  		_ = x2.Args[1]
  8487  		if p != x2.Args[0] {
  8488  			break
  8489  		}
  8490  		if mem != x2.Args[1] {
  8491  			break
  8492  		}
  8493  		y3 := o0.Args[1]
  8494  		if y3.Op != OpARM64MOVDnop {
  8495  			break
  8496  		}
  8497  		x3 := y3.Args[0]
  8498  		if x3.Op != OpARM64MOVBUload {
  8499  			break
  8500  		}
  8501  		i6 := x3.AuxInt
  8502  		if x3.Aux != s {
  8503  			break
  8504  		}
  8505  		_ = x3.Args[1]
  8506  		if p != x3.Args[0] {
  8507  			break
  8508  		}
  8509  		if mem != x3.Args[1] {
  8510  			break
  8511  		}
  8512  		y4 := v.Args[1]
  8513  		if y4.Op != OpARM64MOVDnop {
  8514  			break
  8515  		}
  8516  		x4 := y4.Args[0]
  8517  		if x4.Op != OpARM64MOVBUload {
  8518  			break
  8519  		}
  8520  		i7 := x4.AuxInt
  8521  		if x4.Aux != s {
  8522  			break
  8523  		}
  8524  		_ = x4.Args[1]
  8525  		if p != x4.Args[0] {
  8526  			break
  8527  		}
  8528  		if mem != x4.Args[1] {
  8529  			break
  8530  		}
  8531  		if !(i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(o0) && clobber(o1) && clobber(o2)) {
  8532  			break
  8533  		}
  8534  		b = mergePoint(b, x0, x1, x2, x3, x4)
  8535  		v0 := b.NewValue0(v.Pos, OpARM64MOVDload, t)
  8536  		v.reset(OpCopy)
  8537  		v.AddArg(v0)
  8538  		v0.Aux = s
  8539  		v1 := b.NewValue0(v.Pos, OpOffPtr, p.Type)
  8540  		v1.AuxInt = i0
  8541  		v1.AddArg(p)
  8542  		v0.AddArg(v1)
  8543  		v0.AddArg(mem)
  8544  		return true
  8545  	}
  8546  	// match: (ORshiftLL <t> [8] y0:(MOVDnop x0:(MOVBUload [i1] {s} p mem)) y1:(MOVDnop x1:(MOVBUload [i0] {s} p mem)))
  8547  	// cond: i1 == i0+1 	&& x0.Uses == 1 && x1.Uses == 1 	&& y0.Uses == 1 && y1.Uses == 1 	&& mergePoint(b,x0,x1) != nil 	&& clobber(x0) && clobber(x1) 	&& clobber(y0) && clobber(y1)
  8548  	// result: @mergePoint(b,x0,x1) (REV16W <t> (MOVHUload <t> [i0] {s} p mem))
  8549  	for {
  8550  		t := v.Type
  8551  		if v.AuxInt != 8 {
  8552  			break
  8553  		}
  8554  		_ = v.Args[1]
  8555  		y0 := v.Args[0]
  8556  		if y0.Op != OpARM64MOVDnop {
  8557  			break
  8558  		}
  8559  		x0 := y0.Args[0]
  8560  		if x0.Op != OpARM64MOVBUload {
  8561  			break
  8562  		}
  8563  		i1 := x0.AuxInt
  8564  		s := x0.Aux
  8565  		_ = x0.Args[1]
  8566  		p := x0.Args[0]
  8567  		mem := x0.Args[1]
  8568  		y1 := v.Args[1]
  8569  		if y1.Op != OpARM64MOVDnop {
  8570  			break
  8571  		}
  8572  		x1 := y1.Args[0]
  8573  		if x1.Op != OpARM64MOVBUload {
  8574  			break
  8575  		}
  8576  		i0 := x1.AuxInt
  8577  		if x1.Aux != s {
  8578  			break
  8579  		}
  8580  		_ = x1.Args[1]
  8581  		if p != x1.Args[0] {
  8582  			break
  8583  		}
  8584  		if mem != x1.Args[1] {
  8585  			break
  8586  		}
  8587  		if !(i1 == i0+1 && x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b, x0, x1) != nil && clobber(x0) && clobber(x1) && clobber(y0) && clobber(y1)) {
  8588  			break
  8589  		}
  8590  		b = mergePoint(b, x0, x1)
  8591  		v0 := b.NewValue0(v.Pos, OpARM64REV16W, t)
  8592  		v.reset(OpCopy)
  8593  		v.AddArg(v0)
  8594  		v1 := b.NewValue0(v.Pos, OpARM64MOVHUload, t)
  8595  		v1.AuxInt = i0
  8596  		v1.Aux = s
  8597  		v1.AddArg(p)
  8598  		v1.AddArg(mem)
  8599  		v0.AddArg(v1)
  8600  		return true
  8601  	}
  8602  	// match: (ORshiftLL <t> [24] o0:(ORshiftLL [16] y0:(REV16W x0:(MOVHUload [i2] {s} p mem)) y1:(MOVDnop x1:(MOVBUload [i1] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i0] {s} p mem)))
  8603  	// cond: i1 == i0+1 	&& i2 == i0+2 	&& x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 	&& y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 	&& o0.Uses == 1 	&& mergePoint(b,x0,x1,x2) != nil 	&& clobber(x0) && clobber(x1) && clobber(x2) 	&& clobber(y0) && clobber(y1) && clobber(y2) 	&& clobber(o0)
  8604  	// result: @mergePoint(b,x0,x1,x2) (REVW <t> (MOVWUload <t> {s} (OffPtr <p.Type> [i0] p) mem))
  8605  	for {
  8606  		t := v.Type
  8607  		if v.AuxInt != 24 {
  8608  			break
  8609  		}
  8610  		_ = v.Args[1]
  8611  		o0 := v.Args[0]
  8612  		if o0.Op != OpARM64ORshiftLL {
  8613  			break
  8614  		}
  8615  		if o0.AuxInt != 16 {
  8616  			break
  8617  		}
  8618  		_ = o0.Args[1]
  8619  		y0 := o0.Args[0]
  8620  		if y0.Op != OpARM64REV16W {
  8621  			break
  8622  		}
  8623  		x0 := y0.Args[0]
  8624  		if x0.Op != OpARM64MOVHUload {
  8625  			break
  8626  		}
  8627  		i2 := x0.AuxInt
  8628  		s := x0.Aux
  8629  		_ = x0.Args[1]
  8630  		p := x0.Args[0]
  8631  		mem := x0.Args[1]
  8632  		y1 := o0.Args[1]
  8633  		if y1.Op != OpARM64MOVDnop {
  8634  			break
  8635  		}
  8636  		x1 := y1.Args[0]
  8637  		if x1.Op != OpARM64MOVBUload {
  8638  			break
  8639  		}
  8640  		i1 := x1.AuxInt
  8641  		if x1.Aux != s {
  8642  			break
  8643  		}
  8644  		_ = x1.Args[1]
  8645  		if p != x1.Args[0] {
  8646  			break
  8647  		}
  8648  		if mem != x1.Args[1] {
  8649  			break
  8650  		}
  8651  		y2 := v.Args[1]
  8652  		if y2.Op != OpARM64MOVDnop {
  8653  			break
  8654  		}
  8655  		x2 := y2.Args[0]
  8656  		if x2.Op != OpARM64MOVBUload {
  8657  			break
  8658  		}
  8659  		i0 := x2.AuxInt
  8660  		if x2.Aux != s {
  8661  			break
  8662  		}
  8663  		_ = x2.Args[1]
  8664  		if p != x2.Args[0] {
  8665  			break
  8666  		}
  8667  		if mem != x2.Args[1] {
  8668  			break
  8669  		}
  8670  		if !(i1 == i0+1 && i2 == i0+2 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b, x0, x1, x2) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(o0)) {
  8671  			break
  8672  		}
  8673  		b = mergePoint(b, x0, x1, x2)
  8674  		v0 := b.NewValue0(v.Pos, OpARM64REVW, t)
  8675  		v.reset(OpCopy)
  8676  		v.AddArg(v0)
  8677  		v1 := b.NewValue0(v.Pos, OpARM64MOVWUload, t)
  8678  		v1.Aux = s
  8679  		v2 := b.NewValue0(v.Pos, OpOffPtr, p.Type)
  8680  		v2.AuxInt = i0
  8681  		v2.AddArg(p)
  8682  		v1.AddArg(v2)
  8683  		v1.AddArg(mem)
  8684  		v0.AddArg(v1)
  8685  		return true
  8686  	}
  8687  	return false
  8688  }
// rewriteValueARM64_OpARM64ORshiftLL_10 recognizes an OR-tree that assembles
// a 64-bit value from five overlapping little-endian loads (one 4-byte load
// fed through REVW plus four single-byte loads at descending offsets) and
// replaces the whole tree with a single 8-byte load followed by a byte
// reversal (REV). Returns true when v was rewritten.
func rewriteValueARM64_OpARM64ORshiftLL_10(v *Value) bool {
	b := v.Block
	_ = b
	// match: (ORshiftLL <t> [56] o0:(ORshiftLL [48] o1:(ORshiftLL [40] o2:(ORshiftLL [32] y0:(REVW x0:(MOVWUload [i4] {s} p mem)) y1:(MOVDnop x1:(MOVBUload [i3] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i2] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [i1] {s} p mem))) y4:(MOVDnop x4:(MOVBUload [i0] {s} p mem)))
	// cond: i1 == i0+1 	&& i2 == i0+2 	&& i3 == i0+3 	&& i4 == i0+4 	&& x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 	&& y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 	&& o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 	&& mergePoint(b,x0,x1,x2,x3,x4) != nil 	&& clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) 	&& clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) 	&& clobber(o0) && clobber(o1) && clobber(o2)
	// result: @mergePoint(b,x0,x1,x2,x3,x4) (REV <t> (MOVDload <t> {s} (OffPtr <p.Type> [i0] p) mem))
	for {
		t := v.Type
		if v.AuxInt != 56 {
			break
		}
		_ = v.Args[1]
		// Walk down the chain of inner ORshiftLL nodes; the shift amounts
		// 48/40/32 pin each byte to its position in the 64-bit result.
		o0 := v.Args[0]
		if o0.Op != OpARM64ORshiftLL {
			break
		}
		if o0.AuxInt != 48 {
			break
		}
		_ = o0.Args[1]
		o1 := o0.Args[0]
		if o1.Op != OpARM64ORshiftLL {
			break
		}
		if o1.AuxInt != 40 {
			break
		}
		_ = o1.Args[1]
		o2 := o1.Args[0]
		if o2.Op != OpARM64ORshiftLL {
			break
		}
		if o2.AuxInt != 32 {
			break
		}
		_ = o2.Args[1]
		y0 := o2.Args[0]
		if y0.Op != OpARM64REVW {
			break
		}
		x0 := y0.Args[0]
		if x0.Op != OpARM64MOVWUload {
			break
		}
		// The word load supplies the base pointer p, symbol s, and memory
		// state mem that every byte load below must agree with.
		i4 := x0.AuxInt
		s := x0.Aux
		_ = x0.Args[1]
		p := x0.Args[0]
		mem := x0.Args[1]
		y1 := o2.Args[1]
		if y1.Op != OpARM64MOVDnop {
			break
		}
		x1 := y1.Args[0]
		if x1.Op != OpARM64MOVBUload {
			break
		}
		i3 := x1.AuxInt
		if x1.Aux != s {
			break
		}
		_ = x1.Args[1]
		if p != x1.Args[0] {
			break
		}
		if mem != x1.Args[1] {
			break
		}
		y2 := o1.Args[1]
		if y2.Op != OpARM64MOVDnop {
			break
		}
		x2 := y2.Args[0]
		if x2.Op != OpARM64MOVBUload {
			break
		}
		i2 := x2.AuxInt
		if x2.Aux != s {
			break
		}
		_ = x2.Args[1]
		if p != x2.Args[0] {
			break
		}
		if mem != x2.Args[1] {
			break
		}
		y3 := o0.Args[1]
		if y3.Op != OpARM64MOVDnop {
			break
		}
		x3 := y3.Args[0]
		if x3.Op != OpARM64MOVBUload {
			break
		}
		i1 := x3.AuxInt
		if x3.Aux != s {
			break
		}
		_ = x3.Args[1]
		if p != x3.Args[0] {
			break
		}
		if mem != x3.Args[1] {
			break
		}
		y4 := v.Args[1]
		if y4.Op != OpARM64MOVDnop {
			break
		}
		x4 := y4.Args[0]
		if x4.Op != OpARM64MOVBUload {
			break
		}
		i0 := x4.AuxInt
		if x4.Aux != s {
			break
		}
		_ = x4.Args[1]
		if p != x4.Args[0] {
			break
		}
		if mem != x4.Args[1] {
			break
		}
		// Offsets must be consecutive from i0, and every intermediate value
		// must be single-use so the whole tree can be clobbered safely.
		if !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(o0) && clobber(o1) && clobber(o2)) {
			break
		}
		// Emit the fused REV(MOVDload (OffPtr [i0] p)) at the merge point of
		// the original loads; v itself becomes a copy of the new REV value.
		b = mergePoint(b, x0, x1, x2, x3, x4)
		v0 := b.NewValue0(v.Pos, OpARM64REV, t)
		v.reset(OpCopy)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVDload, t)
		v1.Aux = s
		v2 := b.NewValue0(v.Pos, OpOffPtr, p.Type)
		v2.AuxInt = i0
		v2.AddArg(p)
		v1.AddArg(v2)
		v1.AddArg(mem)
		v0.AddArg(v1)
		return true
	}
	return false
}
  8833  func rewriteValueARM64_OpARM64ORshiftRA_0(v *Value) bool {
  8834  	b := v.Block
  8835  	_ = b
  8836  	// match: (ORshiftRA (MOVDconst [c]) x [d])
  8837  	// cond:
  8838  	// result: (ORconst  [c] (SRAconst <x.Type> x [d]))
  8839  	for {
  8840  		d := v.AuxInt
  8841  		_ = v.Args[1]
  8842  		v_0 := v.Args[0]
  8843  		if v_0.Op != OpARM64MOVDconst {
  8844  			break
  8845  		}
  8846  		c := v_0.AuxInt
  8847  		x := v.Args[1]
  8848  		v.reset(OpARM64ORconst)
  8849  		v.AuxInt = c
  8850  		v0 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type)
  8851  		v0.AuxInt = d
  8852  		v0.AddArg(x)
  8853  		v.AddArg(v0)
  8854  		return true
  8855  	}
  8856  	// match: (ORshiftRA x (MOVDconst [c]) [d])
  8857  	// cond:
  8858  	// result: (ORconst  x [int64(int64(c)>>uint64(d))])
  8859  	for {
  8860  		d := v.AuxInt
  8861  		_ = v.Args[1]
  8862  		x := v.Args[0]
  8863  		v_1 := v.Args[1]
  8864  		if v_1.Op != OpARM64MOVDconst {
  8865  			break
  8866  		}
  8867  		c := v_1.AuxInt
  8868  		v.reset(OpARM64ORconst)
  8869  		v.AuxInt = int64(int64(c) >> uint64(d))
  8870  		v.AddArg(x)
  8871  		return true
  8872  	}
  8873  	// match: (ORshiftRA x y:(SRAconst x [c]) [d])
  8874  	// cond: c==d
  8875  	// result: y
  8876  	for {
  8877  		d := v.AuxInt
  8878  		_ = v.Args[1]
  8879  		x := v.Args[0]
  8880  		y := v.Args[1]
  8881  		if y.Op != OpARM64SRAconst {
  8882  			break
  8883  		}
  8884  		c := y.AuxInt
  8885  		if x != y.Args[0] {
  8886  			break
  8887  		}
  8888  		if !(c == d) {
  8889  			break
  8890  		}
  8891  		v.reset(OpCopy)
  8892  		v.Type = y.Type
  8893  		v.AddArg(y)
  8894  		return true
  8895  	}
  8896  	return false
  8897  }
  8898  func rewriteValueARM64_OpARM64ORshiftRL_0(v *Value) bool {
  8899  	b := v.Block
  8900  	_ = b
  8901  	// match: (ORshiftRL (MOVDconst [c]) x [d])
  8902  	// cond:
  8903  	// result: (ORconst  [c] (SRLconst <x.Type> x [d]))
  8904  	for {
  8905  		d := v.AuxInt
  8906  		_ = v.Args[1]
  8907  		v_0 := v.Args[0]
  8908  		if v_0.Op != OpARM64MOVDconst {
  8909  			break
  8910  		}
  8911  		c := v_0.AuxInt
  8912  		x := v.Args[1]
  8913  		v.reset(OpARM64ORconst)
  8914  		v.AuxInt = c
  8915  		v0 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type)
  8916  		v0.AuxInt = d
  8917  		v0.AddArg(x)
  8918  		v.AddArg(v0)
  8919  		return true
  8920  	}
  8921  	// match: (ORshiftRL x (MOVDconst [c]) [d])
  8922  	// cond:
  8923  	// result: (ORconst  x [int64(uint64(c)>>uint64(d))])
  8924  	for {
  8925  		d := v.AuxInt
  8926  		_ = v.Args[1]
  8927  		x := v.Args[0]
  8928  		v_1 := v.Args[1]
  8929  		if v_1.Op != OpARM64MOVDconst {
  8930  			break
  8931  		}
  8932  		c := v_1.AuxInt
  8933  		v.reset(OpARM64ORconst)
  8934  		v.AuxInt = int64(uint64(c) >> uint64(d))
  8935  		v.AddArg(x)
  8936  		return true
  8937  	}
  8938  	// match: (ORshiftRL x y:(SRLconst x [c]) [d])
  8939  	// cond: c==d
  8940  	// result: y
  8941  	for {
  8942  		d := v.AuxInt
  8943  		_ = v.Args[1]
  8944  		x := v.Args[0]
  8945  		y := v.Args[1]
  8946  		if y.Op != OpARM64SRLconst {
  8947  			break
  8948  		}
  8949  		c := y.AuxInt
  8950  		if x != y.Args[0] {
  8951  			break
  8952  		}
  8953  		if !(c == d) {
  8954  			break
  8955  		}
  8956  		v.reset(OpCopy)
  8957  		v.Type = y.Type
  8958  		v.AddArg(y)
  8959  		return true
  8960  	}
  8961  	// match: (ORshiftRL [c] (SLLconst x [64-c]) x)
  8962  	// cond:
  8963  	// result: (RORconst [   c] x)
  8964  	for {
  8965  		c := v.AuxInt
  8966  		_ = v.Args[1]
  8967  		v_0 := v.Args[0]
  8968  		if v_0.Op != OpARM64SLLconst {
  8969  			break
  8970  		}
  8971  		if v_0.AuxInt != 64-c {
  8972  			break
  8973  		}
  8974  		x := v_0.Args[0]
  8975  		if x != v.Args[1] {
  8976  			break
  8977  		}
  8978  		v.reset(OpARM64RORconst)
  8979  		v.AuxInt = c
  8980  		v.AddArg(x)
  8981  		return true
  8982  	}
  8983  	// match: (ORshiftRL <t> [c] (SLLconst x [32-c]) (MOVWUreg x))
  8984  	// cond: c < 32 && t.Size() == 4
  8985  	// result: (RORWconst [   c] x)
  8986  	for {
  8987  		t := v.Type
  8988  		c := v.AuxInt
  8989  		_ = v.Args[1]
  8990  		v_0 := v.Args[0]
  8991  		if v_0.Op != OpARM64SLLconst {
  8992  			break
  8993  		}
  8994  		if v_0.AuxInt != 32-c {
  8995  			break
  8996  		}
  8997  		x := v_0.Args[0]
  8998  		v_1 := v.Args[1]
  8999  		if v_1.Op != OpARM64MOVWUreg {
  9000  			break
  9001  		}
  9002  		if x != v_1.Args[0] {
  9003  			break
  9004  		}
  9005  		if !(c < 32 && t.Size() == 4) {
  9006  			break
  9007  		}
  9008  		v.reset(OpARM64RORWconst)
  9009  		v.AuxInt = c
  9010  		v.AddArg(x)
  9011  		return true
  9012  	}
  9013  	return false
  9014  }
  9015  func rewriteValueARM64_OpARM64SLL_0(v *Value) bool {
  9016  	// match: (SLL x (MOVDconst [c]))
  9017  	// cond:
  9018  	// result: (SLLconst x [c&63])
  9019  	for {
  9020  		_ = v.Args[1]
  9021  		x := v.Args[0]
  9022  		v_1 := v.Args[1]
  9023  		if v_1.Op != OpARM64MOVDconst {
  9024  			break
  9025  		}
  9026  		c := v_1.AuxInt
  9027  		v.reset(OpARM64SLLconst)
  9028  		v.AuxInt = c & 63
  9029  		v.AddArg(x)
  9030  		return true
  9031  	}
  9032  	return false
  9033  }
  9034  func rewriteValueARM64_OpARM64SLLconst_0(v *Value) bool {
  9035  	// match: (SLLconst [c] (MOVDconst [d]))
  9036  	// cond:
  9037  	// result: (MOVDconst [int64(d)<<uint64(c)])
  9038  	for {
  9039  		c := v.AuxInt
  9040  		v_0 := v.Args[0]
  9041  		if v_0.Op != OpARM64MOVDconst {
  9042  			break
  9043  		}
  9044  		d := v_0.AuxInt
  9045  		v.reset(OpARM64MOVDconst)
  9046  		v.AuxInt = int64(d) << uint64(c)
  9047  		return true
  9048  	}
  9049  	// match: (SLLconst [c] (SRLconst [c] x))
  9050  	// cond: 0 < c && c < 64
  9051  	// result: (ANDconst [^(1<<uint(c)-1)] x)
  9052  	for {
  9053  		c := v.AuxInt
  9054  		v_0 := v.Args[0]
  9055  		if v_0.Op != OpARM64SRLconst {
  9056  			break
  9057  		}
  9058  		if v_0.AuxInt != c {
  9059  			break
  9060  		}
  9061  		x := v_0.Args[0]
  9062  		if !(0 < c && c < 64) {
  9063  			break
  9064  		}
  9065  		v.reset(OpARM64ANDconst)
  9066  		v.AuxInt = ^(1<<uint(c) - 1)
  9067  		v.AddArg(x)
  9068  		return true
  9069  	}
  9070  	return false
  9071  }
  9072  func rewriteValueARM64_OpARM64SRA_0(v *Value) bool {
  9073  	// match: (SRA x (MOVDconst [c]))
  9074  	// cond:
  9075  	// result: (SRAconst x [c&63])
  9076  	for {
  9077  		_ = v.Args[1]
  9078  		x := v.Args[0]
  9079  		v_1 := v.Args[1]
  9080  		if v_1.Op != OpARM64MOVDconst {
  9081  			break
  9082  		}
  9083  		c := v_1.AuxInt
  9084  		v.reset(OpARM64SRAconst)
  9085  		v.AuxInt = c & 63
  9086  		v.AddArg(x)
  9087  		return true
  9088  	}
  9089  	return false
  9090  }
  9091  func rewriteValueARM64_OpARM64SRAconst_0(v *Value) bool {
  9092  	// match: (SRAconst [c] (MOVDconst [d]))
  9093  	// cond:
  9094  	// result: (MOVDconst [int64(d)>>uint64(c)])
  9095  	for {
  9096  		c := v.AuxInt
  9097  		v_0 := v.Args[0]
  9098  		if v_0.Op != OpARM64MOVDconst {
  9099  			break
  9100  		}
  9101  		d := v_0.AuxInt
  9102  		v.reset(OpARM64MOVDconst)
  9103  		v.AuxInt = int64(d) >> uint64(c)
  9104  		return true
  9105  	}
  9106  	return false
  9107  }
  9108  func rewriteValueARM64_OpARM64SRL_0(v *Value) bool {
  9109  	// match: (SRL x (MOVDconst [c]))
  9110  	// cond:
  9111  	// result: (SRLconst x [c&63])
  9112  	for {
  9113  		_ = v.Args[1]
  9114  		x := v.Args[0]
  9115  		v_1 := v.Args[1]
  9116  		if v_1.Op != OpARM64MOVDconst {
  9117  			break
  9118  		}
  9119  		c := v_1.AuxInt
  9120  		v.reset(OpARM64SRLconst)
  9121  		v.AuxInt = c & 63
  9122  		v.AddArg(x)
  9123  		return true
  9124  	}
  9125  	return false
  9126  }
  9127  func rewriteValueARM64_OpARM64SRLconst_0(v *Value) bool {
  9128  	// match: (SRLconst [c] (MOVDconst [d]))
  9129  	// cond:
  9130  	// result: (MOVDconst [int64(uint64(d)>>uint64(c))])
  9131  	for {
  9132  		c := v.AuxInt
  9133  		v_0 := v.Args[0]
  9134  		if v_0.Op != OpARM64MOVDconst {
  9135  			break
  9136  		}
  9137  		d := v_0.AuxInt
  9138  		v.reset(OpARM64MOVDconst)
  9139  		v.AuxInt = int64(uint64(d) >> uint64(c))
  9140  		return true
  9141  	}
  9142  	// match: (SRLconst [c] (SLLconst [c] x))
  9143  	// cond: 0 < c && c < 64
  9144  	// result: (ANDconst [1<<uint(64-c)-1] x)
  9145  	for {
  9146  		c := v.AuxInt
  9147  		v_0 := v.Args[0]
  9148  		if v_0.Op != OpARM64SLLconst {
  9149  			break
  9150  		}
  9151  		if v_0.AuxInt != c {
  9152  			break
  9153  		}
  9154  		x := v_0.Args[0]
  9155  		if !(0 < c && c < 64) {
  9156  			break
  9157  		}
  9158  		v.reset(OpARM64ANDconst)
  9159  		v.AuxInt = 1<<uint(64-c) - 1
  9160  		v.AddArg(x)
  9161  		return true
  9162  	}
  9163  	return false
  9164  }
  9165  func rewriteValueARM64_OpARM64SUB_0(v *Value) bool {
  9166  	b := v.Block
  9167  	_ = b
  9168  	// match: (SUB x (MOVDconst [c]))
  9169  	// cond:
  9170  	// result: (SUBconst [c] x)
  9171  	for {
  9172  		_ = v.Args[1]
  9173  		x := v.Args[0]
  9174  		v_1 := v.Args[1]
  9175  		if v_1.Op != OpARM64MOVDconst {
  9176  			break
  9177  		}
  9178  		c := v_1.AuxInt
  9179  		v.reset(OpARM64SUBconst)
  9180  		v.AuxInt = c
  9181  		v.AddArg(x)
  9182  		return true
  9183  	}
  9184  	// match: (SUB x x)
  9185  	// cond:
  9186  	// result: (MOVDconst [0])
  9187  	for {
  9188  		_ = v.Args[1]
  9189  		x := v.Args[0]
  9190  		if x != v.Args[1] {
  9191  			break
  9192  		}
  9193  		v.reset(OpARM64MOVDconst)
  9194  		v.AuxInt = 0
  9195  		return true
  9196  	}
  9197  	// match: (SUB x (SUB y z))
  9198  	// cond:
  9199  	// result: (SUB (ADD <v.Type> x z) y)
  9200  	for {
  9201  		_ = v.Args[1]
  9202  		x := v.Args[0]
  9203  		v_1 := v.Args[1]
  9204  		if v_1.Op != OpARM64SUB {
  9205  			break
  9206  		}
  9207  		_ = v_1.Args[1]
  9208  		y := v_1.Args[0]
  9209  		z := v_1.Args[1]
  9210  		v.reset(OpARM64SUB)
  9211  		v0 := b.NewValue0(v.Pos, OpARM64ADD, v.Type)
  9212  		v0.AddArg(x)
  9213  		v0.AddArg(z)
  9214  		v.AddArg(v0)
  9215  		v.AddArg(y)
  9216  		return true
  9217  	}
  9218  	// match: (SUB (SUB x y) z)
  9219  	// cond:
  9220  	// result: (SUB x (ADD <y.Type> y z))
  9221  	for {
  9222  		_ = v.Args[1]
  9223  		v_0 := v.Args[0]
  9224  		if v_0.Op != OpARM64SUB {
  9225  			break
  9226  		}
  9227  		_ = v_0.Args[1]
  9228  		x := v_0.Args[0]
  9229  		y := v_0.Args[1]
  9230  		z := v.Args[1]
  9231  		v.reset(OpARM64SUB)
  9232  		v.AddArg(x)
  9233  		v0 := b.NewValue0(v.Pos, OpARM64ADD, y.Type)
  9234  		v0.AddArg(y)
  9235  		v0.AddArg(z)
  9236  		v.AddArg(v0)
  9237  		return true
  9238  	}
  9239  	// match: (SUB x (SLLconst [c] y))
  9240  	// cond:
  9241  	// result: (SUBshiftLL x y [c])
  9242  	for {
  9243  		_ = v.Args[1]
  9244  		x := v.Args[0]
  9245  		v_1 := v.Args[1]
  9246  		if v_1.Op != OpARM64SLLconst {
  9247  			break
  9248  		}
  9249  		c := v_1.AuxInt
  9250  		y := v_1.Args[0]
  9251  		v.reset(OpARM64SUBshiftLL)
  9252  		v.AuxInt = c
  9253  		v.AddArg(x)
  9254  		v.AddArg(y)
  9255  		return true
  9256  	}
  9257  	// match: (SUB x (SRLconst [c] y))
  9258  	// cond:
  9259  	// result: (SUBshiftRL x y [c])
  9260  	for {
  9261  		_ = v.Args[1]
  9262  		x := v.Args[0]
  9263  		v_1 := v.Args[1]
  9264  		if v_1.Op != OpARM64SRLconst {
  9265  			break
  9266  		}
  9267  		c := v_1.AuxInt
  9268  		y := v_1.Args[0]
  9269  		v.reset(OpARM64SUBshiftRL)
  9270  		v.AuxInt = c
  9271  		v.AddArg(x)
  9272  		v.AddArg(y)
  9273  		return true
  9274  	}
  9275  	// match: (SUB x (SRAconst [c] y))
  9276  	// cond:
  9277  	// result: (SUBshiftRA x y [c])
  9278  	for {
  9279  		_ = v.Args[1]
  9280  		x := v.Args[0]
  9281  		v_1 := v.Args[1]
  9282  		if v_1.Op != OpARM64SRAconst {
  9283  			break
  9284  		}
  9285  		c := v_1.AuxInt
  9286  		y := v_1.Args[0]
  9287  		v.reset(OpARM64SUBshiftRA)
  9288  		v.AuxInt = c
  9289  		v.AddArg(x)
  9290  		v.AddArg(y)
  9291  		return true
  9292  	}
  9293  	return false
  9294  }
  9295  func rewriteValueARM64_OpARM64SUBconst_0(v *Value) bool {
  9296  	// match: (SUBconst [0] x)
  9297  	// cond:
  9298  	// result: x
  9299  	for {
  9300  		if v.AuxInt != 0 {
  9301  			break
  9302  		}
  9303  		x := v.Args[0]
  9304  		v.reset(OpCopy)
  9305  		v.Type = x.Type
  9306  		v.AddArg(x)
  9307  		return true
  9308  	}
  9309  	// match: (SUBconst [c] (MOVDconst [d]))
  9310  	// cond:
  9311  	// result: (MOVDconst [d-c])
  9312  	for {
  9313  		c := v.AuxInt
  9314  		v_0 := v.Args[0]
  9315  		if v_0.Op != OpARM64MOVDconst {
  9316  			break
  9317  		}
  9318  		d := v_0.AuxInt
  9319  		v.reset(OpARM64MOVDconst)
  9320  		v.AuxInt = d - c
  9321  		return true
  9322  	}
  9323  	// match: (SUBconst [c] (SUBconst [d] x))
  9324  	// cond:
  9325  	// result: (ADDconst [-c-d] x)
  9326  	for {
  9327  		c := v.AuxInt
  9328  		v_0 := v.Args[0]
  9329  		if v_0.Op != OpARM64SUBconst {
  9330  			break
  9331  		}
  9332  		d := v_0.AuxInt
  9333  		x := v_0.Args[0]
  9334  		v.reset(OpARM64ADDconst)
  9335  		v.AuxInt = -c - d
  9336  		v.AddArg(x)
  9337  		return true
  9338  	}
  9339  	// match: (SUBconst [c] (ADDconst [d] x))
  9340  	// cond:
  9341  	// result: (ADDconst [-c+d] x)
  9342  	for {
  9343  		c := v.AuxInt
  9344  		v_0 := v.Args[0]
  9345  		if v_0.Op != OpARM64ADDconst {
  9346  			break
  9347  		}
  9348  		d := v_0.AuxInt
  9349  		x := v_0.Args[0]
  9350  		v.reset(OpARM64ADDconst)
  9351  		v.AuxInt = -c + d
  9352  		v.AddArg(x)
  9353  		return true
  9354  	}
  9355  	return false
  9356  }
  9357  func rewriteValueARM64_OpARM64SUBshiftLL_0(v *Value) bool {
  9358  	// match: (SUBshiftLL x (MOVDconst [c]) [d])
  9359  	// cond:
  9360  	// result: (SUBconst x [int64(uint64(c)<<uint64(d))])
  9361  	for {
  9362  		d := v.AuxInt
  9363  		_ = v.Args[1]
  9364  		x := v.Args[0]
  9365  		v_1 := v.Args[1]
  9366  		if v_1.Op != OpARM64MOVDconst {
  9367  			break
  9368  		}
  9369  		c := v_1.AuxInt
  9370  		v.reset(OpARM64SUBconst)
  9371  		v.AuxInt = int64(uint64(c) << uint64(d))
  9372  		v.AddArg(x)
  9373  		return true
  9374  	}
  9375  	// match: (SUBshiftLL x (SLLconst x [c]) [d])
  9376  	// cond: c==d
  9377  	// result: (MOVDconst [0])
  9378  	for {
  9379  		d := v.AuxInt
  9380  		_ = v.Args[1]
  9381  		x := v.Args[0]
  9382  		v_1 := v.Args[1]
  9383  		if v_1.Op != OpARM64SLLconst {
  9384  			break
  9385  		}
  9386  		c := v_1.AuxInt
  9387  		if x != v_1.Args[0] {
  9388  			break
  9389  		}
  9390  		if !(c == d) {
  9391  			break
  9392  		}
  9393  		v.reset(OpARM64MOVDconst)
  9394  		v.AuxInt = 0
  9395  		return true
  9396  	}
  9397  	return false
  9398  }
  9399  func rewriteValueARM64_OpARM64SUBshiftRA_0(v *Value) bool {
  9400  	// match: (SUBshiftRA x (MOVDconst [c]) [d])
  9401  	// cond:
  9402  	// result: (SUBconst x [int64(int64(c)>>uint64(d))])
  9403  	for {
  9404  		d := v.AuxInt
  9405  		_ = v.Args[1]
  9406  		x := v.Args[0]
  9407  		v_1 := v.Args[1]
  9408  		if v_1.Op != OpARM64MOVDconst {
  9409  			break
  9410  		}
  9411  		c := v_1.AuxInt
  9412  		v.reset(OpARM64SUBconst)
  9413  		v.AuxInt = int64(int64(c) >> uint64(d))
  9414  		v.AddArg(x)
  9415  		return true
  9416  	}
  9417  	// match: (SUBshiftRA x (SRAconst x [c]) [d])
  9418  	// cond: c==d
  9419  	// result: (MOVDconst [0])
  9420  	for {
  9421  		d := v.AuxInt
  9422  		_ = v.Args[1]
  9423  		x := v.Args[0]
  9424  		v_1 := v.Args[1]
  9425  		if v_1.Op != OpARM64SRAconst {
  9426  			break
  9427  		}
  9428  		c := v_1.AuxInt
  9429  		if x != v_1.Args[0] {
  9430  			break
  9431  		}
  9432  		if !(c == d) {
  9433  			break
  9434  		}
  9435  		v.reset(OpARM64MOVDconst)
  9436  		v.AuxInt = 0
  9437  		return true
  9438  	}
  9439  	return false
  9440  }
  9441  func rewriteValueARM64_OpARM64SUBshiftRL_0(v *Value) bool {
  9442  	// match: (SUBshiftRL x (MOVDconst [c]) [d])
  9443  	// cond:
  9444  	// result: (SUBconst x [int64(uint64(c)>>uint64(d))])
  9445  	for {
  9446  		d := v.AuxInt
  9447  		_ = v.Args[1]
  9448  		x := v.Args[0]
  9449  		v_1 := v.Args[1]
  9450  		if v_1.Op != OpARM64MOVDconst {
  9451  			break
  9452  		}
  9453  		c := v_1.AuxInt
  9454  		v.reset(OpARM64SUBconst)
  9455  		v.AuxInt = int64(uint64(c) >> uint64(d))
  9456  		v.AddArg(x)
  9457  		return true
  9458  	}
  9459  	// match: (SUBshiftRL x (SRLconst x [c]) [d])
  9460  	// cond: c==d
  9461  	// result: (MOVDconst [0])
  9462  	for {
  9463  		d := v.AuxInt
  9464  		_ = v.Args[1]
  9465  		x := v.Args[0]
  9466  		v_1 := v.Args[1]
  9467  		if v_1.Op != OpARM64SRLconst {
  9468  			break
  9469  		}
  9470  		c := v_1.AuxInt
  9471  		if x != v_1.Args[0] {
  9472  			break
  9473  		}
  9474  		if !(c == d) {
  9475  			break
  9476  		}
  9477  		v.reset(OpARM64MOVDconst)
  9478  		v.AuxInt = 0
  9479  		return true
  9480  	}
  9481  	return false
  9482  }
  9483  func rewriteValueARM64_OpARM64UDIV_0(v *Value) bool {
  9484  	// match: (UDIV x (MOVDconst [1]))
  9485  	// cond:
  9486  	// result: x
  9487  	for {
  9488  		_ = v.Args[1]
  9489  		x := v.Args[0]
  9490  		v_1 := v.Args[1]
  9491  		if v_1.Op != OpARM64MOVDconst {
  9492  			break
  9493  		}
  9494  		if v_1.AuxInt != 1 {
  9495  			break
  9496  		}
  9497  		v.reset(OpCopy)
  9498  		v.Type = x.Type
  9499  		v.AddArg(x)
  9500  		return true
  9501  	}
  9502  	// match: (UDIV x (MOVDconst [c]))
  9503  	// cond: isPowerOfTwo(c)
  9504  	// result: (SRLconst [log2(c)] x)
  9505  	for {
  9506  		_ = v.Args[1]
  9507  		x := v.Args[0]
  9508  		v_1 := v.Args[1]
  9509  		if v_1.Op != OpARM64MOVDconst {
  9510  			break
  9511  		}
  9512  		c := v_1.AuxInt
  9513  		if !(isPowerOfTwo(c)) {
  9514  			break
  9515  		}
  9516  		v.reset(OpARM64SRLconst)
  9517  		v.AuxInt = log2(c)
  9518  		v.AddArg(x)
  9519  		return true
  9520  	}
  9521  	// match: (UDIV (MOVDconst [c]) (MOVDconst [d]))
  9522  	// cond:
  9523  	// result: (MOVDconst [int64(uint64(c)/uint64(d))])
  9524  	for {
  9525  		_ = v.Args[1]
  9526  		v_0 := v.Args[0]
  9527  		if v_0.Op != OpARM64MOVDconst {
  9528  			break
  9529  		}
  9530  		c := v_0.AuxInt
  9531  		v_1 := v.Args[1]
  9532  		if v_1.Op != OpARM64MOVDconst {
  9533  			break
  9534  		}
  9535  		d := v_1.AuxInt
  9536  		v.reset(OpARM64MOVDconst)
  9537  		v.AuxInt = int64(uint64(c) / uint64(d))
  9538  		return true
  9539  	}
  9540  	return false
  9541  }
  9542  func rewriteValueARM64_OpARM64UDIVW_0(v *Value) bool {
  9543  	// match: (UDIVW x (MOVDconst [c]))
  9544  	// cond: uint32(c)==1
  9545  	// result: x
  9546  	for {
  9547  		_ = v.Args[1]
  9548  		x := v.Args[0]
  9549  		v_1 := v.Args[1]
  9550  		if v_1.Op != OpARM64MOVDconst {
  9551  			break
  9552  		}
  9553  		c := v_1.AuxInt
  9554  		if !(uint32(c) == 1) {
  9555  			break
  9556  		}
  9557  		v.reset(OpCopy)
  9558  		v.Type = x.Type
  9559  		v.AddArg(x)
  9560  		return true
  9561  	}
  9562  	// match: (UDIVW x (MOVDconst [c]))
  9563  	// cond: isPowerOfTwo(c) && is32Bit(c)
  9564  	// result: (SRLconst [log2(c)] x)
  9565  	for {
  9566  		_ = v.Args[1]
  9567  		x := v.Args[0]
  9568  		v_1 := v.Args[1]
  9569  		if v_1.Op != OpARM64MOVDconst {
  9570  			break
  9571  		}
  9572  		c := v_1.AuxInt
  9573  		if !(isPowerOfTwo(c) && is32Bit(c)) {
  9574  			break
  9575  		}
  9576  		v.reset(OpARM64SRLconst)
  9577  		v.AuxInt = log2(c)
  9578  		v.AddArg(x)
  9579  		return true
  9580  	}
  9581  	// match: (UDIVW (MOVDconst [c]) (MOVDconst [d]))
  9582  	// cond:
  9583  	// result: (MOVDconst [int64(uint32(c)/uint32(d))])
  9584  	for {
  9585  		_ = v.Args[1]
  9586  		v_0 := v.Args[0]
  9587  		if v_0.Op != OpARM64MOVDconst {
  9588  			break
  9589  		}
  9590  		c := v_0.AuxInt
  9591  		v_1 := v.Args[1]
  9592  		if v_1.Op != OpARM64MOVDconst {
  9593  			break
  9594  		}
  9595  		d := v_1.AuxInt
  9596  		v.reset(OpARM64MOVDconst)
  9597  		v.AuxInt = int64(uint32(c) / uint32(d))
  9598  		return true
  9599  	}
  9600  	return false
  9601  }
  9602  func rewriteValueARM64_OpARM64UMOD_0(v *Value) bool {
  9603  	// match: (UMOD _ (MOVDconst [1]))
  9604  	// cond:
  9605  	// result: (MOVDconst [0])
  9606  	for {
  9607  		_ = v.Args[1]
  9608  		v_1 := v.Args[1]
  9609  		if v_1.Op != OpARM64MOVDconst {
  9610  			break
  9611  		}
  9612  		if v_1.AuxInt != 1 {
  9613  			break
  9614  		}
  9615  		v.reset(OpARM64MOVDconst)
  9616  		v.AuxInt = 0
  9617  		return true
  9618  	}
  9619  	// match: (UMOD x (MOVDconst [c]))
  9620  	// cond: isPowerOfTwo(c)
  9621  	// result: (ANDconst [c-1] x)
  9622  	for {
  9623  		_ = v.Args[1]
  9624  		x := v.Args[0]
  9625  		v_1 := v.Args[1]
  9626  		if v_1.Op != OpARM64MOVDconst {
  9627  			break
  9628  		}
  9629  		c := v_1.AuxInt
  9630  		if !(isPowerOfTwo(c)) {
  9631  			break
  9632  		}
  9633  		v.reset(OpARM64ANDconst)
  9634  		v.AuxInt = c - 1
  9635  		v.AddArg(x)
  9636  		return true
  9637  	}
  9638  	// match: (UMOD (MOVDconst [c]) (MOVDconst [d]))
  9639  	// cond:
  9640  	// result: (MOVDconst [int64(uint64(c)%uint64(d))])
  9641  	for {
  9642  		_ = v.Args[1]
  9643  		v_0 := v.Args[0]
  9644  		if v_0.Op != OpARM64MOVDconst {
  9645  			break
  9646  		}
  9647  		c := v_0.AuxInt
  9648  		v_1 := v.Args[1]
  9649  		if v_1.Op != OpARM64MOVDconst {
  9650  			break
  9651  		}
  9652  		d := v_1.AuxInt
  9653  		v.reset(OpARM64MOVDconst)
  9654  		v.AuxInt = int64(uint64(c) % uint64(d))
  9655  		return true
  9656  	}
  9657  	return false
  9658  }
  9659  func rewriteValueARM64_OpARM64UMODW_0(v *Value) bool {
  9660  	// match: (UMODW _ (MOVDconst [c]))
  9661  	// cond: uint32(c)==1
  9662  	// result: (MOVDconst [0])
  9663  	for {
  9664  		_ = v.Args[1]
  9665  		v_1 := v.Args[1]
  9666  		if v_1.Op != OpARM64MOVDconst {
  9667  			break
  9668  		}
  9669  		c := v_1.AuxInt
  9670  		if !(uint32(c) == 1) {
  9671  			break
  9672  		}
  9673  		v.reset(OpARM64MOVDconst)
  9674  		v.AuxInt = 0
  9675  		return true
  9676  	}
  9677  	// match: (UMODW x (MOVDconst [c]))
  9678  	// cond: isPowerOfTwo(c) && is32Bit(c)
  9679  	// result: (ANDconst [c-1] x)
  9680  	for {
  9681  		_ = v.Args[1]
  9682  		x := v.Args[0]
  9683  		v_1 := v.Args[1]
  9684  		if v_1.Op != OpARM64MOVDconst {
  9685  			break
  9686  		}
  9687  		c := v_1.AuxInt
  9688  		if !(isPowerOfTwo(c) && is32Bit(c)) {
  9689  			break
  9690  		}
  9691  		v.reset(OpARM64ANDconst)
  9692  		v.AuxInt = c - 1
  9693  		v.AddArg(x)
  9694  		return true
  9695  	}
  9696  	// match: (UMODW (MOVDconst [c]) (MOVDconst [d]))
  9697  	// cond:
  9698  	// result: (MOVDconst [int64(uint32(c)%uint32(d))])
  9699  	for {
  9700  		_ = v.Args[1]
  9701  		v_0 := v.Args[0]
  9702  		if v_0.Op != OpARM64MOVDconst {
  9703  			break
  9704  		}
  9705  		c := v_0.AuxInt
  9706  		v_1 := v.Args[1]
  9707  		if v_1.Op != OpARM64MOVDconst {
  9708  			break
  9709  		}
  9710  		d := v_1.AuxInt
  9711  		v.reset(OpARM64MOVDconst)
  9712  		v.AuxInt = int64(uint32(c) % uint32(d))
  9713  		return true
  9714  	}
  9715  	return false
  9716  }
// rewriteValueARM64_OpARM64XOR_0 rewrites a 64-bit XOR value: it folds a
// MOVDconst operand into XORconst (either operand order), reduces
// (XOR x x) to the constant 0, and fuses a constant-shifted operand
// (SLLconst/SRLconst/SRAconst) into the corresponding
// XORshift{LL,RL,RA} fused op. Generated from gen/ARM64.rules; the
// match/cond/result comments below describe each rule.
func rewriteValueARM64_OpARM64XOR_0(v *Value) bool {
	// match: (XOR x (MOVDconst [c]))
	// cond:
	// result: (XORconst [c] x)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64XORconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (XOR (MOVDconst [c]) x)
	// cond:
	// result: (XORconst [c] x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		v.reset(OpARM64XORconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (XOR x x)
	// cond:
	// result: (MOVDconst [0])
	for {
		_ = v.Args[1]
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (XOR x (SLLconst [c] y))
	// cond:
	// result: (XORshiftLL x y [c])
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SLLconst {
			break
		}
		c := v_1.AuxInt
		y := v_1.Args[0]
		v.reset(OpARM64XORshiftLL)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (XOR (SLLconst [c] y) x)
	// cond:
	// result: (XORshiftLL x y [c])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SLLconst {
			break
		}
		c := v_0.AuxInt
		y := v_0.Args[0]
		x := v.Args[1]
		v.reset(OpARM64XORshiftLL)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (XOR x (SRLconst [c] y))
	// cond:
	// result: (XORshiftRL x y [c])
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRLconst {
			break
		}
		c := v_1.AuxInt
		y := v_1.Args[0]
		v.reset(OpARM64XORshiftRL)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (XOR (SRLconst [c] y) x)
	// cond:
	// result: (XORshiftRL x y [c])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SRLconst {
			break
		}
		c := v_0.AuxInt
		y := v_0.Args[0]
		x := v.Args[1]
		v.reset(OpARM64XORshiftRL)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (XOR x (SRAconst [c] y))
	// cond:
	// result: (XORshiftRA x y [c])
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRAconst {
			break
		}
		c := v_1.AuxInt
		y := v_1.Args[0]
		v.reset(OpARM64XORshiftRA)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (XOR (SRAconst [c] y) x)
	// cond:
	// result: (XORshiftRA x y [c])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SRAconst {
			break
		}
		c := v_0.AuxInt
		y := v_0.Args[0]
		x := v.Args[1]
		v.reset(OpARM64XORshiftRA)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64XORconst_0 simplifies XOR-with-constant:
// XOR by 0 is the identity, XOR by -1 becomes MVN (bitwise NOT), XOR of
// two constants is folded, and nested XORconst ops are merged into one.
// Generated from gen/ARM64.rules.
func rewriteValueARM64_OpARM64XORconst_0(v *Value) bool {
	// match: (XORconst [0] x)
	// cond:
	// result: x
	for {
		if v.AuxInt != 0 {
			break
		}
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (XORconst [-1] x)
	// cond:
	// result: (MVN x)
	for {
		if v.AuxInt != -1 {
			break
		}
		x := v.Args[0]
		v.reset(OpARM64MVN)
		v.AddArg(x)
		return true
	}
	// match: (XORconst [c] (MOVDconst [d]))
	// cond:
	// result: (MOVDconst [c^d])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = c ^ d
		return true
	}
	// match: (XORconst [c] (XORconst [d] x))
	// cond:
	// result: (XORconst [c^d] x)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64XORconst {
			break
		}
		d := v_0.AuxInt
		x := v_0.Args[0]
		v.reset(OpARM64XORconst)
		v.AuxInt = c ^ d
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64XORshiftLL_0 simplifies (XORshiftLL x y [d]),
// i.e. x ^ (y << d): it folds constant operands, cancels
// x ^ (x << c) when the shift amounts match is not applicable here but
// x == shifted x with equal counts yields 0, and recognizes the
// shift-XOR pairs that form 64-bit and 32-bit rotate patterns
// (RORconst / RORWconst). Generated from gen/ARM64.rules.
func rewriteValueARM64_OpARM64XORshiftLL_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (XORshiftLL (MOVDconst [c]) x [d])
	// cond:
	// result: (XORconst [c] (SLLconst <x.Type> x [d]))
	for {
		d := v.AuxInt
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		v.reset(OpARM64XORconst)
		v.AuxInt = c
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
		v0.AuxInt = d
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (XORshiftLL x (MOVDconst [c]) [d])
	// cond:
	// result: (XORconst x [int64(uint64(c)<<uint64(d))])
	for {
		d := v.AuxInt
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64XORconst)
		v.AuxInt = int64(uint64(c) << uint64(d))
		v.AddArg(x)
		return true
	}
	// match: (XORshiftLL x (SLLconst x [c]) [d])
	// cond: c==d
	// result: (MOVDconst [0])
	for {
		d := v.AuxInt
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SLLconst {
			break
		}
		c := v_1.AuxInt
		if x != v_1.Args[0] {
			break
		}
		if !(c == d) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (XORshiftLL [c] (SRLconst x [64-c]) x)
	// cond:
	// result: (RORconst [64-c] x)
	for {
		c := v.AuxInt
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SRLconst {
			break
		}
		if v_0.AuxInt != 64-c {
			break
		}
		x := v_0.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpARM64RORconst)
		v.AuxInt = 64 - c
		v.AddArg(x)
		return true
	}
	// match: (XORshiftLL <t> [c] (SRLconst (MOVWUreg x) [32-c]) x)
	// cond: c < 32 && t.Size() == 4
	// result: (RORWconst [32-c] x)
	for {
		t := v.Type
		c := v.AuxInt
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SRLconst {
			break
		}
		if v_0.AuxInt != 32-c {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpARM64MOVWUreg {
			break
		}
		x := v_0_0.Args[0]
		if x != v.Args[1] {
			break
		}
		if !(c < 32 && t.Size() == 4) {
			break
		}
		v.reset(OpARM64RORWconst)
		v.AuxInt = 32 - c
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64XORshiftRA_0 simplifies (XORshiftRA x y [d]),
// i.e. x ^ (y >> d) with an arithmetic shift: constant operands are
// folded, and x ^ (x >> c) with c == d reduces to 0.
// Generated from gen/ARM64.rules.
func rewriteValueARM64_OpARM64XORshiftRA_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (XORshiftRA (MOVDconst [c]) x [d])
	// cond:
	// result: (XORconst [c] (SRAconst <x.Type> x [d]))
	for {
		d := v.AuxInt
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		v.reset(OpARM64XORconst)
		v.AuxInt = c
		v0 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type)
		v0.AuxInt = d
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (XORshiftRA x (MOVDconst [c]) [d])
	// cond:
	// result: (XORconst x [int64(int64(c)>>uint64(d))])
	for {
		d := v.AuxInt
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64XORconst)
		v.AuxInt = int64(int64(c) >> uint64(d))
		v.AddArg(x)
		return true
	}
	// match: (XORshiftRA x (SRAconst x [c]) [d])
	// cond: c==d
	// result: (MOVDconst [0])
	for {
		d := v.AuxInt
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRAconst {
			break
		}
		c := v_1.AuxInt
		if x != v_1.Args[0] {
			break
		}
		if !(c == d) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64XORshiftRL_0 simplifies (XORshiftRL x y [d]),
// i.e. x ^ (y >> d) with a logical shift: constant operands are folded,
// x ^ (x >> c) with c == d reduces to 0, and the SLL/SRL-pair rotate
// patterns are collapsed into RORconst / RORWconst.
// Generated from gen/ARM64.rules.
func rewriteValueARM64_OpARM64XORshiftRL_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (XORshiftRL (MOVDconst [c]) x [d])
	// cond:
	// result: (XORconst [c] (SRLconst <x.Type> x [d]))
	for {
		d := v.AuxInt
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		v.reset(OpARM64XORconst)
		v.AuxInt = c
		v0 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type)
		v0.AuxInt = d
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (XORshiftRL x (MOVDconst [c]) [d])
	// cond:
	// result: (XORconst x [int64(uint64(c)>>uint64(d))])
	for {
		d := v.AuxInt
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64XORconst)
		v.AuxInt = int64(uint64(c) >> uint64(d))
		v.AddArg(x)
		return true
	}
	// match: (XORshiftRL x (SRLconst x [c]) [d])
	// cond: c==d
	// result: (MOVDconst [0])
	for {
		d := v.AuxInt
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRLconst {
			break
		}
		c := v_1.AuxInt
		if x != v_1.Args[0] {
			break
		}
		if !(c == d) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (XORshiftRL [c] (SLLconst x [64-c]) x)
	// cond:
	// result: (RORconst [   c] x)
	for {
		c := v.AuxInt
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SLLconst {
			break
		}
		if v_0.AuxInt != 64-c {
			break
		}
		x := v_0.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpARM64RORconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (XORshiftRL <t> [c] (SLLconst x [32-c]) (MOVWUreg x))
	// cond: c < 32 && t.Size() == 4
	// result: (RORWconst [   c] x)
	for {
		t := v.Type
		c := v.AuxInt
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SLLconst {
			break
		}
		if v_0.AuxInt != 32-c {
			break
		}
		x := v_0.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVWUreg {
			break
		}
		if x != v_1.Args[0] {
			break
		}
		if !(c < 32 && t.Size() == 4) {
			break
		}
		v.reset(OpARM64RORWconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	return false
}
// The following lowerings map the generic Add*/AddPtr ops onto ARM64
// machine ops: integer and pointer adds become ADD, float adds become
// FADDS/FADDD. Generated from gen/ARM64.rules.

// rewriteValueARM64_OpAdd16_0 lowers Add16 to ARM64 ADD.
func rewriteValueARM64_OpAdd16_0(v *Value) bool {
	// match: (Add16 x y)
	// cond:
	// result: (ADD x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64ADD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueARM64_OpAdd32_0 lowers Add32 to ARM64 ADD.
func rewriteValueARM64_OpAdd32_0(v *Value) bool {
	// match: (Add32 x y)
	// cond:
	// result: (ADD x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64ADD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueARM64_OpAdd32F_0 lowers Add32F to ARM64 FADDS.
func rewriteValueARM64_OpAdd32F_0(v *Value) bool {
	// match: (Add32F x y)
	// cond:
	// result: (FADDS x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64FADDS)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueARM64_OpAdd64_0 lowers Add64 to ARM64 ADD.
func rewriteValueARM64_OpAdd64_0(v *Value) bool {
	// match: (Add64 x y)
	// cond:
	// result: (ADD x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64ADD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueARM64_OpAdd64F_0 lowers Add64F to ARM64 FADDD.
func rewriteValueARM64_OpAdd64F_0(v *Value) bool {
	// match: (Add64F x y)
	// cond:
	// result: (FADDD x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64FADDD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueARM64_OpAdd8_0 lowers Add8 to ARM64 ADD.
func rewriteValueARM64_OpAdd8_0(v *Value) bool {
	// match: (Add8 x y)
	// cond:
	// result: (ADD x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64ADD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueARM64_OpAddPtr_0 lowers AddPtr to ARM64 ADD.
func rewriteValueARM64_OpAddPtr_0(v *Value) bool {
	// match: (AddPtr x y)
	// cond:
	// result: (ADD x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64ADD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueARM64_OpAddr_0 lowers Addr to MOVDaddr, carrying the
// symbol through in Aux. Generated from gen/ARM64.rules.
func rewriteValueARM64_OpAddr_0(v *Value) bool {
	// match: (Addr {sym} base)
	// cond:
	// result: (MOVDaddr {sym} base)
	for {
		sym := v.Aux
		base := v.Args[0]
		v.reset(OpARM64MOVDaddr)
		v.Aux = sym
		v.AddArg(base)
		return true
	}
}

// rewriteValueARM64_OpAnd16_0 lowers And16 to ARM64 AND.
func rewriteValueARM64_OpAnd16_0(v *Value) bool {
	// match: (And16 x y)
	// cond:
	// result: (AND x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64AND)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueARM64_OpAnd32_0 lowers And32 to ARM64 AND.
func rewriteValueARM64_OpAnd32_0(v *Value) bool {
	// match: (And32 x y)
	// cond:
	// result: (AND x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64AND)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueARM64_OpAnd64_0 lowers And64 to ARM64 AND.
func rewriteValueARM64_OpAnd64_0(v *Value) bool {
	// match: (And64 x y)
	// cond:
	// result: (AND x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64AND)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueARM64_OpAnd8_0 lowers And8 to ARM64 AND.
func rewriteValueARM64_OpAnd8_0(v *Value) bool {
	// match: (And8 x y)
	// cond:
	// result: (AND x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64AND)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueARM64_OpAndB_0 lowers AndB (boolean AND) to ARM64 AND.
func rewriteValueARM64_OpAndB_0(v *Value) bool {
	// match: (AndB x y)
	// cond:
	// result: (AND x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64AND)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// The following lowerings map the generic atomic ops onto ARM64:
// read-modify-write ops go to LoweredAtomic* pseudo-ops (expanded later),
// atomic loads go to load-acquire (LDARW/LDAR), and atomic stores go to
// store-release (STLRW/STLR). Generated from gen/ARM64.rules.

// rewriteValueARM64_OpAtomicAdd32_0 lowers AtomicAdd32 to LoweredAtomicAdd32.
func rewriteValueARM64_OpAtomicAdd32_0(v *Value) bool {
	// match: (AtomicAdd32 ptr val mem)
	// cond:
	// result: (LoweredAtomicAdd32 ptr val mem)
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64LoweredAtomicAdd32)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
}

// rewriteValueARM64_OpAtomicAdd64_0 lowers AtomicAdd64 to LoweredAtomicAdd64.
func rewriteValueARM64_OpAtomicAdd64_0(v *Value) bool {
	// match: (AtomicAdd64 ptr val mem)
	// cond:
	// result: (LoweredAtomicAdd64 ptr val mem)
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64LoweredAtomicAdd64)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
}

// rewriteValueARM64_OpAtomicAnd8_0 lowers AtomicAnd8 to LoweredAtomicAnd8.
func rewriteValueARM64_OpAtomicAnd8_0(v *Value) bool {
	// match: (AtomicAnd8 ptr val mem)
	// cond:
	// result: (LoweredAtomicAnd8 ptr val mem)
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64LoweredAtomicAnd8)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
}

// rewriteValueARM64_OpAtomicCompareAndSwap32_0 lowers AtomicCompareAndSwap32
// to LoweredAtomicCas32.
func rewriteValueARM64_OpAtomicCompareAndSwap32_0(v *Value) bool {
	// match: (AtomicCompareAndSwap32 ptr old new_ mem)
	// cond:
	// result: (LoweredAtomicCas32 ptr old new_ mem)
	for {
		_ = v.Args[3]
		ptr := v.Args[0]
		old := v.Args[1]
		new_ := v.Args[2]
		mem := v.Args[3]
		v.reset(OpARM64LoweredAtomicCas32)
		v.AddArg(ptr)
		v.AddArg(old)
		v.AddArg(new_)
		v.AddArg(mem)
		return true
	}
}

// rewriteValueARM64_OpAtomicCompareAndSwap64_0 lowers AtomicCompareAndSwap64
// to LoweredAtomicCas64.
func rewriteValueARM64_OpAtomicCompareAndSwap64_0(v *Value) bool {
	// match: (AtomicCompareAndSwap64 ptr old new_ mem)
	// cond:
	// result: (LoweredAtomicCas64 ptr old new_ mem)
	for {
		_ = v.Args[3]
		ptr := v.Args[0]
		old := v.Args[1]
		new_ := v.Args[2]
		mem := v.Args[3]
		v.reset(OpARM64LoweredAtomicCas64)
		v.AddArg(ptr)
		v.AddArg(old)
		v.AddArg(new_)
		v.AddArg(mem)
		return true
	}
}

// rewriteValueARM64_OpAtomicExchange32_0 lowers AtomicExchange32 to
// LoweredAtomicExchange32.
func rewriteValueARM64_OpAtomicExchange32_0(v *Value) bool {
	// match: (AtomicExchange32 ptr val mem)
	// cond:
	// result: (LoweredAtomicExchange32 ptr val mem)
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64LoweredAtomicExchange32)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
}

// rewriteValueARM64_OpAtomicExchange64_0 lowers AtomicExchange64 to
// LoweredAtomicExchange64.
func rewriteValueARM64_OpAtomicExchange64_0(v *Value) bool {
	// match: (AtomicExchange64 ptr val mem)
	// cond:
	// result: (LoweredAtomicExchange64 ptr val mem)
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64LoweredAtomicExchange64)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
}

// rewriteValueARM64_OpAtomicLoad32_0 lowers AtomicLoad32 to LDARW
// (32-bit load-acquire).
func rewriteValueARM64_OpAtomicLoad32_0(v *Value) bool {
	// match: (AtomicLoad32 ptr mem)
	// cond:
	// result: (LDARW ptr mem)
	for {
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		v.reset(OpARM64LDARW)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
}

// rewriteValueARM64_OpAtomicLoad64_0 lowers AtomicLoad64 to LDAR
// (64-bit load-acquire).
func rewriteValueARM64_OpAtomicLoad64_0(v *Value) bool {
	// match: (AtomicLoad64 ptr mem)
	// cond:
	// result: (LDAR  ptr mem)
	for {
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		v.reset(OpARM64LDAR)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
}

// rewriteValueARM64_OpAtomicLoadPtr_0 lowers AtomicLoadPtr to LDAR
// (pointer-sized load-acquire).
func rewriteValueARM64_OpAtomicLoadPtr_0(v *Value) bool {
	// match: (AtomicLoadPtr ptr mem)
	// cond:
	// result: (LDAR  ptr mem)
	for {
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		v.reset(OpARM64LDAR)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
}

// rewriteValueARM64_OpAtomicOr8_0 lowers AtomicOr8 to LoweredAtomicOr8.
func rewriteValueARM64_OpAtomicOr8_0(v *Value) bool {
	// match: (AtomicOr8 ptr val mem)
	// cond:
	// result: (LoweredAtomicOr8  ptr val mem)
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64LoweredAtomicOr8)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
}

// rewriteValueARM64_OpAtomicStore32_0 lowers AtomicStore32 to STLRW
// (32-bit store-release).
func rewriteValueARM64_OpAtomicStore32_0(v *Value) bool {
	// match: (AtomicStore32 ptr val mem)
	// cond:
	// result: (STLRW ptr val mem)
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64STLRW)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
}

// rewriteValueARM64_OpAtomicStore64_0 lowers AtomicStore64 to STLR
// (64-bit store-release).
func rewriteValueARM64_OpAtomicStore64_0(v *Value) bool {
	// match: (AtomicStore64 ptr val mem)
	// cond:
	// result: (STLR  ptr val mem)
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64STLR)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
}

// rewriteValueARM64_OpAtomicStorePtrNoWB_0 lowers AtomicStorePtrNoWB to
// STLR (pointer-sized store-release, no write barrier).
func rewriteValueARM64_OpAtomicStorePtrNoWB_0(v *Value) bool {
	// match: (AtomicStorePtrNoWB ptr val mem)
	// cond:
	// result: (STLR  ptr val mem)
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64STLR)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
}
// rewriteValueARM64_OpAvg64u_0 lowers the unsigned average
// (x+y)/2 without overflow as y + (x-y)>>1.
// Generated from gen/ARM64.rules.
func rewriteValueARM64_OpAvg64u_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Avg64u <t> x y)
	// cond:
	// result: (ADD (SRLconst <t> (SUB <t> x y) [1]) y)
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64ADD)
		v0 := b.NewValue0(v.Pos, OpARM64SRLconst, t)
		v0.AuxInt = 1
		v1 := b.NewValue0(v.Pos, OpARM64SUB, t)
		v1.AddArg(x)
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v.AddArg(y)
		return true
	}
}

// rewriteValueARM64_OpBitLen64_0 lowers BitLen64 as 64 - CLZ(x)
// (count leading zeros).
func rewriteValueARM64_OpBitLen64_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (BitLen64 x)
	// cond:
	// result: (SUB (MOVDconst [64]) (CLZ <typ.Int> x))
	for {
		x := v.Args[0]
		v.reset(OpARM64SUB)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = 64
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64CLZ, typ.Int)
		v1.AddArg(x)
		v.AddArg(v1)
		return true
	}
}

// rewriteValueARM64_OpBitRev16_0 lowers BitRev16 as a full 64-bit RBIT
// followed by a logical right shift of 48 to keep the low 16 bits.
func rewriteValueARM64_OpBitRev16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (BitRev16 x)
	// cond:
	// result: (SRLconst [48] (RBIT <typ.UInt64> x))
	for {
		x := v.Args[0]
		v.reset(OpARM64SRLconst)
		v.AuxInt = 48
		v0 := b.NewValue0(v.Pos, OpARM64RBIT, typ.UInt64)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpBitRev32_0 lowers BitRev32 to the 32-bit RBITW.
func rewriteValueARM64_OpBitRev32_0(v *Value) bool {
	// match: (BitRev32 x)
	// cond:
	// result: (RBITW x)
	for {
		x := v.Args[0]
		v.reset(OpARM64RBITW)
		v.AddArg(x)
		return true
	}
}

// rewriteValueARM64_OpBitRev64_0 lowers BitRev64 to RBIT.
func rewriteValueARM64_OpBitRev64_0(v *Value) bool {
	// match: (BitRev64 x)
	// cond:
	// result: (RBIT x)
	for {
		x := v.Args[0]
		v.reset(OpARM64RBIT)
		v.AddArg(x)
		return true
	}
}

// rewriteValueARM64_OpBitRev8_0 lowers BitRev8 as a full 64-bit RBIT
// followed by a logical right shift of 56 to keep the low 8 bits.
func rewriteValueARM64_OpBitRev8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (BitRev8 x)
	// cond:
	// result: (SRLconst [56] (RBIT <typ.UInt64> x))
	for {
		x := v.Args[0]
		v.reset(OpARM64SRLconst)
		v.AuxInt = 56
		v0 := b.NewValue0(v.Pos, OpARM64RBIT, typ.UInt64)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpBswap32_0 lowers Bswap32 to REVW (32-bit byte reverse).
func rewriteValueARM64_OpBswap32_0(v *Value) bool {
	// match: (Bswap32 x)
	// cond:
	// result: (REVW x)
	for {
		x := v.Args[0]
		v.reset(OpARM64REVW)
		v.AddArg(x)
		return true
	}
}

// rewriteValueARM64_OpBswap64_0 lowers Bswap64 to REV (64-bit byte reverse).
func rewriteValueARM64_OpBswap64_0(v *Value) bool {
	// match: (Bswap64 x)
	// cond:
	// result: (REV x)
	for {
		x := v.Args[0]
		v.reset(OpARM64REV)
		v.AddArg(x)
		return true
	}
}
// rewriteValueARM64_OpClosureCall_0 lowers ClosureCall to CALLclosure,
// carrying the argument width through in AuxInt.
// Generated from gen/ARM64.rules.
func rewriteValueARM64_OpClosureCall_0(v *Value) bool {
	// match: (ClosureCall [argwid] entry closure mem)
	// cond:
	// result: (CALLclosure [argwid] entry closure mem)
	for {
		argwid := v.AuxInt
		_ = v.Args[2]
		entry := v.Args[0]
		closure := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64CALLclosure)
		v.AuxInt = argwid
		v.AddArg(entry)
		v.AddArg(closure)
		v.AddArg(mem)
		return true
	}
}

// rewriteValueARM64_OpCom16_0 lowers Com16 (bitwise complement) to MVN.
func rewriteValueARM64_OpCom16_0(v *Value) bool {
	// match: (Com16 x)
	// cond:
	// result: (MVN x)
	for {
		x := v.Args[0]
		v.reset(OpARM64MVN)
		v.AddArg(x)
		return true
	}
}

// rewriteValueARM64_OpCom32_0 lowers Com32 to MVN.
func rewriteValueARM64_OpCom32_0(v *Value) bool {
	// match: (Com32 x)
	// cond:
	// result: (MVN x)
	for {
		x := v.Args[0]
		v.reset(OpARM64MVN)
		v.AddArg(x)
		return true
	}
}

// rewriteValueARM64_OpCom64_0 lowers Com64 to MVN.
func rewriteValueARM64_OpCom64_0(v *Value) bool {
	// match: (Com64 x)
	// cond:
	// result: (MVN x)
	for {
		x := v.Args[0]
		v.reset(OpARM64MVN)
		v.AddArg(x)
		return true
	}
}

// rewriteValueARM64_OpCom8_0 lowers Com8 to MVN.
func rewriteValueARM64_OpCom8_0(v *Value) bool {
	// match: (Com8 x)
	// cond:
	// result: (MVN x)
	for {
		x := v.Args[0]
		v.reset(OpARM64MVN)
		v.AddArg(x)
		return true
	}
}

// rewriteValueARM64_OpConst16_0 lowers Const16 to MOVDconst.
func rewriteValueARM64_OpConst16_0(v *Value) bool {
	// match: (Const16 [val])
	// cond:
	// result: (MOVDconst [val])
	for {
		val := v.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = val
		return true
	}
}

// rewriteValueARM64_OpConst32_0 lowers Const32 to MOVDconst.
func rewriteValueARM64_OpConst32_0(v *Value) bool {
	// match: (Const32 [val])
	// cond:
	// result: (MOVDconst [val])
	for {
		val := v.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = val
		return true
	}
}

// rewriteValueARM64_OpConst32F_0 lowers Const32F to FMOVSconst.
func rewriteValueARM64_OpConst32F_0(v *Value) bool {
	// match: (Const32F [val])
	// cond:
	// result: (FMOVSconst [val])
	for {
		val := v.AuxInt
		v.reset(OpARM64FMOVSconst)
		v.AuxInt = val
		return true
	}
}

// rewriteValueARM64_OpConst64_0 lowers Const64 to MOVDconst.
func rewriteValueARM64_OpConst64_0(v *Value) bool {
	// match: (Const64 [val])
	// cond:
	// result: (MOVDconst [val])
	for {
		val := v.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = val
		return true
	}
}

// rewriteValueARM64_OpConst64F_0 lowers Const64F to FMOVDconst.
func rewriteValueARM64_OpConst64F_0(v *Value) bool {
	// match: (Const64F [val])
	// cond:
	// result: (FMOVDconst [val])
	for {
		val := v.AuxInt
		v.reset(OpARM64FMOVDconst)
		v.AuxInt = val
		return true
	}
}

// rewriteValueARM64_OpConst8_0 lowers Const8 to MOVDconst.
func rewriteValueARM64_OpConst8_0(v *Value) bool {
	// match: (Const8 [val])
	// cond:
	// result: (MOVDconst [val])
	for {
		val := v.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = val
		return true
	}
}

// rewriteValueARM64_OpConstBool_0 lowers ConstBool to MOVDconst.
func rewriteValueARM64_OpConstBool_0(v *Value) bool {
	// match: (ConstBool [b])
	// cond:
	// result: (MOVDconst [b])
	for {
		b := v.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = b
		return true
	}
}

// rewriteValueARM64_OpConstNil_0 lowers ConstNil to (MOVDconst [0]).
func rewriteValueARM64_OpConstNil_0(v *Value) bool {
	// match: (ConstNil)
	// cond:
	// result: (MOVDconst [0])
	for {
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
}
// rewriteValueARM64_OpConvert_0 lowers Convert to MOVDconvert.
// Generated from gen/ARM64.rules.
func rewriteValueARM64_OpConvert_0(v *Value) bool {
	// match: (Convert x mem)
	// cond:
	// result: (MOVDconvert x mem)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		mem := v.Args[1]
		v.reset(OpARM64MOVDconvert)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
}

// rewriteValueARM64_OpCtz32_0 lowers count-trailing-zeros as a 32-bit
// bit reverse followed by count-leading-zeros.
func rewriteValueARM64_OpCtz32_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Ctz32 <t> x)
	// cond:
	// result: (CLZW (RBITW <t> x))
	for {
		t := v.Type
		x := v.Args[0]
		v.reset(OpARM64CLZW)
		v0 := b.NewValue0(v.Pos, OpARM64RBITW, t)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpCtz64_0 lowers count-trailing-zeros as a 64-bit
// bit reverse followed by count-leading-zeros.
func rewriteValueARM64_OpCtz64_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Ctz64 <t> x)
	// cond:
	// result: (CLZ (RBIT <t> x))
	for {
		t := v.Type
		x := v.Args[0]
		v.reset(OpARM64CLZ)
		v0 := b.NewValue0(v.Pos, OpARM64RBIT, t)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}

// The following Cvt* lowerings map generic float/int conversions onto
// the ARM64 FCVT*/SCVTF*/UCVTF* conversion ops.

// rewriteValueARM64_OpCvt32Fto32_0 lowers Cvt32Fto32 to FCVTZSSW
// (float32 -> signed int32).
func rewriteValueARM64_OpCvt32Fto32_0(v *Value) bool {
	// match: (Cvt32Fto32 x)
	// cond:
	// result: (FCVTZSSW x)
	for {
		x := v.Args[0]
		v.reset(OpARM64FCVTZSSW)
		v.AddArg(x)
		return true
	}
}

// rewriteValueARM64_OpCvt32Fto32U_0 lowers Cvt32Fto32U to FCVTZUSW
// (float32 -> unsigned int32).
func rewriteValueARM64_OpCvt32Fto32U_0(v *Value) bool {
	// match: (Cvt32Fto32U x)
	// cond:
	// result: (FCVTZUSW x)
	for {
		x := v.Args[0]
		v.reset(OpARM64FCVTZUSW)
		v.AddArg(x)
		return true
	}
}

// rewriteValueARM64_OpCvt32Fto64_0 lowers Cvt32Fto64 to FCVTZSS
// (float32 -> signed int64).
func rewriteValueARM64_OpCvt32Fto64_0(v *Value) bool {
	// match: (Cvt32Fto64 x)
	// cond:
	// result: (FCVTZSS x)
	for {
		x := v.Args[0]
		v.reset(OpARM64FCVTZSS)
		v.AddArg(x)
		return true
	}
}

// rewriteValueARM64_OpCvt32Fto64F_0 lowers Cvt32Fto64F to FCVTSD
// (float32 -> float64).
func rewriteValueARM64_OpCvt32Fto64F_0(v *Value) bool {
	// match: (Cvt32Fto64F x)
	// cond:
	// result: (FCVTSD x)
	for {
		x := v.Args[0]
		v.reset(OpARM64FCVTSD)
		v.AddArg(x)
		return true
	}
}

// rewriteValueARM64_OpCvt32Fto64U_0 lowers Cvt32Fto64U to FCVTZUS
// (float32 -> unsigned int64).
func rewriteValueARM64_OpCvt32Fto64U_0(v *Value) bool {
	// match: (Cvt32Fto64U x)
	// cond:
	// result: (FCVTZUS x)
	for {
		x := v.Args[0]
		v.reset(OpARM64FCVTZUS)
		v.AddArg(x)
		return true
	}
}

// rewriteValueARM64_OpCvt32Uto32F_0 lowers Cvt32Uto32F to UCVTFWS
// (unsigned int32 -> float32).
func rewriteValueARM64_OpCvt32Uto32F_0(v *Value) bool {
	// match: (Cvt32Uto32F x)
	// cond:
	// result: (UCVTFWS x)
	for {
		x := v.Args[0]
		v.reset(OpARM64UCVTFWS)
		v.AddArg(x)
		return true
	}
}

// rewriteValueARM64_OpCvt32Uto64F_0 lowers Cvt32Uto64F to UCVTFWD
// (unsigned int32 -> float64).
func rewriteValueARM64_OpCvt32Uto64F_0(v *Value) bool {
	// match: (Cvt32Uto64F x)
	// cond:
	// result: (UCVTFWD x)
	for {
		x := v.Args[0]
		v.reset(OpARM64UCVTFWD)
		v.AddArg(x)
		return true
	}
}

// rewriteValueARM64_OpCvt32to32F_0 lowers Cvt32to32F to SCVTFWS
// (signed int32 -> float32).
func rewriteValueARM64_OpCvt32to32F_0(v *Value) bool {
	// match: (Cvt32to32F x)
	// cond:
	// result: (SCVTFWS x)
	for {
		x := v.Args[0]
		v.reset(OpARM64SCVTFWS)
		v.AddArg(x)
		return true
	}
}

// rewriteValueARM64_OpCvt32to64F_0 lowers Cvt32to64F to SCVTFWD
// (signed int32 -> float64).
func rewriteValueARM64_OpCvt32to64F_0(v *Value) bool {
	// match: (Cvt32to64F x)
	// cond:
	// result: (SCVTFWD x)
	for {
		x := v.Args[0]
		v.reset(OpARM64SCVTFWD)
		v.AddArg(x)
		return true
	}
}
 11047  func rewriteValueARM64_OpCvt64Fto32_0(v *Value) bool {
 11048  	// match: (Cvt64Fto32 x)
 11049  	// cond:
 11050  	// result: (FCVTZSDW x)
 11051  	for {
 11052  		x := v.Args[0]
 11053  		v.reset(OpARM64FCVTZSDW)
 11054  		v.AddArg(x)
 11055  		return true
 11056  	}
 11057  }
 11058  func rewriteValueARM64_OpCvt64Fto32F_0(v *Value) bool {
 11059  	// match: (Cvt64Fto32F x)
 11060  	// cond:
 11061  	// result: (FCVTDS x)
 11062  	for {
 11063  		x := v.Args[0]
 11064  		v.reset(OpARM64FCVTDS)
 11065  		v.AddArg(x)
 11066  		return true
 11067  	}
 11068  }
 11069  func rewriteValueARM64_OpCvt64Fto32U_0(v *Value) bool {
 11070  	// match: (Cvt64Fto32U x)
 11071  	// cond:
 11072  	// result: (FCVTZUDW x)
 11073  	for {
 11074  		x := v.Args[0]
 11075  		v.reset(OpARM64FCVTZUDW)
 11076  		v.AddArg(x)
 11077  		return true
 11078  	}
 11079  }
 11080  func rewriteValueARM64_OpCvt64Fto64_0(v *Value) bool {
 11081  	// match: (Cvt64Fto64 x)
 11082  	// cond:
 11083  	// result: (FCVTZSD x)
 11084  	for {
 11085  		x := v.Args[0]
 11086  		v.reset(OpARM64FCVTZSD)
 11087  		v.AddArg(x)
 11088  		return true
 11089  	}
 11090  }
 11091  func rewriteValueARM64_OpCvt64Fto64U_0(v *Value) bool {
 11092  	// match: (Cvt64Fto64U x)
 11093  	// cond:
 11094  	// result: (FCVTZUD x)
 11095  	for {
 11096  		x := v.Args[0]
 11097  		v.reset(OpARM64FCVTZUD)
 11098  		v.AddArg(x)
 11099  		return true
 11100  	}
 11101  }
 11102  func rewriteValueARM64_OpCvt64Uto32F_0(v *Value) bool {
 11103  	// match: (Cvt64Uto32F x)
 11104  	// cond:
 11105  	// result: (UCVTFS x)
 11106  	for {
 11107  		x := v.Args[0]
 11108  		v.reset(OpARM64UCVTFS)
 11109  		v.AddArg(x)
 11110  		return true
 11111  	}
 11112  }
 11113  func rewriteValueARM64_OpCvt64Uto64F_0(v *Value) bool {
 11114  	// match: (Cvt64Uto64F x)
 11115  	// cond:
 11116  	// result: (UCVTFD x)
 11117  	for {
 11118  		x := v.Args[0]
 11119  		v.reset(OpARM64UCVTFD)
 11120  		v.AddArg(x)
 11121  		return true
 11122  	}
 11123  }
 11124  func rewriteValueARM64_OpCvt64to32F_0(v *Value) bool {
 11125  	// match: (Cvt64to32F x)
 11126  	// cond:
 11127  	// result: (SCVTFS x)
 11128  	for {
 11129  		x := v.Args[0]
 11130  		v.reset(OpARM64SCVTFS)
 11131  		v.AddArg(x)
 11132  		return true
 11133  	}
 11134  }
 11135  func rewriteValueARM64_OpCvt64to64F_0(v *Value) bool {
 11136  	// match: (Cvt64to64F x)
 11137  	// cond:
 11138  	// result: (SCVTFD x)
 11139  	for {
 11140  		x := v.Args[0]
 11141  		v.reset(OpARM64SCVTFD)
 11142  		v.AddArg(x)
 11143  		return true
 11144  	}
 11145  }
// rewriteValueARM64_OpDiv16_0 lowers Div16 by sign-extending both 16-bit
// operands to 32 bits and emitting a 32-bit signed divide (DIVW); ARM64 has
// no 16-bit divide instruction.
func rewriteValueARM64_OpDiv16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Div16 x y)
	// cond:
	// result: (DIVW (SignExt16to32 x) (SignExt16to32 y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64DIVW)
		v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueARM64_OpDiv16u_0 lowers Div16u by zero-extending both operands
// to 32 bits and emitting a 32-bit unsigned divide (UDIVW).
func rewriteValueARM64_OpDiv16u_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Div16u x y)
	// cond:
	// result: (UDIVW (ZeroExt16to32 x) (ZeroExt16to32 y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64UDIVW)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueARM64_OpDiv32_0 lowers Div32 directly to the 32-bit signed
// divide DIVW; no extension needed.
func rewriteValueARM64_OpDiv32_0(v *Value) bool {
	// match: (Div32 x y)
	// cond:
	// result: (DIVW x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64DIVW)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueARM64_OpDiv32F_0 lowers Div32F (float32 division) to FDIVS.
func rewriteValueARM64_OpDiv32F_0(v *Value) bool {
	// match: (Div32F x y)
	// cond:
	// result: (FDIVS x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64FDIVS)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueARM64_OpDiv32u_0 lowers Div32u to the 32-bit unsigned divide
// UDIVW.
func rewriteValueARM64_OpDiv32u_0(v *Value) bool {
	// match: (Div32u x y)
	// cond:
	// result: (UDIVW x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64UDIVW)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueARM64_OpDiv64_0 lowers Div64 to the 64-bit signed divide DIV.
func rewriteValueARM64_OpDiv64_0(v *Value) bool {
	// match: (Div64 x y)
	// cond:
	// result: (DIV x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64DIV)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueARM64_OpDiv64F_0 lowers Div64F (float64 division) to FDIVD.
func rewriteValueARM64_OpDiv64F_0(v *Value) bool {
	// match: (Div64F x y)
	// cond:
	// result: (FDIVD x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64FDIVD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueARM64_OpDiv64u_0 lowers Div64u to the 64-bit unsigned divide
// UDIV.
func rewriteValueARM64_OpDiv64u_0(v *Value) bool {
	// match: (Div64u x y)
	// cond:
	// result: (UDIV x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64UDIV)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueARM64_OpDiv8_0 lowers Div8 by sign-extending both 8-bit
// operands to 32 bits and emitting DIVW (same scheme as Div16).
func rewriteValueARM64_OpDiv8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Div8 x y)
	// cond:
	// result: (DIVW (SignExt8to32 x) (SignExt8to32 y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64DIVW)
		v0 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueARM64_OpDiv8u_0 lowers Div8u by zero-extending both operands to
// 32 bits and emitting UDIVW (same scheme as Div16u).
func rewriteValueARM64_OpDiv8u_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Div8u x y)
	// cond:
	// result: (UDIVW (ZeroExt8to32 x) (ZeroExt8to32 y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64UDIVW)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueARM64_OpEq16_0 lowers Eq16 to Equal over a 32-bit compare of
// the zero-extended operands; extension choice is irrelevant for equality,
// and zero-extension is used here.
func rewriteValueARM64_OpEq16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Eq16 x y)
	// cond:
	// result: (Equal (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpEq32_0 lowers Eq32 to Equal of a 32-bit compare (CMPW),
// which produces flags consumed by the Equal pseudo-op.
func rewriteValueARM64_OpEq32_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Eq32 x y)
	// cond:
	// result: (Equal (CMPW x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpEq32F_0 lowers Eq32F to Equal of a float32 compare
// (FCMPS).
func rewriteValueARM64_OpEq32F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Eq32F x y)
	// cond:
	// result: (Equal (FCMPS x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Pos, OpARM64FCMPS, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpEq64_0 lowers Eq64 to Equal of a 64-bit compare (CMP).
func rewriteValueARM64_OpEq64_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Eq64 x y)
	// cond:
	// result: (Equal (CMP x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpEq64F_0 lowers Eq64F to Equal of a float64 compare
// (FCMPD).
func rewriteValueARM64_OpEq64F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Eq64F x y)
	// cond:
	// result: (Equal (FCMPD x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Pos, OpARM64FCMPD, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpEq8_0 lowers Eq8 to Equal over a 32-bit compare of the
// zero-extended 8-bit operands (same scheme as Eq16).
func rewriteValueARM64_OpEq8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Eq8 x y)
	// cond:
	// result: (Equal (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpEqB_0 lowers EqB (boolean equality) as
// 1 XOR (x XOR y): the inner XOR is 0 when the booleans match, and XORing
// with 1 inverts that into the boolean result.
func rewriteValueARM64_OpEqB_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (EqB x y)
	// cond:
	// result: (XOR (MOVDconst [1]) (XOR <typ.Bool> x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64XOR)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64XOR, typ.Bool)
		v1.AddArg(x)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueARM64_OpEqPtr_0 lowers EqPtr to Equal of a full-width CMP,
// since pointers are 64-bit on ARM64.
func rewriteValueARM64_OpEqPtr_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (EqPtr x y)
	// cond:
	// result: (Equal (CMP x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpGeq16_0 lowers Geq16 (signed >=) by sign-extending the
// 16-bit operands to 32 bits and testing GreaterEqual on a CMPW; sign
// extension preserves signed ordering.
func rewriteValueARM64_OpGeq16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Geq16 x y)
	// cond:
	// result: (GreaterEqual (CMPW (SignExt16to32 x) (SignExt16to32 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterEqual)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpGeq16U_0 lowers Geq16U (unsigned >=) by zero-extending
// the operands and testing GreaterEqualU on a CMPW.
func rewriteValueARM64_OpGeq16U_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Geq16U x y)
	// cond:
	// result: (GreaterEqualU (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterEqualU)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpGeq32_0 lowers Geq32 to GreaterEqual of a 32-bit CMPW.
func rewriteValueARM64_OpGeq32_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Geq32 x y)
	// cond:
	// result: (GreaterEqual (CMPW x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterEqual)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpGeq32F_0 lowers Geq32F to GreaterEqual of a float32
// compare (FCMPS).
func rewriteValueARM64_OpGeq32F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Geq32F x y)
	// cond:
	// result: (GreaterEqual (FCMPS x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterEqual)
		v0 := b.NewValue0(v.Pos, OpARM64FCMPS, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpGeq32U_0 lowers Geq32U to GreaterEqualU of a CMPW.
func rewriteValueARM64_OpGeq32U_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Geq32U x y)
	// cond:
	// result: (GreaterEqualU (CMPW x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterEqualU)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpGeq64_0 lowers Geq64 to GreaterEqual of a 64-bit CMP.
func rewriteValueARM64_OpGeq64_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Geq64 x y)
	// cond:
	// result: (GreaterEqual (CMP x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterEqual)
		v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpGeq64F_0 lowers Geq64F to GreaterEqual of a float64
// compare (FCMPD).
func rewriteValueARM64_OpGeq64F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Geq64F x y)
	// cond:
	// result: (GreaterEqual (FCMPD x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterEqual)
		v0 := b.NewValue0(v.Pos, OpARM64FCMPD, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpGeq64U_0 lowers Geq64U to GreaterEqualU of a CMP.
func rewriteValueARM64_OpGeq64U_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Geq64U x y)
	// cond:
	// result: (GreaterEqualU (CMP x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterEqualU)
		v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpGeq8_0 lowers Geq8 (signed >=) by sign-extending the
// 8-bit operands to 32 bits (same scheme as Geq16).
func rewriteValueARM64_OpGeq8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Geq8 x y)
	// cond:
	// result: (GreaterEqual (CMPW (SignExt8to32 x) (SignExt8to32 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterEqual)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpGeq8U_0 lowers Geq8U (unsigned >=) by zero-extending
// the 8-bit operands to 32 bits (same scheme as Geq16U).
func rewriteValueARM64_OpGeq8U_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Geq8U x y)
	// cond:
	// result: (GreaterEqualU (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterEqualU)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpGetClosurePtr_0 lowers the zero-argument GetClosurePtr
// op to the ARM64 LoweredGetClosurePtr pseudo-op.
func rewriteValueARM64_OpGetClosurePtr_0(v *Value) bool {
	// match: (GetClosurePtr)
	// cond:
	// result: (LoweredGetClosurePtr)
	for {
		v.reset(OpARM64LoweredGetClosurePtr)
		return true
	}
}
// rewriteValueARM64_OpGreater16_0 lowers Greater16 (signed >) by
// sign-extending the 16-bit operands to 32 bits and testing GreaterThan on a
// CMPW.
func rewriteValueARM64_OpGreater16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Greater16 x y)
	// cond:
	// result: (GreaterThan (CMPW (SignExt16to32 x) (SignExt16to32 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterThan)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpGreater16U_0 lowers Greater16U (unsigned >) by
// zero-extending the operands and testing GreaterThanU on a CMPW.
func rewriteValueARM64_OpGreater16U_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Greater16U x y)
	// cond:
	// result: (GreaterThanU (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterThanU)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpGreater32_0 lowers Greater32 to GreaterThan of a CMPW.
func rewriteValueARM64_OpGreater32_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Greater32 x y)
	// cond:
	// result: (GreaterThan (CMPW x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterThan)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpGreater32F_0 lowers Greater32F to GreaterThan of a
// float32 compare (FCMPS).
func rewriteValueARM64_OpGreater32F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Greater32F x y)
	// cond:
	// result: (GreaterThan (FCMPS x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterThan)
		v0 := b.NewValue0(v.Pos, OpARM64FCMPS, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpGreater32U_0 lowers Greater32U to GreaterThanU of a
// CMPW.
func rewriteValueARM64_OpGreater32U_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Greater32U x y)
	// cond:
	// result: (GreaterThanU (CMPW x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterThanU)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpGreater64_0 lowers Greater64 to GreaterThan of a CMP.
func rewriteValueARM64_OpGreater64_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Greater64 x y)
	// cond:
	// result: (GreaterThan (CMP x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterThan)
		v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpGreater64F_0 lowers Greater64F to GreaterThan of a
// float64 compare (FCMPD).
func rewriteValueARM64_OpGreater64F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Greater64F x y)
	// cond:
	// result: (GreaterThan (FCMPD x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterThan)
		v0 := b.NewValue0(v.Pos, OpARM64FCMPD, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpGreater64U_0 lowers Greater64U to GreaterThanU of a
// CMP.
func rewriteValueARM64_OpGreater64U_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Greater64U x y)
	// cond:
	// result: (GreaterThanU (CMP x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterThanU)
		v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpGreater8_0 lowers Greater8 (signed >) by sign-extending
// the 8-bit operands to 32 bits (same scheme as Greater16).
func rewriteValueARM64_OpGreater8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Greater8 x y)
	// cond:
	// result: (GreaterThan (CMPW (SignExt8to32 x) (SignExt8to32 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterThan)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpGreater8U_0 lowers Greater8U (unsigned >) by
// zero-extending the 8-bit operands to 32 bits (same scheme as Greater16U).
func rewriteValueARM64_OpGreater8U_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Greater8U x y)
	// cond:
	// result: (GreaterThanU (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterThanU)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpHmul32_0 lowers Hmul32 (high 32 bits of a signed 32x32
// multiply) as a full 64-bit MULL followed by an arithmetic right shift of 32
// to extract the upper half.
func rewriteValueARM64_OpHmul32_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Hmul32 x y)
	// cond:
	// result: (SRAconst (MULL <typ.Int64> x y) [32])
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SRAconst)
		v.AuxInt = 32
		v0 := b.NewValue0(v.Pos, OpARM64MULL, typ.Int64)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpHmul32u_0 lowers Hmul32u (high 32 bits of an unsigned
// 32x32 multiply) as a 64-bit UMULL shifted right by 32. The shift is
// SRAconst, but the top 32 bits of the UMULL result are zero, so arithmetic
// vs. logical shift makes no difference here.
func rewriteValueARM64_OpHmul32u_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Hmul32u x y)
	// cond:
	// result: (SRAconst (UMULL <typ.UInt64> x y) [32])
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SRAconst)
		v.AuxInt = 32
		v0 := b.NewValue0(v.Pos, OpARM64UMULL, typ.UInt64)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpHmul64_0 lowers Hmul64 directly to MULH (signed
// high-multiply instruction).
func rewriteValueARM64_OpHmul64_0(v *Value) bool {
	// match: (Hmul64 x y)
	// cond:
	// result: (MULH x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64MULH)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueARM64_OpHmul64u_0 lowers Hmul64u directly to UMULH (unsigned
// high-multiply instruction).
func rewriteValueARM64_OpHmul64u_0(v *Value) bool {
	// match: (Hmul64u x y)
	// cond:
	// result: (UMULH x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64UMULH)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueARM64_OpInterCall_0 lowers InterCall (call through an interface
// method value) to CALLinter, preserving the argument width (AuxInt), call
// target, and memory argument.
func rewriteValueARM64_OpInterCall_0(v *Value) bool {
	// match: (InterCall [argwid] entry mem)
	// cond:
	// result: (CALLinter [argwid] entry mem)
	for {
		argwid := v.AuxInt
		_ = v.Args[1]
		entry := v.Args[0]
		mem := v.Args[1]
		v.reset(OpARM64CALLinter)
		v.AuxInt = argwid
		v.AddArg(entry)
		v.AddArg(mem)
		return true
	}
}
// rewriteValueARM64_OpIsInBounds_0 lowers IsInBounds (0 <= idx < len) to an
// unsigned LessThanU of CMP idx len: the unsigned compare also rejects
// negative idx, which wraps to a huge unsigned value.
func rewriteValueARM64_OpIsInBounds_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (IsInBounds idx len)
	// cond:
	// result: (LessThanU (CMP idx len))
	for {
		_ = v.Args[1]
		idx := v.Args[0]
		len := v.Args[1]
		v.reset(OpARM64LessThanU)
		v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
		v0.AddArg(idx)
		v0.AddArg(len)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpIsNonNil_0 lowers IsNonNil to NotEqual of a compare of
// the pointer against the constant 0.
func rewriteValueARM64_OpIsNonNil_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (IsNonNil ptr)
	// cond:
	// result: (NotEqual (CMPconst [0] ptr))
	for {
		ptr := v.Args[0]
		v.reset(OpARM64NotEqual)
		v0 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v0.AuxInt = 0
		v0.AddArg(ptr)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpIsSliceInBounds_0 lowers IsSliceInBounds (0 <= idx <=
// len) to an unsigned LessEqualU of CMP idx len — like IsInBounds but
// inclusive of len, as slicing may extend to the end.
func rewriteValueARM64_OpIsSliceInBounds_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (IsSliceInBounds idx len)
	// cond:
	// result: (LessEqualU (CMP idx len))
	for {
		_ = v.Args[1]
		idx := v.Args[0]
		len := v.Args[1]
		v.reset(OpARM64LessEqualU)
		v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
		v0.AddArg(idx)
		v0.AddArg(len)
		v.AddArg(v0)
		return true
	}
}
 12034  func rewriteValueARM64_OpLeq16_0(v *Value) bool {
 12035  	b := v.Block
 12036  	_ = b
 12037  	typ := &b.Func.Config.Types
 12038  	_ = typ
 12039  	// match: (Leq16 x y)
 12040  	// cond:
 12041  	// result: (LessEqual (CMPW (SignExt16to32 x) (SignExt16to32 y)))
 12042  	for {
 12043  		_ = v.Args[1]
 12044  		x := v.Args[0]
 12045  		y := v.Args[1]
 12046  		v.reset(OpARM64LessEqual)
 12047  		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
 12048  		v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
 12049  		v1.AddArg(x)
 12050  		v0.AddArg(v1)
 12051  		v2 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
 12052  		v2.AddArg(y)
 12053  		v0.AddArg(v2)
 12054  		v.AddArg(v0)
 12055  		return true
 12056  	}
 12057  }
 12058  func rewriteValueARM64_OpLeq16U_0(v *Value) bool {
 12059  	b := v.Block
 12060  	_ = b
 12061  	typ := &b.Func.Config.Types
 12062  	_ = typ
 12063  	// match: (Leq16U x y)
 12064  	// cond:
 12065  	// result: (LessEqualU (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y)))
 12066  	for {
 12067  		_ = v.Args[1]
 12068  		x := v.Args[0]
 12069  		y := v.Args[1]
 12070  		v.reset(OpARM64LessEqualU)
 12071  		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
 12072  		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
 12073  		v1.AddArg(x)
 12074  		v0.AddArg(v1)
 12075  		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
 12076  		v2.AddArg(y)
 12077  		v0.AddArg(v2)
 12078  		v.AddArg(v0)
 12079  		return true
 12080  	}
 12081  }
 12082  func rewriteValueARM64_OpLeq32_0(v *Value) bool {
 12083  	b := v.Block
 12084  	_ = b
 12085  	// match: (Leq32 x y)
 12086  	// cond:
 12087  	// result: (LessEqual (CMPW x y))
 12088  	for {
 12089  		_ = v.Args[1]
 12090  		x := v.Args[0]
 12091  		y := v.Args[1]
 12092  		v.reset(OpARM64LessEqual)
 12093  		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
 12094  		v0.AddArg(x)
 12095  		v0.AddArg(y)
 12096  		v.AddArg(v0)
 12097  		return true
 12098  	}
 12099  }
// rewriteValueARM64_OpLeq32F_0 lowers the generic Leq32F op. Note the
// operand swap: x <= y is rewritten as GreaterEqual over (FCMPS y x),
// i.e. y >= x, per the generator's ARM64 float-compare rules.
// Unconditional, always returns true.
func rewriteValueARM64_OpLeq32F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Leq32F x y)
	// cond:
	// result: (GreaterEqual (FCMPS y x))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterEqual)
		v0 := b.NewValue0(v.Pos, OpARM64FCMPS, types.TypeFlags)
		v0.AddArg(y)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpLeq32U_0 lowers the generic Leq32U op to a 32-bit
// compare consumed by the unsigned condition LessEqualU. Unconditional,
// always returns true.
func rewriteValueARM64_OpLeq32U_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Leq32U x y)
	// cond:
	// result: (LessEqualU (CMPW x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64LessEqualU)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpLeq64_0 lowers the generic Leq64 op to a 64-bit
// signed compare: LessEqual over (CMP x y). Unconditional, always
// returns true.
func rewriteValueARM64_OpLeq64_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Leq64 x y)
	// cond:
	// result: (LessEqual (CMP x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64LessEqual)
		v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpLeq64F_0 lowers the generic Leq64F op. As with
// Leq32F, the operands are swapped: x <= y becomes GreaterEqual over
// (FCMPD y x). Unconditional, always returns true.
func rewriteValueARM64_OpLeq64F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Leq64F x y)
	// cond:
	// result: (GreaterEqual (FCMPD y x))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterEqual)
		v0 := b.NewValue0(v.Pos, OpARM64FCMPD, types.TypeFlags)
		v0.AddArg(y)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpLeq64U_0 lowers the generic Leq64U op to a 64-bit
// compare consumed by the unsigned condition LessEqualU. Unconditional,
// always returns true.
func rewriteValueARM64_OpLeq64U_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Leq64U x y)
	// cond:
	// result: (LessEqualU (CMP x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64LessEqualU)
		v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpLeq8_0 lowers the generic Leq8 op: both operands are
// sign-extended to 32 bits, compared with CMPW, and the flags are consumed
// by LessEqual. Unconditional, always returns true.
func rewriteValueARM64_OpLeq8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Leq8 x y)
	// cond:
	// result: (LessEqual (CMPW (SignExt8to32 x) (SignExt8to32 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64LessEqual)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpLeq8U_0 lowers the generic Leq8U op: both operands
// are zero-extended to 32 bits, compared with CMPW, and the flags are
// consumed by LessEqualU. Unconditional, always returns true.
func rewriteValueARM64_OpLeq8U_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Leq8U x y)
	// cond:
	// result: (LessEqualU (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64LessEqualU)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpLess16_0 lowers the generic Less16 op: both operands
// are sign-extended to 32 bits, compared with CMPW, and the flags are
// consumed by LessThan. Unconditional, always returns true.
func rewriteValueARM64_OpLess16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Less16 x y)
	// cond:
	// result: (LessThan (CMPW (SignExt16to32 x) (SignExt16to32 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64LessThan)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpLess16U_0 lowers the generic Less16U op: both
// operands are zero-extended to 32 bits, compared with CMPW, and the flags
// are consumed by LessThanU. Unconditional, always returns true.
func rewriteValueARM64_OpLess16U_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Less16U x y)
	// cond:
	// result: (LessThanU (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64LessThanU)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpLess32_0 lowers the generic Less32 op to a 32-bit
// signed compare: LessThan over (CMPW x y). Unconditional, always
// returns true.
func rewriteValueARM64_OpLess32_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Less32 x y)
	// cond:
	// result: (LessThan (CMPW x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64LessThan)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpLess32F_0 lowers the generic Less32F op. Operands
// are swapped: x < y becomes GreaterThan over (FCMPS y x), i.e. y > x.
// Unconditional, always returns true.
func rewriteValueARM64_OpLess32F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Less32F x y)
	// cond:
	// result: (GreaterThan (FCMPS y x))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterThan)
		v0 := b.NewValue0(v.Pos, OpARM64FCMPS, types.TypeFlags)
		v0.AddArg(y)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpLess32U_0 lowers the generic Less32U op to a 32-bit
// compare consumed by the unsigned condition LessThanU. Unconditional,
// always returns true.
func rewriteValueARM64_OpLess32U_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Less32U x y)
	// cond:
	// result: (LessThanU (CMPW x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64LessThanU)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpLess64_0 lowers the generic Less64 op to a 64-bit
// signed compare: LessThan over (CMP x y). Unconditional, always
// returns true.
func rewriteValueARM64_OpLess64_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Less64 x y)
	// cond:
	// result: (LessThan (CMP x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64LessThan)
		v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpLess64F_0 lowers the generic Less64F op. Operands
// are swapped: x < y becomes GreaterThan over (FCMPD y x). Unconditional,
// always returns true.
func rewriteValueARM64_OpLess64F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Less64F x y)
	// cond:
	// result: (GreaterThan (FCMPD y x))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterThan)
		v0 := b.NewValue0(v.Pos, OpARM64FCMPD, types.TypeFlags)
		v0.AddArg(y)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpLess64U_0 lowers the generic Less64U op to a 64-bit
// compare consumed by the unsigned condition LessThanU. Unconditional,
// always returns true.
func rewriteValueARM64_OpLess64U_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Less64U x y)
	// cond:
	// result: (LessThanU (CMP x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64LessThanU)
		v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpLess8_0 lowers the generic Less8 op: both operands
// are sign-extended to 32 bits, compared with CMPW, and the flags are
// consumed by LessThan. Unconditional, always returns true.
func rewriteValueARM64_OpLess8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Less8 x y)
	// cond:
	// result: (LessThan (CMPW (SignExt8to32 x) (SignExt8to32 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64LessThan)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpLess8U_0 lowers the generic Less8U op: both operands
// are zero-extended to 32 bits, compared with CMPW, and the flags are
// consumed by LessThanU. Unconditional, always returns true.
func rewriteValueARM64_OpLess8U_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Less8U x y)
	// cond:
	// result: (LessThanU (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64LessThanU)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpLoad_0 lowers the generic Load op to the ARM64 load
// instruction matching the loaded type's width, signedness, and class:
// MOVB/MOVBU/MOVH/MOVHU/MOVW/MOVWU/MOVD for integers and pointers, and
// FMOVS/FMOVD for floats. The rules are tried in order; the first whose
// type condition holds rewrites v and returns true. Returns false when no
// rule matches (e.g. an unsupported type).
func rewriteValueARM64_OpLoad_0(v *Value) bool {
	// match: (Load <t> ptr mem)
	// cond: t.IsBoolean()
	// result: (MOVBUload ptr mem)
	for {
		t := v.Type
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(t.IsBoolean()) {
			break
		}
		v.reset(OpARM64MOVBUload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is8BitInt(t) && isSigned(t))
	// result: (MOVBload ptr mem)
	for {
		t := v.Type
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(is8BitInt(t) && isSigned(t)) {
			break
		}
		v.reset(OpARM64MOVBload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is8BitInt(t) && !isSigned(t))
	// result: (MOVBUload ptr mem)
	for {
		t := v.Type
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(is8BitInt(t) && !isSigned(t)) {
			break
		}
		v.reset(OpARM64MOVBUload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is16BitInt(t) && isSigned(t))
	// result: (MOVHload ptr mem)
	for {
		t := v.Type
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(is16BitInt(t) && isSigned(t)) {
			break
		}
		v.reset(OpARM64MOVHload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is16BitInt(t) && !isSigned(t))
	// result: (MOVHUload ptr mem)
	for {
		t := v.Type
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(is16BitInt(t) && !isSigned(t)) {
			break
		}
		v.reset(OpARM64MOVHUload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is32BitInt(t) && isSigned(t))
	// result: (MOVWload ptr mem)
	for {
		t := v.Type
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(is32BitInt(t) && isSigned(t)) {
			break
		}
		v.reset(OpARM64MOVWload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is32BitInt(t) && !isSigned(t))
	// result: (MOVWUload ptr mem)
	for {
		t := v.Type
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(is32BitInt(t) && !isSigned(t)) {
			break
		}
		v.reset(OpARM64MOVWUload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is64BitInt(t) || isPtr(t))
	// result: (MOVDload ptr mem)
	for {
		t := v.Type
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(is64BitInt(t) || isPtr(t)) {
			break
		}
		v.reset(OpARM64MOVDload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: is32BitFloat(t)
	// result: (FMOVSload ptr mem)
	for {
		t := v.Type
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(is32BitFloat(t)) {
			break
		}
		v.reset(OpARM64FMOVSload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: is64BitFloat(t)
	// result: (FMOVDload ptr mem)
	for {
		t := v.Type
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(is64BitFloat(t)) {
			break
		}
		v.reset(OpARM64FMOVDload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpLsh16x16_0 lowers the generic Lsh16x16 op. The shift
// count is zero-extended to 64 bits; CSELULT selects the SLL result when
// the count is < 64 and the constant 0 otherwise, matching Go's rule that
// oversized shifts produce 0. Unconditional, always returns true.
func rewriteValueARM64_OpLsh16x16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Lsh16x16 <t> x y)
	// cond:
	// result: (CSELULT (SLL <t> x (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSELULT)
		v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Pos, OpConst64, t)
		v2.AuxInt = 0
		v.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = 64
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueARM64_OpLsh16x32_0 lowers the generic Lsh16x32 op. Same
// CSELULT guard as Lsh16x16, but the shift count is zero-extended from
// 32 bits. Unconditional, always returns true.
func rewriteValueARM64_OpLsh16x32_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Lsh16x32 <t> x y)
	// cond:
	// result: (CSELULT (SLL <t> x (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSELULT)
		v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Pos, OpConst64, t)
		v2.AuxInt = 0
		v.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = 64
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueARM64_OpLsh16x64_0 lowers the generic Lsh16x64 op. Constant
// shift counts are folded: counts < 16 become SLLconst, counts >= 16
// become the constant 0 (the 16-bit result is fully shifted out). For a
// variable count the CSELULT/CMPconst[64] guard selects 0 on oversized
// shifts. The final rule is unconditional, so this always returns true.
func rewriteValueARM64_OpLsh16x64_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Lsh16x64 x (MOVDconst [c]))
	// cond: uint64(c) < 16
	// result: (SLLconst x [c])
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(uint64(c) < 16) {
			break
		}
		v.reset(OpARM64SLLconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (Lsh16x64 _ (MOVDconst [c]))
	// cond: uint64(c) >= 16
	// result: (MOVDconst [0])
	for {
		_ = v.Args[1]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(uint64(c) >= 16) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (Lsh16x64 <t> x y)
	// cond:
	// result: (CSELULT (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] y))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSELULT)
		v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpConst64, t)
		v1.AuxInt = 0
		v.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v2.AuxInt = 64
		v2.AddArg(y)
		v.AddArg(v2)
		return true
	}
}
// rewriteValueARM64_OpLsh16x8_0 lowers the generic Lsh16x8 op. Same
// CSELULT guard as Lsh16x16, but the shift count is zero-extended from
// 8 bits. Unconditional, always returns true.
func rewriteValueARM64_OpLsh16x8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Lsh16x8 <t> x y)
	// cond:
	// result: (CSELULT (SLL <t> x (ZeroExt8to64  y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64  y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSELULT)
		v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Pos, OpConst64, t)
		v2.AuxInt = 0
		v.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = 64
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueARM64_OpLsh32x16_0 lowers the generic Lsh32x16 op. The shift
// count is zero-extended to 64 bits; CSELULT yields the SLL result when the
// count is < 64 and 0 otherwise. Unconditional, always returns true.
func rewriteValueARM64_OpLsh32x16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Lsh32x16 <t> x y)
	// cond:
	// result: (CSELULT (SLL <t> x (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSELULT)
		v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Pos, OpConst64, t)
		v2.AuxInt = 0
		v.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = 64
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueARM64_OpLsh32x32_0 lowers the generic Lsh32x32 op. Same
// CSELULT guard as Lsh32x16, but the shift count is zero-extended from
// 32 bits. Unconditional, always returns true.
func rewriteValueARM64_OpLsh32x32_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Lsh32x32 <t> x y)
	// cond:
	// result: (CSELULT (SLL <t> x (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSELULT)
		v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Pos, OpConst64, t)
		v2.AuxInt = 0
		v.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = 64
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueARM64_OpLsh32x64_0 lowers the generic Lsh32x64 op. Constant
// counts < 32 fold to SLLconst; counts >= 32 fold to the constant 0.
// Variable counts use the CSELULT/CMPconst[64] guard. The final rule is
// unconditional, so this always returns true.
func rewriteValueARM64_OpLsh32x64_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Lsh32x64 x (MOVDconst [c]))
	// cond: uint64(c) < 32
	// result: (SLLconst x [c])
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(uint64(c) < 32) {
			break
		}
		v.reset(OpARM64SLLconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (Lsh32x64 _ (MOVDconst [c]))
	// cond: uint64(c) >= 32
	// result: (MOVDconst [0])
	for {
		_ = v.Args[1]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(uint64(c) >= 32) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (Lsh32x64 <t> x y)
	// cond:
	// result: (CSELULT (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] y))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSELULT)
		v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpConst64, t)
		v1.AuxInt = 0
		v.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v2.AuxInt = 64
		v2.AddArg(y)
		v.AddArg(v2)
		return true
	}
}
// rewriteValueARM64_OpLsh32x8_0 lowers the generic Lsh32x8 op. Same
// CSELULT guard as Lsh32x16, but the shift count is zero-extended from
// 8 bits. Unconditional, always returns true.
func rewriteValueARM64_OpLsh32x8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Lsh32x8 <t> x y)
	// cond:
	// result: (CSELULT (SLL <t> x (ZeroExt8to64  y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64  y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSELULT)
		v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Pos, OpConst64, t)
		v2.AuxInt = 0
		v.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = 64
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueARM64_OpLsh64x16_0 lowers the generic Lsh64x16 op. The shift
// count is zero-extended to 64 bits; CSELULT yields the SLL result when the
// count is < 64 and 0 otherwise. Unconditional, always returns true.
func rewriteValueARM64_OpLsh64x16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Lsh64x16 <t> x y)
	// cond:
	// result: (CSELULT (SLL <t> x (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSELULT)
		v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Pos, OpConst64, t)
		v2.AuxInt = 0
		v.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = 64
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueARM64_OpLsh64x32_0 lowers the generic Lsh64x32 op. Same
// CSELULT guard as Lsh64x16, but the shift count is zero-extended from
// 32 bits. Unconditional, always returns true.
func rewriteValueARM64_OpLsh64x32_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Lsh64x32 <t> x y)
	// cond:
	// result: (CSELULT (SLL <t> x (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSELULT)
		v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Pos, OpConst64, t)
		v2.AuxInt = 0
		v.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = 64
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueARM64_OpLsh64x64_0 lowers the generic Lsh64x64 op. Constant
// counts < 64 fold to SLLconst; counts >= 64 fold to the constant 0.
// Variable counts use the CSELULT/CMPconst[64] guard. The final rule is
// unconditional, so this always returns true.
func rewriteValueARM64_OpLsh64x64_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Lsh64x64 x (MOVDconst [c]))
	// cond: uint64(c) < 64
	// result: (SLLconst x [c])
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(uint64(c) < 64) {
			break
		}
		v.reset(OpARM64SLLconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (Lsh64x64 _ (MOVDconst [c]))
	// cond: uint64(c) >= 64
	// result: (MOVDconst [0])
	for {
		_ = v.Args[1]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(uint64(c) >= 64) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (Lsh64x64 <t> x y)
	// cond:
	// result: (CSELULT (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] y))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSELULT)
		v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpConst64, t)
		v1.AuxInt = 0
		v.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v2.AuxInt = 64
		v2.AddArg(y)
		v.AddArg(v2)
		return true
	}
}
// rewriteValueARM64_OpLsh64x8_0 lowers the generic Lsh64x8 op. Same
// CSELULT guard as Lsh64x16, but the shift count is zero-extended from
// 8 bits. Unconditional, always returns true.
func rewriteValueARM64_OpLsh64x8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Lsh64x8 <t> x y)
	// cond:
	// result: (CSELULT (SLL <t> x (ZeroExt8to64  y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64  y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSELULT)
		v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Pos, OpConst64, t)
		v2.AuxInt = 0
		v.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = 64
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueARM64_OpLsh8x16_0 lowers the generic Lsh8x16 op. The shift
// count is zero-extended to 64 bits; CSELULT yields the SLL result when the
// count is < 64 and 0 otherwise. Unconditional, always returns true.
func rewriteValueARM64_OpLsh8x16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Lsh8x16 <t> x y)
	// cond:
	// result: (CSELULT (SLL <t> x (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSELULT)
		v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Pos, OpConst64, t)
		v2.AuxInt = 0
		v.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = 64
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueARM64_OpLsh8x32_0 lowers the generic Lsh8x32 op. Same
// CSELULT guard as Lsh8x16, but the shift count is zero-extended from
// 32 bits. Unconditional, always returns true.
func rewriteValueARM64_OpLsh8x32_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Lsh8x32 <t> x y)
	// cond:
	// result: (CSELULT (SLL <t> x (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSELULT)
		v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Pos, OpConst64, t)
		v2.AuxInt = 0
		v.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = 64
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueARM64_OpLsh8x64_0 lowers the generic Lsh8x64 op. Constant
// counts < 8 fold to SLLconst; counts >= 8 fold to the constant 0.
// Variable counts use the CSELULT/CMPconst[64] guard. The final rule is
// unconditional, so this always returns true.
func rewriteValueARM64_OpLsh8x64_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Lsh8x64 x (MOVDconst [c]))
	// cond: uint64(c) < 8
	// result: (SLLconst x [c])
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(uint64(c) < 8) {
			break
		}
		v.reset(OpARM64SLLconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (Lsh8x64 _ (MOVDconst [c]))
	// cond: uint64(c) >= 8
	// result: (MOVDconst [0])
	for {
		_ = v.Args[1]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(uint64(c) >= 8) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (Lsh8x64 <t> x y)
	// cond:
	// result: (CSELULT (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] y))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSELULT)
		v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpConst64, t)
		v1.AuxInt = 0
		v.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v2.AuxInt = 64
		v2.AddArg(y)
		v.AddArg(v2)
		return true
	}
}
 13205  func rewriteValueARM64_OpLsh8x8_0(v *Value) bool {
 13206  	b := v.Block
 13207  	_ = b
 13208  	typ := &b.Func.Config.Types
 13209  	_ = typ
 13210  	// match: (Lsh8x8 <t> x y)
 13211  	// cond:
 13212  	// result: (CSELULT (SLL <t> x (ZeroExt8to64  y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64  y)))
 13213  	for {
 13214  		t := v.Type
 13215  		_ = v.Args[1]
 13216  		x := v.Args[0]
 13217  		y := v.Args[1]
 13218  		v.reset(OpARM64CSELULT)
 13219  		v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
 13220  		v0.AddArg(x)
 13221  		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
 13222  		v1.AddArg(y)
 13223  		v0.AddArg(v1)
 13224  		v.AddArg(v0)
 13225  		v2 := b.NewValue0(v.Pos, OpConst64, t)
 13226  		v2.AuxInt = 0
 13227  		v.AddArg(v2)
 13228  		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
 13229  		v3.AuxInt = 64
 13230  		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
 13231  		v4.AddArg(y)
 13232  		v3.AddArg(v4)
 13233  		v.AddArg(v3)
 13234  		return true
 13235  	}
 13236  }
 13237  func rewriteValueARM64_OpMod16_0(v *Value) bool {
 13238  	b := v.Block
 13239  	_ = b
 13240  	typ := &b.Func.Config.Types
 13241  	_ = typ
 13242  	// match: (Mod16 x y)
 13243  	// cond:
 13244  	// result: (MODW (SignExt16to32 x) (SignExt16to32 y))
 13245  	for {
 13246  		_ = v.Args[1]
 13247  		x := v.Args[0]
 13248  		y := v.Args[1]
 13249  		v.reset(OpARM64MODW)
 13250  		v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
 13251  		v0.AddArg(x)
 13252  		v.AddArg(v0)
 13253  		v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
 13254  		v1.AddArg(y)
 13255  		v.AddArg(v1)
 13256  		return true
 13257  	}
 13258  }
 13259  func rewriteValueARM64_OpMod16u_0(v *Value) bool {
 13260  	b := v.Block
 13261  	_ = b
 13262  	typ := &b.Func.Config.Types
 13263  	_ = typ
 13264  	// match: (Mod16u x y)
 13265  	// cond:
 13266  	// result: (UMODW (ZeroExt16to32 x) (ZeroExt16to32 y))
 13267  	for {
 13268  		_ = v.Args[1]
 13269  		x := v.Args[0]
 13270  		y := v.Args[1]
 13271  		v.reset(OpARM64UMODW)
 13272  		v0 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
 13273  		v0.AddArg(x)
 13274  		v.AddArg(v0)
 13275  		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
 13276  		v1.AddArg(y)
 13277  		v.AddArg(v1)
 13278  		return true
 13279  	}
 13280  }
 13281  func rewriteValueARM64_OpMod32_0(v *Value) bool {
 13282  	// match: (Mod32 x y)
 13283  	// cond:
 13284  	// result: (MODW x y)
 13285  	for {
 13286  		_ = v.Args[1]
 13287  		x := v.Args[0]
 13288  		y := v.Args[1]
 13289  		v.reset(OpARM64MODW)
 13290  		v.AddArg(x)
 13291  		v.AddArg(y)
 13292  		return true
 13293  	}
 13294  }
 13295  func rewriteValueARM64_OpMod32u_0(v *Value) bool {
 13296  	// match: (Mod32u x y)
 13297  	// cond:
 13298  	// result: (UMODW x y)
 13299  	for {
 13300  		_ = v.Args[1]
 13301  		x := v.Args[0]
 13302  		y := v.Args[1]
 13303  		v.reset(OpARM64UMODW)
 13304  		v.AddArg(x)
 13305  		v.AddArg(y)
 13306  		return true
 13307  	}
 13308  }
 13309  func rewriteValueARM64_OpMod64_0(v *Value) bool {
 13310  	// match: (Mod64 x y)
 13311  	// cond:
 13312  	// result: (MOD x y)
 13313  	for {
 13314  		_ = v.Args[1]
 13315  		x := v.Args[0]
 13316  		y := v.Args[1]
 13317  		v.reset(OpARM64MOD)
 13318  		v.AddArg(x)
 13319  		v.AddArg(y)
 13320  		return true
 13321  	}
 13322  }
 13323  func rewriteValueARM64_OpMod64u_0(v *Value) bool {
 13324  	// match: (Mod64u x y)
 13325  	// cond:
 13326  	// result: (UMOD x y)
 13327  	for {
 13328  		_ = v.Args[1]
 13329  		x := v.Args[0]
 13330  		y := v.Args[1]
 13331  		v.reset(OpARM64UMOD)
 13332  		v.AddArg(x)
 13333  		v.AddArg(y)
 13334  		return true
 13335  	}
 13336  }
 13337  func rewriteValueARM64_OpMod8_0(v *Value) bool {
 13338  	b := v.Block
 13339  	_ = b
 13340  	typ := &b.Func.Config.Types
 13341  	_ = typ
 13342  	// match: (Mod8 x y)
 13343  	// cond:
 13344  	// result: (MODW (SignExt8to32 x) (SignExt8to32 y))
 13345  	for {
 13346  		_ = v.Args[1]
 13347  		x := v.Args[0]
 13348  		y := v.Args[1]
 13349  		v.reset(OpARM64MODW)
 13350  		v0 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
 13351  		v0.AddArg(x)
 13352  		v.AddArg(v0)
 13353  		v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
 13354  		v1.AddArg(y)
 13355  		v.AddArg(v1)
 13356  		return true
 13357  	}
 13358  }
 13359  func rewriteValueARM64_OpMod8u_0(v *Value) bool {
 13360  	b := v.Block
 13361  	_ = b
 13362  	typ := &b.Func.Config.Types
 13363  	_ = typ
 13364  	// match: (Mod8u x y)
 13365  	// cond:
 13366  	// result: (UMODW (ZeroExt8to32 x) (ZeroExt8to32 y))
 13367  	for {
 13368  		_ = v.Args[1]
 13369  		x := v.Args[0]
 13370  		y := v.Args[1]
 13371  		v.reset(OpARM64UMODW)
 13372  		v0 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
 13373  		v0.AddArg(x)
 13374  		v.AddArg(v0)
 13375  		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
 13376  		v1.AddArg(y)
 13377  		v.AddArg(v1)
 13378  		return true
 13379  	}
 13380  }
// rewriteValueARM64_OpMove_0 lowers small fixed-size Move (memmove) ops
// to straight-line load/store code. Sizes 0, 1, 2, 4 and 8 use a single
// load/store pair (or nothing); sizes 3, 5, 6, 7 and 12 combine two or
// three pairs, storing the odd tail at the high offset first. Sizes not
// matched here fall through (return false) to rewriteValueARM64_OpMove_10.
func rewriteValueARM64_OpMove_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Move [0] _ _ mem)
	// cond:
	// result: mem
	for {
		if v.AuxInt != 0 {
			break
		}
		_ = v.Args[2]
		mem := v.Args[2]
		v.reset(OpCopy)
		v.Type = mem.Type
		v.AddArg(mem)
		return true
	}
	// match: (Move [1] dst src mem)
	// cond:
	// result: (MOVBstore dst (MOVBUload src mem) mem)
	for {
		if v.AuxInt != 1 {
			break
		}
		_ = v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64MOVBstore)
		v.AddArg(dst)
		v0 := b.NewValue0(v.Pos, OpARM64MOVBUload, typ.UInt8)
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	// match: (Move [2] dst src mem)
	// cond:
	// result: (MOVHstore dst (MOVHUload src mem) mem)
	for {
		if v.AuxInt != 2 {
			break
		}
		_ = v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64MOVHstore)
		v.AddArg(dst)
		v0 := b.NewValue0(v.Pos, OpARM64MOVHUload, typ.UInt16)
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	// match: (Move [4] dst src mem)
	// cond:
	// result: (MOVWstore dst (MOVWUload src mem) mem)
	for {
		if v.AuxInt != 4 {
			break
		}
		_ = v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64MOVWstore)
		v.AddArg(dst)
		v0 := b.NewValue0(v.Pos, OpARM64MOVWUload, typ.UInt32)
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	// match: (Move [8] dst src mem)
	// cond:
	// result: (MOVDstore dst (MOVDload src mem) mem)
	for {
		if v.AuxInt != 8 {
			break
		}
		_ = v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64MOVDstore)
		v.AddArg(dst)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64)
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	// match: (Move [3] dst src mem)
	// cond:
	// result: (MOVBstore [2] dst (MOVBUload [2] src mem) 		(MOVHstore dst (MOVHUload src mem) mem))
	for {
		if v.AuxInt != 3 {
			break
		}
		_ = v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64MOVBstore)
		v.AuxInt = 2
		v.AddArg(dst)
		v0 := b.NewValue0(v.Pos, OpARM64MOVBUload, typ.UInt8)
		v0.AuxInt = 2
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVHstore, types.TypeMem)
		v1.AddArg(dst)
		v2 := b.NewValue0(v.Pos, OpARM64MOVHUload, typ.UInt16)
		v2.AddArg(src)
		v2.AddArg(mem)
		v1.AddArg(v2)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	// match: (Move [5] dst src mem)
	// cond:
	// result: (MOVBstore [4] dst (MOVBUload [4] src mem) 		(MOVWstore dst (MOVWUload src mem) mem))
	for {
		if v.AuxInt != 5 {
			break
		}
		_ = v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64MOVBstore)
		v.AuxInt = 4
		v.AddArg(dst)
		v0 := b.NewValue0(v.Pos, OpARM64MOVBUload, typ.UInt8)
		v0.AuxInt = 4
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem)
		v1.AddArg(dst)
		v2 := b.NewValue0(v.Pos, OpARM64MOVWUload, typ.UInt32)
		v2.AddArg(src)
		v2.AddArg(mem)
		v1.AddArg(v2)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	// match: (Move [6] dst src mem)
	// cond:
	// result: (MOVHstore [4] dst (MOVHUload [4] src mem) 		(MOVWstore dst (MOVWUload src mem) mem))
	for {
		if v.AuxInt != 6 {
			break
		}
		_ = v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64MOVHstore)
		v.AuxInt = 4
		v.AddArg(dst)
		v0 := b.NewValue0(v.Pos, OpARM64MOVHUload, typ.UInt16)
		v0.AuxInt = 4
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem)
		v1.AddArg(dst)
		v2 := b.NewValue0(v.Pos, OpARM64MOVWUload, typ.UInt32)
		v2.AddArg(src)
		v2.AddArg(mem)
		v1.AddArg(v2)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	// match: (Move [7] dst src mem)
	// cond:
	// result: (MOVBstore [6] dst (MOVBUload [6] src mem) 		(MOVHstore [4] dst (MOVHUload [4] src mem) 			(MOVWstore dst (MOVWUload src mem) mem)))
	for {
		if v.AuxInt != 7 {
			break
		}
		_ = v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64MOVBstore)
		v.AuxInt = 6
		v.AddArg(dst)
		v0 := b.NewValue0(v.Pos, OpARM64MOVBUload, typ.UInt8)
		v0.AuxInt = 6
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVHstore, types.TypeMem)
		v1.AuxInt = 4
		v1.AddArg(dst)
		v2 := b.NewValue0(v.Pos, OpARM64MOVHUload, typ.UInt16)
		v2.AuxInt = 4
		v2.AddArg(src)
		v2.AddArg(mem)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem)
		v3.AddArg(dst)
		v4 := b.NewValue0(v.Pos, OpARM64MOVWUload, typ.UInt32)
		v4.AddArg(src)
		v4.AddArg(mem)
		v3.AddArg(v4)
		v3.AddArg(mem)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
	// match: (Move [12] dst src mem)
	// cond:
	// result: (MOVWstore [8] dst (MOVWUload [8] src mem) 		(MOVDstore dst (MOVDload src mem) mem))
	for {
		if v.AuxInt != 12 {
			break
		}
		_ = v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64MOVWstore)
		v.AuxInt = 8
		v.AddArg(dst)
		v0 := b.NewValue0(v.Pos, OpARM64MOVWUload, typ.UInt32)
		v0.AuxInt = 8
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem)
		v1.AddArg(dst)
		v2 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64)
		v2.AddArg(src)
		v2.AddArg(mem)
		v1.AddArg(v2)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	return false
}
// rewriteValueARM64_OpMove_10 handles the remaining Move sizes: 16 and 24
// bytes unroll into two or three 8-byte load/store pairs; a size that is
// not a multiple of 8 (and > 8) splits into an aligned Move plus a small
// tail Move; 8-byte multiples up to 1024 use Duff's device (unless
// disabled); anything larger falls back to the generic LoweredMove loop.
// Rule order matters: the Duff's-device rule must be tried before the
// LoweredMove rule, whose guard it overlaps.
func rewriteValueARM64_OpMove_10(v *Value) bool {
	b := v.Block
	_ = b
	config := b.Func.Config
	_ = config
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Move [16] dst src mem)
	// cond:
	// result: (MOVDstore [8] dst (MOVDload [8] src mem) 		(MOVDstore dst (MOVDload src mem) mem))
	for {
		if v.AuxInt != 16 {
			break
		}
		_ = v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64MOVDstore)
		v.AuxInt = 8
		v.AddArg(dst)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64)
		v0.AuxInt = 8
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem)
		v1.AddArg(dst)
		v2 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64)
		v2.AddArg(src)
		v2.AddArg(mem)
		v1.AddArg(v2)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	// match: (Move [24] dst src mem)
	// cond:
	// result: (MOVDstore [16] dst (MOVDload [16] src mem) 		(MOVDstore [8] dst (MOVDload [8] src mem) 			(MOVDstore dst (MOVDload src mem) mem)))
	for {
		if v.AuxInt != 24 {
			break
		}
		_ = v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64MOVDstore)
		v.AuxInt = 16
		v.AddArg(dst)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64)
		v0.AuxInt = 16
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem)
		v1.AuxInt = 8
		v1.AddArg(dst)
		v2 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64)
		v2.AuxInt = 8
		v2.AddArg(src)
		v2.AddArg(mem)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem)
		v3.AddArg(dst)
		v4 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64)
		v4.AddArg(src)
		v4.AddArg(mem)
		v3.AddArg(v4)
		v3.AddArg(mem)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
	// match: (Move [s] dst src mem)
	// cond: s%8 != 0 && s > 8
	// result: (Move [s%8] 		(OffPtr <dst.Type> dst [s-s%8]) 		(OffPtr <src.Type> src [s-s%8]) 		(Move [s-s%8] dst src mem))
	for {
		s := v.AuxInt
		_ = v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		if !(s%8 != 0 && s > 8) {
			break
		}
		v.reset(OpMove)
		v.AuxInt = s % 8
		v0 := b.NewValue0(v.Pos, OpOffPtr, dst.Type)
		v0.AuxInt = s - s%8
		v0.AddArg(dst)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpOffPtr, src.Type)
		v1.AuxInt = s - s%8
		v1.AddArg(src)
		v.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpMove, types.TypeMem)
		v2.AuxInt = s - s%8
		v2.AddArg(dst)
		v2.AddArg(src)
		v2.AddArg(mem)
		v.AddArg(v2)
		return true
	}
	// match: (Move [s] dst src mem)
	// cond: s%8 == 0 && s > 24 && s <= 8*128 	&& !config.noDuffDevice
	// result: (DUFFCOPY [8 * (128 - int64(s/8))] dst src mem)
	for {
		s := v.AuxInt
		_ = v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		if !(s%8 == 0 && s > 24 && s <= 8*128 && !config.noDuffDevice) {
			break
		}
		v.reset(OpARM64DUFFCOPY)
		// AuxInt is the byte offset to jump to within the Duff's device
		// body: 8 bytes of code per 8-byte word NOT copied.
		v.AuxInt = 8 * (128 - int64(s/8))
		v.AddArg(dst)
		v.AddArg(src)
		v.AddArg(mem)
		return true
	}
	// match: (Move [s] dst src mem)
	// cond: s > 24 && s%8 == 0
	// result: (LoweredMove 		dst 		src 		(ADDconst <src.Type> src [s-8]) 		mem)
	for {
		s := v.AuxInt
		_ = v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		if !(s > 24 && s%8 == 0) {
			break
		}
		v.reset(OpARM64LoweredMove)
		v.AddArg(dst)
		v.AddArg(src)
		// Third operand is the address of the last word to copy,
		// used as the loop-termination bound by LoweredMove.
		v0 := b.NewValue0(v.Pos, OpARM64ADDconst, src.Type)
		v0.AuxInt = s - 8
		v0.AddArg(src)
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	return false
}
 13783  func rewriteValueARM64_OpMul16_0(v *Value) bool {
 13784  	// match: (Mul16 x y)
 13785  	// cond:
 13786  	// result: (MULW x y)
 13787  	for {
 13788  		_ = v.Args[1]
 13789  		x := v.Args[0]
 13790  		y := v.Args[1]
 13791  		v.reset(OpARM64MULW)
 13792  		v.AddArg(x)
 13793  		v.AddArg(y)
 13794  		return true
 13795  	}
 13796  }
 13797  func rewriteValueARM64_OpMul32_0(v *Value) bool {
 13798  	// match: (Mul32 x y)
 13799  	// cond:
 13800  	// result: (MULW x y)
 13801  	for {
 13802  		_ = v.Args[1]
 13803  		x := v.Args[0]
 13804  		y := v.Args[1]
 13805  		v.reset(OpARM64MULW)
 13806  		v.AddArg(x)
 13807  		v.AddArg(y)
 13808  		return true
 13809  	}
 13810  }
 13811  func rewriteValueARM64_OpMul32F_0(v *Value) bool {
 13812  	// match: (Mul32F x y)
 13813  	// cond:
 13814  	// result: (FMULS x y)
 13815  	for {
 13816  		_ = v.Args[1]
 13817  		x := v.Args[0]
 13818  		y := v.Args[1]
 13819  		v.reset(OpARM64FMULS)
 13820  		v.AddArg(x)
 13821  		v.AddArg(y)
 13822  		return true
 13823  	}
 13824  }
 13825  func rewriteValueARM64_OpMul64_0(v *Value) bool {
 13826  	// match: (Mul64 x y)
 13827  	// cond:
 13828  	// result: (MUL x y)
 13829  	for {
 13830  		_ = v.Args[1]
 13831  		x := v.Args[0]
 13832  		y := v.Args[1]
 13833  		v.reset(OpARM64MUL)
 13834  		v.AddArg(x)
 13835  		v.AddArg(y)
 13836  		return true
 13837  	}
 13838  }
 13839  func rewriteValueARM64_OpMul64F_0(v *Value) bool {
 13840  	// match: (Mul64F x y)
 13841  	// cond:
 13842  	// result: (FMULD x y)
 13843  	for {
 13844  		_ = v.Args[1]
 13845  		x := v.Args[0]
 13846  		y := v.Args[1]
 13847  		v.reset(OpARM64FMULD)
 13848  		v.AddArg(x)
 13849  		v.AddArg(y)
 13850  		return true
 13851  	}
 13852  }
 13853  func rewriteValueARM64_OpMul8_0(v *Value) bool {
 13854  	// match: (Mul8 x y)
 13855  	// cond:
 13856  	// result: (MULW x y)
 13857  	for {
 13858  		_ = v.Args[1]
 13859  		x := v.Args[0]
 13860  		y := v.Args[1]
 13861  		v.reset(OpARM64MULW)
 13862  		v.AddArg(x)
 13863  		v.AddArg(y)
 13864  		return true
 13865  	}
 13866  }
 13867  func rewriteValueARM64_OpNeg16_0(v *Value) bool {
 13868  	// match: (Neg16 x)
 13869  	// cond:
 13870  	// result: (NEG x)
 13871  	for {
 13872  		x := v.Args[0]
 13873  		v.reset(OpARM64NEG)
 13874  		v.AddArg(x)
 13875  		return true
 13876  	}
 13877  }
 13878  func rewriteValueARM64_OpNeg32_0(v *Value) bool {
 13879  	// match: (Neg32 x)
 13880  	// cond:
 13881  	// result: (NEG x)
 13882  	for {
 13883  		x := v.Args[0]
 13884  		v.reset(OpARM64NEG)
 13885  		v.AddArg(x)
 13886  		return true
 13887  	}
 13888  }
 13889  func rewriteValueARM64_OpNeg32F_0(v *Value) bool {
 13890  	// match: (Neg32F x)
 13891  	// cond:
 13892  	// result: (FNEGS x)
 13893  	for {
 13894  		x := v.Args[0]
 13895  		v.reset(OpARM64FNEGS)
 13896  		v.AddArg(x)
 13897  		return true
 13898  	}
 13899  }
 13900  func rewriteValueARM64_OpNeg64_0(v *Value) bool {
 13901  	// match: (Neg64 x)
 13902  	// cond:
 13903  	// result: (NEG x)
 13904  	for {
 13905  		x := v.Args[0]
 13906  		v.reset(OpARM64NEG)
 13907  		v.AddArg(x)
 13908  		return true
 13909  	}
 13910  }
 13911  func rewriteValueARM64_OpNeg64F_0(v *Value) bool {
 13912  	// match: (Neg64F x)
 13913  	// cond:
 13914  	// result: (FNEGD x)
 13915  	for {
 13916  		x := v.Args[0]
 13917  		v.reset(OpARM64FNEGD)
 13918  		v.AddArg(x)
 13919  		return true
 13920  	}
 13921  }
 13922  func rewriteValueARM64_OpNeg8_0(v *Value) bool {
 13923  	// match: (Neg8 x)
 13924  	// cond:
 13925  	// result: (NEG x)
 13926  	for {
 13927  		x := v.Args[0]
 13928  		v.reset(OpARM64NEG)
 13929  		v.AddArg(x)
 13930  		return true
 13931  	}
 13932  }
 13933  func rewriteValueARM64_OpNeq16_0(v *Value) bool {
 13934  	b := v.Block
 13935  	_ = b
 13936  	typ := &b.Func.Config.Types
 13937  	_ = typ
 13938  	// match: (Neq16 x y)
 13939  	// cond:
 13940  	// result: (NotEqual (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y)))
 13941  	for {
 13942  		_ = v.Args[1]
 13943  		x := v.Args[0]
 13944  		y := v.Args[1]
 13945  		v.reset(OpARM64NotEqual)
 13946  		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
 13947  		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
 13948  		v1.AddArg(x)
 13949  		v0.AddArg(v1)
 13950  		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
 13951  		v2.AddArg(y)
 13952  		v0.AddArg(v2)
 13953  		v.AddArg(v0)
 13954  		return true
 13955  	}
 13956  }
 13957  func rewriteValueARM64_OpNeq32_0(v *Value) bool {
 13958  	b := v.Block
 13959  	_ = b
 13960  	// match: (Neq32 x y)
 13961  	// cond:
 13962  	// result: (NotEqual (CMPW x y))
 13963  	for {
 13964  		_ = v.Args[1]
 13965  		x := v.Args[0]
 13966  		y := v.Args[1]
 13967  		v.reset(OpARM64NotEqual)
 13968  		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
 13969  		v0.AddArg(x)
 13970  		v0.AddArg(y)
 13971  		v.AddArg(v0)
 13972  		return true
 13973  	}
 13974  }
 13975  func rewriteValueARM64_OpNeq32F_0(v *Value) bool {
 13976  	b := v.Block
 13977  	_ = b
 13978  	// match: (Neq32F x y)
 13979  	// cond:
 13980  	// result: (NotEqual (FCMPS x y))
 13981  	for {
 13982  		_ = v.Args[1]
 13983  		x := v.Args[0]
 13984  		y := v.Args[1]
 13985  		v.reset(OpARM64NotEqual)
 13986  		v0 := b.NewValue0(v.Pos, OpARM64FCMPS, types.TypeFlags)
 13987  		v0.AddArg(x)
 13988  		v0.AddArg(y)
 13989  		v.AddArg(v0)
 13990  		return true
 13991  	}
 13992  }
 13993  func rewriteValueARM64_OpNeq64_0(v *Value) bool {
 13994  	b := v.Block
 13995  	_ = b
 13996  	// match: (Neq64 x y)
 13997  	// cond:
 13998  	// result: (NotEqual (CMP x y))
 13999  	for {
 14000  		_ = v.Args[1]
 14001  		x := v.Args[0]
 14002  		y := v.Args[1]
 14003  		v.reset(OpARM64NotEqual)
 14004  		v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
 14005  		v0.AddArg(x)
 14006  		v0.AddArg(y)
 14007  		v.AddArg(v0)
 14008  		return true
 14009  	}
 14010  }
 14011  func rewriteValueARM64_OpNeq64F_0(v *Value) bool {
 14012  	b := v.Block
 14013  	_ = b
 14014  	// match: (Neq64F x y)
 14015  	// cond:
 14016  	// result: (NotEqual (FCMPD x y))
 14017  	for {
 14018  		_ = v.Args[1]
 14019  		x := v.Args[0]
 14020  		y := v.Args[1]
 14021  		v.reset(OpARM64NotEqual)
 14022  		v0 := b.NewValue0(v.Pos, OpARM64FCMPD, types.TypeFlags)
 14023  		v0.AddArg(x)
 14024  		v0.AddArg(y)
 14025  		v.AddArg(v0)
 14026  		return true
 14027  	}
 14028  }
 14029  func rewriteValueARM64_OpNeq8_0(v *Value) bool {
 14030  	b := v.Block
 14031  	_ = b
 14032  	typ := &b.Func.Config.Types
 14033  	_ = typ
 14034  	// match: (Neq8 x y)
 14035  	// cond:
 14036  	// result: (NotEqual (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y)))
 14037  	for {
 14038  		_ = v.Args[1]
 14039  		x := v.Args[0]
 14040  		y := v.Args[1]
 14041  		v.reset(OpARM64NotEqual)
 14042  		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
 14043  		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
 14044  		v1.AddArg(x)
 14045  		v0.AddArg(v1)
 14046  		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
 14047  		v2.AddArg(y)
 14048  		v0.AddArg(v2)
 14049  		v.AddArg(v0)
 14050  		return true
 14051  	}
 14052  }
 14053  func rewriteValueARM64_OpNeqB_0(v *Value) bool {
 14054  	// match: (NeqB x y)
 14055  	// cond:
 14056  	// result: (XOR x y)
 14057  	for {
 14058  		_ = v.Args[1]
 14059  		x := v.Args[0]
 14060  		y := v.Args[1]
 14061  		v.reset(OpARM64XOR)
 14062  		v.AddArg(x)
 14063  		v.AddArg(y)
 14064  		return true
 14065  	}
 14066  }
 14067  func rewriteValueARM64_OpNeqPtr_0(v *Value) bool {
 14068  	b := v.Block
 14069  	_ = b
 14070  	// match: (NeqPtr x y)
 14071  	// cond:
 14072  	// result: (NotEqual (CMP x y))
 14073  	for {
 14074  		_ = v.Args[1]
 14075  		x := v.Args[0]
 14076  		y := v.Args[1]
 14077  		v.reset(OpARM64NotEqual)
 14078  		v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
 14079  		v0.AddArg(x)
 14080  		v0.AddArg(y)
 14081  		v.AddArg(v0)
 14082  		return true
 14083  	}
 14084  }
 14085  func rewriteValueARM64_OpNilCheck_0(v *Value) bool {
 14086  	// match: (NilCheck ptr mem)
 14087  	// cond:
 14088  	// result: (LoweredNilCheck ptr mem)
 14089  	for {
 14090  		_ = v.Args[1]
 14091  		ptr := v.Args[0]
 14092  		mem := v.Args[1]
 14093  		v.reset(OpARM64LoweredNilCheck)
 14094  		v.AddArg(ptr)
 14095  		v.AddArg(mem)
 14096  		return true
 14097  	}
 14098  }
 14099  func rewriteValueARM64_OpNot_0(v *Value) bool {
 14100  	b := v.Block
 14101  	_ = b
 14102  	typ := &b.Func.Config.Types
 14103  	_ = typ
 14104  	// match: (Not x)
 14105  	// cond:
 14106  	// result: (XOR (MOVDconst [1]) x)
 14107  	for {
 14108  		x := v.Args[0]
 14109  		v.reset(OpARM64XOR)
 14110  		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
 14111  		v0.AuxInt = 1
 14112  		v.AddArg(v0)
 14113  		v.AddArg(x)
 14114  		return true
 14115  	}
 14116  }
 14117  func rewriteValueARM64_OpOffPtr_0(v *Value) bool {
 14118  	// match: (OffPtr [off] ptr:(SP))
 14119  	// cond:
 14120  	// result: (MOVDaddr [off] ptr)
 14121  	for {
 14122  		off := v.AuxInt
 14123  		ptr := v.Args[0]
 14124  		if ptr.Op != OpSP {
 14125  			break
 14126  		}
 14127  		v.reset(OpARM64MOVDaddr)
 14128  		v.AuxInt = off
 14129  		v.AddArg(ptr)
 14130  		return true
 14131  	}
 14132  	// match: (OffPtr [off] ptr)
 14133  	// cond:
 14134  	// result: (ADDconst [off] ptr)
 14135  	for {
 14136  		off := v.AuxInt
 14137  		ptr := v.Args[0]
 14138  		v.reset(OpARM64ADDconst)
 14139  		v.AuxInt = off
 14140  		v.AddArg(ptr)
 14141  		return true
 14142  	}
 14143  }
 14144  func rewriteValueARM64_OpOr16_0(v *Value) bool {
 14145  	// match: (Or16 x y)
 14146  	// cond:
 14147  	// result: (OR x y)
 14148  	for {
 14149  		_ = v.Args[1]
 14150  		x := v.Args[0]
 14151  		y := v.Args[1]
 14152  		v.reset(OpARM64OR)
 14153  		v.AddArg(x)
 14154  		v.AddArg(y)
 14155  		return true
 14156  	}
 14157  }
 14158  func rewriteValueARM64_OpOr32_0(v *Value) bool {
 14159  	// match: (Or32 x y)
 14160  	// cond:
 14161  	// result: (OR x y)
 14162  	for {
 14163  		_ = v.Args[1]
 14164  		x := v.Args[0]
 14165  		y := v.Args[1]
 14166  		v.reset(OpARM64OR)
 14167  		v.AddArg(x)
 14168  		v.AddArg(y)
 14169  		return true
 14170  	}
 14171  }
 14172  func rewriteValueARM64_OpOr64_0(v *Value) bool {
 14173  	// match: (Or64 x y)
 14174  	// cond:
 14175  	// result: (OR x y)
 14176  	for {
 14177  		_ = v.Args[1]
 14178  		x := v.Args[0]
 14179  		y := v.Args[1]
 14180  		v.reset(OpARM64OR)
 14181  		v.AddArg(x)
 14182  		v.AddArg(y)
 14183  		return true
 14184  	}
 14185  }
 14186  func rewriteValueARM64_OpOr8_0(v *Value) bool {
 14187  	// match: (Or8 x y)
 14188  	// cond:
 14189  	// result: (OR x y)
 14190  	for {
 14191  		_ = v.Args[1]
 14192  		x := v.Args[0]
 14193  		y := v.Args[1]
 14194  		v.reset(OpARM64OR)
 14195  		v.AddArg(x)
 14196  		v.AddArg(y)
 14197  		return true
 14198  	}
 14199  }
 14200  func rewriteValueARM64_OpOrB_0(v *Value) bool {
 14201  	// match: (OrB x y)
 14202  	// cond:
 14203  	// result: (OR x y)
 14204  	for {
 14205  		_ = v.Args[1]
 14206  		x := v.Args[0]
 14207  		y := v.Args[1]
 14208  		v.reset(OpARM64OR)
 14209  		v.AddArg(x)
 14210  		v.AddArg(y)
 14211  		return true
 14212  	}
 14213  }
 14214  func rewriteValueARM64_OpRound32F_0(v *Value) bool {
 14215  	// match: (Round32F x)
 14216  	// cond:
 14217  	// result: x
 14218  	for {
 14219  		x := v.Args[0]
 14220  		v.reset(OpCopy)
 14221  		v.Type = x.Type
 14222  		v.AddArg(x)
 14223  		return true
 14224  	}
 14225  }
 14226  func rewriteValueARM64_OpRound64F_0(v *Value) bool {
 14227  	// match: (Round64F x)
 14228  	// cond:
 14229  	// result: x
 14230  	for {
 14231  		x := v.Args[0]
 14232  		v.reset(OpCopy)
 14233  		v.Type = x.Type
 14234  		v.AddArg(x)
 14235  		return true
 14236  	}
 14237  }
 14238  func rewriteValueARM64_OpRsh16Ux16_0(v *Value) bool {
 14239  	b := v.Block
 14240  	_ = b
 14241  	typ := &b.Func.Config.Types
 14242  	_ = typ
 14243  	// match: (Rsh16Ux16 <t> x y)
 14244  	// cond:
 14245  	// result: (CSELULT (SRL <t> (ZeroExt16to64 x) (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
 14246  	for {
 14247  		t := v.Type
 14248  		_ = v.Args[1]
 14249  		x := v.Args[0]
 14250  		y := v.Args[1]
 14251  		v.reset(OpARM64CSELULT)
 14252  		v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
 14253  		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
 14254  		v1.AddArg(x)
 14255  		v0.AddArg(v1)
 14256  		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
 14257  		v2.AddArg(y)
 14258  		v0.AddArg(v2)
 14259  		v.AddArg(v0)
 14260  		v3 := b.NewValue0(v.Pos, OpConst64, t)
 14261  		v3.AuxInt = 0
 14262  		v.AddArg(v3)
 14263  		v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
 14264  		v4.AuxInt = 64
 14265  		v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
 14266  		v5.AddArg(y)
 14267  		v4.AddArg(v5)
 14268  		v.AddArg(v4)
 14269  		return true
 14270  	}
 14271  }
 14272  func rewriteValueARM64_OpRsh16Ux32_0(v *Value) bool {
 14273  	b := v.Block
 14274  	_ = b
 14275  	typ := &b.Func.Config.Types
 14276  	_ = typ
 14277  	// match: (Rsh16Ux32 <t> x y)
 14278  	// cond:
 14279  	// result: (CSELULT (SRL <t> (ZeroExt16to64 x) (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
 14280  	for {
 14281  		t := v.Type
 14282  		_ = v.Args[1]
 14283  		x := v.Args[0]
 14284  		y := v.Args[1]
 14285  		v.reset(OpARM64CSELULT)
 14286  		v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
 14287  		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
 14288  		v1.AddArg(x)
 14289  		v0.AddArg(v1)
 14290  		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
 14291  		v2.AddArg(y)
 14292  		v0.AddArg(v2)
 14293  		v.AddArg(v0)
 14294  		v3 := b.NewValue0(v.Pos, OpConst64, t)
 14295  		v3.AuxInt = 0
 14296  		v.AddArg(v3)
 14297  		v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
 14298  		v4.AuxInt = 64
 14299  		v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
 14300  		v5.AddArg(y)
 14301  		v4.AddArg(v5)
 14302  		v.AddArg(v4)
 14303  		return true
 14304  	}
 14305  }
// rewriteValueARM64_OpRsh16Ux64_0 lowers the unsigned 16-bit right shift
// by a 64-bit count. The rules are tried in order: a constant count below
// 16 becomes a single SRLconst of the zero-extended value; a constant
// count of 16 or more produces the constant 0 (Go shift semantics); a
// variable count selects between SRL and 0 via CSELULT on count < 64.
func rewriteValueARM64_OpRsh16Ux64_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh16Ux64 x (MOVDconst [c]))
	// cond: uint64(c) < 16
	// result: (SRLconst (ZeroExt16to64 x) [c])
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(uint64(c) < 16) {
			break
		}
		v.reset(OpARM64SRLconst)
		v.AuxInt = c
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (Rsh16Ux64 _ (MOVDconst [c]))
	// cond: uint64(c) >= 16
	// result: (MOVDconst [0])
	for {
		_ = v.Args[1]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(uint64(c) >= 16) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (Rsh16Ux64 <t> x y)
	// cond:
	// result: (CSELULT (SRL <t> (ZeroExt16to64 x) y) (Const64 <t> [0]) (CMPconst [64] y))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSELULT)
		v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v0.AddArg(y)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Pos, OpConst64, t)
		v2.AuxInt = 0
		v.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = 64
		v3.AddArg(y)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueARM64_OpRsh16Ux8_0 lowers Rsh16Ux8 (unsigned right shift of a
// 16-bit value by an 8-bit count): 64-bit SRL of the zero-extended operands,
// selected against 0 via (CMPconst [64] (ZeroExt8to64 y)). Generated code:
// edit gen/ARM64.rules, not this function.
func rewriteValueARM64_OpRsh16Ux8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh16Ux8 <t> x y)
	// cond:
	// result: (CSELULT (SRL <t> (ZeroExt16to64 x) (ZeroExt8to64  y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64  y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSELULT)
		v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Pos, OpConst64, t)
		v3.AuxInt = 0
		v.AddArg(v3)
		v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v4.AuxInt = 64
		v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueARM64_OpRsh16x16_0 lowers Rsh16x16 (signed right shift): SRA of
// the sign-extended value by a count clamped via CSELULT — the zero-extended
// count if (CMPconst [64]) says it is in range, otherwise 63, so over-wide
// shifts fill with the sign bit. Generated code: edit gen/ARM64.rules, not
// this function.
func rewriteValueARM64_OpRsh16x16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh16x16 x y)
	// cond:
	// result: (SRA (SignExt16to64 x) (CSELULT <y.Type> (ZeroExt16to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y))))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpConst64, y.Type)
		v3.AuxInt = 63
		v1.AddArg(v3)
		v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v4.AuxInt = 64
		v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v1.AddArg(v4)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueARM64_OpRsh16x32_0 lowers Rsh16x32: SRA of the sign-extended
// value by (ZeroExt32to64 y) clamped to 63 via CSELULT/(CMPconst [64]).
// Generated code: edit gen/ARM64.rules, not this function.
func rewriteValueARM64_OpRsh16x32_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh16x32 x y)
	// cond:
	// result: (SRA (SignExt16to64 x) (CSELULT <y.Type> (ZeroExt32to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y))))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpConst64, y.Type)
		v3.AuxInt = 63
		v1.AddArg(v3)
		v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v4.AuxInt = 64
		v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v1.AddArg(v4)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueARM64_OpRsh16x64_0 lowers Rsh16x64. A constant count < 16 folds
// to (SRAconst [c]); a constant count >= 16 becomes (SRAconst [63]), i.e. all
// sign bits; a variable count is clamped to 63 via CSELULT/(CMPconst [64]).
// Generated code: edit gen/ARM64.rules, not this function.
func rewriteValueARM64_OpRsh16x64_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh16x64 x (MOVDconst [c]))
	// cond: uint64(c) < 16
	// result: (SRAconst (SignExt16to64 x) [c])
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(uint64(c) < 16) {
			break
		}
		v.reset(OpARM64SRAconst)
		v.AuxInt = c
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (Rsh16x64 x (MOVDconst [c]))
	// cond: uint64(c) >= 16
	// result: (SRAconst (SignExt16to64 x) [63])
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(uint64(c) >= 16) {
			break
		}
		v.reset(OpARM64SRAconst)
		v.AuxInt = 63
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (Rsh16x64 x y)
	// cond:
	// result: (SRA (SignExt16to64 x) (CSELULT <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type)
		v1.AddArg(y)
		v2 := b.NewValue0(v.Pos, OpConst64, y.Type)
		v2.AuxInt = 63
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = 64
		v3.AddArg(y)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueARM64_OpRsh16x8_0 lowers Rsh16x8: SRA of the sign-extended
// value by (ZeroExt8to64 y) clamped to 63 via CSELULT/(CMPconst [64]).
// Generated code: edit gen/ARM64.rules, not this function.
func rewriteValueARM64_OpRsh16x8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh16x8 x y)
	// cond:
	// result: (SRA (SignExt16to64 x) (CSELULT <y.Type> (ZeroExt8to64  y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt8to64  y))))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpConst64, y.Type)
		v3.AuxInt = 63
		v1.AddArg(v3)
		v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v4.AuxInt = 64
		v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v1.AddArg(v4)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueARM64_OpRsh32Ux16_0 lowers Rsh32Ux16: 64-bit SRL of the
// zero-extended operands, selected against 0 when (CMPconst [64]
// (ZeroExt16to64 y)) reports an out-of-range count. Generated code: edit
// gen/ARM64.rules, not this function.
func rewriteValueARM64_OpRsh32Ux16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh32Ux16 <t> x y)
	// cond:
	// result: (CSELULT (SRL <t> (ZeroExt32to64 x) (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSELULT)
		v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Pos, OpConst64, t)
		v3.AuxInt = 0
		v.AddArg(v3)
		v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v4.AuxInt = 64
		v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueARM64_OpRsh32Ux32_0 lowers Rsh32Ux32: 64-bit SRL of the
// zero-extended operands, selected against 0 when the zero-extended count
// fails the (CMPconst [64]) range check. Generated code: edit
// gen/ARM64.rules, not this function.
func rewriteValueARM64_OpRsh32Ux32_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh32Ux32 <t> x y)
	// cond:
	// result: (CSELULT (SRL <t> (ZeroExt32to64 x) (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSELULT)
		v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Pos, OpConst64, t)
		v3.AuxInt = 0
		v.AddArg(v3)
		v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v4.AuxInt = 64
		v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueARM64_OpRsh32Ux64_0 lowers Rsh32Ux64. A constant count < 32
// folds to SRLconst of the zero-extended value; a constant count >= 32 yields
// constant 0; a variable count uses the CSELULT-guarded SRL. Generated code:
// edit gen/ARM64.rules, not this function.
func rewriteValueARM64_OpRsh32Ux64_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh32Ux64 x (MOVDconst [c]))
	// cond: uint64(c) < 32
	// result: (SRLconst (ZeroExt32to64 x) [c])
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(uint64(c) < 32) {
			break
		}
		v.reset(OpARM64SRLconst)
		v.AuxInt = c
		v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (Rsh32Ux64 _ (MOVDconst [c]))
	// cond: uint64(c) >= 32
	// result: (MOVDconst [0])
	for {
		_ = v.Args[1]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(uint64(c) >= 32) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (Rsh32Ux64 <t> x y)
	// cond:
	// result: (CSELULT (SRL <t> (ZeroExt32to64 x) y) (Const64 <t> [0]) (CMPconst [64] y))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSELULT)
		v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v0.AddArg(y)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Pos, OpConst64, t)
		v2.AuxInt = 0
		v.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = 64
		v3.AddArg(y)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueARM64_OpRsh32Ux8_0 lowers Rsh32Ux8: 64-bit SRL of the
// zero-extended operands, selected against 0 when (CMPconst [64]
// (ZeroExt8to64 y)) reports an out-of-range count. Generated code: edit
// gen/ARM64.rules, not this function.
func rewriteValueARM64_OpRsh32Ux8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh32Ux8 <t> x y)
	// cond:
	// result: (CSELULT (SRL <t> (ZeroExt32to64 x) (ZeroExt8to64  y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64  y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSELULT)
		v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Pos, OpConst64, t)
		v3.AuxInt = 0
		v.AddArg(v3)
		v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v4.AuxInt = 64
		v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueARM64_OpRsh32x16_0 lowers Rsh32x16: SRA of the sign-extended
// value by (ZeroExt16to64 y) clamped to 63 via CSELULT/(CMPconst [64]).
// Generated code: edit gen/ARM64.rules, not this function.
func rewriteValueARM64_OpRsh32x16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh32x16 x y)
	// cond:
	// result: (SRA (SignExt32to64 x) (CSELULT <y.Type> (ZeroExt16to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y))))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpConst64, y.Type)
		v3.AuxInt = 63
		v1.AddArg(v3)
		v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v4.AuxInt = 64
		v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v1.AddArg(v4)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueARM64_OpRsh32x32_0 lowers Rsh32x32: SRA of the sign-extended
// value by (ZeroExt32to64 y) clamped to 63 via CSELULT/(CMPconst [64]).
// Generated code: edit gen/ARM64.rules, not this function.
func rewriteValueARM64_OpRsh32x32_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh32x32 x y)
	// cond:
	// result: (SRA (SignExt32to64 x) (CSELULT <y.Type> (ZeroExt32to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y))))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpConst64, y.Type)
		v3.AuxInt = 63
		v1.AddArg(v3)
		v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v4.AuxInt = 64
		v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v1.AddArg(v4)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueARM64_OpRsh32x64_0 lowers Rsh32x64. A constant count < 32 folds
// to (SRAconst [c]); a constant count >= 32 becomes (SRAconst [63]), i.e. all
// sign bits; a variable count is clamped to 63 via CSELULT/(CMPconst [64]).
// Generated code: edit gen/ARM64.rules, not this function.
func rewriteValueARM64_OpRsh32x64_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh32x64 x (MOVDconst [c]))
	// cond: uint64(c) < 32
	// result: (SRAconst (SignExt32to64 x) [c])
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(uint64(c) < 32) {
			break
		}
		v.reset(OpARM64SRAconst)
		v.AuxInt = c
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (Rsh32x64 x (MOVDconst [c]))
	// cond: uint64(c) >= 32
	// result: (SRAconst (SignExt32to64 x) [63])
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(uint64(c) >= 32) {
			break
		}
		v.reset(OpARM64SRAconst)
		v.AuxInt = 63
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (Rsh32x64 x y)
	// cond:
	// result: (SRA (SignExt32to64 x) (CSELULT <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type)
		v1.AddArg(y)
		v2 := b.NewValue0(v.Pos, OpConst64, y.Type)
		v2.AuxInt = 63
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = 64
		v3.AddArg(y)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueARM64_OpRsh32x8_0 lowers Rsh32x8: SRA of the sign-extended
// value by (ZeroExt8to64 y) clamped to 63 via CSELULT/(CMPconst [64]).
// Generated code: edit gen/ARM64.rules, not this function.
func rewriteValueARM64_OpRsh32x8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh32x8 x y)
	// cond:
	// result: (SRA (SignExt32to64 x) (CSELULT <y.Type> (ZeroExt8to64  y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt8to64  y))))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpConst64, y.Type)
		v3.AuxInt = 63
		v1.AddArg(v3)
		v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v4.AuxInt = 64
		v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v1.AddArg(v4)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueARM64_OpRsh64Ux16_0 lowers Rsh64Ux16: SRL of x (already 64-bit,
// no extension needed) by (ZeroExt16to64 y), selected against 0 when
// (CMPconst [64]) reports an out-of-range count. Generated code: edit
// gen/ARM64.rules, not this function.
func rewriteValueARM64_OpRsh64Ux16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh64Ux16 <t> x y)
	// cond:
	// result: (CSELULT (SRL <t> x (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSELULT)
		v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Pos, OpConst64, t)
		v2.AuxInt = 0
		v.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = 64
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueARM64_OpRsh64Ux32_0 lowers Rsh64Ux32: SRL of x (already 64-bit)
// by (ZeroExt32to64 y), selected against 0 when (CMPconst [64]) reports an
// out-of-range count. Generated code: edit gen/ARM64.rules, not this
// function.
func rewriteValueARM64_OpRsh64Ux32_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh64Ux32 <t> x y)
	// cond:
	// result: (CSELULT (SRL <t> x (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSELULT)
		v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Pos, OpConst64, t)
		v2.AuxInt = 0
		v.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = 64
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueARM64_OpRsh64Ux64_0 lowers Rsh64Ux64. A constant count < 64
// folds straight to (SRLconst x [c]); a constant count >= 64 yields constant
// 0; a variable count uses the CSELULT-guarded SRL. No extension is needed —
// both operands are already 64-bit. Generated code: edit gen/ARM64.rules, not
// this function.
func rewriteValueARM64_OpRsh64Ux64_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Rsh64Ux64 x (MOVDconst [c]))
	// cond: uint64(c) < 64
	// result: (SRLconst x [c])
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(uint64(c) < 64) {
			break
		}
		v.reset(OpARM64SRLconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (Rsh64Ux64 _ (MOVDconst [c]))
	// cond: uint64(c) >= 64
	// result: (MOVDconst [0])
	for {
		_ = v.Args[1]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(uint64(c) >= 64) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (Rsh64Ux64 <t> x y)
	// cond:
	// result: (CSELULT (SRL <t> x y) (Const64 <t> [0]) (CMPconst [64] y))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSELULT)
		v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpConst64, t)
		v1.AuxInt = 0
		v.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v2.AuxInt = 64
		v2.AddArg(y)
		v.AddArg(v2)
		return true
	}
}
// rewriteValueARM64_OpRsh64Ux8_0 lowers Rsh64Ux8: SRL of x (already 64-bit)
// by (ZeroExt8to64 y), selected against 0 when (CMPconst [64]) reports an
// out-of-range count. Generated code: edit gen/ARM64.rules, not this
// function.
func rewriteValueARM64_OpRsh64Ux8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh64Ux8 <t> x y)
	// cond:
	// result: (CSELULT (SRL <t> x (ZeroExt8to64  y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64  y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSELULT)
		v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Pos, OpConst64, t)
		v2.AuxInt = 0
		v.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = 64
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueARM64_OpRsh64x16_0 lowers Rsh64x16: SRA of x (already 64-bit)
// by (ZeroExt16to64 y) clamped to 63 via CSELULT/(CMPconst [64]), so
// over-wide shifts fill with the sign bit. Generated code: edit
// gen/ARM64.rules, not this function.
func rewriteValueARM64_OpRsh64x16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh64x16 x y)
	// cond:
	// result: (SRA x (CSELULT <y.Type> (ZeroExt16to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y))))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SRA)
		v.AddArg(x)
		v0 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpConst64, y.Type)
		v2.AuxInt = 63
		v0.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = 64
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v0.AddArg(v3)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpRsh64x32_0 lowers Rsh64x32: SRA of x (already 64-bit)
// by (ZeroExt32to64 y) clamped to 63 via CSELULT/(CMPconst [64]). Generated
// code: edit gen/ARM64.rules, not this function.
func rewriteValueARM64_OpRsh64x32_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh64x32 x y)
	// cond:
	// result: (SRA x (CSELULT <y.Type> (ZeroExt32to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y))))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SRA)
		v.AddArg(x)
		v0 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpConst64, y.Type)
		v2.AuxInt = 63
		v0.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = 64
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v0.AddArg(v3)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpRsh64x64_0 lowers Rsh64x64. A constant count < 64 folds
// to (SRAconst x [c]); a constant count >= 64 becomes (SRAconst x [63]), i.e.
// all sign bits; a variable count is clamped to 63 via CSELULT/(CMPconst
// [64]). Generated code: edit gen/ARM64.rules, not this function.
func rewriteValueARM64_OpRsh64x64_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Rsh64x64 x (MOVDconst [c]))
	// cond: uint64(c) < 64
	// result: (SRAconst x [c])
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(uint64(c) < 64) {
			break
		}
		v.reset(OpARM64SRAconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (Rsh64x64 x (MOVDconst [c]))
	// cond: uint64(c) >= 64
	// result: (SRAconst x [63])
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(uint64(c) >= 64) {
			break
		}
		v.reset(OpARM64SRAconst)
		v.AuxInt = 63
		v.AddArg(x)
		return true
	}
	// match: (Rsh64x64 x y)
	// cond:
	// result: (SRA x (CSELULT <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SRA)
		v.AddArg(x)
		v0 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type)
		v0.AddArg(y)
		v1 := b.NewValue0(v.Pos, OpConst64, y.Type)
		v1.AuxInt = 63
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v2.AuxInt = 64
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpRsh64x8_0 lowers Rsh64x8: SRA of x (already 64-bit) by
// (ZeroExt8to64 y) clamped to 63 via CSELULT/(CMPconst [64]). Generated code:
// edit gen/ARM64.rules, not this function.
func rewriteValueARM64_OpRsh64x8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh64x8 x y)
	// cond:
	// result: (SRA x (CSELULT <y.Type> (ZeroExt8to64  y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt8to64  y))))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SRA)
		v.AddArg(x)
		v0 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpConst64, y.Type)
		v2.AuxInt = 63
		v0.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = 64
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v0.AddArg(v3)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpRsh8Ux16_0 lowers Rsh8Ux16: 64-bit SRL of the
// zero-extended operands, selected against 0 when (CMPconst [64]
// (ZeroExt16to64 y)) reports an out-of-range count. Generated code: edit
// gen/ARM64.rules, not this function.
func rewriteValueARM64_OpRsh8Ux16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh8Ux16 <t> x y)
	// cond:
	// result: (CSELULT (SRL <t> (ZeroExt8to64 x) (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSELULT)
		v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Pos, OpConst64, t)
		v3.AuxInt = 0
		v.AddArg(v3)
		v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v4.AuxInt = 64
		v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueARM64_OpRsh8Ux32_0 lowers Rsh8Ux32: 64-bit SRL of the
// zero-extended operands, selected against 0 when (CMPconst [64]
// (ZeroExt32to64 y)) reports an out-of-range count. Generated code: edit
// gen/ARM64.rules, not this function.
func rewriteValueARM64_OpRsh8Ux32_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh8Ux32 <t> x y)
	// cond:
	// result: (CSELULT (SRL <t> (ZeroExt8to64 x) (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSELULT)
		v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Pos, OpConst64, t)
		v3.AuxInt = 0
		v.AddArg(v3)
		v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v4.AuxInt = 64
		v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueARM64_OpRsh8Ux64_0 lowers Rsh8Ux64. A constant count < 8 folds
// to SRLconst of the zero-extended value; a constant count >= 8 yields
// constant 0 (only 8 significant bits exist after zero extension); a variable
// count uses the CSELULT-guarded SRL. Generated code: edit gen/ARM64.rules,
// not this function.
func rewriteValueARM64_OpRsh8Ux64_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh8Ux64 x (MOVDconst [c]))
	// cond: uint64(c) < 8
	// result: (SRLconst (ZeroExt8to64  x) [c])
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(uint64(c) < 8) {
			break
		}
		v.reset(OpARM64SRLconst)
		v.AuxInt = c
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (Rsh8Ux64 _ (MOVDconst [c]))
	// cond: uint64(c) >= 8
	// result: (MOVDconst [0])
	for {
		_ = v.Args[1]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(uint64(c) >= 8) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (Rsh8Ux64 <t> x y)
	// cond:
	// result: (CSELULT (SRL <t> (ZeroExt8to64 x) y) (Const64 <t> [0]) (CMPconst [64] y))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSELULT)
		v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v0.AddArg(y)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Pos, OpConst64, t)
		v2.AuxInt = 0
		v.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = 64
		v3.AddArg(y)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueARM64_OpRsh8Ux8_0 lowers the generic Rsh8Ux8 op (unsigned
// right shift of an 8-bit value by an 8-bit count) to ARM64 ops. Both
// operands are zero-extended to 64 bits, and CSELULT selects 0 when the
// count is >= 64 (Go shift semantics). Always returns true.
// Code generated from gen/ARM64.rules; hand edits will be lost on regeneration.
func rewriteValueARM64_OpRsh8Ux8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh8Ux8 <t> x y)
	// cond:
	// result: (CSELULT (SRL <t> (ZeroExt8to64 x) (ZeroExt8to64  y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64  y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSELULT)
		v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Pos, OpConst64, t)
		v3.AuxInt = 0
		v.AddArg(v3)
		v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v4.AuxInt = 64
		v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueARM64_OpRsh8x16_0 lowers the generic Rsh8x16 op (signed right
// shift of an 8-bit value by a 16-bit count) to ARM64 ops. The value is
// sign-extended; the count is zero-extended and clamped to 63 via CSELULT
// when it is >= 64, so large shifts replicate the sign bit (Go shift
// semantics for signed shifts). Always returns true.
// Code generated from gen/ARM64.rules; hand edits will be lost on regeneration.
func rewriteValueARM64_OpRsh8x16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh8x16 x y)
	// cond:
	// result: (SRA (SignExt8to64 x) (CSELULT <y.Type> (ZeroExt16to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y))))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpConst64, y.Type)
		v3.AuxInt = 63
		v1.AddArg(v3)
		v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v4.AuxInt = 64
		v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v1.AddArg(v4)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueARM64_OpRsh8x32_0 lowers the generic Rsh8x32 op (signed right
// shift of an 8-bit value by a 32-bit count) to ARM64 ops. Identical in
// structure to the Rsh8x16 lowering, but zero-extends the count from 32 bits.
// Always returns true.
// Code generated from gen/ARM64.rules; hand edits will be lost on regeneration.
func rewriteValueARM64_OpRsh8x32_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh8x32 x y)
	// cond:
	// result: (SRA (SignExt8to64 x) (CSELULT <y.Type> (ZeroExt32to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y))))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpConst64, y.Type)
		v3.AuxInt = 63
		v1.AddArg(v3)
		v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v4.AuxInt = 64
		v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v1.AddArg(v4)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueARM64_OpRsh8x64_0 lowers the generic Rsh8x64 op (signed right
// shift of an 8-bit value by a 64-bit count) to ARM64 ops. Constant in-range
// counts become SRAconst; constant out-of-range counts become SRAconst by 63
// (arithmetic shift saturates to the sign bit); variable counts are clamped
// to 63 via CSELULT. Always returns true: the final rule is unconditional.
// Code generated from gen/ARM64.rules; hand edits will be lost on regeneration.
func rewriteValueARM64_OpRsh8x64_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh8x64 x (MOVDconst [c]))
	// cond: uint64(c) < 8
	// result: (SRAconst (SignExt8to64  x) [c])
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(uint64(c) < 8) {
			break
		}
		v.reset(OpARM64SRAconst)
		v.AuxInt = c
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (Rsh8x64 x (MOVDconst [c]))
	// cond: uint64(c) >= 8
	// result: (SRAconst (SignExt8to64  x) [63])
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(uint64(c) >= 8) {
			break
		}
		v.reset(OpARM64SRAconst)
		v.AuxInt = 63
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (Rsh8x64 x y)
	// cond:
	// result: (SRA (SignExt8to64 x) (CSELULT <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type)
		v1.AddArg(y)
		v2 := b.NewValue0(v.Pos, OpConst64, y.Type)
		v2.AuxInt = 63
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = 64
		v3.AddArg(y)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueARM64_OpRsh8x8_0 lowers the generic Rsh8x8 op (signed right
// shift of an 8-bit value by an 8-bit count) to ARM64 ops. Identical in
// structure to the Rsh8x16 lowering, but zero-extends the count from 8 bits.
// Always returns true.
// Code generated from gen/ARM64.rules; hand edits will be lost on regeneration.
func rewriteValueARM64_OpRsh8x8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh8x8 x y)
	// cond:
	// result: (SRA (SignExt8to64 x) (CSELULT <y.Type> (ZeroExt8to64  y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt8to64  y))))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpConst64, y.Type)
		v3.AuxInt = 63
		v1.AddArg(v3)
		v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v4.AuxInt = 64
		v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v1.AddArg(v4)
		v.AddArg(v1)
		return true
	}
}
// The six functions below lower the generic sign-extension ops to the
// corresponding ARM64 sign-extending register moves: MOVHreg (from 16 bits),
// MOVWreg (from 32 bits), and MOVBreg (from 8 bits). Each is a single
// unconditional rewrite and always returns true.
// Code generated from gen/ARM64.rules; hand edits will be lost on regeneration.

// rewriteValueARM64_OpSignExt16to32_0 lowers SignExt16to32 to MOVHreg.
func rewriteValueARM64_OpSignExt16to32_0(v *Value) bool {
	// match: (SignExt16to32 x)
	// cond:
	// result: (MOVHreg x)
	for {
		x := v.Args[0]
		v.reset(OpARM64MOVHreg)
		v.AddArg(x)
		return true
	}
}

// rewriteValueARM64_OpSignExt16to64_0 lowers SignExt16to64 to MOVHreg.
func rewriteValueARM64_OpSignExt16to64_0(v *Value) bool {
	// match: (SignExt16to64 x)
	// cond:
	// result: (MOVHreg x)
	for {
		x := v.Args[0]
		v.reset(OpARM64MOVHreg)
		v.AddArg(x)
		return true
	}
}

// rewriteValueARM64_OpSignExt32to64_0 lowers SignExt32to64 to MOVWreg.
func rewriteValueARM64_OpSignExt32to64_0(v *Value) bool {
	// match: (SignExt32to64 x)
	// cond:
	// result: (MOVWreg x)
	for {
		x := v.Args[0]
		v.reset(OpARM64MOVWreg)
		v.AddArg(x)
		return true
	}
}

// rewriteValueARM64_OpSignExt8to16_0 lowers SignExt8to16 to MOVBreg.
func rewriteValueARM64_OpSignExt8to16_0(v *Value) bool {
	// match: (SignExt8to16 x)
	// cond:
	// result: (MOVBreg x)
	for {
		x := v.Args[0]
		v.reset(OpARM64MOVBreg)
		v.AddArg(x)
		return true
	}
}

// rewriteValueARM64_OpSignExt8to32_0 lowers SignExt8to32 to MOVBreg.
func rewriteValueARM64_OpSignExt8to32_0(v *Value) bool {
	// match: (SignExt8to32 x)
	// cond:
	// result: (MOVBreg x)
	for {
		x := v.Args[0]
		v.reset(OpARM64MOVBreg)
		v.AddArg(x)
		return true
	}
}

// rewriteValueARM64_OpSignExt8to64_0 lowers SignExt8to64 to MOVBreg.
func rewriteValueARM64_OpSignExt8to64_0(v *Value) bool {
	// match: (SignExt8to64 x)
	// cond:
	// result: (MOVBreg x)
	for {
		x := v.Args[0]
		v.reset(OpARM64MOVBreg)
		v.AddArg(x)
		return true
	}
}
// rewriteValueARM64_OpSlicemask_0 lowers the generic Slicemask op to
// (SRAconst (NEG <t> x) [63]): negate then arithmetic-shift right by 63,
// producing all-ones when x > 0 and all-zeros when x == 0. Always returns
// true.
// Code generated from gen/ARM64.rules; hand edits will be lost on regeneration.
func rewriteValueARM64_OpSlicemask_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Slicemask <t> x)
	// cond:
	// result: (SRAconst (NEG <t> x) [63])
	for {
		t := v.Type
		x := v.Args[0]
		v.reset(OpARM64SRAconst)
		v.AuxInt = 63
		v0 := b.NewValue0(v.Pos, OpARM64NEG, t)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpSqrt_0 lowers the generic Sqrt op to the ARM64
// double-precision square-root instruction FSQRTD. Always returns true.
// Code generated from gen/ARM64.rules; hand edits will be lost on regeneration.
func rewriteValueARM64_OpSqrt_0(v *Value) bool {
	// match: (Sqrt x)
	// cond:
	// result: (FSQRTD x)
	for {
		x := v.Args[0]
		v.reset(OpARM64FSQRTD)
		v.AddArg(x)
		return true
	}
}
// rewriteValueARM64_OpStaticCall_0 lowers the generic StaticCall op to
// ARM64 CALLstatic, carrying over the argument width (AuxInt), call target
// (Aux), and memory argument unchanged. Always returns true.
// Code generated from gen/ARM64.rules; hand edits will be lost on regeneration.
func rewriteValueARM64_OpStaticCall_0(v *Value) bool {
	// match: (StaticCall [argwid] {target} mem)
	// cond:
	// result: (CALLstatic [argwid] {target} mem)
	for {
		argwid := v.AuxInt
		target := v.Aux
		mem := v.Args[0]
		v.reset(OpARM64CALLstatic)
		v.AuxInt = argwid
		v.Aux = target
		v.AddArg(mem)
		return true
	}
}
// rewriteValueARM64_OpStore_0 lowers the generic Store op to a sized ARM64
// store instruction, dispatching on the stored type's size and whether the
// value is a float: MOVB/MOVH/MOVW/MOVDstore for 1/2/4/8-byte integers,
// FMOVS/FMOVDstore for 4/8-byte floats. Returns false if no rule matched
// (e.g. a size these rules do not cover).
// Code generated from gen/ARM64.rules; hand edits will be lost on regeneration.
func rewriteValueARM64_OpStore_0(v *Value) bool {
	// match: (Store {t} ptr val mem)
	// cond: t.(*types.Type).Size() == 1
	// result: (MOVBstore ptr val mem)
	for {
		t := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(t.(*types.Type).Size() == 1) {
			break
		}
		v.reset(OpARM64MOVBstore)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.(*types.Type).Size() == 2
	// result: (MOVHstore ptr val mem)
	for {
		t := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(t.(*types.Type).Size() == 2) {
			break
		}
		v.reset(OpARM64MOVHstore)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.(*types.Type).Size() == 4 && !is32BitFloat(val.Type)
	// result: (MOVWstore ptr val mem)
	for {
		t := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(t.(*types.Type).Size() == 4 && !is32BitFloat(val.Type)) {
			break
		}
		v.reset(OpARM64MOVWstore)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.(*types.Type).Size() == 8 && !is64BitFloat(val.Type)
	// result: (MOVDstore ptr val mem)
	for {
		t := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(t.(*types.Type).Size() == 8 && !is64BitFloat(val.Type)) {
			break
		}
		v.reset(OpARM64MOVDstore)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.(*types.Type).Size() == 4 && is32BitFloat(val.Type)
	// result: (FMOVSstore ptr val mem)
	for {
		t := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(t.(*types.Type).Size() == 4 && is32BitFloat(val.Type)) {
			break
		}
		v.reset(OpARM64FMOVSstore)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.(*types.Type).Size() == 8 && is64BitFloat(val.Type)
	// result: (FMOVDstore ptr val mem)
	for {
		t := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(t.(*types.Type).Size() == 8 && is64BitFloat(val.Type)) {
			break
		}
		v.reset(OpARM64FMOVDstore)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	return false
}
// The seven functions below lower the generic subtraction ops to ARM64:
// integer and pointer subtraction become SUB, and float subtraction becomes
// FSUBS (32-bit) or FSUBD (64-bit). Each is a single unconditional rewrite
// and always returns true.
// Code generated from gen/ARM64.rules; hand edits will be lost on regeneration.

// rewriteValueARM64_OpSub16_0 lowers Sub16 to SUB.
func rewriteValueARM64_OpSub16_0(v *Value) bool {
	// match: (Sub16 x y)
	// cond:
	// result: (SUB x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SUB)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueARM64_OpSub32_0 lowers Sub32 to SUB.
func rewriteValueARM64_OpSub32_0(v *Value) bool {
	// match: (Sub32 x y)
	// cond:
	// result: (SUB x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SUB)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueARM64_OpSub32F_0 lowers Sub32F to the single-precision FSUBS.
func rewriteValueARM64_OpSub32F_0(v *Value) bool {
	// match: (Sub32F x y)
	// cond:
	// result: (FSUBS x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64FSUBS)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueARM64_OpSub64_0 lowers Sub64 to SUB.
func rewriteValueARM64_OpSub64_0(v *Value) bool {
	// match: (Sub64 x y)
	// cond:
	// result: (SUB x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SUB)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueARM64_OpSub64F_0 lowers Sub64F to the double-precision FSUBD.
func rewriteValueARM64_OpSub64F_0(v *Value) bool {
	// match: (Sub64F x y)
	// cond:
	// result: (FSUBD x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64FSUBD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueARM64_OpSub8_0 lowers Sub8 to SUB.
func rewriteValueARM64_OpSub8_0(v *Value) bool {
	// match: (Sub8 x y)
	// cond:
	// result: (SUB x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SUB)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueARM64_OpSubPtr_0 lowers SubPtr to SUB.
func rewriteValueARM64_OpSubPtr_0(v *Value) bool {
	// match: (SubPtr x y)
	// cond:
	// result: (SUB x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SUB)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// The six functions below lower the generic truncation ops. On ARM64 a
// truncation needs no instruction: each rule replaces the op with a Copy of
// its operand (narrower widths simply ignore the upper bits of the register).
// Each is a single unconditional rewrite and always returns true.
// Code generated from gen/ARM64.rules; hand edits will be lost on regeneration.

// rewriteValueARM64_OpTrunc16to8_0 lowers Trunc16to8 to a no-op copy.
func rewriteValueARM64_OpTrunc16to8_0(v *Value) bool {
	// match: (Trunc16to8 x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
}

// rewriteValueARM64_OpTrunc32to16_0 lowers Trunc32to16 to a no-op copy.
func rewriteValueARM64_OpTrunc32to16_0(v *Value) bool {
	// match: (Trunc32to16 x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
}

// rewriteValueARM64_OpTrunc32to8_0 lowers Trunc32to8 to a no-op copy.
func rewriteValueARM64_OpTrunc32to8_0(v *Value) bool {
	// match: (Trunc32to8 x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
}

// rewriteValueARM64_OpTrunc64to16_0 lowers Trunc64to16 to a no-op copy.
func rewriteValueARM64_OpTrunc64to16_0(v *Value) bool {
	// match: (Trunc64to16 x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
}

// rewriteValueARM64_OpTrunc64to32_0 lowers Trunc64to32 to a no-op copy.
func rewriteValueARM64_OpTrunc64to32_0(v *Value) bool {
	// match: (Trunc64to32 x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
}

// rewriteValueARM64_OpTrunc64to8_0 lowers Trunc64to8 to a no-op copy.
func rewriteValueARM64_OpTrunc64to8_0(v *Value) bool {
	// match: (Trunc64to8 x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
}
// The four functions below lower the generic exclusive-or ops of every
// integer width to the single ARM64 XOR instruction. Each is a single
// unconditional rewrite and always returns true.
// Code generated from gen/ARM64.rules; hand edits will be lost on regeneration.

// rewriteValueARM64_OpXor16_0 lowers Xor16 to XOR.
func rewriteValueARM64_OpXor16_0(v *Value) bool {
	// match: (Xor16 x y)
	// cond:
	// result: (XOR x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64XOR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueARM64_OpXor32_0 lowers Xor32 to XOR.
func rewriteValueARM64_OpXor32_0(v *Value) bool {
	// match: (Xor32 x y)
	// cond:
	// result: (XOR x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64XOR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueARM64_OpXor64_0 lowers Xor64 to XOR.
func rewriteValueARM64_OpXor64_0(v *Value) bool {
	// match: (Xor64 x y)
	// cond:
	// result: (XOR x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64XOR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueARM64_OpXor8_0 lowers Xor8 to XOR.
func rewriteValueARM64_OpXor8_0(v *Value) bool {
	// match: (Xor8 x y)
	// cond:
	// result: (XOR x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64XOR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueARM64_OpZero_0 lowers the generic Zero op (zero [AuxInt] bytes
// at ptr) for small fixed sizes 0-8 and 12: size 0 is a no-op (Copy of mem);
// power-of-two sizes 1/2/4/8 become a single sized store of constant 0;
// sizes 3/5/6/7/12 are composed from two or three stores of decreasing
// width at increasing offsets. Returns false for sizes not covered here,
// letting rewriteValueARM64_OpZero_10 try the larger-size rules.
// Code generated from gen/ARM64.rules; hand edits will be lost on regeneration.
func rewriteValueARM64_OpZero_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Zero [0] _ mem)
	// cond:
	// result: mem
	for {
		if v.AuxInt != 0 {
			break
		}
		_ = v.Args[1]
		mem := v.Args[1]
		v.reset(OpCopy)
		v.Type = mem.Type
		v.AddArg(mem)
		return true
	}
	// match: (Zero [1] ptr mem)
	// cond:
	// result: (MOVBstore ptr (MOVDconst [0]) mem)
	for {
		if v.AuxInt != 1 {
			break
		}
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		v.reset(OpARM64MOVBstore)
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	// match: (Zero [2] ptr mem)
	// cond:
	// result: (MOVHstore ptr (MOVDconst [0]) mem)
	for {
		if v.AuxInt != 2 {
			break
		}
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		v.reset(OpARM64MOVHstore)
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	// match: (Zero [4] ptr mem)
	// cond:
	// result: (MOVWstore ptr (MOVDconst [0]) mem)
	for {
		if v.AuxInt != 4 {
			break
		}
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		v.reset(OpARM64MOVWstore)
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	// match: (Zero [8] ptr mem)
	// cond:
	// result: (MOVDstore ptr (MOVDconst [0]) mem)
	for {
		if v.AuxInt != 8 {
			break
		}
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		v.reset(OpARM64MOVDstore)
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	// match: (Zero [3] ptr mem)
	// cond:
	// result: (MOVBstore [2] ptr (MOVDconst [0]) 		(MOVHstore ptr (MOVDconst [0]) mem))
	for {
		if v.AuxInt != 3 {
			break
		}
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		v.reset(OpARM64MOVBstore)
		v.AuxInt = 2
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVHstore, types.TypeMem)
		v1.AddArg(ptr)
		v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v2.AuxInt = 0
		v1.AddArg(v2)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	// match: (Zero [5] ptr mem)
	// cond:
	// result: (MOVBstore [4] ptr (MOVDconst [0]) 		(MOVWstore ptr (MOVDconst [0]) mem))
	for {
		if v.AuxInt != 5 {
			break
		}
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		v.reset(OpARM64MOVBstore)
		v.AuxInt = 4
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem)
		v1.AddArg(ptr)
		v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v2.AuxInt = 0
		v1.AddArg(v2)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	// match: (Zero [6] ptr mem)
	// cond:
	// result: (MOVHstore [4] ptr (MOVDconst [0]) 		(MOVWstore ptr (MOVDconst [0]) mem))
	for {
		if v.AuxInt != 6 {
			break
		}
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		v.reset(OpARM64MOVHstore)
		v.AuxInt = 4
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem)
		v1.AddArg(ptr)
		v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v2.AuxInt = 0
		v1.AddArg(v2)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	// match: (Zero [7] ptr mem)
	// cond:
	// result: (MOVBstore [6] ptr (MOVDconst [0]) 		(MOVHstore [4] ptr (MOVDconst [0]) 			(MOVWstore ptr (MOVDconst [0]) mem)))
	for {
		if v.AuxInt != 7 {
			break
		}
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		v.reset(OpARM64MOVBstore)
		v.AuxInt = 6
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVHstore, types.TypeMem)
		v1.AuxInt = 4
		v1.AddArg(ptr)
		v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v2.AuxInt = 0
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem)
		v3.AddArg(ptr)
		v4 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v4.AuxInt = 0
		v3.AddArg(v4)
		v3.AddArg(mem)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
	// match: (Zero [12] ptr mem)
	// cond:
	// result: (MOVWstore [8] ptr (MOVDconst [0]) 		(MOVDstore ptr (MOVDconst [0]) mem))
	for {
		if v.AuxInt != 12 {
			break
		}
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		v.reset(OpARM64MOVWstore)
		v.AuxInt = 8
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem)
		v1.AddArg(ptr)
		v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v2.AuxInt = 0
		v1.AddArg(v2)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	return false
}
// rewriteValueARM64_OpZero_10 continues Zero lowering for larger sizes:
// 16 and 24 bytes become chains of MOVDstore; a size that is not a multiple
// of 8 is split into an aligned Zero plus a small trailing Zero; multiples
// of 8 up to 8*128 use the Duff's-device zeroer (DUFFZERO) unless disabled
// by config.noDuffDevice; anything larger (or with Duff disabled) falls back
// to the LoweredZero loop. Returns false if no rule matched.
// Code generated from gen/ARM64.rules; hand edits will be lost on regeneration.
func rewriteValueARM64_OpZero_10(v *Value) bool {
	b := v.Block
	_ = b
	config := b.Func.Config
	_ = config
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Zero [16] ptr mem)
	// cond:
	// result: (MOVDstore [8] ptr (MOVDconst [0]) 		(MOVDstore ptr (MOVDconst [0]) mem))
	for {
		if v.AuxInt != 16 {
			break
		}
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		v.reset(OpARM64MOVDstore)
		v.AuxInt = 8
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem)
		v1.AddArg(ptr)
		v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v2.AuxInt = 0
		v1.AddArg(v2)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	// match: (Zero [24] ptr mem)
	// cond:
	// result: (MOVDstore [16] ptr (MOVDconst [0]) 		(MOVDstore [8] ptr (MOVDconst [0]) 			(MOVDstore ptr (MOVDconst [0]) mem)))
	for {
		if v.AuxInt != 24 {
			break
		}
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		v.reset(OpARM64MOVDstore)
		v.AuxInt = 16
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem)
		v1.AuxInt = 8
		v1.AddArg(ptr)
		v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v2.AuxInt = 0
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem)
		v3.AddArg(ptr)
		v4 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v4.AuxInt = 0
		v3.AddArg(v4)
		v3.AddArg(mem)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
	// match: (Zero [s] ptr mem)
	// cond: s%8 != 0 && s > 8
	// result: (Zero [s%8] 		(OffPtr <ptr.Type> ptr [s-s%8]) 		(Zero [s-s%8] ptr mem))
	for {
		s := v.AuxInt
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(s%8 != 0 && s > 8) {
			break
		}
		v.reset(OpZero)
		v.AuxInt = s % 8
		v0 := b.NewValue0(v.Pos, OpOffPtr, ptr.Type)
		v0.AuxInt = s - s%8
		v0.AddArg(ptr)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpZero, types.TypeMem)
		v1.AuxInt = s - s%8
		v1.AddArg(ptr)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	// match: (Zero [s] ptr mem)
	// cond: s%8 == 0 && s > 24 && s <= 8*128 	&& !config.noDuffDevice
	// result: (DUFFZERO [4 * (128 - int64(s/8))] ptr mem)
	for {
		s := v.AuxInt
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(s%8 == 0 && s > 24 && s <= 8*128 && !config.noDuffDevice) {
			break
		}
		v.reset(OpARM64DUFFZERO)
		// AuxInt is the byte offset into the Duff's-device routine:
		// 4 bytes of code per 8-byte word left un-zeroed.
		v.AuxInt = 4 * (128 - int64(s/8))
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Zero [s] ptr mem)
	// cond: s%8 == 0 && (s > 8*128 || config.noDuffDevice)
	// result: (LoweredZero 		ptr 		(ADDconst <ptr.Type> [s-8] ptr) 		mem)
	for {
		s := v.AuxInt
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(s%8 == 0 && (s > 8*128 || config.noDuffDevice)) {
			break
		}
		v.reset(OpARM64LoweredZero)
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpARM64ADDconst, ptr.Type)
		v0.AuxInt = s - 8
		v0.AddArg(ptr)
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	return false
}
// The six functions below lower the generic zero-extension ops to the
// corresponding ARM64 zero-extending register moves: MOVHUreg (from 16
// bits), MOVWUreg (from 32 bits), and MOVBUreg (from 8 bits). Each is a
// single unconditional rewrite and always returns true.
// Code generated from gen/ARM64.rules; hand edits will be lost on regeneration.

// rewriteValueARM64_OpZeroExt16to32_0 lowers ZeroExt16to32 to MOVHUreg.
func rewriteValueARM64_OpZeroExt16to32_0(v *Value) bool {
	// match: (ZeroExt16to32 x)
	// cond:
	// result: (MOVHUreg x)
	for {
		x := v.Args[0]
		v.reset(OpARM64MOVHUreg)
		v.AddArg(x)
		return true
	}
}

// rewriteValueARM64_OpZeroExt16to64_0 lowers ZeroExt16to64 to MOVHUreg.
func rewriteValueARM64_OpZeroExt16to64_0(v *Value) bool {
	// match: (ZeroExt16to64 x)
	// cond:
	// result: (MOVHUreg x)
	for {
		x := v.Args[0]
		v.reset(OpARM64MOVHUreg)
		v.AddArg(x)
		return true
	}
}

// rewriteValueARM64_OpZeroExt32to64_0 lowers ZeroExt32to64 to MOVWUreg.
func rewriteValueARM64_OpZeroExt32to64_0(v *Value) bool {
	// match: (ZeroExt32to64 x)
	// cond:
	// result: (MOVWUreg x)
	for {
		x := v.Args[0]
		v.reset(OpARM64MOVWUreg)
		v.AddArg(x)
		return true
	}
}

// rewriteValueARM64_OpZeroExt8to16_0 lowers ZeroExt8to16 to MOVBUreg.
func rewriteValueARM64_OpZeroExt8to16_0(v *Value) bool {
	// match: (ZeroExt8to16 x)
	// cond:
	// result: (MOVBUreg x)
	for {
		x := v.Args[0]
		v.reset(OpARM64MOVBUreg)
		v.AddArg(x)
		return true
	}
}

// rewriteValueARM64_OpZeroExt8to32_0 lowers ZeroExt8to32 to MOVBUreg.
func rewriteValueARM64_OpZeroExt8to32_0(v *Value) bool {
	// match: (ZeroExt8to32 x)
	// cond:
	// result: (MOVBUreg x)
	for {
		x := v.Args[0]
		v.reset(OpARM64MOVBUreg)
		v.AddArg(x)
		return true
	}
}

// rewriteValueARM64_OpZeroExt8to64_0 lowers ZeroExt8to64 to MOVBUreg.
func rewriteValueARM64_OpZeroExt8to64_0(v *Value) bool {
	// match: (ZeroExt8to64 x)
	// cond:
	// result: (MOVBUreg x)
	for {
		x := v.Args[0]
		v.reset(OpARM64MOVBUreg)
		v.AddArg(x)
		return true
	}
}
// rewriteBlockARM64 applies the ARM64 block-rewrite rules (generated from
// gen/ARM64.rules) to b. It switches on b.Kind and, within each kind, tries
// the rules in order; the first rule whose pattern matches b.Control (and
// whose condition holds) rewrites the block in place — changing b.Kind,
// resetting the control value, and swapping successors where the rule's
// result reverses the yes/no edges — and returns true. Rule order matters:
// e.g. under NZ the (MOVDconst [0]) rule is tried before the c != 0 rule.
// It returns false when no rule applies.
func rewriteBlockARM64(b *Block) bool {
	config := b.Func.Config
	_ = config
	fe := b.Func.fe
	_ = fe
	typ := &config.Types
	_ = typ
	switch b.Kind {
	case BlockARM64EQ:
		// match: (EQ (CMPconst [0] x) yes no)
		// cond:
		// result: (Z x yes no)
		for {
			v := b.Control
			if v.Op != OpARM64CMPconst {
				break
			}
			if v.AuxInt != 0 {
				break
			}
			x := v.Args[0]
			b.Kind = BlockARM64Z
			b.SetControl(x)
			return true
		}
		// match: (EQ (CMPWconst [0] x) yes no)
		// cond:
		// result: (ZW x yes no)
		for {
			v := b.Control
			if v.Op != OpARM64CMPWconst {
				break
			}
			if v.AuxInt != 0 {
				break
			}
			x := v.Args[0]
			b.Kind = BlockARM64ZW
			b.SetControl(x)
			return true
		}
		// match: (EQ (FlagEQ) yes no)
		// cond:
		// result: (First nil yes no)
		for {
			v := b.Control
			if v.Op != OpARM64FlagEQ {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			return true
		}
		// match: (EQ (FlagLT_ULT) yes no)
		// cond:
		// result: (First nil no yes)
		for {
			v := b.Control
			if v.Op != OpARM64FlagLT_ULT {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.swapSuccessors()
			return true
		}
		// match: (EQ (FlagLT_UGT) yes no)
		// cond:
		// result: (First nil no yes)
		for {
			v := b.Control
			if v.Op != OpARM64FlagLT_UGT {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.swapSuccessors()
			return true
		}
		// match: (EQ (FlagGT_ULT) yes no)
		// cond:
		// result: (First nil no yes)
		for {
			v := b.Control
			if v.Op != OpARM64FlagGT_ULT {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.swapSuccessors()
			return true
		}
		// match: (EQ (FlagGT_UGT) yes no)
		// cond:
		// result: (First nil no yes)
		for {
			v := b.Control
			if v.Op != OpARM64FlagGT_UGT {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.swapSuccessors()
			return true
		}
		// match: (EQ (InvertFlags cmp) yes no)
		// cond:
		// result: (EQ cmp yes no)
		for {
			v := b.Control
			if v.Op != OpARM64InvertFlags {
				break
			}
			cmp := v.Args[0]
			b.Kind = BlockARM64EQ
			b.SetControl(cmp)
			return true
		}
	case BlockARM64GE:
		// match: (GE (FlagEQ) yes no)
		// cond:
		// result: (First nil yes no)
		for {
			v := b.Control
			if v.Op != OpARM64FlagEQ {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			return true
		}
		// match: (GE (FlagLT_ULT) yes no)
		// cond:
		// result: (First nil no yes)
		for {
			v := b.Control
			if v.Op != OpARM64FlagLT_ULT {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.swapSuccessors()
			return true
		}
		// match: (GE (FlagLT_UGT) yes no)
		// cond:
		// result: (First nil no yes)
		for {
			v := b.Control
			if v.Op != OpARM64FlagLT_UGT {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.swapSuccessors()
			return true
		}
		// match: (GE (FlagGT_ULT) yes no)
		// cond:
		// result: (First nil yes no)
		for {
			v := b.Control
			if v.Op != OpARM64FlagGT_ULT {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			return true
		}
		// match: (GE (FlagGT_UGT) yes no)
		// cond:
		// result: (First nil yes no)
		for {
			v := b.Control
			if v.Op != OpARM64FlagGT_UGT {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			return true
		}
		// match: (GE (InvertFlags cmp) yes no)
		// cond:
		// result: (LE cmp yes no)
		for {
			v := b.Control
			if v.Op != OpARM64InvertFlags {
				break
			}
			cmp := v.Args[0]
			b.Kind = BlockARM64LE
			b.SetControl(cmp)
			return true
		}
	case BlockARM64GT:
		// match: (GT (FlagEQ) yes no)
		// cond:
		// result: (First nil no yes)
		for {
			v := b.Control
			if v.Op != OpARM64FlagEQ {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.swapSuccessors()
			return true
		}
		// match: (GT (FlagLT_ULT) yes no)
		// cond:
		// result: (First nil no yes)
		for {
			v := b.Control
			if v.Op != OpARM64FlagLT_ULT {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.swapSuccessors()
			return true
		}
		// match: (GT (FlagLT_UGT) yes no)
		// cond:
		// result: (First nil no yes)
		for {
			v := b.Control
			if v.Op != OpARM64FlagLT_UGT {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.swapSuccessors()
			return true
		}
		// match: (GT (FlagGT_ULT) yes no)
		// cond:
		// result: (First nil yes no)
		for {
			v := b.Control
			if v.Op != OpARM64FlagGT_ULT {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			return true
		}
		// match: (GT (FlagGT_UGT) yes no)
		// cond:
		// result: (First nil yes no)
		for {
			v := b.Control
			if v.Op != OpARM64FlagGT_UGT {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			return true
		}
		// match: (GT (InvertFlags cmp) yes no)
		// cond:
		// result: (LT cmp yes no)
		for {
			v := b.Control
			if v.Op != OpARM64InvertFlags {
				break
			}
			cmp := v.Args[0]
			b.Kind = BlockARM64LT
			b.SetControl(cmp)
			return true
		}
	case BlockIf:
		// match: (If (Equal cc) yes no)
		// cond:
		// result: (EQ cc yes no)
		for {
			v := b.Control
			if v.Op != OpARM64Equal {
				break
			}
			cc := v.Args[0]
			b.Kind = BlockARM64EQ
			b.SetControl(cc)
			return true
		}
		// match: (If (NotEqual cc) yes no)
		// cond:
		// result: (NE cc yes no)
		for {
			v := b.Control
			if v.Op != OpARM64NotEqual {
				break
			}
			cc := v.Args[0]
			b.Kind = BlockARM64NE
			b.SetControl(cc)
			return true
		}
		// match: (If (LessThan cc) yes no)
		// cond:
		// result: (LT cc yes no)
		for {
			v := b.Control
			if v.Op != OpARM64LessThan {
				break
			}
			cc := v.Args[0]
			b.Kind = BlockARM64LT
			b.SetControl(cc)
			return true
		}
		// match: (If (LessThanU cc) yes no)
		// cond:
		// result: (ULT cc yes no)
		for {
			v := b.Control
			if v.Op != OpARM64LessThanU {
				break
			}
			cc := v.Args[0]
			b.Kind = BlockARM64ULT
			b.SetControl(cc)
			return true
		}
		// match: (If (LessEqual cc) yes no)
		// cond:
		// result: (LE cc yes no)
		for {
			v := b.Control
			if v.Op != OpARM64LessEqual {
				break
			}
			cc := v.Args[0]
			b.Kind = BlockARM64LE
			b.SetControl(cc)
			return true
		}
		// match: (If (LessEqualU cc) yes no)
		// cond:
		// result: (ULE cc yes no)
		for {
			v := b.Control
			if v.Op != OpARM64LessEqualU {
				break
			}
			cc := v.Args[0]
			b.Kind = BlockARM64ULE
			b.SetControl(cc)
			return true
		}
		// match: (If (GreaterThan cc) yes no)
		// cond:
		// result: (GT cc yes no)
		for {
			v := b.Control
			if v.Op != OpARM64GreaterThan {
				break
			}
			cc := v.Args[0]
			b.Kind = BlockARM64GT
			b.SetControl(cc)
			return true
		}
		// match: (If (GreaterThanU cc) yes no)
		// cond:
		// result: (UGT cc yes no)
		for {
			v := b.Control
			if v.Op != OpARM64GreaterThanU {
				break
			}
			cc := v.Args[0]
			b.Kind = BlockARM64UGT
			b.SetControl(cc)
			return true
		}
		// match: (If (GreaterEqual cc) yes no)
		// cond:
		// result: (GE cc yes no)
		for {
			v := b.Control
			if v.Op != OpARM64GreaterEqual {
				break
			}
			cc := v.Args[0]
			b.Kind = BlockARM64GE
			b.SetControl(cc)
			return true
		}
		// match: (If (GreaterEqualU cc) yes no)
		// cond:
		// result: (UGE cc yes no)
		for {
			v := b.Control
			if v.Op != OpARM64GreaterEqualU {
				break
			}
			cc := v.Args[0]
			b.Kind = BlockARM64UGE
			b.SetControl(cc)
			return true
		}
		// match: (If cond yes no)
		// cond:
		// result: (NZ cond yes no)
		for {
			v := b.Control
			_ = v
			cond := b.Control
			b.Kind = BlockARM64NZ
			b.SetControl(cond)
			return true
		}
	case BlockARM64LE:
		// match: (LE (FlagEQ) yes no)
		// cond:
		// result: (First nil yes no)
		for {
			v := b.Control
			if v.Op != OpARM64FlagEQ {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			return true
		}
		// match: (LE (FlagLT_ULT) yes no)
		// cond:
		// result: (First nil yes no)
		for {
			v := b.Control
			if v.Op != OpARM64FlagLT_ULT {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			return true
		}
		// match: (LE (FlagLT_UGT) yes no)
		// cond:
		// result: (First nil yes no)
		for {
			v := b.Control
			if v.Op != OpARM64FlagLT_UGT {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			return true
		}
		// match: (LE (FlagGT_ULT) yes no)
		// cond:
		// result: (First nil no yes)
		for {
			v := b.Control
			if v.Op != OpARM64FlagGT_ULT {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.swapSuccessors()
			return true
		}
		// match: (LE (FlagGT_UGT) yes no)
		// cond:
		// result: (First nil no yes)
		for {
			v := b.Control
			if v.Op != OpARM64FlagGT_UGT {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.swapSuccessors()
			return true
		}
		// match: (LE (InvertFlags cmp) yes no)
		// cond:
		// result: (GE cmp yes no)
		for {
			v := b.Control
			if v.Op != OpARM64InvertFlags {
				break
			}
			cmp := v.Args[0]
			b.Kind = BlockARM64GE
			b.SetControl(cmp)
			return true
		}
	case BlockARM64LT:
		// match: (LT (FlagEQ) yes no)
		// cond:
		// result: (First nil no yes)
		for {
			v := b.Control
			if v.Op != OpARM64FlagEQ {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.swapSuccessors()
			return true
		}
		// match: (LT (FlagLT_ULT) yes no)
		// cond:
		// result: (First nil yes no)
		for {
			v := b.Control
			if v.Op != OpARM64FlagLT_ULT {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			return true
		}
		// match: (LT (FlagLT_UGT) yes no)
		// cond:
		// result: (First nil yes no)
		for {
			v := b.Control
			if v.Op != OpARM64FlagLT_UGT {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			return true
		}
		// match: (LT (FlagGT_ULT) yes no)
		// cond:
		// result: (First nil no yes)
		for {
			v := b.Control
			if v.Op != OpARM64FlagGT_ULT {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.swapSuccessors()
			return true
		}
		// match: (LT (FlagGT_UGT) yes no)
		// cond:
		// result: (First nil no yes)
		for {
			v := b.Control
			if v.Op != OpARM64FlagGT_UGT {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.swapSuccessors()
			return true
		}
		// match: (LT (InvertFlags cmp) yes no)
		// cond:
		// result: (GT cmp yes no)
		for {
			v := b.Control
			if v.Op != OpARM64InvertFlags {
				break
			}
			cmp := v.Args[0]
			b.Kind = BlockARM64GT
			b.SetControl(cmp)
			return true
		}
	case BlockARM64NE:
		// match: (NE (CMPconst [0] x) yes no)
		// cond:
		// result: (NZ x yes no)
		for {
			v := b.Control
			if v.Op != OpARM64CMPconst {
				break
			}
			if v.AuxInt != 0 {
				break
			}
			x := v.Args[0]
			b.Kind = BlockARM64NZ
			b.SetControl(x)
			return true
		}
		// match: (NE (CMPWconst [0] x) yes no)
		// cond:
		// result: (NZW x yes no)
		for {
			v := b.Control
			if v.Op != OpARM64CMPWconst {
				break
			}
			if v.AuxInt != 0 {
				break
			}
			x := v.Args[0]
			b.Kind = BlockARM64NZW
			b.SetControl(x)
			return true
		}
		// match: (NE (FlagEQ) yes no)
		// cond:
		// result: (First nil no yes)
		for {
			v := b.Control
			if v.Op != OpARM64FlagEQ {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.swapSuccessors()
			return true
		}
		// match: (NE (FlagLT_ULT) yes no)
		// cond:
		// result: (First nil yes no)
		for {
			v := b.Control
			if v.Op != OpARM64FlagLT_ULT {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			return true
		}
		// match: (NE (FlagLT_UGT) yes no)
		// cond:
		// result: (First nil yes no)
		for {
			v := b.Control
			if v.Op != OpARM64FlagLT_UGT {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			return true
		}
		// match: (NE (FlagGT_ULT) yes no)
		// cond:
		// result: (First nil yes no)
		for {
			v := b.Control
			if v.Op != OpARM64FlagGT_ULT {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			return true
		}
		// match: (NE (FlagGT_UGT) yes no)
		// cond:
		// result: (First nil yes no)
		for {
			v := b.Control
			if v.Op != OpARM64FlagGT_UGT {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			return true
		}
		// match: (NE (InvertFlags cmp) yes no)
		// cond:
		// result: (NE cmp yes no)
		for {
			v := b.Control
			if v.Op != OpARM64InvertFlags {
				break
			}
			cmp := v.Args[0]
			b.Kind = BlockARM64NE
			b.SetControl(cmp)
			return true
		}
	case BlockARM64NZ:
		// match: (NZ (Equal cc) yes no)
		// cond:
		// result: (EQ cc yes no)
		for {
			v := b.Control
			if v.Op != OpARM64Equal {
				break
			}
			cc := v.Args[0]
			b.Kind = BlockARM64EQ
			b.SetControl(cc)
			return true
		}
		// match: (NZ (NotEqual cc) yes no)
		// cond:
		// result: (NE cc yes no)
		for {
			v := b.Control
			if v.Op != OpARM64NotEqual {
				break
			}
			cc := v.Args[0]
			b.Kind = BlockARM64NE
			b.SetControl(cc)
			return true
		}
		// match: (NZ (LessThan cc) yes no)
		// cond:
		// result: (LT cc yes no)
		for {
			v := b.Control
			if v.Op != OpARM64LessThan {
				break
			}
			cc := v.Args[0]
			b.Kind = BlockARM64LT
			b.SetControl(cc)
			return true
		}
		// match: (NZ (LessThanU cc) yes no)
		// cond:
		// result: (ULT cc yes no)
		for {
			v := b.Control
			if v.Op != OpARM64LessThanU {
				break
			}
			cc := v.Args[0]
			b.Kind = BlockARM64ULT
			b.SetControl(cc)
			return true
		}
		// match: (NZ (LessEqual cc) yes no)
		// cond:
		// result: (LE cc yes no)
		for {
			v := b.Control
			if v.Op != OpARM64LessEqual {
				break
			}
			cc := v.Args[0]
			b.Kind = BlockARM64LE
			b.SetControl(cc)
			return true
		}
		// match: (NZ (LessEqualU cc) yes no)
		// cond:
		// result: (ULE cc yes no)
		for {
			v := b.Control
			if v.Op != OpARM64LessEqualU {
				break
			}
			cc := v.Args[0]
			b.Kind = BlockARM64ULE
			b.SetControl(cc)
			return true
		}
		// match: (NZ (GreaterThan cc) yes no)
		// cond:
		// result: (GT cc yes no)
		for {
			v := b.Control
			if v.Op != OpARM64GreaterThan {
				break
			}
			cc := v.Args[0]
			b.Kind = BlockARM64GT
			b.SetControl(cc)
			return true
		}
		// match: (NZ (GreaterThanU cc) yes no)
		// cond:
		// result: (UGT cc yes no)
		for {
			v := b.Control
			if v.Op != OpARM64GreaterThanU {
				break
			}
			cc := v.Args[0]
			b.Kind = BlockARM64UGT
			b.SetControl(cc)
			return true
		}
		// match: (NZ (GreaterEqual cc) yes no)
		// cond:
		// result: (GE cc yes no)
		for {
			v := b.Control
			if v.Op != OpARM64GreaterEqual {
				break
			}
			cc := v.Args[0]
			b.Kind = BlockARM64GE
			b.SetControl(cc)
			return true
		}
		// match: (NZ (GreaterEqualU cc) yes no)
		// cond:
		// result: (UGE cc yes no)
		for {
			v := b.Control
			if v.Op != OpARM64GreaterEqualU {
				break
			}
			cc := v.Args[0]
			b.Kind = BlockARM64UGE
			b.SetControl(cc)
			return true
		}
		// match: (NZ (MOVDconst [0]) yes no)
		// cond:
		// result: (First nil no yes)
		for {
			v := b.Control
			if v.Op != OpARM64MOVDconst {
				break
			}
			if v.AuxInt != 0 {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.swapSuccessors()
			return true
		}
		// match: (NZ (MOVDconst [c]) yes no)
		// cond: c != 0
		// result: (First nil yes no)
		for {
			v := b.Control
			if v.Op != OpARM64MOVDconst {
				break
			}
			c := v.AuxInt
			if !(c != 0) {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			return true
		}
	case BlockARM64NZW:
		// match: (NZW (MOVDconst [c]) yes no)
		// cond: int32(c) == 0
		// result: (First nil no yes)
		for {
			v := b.Control
			if v.Op != OpARM64MOVDconst {
				break
			}
			c := v.AuxInt
			if !(int32(c) == 0) {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.swapSuccessors()
			return true
		}
		// match: (NZW (MOVDconst [c]) yes no)
		// cond: int32(c) != 0
		// result: (First nil yes no)
		for {
			v := b.Control
			if v.Op != OpARM64MOVDconst {
				break
			}
			c := v.AuxInt
			if !(int32(c) != 0) {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			return true
		}
	case BlockARM64UGE:
		// match: (UGE (FlagEQ) yes no)
		// cond:
		// result: (First nil yes no)
		for {
			v := b.Control
			if v.Op != OpARM64FlagEQ {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			return true
		}
		// match: (UGE (FlagLT_ULT) yes no)
		// cond:
		// result: (First nil no yes)
		for {
			v := b.Control
			if v.Op != OpARM64FlagLT_ULT {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.swapSuccessors()
			return true
		}
		// match: (UGE (FlagLT_UGT) yes no)
		// cond:
		// result: (First nil yes no)
		for {
			v := b.Control
			if v.Op != OpARM64FlagLT_UGT {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			return true
		}
		// match: (UGE (FlagGT_ULT) yes no)
		// cond:
		// result: (First nil no yes)
		for {
			v := b.Control
			if v.Op != OpARM64FlagGT_ULT {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.swapSuccessors()
			return true
		}
		// match: (UGE (FlagGT_UGT) yes no)
		// cond:
		// result: (First nil yes no)
		for {
			v := b.Control
			if v.Op != OpARM64FlagGT_UGT {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			return true
		}
		// match: (UGE (InvertFlags cmp) yes no)
		// cond:
		// result: (ULE cmp yes no)
		for {
			v := b.Control
			if v.Op != OpARM64InvertFlags {
				break
			}
			cmp := v.Args[0]
			b.Kind = BlockARM64ULE
			b.SetControl(cmp)
			return true
		}
	case BlockARM64UGT:
		// match: (UGT (FlagEQ) yes no)
		// cond:
		// result: (First nil no yes)
		for {
			v := b.Control
			if v.Op != OpARM64FlagEQ {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.swapSuccessors()
			return true
		}
		// match: (UGT (FlagLT_ULT) yes no)
		// cond:
		// result: (First nil no yes)
		for {
			v := b.Control
			if v.Op != OpARM64FlagLT_ULT {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.swapSuccessors()
			return true
		}
		// match: (UGT (FlagLT_UGT) yes no)
		// cond:
		// result: (First nil yes no)
		for {
			v := b.Control
			if v.Op != OpARM64FlagLT_UGT {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			return true
		}
		// match: (UGT (FlagGT_ULT) yes no)
		// cond:
		// result: (First nil no yes)
		for {
			v := b.Control
			if v.Op != OpARM64FlagGT_ULT {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.swapSuccessors()
			return true
		}
		// match: (UGT (FlagGT_UGT) yes no)
		// cond:
		// result: (First nil yes no)
		for {
			v := b.Control
			if v.Op != OpARM64FlagGT_UGT {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			return true
		}
		// match: (UGT (InvertFlags cmp) yes no)
		// cond:
		// result: (ULT cmp yes no)
		for {
			v := b.Control
			if v.Op != OpARM64InvertFlags {
				break
			}
			cmp := v.Args[0]
			b.Kind = BlockARM64ULT
			b.SetControl(cmp)
			return true
		}
	case BlockARM64ULE:
		// match: (ULE (FlagEQ) yes no)
		// cond:
		// result: (First nil yes no)
		for {
			v := b.Control
			if v.Op != OpARM64FlagEQ {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			return true
		}
		// match: (ULE (FlagLT_ULT) yes no)
		// cond:
		// result: (First nil yes no)
		for {
			v := b.Control
			if v.Op != OpARM64FlagLT_ULT {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			return true
		}
		// match: (ULE (FlagLT_UGT) yes no)
		// cond:
		// result: (First nil no yes)
		for {
			v := b.Control
			if v.Op != OpARM64FlagLT_UGT {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.swapSuccessors()
			return true
		}
		// match: (ULE (FlagGT_ULT) yes no)
		// cond:
		// result: (First nil yes no)
		for {
			v := b.Control
			if v.Op != OpARM64FlagGT_ULT {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			return true
		}
		// match: (ULE (FlagGT_UGT) yes no)
		// cond:
		// result: (First nil no yes)
		for {
			v := b.Control
			if v.Op != OpARM64FlagGT_UGT {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.swapSuccessors()
			return true
		}
		// match: (ULE (InvertFlags cmp) yes no)
		// cond:
		// result: (UGE cmp yes no)
		for {
			v := b.Control
			if v.Op != OpARM64InvertFlags {
				break
			}
			cmp := v.Args[0]
			b.Kind = BlockARM64UGE
			b.SetControl(cmp)
			return true
		}
	case BlockARM64ULT:
		// match: (ULT (FlagEQ) yes no)
		// cond:
		// result: (First nil no yes)
		for {
			v := b.Control
			if v.Op != OpARM64FlagEQ {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.swapSuccessors()
			return true
		}
		// match: (ULT (FlagLT_ULT) yes no)
		// cond:
		// result: (First nil yes no)
		for {
			v := b.Control
			if v.Op != OpARM64FlagLT_ULT {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			return true
		}
		// match: (ULT (FlagLT_UGT) yes no)
		// cond:
		// result: (First nil no yes)
		for {
			v := b.Control
			if v.Op != OpARM64FlagLT_UGT {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.swapSuccessors()
			return true
		}
		// match: (ULT (FlagGT_ULT) yes no)
		// cond:
		// result: (First nil yes no)
		for {
			v := b.Control
			if v.Op != OpARM64FlagGT_ULT {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			return true
		}
		// match: (ULT (FlagGT_UGT) yes no)
		// cond:
		// result: (First nil no yes)
		for {
			v := b.Control
			if v.Op != OpARM64FlagGT_UGT {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.swapSuccessors()
			return true
		}
		// match: (ULT (InvertFlags cmp) yes no)
		// cond:
		// result: (UGT cmp yes no)
		for {
			v := b.Control
			if v.Op != OpARM64InvertFlags {
				break
			}
			cmp := v.Args[0]
			b.Kind = BlockARM64UGT
			b.SetControl(cmp)
			return true
		}
	case BlockARM64Z:
		// match: (Z (MOVDconst [0]) yes no)
		// cond:
		// result: (First nil yes no)
		for {
			v := b.Control
			if v.Op != OpARM64MOVDconst {
				break
			}
			if v.AuxInt != 0 {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			return true
		}
		// match: (Z (MOVDconst [c]) yes no)
		// cond: c != 0
		// result: (First nil no yes)
		for {
			v := b.Control
			if v.Op != OpARM64MOVDconst {
				break
			}
			c := v.AuxInt
			if !(c != 0) {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.swapSuccessors()
			return true
		}
	case BlockARM64ZW:
		// match: (ZW (MOVDconst [c]) yes no)
		// cond: int32(c) == 0
		// result: (First nil yes no)
		for {
			v := b.Control
			if v.Op != OpARM64MOVDconst {
				break
			}
			c := v.AuxInt
			if !(int32(c) == 0) {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			return true
		}
		// match: (ZW (MOVDconst [c]) yes no)
		// cond: int32(c) != 0
		// result: (First nil no yes)
		for {
			v := b.Control
			if v.Op != OpARM64MOVDconst {
				break
			}
			c := v.AuxInt
			if !(int32(c) != 0) {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.swapSuccessors()
			return true
		}
	}
	return false
}