github.com/bir3/gocompiler@v0.3.205/src/cmd/compile/internal/ssa/rewriteARM64.go (about)

     1  // Code generated from _gen/ARM64.rules; DO NOT EDIT.
     2  // generated with: cd _gen; go run .
     3  
     4  package ssa
     5  
     6  import "github.com/bir3/gocompiler/src/cmd/compile/internal/types"
     7  
     8  func rewriteValueARM64(v *Value) bool {
     9  	switch v.Op {
    10  	case OpARM64ADCSflags:
    11  		return rewriteValueARM64_OpARM64ADCSflags(v)
    12  	case OpARM64ADD:
    13  		return rewriteValueARM64_OpARM64ADD(v)
    14  	case OpARM64ADDSflags:
    15  		return rewriteValueARM64_OpARM64ADDSflags(v)
    16  	case OpARM64ADDconst:
    17  		return rewriteValueARM64_OpARM64ADDconst(v)
    18  	case OpARM64ADDshiftLL:
    19  		return rewriteValueARM64_OpARM64ADDshiftLL(v)
    20  	case OpARM64ADDshiftRA:
    21  		return rewriteValueARM64_OpARM64ADDshiftRA(v)
    22  	case OpARM64ADDshiftRL:
    23  		return rewriteValueARM64_OpARM64ADDshiftRL(v)
    24  	case OpARM64AND:
    25  		return rewriteValueARM64_OpARM64AND(v)
    26  	case OpARM64ANDconst:
    27  		return rewriteValueARM64_OpARM64ANDconst(v)
    28  	case OpARM64ANDshiftLL:
    29  		return rewriteValueARM64_OpARM64ANDshiftLL(v)
    30  	case OpARM64ANDshiftRA:
    31  		return rewriteValueARM64_OpARM64ANDshiftRA(v)
    32  	case OpARM64ANDshiftRL:
    33  		return rewriteValueARM64_OpARM64ANDshiftRL(v)
    34  	case OpARM64ANDshiftRO:
    35  		return rewriteValueARM64_OpARM64ANDshiftRO(v)
    36  	case OpARM64BIC:
    37  		return rewriteValueARM64_OpARM64BIC(v)
    38  	case OpARM64BICshiftLL:
    39  		return rewriteValueARM64_OpARM64BICshiftLL(v)
    40  	case OpARM64BICshiftRA:
    41  		return rewriteValueARM64_OpARM64BICshiftRA(v)
    42  	case OpARM64BICshiftRL:
    43  		return rewriteValueARM64_OpARM64BICshiftRL(v)
    44  	case OpARM64BICshiftRO:
    45  		return rewriteValueARM64_OpARM64BICshiftRO(v)
    46  	case OpARM64CMN:
    47  		return rewriteValueARM64_OpARM64CMN(v)
    48  	case OpARM64CMNW:
    49  		return rewriteValueARM64_OpARM64CMNW(v)
    50  	case OpARM64CMNWconst:
    51  		return rewriteValueARM64_OpARM64CMNWconst(v)
    52  	case OpARM64CMNconst:
    53  		return rewriteValueARM64_OpARM64CMNconst(v)
    54  	case OpARM64CMNshiftLL:
    55  		return rewriteValueARM64_OpARM64CMNshiftLL(v)
    56  	case OpARM64CMNshiftRA:
    57  		return rewriteValueARM64_OpARM64CMNshiftRA(v)
    58  	case OpARM64CMNshiftRL:
    59  		return rewriteValueARM64_OpARM64CMNshiftRL(v)
    60  	case OpARM64CMP:
    61  		return rewriteValueARM64_OpARM64CMP(v)
    62  	case OpARM64CMPW:
    63  		return rewriteValueARM64_OpARM64CMPW(v)
    64  	case OpARM64CMPWconst:
    65  		return rewriteValueARM64_OpARM64CMPWconst(v)
    66  	case OpARM64CMPconst:
    67  		return rewriteValueARM64_OpARM64CMPconst(v)
    68  	case OpARM64CMPshiftLL:
    69  		return rewriteValueARM64_OpARM64CMPshiftLL(v)
    70  	case OpARM64CMPshiftRA:
    71  		return rewriteValueARM64_OpARM64CMPshiftRA(v)
    72  	case OpARM64CMPshiftRL:
    73  		return rewriteValueARM64_OpARM64CMPshiftRL(v)
    74  	case OpARM64CSEL:
    75  		return rewriteValueARM64_OpARM64CSEL(v)
    76  	case OpARM64CSEL0:
    77  		return rewriteValueARM64_OpARM64CSEL0(v)
    78  	case OpARM64CSETM:
    79  		return rewriteValueARM64_OpARM64CSETM(v)
    80  	case OpARM64CSINC:
    81  		return rewriteValueARM64_OpARM64CSINC(v)
    82  	case OpARM64CSINV:
    83  		return rewriteValueARM64_OpARM64CSINV(v)
    84  	case OpARM64CSNEG:
    85  		return rewriteValueARM64_OpARM64CSNEG(v)
    86  	case OpARM64DIV:
    87  		return rewriteValueARM64_OpARM64DIV(v)
    88  	case OpARM64DIVW:
    89  		return rewriteValueARM64_OpARM64DIVW(v)
    90  	case OpARM64EON:
    91  		return rewriteValueARM64_OpARM64EON(v)
    92  	case OpARM64EONshiftLL:
    93  		return rewriteValueARM64_OpARM64EONshiftLL(v)
    94  	case OpARM64EONshiftRA:
    95  		return rewriteValueARM64_OpARM64EONshiftRA(v)
    96  	case OpARM64EONshiftRL:
    97  		return rewriteValueARM64_OpARM64EONshiftRL(v)
    98  	case OpARM64EONshiftRO:
    99  		return rewriteValueARM64_OpARM64EONshiftRO(v)
   100  	case OpARM64Equal:
   101  		return rewriteValueARM64_OpARM64Equal(v)
   102  	case OpARM64FADDD:
   103  		return rewriteValueARM64_OpARM64FADDD(v)
   104  	case OpARM64FADDS:
   105  		return rewriteValueARM64_OpARM64FADDS(v)
   106  	case OpARM64FCMPD:
   107  		return rewriteValueARM64_OpARM64FCMPD(v)
   108  	case OpARM64FCMPS:
   109  		return rewriteValueARM64_OpARM64FCMPS(v)
   110  	case OpARM64FMOVDfpgp:
   111  		return rewriteValueARM64_OpARM64FMOVDfpgp(v)
   112  	case OpARM64FMOVDgpfp:
   113  		return rewriteValueARM64_OpARM64FMOVDgpfp(v)
   114  	case OpARM64FMOVDload:
   115  		return rewriteValueARM64_OpARM64FMOVDload(v)
   116  	case OpARM64FMOVDloadidx:
   117  		return rewriteValueARM64_OpARM64FMOVDloadidx(v)
   118  	case OpARM64FMOVDloadidx8:
   119  		return rewriteValueARM64_OpARM64FMOVDloadidx8(v)
   120  	case OpARM64FMOVDstore:
   121  		return rewriteValueARM64_OpARM64FMOVDstore(v)
   122  	case OpARM64FMOVDstoreidx:
   123  		return rewriteValueARM64_OpARM64FMOVDstoreidx(v)
   124  	case OpARM64FMOVDstoreidx8:
   125  		return rewriteValueARM64_OpARM64FMOVDstoreidx8(v)
   126  	case OpARM64FMOVSload:
   127  		return rewriteValueARM64_OpARM64FMOVSload(v)
   128  	case OpARM64FMOVSloadidx:
   129  		return rewriteValueARM64_OpARM64FMOVSloadidx(v)
   130  	case OpARM64FMOVSloadidx4:
   131  		return rewriteValueARM64_OpARM64FMOVSloadidx4(v)
   132  	case OpARM64FMOVSstore:
   133  		return rewriteValueARM64_OpARM64FMOVSstore(v)
   134  	case OpARM64FMOVSstoreidx:
   135  		return rewriteValueARM64_OpARM64FMOVSstoreidx(v)
   136  	case OpARM64FMOVSstoreidx4:
   137  		return rewriteValueARM64_OpARM64FMOVSstoreidx4(v)
   138  	case OpARM64FMULD:
   139  		return rewriteValueARM64_OpARM64FMULD(v)
   140  	case OpARM64FMULS:
   141  		return rewriteValueARM64_OpARM64FMULS(v)
   142  	case OpARM64FNEGD:
   143  		return rewriteValueARM64_OpARM64FNEGD(v)
   144  	case OpARM64FNEGS:
   145  		return rewriteValueARM64_OpARM64FNEGS(v)
   146  	case OpARM64FNMULD:
   147  		return rewriteValueARM64_OpARM64FNMULD(v)
   148  	case OpARM64FNMULS:
   149  		return rewriteValueARM64_OpARM64FNMULS(v)
   150  	case OpARM64FSUBD:
   151  		return rewriteValueARM64_OpARM64FSUBD(v)
   152  	case OpARM64FSUBS:
   153  		return rewriteValueARM64_OpARM64FSUBS(v)
   154  	case OpARM64GreaterEqual:
   155  		return rewriteValueARM64_OpARM64GreaterEqual(v)
   156  	case OpARM64GreaterEqualF:
   157  		return rewriteValueARM64_OpARM64GreaterEqualF(v)
   158  	case OpARM64GreaterEqualU:
   159  		return rewriteValueARM64_OpARM64GreaterEqualU(v)
   160  	case OpARM64GreaterThan:
   161  		return rewriteValueARM64_OpARM64GreaterThan(v)
   162  	case OpARM64GreaterThanF:
   163  		return rewriteValueARM64_OpARM64GreaterThanF(v)
   164  	case OpARM64GreaterThanU:
   165  		return rewriteValueARM64_OpARM64GreaterThanU(v)
   166  	case OpARM64LDP:
   167  		return rewriteValueARM64_OpARM64LDP(v)
   168  	case OpARM64LessEqual:
   169  		return rewriteValueARM64_OpARM64LessEqual(v)
   170  	case OpARM64LessEqualF:
   171  		return rewriteValueARM64_OpARM64LessEqualF(v)
   172  	case OpARM64LessEqualU:
   173  		return rewriteValueARM64_OpARM64LessEqualU(v)
   174  	case OpARM64LessThan:
   175  		return rewriteValueARM64_OpARM64LessThan(v)
   176  	case OpARM64LessThanF:
   177  		return rewriteValueARM64_OpARM64LessThanF(v)
   178  	case OpARM64LessThanU:
   179  		return rewriteValueARM64_OpARM64LessThanU(v)
   180  	case OpARM64MADD:
   181  		return rewriteValueARM64_OpARM64MADD(v)
   182  	case OpARM64MADDW:
   183  		return rewriteValueARM64_OpARM64MADDW(v)
   184  	case OpARM64MNEG:
   185  		return rewriteValueARM64_OpARM64MNEG(v)
   186  	case OpARM64MNEGW:
   187  		return rewriteValueARM64_OpARM64MNEGW(v)
   188  	case OpARM64MOD:
   189  		return rewriteValueARM64_OpARM64MOD(v)
   190  	case OpARM64MODW:
   191  		return rewriteValueARM64_OpARM64MODW(v)
   192  	case OpARM64MOVBUload:
   193  		return rewriteValueARM64_OpARM64MOVBUload(v)
   194  	case OpARM64MOVBUloadidx:
   195  		return rewriteValueARM64_OpARM64MOVBUloadidx(v)
   196  	case OpARM64MOVBUreg:
   197  		return rewriteValueARM64_OpARM64MOVBUreg(v)
   198  	case OpARM64MOVBload:
   199  		return rewriteValueARM64_OpARM64MOVBload(v)
   200  	case OpARM64MOVBloadidx:
   201  		return rewriteValueARM64_OpARM64MOVBloadidx(v)
   202  	case OpARM64MOVBreg:
   203  		return rewriteValueARM64_OpARM64MOVBreg(v)
   204  	case OpARM64MOVBstore:
   205  		return rewriteValueARM64_OpARM64MOVBstore(v)
   206  	case OpARM64MOVBstoreidx:
   207  		return rewriteValueARM64_OpARM64MOVBstoreidx(v)
   208  	case OpARM64MOVBstorezero:
   209  		return rewriteValueARM64_OpARM64MOVBstorezero(v)
   210  	case OpARM64MOVBstorezeroidx:
   211  		return rewriteValueARM64_OpARM64MOVBstorezeroidx(v)
   212  	case OpARM64MOVDload:
   213  		return rewriteValueARM64_OpARM64MOVDload(v)
   214  	case OpARM64MOVDloadidx:
   215  		return rewriteValueARM64_OpARM64MOVDloadidx(v)
   216  	case OpARM64MOVDloadidx8:
   217  		return rewriteValueARM64_OpARM64MOVDloadidx8(v)
   218  	case OpARM64MOVDnop:
   219  		return rewriteValueARM64_OpARM64MOVDnop(v)
   220  	case OpARM64MOVDreg:
   221  		return rewriteValueARM64_OpARM64MOVDreg(v)
   222  	case OpARM64MOVDstore:
   223  		return rewriteValueARM64_OpARM64MOVDstore(v)
   224  	case OpARM64MOVDstoreidx:
   225  		return rewriteValueARM64_OpARM64MOVDstoreidx(v)
   226  	case OpARM64MOVDstoreidx8:
   227  		return rewriteValueARM64_OpARM64MOVDstoreidx8(v)
   228  	case OpARM64MOVDstorezero:
   229  		return rewriteValueARM64_OpARM64MOVDstorezero(v)
   230  	case OpARM64MOVDstorezeroidx:
   231  		return rewriteValueARM64_OpARM64MOVDstorezeroidx(v)
   232  	case OpARM64MOVDstorezeroidx8:
   233  		return rewriteValueARM64_OpARM64MOVDstorezeroidx8(v)
   234  	case OpARM64MOVHUload:
   235  		return rewriteValueARM64_OpARM64MOVHUload(v)
   236  	case OpARM64MOVHUloadidx:
   237  		return rewriteValueARM64_OpARM64MOVHUloadidx(v)
   238  	case OpARM64MOVHUloadidx2:
   239  		return rewriteValueARM64_OpARM64MOVHUloadidx2(v)
   240  	case OpARM64MOVHUreg:
   241  		return rewriteValueARM64_OpARM64MOVHUreg(v)
   242  	case OpARM64MOVHload:
   243  		return rewriteValueARM64_OpARM64MOVHload(v)
   244  	case OpARM64MOVHloadidx:
   245  		return rewriteValueARM64_OpARM64MOVHloadidx(v)
   246  	case OpARM64MOVHloadidx2:
   247  		return rewriteValueARM64_OpARM64MOVHloadidx2(v)
   248  	case OpARM64MOVHreg:
   249  		return rewriteValueARM64_OpARM64MOVHreg(v)
   250  	case OpARM64MOVHstore:
   251  		return rewriteValueARM64_OpARM64MOVHstore(v)
   252  	case OpARM64MOVHstoreidx:
   253  		return rewriteValueARM64_OpARM64MOVHstoreidx(v)
   254  	case OpARM64MOVHstoreidx2:
   255  		return rewriteValueARM64_OpARM64MOVHstoreidx2(v)
   256  	case OpARM64MOVHstorezero:
   257  		return rewriteValueARM64_OpARM64MOVHstorezero(v)
   258  	case OpARM64MOVHstorezeroidx:
   259  		return rewriteValueARM64_OpARM64MOVHstorezeroidx(v)
   260  	case OpARM64MOVHstorezeroidx2:
   261  		return rewriteValueARM64_OpARM64MOVHstorezeroidx2(v)
   262  	case OpARM64MOVQstorezero:
   263  		return rewriteValueARM64_OpARM64MOVQstorezero(v)
   264  	case OpARM64MOVWUload:
   265  		return rewriteValueARM64_OpARM64MOVWUload(v)
   266  	case OpARM64MOVWUloadidx:
   267  		return rewriteValueARM64_OpARM64MOVWUloadidx(v)
   268  	case OpARM64MOVWUloadidx4:
   269  		return rewriteValueARM64_OpARM64MOVWUloadidx4(v)
   270  	case OpARM64MOVWUreg:
   271  		return rewriteValueARM64_OpARM64MOVWUreg(v)
   272  	case OpARM64MOVWload:
   273  		return rewriteValueARM64_OpARM64MOVWload(v)
   274  	case OpARM64MOVWloadidx:
   275  		return rewriteValueARM64_OpARM64MOVWloadidx(v)
   276  	case OpARM64MOVWloadidx4:
   277  		return rewriteValueARM64_OpARM64MOVWloadidx4(v)
   278  	case OpARM64MOVWreg:
   279  		return rewriteValueARM64_OpARM64MOVWreg(v)
   280  	case OpARM64MOVWstore:
   281  		return rewriteValueARM64_OpARM64MOVWstore(v)
   282  	case OpARM64MOVWstoreidx:
   283  		return rewriteValueARM64_OpARM64MOVWstoreidx(v)
   284  	case OpARM64MOVWstoreidx4:
   285  		return rewriteValueARM64_OpARM64MOVWstoreidx4(v)
   286  	case OpARM64MOVWstorezero:
   287  		return rewriteValueARM64_OpARM64MOVWstorezero(v)
   288  	case OpARM64MOVWstorezeroidx:
   289  		return rewriteValueARM64_OpARM64MOVWstorezeroidx(v)
   290  	case OpARM64MOVWstorezeroidx4:
   291  		return rewriteValueARM64_OpARM64MOVWstorezeroidx4(v)
   292  	case OpARM64MSUB:
   293  		return rewriteValueARM64_OpARM64MSUB(v)
   294  	case OpARM64MSUBW:
   295  		return rewriteValueARM64_OpARM64MSUBW(v)
   296  	case OpARM64MUL:
   297  		return rewriteValueARM64_OpARM64MUL(v)
   298  	case OpARM64MULW:
   299  		return rewriteValueARM64_OpARM64MULW(v)
   300  	case OpARM64MVN:
   301  		return rewriteValueARM64_OpARM64MVN(v)
   302  	case OpARM64MVNshiftLL:
   303  		return rewriteValueARM64_OpARM64MVNshiftLL(v)
   304  	case OpARM64MVNshiftRA:
   305  		return rewriteValueARM64_OpARM64MVNshiftRA(v)
   306  	case OpARM64MVNshiftRL:
   307  		return rewriteValueARM64_OpARM64MVNshiftRL(v)
   308  	case OpARM64MVNshiftRO:
   309  		return rewriteValueARM64_OpARM64MVNshiftRO(v)
   310  	case OpARM64NEG:
   311  		return rewriteValueARM64_OpARM64NEG(v)
   312  	case OpARM64NEGshiftLL:
   313  		return rewriteValueARM64_OpARM64NEGshiftLL(v)
   314  	case OpARM64NEGshiftRA:
   315  		return rewriteValueARM64_OpARM64NEGshiftRA(v)
   316  	case OpARM64NEGshiftRL:
   317  		return rewriteValueARM64_OpARM64NEGshiftRL(v)
   318  	case OpARM64NotEqual:
   319  		return rewriteValueARM64_OpARM64NotEqual(v)
   320  	case OpARM64OR:
   321  		return rewriteValueARM64_OpARM64OR(v)
   322  	case OpARM64ORN:
   323  		return rewriteValueARM64_OpARM64ORN(v)
   324  	case OpARM64ORNshiftLL:
   325  		return rewriteValueARM64_OpARM64ORNshiftLL(v)
   326  	case OpARM64ORNshiftRA:
   327  		return rewriteValueARM64_OpARM64ORNshiftRA(v)
   328  	case OpARM64ORNshiftRL:
   329  		return rewriteValueARM64_OpARM64ORNshiftRL(v)
   330  	case OpARM64ORNshiftRO:
   331  		return rewriteValueARM64_OpARM64ORNshiftRO(v)
   332  	case OpARM64ORconst:
   333  		return rewriteValueARM64_OpARM64ORconst(v)
   334  	case OpARM64ORshiftLL:
   335  		return rewriteValueARM64_OpARM64ORshiftLL(v)
   336  	case OpARM64ORshiftRA:
   337  		return rewriteValueARM64_OpARM64ORshiftRA(v)
   338  	case OpARM64ORshiftRL:
   339  		return rewriteValueARM64_OpARM64ORshiftRL(v)
   340  	case OpARM64ORshiftRO:
   341  		return rewriteValueARM64_OpARM64ORshiftRO(v)
   342  	case OpARM64REV:
   343  		return rewriteValueARM64_OpARM64REV(v)
   344  	case OpARM64REVW:
   345  		return rewriteValueARM64_OpARM64REVW(v)
   346  	case OpARM64ROR:
   347  		return rewriteValueARM64_OpARM64ROR(v)
   348  	case OpARM64RORW:
   349  		return rewriteValueARM64_OpARM64RORW(v)
   350  	case OpARM64SBCSflags:
   351  		return rewriteValueARM64_OpARM64SBCSflags(v)
   352  	case OpARM64SLL:
   353  		return rewriteValueARM64_OpARM64SLL(v)
   354  	case OpARM64SLLconst:
   355  		return rewriteValueARM64_OpARM64SLLconst(v)
   356  	case OpARM64SRA:
   357  		return rewriteValueARM64_OpARM64SRA(v)
   358  	case OpARM64SRAconst:
   359  		return rewriteValueARM64_OpARM64SRAconst(v)
   360  	case OpARM64SRL:
   361  		return rewriteValueARM64_OpARM64SRL(v)
   362  	case OpARM64SRLconst:
   363  		return rewriteValueARM64_OpARM64SRLconst(v)
   364  	case OpARM64STP:
   365  		return rewriteValueARM64_OpARM64STP(v)
   366  	case OpARM64SUB:
   367  		return rewriteValueARM64_OpARM64SUB(v)
   368  	case OpARM64SUBconst:
   369  		return rewriteValueARM64_OpARM64SUBconst(v)
   370  	case OpARM64SUBshiftLL:
   371  		return rewriteValueARM64_OpARM64SUBshiftLL(v)
   372  	case OpARM64SUBshiftRA:
   373  		return rewriteValueARM64_OpARM64SUBshiftRA(v)
   374  	case OpARM64SUBshiftRL:
   375  		return rewriteValueARM64_OpARM64SUBshiftRL(v)
   376  	case OpARM64TST:
   377  		return rewriteValueARM64_OpARM64TST(v)
   378  	case OpARM64TSTW:
   379  		return rewriteValueARM64_OpARM64TSTW(v)
   380  	case OpARM64TSTWconst:
   381  		return rewriteValueARM64_OpARM64TSTWconst(v)
   382  	case OpARM64TSTconst:
   383  		return rewriteValueARM64_OpARM64TSTconst(v)
   384  	case OpARM64TSTshiftLL:
   385  		return rewriteValueARM64_OpARM64TSTshiftLL(v)
   386  	case OpARM64TSTshiftRA:
   387  		return rewriteValueARM64_OpARM64TSTshiftRA(v)
   388  	case OpARM64TSTshiftRL:
   389  		return rewriteValueARM64_OpARM64TSTshiftRL(v)
   390  	case OpARM64TSTshiftRO:
   391  		return rewriteValueARM64_OpARM64TSTshiftRO(v)
   392  	case OpARM64UBFIZ:
   393  		return rewriteValueARM64_OpARM64UBFIZ(v)
   394  	case OpARM64UBFX:
   395  		return rewriteValueARM64_OpARM64UBFX(v)
   396  	case OpARM64UDIV:
   397  		return rewriteValueARM64_OpARM64UDIV(v)
   398  	case OpARM64UDIVW:
   399  		return rewriteValueARM64_OpARM64UDIVW(v)
   400  	case OpARM64UMOD:
   401  		return rewriteValueARM64_OpARM64UMOD(v)
   402  	case OpARM64UMODW:
   403  		return rewriteValueARM64_OpARM64UMODW(v)
   404  	case OpARM64XOR:
   405  		return rewriteValueARM64_OpARM64XOR(v)
   406  	case OpARM64XORconst:
   407  		return rewriteValueARM64_OpARM64XORconst(v)
   408  	case OpARM64XORshiftLL:
   409  		return rewriteValueARM64_OpARM64XORshiftLL(v)
   410  	case OpARM64XORshiftRA:
   411  		return rewriteValueARM64_OpARM64XORshiftRA(v)
   412  	case OpARM64XORshiftRL:
   413  		return rewriteValueARM64_OpARM64XORshiftRL(v)
   414  	case OpARM64XORshiftRO:
   415  		return rewriteValueARM64_OpARM64XORshiftRO(v)
   416  	case OpAbs:
   417  		v.Op = OpARM64FABSD
   418  		return true
   419  	case OpAdd16:
   420  		v.Op = OpARM64ADD
   421  		return true
   422  	case OpAdd32:
   423  		v.Op = OpARM64ADD
   424  		return true
   425  	case OpAdd32F:
   426  		v.Op = OpARM64FADDS
   427  		return true
   428  	case OpAdd64:
   429  		v.Op = OpARM64ADD
   430  		return true
   431  	case OpAdd64F:
   432  		v.Op = OpARM64FADDD
   433  		return true
   434  	case OpAdd8:
   435  		v.Op = OpARM64ADD
   436  		return true
   437  	case OpAddPtr:
   438  		v.Op = OpARM64ADD
   439  		return true
   440  	case OpAddr:
   441  		return rewriteValueARM64_OpAddr(v)
   442  	case OpAnd16:
   443  		v.Op = OpARM64AND
   444  		return true
   445  	case OpAnd32:
   446  		v.Op = OpARM64AND
   447  		return true
   448  	case OpAnd64:
   449  		v.Op = OpARM64AND
   450  		return true
   451  	case OpAnd8:
   452  		v.Op = OpARM64AND
   453  		return true
   454  	case OpAndB:
   455  		v.Op = OpARM64AND
   456  		return true
   457  	case OpAtomicAdd32:
   458  		v.Op = OpARM64LoweredAtomicAdd32
   459  		return true
   460  	case OpAtomicAdd32Variant:
   461  		v.Op = OpARM64LoweredAtomicAdd32Variant
   462  		return true
   463  	case OpAtomicAdd64:
   464  		v.Op = OpARM64LoweredAtomicAdd64
   465  		return true
   466  	case OpAtomicAdd64Variant:
   467  		v.Op = OpARM64LoweredAtomicAdd64Variant
   468  		return true
   469  	case OpAtomicAnd32:
   470  		return rewriteValueARM64_OpAtomicAnd32(v)
   471  	case OpAtomicAnd32Variant:
   472  		return rewriteValueARM64_OpAtomicAnd32Variant(v)
   473  	case OpAtomicAnd8:
   474  		return rewriteValueARM64_OpAtomicAnd8(v)
   475  	case OpAtomicAnd8Variant:
   476  		return rewriteValueARM64_OpAtomicAnd8Variant(v)
   477  	case OpAtomicCompareAndSwap32:
   478  		v.Op = OpARM64LoweredAtomicCas32
   479  		return true
   480  	case OpAtomicCompareAndSwap32Variant:
   481  		v.Op = OpARM64LoweredAtomicCas32Variant
   482  		return true
   483  	case OpAtomicCompareAndSwap64:
   484  		v.Op = OpARM64LoweredAtomicCas64
   485  		return true
   486  	case OpAtomicCompareAndSwap64Variant:
   487  		v.Op = OpARM64LoweredAtomicCas64Variant
   488  		return true
   489  	case OpAtomicExchange32:
   490  		v.Op = OpARM64LoweredAtomicExchange32
   491  		return true
   492  	case OpAtomicExchange32Variant:
   493  		v.Op = OpARM64LoweredAtomicExchange32Variant
   494  		return true
   495  	case OpAtomicExchange64:
   496  		v.Op = OpARM64LoweredAtomicExchange64
   497  		return true
   498  	case OpAtomicExchange64Variant:
   499  		v.Op = OpARM64LoweredAtomicExchange64Variant
   500  		return true
   501  	case OpAtomicLoad32:
   502  		v.Op = OpARM64LDARW
   503  		return true
   504  	case OpAtomicLoad64:
   505  		v.Op = OpARM64LDAR
   506  		return true
   507  	case OpAtomicLoad8:
   508  		v.Op = OpARM64LDARB
   509  		return true
   510  	case OpAtomicLoadPtr:
   511  		v.Op = OpARM64LDAR
   512  		return true
   513  	case OpAtomicOr32:
   514  		return rewriteValueARM64_OpAtomicOr32(v)
   515  	case OpAtomicOr32Variant:
   516  		return rewriteValueARM64_OpAtomicOr32Variant(v)
   517  	case OpAtomicOr8:
   518  		return rewriteValueARM64_OpAtomicOr8(v)
   519  	case OpAtomicOr8Variant:
   520  		return rewriteValueARM64_OpAtomicOr8Variant(v)
   521  	case OpAtomicStore32:
   522  		v.Op = OpARM64STLRW
   523  		return true
   524  	case OpAtomicStore64:
   525  		v.Op = OpARM64STLR
   526  		return true
   527  	case OpAtomicStore8:
   528  		v.Op = OpARM64STLRB
   529  		return true
   530  	case OpAtomicStorePtrNoWB:
   531  		v.Op = OpARM64STLR
   532  		return true
   533  	case OpAvg64u:
   534  		return rewriteValueARM64_OpAvg64u(v)
   535  	case OpBitLen32:
   536  		return rewriteValueARM64_OpBitLen32(v)
   537  	case OpBitLen64:
   538  		return rewriteValueARM64_OpBitLen64(v)
   539  	case OpBitRev16:
   540  		return rewriteValueARM64_OpBitRev16(v)
   541  	case OpBitRev32:
   542  		v.Op = OpARM64RBITW
   543  		return true
   544  	case OpBitRev64:
   545  		v.Op = OpARM64RBIT
   546  		return true
   547  	case OpBitRev8:
   548  		return rewriteValueARM64_OpBitRev8(v)
   549  	case OpBswap32:
   550  		v.Op = OpARM64REVW
   551  		return true
   552  	case OpBswap64:
   553  		v.Op = OpARM64REV
   554  		return true
   555  	case OpCeil:
   556  		v.Op = OpARM64FRINTPD
   557  		return true
   558  	case OpClosureCall:
   559  		v.Op = OpARM64CALLclosure
   560  		return true
   561  	case OpCom16:
   562  		v.Op = OpARM64MVN
   563  		return true
   564  	case OpCom32:
   565  		v.Op = OpARM64MVN
   566  		return true
   567  	case OpCom64:
   568  		v.Op = OpARM64MVN
   569  		return true
   570  	case OpCom8:
   571  		v.Op = OpARM64MVN
   572  		return true
   573  	case OpCondSelect:
   574  		return rewriteValueARM64_OpCondSelect(v)
   575  	case OpConst16:
   576  		return rewriteValueARM64_OpConst16(v)
   577  	case OpConst32:
   578  		return rewriteValueARM64_OpConst32(v)
   579  	case OpConst32F:
   580  		return rewriteValueARM64_OpConst32F(v)
   581  	case OpConst64:
   582  		return rewriteValueARM64_OpConst64(v)
   583  	case OpConst64F:
   584  		return rewriteValueARM64_OpConst64F(v)
   585  	case OpConst8:
   586  		return rewriteValueARM64_OpConst8(v)
   587  	case OpConstBool:
   588  		return rewriteValueARM64_OpConstBool(v)
   589  	case OpConstNil:
   590  		return rewriteValueARM64_OpConstNil(v)
   591  	case OpCtz16:
   592  		return rewriteValueARM64_OpCtz16(v)
   593  	case OpCtz16NonZero:
   594  		v.Op = OpCtz32
   595  		return true
   596  	case OpCtz32:
   597  		return rewriteValueARM64_OpCtz32(v)
   598  	case OpCtz32NonZero:
   599  		v.Op = OpCtz32
   600  		return true
   601  	case OpCtz64:
   602  		return rewriteValueARM64_OpCtz64(v)
   603  	case OpCtz64NonZero:
   604  		v.Op = OpCtz64
   605  		return true
   606  	case OpCtz8:
   607  		return rewriteValueARM64_OpCtz8(v)
   608  	case OpCtz8NonZero:
   609  		v.Op = OpCtz32
   610  		return true
   611  	case OpCvt32Fto32:
   612  		v.Op = OpARM64FCVTZSSW
   613  		return true
   614  	case OpCvt32Fto32U:
   615  		v.Op = OpARM64FCVTZUSW
   616  		return true
   617  	case OpCvt32Fto64:
   618  		v.Op = OpARM64FCVTZSS
   619  		return true
   620  	case OpCvt32Fto64F:
   621  		v.Op = OpARM64FCVTSD
   622  		return true
   623  	case OpCvt32Fto64U:
   624  		v.Op = OpARM64FCVTZUS
   625  		return true
   626  	case OpCvt32Uto32F:
   627  		v.Op = OpARM64UCVTFWS
   628  		return true
   629  	case OpCvt32Uto64F:
   630  		v.Op = OpARM64UCVTFWD
   631  		return true
   632  	case OpCvt32to32F:
   633  		v.Op = OpARM64SCVTFWS
   634  		return true
   635  	case OpCvt32to64F:
   636  		v.Op = OpARM64SCVTFWD
   637  		return true
   638  	case OpCvt64Fto32:
   639  		v.Op = OpARM64FCVTZSDW
   640  		return true
   641  	case OpCvt64Fto32F:
   642  		v.Op = OpARM64FCVTDS
   643  		return true
   644  	case OpCvt64Fto32U:
   645  		v.Op = OpARM64FCVTZUDW
   646  		return true
   647  	case OpCvt64Fto64:
   648  		v.Op = OpARM64FCVTZSD
   649  		return true
   650  	case OpCvt64Fto64U:
   651  		v.Op = OpARM64FCVTZUD
   652  		return true
   653  	case OpCvt64Uto32F:
   654  		v.Op = OpARM64UCVTFS
   655  		return true
   656  	case OpCvt64Uto64F:
   657  		v.Op = OpARM64UCVTFD
   658  		return true
   659  	case OpCvt64to32F:
   660  		v.Op = OpARM64SCVTFS
   661  		return true
   662  	case OpCvt64to64F:
   663  		v.Op = OpARM64SCVTFD
   664  		return true
   665  	case OpCvtBoolToUint8:
   666  		v.Op = OpCopy
   667  		return true
   668  	case OpDiv16:
   669  		return rewriteValueARM64_OpDiv16(v)
   670  	case OpDiv16u:
   671  		return rewriteValueARM64_OpDiv16u(v)
   672  	case OpDiv32:
   673  		return rewriteValueARM64_OpDiv32(v)
   674  	case OpDiv32F:
   675  		v.Op = OpARM64FDIVS
   676  		return true
   677  	case OpDiv32u:
   678  		v.Op = OpARM64UDIVW
   679  		return true
   680  	case OpDiv64:
   681  		return rewriteValueARM64_OpDiv64(v)
   682  	case OpDiv64F:
   683  		v.Op = OpARM64FDIVD
   684  		return true
   685  	case OpDiv64u:
   686  		v.Op = OpARM64UDIV
   687  		return true
   688  	case OpDiv8:
   689  		return rewriteValueARM64_OpDiv8(v)
   690  	case OpDiv8u:
   691  		return rewriteValueARM64_OpDiv8u(v)
   692  	case OpEq16:
   693  		return rewriteValueARM64_OpEq16(v)
   694  	case OpEq32:
   695  		return rewriteValueARM64_OpEq32(v)
   696  	case OpEq32F:
   697  		return rewriteValueARM64_OpEq32F(v)
   698  	case OpEq64:
   699  		return rewriteValueARM64_OpEq64(v)
   700  	case OpEq64F:
   701  		return rewriteValueARM64_OpEq64F(v)
   702  	case OpEq8:
   703  		return rewriteValueARM64_OpEq8(v)
   704  	case OpEqB:
   705  		return rewriteValueARM64_OpEqB(v)
   706  	case OpEqPtr:
   707  		return rewriteValueARM64_OpEqPtr(v)
   708  	case OpFMA:
   709  		return rewriteValueARM64_OpFMA(v)
   710  	case OpFloor:
   711  		v.Op = OpARM64FRINTMD
   712  		return true
   713  	case OpGetCallerPC:
   714  		v.Op = OpARM64LoweredGetCallerPC
   715  		return true
   716  	case OpGetCallerSP:
   717  		v.Op = OpARM64LoweredGetCallerSP
   718  		return true
   719  	case OpGetClosurePtr:
   720  		v.Op = OpARM64LoweredGetClosurePtr
   721  		return true
   722  	case OpHmul32:
   723  		return rewriteValueARM64_OpHmul32(v)
   724  	case OpHmul32u:
   725  		return rewriteValueARM64_OpHmul32u(v)
   726  	case OpHmul64:
   727  		v.Op = OpARM64MULH
   728  		return true
   729  	case OpHmul64u:
   730  		v.Op = OpARM64UMULH
   731  		return true
   732  	case OpInterCall:
   733  		v.Op = OpARM64CALLinter
   734  		return true
   735  	case OpIsInBounds:
   736  		return rewriteValueARM64_OpIsInBounds(v)
   737  	case OpIsNonNil:
   738  		return rewriteValueARM64_OpIsNonNil(v)
   739  	case OpIsSliceInBounds:
   740  		return rewriteValueARM64_OpIsSliceInBounds(v)
   741  	case OpLeq16:
   742  		return rewriteValueARM64_OpLeq16(v)
   743  	case OpLeq16U:
   744  		return rewriteValueARM64_OpLeq16U(v)
   745  	case OpLeq32:
   746  		return rewriteValueARM64_OpLeq32(v)
   747  	case OpLeq32F:
   748  		return rewriteValueARM64_OpLeq32F(v)
   749  	case OpLeq32U:
   750  		return rewriteValueARM64_OpLeq32U(v)
   751  	case OpLeq64:
   752  		return rewriteValueARM64_OpLeq64(v)
   753  	case OpLeq64F:
   754  		return rewriteValueARM64_OpLeq64F(v)
   755  	case OpLeq64U:
   756  		return rewriteValueARM64_OpLeq64U(v)
   757  	case OpLeq8:
   758  		return rewriteValueARM64_OpLeq8(v)
   759  	case OpLeq8U:
   760  		return rewriteValueARM64_OpLeq8U(v)
   761  	case OpLess16:
   762  		return rewriteValueARM64_OpLess16(v)
   763  	case OpLess16U:
   764  		return rewriteValueARM64_OpLess16U(v)
   765  	case OpLess32:
   766  		return rewriteValueARM64_OpLess32(v)
   767  	case OpLess32F:
   768  		return rewriteValueARM64_OpLess32F(v)
   769  	case OpLess32U:
   770  		return rewriteValueARM64_OpLess32U(v)
   771  	case OpLess64:
   772  		return rewriteValueARM64_OpLess64(v)
   773  	case OpLess64F:
   774  		return rewriteValueARM64_OpLess64F(v)
   775  	case OpLess64U:
   776  		return rewriteValueARM64_OpLess64U(v)
   777  	case OpLess8:
   778  		return rewriteValueARM64_OpLess8(v)
   779  	case OpLess8U:
   780  		return rewriteValueARM64_OpLess8U(v)
   781  	case OpLoad:
   782  		return rewriteValueARM64_OpLoad(v)
   783  	case OpLocalAddr:
   784  		return rewriteValueARM64_OpLocalAddr(v)
   785  	case OpLsh16x16:
   786  		return rewriteValueARM64_OpLsh16x16(v)
   787  	case OpLsh16x32:
   788  		return rewriteValueARM64_OpLsh16x32(v)
   789  	case OpLsh16x64:
   790  		return rewriteValueARM64_OpLsh16x64(v)
   791  	case OpLsh16x8:
   792  		return rewriteValueARM64_OpLsh16x8(v)
   793  	case OpLsh32x16:
   794  		return rewriteValueARM64_OpLsh32x16(v)
   795  	case OpLsh32x32:
   796  		return rewriteValueARM64_OpLsh32x32(v)
   797  	case OpLsh32x64:
   798  		return rewriteValueARM64_OpLsh32x64(v)
   799  	case OpLsh32x8:
   800  		return rewriteValueARM64_OpLsh32x8(v)
   801  	case OpLsh64x16:
   802  		return rewriteValueARM64_OpLsh64x16(v)
   803  	case OpLsh64x32:
   804  		return rewriteValueARM64_OpLsh64x32(v)
   805  	case OpLsh64x64:
   806  		return rewriteValueARM64_OpLsh64x64(v)
   807  	case OpLsh64x8:
   808  		return rewriteValueARM64_OpLsh64x8(v)
   809  	case OpLsh8x16:
   810  		return rewriteValueARM64_OpLsh8x16(v)
   811  	case OpLsh8x32:
   812  		return rewriteValueARM64_OpLsh8x32(v)
   813  	case OpLsh8x64:
   814  		return rewriteValueARM64_OpLsh8x64(v)
   815  	case OpLsh8x8:
   816  		return rewriteValueARM64_OpLsh8x8(v)
   817  	case OpMod16:
   818  		return rewriteValueARM64_OpMod16(v)
   819  	case OpMod16u:
   820  		return rewriteValueARM64_OpMod16u(v)
   821  	case OpMod32:
   822  		return rewriteValueARM64_OpMod32(v)
   823  	case OpMod32u:
   824  		v.Op = OpARM64UMODW
   825  		return true
   826  	case OpMod64:
   827  		return rewriteValueARM64_OpMod64(v)
   828  	case OpMod64u:
   829  		v.Op = OpARM64UMOD
   830  		return true
   831  	case OpMod8:
   832  		return rewriteValueARM64_OpMod8(v)
   833  	case OpMod8u:
   834  		return rewriteValueARM64_OpMod8u(v)
   835  	case OpMove:
   836  		return rewriteValueARM64_OpMove(v)
   837  	case OpMul16:
   838  		v.Op = OpARM64MULW
   839  		return true
   840  	case OpMul32:
   841  		v.Op = OpARM64MULW
   842  		return true
   843  	case OpMul32F:
   844  		v.Op = OpARM64FMULS
   845  		return true
   846  	case OpMul64:
   847  		v.Op = OpARM64MUL
   848  		return true
   849  	case OpMul64F:
   850  		v.Op = OpARM64FMULD
   851  		return true
   852  	case OpMul8:
   853  		v.Op = OpARM64MULW
   854  		return true
   855  	case OpNeg16:
   856  		v.Op = OpARM64NEG
   857  		return true
   858  	case OpNeg32:
   859  		v.Op = OpARM64NEG
   860  		return true
   861  	case OpNeg32F:
   862  		v.Op = OpARM64FNEGS
   863  		return true
   864  	case OpNeg64:
   865  		v.Op = OpARM64NEG
   866  		return true
   867  	case OpNeg64F:
   868  		v.Op = OpARM64FNEGD
   869  		return true
   870  	case OpNeg8:
   871  		v.Op = OpARM64NEG
   872  		return true
   873  	case OpNeq16:
   874  		return rewriteValueARM64_OpNeq16(v)
   875  	case OpNeq32:
   876  		return rewriteValueARM64_OpNeq32(v)
   877  	case OpNeq32F:
   878  		return rewriteValueARM64_OpNeq32F(v)
   879  	case OpNeq64:
   880  		return rewriteValueARM64_OpNeq64(v)
   881  	case OpNeq64F:
   882  		return rewriteValueARM64_OpNeq64F(v)
   883  	case OpNeq8:
   884  		return rewriteValueARM64_OpNeq8(v)
   885  	case OpNeqB:
   886  		v.Op = OpARM64XOR
   887  		return true
   888  	case OpNeqPtr:
   889  		return rewriteValueARM64_OpNeqPtr(v)
   890  	case OpNilCheck:
   891  		v.Op = OpARM64LoweredNilCheck
   892  		return true
   893  	case OpNot:
   894  		return rewriteValueARM64_OpNot(v)
   895  	case OpOffPtr:
   896  		return rewriteValueARM64_OpOffPtr(v)
   897  	case OpOr16:
   898  		v.Op = OpARM64OR
   899  		return true
   900  	case OpOr32:
   901  		v.Op = OpARM64OR
   902  		return true
   903  	case OpOr64:
   904  		v.Op = OpARM64OR
   905  		return true
   906  	case OpOr8:
   907  		v.Op = OpARM64OR
   908  		return true
   909  	case OpOrB:
   910  		v.Op = OpARM64OR
   911  		return true
   912  	case OpPanicBounds:
   913  		return rewriteValueARM64_OpPanicBounds(v)
   914  	case OpPopCount16:
   915  		return rewriteValueARM64_OpPopCount16(v)
   916  	case OpPopCount32:
   917  		return rewriteValueARM64_OpPopCount32(v)
   918  	case OpPopCount64:
   919  		return rewriteValueARM64_OpPopCount64(v)
   920  	case OpPrefetchCache:
   921  		return rewriteValueARM64_OpPrefetchCache(v)
   922  	case OpPrefetchCacheStreamed:
   923  		return rewriteValueARM64_OpPrefetchCacheStreamed(v)
   924  	case OpPubBarrier:
   925  		return rewriteValueARM64_OpPubBarrier(v)
   926  	case OpRotateLeft16:
   927  		return rewriteValueARM64_OpRotateLeft16(v)
   928  	case OpRotateLeft32:
   929  		return rewriteValueARM64_OpRotateLeft32(v)
   930  	case OpRotateLeft64:
   931  		return rewriteValueARM64_OpRotateLeft64(v)
   932  	case OpRotateLeft8:
   933  		return rewriteValueARM64_OpRotateLeft8(v)
   934  	case OpRound:
   935  		v.Op = OpARM64FRINTAD
   936  		return true
   937  	case OpRound32F:
   938  		v.Op = OpARM64LoweredRound32F
   939  		return true
   940  	case OpRound64F:
   941  		v.Op = OpARM64LoweredRound64F
   942  		return true
   943  	case OpRoundToEven:
   944  		v.Op = OpARM64FRINTND
   945  		return true
   946  	case OpRsh16Ux16:
   947  		return rewriteValueARM64_OpRsh16Ux16(v)
   948  	case OpRsh16Ux32:
   949  		return rewriteValueARM64_OpRsh16Ux32(v)
   950  	case OpRsh16Ux64:
   951  		return rewriteValueARM64_OpRsh16Ux64(v)
   952  	case OpRsh16Ux8:
   953  		return rewriteValueARM64_OpRsh16Ux8(v)
   954  	case OpRsh16x16:
   955  		return rewriteValueARM64_OpRsh16x16(v)
   956  	case OpRsh16x32:
   957  		return rewriteValueARM64_OpRsh16x32(v)
   958  	case OpRsh16x64:
   959  		return rewriteValueARM64_OpRsh16x64(v)
   960  	case OpRsh16x8:
   961  		return rewriteValueARM64_OpRsh16x8(v)
   962  	case OpRsh32Ux16:
   963  		return rewriteValueARM64_OpRsh32Ux16(v)
   964  	case OpRsh32Ux32:
   965  		return rewriteValueARM64_OpRsh32Ux32(v)
   966  	case OpRsh32Ux64:
   967  		return rewriteValueARM64_OpRsh32Ux64(v)
   968  	case OpRsh32Ux8:
   969  		return rewriteValueARM64_OpRsh32Ux8(v)
   970  	case OpRsh32x16:
   971  		return rewriteValueARM64_OpRsh32x16(v)
   972  	case OpRsh32x32:
   973  		return rewriteValueARM64_OpRsh32x32(v)
   974  	case OpRsh32x64:
   975  		return rewriteValueARM64_OpRsh32x64(v)
   976  	case OpRsh32x8:
   977  		return rewriteValueARM64_OpRsh32x8(v)
   978  	case OpRsh64Ux16:
   979  		return rewriteValueARM64_OpRsh64Ux16(v)
   980  	case OpRsh64Ux32:
   981  		return rewriteValueARM64_OpRsh64Ux32(v)
   982  	case OpRsh64Ux64:
   983  		return rewriteValueARM64_OpRsh64Ux64(v)
   984  	case OpRsh64Ux8:
   985  		return rewriteValueARM64_OpRsh64Ux8(v)
   986  	case OpRsh64x16:
   987  		return rewriteValueARM64_OpRsh64x16(v)
   988  	case OpRsh64x32:
   989  		return rewriteValueARM64_OpRsh64x32(v)
   990  	case OpRsh64x64:
   991  		return rewriteValueARM64_OpRsh64x64(v)
   992  	case OpRsh64x8:
   993  		return rewriteValueARM64_OpRsh64x8(v)
   994  	case OpRsh8Ux16:
   995  		return rewriteValueARM64_OpRsh8Ux16(v)
   996  	case OpRsh8Ux32:
   997  		return rewriteValueARM64_OpRsh8Ux32(v)
   998  	case OpRsh8Ux64:
   999  		return rewriteValueARM64_OpRsh8Ux64(v)
  1000  	case OpRsh8Ux8:
  1001  		return rewriteValueARM64_OpRsh8Ux8(v)
  1002  	case OpRsh8x16:
  1003  		return rewriteValueARM64_OpRsh8x16(v)
  1004  	case OpRsh8x32:
  1005  		return rewriteValueARM64_OpRsh8x32(v)
  1006  	case OpRsh8x64:
  1007  		return rewriteValueARM64_OpRsh8x64(v)
  1008  	case OpRsh8x8:
  1009  		return rewriteValueARM64_OpRsh8x8(v)
  1010  	case OpSelect0:
  1011  		return rewriteValueARM64_OpSelect0(v)
  1012  	case OpSelect1:
  1013  		return rewriteValueARM64_OpSelect1(v)
  1014  	case OpSelectN:
  1015  		return rewriteValueARM64_OpSelectN(v)
  1016  	case OpSignExt16to32:
  1017  		v.Op = OpARM64MOVHreg
  1018  		return true
  1019  	case OpSignExt16to64:
  1020  		v.Op = OpARM64MOVHreg
  1021  		return true
  1022  	case OpSignExt32to64:
  1023  		v.Op = OpARM64MOVWreg
  1024  		return true
  1025  	case OpSignExt8to16:
  1026  		v.Op = OpARM64MOVBreg
  1027  		return true
  1028  	case OpSignExt8to32:
  1029  		v.Op = OpARM64MOVBreg
  1030  		return true
  1031  	case OpSignExt8to64:
  1032  		v.Op = OpARM64MOVBreg
  1033  		return true
  1034  	case OpSlicemask:
  1035  		return rewriteValueARM64_OpSlicemask(v)
  1036  	case OpSqrt:
  1037  		v.Op = OpARM64FSQRTD
  1038  		return true
  1039  	case OpSqrt32:
  1040  		v.Op = OpARM64FSQRTS
  1041  		return true
  1042  	case OpStaticCall:
  1043  		v.Op = OpARM64CALLstatic
  1044  		return true
  1045  	case OpStore:
  1046  		return rewriteValueARM64_OpStore(v)
  1047  	case OpSub16:
  1048  		v.Op = OpARM64SUB
  1049  		return true
  1050  	case OpSub32:
  1051  		v.Op = OpARM64SUB
  1052  		return true
  1053  	case OpSub32F:
  1054  		v.Op = OpARM64FSUBS
  1055  		return true
  1056  	case OpSub64:
  1057  		v.Op = OpARM64SUB
  1058  		return true
  1059  	case OpSub64F:
  1060  		v.Op = OpARM64FSUBD
  1061  		return true
  1062  	case OpSub8:
  1063  		v.Op = OpARM64SUB
  1064  		return true
  1065  	case OpSubPtr:
  1066  		v.Op = OpARM64SUB
  1067  		return true
  1068  	case OpTailCall:
  1069  		v.Op = OpARM64CALLtail
  1070  		return true
  1071  	case OpTrunc:
  1072  		v.Op = OpARM64FRINTZD
  1073  		return true
  1074  	case OpTrunc16to8:
  1075  		v.Op = OpCopy
  1076  		return true
  1077  	case OpTrunc32to16:
  1078  		v.Op = OpCopy
  1079  		return true
  1080  	case OpTrunc32to8:
  1081  		v.Op = OpCopy
  1082  		return true
  1083  	case OpTrunc64to16:
  1084  		v.Op = OpCopy
  1085  		return true
  1086  	case OpTrunc64to32:
  1087  		v.Op = OpCopy
  1088  		return true
  1089  	case OpTrunc64to8:
  1090  		v.Op = OpCopy
  1091  		return true
  1092  	case OpWB:
  1093  		v.Op = OpARM64LoweredWB
  1094  		return true
  1095  	case OpXor16:
  1096  		v.Op = OpARM64XOR
  1097  		return true
  1098  	case OpXor32:
  1099  		v.Op = OpARM64XOR
  1100  		return true
  1101  	case OpXor64:
  1102  		v.Op = OpARM64XOR
  1103  		return true
  1104  	case OpXor8:
  1105  		v.Op = OpARM64XOR
  1106  		return true
  1107  	case OpZero:
  1108  		return rewriteValueARM64_OpZero(v)
  1109  	case OpZeroExt16to32:
  1110  		v.Op = OpARM64MOVHUreg
  1111  		return true
  1112  	case OpZeroExt16to64:
  1113  		v.Op = OpARM64MOVHUreg
  1114  		return true
  1115  	case OpZeroExt32to64:
  1116  		v.Op = OpARM64MOVWUreg
  1117  		return true
  1118  	case OpZeroExt8to16:
  1119  		v.Op = OpARM64MOVBUreg
  1120  		return true
  1121  	case OpZeroExt8to32:
  1122  		v.Op = OpARM64MOVBUreg
  1123  		return true
  1124  	case OpZeroExt8to64:
  1125  		v.Op = OpARM64MOVBUreg
  1126  		return true
  1127  	}
  1128  	return false
  1129  }
// rewriteValueARM64_OpARM64ADCSflags applies the ADCSflags rewrite rules
// from _gen/ARM64.rules to v. Each `for { ... }` block below attempts one
// rule: `break` abandons that rule and falls through to the next one, and
// a successful match resets v in place and returns true. Returns false if
// no rule matched.
func rewriteValueARM64_OpARM64ADCSflags(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (ADCSflags x y (Select1 <types.TypeFlags> (ADDSconstflags [-1] (ADCzerocarry <typ.UInt64> c))))
	// result: (ADCSflags x y c)
	for {
		x := v_0
		y := v_1
		if v_2.Op != OpSelect1 || v_2.Type != types.TypeFlags {
			break
		}
		v_2_0 := v_2.Args[0]
		if v_2_0.Op != OpARM64ADDSconstflags || auxIntToInt64(v_2_0.AuxInt) != -1 {
			break
		}
		v_2_0_0 := v_2_0.Args[0]
		if v_2_0_0.Op != OpARM64ADCzerocarry || v_2_0_0.Type != typ.UInt64 {
			break
		}
		c := v_2_0_0.Args[0]
		v.reset(OpARM64ADCSflags)
		v.AddArg3(x, y, c)
		return true
	}
	// match: (ADCSflags x y (Select1 <types.TypeFlags> (ADDSconstflags [-1] (MOVDconst [0]))))
	// result: (ADDSflags x y)
	for {
		x := v_0
		y := v_1
		if v_2.Op != OpSelect1 || v_2.Type != types.TypeFlags {
			break
		}
		v_2_0 := v_2.Args[0]
		if v_2_0.Op != OpARM64ADDSconstflags || auxIntToInt64(v_2_0.AuxInt) != -1 {
			break
		}
		v_2_0_0 := v_2_0.Args[0]
		if v_2_0_0.Op != OpARM64MOVDconst || auxIntToInt64(v_2_0_0.AuxInt) != 0 {
			break
		}
		v.reset(OpARM64ADDSflags)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64ADD applies the ADD rewrite rules from
// _gen/ARM64.rules to v (constant folding into ADDconst, multiply-add
// fusion into MADD/MSUB variants, ADD-of-NEG to SUB, and folding of
// constant shifts into the shifted-operand ADD forms). Rules are tried
// in order; the first match rewrites v in place and returns true.
// Returns false if no rule matched.
//
// ADD is commutative, so each rule's inner `for _i0` loop tries both
// operand orders by swapping v_0 and v_1 on the second iteration.
// Note: clobber/clobberIfDead in the conditions have side effects
// (they mark the matched intermediate value dead), so match order matters.
func rewriteValueARM64_OpARM64ADD(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (ADD x (MOVDconst [c]))
	// result: (ADDconst [c] x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			v.reset(OpARM64ADDconst)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (ADD a l:(MUL x y))
	// cond: l.Uses==1 && clobber(l)
	// result: (MADD a x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			a := v_0
			l := v_1
			if l.Op != OpARM64MUL {
				continue
			}
			y := l.Args[1]
			x := l.Args[0]
			if !(l.Uses == 1 && clobber(l)) {
				continue
			}
			v.reset(OpARM64MADD)
			v.AddArg3(a, x, y)
			return true
		}
		break
	}
	// match: (ADD a l:(MNEG x y))
	// cond: l.Uses==1 && clobber(l)
	// result: (MSUB a x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			a := v_0
			l := v_1
			if l.Op != OpARM64MNEG {
				continue
			}
			y := l.Args[1]
			x := l.Args[0]
			if !(l.Uses == 1 && clobber(l)) {
				continue
			}
			v.reset(OpARM64MSUB)
			v.AddArg3(a, x, y)
			return true
		}
		break
	}
	// match: (ADD a l:(MULW x y))
	// cond: a.Type.Size() != 8 && l.Uses==1 && clobber(l)
	// result: (MADDW a x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			a := v_0
			l := v_1
			if l.Op != OpARM64MULW {
				continue
			}
			y := l.Args[1]
			x := l.Args[0]
			if !(a.Type.Size() != 8 && l.Uses == 1 && clobber(l)) {
				continue
			}
			v.reset(OpARM64MADDW)
			v.AddArg3(a, x, y)
			return true
		}
		break
	}
	// match: (ADD a l:(MNEGW x y))
	// cond: a.Type.Size() != 8 && l.Uses==1 && clobber(l)
	// result: (MSUBW a x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			a := v_0
			l := v_1
			if l.Op != OpARM64MNEGW {
				continue
			}
			y := l.Args[1]
			x := l.Args[0]
			if !(a.Type.Size() != 8 && l.Uses == 1 && clobber(l)) {
				continue
			}
			v.reset(OpARM64MSUBW)
			v.AddArg3(a, x, y)
			return true
		}
		break
	}
	// match: (ADD x (NEG y))
	// result: (SUB x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64NEG {
				continue
			}
			y := v_1.Args[0]
			v.reset(OpARM64SUB)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (ADD x0 x1:(SLLconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (ADDshiftLL x0 y [c])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x0 := v_0
			x1 := v_1
			if x1.Op != OpARM64SLLconst {
				continue
			}
			c := auxIntToInt64(x1.AuxInt)
			y := x1.Args[0]
			if !(clobberIfDead(x1)) {
				continue
			}
			v.reset(OpARM64ADDshiftLL)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg2(x0, y)
			return true
		}
		break
	}
	// match: (ADD x0 x1:(SRLconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (ADDshiftRL x0 y [c])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x0 := v_0
			x1 := v_1
			if x1.Op != OpARM64SRLconst {
				continue
			}
			c := auxIntToInt64(x1.AuxInt)
			y := x1.Args[0]
			if !(clobberIfDead(x1)) {
				continue
			}
			v.reset(OpARM64ADDshiftRL)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg2(x0, y)
			return true
		}
		break
	}
	// match: (ADD x0 x1:(SRAconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (ADDshiftRA x0 y [c])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x0 := v_0
			x1 := v_1
			if x1.Op != OpARM64SRAconst {
				continue
			}
			c := auxIntToInt64(x1.AuxInt)
			y := x1.Args[0]
			if !(clobberIfDead(x1)) {
				continue
			}
			v.reset(OpARM64ADDshiftRA)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg2(x0, y)
			return true
		}
		break
	}
	return false
}
// rewriteValueARM64_OpARM64ADDSflags applies the ADDSflags rewrite rule
// from _gen/ARM64.rules: an ADDSflags with a constant operand (in either
// position, as the commuting inner loop tries both orders) is folded into
// ADDSconstflags. Returns true if v was rewritten.
func rewriteValueARM64_OpARM64ADDSflags(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (ADDSflags x (MOVDconst [c]))
	// result: (ADDSconstflags [c] x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			v.reset(OpARM64ADDSconstflags)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg(x)
			return true
		}
		break
	}
	return false
}
// rewriteValueARM64_OpARM64ADDconst applies the ADDconst rewrite rules
// from _gen/ARM64.rules: folding the offset into MOVDaddr, canonicalizing
// a negative constant to SUBconst, eliminating the add-zero identity, and
// constant-folding chains of ADDconst/SUBconst/MOVDconst. Rules are tried
// in order; the first match rewrites v in place and returns true.
func rewriteValueARM64_OpARM64ADDconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (ADDconst [off1] (MOVDaddr [off2] {sym} ptr))
	// cond: is32Bit(off1+int64(off2))
	// result: (MOVDaddr [int32(off1)+off2] {sym} ptr)
	for {
		off1 := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		if !(is32Bit(off1 + int64(off2))) {
			break
		}
		v.reset(OpARM64MOVDaddr)
		v.AuxInt = int32ToAuxInt(int32(off1) + off2)
		v.Aux = symToAux(sym)
		v.AddArg(ptr)
		return true
	}
	// match: (ADDconst [c] y)
	// cond: c < 0
	// result: (SUBconst [-c] y)
	for {
		c := auxIntToInt64(v.AuxInt)
		y := v_0
		if !(c < 0) {
			break
		}
		v.reset(OpARM64SUBconst)
		v.AuxInt = int64ToAuxInt(-c)
		v.AddArg(y)
		return true
	}
	// match: (ADDconst [0] x)
	// result: x
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		x := v_0
		v.copyOf(x)
		return true
	}
	// match: (ADDconst [c] (MOVDconst [d]))
	// result: (MOVDconst [c+d])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(c + d)
		return true
	}
	// match: (ADDconst [c] (ADDconst [d] x))
	// result: (ADDconst [c+d] x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64ADDconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		v.reset(OpARM64ADDconst)
		v.AuxInt = int64ToAuxInt(c + d)
		v.AddArg(x)
		return true
	}
	// match: (ADDconst [c] (SUBconst [d] x))
	// result: (ADDconst [c-d] x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64SUBconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		v.reset(OpARM64ADDconst)
		v.AuxInt = int64ToAuxInt(c - d)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64ADDshiftLL applies the ADDshiftLL rewrite rules
// from _gen/ARM64.rules: constant folding of either operand, recognition of
// byte-swap idioms as REV16/REV16W, and recognition of shift-combine idioms
// as EXTRconst/EXTRWconst. ADDshiftLL is not commutative (the shift applies
// to the second operand), so operand order is matched literally. Rules are
// tried in order; the first match rewrites v in place and returns true.
func rewriteValueARM64_OpARM64ADDshiftLL(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (ADDshiftLL (MOVDconst [c]) x [d])
	// result: (ADDconst [c] (SLLconst <x.Type> x [d]))
	for {
		d := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_1
		v.reset(OpARM64ADDconst)
		v.AuxInt = int64ToAuxInt(c)
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
		v0.AuxInt = int64ToAuxInt(d)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (ADDshiftLL x (MOVDconst [c]) [d])
	// result: (ADDconst x [int64(uint64(c)<<uint64(d))])
	for {
		d := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64ADDconst)
		v.AuxInt = int64ToAuxInt(int64(uint64(c) << uint64(d)))
		v.AddArg(x)
		return true
	}
	// match: (ADDshiftLL <typ.UInt16> [8] (UBFX <typ.UInt16> [armBFAuxInt(8, 8)] x) x)
	// result: (REV16W x)
	for {
		if v.Type != typ.UInt16 || auxIntToInt64(v.AuxInt) != 8 || v_0.Op != OpARM64UBFX || v_0.Type != typ.UInt16 || auxIntToArm64BitField(v_0.AuxInt) != armBFAuxInt(8, 8) {
			break
		}
		x := v_0.Args[0]
		if x != v_1 {
			break
		}
		v.reset(OpARM64REV16W)
		v.AddArg(x)
		return true
	}
	// match: (ADDshiftLL [8] (UBFX [armBFAuxInt(8, 24)] (ANDconst [c1] x)) (ANDconst [c2] x))
	// cond: uint32(c1) == 0xff00ff00 && uint32(c2) == 0x00ff00ff
	// result: (REV16W x)
	for {
		if auxIntToInt64(v.AuxInt) != 8 || v_0.Op != OpARM64UBFX || auxIntToArm64BitField(v_0.AuxInt) != armBFAuxInt(8, 24) {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpARM64ANDconst {
			break
		}
		c1 := auxIntToInt64(v_0_0.AuxInt)
		x := v_0_0.Args[0]
		if v_1.Op != OpARM64ANDconst {
			break
		}
		c2 := auxIntToInt64(v_1.AuxInt)
		if x != v_1.Args[0] || !(uint32(c1) == 0xff00ff00 && uint32(c2) == 0x00ff00ff) {
			break
		}
		v.reset(OpARM64REV16W)
		v.AddArg(x)
		return true
	}
	// match: (ADDshiftLL [8] (SRLconst [8] (ANDconst [c1] x)) (ANDconst [c2] x))
	// cond: (uint64(c1) == 0xff00ff00ff00ff00 && uint64(c2) == 0x00ff00ff00ff00ff)
	// result: (REV16 x)
	for {
		if auxIntToInt64(v.AuxInt) != 8 || v_0.Op != OpARM64SRLconst || auxIntToInt64(v_0.AuxInt) != 8 {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpARM64ANDconst {
			break
		}
		c1 := auxIntToInt64(v_0_0.AuxInt)
		x := v_0_0.Args[0]
		if v_1.Op != OpARM64ANDconst {
			break
		}
		c2 := auxIntToInt64(v_1.AuxInt)
		if x != v_1.Args[0] || !(uint64(c1) == 0xff00ff00ff00ff00 && uint64(c2) == 0x00ff00ff00ff00ff) {
			break
		}
		v.reset(OpARM64REV16)
		v.AddArg(x)
		return true
	}
	// match: (ADDshiftLL [8] (SRLconst [8] (ANDconst [c1] x)) (ANDconst [c2] x))
	// cond: (uint64(c1) == 0xff00ff00 && uint64(c2) == 0x00ff00ff)
	// result: (REV16 (ANDconst <x.Type> [0xffffffff] x))
	for {
		if auxIntToInt64(v.AuxInt) != 8 || v_0.Op != OpARM64SRLconst || auxIntToInt64(v_0.AuxInt) != 8 {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpARM64ANDconst {
			break
		}
		c1 := auxIntToInt64(v_0_0.AuxInt)
		x := v_0_0.Args[0]
		if v_1.Op != OpARM64ANDconst {
			break
		}
		c2 := auxIntToInt64(v_1.AuxInt)
		if x != v_1.Args[0] || !(uint64(c1) == 0xff00ff00 && uint64(c2) == 0x00ff00ff) {
			break
		}
		v.reset(OpARM64REV16)
		v0 := b.NewValue0(v.Pos, OpARM64ANDconst, x.Type)
		v0.AuxInt = int64ToAuxInt(0xffffffff)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (ADDshiftLL [c] (SRLconst x [64-c]) x2)
	// result: (EXTRconst [64-c] x2 x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64SRLconst || auxIntToInt64(v_0.AuxInt) != 64-c {
			break
		}
		x := v_0.Args[0]
		x2 := v_1
		v.reset(OpARM64EXTRconst)
		v.AuxInt = int64ToAuxInt(64 - c)
		v.AddArg2(x2, x)
		return true
	}
	// match: (ADDshiftLL <t> [c] (UBFX [bfc] x) x2)
	// cond: c < 32 && t.Size() == 4 && bfc == armBFAuxInt(32-c, c)
	// result: (EXTRWconst [32-c] x2 x)
	for {
		t := v.Type
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64UBFX {
			break
		}
		bfc := auxIntToArm64BitField(v_0.AuxInt)
		x := v_0.Args[0]
		x2 := v_1
		if !(c < 32 && t.Size() == 4 && bfc == armBFAuxInt(32-c, c)) {
			break
		}
		v.reset(OpARM64EXTRWconst)
		v.AuxInt = int64ToAuxInt(32 - c)
		v.AddArg2(x2, x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64ADDshiftRA applies the ADDshiftRA rewrite rules
// from _gen/ARM64.rules: when either operand is a MOVDconst, the
// add-with-arithmetic-shift is folded into an ADDconst (materializing an
// explicit SRAconst when the constant is the unshifted operand). Returns
// true if v was rewritten.
func rewriteValueARM64_OpARM64ADDshiftRA(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (ADDshiftRA (MOVDconst [c]) x [d])
	// result: (ADDconst [c] (SRAconst <x.Type> x [d]))
	for {
		d := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_1
		v.reset(OpARM64ADDconst)
		v.AuxInt = int64ToAuxInt(c)
		v0 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type)
		v0.AuxInt = int64ToAuxInt(d)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (ADDshiftRA x (MOVDconst [c]) [d])
	// result: (ADDconst x [c>>uint64(d)])
	for {
		d := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64ADDconst)
		// Signed (arithmetic) shift of the constant, matching SRA semantics.
		v.AuxInt = int64ToAuxInt(c >> uint64(d))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64ADDshiftRL applies the ADDshiftRL rewrite rules
// from _gen/ARM64.rules: when either operand is a MOVDconst, the
// add-with-logical-shift-right is folded into an ADDconst (materializing an
// explicit SRLconst when the constant is the unshifted operand). Returns
// true if v was rewritten.
func rewriteValueARM64_OpARM64ADDshiftRL(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (ADDshiftRL (MOVDconst [c]) x [d])
	// result: (ADDconst [c] (SRLconst <x.Type> x [d]))
	for {
		d := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_1
		v.reset(OpARM64ADDconst)
		v.AuxInt = int64ToAuxInt(c)
		v0 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type)
		v0.AuxInt = int64ToAuxInt(d)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (ADDshiftRL x (MOVDconst [c]) [d])
	// result: (ADDconst x [int64(uint64(c)>>uint64(d))])
	for {
		d := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64ADDconst)
		// Unsigned (logical) shift of the constant, matching SRL semantics.
		v.AuxInt = int64ToAuxInt(int64(uint64(c) >> uint64(d)))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64AND applies the AND rewrite rules from
// _gen/ARM64.rules to v: constant folding into ANDconst, the x&x identity,
// AND-of-MVN to BIC, and folding of constant shifts/rotates into the
// shifted-operand AND forms. Rules are tried in order; the first match
// rewrites v in place and returns true. Returns false if no rule matched.
//
// AND is commutative, so each rule's inner `for _i0` loop tries both
// operand orders by swapping v_0 and v_1 on the second iteration.
// Note: clobberIfDead in the conditions has a side effect (it marks the
// matched shift value dead), so match order matters.
func rewriteValueARM64_OpARM64AND(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (AND x (MOVDconst [c]))
	// result: (ANDconst [c] x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			v.reset(OpARM64ANDconst)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (AND x x)
	// result: x
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (AND x (MVN y))
	// result: (BIC x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MVN {
				continue
			}
			y := v_1.Args[0]
			v.reset(OpARM64BIC)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (AND x0 x1:(SLLconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (ANDshiftLL x0 y [c])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x0 := v_0
			x1 := v_1
			if x1.Op != OpARM64SLLconst {
				continue
			}
			c := auxIntToInt64(x1.AuxInt)
			y := x1.Args[0]
			if !(clobberIfDead(x1)) {
				continue
			}
			v.reset(OpARM64ANDshiftLL)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg2(x0, y)
			return true
		}
		break
	}
	// match: (AND x0 x1:(SRLconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (ANDshiftRL x0 y [c])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x0 := v_0
			x1 := v_1
			if x1.Op != OpARM64SRLconst {
				continue
			}
			c := auxIntToInt64(x1.AuxInt)
			y := x1.Args[0]
			if !(clobberIfDead(x1)) {
				continue
			}
			v.reset(OpARM64ANDshiftRL)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg2(x0, y)
			return true
		}
		break
	}
	// match: (AND x0 x1:(SRAconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (ANDshiftRA x0 y [c])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x0 := v_0
			x1 := v_1
			if x1.Op != OpARM64SRAconst {
				continue
			}
			c := auxIntToInt64(x1.AuxInt)
			y := x1.Args[0]
			if !(clobberIfDead(x1)) {
				continue
			}
			v.reset(OpARM64ANDshiftRA)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg2(x0, y)
			return true
		}
		break
	}
	// match: (AND x0 x1:(RORconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (ANDshiftRO x0 y [c])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x0 := v_0
			x1 := v_1
			if x1.Op != OpARM64RORconst {
				continue
			}
			c := auxIntToInt64(x1.AuxInt)
			y := x1.Args[0]
			if !(clobberIfDead(x1)) {
				continue
			}
			v.reset(OpARM64ANDshiftRO)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg2(x0, y)
			return true
		}
		break
	}
	return false
}
  1843  func rewriteValueARM64_OpARM64ANDconst(v *Value) bool {
  1844  	v_0 := v.Args[0]
  1845  	// match: (ANDconst [0] _)
  1846  	// result: (MOVDconst [0])
  1847  	for {
  1848  		if auxIntToInt64(v.AuxInt) != 0 {
  1849  			break
  1850  		}
  1851  		v.reset(OpARM64MOVDconst)
  1852  		v.AuxInt = int64ToAuxInt(0)
  1853  		return true
  1854  	}
  1855  	// match: (ANDconst [-1] x)
  1856  	// result: x
  1857  	for {
  1858  		if auxIntToInt64(v.AuxInt) != -1 {
  1859  			break
  1860  		}
  1861  		x := v_0
  1862  		v.copyOf(x)
  1863  		return true
  1864  	}
  1865  	// match: (ANDconst [c] (MOVDconst [d]))
  1866  	// result: (MOVDconst [c&d])
  1867  	for {
  1868  		c := auxIntToInt64(v.AuxInt)
  1869  		if v_0.Op != OpARM64MOVDconst {
  1870  			break
  1871  		}
  1872  		d := auxIntToInt64(v_0.AuxInt)
  1873  		v.reset(OpARM64MOVDconst)
  1874  		v.AuxInt = int64ToAuxInt(c & d)
  1875  		return true
  1876  	}
  1877  	// match: (ANDconst [c] (ANDconst [d] x))
  1878  	// result: (ANDconst [c&d] x)
  1879  	for {
  1880  		c := auxIntToInt64(v.AuxInt)
  1881  		if v_0.Op != OpARM64ANDconst {
  1882  			break
  1883  		}
  1884  		d := auxIntToInt64(v_0.AuxInt)
  1885  		x := v_0.Args[0]
  1886  		v.reset(OpARM64ANDconst)
  1887  		v.AuxInt = int64ToAuxInt(c & d)
  1888  		v.AddArg(x)
  1889  		return true
  1890  	}
  1891  	// match: (ANDconst [c] (MOVWUreg x))
  1892  	// result: (ANDconst [c&(1<<32-1)] x)
  1893  	for {
  1894  		c := auxIntToInt64(v.AuxInt)
  1895  		if v_0.Op != OpARM64MOVWUreg {
  1896  			break
  1897  		}
  1898  		x := v_0.Args[0]
  1899  		v.reset(OpARM64ANDconst)
  1900  		v.AuxInt = int64ToAuxInt(c & (1<<32 - 1))
  1901  		v.AddArg(x)
  1902  		return true
  1903  	}
  1904  	// match: (ANDconst [c] (MOVHUreg x))
  1905  	// result: (ANDconst [c&(1<<16-1)] x)
  1906  	for {
  1907  		c := auxIntToInt64(v.AuxInt)
  1908  		if v_0.Op != OpARM64MOVHUreg {
  1909  			break
  1910  		}
  1911  		x := v_0.Args[0]
  1912  		v.reset(OpARM64ANDconst)
  1913  		v.AuxInt = int64ToAuxInt(c & (1<<16 - 1))
  1914  		v.AddArg(x)
  1915  		return true
  1916  	}
  1917  	// match: (ANDconst [c] (MOVBUreg x))
  1918  	// result: (ANDconst [c&(1<<8-1)] x)
  1919  	for {
  1920  		c := auxIntToInt64(v.AuxInt)
  1921  		if v_0.Op != OpARM64MOVBUreg {
  1922  			break
  1923  		}
  1924  		x := v_0.Args[0]
  1925  		v.reset(OpARM64ANDconst)
  1926  		v.AuxInt = int64ToAuxInt(c & (1<<8 - 1))
  1927  		v.AddArg(x)
  1928  		return true
  1929  	}
  1930  	// match: (ANDconst [ac] (SLLconst [sc] x))
  1931  	// cond: isARM64BFMask(sc, ac, sc)
  1932  	// result: (UBFIZ [armBFAuxInt(sc, arm64BFWidth(ac, sc))] x)
  1933  	for {
  1934  		ac := auxIntToInt64(v.AuxInt)
  1935  		if v_0.Op != OpARM64SLLconst {
  1936  			break
  1937  		}
  1938  		sc := auxIntToInt64(v_0.AuxInt)
  1939  		x := v_0.Args[0]
  1940  		if !(isARM64BFMask(sc, ac, sc)) {
  1941  			break
  1942  		}
  1943  		v.reset(OpARM64UBFIZ)
  1944  		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(sc, arm64BFWidth(ac, sc)))
  1945  		v.AddArg(x)
  1946  		return true
  1947  	}
  1948  	// match: (ANDconst [ac] (SRLconst [sc] x))
  1949  	// cond: isARM64BFMask(sc, ac, 0)
  1950  	// result: (UBFX [armBFAuxInt(sc, arm64BFWidth(ac, 0))] x)
  1951  	for {
  1952  		ac := auxIntToInt64(v.AuxInt)
  1953  		if v_0.Op != OpARM64SRLconst {
  1954  			break
  1955  		}
  1956  		sc := auxIntToInt64(v_0.AuxInt)
  1957  		x := v_0.Args[0]
  1958  		if !(isARM64BFMask(sc, ac, 0)) {
  1959  			break
  1960  		}
  1961  		v.reset(OpARM64UBFX)
  1962  		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(sc, arm64BFWidth(ac, 0)))
  1963  		v.AddArg(x)
  1964  		return true
  1965  	}
  1966  	// match: (ANDconst [c] (UBFX [bfc] x))
  1967  	// cond: isARM64BFMask(0, c, 0)
  1968  	// result: (UBFX [armBFAuxInt(bfc.getARM64BFlsb(), min(bfc.getARM64BFwidth(), arm64BFWidth(c, 0)))] x)
  1969  	for {
  1970  		c := auxIntToInt64(v.AuxInt)
  1971  		if v_0.Op != OpARM64UBFX {
  1972  			break
  1973  		}
  1974  		bfc := auxIntToArm64BitField(v_0.AuxInt)
  1975  		x := v_0.Args[0]
  1976  		if !(isARM64BFMask(0, c, 0)) {
  1977  			break
  1978  		}
  1979  		v.reset(OpARM64UBFX)
  1980  		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(bfc.getARM64BFlsb(), min(bfc.getARM64BFwidth(), arm64BFWidth(c, 0))))
  1981  		v.AddArg(x)
  1982  		return true
  1983  	}
  1984  	return false
  1985  }
// rewriteValueARM64_OpARM64ANDshiftLL applies the _gen/ARM64.rules rewrites for
// ANDshiftLL, i.e. x0 & (x1 << AuxInt). Rules are tried in source order; the
// first match mutates v in place and returns true. Returns false if no rule fires.
func rewriteValueARM64_OpARM64ANDshiftLL(v *Value) bool {
	v_1 := v.Args[1] // operand logically left-shifted by AuxInt
	v_0 := v.Args[0]
	b := v.Block
	// match: (ANDshiftLL (MOVDconst [c]) x [d])
	// result: (ANDconst [c] (SLLconst <x.Type> x [d]))
	for {
		d := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_1
		v.reset(OpARM64ANDconst)
		v.AuxInt = int64ToAuxInt(c)
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
		v0.AuxInt = int64ToAuxInt(d)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (ANDshiftLL x (MOVDconst [c]) [d])
	// result: (ANDconst x [int64(uint64(c)<<uint64(d))])
	for {
		d := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64ANDconst)
		// Shift the constant at compile time (unsigned, so overflow wraps as on hardware).
		v.AuxInt = int64ToAuxInt(int64(uint64(c) << uint64(d)))
		v.AddArg(x)
		return true
	}
	// match: (ANDshiftLL y:(SLLconst x [c]) x [c])
	// result: y
	for {
		c := auxIntToInt64(v.AuxInt)
		y := v_0
		if y.Op != OpARM64SLLconst || auxIntToInt64(y.AuxInt) != c {
			break
		}
		x := y.Args[0]
		if x != v_1 {
			break
		}
		// Both operands are the same shifted value; y & y == y.
		v.copyOf(y)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64ANDshiftRA applies the _gen/ARM64.rules rewrites for
// ANDshiftRA, i.e. x0 & (x1 >> AuxInt) with an arithmetic (sign-extending) shift.
// Rules are tried in source order; the first match mutates v and returns true.
func rewriteValueARM64_OpARM64ANDshiftRA(v *Value) bool {
	v_1 := v.Args[1] // operand arithmetically right-shifted by AuxInt
	v_0 := v.Args[0]
	b := v.Block
	// match: (ANDshiftRA (MOVDconst [c]) x [d])
	// result: (ANDconst [c] (SRAconst <x.Type> x [d]))
	for {
		d := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_1
		v.reset(OpARM64ANDconst)
		v.AuxInt = int64ToAuxInt(c)
		v0 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type)
		v0.AuxInt = int64ToAuxInt(d)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (ANDshiftRA x (MOVDconst [c]) [d])
	// result: (ANDconst x [c>>uint64(d)])
	for {
		d := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64ANDconst)
		// Signed shift of the signed constant mirrors SRA semantics.
		v.AuxInt = int64ToAuxInt(c >> uint64(d))
		v.AddArg(x)
		return true
	}
	// match: (ANDshiftRA y:(SRAconst x [c]) x [c])
	// result: y
	for {
		c := auxIntToInt64(v.AuxInt)
		y := v_0
		if y.Op != OpARM64SRAconst || auxIntToInt64(y.AuxInt) != c {
			break
		}
		x := y.Args[0]
		if x != v_1 {
			break
		}
		// Both operands are the same shifted value; y & y == y.
		v.copyOf(y)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64ANDshiftRL applies the _gen/ARM64.rules rewrites for
// ANDshiftRL, i.e. x0 & (x1 >> AuxInt) with a logical (zero-filling) shift.
// Rules are tried in source order; the first match mutates v and returns true.
func rewriteValueARM64_OpARM64ANDshiftRL(v *Value) bool {
	v_1 := v.Args[1] // operand logically right-shifted by AuxInt
	v_0 := v.Args[0]
	b := v.Block
	// match: (ANDshiftRL (MOVDconst [c]) x [d])
	// result: (ANDconst [c] (SRLconst <x.Type> x [d]))
	for {
		d := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_1
		v.reset(OpARM64ANDconst)
		v.AuxInt = int64ToAuxInt(c)
		v0 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type)
		v0.AuxInt = int64ToAuxInt(d)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (ANDshiftRL x (MOVDconst [c]) [d])
	// result: (ANDconst x [int64(uint64(c)>>uint64(d))])
	for {
		d := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64ANDconst)
		// Unsigned shift matches SRL's zero-fill semantics.
		v.AuxInt = int64ToAuxInt(int64(uint64(c) >> uint64(d)))
		v.AddArg(x)
		return true
	}
	// match: (ANDshiftRL y:(SRLconst x [c]) x [c])
	// result: y
	for {
		c := auxIntToInt64(v.AuxInt)
		y := v_0
		if y.Op != OpARM64SRLconst || auxIntToInt64(y.AuxInt) != c {
			break
		}
		x := y.Args[0]
		if x != v_1 {
			break
		}
		// Both operands are the same shifted value; y & y == y.
		v.copyOf(y)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64ANDshiftRO applies the _gen/ARM64.rules rewrites for
// ANDshiftRO, i.e. x0 & (x1 rotated right by AuxInt).
// Rules are tried in source order; the first match mutates v and returns true.
func rewriteValueARM64_OpARM64ANDshiftRO(v *Value) bool {
	v_1 := v.Args[1] // operand rotated right by AuxInt
	v_0 := v.Args[0]
	b := v.Block
	// match: (ANDshiftRO (MOVDconst [c]) x [d])
	// result: (ANDconst [c] (RORconst <x.Type> x [d]))
	for {
		d := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_1
		v.reset(OpARM64ANDconst)
		v.AuxInt = int64ToAuxInt(c)
		v0 := b.NewValue0(v.Pos, OpARM64RORconst, x.Type)
		v0.AuxInt = int64ToAuxInt(d)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (ANDshiftRO x (MOVDconst [c]) [d])
	// result: (ANDconst x [rotateRight64(c, d)])
	for {
		d := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64ANDconst)
		// Rotate the constant at compile time (helper defined in rewrite.go).
		v.AuxInt = int64ToAuxInt(rotateRight64(c, d))
		v.AddArg(x)
		return true
	}
	// match: (ANDshiftRO y:(RORconst x [c]) x [c])
	// result: y
	for {
		c := auxIntToInt64(v.AuxInt)
		y := v_0
		if y.Op != OpARM64RORconst || auxIntToInt64(y.AuxInt) != c {
			break
		}
		x := y.Args[0]
		if x != v_1 {
			break
		}
		// Both operands are the same rotated value; y & y == y.
		v.copyOf(y)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64BIC applies the _gen/ARM64.rules rewrites for BIC
// (bit clear: x0 &^ x1 — established by the first rule's fold to ANDconst [^c]).
// Rules are tried in source order; the first match mutates v and returns true.
func rewriteValueARM64_OpARM64BIC(v *Value) bool {
	v_1 := v.Args[1] // operand whose bits are cleared from v_0
	v_0 := v.Args[0]
	// match: (BIC x (MOVDconst [c]))
	// result: (ANDconst [^c] x)
	for {
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64ANDconst)
		v.AuxInt = int64ToAuxInt(^c)
		v.AddArg(x)
		return true
	}
	// match: (BIC x x)
	// result: (MOVDconst [0])
	for {
		x := v_0
		if x != v_1 {
			break
		}
		// x &^ x is always zero.
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (BIC x0 x1:(SLLconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (BICshiftLL x0 y [c])
	// Folds a single-use shift into BIC's shifted-operand form; clobberIfDead
	// (rewrite.go) presumably gates on x1 having no other uses — see its definition.
	for {
		x0 := v_0
		x1 := v_1
		if x1.Op != OpARM64SLLconst {
			break
		}
		c := auxIntToInt64(x1.AuxInt)
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64BICshiftLL)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg2(x0, y)
		return true
	}
	// match: (BIC x0 x1:(SRLconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (BICshiftRL x0 y [c])
	for {
		x0 := v_0
		x1 := v_1
		if x1.Op != OpARM64SRLconst {
			break
		}
		c := auxIntToInt64(x1.AuxInt)
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64BICshiftRL)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg2(x0, y)
		return true
	}
	// match: (BIC x0 x1:(SRAconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (BICshiftRA x0 y [c])
	for {
		x0 := v_0
		x1 := v_1
		if x1.Op != OpARM64SRAconst {
			break
		}
		c := auxIntToInt64(x1.AuxInt)
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64BICshiftRA)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg2(x0, y)
		return true
	}
	// match: (BIC x0 x1:(RORconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (BICshiftRO x0 y [c])
	for {
		x0 := v_0
		x1 := v_1
		if x1.Op != OpARM64RORconst {
			break
		}
		c := auxIntToInt64(x1.AuxInt)
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64BICshiftRO)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg2(x0, y)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64BICshiftLL applies the _gen/ARM64.rules rewrites for
// BICshiftLL, i.e. x &^ (y << AuxInt).
// Rules are tried in source order; the first match mutates v and returns true.
func rewriteValueARM64_OpARM64BICshiftLL(v *Value) bool {
	v_1 := v.Args[1] // operand left-shifted by AuxInt before being cleared from v_0
	v_0 := v.Args[0]
	// match: (BICshiftLL x (MOVDconst [c]) [d])
	// result: (ANDconst x [^int64(uint64(c)<<uint64(d))])
	for {
		d := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64ANDconst)
		// x &^ k == x & ^k, with the shift folded into the constant.
		v.AuxInt = int64ToAuxInt(^int64(uint64(c) << uint64(d)))
		v.AddArg(x)
		return true
	}
	// match: (BICshiftLL (SLLconst x [c]) x [c])
	// result: (MOVDconst [0])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != c {
			break
		}
		x := v_0.Args[0]
		if x != v_1 {
			break
		}
		// (x<<c) &^ (x<<c) is always zero.
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64BICshiftRA applies the _gen/ARM64.rules rewrites for
// BICshiftRA, i.e. x &^ (y >> AuxInt) with an arithmetic shift.
// Rules are tried in source order; the first match mutates v and returns true.
func rewriteValueARM64_OpARM64BICshiftRA(v *Value) bool {
	v_1 := v.Args[1] // operand arithmetically right-shifted by AuxInt
	v_0 := v.Args[0]
	// match: (BICshiftRA x (MOVDconst [c]) [d])
	// result: (ANDconst x [^(c>>uint64(d))])
	for {
		d := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64ANDconst)
		// Signed shift mirrors SRA; complement turns BIC into AND.
		v.AuxInt = int64ToAuxInt(^(c >> uint64(d)))
		v.AddArg(x)
		return true
	}
	// match: (BICshiftRA (SRAconst x [c]) x [c])
	// result: (MOVDconst [0])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64SRAconst || auxIntToInt64(v_0.AuxInt) != c {
			break
		}
		x := v_0.Args[0]
		if x != v_1 {
			break
		}
		// Identical operands: the result is always zero.
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64BICshiftRL applies the _gen/ARM64.rules rewrites for
// BICshiftRL, i.e. x &^ (y >> AuxInt) with a logical shift.
// Rules are tried in source order; the first match mutates v and returns true.
func rewriteValueARM64_OpARM64BICshiftRL(v *Value) bool {
	v_1 := v.Args[1] // operand logically right-shifted by AuxInt
	v_0 := v.Args[0]
	// match: (BICshiftRL x (MOVDconst [c]) [d])
	// result: (ANDconst x [^int64(uint64(c)>>uint64(d))])
	for {
		d := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64ANDconst)
		// Unsigned shift mirrors SRL; complement turns BIC into AND.
		v.AuxInt = int64ToAuxInt(^int64(uint64(c) >> uint64(d)))
		v.AddArg(x)
		return true
	}
	// match: (BICshiftRL (SRLconst x [c]) x [c])
	// result: (MOVDconst [0])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64SRLconst || auxIntToInt64(v_0.AuxInt) != c {
			break
		}
		x := v_0.Args[0]
		if x != v_1 {
			break
		}
		// Identical operands: the result is always zero.
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64BICshiftRO applies the _gen/ARM64.rules rewrites for
// BICshiftRO, i.e. x &^ (y rotated right by AuxInt).
// Rules are tried in source order; the first match mutates v and returns true.
func rewriteValueARM64_OpARM64BICshiftRO(v *Value) bool {
	v_1 := v.Args[1] // operand rotated right by AuxInt
	v_0 := v.Args[0]
	// match: (BICshiftRO x (MOVDconst [c]) [d])
	// result: (ANDconst x [^rotateRight64(c, d)])
	for {
		d := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64ANDconst)
		// Rotate then complement the constant; BIC becomes AND with the mask.
		v.AuxInt = int64ToAuxInt(^rotateRight64(c, d))
		v.AddArg(x)
		return true
	}
	// match: (BICshiftRO (RORconst x [c]) x [c])
	// result: (MOVDconst [0])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64RORconst || auxIntToInt64(v_0.AuxInt) != c {
			break
		}
		x := v_0.Args[0]
		if x != v_1 {
			break
		}
		// Identical operands: the result is always zero.
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64CMN applies the _gen/ARM64.rules rewrites for CMN
// (compare negative: flags of x+y, as shown by the CMNconst fold to addFlags64
// below). CMN is commutative, so each rule's inner loop swaps v_0/v_1 on the
// second iteration to try both argument orders. First match wins.
func rewriteValueARM64_OpARM64CMN(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (CMN x (MOVDconst [c]))
	// result: (CMNconst [c] x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			v.reset(OpARM64CMNconst)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (CMN x0 x1:(SLLconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (CMNshiftLL x0 y [c])
	// clobberIfDead (rewrite.go) presumably gates on x1 having no other uses.
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x0 := v_0
			x1 := v_1
			if x1.Op != OpARM64SLLconst {
				continue
			}
			c := auxIntToInt64(x1.AuxInt)
			y := x1.Args[0]
			if !(clobberIfDead(x1)) {
				continue
			}
			v.reset(OpARM64CMNshiftLL)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg2(x0, y)
			return true
		}
		break
	}
	// match: (CMN x0 x1:(SRLconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (CMNshiftRL x0 y [c])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x0 := v_0
			x1 := v_1
			if x1.Op != OpARM64SRLconst {
				continue
			}
			c := auxIntToInt64(x1.AuxInt)
			y := x1.Args[0]
			if !(clobberIfDead(x1)) {
				continue
			}
			v.reset(OpARM64CMNshiftRL)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg2(x0, y)
			return true
		}
		break
	}
	// match: (CMN x0 x1:(SRAconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (CMNshiftRA x0 y [c])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x0 := v_0
			x1 := v_1
			if x1.Op != OpARM64SRAconst {
				continue
			}
			c := auxIntToInt64(x1.AuxInt)
			y := x1.Args[0]
			if !(clobberIfDead(x1)) {
				continue
			}
			v.reset(OpARM64CMNshiftRA)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg2(x0, y)
			return true
		}
		break
	}
	return false
}
// rewriteValueARM64_OpARM64CMNW applies the _gen/ARM64.rules rewrite for CMNW
// (32-bit compare negative). The single rule folds a constant operand into
// CMNWconst, truncating it to 32 bits; the inner loop tries both argument
// orders because CMNW is commutative.
func rewriteValueARM64_OpARM64CMNW(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (CMNW x (MOVDconst [c]))
	// result: (CMNWconst [int32(c)] x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			v.reset(OpARM64CMNWconst)
			// Only the low 32 bits matter for a W-form comparison.
			v.AuxInt = int32ToAuxInt(int32(c))
			v.AddArg(x)
			return true
		}
		break
	}
	return false
}
// rewriteValueARM64_OpARM64CMNWconst applies the _gen/ARM64.rules rewrites for
// CMNWconst (32-bit compare negative against an immediate).
// Rules are tried in source order; the first match mutates v and returns true.
func rewriteValueARM64_OpARM64CMNWconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (CMNWconst [c] y)
	// cond: c < 0 && c != -1<<31
	// result: (CMPWconst [-c] y)
	// A negative CMN immediate is a positive CMP immediate; the c != -1<<31
	// guard prevents -c from overflowing int32.
	for {
		c := auxIntToInt32(v.AuxInt)
		y := v_0
		if !(c < 0 && c != -1<<31) {
			break
		}
		v.reset(OpARM64CMPWconst)
		v.AuxInt = int32ToAuxInt(-c)
		v.AddArg(y)
		return true
	}
	// match: (CMNWconst (MOVDconst [x]) [y])
	// result: (FlagConstant [addFlags32(int32(x),y)])
	for {
		y := auxIntToInt32(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := auxIntToInt64(v_0.AuxInt)
		// Both operands constant: compute the flag result at compile time.
		v.reset(OpARM64FlagConstant)
		v.AuxInt = flagConstantToAuxInt(addFlags32(int32(x), y))
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64CMNconst applies the _gen/ARM64.rules rewrites for
// CMNconst (64-bit compare negative against an immediate).
// Rules are tried in source order; the first match mutates v and returns true.
func rewriteValueARM64_OpARM64CMNconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (CMNconst [c] y)
	// cond: c < 0 && c != -1<<63
	// result: (CMPconst [-c] y)
	// A negative CMN immediate is a positive CMP immediate; the c != -1<<63
	// guard prevents -c from overflowing int64.
	for {
		c := auxIntToInt64(v.AuxInt)
		y := v_0
		if !(c < 0 && c != -1<<63) {
			break
		}
		v.reset(OpARM64CMPconst)
		v.AuxInt = int64ToAuxInt(-c)
		v.AddArg(y)
		return true
	}
	// match: (CMNconst (MOVDconst [x]) [y])
	// result: (FlagConstant [addFlags64(x,y)])
	for {
		y := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := auxIntToInt64(v_0.AuxInt)
		// Both operands constant: compute the flag result at compile time.
		v.reset(OpARM64FlagConstant)
		v.AuxInt = flagConstantToAuxInt(addFlags64(x, y))
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64CMNshiftLL applies the _gen/ARM64.rules rewrites for
// CMNshiftLL, i.e. flags of x0 + (x1 << AuxInt).
// Rules are tried in source order; the first match mutates v and returns true.
func rewriteValueARM64_OpARM64CMNshiftLL(v *Value) bool {
	v_1 := v.Args[1] // operand left-shifted by AuxInt
	v_0 := v.Args[0]
	b := v.Block
	// match: (CMNshiftLL (MOVDconst [c]) x [d])
	// result: (CMNconst [c] (SLLconst <x.Type> x [d]))
	for {
		d := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_1
		v.reset(OpARM64CMNconst)
		v.AuxInt = int64ToAuxInt(c)
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
		v0.AuxInt = int64ToAuxInt(d)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (CMNshiftLL x (MOVDconst [c]) [d])
	// result: (CMNconst x [int64(uint64(c)<<uint64(d))])
	for {
		d := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64CMNconst)
		// Fold the shift into the immediate at compile time.
		v.AuxInt = int64ToAuxInt(int64(uint64(c) << uint64(d)))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64CMNshiftRA applies the _gen/ARM64.rules rewrites for
// CMNshiftRA, i.e. flags of x0 + (x1 >> AuxInt) with an arithmetic shift.
// Rules are tried in source order; the first match mutates v and returns true.
func rewriteValueARM64_OpARM64CMNshiftRA(v *Value) bool {
	v_1 := v.Args[1] // operand arithmetically right-shifted by AuxInt
	v_0 := v.Args[0]
	b := v.Block
	// match: (CMNshiftRA (MOVDconst [c]) x [d])
	// result: (CMNconst [c] (SRAconst <x.Type> x [d]))
	for {
		d := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_1
		v.reset(OpARM64CMNconst)
		v.AuxInt = int64ToAuxInt(c)
		v0 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type)
		v0.AuxInt = int64ToAuxInt(d)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (CMNshiftRA x (MOVDconst [c]) [d])
	// result: (CMNconst x [c>>uint64(d)])
	for {
		d := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64CMNconst)
		// Signed shift of the immediate mirrors SRA semantics.
		v.AuxInt = int64ToAuxInt(c >> uint64(d))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64CMNshiftRL applies the _gen/ARM64.rules rewrites for
// CMNshiftRL, i.e. flags of x0 + (x1 >> AuxInt) with a logical shift.
// Rules are tried in source order; the first match mutates v and returns true.
func rewriteValueARM64_OpARM64CMNshiftRL(v *Value) bool {
	v_1 := v.Args[1] // operand logically right-shifted by AuxInt
	v_0 := v.Args[0]
	b := v.Block
	// match: (CMNshiftRL (MOVDconst [c]) x [d])
	// result: (CMNconst [c] (SRLconst <x.Type> x [d]))
	for {
		d := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_1
		v.reset(OpARM64CMNconst)
		v.AuxInt = int64ToAuxInt(c)
		v0 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type)
		v0.AuxInt = int64ToAuxInt(d)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (CMNshiftRL x (MOVDconst [c]) [d])
	// result: (CMNconst x [int64(uint64(c)>>uint64(d))])
	for {
		d := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64CMNconst)
		// Unsigned shift of the immediate mirrors SRL semantics.
		v.AuxInt = int64ToAuxInt(int64(uint64(c) >> uint64(d)))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64CMP applies the _gen/ARM64.rules rewrites for CMP
// (flags of x-y, as shown by the CMPconst fold to subFlags64 below). CMP is
// NOT commutative: rules that move an operand to the other side wrap the
// result in InvertFlags so later flag consumers see the swapped sense.
// Rules are tried in source order; the first match mutates v and returns true.
func rewriteValueARM64_OpARM64CMP(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (CMP x (MOVDconst [c]))
	// result: (CMPconst [c] x)
	for {
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64CMPconst)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(x)
		return true
	}
	// match: (CMP (MOVDconst [c]) x)
	// result: (InvertFlags (CMPconst [c] x))
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_1
		// Constant on the left: compare the other way and invert the flags.
		v.reset(OpARM64InvertFlags)
		v0 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v0.AuxInt = int64ToAuxInt(c)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (CMP x y)
	// cond: canonLessThan(x,y)
	// result: (InvertFlags (CMP y x))
	// canonLessThan (rewrite.go) presumably imposes a canonical operand order
	// so equivalent comparisons CSE together.
	for {
		x := v_0
		y := v_1
		if !(canonLessThan(x, y)) {
			break
		}
		v.reset(OpARM64InvertFlags)
		v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
	// match: (CMP x0 x1:(SLLconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (CMPshiftLL x0 y [c])
	// clobberIfDead (rewrite.go) presumably gates on x1 having no other uses.
	for {
		x0 := v_0
		x1 := v_1
		if x1.Op != OpARM64SLLconst {
			break
		}
		c := auxIntToInt64(x1.AuxInt)
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64CMPshiftLL)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg2(x0, y)
		return true
	}
	// match: (CMP x0:(SLLconst [c] y) x1)
	// cond: clobberIfDead(x0)
	// result: (InvertFlags (CMPshiftLL x1 y [c]))
	for {
		x0 := v_0
		if x0.Op != OpARM64SLLconst {
			break
		}
		c := auxIntToInt64(x0.AuxInt)
		y := x0.Args[0]
		x1 := v_1
		if !(clobberIfDead(x0)) {
			break
		}
		// Shift was on the left operand, so the folded compare is inverted.
		v.reset(OpARM64InvertFlags)
		v0 := b.NewValue0(v.Pos, OpARM64CMPshiftLL, types.TypeFlags)
		v0.AuxInt = int64ToAuxInt(c)
		v0.AddArg2(x1, y)
		v.AddArg(v0)
		return true
	}
	// match: (CMP x0 x1:(SRLconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (CMPshiftRL x0 y [c])
	for {
		x0 := v_0
		x1 := v_1
		if x1.Op != OpARM64SRLconst {
			break
		}
		c := auxIntToInt64(x1.AuxInt)
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64CMPshiftRL)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg2(x0, y)
		return true
	}
	// match: (CMP x0:(SRLconst [c] y) x1)
	// cond: clobberIfDead(x0)
	// result: (InvertFlags (CMPshiftRL x1 y [c]))
	for {
		x0 := v_0
		if x0.Op != OpARM64SRLconst {
			break
		}
		c := auxIntToInt64(x0.AuxInt)
		y := x0.Args[0]
		x1 := v_1
		if !(clobberIfDead(x0)) {
			break
		}
		v.reset(OpARM64InvertFlags)
		v0 := b.NewValue0(v.Pos, OpARM64CMPshiftRL, types.TypeFlags)
		v0.AuxInt = int64ToAuxInt(c)
		v0.AddArg2(x1, y)
		v.AddArg(v0)
		return true
	}
	// match: (CMP x0 x1:(SRAconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (CMPshiftRA x0 y [c])
	for {
		x0 := v_0
		x1 := v_1
		if x1.Op != OpARM64SRAconst {
			break
		}
		c := auxIntToInt64(x1.AuxInt)
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64CMPshiftRA)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg2(x0, y)
		return true
	}
	// match: (CMP x0:(SRAconst [c] y) x1)
	// cond: clobberIfDead(x0)
	// result: (InvertFlags (CMPshiftRA x1 y [c]))
	for {
		x0 := v_0
		if x0.Op != OpARM64SRAconst {
			break
		}
		c := auxIntToInt64(x0.AuxInt)
		y := x0.Args[0]
		x1 := v_1
		if !(clobberIfDead(x0)) {
			break
		}
		v.reset(OpARM64InvertFlags)
		v0 := b.NewValue0(v.Pos, OpARM64CMPshiftRA, types.TypeFlags)
		v0.AuxInt = int64ToAuxInt(c)
		v0.AddArg2(x1, y)
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64CMPW applies the _gen/ARM64.rules rewrites for CMPW
// (32-bit compare). Like CMP, it is not commutative: a constant on the left
// or a canonical-order swap wraps the result in InvertFlags.
// Rules are tried in source order; the first match mutates v and returns true.
func rewriteValueARM64_OpARM64CMPW(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (CMPW x (MOVDconst [c]))
	// result: (CMPWconst [int32(c)] x)
	for {
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64CMPWconst)
		// Only the low 32 bits matter for a W-form comparison.
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg(x)
		return true
	}
	// match: (CMPW (MOVDconst [c]) x)
	// result: (InvertFlags (CMPWconst [int32(c)] x))
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_1
		// Constant on the left: compare the other way and invert the flags.
		v.reset(OpARM64InvertFlags)
		v0 := b.NewValue0(v.Pos, OpARM64CMPWconst, types.TypeFlags)
		v0.AuxInt = int32ToAuxInt(int32(c))
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (CMPW x y)
	// cond: canonLessThan(x,y)
	// result: (InvertFlags (CMPW y x))
	// canonLessThan (rewrite.go) presumably imposes a canonical operand order
	// so equivalent comparisons CSE together.
	for {
		x := v_0
		y := v_1
		if !(canonLessThan(x, y)) {
			break
		}
		v.reset(OpARM64InvertFlags)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64CMPWconst applies the _gen/ARM64.rules rewrites for
// CMPWconst (32-bit compare against an immediate).
// Rules are tried in source order; the first match mutates v and returns true.
func rewriteValueARM64_OpARM64CMPWconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (CMPWconst [c] y)
	// cond: c < 0 && c != -1<<31
	// result: (CMNWconst [-c] y)
	// A negative CMP immediate is a positive CMN immediate; the c != -1<<31
	// guard prevents -c from overflowing int32.
	for {
		c := auxIntToInt32(v.AuxInt)
		y := v_0
		if !(c < 0 && c != -1<<31) {
			break
		}
		v.reset(OpARM64CMNWconst)
		v.AuxInt = int32ToAuxInt(-c)
		v.AddArg(y)
		return true
	}
	// match: (CMPWconst (MOVDconst [x]) [y])
	// result: (FlagConstant [subFlags32(int32(x),y)])
	for {
		y := auxIntToInt32(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := auxIntToInt64(v_0.AuxInt)
		// Both operands constant: compute the flag result at compile time.
		v.reset(OpARM64FlagConstant)
		v.AuxInt = flagConstantToAuxInt(subFlags32(int32(x), y))
		return true
	}
	// match: (CMPWconst (MOVBUreg _) [c])
	// cond: 0xff < c
	// result: (FlagConstant [subFlags64(0,1)])
	// A zero-extended byte is at most 0xff, so the comparison against a larger
	// c always yields "less than" flags (the flags of 0-1).
	for {
		c := auxIntToInt32(v.AuxInt)
		if v_0.Op != OpARM64MOVBUreg || !(0xff < c) {
			break
		}
		v.reset(OpARM64FlagConstant)
		v.AuxInt = flagConstantToAuxInt(subFlags64(0, 1))
		return true
	}
	// match: (CMPWconst (MOVHUreg _) [c])
	// cond: 0xffff < c
	// result: (FlagConstant [subFlags64(0,1)])
	// Same reasoning for a zero-extended halfword (max 0xffff).
	for {
		c := auxIntToInt32(v.AuxInt)
		if v_0.Op != OpARM64MOVHUreg || !(0xffff < c) {
			break
		}
		v.reset(OpARM64FlagConstant)
		v.AuxInt = flagConstantToAuxInt(subFlags64(0, 1))
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64CMPconst rewrites CMPconst values. The generated
// rules below are tried in source order; the first one that matches mutates v
// in place and returns true. It returns false when no rule applies.
// NOTE(review): generated from _gen/ARM64.rules (see file header) — change
// the rules there, not this function.
func rewriteValueARM64_OpARM64CMPconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (CMPconst [c] y)
	// cond: c < 0 && c != -1<<63
	// result: (CMNconst [-c] y)
	for {
		c := auxIntToInt64(v.AuxInt)
		y := v_0
		// c == -1<<63 is excluded because negating math.MinInt64 overflows int64.
		if !(c < 0 && c != -1<<63) {
			break
		}
		v.reset(OpARM64CMNconst)
		v.AuxInt = int64ToAuxInt(-c)
		v.AddArg(y)
		return true
	}
	// match: (CMPconst (MOVDconst [x]) [y])
	// result: (FlagConstant [subFlags64(x,y)])
	for {
		y := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := auxIntToInt64(v_0.AuxInt)
		v.reset(OpARM64FlagConstant)
		v.AuxInt = flagConstantToAuxInt(subFlags64(x, y))
		return true
	}
	// match: (CMPconst (MOVBUreg _) [c])
	// cond: 0xff < c
	// result: (FlagConstant [subFlags64(0,1)])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVBUreg || !(0xff < c) {
			break
		}
		v.reset(OpARM64FlagConstant)
		v.AuxInt = flagConstantToAuxInt(subFlags64(0, 1))
		return true
	}
	// match: (CMPconst (MOVHUreg _) [c])
	// cond: 0xffff < c
	// result: (FlagConstant [subFlags64(0,1)])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVHUreg || !(0xffff < c) {
			break
		}
		v.reset(OpARM64FlagConstant)
		v.AuxInt = flagConstantToAuxInt(subFlags64(0, 1))
		return true
	}
	// match: (CMPconst (MOVWUreg _) [c])
	// cond: 0xffffffff < c
	// result: (FlagConstant [subFlags64(0,1)])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVWUreg || !(0xffffffff < c) {
			break
		}
		v.reset(OpARM64FlagConstant)
		v.AuxInt = flagConstantToAuxInt(subFlags64(0, 1))
		return true
	}
	// match: (CMPconst (ANDconst _ [m]) [n])
	// cond: 0 <= m && m < n
	// result: (FlagConstant [subFlags64(0,1)])
	for {
		n := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64ANDconst {
			break
		}
		m := auxIntToInt64(v_0.AuxInt)
		if !(0 <= m && m < n) {
			break
		}
		v.reset(OpARM64FlagConstant)
		v.AuxInt = flagConstantToAuxInt(subFlags64(0, 1))
		return true
	}
	// match: (CMPconst (SRLconst _ [c]) [n])
	// cond: 0 <= n && 0 < c && c <= 63 && (1<<uint64(64-c)) <= uint64(n)
	// result: (FlagConstant [subFlags64(0,1)])
	for {
		n := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64SRLconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		if !(0 <= n && 0 < c && c <= 63 && (1<<uint64(64-c)) <= uint64(n)) {
			break
		}
		v.reset(OpARM64FlagConstant)
		v.AuxInt = flagConstantToAuxInt(subFlags64(0, 1))
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64CMPshiftLL rewrites CMPshiftLL values: a constant
// on the left becomes an InvertFlags-wrapped CMPconst, and a constant on the
// right folds the shift into the CMPconst immediate. Rules are tried in
// order; the first match mutates v in place and returns true.
// NOTE(review): generated from _gen/ARM64.rules — edit the rules, not this code.
func rewriteValueARM64_OpARM64CMPshiftLL(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (CMPshiftLL (MOVDconst [c]) x [d])
	// result: (InvertFlags (CMPconst [c] (SLLconst <x.Type> x [d])))
	for {
		d := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_1
		v.reset(OpARM64InvertFlags)
		v0 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v0.AuxInt = int64ToAuxInt(c)
		v1 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
		v1.AuxInt = int64ToAuxInt(d)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// match: (CMPshiftLL x (MOVDconst [c]) [d])
	// result: (CMPconst x [int64(uint64(c)<<uint64(d))])
	for {
		d := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64CMPconst)
		v.AuxInt = int64ToAuxInt(int64(uint64(c) << uint64(d)))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64CMPshiftRA rewrites CMPshiftRA values: a constant
// on the left becomes an InvertFlags-wrapped CMPconst, and a constant on the
// right folds the arithmetic shift into the CMPconst immediate. Rules are
// tried in order; the first match mutates v in place and returns true.
// NOTE(review): generated from _gen/ARM64.rules — edit the rules, not this code.
func rewriteValueARM64_OpARM64CMPshiftRA(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (CMPshiftRA (MOVDconst [c]) x [d])
	// result: (InvertFlags (CMPconst [c] (SRAconst <x.Type> x [d])))
	for {
		d := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_1
		v.reset(OpARM64InvertFlags)
		v0 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v0.AuxInt = int64ToAuxInt(c)
		v1 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type)
		v1.AuxInt = int64ToAuxInt(d)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// match: (CMPshiftRA x (MOVDconst [c]) [d])
	// result: (CMPconst x [c>>uint64(d)])
	for {
		d := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64CMPconst)
		v.AuxInt = int64ToAuxInt(c >> uint64(d))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64CMPshiftRL rewrites CMPshiftRL values: a constant
// on the left becomes an InvertFlags-wrapped CMPconst, and a constant on the
// right folds the logical shift into the CMPconst immediate. Rules are tried
// in order; the first match mutates v in place and returns true.
// NOTE(review): generated from _gen/ARM64.rules — edit the rules, not this code.
func rewriteValueARM64_OpARM64CMPshiftRL(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (CMPshiftRL (MOVDconst [c]) x [d])
	// result: (InvertFlags (CMPconst [c] (SRLconst <x.Type> x [d])))
	for {
		d := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_1
		v.reset(OpARM64InvertFlags)
		v0 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v0.AuxInt = int64ToAuxInt(c)
		v1 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type)
		v1.AuxInt = int64ToAuxInt(d)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// match: (CMPshiftRL x (MOVDconst [c]) [d])
	// result: (CMPconst x [int64(uint64(c)>>uint64(d))])
	for {
		d := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64CMPconst)
		v.AuxInt = int64ToAuxInt(int64(uint64(c) >> uint64(d)))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64CSEL rewrites CSEL (conditional-select) values
// into more specific forms (CSETM, CSEL0, CSINC, CSINV, CSNEG), flips the
// condition through InvertFlags, folds statically known flags via
// ccARM64Eval, and absorbs boolean-valued CMPWconst [0] comparisons. Rules
// are tried in source order; the first match mutates v in place and returns
// true. It returns false when no rule applies.
// NOTE(review): generated from _gen/ARM64.rules (see file header) — change
// the rules there, not this function. Rule order is significant.
func rewriteValueARM64_OpARM64CSEL(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (CSEL [cc] (MOVDconst [-1]) (MOVDconst [0]) flag)
	// result: (CSETM [cc] flag)
	for {
		cc := auxIntToOp(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst || auxIntToInt64(v_0.AuxInt) != -1 || v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		flag := v_2
		v.reset(OpARM64CSETM)
		v.AuxInt = opToAuxInt(cc)
		v.AddArg(flag)
		return true
	}
	// match: (CSEL [cc] (MOVDconst [0]) (MOVDconst [-1]) flag)
	// result: (CSETM [arm64Negate(cc)] flag)
	for {
		cc := auxIntToOp(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst || auxIntToInt64(v_0.AuxInt) != 0 || v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != -1 {
			break
		}
		flag := v_2
		v.reset(OpARM64CSETM)
		v.AuxInt = opToAuxInt(arm64Negate(cc))
		v.AddArg(flag)
		return true
	}
	// match: (CSEL [cc] x (MOVDconst [0]) flag)
	// result: (CSEL0 [cc] x flag)
	for {
		cc := auxIntToOp(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		flag := v_2
		v.reset(OpARM64CSEL0)
		v.AuxInt = opToAuxInt(cc)
		v.AddArg2(x, flag)
		return true
	}
	// match: (CSEL [cc] (MOVDconst [0]) y flag)
	// result: (CSEL0 [arm64Negate(cc)] y flag)
	for {
		cc := auxIntToOp(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		y := v_1
		flag := v_2
		v.reset(OpARM64CSEL0)
		v.AuxInt = opToAuxInt(arm64Negate(cc))
		v.AddArg2(y, flag)
		return true
	}
	// match: (CSEL [cc] x (ADDconst [1] a) flag)
	// result: (CSINC [cc] x a flag)
	for {
		cc := auxIntToOp(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64ADDconst || auxIntToInt64(v_1.AuxInt) != 1 {
			break
		}
		a := v_1.Args[0]
		flag := v_2
		v.reset(OpARM64CSINC)
		v.AuxInt = opToAuxInt(cc)
		v.AddArg3(x, a, flag)
		return true
	}
	// match: (CSEL [cc] (ADDconst [1] a) x flag)
	// result: (CSINC [arm64Negate(cc)] x a flag)
	for {
		cc := auxIntToOp(v.AuxInt)
		if v_0.Op != OpARM64ADDconst || auxIntToInt64(v_0.AuxInt) != 1 {
			break
		}
		a := v_0.Args[0]
		x := v_1
		flag := v_2
		v.reset(OpARM64CSINC)
		v.AuxInt = opToAuxInt(arm64Negate(cc))
		v.AddArg3(x, a, flag)
		return true
	}
	// match: (CSEL [cc] x (MVN a) flag)
	// result: (CSINV [cc] x a flag)
	for {
		cc := auxIntToOp(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MVN {
			break
		}
		a := v_1.Args[0]
		flag := v_2
		v.reset(OpARM64CSINV)
		v.AuxInt = opToAuxInt(cc)
		v.AddArg3(x, a, flag)
		return true
	}
	// match: (CSEL [cc] (MVN a) x flag)
	// result: (CSINV [arm64Negate(cc)] x a flag)
	for {
		cc := auxIntToOp(v.AuxInt)
		if v_0.Op != OpARM64MVN {
			break
		}
		a := v_0.Args[0]
		x := v_1
		flag := v_2
		v.reset(OpARM64CSINV)
		v.AuxInt = opToAuxInt(arm64Negate(cc))
		v.AddArg3(x, a, flag)
		return true
	}
	// match: (CSEL [cc] x (NEG a) flag)
	// result: (CSNEG [cc] x a flag)
	for {
		cc := auxIntToOp(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64NEG {
			break
		}
		a := v_1.Args[0]
		flag := v_2
		v.reset(OpARM64CSNEG)
		v.AuxInt = opToAuxInt(cc)
		v.AddArg3(x, a, flag)
		return true
	}
	// match: (CSEL [cc] (NEG a) x flag)
	// result: (CSNEG [arm64Negate(cc)] x a flag)
	for {
		cc := auxIntToOp(v.AuxInt)
		if v_0.Op != OpARM64NEG {
			break
		}
		a := v_0.Args[0]
		x := v_1
		flag := v_2
		v.reset(OpARM64CSNEG)
		v.AuxInt = opToAuxInt(arm64Negate(cc))
		v.AddArg3(x, a, flag)
		return true
	}
	// match: (CSEL [cc] x y (InvertFlags cmp))
	// result: (CSEL [arm64Invert(cc)] x y cmp)
	for {
		cc := auxIntToOp(v.AuxInt)
		x := v_0
		y := v_1
		if v_2.Op != OpARM64InvertFlags {
			break
		}
		cmp := v_2.Args[0]
		v.reset(OpARM64CSEL)
		v.AuxInt = opToAuxInt(arm64Invert(cc))
		v.AddArg3(x, y, cmp)
		return true
	}
	// match: (CSEL [cc] x _ flag)
	// cond: ccARM64Eval(cc, flag) > 0
	// result: x
	for {
		cc := auxIntToOp(v.AuxInt)
		x := v_0
		flag := v_2
		if !(ccARM64Eval(cc, flag) > 0) {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (CSEL [cc] _ y flag)
	// cond: ccARM64Eval(cc, flag) < 0
	// result: y
	for {
		cc := auxIntToOp(v.AuxInt)
		y := v_1
		flag := v_2
		if !(ccARM64Eval(cc, flag) < 0) {
			break
		}
		v.copyOf(y)
		return true
	}
	// match: (CSEL [cc] x y (CMPWconst [0] boolval))
	// cond: cc == OpARM64NotEqual && flagArg(boolval) != nil
	// result: (CSEL [boolval.Op] x y flagArg(boolval))
	for {
		cc := auxIntToOp(v.AuxInt)
		x := v_0
		y := v_1
		if v_2.Op != OpARM64CMPWconst || auxIntToInt32(v_2.AuxInt) != 0 {
			break
		}
		boolval := v_2.Args[0]
		if !(cc == OpARM64NotEqual && flagArg(boolval) != nil) {
			break
		}
		v.reset(OpARM64CSEL)
		v.AuxInt = opToAuxInt(boolval.Op)
		v.AddArg3(x, y, flagArg(boolval))
		return true
	}
	// match: (CSEL [cc] x y (CMPWconst [0] boolval))
	// cond: cc == OpARM64Equal && flagArg(boolval) != nil
	// result: (CSEL [arm64Negate(boolval.Op)] x y flagArg(boolval))
	for {
		cc := auxIntToOp(v.AuxInt)
		x := v_0
		y := v_1
		if v_2.Op != OpARM64CMPWconst || auxIntToInt32(v_2.AuxInt) != 0 {
			break
		}
		boolval := v_2.Args[0]
		if !(cc == OpARM64Equal && flagArg(boolval) != nil) {
			break
		}
		v.reset(OpARM64CSEL)
		v.AuxInt = opToAuxInt(arm64Negate(boolval.Op))
		v.AddArg3(x, y, flagArg(boolval))
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64CSEL0 rewrites CSEL0 (conditional select of x or
// zero): flips the condition through InvertFlags, folds statically known
// flags via ccARM64Eval to x or the zero constant, and absorbs boolean-valued
// CMPWconst [0] comparisons. Rules are tried in source order; the first match
// mutates v in place and returns true.
// NOTE(review): generated from _gen/ARM64.rules — edit the rules, not this code.
func rewriteValueARM64_OpARM64CSEL0(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (CSEL0 [cc] x (InvertFlags cmp))
	// result: (CSEL0 [arm64Invert(cc)] x cmp)
	for {
		cc := auxIntToOp(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64InvertFlags {
			break
		}
		cmp := v_1.Args[0]
		v.reset(OpARM64CSEL0)
		v.AuxInt = opToAuxInt(arm64Invert(cc))
		v.AddArg2(x, cmp)
		return true
	}
	// match: (CSEL0 [cc] x flag)
	// cond: ccARM64Eval(cc, flag) > 0
	// result: x
	for {
		cc := auxIntToOp(v.AuxInt)
		x := v_0
		flag := v_1
		if !(ccARM64Eval(cc, flag) > 0) {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (CSEL0 [cc] _ flag)
	// cond: ccARM64Eval(cc, flag) < 0
	// result: (MOVDconst [0])
	for {
		cc := auxIntToOp(v.AuxInt)
		flag := v_1
		if !(ccARM64Eval(cc, flag) < 0) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (CSEL0 [cc] x (CMPWconst [0] boolval))
	// cond: cc == OpARM64NotEqual && flagArg(boolval) != nil
	// result: (CSEL0 [boolval.Op] x flagArg(boolval))
	for {
		cc := auxIntToOp(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64CMPWconst || auxIntToInt32(v_1.AuxInt) != 0 {
			break
		}
		boolval := v_1.Args[0]
		if !(cc == OpARM64NotEqual && flagArg(boolval) != nil) {
			break
		}
		v.reset(OpARM64CSEL0)
		v.AuxInt = opToAuxInt(boolval.Op)
		v.AddArg2(x, flagArg(boolval))
		return true
	}
	// match: (CSEL0 [cc] x (CMPWconst [0] boolval))
	// cond: cc == OpARM64Equal && flagArg(boolval) != nil
	// result: (CSEL0 [arm64Negate(boolval.Op)] x flagArg(boolval))
	for {
		cc := auxIntToOp(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64CMPWconst || auxIntToInt32(v_1.AuxInt) != 0 {
			break
		}
		boolval := v_1.Args[0]
		if !(cc == OpARM64Equal && flagArg(boolval) != nil) {
			break
		}
		v.reset(OpARM64CSEL0)
		v.AuxInt = opToAuxInt(arm64Negate(boolval.Op))
		v.AddArg2(x, flagArg(boolval))
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64CSETM rewrites CSETM (conditional set-mask):
// flips the condition through InvertFlags, and folds statically known flags
// via ccARM64Eval to the constant -1 or 0. Rules are tried in source order;
// the first match mutates v in place and returns true.
// NOTE(review): generated from _gen/ARM64.rules — edit the rules, not this code.
func rewriteValueARM64_OpARM64CSETM(v *Value) bool {
	v_0 := v.Args[0]
	// match: (CSETM [cc] (InvertFlags cmp))
	// result: (CSETM [arm64Invert(cc)] cmp)
	for {
		cc := auxIntToOp(v.AuxInt)
		if v_0.Op != OpARM64InvertFlags {
			break
		}
		cmp := v_0.Args[0]
		v.reset(OpARM64CSETM)
		v.AuxInt = opToAuxInt(arm64Invert(cc))
		v.AddArg(cmp)
		return true
	}
	// match: (CSETM [cc] flag)
	// cond: ccARM64Eval(cc, flag) > 0
	// result: (MOVDconst [-1])
	for {
		cc := auxIntToOp(v.AuxInt)
		flag := v_0
		if !(ccARM64Eval(cc, flag) > 0) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(-1)
		return true
	}
	// match: (CSETM [cc] flag)
	// cond: ccARM64Eval(cc, flag) < 0
	// result: (MOVDconst [0])
	for {
		cc := auxIntToOp(v.AuxInt)
		flag := v_0
		if !(ccARM64Eval(cc, flag) < 0) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64CSINC rewrites CSINC (conditional select/increment):
// flips the condition through InvertFlags, and folds statically known flags
// via ccARM64Eval to x or (ADDconst [1] y). Rules are tried in source order;
// the first match mutates v in place and returns true.
// NOTE(review): generated from _gen/ARM64.rules — edit the rules, not this code.
func rewriteValueARM64_OpARM64CSINC(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (CSINC [cc] x y (InvertFlags cmp))
	// result: (CSINC [arm64Invert(cc)] x y cmp)
	for {
		cc := auxIntToOp(v.AuxInt)
		x := v_0
		y := v_1
		if v_2.Op != OpARM64InvertFlags {
			break
		}
		cmp := v_2.Args[0]
		v.reset(OpARM64CSINC)
		v.AuxInt = opToAuxInt(arm64Invert(cc))
		v.AddArg3(x, y, cmp)
		return true
	}
	// match: (CSINC [cc] x _ flag)
	// cond: ccARM64Eval(cc, flag) > 0
	// result: x
	for {
		cc := auxIntToOp(v.AuxInt)
		x := v_0
		flag := v_2
		if !(ccARM64Eval(cc, flag) > 0) {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (CSINC [cc] _ y flag)
	// cond: ccARM64Eval(cc, flag) < 0
	// result: (ADDconst [1] y)
	for {
		cc := auxIntToOp(v.AuxInt)
		y := v_1
		flag := v_2
		if !(ccARM64Eval(cc, flag) < 0) {
			break
		}
		v.reset(OpARM64ADDconst)
		v.AuxInt = int64ToAuxInt(1)
		v.AddArg(y)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64CSINV rewrites CSINV (conditional select/invert):
// flips the condition through InvertFlags, and folds statically known flags
// via ccARM64Eval to x or (Not y). Rules are tried in source order; the first
// match mutates v in place and returns true.
// NOTE(review): generated from _gen/ARM64.rules — edit the rules, not this code.
func rewriteValueARM64_OpARM64CSINV(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (CSINV [cc] x y (InvertFlags cmp))
	// result: (CSINV [arm64Invert(cc)] x y cmp)
	for {
		cc := auxIntToOp(v.AuxInt)
		x := v_0
		y := v_1
		if v_2.Op != OpARM64InvertFlags {
			break
		}
		cmp := v_2.Args[0]
		v.reset(OpARM64CSINV)
		v.AuxInt = opToAuxInt(arm64Invert(cc))
		v.AddArg3(x, y, cmp)
		return true
	}
	// match: (CSINV [cc] x _ flag)
	// cond: ccARM64Eval(cc, flag) > 0
	// result: x
	for {
		cc := auxIntToOp(v.AuxInt)
		x := v_0
		flag := v_2
		if !(ccARM64Eval(cc, flag) > 0) {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (CSINV [cc] _ y flag)
	// cond: ccARM64Eval(cc, flag) < 0
	// result: (Not y)
	for {
		cc := auxIntToOp(v.AuxInt)
		y := v_1
		flag := v_2
		if !(ccARM64Eval(cc, flag) < 0) {
			break
		}
		v.reset(OpNot)
		v.AddArg(y)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64CSNEG rewrites CSNEG (conditional select/negate):
// flips the condition through InvertFlags, and folds statically known flags
// via ccARM64Eval to x or (NEG y). Rules are tried in source order; the first
// match mutates v in place and returns true.
// NOTE(review): generated from _gen/ARM64.rules — edit the rules, not this code.
func rewriteValueARM64_OpARM64CSNEG(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (CSNEG [cc] x y (InvertFlags cmp))
	// result: (CSNEG [arm64Invert(cc)] x y cmp)
	for {
		cc := auxIntToOp(v.AuxInt)
		x := v_0
		y := v_1
		if v_2.Op != OpARM64InvertFlags {
			break
		}
		cmp := v_2.Args[0]
		v.reset(OpARM64CSNEG)
		v.AuxInt = opToAuxInt(arm64Invert(cc))
		v.AddArg3(x, y, cmp)
		return true
	}
	// match: (CSNEG [cc] x _ flag)
	// cond: ccARM64Eval(cc, flag) > 0
	// result: x
	for {
		cc := auxIntToOp(v.AuxInt)
		x := v_0
		flag := v_2
		if !(ccARM64Eval(cc, flag) > 0) {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (CSNEG [cc] _ y flag)
	// cond: ccARM64Eval(cc, flag) < 0
	// result: (NEG y)
	for {
		cc := auxIntToOp(v.AuxInt)
		y := v_1
		flag := v_2
		if !(ccARM64Eval(cc, flag) < 0) {
			break
		}
		v.reset(OpARM64NEG)
		v.AddArg(y)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64DIV constant-folds a 64-bit signed division when
// both operands are MOVDconst and the divisor is nonzero (the d != 0 guard
// avoids folding a divide-by-zero at compile time). Mutates v in place and
// returns true on a match; returns false otherwise.
// NOTE(review): generated from _gen/ARM64.rules — edit the rules, not this code.
func rewriteValueARM64_OpARM64DIV(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (DIV (MOVDconst [c]) (MOVDconst [d]))
	// cond: d != 0
	// result: (MOVDconst [c/d])
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		d := auxIntToInt64(v_1.AuxInt)
		if !(d != 0) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(c / d)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64DIVW constant-folds a 32-bit signed division when
// both operands are MOVDconst and the divisor is nonzero; the operands are
// truncated to int32 before dividing and the quotient sign-extended back to
// int64. Mutates v in place and returns true on a match; false otherwise.
// NOTE(review): generated from _gen/ARM64.rules — edit the rules, not this code.
func rewriteValueARM64_OpARM64DIVW(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (DIVW (MOVDconst [c]) (MOVDconst [d]))
	// cond: d != 0
	// result: (MOVDconst [int64(int32(c)/int32(d))])
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		d := auxIntToInt64(v_1.AuxInt)
		if !(d != 0) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(int32(c) / int32(d)))
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64EON rewrites EON (exclusive-or-not): folds a
// constant second operand into XORconst with the complemented immediate,
// reduces (EON x x) to -1, and absorbs a shifted second operand into the
// corresponding EONshift* form when clobberIfDead allows it. Rules are tried
// in source order; the first match mutates v in place and returns true.
// NOTE(review): generated from _gen/ARM64.rules — edit the rules, not this code.
func rewriteValueARM64_OpARM64EON(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (EON x (MOVDconst [c]))
	// result: (XORconst [^c] x)
	for {
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64XORconst)
		v.AuxInt = int64ToAuxInt(^c)
		v.AddArg(x)
		return true
	}
	// match: (EON x x)
	// result: (MOVDconst [-1])
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(-1)
		return true
	}
	// match: (EON x0 x1:(SLLconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (EONshiftLL x0 y [c])
	for {
		x0 := v_0
		x1 := v_1
		if x1.Op != OpARM64SLLconst {
			break
		}
		c := auxIntToInt64(x1.AuxInt)
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64EONshiftLL)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg2(x0, y)
		return true
	}
	// match: (EON x0 x1:(SRLconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (EONshiftRL x0 y [c])
	for {
		x0 := v_0
		x1 := v_1
		if x1.Op != OpARM64SRLconst {
			break
		}
		c := auxIntToInt64(x1.AuxInt)
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64EONshiftRL)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg2(x0, y)
		return true
	}
	// match: (EON x0 x1:(SRAconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (EONshiftRA x0 y [c])
	for {
		x0 := v_0
		x1 := v_1
		if x1.Op != OpARM64SRAconst {
			break
		}
		c := auxIntToInt64(x1.AuxInt)
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64EONshiftRA)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg2(x0, y)
		return true
	}
	// match: (EON x0 x1:(RORconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (EONshiftRO x0 y [c])
	for {
		x0 := v_0
		x1 := v_1
		if x1.Op != OpARM64RORconst {
			break
		}
		c := auxIntToInt64(x1.AuxInt)
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64EONshiftRO)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg2(x0, y)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64EONshiftLL rewrites EONshiftLL: folds a constant
// second operand into XORconst with the complemented shifted immediate, and
// reduces (EONshiftLL (SLLconst x [c]) x [c]) — identical shifted value on
// both sides — to the constant -1. Rules are tried in order; the first match
// mutates v in place and returns true.
// NOTE(review): generated from _gen/ARM64.rules — edit the rules, not this code.
func rewriteValueARM64_OpARM64EONshiftLL(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (EONshiftLL x (MOVDconst [c]) [d])
	// result: (XORconst x [^int64(uint64(c)<<uint64(d))])
	for {
		d := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64XORconst)
		v.AuxInt = int64ToAuxInt(^int64(uint64(c) << uint64(d)))
		v.AddArg(x)
		return true
	}
	// match: (EONshiftLL (SLLconst x [c]) x [c])
	// result: (MOVDconst [-1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != c {
			break
		}
		x := v_0.Args[0]
		if x != v_1 {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(-1)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64EONshiftRA rewrites EONshiftRA: folds a constant
// second operand into XORconst with the complemented arithmetically-shifted
// immediate, and reduces (EONshiftRA (SRAconst x [c]) x [c]) to the constant
// -1. Rules are tried in order; the first match mutates v in place and
// returns true.
// NOTE(review): generated from _gen/ARM64.rules — edit the rules, not this code.
func rewriteValueARM64_OpARM64EONshiftRA(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (EONshiftRA x (MOVDconst [c]) [d])
	// result: (XORconst x [^(c>>uint64(d))])
	for {
		d := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64XORconst)
		v.AuxInt = int64ToAuxInt(^(c >> uint64(d)))
		v.AddArg(x)
		return true
	}
	// match: (EONshiftRA (SRAconst x [c]) x [c])
	// result: (MOVDconst [-1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64SRAconst || auxIntToInt64(v_0.AuxInt) != c {
			break
		}
		x := v_0.Args[0]
		if x != v_1 {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(-1)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64EONshiftRL rewrites EONshiftRL: folds a constant
// second operand into XORconst with the complemented logically-shifted
// immediate, and reduces (EONshiftRL (SRLconst x [c]) x [c]) to the constant
// -1. Rules are tried in order; the first match mutates v in place and
// returns true.
// NOTE(review): generated from _gen/ARM64.rules — edit the rules, not this code.
func rewriteValueARM64_OpARM64EONshiftRL(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (EONshiftRL x (MOVDconst [c]) [d])
	// result: (XORconst x [^int64(uint64(c)>>uint64(d))])
	for {
		d := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64XORconst)
		v.AuxInt = int64ToAuxInt(^int64(uint64(c) >> uint64(d)))
		v.AddArg(x)
		return true
	}
	// match: (EONshiftRL (SRLconst x [c]) x [c])
	// result: (MOVDconst [-1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64SRLconst || auxIntToInt64(v_0.AuxInt) != c {
			break
		}
		x := v_0.Args[0]
		if x != v_1 {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(-1)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64EONshiftRO rewrites EONshiftRO: folds a constant
// second operand into XORconst with the complemented rotated immediate
// (via rotateRight64), and reduces (EONshiftRO (RORconst x [c]) x [c]) to
// the constant -1. Rules are tried in order; the first match mutates v in
// place and returns true.
// NOTE(review): generated from _gen/ARM64.rules — edit the rules, not this code.
func rewriteValueARM64_OpARM64EONshiftRO(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (EONshiftRO x (MOVDconst [c]) [d])
	// result: (XORconst x [^rotateRight64(c, d)])
	for {
		d := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64XORconst)
		v.AuxInt = int64ToAuxInt(^rotateRight64(c, d))
		v.AddArg(x)
		return true
	}
	// match: (EONshiftRO (RORconst x [c]) x [c])
	// result: (MOVDconst [-1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64RORconst || auxIntToInt64(v_0.AuxInt) != c {
			break
		}
		x := v_0.Args[0]
		if x != v_1 {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(-1)
		return true
	}
	return false
}
  3988  func rewriteValueARM64_OpARM64Equal(v *Value) bool {
  3989  	v_0 := v.Args[0]
  3990  	b := v.Block
  3991  	// match: (Equal (CMPconst [0] z:(AND x y)))
  3992  	// cond: z.Uses == 1
  3993  	// result: (Equal (TST x y))
  3994  	for {
  3995  		if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 {
  3996  			break
  3997  		}
  3998  		z := v_0.Args[0]
  3999  		if z.Op != OpARM64AND {
  4000  			break
  4001  		}
  4002  		y := z.Args[1]
  4003  		x := z.Args[0]
  4004  		if !(z.Uses == 1) {
  4005  			break
  4006  		}
  4007  		v.reset(OpARM64Equal)
  4008  		v0 := b.NewValue0(v.Pos, OpARM64TST, types.TypeFlags)
  4009  		v0.AddArg2(x, y)
  4010  		v.AddArg(v0)
  4011  		return true
  4012  	}
  4013  	// match: (Equal (CMPWconst [0] x:(ANDconst [c] y)))
  4014  	// cond: x.Uses == 1
  4015  	// result: (Equal (TSTWconst [int32(c)] y))
  4016  	for {
  4017  		if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 {
  4018  			break
  4019  		}
  4020  		x := v_0.Args[0]
  4021  		if x.Op != OpARM64ANDconst {
  4022  			break
  4023  		}
  4024  		c := auxIntToInt64(x.AuxInt)
  4025  		y := x.Args[0]
  4026  		if !(x.Uses == 1) {
  4027  			break
  4028  		}
  4029  		v.reset(OpARM64Equal)
  4030  		v0 := b.NewValue0(v.Pos, OpARM64TSTWconst, types.TypeFlags)
  4031  		v0.AuxInt = int32ToAuxInt(int32(c))
  4032  		v0.AddArg(y)
  4033  		v.AddArg(v0)
  4034  		return true
  4035  	}
  4036  	// match: (Equal (CMPWconst [0] z:(AND x y)))
  4037  	// cond: z.Uses == 1
  4038  	// result: (Equal (TSTW x y))
  4039  	for {
  4040  		if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 {
  4041  			break
  4042  		}
  4043  		z := v_0.Args[0]
  4044  		if z.Op != OpARM64AND {
  4045  			break
  4046  		}
  4047  		y := z.Args[1]
  4048  		x := z.Args[0]
  4049  		if !(z.Uses == 1) {
  4050  			break
  4051  		}
  4052  		v.reset(OpARM64Equal)
  4053  		v0 := b.NewValue0(v.Pos, OpARM64TSTW, types.TypeFlags)
  4054  		v0.AddArg2(x, y)
  4055  		v.AddArg(v0)
  4056  		return true
  4057  	}
  4058  	// match: (Equal (CMPconst [0] x:(ANDconst [c] y)))
  4059  	// cond: x.Uses == 1
  4060  	// result: (Equal (TSTconst [c] y))
  4061  	for {
  4062  		if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 {
  4063  			break
  4064  		}
  4065  		x := v_0.Args[0]
  4066  		if x.Op != OpARM64ANDconst {
  4067  			break
  4068  		}
  4069  		c := auxIntToInt64(x.AuxInt)
  4070  		y := x.Args[0]
  4071  		if !(x.Uses == 1) {
  4072  			break
  4073  		}
  4074  		v.reset(OpARM64Equal)
  4075  		v0 := b.NewValue0(v.Pos, OpARM64TSTconst, types.TypeFlags)
  4076  		v0.AuxInt = int64ToAuxInt(c)
  4077  		v0.AddArg(y)
  4078  		v.AddArg(v0)
  4079  		return true
  4080  	}
  4081  	// match: (Equal (CMP x z:(NEG y)))
  4082  	// cond: z.Uses == 1
  4083  	// result: (Equal (CMN x y))
  4084  	for {
  4085  		if v_0.Op != OpARM64CMP {
  4086  			break
  4087  		}
  4088  		_ = v_0.Args[1]
  4089  		x := v_0.Args[0]
  4090  		z := v_0.Args[1]
  4091  		if z.Op != OpARM64NEG {
  4092  			break
  4093  		}
  4094  		y := z.Args[0]
  4095  		if !(z.Uses == 1) {
  4096  			break
  4097  		}
  4098  		v.reset(OpARM64Equal)
  4099  		v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags)
  4100  		v0.AddArg2(x, y)
  4101  		v.AddArg(v0)
  4102  		return true
  4103  	}
  4104  	// match: (Equal (CMPW x z:(NEG y)))
  4105  	// cond: z.Uses == 1
  4106  	// result: (Equal (CMNW x y))
  4107  	for {
  4108  		if v_0.Op != OpARM64CMPW {
  4109  			break
  4110  		}
  4111  		_ = v_0.Args[1]
  4112  		x := v_0.Args[0]
  4113  		z := v_0.Args[1]
  4114  		if z.Op != OpARM64NEG {
  4115  			break
  4116  		}
  4117  		y := z.Args[0]
  4118  		if !(z.Uses == 1) {
  4119  			break
  4120  		}
  4121  		v.reset(OpARM64Equal)
  4122  		v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags)
  4123  		v0.AddArg2(x, y)
  4124  		v.AddArg(v0)
  4125  		return true
  4126  	}
  4127  	// match: (Equal (CMPconst [0] x:(ADDconst [c] y)))
  4128  	// cond: x.Uses == 1
  4129  	// result: (Equal (CMNconst [c] y))
  4130  	for {
  4131  		if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 {
  4132  			break
  4133  		}
  4134  		x := v_0.Args[0]
  4135  		if x.Op != OpARM64ADDconst {
  4136  			break
  4137  		}
  4138  		c := auxIntToInt64(x.AuxInt)
  4139  		y := x.Args[0]
  4140  		if !(x.Uses == 1) {
  4141  			break
  4142  		}
  4143  		v.reset(OpARM64Equal)
  4144  		v0 := b.NewValue0(v.Pos, OpARM64CMNconst, types.TypeFlags)
  4145  		v0.AuxInt = int64ToAuxInt(c)
  4146  		v0.AddArg(y)
  4147  		v.AddArg(v0)
  4148  		return true
  4149  	}
  4150  	// match: (Equal (CMPWconst [0] x:(ADDconst [c] y)))
  4151  	// cond: x.Uses == 1
  4152  	// result: (Equal (CMNWconst [int32(c)] y))
  4153  	for {
  4154  		if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 {
  4155  			break
  4156  		}
  4157  		x := v_0.Args[0]
  4158  		if x.Op != OpARM64ADDconst {
  4159  			break
  4160  		}
  4161  		c := auxIntToInt64(x.AuxInt)
  4162  		y := x.Args[0]
  4163  		if !(x.Uses == 1) {
  4164  			break
  4165  		}
  4166  		v.reset(OpARM64Equal)
  4167  		v0 := b.NewValue0(v.Pos, OpARM64CMNWconst, types.TypeFlags)
  4168  		v0.AuxInt = int32ToAuxInt(int32(c))
  4169  		v0.AddArg(y)
  4170  		v.AddArg(v0)
  4171  		return true
  4172  	}
  4173  	// match: (Equal (CMPconst [0] z:(ADD x y)))
  4174  	// cond: z.Uses == 1
  4175  	// result: (Equal (CMN x y))
  4176  	for {
  4177  		if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 {
  4178  			break
  4179  		}
  4180  		z := v_0.Args[0]
  4181  		if z.Op != OpARM64ADD {
  4182  			break
  4183  		}
  4184  		y := z.Args[1]
  4185  		x := z.Args[0]
  4186  		if !(z.Uses == 1) {
  4187  			break
  4188  		}
  4189  		v.reset(OpARM64Equal)
  4190  		v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags)
  4191  		v0.AddArg2(x, y)
  4192  		v.AddArg(v0)
  4193  		return true
  4194  	}
  4195  	// match: (Equal (CMPWconst [0] z:(ADD x y)))
  4196  	// cond: z.Uses == 1
  4197  	// result: (Equal (CMNW x y))
  4198  	for {
  4199  		if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 {
  4200  			break
  4201  		}
  4202  		z := v_0.Args[0]
  4203  		if z.Op != OpARM64ADD {
  4204  			break
  4205  		}
  4206  		y := z.Args[1]
  4207  		x := z.Args[0]
  4208  		if !(z.Uses == 1) {
  4209  			break
  4210  		}
  4211  		v.reset(OpARM64Equal)
  4212  		v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags)
  4213  		v0.AddArg2(x, y)
  4214  		v.AddArg(v0)
  4215  		return true
  4216  	}
  4217  	// match: (Equal (CMPconst [0] z:(MADD a x y)))
  4218  	// cond: z.Uses==1
  4219  	// result: (Equal (CMN a (MUL <x.Type> x y)))
  4220  	for {
  4221  		if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 {
  4222  			break
  4223  		}
  4224  		z := v_0.Args[0]
  4225  		if z.Op != OpARM64MADD {
  4226  			break
  4227  		}
  4228  		y := z.Args[2]
  4229  		a := z.Args[0]
  4230  		x := z.Args[1]
  4231  		if !(z.Uses == 1) {
  4232  			break
  4233  		}
  4234  		v.reset(OpARM64Equal)
  4235  		v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags)
  4236  		v1 := b.NewValue0(v.Pos, OpARM64MUL, x.Type)
  4237  		v1.AddArg2(x, y)
  4238  		v0.AddArg2(a, v1)
  4239  		v.AddArg(v0)
  4240  		return true
  4241  	}
  4242  	// match: (Equal (CMPconst [0] z:(MSUB a x y)))
  4243  	// cond: z.Uses==1
  4244  	// result: (Equal (CMP a (MUL <x.Type> x y)))
  4245  	for {
  4246  		if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 {
  4247  			break
  4248  		}
  4249  		z := v_0.Args[0]
  4250  		if z.Op != OpARM64MSUB {
  4251  			break
  4252  		}
  4253  		y := z.Args[2]
  4254  		a := z.Args[0]
  4255  		x := z.Args[1]
  4256  		if !(z.Uses == 1) {
  4257  			break
  4258  		}
  4259  		v.reset(OpARM64Equal)
  4260  		v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
  4261  		v1 := b.NewValue0(v.Pos, OpARM64MUL, x.Type)
  4262  		v1.AddArg2(x, y)
  4263  		v0.AddArg2(a, v1)
  4264  		v.AddArg(v0)
  4265  		return true
  4266  	}
  4267  	// match: (Equal (CMPWconst [0] z:(MADDW a x y)))
  4268  	// cond: z.Uses==1
  4269  	// result: (Equal (CMNW a (MULW <x.Type> x y)))
  4270  	for {
  4271  		if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 {
  4272  			break
  4273  		}
  4274  		z := v_0.Args[0]
  4275  		if z.Op != OpARM64MADDW {
  4276  			break
  4277  		}
  4278  		y := z.Args[2]
  4279  		a := z.Args[0]
  4280  		x := z.Args[1]
  4281  		if !(z.Uses == 1) {
  4282  			break
  4283  		}
  4284  		v.reset(OpARM64Equal)
  4285  		v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags)
  4286  		v1 := b.NewValue0(v.Pos, OpARM64MULW, x.Type)
  4287  		v1.AddArg2(x, y)
  4288  		v0.AddArg2(a, v1)
  4289  		v.AddArg(v0)
  4290  		return true
  4291  	}
  4292  	// match: (Equal (CMPWconst [0] z:(MSUBW a x y)))
  4293  	// cond: z.Uses==1
  4294  	// result: (Equal (CMPW a (MULW <x.Type> x y)))
  4295  	for {
  4296  		if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 {
  4297  			break
  4298  		}
  4299  		z := v_0.Args[0]
  4300  		if z.Op != OpARM64MSUBW {
  4301  			break
  4302  		}
  4303  		y := z.Args[2]
  4304  		a := z.Args[0]
  4305  		x := z.Args[1]
  4306  		if !(z.Uses == 1) {
  4307  			break
  4308  		}
  4309  		v.reset(OpARM64Equal)
  4310  		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
  4311  		v1 := b.NewValue0(v.Pos, OpARM64MULW, x.Type)
  4312  		v1.AddArg2(x, y)
  4313  		v0.AddArg2(a, v1)
  4314  		v.AddArg(v0)
  4315  		return true
  4316  	}
  4317  	// match: (Equal (FlagConstant [fc]))
  4318  	// result: (MOVDconst [b2i(fc.eq())])
  4319  	for {
  4320  		if v_0.Op != OpARM64FlagConstant {
  4321  			break
  4322  		}
  4323  		fc := auxIntToFlagConstant(v_0.AuxInt)
  4324  		v.reset(OpARM64MOVDconst)
  4325  		v.AuxInt = int64ToAuxInt(b2i(fc.eq()))
  4326  		return true
  4327  	}
  4328  	// match: (Equal (InvertFlags x))
  4329  	// result: (Equal x)
  4330  	for {
  4331  		if v_0.Op != OpARM64InvertFlags {
  4332  			break
  4333  		}
  4334  		x := v_0.Args[0]
  4335  		v.reset(OpARM64Equal)
  4336  		v.AddArg(x)
  4337  		return true
  4338  	}
  4339  	return false
  4340  }
// rewriteValueARM64_OpARM64FADDD applies the generated ARM64.rules rewrites
// for FADDD (double-precision float add): fuse an add with a single-use
// multiply into FMADDD/FMSUBD when the function permits FMA contraction
// (a.Block.Func.useFMA(v)). The inner _i0 loop swaps v_0/v_1 to try both
// operand orders, since FADDD is commutative. Returns true if v was
// rewritten, false otherwise.
func rewriteValueARM64_OpARM64FADDD(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FADDD a (FMULD x y))
	// cond: a.Block.Func.useFMA(v)
	// result: (FMADDD a x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			a := v_0
			if v_1.Op != OpARM64FMULD {
				continue
			}
			y := v_1.Args[1]
			x := v_1.Args[0]
			if !(a.Block.Func.useFMA(v)) {
				continue
			}
			v.reset(OpARM64FMADDD)
			v.AddArg3(a, x, y)
			return true
		}
		break
	}
	// match: (FADDD a (FNMULD x y))
	// cond: a.Block.Func.useFMA(v)
	// result: (FMSUBD a x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			a := v_0
			if v_1.Op != OpARM64FNMULD {
				continue
			}
			y := v_1.Args[1]
			x := v_1.Args[0]
			if !(a.Block.Func.useFMA(v)) {
				continue
			}
			v.reset(OpARM64FMSUBD)
			v.AddArg3(a, x, y)
			return true
		}
		break
	}
	return false
}
// rewriteValueARM64_OpARM64FADDS applies the generated ARM64.rules rewrites
// for FADDS (single-precision float add): the single-precision analogue of
// the FADDD rules, fusing a single-use FMULS/FNMULS operand into
// FMADDS/FMSUBS when useFMA allows contraction. Both operand orders are
// tried via the _i0 swap loop (FADDS is commutative). Returns true if v was
// rewritten.
func rewriteValueARM64_OpARM64FADDS(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FADDS a (FMULS x y))
	// cond: a.Block.Func.useFMA(v)
	// result: (FMADDS a x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			a := v_0
			if v_1.Op != OpARM64FMULS {
				continue
			}
			y := v_1.Args[1]
			x := v_1.Args[0]
			if !(a.Block.Func.useFMA(v)) {
				continue
			}
			v.reset(OpARM64FMADDS)
			v.AddArg3(a, x, y)
			return true
		}
		break
	}
	// match: (FADDS a (FNMULS x y))
	// cond: a.Block.Func.useFMA(v)
	// result: (FMSUBS a x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			a := v_0
			if v_1.Op != OpARM64FNMULS {
				continue
			}
			y := v_1.Args[1]
			x := v_1.Args[0]
			if !(a.Block.Func.useFMA(v)) {
				continue
			}
			v.reset(OpARM64FMSUBS)
			v.AddArg3(a, x, y)
			return true
		}
		break
	}
	return false
}
// rewriteValueARM64_OpARM64FCMPD applies the generated ARM64.rules rewrites
// for FCMPD (double-precision float compare): a compare against the constant
// 0.0 becomes the dedicated FCMPD0 form; when the zero is the first operand,
// the result is wrapped in InvertFlags to account for the swapped comparison
// direction. Returns true if v was rewritten.
func rewriteValueARM64_OpARM64FCMPD(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (FCMPD x (FMOVDconst [0]))
	// result: (FCMPD0 x)
	for {
		x := v_0
		if v_1.Op != OpARM64FMOVDconst || auxIntToFloat64(v_1.AuxInt) != 0 {
			break
		}
		v.reset(OpARM64FCMPD0)
		v.AddArg(x)
		return true
	}
	// match: (FCMPD (FMOVDconst [0]) x)
	// result: (InvertFlags (FCMPD0 x))
	for {
		if v_0.Op != OpARM64FMOVDconst || auxIntToFloat64(v_0.AuxInt) != 0 {
			break
		}
		x := v_1
		v.reset(OpARM64InvertFlags)
		v0 := b.NewValue0(v.Pos, OpARM64FCMPD0, types.TypeFlags)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64FCMPS applies the generated ARM64.rules rewrites
// for FCMPS (single-precision float compare): the single-precision analogue
// of the FCMPD rules — compare against constant 0.0 becomes FCMPS0, with
// InvertFlags inserted when the zero appears as the first operand. Returns
// true if v was rewritten.
func rewriteValueARM64_OpARM64FCMPS(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (FCMPS x (FMOVSconst [0]))
	// result: (FCMPS0 x)
	for {
		x := v_0
		if v_1.Op != OpARM64FMOVSconst || auxIntToFloat64(v_1.AuxInt) != 0 {
			break
		}
		v.reset(OpARM64FCMPS0)
		v.AddArg(x)
		return true
	}
	// match: (FCMPS (FMOVSconst [0]) x)
	// result: (InvertFlags (FCMPS0 x))
	for {
		if v_0.Op != OpARM64FMOVSconst || auxIntToFloat64(v_0.AuxInt) != 0 {
			break
		}
		x := v_1
		v.reset(OpARM64InvertFlags)
		v0 := b.NewValue0(v.Pos, OpARM64FCMPS0, types.TypeFlags)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64FMOVDfpgp applies the generated ARM64.rules
// rewrite for FMOVDfpgp (FP->GP register move): when the operand is a
// function argument, replace v with a fresh Arg of the target type placed in
// the entry block (the "@b.Func.Entry" result form), preserving the
// argument's offset and symbol. Returns true if v was rewritten.
func rewriteValueARM64_OpARM64FMOVDfpgp(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (FMOVDfpgp <t> (Arg [off] {sym}))
	// result: @b.Func.Entry (Arg <t> [off] {sym})
	for {
		t := v.Type
		if v_0.Op != OpArg {
			break
		}
		off := auxIntToInt32(v_0.AuxInt)
		sym := auxToSym(v_0.Aux)
		b = b.Func.Entry // new value is created in the entry block, not v's block
		v0 := b.NewValue0(v.Pos, OpArg, t)
		v.copyOf(v0)
		v0.AuxInt = int32ToAuxInt(off)
		v0.Aux = symToAux(sym)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64FMOVDgpfp applies the generated ARM64.rules
// rewrite for FMOVDgpfp (GP->FP register move): the mirror of the FMOVDfpgp
// rule — a move of a function argument is replaced by a fresh Arg of the
// target type in the entry block, keeping the same offset and symbol.
// Returns true if v was rewritten.
func rewriteValueARM64_OpARM64FMOVDgpfp(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (FMOVDgpfp <t> (Arg [off] {sym}))
	// result: @b.Func.Entry (Arg <t> [off] {sym})
	for {
		t := v.Type
		if v_0.Op != OpArg {
			break
		}
		off := auxIntToInt32(v_0.AuxInt)
		sym := auxToSym(v_0.Aux)
		b = b.Func.Entry // new value is created in the entry block, not v's block
		v0 := b.NewValue0(v.Pos, OpArg, t)
		v.copyOf(v0)
		v0.AuxInt = int32ToAuxInt(off)
		v0.Aux = symToAux(sym)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64FMOVDload applies the generated ARM64.rules
// rewrites for FMOVDload (load a double into an FP register): forward a just-
// stored value through FMOVDgpfp (store-to-load forwarding), fold ADDconst /
// MOVDaddr address arithmetic into the load's offset when the combined
// offset fits in 32 bits (and the base is not SB under -shared), and convert
// register-indexed address forms (ADD, ADDshiftLL [3]) into the idx/idx8
// load variants when offset and symbol are empty. Returns true if v was
// rewritten.
func rewriteValueARM64_OpARM64FMOVDload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (FMOVDload [off] {sym} ptr (MOVDstore [off] {sym} ptr val _))
	// result: (FMOVDgpfp val)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpARM64MOVDstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
			break
		}
		val := v_1.Args[1]
		if ptr != v_1.Args[0] {
			break
		}
		v.reset(OpARM64FMOVDgpfp)
		v.AddArg(val)
		return true
	}
	// match: (FMOVDload [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (FMOVDload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64FMOVDload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (FMOVDload [off] {sym} (ADD ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (FMOVDloadidx ptr idx mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADD {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64FMOVDloadidx)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (FMOVDload [off] {sym} (ADDshiftLL [3] ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (FMOVDloadidx8 ptr idx mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDshiftLL || auxIntToInt64(v_0.AuxInt) != 3 {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64FMOVDloadidx8)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (FMOVDload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (FMOVDload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64FMOVDload)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64FMOVDloadidx applies the generated ARM64.rules
// rewrites for FMOVDloadidx (register-indexed double load): fold a constant
// index (either operand) back into an offset-addressed FMOVDload when it
// fits in 32 bits, and recognize an index shifted left by 3 (SLLconst [3],
// either operand) as the scaled FMOVDloadidx8 form. Returns true if v was
// rewritten.
func rewriteValueARM64_OpARM64FMOVDloadidx(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FMOVDloadidx ptr (MOVDconst [c]) mem)
	// cond: is32Bit(c)
	// result: (FMOVDload [int32(c)] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpARM64FMOVDload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (FMOVDloadidx (MOVDconst [c]) ptr mem)
	// cond: is32Bit(c)
	// result: (FMOVDload [int32(c)] ptr mem)
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		ptr := v_1
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpARM64FMOVDload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (FMOVDloadidx ptr (SLLconst [3] idx) mem)
	// result: (FMOVDloadidx8 ptr idx mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64SLLconst || auxIntToInt64(v_1.AuxInt) != 3 {
			break
		}
		idx := v_1.Args[0]
		mem := v_2
		v.reset(OpARM64FMOVDloadidx8)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (FMOVDloadidx (SLLconst [3] idx) ptr mem)
	// result: (FMOVDloadidx8 ptr idx mem)
	for {
		if v_0.Op != OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != 3 {
			break
		}
		idx := v_0.Args[0]
		ptr := v_1
		mem := v_2
		v.reset(OpARM64FMOVDloadidx8)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64FMOVDloadidx8 applies the generated ARM64.rules
// rewrite for FMOVDloadidx8 (double load with index scaled by 8): when the
// index is a constant and c<<3 fits in 32 bits, fold it into the offset of a
// plain FMOVDload. Returns true if v was rewritten.
func rewriteValueARM64_OpARM64FMOVDloadidx8(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FMOVDloadidx8 ptr (MOVDconst [c]) mem)
	// cond: is32Bit(c<<3)
	// result: (FMOVDload ptr [int32(c)<<3] mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c << 3)) {
			break
		}
		v.reset(OpARM64FMOVDload)
		v.AuxInt = int32ToAuxInt(int32(c) << 3)
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64FMOVDstore applies the generated ARM64.rules
// rewrites for FMOVDstore (store a double from an FP register): bypass a
// GP->FP move by storing the integer value directly (MOVDstore), fold
// ADDconst / MOVDaddr address arithmetic into the store's offset when the
// combined offset fits in 32 bits (and the base is not SB under -shared),
// and convert register-indexed address forms (ADD, ADDshiftLL [3]) into the
// idx/idx8 store variants when offset and symbol are empty. Returns true if
// v was rewritten.
func rewriteValueARM64_OpARM64FMOVDstore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (FMOVDstore [off] {sym} ptr (FMOVDgpfp val) mem)
	// result: (MOVDstore [off] {sym} ptr val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpARM64FMOVDgpfp {
			break
		}
		val := v_1.Args[0]
		mem := v_2
		v.reset(OpARM64MOVDstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (FMOVDstore [off1] {sym} (ADDconst [off2] ptr) val mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (FMOVDstore [off1+int32(off2)] {sym} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64FMOVDstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (FMOVDstore [off] {sym} (ADD ptr idx) val mem)
	// cond: off == 0 && sym == nil
	// result: (FMOVDstoreidx ptr idx val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADD {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64FMOVDstoreidx)
		v.AddArg4(ptr, idx, val, mem)
		return true
	}
	// match: (FMOVDstore [off] {sym} (ADDshiftLL [3] ptr idx) val mem)
	// cond: off == 0 && sym == nil
	// result: (FMOVDstoreidx8 ptr idx val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDshiftLL || auxIntToInt64(v_0.AuxInt) != 3 {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64FMOVDstoreidx8)
		v.AddArg4(ptr, idx, val, mem)
		return true
	}
	// match: (FMOVDstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (FMOVDstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64FMOVDstore)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(ptr, val, mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64FMOVDstoreidx applies the generated ARM64.rules
// rewrites for FMOVDstoreidx (register-indexed double store): fold a
// constant index (either address operand) into an offset-addressed
// FMOVDstore when it fits in 32 bits, and recognize an index shifted left by
// 3 (SLLconst [3], either address operand) as the scaled FMOVDstoreidx8
// form. Returns true if v was rewritten.
func rewriteValueARM64_OpARM64FMOVDstoreidx(v *Value) bool {
	v_3 := v.Args[3]
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FMOVDstoreidx ptr (MOVDconst [c]) val mem)
	// cond: is32Bit(c)
	// result: (FMOVDstore [int32(c)] ptr val mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		val := v_2
		mem := v_3
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpARM64FMOVDstore)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (FMOVDstoreidx (MOVDconst [c]) idx val mem)
	// cond: is32Bit(c)
	// result: (FMOVDstore [int32(c)] idx val mem)
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		idx := v_1
		val := v_2
		mem := v_3
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpARM64FMOVDstore)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg3(idx, val, mem)
		return true
	}
	// match: (FMOVDstoreidx ptr (SLLconst [3] idx) val mem)
	// result: (FMOVDstoreidx8 ptr idx val mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64SLLconst || auxIntToInt64(v_1.AuxInt) != 3 {
			break
		}
		idx := v_1.Args[0]
		val := v_2
		mem := v_3
		v.reset(OpARM64FMOVDstoreidx8)
		v.AddArg4(ptr, idx, val, mem)
		return true
	}
	// match: (FMOVDstoreidx (SLLconst [3] idx) ptr val mem)
	// result: (FMOVDstoreidx8 ptr idx val mem)
	for {
		if v_0.Op != OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != 3 {
			break
		}
		idx := v_0.Args[0]
		ptr := v_1
		val := v_2
		mem := v_3
		v.reset(OpARM64FMOVDstoreidx8)
		v.AddArg4(ptr, idx, val, mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64FMOVDstoreidx8 applies the generated ARM64.rules
// rewrite for FMOVDstoreidx8 (double store with index scaled by 8): when the
// index is a constant and c<<3 fits in 32 bits, fold it into the offset of a
// plain FMOVDstore. Returns true if v was rewritten.
func rewriteValueARM64_OpARM64FMOVDstoreidx8(v *Value) bool {
	v_3 := v.Args[3]
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FMOVDstoreidx8 ptr (MOVDconst [c]) val mem)
	// cond: is32Bit(c<<3)
	// result: (FMOVDstore [int32(c)<<3] ptr val mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		val := v_2
		mem := v_3
		if !(is32Bit(c << 3)) {
			break
		}
		v.reset(OpARM64FMOVDstore)
		v.AuxInt = int32ToAuxInt(int32(c) << 3)
		v.AddArg3(ptr, val, mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64FMOVSload applies the generated ARM64.rules
// rewrites for FMOVSload (load a single-precision float into an FP
// register): the single-precision analogue of the FMOVDload rules — forward
// a just-stored word through FMOVSgpfp, fold ADDconst / MOVDaddr address
// arithmetic into the offset when it fits in 32 bits (and the base is not SB
// under -shared), and convert register-indexed address forms (ADD,
// ADDshiftLL [2]) into the idx/idx4 load variants when offset and symbol are
// empty. Returns true if v was rewritten.
func rewriteValueARM64_OpARM64FMOVSload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (FMOVSload [off] {sym} ptr (MOVWstore [off] {sym} ptr val _))
	// result: (FMOVSgpfp val)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpARM64MOVWstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
			break
		}
		val := v_1.Args[1]
		if ptr != v_1.Args[0] {
			break
		}
		v.reset(OpARM64FMOVSgpfp)
		v.AddArg(val)
		return true
	}
	// match: (FMOVSload [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (FMOVSload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64FMOVSload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (FMOVSload [off] {sym} (ADD ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (FMOVSloadidx ptr idx mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADD {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64FMOVSloadidx)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (FMOVSload [off] {sym} (ADDshiftLL [2] ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (FMOVSloadidx4 ptr idx mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDshiftLL || auxIntToInt64(v_0.AuxInt) != 2 {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64FMOVSloadidx4)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (FMOVSload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (FMOVSload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64FMOVSload)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64FMOVSloadidx applies the generated ARM64.rules
// rewrites for FMOVSloadidx (register-indexed single-precision load): fold a
// constant index (either operand) back into an offset-addressed FMOVSload
// when it fits in 32 bits, and recognize an index shifted left by 2
// (SLLconst [2], either operand) as the scaled FMOVSloadidx4 form. Returns
// true if v was rewritten.
func rewriteValueARM64_OpARM64FMOVSloadidx(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FMOVSloadidx ptr (MOVDconst [c]) mem)
	// cond: is32Bit(c)
	// result: (FMOVSload [int32(c)] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpARM64FMOVSload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (FMOVSloadidx (MOVDconst [c]) ptr mem)
	// cond: is32Bit(c)
	// result: (FMOVSload [int32(c)] ptr mem)
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		ptr := v_1
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpARM64FMOVSload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (FMOVSloadidx ptr (SLLconst [2] idx) mem)
	// result: (FMOVSloadidx4 ptr idx mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64SLLconst || auxIntToInt64(v_1.AuxInt) != 2 {
			break
		}
		idx := v_1.Args[0]
		mem := v_2
		v.reset(OpARM64FMOVSloadidx4)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (FMOVSloadidx (SLLconst [2] idx) ptr mem)
	// result: (FMOVSloadidx4 ptr idx mem)
	for {
		if v_0.Op != OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != 2 {
			break
		}
		idx := v_0.Args[0]
		ptr := v_1
		mem := v_2
		v.reset(OpARM64FMOVSloadidx4)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64FMOVSloadidx4 rewrites FMOVSloadidx4 values: a
// constant index is folded into a plain FMOVSload with offset c<<2, provided
// the scaled offset still fits in 32 bits. Reports whether v was rewritten.
// NOTE: generated from _gen/ARM64.rules — regenerate rather than hand-editing.
func rewriteValueARM64_OpARM64FMOVSloadidx4(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FMOVSloadidx4 ptr (MOVDconst [c]) mem)
	// cond: is32Bit(c<<2)
	// result: (FMOVSload ptr [int32(c)<<2] mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c << 2)) {
			break
		}
		v.reset(OpARM64FMOVSload)
		v.AuxInt = int32ToAuxInt(int32(c) << 2)
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64FMOVSstore rewrites FMOVSstore values: a store of a
// GP->FP move becomes a plain MOVWstore, ADDconst/MOVDaddr address arithmetic
// is folded into the store offset/symbol (subject to 32-bit offset range and
// the shared-code SB restriction), and ADD/ADDshiftLL addresses become the
// indexed store forms. Reports whether v was rewritten.
// NOTE: generated from _gen/ARM64.rules — regenerate rather than hand-editing.
func rewriteValueARM64_OpARM64FMOVSstore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (FMOVSstore [off] {sym} ptr (FMOVSgpfp val) mem)
	// result: (MOVWstore [off] {sym} ptr val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpARM64FMOVSgpfp {
			break
		}
		val := v_1.Args[0]
		mem := v_2
		v.reset(OpARM64MOVWstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (FMOVSstore [off1] {sym} (ADDconst [off2] ptr) val mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (FMOVSstore [off1+int32(off2)] {sym} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64FMOVSstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (FMOVSstore [off] {sym} (ADD ptr idx) val mem)
	// cond: off == 0 && sym == nil
	// result: (FMOVSstoreidx ptr idx val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADD {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64FMOVSstoreidx)
		v.AddArg4(ptr, idx, val, mem)
		return true
	}
	// match: (FMOVSstore [off] {sym} (ADDshiftLL [2] ptr idx) val mem)
	// cond: off == 0 && sym == nil
	// result: (FMOVSstoreidx4 ptr idx val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDshiftLL || auxIntToInt64(v_0.AuxInt) != 2 {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64FMOVSstoreidx4)
		v.AddArg4(ptr, idx, val, mem)
		return true
	}
	// match: (FMOVSstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (FMOVSstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64FMOVSstore)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(ptr, val, mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64FMOVSstoreidx rewrites FMOVSstoreidx values: a
// constant index (in either address operand) is folded into the store offset
// when it fits in 32 bits, and an index shifted left by 2 is converted to the
// scaled FMOVSstoreidx4 form. Reports whether v was rewritten.
// NOTE: generated from _gen/ARM64.rules — regenerate rather than hand-editing.
func rewriteValueARM64_OpARM64FMOVSstoreidx(v *Value) bool {
	v_3 := v.Args[3]
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FMOVSstoreidx ptr (MOVDconst [c]) val mem)
	// cond: is32Bit(c)
	// result: (FMOVSstore [int32(c)] ptr val mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		val := v_2
		mem := v_3
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpARM64FMOVSstore)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (FMOVSstoreidx (MOVDconst [c]) idx val mem)
	// cond: is32Bit(c)
	// result: (FMOVSstore [int32(c)] idx val mem)
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		idx := v_1
		val := v_2
		mem := v_3
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpARM64FMOVSstore)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg3(idx, val, mem)
		return true
	}
	// match: (FMOVSstoreidx ptr (SLLconst [2] idx) val mem)
	// result: (FMOVSstoreidx4 ptr idx val mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64SLLconst || auxIntToInt64(v_1.AuxInt) != 2 {
			break
		}
		idx := v_1.Args[0]
		val := v_2
		mem := v_3
		v.reset(OpARM64FMOVSstoreidx4)
		v.AddArg4(ptr, idx, val, mem)
		return true
	}
	// match: (FMOVSstoreidx (SLLconst [2] idx) ptr val mem)
	// result: (FMOVSstoreidx4 ptr idx val mem)
	for {
		if v_0.Op != OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != 2 {
			break
		}
		idx := v_0.Args[0]
		ptr := v_1
		val := v_2
		mem := v_3
		v.reset(OpARM64FMOVSstoreidx4)
		v.AddArg4(ptr, idx, val, mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64FMOVSstoreidx4 rewrites FMOVSstoreidx4 values: a
// constant index is folded into a plain FMOVSstore with offset c<<2, provided
// the scaled offset still fits in 32 bits. Reports whether v was rewritten.
// NOTE: generated from _gen/ARM64.rules — regenerate rather than hand-editing.
func rewriteValueARM64_OpARM64FMOVSstoreidx4(v *Value) bool {
	v_3 := v.Args[3]
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FMOVSstoreidx4 ptr (MOVDconst [c]) val mem)
	// cond: is32Bit(c<<2)
	// result: (FMOVSstore [int32(c)<<2] ptr val mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		val := v_2
		mem := v_3
		if !(is32Bit(c << 2)) {
			break
		}
		v.reset(OpARM64FMOVSstore)
		v.AuxInt = int32ToAuxInt(int32(c) << 2)
		v.AddArg3(ptr, val, mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64FMULD rewrites FMULD values: multiplying a negated
// operand folds the negation into FNMULD. Reports whether v was rewritten.
// NOTE: generated from _gen/ARM64.rules — regenerate rather than hand-editing.
func rewriteValueARM64_OpARM64FMULD(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FMULD (FNEGD x) y)
	// result: (FNMULD x y)
	for {
		// FMULD is commutative: the inner loop swaps v_0/v_1 on the second
		// iteration so the FNEGD operand is matched in either position.
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpARM64FNEGD {
				continue
			}
			x := v_0.Args[0]
			y := v_1
			v.reset(OpARM64FNMULD)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	return false
}
// rewriteValueARM64_OpARM64FMULS rewrites FMULS values: multiplying a negated
// operand folds the negation into FNMULS. Reports whether v was rewritten.
// NOTE: generated from _gen/ARM64.rules — regenerate rather than hand-editing.
func rewriteValueARM64_OpARM64FMULS(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FMULS (FNEGS x) y)
	// result: (FNMULS x y)
	for {
		// FMULS is commutative: the inner loop swaps v_0/v_1 on the second
		// iteration so the FNEGS operand is matched in either position.
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpARM64FNEGS {
				continue
			}
			x := v_0.Args[0]
			y := v_1
			v.reset(OpARM64FNMULS)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	return false
}
// rewriteValueARM64_OpARM64FNEGD rewrites FNEGD values: negating a multiply
// toggles between FMULD and FNMULD. Reports whether v was rewritten.
// NOTE: generated from _gen/ARM64.rules — regenerate rather than hand-editing.
func rewriteValueARM64_OpARM64FNEGD(v *Value) bool {
	v_0 := v.Args[0]
	// match: (FNEGD (FMULD x y))
	// result: (FNMULD x y)
	for {
		if v_0.Op != OpARM64FMULD {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		v.reset(OpARM64FNMULD)
		v.AddArg2(x, y)
		return true
	}
	// match: (FNEGD (FNMULD x y))
	// result: (FMULD x y)
	for {
		if v_0.Op != OpARM64FNMULD {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		v.reset(OpARM64FMULD)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64FNEGS rewrites FNEGS values: negating a multiply
// toggles between FMULS and FNMULS. Reports whether v was rewritten.
// NOTE: generated from _gen/ARM64.rules — regenerate rather than hand-editing.
func rewriteValueARM64_OpARM64FNEGS(v *Value) bool {
	v_0 := v.Args[0]
	// match: (FNEGS (FMULS x y))
	// result: (FNMULS x y)
	for {
		if v_0.Op != OpARM64FMULS {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		v.reset(OpARM64FNMULS)
		v.AddArg2(x, y)
		return true
	}
	// match: (FNEGS (FNMULS x y))
	// result: (FMULS x y)
	for {
		if v_0.Op != OpARM64FNMULS {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		v.reset(OpARM64FMULS)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64FNMULD rewrites FNMULD values: a negated operand
// cancels the negated multiply back to FMULD. Reports whether v was rewritten.
// NOTE: generated from _gen/ARM64.rules — regenerate rather than hand-editing.
func rewriteValueARM64_OpARM64FNMULD(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FNMULD (FNEGD x) y)
	// result: (FMULD x y)
	for {
		// FNMULD is commutative: the inner loop swaps v_0/v_1 on the second
		// iteration so the FNEGD operand is matched in either position.
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpARM64FNEGD {
				continue
			}
			x := v_0.Args[0]
			y := v_1
			v.reset(OpARM64FMULD)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	return false
}
// rewriteValueARM64_OpARM64FNMULS rewrites FNMULS values: a negated operand
// cancels the negated multiply back to FMULS. Reports whether v was rewritten.
// NOTE: generated from _gen/ARM64.rules — regenerate rather than hand-editing.
func rewriteValueARM64_OpARM64FNMULS(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FNMULS (FNEGS x) y)
	// result: (FMULS x y)
	for {
		// FNMULS is commutative: the inner loop swaps v_0/v_1 on the second
		// iteration so the FNEGS operand is matched in either position.
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpARM64FNEGS {
				continue
			}
			x := v_0.Args[0]
			y := v_1
			v.reset(OpARM64FMULS)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	return false
}
// rewriteValueARM64_OpARM64FSUBD rewrites FSUBD values: when fused multiply-add
// is allowed (useFMA), a subtraction involving an FMULD/FNMULD operand is fused
// into the matching FMSUBD/FNMSUBD/FMADDD/FNMADDD form. Reports whether v was
// rewritten.
// NOTE: generated from _gen/ARM64.rules — regenerate rather than hand-editing.
func rewriteValueARM64_OpARM64FSUBD(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FSUBD a (FMULD x y))
	// cond: a.Block.Func.useFMA(v)
	// result: (FMSUBD a x y)
	for {
		a := v_0
		if v_1.Op != OpARM64FMULD {
			break
		}
		y := v_1.Args[1]
		x := v_1.Args[0]
		if !(a.Block.Func.useFMA(v)) {
			break
		}
		v.reset(OpARM64FMSUBD)
		v.AddArg3(a, x, y)
		return true
	}
	// match: (FSUBD (FMULD x y) a)
	// cond: a.Block.Func.useFMA(v)
	// result: (FNMSUBD a x y)
	for {
		if v_0.Op != OpARM64FMULD {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		a := v_1
		if !(a.Block.Func.useFMA(v)) {
			break
		}
		v.reset(OpARM64FNMSUBD)
		v.AddArg3(a, x, y)
		return true
	}
	// match: (FSUBD a (FNMULD x y))
	// cond: a.Block.Func.useFMA(v)
	// result: (FMADDD a x y)
	for {
		a := v_0
		if v_1.Op != OpARM64FNMULD {
			break
		}
		y := v_1.Args[1]
		x := v_1.Args[0]
		if !(a.Block.Func.useFMA(v)) {
			break
		}
		v.reset(OpARM64FMADDD)
		v.AddArg3(a, x, y)
		return true
	}
	// match: (FSUBD (FNMULD x y) a)
	// cond: a.Block.Func.useFMA(v)
	// result: (FNMADDD a x y)
	for {
		if v_0.Op != OpARM64FNMULD {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		a := v_1
		if !(a.Block.Func.useFMA(v)) {
			break
		}
		v.reset(OpARM64FNMADDD)
		v.AddArg3(a, x, y)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64FSUBS rewrites FSUBS values: when fused multiply-add
// is allowed (useFMA), a subtraction involving an FMULS/FNMULS operand is fused
// into the matching FMSUBS/FNMSUBS/FMADDS/FNMADDS form. Reports whether v was
// rewritten.
// NOTE: generated from _gen/ARM64.rules — regenerate rather than hand-editing.
func rewriteValueARM64_OpARM64FSUBS(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FSUBS a (FMULS x y))
	// cond: a.Block.Func.useFMA(v)
	// result: (FMSUBS a x y)
	for {
		a := v_0
		if v_1.Op != OpARM64FMULS {
			break
		}
		y := v_1.Args[1]
		x := v_1.Args[0]
		if !(a.Block.Func.useFMA(v)) {
			break
		}
		v.reset(OpARM64FMSUBS)
		v.AddArg3(a, x, y)
		return true
	}
	// match: (FSUBS (FMULS x y) a)
	// cond: a.Block.Func.useFMA(v)
	// result: (FNMSUBS a x y)
	for {
		if v_0.Op != OpARM64FMULS {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		a := v_1
		if !(a.Block.Func.useFMA(v)) {
			break
		}
		v.reset(OpARM64FNMSUBS)
		v.AddArg3(a, x, y)
		return true
	}
	// match: (FSUBS a (FNMULS x y))
	// cond: a.Block.Func.useFMA(v)
	// result: (FMADDS a x y)
	for {
		a := v_0
		if v_1.Op != OpARM64FNMULS {
			break
		}
		y := v_1.Args[1]
		x := v_1.Args[0]
		if !(a.Block.Func.useFMA(v)) {
			break
		}
		v.reset(OpARM64FMADDS)
		v.AddArg3(a, x, y)
		return true
	}
	// match: (FSUBS (FNMULS x y) a)
	// cond: a.Block.Func.useFMA(v)
	// result: (FNMADDS a x y)
	for {
		if v_0.Op != OpARM64FNMULS {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		a := v_1
		if !(a.Block.Func.useFMA(v)) {
			break
		}
		v.reset(OpARM64FNMADDS)
		v.AddArg3(a, x, y)
		return true
	}
	return false
}
  5627  func rewriteValueARM64_OpARM64GreaterEqual(v *Value) bool {
  5628  	v_0 := v.Args[0]
  5629  	b := v.Block
  5630  	// match: (GreaterEqual (CMPconst [0] z:(AND x y)))
  5631  	// cond: z.Uses == 1
  5632  	// result: (GreaterEqual (TST x y))
  5633  	for {
  5634  		if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 {
  5635  			break
  5636  		}
  5637  		z := v_0.Args[0]
  5638  		if z.Op != OpARM64AND {
  5639  			break
  5640  		}
  5641  		y := z.Args[1]
  5642  		x := z.Args[0]
  5643  		if !(z.Uses == 1) {
  5644  			break
  5645  		}
  5646  		v.reset(OpARM64GreaterEqual)
  5647  		v0 := b.NewValue0(v.Pos, OpARM64TST, types.TypeFlags)
  5648  		v0.AddArg2(x, y)
  5649  		v.AddArg(v0)
  5650  		return true
  5651  	}
  5652  	// match: (GreaterEqual (CMPWconst [0] x:(ANDconst [c] y)))
  5653  	// cond: x.Uses == 1
  5654  	// result: (GreaterEqual (TSTWconst [int32(c)] y))
  5655  	for {
  5656  		if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 {
  5657  			break
  5658  		}
  5659  		x := v_0.Args[0]
  5660  		if x.Op != OpARM64ANDconst {
  5661  			break
  5662  		}
  5663  		c := auxIntToInt64(x.AuxInt)
  5664  		y := x.Args[0]
  5665  		if !(x.Uses == 1) {
  5666  			break
  5667  		}
  5668  		v.reset(OpARM64GreaterEqual)
  5669  		v0 := b.NewValue0(v.Pos, OpARM64TSTWconst, types.TypeFlags)
  5670  		v0.AuxInt = int32ToAuxInt(int32(c))
  5671  		v0.AddArg(y)
  5672  		v.AddArg(v0)
  5673  		return true
  5674  	}
  5675  	// match: (GreaterEqual (CMPWconst [0] z:(AND x y)))
  5676  	// cond: z.Uses == 1
  5677  	// result: (GreaterEqual (TSTW x y))
  5678  	for {
  5679  		if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 {
  5680  			break
  5681  		}
  5682  		z := v_0.Args[0]
  5683  		if z.Op != OpARM64AND {
  5684  			break
  5685  		}
  5686  		y := z.Args[1]
  5687  		x := z.Args[0]
  5688  		if !(z.Uses == 1) {
  5689  			break
  5690  		}
  5691  		v.reset(OpARM64GreaterEqual)
  5692  		v0 := b.NewValue0(v.Pos, OpARM64TSTW, types.TypeFlags)
  5693  		v0.AddArg2(x, y)
  5694  		v.AddArg(v0)
  5695  		return true
  5696  	}
  5697  	// match: (GreaterEqual (CMPWconst [0] x:(ANDconst [c] y)))
  5698  	// cond: x.Uses == 1
  5699  	// result: (GreaterEqual (TSTconst [c] y))
  5700  	for {
  5701  		if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 {
  5702  			break
  5703  		}
  5704  		x := v_0.Args[0]
  5705  		if x.Op != OpARM64ANDconst {
  5706  			break
  5707  		}
  5708  		c := auxIntToInt64(x.AuxInt)
  5709  		y := x.Args[0]
  5710  		if !(x.Uses == 1) {
  5711  			break
  5712  		}
  5713  		v.reset(OpARM64GreaterEqual)
  5714  		v0 := b.NewValue0(v.Pos, OpARM64TSTconst, types.TypeFlags)
  5715  		v0.AuxInt = int64ToAuxInt(c)
  5716  		v0.AddArg(y)
  5717  		v.AddArg(v0)
  5718  		return true
  5719  	}
  5720  	// match: (GreaterEqual (FlagConstant [fc]))
  5721  	// result: (MOVDconst [b2i(fc.ge())])
  5722  	for {
  5723  		if v_0.Op != OpARM64FlagConstant {
  5724  			break
  5725  		}
  5726  		fc := auxIntToFlagConstant(v_0.AuxInt)
  5727  		v.reset(OpARM64MOVDconst)
  5728  		v.AuxInt = int64ToAuxInt(b2i(fc.ge()))
  5729  		return true
  5730  	}
  5731  	// match: (GreaterEqual (InvertFlags x))
  5732  	// result: (LessEqual x)
  5733  	for {
  5734  		if v_0.Op != OpARM64InvertFlags {
  5735  			break
  5736  		}
  5737  		x := v_0.Args[0]
  5738  		v.reset(OpARM64LessEqual)
  5739  		v.AddArg(x)
  5740  		return true
  5741  	}
  5742  	return false
  5743  }
// rewriteValueARM64_OpARM64GreaterEqualF rewrites GreaterEqualF values:
// InvertFlags is absorbed by flipping to LessEqualF. Reports whether v was
// rewritten.
// NOTE: generated from _gen/ARM64.rules — regenerate rather than hand-editing.
func rewriteValueARM64_OpARM64GreaterEqualF(v *Value) bool {
	v_0 := v.Args[0]
	// match: (GreaterEqualF (InvertFlags x))
	// result: (LessEqualF x)
	for {
		if v_0.Op != OpARM64InvertFlags {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64LessEqualF)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64GreaterEqualU rewrites GreaterEqualU values: flag
// constants are folded to a 0/1 MOVDconst via the unsigned-ge predicate, and
// InvertFlags is absorbed by flipping to LessEqualU. Reports whether v was
// rewritten.
// NOTE: generated from _gen/ARM64.rules — regenerate rather than hand-editing.
func rewriteValueARM64_OpARM64GreaterEqualU(v *Value) bool {
	v_0 := v.Args[0]
	// match: (GreaterEqualU (FlagConstant [fc]))
	// result: (MOVDconst [b2i(fc.uge())])
	for {
		if v_0.Op != OpARM64FlagConstant {
			break
		}
		fc := auxIntToFlagConstant(v_0.AuxInt)
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(b2i(fc.uge()))
		return true
	}
	// match: (GreaterEqualU (InvertFlags x))
	// result: (LessEqualU x)
	for {
		if v_0.Op != OpARM64InvertFlags {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64LessEqualU)
		v.AddArg(x)
		return true
	}
	return false
}
  5785  func rewriteValueARM64_OpARM64GreaterThan(v *Value) bool {
  5786  	v_0 := v.Args[0]
  5787  	b := v.Block
  5788  	// match: (GreaterThan (CMPconst [0] z:(AND x y)))
  5789  	// cond: z.Uses == 1
  5790  	// result: (GreaterThan (TST x y))
  5791  	for {
  5792  		if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 {
  5793  			break
  5794  		}
  5795  		z := v_0.Args[0]
  5796  		if z.Op != OpARM64AND {
  5797  			break
  5798  		}
  5799  		y := z.Args[1]
  5800  		x := z.Args[0]
  5801  		if !(z.Uses == 1) {
  5802  			break
  5803  		}
  5804  		v.reset(OpARM64GreaterThan)
  5805  		v0 := b.NewValue0(v.Pos, OpARM64TST, types.TypeFlags)
  5806  		v0.AddArg2(x, y)
  5807  		v.AddArg(v0)
  5808  		return true
  5809  	}
  5810  	// match: (GreaterThan (CMPWconst [0] x:(ANDconst [c] y)))
  5811  	// cond: x.Uses == 1
  5812  	// result: (GreaterThan (TSTWconst [int32(c)] y))
  5813  	for {
  5814  		if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 {
  5815  			break
  5816  		}
  5817  		x := v_0.Args[0]
  5818  		if x.Op != OpARM64ANDconst {
  5819  			break
  5820  		}
  5821  		c := auxIntToInt64(x.AuxInt)
  5822  		y := x.Args[0]
  5823  		if !(x.Uses == 1) {
  5824  			break
  5825  		}
  5826  		v.reset(OpARM64GreaterThan)
  5827  		v0 := b.NewValue0(v.Pos, OpARM64TSTWconst, types.TypeFlags)
  5828  		v0.AuxInt = int32ToAuxInt(int32(c))
  5829  		v0.AddArg(y)
  5830  		v.AddArg(v0)
  5831  		return true
  5832  	}
  5833  	// match: (GreaterThan (CMPWconst [0] z:(AND x y)))
  5834  	// cond: z.Uses == 1
  5835  	// result: (GreaterThan (TSTW x y))
  5836  	for {
  5837  		if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 {
  5838  			break
  5839  		}
  5840  		z := v_0.Args[0]
  5841  		if z.Op != OpARM64AND {
  5842  			break
  5843  		}
  5844  		y := z.Args[1]
  5845  		x := z.Args[0]
  5846  		if !(z.Uses == 1) {
  5847  			break
  5848  		}
  5849  		v.reset(OpARM64GreaterThan)
  5850  		v0 := b.NewValue0(v.Pos, OpARM64TSTW, types.TypeFlags)
  5851  		v0.AddArg2(x, y)
  5852  		v.AddArg(v0)
  5853  		return true
  5854  	}
  5855  	// match: (GreaterThan (CMPWconst [0] x:(ANDconst [c] y)))
  5856  	// cond: x.Uses == 1
  5857  	// result: (GreaterThan (TSTconst [c] y))
  5858  	for {
  5859  		if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 {
  5860  			break
  5861  		}
  5862  		x := v_0.Args[0]
  5863  		if x.Op != OpARM64ANDconst {
  5864  			break
  5865  		}
  5866  		c := auxIntToInt64(x.AuxInt)
  5867  		y := x.Args[0]
  5868  		if !(x.Uses == 1) {
  5869  			break
  5870  		}
  5871  		v.reset(OpARM64GreaterThan)
  5872  		v0 := b.NewValue0(v.Pos, OpARM64TSTconst, types.TypeFlags)
  5873  		v0.AuxInt = int64ToAuxInt(c)
  5874  		v0.AddArg(y)
  5875  		v.AddArg(v0)
  5876  		return true
  5877  	}
  5878  	// match: (GreaterThan (FlagConstant [fc]))
  5879  	// result: (MOVDconst [b2i(fc.gt())])
  5880  	for {
  5881  		if v_0.Op != OpARM64FlagConstant {
  5882  			break
  5883  		}
  5884  		fc := auxIntToFlagConstant(v_0.AuxInt)
  5885  		v.reset(OpARM64MOVDconst)
  5886  		v.AuxInt = int64ToAuxInt(b2i(fc.gt()))
  5887  		return true
  5888  	}
  5889  	// match: (GreaterThan (InvertFlags x))
  5890  	// result: (LessThan x)
  5891  	for {
  5892  		if v_0.Op != OpARM64InvertFlags {
  5893  			break
  5894  		}
  5895  		x := v_0.Args[0]
  5896  		v.reset(OpARM64LessThan)
  5897  		v.AddArg(x)
  5898  		return true
  5899  	}
  5900  	return false
  5901  }
// rewriteValueARM64_OpARM64GreaterThanF rewrites GreaterThanF values:
// InvertFlags is absorbed by flipping to LessThanF. Reports whether v was
// rewritten.
// NOTE: generated from _gen/ARM64.rules — regenerate rather than hand-editing.
func rewriteValueARM64_OpARM64GreaterThanF(v *Value) bool {
	v_0 := v.Args[0]
	// match: (GreaterThanF (InvertFlags x))
	// result: (LessThanF x)
	for {
		if v_0.Op != OpARM64InvertFlags {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64LessThanF)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64GreaterThanU rewrites GreaterThanU values: flag
// constants are folded to a 0/1 MOVDconst via the unsigned-gt predicate, and
// InvertFlags is absorbed by flipping to LessThanU. Reports whether v was
// rewritten.
// NOTE: generated from _gen/ARM64.rules — regenerate rather than hand-editing.
func rewriteValueARM64_OpARM64GreaterThanU(v *Value) bool {
	v_0 := v.Args[0]
	// match: (GreaterThanU (FlagConstant [fc]))
	// result: (MOVDconst [b2i(fc.ugt())])
	for {
		if v_0.Op != OpARM64FlagConstant {
			break
		}
		fc := auxIntToFlagConstant(v_0.AuxInt)
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(b2i(fc.ugt()))
		return true
	}
	// match: (GreaterThanU (InvertFlags x))
	// result: (LessThanU x)
	for {
		if v_0.Op != OpARM64InvertFlags {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64LessThanU)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64LDP rewrites LDP (load-pair) values: ADDconst and
// MOVDaddr address arithmetic is folded into the load's offset/symbol, subject
// to the 32-bit offset range and the restriction on SB-relative addressing in
// shared (PIC) mode. Reports whether v was rewritten.
// NOTE: generated from _gen/ARM64.rules — regenerate rather than hand-editing.
func rewriteValueARM64_OpARM64LDP(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (LDP [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (LDP [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64LDP)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (LDP [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (LDP [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64LDP)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
  5993  func rewriteValueARM64_OpARM64LessEqual(v *Value) bool {
  5994  	v_0 := v.Args[0]
  5995  	b := v.Block
  5996  	// match: (LessEqual (CMPconst [0] z:(AND x y)))
  5997  	// cond: z.Uses == 1
  5998  	// result: (LessEqual (TST x y))
  5999  	for {
  6000  		if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 {
  6001  			break
  6002  		}
  6003  		z := v_0.Args[0]
  6004  		if z.Op != OpARM64AND {
  6005  			break
  6006  		}
  6007  		y := z.Args[1]
  6008  		x := z.Args[0]
  6009  		if !(z.Uses == 1) {
  6010  			break
  6011  		}
  6012  		v.reset(OpARM64LessEqual)
  6013  		v0 := b.NewValue0(v.Pos, OpARM64TST, types.TypeFlags)
  6014  		v0.AddArg2(x, y)
  6015  		v.AddArg(v0)
  6016  		return true
  6017  	}
  6018  	// match: (LessEqual (CMPWconst [0] x:(ANDconst [c] y)))
  6019  	// cond: x.Uses == 1
  6020  	// result: (LessEqual (TSTWconst [int32(c)] y))
  6021  	for {
  6022  		if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 {
  6023  			break
  6024  		}
  6025  		x := v_0.Args[0]
  6026  		if x.Op != OpARM64ANDconst {
  6027  			break
  6028  		}
  6029  		c := auxIntToInt64(x.AuxInt)
  6030  		y := x.Args[0]
  6031  		if !(x.Uses == 1) {
  6032  			break
  6033  		}
  6034  		v.reset(OpARM64LessEqual)
  6035  		v0 := b.NewValue0(v.Pos, OpARM64TSTWconst, types.TypeFlags)
  6036  		v0.AuxInt = int32ToAuxInt(int32(c))
  6037  		v0.AddArg(y)
  6038  		v.AddArg(v0)
  6039  		return true
  6040  	}
  6041  	// match: (LessEqual (CMPWconst [0] z:(AND x y)))
  6042  	// cond: z.Uses == 1
  6043  	// result: (LessEqual (TSTW x y))
  6044  	for {
  6045  		if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 {
  6046  			break
  6047  		}
  6048  		z := v_0.Args[0]
  6049  		if z.Op != OpARM64AND {
  6050  			break
  6051  		}
  6052  		y := z.Args[1]
  6053  		x := z.Args[0]
  6054  		if !(z.Uses == 1) {
  6055  			break
  6056  		}
  6057  		v.reset(OpARM64LessEqual)
  6058  		v0 := b.NewValue0(v.Pos, OpARM64TSTW, types.TypeFlags)
  6059  		v0.AddArg2(x, y)
  6060  		v.AddArg(v0)
  6061  		return true
  6062  	}
  6063  	// match: (LessEqual (CMPWconst [0] x:(ANDconst [c] y)))
  6064  	// cond: x.Uses == 1
  6065  	// result: (LessEqual (TSTconst [c] y))
  6066  	for {
  6067  		if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 {
  6068  			break
  6069  		}
  6070  		x := v_0.Args[0]
  6071  		if x.Op != OpARM64ANDconst {
  6072  			break
  6073  		}
  6074  		c := auxIntToInt64(x.AuxInt)
  6075  		y := x.Args[0]
  6076  		if !(x.Uses == 1) {
  6077  			break
  6078  		}
  6079  		v.reset(OpARM64LessEqual)
  6080  		v0 := b.NewValue0(v.Pos, OpARM64TSTconst, types.TypeFlags)
  6081  		v0.AuxInt = int64ToAuxInt(c)
  6082  		v0.AddArg(y)
  6083  		v.AddArg(v0)
  6084  		return true
  6085  	}
  6086  	// match: (LessEqual (FlagConstant [fc]))
  6087  	// result: (MOVDconst [b2i(fc.le())])
  6088  	for {
  6089  		if v_0.Op != OpARM64FlagConstant {
  6090  			break
  6091  		}
  6092  		fc := auxIntToFlagConstant(v_0.AuxInt)
  6093  		v.reset(OpARM64MOVDconst)
  6094  		v.AuxInt = int64ToAuxInt(b2i(fc.le()))
  6095  		return true
  6096  	}
  6097  	// match: (LessEqual (InvertFlags x))
  6098  	// result: (GreaterEqual x)
  6099  	for {
  6100  		if v_0.Op != OpARM64InvertFlags {
  6101  			break
  6102  		}
  6103  		x := v_0.Args[0]
  6104  		v.reset(OpARM64GreaterEqual)
  6105  		v.AddArg(x)
  6106  		return true
  6107  	}
  6108  	return false
  6109  }
// rewriteValueARM64_OpARM64LessEqualF rewrites LessEqualF values: InvertFlags
// is absorbed by flipping to GreaterEqualF. Reports whether v was rewritten.
// NOTE: generated from _gen/ARM64.rules — regenerate rather than hand-editing.
func rewriteValueARM64_OpARM64LessEqualF(v *Value) bool {
	v_0 := v.Args[0]
	// match: (LessEqualF (InvertFlags x))
	// result: (GreaterEqualF x)
	for {
		if v_0.Op != OpARM64InvertFlags {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64GreaterEqualF)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64LessEqualU rewrites LessEqualU values: flag
// constants are folded to a 0/1 MOVDconst via the unsigned-le predicate, and
// InvertFlags is absorbed by flipping to GreaterEqualU. Reports whether v was
// rewritten.
// NOTE: generated from _gen/ARM64.rules — regenerate rather than hand-editing.
func rewriteValueARM64_OpARM64LessEqualU(v *Value) bool {
	v_0 := v.Args[0]
	// match: (LessEqualU (FlagConstant [fc]))
	// result: (MOVDconst [b2i(fc.ule())])
	for {
		if v_0.Op != OpARM64FlagConstant {
			break
		}
		fc := auxIntToFlagConstant(v_0.AuxInt)
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(b2i(fc.ule()))
		return true
	}
	// match: (LessEqualU (InvertFlags x))
	// result: (GreaterEqualU x)
	for {
		if v_0.Op != OpARM64InvertFlags {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64GreaterEqualU)
		v.AddArg(x)
		return true
	}
	return false
}
  6151  func rewriteValueARM64_OpARM64LessThan(v *Value) bool {
  6152  	v_0 := v.Args[0]
  6153  	b := v.Block
  6154  	// match: (LessThan (CMPconst [0] z:(AND x y)))
  6155  	// cond: z.Uses == 1
  6156  	// result: (LessThan (TST x y))
  6157  	for {
  6158  		if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 {
  6159  			break
  6160  		}
  6161  		z := v_0.Args[0]
  6162  		if z.Op != OpARM64AND {
  6163  			break
  6164  		}
  6165  		y := z.Args[1]
  6166  		x := z.Args[0]
  6167  		if !(z.Uses == 1) {
  6168  			break
  6169  		}
  6170  		v.reset(OpARM64LessThan)
  6171  		v0 := b.NewValue0(v.Pos, OpARM64TST, types.TypeFlags)
  6172  		v0.AddArg2(x, y)
  6173  		v.AddArg(v0)
  6174  		return true
  6175  	}
  6176  	// match: (LessThan (CMPWconst [0] x:(ANDconst [c] y)))
  6177  	// cond: x.Uses == 1
  6178  	// result: (LessThan (TSTWconst [int32(c)] y))
  6179  	for {
  6180  		if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 {
  6181  			break
  6182  		}
  6183  		x := v_0.Args[0]
  6184  		if x.Op != OpARM64ANDconst {
  6185  			break
  6186  		}
  6187  		c := auxIntToInt64(x.AuxInt)
  6188  		y := x.Args[0]
  6189  		if !(x.Uses == 1) {
  6190  			break
  6191  		}
  6192  		v.reset(OpARM64LessThan)
  6193  		v0 := b.NewValue0(v.Pos, OpARM64TSTWconst, types.TypeFlags)
  6194  		v0.AuxInt = int32ToAuxInt(int32(c))
  6195  		v0.AddArg(y)
  6196  		v.AddArg(v0)
  6197  		return true
  6198  	}
  6199  	// match: (LessThan (CMPWconst [0] z:(AND x y)))
  6200  	// cond: z.Uses == 1
  6201  	// result: (LessThan (TSTW x y))
  6202  	for {
  6203  		if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 {
  6204  			break
  6205  		}
  6206  		z := v_0.Args[0]
  6207  		if z.Op != OpARM64AND {
  6208  			break
  6209  		}
  6210  		y := z.Args[1]
  6211  		x := z.Args[0]
  6212  		if !(z.Uses == 1) {
  6213  			break
  6214  		}
  6215  		v.reset(OpARM64LessThan)
  6216  		v0 := b.NewValue0(v.Pos, OpARM64TSTW, types.TypeFlags)
  6217  		v0.AddArg2(x, y)
  6218  		v.AddArg(v0)
  6219  		return true
  6220  	}
  6221  	// match: (LessThan (CMPWconst [0] x:(ANDconst [c] y)))
  6222  	// cond: x.Uses == 1
  6223  	// result: (LessThan (TSTconst [c] y))
  6224  	for {
  6225  		if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 {
  6226  			break
  6227  		}
  6228  		x := v_0.Args[0]
  6229  		if x.Op != OpARM64ANDconst {
  6230  			break
  6231  		}
  6232  		c := auxIntToInt64(x.AuxInt)
  6233  		y := x.Args[0]
  6234  		if !(x.Uses == 1) {
  6235  			break
  6236  		}
  6237  		v.reset(OpARM64LessThan)
  6238  		v0 := b.NewValue0(v.Pos, OpARM64TSTconst, types.TypeFlags)
  6239  		v0.AuxInt = int64ToAuxInt(c)
  6240  		v0.AddArg(y)
  6241  		v.AddArg(v0)
  6242  		return true
  6243  	}
  6244  	// match: (LessThan (FlagConstant [fc]))
  6245  	// result: (MOVDconst [b2i(fc.lt())])
  6246  	for {
  6247  		if v_0.Op != OpARM64FlagConstant {
  6248  			break
  6249  		}
  6250  		fc := auxIntToFlagConstant(v_0.AuxInt)
  6251  		v.reset(OpARM64MOVDconst)
  6252  		v.AuxInt = int64ToAuxInt(b2i(fc.lt()))
  6253  		return true
  6254  	}
  6255  	// match: (LessThan (InvertFlags x))
  6256  	// result: (GreaterThan x)
  6257  	for {
  6258  		if v_0.Op != OpARM64InvertFlags {
  6259  			break
  6260  		}
  6261  		x := v_0.Args[0]
  6262  		v.reset(OpARM64GreaterThan)
  6263  		v.AddArg(x)
  6264  		return true
  6265  	}
  6266  	return false
  6267  }
  6268  func rewriteValueARM64_OpARM64LessThanF(v *Value) bool {
  6269  	v_0 := v.Args[0]
  6270  	// match: (LessThanF (InvertFlags x))
  6271  	// result: (GreaterThanF x)
  6272  	for {
  6273  		if v_0.Op != OpARM64InvertFlags {
  6274  			break
  6275  		}
  6276  		x := v_0.Args[0]
  6277  		v.reset(OpARM64GreaterThanF)
  6278  		v.AddArg(x)
  6279  		return true
  6280  	}
  6281  	return false
  6282  }
  6283  func rewriteValueARM64_OpARM64LessThanU(v *Value) bool {
  6284  	v_0 := v.Args[0]
  6285  	// match: (LessThanU (FlagConstant [fc]))
  6286  	// result: (MOVDconst [b2i(fc.ult())])
  6287  	for {
  6288  		if v_0.Op != OpARM64FlagConstant {
  6289  			break
  6290  		}
  6291  		fc := auxIntToFlagConstant(v_0.AuxInt)
  6292  		v.reset(OpARM64MOVDconst)
  6293  		v.AuxInt = int64ToAuxInt(b2i(fc.ult()))
  6294  		return true
  6295  	}
  6296  	// match: (LessThanU (InvertFlags x))
  6297  	// result: (GreaterThanU x)
  6298  	for {
  6299  		if v_0.Op != OpARM64InvertFlags {
  6300  			break
  6301  		}
  6302  		x := v_0.Args[0]
  6303  		v.reset(OpARM64GreaterThanU)
  6304  		v.AddArg(x)
  6305  		return true
  6306  	}
  6307  	return false
  6308  }
// rewriteValueARM64_OpARM64MADD rewrites MADD (multiply-add) values whose
// multiplier is a constant, strength-reducing the multiply into ADD/SUB and
// shifted-operand forms (the visible rules fold c in {-1,0,1}, powers of two,
// 2^k±1, and 3/5/7/9 times a power of two). Rule order matters: the first
// matching rule wins, so the cheapest folds are tried first. Returns true if
// a rewrite was applied.
func rewriteValueARM64_OpARM64MADD(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// --- Constant in the third argument: (MADD a x (MOVDconst [c])) ---
	// match: (MADD a x (MOVDconst [-1]))
	// result: (SUB a x)
	for {
		a := v_0
		x := v_1
		if v_2.Op != OpARM64MOVDconst || auxIntToInt64(v_2.AuxInt) != -1 {
			break
		}
		v.reset(OpARM64SUB)
		v.AddArg2(a, x)
		return true
	}
	// match: (MADD a _ (MOVDconst [0]))
	// result: a
	for {
		a := v_0
		if v_2.Op != OpARM64MOVDconst || auxIntToInt64(v_2.AuxInt) != 0 {
			break
		}
		v.copyOf(a)
		return true
	}
	// match: (MADD a x (MOVDconst [1]))
	// result: (ADD a x)
	for {
		a := v_0
		x := v_1
		if v_2.Op != OpARM64MOVDconst || auxIntToInt64(v_2.AuxInt) != 1 {
			break
		}
		v.reset(OpARM64ADD)
		v.AddArg2(a, x)
		return true
	}
	// match: (MADD a x (MOVDconst [c]))
	// cond: isPowerOfTwo64(c)
	// result: (ADDshiftLL a x [log64(c)])
	for {
		a := v_0
		x := v_1
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_2.AuxInt)
		if !(isPowerOfTwo64(c)) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = int64ToAuxInt(log64(c))
		v.AddArg2(a, x)
		return true
	}
	// match: (MADD a x (MOVDconst [c]))
	// cond: isPowerOfTwo64(c-1) && c>=3
	// result: (ADD a (ADDshiftLL <x.Type> x x [log64(c-1)]))
	for {
		a := v_0
		x := v_1
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_2.AuxInt)
		if !(isPowerOfTwo64(c-1) && c >= 3) {
			break
		}
		v.reset(OpARM64ADD)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = int64ToAuxInt(log64(c - 1))
		v0.AddArg2(x, x)
		v.AddArg2(a, v0)
		return true
	}
	// match: (MADD a x (MOVDconst [c]))
	// cond: isPowerOfTwo64(c+1) && c>=7
	// result: (SUB a (SUBshiftLL <x.Type> x x [log64(c+1)]))
	for {
		a := v_0
		x := v_1
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_2.AuxInt)
		if !(isPowerOfTwo64(c+1) && c >= 7) {
			break
		}
		v.reset(OpARM64SUB)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = int64ToAuxInt(log64(c + 1))
		v0.AddArg2(x, x)
		v.AddArg2(a, v0)
		return true
	}
	// match: (MADD a x (MOVDconst [c]))
	// cond: c%3 == 0 && isPowerOfTwo64(c/3)
	// result: (SUBshiftLL a (SUBshiftLL <x.Type> x x [2]) [log64(c/3)])
	for {
		a := v_0
		x := v_1
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_2.AuxInt)
		if !(c%3 == 0 && isPowerOfTwo64(c/3)) {
			break
		}
		v.reset(OpARM64SUBshiftLL)
		v.AuxInt = int64ToAuxInt(log64(c / 3))
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = int64ToAuxInt(2)
		v0.AddArg2(x, x)
		v.AddArg2(a, v0)
		return true
	}
	// match: (MADD a x (MOVDconst [c]))
	// cond: c%5 == 0 && isPowerOfTwo64(c/5)
	// result: (ADDshiftLL a (ADDshiftLL <x.Type> x x [2]) [log64(c/5)])
	for {
		a := v_0
		x := v_1
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_2.AuxInt)
		if !(c%5 == 0 && isPowerOfTwo64(c/5)) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = int64ToAuxInt(log64(c / 5))
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = int64ToAuxInt(2)
		v0.AddArg2(x, x)
		v.AddArg2(a, v0)
		return true
	}
	// match: (MADD a x (MOVDconst [c]))
	// cond: c%7 == 0 && isPowerOfTwo64(c/7)
	// result: (SUBshiftLL a (SUBshiftLL <x.Type> x x [3]) [log64(c/7)])
	for {
		a := v_0
		x := v_1
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_2.AuxInt)
		if !(c%7 == 0 && isPowerOfTwo64(c/7)) {
			break
		}
		v.reset(OpARM64SUBshiftLL)
		v.AuxInt = int64ToAuxInt(log64(c / 7))
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = int64ToAuxInt(3)
		v0.AddArg2(x, x)
		v.AddArg2(a, v0)
		return true
	}
	// match: (MADD a x (MOVDconst [c]))
	// cond: c%9 == 0 && isPowerOfTwo64(c/9)
	// result: (ADDshiftLL a (ADDshiftLL <x.Type> x x [3]) [log64(c/9)])
	for {
		a := v_0
		x := v_1
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_2.AuxInt)
		if !(c%9 == 0 && isPowerOfTwo64(c/9)) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = int64ToAuxInt(log64(c / 9))
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = int64ToAuxInt(3)
		v0.AddArg2(x, x)
		v.AddArg2(a, v0)
		return true
	}
	// --- Same folds with the constant in the second argument:
	// (MADD a (MOVDconst [c]) x) ---
	// match: (MADD a (MOVDconst [-1]) x)
	// result: (SUB a x)
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != -1 {
			break
		}
		x := v_2
		v.reset(OpARM64SUB)
		v.AddArg2(a, x)
		return true
	}
	// match: (MADD a (MOVDconst [0]) _)
	// result: a
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		v.copyOf(a)
		return true
	}
	// match: (MADD a (MOVDconst [1]) x)
	// result: (ADD a x)
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 1 {
			break
		}
		x := v_2
		v.reset(OpARM64ADD)
		v.AddArg2(a, x)
		return true
	}
	// match: (MADD a (MOVDconst [c]) x)
	// cond: isPowerOfTwo64(c)
	// result: (ADDshiftLL a x [log64(c)])
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		x := v_2
		if !(isPowerOfTwo64(c)) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = int64ToAuxInt(log64(c))
		v.AddArg2(a, x)
		return true
	}
	// match: (MADD a (MOVDconst [c]) x)
	// cond: isPowerOfTwo64(c-1) && c>=3
	// result: (ADD a (ADDshiftLL <x.Type> x x [log64(c-1)]))
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		x := v_2
		if !(isPowerOfTwo64(c-1) && c >= 3) {
			break
		}
		v.reset(OpARM64ADD)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = int64ToAuxInt(log64(c - 1))
		v0.AddArg2(x, x)
		v.AddArg2(a, v0)
		return true
	}
	// match: (MADD a (MOVDconst [c]) x)
	// cond: isPowerOfTwo64(c+1) && c>=7
	// result: (SUB a (SUBshiftLL <x.Type> x x [log64(c+1)]))
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		x := v_2
		if !(isPowerOfTwo64(c+1) && c >= 7) {
			break
		}
		v.reset(OpARM64SUB)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = int64ToAuxInt(log64(c + 1))
		v0.AddArg2(x, x)
		v.AddArg2(a, v0)
		return true
	}
	// match: (MADD a (MOVDconst [c]) x)
	// cond: c%3 == 0 && isPowerOfTwo64(c/3)
	// result: (SUBshiftLL a (SUBshiftLL <x.Type> x x [2]) [log64(c/3)])
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		x := v_2
		if !(c%3 == 0 && isPowerOfTwo64(c/3)) {
			break
		}
		v.reset(OpARM64SUBshiftLL)
		v.AuxInt = int64ToAuxInt(log64(c / 3))
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = int64ToAuxInt(2)
		v0.AddArg2(x, x)
		v.AddArg2(a, v0)
		return true
	}
	// match: (MADD a (MOVDconst [c]) x)
	// cond: c%5 == 0 && isPowerOfTwo64(c/5)
	// result: (ADDshiftLL a (ADDshiftLL <x.Type> x x [2]) [log64(c/5)])
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		x := v_2
		if !(c%5 == 0 && isPowerOfTwo64(c/5)) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = int64ToAuxInt(log64(c / 5))
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = int64ToAuxInt(2)
		v0.AddArg2(x, x)
		v.AddArg2(a, v0)
		return true
	}
	// match: (MADD a (MOVDconst [c]) x)
	// cond: c%7 == 0 && isPowerOfTwo64(c/7)
	// result: (SUBshiftLL a (SUBshiftLL <x.Type> x x [3]) [log64(c/7)])
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		x := v_2
		if !(c%7 == 0 && isPowerOfTwo64(c/7)) {
			break
		}
		v.reset(OpARM64SUBshiftLL)
		v.AuxInt = int64ToAuxInt(log64(c / 7))
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = int64ToAuxInt(3)
		v0.AddArg2(x, x)
		v.AddArg2(a, v0)
		return true
	}
	// match: (MADD a (MOVDconst [c]) x)
	// cond: c%9 == 0 && isPowerOfTwo64(c/9)
	// result: (ADDshiftLL a (ADDshiftLL <x.Type> x x [3]) [log64(c/9)])
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		x := v_2
		if !(c%9 == 0 && isPowerOfTwo64(c/9)) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = int64ToAuxInt(log64(c / 9))
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = int64ToAuxInt(3)
		v0.AddArg2(x, x)
		v.AddArg2(a, v0)
		return true
	}
	// --- Constant addend / fully-constant multiplier cases ---
	// match: (MADD (MOVDconst [c]) x y)
	// result: (ADDconst [c] (MUL <x.Type> x y))
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_1
		y := v_2
		v.reset(OpARM64ADDconst)
		v.AuxInt = int64ToAuxInt(c)
		v0 := b.NewValue0(v.Pos, OpARM64MUL, x.Type)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
	// match: (MADD a (MOVDconst [c]) (MOVDconst [d]))
	// result: (ADDconst [c*d] a)
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		d := auxIntToInt64(v_2.AuxInt)
		v.reset(OpARM64ADDconst)
		v.AuxInt = int64ToAuxInt(c * d)
		v.AddArg(a)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MADDW rewrites MADDW (32-bit multiply-add) values
// whose multiplier is a constant. The folds mirror the MADD rules but compare
// the constant through int32(c) (only the low 32 bits matter for the W form)
// and gate the divisibility folds on is32Bit(c). Rule order matters: the
// first matching rule wins. Returns true if a rewrite was applied.
func rewriteValueARM64_OpARM64MADDW(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// --- Constant in the third argument: (MADDW a x (MOVDconst [c])) ---
	// match: (MADDW a x (MOVDconst [c]))
	// cond: int32(c)==-1
	// result: (SUB a x)
	for {
		a := v_0
		x := v_1
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_2.AuxInt)
		if !(int32(c) == -1) {
			break
		}
		v.reset(OpARM64SUB)
		v.AddArg2(a, x)
		return true
	}
	// match: (MADDW a _ (MOVDconst [c]))
	// cond: int32(c)==0
	// result: a
	for {
		a := v_0
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_2.AuxInt)
		if !(int32(c) == 0) {
			break
		}
		v.copyOf(a)
		return true
	}
	// match: (MADDW a x (MOVDconst [c]))
	// cond: int32(c)==1
	// result: (ADD a x)
	for {
		a := v_0
		x := v_1
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_2.AuxInt)
		if !(int32(c) == 1) {
			break
		}
		v.reset(OpARM64ADD)
		v.AddArg2(a, x)
		return true
	}
	// match: (MADDW a x (MOVDconst [c]))
	// cond: isPowerOfTwo64(c)
	// result: (ADDshiftLL a x [log64(c)])
	for {
		a := v_0
		x := v_1
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_2.AuxInt)
		if !(isPowerOfTwo64(c)) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = int64ToAuxInt(log64(c))
		v.AddArg2(a, x)
		return true
	}
	// match: (MADDW a x (MOVDconst [c]))
	// cond: isPowerOfTwo64(c-1) && int32(c)>=3
	// result: (ADD a (ADDshiftLL <x.Type> x x [log64(c-1)]))
	for {
		a := v_0
		x := v_1
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_2.AuxInt)
		if !(isPowerOfTwo64(c-1) && int32(c) >= 3) {
			break
		}
		v.reset(OpARM64ADD)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = int64ToAuxInt(log64(c - 1))
		v0.AddArg2(x, x)
		v.AddArg2(a, v0)
		return true
	}
	// match: (MADDW a x (MOVDconst [c]))
	// cond: isPowerOfTwo64(c+1) && int32(c)>=7
	// result: (SUB a (SUBshiftLL <x.Type> x x [log64(c+1)]))
	for {
		a := v_0
		x := v_1
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_2.AuxInt)
		if !(isPowerOfTwo64(c+1) && int32(c) >= 7) {
			break
		}
		v.reset(OpARM64SUB)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = int64ToAuxInt(log64(c + 1))
		v0.AddArg2(x, x)
		v.AddArg2(a, v0)
		return true
	}
	// match: (MADDW a x (MOVDconst [c]))
	// cond: c%3 == 0 && isPowerOfTwo64(c/3) && is32Bit(c)
	// result: (SUBshiftLL a (SUBshiftLL <x.Type> x x [2]) [log64(c/3)])
	for {
		a := v_0
		x := v_1
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_2.AuxInt)
		if !(c%3 == 0 && isPowerOfTwo64(c/3) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64SUBshiftLL)
		v.AuxInt = int64ToAuxInt(log64(c / 3))
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = int64ToAuxInt(2)
		v0.AddArg2(x, x)
		v.AddArg2(a, v0)
		return true
	}
	// match: (MADDW a x (MOVDconst [c]))
	// cond: c%5 == 0 && isPowerOfTwo64(c/5) && is32Bit(c)
	// result: (ADDshiftLL a (ADDshiftLL <x.Type> x x [2]) [log64(c/5)])
	for {
		a := v_0
		x := v_1
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_2.AuxInt)
		if !(c%5 == 0 && isPowerOfTwo64(c/5) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = int64ToAuxInt(log64(c / 5))
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = int64ToAuxInt(2)
		v0.AddArg2(x, x)
		v.AddArg2(a, v0)
		return true
	}
	// match: (MADDW a x (MOVDconst [c]))
	// cond: c%7 == 0 && isPowerOfTwo64(c/7) && is32Bit(c)
	// result: (SUBshiftLL a (SUBshiftLL <x.Type> x x [3]) [log64(c/7)])
	for {
		a := v_0
		x := v_1
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_2.AuxInt)
		if !(c%7 == 0 && isPowerOfTwo64(c/7) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64SUBshiftLL)
		v.AuxInt = int64ToAuxInt(log64(c / 7))
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = int64ToAuxInt(3)
		v0.AddArg2(x, x)
		v.AddArg2(a, v0)
		return true
	}
	// match: (MADDW a x (MOVDconst [c]))
	// cond: c%9 == 0 && isPowerOfTwo64(c/9) && is32Bit(c)
	// result: (ADDshiftLL a (ADDshiftLL <x.Type> x x [3]) [log64(c/9)])
	for {
		a := v_0
		x := v_1
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_2.AuxInt)
		if !(c%9 == 0 && isPowerOfTwo64(c/9) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = int64ToAuxInt(log64(c / 9))
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = int64ToAuxInt(3)
		v0.AddArg2(x, x)
		v.AddArg2(a, v0)
		return true
	}
	// --- Same folds with the constant in the second argument:
	// (MADDW a (MOVDconst [c]) x) ---
	// match: (MADDW a (MOVDconst [c]) x)
	// cond: int32(c)==-1
	// result: (SUB a x)
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		x := v_2
		if !(int32(c) == -1) {
			break
		}
		v.reset(OpARM64SUB)
		v.AddArg2(a, x)
		return true
	}
	// match: (MADDW a (MOVDconst [c]) _)
	// cond: int32(c)==0
	// result: a
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(int32(c) == 0) {
			break
		}
		v.copyOf(a)
		return true
	}
	// match: (MADDW a (MOVDconst [c]) x)
	// cond: int32(c)==1
	// result: (ADD a x)
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		x := v_2
		if !(int32(c) == 1) {
			break
		}
		v.reset(OpARM64ADD)
		v.AddArg2(a, x)
		return true
	}
	// match: (MADDW a (MOVDconst [c]) x)
	// cond: isPowerOfTwo64(c)
	// result: (ADDshiftLL a x [log64(c)])
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		x := v_2
		if !(isPowerOfTwo64(c)) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = int64ToAuxInt(log64(c))
		v.AddArg2(a, x)
		return true
	}
	// match: (MADDW a (MOVDconst [c]) x)
	// cond: isPowerOfTwo64(c-1) && int32(c)>=3
	// result: (ADD a (ADDshiftLL <x.Type> x x [log64(c-1)]))
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		x := v_2
		if !(isPowerOfTwo64(c-1) && int32(c) >= 3) {
			break
		}
		v.reset(OpARM64ADD)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = int64ToAuxInt(log64(c - 1))
		v0.AddArg2(x, x)
		v.AddArg2(a, v0)
		return true
	}
	// match: (MADDW a (MOVDconst [c]) x)
	// cond: isPowerOfTwo64(c+1) && int32(c)>=7
	// result: (SUB a (SUBshiftLL <x.Type> x x [log64(c+1)]))
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		x := v_2
		if !(isPowerOfTwo64(c+1) && int32(c) >= 7) {
			break
		}
		v.reset(OpARM64SUB)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = int64ToAuxInt(log64(c + 1))
		v0.AddArg2(x, x)
		v.AddArg2(a, v0)
		return true
	}
	// match: (MADDW a (MOVDconst [c]) x)
	// cond: c%3 == 0 && isPowerOfTwo64(c/3) && is32Bit(c)
	// result: (SUBshiftLL a (SUBshiftLL <x.Type> x x [2]) [log64(c/3)])
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		x := v_2
		if !(c%3 == 0 && isPowerOfTwo64(c/3) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64SUBshiftLL)
		v.AuxInt = int64ToAuxInt(log64(c / 3))
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = int64ToAuxInt(2)
		v0.AddArg2(x, x)
		v.AddArg2(a, v0)
		return true
	}
	// match: (MADDW a (MOVDconst [c]) x)
	// cond: c%5 == 0 && isPowerOfTwo64(c/5) && is32Bit(c)
	// result: (ADDshiftLL a (ADDshiftLL <x.Type> x x [2]) [log64(c/5)])
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		x := v_2
		if !(c%5 == 0 && isPowerOfTwo64(c/5) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = int64ToAuxInt(log64(c / 5))
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = int64ToAuxInt(2)
		v0.AddArg2(x, x)
		v.AddArg2(a, v0)
		return true
	}
	// match: (MADDW a (MOVDconst [c]) x)
	// cond: c%7 == 0 && isPowerOfTwo64(c/7) && is32Bit(c)
	// result: (SUBshiftLL a (SUBshiftLL <x.Type> x x [3]) [log64(c/7)])
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		x := v_2
		if !(c%7 == 0 && isPowerOfTwo64(c/7) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64SUBshiftLL)
		v.AuxInt = int64ToAuxInt(log64(c / 7))
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = int64ToAuxInt(3)
		v0.AddArg2(x, x)
		v.AddArg2(a, v0)
		return true
	}
	// match: (MADDW a (MOVDconst [c]) x)
	// cond: c%9 == 0 && isPowerOfTwo64(c/9) && is32Bit(c)
	// result: (ADDshiftLL a (ADDshiftLL <x.Type> x x [3]) [log64(c/9)])
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		x := v_2
		if !(c%9 == 0 && isPowerOfTwo64(c/9) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = int64ToAuxInt(log64(c / 9))
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = int64ToAuxInt(3)
		v0.AddArg2(x, x)
		v.AddArg2(a, v0)
		return true
	}
	// --- Constant addend / fully-constant multiplier cases ---
	// match: (MADDW (MOVDconst [c]) x y)
	// result: (ADDconst [c] (MULW <x.Type> x y))
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_1
		y := v_2
		v.reset(OpARM64ADDconst)
		v.AuxInt = int64ToAuxInt(c)
		v0 := b.NewValue0(v.Pos, OpARM64MULW, x.Type)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
	// match: (MADDW a (MOVDconst [c]) (MOVDconst [d]))
	// result: (ADDconst [int64(int32(c)*int32(d))] a)
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		d := auxIntToInt64(v_2.AuxInt)
		v.reset(OpARM64ADDconst)
		v.AuxInt = int64ToAuxInt(int64(int32(c) * int32(d)))
		v.AddArg(a)
		return true
	}
	return false
}
  7123  func rewriteValueARM64_OpARM64MNEG(v *Value) bool {
  7124  	v_1 := v.Args[1]
  7125  	v_0 := v.Args[0]
  7126  	b := v.Block
  7127  	// match: (MNEG x (MOVDconst [-1]))
  7128  	// result: x
  7129  	for {
  7130  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  7131  			x := v_0
  7132  			if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != -1 {
  7133  				continue
  7134  			}
  7135  			v.copyOf(x)
  7136  			return true
  7137  		}
  7138  		break
  7139  	}
  7140  	// match: (MNEG _ (MOVDconst [0]))
  7141  	// result: (MOVDconst [0])
  7142  	for {
  7143  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  7144  			if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
  7145  				continue
  7146  			}
  7147  			v.reset(OpARM64MOVDconst)
  7148  			v.AuxInt = int64ToAuxInt(0)
  7149  			return true
  7150  		}
  7151  		break
  7152  	}
  7153  	// match: (MNEG x (MOVDconst [1]))
  7154  	// result: (NEG x)
  7155  	for {
  7156  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  7157  			x := v_0
  7158  			if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 1 {
  7159  				continue
  7160  			}
  7161  			v.reset(OpARM64NEG)
  7162  			v.AddArg(x)
  7163  			return true
  7164  		}
  7165  		break
  7166  	}
  7167  	// match: (MNEG x (MOVDconst [c]))
  7168  	// cond: isPowerOfTwo64(c)
  7169  	// result: (NEG (SLLconst <x.Type> [log64(c)] x))
  7170  	for {
  7171  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  7172  			x := v_0
  7173  			if v_1.Op != OpARM64MOVDconst {
  7174  				continue
  7175  			}
  7176  			c := auxIntToInt64(v_1.AuxInt)
  7177  			if !(isPowerOfTwo64(c)) {
  7178  				continue
  7179  			}
  7180  			v.reset(OpARM64NEG)
  7181  			v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
  7182  			v0.AuxInt = int64ToAuxInt(log64(c))
  7183  			v0.AddArg(x)
  7184  			v.AddArg(v0)
  7185  			return true
  7186  		}
  7187  		break
  7188  	}
  7189  	// match: (MNEG x (MOVDconst [c]))
  7190  	// cond: isPowerOfTwo64(c-1) && c >= 3
  7191  	// result: (NEG (ADDshiftLL <x.Type> x x [log64(c-1)]))
  7192  	for {
  7193  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  7194  			x := v_0
  7195  			if v_1.Op != OpARM64MOVDconst {
  7196  				continue
  7197  			}
  7198  			c := auxIntToInt64(v_1.AuxInt)
  7199  			if !(isPowerOfTwo64(c-1) && c >= 3) {
  7200  				continue
  7201  			}
  7202  			v.reset(OpARM64NEG)
  7203  			v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
  7204  			v0.AuxInt = int64ToAuxInt(log64(c - 1))
  7205  			v0.AddArg2(x, x)
  7206  			v.AddArg(v0)
  7207  			return true
  7208  		}
  7209  		break
  7210  	}
  7211  	// match: (MNEG x (MOVDconst [c]))
  7212  	// cond: isPowerOfTwo64(c+1) && c >= 7
  7213  	// result: (NEG (ADDshiftLL <x.Type> (NEG <x.Type> x) x [log64(c+1)]))
  7214  	for {
  7215  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  7216  			x := v_0
  7217  			if v_1.Op != OpARM64MOVDconst {
  7218  				continue
  7219  			}
  7220  			c := auxIntToInt64(v_1.AuxInt)
  7221  			if !(isPowerOfTwo64(c+1) && c >= 7) {
  7222  				continue
  7223  			}
  7224  			v.reset(OpARM64NEG)
  7225  			v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
  7226  			v0.AuxInt = int64ToAuxInt(log64(c + 1))
  7227  			v1 := b.NewValue0(v.Pos, OpARM64NEG, x.Type)
  7228  			v1.AddArg(x)
  7229  			v0.AddArg2(v1, x)
  7230  			v.AddArg(v0)
  7231  			return true
  7232  		}
  7233  		break
  7234  	}
  7235  	// match: (MNEG x (MOVDconst [c]))
  7236  	// cond: c%3 == 0 && isPowerOfTwo64(c/3)
  7237  	// result: (SLLconst <x.Type> [log64(c/3)] (SUBshiftLL <x.Type> x x [2]))
  7238  	for {
  7239  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  7240  			x := v_0
  7241  			if v_1.Op != OpARM64MOVDconst {
  7242  				continue
  7243  			}
  7244  			c := auxIntToInt64(v_1.AuxInt)
  7245  			if !(c%3 == 0 && isPowerOfTwo64(c/3)) {
  7246  				continue
  7247  			}
  7248  			v.reset(OpARM64SLLconst)
  7249  			v.Type = x.Type
  7250  			v.AuxInt = int64ToAuxInt(log64(c / 3))
  7251  			v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
  7252  			v0.AuxInt = int64ToAuxInt(2)
  7253  			v0.AddArg2(x, x)
  7254  			v.AddArg(v0)
  7255  			return true
  7256  		}
  7257  		break
  7258  	}
  7259  	// match: (MNEG x (MOVDconst [c]))
  7260  	// cond: c%5 == 0 && isPowerOfTwo64(c/5)
  7261  	// result: (NEG (SLLconst <x.Type> [log64(c/5)] (ADDshiftLL <x.Type> x x [2])))
  7262  	for {
  7263  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  7264  			x := v_0
  7265  			if v_1.Op != OpARM64MOVDconst {
  7266  				continue
  7267  			}
  7268  			c := auxIntToInt64(v_1.AuxInt)
  7269  			if !(c%5 == 0 && isPowerOfTwo64(c/5)) {
  7270  				continue
  7271  			}
  7272  			v.reset(OpARM64NEG)
  7273  			v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
  7274  			v0.AuxInt = int64ToAuxInt(log64(c / 5))
  7275  			v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
  7276  			v1.AuxInt = int64ToAuxInt(2)
  7277  			v1.AddArg2(x, x)
  7278  			v0.AddArg(v1)
  7279  			v.AddArg(v0)
  7280  			return true
  7281  		}
  7282  		break
  7283  	}
  7284  	// match: (MNEG x (MOVDconst [c]))
  7285  	// cond: c%7 == 0 && isPowerOfTwo64(c/7)
  7286  	// result: (SLLconst <x.Type> [log64(c/7)] (SUBshiftLL <x.Type> x x [3]))
  7287  	for {
  7288  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  7289  			x := v_0
  7290  			if v_1.Op != OpARM64MOVDconst {
  7291  				continue
  7292  			}
  7293  			c := auxIntToInt64(v_1.AuxInt)
  7294  			if !(c%7 == 0 && isPowerOfTwo64(c/7)) {
  7295  				continue
  7296  			}
  7297  			v.reset(OpARM64SLLconst)
  7298  			v.Type = x.Type
  7299  			v.AuxInt = int64ToAuxInt(log64(c / 7))
  7300  			v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
  7301  			v0.AuxInt = int64ToAuxInt(3)
  7302  			v0.AddArg2(x, x)
  7303  			v.AddArg(v0)
  7304  			return true
  7305  		}
  7306  		break
  7307  	}
  7308  	// match: (MNEG x (MOVDconst [c]))
  7309  	// cond: c%9 == 0 && isPowerOfTwo64(c/9)
  7310  	// result: (NEG (SLLconst <x.Type> [log64(c/9)] (ADDshiftLL <x.Type> x x [3])))
  7311  	for {
  7312  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  7313  			x := v_0
  7314  			if v_1.Op != OpARM64MOVDconst {
  7315  				continue
  7316  			}
  7317  			c := auxIntToInt64(v_1.AuxInt)
  7318  			if !(c%9 == 0 && isPowerOfTwo64(c/9)) {
  7319  				continue
  7320  			}
  7321  			v.reset(OpARM64NEG)
  7322  			v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
  7323  			v0.AuxInt = int64ToAuxInt(log64(c / 9))
  7324  			v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
  7325  			v1.AuxInt = int64ToAuxInt(3)
  7326  			v1.AddArg2(x, x)
  7327  			v0.AddArg(v1)
  7328  			v.AddArg(v0)
  7329  			return true
  7330  		}
  7331  		break
  7332  	}
  7333  	// match: (MNEG (MOVDconst [c]) (MOVDconst [d]))
  7334  	// result: (MOVDconst [-c*d])
  7335  	for {
  7336  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  7337  			if v_0.Op != OpARM64MOVDconst {
  7338  				continue
  7339  			}
  7340  			c := auxIntToInt64(v_0.AuxInt)
  7341  			if v_1.Op != OpARM64MOVDconst {
  7342  				continue
  7343  			}
  7344  			d := auxIntToInt64(v_1.AuxInt)
  7345  			v.reset(OpARM64MOVDconst)
  7346  			v.AuxInt = int64ToAuxInt(-c * d)
  7347  			return true
  7348  		}
  7349  		break
  7350  	}
  7351  	return false
  7352  }
// rewriteValueARM64_OpARM64MNEGW rewrites (MNEGW x y), the 32-bit
// multiply-negate. It strength-reduces multiplication by selected constants
// into shift/add/sub/negate sequences and constant-folds when both operands
// are constants (truncating to int32, see the final rule). The per-rule
// // match / // cond / // result comments give each rule's source form from
// _gen/ARM64.rules. Generated code — do not hand-edit the logic.
func rewriteValueARM64_OpARM64MNEGW(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block // block in which replacement values are allocated
	// match: (MNEGW x (MOVDconst [c]))
	// cond: int32(c)==-1
	// result: x
	for {
		// MNEGW is commutative: try both argument orders.
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(int32(c) == -1) {
				continue
			}
			v.copyOf(x)
			return true
		}
		break
	}
	// match: (MNEGW _ (MOVDconst [c]))
	// cond: int32(c)==0
	// result: (MOVDconst [0])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(int32(c) == 0) {
				continue
			}
			v.reset(OpARM64MOVDconst)
			v.AuxInt = int64ToAuxInt(0)
			return true
		}
		break
	}
	// match: (MNEGW x (MOVDconst [c]))
	// cond: int32(c)==1
	// result: (NEG x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(int32(c) == 1) {
				continue
			}
			v.reset(OpARM64NEG)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (MNEGW x (MOVDconst [c]))
	// cond: isPowerOfTwo64(c)
	// result: (NEG (SLLconst <x.Type> [log64(c)] x))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(isPowerOfTwo64(c)) {
				continue
			}
			v.reset(OpARM64NEG)
			v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
			v0.AuxInt = int64ToAuxInt(log64(c))
			v0.AddArg(x)
			v.AddArg(v0)
			return true
		}
		break
	}
	// match: (MNEGW x (MOVDconst [c]))
	// cond: isPowerOfTwo64(c-1) && int32(c) >= 3
	// result: (NEG (ADDshiftLL <x.Type> x x [log64(c-1)]))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(isPowerOfTwo64(c-1) && int32(c) >= 3) {
				continue
			}
			v.reset(OpARM64NEG)
			v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
			v0.AuxInt = int64ToAuxInt(log64(c - 1))
			v0.AddArg2(x, x)
			v.AddArg(v0)
			return true
		}
		break
	}
	// match: (MNEGW x (MOVDconst [c]))
	// cond: isPowerOfTwo64(c+1) && int32(c) >= 7
	// result: (NEG (ADDshiftLL <x.Type> (NEG <x.Type> x) x [log64(c+1)]))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(isPowerOfTwo64(c+1) && int32(c) >= 7) {
				continue
			}
			v.reset(OpARM64NEG)
			v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
			v0.AuxInt = int64ToAuxInt(log64(c + 1))
			v1 := b.NewValue0(v.Pos, OpARM64NEG, x.Type)
			v1.AddArg(x)
			v0.AddArg2(v1, x)
			v.AddArg(v0)
			return true
		}
		break
	}
	// match: (MNEGW x (MOVDconst [c]))
	// cond: c%3 == 0 && isPowerOfTwo64(c/3) && is32Bit(c)
	// result: (SLLconst <x.Type> [log64(c/3)] (SUBshiftLL <x.Type> x x [2]))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(c%3 == 0 && isPowerOfTwo64(c/3) && is32Bit(c)) {
				continue
			}
			v.reset(OpARM64SLLconst)
			v.Type = x.Type
			v.AuxInt = int64ToAuxInt(log64(c / 3))
			v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
			v0.AuxInt = int64ToAuxInt(2)
			v0.AddArg2(x, x)
			v.AddArg(v0)
			return true
		}
		break
	}
	// match: (MNEGW x (MOVDconst [c]))
	// cond: c%5 == 0 && isPowerOfTwo64(c/5) && is32Bit(c)
	// result: (NEG (SLLconst <x.Type> [log64(c/5)] (ADDshiftLL <x.Type> x x [2])))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(c%5 == 0 && isPowerOfTwo64(c/5) && is32Bit(c)) {
				continue
			}
			v.reset(OpARM64NEG)
			v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
			v0.AuxInt = int64ToAuxInt(log64(c / 5))
			v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
			v1.AuxInt = int64ToAuxInt(2)
			v1.AddArg2(x, x)
			v0.AddArg(v1)
			v.AddArg(v0)
			return true
		}
		break
	}
	// match: (MNEGW x (MOVDconst [c]))
	// cond: c%7 == 0 && isPowerOfTwo64(c/7) && is32Bit(c)
	// result: (SLLconst <x.Type> [log64(c/7)] (SUBshiftLL <x.Type> x x [3]))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(c%7 == 0 && isPowerOfTwo64(c/7) && is32Bit(c)) {
				continue
			}
			v.reset(OpARM64SLLconst)
			v.Type = x.Type
			v.AuxInt = int64ToAuxInt(log64(c / 7))
			v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
			v0.AuxInt = int64ToAuxInt(3)
			v0.AddArg2(x, x)
			v.AddArg(v0)
			return true
		}
		break
	}
	// match: (MNEGW x (MOVDconst [c]))
	// cond: c%9 == 0 && isPowerOfTwo64(c/9) && is32Bit(c)
	// result: (NEG (SLLconst <x.Type> [log64(c/9)] (ADDshiftLL <x.Type> x x [3])))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(c%9 == 0 && isPowerOfTwo64(c/9) && is32Bit(c)) {
				continue
			}
			v.reset(OpARM64NEG)
			v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
			v0.AuxInt = int64ToAuxInt(log64(c / 9))
			v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
			v1.AuxInt = int64ToAuxInt(3)
			v1.AddArg2(x, x)
			v0.AddArg(v1)
			v.AddArg(v0)
			return true
		}
		break
	}
	// match: (MNEGW (MOVDconst [c]) (MOVDconst [d]))
	// result: (MOVDconst [-int64(int32(c)*int32(d))])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_0.AuxInt)
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			d := auxIntToInt64(v_1.AuxInt)
			v.reset(OpARM64MOVDconst)
			// Fold to the negated 32-bit product, sign-extended to 64 bits.
			v.AuxInt = int64ToAuxInt(-int64(int32(c) * int32(d)))
			return true
		}
		break
	}
	return false
}
// rewriteValueARM64_OpARM64MOD constant-folds the 64-bit signed remainder:
// (MOD (MOVDconst [c]) (MOVDconst [d])) becomes (MOVDconst [c%d]), guarded
// by d != 0 so the compiler never divides by zero while folding.
// Generated code — do not hand-edit the logic.
func rewriteValueARM64_OpARM64MOD(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOD (MOVDconst [c]) (MOVDconst [d]))
	// cond: d != 0
	// result: (MOVDconst [c%d])
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		d := auxIntToInt64(v_1.AuxInt)
		if !(d != 0) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(c % d)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MODW constant-folds the 32-bit signed remainder:
// both constants are truncated to int32 before the modulo, and the result is
// sign-extended back to int64. Guarded by d != 0 to avoid a compile-time
// divide-by-zero. Generated code — do not hand-edit the logic.
func rewriteValueARM64_OpARM64MODW(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MODW (MOVDconst [c]) (MOVDconst [d]))
	// cond: d != 0
	// result: (MOVDconst [int64(int32(c)%int32(d))])
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		d := auxIntToInt64(v_1.AuxInt)
		if !(d != 0) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(int32(c) % int32(d)))
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVBUload rewrites the unsigned byte load:
// it folds ADDconst offsets into the load's AuxInt, converts (ADD ptr idx)
// addressing into the indexed form, merges MOVDaddr symbol+offset into the
// load, forwards a zero from a matching MOVBstorezero, and folds loads from
// read-only symbols into constants. Generated code — do not hand-edit the
// logic.
func rewriteValueARM64_OpARM64MOVBUload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config // needed for the Flag_shared (shared-library) checks below
	// match: (MOVBUload [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVBUload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVBUload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVBUload [off] {sym} (ADD ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVBUloadidx ptr idx mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADD {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64MOVBUloadidx)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVBUload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVBUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVBUload)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVBUload [off] {sym} ptr (MOVBstorezero [off2] {sym2} ptr2 _))
	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
	// result: (MOVDconst [0])
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpARM64MOVBstorezero {
			break
		}
		off2 := auxIntToInt32(v_1.AuxInt)
		sym2 := auxToSym(v_1.Aux)
		ptr2 := v_1.Args[0]
		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (MOVBUload [off] {sym} (SB) _)
	// cond: symIsRO(sym)
	// result: (MOVDconst [int64(read8(sym, int64(off)))])
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpSB || !(symIsRO(sym)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(read8(sym, int64(off))))
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVBUloadidx rewrites the indexed unsigned byte
// load: when either the index or the pointer operand is a small constant it
// converts back to the offset form (MOVBUload), and it forwards a zero from
// a matching MOVBstorezeroidx regardless of the ptr/idx operand order.
// Generated code — do not hand-edit the logic.
func rewriteValueARM64_OpARM64MOVBUloadidx(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVBUloadidx ptr (MOVDconst [c]) mem)
	// cond: is32Bit(c)
	// result: (MOVBUload [int32(c)] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpARM64MOVBUload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVBUloadidx (MOVDconst [c]) ptr mem)
	// cond: is32Bit(c)
	// result: (MOVBUload [int32(c)] ptr mem)
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		ptr := v_1
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpARM64MOVBUload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVBUloadidx ptr idx (MOVBstorezeroidx ptr2 idx2 _))
	// cond: (isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2))
	// result: (MOVDconst [0])
	for {
		ptr := v_0
		idx := v_1
		if v_2.Op != OpARM64MOVBstorezeroidx {
			break
		}
		idx2 := v_2.Args[1]
		ptr2 := v_2.Args[0]
		if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVBUreg rewrites the zero-extend-byte op.
// It elides the extension (replacing it with MOVDreg) when the operand is
// already zero-extended: an unsigned byte load, a prior MOVBUreg, or any of
// the flag-materializing comparison ops (Equal, LessThan, ... — presumably
// these produce 0/1; TODO confirm against the ops definition). It also folds
// the extension into ANDconst masks and constants, and converts
// shift-then-extend into UBFIZ/UBFX bitfield extracts. Generated code — do
// not hand-edit the logic.
func rewriteValueARM64_OpARM64MOVBUreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVBUreg x:(MOVBUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBUload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg x:(MOVBUloadidx _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBUloadidx {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg x:(MOVBUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBUreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg (ANDconst [c] x))
	// result: (ANDconst [c&(1<<8-1)] x)
	for {
		if v_0.Op != OpARM64ANDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		v.reset(OpARM64ANDconst)
		// Narrow the mask to the low byte; the AND then subsumes the extension.
		v.AuxInt = int64ToAuxInt(c & (1<<8 - 1))
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg (MOVDconst [c]))
	// result: (MOVDconst [int64(uint8(c))])
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(uint8(c)))
		return true
	}
	// match: (MOVBUreg x:(Equal _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64Equal {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg x:(NotEqual _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64NotEqual {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg x:(LessThan _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64LessThan {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg x:(LessThanU _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64LessThanU {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg x:(LessThanF _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64LessThanF {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg x:(LessEqual _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64LessEqual {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg x:(LessEqualU _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64LessEqualU {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg x:(LessEqualF _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64LessEqualF {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg x:(GreaterThan _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64GreaterThan {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg x:(GreaterThanU _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64GreaterThanU {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg x:(GreaterThanF _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64GreaterThanF {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg x:(GreaterEqual _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64GreaterEqual {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg x:(GreaterEqualU _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64GreaterEqualU {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg x:(GreaterEqualF _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64GreaterEqualF {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg (SLLconst [lc] x))
	// cond: lc >= 8
	// result: (MOVDconst [0])
	for {
		if v_0.Op != OpARM64SLLconst {
			break
		}
		lc := auxIntToInt64(v_0.AuxInt)
		if !(lc >= 8) {
			break
		}
		// Shifting left by >= 8 leaves nothing in the low byte.
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (MOVBUreg (SLLconst [lc] x))
	// cond: lc < 8
	// result: (UBFIZ [armBFAuxInt(lc, 8-lc)] x)
	for {
		if v_0.Op != OpARM64SLLconst {
			break
		}
		lc := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(lc < 8) {
			break
		}
		v.reset(OpARM64UBFIZ)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(lc, 8-lc))
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg (SRLconst [rc] x))
	// cond: rc < 8
	// result: (UBFX [armBFAuxInt(rc, 8)] x)
	for {
		if v_0.Op != OpARM64SRLconst {
			break
		}
		rc := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(rc < 8) {
			break
		}
		v.reset(OpARM64UBFX)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(rc, 8))
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg (UBFX [bfc] x))
	// cond: bfc.getARM64BFwidth() <= 8
	// result: (UBFX [bfc] x)
	for {
		if v_0.Op != OpARM64UBFX {
			break
		}
		bfc := auxIntToArm64BitField(v_0.AuxInt)
		x := v_0.Args[0]
		if !(bfc.getARM64BFwidth() <= 8) {
			break
		}
		// An extract of width <= 8 already fits in a byte; drop the extension.
		v.reset(OpARM64UBFX)
		v.AuxInt = arm64BitFieldToAuxInt(bfc)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVBload rewrites the signed byte load: it folds
// ADDconst offsets into the load's AuxInt, converts (ADD ptr idx) addressing
// into the indexed form, merges MOVDaddr symbol+offset into the load, and
// forwards a zero from a matching MOVBstorezero. Unlike MOVBUload there is
// no read-only-symbol fold here. Generated code — do not hand-edit the
// logic.
func rewriteValueARM64_OpARM64MOVBload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config // needed for the Flag_shared (shared-library) checks below
	// match: (MOVBload [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVBload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVBload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVBload [off] {sym} (ADD ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVBloadidx ptr idx mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADD {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64MOVBloadidx)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVBload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVBload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVBload)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVBload [off] {sym} ptr (MOVBstorezero [off2] {sym2} ptr2 _))
	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
	// result: (MOVDconst [0])
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpARM64MOVBstorezero {
			break
		}
		off2 := auxIntToInt32(v_1.AuxInt)
		sym2 := auxToSym(v_1.Aux)
		ptr2 := v_1.Args[0]
		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVBloadidx rewrites the indexed signed byte
// load: when either the index or the pointer operand is a small constant it
// converts back to the offset form (MOVBload), and it forwards a zero from a
// matching MOVBstorezeroidx regardless of the ptr/idx operand order.
// Generated code — do not hand-edit the logic.
func rewriteValueARM64_OpARM64MOVBloadidx(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVBloadidx ptr (MOVDconst [c]) mem)
	// cond: is32Bit(c)
	// result: (MOVBload [int32(c)] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpARM64MOVBload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVBloadidx (MOVDconst [c]) ptr mem)
	// cond: is32Bit(c)
	// result: (MOVBload [int32(c)] ptr mem)
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		ptr := v_1
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpARM64MOVBload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVBloadidx ptr idx (MOVBstorezeroidx ptr2 idx2 _))
	// cond: (isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2))
	// result: (MOVDconst [0])
	for {
		ptr := v_0
		idx := v_1
		if v_2.Op != OpARM64MOVBstorezeroidx {
			break
		}
		idx2 := v_2.Args[1]
		ptr2 := v_2.Args[0]
		if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVBreg rewrites the sign-extend-byte op: it
// elides the extension (MOVDreg) when the operand is a signed byte load or a
// prior MOVBreg, folds constants with int8 truncation, and converts
// shift-then-extend into SBFIZ/SBFX signed bitfield ops. Generated code —
// do not hand-edit the logic.
func rewriteValueARM64_OpARM64MOVBreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVBreg x:(MOVBload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBreg x:(MOVBloadidx _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBloadidx {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBreg x:(MOVBreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBreg (MOVDconst [c]))
	// result: (MOVDconst [int64(int8(c))])
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(int8(c)))
		return true
	}
	// match: (MOVBreg (SLLconst [lc] x))
	// cond: lc < 8
	// result: (SBFIZ [armBFAuxInt(lc, 8-lc)] x)
	for {
		if v_0.Op != OpARM64SLLconst {
			break
		}
		lc := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(lc < 8) {
			break
		}
		v.reset(OpARM64SBFIZ)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(lc, 8-lc))
		v.AddArg(x)
		return true
	}
	// match: (MOVBreg (SBFX [bfc] x))
	// cond: bfc.getARM64BFwidth() <= 8
	// result: (SBFX [bfc] x)
	for {
		if v_0.Op != OpARM64SBFX {
			break
		}
		bfc := auxIntToArm64BitField(v_0.AuxInt)
		x := v_0.Args[0]
		if !(bfc.getARM64BFwidth() <= 8) {
			break
		}
		// A signed extract of width <= 8 is already sign-extended; drop the op.
		v.reset(OpARM64SBFX)
		v.AuxInt = arm64BitFieldToAuxInt(bfc)
		v.AddArg(x)
		return true
	}
	return false
}
  8320  func rewriteValueARM64_OpARM64MOVBstore(v *Value) bool {
  8321  	v_2 := v.Args[2]
  8322  	v_1 := v.Args[1]
  8323  	v_0 := v.Args[0]
  8324  	b := v.Block
  8325  	config := b.Func.Config
  8326  	typ := &b.Func.Config.Types
  8327  	// match: (MOVBstore [off1] {sym} (ADDconst [off2] ptr) val mem)
  8328  	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
  8329  	// result: (MOVBstore [off1+int32(off2)] {sym} ptr val mem)
  8330  	for {
  8331  		off1 := auxIntToInt32(v.AuxInt)
  8332  		sym := auxToSym(v.Aux)
  8333  		if v_0.Op != OpARM64ADDconst {
  8334  			break
  8335  		}
  8336  		off2 := auxIntToInt64(v_0.AuxInt)
  8337  		ptr := v_0.Args[0]
  8338  		val := v_1
  8339  		mem := v_2
  8340  		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
  8341  			break
  8342  		}
  8343  		v.reset(OpARM64MOVBstore)
  8344  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  8345  		v.Aux = symToAux(sym)
  8346  		v.AddArg3(ptr, val, mem)
  8347  		return true
  8348  	}
  8349  	// match: (MOVBstore [off] {sym} (ADD ptr idx) val mem)
  8350  	// cond: off == 0 && sym == nil
  8351  	// result: (MOVBstoreidx ptr idx val mem)
  8352  	for {
  8353  		off := auxIntToInt32(v.AuxInt)
  8354  		sym := auxToSym(v.Aux)
  8355  		if v_0.Op != OpARM64ADD {
  8356  			break
  8357  		}
  8358  		idx := v_0.Args[1]
  8359  		ptr := v_0.Args[0]
  8360  		val := v_1
  8361  		mem := v_2
  8362  		if !(off == 0 && sym == nil) {
  8363  			break
  8364  		}
  8365  		v.reset(OpARM64MOVBstoreidx)
  8366  		v.AddArg4(ptr, idx, val, mem)
  8367  		return true
  8368  	}
  8369  	// match: (MOVBstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem)
  8370  	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
  8371  	// result: (MOVBstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
  8372  	for {
  8373  		off1 := auxIntToInt32(v.AuxInt)
  8374  		sym1 := auxToSym(v.Aux)
  8375  		if v_0.Op != OpARM64MOVDaddr {
  8376  			break
  8377  		}
  8378  		off2 := auxIntToInt32(v_0.AuxInt)
  8379  		sym2 := auxToSym(v_0.Aux)
  8380  		ptr := v_0.Args[0]
  8381  		val := v_1
  8382  		mem := v_2
  8383  		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
  8384  			break
  8385  		}
  8386  		v.reset(OpARM64MOVBstore)
  8387  		v.AuxInt = int32ToAuxInt(off1 + off2)
  8388  		v.Aux = symToAux(mergeSym(sym1, sym2))
  8389  		v.AddArg3(ptr, val, mem)
  8390  		return true
  8391  	}
  8392  	// match: (MOVBstore [off] {sym} ptr (MOVDconst [0]) mem)
  8393  	// result: (MOVBstorezero [off] {sym} ptr mem)
  8394  	for {
  8395  		off := auxIntToInt32(v.AuxInt)
  8396  		sym := auxToSym(v.Aux)
  8397  		ptr := v_0
  8398  		if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
  8399  			break
  8400  		}
  8401  		mem := v_2
  8402  		v.reset(OpARM64MOVBstorezero)
  8403  		v.AuxInt = int32ToAuxInt(off)
  8404  		v.Aux = symToAux(sym)
  8405  		v.AddArg2(ptr, mem)
  8406  		return true
  8407  	}
  8408  	// match: (MOVBstore [off] {sym} ptr (MOVBreg x) mem)
  8409  	// result: (MOVBstore [off] {sym} ptr x mem)
  8410  	for {
  8411  		off := auxIntToInt32(v.AuxInt)
  8412  		sym := auxToSym(v.Aux)
  8413  		ptr := v_0
  8414  		if v_1.Op != OpARM64MOVBreg {
  8415  			break
  8416  		}
  8417  		x := v_1.Args[0]
  8418  		mem := v_2
  8419  		v.reset(OpARM64MOVBstore)
  8420  		v.AuxInt = int32ToAuxInt(off)
  8421  		v.Aux = symToAux(sym)
  8422  		v.AddArg3(ptr, x, mem)
  8423  		return true
  8424  	}
  8425  	// match: (MOVBstore [off] {sym} ptr (MOVBUreg x) mem)
  8426  	// result: (MOVBstore [off] {sym} ptr x mem)
  8427  	for {
  8428  		off := auxIntToInt32(v.AuxInt)
  8429  		sym := auxToSym(v.Aux)
  8430  		ptr := v_0
  8431  		if v_1.Op != OpARM64MOVBUreg {
  8432  			break
  8433  		}
  8434  		x := v_1.Args[0]
  8435  		mem := v_2
  8436  		v.reset(OpARM64MOVBstore)
  8437  		v.AuxInt = int32ToAuxInt(off)
  8438  		v.Aux = symToAux(sym)
  8439  		v.AddArg3(ptr, x, mem)
  8440  		return true
  8441  	}
  8442  	// match: (MOVBstore [off] {sym} ptr (MOVHreg x) mem)
  8443  	// result: (MOVBstore [off] {sym} ptr x mem)
  8444  	for {
  8445  		off := auxIntToInt32(v.AuxInt)
  8446  		sym := auxToSym(v.Aux)
  8447  		ptr := v_0
  8448  		if v_1.Op != OpARM64MOVHreg {
  8449  			break
  8450  		}
  8451  		x := v_1.Args[0]
  8452  		mem := v_2
  8453  		v.reset(OpARM64MOVBstore)
  8454  		v.AuxInt = int32ToAuxInt(off)
  8455  		v.Aux = symToAux(sym)
  8456  		v.AddArg3(ptr, x, mem)
  8457  		return true
  8458  	}
  8459  	// match: (MOVBstore [off] {sym} ptr (MOVHUreg x) mem)
  8460  	// result: (MOVBstore [off] {sym} ptr x mem)
  8461  	for {
  8462  		off := auxIntToInt32(v.AuxInt)
  8463  		sym := auxToSym(v.Aux)
  8464  		ptr := v_0
  8465  		if v_1.Op != OpARM64MOVHUreg {
  8466  			break
  8467  		}
  8468  		x := v_1.Args[0]
  8469  		mem := v_2
  8470  		v.reset(OpARM64MOVBstore)
  8471  		v.AuxInt = int32ToAuxInt(off)
  8472  		v.Aux = symToAux(sym)
  8473  		v.AddArg3(ptr, x, mem)
  8474  		return true
  8475  	}
  8476  	// match: (MOVBstore [off] {sym} ptr (MOVWreg x) mem)
  8477  	// result: (MOVBstore [off] {sym} ptr x mem)
  8478  	for {
  8479  		off := auxIntToInt32(v.AuxInt)
  8480  		sym := auxToSym(v.Aux)
  8481  		ptr := v_0
  8482  		if v_1.Op != OpARM64MOVWreg {
  8483  			break
  8484  		}
  8485  		x := v_1.Args[0]
  8486  		mem := v_2
  8487  		v.reset(OpARM64MOVBstore)
  8488  		v.AuxInt = int32ToAuxInt(off)
  8489  		v.Aux = symToAux(sym)
  8490  		v.AddArg3(ptr, x, mem)
  8491  		return true
  8492  	}
  8493  	// match: (MOVBstore [off] {sym} ptr (MOVWUreg x) mem)
  8494  	// result: (MOVBstore [off] {sym} ptr x mem)
  8495  	for {
  8496  		off := auxIntToInt32(v.AuxInt)
  8497  		sym := auxToSym(v.Aux)
  8498  		ptr := v_0
  8499  		if v_1.Op != OpARM64MOVWUreg {
  8500  			break
  8501  		}
  8502  		x := v_1.Args[0]
  8503  		mem := v_2
  8504  		v.reset(OpARM64MOVBstore)
  8505  		v.AuxInt = int32ToAuxInt(off)
  8506  		v.Aux = symToAux(sym)
  8507  		v.AddArg3(ptr, x, mem)
  8508  		return true
  8509  	}
  8510  	// match: (MOVBstore [i] {s} ptr0 (SRLconst [8] w) x:(MOVBstore [i-1] {s} ptr1 w mem))
  8511  	// cond: x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)
  8512  	// result: (MOVHstore [i-1] {s} ptr0 w mem)
  8513  	for {
  8514  		i := auxIntToInt32(v.AuxInt)
  8515  		s := auxToSym(v.Aux)
  8516  		ptr0 := v_0
  8517  		if v_1.Op != OpARM64SRLconst || auxIntToInt64(v_1.AuxInt) != 8 {
  8518  			break
  8519  		}
  8520  		w := v_1.Args[0]
  8521  		x := v_2
  8522  		if x.Op != OpARM64MOVBstore || auxIntToInt32(x.AuxInt) != i-1 || auxToSym(x.Aux) != s {
  8523  			break
  8524  		}
  8525  		mem := x.Args[2]
  8526  		ptr1 := x.Args[0]
  8527  		if w != x.Args[1] || !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) {
  8528  			break
  8529  		}
  8530  		v.reset(OpARM64MOVHstore)
  8531  		v.AuxInt = int32ToAuxInt(i - 1)
  8532  		v.Aux = symToAux(s)
  8533  		v.AddArg3(ptr0, w, mem)
  8534  		return true
  8535  	}
  8536  	// match: (MOVBstore [1] {s} (ADD ptr0 idx0) (SRLconst [8] w) x:(MOVBstoreidx ptr1 idx1 w mem))
  8537  	// cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)
  8538  	// result: (MOVHstoreidx ptr1 idx1 w mem)
  8539  	for {
  8540  		if auxIntToInt32(v.AuxInt) != 1 {
  8541  			break
  8542  		}
  8543  		s := auxToSym(v.Aux)
  8544  		if v_0.Op != OpARM64ADD {
  8545  			break
  8546  		}
  8547  		_ = v_0.Args[1]
  8548  		v_0_0 := v_0.Args[0]
  8549  		v_0_1 := v_0.Args[1]
  8550  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
  8551  			ptr0 := v_0_0
  8552  			idx0 := v_0_1
  8553  			if v_1.Op != OpARM64SRLconst || auxIntToInt64(v_1.AuxInt) != 8 {
  8554  				continue
  8555  			}
  8556  			w := v_1.Args[0]
  8557  			x := v_2
  8558  			if x.Op != OpARM64MOVBstoreidx {
  8559  				continue
  8560  			}
  8561  			mem := x.Args[3]
  8562  			ptr1 := x.Args[0]
  8563  			idx1 := x.Args[1]
  8564  			if w != x.Args[2] || !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
  8565  				continue
  8566  			}
  8567  			v.reset(OpARM64MOVHstoreidx)
  8568  			v.AddArg4(ptr1, idx1, w, mem)
  8569  			return true
  8570  		}
  8571  		break
  8572  	}
  8573  	// match: (MOVBstore [i] {s} ptr0 (UBFX [armBFAuxInt(8, 8)] w) x:(MOVBstore [i-1] {s} ptr1 w mem))
  8574  	// cond: x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)
  8575  	// result: (MOVHstore [i-1] {s} ptr0 w mem)
  8576  	for {
  8577  		i := auxIntToInt32(v.AuxInt)
  8578  		s := auxToSym(v.Aux)
  8579  		ptr0 := v_0
  8580  		if v_1.Op != OpARM64UBFX || auxIntToArm64BitField(v_1.AuxInt) != armBFAuxInt(8, 8) {
  8581  			break
  8582  		}
  8583  		w := v_1.Args[0]
  8584  		x := v_2
  8585  		if x.Op != OpARM64MOVBstore || auxIntToInt32(x.AuxInt) != i-1 || auxToSym(x.Aux) != s {
  8586  			break
  8587  		}
  8588  		mem := x.Args[2]
  8589  		ptr1 := x.Args[0]
  8590  		if w != x.Args[1] || !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) {
  8591  			break
  8592  		}
  8593  		v.reset(OpARM64MOVHstore)
  8594  		v.AuxInt = int32ToAuxInt(i - 1)
  8595  		v.Aux = symToAux(s)
  8596  		v.AddArg3(ptr0, w, mem)
  8597  		return true
  8598  	}
  8599  	// match: (MOVBstore [1] {s} (ADD ptr0 idx0) (UBFX [armBFAuxInt(8, 8)] w) x:(MOVBstoreidx ptr1 idx1 w mem))
  8600  	// cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)
  8601  	// result: (MOVHstoreidx ptr1 idx1 w mem)
  8602  	for {
  8603  		if auxIntToInt32(v.AuxInt) != 1 {
  8604  			break
  8605  		}
  8606  		s := auxToSym(v.Aux)
  8607  		if v_0.Op != OpARM64ADD {
  8608  			break
  8609  		}
  8610  		_ = v_0.Args[1]
  8611  		v_0_0 := v_0.Args[0]
  8612  		v_0_1 := v_0.Args[1]
  8613  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
  8614  			ptr0 := v_0_0
  8615  			idx0 := v_0_1
  8616  			if v_1.Op != OpARM64UBFX || auxIntToArm64BitField(v_1.AuxInt) != armBFAuxInt(8, 8) {
  8617  				continue
  8618  			}
  8619  			w := v_1.Args[0]
  8620  			x := v_2
  8621  			if x.Op != OpARM64MOVBstoreidx {
  8622  				continue
  8623  			}
  8624  			mem := x.Args[3]
  8625  			ptr1 := x.Args[0]
  8626  			idx1 := x.Args[1]
  8627  			if w != x.Args[2] || !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
  8628  				continue
  8629  			}
  8630  			v.reset(OpARM64MOVHstoreidx)
  8631  			v.AddArg4(ptr1, idx1, w, mem)
  8632  			return true
  8633  		}
  8634  		break
  8635  	}
  8636  	// match: (MOVBstore [i] {s} ptr0 (UBFX [armBFAuxInt(8, 24)] w) x:(MOVBstore [i-1] {s} ptr1 w mem))
  8637  	// cond: x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)
  8638  	// result: (MOVHstore [i-1] {s} ptr0 w mem)
  8639  	for {
  8640  		i := auxIntToInt32(v.AuxInt)
  8641  		s := auxToSym(v.Aux)
  8642  		ptr0 := v_0
  8643  		if v_1.Op != OpARM64UBFX || auxIntToArm64BitField(v_1.AuxInt) != armBFAuxInt(8, 24) {
  8644  			break
  8645  		}
  8646  		w := v_1.Args[0]
  8647  		x := v_2
  8648  		if x.Op != OpARM64MOVBstore || auxIntToInt32(x.AuxInt) != i-1 || auxToSym(x.Aux) != s {
  8649  			break
  8650  		}
  8651  		mem := x.Args[2]
  8652  		ptr1 := x.Args[0]
  8653  		if w != x.Args[1] || !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) {
  8654  			break
  8655  		}
  8656  		v.reset(OpARM64MOVHstore)
  8657  		v.AuxInt = int32ToAuxInt(i - 1)
  8658  		v.Aux = symToAux(s)
  8659  		v.AddArg3(ptr0, w, mem)
  8660  		return true
  8661  	}
  8662  	// match: (MOVBstore [1] {s} (ADD ptr0 idx0) (UBFX [armBFAuxInt(8, 24)] w) x:(MOVBstoreidx ptr1 idx1 w mem))
  8663  	// cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)
  8664  	// result: (MOVHstoreidx ptr1 idx1 w mem)
  8665  	for {
  8666  		if auxIntToInt32(v.AuxInt) != 1 {
  8667  			break
  8668  		}
  8669  		s := auxToSym(v.Aux)
  8670  		if v_0.Op != OpARM64ADD {
  8671  			break
  8672  		}
  8673  		_ = v_0.Args[1]
  8674  		v_0_0 := v_0.Args[0]
  8675  		v_0_1 := v_0.Args[1]
  8676  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
  8677  			ptr0 := v_0_0
  8678  			idx0 := v_0_1
  8679  			if v_1.Op != OpARM64UBFX || auxIntToArm64BitField(v_1.AuxInt) != armBFAuxInt(8, 24) {
  8680  				continue
  8681  			}
  8682  			w := v_1.Args[0]
  8683  			x := v_2
  8684  			if x.Op != OpARM64MOVBstoreidx {
  8685  				continue
  8686  			}
  8687  			mem := x.Args[3]
  8688  			ptr1 := x.Args[0]
  8689  			idx1 := x.Args[1]
  8690  			if w != x.Args[2] || !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
  8691  				continue
  8692  			}
  8693  			v.reset(OpARM64MOVHstoreidx)
  8694  			v.AddArg4(ptr1, idx1, w, mem)
  8695  			return true
  8696  		}
  8697  		break
  8698  	}
  8699  	// match: (MOVBstore [i] {s} ptr0 (SRLconst [8] (MOVDreg w)) x:(MOVBstore [i-1] {s} ptr1 w mem))
  8700  	// cond: x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)
  8701  	// result: (MOVHstore [i-1] {s} ptr0 w mem)
  8702  	for {
  8703  		i := auxIntToInt32(v.AuxInt)
  8704  		s := auxToSym(v.Aux)
  8705  		ptr0 := v_0
  8706  		if v_1.Op != OpARM64SRLconst || auxIntToInt64(v_1.AuxInt) != 8 {
  8707  			break
  8708  		}
  8709  		v_1_0 := v_1.Args[0]
  8710  		if v_1_0.Op != OpARM64MOVDreg {
  8711  			break
  8712  		}
  8713  		w := v_1_0.Args[0]
  8714  		x := v_2
  8715  		if x.Op != OpARM64MOVBstore || auxIntToInt32(x.AuxInt) != i-1 || auxToSym(x.Aux) != s {
  8716  			break
  8717  		}
  8718  		mem := x.Args[2]
  8719  		ptr1 := x.Args[0]
  8720  		if w != x.Args[1] || !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) {
  8721  			break
  8722  		}
  8723  		v.reset(OpARM64MOVHstore)
  8724  		v.AuxInt = int32ToAuxInt(i - 1)
  8725  		v.Aux = symToAux(s)
  8726  		v.AddArg3(ptr0, w, mem)
  8727  		return true
  8728  	}
  8729  	// match: (MOVBstore [1] {s} (ADD ptr0 idx0) (SRLconst [8] (MOVDreg w)) x:(MOVBstoreidx ptr1 idx1 w mem))
  8730  	// cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)
  8731  	// result: (MOVHstoreidx ptr1 idx1 w mem)
  8732  	for {
  8733  		if auxIntToInt32(v.AuxInt) != 1 {
  8734  			break
  8735  		}
  8736  		s := auxToSym(v.Aux)
  8737  		if v_0.Op != OpARM64ADD {
  8738  			break
  8739  		}
  8740  		_ = v_0.Args[1]
  8741  		v_0_0 := v_0.Args[0]
  8742  		v_0_1 := v_0.Args[1]
  8743  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
  8744  			ptr0 := v_0_0
  8745  			idx0 := v_0_1
  8746  			if v_1.Op != OpARM64SRLconst || auxIntToInt64(v_1.AuxInt) != 8 {
  8747  				continue
  8748  			}
  8749  			v_1_0 := v_1.Args[0]
  8750  			if v_1_0.Op != OpARM64MOVDreg {
  8751  				continue
  8752  			}
  8753  			w := v_1_0.Args[0]
  8754  			x := v_2
  8755  			if x.Op != OpARM64MOVBstoreidx {
  8756  				continue
  8757  			}
  8758  			mem := x.Args[3]
  8759  			ptr1 := x.Args[0]
  8760  			idx1 := x.Args[1]
  8761  			if w != x.Args[2] || !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
  8762  				continue
  8763  			}
  8764  			v.reset(OpARM64MOVHstoreidx)
  8765  			v.AddArg4(ptr1, idx1, w, mem)
  8766  			return true
  8767  		}
  8768  		break
  8769  	}
  8770  	// match: (MOVBstore [i] {s} ptr0 (SRLconst [j] w) x:(MOVBstore [i-1] {s} ptr1 w0:(SRLconst [j-8] w) mem))
  8771  	// cond: x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)
  8772  	// result: (MOVHstore [i-1] {s} ptr0 w0 mem)
  8773  	for {
  8774  		i := auxIntToInt32(v.AuxInt)
  8775  		s := auxToSym(v.Aux)
  8776  		ptr0 := v_0
  8777  		if v_1.Op != OpARM64SRLconst {
  8778  			break
  8779  		}
  8780  		j := auxIntToInt64(v_1.AuxInt)
  8781  		w := v_1.Args[0]
  8782  		x := v_2
  8783  		if x.Op != OpARM64MOVBstore || auxIntToInt32(x.AuxInt) != i-1 || auxToSym(x.Aux) != s {
  8784  			break
  8785  		}
  8786  		mem := x.Args[2]
  8787  		ptr1 := x.Args[0]
  8788  		w0 := x.Args[1]
  8789  		if w0.Op != OpARM64SRLconst || auxIntToInt64(w0.AuxInt) != j-8 || w != w0.Args[0] || !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) {
  8790  			break
  8791  		}
  8792  		v.reset(OpARM64MOVHstore)
  8793  		v.AuxInt = int32ToAuxInt(i - 1)
  8794  		v.Aux = symToAux(s)
  8795  		v.AddArg3(ptr0, w0, mem)
  8796  		return true
  8797  	}
  8798  	// match: (MOVBstore [1] {s} (ADD ptr0 idx0) (SRLconst [j] w) x:(MOVBstoreidx ptr1 idx1 w0:(SRLconst [j-8] w) mem))
  8799  	// cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)
  8800  	// result: (MOVHstoreidx ptr1 idx1 w0 mem)
  8801  	for {
  8802  		if auxIntToInt32(v.AuxInt) != 1 {
  8803  			break
  8804  		}
  8805  		s := auxToSym(v.Aux)
  8806  		if v_0.Op != OpARM64ADD {
  8807  			break
  8808  		}
  8809  		_ = v_0.Args[1]
  8810  		v_0_0 := v_0.Args[0]
  8811  		v_0_1 := v_0.Args[1]
  8812  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
  8813  			ptr0 := v_0_0
  8814  			idx0 := v_0_1
  8815  			if v_1.Op != OpARM64SRLconst {
  8816  				continue
  8817  			}
  8818  			j := auxIntToInt64(v_1.AuxInt)
  8819  			w := v_1.Args[0]
  8820  			x := v_2
  8821  			if x.Op != OpARM64MOVBstoreidx {
  8822  				continue
  8823  			}
  8824  			mem := x.Args[3]
  8825  			ptr1 := x.Args[0]
  8826  			idx1 := x.Args[1]
  8827  			w0 := x.Args[2]
  8828  			if w0.Op != OpARM64SRLconst || auxIntToInt64(w0.AuxInt) != j-8 || w != w0.Args[0] || !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
  8829  				continue
  8830  			}
  8831  			v.reset(OpARM64MOVHstoreidx)
  8832  			v.AddArg4(ptr1, idx1, w0, mem)
  8833  			return true
  8834  		}
  8835  		break
  8836  	}
  8837  	// match: (MOVBstore [i] {s} ptr0 (UBFX [bfc] w) x:(MOVBstore [i-1] {s} ptr1 w0:(UBFX [bfc2] w) mem))
  8838  	// cond: x.Uses == 1 && isSamePtr(ptr0, ptr1) && bfc.getARM64BFwidth() == 32 - bfc.getARM64BFlsb() && bfc2.getARM64BFwidth() == 32 - bfc2.getARM64BFlsb() && bfc2.getARM64BFlsb() == bfc.getARM64BFlsb() - 8 && clobber(x)
  8839  	// result: (MOVHstore [i-1] {s} ptr0 w0 mem)
  8840  	for {
  8841  		i := auxIntToInt32(v.AuxInt)
  8842  		s := auxToSym(v.Aux)
  8843  		ptr0 := v_0
  8844  		if v_1.Op != OpARM64UBFX {
  8845  			break
  8846  		}
  8847  		bfc := auxIntToArm64BitField(v_1.AuxInt)
  8848  		w := v_1.Args[0]
  8849  		x := v_2
  8850  		if x.Op != OpARM64MOVBstore || auxIntToInt32(x.AuxInt) != i-1 || auxToSym(x.Aux) != s {
  8851  			break
  8852  		}
  8853  		mem := x.Args[2]
  8854  		ptr1 := x.Args[0]
  8855  		w0 := x.Args[1]
  8856  		if w0.Op != OpARM64UBFX {
  8857  			break
  8858  		}
  8859  		bfc2 := auxIntToArm64BitField(w0.AuxInt)
  8860  		if w != w0.Args[0] || !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && bfc.getARM64BFwidth() == 32-bfc.getARM64BFlsb() && bfc2.getARM64BFwidth() == 32-bfc2.getARM64BFlsb() && bfc2.getARM64BFlsb() == bfc.getARM64BFlsb()-8 && clobber(x)) {
  8861  			break
  8862  		}
  8863  		v.reset(OpARM64MOVHstore)
  8864  		v.AuxInt = int32ToAuxInt(i - 1)
  8865  		v.Aux = symToAux(s)
  8866  		v.AddArg3(ptr0, w0, mem)
  8867  		return true
  8868  	}
  8869  	// match: (MOVBstore [1] {s} (ADD ptr0 idx0) (UBFX [bfc] w) x:(MOVBstoreidx ptr1 idx1 w0:(UBFX [bfc2] w) mem))
  8870  	// cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && bfc.getARM64BFwidth() == 32 - bfc.getARM64BFlsb() && bfc2.getARM64BFwidth() == 32 - bfc2.getARM64BFlsb() && bfc2.getARM64BFlsb() == bfc.getARM64BFlsb() - 8 && clobber(x)
  8871  	// result: (MOVHstoreidx ptr1 idx1 w0 mem)
  8872  	for {
  8873  		if auxIntToInt32(v.AuxInt) != 1 {
  8874  			break
  8875  		}
  8876  		s := auxToSym(v.Aux)
  8877  		if v_0.Op != OpARM64ADD {
  8878  			break
  8879  		}
  8880  		_ = v_0.Args[1]
  8881  		v_0_0 := v_0.Args[0]
  8882  		v_0_1 := v_0.Args[1]
  8883  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
  8884  			ptr0 := v_0_0
  8885  			idx0 := v_0_1
  8886  			if v_1.Op != OpARM64UBFX {
  8887  				continue
  8888  			}
  8889  			bfc := auxIntToArm64BitField(v_1.AuxInt)
  8890  			w := v_1.Args[0]
  8891  			x := v_2
  8892  			if x.Op != OpARM64MOVBstoreidx {
  8893  				continue
  8894  			}
  8895  			mem := x.Args[3]
  8896  			ptr1 := x.Args[0]
  8897  			idx1 := x.Args[1]
  8898  			w0 := x.Args[2]
  8899  			if w0.Op != OpARM64UBFX {
  8900  				continue
  8901  			}
  8902  			bfc2 := auxIntToArm64BitField(w0.AuxInt)
  8903  			if w != w0.Args[0] || !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && bfc.getARM64BFwidth() == 32-bfc.getARM64BFlsb() && bfc2.getARM64BFwidth() == 32-bfc2.getARM64BFlsb() && bfc2.getARM64BFlsb() == bfc.getARM64BFlsb()-8 && clobber(x)) {
  8904  				continue
  8905  			}
  8906  			v.reset(OpARM64MOVHstoreidx)
  8907  			v.AddArg4(ptr1, idx1, w0, mem)
  8908  			return true
  8909  		}
  8910  		break
  8911  	}
  8912  	// match: (MOVBstore [i] {s} ptr0 (SRLconst [j] (MOVDreg w)) x:(MOVBstore [i-1] {s} ptr1 w0:(SRLconst [j-8] (MOVDreg w)) mem))
  8913  	// cond: x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)
  8914  	// result: (MOVHstore [i-1] {s} ptr0 w0 mem)
  8915  	for {
  8916  		i := auxIntToInt32(v.AuxInt)
  8917  		s := auxToSym(v.Aux)
  8918  		ptr0 := v_0
  8919  		if v_1.Op != OpARM64SRLconst {
  8920  			break
  8921  		}
  8922  		j := auxIntToInt64(v_1.AuxInt)
  8923  		v_1_0 := v_1.Args[0]
  8924  		if v_1_0.Op != OpARM64MOVDreg {
  8925  			break
  8926  		}
  8927  		w := v_1_0.Args[0]
  8928  		x := v_2
  8929  		if x.Op != OpARM64MOVBstore || auxIntToInt32(x.AuxInt) != i-1 || auxToSym(x.Aux) != s {
  8930  			break
  8931  		}
  8932  		mem := x.Args[2]
  8933  		ptr1 := x.Args[0]
  8934  		w0 := x.Args[1]
  8935  		if w0.Op != OpARM64SRLconst || auxIntToInt64(w0.AuxInt) != j-8 {
  8936  			break
  8937  		}
  8938  		w0_0 := w0.Args[0]
  8939  		if w0_0.Op != OpARM64MOVDreg || w != w0_0.Args[0] || !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) {
  8940  			break
  8941  		}
  8942  		v.reset(OpARM64MOVHstore)
  8943  		v.AuxInt = int32ToAuxInt(i - 1)
  8944  		v.Aux = symToAux(s)
  8945  		v.AddArg3(ptr0, w0, mem)
  8946  		return true
  8947  	}
  8948  	// match: (MOVBstore [1] {s} (ADD ptr0 idx0) (SRLconst [j] (MOVDreg w)) x:(MOVBstoreidx ptr1 idx1 w0:(SRLconst [j-8] (MOVDreg w)) mem))
  8949  	// cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)
  8950  	// result: (MOVHstoreidx ptr1 idx1 w0 mem)
  8951  	for {
  8952  		if auxIntToInt32(v.AuxInt) != 1 {
  8953  			break
  8954  		}
  8955  		s := auxToSym(v.Aux)
  8956  		if v_0.Op != OpARM64ADD {
  8957  			break
  8958  		}
  8959  		_ = v_0.Args[1]
  8960  		v_0_0 := v_0.Args[0]
  8961  		v_0_1 := v_0.Args[1]
  8962  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
  8963  			ptr0 := v_0_0
  8964  			idx0 := v_0_1
  8965  			if v_1.Op != OpARM64SRLconst {
  8966  				continue
  8967  			}
  8968  			j := auxIntToInt64(v_1.AuxInt)
  8969  			v_1_0 := v_1.Args[0]
  8970  			if v_1_0.Op != OpARM64MOVDreg {
  8971  				continue
  8972  			}
  8973  			w := v_1_0.Args[0]
  8974  			x := v_2
  8975  			if x.Op != OpARM64MOVBstoreidx {
  8976  				continue
  8977  			}
  8978  			mem := x.Args[3]
  8979  			ptr1 := x.Args[0]
  8980  			idx1 := x.Args[1]
  8981  			w0 := x.Args[2]
  8982  			if w0.Op != OpARM64SRLconst || auxIntToInt64(w0.AuxInt) != j-8 {
  8983  				continue
  8984  			}
  8985  			w0_0 := w0.Args[0]
  8986  			if w0_0.Op != OpARM64MOVDreg || w != w0_0.Args[0] || !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
  8987  				continue
  8988  			}
  8989  			v.reset(OpARM64MOVHstoreidx)
  8990  			v.AddArg4(ptr1, idx1, w0, mem)
  8991  			return true
  8992  		}
  8993  		break
  8994  	}
  8995  	// match: (MOVBstore [i] {s} ptr w x0:(MOVBstore [i-1] {s} ptr (SRLconst [8] w) x1:(MOVBstore [i-2] {s} ptr (SRLconst [16] w) x2:(MOVBstore [i-3] {s} ptr (SRLconst [24] w) x3:(MOVBstore [i-4] {s} ptr (SRLconst [32] w) x4:(MOVBstore [i-5] {s} ptr (SRLconst [40] w) x5:(MOVBstore [i-6] {s} ptr (SRLconst [48] w) x6:(MOVBstore [i-7] {s} ptr (SRLconst [56] w) mem))))))))
  8996  	// cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && clobber(x0, x1, x2, x3, x4, x5, x6)
  8997  	// result: (MOVDstore [i-7] {s} ptr (REV <typ.UInt64> w) mem)
  8998  	for {
  8999  		i := auxIntToInt32(v.AuxInt)
  9000  		s := auxToSym(v.Aux)
  9001  		ptr := v_0
  9002  		w := v_1
  9003  		x0 := v_2
  9004  		if x0.Op != OpARM64MOVBstore || auxIntToInt32(x0.AuxInt) != i-1 || auxToSym(x0.Aux) != s {
  9005  			break
  9006  		}
  9007  		_ = x0.Args[2]
  9008  		if ptr != x0.Args[0] {
  9009  			break
  9010  		}
  9011  		x0_1 := x0.Args[1]
  9012  		if x0_1.Op != OpARM64SRLconst || auxIntToInt64(x0_1.AuxInt) != 8 || w != x0_1.Args[0] {
  9013  			break
  9014  		}
  9015  		x1 := x0.Args[2]
  9016  		if x1.Op != OpARM64MOVBstore || auxIntToInt32(x1.AuxInt) != i-2 || auxToSym(x1.Aux) != s {
  9017  			break
  9018  		}
  9019  		_ = x1.Args[2]
  9020  		if ptr != x1.Args[0] {
  9021  			break
  9022  		}
  9023  		x1_1 := x1.Args[1]
  9024  		if x1_1.Op != OpARM64SRLconst || auxIntToInt64(x1_1.AuxInt) != 16 || w != x1_1.Args[0] {
  9025  			break
  9026  		}
  9027  		x2 := x1.Args[2]
  9028  		if x2.Op != OpARM64MOVBstore || auxIntToInt32(x2.AuxInt) != i-3 || auxToSym(x2.Aux) != s {
  9029  			break
  9030  		}
  9031  		_ = x2.Args[2]
  9032  		if ptr != x2.Args[0] {
  9033  			break
  9034  		}
  9035  		x2_1 := x2.Args[1]
  9036  		if x2_1.Op != OpARM64SRLconst || auxIntToInt64(x2_1.AuxInt) != 24 || w != x2_1.Args[0] {
  9037  			break
  9038  		}
  9039  		x3 := x2.Args[2]
  9040  		if x3.Op != OpARM64MOVBstore || auxIntToInt32(x3.AuxInt) != i-4 || auxToSym(x3.Aux) != s {
  9041  			break
  9042  		}
  9043  		_ = x3.Args[2]
  9044  		if ptr != x3.Args[0] {
  9045  			break
  9046  		}
  9047  		x3_1 := x3.Args[1]
  9048  		if x3_1.Op != OpARM64SRLconst || auxIntToInt64(x3_1.AuxInt) != 32 || w != x3_1.Args[0] {
  9049  			break
  9050  		}
  9051  		x4 := x3.Args[2]
  9052  		if x4.Op != OpARM64MOVBstore || auxIntToInt32(x4.AuxInt) != i-5 || auxToSym(x4.Aux) != s {
  9053  			break
  9054  		}
  9055  		_ = x4.Args[2]
  9056  		if ptr != x4.Args[0] {
  9057  			break
  9058  		}
  9059  		x4_1 := x4.Args[1]
  9060  		if x4_1.Op != OpARM64SRLconst || auxIntToInt64(x4_1.AuxInt) != 40 || w != x4_1.Args[0] {
  9061  			break
  9062  		}
  9063  		x5 := x4.Args[2]
  9064  		if x5.Op != OpARM64MOVBstore || auxIntToInt32(x5.AuxInt) != i-6 || auxToSym(x5.Aux) != s {
  9065  			break
  9066  		}
  9067  		_ = x5.Args[2]
  9068  		if ptr != x5.Args[0] {
  9069  			break
  9070  		}
  9071  		x5_1 := x5.Args[1]
  9072  		if x5_1.Op != OpARM64SRLconst || auxIntToInt64(x5_1.AuxInt) != 48 || w != x5_1.Args[0] {
  9073  			break
  9074  		}
  9075  		x6 := x5.Args[2]
  9076  		if x6.Op != OpARM64MOVBstore || auxIntToInt32(x6.AuxInt) != i-7 || auxToSym(x6.Aux) != s {
  9077  			break
  9078  		}
  9079  		mem := x6.Args[2]
  9080  		if ptr != x6.Args[0] {
  9081  			break
  9082  		}
  9083  		x6_1 := x6.Args[1]
  9084  		if x6_1.Op != OpARM64SRLconst || auxIntToInt64(x6_1.AuxInt) != 56 || w != x6_1.Args[0] || !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && clobber(x0, x1, x2, x3, x4, x5, x6)) {
  9085  			break
  9086  		}
  9087  		v.reset(OpARM64MOVDstore)
  9088  		v.AuxInt = int32ToAuxInt(i - 7)
  9089  		v.Aux = symToAux(s)
  9090  		v0 := b.NewValue0(x6.Pos, OpARM64REV, typ.UInt64)
  9091  		v0.AddArg(w)
  9092  		v.AddArg3(ptr, v0, mem)
  9093  		return true
  9094  	}
  9095  	// match: (MOVBstore [7] {s} p w x0:(MOVBstore [6] {s} p (SRLconst [8] w) x1:(MOVBstore [5] {s} p (SRLconst [16] w) x2:(MOVBstore [4] {s} p (SRLconst [24] w) x3:(MOVBstore [3] {s} p (SRLconst [32] w) x4:(MOVBstore [2] {s} p (SRLconst [40] w) x5:(MOVBstore [1] {s} p1:(ADD ptr1 idx1) (SRLconst [48] w) x6:(MOVBstoreidx ptr0 idx0 (SRLconst [56] w) mem))))))))
  9096  	// cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0, x1, x2, x3, x4, x5, x6)
  9097  	// result: (MOVDstoreidx ptr0 idx0 (REV <typ.UInt64> w) mem)
  9098  	for {
  9099  		if auxIntToInt32(v.AuxInt) != 7 {
  9100  			break
  9101  		}
  9102  		s := auxToSym(v.Aux)
  9103  		p := v_0
  9104  		w := v_1
  9105  		x0 := v_2
  9106  		if x0.Op != OpARM64MOVBstore || auxIntToInt32(x0.AuxInt) != 6 || auxToSym(x0.Aux) != s {
  9107  			break
  9108  		}
  9109  		_ = x0.Args[2]
  9110  		if p != x0.Args[0] {
  9111  			break
  9112  		}
  9113  		x0_1 := x0.Args[1]
  9114  		if x0_1.Op != OpARM64SRLconst || auxIntToInt64(x0_1.AuxInt) != 8 || w != x0_1.Args[0] {
  9115  			break
  9116  		}
  9117  		x1 := x0.Args[2]
  9118  		if x1.Op != OpARM64MOVBstore || auxIntToInt32(x1.AuxInt) != 5 || auxToSym(x1.Aux) != s {
  9119  			break
  9120  		}
  9121  		_ = x1.Args[2]
  9122  		if p != x1.Args[0] {
  9123  			break
  9124  		}
  9125  		x1_1 := x1.Args[1]
  9126  		if x1_1.Op != OpARM64SRLconst || auxIntToInt64(x1_1.AuxInt) != 16 || w != x1_1.Args[0] {
  9127  			break
  9128  		}
  9129  		x2 := x1.Args[2]
  9130  		if x2.Op != OpARM64MOVBstore || auxIntToInt32(x2.AuxInt) != 4 || auxToSym(x2.Aux) != s {
  9131  			break
  9132  		}
  9133  		_ = x2.Args[2]
  9134  		if p != x2.Args[0] {
  9135  			break
  9136  		}
  9137  		x2_1 := x2.Args[1]
  9138  		if x2_1.Op != OpARM64SRLconst || auxIntToInt64(x2_1.AuxInt) != 24 || w != x2_1.Args[0] {
  9139  			break
  9140  		}
  9141  		x3 := x2.Args[2]
  9142  		if x3.Op != OpARM64MOVBstore || auxIntToInt32(x3.AuxInt) != 3 || auxToSym(x3.Aux) != s {
  9143  			break
  9144  		}
  9145  		_ = x3.Args[2]
  9146  		if p != x3.Args[0] {
  9147  			break
  9148  		}
  9149  		x3_1 := x3.Args[1]
  9150  		if x3_1.Op != OpARM64SRLconst || auxIntToInt64(x3_1.AuxInt) != 32 || w != x3_1.Args[0] {
  9151  			break
  9152  		}
  9153  		x4 := x3.Args[2]
  9154  		if x4.Op != OpARM64MOVBstore || auxIntToInt32(x4.AuxInt) != 2 || auxToSym(x4.Aux) != s {
  9155  			break
  9156  		}
  9157  		_ = x4.Args[2]
  9158  		if p != x4.Args[0] {
  9159  			break
  9160  		}
  9161  		x4_1 := x4.Args[1]
  9162  		if x4_1.Op != OpARM64SRLconst || auxIntToInt64(x4_1.AuxInt) != 40 || w != x4_1.Args[0] {
  9163  			break
  9164  		}
  9165  		x5 := x4.Args[2]
  9166  		if x5.Op != OpARM64MOVBstore || auxIntToInt32(x5.AuxInt) != 1 || auxToSym(x5.Aux) != s {
  9167  			break
  9168  		}
  9169  		_ = x5.Args[2]
  9170  		p1 := x5.Args[0]
  9171  		if p1.Op != OpARM64ADD {
  9172  			break
  9173  		}
  9174  		_ = p1.Args[1]
  9175  		p1_0 := p1.Args[0]
  9176  		p1_1 := p1.Args[1]
  9177  		for _i0 := 0; _i0 <= 1; _i0, p1_0, p1_1 = _i0+1, p1_1, p1_0 {
  9178  			ptr1 := p1_0
  9179  			idx1 := p1_1
  9180  			x5_1 := x5.Args[1]
  9181  			if x5_1.Op != OpARM64SRLconst || auxIntToInt64(x5_1.AuxInt) != 48 || w != x5_1.Args[0] {
  9182  				continue
  9183  			}
  9184  			x6 := x5.Args[2]
  9185  			if x6.Op != OpARM64MOVBstoreidx {
  9186  				continue
  9187  			}
  9188  			mem := x6.Args[3]
  9189  			ptr0 := x6.Args[0]
  9190  			idx0 := x6.Args[1]
  9191  			x6_2 := x6.Args[2]
  9192  			if x6_2.Op != OpARM64SRLconst || auxIntToInt64(x6_2.AuxInt) != 56 || w != x6_2.Args[0] || !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0, x1, x2, x3, x4, x5, x6)) {
  9193  				continue
  9194  			}
  9195  			v.reset(OpARM64MOVDstoreidx)
  9196  			v0 := b.NewValue0(x5.Pos, OpARM64REV, typ.UInt64)
  9197  			v0.AddArg(w)
  9198  			v.AddArg4(ptr0, idx0, v0, mem)
  9199  			return true
  9200  		}
  9201  		break
  9202  	}
  9203  	// match: (MOVBstore [i] {s} ptr w x0:(MOVBstore [i-1] {s} ptr (UBFX [armBFAuxInt(8, 24)] w) x1:(MOVBstore [i-2] {s} ptr (UBFX [armBFAuxInt(16, 16)] w) x2:(MOVBstore [i-3] {s} ptr (UBFX [armBFAuxInt(24, 8)] w) mem))))
  9204  	// cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && clobber(x0, x1, x2)
  9205  	// result: (MOVWstore [i-3] {s} ptr (REVW <typ.UInt32> w) mem)
  9206  	for {
  9207  		i := auxIntToInt32(v.AuxInt)
  9208  		s := auxToSym(v.Aux)
  9209  		ptr := v_0
  9210  		w := v_1
  9211  		x0 := v_2
  9212  		if x0.Op != OpARM64MOVBstore || auxIntToInt32(x0.AuxInt) != i-1 || auxToSym(x0.Aux) != s {
  9213  			break
  9214  		}
  9215  		_ = x0.Args[2]
  9216  		if ptr != x0.Args[0] {
  9217  			break
  9218  		}
  9219  		x0_1 := x0.Args[1]
  9220  		if x0_1.Op != OpARM64UBFX || auxIntToArm64BitField(x0_1.AuxInt) != armBFAuxInt(8, 24) || w != x0_1.Args[0] {
  9221  			break
  9222  		}
  9223  		x1 := x0.Args[2]
  9224  		if x1.Op != OpARM64MOVBstore || auxIntToInt32(x1.AuxInt) != i-2 || auxToSym(x1.Aux) != s {
  9225  			break
  9226  		}
  9227  		_ = x1.Args[2]
  9228  		if ptr != x1.Args[0] {
  9229  			break
  9230  		}
  9231  		x1_1 := x1.Args[1]
  9232  		if x1_1.Op != OpARM64UBFX || auxIntToArm64BitField(x1_1.AuxInt) != armBFAuxInt(16, 16) || w != x1_1.Args[0] {
  9233  			break
  9234  		}
  9235  		x2 := x1.Args[2]
  9236  		if x2.Op != OpARM64MOVBstore || auxIntToInt32(x2.AuxInt) != i-3 || auxToSym(x2.Aux) != s {
  9237  			break
  9238  		}
  9239  		mem := x2.Args[2]
  9240  		if ptr != x2.Args[0] {
  9241  			break
  9242  		}
  9243  		x2_1 := x2.Args[1]
  9244  		if x2_1.Op != OpARM64UBFX || auxIntToArm64BitField(x2_1.AuxInt) != armBFAuxInt(24, 8) || w != x2_1.Args[0] || !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && clobber(x0, x1, x2)) {
  9245  			break
  9246  		}
  9247  		v.reset(OpARM64MOVWstore)
  9248  		v.AuxInt = int32ToAuxInt(i - 3)
  9249  		v.Aux = symToAux(s)
  9250  		v0 := b.NewValue0(x2.Pos, OpARM64REVW, typ.UInt32)
  9251  		v0.AddArg(w)
  9252  		v.AddArg3(ptr, v0, mem)
  9253  		return true
  9254  	}
  9255  	// match: (MOVBstore [3] {s} p w x0:(MOVBstore [2] {s} p (UBFX [armBFAuxInt(8, 24)] w) x1:(MOVBstore [1] {s} p1:(ADD ptr1 idx1) (UBFX [armBFAuxInt(16, 16)] w) x2:(MOVBstoreidx ptr0 idx0 (UBFX [armBFAuxInt(24, 8)] w) mem))))
  9256  	// cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0, x1, x2)
  9257  	// result: (MOVWstoreidx ptr0 idx0 (REVW <typ.UInt32> w) mem)
  9258  	for {
  9259  		if auxIntToInt32(v.AuxInt) != 3 {
  9260  			break
  9261  		}
  9262  		s := auxToSym(v.Aux)
  9263  		p := v_0
  9264  		w := v_1
  9265  		x0 := v_2
  9266  		if x0.Op != OpARM64MOVBstore || auxIntToInt32(x0.AuxInt) != 2 || auxToSym(x0.Aux) != s {
  9267  			break
  9268  		}
  9269  		_ = x0.Args[2]
  9270  		if p != x0.Args[0] {
  9271  			break
  9272  		}
  9273  		x0_1 := x0.Args[1]
  9274  		if x0_1.Op != OpARM64UBFX || auxIntToArm64BitField(x0_1.AuxInt) != armBFAuxInt(8, 24) || w != x0_1.Args[0] {
  9275  			break
  9276  		}
  9277  		x1 := x0.Args[2]
  9278  		if x1.Op != OpARM64MOVBstore || auxIntToInt32(x1.AuxInt) != 1 || auxToSym(x1.Aux) != s {
  9279  			break
  9280  		}
  9281  		_ = x1.Args[2]
  9282  		p1 := x1.Args[0]
  9283  		if p1.Op != OpARM64ADD {
  9284  			break
  9285  		}
  9286  		_ = p1.Args[1]
  9287  		p1_0 := p1.Args[0]
  9288  		p1_1 := p1.Args[1]
  9289  		for _i0 := 0; _i0 <= 1; _i0, p1_0, p1_1 = _i0+1, p1_1, p1_0 {
  9290  			ptr1 := p1_0
  9291  			idx1 := p1_1
  9292  			x1_1 := x1.Args[1]
  9293  			if x1_1.Op != OpARM64UBFX || auxIntToArm64BitField(x1_1.AuxInt) != armBFAuxInt(16, 16) || w != x1_1.Args[0] {
  9294  				continue
  9295  			}
  9296  			x2 := x1.Args[2]
  9297  			if x2.Op != OpARM64MOVBstoreidx {
  9298  				continue
  9299  			}
  9300  			mem := x2.Args[3]
  9301  			ptr0 := x2.Args[0]
  9302  			idx0 := x2.Args[1]
  9303  			x2_2 := x2.Args[2]
  9304  			if x2_2.Op != OpARM64UBFX || auxIntToArm64BitField(x2_2.AuxInt) != armBFAuxInt(24, 8) || w != x2_2.Args[0] || !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0, x1, x2)) {
  9305  				continue
  9306  			}
  9307  			v.reset(OpARM64MOVWstoreidx)
  9308  			v0 := b.NewValue0(x1.Pos, OpARM64REVW, typ.UInt32)
  9309  			v0.AddArg(w)
  9310  			v.AddArg4(ptr0, idx0, v0, mem)
  9311  			return true
  9312  		}
  9313  		break
  9314  	}
  9315  	// match: (MOVBstore [i] {s} ptr w x0:(MOVBstore [i-1] {s} ptr (SRLconst [8] (MOVDreg w)) x1:(MOVBstore [i-2] {s} ptr (SRLconst [16] (MOVDreg w)) x2:(MOVBstore [i-3] {s} ptr (SRLconst [24] (MOVDreg w)) mem))))
  9316  	// cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && clobber(x0, x1, x2)
  9317  	// result: (MOVWstore [i-3] {s} ptr (REVW <typ.UInt32> w) mem)
  9318  	for {
  9319  		i := auxIntToInt32(v.AuxInt)
  9320  		s := auxToSym(v.Aux)
  9321  		ptr := v_0
  9322  		w := v_1
  9323  		x0 := v_2
  9324  		if x0.Op != OpARM64MOVBstore || auxIntToInt32(x0.AuxInt) != i-1 || auxToSym(x0.Aux) != s {
  9325  			break
  9326  		}
  9327  		_ = x0.Args[2]
  9328  		if ptr != x0.Args[0] {
  9329  			break
  9330  		}
  9331  		x0_1 := x0.Args[1]
  9332  		if x0_1.Op != OpARM64SRLconst || auxIntToInt64(x0_1.AuxInt) != 8 {
  9333  			break
  9334  		}
  9335  		x0_1_0 := x0_1.Args[0]
  9336  		if x0_1_0.Op != OpARM64MOVDreg || w != x0_1_0.Args[0] {
  9337  			break
  9338  		}
  9339  		x1 := x0.Args[2]
  9340  		if x1.Op != OpARM64MOVBstore || auxIntToInt32(x1.AuxInt) != i-2 || auxToSym(x1.Aux) != s {
  9341  			break
  9342  		}
  9343  		_ = x1.Args[2]
  9344  		if ptr != x1.Args[0] {
  9345  			break
  9346  		}
  9347  		x1_1 := x1.Args[1]
  9348  		if x1_1.Op != OpARM64SRLconst || auxIntToInt64(x1_1.AuxInt) != 16 {
  9349  			break
  9350  		}
  9351  		x1_1_0 := x1_1.Args[0]
  9352  		if x1_1_0.Op != OpARM64MOVDreg || w != x1_1_0.Args[0] {
  9353  			break
  9354  		}
  9355  		x2 := x1.Args[2]
  9356  		if x2.Op != OpARM64MOVBstore || auxIntToInt32(x2.AuxInt) != i-3 || auxToSym(x2.Aux) != s {
  9357  			break
  9358  		}
  9359  		mem := x2.Args[2]
  9360  		if ptr != x2.Args[0] {
  9361  			break
  9362  		}
  9363  		x2_1 := x2.Args[1]
  9364  		if x2_1.Op != OpARM64SRLconst || auxIntToInt64(x2_1.AuxInt) != 24 {
  9365  			break
  9366  		}
  9367  		x2_1_0 := x2_1.Args[0]
  9368  		if x2_1_0.Op != OpARM64MOVDreg || w != x2_1_0.Args[0] || !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && clobber(x0, x1, x2)) {
  9369  			break
  9370  		}
  9371  		v.reset(OpARM64MOVWstore)
  9372  		v.AuxInt = int32ToAuxInt(i - 3)
  9373  		v.Aux = symToAux(s)
  9374  		v0 := b.NewValue0(x2.Pos, OpARM64REVW, typ.UInt32)
  9375  		v0.AddArg(w)
  9376  		v.AddArg3(ptr, v0, mem)
  9377  		return true
  9378  	}
  9379  	// match: (MOVBstore [3] {s} p w x0:(MOVBstore [2] {s} p (SRLconst [8] (MOVDreg w)) x1:(MOVBstore [1] {s} p1:(ADD ptr1 idx1) (SRLconst [16] (MOVDreg w)) x2:(MOVBstoreidx ptr0 idx0 (SRLconst [24] (MOVDreg w)) mem))))
  9380  	// cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0, x1, x2)
  9381  	// result: (MOVWstoreidx ptr0 idx0 (REVW <typ.UInt32> w) mem)
  9382  	for {
  9383  		if auxIntToInt32(v.AuxInt) != 3 {
  9384  			break
  9385  		}
  9386  		s := auxToSym(v.Aux)
  9387  		p := v_0
  9388  		w := v_1
  9389  		x0 := v_2
  9390  		if x0.Op != OpARM64MOVBstore || auxIntToInt32(x0.AuxInt) != 2 || auxToSym(x0.Aux) != s {
  9391  			break
  9392  		}
  9393  		_ = x0.Args[2]
  9394  		if p != x0.Args[0] {
  9395  			break
  9396  		}
  9397  		x0_1 := x0.Args[1]
  9398  		if x0_1.Op != OpARM64SRLconst || auxIntToInt64(x0_1.AuxInt) != 8 {
  9399  			break
  9400  		}
  9401  		x0_1_0 := x0_1.Args[0]
  9402  		if x0_1_0.Op != OpARM64MOVDreg || w != x0_1_0.Args[0] {
  9403  			break
  9404  		}
  9405  		x1 := x0.Args[2]
  9406  		if x1.Op != OpARM64MOVBstore || auxIntToInt32(x1.AuxInt) != 1 || auxToSym(x1.Aux) != s {
  9407  			break
  9408  		}
  9409  		_ = x1.Args[2]
  9410  		p1 := x1.Args[0]
  9411  		if p1.Op != OpARM64ADD {
  9412  			break
  9413  		}
  9414  		_ = p1.Args[1]
  9415  		p1_0 := p1.Args[0]
  9416  		p1_1 := p1.Args[1]
  9417  		for _i0 := 0; _i0 <= 1; _i0, p1_0, p1_1 = _i0+1, p1_1, p1_0 {
  9418  			ptr1 := p1_0
  9419  			idx1 := p1_1
  9420  			x1_1 := x1.Args[1]
  9421  			if x1_1.Op != OpARM64SRLconst || auxIntToInt64(x1_1.AuxInt) != 16 {
  9422  				continue
  9423  			}
  9424  			x1_1_0 := x1_1.Args[0]
  9425  			if x1_1_0.Op != OpARM64MOVDreg || w != x1_1_0.Args[0] {
  9426  				continue
  9427  			}
  9428  			x2 := x1.Args[2]
  9429  			if x2.Op != OpARM64MOVBstoreidx {
  9430  				continue
  9431  			}
  9432  			mem := x2.Args[3]
  9433  			ptr0 := x2.Args[0]
  9434  			idx0 := x2.Args[1]
  9435  			x2_2 := x2.Args[2]
  9436  			if x2_2.Op != OpARM64SRLconst || auxIntToInt64(x2_2.AuxInt) != 24 {
  9437  				continue
  9438  			}
  9439  			x2_2_0 := x2_2.Args[0]
  9440  			if x2_2_0.Op != OpARM64MOVDreg || w != x2_2_0.Args[0] || !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0, x1, x2)) {
  9441  				continue
  9442  			}
  9443  			v.reset(OpARM64MOVWstoreidx)
  9444  			v0 := b.NewValue0(x1.Pos, OpARM64REVW, typ.UInt32)
  9445  			v0.AddArg(w)
  9446  			v.AddArg4(ptr0, idx0, v0, mem)
  9447  			return true
  9448  		}
  9449  		break
  9450  	}
  9451  	// match: (MOVBstore [i] {s} ptr w x0:(MOVBstore [i-1] {s} ptr (SRLconst [8] w) x1:(MOVBstore [i-2] {s} ptr (SRLconst [16] w) x2:(MOVBstore [i-3] {s} ptr (SRLconst [24] w) mem))))
  9452  	// cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && clobber(x0, x1, x2)
  9453  	// result: (MOVWstore [i-3] {s} ptr (REVW <typ.UInt32> w) mem)
  9454  	for {
  9455  		i := auxIntToInt32(v.AuxInt)
  9456  		s := auxToSym(v.Aux)
  9457  		ptr := v_0
  9458  		w := v_1
  9459  		x0 := v_2
  9460  		if x0.Op != OpARM64MOVBstore || auxIntToInt32(x0.AuxInt) != i-1 || auxToSym(x0.Aux) != s {
  9461  			break
  9462  		}
  9463  		_ = x0.Args[2]
  9464  		if ptr != x0.Args[0] {
  9465  			break
  9466  		}
  9467  		x0_1 := x0.Args[1]
  9468  		if x0_1.Op != OpARM64SRLconst || auxIntToInt64(x0_1.AuxInt) != 8 || w != x0_1.Args[0] {
  9469  			break
  9470  		}
  9471  		x1 := x0.Args[2]
  9472  		if x1.Op != OpARM64MOVBstore || auxIntToInt32(x1.AuxInt) != i-2 || auxToSym(x1.Aux) != s {
  9473  			break
  9474  		}
  9475  		_ = x1.Args[2]
  9476  		if ptr != x1.Args[0] {
  9477  			break
  9478  		}
  9479  		x1_1 := x1.Args[1]
  9480  		if x1_1.Op != OpARM64SRLconst || auxIntToInt64(x1_1.AuxInt) != 16 || w != x1_1.Args[0] {
  9481  			break
  9482  		}
  9483  		x2 := x1.Args[2]
  9484  		if x2.Op != OpARM64MOVBstore || auxIntToInt32(x2.AuxInt) != i-3 || auxToSym(x2.Aux) != s {
  9485  			break
  9486  		}
  9487  		mem := x2.Args[2]
  9488  		if ptr != x2.Args[0] {
  9489  			break
  9490  		}
  9491  		x2_1 := x2.Args[1]
  9492  		if x2_1.Op != OpARM64SRLconst || auxIntToInt64(x2_1.AuxInt) != 24 || w != x2_1.Args[0] || !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && clobber(x0, x1, x2)) {
  9493  			break
  9494  		}
  9495  		v.reset(OpARM64MOVWstore)
  9496  		v.AuxInt = int32ToAuxInt(i - 3)
  9497  		v.Aux = symToAux(s)
  9498  		v0 := b.NewValue0(x2.Pos, OpARM64REVW, typ.UInt32)
  9499  		v0.AddArg(w)
  9500  		v.AddArg3(ptr, v0, mem)
  9501  		return true
  9502  	}
  9503  	// match: (MOVBstore [3] {s} p w x0:(MOVBstore [2] {s} p (SRLconst [8] w) x1:(MOVBstore [1] {s} p1:(ADD ptr1 idx1) (SRLconst [16] w) x2:(MOVBstoreidx ptr0 idx0 (SRLconst [24] w) mem))))
  9504  	// cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0, x1, x2)
  9505  	// result: (MOVWstoreidx ptr0 idx0 (REVW <typ.UInt32> w) mem)
  9506  	for {
  9507  		if auxIntToInt32(v.AuxInt) != 3 {
  9508  			break
  9509  		}
  9510  		s := auxToSym(v.Aux)
  9511  		p := v_0
  9512  		w := v_1
  9513  		x0 := v_2
  9514  		if x0.Op != OpARM64MOVBstore || auxIntToInt32(x0.AuxInt) != 2 || auxToSym(x0.Aux) != s {
  9515  			break
  9516  		}
  9517  		_ = x0.Args[2]
  9518  		if p != x0.Args[0] {
  9519  			break
  9520  		}
  9521  		x0_1 := x0.Args[1]
  9522  		if x0_1.Op != OpARM64SRLconst || auxIntToInt64(x0_1.AuxInt) != 8 || w != x0_1.Args[0] {
  9523  			break
  9524  		}
  9525  		x1 := x0.Args[2]
  9526  		if x1.Op != OpARM64MOVBstore || auxIntToInt32(x1.AuxInt) != 1 || auxToSym(x1.Aux) != s {
  9527  			break
  9528  		}
  9529  		_ = x1.Args[2]
  9530  		p1 := x1.Args[0]
  9531  		if p1.Op != OpARM64ADD {
  9532  			break
  9533  		}
  9534  		_ = p1.Args[1]
  9535  		p1_0 := p1.Args[0]
  9536  		p1_1 := p1.Args[1]
  9537  		for _i0 := 0; _i0 <= 1; _i0, p1_0, p1_1 = _i0+1, p1_1, p1_0 {
  9538  			ptr1 := p1_0
  9539  			idx1 := p1_1
  9540  			x1_1 := x1.Args[1]
  9541  			if x1_1.Op != OpARM64SRLconst || auxIntToInt64(x1_1.AuxInt) != 16 || w != x1_1.Args[0] {
  9542  				continue
  9543  			}
  9544  			x2 := x1.Args[2]
  9545  			if x2.Op != OpARM64MOVBstoreidx {
  9546  				continue
  9547  			}
  9548  			mem := x2.Args[3]
  9549  			ptr0 := x2.Args[0]
  9550  			idx0 := x2.Args[1]
  9551  			x2_2 := x2.Args[2]
  9552  			if x2_2.Op != OpARM64SRLconst || auxIntToInt64(x2_2.AuxInt) != 24 || w != x2_2.Args[0] || !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0, x1, x2)) {
  9553  				continue
  9554  			}
  9555  			v.reset(OpARM64MOVWstoreidx)
  9556  			v0 := b.NewValue0(x1.Pos, OpARM64REVW, typ.UInt32)
  9557  			v0.AddArg(w)
  9558  			v.AddArg4(ptr0, idx0, v0, mem)
  9559  			return true
  9560  		}
  9561  		break
  9562  	}
  9563  	// match: (MOVBstore [i] {s} ptr w x:(MOVBstore [i-1] {s} ptr (SRLconst [8] w) mem))
  9564  	// cond: x.Uses == 1 && clobber(x)
  9565  	// result: (MOVHstore [i-1] {s} ptr (REV16W <typ.UInt16> w) mem)
  9566  	for {
  9567  		i := auxIntToInt32(v.AuxInt)
  9568  		s := auxToSym(v.Aux)
  9569  		ptr := v_0
  9570  		w := v_1
  9571  		x := v_2
  9572  		if x.Op != OpARM64MOVBstore || auxIntToInt32(x.AuxInt) != i-1 || auxToSym(x.Aux) != s {
  9573  			break
  9574  		}
  9575  		mem := x.Args[2]
  9576  		if ptr != x.Args[0] {
  9577  			break
  9578  		}
  9579  		x_1 := x.Args[1]
  9580  		if x_1.Op != OpARM64SRLconst || auxIntToInt64(x_1.AuxInt) != 8 || w != x_1.Args[0] || !(x.Uses == 1 && clobber(x)) {
  9581  			break
  9582  		}
  9583  		v.reset(OpARM64MOVHstore)
  9584  		v.AuxInt = int32ToAuxInt(i - 1)
  9585  		v.Aux = symToAux(s)
  9586  		v0 := b.NewValue0(x.Pos, OpARM64REV16W, typ.UInt16)
  9587  		v0.AddArg(w)
  9588  		v.AddArg3(ptr, v0, mem)
  9589  		return true
  9590  	}
  9591  	// match: (MOVBstore [1] {s} (ADD ptr1 idx1) w x:(MOVBstoreidx ptr0 idx0 (SRLconst [8] w) mem))
  9592  	// cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)
  9593  	// result: (MOVHstoreidx ptr0 idx0 (REV16W <typ.UInt16> w) mem)
  9594  	for {
  9595  		if auxIntToInt32(v.AuxInt) != 1 {
  9596  			break
  9597  		}
  9598  		s := auxToSym(v.Aux)
  9599  		if v_0.Op != OpARM64ADD {
  9600  			break
  9601  		}
  9602  		_ = v_0.Args[1]
  9603  		v_0_0 := v_0.Args[0]
  9604  		v_0_1 := v_0.Args[1]
  9605  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
  9606  			ptr1 := v_0_0
  9607  			idx1 := v_0_1
  9608  			w := v_1
  9609  			x := v_2
  9610  			if x.Op != OpARM64MOVBstoreidx {
  9611  				continue
  9612  			}
  9613  			mem := x.Args[3]
  9614  			ptr0 := x.Args[0]
  9615  			idx0 := x.Args[1]
  9616  			x_2 := x.Args[2]
  9617  			if x_2.Op != OpARM64SRLconst || auxIntToInt64(x_2.AuxInt) != 8 || w != x_2.Args[0] || !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
  9618  				continue
  9619  			}
  9620  			v.reset(OpARM64MOVHstoreidx)
  9621  			v0 := b.NewValue0(v.Pos, OpARM64REV16W, typ.UInt16)
  9622  			v0.AddArg(w)
  9623  			v.AddArg4(ptr0, idx0, v0, mem)
  9624  			return true
  9625  		}
  9626  		break
  9627  	}
  9628  	// match: (MOVBstore [i] {s} ptr w x:(MOVBstore [i-1] {s} ptr (UBFX [armBFAuxInt(8, 8)] w) mem))
  9629  	// cond: x.Uses == 1 && clobber(x)
  9630  	// result: (MOVHstore [i-1] {s} ptr (REV16W <typ.UInt16> w) mem)
  9631  	for {
  9632  		i := auxIntToInt32(v.AuxInt)
  9633  		s := auxToSym(v.Aux)
  9634  		ptr := v_0
  9635  		w := v_1
  9636  		x := v_2
  9637  		if x.Op != OpARM64MOVBstore || auxIntToInt32(x.AuxInt) != i-1 || auxToSym(x.Aux) != s {
  9638  			break
  9639  		}
  9640  		mem := x.Args[2]
  9641  		if ptr != x.Args[0] {
  9642  			break
  9643  		}
  9644  		x_1 := x.Args[1]
  9645  		if x_1.Op != OpARM64UBFX || auxIntToArm64BitField(x_1.AuxInt) != armBFAuxInt(8, 8) || w != x_1.Args[0] || !(x.Uses == 1 && clobber(x)) {
  9646  			break
  9647  		}
  9648  		v.reset(OpARM64MOVHstore)
  9649  		v.AuxInt = int32ToAuxInt(i - 1)
  9650  		v.Aux = symToAux(s)
  9651  		v0 := b.NewValue0(x.Pos, OpARM64REV16W, typ.UInt16)
  9652  		v0.AddArg(w)
  9653  		v.AddArg3(ptr, v0, mem)
  9654  		return true
  9655  	}
  9656  	// match: (MOVBstore [1] {s} (ADD ptr1 idx1) w x:(MOVBstoreidx ptr0 idx0 (UBFX [armBFAuxInt(8, 8)] w) mem))
  9657  	// cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)
  9658  	// result: (MOVHstoreidx ptr0 idx0 (REV16W <typ.UInt16> w) mem)
  9659  	for {
  9660  		if auxIntToInt32(v.AuxInt) != 1 {
  9661  			break
  9662  		}
  9663  		s := auxToSym(v.Aux)
  9664  		if v_0.Op != OpARM64ADD {
  9665  			break
  9666  		}
  9667  		_ = v_0.Args[1]
  9668  		v_0_0 := v_0.Args[0]
  9669  		v_0_1 := v_0.Args[1]
  9670  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
  9671  			ptr1 := v_0_0
  9672  			idx1 := v_0_1
  9673  			w := v_1
  9674  			x := v_2
  9675  			if x.Op != OpARM64MOVBstoreidx {
  9676  				continue
  9677  			}
  9678  			mem := x.Args[3]
  9679  			ptr0 := x.Args[0]
  9680  			idx0 := x.Args[1]
  9681  			x_2 := x.Args[2]
  9682  			if x_2.Op != OpARM64UBFX || auxIntToArm64BitField(x_2.AuxInt) != armBFAuxInt(8, 8) || w != x_2.Args[0] || !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
  9683  				continue
  9684  			}
  9685  			v.reset(OpARM64MOVHstoreidx)
  9686  			v0 := b.NewValue0(v.Pos, OpARM64REV16W, typ.UInt16)
  9687  			v0.AddArg(w)
  9688  			v.AddArg4(ptr0, idx0, v0, mem)
  9689  			return true
  9690  		}
  9691  		break
  9692  	}
  9693  	// match: (MOVBstore [i] {s} ptr w x:(MOVBstore [i-1] {s} ptr (SRLconst [8] (MOVDreg w)) mem))
  9694  	// cond: x.Uses == 1 && clobber(x)
  9695  	// result: (MOVHstore [i-1] {s} ptr (REV16W <typ.UInt16> w) mem)
  9696  	for {
  9697  		i := auxIntToInt32(v.AuxInt)
  9698  		s := auxToSym(v.Aux)
  9699  		ptr := v_0
  9700  		w := v_1
  9701  		x := v_2
  9702  		if x.Op != OpARM64MOVBstore || auxIntToInt32(x.AuxInt) != i-1 || auxToSym(x.Aux) != s {
  9703  			break
  9704  		}
  9705  		mem := x.Args[2]
  9706  		if ptr != x.Args[0] {
  9707  			break
  9708  		}
  9709  		x_1 := x.Args[1]
  9710  		if x_1.Op != OpARM64SRLconst || auxIntToInt64(x_1.AuxInt) != 8 {
  9711  			break
  9712  		}
  9713  		x_1_0 := x_1.Args[0]
  9714  		if x_1_0.Op != OpARM64MOVDreg || w != x_1_0.Args[0] || !(x.Uses == 1 && clobber(x)) {
  9715  			break
  9716  		}
  9717  		v.reset(OpARM64MOVHstore)
  9718  		v.AuxInt = int32ToAuxInt(i - 1)
  9719  		v.Aux = symToAux(s)
  9720  		v0 := b.NewValue0(x.Pos, OpARM64REV16W, typ.UInt16)
  9721  		v0.AddArg(w)
  9722  		v.AddArg3(ptr, v0, mem)
  9723  		return true
  9724  	}
  9725  	// match: (MOVBstore [1] {s} (ADD ptr1 idx1) w x:(MOVBstoreidx ptr0 idx0 (SRLconst [8] (MOVDreg w)) mem))
  9726  	// cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)
  9727  	// result: (MOVHstoreidx ptr0 idx0 (REV16W <typ.UInt16> w) mem)
  9728  	for {
  9729  		if auxIntToInt32(v.AuxInt) != 1 {
  9730  			break
  9731  		}
  9732  		s := auxToSym(v.Aux)
  9733  		if v_0.Op != OpARM64ADD {
  9734  			break
  9735  		}
  9736  		_ = v_0.Args[1]
  9737  		v_0_0 := v_0.Args[0]
  9738  		v_0_1 := v_0.Args[1]
  9739  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
  9740  			ptr1 := v_0_0
  9741  			idx1 := v_0_1
  9742  			w := v_1
  9743  			x := v_2
  9744  			if x.Op != OpARM64MOVBstoreidx {
  9745  				continue
  9746  			}
  9747  			mem := x.Args[3]
  9748  			ptr0 := x.Args[0]
  9749  			idx0 := x.Args[1]
  9750  			x_2 := x.Args[2]
  9751  			if x_2.Op != OpARM64SRLconst || auxIntToInt64(x_2.AuxInt) != 8 {
  9752  				continue
  9753  			}
  9754  			x_2_0 := x_2.Args[0]
  9755  			if x_2_0.Op != OpARM64MOVDreg || w != x_2_0.Args[0] || !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
  9756  				continue
  9757  			}
  9758  			v.reset(OpARM64MOVHstoreidx)
  9759  			v0 := b.NewValue0(v.Pos, OpARM64REV16W, typ.UInt16)
  9760  			v0.AddArg(w)
  9761  			v.AddArg4(ptr0, idx0, v0, mem)
  9762  			return true
  9763  		}
  9764  		break
  9765  	}
  9766  	// match: (MOVBstore [i] {s} ptr w x:(MOVBstore [i-1] {s} ptr (UBFX [armBFAuxInt(8, 24)] w) mem))
  9767  	// cond: x.Uses == 1 && clobber(x)
  9768  	// result: (MOVHstore [i-1] {s} ptr (REV16W <typ.UInt16> w) mem)
  9769  	for {
  9770  		i := auxIntToInt32(v.AuxInt)
  9771  		s := auxToSym(v.Aux)
  9772  		ptr := v_0
  9773  		w := v_1
  9774  		x := v_2
  9775  		if x.Op != OpARM64MOVBstore || auxIntToInt32(x.AuxInt) != i-1 || auxToSym(x.Aux) != s {
  9776  			break
  9777  		}
  9778  		mem := x.Args[2]
  9779  		if ptr != x.Args[0] {
  9780  			break
  9781  		}
  9782  		x_1 := x.Args[1]
  9783  		if x_1.Op != OpARM64UBFX || auxIntToArm64BitField(x_1.AuxInt) != armBFAuxInt(8, 24) || w != x_1.Args[0] || !(x.Uses == 1 && clobber(x)) {
  9784  			break
  9785  		}
  9786  		v.reset(OpARM64MOVHstore)
  9787  		v.AuxInt = int32ToAuxInt(i - 1)
  9788  		v.Aux = symToAux(s)
  9789  		v0 := b.NewValue0(x.Pos, OpARM64REV16W, typ.UInt16)
  9790  		v0.AddArg(w)
  9791  		v.AddArg3(ptr, v0, mem)
  9792  		return true
  9793  	}
  9794  	// match: (MOVBstore [1] {s} (ADD ptr1 idx1) w x:(MOVBstoreidx ptr0 idx0 (UBFX [armBFAuxInt(8, 24)] w) mem))
  9795  	// cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)
  9796  	// result: (MOVHstoreidx ptr0 idx0 (REV16W <typ.UInt16> w) mem)
  9797  	for {
  9798  		if auxIntToInt32(v.AuxInt) != 1 {
  9799  			break
  9800  		}
  9801  		s := auxToSym(v.Aux)
  9802  		if v_0.Op != OpARM64ADD {
  9803  			break
  9804  		}
  9805  		_ = v_0.Args[1]
  9806  		v_0_0 := v_0.Args[0]
  9807  		v_0_1 := v_0.Args[1]
  9808  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
  9809  			ptr1 := v_0_0
  9810  			idx1 := v_0_1
  9811  			w := v_1
  9812  			x := v_2
  9813  			if x.Op != OpARM64MOVBstoreidx {
  9814  				continue
  9815  			}
  9816  			mem := x.Args[3]
  9817  			ptr0 := x.Args[0]
  9818  			idx0 := x.Args[1]
  9819  			x_2 := x.Args[2]
  9820  			if x_2.Op != OpARM64UBFX || auxIntToArm64BitField(x_2.AuxInt) != armBFAuxInt(8, 24) || w != x_2.Args[0] || !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
  9821  				continue
  9822  			}
  9823  			v.reset(OpARM64MOVHstoreidx)
  9824  			v0 := b.NewValue0(v.Pos, OpARM64REV16W, typ.UInt16)
  9825  			v0.AddArg(w)
  9826  			v.AddArg4(ptr0, idx0, v0, mem)
  9827  			return true
  9828  		}
  9829  		break
  9830  	}
  9831  	return false
  9832  }
  9833  func rewriteValueARM64_OpARM64MOVBstoreidx(v *Value) bool {
  9834  	v_3 := v.Args[3]
  9835  	v_2 := v.Args[2]
  9836  	v_1 := v.Args[1]
  9837  	v_0 := v.Args[0]
  9838  	b := v.Block
  9839  	typ := &b.Func.Config.Types
  9840  	// match: (MOVBstoreidx ptr (MOVDconst [c]) val mem)
  9841  	// cond: is32Bit(c)
  9842  	// result: (MOVBstore [int32(c)] ptr val mem)
  9843  	for {
  9844  		ptr := v_0
  9845  		if v_1.Op != OpARM64MOVDconst {
  9846  			break
  9847  		}
  9848  		c := auxIntToInt64(v_1.AuxInt)
  9849  		val := v_2
  9850  		mem := v_3
  9851  		if !(is32Bit(c)) {
  9852  			break
  9853  		}
  9854  		v.reset(OpARM64MOVBstore)
  9855  		v.AuxInt = int32ToAuxInt(int32(c))
  9856  		v.AddArg3(ptr, val, mem)
  9857  		return true
  9858  	}
  9859  	// match: (MOVBstoreidx (MOVDconst [c]) idx val mem)
  9860  	// cond: is32Bit(c)
  9861  	// result: (MOVBstore [int32(c)] idx val mem)
  9862  	for {
  9863  		if v_0.Op != OpARM64MOVDconst {
  9864  			break
  9865  		}
  9866  		c := auxIntToInt64(v_0.AuxInt)
  9867  		idx := v_1
  9868  		val := v_2
  9869  		mem := v_3
  9870  		if !(is32Bit(c)) {
  9871  			break
  9872  		}
  9873  		v.reset(OpARM64MOVBstore)
  9874  		v.AuxInt = int32ToAuxInt(int32(c))
  9875  		v.AddArg3(idx, val, mem)
  9876  		return true
  9877  	}
  9878  	// match: (MOVBstoreidx ptr idx (MOVDconst [0]) mem)
  9879  	// result: (MOVBstorezeroidx ptr idx mem)
  9880  	for {
  9881  		ptr := v_0
  9882  		idx := v_1
  9883  		if v_2.Op != OpARM64MOVDconst || auxIntToInt64(v_2.AuxInt) != 0 {
  9884  			break
  9885  		}
  9886  		mem := v_3
  9887  		v.reset(OpARM64MOVBstorezeroidx)
  9888  		v.AddArg3(ptr, idx, mem)
  9889  		return true
  9890  	}
  9891  	// match: (MOVBstoreidx ptr idx (MOVBreg x) mem)
  9892  	// result: (MOVBstoreidx ptr idx x mem)
  9893  	for {
  9894  		ptr := v_0
  9895  		idx := v_1
  9896  		if v_2.Op != OpARM64MOVBreg {
  9897  			break
  9898  		}
  9899  		x := v_2.Args[0]
  9900  		mem := v_3
  9901  		v.reset(OpARM64MOVBstoreidx)
  9902  		v.AddArg4(ptr, idx, x, mem)
  9903  		return true
  9904  	}
  9905  	// match: (MOVBstoreidx ptr idx (MOVBUreg x) mem)
  9906  	// result: (MOVBstoreidx ptr idx x mem)
  9907  	for {
  9908  		ptr := v_0
  9909  		idx := v_1
  9910  		if v_2.Op != OpARM64MOVBUreg {
  9911  			break
  9912  		}
  9913  		x := v_2.Args[0]
  9914  		mem := v_3
  9915  		v.reset(OpARM64MOVBstoreidx)
  9916  		v.AddArg4(ptr, idx, x, mem)
  9917  		return true
  9918  	}
  9919  	// match: (MOVBstoreidx ptr idx (MOVHreg x) mem)
  9920  	// result: (MOVBstoreidx ptr idx x mem)
  9921  	for {
  9922  		ptr := v_0
  9923  		idx := v_1
  9924  		if v_2.Op != OpARM64MOVHreg {
  9925  			break
  9926  		}
  9927  		x := v_2.Args[0]
  9928  		mem := v_3
  9929  		v.reset(OpARM64MOVBstoreidx)
  9930  		v.AddArg4(ptr, idx, x, mem)
  9931  		return true
  9932  	}
  9933  	// match: (MOVBstoreidx ptr idx (MOVHUreg x) mem)
  9934  	// result: (MOVBstoreidx ptr idx x mem)
  9935  	for {
  9936  		ptr := v_0
  9937  		idx := v_1
  9938  		if v_2.Op != OpARM64MOVHUreg {
  9939  			break
  9940  		}
  9941  		x := v_2.Args[0]
  9942  		mem := v_3
  9943  		v.reset(OpARM64MOVBstoreidx)
  9944  		v.AddArg4(ptr, idx, x, mem)
  9945  		return true
  9946  	}
  9947  	// match: (MOVBstoreidx ptr idx (MOVWreg x) mem)
  9948  	// result: (MOVBstoreidx ptr idx x mem)
  9949  	for {
  9950  		ptr := v_0
  9951  		idx := v_1
  9952  		if v_2.Op != OpARM64MOVWreg {
  9953  			break
  9954  		}
  9955  		x := v_2.Args[0]
  9956  		mem := v_3
  9957  		v.reset(OpARM64MOVBstoreidx)
  9958  		v.AddArg4(ptr, idx, x, mem)
  9959  		return true
  9960  	}
  9961  	// match: (MOVBstoreidx ptr idx (MOVWUreg x) mem)
  9962  	// result: (MOVBstoreidx ptr idx x mem)
  9963  	for {
  9964  		ptr := v_0
  9965  		idx := v_1
  9966  		if v_2.Op != OpARM64MOVWUreg {
  9967  			break
  9968  		}
  9969  		x := v_2.Args[0]
  9970  		mem := v_3
  9971  		v.reset(OpARM64MOVBstoreidx)
  9972  		v.AddArg4(ptr, idx, x, mem)
  9973  		return true
  9974  	}
  9975  	// match: (MOVBstoreidx ptr (ADDconst [1] idx) (SRLconst [8] w) x:(MOVBstoreidx ptr idx w mem))
  9976  	// cond: x.Uses == 1 && clobber(x)
  9977  	// result: (MOVHstoreidx ptr idx w mem)
  9978  	for {
  9979  		ptr := v_0
  9980  		if v_1.Op != OpARM64ADDconst || auxIntToInt64(v_1.AuxInt) != 1 {
  9981  			break
  9982  		}
  9983  		idx := v_1.Args[0]
  9984  		if v_2.Op != OpARM64SRLconst || auxIntToInt64(v_2.AuxInt) != 8 {
  9985  			break
  9986  		}
  9987  		w := v_2.Args[0]
  9988  		x := v_3
  9989  		if x.Op != OpARM64MOVBstoreidx {
  9990  			break
  9991  		}
  9992  		mem := x.Args[3]
  9993  		if ptr != x.Args[0] || idx != x.Args[1] || w != x.Args[2] || !(x.Uses == 1 && clobber(x)) {
  9994  			break
  9995  		}
  9996  		v.reset(OpARM64MOVHstoreidx)
  9997  		v.AddArg4(ptr, idx, w, mem)
  9998  		return true
  9999  	}
 10000  	// match: (MOVBstoreidx ptr (ADDconst [3] idx) w x0:(MOVBstoreidx ptr (ADDconst [2] idx) (UBFX [armBFAuxInt(8, 24)] w) x1:(MOVBstoreidx ptr (ADDconst [1] idx) (UBFX [armBFAuxInt(16, 16)] w) x2:(MOVBstoreidx ptr idx (UBFX [armBFAuxInt(24, 8)] w) mem))))
 10001  	// cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && clobber(x0, x1, x2)
 10002  	// result: (MOVWstoreidx ptr idx (REVW <typ.UInt32> w) mem)
 10003  	for {
 10004  		ptr := v_0
 10005  		if v_1.Op != OpARM64ADDconst || auxIntToInt64(v_1.AuxInt) != 3 {
 10006  			break
 10007  		}
 10008  		idx := v_1.Args[0]
 10009  		w := v_2
 10010  		x0 := v_3
 10011  		if x0.Op != OpARM64MOVBstoreidx {
 10012  			break
 10013  		}
 10014  		_ = x0.Args[3]
 10015  		if ptr != x0.Args[0] {
 10016  			break
 10017  		}
 10018  		x0_1 := x0.Args[1]
 10019  		if x0_1.Op != OpARM64ADDconst || auxIntToInt64(x0_1.AuxInt) != 2 || idx != x0_1.Args[0] {
 10020  			break
 10021  		}
 10022  		x0_2 := x0.Args[2]
 10023  		if x0_2.Op != OpARM64UBFX || auxIntToArm64BitField(x0_2.AuxInt) != armBFAuxInt(8, 24) || w != x0_2.Args[0] {
 10024  			break
 10025  		}
 10026  		x1 := x0.Args[3]
 10027  		if x1.Op != OpARM64MOVBstoreidx {
 10028  			break
 10029  		}
 10030  		_ = x1.Args[3]
 10031  		if ptr != x1.Args[0] {
 10032  			break
 10033  		}
 10034  		x1_1 := x1.Args[1]
 10035  		if x1_1.Op != OpARM64ADDconst || auxIntToInt64(x1_1.AuxInt) != 1 || idx != x1_1.Args[0] {
 10036  			break
 10037  		}
 10038  		x1_2 := x1.Args[2]
 10039  		if x1_2.Op != OpARM64UBFX || auxIntToArm64BitField(x1_2.AuxInt) != armBFAuxInt(16, 16) || w != x1_2.Args[0] {
 10040  			break
 10041  		}
 10042  		x2 := x1.Args[3]
 10043  		if x2.Op != OpARM64MOVBstoreidx {
 10044  			break
 10045  		}
 10046  		mem := x2.Args[3]
 10047  		if ptr != x2.Args[0] || idx != x2.Args[1] {
 10048  			break
 10049  		}
 10050  		x2_2 := x2.Args[2]
 10051  		if x2_2.Op != OpARM64UBFX || auxIntToArm64BitField(x2_2.AuxInt) != armBFAuxInt(24, 8) || w != x2_2.Args[0] || !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && clobber(x0, x1, x2)) {
 10052  			break
 10053  		}
 10054  		v.reset(OpARM64MOVWstoreidx)
 10055  		v0 := b.NewValue0(v.Pos, OpARM64REVW, typ.UInt32)
 10056  		v0.AddArg(w)
 10057  		v.AddArg4(ptr, idx, v0, mem)
 10058  		return true
 10059  	}
 10060  	// match: (MOVBstoreidx ptr idx w x0:(MOVBstoreidx ptr (ADDconst [1] idx) (UBFX [armBFAuxInt(8, 24)] w) x1:(MOVBstoreidx ptr (ADDconst [2] idx) (UBFX [armBFAuxInt(16, 16)] w) x2:(MOVBstoreidx ptr (ADDconst [3] idx) (UBFX [armBFAuxInt(24, 8)] w) mem))))
 10061  	// cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && clobber(x0, x1, x2)
 10062  	// result: (MOVWstoreidx ptr idx w mem)
 10063  	for {
 10064  		ptr := v_0
 10065  		idx := v_1
 10066  		w := v_2
 10067  		x0 := v_3
 10068  		if x0.Op != OpARM64MOVBstoreidx {
 10069  			break
 10070  		}
 10071  		_ = x0.Args[3]
 10072  		if ptr != x0.Args[0] {
 10073  			break
 10074  		}
 10075  		x0_1 := x0.Args[1]
 10076  		if x0_1.Op != OpARM64ADDconst || auxIntToInt64(x0_1.AuxInt) != 1 || idx != x0_1.Args[0] {
 10077  			break
 10078  		}
 10079  		x0_2 := x0.Args[2]
 10080  		if x0_2.Op != OpARM64UBFX || auxIntToArm64BitField(x0_2.AuxInt) != armBFAuxInt(8, 24) || w != x0_2.Args[0] {
 10081  			break
 10082  		}
 10083  		x1 := x0.Args[3]
 10084  		if x1.Op != OpARM64MOVBstoreidx {
 10085  			break
 10086  		}
 10087  		_ = x1.Args[3]
 10088  		if ptr != x1.Args[0] {
 10089  			break
 10090  		}
 10091  		x1_1 := x1.Args[1]
 10092  		if x1_1.Op != OpARM64ADDconst || auxIntToInt64(x1_1.AuxInt) != 2 || idx != x1_1.Args[0] {
 10093  			break
 10094  		}
 10095  		x1_2 := x1.Args[2]
 10096  		if x1_2.Op != OpARM64UBFX || auxIntToArm64BitField(x1_2.AuxInt) != armBFAuxInt(16, 16) || w != x1_2.Args[0] {
 10097  			break
 10098  		}
 10099  		x2 := x1.Args[3]
 10100  		if x2.Op != OpARM64MOVBstoreidx {
 10101  			break
 10102  		}
 10103  		mem := x2.Args[3]
 10104  		if ptr != x2.Args[0] {
 10105  			break
 10106  		}
 10107  		x2_1 := x2.Args[1]
 10108  		if x2_1.Op != OpARM64ADDconst || auxIntToInt64(x2_1.AuxInt) != 3 || idx != x2_1.Args[0] {
 10109  			break
 10110  		}
 10111  		x2_2 := x2.Args[2]
 10112  		if x2_2.Op != OpARM64UBFX || auxIntToArm64BitField(x2_2.AuxInt) != armBFAuxInt(24, 8) || w != x2_2.Args[0] || !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && clobber(x0, x1, x2)) {
 10113  			break
 10114  		}
 10115  		v.reset(OpARM64MOVWstoreidx)
 10116  		v.AddArg4(ptr, idx, w, mem)
 10117  		return true
 10118  	}
 10119  	// match: (MOVBstoreidx ptr (ADDconst [1] idx) w x:(MOVBstoreidx ptr idx (UBFX [armBFAuxInt(8, 8)] w) mem))
 10120  	// cond: x.Uses == 1 && clobber(x)
 10121  	// result: (MOVHstoreidx ptr idx (REV16W <typ.UInt16> w) mem)
 10122  	for {
 10123  		ptr := v_0
 10124  		if v_1.Op != OpARM64ADDconst || auxIntToInt64(v_1.AuxInt) != 1 {
 10125  			break
 10126  		}
 10127  		idx := v_1.Args[0]
 10128  		w := v_2
 10129  		x := v_3
 10130  		if x.Op != OpARM64MOVBstoreidx {
 10131  			break
 10132  		}
 10133  		mem := x.Args[3]
 10134  		if ptr != x.Args[0] || idx != x.Args[1] {
 10135  			break
 10136  		}
 10137  		x_2 := x.Args[2]
 10138  		if x_2.Op != OpARM64UBFX || auxIntToArm64BitField(x_2.AuxInt) != armBFAuxInt(8, 8) || w != x_2.Args[0] || !(x.Uses == 1 && clobber(x)) {
 10139  			break
 10140  		}
 10141  		v.reset(OpARM64MOVHstoreidx)
 10142  		v0 := b.NewValue0(v.Pos, OpARM64REV16W, typ.UInt16)
 10143  		v0.AddArg(w)
 10144  		v.AddArg4(ptr, idx, v0, mem)
 10145  		return true
 10146  	}
 10147  	// match: (MOVBstoreidx ptr idx w x:(MOVBstoreidx ptr (ADDconst [1] idx) (UBFX [armBFAuxInt(8, 8)] w) mem))
 10148  	// cond: x.Uses == 1 && clobber(x)
 10149  	// result: (MOVHstoreidx ptr idx w mem)
 10150  	for {
 10151  		ptr := v_0
 10152  		idx := v_1
 10153  		w := v_2
 10154  		x := v_3
 10155  		if x.Op != OpARM64MOVBstoreidx {
 10156  			break
 10157  		}
 10158  		mem := x.Args[3]
 10159  		if ptr != x.Args[0] {
 10160  			break
 10161  		}
 10162  		x_1 := x.Args[1]
 10163  		if x_1.Op != OpARM64ADDconst || auxIntToInt64(x_1.AuxInt) != 1 || idx != x_1.Args[0] {
 10164  			break
 10165  		}
 10166  		x_2 := x.Args[2]
 10167  		if x_2.Op != OpARM64UBFX || auxIntToArm64BitField(x_2.AuxInt) != armBFAuxInt(8, 8) || w != x_2.Args[0] || !(x.Uses == 1 && clobber(x)) {
 10168  			break
 10169  		}
 10170  		v.reset(OpARM64MOVHstoreidx)
 10171  		v.AddArg4(ptr, idx, w, mem)
 10172  		return true
 10173  	}
 10174  	return false
 10175  }
// rewriteValueARM64_OpARM64MOVBstorezero applies the generated rewrite
// rules for the ARM64 MOVBstorezero op (store of a constant zero byte).
// Each for-loop below attempts exactly one rule: it matches the shape in
// the preceding // match: comment, checks the // cond: condition, and on
// success rewrites v in place to the // result: form and returns true.
// A failed check breaks out to try the next rule; returning false means
// no rule applied. Rule order is significant and must not be changed.
func rewriteValueARM64_OpARM64MOVBstorezero(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config // used by the Flag_shared conditions below
	// match: (MOVBstorezero [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVBstorezero [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVBstorezero)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVBstorezero [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVBstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVBstorezero)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVBstorezero [off] {sym} (ADD ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVBstorezeroidx ptr idx mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADD {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64MOVBstorezeroidx)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVBstorezero [i] {s} ptr0 x:(MOVBstorezero [j] {s} ptr1 mem))
	// cond: x.Uses == 1 && areAdjacentOffsets(int64(i),int64(j),1) && isSamePtr(ptr0, ptr1) && clobber(x)
	// result: (MOVHstorezero [int32(min(int64(i),int64(j)))] {s} ptr0 mem)
	for {
		i := auxIntToInt32(v.AuxInt)
		s := auxToSym(v.Aux)
		ptr0 := v_0
		x := v_1
		if x.Op != OpARM64MOVBstorezero {
			break
		}
		j := auxIntToInt32(x.AuxInt)
		if auxToSym(x.Aux) != s {
			break
		}
		mem := x.Args[1]
		ptr1 := x.Args[0]
		// clobber(x) has side effects, so it must remain the last condition.
		if !(x.Uses == 1 && areAdjacentOffsets(int64(i), int64(j), 1) && isSamePtr(ptr0, ptr1) && clobber(x)) {
			break
		}
		v.reset(OpARM64MOVHstorezero)
		v.AuxInt = int32ToAuxInt(int32(min(int64(i), int64(j))))
		v.Aux = symToAux(s)
		v.AddArg2(ptr0, mem)
		return true
	}
	// match: (MOVBstorezero [1] {s} (ADD ptr0 idx0) x:(MOVBstorezeroidx ptr1 idx1 mem))
	// cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)
	// result: (MOVHstorezeroidx ptr1 idx1 mem)
	for {
		if auxIntToInt32(v.AuxInt) != 1 {
			break
		}
		s := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADD {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		// ADD is commutative: the loop tries both operand orders.
		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
			ptr0 := v_0_0
			idx0 := v_0_1
			x := v_1
			if x.Op != OpARM64MOVBstorezeroidx {
				continue
			}
			mem := x.Args[2]
			ptr1 := x.Args[0]
			idx1 := x.Args[1]
			if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
				continue
			}
			v.reset(OpARM64MOVHstorezeroidx)
			v.AddArg3(ptr1, idx1, mem)
			return true
		}
		break
	}
	return false
}
 10304  func rewriteValueARM64_OpARM64MOVBstorezeroidx(v *Value) bool {
 10305  	v_2 := v.Args[2]
 10306  	v_1 := v.Args[1]
 10307  	v_0 := v.Args[0]
 10308  	// match: (MOVBstorezeroidx ptr (MOVDconst [c]) mem)
 10309  	// cond: is32Bit(c)
 10310  	// result: (MOVBstorezero [int32(c)] ptr mem)
 10311  	for {
 10312  		ptr := v_0
 10313  		if v_1.Op != OpARM64MOVDconst {
 10314  			break
 10315  		}
 10316  		c := auxIntToInt64(v_1.AuxInt)
 10317  		mem := v_2
 10318  		if !(is32Bit(c)) {
 10319  			break
 10320  		}
 10321  		v.reset(OpARM64MOVBstorezero)
 10322  		v.AuxInt = int32ToAuxInt(int32(c))
 10323  		v.AddArg2(ptr, mem)
 10324  		return true
 10325  	}
 10326  	// match: (MOVBstorezeroidx (MOVDconst [c]) idx mem)
 10327  	// cond: is32Bit(c)
 10328  	// result: (MOVBstorezero [int32(c)] idx mem)
 10329  	for {
 10330  		if v_0.Op != OpARM64MOVDconst {
 10331  			break
 10332  		}
 10333  		c := auxIntToInt64(v_0.AuxInt)
 10334  		idx := v_1
 10335  		mem := v_2
 10336  		if !(is32Bit(c)) {
 10337  			break
 10338  		}
 10339  		v.reset(OpARM64MOVBstorezero)
 10340  		v.AuxInt = int32ToAuxInt(int32(c))
 10341  		v.AddArg2(idx, mem)
 10342  		return true
 10343  	}
 10344  	// match: (MOVBstorezeroidx ptr (ADDconst [1] idx) x:(MOVBstorezeroidx ptr idx mem))
 10345  	// cond: x.Uses == 1 && clobber(x)
 10346  	// result: (MOVHstorezeroidx ptr idx mem)
 10347  	for {
 10348  		ptr := v_0
 10349  		if v_1.Op != OpARM64ADDconst || auxIntToInt64(v_1.AuxInt) != 1 {
 10350  			break
 10351  		}
 10352  		idx := v_1.Args[0]
 10353  		x := v_2
 10354  		if x.Op != OpARM64MOVBstorezeroidx {
 10355  			break
 10356  		}
 10357  		mem := x.Args[2]
 10358  		if ptr != x.Args[0] || idx != x.Args[1] || !(x.Uses == 1 && clobber(x)) {
 10359  			break
 10360  		}
 10361  		v.reset(OpARM64MOVHstorezeroidx)
 10362  		v.AddArg3(ptr, idx, mem)
 10363  		return true
 10364  	}
 10365  	return false
 10366  }
// rewriteValueARM64_OpARM64MOVDload applies the generated rewrite rules
// for the ARM64 MOVDload op (64-bit load). Each for-loop attempts one
// rule: match the // match: shape, check the // cond: condition, and on
// success rewrite v in place to the // result: form and return true.
// A failed check breaks out to the next rule; false means no rule fired.
// Rule order is significant and must not be changed.
func rewriteValueARM64_OpARM64MOVDload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config // used by Flag_shared conditions and read64 below
	// match: (MOVDload [off] {sym} ptr (FMOVDstore [off] {sym} ptr val _))
	// result: (FMOVDfpgp val)
	// Load of a just-stored FP value: move FP->GP directly, skipping memory.
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpARM64FMOVDstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
			break
		}
		val := v_1.Args[1]
		if ptr != v_1.Args[0] {
			break
		}
		v.reset(OpARM64FMOVDfpgp)
		v.AddArg(val)
		return true
	}
	// match: (MOVDload [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVDload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVDload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVDload [off] {sym} (ADD ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVDloadidx ptr idx mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADD {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64MOVDloadidx)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVDload [off] {sym} (ADDshiftLL [3] ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVDloadidx8 ptr idx mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDshiftLL || auxIntToInt64(v_0.AuxInt) != 3 {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64MOVDloadidx8)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVDload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVDload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVDload)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVDload [off] {sym} ptr (MOVDstorezero [off2] {sym2} ptr2 _))
	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
	// result: (MOVDconst [0])
	// Load from a location just zeroed: the result is known to be zero.
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpARM64MOVDstorezero {
			break
		}
		off2 := auxIntToInt32(v_1.AuxInt)
		sym2 := auxToSym(v_1.Aux)
		ptr2 := v_1.Args[0]
		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (MOVDload [off] {sym} (SB) _)
	// cond: symIsRO(sym)
	// result: (MOVDconst [int64(read64(sym, int64(off), config.ctxt.Arch.ByteOrder))])
	// Load from a read-only symbol: fold the value at compile time.
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpSB || !(symIsRO(sym)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(read64(sym, int64(off), config.ctxt.Arch.ByteOrder)))
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVDloadidx applies the generated rewrite
// rules for the ARM64 MOVDloadidx op (64-bit load with register index).
// Each for-loop attempts one rule: match the // match: shape, check
// // cond:, and on success rewrite v in place to the // result: form and
// return true. A failed check breaks out to the next rule; false means
// no rule applied. Rule order is significant and must not be changed.
func rewriteValueARM64_OpARM64MOVDloadidx(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVDloadidx ptr (MOVDconst [c]) mem)
	// cond: is32Bit(c)
	// result: (MOVDload [int32(c)] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpARM64MOVDload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVDloadidx (MOVDconst [c]) ptr mem)
	// cond: is32Bit(c)
	// result: (MOVDload [int32(c)] ptr mem)
	// Same rule with the constant in the other (commuted) position.
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		ptr := v_1
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpARM64MOVDload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVDloadidx ptr (SLLconst [3] idx) mem)
	// result: (MOVDloadidx8 ptr idx mem)
	// An index shifted left by 3 becomes a scaled (x8) indexed load.
	for {
		ptr := v_0
		if v_1.Op != OpARM64SLLconst || auxIntToInt64(v_1.AuxInt) != 3 {
			break
		}
		idx := v_1.Args[0]
		mem := v_2
		v.reset(OpARM64MOVDloadidx8)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVDloadidx (SLLconst [3] idx) ptr mem)
	// result: (MOVDloadidx8 ptr idx mem)
	for {
		if v_0.Op != OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != 3 {
			break
		}
		idx := v_0.Args[0]
		ptr := v_1
		mem := v_2
		v.reset(OpARM64MOVDloadidx8)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVDloadidx ptr idx (MOVDstorezeroidx ptr2 idx2 _))
	// cond: (isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2))
	// result: (MOVDconst [0])
	// Load from a location just zeroed: the result is known to be zero.
	for {
		ptr := v_0
		idx := v_1
		if v_2.Op != OpARM64MOVDstorezeroidx {
			break
		}
		idx2 := v_2.Args[1]
		ptr2 := v_2.Args[0]
		if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
 10591  func rewriteValueARM64_OpARM64MOVDloadidx8(v *Value) bool {
 10592  	v_2 := v.Args[2]
 10593  	v_1 := v.Args[1]
 10594  	v_0 := v.Args[0]
 10595  	// match: (MOVDloadidx8 ptr (MOVDconst [c]) mem)
 10596  	// cond: is32Bit(c<<3)
 10597  	// result: (MOVDload [int32(c)<<3] ptr mem)
 10598  	for {
 10599  		ptr := v_0
 10600  		if v_1.Op != OpARM64MOVDconst {
 10601  			break
 10602  		}
 10603  		c := auxIntToInt64(v_1.AuxInt)
 10604  		mem := v_2
 10605  		if !(is32Bit(c << 3)) {
 10606  			break
 10607  		}
 10608  		v.reset(OpARM64MOVDload)
 10609  		v.AuxInt = int32ToAuxInt(int32(c) << 3)
 10610  		v.AddArg2(ptr, mem)
 10611  		return true
 10612  	}
 10613  	// match: (MOVDloadidx8 ptr idx (MOVDstorezeroidx8 ptr2 idx2 _))
 10614  	// cond: isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2)
 10615  	// result: (MOVDconst [0])
 10616  	for {
 10617  		ptr := v_0
 10618  		idx := v_1
 10619  		if v_2.Op != OpARM64MOVDstorezeroidx8 {
 10620  			break
 10621  		}
 10622  		idx2 := v_2.Args[1]
 10623  		ptr2 := v_2.Args[0]
 10624  		if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2)) {
 10625  			break
 10626  		}
 10627  		v.reset(OpARM64MOVDconst)
 10628  		v.AuxInt = int64ToAuxInt(0)
 10629  		return true
 10630  	}
 10631  	return false
 10632  }
 10633  func rewriteValueARM64_OpARM64MOVDnop(v *Value) bool {
 10634  	v_0 := v.Args[0]
 10635  	// match: (MOVDnop (MOVDconst [c]))
 10636  	// result: (MOVDconst [c])
 10637  	for {
 10638  		if v_0.Op != OpARM64MOVDconst {
 10639  			break
 10640  		}
 10641  		c := auxIntToInt64(v_0.AuxInt)
 10642  		v.reset(OpARM64MOVDconst)
 10643  		v.AuxInt = int64ToAuxInt(c)
 10644  		return true
 10645  	}
 10646  	return false
 10647  }
 10648  func rewriteValueARM64_OpARM64MOVDreg(v *Value) bool {
 10649  	v_0 := v.Args[0]
 10650  	// match: (MOVDreg x)
 10651  	// cond: x.Uses == 1
 10652  	// result: (MOVDnop x)
 10653  	for {
 10654  		x := v_0
 10655  		if !(x.Uses == 1) {
 10656  			break
 10657  		}
 10658  		v.reset(OpARM64MOVDnop)
 10659  		v.AddArg(x)
 10660  		return true
 10661  	}
 10662  	// match: (MOVDreg (MOVDconst [c]))
 10663  	// result: (MOVDconst [c])
 10664  	for {
 10665  		if v_0.Op != OpARM64MOVDconst {
 10666  			break
 10667  		}
 10668  		c := auxIntToInt64(v_0.AuxInt)
 10669  		v.reset(OpARM64MOVDconst)
 10670  		v.AuxInt = int64ToAuxInt(c)
 10671  		return true
 10672  	}
 10673  	return false
 10674  }
// rewriteValueARM64_OpARM64MOVDstore applies the generated rewrite rules
// for the ARM64 MOVDstore op (64-bit store). Each for-loop attempts one
// rule: match the // match: shape, check // cond:, and on success rewrite
// v in place to the // result: form and return true. A failed check
// breaks out to the next rule; false means no rule applied. Rule order
// is significant and must not be changed.
func rewriteValueARM64_OpARM64MOVDstore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config // used by the Flag_shared conditions below
	// match: (MOVDstore [off] {sym} ptr (FMOVDfpgp val) mem)
	// result: (FMOVDstore [off] {sym} ptr val mem)
	// Storing a value just moved from an FP register: store from FP directly.
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpARM64FMOVDfpgp {
			break
		}
		val := v_1.Args[0]
		mem := v_2
		v.reset(OpARM64FMOVDstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVDstore [off1] {sym} (ADDconst [off2] ptr) val mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVDstore [off1+int32(off2)] {sym} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVDstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVDstore [off] {sym} (ADD ptr idx) val mem)
	// cond: off == 0 && sym == nil
	// result: (MOVDstoreidx ptr idx val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADD {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64MOVDstoreidx)
		v.AddArg4(ptr, idx, val, mem)
		return true
	}
	// match: (MOVDstore [off] {sym} (ADDshiftLL [3] ptr idx) val mem)
	// cond: off == 0 && sym == nil
	// result: (MOVDstoreidx8 ptr idx val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDshiftLL || auxIntToInt64(v_0.AuxInt) != 3 {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64MOVDstoreidx8)
		v.AddArg4(ptr, idx, val, mem)
		return true
	}
	// match: (MOVDstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVDstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVDstore)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVDstore [off] {sym} ptr (MOVDconst [0]) mem)
	// result: (MOVDstorezero [off] {sym} ptr mem)
	// Storing constant zero uses the dedicated zero-store op.
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		mem := v_2
		v.reset(OpARM64MOVDstorezero)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVDstoreidx applies the generated rewrite
// rules for the ARM64 MOVDstoreidx op (64-bit store with register
// index). Each for-loop attempts one rule: match the // match: shape,
// check // cond:, and on success rewrite v in place to the // result:
// form and return true. A failed check breaks out to the next rule;
// false means no rule applied. Rule order is significant.
func rewriteValueARM64_OpARM64MOVDstoreidx(v *Value) bool {
	v_3 := v.Args[3]
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVDstoreidx ptr (MOVDconst [c]) val mem)
	// cond: is32Bit(c)
	// result: (MOVDstore [int32(c)] ptr val mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		val := v_2
		mem := v_3
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpARM64MOVDstore)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVDstoreidx (MOVDconst [c]) idx val mem)
	// cond: is32Bit(c)
	// result: (MOVDstore [int32(c)] idx val mem)
	// Same rule with the constant in the other (commuted) position.
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		idx := v_1
		val := v_2
		mem := v_3
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpARM64MOVDstore)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg3(idx, val, mem)
		return true
	}
	// match: (MOVDstoreidx ptr (SLLconst [3] idx) val mem)
	// result: (MOVDstoreidx8 ptr idx val mem)
	// An index shifted left by 3 becomes a scaled (x8) indexed store.
	for {
		ptr := v_0
		if v_1.Op != OpARM64SLLconst || auxIntToInt64(v_1.AuxInt) != 3 {
			break
		}
		idx := v_1.Args[0]
		val := v_2
		mem := v_3
		v.reset(OpARM64MOVDstoreidx8)
		v.AddArg4(ptr, idx, val, mem)
		return true
	}
	// match: (MOVDstoreidx (SLLconst [3] idx) ptr val mem)
	// result: (MOVDstoreidx8 ptr idx val mem)
	for {
		if v_0.Op != OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != 3 {
			break
		}
		idx := v_0.Args[0]
		ptr := v_1
		val := v_2
		mem := v_3
		v.reset(OpARM64MOVDstoreidx8)
		v.AddArg4(ptr, idx, val, mem)
		return true
	}
	// match: (MOVDstoreidx ptr idx (MOVDconst [0]) mem)
	// result: (MOVDstorezeroidx ptr idx mem)
	// Storing constant zero uses the dedicated zero-store op.
	for {
		ptr := v_0
		idx := v_1
		if v_2.Op != OpARM64MOVDconst || auxIntToInt64(v_2.AuxInt) != 0 {
			break
		}
		mem := v_3
		v.reset(OpARM64MOVDstorezeroidx)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	return false
}
 10887  func rewriteValueARM64_OpARM64MOVDstoreidx8(v *Value) bool {
 10888  	v_3 := v.Args[3]
 10889  	v_2 := v.Args[2]
 10890  	v_1 := v.Args[1]
 10891  	v_0 := v.Args[0]
 10892  	// match: (MOVDstoreidx8 ptr (MOVDconst [c]) val mem)
 10893  	// cond: is32Bit(c<<3)
 10894  	// result: (MOVDstore [int32(c)<<3] ptr val mem)
 10895  	for {
 10896  		ptr := v_0
 10897  		if v_1.Op != OpARM64MOVDconst {
 10898  			break
 10899  		}
 10900  		c := auxIntToInt64(v_1.AuxInt)
 10901  		val := v_2
 10902  		mem := v_3
 10903  		if !(is32Bit(c << 3)) {
 10904  			break
 10905  		}
 10906  		v.reset(OpARM64MOVDstore)
 10907  		v.AuxInt = int32ToAuxInt(int32(c) << 3)
 10908  		v.AddArg3(ptr, val, mem)
 10909  		return true
 10910  	}
 10911  	// match: (MOVDstoreidx8 ptr idx (MOVDconst [0]) mem)
 10912  	// result: (MOVDstorezeroidx8 ptr idx mem)
 10913  	for {
 10914  		ptr := v_0
 10915  		idx := v_1
 10916  		if v_2.Op != OpARM64MOVDconst || auxIntToInt64(v_2.AuxInt) != 0 {
 10917  			break
 10918  		}
 10919  		mem := v_3
 10920  		v.reset(OpARM64MOVDstorezeroidx8)
 10921  		v.AddArg3(ptr, idx, mem)
 10922  		return true
 10923  	}
 10924  	return false
 10925  }
// rewriteValueARM64_OpARM64MOVDstorezero applies the generated ARM64 rewrite
// rules for MOVDstorezero: offset folding, symbol merging, conversion to
// indexed forms, and pairing of two adjacent 8-byte zero stores into a single
// 16-byte MOVQstorezero. It reports whether v was rewritten in place.
// NOTE(review): generated from _gen/ARM64.rules — do not hand-edit logic.
func rewriteValueARM64_OpARM64MOVDstorezero(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVDstorezero [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVDstorezero [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVDstorezero)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVDstorezero [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVDstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVDstorezero)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVDstorezero [off] {sym} (ADD ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVDstorezeroidx ptr idx mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADD {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64MOVDstorezeroidx)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVDstorezero [off] {sym} (ADDshiftLL [3] ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVDstorezeroidx8 ptr idx mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDshiftLL || auxIntToInt64(v_0.AuxInt) != 3 {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64MOVDstorezeroidx8)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVDstorezero [i] {s} ptr0 x:(MOVDstorezero [j] {s} ptr1 mem))
	// cond: x.Uses == 1 && areAdjacentOffsets(int64(i),int64(j),8) && isSamePtr(ptr0, ptr1) && clobber(x)
	// result: (MOVQstorezero [int32(min(int64(i),int64(j)))] {s} ptr0 mem)
	for {
		i := auxIntToInt32(v.AuxInt)
		s := auxToSym(v.Aux)
		ptr0 := v_0
		x := v_1
		if x.Op != OpARM64MOVDstorezero {
			break
		}
		j := auxIntToInt32(x.AuxInt)
		if auxToSym(x.Aux) != s {
			break
		}
		mem := x.Args[1]
		ptr1 := x.Args[0]
		// x.Uses == 1 ensures the inner store can be absorbed; clobber(x)
		// marks it dead so it is not also emitted separately.
		if !(x.Uses == 1 && areAdjacentOffsets(int64(i), int64(j), 8) && isSamePtr(ptr0, ptr1) && clobber(x)) {
			break
		}
		v.reset(OpARM64MOVQstorezero)
		v.AuxInt = int32ToAuxInt(int32(min(int64(i), int64(j))))
		v.Aux = symToAux(s)
		v.AddArg2(ptr0, mem)
		return true
	}
	// match: (MOVDstorezero [8] {s} p0:(ADD ptr0 idx0) x:(MOVDstorezeroidx ptr1 idx1 mem))
	// cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)
	// result: (MOVQstorezero [0] {s} p0 mem)
	for {
		if auxIntToInt32(v.AuxInt) != 8 {
			break
		}
		s := auxToSym(v.Aux)
		p0 := v_0
		if p0.Op != OpARM64ADD {
			break
		}
		_ = p0.Args[1]
		p0_0 := p0.Args[0]
		p0_1 := p0.Args[1]
		// ADD is commutative: try both operand orderings.
		for _i0 := 0; _i0 <= 1; _i0, p0_0, p0_1 = _i0+1, p0_1, p0_0 {
			ptr0 := p0_0
			idx0 := p0_1
			x := v_1
			if x.Op != OpARM64MOVDstorezeroidx {
				continue
			}
			mem := x.Args[2]
			ptr1 := x.Args[0]
			idx1 := x.Args[1]
			if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
				continue
			}
			v.reset(OpARM64MOVQstorezero)
			v.AuxInt = int32ToAuxInt(0)
			v.Aux = symToAux(s)
			v.AddArg2(p0, mem)
			return true
		}
		break
	}
	// match: (MOVDstorezero [8] {s} p0:(ADDshiftLL [3] ptr0 idx0) x:(MOVDstorezeroidx8 ptr1 idx1 mem))
	// cond: x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x)
	// result: (MOVQstorezero [0] {s} p0 mem)
	for {
		if auxIntToInt32(v.AuxInt) != 8 {
			break
		}
		s := auxToSym(v.Aux)
		p0 := v_0
		if p0.Op != OpARM64ADDshiftLL || auxIntToInt64(p0.AuxInt) != 3 {
			break
		}
		idx0 := p0.Args[1]
		ptr0 := p0.Args[0]
		x := v_1
		if x.Op != OpARM64MOVDstorezeroidx8 {
			break
		}
		mem := x.Args[2]
		ptr1 := x.Args[0]
		idx1 := x.Args[1]
		if !(x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x)) {
			break
		}
		v.reset(OpARM64MOVQstorezero)
		v.AuxInt = int32ToAuxInt(0)
		v.Aux = symToAux(s)
		v.AddArg2(p0, mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVDstorezeroidx applies the generated ARM64
// rewrite rules for MOVDstorezeroidx: fold a constant index into the offset
// of a plain MOVDstorezero, or recognize a shift-by-3 index (in either
// operand position) as the scaled MOVDstorezeroidx8 form. It reports whether
// v was rewritten in place. NOTE(review): generated from _gen/ARM64.rules.
func rewriteValueARM64_OpARM64MOVDstorezeroidx(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVDstorezeroidx ptr (MOVDconst [c]) mem)
	// cond: is32Bit(c)
	// result: (MOVDstorezero [int32(c)] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpARM64MOVDstorezero)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVDstorezeroidx (MOVDconst [c]) idx mem)
	// cond: is32Bit(c)
	// result: (MOVDstorezero [int32(c)] idx mem)
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		idx := v_1
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpARM64MOVDstorezero)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(idx, mem)
		return true
	}
	// match: (MOVDstorezeroidx ptr (SLLconst [3] idx) mem)
	// result: (MOVDstorezeroidx8 ptr idx mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64SLLconst || auxIntToInt64(v_1.AuxInt) != 3 {
			break
		}
		idx := v_1.Args[0]
		mem := v_2
		v.reset(OpARM64MOVDstorezeroidx8)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVDstorezeroidx (SLLconst [3] idx) ptr mem)
	// result: (MOVDstorezeroidx8 ptr idx mem)
	for {
		if v_0.Op != OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != 3 {
			break
		}
		idx := v_0.Args[0]
		ptr := v_1
		mem := v_2
		v.reset(OpARM64MOVDstorezeroidx8)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVDstorezeroidx8 applies the generated ARM64
// rewrite rule for MOVDstorezeroidx8: fold a constant index (scaled by 8)
// into the offset of a plain MOVDstorezero. It reports whether v was
// rewritten in place. NOTE(review): generated from _gen/ARM64.rules.
func rewriteValueARM64_OpARM64MOVDstorezeroidx8(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVDstorezeroidx8 ptr (MOVDconst [c]) mem)
	// cond: is32Bit(c<<3)
	// result: (MOVDstorezero [int32(c<<3)] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		// The scaled offset must fit the 32-bit AuxInt of MOVDstorezero.
		if !(is32Bit(c << 3)) {
			break
		}
		v.reset(OpARM64MOVDstorezero)
		v.AuxInt = int32ToAuxInt(int32(c << 3))
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVHUload applies the generated ARM64 rewrite
// rules for MOVHUload (unsigned halfword load): offset folding, conversion
// to indexed forms, symbol merging, load-after-zero-store elimination, and
// constant folding of read-only-symbol loads. It reports whether v was
// rewritten in place. NOTE(review): generated from _gen/ARM64.rules.
func rewriteValueARM64_OpARM64MOVHUload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVHUload [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVHUload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVHUload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHUload [off] {sym} (ADD ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVHUloadidx ptr idx mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADD {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64MOVHUloadidx)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVHUload [off] {sym} (ADDshiftLL [1] ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVHUloadidx2 ptr idx mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDshiftLL || auxIntToInt64(v_0.AuxInt) != 1 {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64MOVHUloadidx2)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVHUload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVHUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVHUload)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHUload [off] {sym} ptr (MOVHstorezero [off2] {sym2} ptr2 _))
	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
	// result: (MOVDconst [0])
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpARM64MOVHstorezero {
			break
		}
		off2 := auxIntToInt32(v_1.AuxInt)
		sym2 := auxToSym(v_1.Aux)
		ptr2 := v_1.Args[0]
		// Loading a halfword that was just zero-stored yields constant 0.
		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (MOVHUload [off] {sym} (SB) _)
	// cond: symIsRO(sym)
	// result: (MOVDconst [int64(read16(sym, int64(off), config.ctxt.Arch.ByteOrder))])
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpSB || !(symIsRO(sym)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(read16(sym, int64(off), config.ctxt.Arch.ByteOrder)))
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVHUloadidx applies the generated ARM64 rewrite
// rules for MOVHUloadidx (indexed unsigned halfword load): fold a constant
// index into an offset, recognize shift-by-1 or idx+idx indexes as the
// scaled MOVHUloadidx2 form, and eliminate a load that follows a matching
// indexed zero store. It reports whether v was rewritten in place.
// NOTE(review): generated from _gen/ARM64.rules.
func rewriteValueARM64_OpARM64MOVHUloadidx(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVHUloadidx ptr (MOVDconst [c]) mem)
	// cond: is32Bit(c)
	// result: (MOVHUload [int32(c)] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpARM64MOVHUload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHUloadidx (MOVDconst [c]) ptr mem)
	// cond: is32Bit(c)
	// result: (MOVHUload [int32(c)] ptr mem)
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		ptr := v_1
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpARM64MOVHUload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHUloadidx ptr (SLLconst [1] idx) mem)
	// result: (MOVHUloadidx2 ptr idx mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64SLLconst || auxIntToInt64(v_1.AuxInt) != 1 {
			break
		}
		idx := v_1.Args[0]
		mem := v_2
		v.reset(OpARM64MOVHUloadidx2)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVHUloadidx ptr (ADD idx idx) mem)
	// result: (MOVHUloadidx2 ptr idx mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64ADD {
			break
		}
		idx := v_1.Args[1]
		// idx+idx == idx<<1, i.e. an index scaled by 2.
		if idx != v_1.Args[0] {
			break
		}
		mem := v_2
		v.reset(OpARM64MOVHUloadidx2)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVHUloadidx (ADD idx idx) ptr mem)
	// result: (MOVHUloadidx2 ptr idx mem)
	for {
		if v_0.Op != OpARM64ADD {
			break
		}
		idx := v_0.Args[1]
		if idx != v_0.Args[0] {
			break
		}
		ptr := v_1
		mem := v_2
		v.reset(OpARM64MOVHUloadidx2)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVHUloadidx ptr idx (MOVHstorezeroidx ptr2 idx2 _))
	// cond: (isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2))
	// result: (MOVDconst [0])
	for {
		ptr := v_0
		idx := v_1
		if v_2.Op != OpARM64MOVHstorezeroidx {
			break
		}
		idx2 := v_2.Args[1]
		ptr2 := v_2.Args[0]
		if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVHUloadidx2 applies the generated ARM64 rewrite
// rules for MOVHUloadidx2 (unsigned halfword load, index scaled by 2): fold
// a constant index into the offset, and eliminate a load that follows a
// matching scaled-index zero store. It reports whether v was rewritten in
// place. NOTE(review): generated from _gen/ARM64.rules.
func rewriteValueARM64_OpARM64MOVHUloadidx2(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVHUloadidx2 ptr (MOVDconst [c]) mem)
	// cond: is32Bit(c<<1)
	// result: (MOVHUload [int32(c)<<1] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c << 1)) {
			break
		}
		v.reset(OpARM64MOVHUload)
		v.AuxInt = int32ToAuxInt(int32(c) << 1)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHUloadidx2 ptr idx (MOVHstorezeroidx2 ptr2 idx2 _))
	// cond: isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2)
	// result: (MOVDconst [0])
	for {
		ptr := v_0
		idx := v_1
		if v_2.Op != OpARM64MOVHstorezeroidx2 {
			break
		}
		idx2 := v_2.Args[1]
		ptr2 := v_2.Args[0]
		if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVHUreg applies the generated ARM64 rewrite
// rules for MOVHUreg (zero-extend halfword to 64 bits): drop the extension
// when the operand is already zero-extended (unsigned loads and narrower
// zero extensions), fold with ANDconst/MOVDconst, and convert shift
// combinations into UBFIZ/UBFX bitfield ops. It reports whether v was
// rewritten in place. NOTE(review): generated from _gen/ARM64.rules.
func rewriteValueARM64_OpARM64MOVHUreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVHUreg x:(MOVBUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBUload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVHUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHUload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVBUloadidx _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBUloadidx {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVHUloadidx _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHUloadidx {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVHUloadidx2 _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHUloadidx2 {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVBUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBUreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVHUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHUreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg (ANDconst [c] x))
	// result: (ANDconst [c&(1<<16-1)] x)
	for {
		if v_0.Op != OpARM64ANDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		v.reset(OpARM64ANDconst)
		// Zero-extension of a masked value == masking with the low 16 bits.
		v.AuxInt = int64ToAuxInt(c & (1<<16 - 1))
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg (MOVDconst [c]))
	// result: (MOVDconst [int64(uint16(c))])
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(uint16(c)))
		return true
	}
	// match: (MOVHUreg (SLLconst [lc] x))
	// cond: lc >= 16
	// result: (MOVDconst [0])
	for {
		if v_0.Op != OpARM64SLLconst {
			break
		}
		lc := auxIntToInt64(v_0.AuxInt)
		// Shifting left by >= 16 leaves nothing in the low halfword.
		if !(lc >= 16) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (MOVHUreg (SLLconst [lc] x))
	// cond: lc < 16
	// result: (UBFIZ [armBFAuxInt(lc, 16-lc)] x)
	for {
		if v_0.Op != OpARM64SLLconst {
			break
		}
		lc := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(lc < 16) {
			break
		}
		v.reset(OpARM64UBFIZ)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(lc, 16-lc))
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg (SRLconst [rc] x))
	// cond: rc < 16
	// result: (UBFX [armBFAuxInt(rc, 16)] x)
	for {
		if v_0.Op != OpARM64SRLconst {
			break
		}
		rc := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(rc < 16) {
			break
		}
		v.reset(OpARM64UBFX)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(rc, 16))
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg (UBFX [bfc] x))
	// cond: bfc.getARM64BFwidth() <= 16
	// result: (UBFX [bfc] x)
	for {
		if v_0.Op != OpARM64UBFX {
			break
		}
		bfc := auxIntToArm64BitField(v_0.AuxInt)
		x := v_0.Args[0]
		if !(bfc.getARM64BFwidth() <= 16) {
			break
		}
		v.reset(OpARM64UBFX)
		v.AuxInt = arm64BitFieldToAuxInt(bfc)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVHload applies the generated ARM64 rewrite
// rules for MOVHload (signed halfword load): offset folding, conversion to
// indexed forms, symbol merging, and load-after-zero-store elimination. It
// reports whether v was rewritten in place. NOTE(review): generated from
// _gen/ARM64.rules.
func rewriteValueARM64_OpARM64MOVHload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVHload [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVHload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVHload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHload [off] {sym} (ADD ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVHloadidx ptr idx mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADD {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64MOVHloadidx)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVHload [off] {sym} (ADDshiftLL [1] ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVHloadidx2 ptr idx mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDshiftLL || auxIntToInt64(v_0.AuxInt) != 1 {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64MOVHloadidx2)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVHload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVHload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVHload)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHload [off] {sym} ptr (MOVHstorezero [off2] {sym2} ptr2 _))
	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
	// result: (MOVDconst [0])
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpARM64MOVHstorezero {
			break
		}
		off2 := auxIntToInt32(v_1.AuxInt)
		sym2 := auxToSym(v_1.Aux)
		ptr2 := v_1.Args[0]
		// Loading a halfword that was just zero-stored yields constant 0.
		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVHloadidx applies the generated ARM64 rewrite
// rules for MOVHloadidx (indexed signed halfword load): fold a constant
// index into an offset, recognize shift-by-1 or idx+idx indexes as the
// scaled MOVHloadidx2 form, and eliminate a load that follows a matching
// indexed zero store. It reports whether v was rewritten in place.
// NOTE(review): generated from _gen/ARM64.rules.
func rewriteValueARM64_OpARM64MOVHloadidx(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVHloadidx ptr (MOVDconst [c]) mem)
	// cond: is32Bit(c)
	// result: (MOVHload [int32(c)] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpARM64MOVHload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHloadidx (MOVDconst [c]) ptr mem)
	// cond: is32Bit(c)
	// result: (MOVHload [int32(c)] ptr mem)
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		ptr := v_1
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpARM64MOVHload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHloadidx ptr (SLLconst [1] idx) mem)
	// result: (MOVHloadidx2 ptr idx mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64SLLconst || auxIntToInt64(v_1.AuxInt) != 1 {
			break
		}
		idx := v_1.Args[0]
		mem := v_2
		v.reset(OpARM64MOVHloadidx2)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVHloadidx ptr (ADD idx idx) mem)
	// result: (MOVHloadidx2 ptr idx mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64ADD {
			break
		}
		idx := v_1.Args[1]
		// idx+idx == idx<<1, i.e. an index scaled by 2.
		if idx != v_1.Args[0] {
			break
		}
		mem := v_2
		v.reset(OpARM64MOVHloadidx2)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVHloadidx (ADD idx idx) ptr mem)
	// result: (MOVHloadidx2 ptr idx mem)
	for {
		if v_0.Op != OpARM64ADD {
			break
		}
		idx := v_0.Args[1]
		if idx != v_0.Args[0] {
			break
		}
		ptr := v_1
		mem := v_2
		v.reset(OpARM64MOVHloadidx2)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVHloadidx ptr idx (MOVHstorezeroidx ptr2 idx2 _))
	// cond: (isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2))
	// result: (MOVDconst [0])
	for {
		ptr := v_0
		idx := v_1
		if v_2.Op != OpARM64MOVHstorezeroidx {
			break
		}
		idx2 := v_2.Args[1]
		ptr2 := v_2.Args[0]
		if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVHloadidx2 applies the generated ARM64 rewrite
// rules for MOVHloadidx2 (signed halfword load, index scaled by 2): fold a
// constant index into the offset, and eliminate a load that follows a
// matching scaled-index zero store. It reports whether v was rewritten in
// place. NOTE(review): generated from _gen/ARM64.rules.
func rewriteValueARM64_OpARM64MOVHloadidx2(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVHloadidx2 ptr (MOVDconst [c]) mem)
	// cond: is32Bit(c<<1)
	// result: (MOVHload [int32(c)<<1] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c << 1)) {
			break
		}
		v.reset(OpARM64MOVHload)
		v.AuxInt = int32ToAuxInt(int32(c) << 1)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHloadidx2 ptr idx (MOVHstorezeroidx2 ptr2 idx2 _))
	// cond: isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2)
	// result: (MOVDconst [0])
	for {
		ptr := v_0
		idx := v_1
		if v_2.Op != OpARM64MOVHstorezeroidx2 {
			break
		}
		idx2 := v_2.Args[1]
		ptr2 := v_2.Args[0]
		if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVHreg applies the generated ARM64.rules rewrites
// for OpARM64MOVHreg (sign-extend halfword to 64 bits) and reports whether v
// was rewritten. Each "for" block attempts one rule: it matches the pattern in
// the preceding "match:" comment, checks the "cond:" (if any), and on success
// resets v to the "result:" form and returns true. The first group of rules
// drops the redundant sign extension when the operand is already narrow enough
// (a byte/halfword load or extension); later rules fold constants and shifts
// into bitfield operations.
func rewriteValueARM64_OpARM64MOVHreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVHreg x:(MOVBload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBUload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVHload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBloadidx _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBloadidx {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBUloadidx _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBUloadidx {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVHloadidx _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHloadidx {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVHloadidx2 _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHloadidx2 {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBUreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVHreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg (MOVDconst [c]))
	// result: (MOVDconst [int64(int16(c))])
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpARM64MOVDconst)
		// Constant-fold: sign-extend the low 16 bits of c at compile time.
		v.AuxInt = int64ToAuxInt(int64(int16(c)))
		return true
	}
	// match: (MOVHreg (SLLconst [lc] x))
	// cond: lc < 16
	// result: (SBFIZ [armBFAuxInt(lc, 16-lc)] x)
	for {
		if v_0.Op != OpARM64SLLconst {
			break
		}
		lc := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(lc < 16) {
			break
		}
		v.reset(OpARM64SBFIZ)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(lc, 16-lc))
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg (SBFX [bfc] x))
	// cond: bfc.getARM64BFwidth() <= 16
	// result: (SBFX [bfc] x)
	for {
		if v_0.Op != OpARM64SBFX {
			break
		}
		bfc := auxIntToArm64BitField(v_0.AuxInt)
		x := v_0.Args[0]
		if !(bfc.getARM64BFwidth() <= 16) {
			break
		}
		v.reset(OpARM64SBFX)
		v.AuxInt = arm64BitFieldToAuxInt(bfc)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVHstore applies the generated ARM64.rules
// rewrites for OpARM64MOVHstore (store halfword) and reports whether v was
// rewritten. The rules fold address arithmetic (ADDconst/MOVDaddr offsets,
// ADD/ADDshiftLL into the indexed forms), strip redundant extensions of the
// stored value, rewrite zero stores to MOVHstorezero, and merge adjacent
// halfword stores of the two halves of a word into a single MOVWstore /
// MOVWstoreidx. Each "for" block attempts one rule as described by its
// "match:"/"cond:"/"result:" comments.
func rewriteValueARM64_OpARM64MOVHstore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVHstore [off1] {sym} (ADDconst [off2] ptr) val mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVHstore [off1+int32(off2)] {sym} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVHstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} (ADD ptr idx) val mem)
	// cond: off == 0 && sym == nil
	// result: (MOVHstoreidx ptr idx val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADD {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64MOVHstoreidx)
		v.AddArg4(ptr, idx, val, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} (ADDshiftLL [1] ptr idx) val mem)
	// cond: off == 0 && sym == nil
	// result: (MOVHstoreidx2 ptr idx val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDshiftLL || auxIntToInt64(v_0.AuxInt) != 1 {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64MOVHstoreidx2)
		v.AddArg4(ptr, idx, val, mem)
		return true
	}
	// match: (MOVHstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVHstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVHstore)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVDconst [0]) mem)
	// result: (MOVHstorezero [off] {sym} ptr mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		mem := v_2
		v.reset(OpARM64MOVHstorezero)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVHreg x) mem)
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpARM64MOVHreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpARM64MOVHstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVHUreg x) mem)
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpARM64MOVHUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpARM64MOVHstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVWreg x) mem)
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpARM64MOVWreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpARM64MOVHstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVWUreg x) mem)
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpARM64MOVWUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpARM64MOVHstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVHstore [i] {s} ptr0 (SRLconst [16] w) x:(MOVHstore [i-2] {s} ptr1 w mem))
	// cond: x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)
	// result: (MOVWstore [i-2] {s} ptr0 w mem)
	for {
		i := auxIntToInt32(v.AuxInt)
		s := auxToSym(v.Aux)
		ptr0 := v_0
		if v_1.Op != OpARM64SRLconst || auxIntToInt64(v_1.AuxInt) != 16 {
			break
		}
		w := v_1.Args[0]
		x := v_2
		if x.Op != OpARM64MOVHstore || auxIntToInt32(x.AuxInt) != i-2 || auxToSym(x.Aux) != s {
			break
		}
		mem := x.Args[2]
		ptr1 := x.Args[0]
		if w != x.Args[1] || !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) {
			break
		}
		v.reset(OpARM64MOVWstore)
		v.AuxInt = int32ToAuxInt(i - 2)
		v.Aux = symToAux(s)
		v.AddArg3(ptr0, w, mem)
		return true
	}
	// match: (MOVHstore [2] {s} (ADD ptr0 idx0) (SRLconst [16] w) x:(MOVHstoreidx ptr1 idx1 w mem))
	// cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)
	// result: (MOVWstoreidx ptr1 idx1 w mem)
	for {
		if auxIntToInt32(v.AuxInt) != 2 {
			break
		}
		s := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADD {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		// ADD is commutative: try both orderings of its operands.
		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
			ptr0 := v_0_0
			idx0 := v_0_1
			if v_1.Op != OpARM64SRLconst || auxIntToInt64(v_1.AuxInt) != 16 {
				continue
			}
			w := v_1.Args[0]
			x := v_2
			if x.Op != OpARM64MOVHstoreidx {
				continue
			}
			mem := x.Args[3]
			ptr1 := x.Args[0]
			idx1 := x.Args[1]
			if w != x.Args[2] || !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
				continue
			}
			v.reset(OpARM64MOVWstoreidx)
			v.AddArg4(ptr1, idx1, w, mem)
			return true
		}
		break
	}
	// match: (MOVHstore [2] {s} (ADDshiftLL [1] ptr0 idx0) (SRLconst [16] w) x:(MOVHstoreidx2 ptr1 idx1 w mem))
	// cond: x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x)
	// result: (MOVWstoreidx ptr1 (SLLconst <idx1.Type> [1] idx1) w mem)
	for {
		if auxIntToInt32(v.AuxInt) != 2 {
			break
		}
		s := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDshiftLL || auxIntToInt64(v_0.AuxInt) != 1 {
			break
		}
		idx0 := v_0.Args[1]
		ptr0 := v_0.Args[0]
		if v_1.Op != OpARM64SRLconst || auxIntToInt64(v_1.AuxInt) != 16 {
			break
		}
		w := v_1.Args[0]
		x := v_2
		if x.Op != OpARM64MOVHstoreidx2 {
			break
		}
		mem := x.Args[3]
		ptr1 := x.Args[0]
		idx1 := x.Args[1]
		if w != x.Args[2] || !(x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x)) {
			break
		}
		v.reset(OpARM64MOVWstoreidx)
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, idx1.Type)
		v0.AuxInt = int64ToAuxInt(1)
		v0.AddArg(idx1)
		v.AddArg4(ptr1, v0, w, mem)
		return true
	}
	// match: (MOVHstore [i] {s} ptr0 (UBFX [armBFAuxInt(16, 16)] w) x:(MOVHstore [i-2] {s} ptr1 w mem))
	// cond: x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)
	// result: (MOVWstore [i-2] {s} ptr0 w mem)
	for {
		i := auxIntToInt32(v.AuxInt)
		s := auxToSym(v.Aux)
		ptr0 := v_0
		if v_1.Op != OpARM64UBFX || auxIntToArm64BitField(v_1.AuxInt) != armBFAuxInt(16, 16) {
			break
		}
		w := v_1.Args[0]
		x := v_2
		if x.Op != OpARM64MOVHstore || auxIntToInt32(x.AuxInt) != i-2 || auxToSym(x.Aux) != s {
			break
		}
		mem := x.Args[2]
		ptr1 := x.Args[0]
		if w != x.Args[1] || !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) {
			break
		}
		v.reset(OpARM64MOVWstore)
		v.AuxInt = int32ToAuxInt(i - 2)
		v.Aux = symToAux(s)
		v.AddArg3(ptr0, w, mem)
		return true
	}
	// match: (MOVHstore [2] {s} (ADD ptr0 idx0) (UBFX [armBFAuxInt(16, 16)] w) x:(MOVHstoreidx ptr1 idx1 w mem))
	// cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)
	// result: (MOVWstoreidx ptr1 idx1 w mem)
	for {
		if auxIntToInt32(v.AuxInt) != 2 {
			break
		}
		s := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADD {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		// ADD is commutative: try both orderings of its operands.
		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
			ptr0 := v_0_0
			idx0 := v_0_1
			if v_1.Op != OpARM64UBFX || auxIntToArm64BitField(v_1.AuxInt) != armBFAuxInt(16, 16) {
				continue
			}
			w := v_1.Args[0]
			x := v_2
			if x.Op != OpARM64MOVHstoreidx {
				continue
			}
			mem := x.Args[3]
			ptr1 := x.Args[0]
			idx1 := x.Args[1]
			if w != x.Args[2] || !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
				continue
			}
			v.reset(OpARM64MOVWstoreidx)
			v.AddArg4(ptr1, idx1, w, mem)
			return true
		}
		break
	}
	// match: (MOVHstore [2] {s} (ADDshiftLL [1] ptr0 idx0) (UBFX [armBFAuxInt(16, 16)] w) x:(MOVHstoreidx2 ptr1 idx1 w mem))
	// cond: x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x)
	// result: (MOVWstoreidx ptr1 (SLLconst <idx1.Type> [1] idx1) w mem)
	for {
		if auxIntToInt32(v.AuxInt) != 2 {
			break
		}
		s := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDshiftLL || auxIntToInt64(v_0.AuxInt) != 1 {
			break
		}
		idx0 := v_0.Args[1]
		ptr0 := v_0.Args[0]
		if v_1.Op != OpARM64UBFX || auxIntToArm64BitField(v_1.AuxInt) != armBFAuxInt(16, 16) {
			break
		}
		w := v_1.Args[0]
		x := v_2
		if x.Op != OpARM64MOVHstoreidx2 {
			break
		}
		mem := x.Args[3]
		ptr1 := x.Args[0]
		idx1 := x.Args[1]
		if w != x.Args[2] || !(x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x)) {
			break
		}
		v.reset(OpARM64MOVWstoreidx)
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, idx1.Type)
		v0.AuxInt = int64ToAuxInt(1)
		v0.AddArg(idx1)
		v.AddArg4(ptr1, v0, w, mem)
		return true
	}
	// match: (MOVHstore [i] {s} ptr0 (SRLconst [16] (MOVDreg w)) x:(MOVHstore [i-2] {s} ptr1 w mem))
	// cond: x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)
	// result: (MOVWstore [i-2] {s} ptr0 w mem)
	for {
		i := auxIntToInt32(v.AuxInt)
		s := auxToSym(v.Aux)
		ptr0 := v_0
		if v_1.Op != OpARM64SRLconst || auxIntToInt64(v_1.AuxInt) != 16 {
			break
		}
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpARM64MOVDreg {
			break
		}
		w := v_1_0.Args[0]
		x := v_2
		if x.Op != OpARM64MOVHstore || auxIntToInt32(x.AuxInt) != i-2 || auxToSym(x.Aux) != s {
			break
		}
		mem := x.Args[2]
		ptr1 := x.Args[0]
		if w != x.Args[1] || !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) {
			break
		}
		v.reset(OpARM64MOVWstore)
		v.AuxInt = int32ToAuxInt(i - 2)
		v.Aux = symToAux(s)
		v.AddArg3(ptr0, w, mem)
		return true
	}
	// match: (MOVHstore [2] {s} (ADD ptr0 idx0) (SRLconst [16] (MOVDreg w)) x:(MOVHstoreidx ptr1 idx1 w mem))
	// cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)
	// result: (MOVWstoreidx ptr1 idx1 w mem)
	for {
		if auxIntToInt32(v.AuxInt) != 2 {
			break
		}
		s := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADD {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		// ADD is commutative: try both orderings of its operands.
		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
			ptr0 := v_0_0
			idx0 := v_0_1
			if v_1.Op != OpARM64SRLconst || auxIntToInt64(v_1.AuxInt) != 16 {
				continue
			}
			v_1_0 := v_1.Args[0]
			if v_1_0.Op != OpARM64MOVDreg {
				continue
			}
			w := v_1_0.Args[0]
			x := v_2
			if x.Op != OpARM64MOVHstoreidx {
				continue
			}
			mem := x.Args[3]
			ptr1 := x.Args[0]
			idx1 := x.Args[1]
			if w != x.Args[2] || !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
				continue
			}
			v.reset(OpARM64MOVWstoreidx)
			v.AddArg4(ptr1, idx1, w, mem)
			return true
		}
		break
	}
	// match: (MOVHstore [2] {s} (ADDshiftLL [1] ptr0 idx0) (SRLconst [16] (MOVDreg w)) x:(MOVHstoreidx2 ptr1 idx1 w mem))
	// cond: x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x)
	// result: (MOVWstoreidx ptr1 (SLLconst <idx1.Type> [1] idx1) w mem)
	for {
		if auxIntToInt32(v.AuxInt) != 2 {
			break
		}
		s := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDshiftLL || auxIntToInt64(v_0.AuxInt) != 1 {
			break
		}
		idx0 := v_0.Args[1]
		ptr0 := v_0.Args[0]
		if v_1.Op != OpARM64SRLconst || auxIntToInt64(v_1.AuxInt) != 16 {
			break
		}
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpARM64MOVDreg {
			break
		}
		w := v_1_0.Args[0]
		x := v_2
		if x.Op != OpARM64MOVHstoreidx2 {
			break
		}
		mem := x.Args[3]
		ptr1 := x.Args[0]
		idx1 := x.Args[1]
		if w != x.Args[2] || !(x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x)) {
			break
		}
		v.reset(OpARM64MOVWstoreidx)
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, idx1.Type)
		v0.AuxInt = int64ToAuxInt(1)
		v0.AddArg(idx1)
		v.AddArg4(ptr1, v0, w, mem)
		return true
	}
	// match: (MOVHstore [i] {s} ptr0 (SRLconst [j] w) x:(MOVHstore [i-2] {s} ptr1 w0:(SRLconst [j-16] w) mem))
	// cond: x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)
	// result: (MOVWstore [i-2] {s} ptr0 w0 mem)
	for {
		i := auxIntToInt32(v.AuxInt)
		s := auxToSym(v.Aux)
		ptr0 := v_0
		if v_1.Op != OpARM64SRLconst {
			break
		}
		j := auxIntToInt64(v_1.AuxInt)
		w := v_1.Args[0]
		x := v_2
		if x.Op != OpARM64MOVHstore || auxIntToInt32(x.AuxInt) != i-2 || auxToSym(x.Aux) != s {
			break
		}
		mem := x.Args[2]
		ptr1 := x.Args[0]
		w0 := x.Args[1]
		if w0.Op != OpARM64SRLconst || auxIntToInt64(w0.AuxInt) != j-16 || w != w0.Args[0] || !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) {
			break
		}
		v.reset(OpARM64MOVWstore)
		v.AuxInt = int32ToAuxInt(i - 2)
		v.Aux = symToAux(s)
		v.AddArg3(ptr0, w0, mem)
		return true
	}
	// match: (MOVHstore [2] {s} (ADD ptr0 idx0) (SRLconst [j] w) x:(MOVHstoreidx ptr1 idx1 w0:(SRLconst [j-16] w) mem))
	// cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)
	// result: (MOVWstoreidx ptr1 idx1 w0 mem)
	for {
		if auxIntToInt32(v.AuxInt) != 2 {
			break
		}
		s := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADD {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		// ADD is commutative: try both orderings of its operands.
		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
			ptr0 := v_0_0
			idx0 := v_0_1
			if v_1.Op != OpARM64SRLconst {
				continue
			}
			j := auxIntToInt64(v_1.AuxInt)
			w := v_1.Args[0]
			x := v_2
			if x.Op != OpARM64MOVHstoreidx {
				continue
			}
			mem := x.Args[3]
			ptr1 := x.Args[0]
			idx1 := x.Args[1]
			w0 := x.Args[2]
			if w0.Op != OpARM64SRLconst || auxIntToInt64(w0.AuxInt) != j-16 || w != w0.Args[0] || !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
				continue
			}
			v.reset(OpARM64MOVWstoreidx)
			v.AddArg4(ptr1, idx1, w0, mem)
			return true
		}
		break
	}
	// match: (MOVHstore [2] {s} (ADDshiftLL [1] ptr0 idx0) (SRLconst [j] w) x:(MOVHstoreidx2 ptr1 idx1 w0:(SRLconst [j-16] w) mem))
	// cond: x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x)
	// result: (MOVWstoreidx ptr1 (SLLconst <idx1.Type> [1] idx1) w0 mem)
	for {
		if auxIntToInt32(v.AuxInt) != 2 {
			break
		}
		s := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDshiftLL || auxIntToInt64(v_0.AuxInt) != 1 {
			break
		}
		idx0 := v_0.Args[1]
		ptr0 := v_0.Args[0]
		if v_1.Op != OpARM64SRLconst {
			break
		}
		j := auxIntToInt64(v_1.AuxInt)
		w := v_1.Args[0]
		x := v_2
		if x.Op != OpARM64MOVHstoreidx2 {
			break
		}
		mem := x.Args[3]
		ptr1 := x.Args[0]
		idx1 := x.Args[1]
		w0 := x.Args[2]
		if w0.Op != OpARM64SRLconst || auxIntToInt64(w0.AuxInt) != j-16 || w != w0.Args[0] || !(x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x)) {
			break
		}
		v.reset(OpARM64MOVWstoreidx)
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, idx1.Type)
		v0.AuxInt = int64ToAuxInt(1)
		v0.AddArg(idx1)
		v.AddArg4(ptr1, v0, w0, mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVHstoreidx applies the generated ARM64.rules
// rewrites for OpARM64MOVHstoreidx (register-indexed halfword store) and
// reports whether v was rewritten. The rules fold a constant index back into
// the offset form (MOVHstore), recognize a doubled index (SLLconst [1] /
// ADD idx idx, in either argument position) as the scaled MOVHstoreidx2 form,
// rewrite zero stores to MOVHstorezeroidx, strip redundant extensions of the
// stored value, and merge two adjacent halfword stores into one MOVWstoreidx.
func rewriteValueARM64_OpARM64MOVHstoreidx(v *Value) bool {
	v_3 := v.Args[3]
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVHstoreidx ptr (MOVDconst [c]) val mem)
	// cond: is32Bit(c)
	// result: (MOVHstore [int32(c)] ptr val mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		val := v_2
		mem := v_3
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpARM64MOVHstore)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVHstoreidx (MOVDconst [c]) idx val mem)
	// cond: is32Bit(c)
	// result: (MOVHstore [int32(c)] idx val mem)
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		idx := v_1
		val := v_2
		mem := v_3
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpARM64MOVHstore)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg3(idx, val, mem)
		return true
	}
	// match: (MOVHstoreidx ptr (SLLconst [1] idx) val mem)
	// result: (MOVHstoreidx2 ptr idx val mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64SLLconst || auxIntToInt64(v_1.AuxInt) != 1 {
			break
		}
		idx := v_1.Args[0]
		val := v_2
		mem := v_3
		v.reset(OpARM64MOVHstoreidx2)
		v.AddArg4(ptr, idx, val, mem)
		return true
	}
	// match: (MOVHstoreidx ptr (ADD idx idx) val mem)
	// result: (MOVHstoreidx2 ptr idx val mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64ADD {
			break
		}
		idx := v_1.Args[1]
		// idx+idx == idx<<1, so this also becomes the scaled form.
		if idx != v_1.Args[0] {
			break
		}
		val := v_2
		mem := v_3
		v.reset(OpARM64MOVHstoreidx2)
		v.AddArg4(ptr, idx, val, mem)
		return true
	}
	// match: (MOVHstoreidx (SLLconst [1] idx) ptr val mem)
	// result: (MOVHstoreidx2 ptr idx val mem)
	for {
		if v_0.Op != OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != 1 {
			break
		}
		idx := v_0.Args[0]
		ptr := v_1
		val := v_2
		mem := v_3
		v.reset(OpARM64MOVHstoreidx2)
		v.AddArg4(ptr, idx, val, mem)
		return true
	}
	// match: (MOVHstoreidx (ADD idx idx) ptr val mem)
	// result: (MOVHstoreidx2 ptr idx val mem)
	for {
		if v_0.Op != OpARM64ADD {
			break
		}
		idx := v_0.Args[1]
		if idx != v_0.Args[0] {
			break
		}
		ptr := v_1
		val := v_2
		mem := v_3
		v.reset(OpARM64MOVHstoreidx2)
		v.AddArg4(ptr, idx, val, mem)
		return true
	}
	// match: (MOVHstoreidx ptr idx (MOVDconst [0]) mem)
	// result: (MOVHstorezeroidx ptr idx mem)
	for {
		ptr := v_0
		idx := v_1
		if v_2.Op != OpARM64MOVDconst || auxIntToInt64(v_2.AuxInt) != 0 {
			break
		}
		mem := v_3
		v.reset(OpARM64MOVHstorezeroidx)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVHstoreidx ptr idx (MOVHreg x) mem)
	// result: (MOVHstoreidx ptr idx x mem)
	for {
		ptr := v_0
		idx := v_1
		if v_2.Op != OpARM64MOVHreg {
			break
		}
		x := v_2.Args[0]
		mem := v_3
		v.reset(OpARM64MOVHstoreidx)
		v.AddArg4(ptr, idx, x, mem)
		return true
	}
	// match: (MOVHstoreidx ptr idx (MOVHUreg x) mem)
	// result: (MOVHstoreidx ptr idx x mem)
	for {
		ptr := v_0
		idx := v_1
		if v_2.Op != OpARM64MOVHUreg {
			break
		}
		x := v_2.Args[0]
		mem := v_3
		v.reset(OpARM64MOVHstoreidx)
		v.AddArg4(ptr, idx, x, mem)
		return true
	}
	// match: (MOVHstoreidx ptr idx (MOVWreg x) mem)
	// result: (MOVHstoreidx ptr idx x mem)
	for {
		ptr := v_0
		idx := v_1
		if v_2.Op != OpARM64MOVWreg {
			break
		}
		x := v_2.Args[0]
		mem := v_3
		v.reset(OpARM64MOVHstoreidx)
		v.AddArg4(ptr, idx, x, mem)
		return true
	}
	// match: (MOVHstoreidx ptr idx (MOVWUreg x) mem)
	// result: (MOVHstoreidx ptr idx x mem)
	for {
		ptr := v_0
		idx := v_1
		if v_2.Op != OpARM64MOVWUreg {
			break
		}
		x := v_2.Args[0]
		mem := v_3
		v.reset(OpARM64MOVHstoreidx)
		v.AddArg4(ptr, idx, x, mem)
		return true
	}
	// match: (MOVHstoreidx ptr (ADDconst [2] idx) (SRLconst [16] w) x:(MOVHstoreidx ptr idx w mem))
	// cond: x.Uses == 1 && clobber(x)
	// result: (MOVWstoreidx ptr idx w mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64ADDconst || auxIntToInt64(v_1.AuxInt) != 2 {
			break
		}
		idx := v_1.Args[0]
		if v_2.Op != OpARM64SRLconst || auxIntToInt64(v_2.AuxInt) != 16 {
			break
		}
		w := v_2.Args[0]
		x := v_3
		if x.Op != OpARM64MOVHstoreidx {
			break
		}
		mem := x.Args[3]
		if ptr != x.Args[0] || idx != x.Args[1] || w != x.Args[2] || !(x.Uses == 1 && clobber(x)) {
			break
		}
		v.reset(OpARM64MOVWstoreidx)
		v.AddArg4(ptr, idx, w, mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVHstoreidx2 applies the generated ARM64.rules
// rewrites for OpARM64MOVHstoreidx2 (halfword store with index scaled by 2)
// and reports whether v was rewritten. The rules fold a constant index into
// the offset form (MOVHstore, offset doubled to account for the scale),
// rewrite zero stores to MOVHstorezeroidx2, and strip redundant halfword/word
// extensions of the stored value.
func rewriteValueARM64_OpARM64MOVHstoreidx2(v *Value) bool {
	v_3 := v.Args[3]
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVHstoreidx2 ptr (MOVDconst [c]) val mem)
	// cond: is32Bit(c<<1)
	// result: (MOVHstore [int32(c)<<1] ptr val mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		val := v_2
		mem := v_3
		if !(is32Bit(c << 1)) {
			break
		}
		v.reset(OpARM64MOVHstore)
		v.AuxInt = int32ToAuxInt(int32(c) << 1)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVHstoreidx2 ptr idx (MOVDconst [0]) mem)
	// result: (MOVHstorezeroidx2 ptr idx mem)
	for {
		ptr := v_0
		idx := v_1
		if v_2.Op != OpARM64MOVDconst || auxIntToInt64(v_2.AuxInt) != 0 {
			break
		}
		mem := v_3
		v.reset(OpARM64MOVHstorezeroidx2)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVHstoreidx2 ptr idx (MOVHreg x) mem)
	// result: (MOVHstoreidx2 ptr idx x mem)
	for {
		ptr := v_0
		idx := v_1
		if v_2.Op != OpARM64MOVHreg {
			break
		}
		x := v_2.Args[0]
		mem := v_3
		v.reset(OpARM64MOVHstoreidx2)
		v.AddArg4(ptr, idx, x, mem)
		return true
	}
	// match: (MOVHstoreidx2 ptr idx (MOVHUreg x) mem)
	// result: (MOVHstoreidx2 ptr idx x mem)
	for {
		ptr := v_0
		idx := v_1
		if v_2.Op != OpARM64MOVHUreg {
			break
		}
		x := v_2.Args[0]
		mem := v_3
		v.reset(OpARM64MOVHstoreidx2)
		v.AddArg4(ptr, idx, x, mem)
		return true
	}
	// match: (MOVHstoreidx2 ptr idx (MOVWreg x) mem)
	// result: (MOVHstoreidx2 ptr idx x mem)
	for {
		ptr := v_0
		idx := v_1
		if v_2.Op != OpARM64MOVWreg {
			break
		}
		x := v_2.Args[0]
		mem := v_3
		v.reset(OpARM64MOVHstoreidx2)
		v.AddArg4(ptr, idx, x, mem)
		return true
	}
	// match: (MOVHstoreidx2 ptr idx (MOVWUreg x) mem)
	// result: (MOVHstoreidx2 ptr idx x mem)
	for {
		ptr := v_0
		idx := v_1
		if v_2.Op != OpARM64MOVWUreg {
			break
		}
		x := v_2.Args[0]
		mem := v_3
		v.reset(OpARM64MOVHstoreidx2)
		v.AddArg4(ptr, idx, x, mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVHstorezero applies the generated rewrite rules
// for OpARM64MOVHstorezero (halfword zero-store). Each single-pass
// `for { ... break }` loop attempts one rule, documented by its
// match/cond/result comment; rules are tried in order and the first that
// applies rewrites v in place and returns true. Returns false if no rule fires.
func rewriteValueARM64_OpARM64MOVHstorezero(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// Rule 1: fold a constant address offset (ADDconst) into the store's
	// AuxInt offset, except through SB in shared (PIC) mode.
	// match: (MOVHstorezero [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVHstorezero [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVHstorezero)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// Rule 2: fold a MOVDaddr (symbol address) into the store by merging
	// symbols and offsets.
	// match: (MOVHstorezero [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVHstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVHstorezero)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// Rule 3: turn an offset-free store through (ADD ptr idx) into the
	// register-indexed form.
	// match: (MOVHstorezero [off] {sym} (ADD ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVHstorezeroidx ptr idx mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADD {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64MOVHstorezeroidx)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// Rule 4: (ptr + idx<<1) becomes the scaled-index form (scale 2 for
	// halfwords).
	// match: (MOVHstorezero [off] {sym} (ADDshiftLL [1] ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVHstorezeroidx2 ptr idx mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDshiftLL || auxIntToInt64(v_0.AuxInt) != 1 {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64MOVHstorezeroidx2)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// Rule 5: merge two adjacent halfword zero-stores (offsets i and j,
	// 2 bytes apart) into one word zero-store at the lower offset.
	// match: (MOVHstorezero [i] {s} ptr0 x:(MOVHstorezero [j] {s} ptr1 mem))
	// cond: x.Uses == 1 && areAdjacentOffsets(int64(i),int64(j),2) && isSamePtr(ptr0, ptr1) && clobber(x)
	// result: (MOVWstorezero [int32(min(int64(i),int64(j)))] {s} ptr0 mem)
	for {
		i := auxIntToInt32(v.AuxInt)
		s := auxToSym(v.Aux)
		ptr0 := v_0
		x := v_1
		if x.Op != OpARM64MOVHstorezero {
			break
		}
		j := auxIntToInt32(x.AuxInt)
		if auxToSym(x.Aux) != s {
			break
		}
		mem := x.Args[1]
		ptr1 := x.Args[0]
		if !(x.Uses == 1 && areAdjacentOffsets(int64(i), int64(j), 2) && isSamePtr(ptr0, ptr1) && clobber(x)) {
			break
		}
		v.reset(OpARM64MOVWstorezero)
		v.AuxInt = int32ToAuxInt(int32(min(int64(i), int64(j))))
		v.Aux = symToAux(s)
		v.AddArg2(ptr0, mem)
		return true
	}
	// Rule 6: merge with a preceding indexed halfword zero-store into a
	// word zero-store. The _i0 loop tries both argument orders of the
	// commutative ADD.
	// match: (MOVHstorezero [2] {s} (ADD ptr0 idx0) x:(MOVHstorezeroidx ptr1 idx1 mem))
	// cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)
	// result: (MOVWstorezeroidx ptr1 idx1 mem)
	for {
		if auxIntToInt32(v.AuxInt) != 2 {
			break
		}
		s := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADD {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
			ptr0 := v_0_0
			idx0 := v_0_1
			x := v_1
			if x.Op != OpARM64MOVHstorezeroidx {
				continue
			}
			mem := x.Args[2]
			ptr1 := x.Args[0]
			idx1 := x.Args[1]
			if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
				continue
			}
			v.reset(OpARM64MOVWstorezeroidx)
			v.AddArg3(ptr1, idx1, mem)
			return true
		}
		break
	}
	// Rule 7: merge with a preceding scale-2 indexed zero-store; the word
	// store has no scale-2 form here, so the index is materialized as
	// (SLLconst [1] idx1).
	// match: (MOVHstorezero [2] {s} (ADDshiftLL [1] ptr0 idx0) x:(MOVHstorezeroidx2 ptr1 idx1 mem))
	// cond: x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x)
	// result: (MOVWstorezeroidx ptr1 (SLLconst <idx1.Type> [1] idx1) mem)
	for {
		if auxIntToInt32(v.AuxInt) != 2 {
			break
		}
		s := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDshiftLL || auxIntToInt64(v_0.AuxInt) != 1 {
			break
		}
		idx0 := v_0.Args[1]
		ptr0 := v_0.Args[0]
		x := v_1
		if x.Op != OpARM64MOVHstorezeroidx2 {
			break
		}
		mem := x.Args[2]
		ptr1 := x.Args[0]
		idx1 := x.Args[1]
		if !(x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x)) {
			break
		}
		v.reset(OpARM64MOVWstorezeroidx)
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, idx1.Type)
		v0.AuxInt = int64ToAuxInt(1)
		v0.AddArg(idx1)
		v.AddArg3(ptr1, v0, mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVHstorezeroidx applies the generated rewrite
// rules for OpARM64MOVHstorezeroidx (register-indexed halfword zero-store).
// Rules are tried in order; the first match rewrites v in place and returns
// true. Returns false if none apply.
func rewriteValueARM64_OpARM64MOVHstorezeroidx(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// Rule 1: constant index folds back into an immediate-offset store.
	// match: (MOVHstorezeroidx ptr (MOVDconst [c]) mem)
	// cond: is32Bit(c)
	// result: (MOVHstorezero [int32(c)] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpARM64MOVHstorezero)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	// Rule 2: same as rule 1 with the constant in the base position
	// (base and index are interchangeable for plain indexing).
	// match: (MOVHstorezeroidx (MOVDconst [c]) idx mem)
	// cond: is32Bit(c)
	// result: (MOVHstorezero [int32(c)] idx mem)
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		idx := v_1
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpARM64MOVHstorezero)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(idx, mem)
		return true
	}
	// Rule 3: an index shifted left by 1 becomes the scale-2 indexed form.
	// match: (MOVHstorezeroidx ptr (SLLconst [1] idx) mem)
	// result: (MOVHstorezeroidx2 ptr idx mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64SLLconst || auxIntToInt64(v_1.AuxInt) != 1 {
			break
		}
		idx := v_1.Args[0]
		mem := v_2
		v.reset(OpARM64MOVHstorezeroidx2)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// Rule 4: (ADD idx idx) == idx*2, so it also becomes the scale-2 form.
	// match: (MOVHstorezeroidx ptr (ADD idx idx) mem)
	// result: (MOVHstorezeroidx2 ptr idx mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64ADD {
			break
		}
		idx := v_1.Args[1]
		if idx != v_1.Args[0] {
			break
		}
		mem := v_2
		v.reset(OpARM64MOVHstorezeroidx2)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// Rule 5: mirror of rule 3 with the shifted index in the base position.
	// match: (MOVHstorezeroidx (SLLconst [1] idx) ptr mem)
	// result: (MOVHstorezeroidx2 ptr idx mem)
	for {
		if v_0.Op != OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != 1 {
			break
		}
		idx := v_0.Args[0]
		ptr := v_1
		mem := v_2
		v.reset(OpARM64MOVHstorezeroidx2)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// Rule 6: mirror of rule 4 with the doubled index in the base position.
	// match: (MOVHstorezeroidx (ADD idx idx) ptr mem)
	// result: (MOVHstorezeroidx2 ptr idx mem)
	for {
		if v_0.Op != OpARM64ADD {
			break
		}
		idx := v_0.Args[1]
		if idx != v_0.Args[0] {
			break
		}
		ptr := v_1
		mem := v_2
		v.reset(OpARM64MOVHstorezeroidx2)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// Rule 7: merge two adjacent indexed halfword zero-stores (idx and
	// idx+2) into one indexed word zero-store.
	// match: (MOVHstorezeroidx ptr (ADDconst [2] idx) x:(MOVHstorezeroidx ptr idx mem))
	// cond: x.Uses == 1 && clobber(x)
	// result: (MOVWstorezeroidx ptr idx mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64ADDconst || auxIntToInt64(v_1.AuxInt) != 2 {
			break
		}
		idx := v_1.Args[0]
		x := v_2
		if x.Op != OpARM64MOVHstorezeroidx {
			break
		}
		mem := x.Args[2]
		if ptr != x.Args[0] || idx != x.Args[1] || !(x.Uses == 1 && clobber(x)) {
			break
		}
		v.reset(OpARM64MOVWstorezeroidx)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVHstorezeroidx2 applies the single generated
// rewrite rule for OpARM64MOVHstorezeroidx2 (scale-2 indexed halfword
// zero-store). Returns true if v was rewritten.
func rewriteValueARM64_OpARM64MOVHstorezeroidx2(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// A constant index folds into an immediate-offset store; the scale is
	// applied by shifting the constant (c<<1, guarded by is32Bit).
	// match: (MOVHstorezeroidx2 ptr (MOVDconst [c]) mem)
	// cond: is32Bit(c<<1)
	// result: (MOVHstorezero [int32(c<<1)] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c << 1)) {
			break
		}
		v.reset(OpARM64MOVHstorezero)
		v.AuxInt = int32ToAuxInt(int32(c << 1))
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVQstorezero applies the generated rewrite rules
// for OpARM64MOVQstorezero: both rules fold address arithmetic
// (ADDconst / MOVDaddr) into the store's AuxInt offset and symbol.
// Returns true if v was rewritten.
func rewriteValueARM64_OpARM64MOVQstorezero(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// Rule 1: fold a constant offset; disallowed through SB when compiling
	// in shared (PIC) mode.
	// match: (MOVQstorezero [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVQstorezero [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVQstorezero)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// Rule 2: fold a symbol address by merging symbols and offsets.
	// match: (MOVQstorezero [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVQstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVQstorezero)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVWUload applies the generated rewrite rules for
// OpARM64MOVWUload (unsigned 32-bit load). Rules are tried in order; the
// first match rewrites v in place and returns true. Returns false if none
// apply.
func rewriteValueARM64_OpARM64MOVWUload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// Rule 1: a load that reads back a float store at the same address
	// becomes a direct FP->GP register move, skipping memory.
	// match: (MOVWUload [off] {sym} ptr (FMOVSstore [off] {sym} ptr val _))
	// result: (FMOVSfpgp val)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpARM64FMOVSstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
			break
		}
		val := v_1.Args[1]
		if ptr != v_1.Args[0] {
			break
		}
		v.reset(OpARM64FMOVSfpgp)
		v.AddArg(val)
		return true
	}
	// Rule 2: fold a constant address offset into the load's AuxInt.
	// match: (MOVWUload [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVWUload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVWUload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// Rule 3: offset-free load through (ADD ptr idx) becomes the
	// register-indexed form.
	// match: (MOVWUload [off] {sym} (ADD ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVWUloadidx ptr idx mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADD {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64MOVWUloadidx)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// Rule 4: (ptr + idx<<2) becomes the scale-4 indexed form (scale 4 for
	// words).
	// match: (MOVWUload [off] {sym} (ADDshiftLL [2] ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVWUloadidx4 ptr idx mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDshiftLL || auxIntToInt64(v_0.AuxInt) != 2 {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64MOVWUloadidx4)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// Rule 5: fold a symbol address by merging symbols and offsets.
	// match: (MOVWUload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVWUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVWUload)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// Rule 6: a load that reads back a zero-store at the same address is a
	// constant zero.
	// match: (MOVWUload [off] {sym} ptr (MOVWstorezero [off2] {sym2} ptr2 _))
	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
	// result: (MOVDconst [0])
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpARM64MOVWstorezero {
			break
		}
		off2 := auxIntToInt32(v_1.AuxInt)
		sym2 := auxToSym(v_1.Aux)
		ptr2 := v_1.Args[0]
		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// Rule 7: a load from a read-only symbol is resolved at compile time by
	// reading the data directly.
	// match: (MOVWUload [off] {sym} (SB) _)
	// cond: symIsRO(sym)
	// result: (MOVDconst [int64(read32(sym, int64(off), config.ctxt.Arch.ByteOrder))])
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpSB || !(symIsRO(sym)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(read32(sym, int64(off), config.ctxt.Arch.ByteOrder)))
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVWUloadidx applies the generated rewrite rules
// for OpARM64MOVWUloadidx (register-indexed unsigned 32-bit load). Returns
// true if v was rewritten.
func rewriteValueARM64_OpARM64MOVWUloadidx(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// Rule 1: constant index folds back into an immediate-offset load.
	// match: (MOVWUloadidx ptr (MOVDconst [c]) mem)
	// cond: is32Bit(c)
	// result: (MOVWUload [int32(c)] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpARM64MOVWUload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	// Rule 2: same with the constant in the base position.
	// match: (MOVWUloadidx (MOVDconst [c]) ptr mem)
	// cond: is32Bit(c)
	// result: (MOVWUload [int32(c)] ptr mem)
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		ptr := v_1
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpARM64MOVWUload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	// Rule 3: an index shifted left by 2 becomes the scale-4 indexed form.
	// match: (MOVWUloadidx ptr (SLLconst [2] idx) mem)
	// result: (MOVWUloadidx4 ptr idx mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64SLLconst || auxIntToInt64(v_1.AuxInt) != 2 {
			break
		}
		idx := v_1.Args[0]
		mem := v_2
		v.reset(OpARM64MOVWUloadidx4)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// Rule 4: mirror of rule 3 with the shifted index in the base position.
	// match: (MOVWUloadidx (SLLconst [2] idx) ptr mem)
	// result: (MOVWUloadidx4 ptr idx mem)
	for {
		if v_0.Op != OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != 2 {
			break
		}
		idx := v_0.Args[0]
		ptr := v_1
		mem := v_2
		v.reset(OpARM64MOVWUloadidx4)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// Rule 5: reading back an indexed zero-store at the same (base, index)
	// — in either order — is a constant zero.
	// match: (MOVWUloadidx ptr idx (MOVWstorezeroidx ptr2 idx2 _))
	// cond: (isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2))
	// result: (MOVDconst [0])
	for {
		ptr := v_0
		idx := v_1
		if v_2.Op != OpARM64MOVWstorezeroidx {
			break
		}
		idx2 := v_2.Args[1]
		ptr2 := v_2.Args[0]
		if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVWUloadidx4 applies the generated rewrite rules
// for OpARM64MOVWUloadidx4 (scale-4 indexed unsigned 32-bit load). Returns
// true if v was rewritten.
func rewriteValueARM64_OpARM64MOVWUloadidx4(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// Rule 1: constant index folds into an immediate-offset load; the
	// scale is applied via the <<2 (overflow guarded by is32Bit(c<<2)).
	// match: (MOVWUloadidx4 ptr (MOVDconst [c]) mem)
	// cond: is32Bit(c<<2)
	// result: (MOVWUload [int32(c)<<2] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c << 2)) {
			break
		}
		v.reset(OpARM64MOVWUload)
		v.AuxInt = int32ToAuxInt(int32(c) << 2)
		v.AddArg2(ptr, mem)
		return true
	}
	// Rule 2: reading back a scale-4 indexed zero-store at the same
	// (base, index) is a constant zero.
	// match: (MOVWUloadidx4 ptr idx (MOVWstorezeroidx4 ptr2 idx2 _))
	// cond: isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2)
	// result: (MOVDconst [0])
	for {
		ptr := v_0
		idx := v_1
		if v_2.Op != OpARM64MOVWstorezeroidx4 {
			break
		}
		idx2 := v_2.Args[1]
		ptr2 := v_2.Args[0]
		if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVWUreg applies the generated rewrite rules for
// OpARM64MOVWUreg (zero-extend of the low 32 bits). The first group of
// rules removes the extension when the operand is already zero-extended
// (unsigned narrow loads and unsigned reg-reg extensions), replacing it
// with a plain MOVDreg. Later rules fold the extension into masks, shifts,
// and bitfield ops. Returns true if v was rewritten.
func rewriteValueARM64_OpARM64MOVWUreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVWUreg x:(MOVBUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBUload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVHUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHUload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVWUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVWUload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVBUloadidx _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBUloadidx {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVHUloadidx _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHUloadidx {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVWUloadidx _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVWUloadidx {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVHUloadidx2 _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHUloadidx2 {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVWUloadidx4 _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVWUloadidx4 {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVBUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBUreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVHUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHUreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVWUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVWUreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// Fold the zero-extension into an AND mask by clearing the mask's
	// upper 32 bits.
	// match: (MOVWUreg (ANDconst [c] x))
	// result: (ANDconst [c&(1<<32-1)] x)
	for {
		if v_0.Op != OpARM64ANDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		v.reset(OpARM64ANDconst)
		v.AuxInt = int64ToAuxInt(c & (1<<32 - 1))
		v.AddArg(x)
		return true
	}
	// Constant-fold: zero-extending a constant truncates it to uint32.
	// match: (MOVWUreg (MOVDconst [c]))
	// result: (MOVDconst [int64(uint32(c))])
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(uint32(c)))
		return true
	}
	// A left shift of >= 32 leaves nothing in the low 32 bits.
	// match: (MOVWUreg (SLLconst [lc] x))
	// cond: lc >= 32
	// result: (MOVDconst [0])
	for {
		if v_0.Op != OpARM64SLLconst {
			break
		}
		lc := auxIntToInt64(v_0.AuxInt)
		if !(lc >= 32) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// Shift-then-extend becomes a single unsigned bitfield insert-in-zero.
	// match: (MOVWUreg (SLLconst [lc] x))
	// cond: lc < 32
	// result: (UBFIZ [armBFAuxInt(lc, 32-lc)] x)
	for {
		if v_0.Op != OpARM64SLLconst {
			break
		}
		lc := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(lc < 32) {
			break
		}
		v.reset(OpARM64UBFIZ)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(lc, 32-lc))
		v.AddArg(x)
		return true
	}
	// Right-shift-then-extend becomes a single unsigned bitfield extract.
	// match: (MOVWUreg (SRLconst [rc] x))
	// cond: rc < 32
	// result: (UBFX [armBFAuxInt(rc, 32)] x)
	for {
		if v_0.Op != OpARM64SRLconst {
			break
		}
		rc := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(rc < 32) {
			break
		}
		v.reset(OpARM64UBFX)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(rc, 32))
		v.AddArg(x)
		return true
	}
	// A UBFX producing at most 32 bits is already zero-extended; the outer
	// extension is dropped.
	// match: (MOVWUreg (UBFX [bfc] x))
	// cond: bfc.getARM64BFwidth() <= 32
	// result: (UBFX [bfc] x)
	for {
		if v_0.Op != OpARM64UBFX {
			break
		}
		bfc := auxIntToArm64BitField(v_0.AuxInt)
		x := v_0.Args[0]
		if !(bfc.getARM64BFwidth() <= 32) {
			break
		}
		v.reset(OpARM64UBFX)
		v.AuxInt = arm64BitFieldToAuxInt(bfc)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVWload applies the generated rewrite rules for
// OpARM64MOVWload (signed 32-bit load). Rules are tried in order; the first
// match rewrites v in place and returns true. Returns false if none apply.
func rewriteValueARM64_OpARM64MOVWload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// Rule 1: fold a constant address offset into the load's AuxInt,
	// except through SB in shared (PIC) mode.
	// match: (MOVWload [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVWload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVWload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// Rule 2: offset-free load through (ADD ptr idx) becomes the
	// register-indexed form.
	// match: (MOVWload [off] {sym} (ADD ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVWloadidx ptr idx mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADD {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64MOVWloadidx)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// Rule 3: (ptr + idx<<2) becomes the scale-4 indexed form.
	// match: (MOVWload [off] {sym} (ADDshiftLL [2] ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVWloadidx4 ptr idx mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDshiftLL || auxIntToInt64(v_0.AuxInt) != 2 {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64MOVWloadidx4)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// Rule 4: fold a symbol address by merging symbols and offsets.
	// match: (MOVWload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVWload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVWload)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// Rule 5: a load that reads back a zero-store at the same address is a
	// constant zero.
	// match: (MOVWload [off] {sym} ptr (MOVWstorezero [off2] {sym2} ptr2 _))
	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
	// result: (MOVDconst [0])
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpARM64MOVWstorezero {
			break
		}
		off2 := auxIntToInt32(v_1.AuxInt)
		sym2 := auxToSym(v_1.Aux)
		ptr2 := v_1.Args[0]
		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVWloadidx applies the generated rewrite rules for
// OpARM64MOVWloadidx (register-indexed signed 32-bit load). Rules are tried in
// order; each rule is a single-pass for-loop where break abandons that rule.
// It reports whether a rule matched and rewrote v in place.
func rewriteValueARM64_OpARM64MOVWloadidx(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVWloadidx ptr (MOVDconst [c]) mem)
	// cond: is32Bit(c)
	// result: (MOVWload [int32(c)] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpARM64MOVWload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVWloadidx (MOVDconst [c]) ptr mem)
	// cond: is32Bit(c)
	// result: (MOVWload [int32(c)] ptr mem)
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		ptr := v_1
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpARM64MOVWload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVWloadidx ptr (SLLconst [2] idx) mem)
	// result: (MOVWloadidx4 ptr idx mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64SLLconst || auxIntToInt64(v_1.AuxInt) != 2 {
			break
		}
		idx := v_1.Args[0]
		mem := v_2
		v.reset(OpARM64MOVWloadidx4)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVWloadidx (SLLconst [2] idx) ptr mem)
	// result: (MOVWloadidx4 ptr idx mem)
	for {
		if v_0.Op != OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != 2 {
			break
		}
		idx := v_0.Args[0]
		ptr := v_1
		mem := v_2
		v.reset(OpARM64MOVWloadidx4)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVWloadidx ptr idx (MOVWstorezeroidx ptr2 idx2 _))
	// cond: (isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2))
	// result: (MOVDconst [0])
	for {
		ptr := v_0
		idx := v_1
		if v_2.Op != OpARM64MOVWstorezeroidx {
			break
		}
		idx2 := v_2.Args[1]
		ptr2 := v_2.Args[0]
		if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVWloadidx4 applies the generated rewrite rules
// for OpARM64MOVWloadidx4 (indexed 32-bit load with the index scaled by 4).
// Rules are tried in order; each is a single-pass for-loop where break
// abandons that rule. It reports whether a rule matched and rewrote v.
func rewriteValueARM64_OpARM64MOVWloadidx4(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVWloadidx4 ptr (MOVDconst [c]) mem)
	// cond: is32Bit(c<<2)
	// result: (MOVWload [int32(c)<<2] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c << 2)) {
			break
		}
		v.reset(OpARM64MOVWload)
		v.AuxInt = int32ToAuxInt(int32(c) << 2)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVWloadidx4 ptr idx (MOVWstorezeroidx4 ptr2 idx2 _))
	// cond: isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2)
	// result: (MOVDconst [0])
	for {
		ptr := v_0
		idx := v_1
		if v_2.Op != OpARM64MOVWstorezeroidx4 {
			break
		}
		idx2 := v_2.Args[1]
		ptr2 := v_2.Args[0]
		if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVWreg applies the generated rewrite rules for
// OpARM64MOVWreg (sign-extend the low 32 bits to 64 bits). Most rules below
// drop the explicit extension (emitting MOVDreg) when the argument is already
// produced sign- or zero-extended by a narrower load or extension op; the
// remaining rules fold the extension into constants and bitfield ops. Rules
// are tried in order; each is a single-pass for-loop where break abandons
// that rule. It reports whether a rule matched and rewrote v.
func rewriteValueARM64_OpARM64MOVWreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVWreg x:(MOVBload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVBUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBUload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHUload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVWload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVWload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVBloadidx _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBloadidx {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVBUloadidx _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBUloadidx {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHloadidx _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHloadidx {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHUloadidx _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHUloadidx {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVWloadidx _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVWloadidx {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHloadidx2 _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHloadidx2 {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHUloadidx2 _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHUloadidx2 {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVWloadidx4 _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVWloadidx4 {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVBreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVBUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBUreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVWreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVWreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg (MOVDconst [c]))
	// result: (MOVDconst [int64(int32(c))])
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(int32(c)))
		return true
	}
	// match: (MOVWreg (SLLconst [lc] x))
	// cond: lc < 32
	// result: (SBFIZ [armBFAuxInt(lc, 32-lc)] x)
	for {
		if v_0.Op != OpARM64SLLconst {
			break
		}
		lc := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(lc < 32) {
			break
		}
		v.reset(OpARM64SBFIZ)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(lc, 32-lc))
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg (SBFX [bfc] x))
	// cond: bfc.getARM64BFwidth() <= 32
	// result: (SBFX [bfc] x)
	for {
		if v_0.Op != OpARM64SBFX {
			break
		}
		bfc := auxIntToArm64BitField(v_0.AuxInt)
		x := v_0.Args[0]
		if !(bfc.getARM64BFwidth() <= 32) {
			break
		}
		v.reset(OpARM64SBFX)
		v.AuxInt = arm64BitFieldToAuxInt(bfc)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVWstore applies the generated rewrite rules for
// OpARM64MOVWstore (32-bit store). The rules fold address arithmetic into the
// store's offset/sym, convert to indexed or zero-store forms, strip redundant
// extensions of the stored value, and merge adjacent 32-bit stores into a
// single 64-bit store. Rules are tried in order; each is a single-pass
// for-loop where break (or continue, inside commutative-argument loops)
// abandons that rule. It reports whether a rule matched and rewrote v.
func rewriteValueARM64_OpARM64MOVWstore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVWstore [off] {sym} ptr (FMOVSfpgp val) mem)
	// result: (FMOVSstore [off] {sym} ptr val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpARM64FMOVSfpgp {
			break
		}
		val := v_1.Args[0]
		mem := v_2
		v.reset(OpARM64FMOVSstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVWstore [off1] {sym} (ADDconst [off2] ptr) val mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVWstore [off1+int32(off2)] {sym} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVWstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVWstore [off] {sym} (ADD ptr idx) val mem)
	// cond: off == 0 && sym == nil
	// result: (MOVWstoreidx ptr idx val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADD {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64MOVWstoreidx)
		v.AddArg4(ptr, idx, val, mem)
		return true
	}
	// match: (MOVWstore [off] {sym} (ADDshiftLL [2] ptr idx) val mem)
	// cond: off == 0 && sym == nil
	// result: (MOVWstoreidx4 ptr idx val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDshiftLL || auxIntToInt64(v_0.AuxInt) != 2 {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64MOVWstoreidx4)
		v.AddArg4(ptr, idx, val, mem)
		return true
	}
	// match: (MOVWstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVWstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVWstore)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVWstore [off] {sym} ptr (MOVDconst [0]) mem)
	// result: (MOVWstorezero [off] {sym} ptr mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		mem := v_2
		v.reset(OpARM64MOVWstorezero)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVWstore [off] {sym} ptr (MOVWreg x) mem)
	// result: (MOVWstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpARM64MOVWreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpARM64MOVWstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVWstore [off] {sym} ptr (MOVWUreg x) mem)
	// result: (MOVWstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpARM64MOVWUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpARM64MOVWstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVWstore [i] {s} ptr0 (SRLconst [32] w) x:(MOVWstore [i-4] {s} ptr1 w mem))
	// cond: x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)
	// result: (MOVDstore [i-4] {s} ptr0 w mem)
	for {
		i := auxIntToInt32(v.AuxInt)
		s := auxToSym(v.Aux)
		ptr0 := v_0
		if v_1.Op != OpARM64SRLconst || auxIntToInt64(v_1.AuxInt) != 32 {
			break
		}
		w := v_1.Args[0]
		x := v_2
		if x.Op != OpARM64MOVWstore || auxIntToInt32(x.AuxInt) != i-4 || auxToSym(x.Aux) != s {
			break
		}
		mem := x.Args[2]
		ptr1 := x.Args[0]
		if w != x.Args[1] || !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) {
			break
		}
		v.reset(OpARM64MOVDstore)
		v.AuxInt = int32ToAuxInt(i - 4)
		v.Aux = symToAux(s)
		v.AddArg3(ptr0, w, mem)
		return true
	}
	// match: (MOVWstore [4] {s} (ADD ptr0 idx0) (SRLconst [32] w) x:(MOVWstoreidx ptr1 idx1 w mem))
	// cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)
	// result: (MOVDstoreidx ptr1 idx1 w mem)
	for {
		if auxIntToInt32(v.AuxInt) != 4 {
			break
		}
		s := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADD {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		// ADD is commutative: try both argument orders.
		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
			ptr0 := v_0_0
			idx0 := v_0_1
			if v_1.Op != OpARM64SRLconst || auxIntToInt64(v_1.AuxInt) != 32 {
				continue
			}
			w := v_1.Args[0]
			x := v_2
			if x.Op != OpARM64MOVWstoreidx {
				continue
			}
			mem := x.Args[3]
			ptr1 := x.Args[0]
			idx1 := x.Args[1]
			if w != x.Args[2] || !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
				continue
			}
			v.reset(OpARM64MOVDstoreidx)
			v.AddArg4(ptr1, idx1, w, mem)
			return true
		}
		break
	}
	// match: (MOVWstore [4] {s} (ADDshiftLL [2] ptr0 idx0) (SRLconst [32] w) x:(MOVWstoreidx4 ptr1 idx1 w mem))
	// cond: x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x)
	// result: (MOVDstoreidx ptr1 (SLLconst <idx1.Type> [2] idx1) w mem)
	for {
		if auxIntToInt32(v.AuxInt) != 4 {
			break
		}
		s := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDshiftLL || auxIntToInt64(v_0.AuxInt) != 2 {
			break
		}
		idx0 := v_0.Args[1]
		ptr0 := v_0.Args[0]
		if v_1.Op != OpARM64SRLconst || auxIntToInt64(v_1.AuxInt) != 32 {
			break
		}
		w := v_1.Args[0]
		x := v_2
		if x.Op != OpARM64MOVWstoreidx4 {
			break
		}
		mem := x.Args[3]
		ptr1 := x.Args[0]
		idx1 := x.Args[1]
		if w != x.Args[2] || !(x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x)) {
			break
		}
		v.reset(OpARM64MOVDstoreidx)
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, idx1.Type)
		v0.AuxInt = int64ToAuxInt(2)
		v0.AddArg(idx1)
		v.AddArg4(ptr1, v0, w, mem)
		return true
	}
	// match: (MOVWstore [i] {s} ptr0 (SRLconst [j] w) x:(MOVWstore [i-4] {s} ptr1 w0:(SRLconst [j-32] w) mem))
	// cond: x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)
	// result: (MOVDstore [i-4] {s} ptr0 w0 mem)
	for {
		i := auxIntToInt32(v.AuxInt)
		s := auxToSym(v.Aux)
		ptr0 := v_0
		if v_1.Op != OpARM64SRLconst {
			break
		}
		j := auxIntToInt64(v_1.AuxInt)
		w := v_1.Args[0]
		x := v_2
		if x.Op != OpARM64MOVWstore || auxIntToInt32(x.AuxInt) != i-4 || auxToSym(x.Aux) != s {
			break
		}
		mem := x.Args[2]
		ptr1 := x.Args[0]
		w0 := x.Args[1]
		if w0.Op != OpARM64SRLconst || auxIntToInt64(w0.AuxInt) != j-32 || w != w0.Args[0] || !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) {
			break
		}
		v.reset(OpARM64MOVDstore)
		v.AuxInt = int32ToAuxInt(i - 4)
		v.Aux = symToAux(s)
		v.AddArg3(ptr0, w0, mem)
		return true
	}
	// match: (MOVWstore [4] {s} (ADD ptr0 idx0) (SRLconst [j] w) x:(MOVWstoreidx ptr1 idx1 w0:(SRLconst [j-32] w) mem))
	// cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)
	// result: (MOVDstoreidx ptr1 idx1 w0 mem)
	for {
		if auxIntToInt32(v.AuxInt) != 4 {
			break
		}
		s := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADD {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		// ADD is commutative: try both argument orders.
		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
			ptr0 := v_0_0
			idx0 := v_0_1
			if v_1.Op != OpARM64SRLconst {
				continue
			}
			j := auxIntToInt64(v_1.AuxInt)
			w := v_1.Args[0]
			x := v_2
			if x.Op != OpARM64MOVWstoreidx {
				continue
			}
			mem := x.Args[3]
			ptr1 := x.Args[0]
			idx1 := x.Args[1]
			w0 := x.Args[2]
			if w0.Op != OpARM64SRLconst || auxIntToInt64(w0.AuxInt) != j-32 || w != w0.Args[0] || !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
				continue
			}
			v.reset(OpARM64MOVDstoreidx)
			v.AddArg4(ptr1, idx1, w0, mem)
			return true
		}
		break
	}
	// match: (MOVWstore [4] {s} (ADDshiftLL [2] ptr0 idx0) (SRLconst [j] w) x:(MOVWstoreidx4 ptr1 idx1 w0:(SRLconst [j-32] w) mem))
	// cond: x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x)
	// result: (MOVDstoreidx ptr1 (SLLconst <idx1.Type> [2] idx1) w0 mem)
	for {
		if auxIntToInt32(v.AuxInt) != 4 {
			break
		}
		s := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDshiftLL || auxIntToInt64(v_0.AuxInt) != 2 {
			break
		}
		idx0 := v_0.Args[1]
		ptr0 := v_0.Args[0]
		if v_1.Op != OpARM64SRLconst {
			break
		}
		j := auxIntToInt64(v_1.AuxInt)
		w := v_1.Args[0]
		x := v_2
		if x.Op != OpARM64MOVWstoreidx4 {
			break
		}
		mem := x.Args[3]
		ptr1 := x.Args[0]
		idx1 := x.Args[1]
		w0 := x.Args[2]
		if w0.Op != OpARM64SRLconst || auxIntToInt64(w0.AuxInt) != j-32 || w != w0.Args[0] || !(x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x)) {
			break
		}
		v.reset(OpARM64MOVDstoreidx)
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, idx1.Type)
		v0.AuxInt = int64ToAuxInt(2)
		v0.AddArg(idx1)
		v.AddArg4(ptr1, v0, w0, mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVWstoreidx applies the generated rewrite rules
// for OpARM64MOVWstoreidx (register-indexed 32-bit store): fold a constant
// index into an offset store, convert a shifted index to the scaled
// MOVWstoreidx4 form, turn a zero store into MOVWstorezeroidx, strip redundant
// extensions of the stored value, and merge an adjacent pair into a 64-bit
// indexed store. Rules are tried in order; each is a single-pass for-loop
// where break abandons that rule. It reports whether a rule matched and
// rewrote v.
func rewriteValueARM64_OpARM64MOVWstoreidx(v *Value) bool {
	v_3 := v.Args[3]
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVWstoreidx ptr (MOVDconst [c]) val mem)
	// cond: is32Bit(c)
	// result: (MOVWstore [int32(c)] ptr val mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		val := v_2
		mem := v_3
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpARM64MOVWstore)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVWstoreidx (MOVDconst [c]) idx val mem)
	// cond: is32Bit(c)
	// result: (MOVWstore [int32(c)] idx val mem)
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		idx := v_1
		val := v_2
		mem := v_3
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpARM64MOVWstore)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg3(idx, val, mem)
		return true
	}
	// match: (MOVWstoreidx ptr (SLLconst [2] idx) val mem)
	// result: (MOVWstoreidx4 ptr idx val mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64SLLconst || auxIntToInt64(v_1.AuxInt) != 2 {
			break
		}
		idx := v_1.Args[0]
		val := v_2
		mem := v_3
		v.reset(OpARM64MOVWstoreidx4)
		v.AddArg4(ptr, idx, val, mem)
		return true
	}
	// match: (MOVWstoreidx (SLLconst [2] idx) ptr val mem)
	// result: (MOVWstoreidx4 ptr idx val mem)
	for {
		if v_0.Op != OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != 2 {
			break
		}
		idx := v_0.Args[0]
		ptr := v_1
		val := v_2
		mem := v_3
		v.reset(OpARM64MOVWstoreidx4)
		v.AddArg4(ptr, idx, val, mem)
		return true
	}
	// match: (MOVWstoreidx ptr idx (MOVDconst [0]) mem)
	// result: (MOVWstorezeroidx ptr idx mem)
	for {
		ptr := v_0
		idx := v_1
		if v_2.Op != OpARM64MOVDconst || auxIntToInt64(v_2.AuxInt) != 0 {
			break
		}
		mem := v_3
		v.reset(OpARM64MOVWstorezeroidx)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVWstoreidx ptr idx (MOVWreg x) mem)
	// result: (MOVWstoreidx ptr idx x mem)
	for {
		ptr := v_0
		idx := v_1
		if v_2.Op != OpARM64MOVWreg {
			break
		}
		x := v_2.Args[0]
		mem := v_3
		v.reset(OpARM64MOVWstoreidx)
		v.AddArg4(ptr, idx, x, mem)
		return true
	}
	// match: (MOVWstoreidx ptr idx (MOVWUreg x) mem)
	// result: (MOVWstoreidx ptr idx x mem)
	for {
		ptr := v_0
		idx := v_1
		if v_2.Op != OpARM64MOVWUreg {
			break
		}
		x := v_2.Args[0]
		mem := v_3
		v.reset(OpARM64MOVWstoreidx)
		v.AddArg4(ptr, idx, x, mem)
		return true
	}
	// match: (MOVWstoreidx ptr (ADDconst [4] idx) (SRLconst [32] w) x:(MOVWstoreidx ptr idx w mem))
	// cond: x.Uses == 1 && clobber(x)
	// result: (MOVDstoreidx ptr idx w mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64ADDconst || auxIntToInt64(v_1.AuxInt) != 4 {
			break
		}
		idx := v_1.Args[0]
		if v_2.Op != OpARM64SRLconst || auxIntToInt64(v_2.AuxInt) != 32 {
			break
		}
		w := v_2.Args[0]
		x := v_3
		if x.Op != OpARM64MOVWstoreidx {
			break
		}
		mem := x.Args[3]
		if ptr != x.Args[0] || idx != x.Args[1] || w != x.Args[2] || !(x.Uses == 1 && clobber(x)) {
			break
		}
		v.reset(OpARM64MOVDstoreidx)
		v.AddArg4(ptr, idx, w, mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVWstoreidx4 applies the generated rewrite rules
// for OpARM64MOVWstoreidx4 (indexed 32-bit store with the index scaled by 4):
// fold a constant index into an offset store, convert a zero store to
// MOVWstorezeroidx4, and strip redundant extensions of the stored value.
// Rules are tried in order; each is a single-pass for-loop where break
// abandons that rule. It reports whether a rule matched and rewrote v.
func rewriteValueARM64_OpARM64MOVWstoreidx4(v *Value) bool {
	v_3 := v.Args[3]
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVWstoreidx4 ptr (MOVDconst [c]) val mem)
	// cond: is32Bit(c<<2)
	// result: (MOVWstore [int32(c)<<2] ptr val mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		val := v_2
		mem := v_3
		if !(is32Bit(c << 2)) {
			break
		}
		v.reset(OpARM64MOVWstore)
		v.AuxInt = int32ToAuxInt(int32(c) << 2)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVWstoreidx4 ptr idx (MOVDconst [0]) mem)
	// result: (MOVWstorezeroidx4 ptr idx mem)
	for {
		ptr := v_0
		idx := v_1
		if v_2.Op != OpARM64MOVDconst || auxIntToInt64(v_2.AuxInt) != 0 {
			break
		}
		mem := v_3
		v.reset(OpARM64MOVWstorezeroidx4)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVWstoreidx4 ptr idx (MOVWreg x) mem)
	// result: (MOVWstoreidx4 ptr idx x mem)
	for {
		ptr := v_0
		idx := v_1
		if v_2.Op != OpARM64MOVWreg {
			break
		}
		x := v_2.Args[0]
		mem := v_3
		v.reset(OpARM64MOVWstoreidx4)
		v.AddArg4(ptr, idx, x, mem)
		return true
	}
	// match: (MOVWstoreidx4 ptr idx (MOVWUreg x) mem)
	// result: (MOVWstoreidx4 ptr idx x mem)
	for {
		ptr := v_0
		idx := v_1
		if v_2.Op != OpARM64MOVWUreg {
			break
		}
		x := v_2.Args[0]
		mem := v_3
		v.reset(OpARM64MOVWstoreidx4)
		v.AddArg4(ptr, idx, x, mem)
		return true
	}
	return false
}
 14821  func rewriteValueARM64_OpARM64MOVWstorezero(v *Value) bool {
 14822  	v_1 := v.Args[1]
 14823  	v_0 := v.Args[0]
 14824  	b := v.Block
 14825  	config := b.Func.Config
 14826  	// match: (MOVWstorezero [off1] {sym} (ADDconst [off2] ptr) mem)
 14827  	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
 14828  	// result: (MOVWstorezero [off1+int32(off2)] {sym} ptr mem)
 14829  	for {
 14830  		off1 := auxIntToInt32(v.AuxInt)
 14831  		sym := auxToSym(v.Aux)
 14832  		if v_0.Op != OpARM64ADDconst {
 14833  			break
 14834  		}
 14835  		off2 := auxIntToInt64(v_0.AuxInt)
 14836  		ptr := v_0.Args[0]
 14837  		mem := v_1
 14838  		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
 14839  			break
 14840  		}
 14841  		v.reset(OpARM64MOVWstorezero)
 14842  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
 14843  		v.Aux = symToAux(sym)
 14844  		v.AddArg2(ptr, mem)
 14845  		return true
 14846  	}
 14847  	// match: (MOVWstorezero [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
 14848  	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
 14849  	// result: (MOVWstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
 14850  	for {
 14851  		off1 := auxIntToInt32(v.AuxInt)
 14852  		sym1 := auxToSym(v.Aux)
 14853  		if v_0.Op != OpARM64MOVDaddr {
 14854  			break
 14855  		}
 14856  		off2 := auxIntToInt32(v_0.AuxInt)
 14857  		sym2 := auxToSym(v_0.Aux)
 14858  		ptr := v_0.Args[0]
 14859  		mem := v_1
 14860  		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
 14861  			break
 14862  		}
 14863  		v.reset(OpARM64MOVWstorezero)
 14864  		v.AuxInt = int32ToAuxInt(off1 + off2)
 14865  		v.Aux = symToAux(mergeSym(sym1, sym2))
 14866  		v.AddArg2(ptr, mem)
 14867  		return true
 14868  	}
 14869  	// match: (MOVWstorezero [off] {sym} (ADD ptr idx) mem)
 14870  	// cond: off == 0 && sym == nil
 14871  	// result: (MOVWstorezeroidx ptr idx mem)
 14872  	for {
 14873  		off := auxIntToInt32(v.AuxInt)
 14874  		sym := auxToSym(v.Aux)
 14875  		if v_0.Op != OpARM64ADD {
 14876  			break
 14877  		}
 14878  		idx := v_0.Args[1]
 14879  		ptr := v_0.Args[0]
 14880  		mem := v_1
 14881  		if !(off == 0 && sym == nil) {
 14882  			break
 14883  		}
 14884  		v.reset(OpARM64MOVWstorezeroidx)
 14885  		v.AddArg3(ptr, idx, mem)
 14886  		return true
 14887  	}
 14888  	// match: (MOVWstorezero [off] {sym} (ADDshiftLL [2] ptr idx) mem)
 14889  	// cond: off == 0 && sym == nil
 14890  	// result: (MOVWstorezeroidx4 ptr idx mem)
 14891  	for {
 14892  		off := auxIntToInt32(v.AuxInt)
 14893  		sym := auxToSym(v.Aux)
 14894  		if v_0.Op != OpARM64ADDshiftLL || auxIntToInt64(v_0.AuxInt) != 2 {
 14895  			break
 14896  		}
 14897  		idx := v_0.Args[1]
 14898  		ptr := v_0.Args[0]
 14899  		mem := v_1
 14900  		if !(off == 0 && sym == nil) {
 14901  			break
 14902  		}
 14903  		v.reset(OpARM64MOVWstorezeroidx4)
 14904  		v.AddArg3(ptr, idx, mem)
 14905  		return true
 14906  	}
 14907  	// match: (MOVWstorezero [i] {s} ptr0 x:(MOVWstorezero [j] {s} ptr1 mem))
 14908  	// cond: x.Uses == 1 && areAdjacentOffsets(int64(i),int64(j),4) && isSamePtr(ptr0, ptr1) && clobber(x)
 14909  	// result: (MOVDstorezero [int32(min(int64(i),int64(j)))] {s} ptr0 mem)
 14910  	for {
 14911  		i := auxIntToInt32(v.AuxInt)
 14912  		s := auxToSym(v.Aux)
 14913  		ptr0 := v_0
 14914  		x := v_1
 14915  		if x.Op != OpARM64MOVWstorezero {
 14916  			break
 14917  		}
 14918  		j := auxIntToInt32(x.AuxInt)
 14919  		if auxToSym(x.Aux) != s {
 14920  			break
 14921  		}
 14922  		mem := x.Args[1]
 14923  		ptr1 := x.Args[0]
 14924  		if !(x.Uses == 1 && areAdjacentOffsets(int64(i), int64(j), 4) && isSamePtr(ptr0, ptr1) && clobber(x)) {
 14925  			break
 14926  		}
 14927  		v.reset(OpARM64MOVDstorezero)
 14928  		v.AuxInt = int32ToAuxInt(int32(min(int64(i), int64(j))))
 14929  		v.Aux = symToAux(s)
 14930  		v.AddArg2(ptr0, mem)
 14931  		return true
 14932  	}
 14933  	// match: (MOVWstorezero [4] {s} (ADD ptr0 idx0) x:(MOVWstorezeroidx ptr1 idx1 mem))
 14934  	// cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)
 14935  	// result: (MOVDstorezeroidx ptr1 idx1 mem)
 14936  	for {
 14937  		if auxIntToInt32(v.AuxInt) != 4 {
 14938  			break
 14939  		}
 14940  		s := auxToSym(v.Aux)
 14941  		if v_0.Op != OpARM64ADD {
 14942  			break
 14943  		}
 14944  		_ = v_0.Args[1]
 14945  		v_0_0 := v_0.Args[0]
 14946  		v_0_1 := v_0.Args[1]
 14947  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 14948  			ptr0 := v_0_0
 14949  			idx0 := v_0_1
 14950  			x := v_1
 14951  			if x.Op != OpARM64MOVWstorezeroidx {
 14952  				continue
 14953  			}
 14954  			mem := x.Args[2]
 14955  			ptr1 := x.Args[0]
 14956  			idx1 := x.Args[1]
 14957  			if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
 14958  				continue
 14959  			}
 14960  			v.reset(OpARM64MOVDstorezeroidx)
 14961  			v.AddArg3(ptr1, idx1, mem)
 14962  			return true
 14963  		}
 14964  		break
 14965  	}
 14966  	// match: (MOVWstorezero [4] {s} (ADDshiftLL [2] ptr0 idx0) x:(MOVWstorezeroidx4 ptr1 idx1 mem))
 14967  	// cond: x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x)
 14968  	// result: (MOVDstorezeroidx ptr1 (SLLconst <idx1.Type> [2] idx1) mem)
 14969  	for {
 14970  		if auxIntToInt32(v.AuxInt) != 4 {
 14971  			break
 14972  		}
 14973  		s := auxToSym(v.Aux)
 14974  		if v_0.Op != OpARM64ADDshiftLL || auxIntToInt64(v_0.AuxInt) != 2 {
 14975  			break
 14976  		}
 14977  		idx0 := v_0.Args[1]
 14978  		ptr0 := v_0.Args[0]
 14979  		x := v_1
 14980  		if x.Op != OpARM64MOVWstorezeroidx4 {
 14981  			break
 14982  		}
 14983  		mem := x.Args[2]
 14984  		ptr1 := x.Args[0]
 14985  		idx1 := x.Args[1]
 14986  		if !(x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x)) {
 14987  			break
 14988  		}
 14989  		v.reset(OpARM64MOVDstorezeroidx)
 14990  		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, idx1.Type)
 14991  		v0.AuxInt = int64ToAuxInt(2)
 14992  		v0.AddArg(idx1)
 14993  		v.AddArg3(ptr1, v0, mem)
 14994  		return true
 14995  	}
 14996  	return false
 14997  }
// rewriteValueARM64_OpARM64MOVWstorezeroidx applies the generated rewrite
// rules for MOVWstorezeroidx (store of a zero 32-bit word at ptr+idx).
// The rules: fold a 32-bit-representable constant index (or base) into the
// immediate offset of a plain MOVWstorezero; canonicalize an index of the
// form (SLLconst [2] idx) — in either argument position — into the scaled
// MOVWstorezeroidx4 form; and merge a word zero-store at idx+4 with a
// single-use word zero-store at idx into one doubleword zero-store.
// It reports whether any rewrite fired.
// NOTE: generated from _gen/ARM64.rules — do not edit by hand.
func rewriteValueARM64_OpARM64MOVWstorezeroidx(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVWstorezeroidx ptr (MOVDconst [c]) mem)
	// cond: is32Bit(c)
	// result: (MOVWstorezero [int32(c)] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpARM64MOVWstorezero)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVWstorezeroidx (MOVDconst [c]) idx mem)
	// cond: is32Bit(c)
	// result: (MOVWstorezero [int32(c)] idx mem)
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		idx := v_1
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpARM64MOVWstorezero)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(idx, mem)
		return true
	}
	// match: (MOVWstorezeroidx ptr (SLLconst [2] idx) mem)
	// result: (MOVWstorezeroidx4 ptr idx mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64SLLconst || auxIntToInt64(v_1.AuxInt) != 2 {
			break
		}
		idx := v_1.Args[0]
		mem := v_2
		v.reset(OpARM64MOVWstorezeroidx4)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVWstorezeroidx (SLLconst [2] idx) ptr mem)
	// result: (MOVWstorezeroidx4 ptr idx mem)
	for {
		if v_0.Op != OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != 2 {
			break
		}
		idx := v_0.Args[0]
		ptr := v_1
		mem := v_2
		v.reset(OpARM64MOVWstorezeroidx4)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVWstorezeroidx ptr (ADDconst [4] idx) x:(MOVWstorezeroidx ptr idx mem))
	// cond: x.Uses == 1 && clobber(x)
	// result: (MOVDstorezeroidx ptr idx mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64ADDconst || auxIntToInt64(v_1.AuxInt) != 4 {
			break
		}
		idx := v_1.Args[0]
		x := v_2
		if x.Op != OpARM64MOVWstorezeroidx {
			break
		}
		mem := x.Args[2]
		if ptr != x.Args[0] || idx != x.Args[1] || !(x.Uses == 1 && clobber(x)) {
			break
		}
		v.reset(OpARM64MOVDstorezeroidx)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVWstorezeroidx4 applies the generated rewrite
// rule for MOVWstorezeroidx4 (store of a zero 32-bit word at ptr+idx*4).
// Its single rule folds a constant index into the store's immediate offset
// when the scaled value c<<2 still fits in 32 bits, producing a plain
// MOVWstorezero.  It reports whether the rewrite fired.
// NOTE: generated from _gen/ARM64.rules — do not edit by hand.
func rewriteValueARM64_OpARM64MOVWstorezeroidx4(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVWstorezeroidx4 ptr (MOVDconst [c]) mem)
	// cond: is32Bit(c<<2)
	// result: (MOVWstorezero [int32(c<<2)] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c << 2)) {
			break
		}
		v.reset(OpARM64MOVWstorezero)
		v.AuxInt = int32ToAuxInt(int32(c << 2))
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MSUB applies the generated rewrite rules for
// MSUB (multiply-subtract: a - x*y on 64-bit operands).  When one multiplier
// is a MOVDconst, the multiply is strength-reduced: constants -1/0/1 become
// ADD, a plain copy, or SUB; powers of two become a single shifted SUB;
// 2^k±1 and multiples of 3, 5, 7, 9 whose quotient is a power of two become
// two-instruction shift/add/sub sequences (using x*3 = x<<2-x, x*5 = x<<2+x,
// etc.).  A constant first operand is rewritten as (ADDconst [c] (MNEG x y)),
// and two constant multipliers fold to a SUBconst of their product.
// It reports whether any rewrite fired.
// NOTE: generated from _gen/ARM64.rules — do not edit by hand.
func rewriteValueARM64_OpARM64MSUB(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (MSUB a x (MOVDconst [-1]))
	// result: (ADD a x)
	for {
		a := v_0
		x := v_1
		if v_2.Op != OpARM64MOVDconst || auxIntToInt64(v_2.AuxInt) != -1 {
			break
		}
		v.reset(OpARM64ADD)
		v.AddArg2(a, x)
		return true
	}
	// match: (MSUB a _ (MOVDconst [0]))
	// result: a
	for {
		a := v_0
		if v_2.Op != OpARM64MOVDconst || auxIntToInt64(v_2.AuxInt) != 0 {
			break
		}
		v.copyOf(a)
		return true
	}
	// match: (MSUB a x (MOVDconst [1]))
	// result: (SUB a x)
	for {
		a := v_0
		x := v_1
		if v_2.Op != OpARM64MOVDconst || auxIntToInt64(v_2.AuxInt) != 1 {
			break
		}
		v.reset(OpARM64SUB)
		v.AddArg2(a, x)
		return true
	}
	// match: (MSUB a x (MOVDconst [c]))
	// cond: isPowerOfTwo64(c)
	// result: (SUBshiftLL a x [log64(c)])
	for {
		a := v_0
		x := v_1
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_2.AuxInt)
		if !(isPowerOfTwo64(c)) {
			break
		}
		v.reset(OpARM64SUBshiftLL)
		v.AuxInt = int64ToAuxInt(log64(c))
		v.AddArg2(a, x)
		return true
	}
	// match: (MSUB a x (MOVDconst [c]))
	// cond: isPowerOfTwo64(c-1) && c>=3
	// result: (SUB a (ADDshiftLL <x.Type> x x [log64(c-1)]))
	for {
		a := v_0
		x := v_1
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_2.AuxInt)
		if !(isPowerOfTwo64(c-1) && c >= 3) {
			break
		}
		v.reset(OpARM64SUB)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = int64ToAuxInt(log64(c - 1))
		v0.AddArg2(x, x)
		v.AddArg2(a, v0)
		return true
	}
	// match: (MSUB a x (MOVDconst [c]))
	// cond: isPowerOfTwo64(c+1) && c>=7
	// result: (ADD a (SUBshiftLL <x.Type> x x [log64(c+1)]))
	for {
		a := v_0
		x := v_1
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_2.AuxInt)
		if !(isPowerOfTwo64(c+1) && c >= 7) {
			break
		}
		v.reset(OpARM64ADD)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = int64ToAuxInt(log64(c + 1))
		v0.AddArg2(x, x)
		v.AddArg2(a, v0)
		return true
	}
	// match: (MSUB a x (MOVDconst [c]))
	// cond: c%3 == 0 && isPowerOfTwo64(c/3)
	// result: (ADDshiftLL a (SUBshiftLL <x.Type> x x [2]) [log64(c/3)])
	for {
		a := v_0
		x := v_1
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_2.AuxInt)
		if !(c%3 == 0 && isPowerOfTwo64(c/3)) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = int64ToAuxInt(log64(c / 3))
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = int64ToAuxInt(2)
		v0.AddArg2(x, x)
		v.AddArg2(a, v0)
		return true
	}
	// match: (MSUB a x (MOVDconst [c]))
	// cond: c%5 == 0 && isPowerOfTwo64(c/5)
	// result: (SUBshiftLL a (ADDshiftLL <x.Type> x x [2]) [log64(c/5)])
	for {
		a := v_0
		x := v_1
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_2.AuxInt)
		if !(c%5 == 0 && isPowerOfTwo64(c/5)) {
			break
		}
		v.reset(OpARM64SUBshiftLL)
		v.AuxInt = int64ToAuxInt(log64(c / 5))
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = int64ToAuxInt(2)
		v0.AddArg2(x, x)
		v.AddArg2(a, v0)
		return true
	}
	// match: (MSUB a x (MOVDconst [c]))
	// cond: c%7 == 0 && isPowerOfTwo64(c/7)
	// result: (ADDshiftLL a (SUBshiftLL <x.Type> x x [3]) [log64(c/7)])
	for {
		a := v_0
		x := v_1
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_2.AuxInt)
		if !(c%7 == 0 && isPowerOfTwo64(c/7)) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = int64ToAuxInt(log64(c / 7))
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = int64ToAuxInt(3)
		v0.AddArg2(x, x)
		v.AddArg2(a, v0)
		return true
	}
	// match: (MSUB a x (MOVDconst [c]))
	// cond: c%9 == 0 && isPowerOfTwo64(c/9)
	// result: (SUBshiftLL a (ADDshiftLL <x.Type> x x [3]) [log64(c/9)])
	for {
		a := v_0
		x := v_1
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_2.AuxInt)
		if !(c%9 == 0 && isPowerOfTwo64(c/9)) {
			break
		}
		v.reset(OpARM64SUBshiftLL)
		v.AuxInt = int64ToAuxInt(log64(c / 9))
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = int64ToAuxInt(3)
		v0.AddArg2(x, x)
		v.AddArg2(a, v0)
		return true
	}
	// The same strength-reduction rules repeat below with the constant in
	// the second (rather than third) argument position.
	// match: (MSUB a (MOVDconst [-1]) x)
	// result: (ADD a x)
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != -1 {
			break
		}
		x := v_2
		v.reset(OpARM64ADD)
		v.AddArg2(a, x)
		return true
	}
	// match: (MSUB a (MOVDconst [0]) _)
	// result: a
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		v.copyOf(a)
		return true
	}
	// match: (MSUB a (MOVDconst [1]) x)
	// result: (SUB a x)
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 1 {
			break
		}
		x := v_2
		v.reset(OpARM64SUB)
		v.AddArg2(a, x)
		return true
	}
	// match: (MSUB a (MOVDconst [c]) x)
	// cond: isPowerOfTwo64(c)
	// result: (SUBshiftLL a x [log64(c)])
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		x := v_2
		if !(isPowerOfTwo64(c)) {
			break
		}
		v.reset(OpARM64SUBshiftLL)
		v.AuxInt = int64ToAuxInt(log64(c))
		v.AddArg2(a, x)
		return true
	}
	// match: (MSUB a (MOVDconst [c]) x)
	// cond: isPowerOfTwo64(c-1) && c>=3
	// result: (SUB a (ADDshiftLL <x.Type> x x [log64(c-1)]))
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		x := v_2
		if !(isPowerOfTwo64(c-1) && c >= 3) {
			break
		}
		v.reset(OpARM64SUB)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = int64ToAuxInt(log64(c - 1))
		v0.AddArg2(x, x)
		v.AddArg2(a, v0)
		return true
	}
	// match: (MSUB a (MOVDconst [c]) x)
	// cond: isPowerOfTwo64(c+1) && c>=7
	// result: (ADD a (SUBshiftLL <x.Type> x x [log64(c+1)]))
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		x := v_2
		if !(isPowerOfTwo64(c+1) && c >= 7) {
			break
		}
		v.reset(OpARM64ADD)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = int64ToAuxInt(log64(c + 1))
		v0.AddArg2(x, x)
		v.AddArg2(a, v0)
		return true
	}
	// match: (MSUB a (MOVDconst [c]) x)
	// cond: c%3 == 0 && isPowerOfTwo64(c/3)
	// result: (ADDshiftLL a (SUBshiftLL <x.Type> x x [2]) [log64(c/3)])
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		x := v_2
		if !(c%3 == 0 && isPowerOfTwo64(c/3)) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = int64ToAuxInt(log64(c / 3))
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = int64ToAuxInt(2)
		v0.AddArg2(x, x)
		v.AddArg2(a, v0)
		return true
	}
	// match: (MSUB a (MOVDconst [c]) x)
	// cond: c%5 == 0 && isPowerOfTwo64(c/5)
	// result: (SUBshiftLL a (ADDshiftLL <x.Type> x x [2]) [log64(c/5)])
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		x := v_2
		if !(c%5 == 0 && isPowerOfTwo64(c/5)) {
			break
		}
		v.reset(OpARM64SUBshiftLL)
		v.AuxInt = int64ToAuxInt(log64(c / 5))
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = int64ToAuxInt(2)
		v0.AddArg2(x, x)
		v.AddArg2(a, v0)
		return true
	}
	// match: (MSUB a (MOVDconst [c]) x)
	// cond: c%7 == 0 && isPowerOfTwo64(c/7)
	// result: (ADDshiftLL a (SUBshiftLL <x.Type> x x [3]) [log64(c/7)])
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		x := v_2
		if !(c%7 == 0 && isPowerOfTwo64(c/7)) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = int64ToAuxInt(log64(c / 7))
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = int64ToAuxInt(3)
		v0.AddArg2(x, x)
		v.AddArg2(a, v0)
		return true
	}
	// match: (MSUB a (MOVDconst [c]) x)
	// cond: c%9 == 0 && isPowerOfTwo64(c/9)
	// result: (SUBshiftLL a (ADDshiftLL <x.Type> x x [3]) [log64(c/9)])
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		x := v_2
		if !(c%9 == 0 && isPowerOfTwo64(c/9)) {
			break
		}
		v.reset(OpARM64SUBshiftLL)
		v.AuxInt = int64ToAuxInt(log64(c / 9))
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = int64ToAuxInt(3)
		v0.AddArg2(x, x)
		v.AddArg2(a, v0)
		return true
	}
	// match: (MSUB (MOVDconst [c]) x y)
	// result: (ADDconst [c] (MNEG <x.Type> x y))
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_1
		y := v_2
		v.reset(OpARM64ADDconst)
		v.AuxInt = int64ToAuxInt(c)
		v0 := b.NewValue0(v.Pos, OpARM64MNEG, x.Type)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
	// match: (MSUB a (MOVDconst [c]) (MOVDconst [d]))
	// result: (SUBconst [c*d] a)
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		d := auxIntToInt64(v_2.AuxInt)
		v.reset(OpARM64SUBconst)
		v.AuxInt = int64ToAuxInt(c * d)
		v.AddArg(a)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MSUBW applies the generated rewrite rules for
// MSUBW, the 32-bit multiply-subtract (a - x*y with a 32-bit multiply).
// The rules mirror those of MSUB, but conditions on the constant multiplier
// use its 32-bit truncation (int32(c)) for the -1/0/1 and 2^k±1 cases, and
// the 3/5/7/9-multiple rules additionally require is32Bit(c) so the rewritten
// shift amounts remain valid for the 32-bit result.  A constant first operand
// becomes (ADDconst [c] (MNEGW x y)), and two constant multipliers fold to a
// SUBconst of the 32-bit product.  It reports whether any rewrite fired.
// NOTE: generated from _gen/ARM64.rules — do not edit by hand.
func rewriteValueARM64_OpARM64MSUBW(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (MSUBW a x (MOVDconst [c]))
	// cond: int32(c)==-1
	// result: (ADD a x)
	for {
		a := v_0
		x := v_1
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_2.AuxInt)
		if !(int32(c) == -1) {
			break
		}
		v.reset(OpARM64ADD)
		v.AddArg2(a, x)
		return true
	}
	// match: (MSUBW a _ (MOVDconst [c]))
	// cond: int32(c)==0
	// result: a
	for {
		a := v_0
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_2.AuxInt)
		if !(int32(c) == 0) {
			break
		}
		v.copyOf(a)
		return true
	}
	// match: (MSUBW a x (MOVDconst [c]))
	// cond: int32(c)==1
	// result: (SUB a x)
	for {
		a := v_0
		x := v_1
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_2.AuxInt)
		if !(int32(c) == 1) {
			break
		}
		v.reset(OpARM64SUB)
		v.AddArg2(a, x)
		return true
	}
	// match: (MSUBW a x (MOVDconst [c]))
	// cond: isPowerOfTwo64(c)
	// result: (SUBshiftLL a x [log64(c)])
	for {
		a := v_0
		x := v_1
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_2.AuxInt)
		if !(isPowerOfTwo64(c)) {
			break
		}
		v.reset(OpARM64SUBshiftLL)
		v.AuxInt = int64ToAuxInt(log64(c))
		v.AddArg2(a, x)
		return true
	}
	// match: (MSUBW a x (MOVDconst [c]))
	// cond: isPowerOfTwo64(c-1) && int32(c)>=3
	// result: (SUB a (ADDshiftLL <x.Type> x x [log64(c-1)]))
	for {
		a := v_0
		x := v_1
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_2.AuxInt)
		if !(isPowerOfTwo64(c-1) && int32(c) >= 3) {
			break
		}
		v.reset(OpARM64SUB)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = int64ToAuxInt(log64(c - 1))
		v0.AddArg2(x, x)
		v.AddArg2(a, v0)
		return true
	}
	// match: (MSUBW a x (MOVDconst [c]))
	// cond: isPowerOfTwo64(c+1) && int32(c)>=7
	// result: (ADD a (SUBshiftLL <x.Type> x x [log64(c+1)]))
	for {
		a := v_0
		x := v_1
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_2.AuxInt)
		if !(isPowerOfTwo64(c+1) && int32(c) >= 7) {
			break
		}
		v.reset(OpARM64ADD)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = int64ToAuxInt(log64(c + 1))
		v0.AddArg2(x, x)
		v.AddArg2(a, v0)
		return true
	}
	// match: (MSUBW a x (MOVDconst [c]))
	// cond: c%3 == 0 && isPowerOfTwo64(c/3) && is32Bit(c)
	// result: (ADDshiftLL a (SUBshiftLL <x.Type> x x [2]) [log64(c/3)])
	for {
		a := v_0
		x := v_1
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_2.AuxInt)
		if !(c%3 == 0 && isPowerOfTwo64(c/3) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = int64ToAuxInt(log64(c / 3))
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = int64ToAuxInt(2)
		v0.AddArg2(x, x)
		v.AddArg2(a, v0)
		return true
	}
	// match: (MSUBW a x (MOVDconst [c]))
	// cond: c%5 == 0 && isPowerOfTwo64(c/5) && is32Bit(c)
	// result: (SUBshiftLL a (ADDshiftLL <x.Type> x x [2]) [log64(c/5)])
	for {
		a := v_0
		x := v_1
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_2.AuxInt)
		if !(c%5 == 0 && isPowerOfTwo64(c/5) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64SUBshiftLL)
		v.AuxInt = int64ToAuxInt(log64(c / 5))
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = int64ToAuxInt(2)
		v0.AddArg2(x, x)
		v.AddArg2(a, v0)
		return true
	}
	// match: (MSUBW a x (MOVDconst [c]))
	// cond: c%7 == 0 && isPowerOfTwo64(c/7) && is32Bit(c)
	// result: (ADDshiftLL a (SUBshiftLL <x.Type> x x [3]) [log64(c/7)])
	for {
		a := v_0
		x := v_1
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_2.AuxInt)
		if !(c%7 == 0 && isPowerOfTwo64(c/7) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = int64ToAuxInt(log64(c / 7))
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = int64ToAuxInt(3)
		v0.AddArg2(x, x)
		v.AddArg2(a, v0)
		return true
	}
	// match: (MSUBW a x (MOVDconst [c]))
	// cond: c%9 == 0 && isPowerOfTwo64(c/9) && is32Bit(c)
	// result: (SUBshiftLL a (ADDshiftLL <x.Type> x x [3]) [log64(c/9)])
	for {
		a := v_0
		x := v_1
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_2.AuxInt)
		if !(c%9 == 0 && isPowerOfTwo64(c/9) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64SUBshiftLL)
		v.AuxInt = int64ToAuxInt(log64(c / 9))
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = int64ToAuxInt(3)
		v0.AddArg2(x, x)
		v.AddArg2(a, v0)
		return true
	}
	// The same strength-reduction rules repeat below with the constant in
	// the second (rather than third) argument position.
	// match: (MSUBW a (MOVDconst [c]) x)
	// cond: int32(c)==-1
	// result: (ADD a x)
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		x := v_2
		if !(int32(c) == -1) {
			break
		}
		v.reset(OpARM64ADD)
		v.AddArg2(a, x)
		return true
	}
	// match: (MSUBW a (MOVDconst [c]) _)
	// cond: int32(c)==0
	// result: a
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(int32(c) == 0) {
			break
		}
		v.copyOf(a)
		return true
	}
	// match: (MSUBW a (MOVDconst [c]) x)
	// cond: int32(c)==1
	// result: (SUB a x)
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		x := v_2
		if !(int32(c) == 1) {
			break
		}
		v.reset(OpARM64SUB)
		v.AddArg2(a, x)
		return true
	}
	// match: (MSUBW a (MOVDconst [c]) x)
	// cond: isPowerOfTwo64(c)
	// result: (SUBshiftLL a x [log64(c)])
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		x := v_2
		if !(isPowerOfTwo64(c)) {
			break
		}
		v.reset(OpARM64SUBshiftLL)
		v.AuxInt = int64ToAuxInt(log64(c))
		v.AddArg2(a, x)
		return true
	}
	// match: (MSUBW a (MOVDconst [c]) x)
	// cond: isPowerOfTwo64(c-1) && int32(c)>=3
	// result: (SUB a (ADDshiftLL <x.Type> x x [log64(c-1)]))
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		x := v_2
		if !(isPowerOfTwo64(c-1) && int32(c) >= 3) {
			break
		}
		v.reset(OpARM64SUB)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = int64ToAuxInt(log64(c - 1))
		v0.AddArg2(x, x)
		v.AddArg2(a, v0)
		return true
	}
	// match: (MSUBW a (MOVDconst [c]) x)
	// cond: isPowerOfTwo64(c+1) && int32(c)>=7
	// result: (ADD a (SUBshiftLL <x.Type> x x [log64(c+1)]))
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		x := v_2
		if !(isPowerOfTwo64(c+1) && int32(c) >= 7) {
			break
		}
		v.reset(OpARM64ADD)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = int64ToAuxInt(log64(c + 1))
		v0.AddArg2(x, x)
		v.AddArg2(a, v0)
		return true
	}
	// match: (MSUBW a (MOVDconst [c]) x)
	// cond: c%3 == 0 && isPowerOfTwo64(c/3) && is32Bit(c)
	// result: (ADDshiftLL a (SUBshiftLL <x.Type> x x [2]) [log64(c/3)])
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		x := v_2
		if !(c%3 == 0 && isPowerOfTwo64(c/3) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = int64ToAuxInt(log64(c / 3))
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = int64ToAuxInt(2)
		v0.AddArg2(x, x)
		v.AddArg2(a, v0)
		return true
	}
	// match: (MSUBW a (MOVDconst [c]) x)
	// cond: c%5 == 0 && isPowerOfTwo64(c/5) && is32Bit(c)
	// result: (SUBshiftLL a (ADDshiftLL <x.Type> x x [2]) [log64(c/5)])
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		x := v_2
		if !(c%5 == 0 && isPowerOfTwo64(c/5) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64SUBshiftLL)
		v.AuxInt = int64ToAuxInt(log64(c / 5))
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = int64ToAuxInt(2)
		v0.AddArg2(x, x)
		v.AddArg2(a, v0)
		return true
	}
	// match: (MSUBW a (MOVDconst [c]) x)
	// cond: c%7 == 0 && isPowerOfTwo64(c/7) && is32Bit(c)
	// result: (ADDshiftLL a (SUBshiftLL <x.Type> x x [3]) [log64(c/7)])
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		x := v_2
		if !(c%7 == 0 && isPowerOfTwo64(c/7) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = int64ToAuxInt(log64(c / 7))
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = int64ToAuxInt(3)
		v0.AddArg2(x, x)
		v.AddArg2(a, v0)
		return true
	}
	// match: (MSUBW a (MOVDconst [c]) x)
	// cond: c%9 == 0 && isPowerOfTwo64(c/9) && is32Bit(c)
	// result: (SUBshiftLL a (ADDshiftLL <x.Type> x x [3]) [log64(c/9)])
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		x := v_2
		if !(c%9 == 0 && isPowerOfTwo64(c/9) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64SUBshiftLL)
		v.AuxInt = int64ToAuxInt(log64(c / 9))
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = int64ToAuxInt(3)
		v0.AddArg2(x, x)
		v.AddArg2(a, v0)
		return true
	}
	// match: (MSUBW (MOVDconst [c]) x y)
	// result: (ADDconst [c] (MNEGW <x.Type> x y))
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_1
		y := v_2
		v.reset(OpARM64ADDconst)
		v.AuxInt = int64ToAuxInt(c)
		v0 := b.NewValue0(v.Pos, OpARM64MNEGW, x.Type)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
	// match: (MSUBW a (MOVDconst [c]) (MOVDconst [d]))
	// result: (SUBconst [int64(int32(c)*int32(d))] a)
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		d := auxIntToInt64(v_2.AuxInt)
		v.reset(OpARM64SUBconst)
		v.AuxInt = int64ToAuxInt(int64(int32(c) * int32(d)))
		v.AddArg(a)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MUL strength-reduces and constant-folds a 64-bit
// multiply (MUL). Rules are generated from _gen/ARM64.rules and are tried in
// order; the first rule whose pattern and condition match rewrites v in place
// and returns true. If no rule fires, v is left untouched and false is
// returned. Because multiplication is commutative, each pattern is retried
// with the operands swapped via the inner
// "for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0" loop.
func rewriteValueARM64_OpARM64MUL(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (MUL (NEG x) y)
	// result: (MNEG x y)
	for {
		// Try both operand orders (MUL is commutative).
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpARM64NEG {
				continue
			}
			x := v_0.Args[0]
			y := v_1
			v.reset(OpARM64MNEG)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (MUL x (MOVDconst [-1]))
	// result: (NEG x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != -1 {
				continue
			}
			v.reset(OpARM64NEG)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (MUL _ (MOVDconst [0]))
	// result: (MOVDconst [0])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
				continue
			}
			v.reset(OpARM64MOVDconst)
			v.AuxInt = int64ToAuxInt(0)
			return true
		}
		break
	}
	// match: (MUL x (MOVDconst [1]))
	// result: x
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 1 {
				continue
			}
			v.copyOf(x)
			return true
		}
		break
	}
	// match: (MUL x (MOVDconst [c]))
	// cond: isPowerOfTwo64(c)
	// result: (SLLconst [log64(c)] x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(isPowerOfTwo64(c)) {
				continue
			}
			v.reset(OpARM64SLLconst)
			v.AuxInt = int64ToAuxInt(log64(c))
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (MUL x (MOVDconst [c]))
	// cond: isPowerOfTwo64(c-1) && c >= 3
	// result: (ADDshiftLL x x [log64(c-1)])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(isPowerOfTwo64(c-1) && c >= 3) {
				continue
			}
			v.reset(OpARM64ADDshiftLL)
			v.AuxInt = int64ToAuxInt(log64(c - 1))
			v.AddArg2(x, x)
			return true
		}
		break
	}
	// match: (MUL x (MOVDconst [c]))
	// cond: isPowerOfTwo64(c+1) && c >= 7
	// result: (ADDshiftLL (NEG <x.Type> x) x [log64(c+1)])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(isPowerOfTwo64(c+1) && c >= 7) {
				continue
			}
			v.reset(OpARM64ADDshiftLL)
			v.AuxInt = int64ToAuxInt(log64(c + 1))
			v0 := b.NewValue0(v.Pos, OpARM64NEG, x.Type)
			v0.AddArg(x)
			v.AddArg2(v0, x)
			return true
		}
		break
	}
	// match: (MUL x (MOVDconst [c]))
	// cond: c%3 == 0 && isPowerOfTwo64(c/3)
	// result: (SLLconst [log64(c/3)] (ADDshiftLL <x.Type> x x [1]))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(c%3 == 0 && isPowerOfTwo64(c/3)) {
				continue
			}
			v.reset(OpARM64SLLconst)
			v.AuxInt = int64ToAuxInt(log64(c / 3))
			v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
			v0.AuxInt = int64ToAuxInt(1)
			v0.AddArg2(x, x)
			v.AddArg(v0)
			return true
		}
		break
	}
	// match: (MUL x (MOVDconst [c]))
	// cond: c%5 == 0 && isPowerOfTwo64(c/5)
	// result: (SLLconst [log64(c/5)] (ADDshiftLL <x.Type> x x [2]))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(c%5 == 0 && isPowerOfTwo64(c/5)) {
				continue
			}
			v.reset(OpARM64SLLconst)
			v.AuxInt = int64ToAuxInt(log64(c / 5))
			v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
			v0.AuxInt = int64ToAuxInt(2)
			v0.AddArg2(x, x)
			v.AddArg(v0)
			return true
		}
		break
	}
	// match: (MUL x (MOVDconst [c]))
	// cond: c%7 == 0 && isPowerOfTwo64(c/7)
	// result: (SLLconst [log64(c/7)] (ADDshiftLL <x.Type> (NEG <x.Type> x) x [3]))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(c%7 == 0 && isPowerOfTwo64(c/7)) {
				continue
			}
			v.reset(OpARM64SLLconst)
			v.AuxInt = int64ToAuxInt(log64(c / 7))
			v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
			v0.AuxInt = int64ToAuxInt(3)
			v1 := b.NewValue0(v.Pos, OpARM64NEG, x.Type)
			v1.AddArg(x)
			v0.AddArg2(v1, x)
			v.AddArg(v0)
			return true
		}
		break
	}
	// match: (MUL x (MOVDconst [c]))
	// cond: c%9 == 0 && isPowerOfTwo64(c/9)
	// result: (SLLconst [log64(c/9)] (ADDshiftLL <x.Type> x x [3]))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(c%9 == 0 && isPowerOfTwo64(c/9)) {
				continue
			}
			v.reset(OpARM64SLLconst)
			v.AuxInt = int64ToAuxInt(log64(c / 9))
			v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
			v0.AuxInt = int64ToAuxInt(3)
			v0.AddArg2(x, x)
			v.AddArg(v0)
			return true
		}
		break
	}
	// match: (MUL (MOVDconst [c]) (MOVDconst [d]))
	// result: (MOVDconst [c*d])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_0.AuxInt)
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			d := auxIntToInt64(v_1.AuxInt)
			v.reset(OpARM64MOVDconst)
			v.AuxInt = int64ToAuxInt(c * d)
			return true
		}
		break
	}
	return false
}
// rewriteValueARM64_OpARM64MULW strength-reduces and constant-folds a 32-bit
// multiply (MULW). Rules are generated from _gen/ARM64.rules and are tried in
// order; the first rule whose pattern and condition match rewrites v in place
// and returns true, otherwise false is returned. Conditions use int32
// truncation (e.g. int32(c) == -1) because only the low 32 bits of the
// result are meaningful. MULW is commutative, so each pattern is retried with
// the operands swapped via the inner _i0 loop.
func rewriteValueARM64_OpARM64MULW(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (MULW (NEG x) y)
	// result: (MNEGW x y)
	for {
		// Try both operand orders (MULW is commutative).
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpARM64NEG {
				continue
			}
			x := v_0.Args[0]
			y := v_1
			v.reset(OpARM64MNEGW)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (MULW x (MOVDconst [c]))
	// cond: int32(c)==-1
	// result: (NEG x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(int32(c) == -1) {
				continue
			}
			v.reset(OpARM64NEG)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (MULW _ (MOVDconst [c]))
	// cond: int32(c)==0
	// result: (MOVDconst [0])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(int32(c) == 0) {
				continue
			}
			v.reset(OpARM64MOVDconst)
			v.AuxInt = int64ToAuxInt(0)
			return true
		}
		break
	}
	// match: (MULW x (MOVDconst [c]))
	// cond: int32(c)==1
	// result: x
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(int32(c) == 1) {
				continue
			}
			v.copyOf(x)
			return true
		}
		break
	}
	// match: (MULW x (MOVDconst [c]))
	// cond: isPowerOfTwo64(c)
	// result: (SLLconst [log64(c)] x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(isPowerOfTwo64(c)) {
				continue
			}
			v.reset(OpARM64SLLconst)
			v.AuxInt = int64ToAuxInt(log64(c))
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (MULW x (MOVDconst [c]))
	// cond: isPowerOfTwo64(c-1) && int32(c) >= 3
	// result: (ADDshiftLL x x [log64(c-1)])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(isPowerOfTwo64(c-1) && int32(c) >= 3) {
				continue
			}
			v.reset(OpARM64ADDshiftLL)
			v.AuxInt = int64ToAuxInt(log64(c - 1))
			v.AddArg2(x, x)
			return true
		}
		break
	}
	// match: (MULW x (MOVDconst [c]))
	// cond: isPowerOfTwo64(c+1) && int32(c) >= 7
	// result: (ADDshiftLL (NEG <x.Type> x) x [log64(c+1)])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(isPowerOfTwo64(c+1) && int32(c) >= 7) {
				continue
			}
			v.reset(OpARM64ADDshiftLL)
			v.AuxInt = int64ToAuxInt(log64(c + 1))
			v0 := b.NewValue0(v.Pos, OpARM64NEG, x.Type)
			v0.AddArg(x)
			v.AddArg2(v0, x)
			return true
		}
		break
	}
	// match: (MULW x (MOVDconst [c]))
	// cond: c%3 == 0 && isPowerOfTwo64(c/3) && is32Bit(c)
	// result: (SLLconst [log64(c/3)] (ADDshiftLL <x.Type> x x [1]))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(c%3 == 0 && isPowerOfTwo64(c/3) && is32Bit(c)) {
				continue
			}
			v.reset(OpARM64SLLconst)
			v.AuxInt = int64ToAuxInt(log64(c / 3))
			v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
			v0.AuxInt = int64ToAuxInt(1)
			v0.AddArg2(x, x)
			v.AddArg(v0)
			return true
		}
		break
	}
	// match: (MULW x (MOVDconst [c]))
	// cond: c%5 == 0 && isPowerOfTwo64(c/5) && is32Bit(c)
	// result: (SLLconst [log64(c/5)] (ADDshiftLL <x.Type> x x [2]))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(c%5 == 0 && isPowerOfTwo64(c/5) && is32Bit(c)) {
				continue
			}
			v.reset(OpARM64SLLconst)
			v.AuxInt = int64ToAuxInt(log64(c / 5))
			v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
			v0.AuxInt = int64ToAuxInt(2)
			v0.AddArg2(x, x)
			v.AddArg(v0)
			return true
		}
		break
	}
	// match: (MULW x (MOVDconst [c]))
	// cond: c%7 == 0 && isPowerOfTwo64(c/7) && is32Bit(c)
	// result: (SLLconst [log64(c/7)] (ADDshiftLL <x.Type> (NEG <x.Type> x) x [3]))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(c%7 == 0 && isPowerOfTwo64(c/7) && is32Bit(c)) {
				continue
			}
			v.reset(OpARM64SLLconst)
			v.AuxInt = int64ToAuxInt(log64(c / 7))
			v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
			v0.AuxInt = int64ToAuxInt(3)
			v1 := b.NewValue0(v.Pos, OpARM64NEG, x.Type)
			v1.AddArg(x)
			v0.AddArg2(v1, x)
			v.AddArg(v0)
			return true
		}
		break
	}
	// match: (MULW x (MOVDconst [c]))
	// cond: c%9 == 0 && isPowerOfTwo64(c/9) && is32Bit(c)
	// result: (SLLconst [log64(c/9)] (ADDshiftLL <x.Type> x x [3]))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(c%9 == 0 && isPowerOfTwo64(c/9) && is32Bit(c)) {
				continue
			}
			v.reset(OpARM64SLLconst)
			v.AuxInt = int64ToAuxInt(log64(c / 9))
			v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
			v0.AuxInt = int64ToAuxInt(3)
			v0.AddArg2(x, x)
			v.AddArg(v0)
			return true
		}
		break
	}
	// match: (MULW (MOVDconst [c]) (MOVDconst [d]))
	// result: (MOVDconst [int64(int32(c)*int32(d))])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_0.AuxInt)
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			d := auxIntToInt64(v_1.AuxInt)
			v.reset(OpARM64MOVDconst)
			v.AuxInt = int64ToAuxInt(int64(int32(c) * int32(d)))
			return true
		}
		break
	}
	return false
}
// rewriteValueARM64_OpARM64MVN simplifies a bitwise-NOT (MVN). Rules are
// generated from _gen/ARM64.rules and tried in order: fold NOT-of-XOR into
// EON, constant-fold NOT of a constant, and merge a single-use shift operand
// into the combined MVNshift* forms. The clobberIfDead(x) conditions are not
// pure predicates: they mark the shift value x as dead so it can be absorbed,
// which is only legal because x has no other uses. Returns true if v was
// rewritten.
func rewriteValueARM64_OpARM64MVN(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MVN (XOR x y))
	// result: (EON x y)
	for {
		if v_0.Op != OpARM64XOR {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		v.reset(OpARM64EON)
		v.AddArg2(x, y)
		return true
	}
	// match: (MVN (MOVDconst [c]))
	// result: (MOVDconst [^c])
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(^c)
		return true
	}
	// match: (MVN x:(SLLconst [c] y))
	// cond: clobberIfDead(x)
	// result: (MVNshiftLL [c] y)
	for {
		x := v_0
		if x.Op != OpARM64SLLconst {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		y := x.Args[0]
		if !(clobberIfDead(x)) {
			break
		}
		v.reset(OpARM64MVNshiftLL)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(y)
		return true
	}
	// match: (MVN x:(SRLconst [c] y))
	// cond: clobberIfDead(x)
	// result: (MVNshiftRL [c] y)
	for {
		x := v_0
		if x.Op != OpARM64SRLconst {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		y := x.Args[0]
		if !(clobberIfDead(x)) {
			break
		}
		v.reset(OpARM64MVNshiftRL)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(y)
		return true
	}
	// match: (MVN x:(SRAconst [c] y))
	// cond: clobberIfDead(x)
	// result: (MVNshiftRA [c] y)
	for {
		x := v_0
		if x.Op != OpARM64SRAconst {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		y := x.Args[0]
		if !(clobberIfDead(x)) {
			break
		}
		v.reset(OpARM64MVNshiftRA)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(y)
		return true
	}
	// match: (MVN x:(RORconst [c] y))
	// cond: clobberIfDead(x)
	// result: (MVNshiftRO [c] y)
	for {
		x := v_0
		if x.Op != OpARM64RORconst {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		y := x.Args[0]
		if !(clobberIfDead(x)) {
			break
		}
		v.reset(OpARM64MVNshiftRO)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(y)
		return true
	}
	return false
}
 16509  func rewriteValueARM64_OpARM64MVNshiftLL(v *Value) bool {
 16510  	v_0 := v.Args[0]
 16511  	// match: (MVNshiftLL (MOVDconst [c]) [d])
 16512  	// result: (MOVDconst [^int64(uint64(c)<<uint64(d))])
 16513  	for {
 16514  		d := auxIntToInt64(v.AuxInt)
 16515  		if v_0.Op != OpARM64MOVDconst {
 16516  			break
 16517  		}
 16518  		c := auxIntToInt64(v_0.AuxInt)
 16519  		v.reset(OpARM64MOVDconst)
 16520  		v.AuxInt = int64ToAuxInt(^int64(uint64(c) << uint64(d)))
 16521  		return true
 16522  	}
 16523  	return false
 16524  }
 16525  func rewriteValueARM64_OpARM64MVNshiftRA(v *Value) bool {
 16526  	v_0 := v.Args[0]
 16527  	// match: (MVNshiftRA (MOVDconst [c]) [d])
 16528  	// result: (MOVDconst [^(c>>uint64(d))])
 16529  	for {
 16530  		d := auxIntToInt64(v.AuxInt)
 16531  		if v_0.Op != OpARM64MOVDconst {
 16532  			break
 16533  		}
 16534  		c := auxIntToInt64(v_0.AuxInt)
 16535  		v.reset(OpARM64MOVDconst)
 16536  		v.AuxInt = int64ToAuxInt(^(c >> uint64(d)))
 16537  		return true
 16538  	}
 16539  	return false
 16540  }
 16541  func rewriteValueARM64_OpARM64MVNshiftRL(v *Value) bool {
 16542  	v_0 := v.Args[0]
 16543  	// match: (MVNshiftRL (MOVDconst [c]) [d])
 16544  	// result: (MOVDconst [^int64(uint64(c)>>uint64(d))])
 16545  	for {
 16546  		d := auxIntToInt64(v.AuxInt)
 16547  		if v_0.Op != OpARM64MOVDconst {
 16548  			break
 16549  		}
 16550  		c := auxIntToInt64(v_0.AuxInt)
 16551  		v.reset(OpARM64MOVDconst)
 16552  		v.AuxInt = int64ToAuxInt(^int64(uint64(c) >> uint64(d)))
 16553  		return true
 16554  	}
 16555  	return false
 16556  }
 16557  func rewriteValueARM64_OpARM64MVNshiftRO(v *Value) bool {
 16558  	v_0 := v.Args[0]
 16559  	// match: (MVNshiftRO (MOVDconst [c]) [d])
 16560  	// result: (MOVDconst [^rotateRight64(c, d)])
 16561  	for {
 16562  		d := auxIntToInt64(v.AuxInt)
 16563  		if v_0.Op != OpARM64MOVDconst {
 16564  			break
 16565  		}
 16566  		c := auxIntToInt64(v_0.AuxInt)
 16567  		v.reset(OpARM64MOVDconst)
 16568  		v.AuxInt = int64ToAuxInt(^rotateRight64(c, d))
 16569  		return true
 16570  	}
 16571  	return false
 16572  }
// rewriteValueARM64_OpARM64NEG simplifies an arithmetic negation (NEG).
// Rules are generated from _gen/ARM64.rules and tried in order: fold
// NEG-of-multiply into MNEG/MNEGW, cancel double negation, constant-fold
// NEG of a constant, and merge a single-use shift operand into the combined
// NEGshift* forms. The clobberIfDead(x) conditions are not pure predicates:
// they mark the shift value x as dead so it can be absorbed, which is only
// legal because x has no other uses. Returns true if v was rewritten.
func rewriteValueARM64_OpARM64NEG(v *Value) bool {
	v_0 := v.Args[0]
	// match: (NEG (MUL x y))
	// result: (MNEG x y)
	for {
		if v_0.Op != OpARM64MUL {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		v.reset(OpARM64MNEG)
		v.AddArg2(x, y)
		return true
	}
	// match: (NEG (MULW x y))
	// result: (MNEGW x y)
	for {
		if v_0.Op != OpARM64MULW {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		v.reset(OpARM64MNEGW)
		v.AddArg2(x, y)
		return true
	}
	// match: (NEG (NEG x))
	// result: x
	for {
		if v_0.Op != OpARM64NEG {
			break
		}
		x := v_0.Args[0]
		v.copyOf(x)
		return true
	}
	// match: (NEG (MOVDconst [c]))
	// result: (MOVDconst [-c])
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(-c)
		return true
	}
	// match: (NEG x:(SLLconst [c] y))
	// cond: clobberIfDead(x)
	// result: (NEGshiftLL [c] y)
	for {
		x := v_0
		if x.Op != OpARM64SLLconst {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		y := x.Args[0]
		if !(clobberIfDead(x)) {
			break
		}
		v.reset(OpARM64NEGshiftLL)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(y)
		return true
	}
	// match: (NEG x:(SRLconst [c] y))
	// cond: clobberIfDead(x)
	// result: (NEGshiftRL [c] y)
	for {
		x := v_0
		if x.Op != OpARM64SRLconst {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		y := x.Args[0]
		if !(clobberIfDead(x)) {
			break
		}
		v.reset(OpARM64NEGshiftRL)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(y)
		return true
	}
	// match: (NEG x:(SRAconst [c] y))
	// cond: clobberIfDead(x)
	// result: (NEGshiftRA [c] y)
	for {
		x := v_0
		if x.Op != OpARM64SRAconst {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		y := x.Args[0]
		if !(clobberIfDead(x)) {
			break
		}
		v.reset(OpARM64NEGshiftRA)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(y)
		return true
	}
	return false
}
 16676  func rewriteValueARM64_OpARM64NEGshiftLL(v *Value) bool {
 16677  	v_0 := v.Args[0]
 16678  	// match: (NEGshiftLL (MOVDconst [c]) [d])
 16679  	// result: (MOVDconst [-int64(uint64(c)<<uint64(d))])
 16680  	for {
 16681  		d := auxIntToInt64(v.AuxInt)
 16682  		if v_0.Op != OpARM64MOVDconst {
 16683  			break
 16684  		}
 16685  		c := auxIntToInt64(v_0.AuxInt)
 16686  		v.reset(OpARM64MOVDconst)
 16687  		v.AuxInt = int64ToAuxInt(-int64(uint64(c) << uint64(d)))
 16688  		return true
 16689  	}
 16690  	return false
 16691  }
 16692  func rewriteValueARM64_OpARM64NEGshiftRA(v *Value) bool {
 16693  	v_0 := v.Args[0]
 16694  	// match: (NEGshiftRA (MOVDconst [c]) [d])
 16695  	// result: (MOVDconst [-(c>>uint64(d))])
 16696  	for {
 16697  		d := auxIntToInt64(v.AuxInt)
 16698  		if v_0.Op != OpARM64MOVDconst {
 16699  			break
 16700  		}
 16701  		c := auxIntToInt64(v_0.AuxInt)
 16702  		v.reset(OpARM64MOVDconst)
 16703  		v.AuxInt = int64ToAuxInt(-(c >> uint64(d)))
 16704  		return true
 16705  	}
 16706  	return false
 16707  }
 16708  func rewriteValueARM64_OpARM64NEGshiftRL(v *Value) bool {
 16709  	v_0 := v.Args[0]
 16710  	// match: (NEGshiftRL (MOVDconst [c]) [d])
 16711  	// result: (MOVDconst [-int64(uint64(c)>>uint64(d))])
 16712  	for {
 16713  		d := auxIntToInt64(v.AuxInt)
 16714  		if v_0.Op != OpARM64MOVDconst {
 16715  			break
 16716  		}
 16717  		c := auxIntToInt64(v_0.AuxInt)
 16718  		v.reset(OpARM64MOVDconst)
 16719  		v.AuxInt = int64ToAuxInt(-int64(uint64(c) >> uint64(d)))
 16720  		return true
 16721  	}
 16722  	return false
 16723  }
 16724  func rewriteValueARM64_OpARM64NotEqual(v *Value) bool {
 16725  	v_0 := v.Args[0]
 16726  	b := v.Block
 16727  	// match: (NotEqual (CMPconst [0] z:(AND x y)))
 16728  	// cond: z.Uses == 1
 16729  	// result: (NotEqual (TST x y))
 16730  	for {
 16731  		if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 {
 16732  			break
 16733  		}
 16734  		z := v_0.Args[0]
 16735  		if z.Op != OpARM64AND {
 16736  			break
 16737  		}
 16738  		y := z.Args[1]
 16739  		x := z.Args[0]
 16740  		if !(z.Uses == 1) {
 16741  			break
 16742  		}
 16743  		v.reset(OpARM64NotEqual)
 16744  		v0 := b.NewValue0(v.Pos, OpARM64TST, types.TypeFlags)
 16745  		v0.AddArg2(x, y)
 16746  		v.AddArg(v0)
 16747  		return true
 16748  	}
 16749  	// match: (NotEqual (CMPWconst [0] x:(ANDconst [c] y)))
 16750  	// cond: x.Uses == 1
 16751  	// result: (NotEqual (TSTWconst [int32(c)] y))
 16752  	for {
 16753  		if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 {
 16754  			break
 16755  		}
 16756  		x := v_0.Args[0]
 16757  		if x.Op != OpARM64ANDconst {
 16758  			break
 16759  		}
 16760  		c := auxIntToInt64(x.AuxInt)
 16761  		y := x.Args[0]
 16762  		if !(x.Uses == 1) {
 16763  			break
 16764  		}
 16765  		v.reset(OpARM64NotEqual)
 16766  		v0 := b.NewValue0(v.Pos, OpARM64TSTWconst, types.TypeFlags)
 16767  		v0.AuxInt = int32ToAuxInt(int32(c))
 16768  		v0.AddArg(y)
 16769  		v.AddArg(v0)
 16770  		return true
 16771  	}
 16772  	// match: (NotEqual (CMPWconst [0] z:(AND x y)))
 16773  	// cond: z.Uses == 1
 16774  	// result: (NotEqual (TSTW x y))
 16775  	for {
 16776  		if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 {
 16777  			break
 16778  		}
 16779  		z := v_0.Args[0]
 16780  		if z.Op != OpARM64AND {
 16781  			break
 16782  		}
 16783  		y := z.Args[1]
 16784  		x := z.Args[0]
 16785  		if !(z.Uses == 1) {
 16786  			break
 16787  		}
 16788  		v.reset(OpARM64NotEqual)
 16789  		v0 := b.NewValue0(v.Pos, OpARM64TSTW, types.TypeFlags)
 16790  		v0.AddArg2(x, y)
 16791  		v.AddArg(v0)
 16792  		return true
 16793  	}
 16794  	// match: (NotEqual (CMPconst [0] x:(ANDconst [c] y)))
 16795  	// cond: x.Uses == 1
 16796  	// result: (NotEqual (TSTconst [c] y))
 16797  	for {
 16798  		if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 {
 16799  			break
 16800  		}
 16801  		x := v_0.Args[0]
 16802  		if x.Op != OpARM64ANDconst {
 16803  			break
 16804  		}
 16805  		c := auxIntToInt64(x.AuxInt)
 16806  		y := x.Args[0]
 16807  		if !(x.Uses == 1) {
 16808  			break
 16809  		}
 16810  		v.reset(OpARM64NotEqual)
 16811  		v0 := b.NewValue0(v.Pos, OpARM64TSTconst, types.TypeFlags)
 16812  		v0.AuxInt = int64ToAuxInt(c)
 16813  		v0.AddArg(y)
 16814  		v.AddArg(v0)
 16815  		return true
 16816  	}
 16817  	// match: (NotEqual (CMP x z:(NEG y)))
 16818  	// cond: z.Uses == 1
 16819  	// result: (NotEqual (CMN x y))
 16820  	for {
 16821  		if v_0.Op != OpARM64CMP {
 16822  			break
 16823  		}
 16824  		_ = v_0.Args[1]
 16825  		x := v_0.Args[0]
 16826  		z := v_0.Args[1]
 16827  		if z.Op != OpARM64NEG {
 16828  			break
 16829  		}
 16830  		y := z.Args[0]
 16831  		if !(z.Uses == 1) {
 16832  			break
 16833  		}
 16834  		v.reset(OpARM64NotEqual)
 16835  		v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags)
 16836  		v0.AddArg2(x, y)
 16837  		v.AddArg(v0)
 16838  		return true
 16839  	}
 16840  	// match: (NotEqual (CMPW x z:(NEG y)))
 16841  	// cond: z.Uses == 1
 16842  	// result: (NotEqual (CMNW x y))
 16843  	for {
 16844  		if v_0.Op != OpARM64CMPW {
 16845  			break
 16846  		}
 16847  		_ = v_0.Args[1]
 16848  		x := v_0.Args[0]
 16849  		z := v_0.Args[1]
 16850  		if z.Op != OpARM64NEG {
 16851  			break
 16852  		}
 16853  		y := z.Args[0]
 16854  		if !(z.Uses == 1) {
 16855  			break
 16856  		}
 16857  		v.reset(OpARM64NotEqual)
 16858  		v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags)
 16859  		v0.AddArg2(x, y)
 16860  		v.AddArg(v0)
 16861  		return true
 16862  	}
 16863  	// match: (NotEqual (CMPconst [0] x:(ADDconst [c] y)))
 16864  	// cond: x.Uses == 1
 16865  	// result: (NotEqual (CMNconst [c] y))
 16866  	for {
 16867  		if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 {
 16868  			break
 16869  		}
 16870  		x := v_0.Args[0]
 16871  		if x.Op != OpARM64ADDconst {
 16872  			break
 16873  		}
 16874  		c := auxIntToInt64(x.AuxInt)
 16875  		y := x.Args[0]
 16876  		if !(x.Uses == 1) {
 16877  			break
 16878  		}
 16879  		v.reset(OpARM64NotEqual)
 16880  		v0 := b.NewValue0(v.Pos, OpARM64CMNconst, types.TypeFlags)
 16881  		v0.AuxInt = int64ToAuxInt(c)
 16882  		v0.AddArg(y)
 16883  		v.AddArg(v0)
 16884  		return true
 16885  	}
 16886  	// match: (NotEqual (CMPWconst [0] x:(ADDconst [c] y)))
 16887  	// cond: x.Uses == 1
 16888  	// result: (NotEqual (CMNWconst [int32(c)] y))
 16889  	for {
 16890  		if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 {
 16891  			break
 16892  		}
 16893  		x := v_0.Args[0]
 16894  		if x.Op != OpARM64ADDconst {
 16895  			break
 16896  		}
 16897  		c := auxIntToInt64(x.AuxInt)
 16898  		y := x.Args[0]
 16899  		if !(x.Uses == 1) {
 16900  			break
 16901  		}
 16902  		v.reset(OpARM64NotEqual)
 16903  		v0 := b.NewValue0(v.Pos, OpARM64CMNWconst, types.TypeFlags)
 16904  		v0.AuxInt = int32ToAuxInt(int32(c))
 16905  		v0.AddArg(y)
 16906  		v.AddArg(v0)
 16907  		return true
 16908  	}
 16909  	// match: (NotEqual (CMPconst [0] z:(ADD x y)))
 16910  	// cond: z.Uses == 1
 16911  	// result: (NotEqual (CMN x y))
 16912  	for {
 16913  		if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 {
 16914  			break
 16915  		}
 16916  		z := v_0.Args[0]
 16917  		if z.Op != OpARM64ADD {
 16918  			break
 16919  		}
 16920  		y := z.Args[1]
 16921  		x := z.Args[0]
 16922  		if !(z.Uses == 1) {
 16923  			break
 16924  		}
 16925  		v.reset(OpARM64NotEqual)
 16926  		v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags)
 16927  		v0.AddArg2(x, y)
 16928  		v.AddArg(v0)
 16929  		return true
 16930  	}
 16931  	// match: (NotEqual (CMPWconst [0] z:(ADD x y)))
 16932  	// cond: z.Uses == 1
 16933  	// result: (NotEqual (CMNW x y))
 16934  	for {
 16935  		if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 {
 16936  			break
 16937  		}
 16938  		z := v_0.Args[0]
 16939  		if z.Op != OpARM64ADD {
 16940  			break
 16941  		}
 16942  		y := z.Args[1]
 16943  		x := z.Args[0]
 16944  		if !(z.Uses == 1) {
 16945  			break
 16946  		}
 16947  		v.reset(OpARM64NotEqual)
 16948  		v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags)
 16949  		v0.AddArg2(x, y)
 16950  		v.AddArg(v0)
 16951  		return true
 16952  	}
 16953  	// match: (NotEqual (CMPconst [0] z:(MADD a x y)))
 16954  	// cond: z.Uses==1
 16955  	// result: (NotEqual (CMN a (MUL <x.Type> x y)))
 16956  	for {
 16957  		if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 {
 16958  			break
 16959  		}
 16960  		z := v_0.Args[0]
 16961  		if z.Op != OpARM64MADD {
 16962  			break
 16963  		}
 16964  		y := z.Args[2]
 16965  		a := z.Args[0]
 16966  		x := z.Args[1]
 16967  		if !(z.Uses == 1) {
 16968  			break
 16969  		}
 16970  		v.reset(OpARM64NotEqual)
 16971  		v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags)
 16972  		v1 := b.NewValue0(v.Pos, OpARM64MUL, x.Type)
 16973  		v1.AddArg2(x, y)
 16974  		v0.AddArg2(a, v1)
 16975  		v.AddArg(v0)
 16976  		return true
 16977  	}
 16978  	// match: (NotEqual (CMPconst [0] z:(MSUB a x y)))
 16979  	// cond: z.Uses==1
 16980  	// result: (NotEqual (CMP a (MUL <x.Type> x y)))
 16981  	for {
 16982  		if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 {
 16983  			break
 16984  		}
 16985  		z := v_0.Args[0]
 16986  		if z.Op != OpARM64MSUB {
 16987  			break
 16988  		}
 16989  		y := z.Args[2]
 16990  		a := z.Args[0]
 16991  		x := z.Args[1]
 16992  		if !(z.Uses == 1) {
 16993  			break
 16994  		}
 16995  		v.reset(OpARM64NotEqual)
 16996  		v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
 16997  		v1 := b.NewValue0(v.Pos, OpARM64MUL, x.Type)
 16998  		v1.AddArg2(x, y)
 16999  		v0.AddArg2(a, v1)
 17000  		v.AddArg(v0)
 17001  		return true
 17002  	}
 17003  	// match: (NotEqual (CMPWconst [0] z:(MADDW a x y)))
 17004  	// cond: z.Uses==1
 17005  	// result: (NotEqual (CMNW a (MULW <x.Type> x y)))
 17006  	for {
 17007  		if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 {
 17008  			break
 17009  		}
 17010  		z := v_0.Args[0]
 17011  		if z.Op != OpARM64MADDW {
 17012  			break
 17013  		}
 17014  		y := z.Args[2]
 17015  		a := z.Args[0]
 17016  		x := z.Args[1]
 17017  		if !(z.Uses == 1) {
 17018  			break
 17019  		}
 17020  		v.reset(OpARM64NotEqual)
 17021  		v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags)
 17022  		v1 := b.NewValue0(v.Pos, OpARM64MULW, x.Type)
 17023  		v1.AddArg2(x, y)
 17024  		v0.AddArg2(a, v1)
 17025  		v.AddArg(v0)
 17026  		return true
 17027  	}
 17028  	// match: (NotEqual (CMPWconst [0] z:(MSUBW a x y)))
 17029  	// cond: z.Uses==1
 17030  	// result: (NotEqual (CMPW a (MULW <x.Type> x y)))
 17031  	for {
 17032  		if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 {
 17033  			break
 17034  		}
 17035  		z := v_0.Args[0]
 17036  		if z.Op != OpARM64MSUBW {
 17037  			break
 17038  		}
 17039  		y := z.Args[2]
 17040  		a := z.Args[0]
 17041  		x := z.Args[1]
 17042  		if !(z.Uses == 1) {
 17043  			break
 17044  		}
 17045  		v.reset(OpARM64NotEqual)
 17046  		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
 17047  		v1 := b.NewValue0(v.Pos, OpARM64MULW, x.Type)
 17048  		v1.AddArg2(x, y)
 17049  		v0.AddArg2(a, v1)
 17050  		v.AddArg(v0)
 17051  		return true
 17052  	}
 17053  	// match: (NotEqual (FlagConstant [fc]))
 17054  	// result: (MOVDconst [b2i(fc.ne())])
 17055  	for {
 17056  		if v_0.Op != OpARM64FlagConstant {
 17057  			break
 17058  		}
 17059  		fc := auxIntToFlagConstant(v_0.AuxInt)
 17060  		v.reset(OpARM64MOVDconst)
 17061  		v.AuxInt = int64ToAuxInt(b2i(fc.ne()))
 17062  		return true
 17063  	}
 17064  	// match: (NotEqual (InvertFlags x))
 17065  	// result: (NotEqual x)
 17066  	for {
 17067  		if v_0.Op != OpARM64InvertFlags {
 17068  			break
 17069  		}
 17070  		x := v_0.Args[0]
 17071  		v.reset(OpARM64NotEqual)
 17072  		v.AddArg(x)
 17073  		return true
 17074  	}
 17075  	return false
 17076  }
 17077  func rewriteValueARM64_OpARM64OR(v *Value) bool {
 17078  	v_1 := v.Args[1]
 17079  	v_0 := v.Args[0]
 17080  	b := v.Block
 17081  	// match: (OR x (MOVDconst [c]))
 17082  	// result: (ORconst [c] x)
 17083  	for {
 17084  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 17085  			x := v_0
 17086  			if v_1.Op != OpARM64MOVDconst {
 17087  				continue
 17088  			}
 17089  			c := auxIntToInt64(v_1.AuxInt)
 17090  			v.reset(OpARM64ORconst)
 17091  			v.AuxInt = int64ToAuxInt(c)
 17092  			v.AddArg(x)
 17093  			return true
 17094  		}
 17095  		break
 17096  	}
 17097  	// match: (OR x x)
 17098  	// result: x
 17099  	for {
 17100  		x := v_0
 17101  		if x != v_1 {
 17102  			break
 17103  		}
 17104  		v.copyOf(x)
 17105  		return true
 17106  	}
 17107  	// match: (OR x (MVN y))
 17108  	// result: (ORN x y)
 17109  	for {
 17110  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 17111  			x := v_0
 17112  			if v_1.Op != OpARM64MVN {
 17113  				continue
 17114  			}
 17115  			y := v_1.Args[0]
 17116  			v.reset(OpARM64ORN)
 17117  			v.AddArg2(x, y)
 17118  			return true
 17119  		}
 17120  		break
 17121  	}
 17122  	// match: (OR x0 x1:(SLLconst [c] y))
 17123  	// cond: clobberIfDead(x1)
 17124  	// result: (ORshiftLL x0 y [c])
 17125  	for {
 17126  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 17127  			x0 := v_0
 17128  			x1 := v_1
 17129  			if x1.Op != OpARM64SLLconst {
 17130  				continue
 17131  			}
 17132  			c := auxIntToInt64(x1.AuxInt)
 17133  			y := x1.Args[0]
 17134  			if !(clobberIfDead(x1)) {
 17135  				continue
 17136  			}
 17137  			v.reset(OpARM64ORshiftLL)
 17138  			v.AuxInt = int64ToAuxInt(c)
 17139  			v.AddArg2(x0, y)
 17140  			return true
 17141  		}
 17142  		break
 17143  	}
 17144  	// match: (OR x0 x1:(SRLconst [c] y))
 17145  	// cond: clobberIfDead(x1)
 17146  	// result: (ORshiftRL x0 y [c])
 17147  	for {
 17148  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 17149  			x0 := v_0
 17150  			x1 := v_1
 17151  			if x1.Op != OpARM64SRLconst {
 17152  				continue
 17153  			}
 17154  			c := auxIntToInt64(x1.AuxInt)
 17155  			y := x1.Args[0]
 17156  			if !(clobberIfDead(x1)) {
 17157  				continue
 17158  			}
 17159  			v.reset(OpARM64ORshiftRL)
 17160  			v.AuxInt = int64ToAuxInt(c)
 17161  			v.AddArg2(x0, y)
 17162  			return true
 17163  		}
 17164  		break
 17165  	}
 17166  	// match: (OR x0 x1:(SRAconst [c] y))
 17167  	// cond: clobberIfDead(x1)
 17168  	// result: (ORshiftRA x0 y [c])
 17169  	for {
 17170  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 17171  			x0 := v_0
 17172  			x1 := v_1
 17173  			if x1.Op != OpARM64SRAconst {
 17174  				continue
 17175  			}
 17176  			c := auxIntToInt64(x1.AuxInt)
 17177  			y := x1.Args[0]
 17178  			if !(clobberIfDead(x1)) {
 17179  				continue
 17180  			}
 17181  			v.reset(OpARM64ORshiftRA)
 17182  			v.AuxInt = int64ToAuxInt(c)
 17183  			v.AddArg2(x0, y)
 17184  			return true
 17185  		}
 17186  		break
 17187  	}
 17188  	// match: (OR x0 x1:(RORconst [c] y))
 17189  	// cond: clobberIfDead(x1)
 17190  	// result: (ORshiftRO x0 y [c])
 17191  	for {
 17192  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 17193  			x0 := v_0
 17194  			x1 := v_1
 17195  			if x1.Op != OpARM64RORconst {
 17196  				continue
 17197  			}
 17198  			c := auxIntToInt64(x1.AuxInt)
 17199  			y := x1.Args[0]
 17200  			if !(clobberIfDead(x1)) {
 17201  				continue
 17202  			}
 17203  			v.reset(OpARM64ORshiftRO)
 17204  			v.AuxInt = int64ToAuxInt(c)
 17205  			v.AddArg2(x0, y)
 17206  			return true
 17207  		}
 17208  		break
 17209  	}
 17210  	// match: (OR (UBFIZ [bfc] x) (ANDconst [ac] y))
 17211  	// cond: ac == ^((1<<uint(bfc.getARM64BFwidth())-1) << uint(bfc.getARM64BFlsb()))
 17212  	// result: (BFI [bfc] y x)
 17213  	for {
 17214  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 17215  			if v_0.Op != OpARM64UBFIZ {
 17216  				continue
 17217  			}
 17218  			bfc := auxIntToArm64BitField(v_0.AuxInt)
 17219  			x := v_0.Args[0]
 17220  			if v_1.Op != OpARM64ANDconst {
 17221  				continue
 17222  			}
 17223  			ac := auxIntToInt64(v_1.AuxInt)
 17224  			y := v_1.Args[0]
 17225  			if !(ac == ^((1<<uint(bfc.getARM64BFwidth()) - 1) << uint(bfc.getARM64BFlsb()))) {
 17226  				continue
 17227  			}
 17228  			v.reset(OpARM64BFI)
 17229  			v.AuxInt = arm64BitFieldToAuxInt(bfc)
 17230  			v.AddArg2(y, x)
 17231  			return true
 17232  		}
 17233  		break
 17234  	}
 17235  	// match: (OR (UBFX [bfc] x) (ANDconst [ac] y))
 17236  	// cond: ac == ^(1<<uint(bfc.getARM64BFwidth())-1)
 17237  	// result: (BFXIL [bfc] y x)
 17238  	for {
 17239  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 17240  			if v_0.Op != OpARM64UBFX {
 17241  				continue
 17242  			}
 17243  			bfc := auxIntToArm64BitField(v_0.AuxInt)
 17244  			x := v_0.Args[0]
 17245  			if v_1.Op != OpARM64ANDconst {
 17246  				continue
 17247  			}
 17248  			ac := auxIntToInt64(v_1.AuxInt)
 17249  			y := v_1.Args[0]
 17250  			if !(ac == ^(1<<uint(bfc.getARM64BFwidth()) - 1)) {
 17251  				continue
 17252  			}
 17253  			v.reset(OpARM64BFXIL)
 17254  			v.AuxInt = arm64BitFieldToAuxInt(bfc)
 17255  			v.AddArg2(y, x)
 17256  			return true
 17257  		}
 17258  		break
 17259  	}
 17260  	// match: (OR <t> o0:(ORshiftLL [8] o1:(ORshiftLL [16] s0:(SLLconst [24] y0:(MOVDnop x0:(MOVBUload [i3] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [i2] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i1] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [i0] {s} p mem)))
 17261  	// cond: i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3) != nil && clobber(x0, x1, x2, x3, y0, y1, y2, y3, o0, o1, s0)
 17262  	// result: @mergePoint(b,x0,x1,x2,x3) (MOVWUload <t> {s} (OffPtr <p.Type> [int64(i0)] p) mem)
 17263  	for {
 17264  		t := v.Type
 17265  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 17266  			o0 := v_0
 17267  			if o0.Op != OpARM64ORshiftLL || auxIntToInt64(o0.AuxInt) != 8 {
 17268  				continue
 17269  			}
 17270  			_ = o0.Args[1]
 17271  			o1 := o0.Args[0]
 17272  			if o1.Op != OpARM64ORshiftLL || auxIntToInt64(o1.AuxInt) != 16 {
 17273  				continue
 17274  			}
 17275  			_ = o1.Args[1]
 17276  			s0 := o1.Args[0]
 17277  			if s0.Op != OpARM64SLLconst || auxIntToInt64(s0.AuxInt) != 24 {
 17278  				continue
 17279  			}
 17280  			y0 := s0.Args[0]
 17281  			if y0.Op != OpARM64MOVDnop {
 17282  				continue
 17283  			}
 17284  			x0 := y0.Args[0]
 17285  			if x0.Op != OpARM64MOVBUload {
 17286  				continue
 17287  			}
 17288  			i3 := auxIntToInt32(x0.AuxInt)
 17289  			s := auxToSym(x0.Aux)
 17290  			mem := x0.Args[1]
 17291  			p := x0.Args[0]
 17292  			y1 := o1.Args[1]
 17293  			if y1.Op != OpARM64MOVDnop {
 17294  				continue
 17295  			}
 17296  			x1 := y1.Args[0]
 17297  			if x1.Op != OpARM64MOVBUload {
 17298  				continue
 17299  			}
 17300  			i2 := auxIntToInt32(x1.AuxInt)
 17301  			if auxToSym(x1.Aux) != s {
 17302  				continue
 17303  			}
 17304  			_ = x1.Args[1]
 17305  			if p != x1.Args[0] || mem != x1.Args[1] {
 17306  				continue
 17307  			}
 17308  			y2 := o0.Args[1]
 17309  			if y2.Op != OpARM64MOVDnop {
 17310  				continue
 17311  			}
 17312  			x2 := y2.Args[0]
 17313  			if x2.Op != OpARM64MOVBUload {
 17314  				continue
 17315  			}
 17316  			i1 := auxIntToInt32(x2.AuxInt)
 17317  			if auxToSym(x2.Aux) != s {
 17318  				continue
 17319  			}
 17320  			_ = x2.Args[1]
 17321  			if p != x2.Args[0] || mem != x2.Args[1] {
 17322  				continue
 17323  			}
 17324  			y3 := v_1
 17325  			if y3.Op != OpARM64MOVDnop {
 17326  				continue
 17327  			}
 17328  			x3 := y3.Args[0]
 17329  			if x3.Op != OpARM64MOVBUload {
 17330  				continue
 17331  			}
 17332  			i0 := auxIntToInt32(x3.AuxInt)
 17333  			if auxToSym(x3.Aux) != s {
 17334  				continue
 17335  			}
 17336  			_ = x3.Args[1]
 17337  			if p != x3.Args[0] || mem != x3.Args[1] || !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3) != nil && clobber(x0, x1, x2, x3, y0, y1, y2, y3, o0, o1, s0)) {
 17338  				continue
 17339  			}
 17340  			b = mergePoint(b, x0, x1, x2, x3)
 17341  			v0 := b.NewValue0(x3.Pos, OpARM64MOVWUload, t)
 17342  			v.copyOf(v0)
 17343  			v0.Aux = symToAux(s)
 17344  			v1 := b.NewValue0(x3.Pos, OpOffPtr, p.Type)
 17345  			v1.AuxInt = int64ToAuxInt(int64(i0))
 17346  			v1.AddArg(p)
 17347  			v0.AddArg2(v1, mem)
 17348  			return true
 17349  		}
 17350  		break
 17351  	}
 17352  	// match: (OR <t> o0:(ORshiftLL [8] o1:(ORshiftLL [16] s0:(SLLconst [24] y0:(MOVDnop x0:(MOVBUload [3] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [2] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [1] {s} p1:(ADD ptr1 idx1) mem))) y3:(MOVDnop x3:(MOVBUloadidx ptr0 idx0 mem)))
 17353  	// cond: s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0, x1, x2, x3, y0, y1, y2, y3, o0, o1, s0)
 17354  	// result: @mergePoint(b,x0,x1,x2,x3) (MOVWUloadidx <t> ptr0 idx0 mem)
 17355  	for {
 17356  		t := v.Type
 17357  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 17358  			o0 := v_0
 17359  			if o0.Op != OpARM64ORshiftLL || auxIntToInt64(o0.AuxInt) != 8 {
 17360  				continue
 17361  			}
 17362  			_ = o0.Args[1]
 17363  			o1 := o0.Args[0]
 17364  			if o1.Op != OpARM64ORshiftLL || auxIntToInt64(o1.AuxInt) != 16 {
 17365  				continue
 17366  			}
 17367  			_ = o1.Args[1]
 17368  			s0 := o1.Args[0]
 17369  			if s0.Op != OpARM64SLLconst || auxIntToInt64(s0.AuxInt) != 24 {
 17370  				continue
 17371  			}
 17372  			y0 := s0.Args[0]
 17373  			if y0.Op != OpARM64MOVDnop {
 17374  				continue
 17375  			}
 17376  			x0 := y0.Args[0]
 17377  			if x0.Op != OpARM64MOVBUload || auxIntToInt32(x0.AuxInt) != 3 {
 17378  				continue
 17379  			}
 17380  			s := auxToSym(x0.Aux)
 17381  			mem := x0.Args[1]
 17382  			p := x0.Args[0]
 17383  			y1 := o1.Args[1]
 17384  			if y1.Op != OpARM64MOVDnop {
 17385  				continue
 17386  			}
 17387  			x1 := y1.Args[0]
 17388  			if x1.Op != OpARM64MOVBUload || auxIntToInt32(x1.AuxInt) != 2 || auxToSym(x1.Aux) != s {
 17389  				continue
 17390  			}
 17391  			_ = x1.Args[1]
 17392  			if p != x1.Args[0] || mem != x1.Args[1] {
 17393  				continue
 17394  			}
 17395  			y2 := o0.Args[1]
 17396  			if y2.Op != OpARM64MOVDnop {
 17397  				continue
 17398  			}
 17399  			x2 := y2.Args[0]
 17400  			if x2.Op != OpARM64MOVBUload || auxIntToInt32(x2.AuxInt) != 1 || auxToSym(x2.Aux) != s {
 17401  				continue
 17402  			}
 17403  			_ = x2.Args[1]
 17404  			p1 := x2.Args[0]
 17405  			if p1.Op != OpARM64ADD {
 17406  				continue
 17407  			}
 17408  			_ = p1.Args[1]
 17409  			p1_0 := p1.Args[0]
 17410  			p1_1 := p1.Args[1]
 17411  			for _i1 := 0; _i1 <= 1; _i1, p1_0, p1_1 = _i1+1, p1_1, p1_0 {
 17412  				ptr1 := p1_0
 17413  				idx1 := p1_1
 17414  				if mem != x2.Args[1] {
 17415  					continue
 17416  				}
 17417  				y3 := v_1
 17418  				if y3.Op != OpARM64MOVDnop {
 17419  					continue
 17420  				}
 17421  				x3 := y3.Args[0]
 17422  				if x3.Op != OpARM64MOVBUloadidx {
 17423  					continue
 17424  				}
 17425  				_ = x3.Args[2]
 17426  				ptr0 := x3.Args[0]
 17427  				idx0 := x3.Args[1]
 17428  				if mem != x3.Args[2] || !(s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0, x1, x2, x3, y0, y1, y2, y3, o0, o1, s0)) {
 17429  					continue
 17430  				}
 17431  				b = mergePoint(b, x0, x1, x2, x3)
 17432  				v0 := b.NewValue0(x2.Pos, OpARM64MOVWUloadidx, t)
 17433  				v.copyOf(v0)
 17434  				v0.AddArg3(ptr0, idx0, mem)
 17435  				return true
 17436  			}
 17437  		}
 17438  		break
 17439  	}
 17440  	// match: (OR <t> o0:(ORshiftLL [8] o1:(ORshiftLL [16] s0:(SLLconst [24] y0:(MOVDnop x0:(MOVBUloadidx ptr (ADDconst [3] idx) mem))) y1:(MOVDnop x1:(MOVBUloadidx ptr (ADDconst [2] idx) mem))) y2:(MOVDnop x2:(MOVBUloadidx ptr (ADDconst [1] idx) mem))) y3:(MOVDnop x3:(MOVBUloadidx ptr idx mem)))
 17441  	// cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3) != nil && clobber(x0, x1, x2, x3, y0, y1, y2, y3, o0, o1, s0)
 17442  	// result: @mergePoint(b,x0,x1,x2,x3) (MOVWUloadidx <t> ptr idx mem)
 17443  	for {
 17444  		t := v.Type
 17445  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 17446  			o0 := v_0
 17447  			if o0.Op != OpARM64ORshiftLL || auxIntToInt64(o0.AuxInt) != 8 {
 17448  				continue
 17449  			}
 17450  			_ = o0.Args[1]
 17451  			o1 := o0.Args[0]
 17452  			if o1.Op != OpARM64ORshiftLL || auxIntToInt64(o1.AuxInt) != 16 {
 17453  				continue
 17454  			}
 17455  			_ = o1.Args[1]
 17456  			s0 := o1.Args[0]
 17457  			if s0.Op != OpARM64SLLconst || auxIntToInt64(s0.AuxInt) != 24 {
 17458  				continue
 17459  			}
 17460  			y0 := s0.Args[0]
 17461  			if y0.Op != OpARM64MOVDnop {
 17462  				continue
 17463  			}
 17464  			x0 := y0.Args[0]
 17465  			if x0.Op != OpARM64MOVBUloadidx {
 17466  				continue
 17467  			}
 17468  			mem := x0.Args[2]
 17469  			ptr := x0.Args[0]
 17470  			x0_1 := x0.Args[1]
 17471  			if x0_1.Op != OpARM64ADDconst || auxIntToInt64(x0_1.AuxInt) != 3 {
 17472  				continue
 17473  			}
 17474  			idx := x0_1.Args[0]
 17475  			y1 := o1.Args[1]
 17476  			if y1.Op != OpARM64MOVDnop {
 17477  				continue
 17478  			}
 17479  			x1 := y1.Args[0]
 17480  			if x1.Op != OpARM64MOVBUloadidx {
 17481  				continue
 17482  			}
 17483  			_ = x1.Args[2]
 17484  			if ptr != x1.Args[0] {
 17485  				continue
 17486  			}
 17487  			x1_1 := x1.Args[1]
 17488  			if x1_1.Op != OpARM64ADDconst || auxIntToInt64(x1_1.AuxInt) != 2 || idx != x1_1.Args[0] || mem != x1.Args[2] {
 17489  				continue
 17490  			}
 17491  			y2 := o0.Args[1]
 17492  			if y2.Op != OpARM64MOVDnop {
 17493  				continue
 17494  			}
 17495  			x2 := y2.Args[0]
 17496  			if x2.Op != OpARM64MOVBUloadidx {
 17497  				continue
 17498  			}
 17499  			_ = x2.Args[2]
 17500  			if ptr != x2.Args[0] {
 17501  				continue
 17502  			}
 17503  			x2_1 := x2.Args[1]
 17504  			if x2_1.Op != OpARM64ADDconst || auxIntToInt64(x2_1.AuxInt) != 1 || idx != x2_1.Args[0] || mem != x2.Args[2] {
 17505  				continue
 17506  			}
 17507  			y3 := v_1
 17508  			if y3.Op != OpARM64MOVDnop {
 17509  				continue
 17510  			}
 17511  			x3 := y3.Args[0]
 17512  			if x3.Op != OpARM64MOVBUloadidx {
 17513  				continue
 17514  			}
 17515  			_ = x3.Args[2]
 17516  			if ptr != x3.Args[0] || idx != x3.Args[1] || mem != x3.Args[2] || !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3) != nil && clobber(x0, x1, x2, x3, y0, y1, y2, y3, o0, o1, s0)) {
 17517  				continue
 17518  			}
 17519  			b = mergePoint(b, x0, x1, x2, x3)
 17520  			v0 := b.NewValue0(v.Pos, OpARM64MOVWUloadidx, t)
 17521  			v.copyOf(v0)
 17522  			v0.AddArg3(ptr, idx, mem)
 17523  			return true
 17524  		}
 17525  		break
 17526  	}
 17527  	// match: (OR <t> o0:(ORshiftLL [8] o1:(ORshiftLL [16] o2:(ORshiftLL [24] o3:(ORshiftLL [32] o4:(ORshiftLL [40] o5:(ORshiftLL [48] s0:(SLLconst [56] y0:(MOVDnop x0:(MOVBUload [i7] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [i6] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i5] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [i4] {s} p mem))) y4:(MOVDnop x4:(MOVBUload [i3] {s} p mem))) y5:(MOVDnop x5:(MOVBUload [i2] {s} p mem))) y6:(MOVDnop x6:(MOVBUload [i1] {s} p mem))) y7:(MOVDnop x7:(MOVBUload [i0] {s} p mem)))
 17528  	// cond: i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) != nil && clobber(x0, x1, x2, x3, x4, x5, x6, x7, y0, y1, y2, y3, y4, y5, y6, y7, o0, o1, o2, o3, o4, o5, s0)
 17529  	// result: @mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) (MOVDload <t> {s} (OffPtr <p.Type> [int64(i0)] p) mem)
 17530  	for {
 17531  		t := v.Type
 17532  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 17533  			o0 := v_0
 17534  			if o0.Op != OpARM64ORshiftLL || auxIntToInt64(o0.AuxInt) != 8 {
 17535  				continue
 17536  			}
 17537  			_ = o0.Args[1]
 17538  			o1 := o0.Args[0]
 17539  			if o1.Op != OpARM64ORshiftLL || auxIntToInt64(o1.AuxInt) != 16 {
 17540  				continue
 17541  			}
 17542  			_ = o1.Args[1]
 17543  			o2 := o1.Args[0]
 17544  			if o2.Op != OpARM64ORshiftLL || auxIntToInt64(o2.AuxInt) != 24 {
 17545  				continue
 17546  			}
 17547  			_ = o2.Args[1]
 17548  			o3 := o2.Args[0]
 17549  			if o3.Op != OpARM64ORshiftLL || auxIntToInt64(o3.AuxInt) != 32 {
 17550  				continue
 17551  			}
 17552  			_ = o3.Args[1]
 17553  			o4 := o3.Args[0]
 17554  			if o4.Op != OpARM64ORshiftLL || auxIntToInt64(o4.AuxInt) != 40 {
 17555  				continue
 17556  			}
 17557  			_ = o4.Args[1]
 17558  			o5 := o4.Args[0]
 17559  			if o5.Op != OpARM64ORshiftLL || auxIntToInt64(o5.AuxInt) != 48 {
 17560  				continue
 17561  			}
 17562  			_ = o5.Args[1]
 17563  			s0 := o5.Args[0]
 17564  			if s0.Op != OpARM64SLLconst || auxIntToInt64(s0.AuxInt) != 56 {
 17565  				continue
 17566  			}
 17567  			y0 := s0.Args[0]
 17568  			if y0.Op != OpARM64MOVDnop {
 17569  				continue
 17570  			}
 17571  			x0 := y0.Args[0]
 17572  			if x0.Op != OpARM64MOVBUload {
 17573  				continue
 17574  			}
 17575  			i7 := auxIntToInt32(x0.AuxInt)
 17576  			s := auxToSym(x0.Aux)
 17577  			mem := x0.Args[1]
 17578  			p := x0.Args[0]
 17579  			y1 := o5.Args[1]
 17580  			if y1.Op != OpARM64MOVDnop {
 17581  				continue
 17582  			}
 17583  			x1 := y1.Args[0]
 17584  			if x1.Op != OpARM64MOVBUload {
 17585  				continue
 17586  			}
 17587  			i6 := auxIntToInt32(x1.AuxInt)
 17588  			if auxToSym(x1.Aux) != s {
 17589  				continue
 17590  			}
 17591  			_ = x1.Args[1]
 17592  			if p != x1.Args[0] || mem != x1.Args[1] {
 17593  				continue
 17594  			}
 17595  			y2 := o4.Args[1]
 17596  			if y2.Op != OpARM64MOVDnop {
 17597  				continue
 17598  			}
 17599  			x2 := y2.Args[0]
 17600  			if x2.Op != OpARM64MOVBUload {
 17601  				continue
 17602  			}
 17603  			i5 := auxIntToInt32(x2.AuxInt)
 17604  			if auxToSym(x2.Aux) != s {
 17605  				continue
 17606  			}
 17607  			_ = x2.Args[1]
 17608  			if p != x2.Args[0] || mem != x2.Args[1] {
 17609  				continue
 17610  			}
 17611  			y3 := o3.Args[1]
 17612  			if y3.Op != OpARM64MOVDnop {
 17613  				continue
 17614  			}
 17615  			x3 := y3.Args[0]
 17616  			if x3.Op != OpARM64MOVBUload {
 17617  				continue
 17618  			}
 17619  			i4 := auxIntToInt32(x3.AuxInt)
 17620  			if auxToSym(x3.Aux) != s {
 17621  				continue
 17622  			}
 17623  			_ = x3.Args[1]
 17624  			if p != x3.Args[0] || mem != x3.Args[1] {
 17625  				continue
 17626  			}
 17627  			y4 := o2.Args[1]
 17628  			if y4.Op != OpARM64MOVDnop {
 17629  				continue
 17630  			}
 17631  			x4 := y4.Args[0]
 17632  			if x4.Op != OpARM64MOVBUload {
 17633  				continue
 17634  			}
 17635  			i3 := auxIntToInt32(x4.AuxInt)
 17636  			if auxToSym(x4.Aux) != s {
 17637  				continue
 17638  			}
 17639  			_ = x4.Args[1]
 17640  			if p != x4.Args[0] || mem != x4.Args[1] {
 17641  				continue
 17642  			}
 17643  			y5 := o1.Args[1]
 17644  			if y5.Op != OpARM64MOVDnop {
 17645  				continue
 17646  			}
 17647  			x5 := y5.Args[0]
 17648  			if x5.Op != OpARM64MOVBUload {
 17649  				continue
 17650  			}
 17651  			i2 := auxIntToInt32(x5.AuxInt)
 17652  			if auxToSym(x5.Aux) != s {
 17653  				continue
 17654  			}
 17655  			_ = x5.Args[1]
 17656  			if p != x5.Args[0] || mem != x5.Args[1] {
 17657  				continue
 17658  			}
 17659  			y6 := o0.Args[1]
 17660  			if y6.Op != OpARM64MOVDnop {
 17661  				continue
 17662  			}
 17663  			x6 := y6.Args[0]
 17664  			if x6.Op != OpARM64MOVBUload {
 17665  				continue
 17666  			}
 17667  			i1 := auxIntToInt32(x6.AuxInt)
 17668  			if auxToSym(x6.Aux) != s {
 17669  				continue
 17670  			}
 17671  			_ = x6.Args[1]
 17672  			if p != x6.Args[0] || mem != x6.Args[1] {
 17673  				continue
 17674  			}
 17675  			y7 := v_1
 17676  			if y7.Op != OpARM64MOVDnop {
 17677  				continue
 17678  			}
 17679  			x7 := y7.Args[0]
 17680  			if x7.Op != OpARM64MOVBUload {
 17681  				continue
 17682  			}
 17683  			i0 := auxIntToInt32(x7.AuxInt)
 17684  			if auxToSym(x7.Aux) != s {
 17685  				continue
 17686  			}
 17687  			_ = x7.Args[1]
 17688  			if p != x7.Args[0] || mem != x7.Args[1] || !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && clobber(x0, x1, x2, x3, x4, x5, x6, x7, y0, y1, y2, y3, y4, y5, y6, y7, o0, o1, o2, o3, o4, o5, s0)) {
 17689  				continue
 17690  			}
 17691  			b = mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7)
 17692  			v0 := b.NewValue0(x7.Pos, OpARM64MOVDload, t)
 17693  			v.copyOf(v0)
 17694  			v0.Aux = symToAux(s)
 17695  			v1 := b.NewValue0(x7.Pos, OpOffPtr, p.Type)
 17696  			v1.AuxInt = int64ToAuxInt(int64(i0))
 17697  			v1.AddArg(p)
 17698  			v0.AddArg2(v1, mem)
 17699  			return true
 17700  		}
 17701  		break
 17702  	}
 17703  	// match: (OR <t> o0:(ORshiftLL [8] o1:(ORshiftLL [16] o2:(ORshiftLL [24] o3:(ORshiftLL [32] o4:(ORshiftLL [40] o5:(ORshiftLL [48] s0:(SLLconst [56] y0:(MOVDnop x0:(MOVBUload [7] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [6] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [5] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [4] {s} p mem))) y4:(MOVDnop x4:(MOVBUload [3] {s} p mem))) y5:(MOVDnop x5:(MOVBUload [2] {s} p mem))) y6:(MOVDnop x6:(MOVBUload [1] {s} p1:(ADD ptr1 idx1) mem))) y7:(MOVDnop x7:(MOVBUloadidx ptr0 idx0 mem)))
 17704  	// cond: s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0, x1, x2, x3, x4, x5, x6, x7, y0, y1, y2, y3, y4, y5, y6, y7, o0, o1, o2, o3, o4, o5, s0)
 17705  	// result: @mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) (MOVDloadidx <t> ptr0 idx0 mem)
 17706  	for {
 17707  		t := v.Type
 17708  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 17709  			o0 := v_0
 17710  			if o0.Op != OpARM64ORshiftLL || auxIntToInt64(o0.AuxInt) != 8 {
 17711  				continue
 17712  			}
 17713  			_ = o0.Args[1]
 17714  			o1 := o0.Args[0]
 17715  			if o1.Op != OpARM64ORshiftLL || auxIntToInt64(o1.AuxInt) != 16 {
 17716  				continue
 17717  			}
 17718  			_ = o1.Args[1]
 17719  			o2 := o1.Args[0]
 17720  			if o2.Op != OpARM64ORshiftLL || auxIntToInt64(o2.AuxInt) != 24 {
 17721  				continue
 17722  			}
 17723  			_ = o2.Args[1]
 17724  			o3 := o2.Args[0]
 17725  			if o3.Op != OpARM64ORshiftLL || auxIntToInt64(o3.AuxInt) != 32 {
 17726  				continue
 17727  			}
 17728  			_ = o3.Args[1]
 17729  			o4 := o3.Args[0]
 17730  			if o4.Op != OpARM64ORshiftLL || auxIntToInt64(o4.AuxInt) != 40 {
 17731  				continue
 17732  			}
 17733  			_ = o4.Args[1]
 17734  			o5 := o4.Args[0]
 17735  			if o5.Op != OpARM64ORshiftLL || auxIntToInt64(o5.AuxInt) != 48 {
 17736  				continue
 17737  			}
 17738  			_ = o5.Args[1]
 17739  			s0 := o5.Args[0]
 17740  			if s0.Op != OpARM64SLLconst || auxIntToInt64(s0.AuxInt) != 56 {
 17741  				continue
 17742  			}
 17743  			y0 := s0.Args[0]
 17744  			if y0.Op != OpARM64MOVDnop {
 17745  				continue
 17746  			}
 17747  			x0 := y0.Args[0]
 17748  			if x0.Op != OpARM64MOVBUload || auxIntToInt32(x0.AuxInt) != 7 {
 17749  				continue
 17750  			}
 17751  			s := auxToSym(x0.Aux)
 17752  			mem := x0.Args[1]
 17753  			p := x0.Args[0]
 17754  			y1 := o5.Args[1]
 17755  			if y1.Op != OpARM64MOVDnop {
 17756  				continue
 17757  			}
 17758  			x1 := y1.Args[0]
 17759  			if x1.Op != OpARM64MOVBUload || auxIntToInt32(x1.AuxInt) != 6 || auxToSym(x1.Aux) != s {
 17760  				continue
 17761  			}
 17762  			_ = x1.Args[1]
 17763  			if p != x1.Args[0] || mem != x1.Args[1] {
 17764  				continue
 17765  			}
 17766  			y2 := o4.Args[1]
 17767  			if y2.Op != OpARM64MOVDnop {
 17768  				continue
 17769  			}
 17770  			x2 := y2.Args[0]
 17771  			if x2.Op != OpARM64MOVBUload || auxIntToInt32(x2.AuxInt) != 5 || auxToSym(x2.Aux) != s {
 17772  				continue
 17773  			}
 17774  			_ = x2.Args[1]
 17775  			if p != x2.Args[0] || mem != x2.Args[1] {
 17776  				continue
 17777  			}
 17778  			y3 := o3.Args[1]
 17779  			if y3.Op != OpARM64MOVDnop {
 17780  				continue
 17781  			}
 17782  			x3 := y3.Args[0]
 17783  			if x3.Op != OpARM64MOVBUload || auxIntToInt32(x3.AuxInt) != 4 || auxToSym(x3.Aux) != s {
 17784  				continue
 17785  			}
 17786  			_ = x3.Args[1]
 17787  			if p != x3.Args[0] || mem != x3.Args[1] {
 17788  				continue
 17789  			}
 17790  			y4 := o2.Args[1]
 17791  			if y4.Op != OpARM64MOVDnop {
 17792  				continue
 17793  			}
 17794  			x4 := y4.Args[0]
 17795  			if x4.Op != OpARM64MOVBUload || auxIntToInt32(x4.AuxInt) != 3 || auxToSym(x4.Aux) != s {
 17796  				continue
 17797  			}
 17798  			_ = x4.Args[1]
 17799  			if p != x4.Args[0] || mem != x4.Args[1] {
 17800  				continue
 17801  			}
 17802  			y5 := o1.Args[1]
 17803  			if y5.Op != OpARM64MOVDnop {
 17804  				continue
 17805  			}
 17806  			x5 := y5.Args[0]
 17807  			if x5.Op != OpARM64MOVBUload || auxIntToInt32(x5.AuxInt) != 2 || auxToSym(x5.Aux) != s {
 17808  				continue
 17809  			}
 17810  			_ = x5.Args[1]
 17811  			if p != x5.Args[0] || mem != x5.Args[1] {
 17812  				continue
 17813  			}
 17814  			y6 := o0.Args[1]
 17815  			if y6.Op != OpARM64MOVDnop {
 17816  				continue
 17817  			}
 17818  			x6 := y6.Args[0]
 17819  			if x6.Op != OpARM64MOVBUload || auxIntToInt32(x6.AuxInt) != 1 || auxToSym(x6.Aux) != s {
 17820  				continue
 17821  			}
 17822  			_ = x6.Args[1]
 17823  			p1 := x6.Args[0]
 17824  			if p1.Op != OpARM64ADD {
 17825  				continue
 17826  			}
 17827  			_ = p1.Args[1]
 17828  			p1_0 := p1.Args[0]
 17829  			p1_1 := p1.Args[1]
 17830  			for _i1 := 0; _i1 <= 1; _i1, p1_0, p1_1 = _i1+1, p1_1, p1_0 {
 17831  				ptr1 := p1_0
 17832  				idx1 := p1_1
 17833  				if mem != x6.Args[1] {
 17834  					continue
 17835  				}
 17836  				y7 := v_1
 17837  				if y7.Op != OpARM64MOVDnop {
 17838  					continue
 17839  				}
 17840  				x7 := y7.Args[0]
 17841  				if x7.Op != OpARM64MOVBUloadidx {
 17842  					continue
 17843  				}
 17844  				_ = x7.Args[2]
 17845  				ptr0 := x7.Args[0]
 17846  				idx0 := x7.Args[1]
 17847  				if mem != x7.Args[2] || !(s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0, x1, x2, x3, x4, x5, x6, x7, y0, y1, y2, y3, y4, y5, y6, y7, o0, o1, o2, o3, o4, o5, s0)) {
 17848  					continue
 17849  				}
 17850  				b = mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7)
 17851  				v0 := b.NewValue0(x6.Pos, OpARM64MOVDloadidx, t)
 17852  				v.copyOf(v0)
 17853  				v0.AddArg3(ptr0, idx0, mem)
 17854  				return true
 17855  			}
 17856  		}
 17857  		break
 17858  	}
 17859  	// match: (OR <t> o0:(ORshiftLL [8] o1:(ORshiftLL [16] o2:(ORshiftLL [24] o3:(ORshiftLL [32] o4:(ORshiftLL [40] o5:(ORshiftLL [48] s0:(SLLconst [56] y0:(MOVDnop x0:(MOVBUloadidx ptr (ADDconst [7] idx) mem))) y1:(MOVDnop x1:(MOVBUloadidx ptr (ADDconst [6] idx) mem))) y2:(MOVDnop x2:(MOVBUloadidx ptr (ADDconst [5] idx) mem))) y3:(MOVDnop x3:(MOVBUloadidx ptr (ADDconst [4] idx) mem))) y4:(MOVDnop x4:(MOVBUloadidx ptr (ADDconst [3] idx) mem))) y5:(MOVDnop x5:(MOVBUloadidx ptr (ADDconst [2] idx) mem))) y6:(MOVDnop x6:(MOVBUloadidx ptr (ADDconst [1] idx) mem))) y7:(MOVDnop x7:(MOVBUloadidx ptr idx mem)))
 17860  	// cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) != nil && clobber(x0, x1, x2, x3, x4, x5, x6, x7, y0, y1, y2, y3, y4, y5, y6, y7, o0, o1, o2, o3, o4, o5, s0)
 17861  	// result: @mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) (MOVDloadidx <t> ptr idx mem)
 17862  	for {
 17863  		t := v.Type
 17864  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 17865  			o0 := v_0
 17866  			if o0.Op != OpARM64ORshiftLL || auxIntToInt64(o0.AuxInt) != 8 {
 17867  				continue
 17868  			}
 17869  			_ = o0.Args[1]
 17870  			o1 := o0.Args[0]
 17871  			if o1.Op != OpARM64ORshiftLL || auxIntToInt64(o1.AuxInt) != 16 {
 17872  				continue
 17873  			}
 17874  			_ = o1.Args[1]
 17875  			o2 := o1.Args[0]
 17876  			if o2.Op != OpARM64ORshiftLL || auxIntToInt64(o2.AuxInt) != 24 {
 17877  				continue
 17878  			}
 17879  			_ = o2.Args[1]
 17880  			o3 := o2.Args[0]
 17881  			if o3.Op != OpARM64ORshiftLL || auxIntToInt64(o3.AuxInt) != 32 {
 17882  				continue
 17883  			}
 17884  			_ = o3.Args[1]
 17885  			o4 := o3.Args[0]
 17886  			if o4.Op != OpARM64ORshiftLL || auxIntToInt64(o4.AuxInt) != 40 {
 17887  				continue
 17888  			}
 17889  			_ = o4.Args[1]
 17890  			o5 := o4.Args[0]
 17891  			if o5.Op != OpARM64ORshiftLL || auxIntToInt64(o5.AuxInt) != 48 {
 17892  				continue
 17893  			}
 17894  			_ = o5.Args[1]
 17895  			s0 := o5.Args[0]
 17896  			if s0.Op != OpARM64SLLconst || auxIntToInt64(s0.AuxInt) != 56 {
 17897  				continue
 17898  			}
 17899  			y0 := s0.Args[0]
 17900  			if y0.Op != OpARM64MOVDnop {
 17901  				continue
 17902  			}
 17903  			x0 := y0.Args[0]
 17904  			if x0.Op != OpARM64MOVBUloadidx {
 17905  				continue
 17906  			}
 17907  			mem := x0.Args[2]
 17908  			ptr := x0.Args[0]
 17909  			x0_1 := x0.Args[1]
 17910  			if x0_1.Op != OpARM64ADDconst || auxIntToInt64(x0_1.AuxInt) != 7 {
 17911  				continue
 17912  			}
 17913  			idx := x0_1.Args[0]
 17914  			y1 := o5.Args[1]
 17915  			if y1.Op != OpARM64MOVDnop {
 17916  				continue
 17917  			}
 17918  			x1 := y1.Args[0]
 17919  			if x1.Op != OpARM64MOVBUloadidx {
 17920  				continue
 17921  			}
 17922  			_ = x1.Args[2]
 17923  			if ptr != x1.Args[0] {
 17924  				continue
 17925  			}
 17926  			x1_1 := x1.Args[1]
 17927  			if x1_1.Op != OpARM64ADDconst || auxIntToInt64(x1_1.AuxInt) != 6 || idx != x1_1.Args[0] || mem != x1.Args[2] {
 17928  				continue
 17929  			}
 17930  			y2 := o4.Args[1]
 17931  			if y2.Op != OpARM64MOVDnop {
 17932  				continue
 17933  			}
 17934  			x2 := y2.Args[0]
 17935  			if x2.Op != OpARM64MOVBUloadidx {
 17936  				continue
 17937  			}
 17938  			_ = x2.Args[2]
 17939  			if ptr != x2.Args[0] {
 17940  				continue
 17941  			}
 17942  			x2_1 := x2.Args[1]
 17943  			if x2_1.Op != OpARM64ADDconst || auxIntToInt64(x2_1.AuxInt) != 5 || idx != x2_1.Args[0] || mem != x2.Args[2] {
 17944  				continue
 17945  			}
 17946  			y3 := o3.Args[1]
 17947  			if y3.Op != OpARM64MOVDnop {
 17948  				continue
 17949  			}
 17950  			x3 := y3.Args[0]
 17951  			if x3.Op != OpARM64MOVBUloadidx {
 17952  				continue
 17953  			}
 17954  			_ = x3.Args[2]
 17955  			if ptr != x3.Args[0] {
 17956  				continue
 17957  			}
 17958  			x3_1 := x3.Args[1]
 17959  			if x3_1.Op != OpARM64ADDconst || auxIntToInt64(x3_1.AuxInt) != 4 || idx != x3_1.Args[0] || mem != x3.Args[2] {
 17960  				continue
 17961  			}
 17962  			y4 := o2.Args[1]
 17963  			if y4.Op != OpARM64MOVDnop {
 17964  				continue
 17965  			}
 17966  			x4 := y4.Args[0]
 17967  			if x4.Op != OpARM64MOVBUloadidx {
 17968  				continue
 17969  			}
 17970  			_ = x4.Args[2]
 17971  			if ptr != x4.Args[0] {
 17972  				continue
 17973  			}
 17974  			x4_1 := x4.Args[1]
 17975  			if x4_1.Op != OpARM64ADDconst || auxIntToInt64(x4_1.AuxInt) != 3 || idx != x4_1.Args[0] || mem != x4.Args[2] {
 17976  				continue
 17977  			}
 17978  			y5 := o1.Args[1]
 17979  			if y5.Op != OpARM64MOVDnop {
 17980  				continue
 17981  			}
 17982  			x5 := y5.Args[0]
 17983  			if x5.Op != OpARM64MOVBUloadidx {
 17984  				continue
 17985  			}
 17986  			_ = x5.Args[2]
 17987  			if ptr != x5.Args[0] {
 17988  				continue
 17989  			}
 17990  			x5_1 := x5.Args[1]
 17991  			if x5_1.Op != OpARM64ADDconst || auxIntToInt64(x5_1.AuxInt) != 2 || idx != x5_1.Args[0] || mem != x5.Args[2] {
 17992  				continue
 17993  			}
 17994  			y6 := o0.Args[1]
 17995  			if y6.Op != OpARM64MOVDnop {
 17996  				continue
 17997  			}
 17998  			x6 := y6.Args[0]
 17999  			if x6.Op != OpARM64MOVBUloadidx {
 18000  				continue
 18001  			}
 18002  			_ = x6.Args[2]
 18003  			if ptr != x6.Args[0] {
 18004  				continue
 18005  			}
 18006  			x6_1 := x6.Args[1]
 18007  			if x6_1.Op != OpARM64ADDconst || auxIntToInt64(x6_1.AuxInt) != 1 || idx != x6_1.Args[0] || mem != x6.Args[2] {
 18008  				continue
 18009  			}
 18010  			y7 := v_1
 18011  			if y7.Op != OpARM64MOVDnop {
 18012  				continue
 18013  			}
 18014  			x7 := y7.Args[0]
 18015  			if x7.Op != OpARM64MOVBUloadidx {
 18016  				continue
 18017  			}
 18018  			_ = x7.Args[2]
 18019  			if ptr != x7.Args[0] || idx != x7.Args[1] || mem != x7.Args[2] || !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && clobber(x0, x1, x2, x3, x4, x5, x6, x7, y0, y1, y2, y3, y4, y5, y6, y7, o0, o1, o2, o3, o4, o5, s0)) {
 18020  				continue
 18021  			}
 18022  			b = mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7)
 18023  			v0 := b.NewValue0(v.Pos, OpARM64MOVDloadidx, t)
 18024  			v.copyOf(v0)
 18025  			v0.AddArg3(ptr, idx, mem)
 18026  			return true
 18027  		}
 18028  		break
 18029  	}
 18030  	// match: (OR <t> o0:(ORshiftLL [8] o1:(ORshiftLL [16] s0:(SLLconst [24] y0:(MOVDnop x0:(MOVBUload [i0] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [i1] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i2] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [i3] {s} p mem)))
 18031  	// cond: i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3) != nil && clobber(x0, x1, x2, x3, y0, y1, y2, y3, o0, o1, s0)
 18032  	// result: @mergePoint(b,x0,x1,x2,x3) (REVW <t> (MOVWUload <t> {s} (OffPtr <p.Type> [int64(i0)] p) mem))
 18033  	for {
 18034  		t := v.Type
 18035  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 18036  			o0 := v_0
 18037  			if o0.Op != OpARM64ORshiftLL || auxIntToInt64(o0.AuxInt) != 8 {
 18038  				continue
 18039  			}
 18040  			_ = o0.Args[1]
 18041  			o1 := o0.Args[0]
 18042  			if o1.Op != OpARM64ORshiftLL || auxIntToInt64(o1.AuxInt) != 16 {
 18043  				continue
 18044  			}
 18045  			_ = o1.Args[1]
 18046  			s0 := o1.Args[0]
 18047  			if s0.Op != OpARM64SLLconst || auxIntToInt64(s0.AuxInt) != 24 {
 18048  				continue
 18049  			}
 18050  			y0 := s0.Args[0]
 18051  			if y0.Op != OpARM64MOVDnop {
 18052  				continue
 18053  			}
 18054  			x0 := y0.Args[0]
 18055  			if x0.Op != OpARM64MOVBUload {
 18056  				continue
 18057  			}
 18058  			i0 := auxIntToInt32(x0.AuxInt)
 18059  			s := auxToSym(x0.Aux)
 18060  			mem := x0.Args[1]
 18061  			p := x0.Args[0]
 18062  			y1 := o1.Args[1]
 18063  			if y1.Op != OpARM64MOVDnop {
 18064  				continue
 18065  			}
 18066  			x1 := y1.Args[0]
 18067  			if x1.Op != OpARM64MOVBUload {
 18068  				continue
 18069  			}
 18070  			i1 := auxIntToInt32(x1.AuxInt)
 18071  			if auxToSym(x1.Aux) != s {
 18072  				continue
 18073  			}
 18074  			_ = x1.Args[1]
 18075  			if p != x1.Args[0] || mem != x1.Args[1] {
 18076  				continue
 18077  			}
 18078  			y2 := o0.Args[1]
 18079  			if y2.Op != OpARM64MOVDnop {
 18080  				continue
 18081  			}
 18082  			x2 := y2.Args[0]
 18083  			if x2.Op != OpARM64MOVBUload {
 18084  				continue
 18085  			}
 18086  			i2 := auxIntToInt32(x2.AuxInt)
 18087  			if auxToSym(x2.Aux) != s {
 18088  				continue
 18089  			}
 18090  			_ = x2.Args[1]
 18091  			if p != x2.Args[0] || mem != x2.Args[1] {
 18092  				continue
 18093  			}
 18094  			y3 := v_1
 18095  			if y3.Op != OpARM64MOVDnop {
 18096  				continue
 18097  			}
 18098  			x3 := y3.Args[0]
 18099  			if x3.Op != OpARM64MOVBUload {
 18100  				continue
 18101  			}
 18102  			i3 := auxIntToInt32(x3.AuxInt)
 18103  			if auxToSym(x3.Aux) != s {
 18104  				continue
 18105  			}
 18106  			_ = x3.Args[1]
 18107  			if p != x3.Args[0] || mem != x3.Args[1] || !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3) != nil && clobber(x0, x1, x2, x3, y0, y1, y2, y3, o0, o1, s0)) {
 18108  				continue
 18109  			}
 18110  			b = mergePoint(b, x0, x1, x2, x3)
 18111  			v0 := b.NewValue0(x3.Pos, OpARM64REVW, t)
 18112  			v.copyOf(v0)
 18113  			v1 := b.NewValue0(x3.Pos, OpARM64MOVWUload, t)
 18114  			v1.Aux = symToAux(s)
 18115  			v2 := b.NewValue0(x3.Pos, OpOffPtr, p.Type)
 18116  			v2.AuxInt = int64ToAuxInt(int64(i0))
 18117  			v2.AddArg(p)
 18118  			v1.AddArg2(v2, mem)
 18119  			v0.AddArg(v1)
 18120  			return true
 18121  		}
 18122  		break
 18123  	}
 18124  	// match: (OR <t> o0:(ORshiftLL [8] o1:(ORshiftLL [16] s0:(SLLconst [24] y0:(MOVDnop x0:(MOVBUloadidx ptr0 idx0 mem))) y1:(MOVDnop x1:(MOVBUload [1] {s} p1:(ADD ptr1 idx1) mem))) y2:(MOVDnop x2:(MOVBUload [2] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [3] {s} p mem)))
 18125  	// cond: s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0, x1, x2, x3, y0, y1, y2, y3, o0, o1, s0)
 18126  	// result: @mergePoint(b,x0,x1,x2,x3) (REVW <t> (MOVWUloadidx <t> ptr0 idx0 mem))
 18127  	for {
 18128  		t := v.Type
 18129  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 18130  			o0 := v_0
 18131  			if o0.Op != OpARM64ORshiftLL || auxIntToInt64(o0.AuxInt) != 8 {
 18132  				continue
 18133  			}
 18134  			_ = o0.Args[1]
 18135  			o1 := o0.Args[0]
 18136  			if o1.Op != OpARM64ORshiftLL || auxIntToInt64(o1.AuxInt) != 16 {
 18137  				continue
 18138  			}
 18139  			_ = o1.Args[1]
 18140  			s0 := o1.Args[0]
 18141  			if s0.Op != OpARM64SLLconst || auxIntToInt64(s0.AuxInt) != 24 {
 18142  				continue
 18143  			}
 18144  			y0 := s0.Args[0]
 18145  			if y0.Op != OpARM64MOVDnop {
 18146  				continue
 18147  			}
 18148  			x0 := y0.Args[0]
 18149  			if x0.Op != OpARM64MOVBUloadidx {
 18150  				continue
 18151  			}
 18152  			mem := x0.Args[2]
 18153  			ptr0 := x0.Args[0]
 18154  			idx0 := x0.Args[1]
 18155  			y1 := o1.Args[1]
 18156  			if y1.Op != OpARM64MOVDnop {
 18157  				continue
 18158  			}
 18159  			x1 := y1.Args[0]
 18160  			if x1.Op != OpARM64MOVBUload || auxIntToInt32(x1.AuxInt) != 1 {
 18161  				continue
 18162  			}
 18163  			s := auxToSym(x1.Aux)
 18164  			_ = x1.Args[1]
 18165  			p1 := x1.Args[0]
 18166  			if p1.Op != OpARM64ADD {
 18167  				continue
 18168  			}
 18169  			_ = p1.Args[1]
 18170  			p1_0 := p1.Args[0]
 18171  			p1_1 := p1.Args[1]
 18172  			for _i1 := 0; _i1 <= 1; _i1, p1_0, p1_1 = _i1+1, p1_1, p1_0 {
 18173  				ptr1 := p1_0
 18174  				idx1 := p1_1
 18175  				if mem != x1.Args[1] {
 18176  					continue
 18177  				}
 18178  				y2 := o0.Args[1]
 18179  				if y2.Op != OpARM64MOVDnop {
 18180  					continue
 18181  				}
 18182  				x2 := y2.Args[0]
 18183  				if x2.Op != OpARM64MOVBUload || auxIntToInt32(x2.AuxInt) != 2 || auxToSym(x2.Aux) != s {
 18184  					continue
 18185  				}
 18186  				_ = x2.Args[1]
 18187  				p := x2.Args[0]
 18188  				if mem != x2.Args[1] {
 18189  					continue
 18190  				}
 18191  				y3 := v_1
 18192  				if y3.Op != OpARM64MOVDnop {
 18193  					continue
 18194  				}
 18195  				x3 := y3.Args[0]
 18196  				if x3.Op != OpARM64MOVBUload || auxIntToInt32(x3.AuxInt) != 3 || auxToSym(x3.Aux) != s {
 18197  					continue
 18198  				}
 18199  				_ = x3.Args[1]
 18200  				if p != x3.Args[0] || mem != x3.Args[1] || !(s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0, x1, x2, x3, y0, y1, y2, y3, o0, o1, s0)) {
 18201  					continue
 18202  				}
 18203  				b = mergePoint(b, x0, x1, x2, x3)
 18204  				v0 := b.NewValue0(x3.Pos, OpARM64REVW, t)
 18205  				v.copyOf(v0)
 18206  				v1 := b.NewValue0(x3.Pos, OpARM64MOVWUloadidx, t)
 18207  				v1.AddArg3(ptr0, idx0, mem)
 18208  				v0.AddArg(v1)
 18209  				return true
 18210  			}
 18211  		}
 18212  		break
 18213  	}
 18214  	// match: (OR <t> o0:(ORshiftLL [8] o1:(ORshiftLL [16] s0:(SLLconst [24] y0:(MOVDnop x0:(MOVBUloadidx ptr idx mem))) y1:(MOVDnop x1:(MOVBUloadidx ptr (ADDconst [1] idx) mem))) y2:(MOVDnop x2:(MOVBUloadidx ptr (ADDconst [2] idx) mem))) y3:(MOVDnop x3:(MOVBUloadidx ptr (ADDconst [3] idx) mem)))
 18215  	// cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3) != nil && clobber(x0, x1, x2, x3, y0, y1, y2, y3, o0, o1, s0)
 18216  	// result: @mergePoint(b,x0,x1,x2,x3) (REVW <t> (MOVWUloadidx <t> ptr idx mem))
 18217  	for {
 18218  		t := v.Type
 18219  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 18220  			o0 := v_0
 18221  			if o0.Op != OpARM64ORshiftLL || auxIntToInt64(o0.AuxInt) != 8 {
 18222  				continue
 18223  			}
 18224  			_ = o0.Args[1]
 18225  			o1 := o0.Args[0]
 18226  			if o1.Op != OpARM64ORshiftLL || auxIntToInt64(o1.AuxInt) != 16 {
 18227  				continue
 18228  			}
 18229  			_ = o1.Args[1]
 18230  			s0 := o1.Args[0]
 18231  			if s0.Op != OpARM64SLLconst || auxIntToInt64(s0.AuxInt) != 24 {
 18232  				continue
 18233  			}
 18234  			y0 := s0.Args[0]
 18235  			if y0.Op != OpARM64MOVDnop {
 18236  				continue
 18237  			}
 18238  			x0 := y0.Args[0]
 18239  			if x0.Op != OpARM64MOVBUloadidx {
 18240  				continue
 18241  			}
 18242  			mem := x0.Args[2]
 18243  			ptr := x0.Args[0]
 18244  			idx := x0.Args[1]
 18245  			y1 := o1.Args[1]
 18246  			if y1.Op != OpARM64MOVDnop {
 18247  				continue
 18248  			}
 18249  			x1 := y1.Args[0]
 18250  			if x1.Op != OpARM64MOVBUloadidx {
 18251  				continue
 18252  			}
 18253  			_ = x1.Args[2]
 18254  			if ptr != x1.Args[0] {
 18255  				continue
 18256  			}
 18257  			x1_1 := x1.Args[1]
 18258  			if x1_1.Op != OpARM64ADDconst || auxIntToInt64(x1_1.AuxInt) != 1 || idx != x1_1.Args[0] || mem != x1.Args[2] {
 18259  				continue
 18260  			}
 18261  			y2 := o0.Args[1]
 18262  			if y2.Op != OpARM64MOVDnop {
 18263  				continue
 18264  			}
 18265  			x2 := y2.Args[0]
 18266  			if x2.Op != OpARM64MOVBUloadidx {
 18267  				continue
 18268  			}
 18269  			_ = x2.Args[2]
 18270  			if ptr != x2.Args[0] {
 18271  				continue
 18272  			}
 18273  			x2_1 := x2.Args[1]
 18274  			if x2_1.Op != OpARM64ADDconst || auxIntToInt64(x2_1.AuxInt) != 2 || idx != x2_1.Args[0] || mem != x2.Args[2] {
 18275  				continue
 18276  			}
 18277  			y3 := v_1
 18278  			if y3.Op != OpARM64MOVDnop {
 18279  				continue
 18280  			}
 18281  			x3 := y3.Args[0]
 18282  			if x3.Op != OpARM64MOVBUloadidx {
 18283  				continue
 18284  			}
 18285  			_ = x3.Args[2]
 18286  			if ptr != x3.Args[0] {
 18287  				continue
 18288  			}
 18289  			x3_1 := x3.Args[1]
 18290  			if x3_1.Op != OpARM64ADDconst || auxIntToInt64(x3_1.AuxInt) != 3 || idx != x3_1.Args[0] || mem != x3.Args[2] || !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3) != nil && clobber(x0, x1, x2, x3, y0, y1, y2, y3, o0, o1, s0)) {
 18291  				continue
 18292  			}
 18293  			b = mergePoint(b, x0, x1, x2, x3)
 18294  			v0 := b.NewValue0(v.Pos, OpARM64REVW, t)
 18295  			v.copyOf(v0)
 18296  			v1 := b.NewValue0(v.Pos, OpARM64MOVWUloadidx, t)
 18297  			v1.AddArg3(ptr, idx, mem)
 18298  			v0.AddArg(v1)
 18299  			return true
 18300  		}
 18301  		break
 18302  	}
 18303  	// match: (OR <t> o0:(ORshiftLL [8] o1:(ORshiftLL [16] o2:(ORshiftLL [24] o3:(ORshiftLL [32] o4:(ORshiftLL [40] o5:(ORshiftLL [48] s0:(SLLconst [56] y0:(MOVDnop x0:(MOVBUload [i0] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [i1] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i2] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [i3] {s} p mem))) y4:(MOVDnop x4:(MOVBUload [i4] {s} p mem))) y5:(MOVDnop x5:(MOVBUload [i5] {s} p mem))) y6:(MOVDnop x6:(MOVBUload [i6] {s} p mem))) y7:(MOVDnop x7:(MOVBUload [i7] {s} p mem)))
 18304  	// cond: i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) != nil && clobber(x0, x1, x2, x3, x4, x5, x6, x7, y0, y1, y2, y3, y4, y5, y6, y7, o0, o1, o2, o3, o4, o5, s0)
 18305  	// result: @mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) (REV <t> (MOVDload <t> {s} (OffPtr <p.Type> [int64(i0)] p) mem))
 18306  	for {
 18307  		t := v.Type
 18308  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 18309  			o0 := v_0
 18310  			if o0.Op != OpARM64ORshiftLL || auxIntToInt64(o0.AuxInt) != 8 {
 18311  				continue
 18312  			}
 18313  			_ = o0.Args[1]
 18314  			o1 := o0.Args[0]
 18315  			if o1.Op != OpARM64ORshiftLL || auxIntToInt64(o1.AuxInt) != 16 {
 18316  				continue
 18317  			}
 18318  			_ = o1.Args[1]
 18319  			o2 := o1.Args[0]
 18320  			if o2.Op != OpARM64ORshiftLL || auxIntToInt64(o2.AuxInt) != 24 {
 18321  				continue
 18322  			}
 18323  			_ = o2.Args[1]
 18324  			o3 := o2.Args[0]
 18325  			if o3.Op != OpARM64ORshiftLL || auxIntToInt64(o3.AuxInt) != 32 {
 18326  				continue
 18327  			}
 18328  			_ = o3.Args[1]
 18329  			o4 := o3.Args[0]
 18330  			if o4.Op != OpARM64ORshiftLL || auxIntToInt64(o4.AuxInt) != 40 {
 18331  				continue
 18332  			}
 18333  			_ = o4.Args[1]
 18334  			o5 := o4.Args[0]
 18335  			if o5.Op != OpARM64ORshiftLL || auxIntToInt64(o5.AuxInt) != 48 {
 18336  				continue
 18337  			}
 18338  			_ = o5.Args[1]
 18339  			s0 := o5.Args[0]
 18340  			if s0.Op != OpARM64SLLconst || auxIntToInt64(s0.AuxInt) != 56 {
 18341  				continue
 18342  			}
 18343  			y0 := s0.Args[0]
 18344  			if y0.Op != OpARM64MOVDnop {
 18345  				continue
 18346  			}
 18347  			x0 := y0.Args[0]
 18348  			if x0.Op != OpARM64MOVBUload {
 18349  				continue
 18350  			}
 18351  			i0 := auxIntToInt32(x0.AuxInt)
 18352  			s := auxToSym(x0.Aux)
 18353  			mem := x0.Args[1]
 18354  			p := x0.Args[0]
 18355  			y1 := o5.Args[1]
 18356  			if y1.Op != OpARM64MOVDnop {
 18357  				continue
 18358  			}
 18359  			x1 := y1.Args[0]
 18360  			if x1.Op != OpARM64MOVBUload {
 18361  				continue
 18362  			}
 18363  			i1 := auxIntToInt32(x1.AuxInt)
 18364  			if auxToSym(x1.Aux) != s {
 18365  				continue
 18366  			}
 18367  			_ = x1.Args[1]
 18368  			if p != x1.Args[0] || mem != x1.Args[1] {
 18369  				continue
 18370  			}
 18371  			y2 := o4.Args[1]
 18372  			if y2.Op != OpARM64MOVDnop {
 18373  				continue
 18374  			}
 18375  			x2 := y2.Args[0]
 18376  			if x2.Op != OpARM64MOVBUload {
 18377  				continue
 18378  			}
 18379  			i2 := auxIntToInt32(x2.AuxInt)
 18380  			if auxToSym(x2.Aux) != s {
 18381  				continue
 18382  			}
 18383  			_ = x2.Args[1]
 18384  			if p != x2.Args[0] || mem != x2.Args[1] {
 18385  				continue
 18386  			}
 18387  			y3 := o3.Args[1]
 18388  			if y3.Op != OpARM64MOVDnop {
 18389  				continue
 18390  			}
 18391  			x3 := y3.Args[0]
 18392  			if x3.Op != OpARM64MOVBUload {
 18393  				continue
 18394  			}
 18395  			i3 := auxIntToInt32(x3.AuxInt)
 18396  			if auxToSym(x3.Aux) != s {
 18397  				continue
 18398  			}
 18399  			_ = x3.Args[1]
 18400  			if p != x3.Args[0] || mem != x3.Args[1] {
 18401  				continue
 18402  			}
 18403  			y4 := o2.Args[1]
 18404  			if y4.Op != OpARM64MOVDnop {
 18405  				continue
 18406  			}
 18407  			x4 := y4.Args[0]
 18408  			if x4.Op != OpARM64MOVBUload {
 18409  				continue
 18410  			}
 18411  			i4 := auxIntToInt32(x4.AuxInt)
 18412  			if auxToSym(x4.Aux) != s {
 18413  				continue
 18414  			}
 18415  			_ = x4.Args[1]
 18416  			if p != x4.Args[0] || mem != x4.Args[1] {
 18417  				continue
 18418  			}
 18419  			y5 := o1.Args[1]
 18420  			if y5.Op != OpARM64MOVDnop {
 18421  				continue
 18422  			}
 18423  			x5 := y5.Args[0]
 18424  			if x5.Op != OpARM64MOVBUload {
 18425  				continue
 18426  			}
 18427  			i5 := auxIntToInt32(x5.AuxInt)
 18428  			if auxToSym(x5.Aux) != s {
 18429  				continue
 18430  			}
 18431  			_ = x5.Args[1]
 18432  			if p != x5.Args[0] || mem != x5.Args[1] {
 18433  				continue
 18434  			}
 18435  			y6 := o0.Args[1]
 18436  			if y6.Op != OpARM64MOVDnop {
 18437  				continue
 18438  			}
 18439  			x6 := y6.Args[0]
 18440  			if x6.Op != OpARM64MOVBUload {
 18441  				continue
 18442  			}
 18443  			i6 := auxIntToInt32(x6.AuxInt)
 18444  			if auxToSym(x6.Aux) != s {
 18445  				continue
 18446  			}
 18447  			_ = x6.Args[1]
 18448  			if p != x6.Args[0] || mem != x6.Args[1] {
 18449  				continue
 18450  			}
 18451  			y7 := v_1
 18452  			if y7.Op != OpARM64MOVDnop {
 18453  				continue
 18454  			}
 18455  			x7 := y7.Args[0]
 18456  			if x7.Op != OpARM64MOVBUload {
 18457  				continue
 18458  			}
 18459  			i7 := auxIntToInt32(x7.AuxInt)
 18460  			if auxToSym(x7.Aux) != s {
 18461  				continue
 18462  			}
 18463  			_ = x7.Args[1]
 18464  			if p != x7.Args[0] || mem != x7.Args[1] || !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && clobber(x0, x1, x2, x3, x4, x5, x6, x7, y0, y1, y2, y3, y4, y5, y6, y7, o0, o1, o2, o3, o4, o5, s0)) {
 18465  				continue
 18466  			}
 18467  			b = mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7)
 18468  			v0 := b.NewValue0(x7.Pos, OpARM64REV, t)
 18469  			v.copyOf(v0)
 18470  			v1 := b.NewValue0(x7.Pos, OpARM64MOVDload, t)
 18471  			v1.Aux = symToAux(s)
 18472  			v2 := b.NewValue0(x7.Pos, OpOffPtr, p.Type)
 18473  			v2.AuxInt = int64ToAuxInt(int64(i0))
 18474  			v2.AddArg(p)
 18475  			v1.AddArg2(v2, mem)
 18476  			v0.AddArg(v1)
 18477  			return true
 18478  		}
 18479  		break
 18480  	}
 18481  	// match: (OR <t> o0:(ORshiftLL [8] o1:(ORshiftLL [16] o2:(ORshiftLL [24] o3:(ORshiftLL [32] o4:(ORshiftLL [40] o5:(ORshiftLL [48] s0:(SLLconst [56] y0:(MOVDnop x0:(MOVBUloadidx ptr0 idx0 mem))) y1:(MOVDnop x1:(MOVBUload [1] {s} p1:(ADD ptr1 idx1) mem))) y2:(MOVDnop x2:(MOVBUload [2] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [3] {s} p mem))) y4:(MOVDnop x4:(MOVBUload [4] {s} p mem))) y5:(MOVDnop x5:(MOVBUload [5] {s} p mem))) y6:(MOVDnop x6:(MOVBUload [6] {s} p mem))) y7:(MOVDnop x7:(MOVBUload [7] {s} p mem)))
 18482  	// cond: s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0, x1, x2, x3, x4, x5, x6, x7, y0, y1, y2, y3, y4, y5, y6, y7, o0, o1, o2, o3, o4, o5, s0)
 18483  	// result: @mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) (REV <t> (MOVDloadidx <t> ptr0 idx0 mem))
 18484  	for {
 18485  		t := v.Type
 18486  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 18487  			o0 := v_0
 18488  			if o0.Op != OpARM64ORshiftLL || auxIntToInt64(o0.AuxInt) != 8 {
 18489  				continue
 18490  			}
 18491  			_ = o0.Args[1]
 18492  			o1 := o0.Args[0]
 18493  			if o1.Op != OpARM64ORshiftLL || auxIntToInt64(o1.AuxInt) != 16 {
 18494  				continue
 18495  			}
 18496  			_ = o1.Args[1]
 18497  			o2 := o1.Args[0]
 18498  			if o2.Op != OpARM64ORshiftLL || auxIntToInt64(o2.AuxInt) != 24 {
 18499  				continue
 18500  			}
 18501  			_ = o2.Args[1]
 18502  			o3 := o2.Args[0]
 18503  			if o3.Op != OpARM64ORshiftLL || auxIntToInt64(o3.AuxInt) != 32 {
 18504  				continue
 18505  			}
 18506  			_ = o3.Args[1]
 18507  			o4 := o3.Args[0]
 18508  			if o4.Op != OpARM64ORshiftLL || auxIntToInt64(o4.AuxInt) != 40 {
 18509  				continue
 18510  			}
 18511  			_ = o4.Args[1]
 18512  			o5 := o4.Args[0]
 18513  			if o5.Op != OpARM64ORshiftLL || auxIntToInt64(o5.AuxInt) != 48 {
 18514  				continue
 18515  			}
 18516  			_ = o5.Args[1]
 18517  			s0 := o5.Args[0]
 18518  			if s0.Op != OpARM64SLLconst || auxIntToInt64(s0.AuxInt) != 56 {
 18519  				continue
 18520  			}
 18521  			y0 := s0.Args[0]
 18522  			if y0.Op != OpARM64MOVDnop {
 18523  				continue
 18524  			}
 18525  			x0 := y0.Args[0]
 18526  			if x0.Op != OpARM64MOVBUloadidx {
 18527  				continue
 18528  			}
 18529  			mem := x0.Args[2]
 18530  			ptr0 := x0.Args[0]
 18531  			idx0 := x0.Args[1]
 18532  			y1 := o5.Args[1]
 18533  			if y1.Op != OpARM64MOVDnop {
 18534  				continue
 18535  			}
 18536  			x1 := y1.Args[0]
 18537  			if x1.Op != OpARM64MOVBUload || auxIntToInt32(x1.AuxInt) != 1 {
 18538  				continue
 18539  			}
 18540  			s := auxToSym(x1.Aux)
 18541  			_ = x1.Args[1]
 18542  			p1 := x1.Args[0]
 18543  			if p1.Op != OpARM64ADD {
 18544  				continue
 18545  			}
 18546  			_ = p1.Args[1]
 18547  			p1_0 := p1.Args[0]
 18548  			p1_1 := p1.Args[1]
 18549  			for _i1 := 0; _i1 <= 1; _i1, p1_0, p1_1 = _i1+1, p1_1, p1_0 {
 18550  				ptr1 := p1_0
 18551  				idx1 := p1_1
 18552  				if mem != x1.Args[1] {
 18553  					continue
 18554  				}
 18555  				y2 := o4.Args[1]
 18556  				if y2.Op != OpARM64MOVDnop {
 18557  					continue
 18558  				}
 18559  				x2 := y2.Args[0]
 18560  				if x2.Op != OpARM64MOVBUload || auxIntToInt32(x2.AuxInt) != 2 || auxToSym(x2.Aux) != s {
 18561  					continue
 18562  				}
 18563  				_ = x2.Args[1]
 18564  				p := x2.Args[0]
 18565  				if mem != x2.Args[1] {
 18566  					continue
 18567  				}
 18568  				y3 := o3.Args[1]
 18569  				if y3.Op != OpARM64MOVDnop {
 18570  					continue
 18571  				}
 18572  				x3 := y3.Args[0]
 18573  				if x3.Op != OpARM64MOVBUload || auxIntToInt32(x3.AuxInt) != 3 || auxToSym(x3.Aux) != s {
 18574  					continue
 18575  				}
 18576  				_ = x3.Args[1]
 18577  				if p != x3.Args[0] || mem != x3.Args[1] {
 18578  					continue
 18579  				}
 18580  				y4 := o2.Args[1]
 18581  				if y4.Op != OpARM64MOVDnop {
 18582  					continue
 18583  				}
 18584  				x4 := y4.Args[0]
 18585  				if x4.Op != OpARM64MOVBUload || auxIntToInt32(x4.AuxInt) != 4 || auxToSym(x4.Aux) != s {
 18586  					continue
 18587  				}
 18588  				_ = x4.Args[1]
 18589  				if p != x4.Args[0] || mem != x4.Args[1] {
 18590  					continue
 18591  				}
 18592  				y5 := o1.Args[1]
 18593  				if y5.Op != OpARM64MOVDnop {
 18594  					continue
 18595  				}
 18596  				x5 := y5.Args[0]
 18597  				if x5.Op != OpARM64MOVBUload || auxIntToInt32(x5.AuxInt) != 5 || auxToSym(x5.Aux) != s {
 18598  					continue
 18599  				}
 18600  				_ = x5.Args[1]
 18601  				if p != x5.Args[0] || mem != x5.Args[1] {
 18602  					continue
 18603  				}
 18604  				y6 := o0.Args[1]
 18605  				if y6.Op != OpARM64MOVDnop {
 18606  					continue
 18607  				}
 18608  				x6 := y6.Args[0]
 18609  				if x6.Op != OpARM64MOVBUload || auxIntToInt32(x6.AuxInt) != 6 || auxToSym(x6.Aux) != s {
 18610  					continue
 18611  				}
 18612  				_ = x6.Args[1]
 18613  				if p != x6.Args[0] || mem != x6.Args[1] {
 18614  					continue
 18615  				}
 18616  				y7 := v_1
 18617  				if y7.Op != OpARM64MOVDnop {
 18618  					continue
 18619  				}
 18620  				x7 := y7.Args[0]
 18621  				if x7.Op != OpARM64MOVBUload || auxIntToInt32(x7.AuxInt) != 7 || auxToSym(x7.Aux) != s {
 18622  					continue
 18623  				}
 18624  				_ = x7.Args[1]
 18625  				if p != x7.Args[0] || mem != x7.Args[1] || !(s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0, x1, x2, x3, x4, x5, x6, x7, y0, y1, y2, y3, y4, y5, y6, y7, o0, o1, o2, o3, o4, o5, s0)) {
 18626  					continue
 18627  				}
 18628  				b = mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7)
 18629  				v0 := b.NewValue0(x7.Pos, OpARM64REV, t)
 18630  				v.copyOf(v0)
 18631  				v1 := b.NewValue0(x7.Pos, OpARM64MOVDloadidx, t)
 18632  				v1.AddArg3(ptr0, idx0, mem)
 18633  				v0.AddArg(v1)
 18634  				return true
 18635  			}
 18636  		}
 18637  		break
 18638  	}
 18639  	// match: (OR <t> o0:(ORshiftLL [8] o1:(ORshiftLL [16] o2:(ORshiftLL [24] o3:(ORshiftLL [32] o4:(ORshiftLL [40] o5:(ORshiftLL [48] s0:(SLLconst [56] y0:(MOVDnop x0:(MOVBUloadidx ptr idx mem))) y1:(MOVDnop x1:(MOVBUloadidx ptr (ADDconst [1] idx) mem))) y2:(MOVDnop x2:(MOVBUloadidx ptr (ADDconst [2] idx) mem))) y3:(MOVDnop x3:(MOVBUloadidx ptr (ADDconst [3] idx) mem))) y4:(MOVDnop x4:(MOVBUloadidx ptr (ADDconst [4] idx) mem))) y5:(MOVDnop x5:(MOVBUloadidx ptr (ADDconst [5] idx) mem))) y6:(MOVDnop x6:(MOVBUloadidx ptr (ADDconst [6] idx) mem))) y7:(MOVDnop x7:(MOVBUloadidx ptr (ADDconst [7] idx) mem)))
 18640  	// cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) != nil && clobber(x0, x1, x2, x3, x4, x5, x6, x7, y0, y1, y2, y3, y4, y5, y6, y7, o0, o1, o2, o3, o4, o5, s0)
 18641  	// result: @mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) (REV <t> (MOVDloadidx <t> ptr idx mem))
 18642  	for {
 18643  		t := v.Type
 18644  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 18645  			o0 := v_0
 18646  			if o0.Op != OpARM64ORshiftLL || auxIntToInt64(o0.AuxInt) != 8 {
 18647  				continue
 18648  			}
 18649  			_ = o0.Args[1]
 18650  			o1 := o0.Args[0]
 18651  			if o1.Op != OpARM64ORshiftLL || auxIntToInt64(o1.AuxInt) != 16 {
 18652  				continue
 18653  			}
 18654  			_ = o1.Args[1]
 18655  			o2 := o1.Args[0]
 18656  			if o2.Op != OpARM64ORshiftLL || auxIntToInt64(o2.AuxInt) != 24 {
 18657  				continue
 18658  			}
 18659  			_ = o2.Args[1]
 18660  			o3 := o2.Args[0]
 18661  			if o3.Op != OpARM64ORshiftLL || auxIntToInt64(o3.AuxInt) != 32 {
 18662  				continue
 18663  			}
 18664  			_ = o3.Args[1]
 18665  			o4 := o3.Args[0]
 18666  			if o4.Op != OpARM64ORshiftLL || auxIntToInt64(o4.AuxInt) != 40 {
 18667  				continue
 18668  			}
 18669  			_ = o4.Args[1]
 18670  			o5 := o4.Args[0]
 18671  			if o5.Op != OpARM64ORshiftLL || auxIntToInt64(o5.AuxInt) != 48 {
 18672  				continue
 18673  			}
 18674  			_ = o5.Args[1]
 18675  			s0 := o5.Args[0]
 18676  			if s0.Op != OpARM64SLLconst || auxIntToInt64(s0.AuxInt) != 56 {
 18677  				continue
 18678  			}
 18679  			y0 := s0.Args[0]
 18680  			if y0.Op != OpARM64MOVDnop {
 18681  				continue
 18682  			}
 18683  			x0 := y0.Args[0]
 18684  			if x0.Op != OpARM64MOVBUloadidx {
 18685  				continue
 18686  			}
 18687  			mem := x0.Args[2]
 18688  			ptr := x0.Args[0]
 18689  			idx := x0.Args[1]
 18690  			y1 := o5.Args[1]
 18691  			if y1.Op != OpARM64MOVDnop {
 18692  				continue
 18693  			}
 18694  			x1 := y1.Args[0]
 18695  			if x1.Op != OpARM64MOVBUloadidx {
 18696  				continue
 18697  			}
 18698  			_ = x1.Args[2]
 18699  			if ptr != x1.Args[0] {
 18700  				continue
 18701  			}
 18702  			x1_1 := x1.Args[1]
 18703  			if x1_1.Op != OpARM64ADDconst || auxIntToInt64(x1_1.AuxInt) != 1 || idx != x1_1.Args[0] || mem != x1.Args[2] {
 18704  				continue
 18705  			}
 18706  			y2 := o4.Args[1]
 18707  			if y2.Op != OpARM64MOVDnop {
 18708  				continue
 18709  			}
 18710  			x2 := y2.Args[0]
 18711  			if x2.Op != OpARM64MOVBUloadidx {
 18712  				continue
 18713  			}
 18714  			_ = x2.Args[2]
 18715  			if ptr != x2.Args[0] {
 18716  				continue
 18717  			}
 18718  			x2_1 := x2.Args[1]
 18719  			if x2_1.Op != OpARM64ADDconst || auxIntToInt64(x2_1.AuxInt) != 2 || idx != x2_1.Args[0] || mem != x2.Args[2] {
 18720  				continue
 18721  			}
 18722  			y3 := o3.Args[1]
 18723  			if y3.Op != OpARM64MOVDnop {
 18724  				continue
 18725  			}
 18726  			x3 := y3.Args[0]
 18727  			if x3.Op != OpARM64MOVBUloadidx {
 18728  				continue
 18729  			}
 18730  			_ = x3.Args[2]
 18731  			if ptr != x3.Args[0] {
 18732  				continue
 18733  			}
 18734  			x3_1 := x3.Args[1]
 18735  			if x3_1.Op != OpARM64ADDconst || auxIntToInt64(x3_1.AuxInt) != 3 || idx != x3_1.Args[0] || mem != x3.Args[2] {
 18736  				continue
 18737  			}
 18738  			y4 := o2.Args[1]
 18739  			if y4.Op != OpARM64MOVDnop {
 18740  				continue
 18741  			}
 18742  			x4 := y4.Args[0]
 18743  			if x4.Op != OpARM64MOVBUloadidx {
 18744  				continue
 18745  			}
 18746  			_ = x4.Args[2]
 18747  			if ptr != x4.Args[0] {
 18748  				continue
 18749  			}
 18750  			x4_1 := x4.Args[1]
 18751  			if x4_1.Op != OpARM64ADDconst || auxIntToInt64(x4_1.AuxInt) != 4 || idx != x4_1.Args[0] || mem != x4.Args[2] {
 18752  				continue
 18753  			}
 18754  			y5 := o1.Args[1]
 18755  			if y5.Op != OpARM64MOVDnop {
 18756  				continue
 18757  			}
 18758  			x5 := y5.Args[0]
 18759  			if x5.Op != OpARM64MOVBUloadidx {
 18760  				continue
 18761  			}
 18762  			_ = x5.Args[2]
 18763  			if ptr != x5.Args[0] {
 18764  				continue
 18765  			}
 18766  			x5_1 := x5.Args[1]
 18767  			if x5_1.Op != OpARM64ADDconst || auxIntToInt64(x5_1.AuxInt) != 5 || idx != x5_1.Args[0] || mem != x5.Args[2] {
 18768  				continue
 18769  			}
 18770  			y6 := o0.Args[1]
 18771  			if y6.Op != OpARM64MOVDnop {
 18772  				continue
 18773  			}
 18774  			x6 := y6.Args[0]
 18775  			if x6.Op != OpARM64MOVBUloadidx {
 18776  				continue
 18777  			}
 18778  			_ = x6.Args[2]
 18779  			if ptr != x6.Args[0] {
 18780  				continue
 18781  			}
 18782  			x6_1 := x6.Args[1]
 18783  			if x6_1.Op != OpARM64ADDconst || auxIntToInt64(x6_1.AuxInt) != 6 || idx != x6_1.Args[0] || mem != x6.Args[2] {
 18784  				continue
 18785  			}
 18786  			y7 := v_1
 18787  			if y7.Op != OpARM64MOVDnop {
 18788  				continue
 18789  			}
 18790  			x7 := y7.Args[0]
 18791  			if x7.Op != OpARM64MOVBUloadidx {
 18792  				continue
 18793  			}
 18794  			_ = x7.Args[2]
 18795  			if ptr != x7.Args[0] {
 18796  				continue
 18797  			}
 18798  			x7_1 := x7.Args[1]
 18799  			if x7_1.Op != OpARM64ADDconst || auxIntToInt64(x7_1.AuxInt) != 7 || idx != x7_1.Args[0] || mem != x7.Args[2] || !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && clobber(x0, x1, x2, x3, x4, x5, x6, x7, y0, y1, y2, y3, y4, y5, y6, y7, o0, o1, o2, o3, o4, o5, s0)) {
 18800  				continue
 18801  			}
 18802  			b = mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7)
 18803  			v0 := b.NewValue0(v.Pos, OpARM64REV, t)
 18804  			v.copyOf(v0)
 18805  			v1 := b.NewValue0(v.Pos, OpARM64MOVDloadidx, t)
 18806  			v1.AddArg3(ptr, idx, mem)
 18807  			v0.AddArg(v1)
 18808  			return true
 18809  		}
 18810  		break
 18811  	}
 18812  	return false
 18813  }
// rewriteValueARM64_OpARM64ORN tries the rewrite rules for an ARM64 ORN
// (OR-NOT: x | ^y) value. Rules are attempted in order; the first match
// rewrites v in place and returns true. Returns false if no rule applies.
// NOTE: this file is generated from _gen/ARM64.rules — do not hand-edit.
func rewriteValueARM64_OpARM64ORN(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (ORN x (MOVDconst [c]))
	// result: (ORconst [^c] x)
	for {
		// Fold a constant second operand: x | ^c becomes an ORconst
		// with the complemented constant.
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64ORconst)
		v.AuxInt = int64ToAuxInt(^c)
		v.AddArg(x)
		return true
	}
	// match: (ORN x x)
	// result: (MOVDconst [-1])
	for {
		// x | ^x is all ones regardless of x.
		x := v_0
		if x != v_1 {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(-1)
		return true
	}
	// match: (ORN x0 x1:(SLLconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (ORNshiftLL x0 y [c])
	for {
		// Fold a left-shift-by-constant operand into the shifted form
		// of ORN. clobberIfDead is a project helper; presumably it
		// succeeds only when x1 has no other uses and can be removed —
		// TODO confirm against its definition elsewhere in the package.
		x0 := v_0
		x1 := v_1
		if x1.Op != OpARM64SLLconst {
			break
		}
		c := auxIntToInt64(x1.AuxInt)
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64ORNshiftLL)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg2(x0, y)
		return true
	}
	// match: (ORN x0 x1:(SRLconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (ORNshiftRL x0 y [c])
	for {
		// Same folding for a logical-right-shift operand.
		x0 := v_0
		x1 := v_1
		if x1.Op != OpARM64SRLconst {
			break
		}
		c := auxIntToInt64(x1.AuxInt)
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64ORNshiftRL)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg2(x0, y)
		return true
	}
	// match: (ORN x0 x1:(SRAconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (ORNshiftRA x0 y [c])
	for {
		// Same folding for an arithmetic-right-shift operand.
		x0 := v_0
		x1 := v_1
		if x1.Op != OpARM64SRAconst {
			break
		}
		c := auxIntToInt64(x1.AuxInt)
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64ORNshiftRA)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg2(x0, y)
		return true
	}
	// match: (ORN x0 x1:(RORconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (ORNshiftRO x0 y [c])
	for {
		// Same folding for a rotate-right-by-constant operand.
		x0 := v_0
		x1 := v_1
		if x1.Op != OpARM64RORconst {
			break
		}
		c := auxIntToInt64(x1.AuxInt)
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64ORNshiftRO)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg2(x0, y)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64ORNshiftLL simplifies (ORNshiftLL x y [d]),
// i.e. x | ^(y<<d), when the shifted operand is a constant or when the
// result is statically all ones. Generated code — do not hand-edit.
func rewriteValueARM64_OpARM64ORNshiftLL(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (ORNshiftLL x (MOVDconst [c]) [d])
	// result: (ORconst x [^int64(uint64(c)<<uint64(d))])
	for {
		// Constant-fold the shifted operand; the shift is done in
		// uint64 to match the machine's logical left shift.
		d := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64ORconst)
		v.AuxInt = int64ToAuxInt(^int64(uint64(c) << uint64(d)))
		v.AddArg(x)
		return true
	}
	// match: (ORNshiftLL (SLLconst x [c]) x [c])
	// result: (MOVDconst [-1])
	for {
		// The first operand equals the shifted second operand, so the
		// result is (x<<c) | ^(x<<c) = all ones.
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != c {
			break
		}
		x := v_0.Args[0]
		if x != v_1 {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(-1)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64ORNshiftRA simplifies (ORNshiftRA x y [d]),
// i.e. x | ^(y>>d) with an arithmetic (sign-propagating) shift.
// Generated code — do not hand-edit.
func rewriteValueARM64_OpARM64ORNshiftRA(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (ORNshiftRA x (MOVDconst [c]) [d])
	// result: (ORconst x [^(c>>uint64(d))])
	for {
		// Constant-fold: c is shifted as a signed int64, matching the
		// arithmetic right shift of the instruction.
		d := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64ORconst)
		v.AuxInt = int64ToAuxInt(^(c >> uint64(d)))
		v.AddArg(x)
		return true
	}
	// match: (ORNshiftRA (SRAconst x [c]) x [c])
	// result: (MOVDconst [-1])
	for {
		// First operand equals the shifted second operand:
		// (x>>c) | ^(x>>c) = all ones.
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64SRAconst || auxIntToInt64(v_0.AuxInt) != c {
			break
		}
		x := v_0.Args[0]
		if x != v_1 {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(-1)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64ORNshiftRL simplifies (ORNshiftRL x y [d]),
// i.e. x | ^(y>>>d) with a logical (zero-filling) right shift.
// Generated code — do not hand-edit.
func rewriteValueARM64_OpARM64ORNshiftRL(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (ORNshiftRL x (MOVDconst [c]) [d])
	// result: (ORconst x [^int64(uint64(c)>>uint64(d))])
	for {
		// Constant-fold: c is shifted as uint64 to match the logical
		// right shift of the instruction.
		d := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64ORconst)
		v.AuxInt = int64ToAuxInt(^int64(uint64(c) >> uint64(d)))
		v.AddArg(x)
		return true
	}
	// match: (ORNshiftRL (SRLconst x [c]) x [c])
	// result: (MOVDconst [-1])
	for {
		// First operand equals the shifted second operand:
		// (x>>>c) | ^(x>>>c) = all ones.
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64SRLconst || auxIntToInt64(v_0.AuxInt) != c {
			break
		}
		x := v_0.Args[0]
		if x != v_1 {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(-1)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64ORNshiftRO simplifies (ORNshiftRO x y [d]),
// i.e. x | ^(y rotated right by d). Generated code — do not hand-edit.
func rewriteValueARM64_OpARM64ORNshiftRO(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (ORNshiftRO x (MOVDconst [c]) [d])
	// result: (ORconst x [^rotateRight64(c, d)])
	for {
		// Constant-fold using the compiler's rotateRight64 helper so
		// the folded value matches the hardware rotate.
		d := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64ORconst)
		v.AuxInt = int64ToAuxInt(^rotateRight64(c, d))
		v.AddArg(x)
		return true
	}
	// match: (ORNshiftRO (RORconst x [c]) x [c])
	// result: (MOVDconst [-1])
	for {
		// First operand equals the rotated second operand:
		// ror(x,c) | ^ror(x,c) = all ones.
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64RORconst || auxIntToInt64(v_0.AuxInt) != c {
			break
		}
		x := v_0.Args[0]
		if x != v_1 {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(-1)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64ORconst simplifies (ORconst [c] x): identity
// and absorbing constants, constant folding, and merging with a nested
// ORconst or a redundant ANDconst. Rules are tried in order; the first
// match rewrites v in place and returns true. Generated code — do not
// hand-edit.
func rewriteValueARM64_OpARM64ORconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (ORconst [0] x)
	// result: x
	for {
		// OR with 0 is the identity.
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		x := v_0
		v.copyOf(x)
		return true
	}
	// match: (ORconst [-1] _)
	// result: (MOVDconst [-1])
	for {
		// OR with all ones absorbs the operand.
		if auxIntToInt64(v.AuxInt) != -1 {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(-1)
		return true
	}
	// match: (ORconst [c] (MOVDconst [d]))
	// result: (MOVDconst [c|d])
	for {
		// Fold two constants into one.
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(c | d)
		return true
	}
	// match: (ORconst [c] (ORconst [d] x))
	// result: (ORconst [c|d] x)
	for {
		// Merge nested ORs: c | (d | x) == (c|d) | x.
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64ORconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		v.reset(OpARM64ORconst)
		v.AuxInt = int64ToAuxInt(c | d)
		v.AddArg(x)
		return true
	}
	// match: (ORconst [c1] (ANDconst [c2] x))
	// cond: c2|c1 == ^0
	// result: (ORconst [c1] x)
	for {
		// The AND is redundant: c2|c1 == ^0 means every bit the mask
		// c2 clears (where c2 is 0) is set by c1 anyway, so
		// c1 | (c2 & x) == c1 | x.
		c1 := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64ANDconst {
			break
		}
		c2 := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(c2|c1 == ^0) {
			break
		}
		v.reset(OpARM64ORconst)
		v.AuxInt = int64ToAuxInt(c1)
		v.AddArg(x)
		return true
	}
	return false
}
 19123  func rewriteValueARM64_OpARM64ORshiftLL(v *Value) bool {
 19124  	v_1 := v.Args[1]
 19125  	v_0 := v.Args[0]
 19126  	b := v.Block
 19127  	typ := &b.Func.Config.Types
 19128  	// match: (ORshiftLL (MOVDconst [c]) x [d])
 19129  	// result: (ORconst [c] (SLLconst <x.Type> x [d]))
 19130  	for {
 19131  		d := auxIntToInt64(v.AuxInt)
 19132  		if v_0.Op != OpARM64MOVDconst {
 19133  			break
 19134  		}
 19135  		c := auxIntToInt64(v_0.AuxInt)
 19136  		x := v_1
 19137  		v.reset(OpARM64ORconst)
 19138  		v.AuxInt = int64ToAuxInt(c)
 19139  		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
 19140  		v0.AuxInt = int64ToAuxInt(d)
 19141  		v0.AddArg(x)
 19142  		v.AddArg(v0)
 19143  		return true
 19144  	}
 19145  	// match: (ORshiftLL x (MOVDconst [c]) [d])
 19146  	// result: (ORconst x [int64(uint64(c)<<uint64(d))])
 19147  	for {
 19148  		d := auxIntToInt64(v.AuxInt)
 19149  		x := v_0
 19150  		if v_1.Op != OpARM64MOVDconst {
 19151  			break
 19152  		}
 19153  		c := auxIntToInt64(v_1.AuxInt)
 19154  		v.reset(OpARM64ORconst)
 19155  		v.AuxInt = int64ToAuxInt(int64(uint64(c) << uint64(d)))
 19156  		v.AddArg(x)
 19157  		return true
 19158  	}
 19159  	// match: (ORshiftLL y:(SLLconst x [c]) x [c])
 19160  	// result: y
 19161  	for {
 19162  		c := auxIntToInt64(v.AuxInt)
 19163  		y := v_0
 19164  		if y.Op != OpARM64SLLconst || auxIntToInt64(y.AuxInt) != c {
 19165  			break
 19166  		}
 19167  		x := y.Args[0]
 19168  		if x != v_1 {
 19169  			break
 19170  		}
 19171  		v.copyOf(y)
 19172  		return true
 19173  	}
 19174  	// match: (ORshiftLL <typ.UInt16> [8] (UBFX <typ.UInt16> [armBFAuxInt(8, 8)] x) x)
 19175  	// result: (REV16W x)
 19176  	for {
 19177  		if v.Type != typ.UInt16 || auxIntToInt64(v.AuxInt) != 8 || v_0.Op != OpARM64UBFX || v_0.Type != typ.UInt16 || auxIntToArm64BitField(v_0.AuxInt) != armBFAuxInt(8, 8) {
 19178  			break
 19179  		}
 19180  		x := v_0.Args[0]
 19181  		if x != v_1 {
 19182  			break
 19183  		}
 19184  		v.reset(OpARM64REV16W)
 19185  		v.AddArg(x)
 19186  		return true
 19187  	}
 19188  	// match: (ORshiftLL [8] (UBFX [armBFAuxInt(8, 24)] (ANDconst [c1] x)) (ANDconst [c2] x))
 19189  	// cond: uint32(c1) == 0xff00ff00 && uint32(c2) == 0x00ff00ff
 19190  	// result: (REV16W x)
 19191  	for {
 19192  		if auxIntToInt64(v.AuxInt) != 8 || v_0.Op != OpARM64UBFX || auxIntToArm64BitField(v_0.AuxInt) != armBFAuxInt(8, 24) {
 19193  			break
 19194  		}
 19195  		v_0_0 := v_0.Args[0]
 19196  		if v_0_0.Op != OpARM64ANDconst {
 19197  			break
 19198  		}
 19199  		c1 := auxIntToInt64(v_0_0.AuxInt)
 19200  		x := v_0_0.Args[0]
 19201  		if v_1.Op != OpARM64ANDconst {
 19202  			break
 19203  		}
 19204  		c2 := auxIntToInt64(v_1.AuxInt)
 19205  		if x != v_1.Args[0] || !(uint32(c1) == 0xff00ff00 && uint32(c2) == 0x00ff00ff) {
 19206  			break
 19207  		}
 19208  		v.reset(OpARM64REV16W)
 19209  		v.AddArg(x)
 19210  		return true
 19211  	}
 19212  	// match: (ORshiftLL [8] (SRLconst [8] (ANDconst [c1] x)) (ANDconst [c2] x))
 19213  	// cond: (uint64(c1) == 0xff00ff00ff00ff00 && uint64(c2) == 0x00ff00ff00ff00ff)
 19214  	// result: (REV16 x)
 19215  	for {
 19216  		if auxIntToInt64(v.AuxInt) != 8 || v_0.Op != OpARM64SRLconst || auxIntToInt64(v_0.AuxInt) != 8 {
 19217  			break
 19218  		}
 19219  		v_0_0 := v_0.Args[0]
 19220  		if v_0_0.Op != OpARM64ANDconst {
 19221  			break
 19222  		}
 19223  		c1 := auxIntToInt64(v_0_0.AuxInt)
 19224  		x := v_0_0.Args[0]
 19225  		if v_1.Op != OpARM64ANDconst {
 19226  			break
 19227  		}
 19228  		c2 := auxIntToInt64(v_1.AuxInt)
 19229  		if x != v_1.Args[0] || !(uint64(c1) == 0xff00ff00ff00ff00 && uint64(c2) == 0x00ff00ff00ff00ff) {
 19230  			break
 19231  		}
 19232  		v.reset(OpARM64REV16)
 19233  		v.AddArg(x)
 19234  		return true
 19235  	}
 19236  	// match: (ORshiftLL [8] (SRLconst [8] (ANDconst [c1] x)) (ANDconst [c2] x))
 19237  	// cond: (uint64(c1) == 0xff00ff00 && uint64(c2) == 0x00ff00ff)
 19238  	// result: (REV16 (ANDconst <x.Type> [0xffffffff] x))
 19239  	for {
 19240  		if auxIntToInt64(v.AuxInt) != 8 || v_0.Op != OpARM64SRLconst || auxIntToInt64(v_0.AuxInt) != 8 {
 19241  			break
 19242  		}
 19243  		v_0_0 := v_0.Args[0]
 19244  		if v_0_0.Op != OpARM64ANDconst {
 19245  			break
 19246  		}
 19247  		c1 := auxIntToInt64(v_0_0.AuxInt)
 19248  		x := v_0_0.Args[0]
 19249  		if v_1.Op != OpARM64ANDconst {
 19250  			break
 19251  		}
 19252  		c2 := auxIntToInt64(v_1.AuxInt)
 19253  		if x != v_1.Args[0] || !(uint64(c1) == 0xff00ff00 && uint64(c2) == 0x00ff00ff) {
 19254  			break
 19255  		}
 19256  		v.reset(OpARM64REV16)
 19257  		v0 := b.NewValue0(v.Pos, OpARM64ANDconst, x.Type)
 19258  		v0.AuxInt = int64ToAuxInt(0xffffffff)
 19259  		v0.AddArg(x)
 19260  		v.AddArg(v0)
 19261  		return true
 19262  	}
 19263  	// match: ( ORshiftLL [c] (SRLconst x [64-c]) x2)
 19264  	// result: (EXTRconst [64-c] x2 x)
 19265  	for {
 19266  		c := auxIntToInt64(v.AuxInt)
 19267  		if v_0.Op != OpARM64SRLconst || auxIntToInt64(v_0.AuxInt) != 64-c {
 19268  			break
 19269  		}
 19270  		x := v_0.Args[0]
 19271  		x2 := v_1
 19272  		v.reset(OpARM64EXTRconst)
 19273  		v.AuxInt = int64ToAuxInt(64 - c)
 19274  		v.AddArg2(x2, x)
 19275  		return true
 19276  	}
 19277  	// match: ( ORshiftLL <t> [c] (UBFX [bfc] x) x2)
 19278  	// cond: c < 32 && t.Size() == 4 && bfc == armBFAuxInt(32-c, c)
 19279  	// result: (EXTRWconst [32-c] x2 x)
 19280  	for {
 19281  		t := v.Type
 19282  		c := auxIntToInt64(v.AuxInt)
 19283  		if v_0.Op != OpARM64UBFX {
 19284  			break
 19285  		}
 19286  		bfc := auxIntToArm64BitField(v_0.AuxInt)
 19287  		x := v_0.Args[0]
 19288  		x2 := v_1
 19289  		if !(c < 32 && t.Size() == 4 && bfc == armBFAuxInt(32-c, c)) {
 19290  			break
 19291  		}
 19292  		v.reset(OpARM64EXTRWconst)
 19293  		v.AuxInt = int64ToAuxInt(32 - c)
 19294  		v.AddArg2(x2, x)
 19295  		return true
 19296  	}
 19297  	// match: (ORshiftLL [sc] (UBFX [bfc] x) (SRLconst [sc] y))
 19298  	// cond: sc == bfc.getARM64BFwidth()
 19299  	// result: (BFXIL [bfc] y x)
 19300  	for {
 19301  		sc := auxIntToInt64(v.AuxInt)
 19302  		if v_0.Op != OpARM64UBFX {
 19303  			break
 19304  		}
 19305  		bfc := auxIntToArm64BitField(v_0.AuxInt)
 19306  		x := v_0.Args[0]
 19307  		if v_1.Op != OpARM64SRLconst || auxIntToInt64(v_1.AuxInt) != sc {
 19308  			break
 19309  		}
 19310  		y := v_1.Args[0]
 19311  		if !(sc == bfc.getARM64BFwidth()) {
 19312  			break
 19313  		}
 19314  		v.reset(OpARM64BFXIL)
 19315  		v.AuxInt = arm64BitFieldToAuxInt(bfc)
 19316  		v.AddArg2(y, x)
 19317  		return true
 19318  	}
 19319  	// match: (ORshiftLL <t> [8] y0:(MOVDnop x0:(MOVBUload [i0] {s} p mem)) y1:(MOVDnop x1:(MOVBUload [i1] {s} p mem)))
 19320  	// cond: i1 == i0+1 && x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b,x0,x1) != nil && clobber(x0, x1, y0, y1)
 19321  	// result: @mergePoint(b,x0,x1) (MOVHUload <t> {s} (OffPtr <p.Type> [int64(i0)] p) mem)
 19322  	for {
 19323  		t := v.Type
 19324  		if auxIntToInt64(v.AuxInt) != 8 {
 19325  			break
 19326  		}
 19327  		y0 := v_0
 19328  		if y0.Op != OpARM64MOVDnop {
 19329  			break
 19330  		}
 19331  		x0 := y0.Args[0]
 19332  		if x0.Op != OpARM64MOVBUload {
 19333  			break
 19334  		}
 19335  		i0 := auxIntToInt32(x0.AuxInt)
 19336  		s := auxToSym(x0.Aux)
 19337  		mem := x0.Args[1]
 19338  		p := x0.Args[0]
 19339  		y1 := v_1
 19340  		if y1.Op != OpARM64MOVDnop {
 19341  			break
 19342  		}
 19343  		x1 := y1.Args[0]
 19344  		if x1.Op != OpARM64MOVBUload {
 19345  			break
 19346  		}
 19347  		i1 := auxIntToInt32(x1.AuxInt)
 19348  		if auxToSym(x1.Aux) != s {
 19349  			break
 19350  		}
 19351  		_ = x1.Args[1]
 19352  		if p != x1.Args[0] || mem != x1.Args[1] || !(i1 == i0+1 && x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b, x0, x1) != nil && clobber(x0, x1, y0, y1)) {
 19353  			break
 19354  		}
 19355  		b = mergePoint(b, x0, x1)
 19356  		v0 := b.NewValue0(x1.Pos, OpARM64MOVHUload, t)
 19357  		v.copyOf(v0)
 19358  		v0.Aux = symToAux(s)
 19359  		v1 := b.NewValue0(x1.Pos, OpOffPtr, p.Type)
 19360  		v1.AuxInt = int64ToAuxInt(int64(i0))
 19361  		v1.AddArg(p)
 19362  		v0.AddArg2(v1, mem)
 19363  		return true
 19364  	}
 19365  	// match: (ORshiftLL <t> [8] y0:(MOVDnop x0:(MOVBUloadidx ptr0 idx0 mem)) y1:(MOVDnop x1:(MOVBUload [1] {s} p1:(ADD ptr1 idx1) mem)))
 19366  	// cond: s == nil && x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b,x0,x1) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x0, x1, y0, y1)
 19367  	// result: @mergePoint(b,x0,x1) (MOVHUloadidx <t> ptr0 idx0 mem)
 19368  	for {
 19369  		t := v.Type
 19370  		if auxIntToInt64(v.AuxInt) != 8 {
 19371  			break
 19372  		}
 19373  		y0 := v_0
 19374  		if y0.Op != OpARM64MOVDnop {
 19375  			break
 19376  		}
 19377  		x0 := y0.Args[0]
 19378  		if x0.Op != OpARM64MOVBUloadidx {
 19379  			break
 19380  		}
 19381  		mem := x0.Args[2]
 19382  		ptr0 := x0.Args[0]
 19383  		idx0 := x0.Args[1]
 19384  		y1 := v_1
 19385  		if y1.Op != OpARM64MOVDnop {
 19386  			break
 19387  		}
 19388  		x1 := y1.Args[0]
 19389  		if x1.Op != OpARM64MOVBUload || auxIntToInt32(x1.AuxInt) != 1 {
 19390  			break
 19391  		}
 19392  		s := auxToSym(x1.Aux)
 19393  		_ = x1.Args[1]
 19394  		p1 := x1.Args[0]
 19395  		if p1.Op != OpARM64ADD {
 19396  			break
 19397  		}
 19398  		_ = p1.Args[1]
 19399  		p1_0 := p1.Args[0]
 19400  		p1_1 := p1.Args[1]
 19401  		for _i0 := 0; _i0 <= 1; _i0, p1_0, p1_1 = _i0+1, p1_1, p1_0 {
 19402  			ptr1 := p1_0
 19403  			idx1 := p1_1
 19404  			if mem != x1.Args[1] || !(s == nil && x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b, x0, x1) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x0, x1, y0, y1)) {
 19405  				continue
 19406  			}
 19407  			b = mergePoint(b, x0, x1)
 19408  			v0 := b.NewValue0(x1.Pos, OpARM64MOVHUloadidx, t)
 19409  			v.copyOf(v0)
 19410  			v0.AddArg3(ptr0, idx0, mem)
 19411  			return true
 19412  		}
 19413  		break
 19414  	}
 19415  	// match: (ORshiftLL <t> [8] y0:(MOVDnop x0:(MOVBUloadidx ptr idx mem)) y1:(MOVDnop x1:(MOVBUloadidx ptr (ADDconst [1] idx) mem)))
 19416  	// cond: x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b,x0,x1) != nil && clobber(x0, x1, y0, y1)
 19417  	// result: @mergePoint(b,x0,x1) (MOVHUloadidx <t> ptr idx mem)
 19418  	for {
 19419  		t := v.Type
 19420  		if auxIntToInt64(v.AuxInt) != 8 {
 19421  			break
 19422  		}
 19423  		y0 := v_0
 19424  		if y0.Op != OpARM64MOVDnop {
 19425  			break
 19426  		}
 19427  		x0 := y0.Args[0]
 19428  		if x0.Op != OpARM64MOVBUloadidx {
 19429  			break
 19430  		}
 19431  		mem := x0.Args[2]
 19432  		ptr := x0.Args[0]
 19433  		idx := x0.Args[1]
 19434  		y1 := v_1
 19435  		if y1.Op != OpARM64MOVDnop {
 19436  			break
 19437  		}
 19438  		x1 := y1.Args[0]
 19439  		if x1.Op != OpARM64MOVBUloadidx {
 19440  			break
 19441  		}
 19442  		_ = x1.Args[2]
 19443  		if ptr != x1.Args[0] {
 19444  			break
 19445  		}
 19446  		x1_1 := x1.Args[1]
 19447  		if x1_1.Op != OpARM64ADDconst || auxIntToInt64(x1_1.AuxInt) != 1 || idx != x1_1.Args[0] || mem != x1.Args[2] || !(x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b, x0, x1) != nil && clobber(x0, x1, y0, y1)) {
 19448  			break
 19449  		}
 19450  		b = mergePoint(b, x0, x1)
 19451  		v0 := b.NewValue0(v.Pos, OpARM64MOVHUloadidx, t)
 19452  		v.copyOf(v0)
 19453  		v0.AddArg3(ptr, idx, mem)
 19454  		return true
 19455  	}
 19456  	// match: (ORshiftLL <t> [24] o0:(ORshiftLL [16] x0:(MOVHUload [i0] {s} p mem) y1:(MOVDnop x1:(MOVBUload [i2] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i3] {s} p mem)))
 19457  	// cond: i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b,x0,x1,x2) != nil && clobber(x0, x1, x2, y1, y2, o0)
 19458  	// result: @mergePoint(b,x0,x1,x2) (MOVWUload <t> {s} (OffPtr <p.Type> [int64(i0)] p) mem)
 19459  	for {
 19460  		t := v.Type
 19461  		if auxIntToInt64(v.AuxInt) != 24 {
 19462  			break
 19463  		}
 19464  		o0 := v_0
 19465  		if o0.Op != OpARM64ORshiftLL || auxIntToInt64(o0.AuxInt) != 16 {
 19466  			break
 19467  		}
 19468  		_ = o0.Args[1]
 19469  		x0 := o0.Args[0]
 19470  		if x0.Op != OpARM64MOVHUload {
 19471  			break
 19472  		}
 19473  		i0 := auxIntToInt32(x0.AuxInt)
 19474  		s := auxToSym(x0.Aux)
 19475  		mem := x0.Args[1]
 19476  		p := x0.Args[0]
 19477  		y1 := o0.Args[1]
 19478  		if y1.Op != OpARM64MOVDnop {
 19479  			break
 19480  		}
 19481  		x1 := y1.Args[0]
 19482  		if x1.Op != OpARM64MOVBUload {
 19483  			break
 19484  		}
 19485  		i2 := auxIntToInt32(x1.AuxInt)
 19486  		if auxToSym(x1.Aux) != s {
 19487  			break
 19488  		}
 19489  		_ = x1.Args[1]
 19490  		if p != x1.Args[0] || mem != x1.Args[1] {
 19491  			break
 19492  		}
 19493  		y2 := v_1
 19494  		if y2.Op != OpARM64MOVDnop {
 19495  			break
 19496  		}
 19497  		x2 := y2.Args[0]
 19498  		if x2.Op != OpARM64MOVBUload {
 19499  			break
 19500  		}
 19501  		i3 := auxIntToInt32(x2.AuxInt)
 19502  		if auxToSym(x2.Aux) != s {
 19503  			break
 19504  		}
 19505  		_ = x2.Args[1]
 19506  		if p != x2.Args[0] || mem != x2.Args[1] || !(i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b, x0, x1, x2) != nil && clobber(x0, x1, x2, y1, y2, o0)) {
 19507  			break
 19508  		}
 19509  		b = mergePoint(b, x0, x1, x2)
 19510  		v0 := b.NewValue0(x2.Pos, OpARM64MOVWUload, t)
 19511  		v.copyOf(v0)
 19512  		v0.Aux = symToAux(s)
 19513  		v1 := b.NewValue0(x2.Pos, OpOffPtr, p.Type)
 19514  		v1.AuxInt = int64ToAuxInt(int64(i0))
 19515  		v1.AddArg(p)
 19516  		v0.AddArg2(v1, mem)
 19517  		return true
 19518  	}
 19519  	// match: (ORshiftLL <t> [24] o0:(ORshiftLL [16] x0:(MOVHUloadidx ptr0 idx0 mem) y1:(MOVDnop x1:(MOVBUload [2] {s} p1:(ADD ptr1 idx1) mem))) y2:(MOVDnop x2:(MOVBUload [3] {s} p mem)))
 19520  	// cond: s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b,x0,x1,x2) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0, x1, x2, y1, y2, o0)
 19521  	// result: @mergePoint(b,x0,x1,x2) (MOVWUloadidx <t> ptr0 idx0 mem)
 19522  	for {
 19523  		t := v.Type
 19524  		if auxIntToInt64(v.AuxInt) != 24 {
 19525  			break
 19526  		}
 19527  		o0 := v_0
 19528  		if o0.Op != OpARM64ORshiftLL || auxIntToInt64(o0.AuxInt) != 16 {
 19529  			break
 19530  		}
 19531  		_ = o0.Args[1]
 19532  		x0 := o0.Args[0]
 19533  		if x0.Op != OpARM64MOVHUloadidx {
 19534  			break
 19535  		}
 19536  		mem := x0.Args[2]
 19537  		ptr0 := x0.Args[0]
 19538  		idx0 := x0.Args[1]
 19539  		y1 := o0.Args[1]
 19540  		if y1.Op != OpARM64MOVDnop {
 19541  			break
 19542  		}
 19543  		x1 := y1.Args[0]
 19544  		if x1.Op != OpARM64MOVBUload || auxIntToInt32(x1.AuxInt) != 2 {
 19545  			break
 19546  		}
 19547  		s := auxToSym(x1.Aux)
 19548  		_ = x1.Args[1]
 19549  		p1 := x1.Args[0]
 19550  		if p1.Op != OpARM64ADD {
 19551  			break
 19552  		}
 19553  		_ = p1.Args[1]
 19554  		p1_0 := p1.Args[0]
 19555  		p1_1 := p1.Args[1]
 19556  		for _i0 := 0; _i0 <= 1; _i0, p1_0, p1_1 = _i0+1, p1_1, p1_0 {
 19557  			ptr1 := p1_0
 19558  			idx1 := p1_1
 19559  			if mem != x1.Args[1] {
 19560  				continue
 19561  			}
 19562  			y2 := v_1
 19563  			if y2.Op != OpARM64MOVDnop {
 19564  				continue
 19565  			}
 19566  			x2 := y2.Args[0]
 19567  			if x2.Op != OpARM64MOVBUload || auxIntToInt32(x2.AuxInt) != 3 || auxToSym(x2.Aux) != s {
 19568  				continue
 19569  			}
 19570  			_ = x2.Args[1]
 19571  			p := x2.Args[0]
 19572  			if mem != x2.Args[1] || !(s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b, x0, x1, x2) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0, x1, x2, y1, y2, o0)) {
 19573  				continue
 19574  			}
 19575  			b = mergePoint(b, x0, x1, x2)
 19576  			v0 := b.NewValue0(x2.Pos, OpARM64MOVWUloadidx, t)
 19577  			v.copyOf(v0)
 19578  			v0.AddArg3(ptr0, idx0, mem)
 19579  			return true
 19580  		}
 19581  		break
 19582  	}
 19583  	// match: (ORshiftLL <t> [24] o0:(ORshiftLL [16] x0:(MOVHUloadidx ptr idx mem) y1:(MOVDnop x1:(MOVBUloadidx ptr (ADDconst [2] idx) mem))) y2:(MOVDnop x2:(MOVBUloadidx ptr (ADDconst [3] idx) mem)))
 19584  	// cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b,x0,x1,x2) != nil && clobber(x0, x1, x2, y1, y2, o0)
 19585  	// result: @mergePoint(b,x0,x1,x2) (MOVWUloadidx <t> ptr idx mem)
 19586  	for {
 19587  		t := v.Type
 19588  		if auxIntToInt64(v.AuxInt) != 24 {
 19589  			break
 19590  		}
 19591  		o0 := v_0
 19592  		if o0.Op != OpARM64ORshiftLL || auxIntToInt64(o0.AuxInt) != 16 {
 19593  			break
 19594  		}
 19595  		_ = o0.Args[1]
 19596  		x0 := o0.Args[0]
 19597  		if x0.Op != OpARM64MOVHUloadidx {
 19598  			break
 19599  		}
 19600  		mem := x0.Args[2]
 19601  		ptr := x0.Args[0]
 19602  		idx := x0.Args[1]
 19603  		y1 := o0.Args[1]
 19604  		if y1.Op != OpARM64MOVDnop {
 19605  			break
 19606  		}
 19607  		x1 := y1.Args[0]
 19608  		if x1.Op != OpARM64MOVBUloadidx {
 19609  			break
 19610  		}
 19611  		_ = x1.Args[2]
 19612  		if ptr != x1.Args[0] {
 19613  			break
 19614  		}
 19615  		x1_1 := x1.Args[1]
 19616  		if x1_1.Op != OpARM64ADDconst || auxIntToInt64(x1_1.AuxInt) != 2 || idx != x1_1.Args[0] || mem != x1.Args[2] {
 19617  			break
 19618  		}
 19619  		y2 := v_1
 19620  		if y2.Op != OpARM64MOVDnop {
 19621  			break
 19622  		}
 19623  		x2 := y2.Args[0]
 19624  		if x2.Op != OpARM64MOVBUloadidx {
 19625  			break
 19626  		}
 19627  		_ = x2.Args[2]
 19628  		if ptr != x2.Args[0] {
 19629  			break
 19630  		}
 19631  		x2_1 := x2.Args[1]
 19632  		if x2_1.Op != OpARM64ADDconst || auxIntToInt64(x2_1.AuxInt) != 3 || idx != x2_1.Args[0] || mem != x2.Args[2] || !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b, x0, x1, x2) != nil && clobber(x0, x1, x2, y1, y2, o0)) {
 19633  			break
 19634  		}
 19635  		b = mergePoint(b, x0, x1, x2)
 19636  		v0 := b.NewValue0(v.Pos, OpARM64MOVWUloadidx, t)
 19637  		v.copyOf(v0)
 19638  		v0.AddArg3(ptr, idx, mem)
 19639  		return true
 19640  	}
 19641  	// match: (ORshiftLL <t> [24] o0:(ORshiftLL [16] x0:(MOVHUloadidx2 ptr0 idx0 mem) y1:(MOVDnop x1:(MOVBUload [2] {s} p1:(ADDshiftLL [1] ptr1 idx1) mem))) y2:(MOVDnop x2:(MOVBUload [3] {s} p mem)))
 19642  	// cond: s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b,x0,x1,x2) != nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && isSamePtr(p1, p) && clobber(x0, x1, x2, y1, y2, o0)
 19643  	// result: @mergePoint(b,x0,x1,x2) (MOVWUloadidx <t> ptr0 (SLLconst <idx0.Type> [1] idx0) mem)
 19644  	for {
 19645  		t := v.Type
 19646  		if auxIntToInt64(v.AuxInt) != 24 {
 19647  			break
 19648  		}
 19649  		o0 := v_0
 19650  		if o0.Op != OpARM64ORshiftLL || auxIntToInt64(o0.AuxInt) != 16 {
 19651  			break
 19652  		}
 19653  		_ = o0.Args[1]
 19654  		x0 := o0.Args[0]
 19655  		if x0.Op != OpARM64MOVHUloadidx2 {
 19656  			break
 19657  		}
 19658  		mem := x0.Args[2]
 19659  		ptr0 := x0.Args[0]
 19660  		idx0 := x0.Args[1]
 19661  		y1 := o0.Args[1]
 19662  		if y1.Op != OpARM64MOVDnop {
 19663  			break
 19664  		}
 19665  		x1 := y1.Args[0]
 19666  		if x1.Op != OpARM64MOVBUload || auxIntToInt32(x1.AuxInt) != 2 {
 19667  			break
 19668  		}
 19669  		s := auxToSym(x1.Aux)
 19670  		_ = x1.Args[1]
 19671  		p1 := x1.Args[0]
 19672  		if p1.Op != OpARM64ADDshiftLL || auxIntToInt64(p1.AuxInt) != 1 {
 19673  			break
 19674  		}
 19675  		idx1 := p1.Args[1]
 19676  		ptr1 := p1.Args[0]
 19677  		if mem != x1.Args[1] {
 19678  			break
 19679  		}
 19680  		y2 := v_1
 19681  		if y2.Op != OpARM64MOVDnop {
 19682  			break
 19683  		}
 19684  		x2 := y2.Args[0]
 19685  		if x2.Op != OpARM64MOVBUload || auxIntToInt32(x2.AuxInt) != 3 || auxToSym(x2.Aux) != s {
 19686  			break
 19687  		}
 19688  		_ = x2.Args[1]
 19689  		p := x2.Args[0]
 19690  		if mem != x2.Args[1] || !(s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b, x0, x1, x2) != nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && isSamePtr(p1, p) && clobber(x0, x1, x2, y1, y2, o0)) {
 19691  			break
 19692  		}
 19693  		b = mergePoint(b, x0, x1, x2)
 19694  		v0 := b.NewValue0(x2.Pos, OpARM64MOVWUloadidx, t)
 19695  		v.copyOf(v0)
 19696  		v1 := b.NewValue0(x2.Pos, OpARM64SLLconst, idx0.Type)
 19697  		v1.AuxInt = int64ToAuxInt(1)
 19698  		v1.AddArg(idx0)
 19699  		v0.AddArg3(ptr0, v1, mem)
 19700  		return true
 19701  	}
 19702  	// match: (ORshiftLL <t> [56] o0:(ORshiftLL [48] o1:(ORshiftLL [40] o2:(ORshiftLL [32] x0:(MOVWUload [i0] {s} p mem) y1:(MOVDnop x1:(MOVBUload [i4] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i5] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [i6] {s} p mem))) y4:(MOVDnop x4:(MOVBUload [i7] {s} p mem)))
 19703  	// cond: i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4) != nil && clobber(x0, x1, x2, x3, x4, y1, y2, y3, y4, o0, o1, o2)
 19704  	// result: @mergePoint(b,x0,x1,x2,x3,x4) (MOVDload <t> {s} (OffPtr <p.Type> [int64(i0)] p) mem)
 19705  	for {
 19706  		t := v.Type
 19707  		if auxIntToInt64(v.AuxInt) != 56 {
 19708  			break
 19709  		}
 19710  		o0 := v_0
 19711  		if o0.Op != OpARM64ORshiftLL || auxIntToInt64(o0.AuxInt) != 48 {
 19712  			break
 19713  		}
 19714  		_ = o0.Args[1]
 19715  		o1 := o0.Args[0]
 19716  		if o1.Op != OpARM64ORshiftLL || auxIntToInt64(o1.AuxInt) != 40 {
 19717  			break
 19718  		}
 19719  		_ = o1.Args[1]
 19720  		o2 := o1.Args[0]
 19721  		if o2.Op != OpARM64ORshiftLL || auxIntToInt64(o2.AuxInt) != 32 {
 19722  			break
 19723  		}
 19724  		_ = o2.Args[1]
 19725  		x0 := o2.Args[0]
 19726  		if x0.Op != OpARM64MOVWUload {
 19727  			break
 19728  		}
 19729  		i0 := auxIntToInt32(x0.AuxInt)
 19730  		s := auxToSym(x0.Aux)
 19731  		mem := x0.Args[1]
 19732  		p := x0.Args[0]
 19733  		y1 := o2.Args[1]
 19734  		if y1.Op != OpARM64MOVDnop {
 19735  			break
 19736  		}
 19737  		x1 := y1.Args[0]
 19738  		if x1.Op != OpARM64MOVBUload {
 19739  			break
 19740  		}
 19741  		i4 := auxIntToInt32(x1.AuxInt)
 19742  		if auxToSym(x1.Aux) != s {
 19743  			break
 19744  		}
 19745  		_ = x1.Args[1]
 19746  		if p != x1.Args[0] || mem != x1.Args[1] {
 19747  			break
 19748  		}
 19749  		y2 := o1.Args[1]
 19750  		if y2.Op != OpARM64MOVDnop {
 19751  			break
 19752  		}
 19753  		x2 := y2.Args[0]
 19754  		if x2.Op != OpARM64MOVBUload {
 19755  			break
 19756  		}
 19757  		i5 := auxIntToInt32(x2.AuxInt)
 19758  		if auxToSym(x2.Aux) != s {
 19759  			break
 19760  		}
 19761  		_ = x2.Args[1]
 19762  		if p != x2.Args[0] || mem != x2.Args[1] {
 19763  			break
 19764  		}
 19765  		y3 := o0.Args[1]
 19766  		if y3.Op != OpARM64MOVDnop {
 19767  			break
 19768  		}
 19769  		x3 := y3.Args[0]
 19770  		if x3.Op != OpARM64MOVBUload {
 19771  			break
 19772  		}
 19773  		i6 := auxIntToInt32(x3.AuxInt)
 19774  		if auxToSym(x3.Aux) != s {
 19775  			break
 19776  		}
 19777  		_ = x3.Args[1]
 19778  		if p != x3.Args[0] || mem != x3.Args[1] {
 19779  			break
 19780  		}
 19781  		y4 := v_1
 19782  		if y4.Op != OpARM64MOVDnop {
 19783  			break
 19784  		}
 19785  		x4 := y4.Args[0]
 19786  		if x4.Op != OpARM64MOVBUload {
 19787  			break
 19788  		}
 19789  		i7 := auxIntToInt32(x4.AuxInt)
 19790  		if auxToSym(x4.Aux) != s {
 19791  			break
 19792  		}
 19793  		_ = x4.Args[1]
 19794  		if p != x4.Args[0] || mem != x4.Args[1] || !(i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4) != nil && clobber(x0, x1, x2, x3, x4, y1, y2, y3, y4, o0, o1, o2)) {
 19795  			break
 19796  		}
 19797  		b = mergePoint(b, x0, x1, x2, x3, x4)
 19798  		v0 := b.NewValue0(x4.Pos, OpARM64MOVDload, t)
 19799  		v.copyOf(v0)
 19800  		v0.Aux = symToAux(s)
 19801  		v1 := b.NewValue0(x4.Pos, OpOffPtr, p.Type)
 19802  		v1.AuxInt = int64ToAuxInt(int64(i0))
 19803  		v1.AddArg(p)
 19804  		v0.AddArg2(v1, mem)
 19805  		return true
 19806  	}
 19807  	// match: (ORshiftLL <t> [56] o0:(ORshiftLL [48] o1:(ORshiftLL [40] o2:(ORshiftLL [32] x0:(MOVWUloadidx ptr0 idx0 mem) y1:(MOVDnop x1:(MOVBUload [4] {s} p1:(ADD ptr1 idx1) mem))) y2:(MOVDnop x2:(MOVBUload [5] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [6] {s} p mem))) y4:(MOVDnop x4:(MOVBUload [7] {s} p mem)))
 19808  	// cond: s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0, x1, x2, x3, x4, y1, y2, y3, y4, o0, o1, o2)
 19809  	// result: @mergePoint(b,x0,x1,x2,x3,x4) (MOVDloadidx <t> ptr0 idx0 mem)
 19810  	for {
 19811  		t := v.Type
 19812  		if auxIntToInt64(v.AuxInt) != 56 {
 19813  			break
 19814  		}
 19815  		o0 := v_0
 19816  		if o0.Op != OpARM64ORshiftLL || auxIntToInt64(o0.AuxInt) != 48 {
 19817  			break
 19818  		}
 19819  		_ = o0.Args[1]
 19820  		o1 := o0.Args[0]
 19821  		if o1.Op != OpARM64ORshiftLL || auxIntToInt64(o1.AuxInt) != 40 {
 19822  			break
 19823  		}
 19824  		_ = o1.Args[1]
 19825  		o2 := o1.Args[0]
 19826  		if o2.Op != OpARM64ORshiftLL || auxIntToInt64(o2.AuxInt) != 32 {
 19827  			break
 19828  		}
 19829  		_ = o2.Args[1]
 19830  		x0 := o2.Args[0]
 19831  		if x0.Op != OpARM64MOVWUloadidx {
 19832  			break
 19833  		}
 19834  		mem := x0.Args[2]
 19835  		ptr0 := x0.Args[0]
 19836  		idx0 := x0.Args[1]
 19837  		y1 := o2.Args[1]
 19838  		if y1.Op != OpARM64MOVDnop {
 19839  			break
 19840  		}
 19841  		x1 := y1.Args[0]
 19842  		if x1.Op != OpARM64MOVBUload || auxIntToInt32(x1.AuxInt) != 4 {
 19843  			break
 19844  		}
 19845  		s := auxToSym(x1.Aux)
 19846  		_ = x1.Args[1]
 19847  		p1 := x1.Args[0]
 19848  		if p1.Op != OpARM64ADD {
 19849  			break
 19850  		}
 19851  		_ = p1.Args[1]
 19852  		p1_0 := p1.Args[0]
 19853  		p1_1 := p1.Args[1]
 19854  		for _i0 := 0; _i0 <= 1; _i0, p1_0, p1_1 = _i0+1, p1_1, p1_0 {
 19855  			ptr1 := p1_0
 19856  			idx1 := p1_1
 19857  			if mem != x1.Args[1] {
 19858  				continue
 19859  			}
 19860  			y2 := o1.Args[1]
 19861  			if y2.Op != OpARM64MOVDnop {
 19862  				continue
 19863  			}
 19864  			x2 := y2.Args[0]
 19865  			if x2.Op != OpARM64MOVBUload || auxIntToInt32(x2.AuxInt) != 5 || auxToSym(x2.Aux) != s {
 19866  				continue
 19867  			}
 19868  			_ = x2.Args[1]
 19869  			p := x2.Args[0]
 19870  			if mem != x2.Args[1] {
 19871  				continue
 19872  			}
 19873  			y3 := o0.Args[1]
 19874  			if y3.Op != OpARM64MOVDnop {
 19875  				continue
 19876  			}
 19877  			x3 := y3.Args[0]
 19878  			if x3.Op != OpARM64MOVBUload || auxIntToInt32(x3.AuxInt) != 6 || auxToSym(x3.Aux) != s {
 19879  				continue
 19880  			}
 19881  			_ = x3.Args[1]
 19882  			if p != x3.Args[0] || mem != x3.Args[1] {
 19883  				continue
 19884  			}
 19885  			y4 := v_1
 19886  			if y4.Op != OpARM64MOVDnop {
 19887  				continue
 19888  			}
 19889  			x4 := y4.Args[0]
 19890  			if x4.Op != OpARM64MOVBUload || auxIntToInt32(x4.AuxInt) != 7 || auxToSym(x4.Aux) != s {
 19891  				continue
 19892  			}
 19893  			_ = x4.Args[1]
 19894  			if p != x4.Args[0] || mem != x4.Args[1] || !(s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0, x1, x2, x3, x4, y1, y2, y3, y4, o0, o1, o2)) {
 19895  				continue
 19896  			}
 19897  			b = mergePoint(b, x0, x1, x2, x3, x4)
 19898  			v0 := b.NewValue0(x4.Pos, OpARM64MOVDloadidx, t)
 19899  			v.copyOf(v0)
 19900  			v0.AddArg3(ptr0, idx0, mem)
 19901  			return true
 19902  		}
 19903  		break
 19904  	}
 19905  	// match: (ORshiftLL <t> [56] o0:(ORshiftLL [48] o1:(ORshiftLL [40] o2:(ORshiftLL [32] x0:(MOVWUloadidx4 ptr0 idx0 mem) y1:(MOVDnop x1:(MOVBUload [4] {s} p1:(ADDshiftLL [2] ptr1 idx1) mem))) y2:(MOVDnop x2:(MOVBUload [5] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [6] {s} p mem))) y4:(MOVDnop x4:(MOVBUload [7] {s} p mem)))
 19906  	// cond: s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4) != nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && isSamePtr(p1, p) && clobber(x0, x1, x2, x3, x4, y1, y2, y3, y4, o0, o1, o2)
 19907  	// result: @mergePoint(b,x0,x1,x2,x3,x4) (MOVDloadidx <t> ptr0 (SLLconst <idx0.Type> [2] idx0) mem)
 19908  	for {
 19909  		t := v.Type
 19910  		if auxIntToInt64(v.AuxInt) != 56 {
 19911  			break
 19912  		}
 19913  		o0 := v_0
 19914  		if o0.Op != OpARM64ORshiftLL || auxIntToInt64(o0.AuxInt) != 48 {
 19915  			break
 19916  		}
 19917  		_ = o0.Args[1]
 19918  		o1 := o0.Args[0]
 19919  		if o1.Op != OpARM64ORshiftLL || auxIntToInt64(o1.AuxInt) != 40 {
 19920  			break
 19921  		}
 19922  		_ = o1.Args[1]
 19923  		o2 := o1.Args[0]
 19924  		if o2.Op != OpARM64ORshiftLL || auxIntToInt64(o2.AuxInt) != 32 {
 19925  			break
 19926  		}
 19927  		_ = o2.Args[1]
 19928  		x0 := o2.Args[0]
 19929  		if x0.Op != OpARM64MOVWUloadidx4 {
 19930  			break
 19931  		}
 19932  		mem := x0.Args[2]
 19933  		ptr0 := x0.Args[0]
 19934  		idx0 := x0.Args[1]
 19935  		y1 := o2.Args[1]
 19936  		if y1.Op != OpARM64MOVDnop {
 19937  			break
 19938  		}
 19939  		x1 := y1.Args[0]
 19940  		if x1.Op != OpARM64MOVBUload || auxIntToInt32(x1.AuxInt) != 4 {
 19941  			break
 19942  		}
 19943  		s := auxToSym(x1.Aux)
 19944  		_ = x1.Args[1]
 19945  		p1 := x1.Args[0]
 19946  		if p1.Op != OpARM64ADDshiftLL || auxIntToInt64(p1.AuxInt) != 2 {
 19947  			break
 19948  		}
 19949  		idx1 := p1.Args[1]
 19950  		ptr1 := p1.Args[0]
 19951  		if mem != x1.Args[1] {
 19952  			break
 19953  		}
 19954  		y2 := o1.Args[1]
 19955  		if y2.Op != OpARM64MOVDnop {
 19956  			break
 19957  		}
 19958  		x2 := y2.Args[0]
 19959  		if x2.Op != OpARM64MOVBUload || auxIntToInt32(x2.AuxInt) != 5 || auxToSym(x2.Aux) != s {
 19960  			break
 19961  		}
 19962  		_ = x2.Args[1]
 19963  		p := x2.Args[0]
 19964  		if mem != x2.Args[1] {
 19965  			break
 19966  		}
 19967  		y3 := o0.Args[1]
 19968  		if y3.Op != OpARM64MOVDnop {
 19969  			break
 19970  		}
 19971  		x3 := y3.Args[0]
 19972  		if x3.Op != OpARM64MOVBUload || auxIntToInt32(x3.AuxInt) != 6 || auxToSym(x3.Aux) != s {
 19973  			break
 19974  		}
 19975  		_ = x3.Args[1]
 19976  		if p != x3.Args[0] || mem != x3.Args[1] {
 19977  			break
 19978  		}
 19979  		y4 := v_1
 19980  		if y4.Op != OpARM64MOVDnop {
 19981  			break
 19982  		}
 19983  		x4 := y4.Args[0]
 19984  		if x4.Op != OpARM64MOVBUload || auxIntToInt32(x4.AuxInt) != 7 || auxToSym(x4.Aux) != s {
 19985  			break
 19986  		}
 19987  		_ = x4.Args[1]
 19988  		if p != x4.Args[0] || mem != x4.Args[1] || !(s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4) != nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && isSamePtr(p1, p) && clobber(x0, x1, x2, x3, x4, y1, y2, y3, y4, o0, o1, o2)) {
 19989  			break
 19990  		}
 19991  		b = mergePoint(b, x0, x1, x2, x3, x4)
 19992  		v0 := b.NewValue0(x4.Pos, OpARM64MOVDloadidx, t)
 19993  		v.copyOf(v0)
 19994  		v1 := b.NewValue0(x4.Pos, OpARM64SLLconst, idx0.Type)
 19995  		v1.AuxInt = int64ToAuxInt(2)
 19996  		v1.AddArg(idx0)
 19997  		v0.AddArg3(ptr0, v1, mem)
 19998  		return true
 19999  	}
 20000  	// match: (ORshiftLL <t> [56] o0:(ORshiftLL [48] o1:(ORshiftLL [40] o2:(ORshiftLL [32] x0:(MOVWUloadidx ptr idx mem) y1:(MOVDnop x1:(MOVBUloadidx ptr (ADDconst [4] idx) mem))) y2:(MOVDnop x2:(MOVBUloadidx ptr (ADDconst [5] idx) mem))) y3:(MOVDnop x3:(MOVBUloadidx ptr (ADDconst [6] idx) mem))) y4:(MOVDnop x4:(MOVBUloadidx ptr (ADDconst [7] idx) mem)))
 20001  	// cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4) != nil && clobber(x0, x1, x2, x3, x4, y1, y2, y3, y4, o0, o1, o2)
 20002  	// result: @mergePoint(b,x0,x1,x2,x3,x4) (MOVDloadidx <t> ptr idx mem)
 20003  	for {
 20004  		t := v.Type
 20005  		if auxIntToInt64(v.AuxInt) != 56 {
 20006  			break
 20007  		}
 20008  		o0 := v_0
 20009  		if o0.Op != OpARM64ORshiftLL || auxIntToInt64(o0.AuxInt) != 48 {
 20010  			break
 20011  		}
 20012  		_ = o0.Args[1]
 20013  		o1 := o0.Args[0]
 20014  		if o1.Op != OpARM64ORshiftLL || auxIntToInt64(o1.AuxInt) != 40 {
 20015  			break
 20016  		}
 20017  		_ = o1.Args[1]
 20018  		o2 := o1.Args[0]
 20019  		if o2.Op != OpARM64ORshiftLL || auxIntToInt64(o2.AuxInt) != 32 {
 20020  			break
 20021  		}
 20022  		_ = o2.Args[1]
 20023  		x0 := o2.Args[0]
 20024  		if x0.Op != OpARM64MOVWUloadidx {
 20025  			break
 20026  		}
 20027  		mem := x0.Args[2]
 20028  		ptr := x0.Args[0]
 20029  		idx := x0.Args[1]
 20030  		y1 := o2.Args[1]
 20031  		if y1.Op != OpARM64MOVDnop {
 20032  			break
 20033  		}
 20034  		x1 := y1.Args[0]
 20035  		if x1.Op != OpARM64MOVBUloadidx {
 20036  			break
 20037  		}
 20038  		_ = x1.Args[2]
 20039  		if ptr != x1.Args[0] {
 20040  			break
 20041  		}
 20042  		x1_1 := x1.Args[1]
 20043  		if x1_1.Op != OpARM64ADDconst || auxIntToInt64(x1_1.AuxInt) != 4 || idx != x1_1.Args[0] || mem != x1.Args[2] {
 20044  			break
 20045  		}
 20046  		y2 := o1.Args[1]
 20047  		if y2.Op != OpARM64MOVDnop {
 20048  			break
 20049  		}
 20050  		x2 := y2.Args[0]
 20051  		if x2.Op != OpARM64MOVBUloadidx {
 20052  			break
 20053  		}
 20054  		_ = x2.Args[2]
 20055  		if ptr != x2.Args[0] {
 20056  			break
 20057  		}
 20058  		x2_1 := x2.Args[1]
 20059  		if x2_1.Op != OpARM64ADDconst || auxIntToInt64(x2_1.AuxInt) != 5 || idx != x2_1.Args[0] || mem != x2.Args[2] {
 20060  			break
 20061  		}
 20062  		y3 := o0.Args[1]
 20063  		if y3.Op != OpARM64MOVDnop {
 20064  			break
 20065  		}
 20066  		x3 := y3.Args[0]
 20067  		if x3.Op != OpARM64MOVBUloadidx {
 20068  			break
 20069  		}
 20070  		_ = x3.Args[2]
 20071  		if ptr != x3.Args[0] {
 20072  			break
 20073  		}
 20074  		x3_1 := x3.Args[1]
 20075  		if x3_1.Op != OpARM64ADDconst || auxIntToInt64(x3_1.AuxInt) != 6 || idx != x3_1.Args[0] || mem != x3.Args[2] {
 20076  			break
 20077  		}
 20078  		y4 := v_1
 20079  		if y4.Op != OpARM64MOVDnop {
 20080  			break
 20081  		}
 20082  		x4 := y4.Args[0]
 20083  		if x4.Op != OpARM64MOVBUloadidx {
 20084  			break
 20085  		}
 20086  		_ = x4.Args[2]
 20087  		if ptr != x4.Args[0] {
 20088  			break
 20089  		}
 20090  		x4_1 := x4.Args[1]
 20091  		if x4_1.Op != OpARM64ADDconst || auxIntToInt64(x4_1.AuxInt) != 7 || idx != x4_1.Args[0] || mem != x4.Args[2] || !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4) != nil && clobber(x0, x1, x2, x3, x4, y1, y2, y3, y4, o0, o1, o2)) {
 20092  			break
 20093  		}
 20094  		b = mergePoint(b, x0, x1, x2, x3, x4)
 20095  		v0 := b.NewValue0(v.Pos, OpARM64MOVDloadidx, t)
 20096  		v.copyOf(v0)
 20097  		v0.AddArg3(ptr, idx, mem)
 20098  		return true
 20099  	}
 20100  	// match: (ORshiftLL <t> [8] y0:(MOVDnop x0:(MOVBUload [i1] {s} p mem)) y1:(MOVDnop x1:(MOVBUload [i0] {s} p mem)))
 20101  	// cond: i1 == i0+1 && x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b,x0,x1) != nil && clobber(x0, x1, y0, y1)
 20102  	// result: @mergePoint(b,x0,x1) (REV16W <t> (MOVHUload <t> [i0] {s} p mem))
 20103  	for {
 20104  		t := v.Type
 20105  		if auxIntToInt64(v.AuxInt) != 8 {
 20106  			break
 20107  		}
 20108  		y0 := v_0
 20109  		if y0.Op != OpARM64MOVDnop {
 20110  			break
 20111  		}
 20112  		x0 := y0.Args[0]
 20113  		if x0.Op != OpARM64MOVBUload {
 20114  			break
 20115  		}
 20116  		i1 := auxIntToInt32(x0.AuxInt)
 20117  		s := auxToSym(x0.Aux)
 20118  		mem := x0.Args[1]
 20119  		p := x0.Args[0]
 20120  		y1 := v_1
 20121  		if y1.Op != OpARM64MOVDnop {
 20122  			break
 20123  		}
 20124  		x1 := y1.Args[0]
 20125  		if x1.Op != OpARM64MOVBUload {
 20126  			break
 20127  		}
 20128  		i0 := auxIntToInt32(x1.AuxInt)
 20129  		if auxToSym(x1.Aux) != s {
 20130  			break
 20131  		}
 20132  		_ = x1.Args[1]
 20133  		if p != x1.Args[0] || mem != x1.Args[1] || !(i1 == i0+1 && x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b, x0, x1) != nil && clobber(x0, x1, y0, y1)) {
 20134  			break
 20135  		}
 20136  		b = mergePoint(b, x0, x1)
 20137  		v0 := b.NewValue0(x1.Pos, OpARM64REV16W, t)
 20138  		v.copyOf(v0)
 20139  		v1 := b.NewValue0(x1.Pos, OpARM64MOVHUload, t)
 20140  		v1.AuxInt = int32ToAuxInt(i0)
 20141  		v1.Aux = symToAux(s)
 20142  		v1.AddArg2(p, mem)
 20143  		v0.AddArg(v1)
 20144  		return true
 20145  	}
 20146  	// match: (ORshiftLL <t> [8] y0:(MOVDnop x0:(MOVBUload [1] {s} p1:(ADD ptr1 idx1) mem)) y1:(MOVDnop x1:(MOVBUloadidx ptr0 idx0 mem)))
 20147  	// cond: s == nil && x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b,x0,x1) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x0, x1, y0, y1)
 20148  	// result: @mergePoint(b,x0,x1) (REV16W <t> (MOVHUloadidx <t> ptr0 idx0 mem))
 20149  	for {
 20150  		t := v.Type
 20151  		if auxIntToInt64(v.AuxInt) != 8 {
 20152  			break
 20153  		}
 20154  		y0 := v_0
 20155  		if y0.Op != OpARM64MOVDnop {
 20156  			break
 20157  		}
 20158  		x0 := y0.Args[0]
 20159  		if x0.Op != OpARM64MOVBUload || auxIntToInt32(x0.AuxInt) != 1 {
 20160  			break
 20161  		}
 20162  		s := auxToSym(x0.Aux)
 20163  		mem := x0.Args[1]
 20164  		p1 := x0.Args[0]
 20165  		if p1.Op != OpARM64ADD {
 20166  			break
 20167  		}
 20168  		_ = p1.Args[1]
 20169  		p1_0 := p1.Args[0]
 20170  		p1_1 := p1.Args[1]
 20171  		for _i0 := 0; _i0 <= 1; _i0, p1_0, p1_1 = _i0+1, p1_1, p1_0 {
 20172  			ptr1 := p1_0
 20173  			idx1 := p1_1
 20174  			y1 := v_1
 20175  			if y1.Op != OpARM64MOVDnop {
 20176  				continue
 20177  			}
 20178  			x1 := y1.Args[0]
 20179  			if x1.Op != OpARM64MOVBUloadidx {
 20180  				continue
 20181  			}
 20182  			_ = x1.Args[2]
 20183  			ptr0 := x1.Args[0]
 20184  			idx0 := x1.Args[1]
 20185  			if mem != x1.Args[2] || !(s == nil && x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b, x0, x1) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x0, x1, y0, y1)) {
 20186  				continue
 20187  			}
 20188  			b = mergePoint(b, x0, x1)
 20189  			v0 := b.NewValue0(x0.Pos, OpARM64REV16W, t)
 20190  			v.copyOf(v0)
 20191  			v1 := b.NewValue0(x0.Pos, OpARM64MOVHUloadidx, t)
 20192  			v1.AddArg3(ptr0, idx0, mem)
 20193  			v0.AddArg(v1)
 20194  			return true
 20195  		}
 20196  		break
 20197  	}
 20198  	// match: (ORshiftLL <t> [8] y0:(MOVDnop x0:(MOVBUloadidx ptr (ADDconst [1] idx) mem)) y1:(MOVDnop x1:(MOVBUloadidx ptr idx mem)))
 20199  	// cond: x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b,x0,x1) != nil && clobber(x0, x1, y0, y1)
 20200  	// result: @mergePoint(b,x0,x1) (REV16W <t> (MOVHUloadidx <t> ptr idx mem))
 20201  	for {
 20202  		t := v.Type
 20203  		if auxIntToInt64(v.AuxInt) != 8 {
 20204  			break
 20205  		}
 20206  		y0 := v_0
 20207  		if y0.Op != OpARM64MOVDnop {
 20208  			break
 20209  		}
 20210  		x0 := y0.Args[0]
 20211  		if x0.Op != OpARM64MOVBUloadidx {
 20212  			break
 20213  		}
 20214  		mem := x0.Args[2]
 20215  		ptr := x0.Args[0]
 20216  		x0_1 := x0.Args[1]
 20217  		if x0_1.Op != OpARM64ADDconst || auxIntToInt64(x0_1.AuxInt) != 1 {
 20218  			break
 20219  		}
 20220  		idx := x0_1.Args[0]
 20221  		y1 := v_1
 20222  		if y1.Op != OpARM64MOVDnop {
 20223  			break
 20224  		}
 20225  		x1 := y1.Args[0]
 20226  		if x1.Op != OpARM64MOVBUloadidx {
 20227  			break
 20228  		}
 20229  		_ = x1.Args[2]
 20230  		if ptr != x1.Args[0] || idx != x1.Args[1] || mem != x1.Args[2] || !(x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b, x0, x1) != nil && clobber(x0, x1, y0, y1)) {
 20231  			break
 20232  		}
 20233  		b = mergePoint(b, x0, x1)
 20234  		v0 := b.NewValue0(v.Pos, OpARM64REV16W, t)
 20235  		v.copyOf(v0)
 20236  		v1 := b.NewValue0(v.Pos, OpARM64MOVHUloadidx, t)
 20237  		v1.AddArg3(ptr, idx, mem)
 20238  		v0.AddArg(v1)
 20239  		return true
 20240  	}
 20241  	// match: (ORshiftLL <t> [24] o0:(ORshiftLL [16] y0:(REV16W x0:(MOVHUload [i2] {s} p mem)) y1:(MOVDnop x1:(MOVBUload [i1] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i0] {s} p mem)))
 20242  	// cond: i1 == i0+1 && i2 == i0+2 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b,x0,x1,x2) != nil && clobber(x0, x1, x2, y0, y1, y2, o0)
 20243  	// result: @mergePoint(b,x0,x1,x2) (REVW <t> (MOVWUload <t> {s} (OffPtr <p.Type> [int64(i0)] p) mem))
 20244  	for {
 20245  		t := v.Type
 20246  		if auxIntToInt64(v.AuxInt) != 24 {
 20247  			break
 20248  		}
 20249  		o0 := v_0
 20250  		if o0.Op != OpARM64ORshiftLL || auxIntToInt64(o0.AuxInt) != 16 {
 20251  			break
 20252  		}
 20253  		_ = o0.Args[1]
 20254  		y0 := o0.Args[0]
 20255  		if y0.Op != OpARM64REV16W {
 20256  			break
 20257  		}
 20258  		x0 := y0.Args[0]
 20259  		if x0.Op != OpARM64MOVHUload {
 20260  			break
 20261  		}
 20262  		i2 := auxIntToInt32(x0.AuxInt)
 20263  		s := auxToSym(x0.Aux)
 20264  		mem := x0.Args[1]
 20265  		p := x0.Args[0]
 20266  		y1 := o0.Args[1]
 20267  		if y1.Op != OpARM64MOVDnop {
 20268  			break
 20269  		}
 20270  		x1 := y1.Args[0]
 20271  		if x1.Op != OpARM64MOVBUload {
 20272  			break
 20273  		}
 20274  		i1 := auxIntToInt32(x1.AuxInt)
 20275  		if auxToSym(x1.Aux) != s {
 20276  			break
 20277  		}
 20278  		_ = x1.Args[1]
 20279  		if p != x1.Args[0] || mem != x1.Args[1] {
 20280  			break
 20281  		}
 20282  		y2 := v_1
 20283  		if y2.Op != OpARM64MOVDnop {
 20284  			break
 20285  		}
 20286  		x2 := y2.Args[0]
 20287  		if x2.Op != OpARM64MOVBUload {
 20288  			break
 20289  		}
 20290  		i0 := auxIntToInt32(x2.AuxInt)
 20291  		if auxToSym(x2.Aux) != s {
 20292  			break
 20293  		}
 20294  		_ = x2.Args[1]
 20295  		if p != x2.Args[0] || mem != x2.Args[1] || !(i1 == i0+1 && i2 == i0+2 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b, x0, x1, x2) != nil && clobber(x0, x1, x2, y0, y1, y2, o0)) {
 20296  			break
 20297  		}
 20298  		b = mergePoint(b, x0, x1, x2)
 20299  		v0 := b.NewValue0(x2.Pos, OpARM64REVW, t)
 20300  		v.copyOf(v0)
 20301  		v1 := b.NewValue0(x2.Pos, OpARM64MOVWUload, t)
 20302  		v1.Aux = symToAux(s)
 20303  		v2 := b.NewValue0(x2.Pos, OpOffPtr, p.Type)
 20304  		v2.AuxInt = int64ToAuxInt(int64(i0))
 20305  		v2.AddArg(p)
 20306  		v1.AddArg2(v2, mem)
 20307  		v0.AddArg(v1)
 20308  		return true
 20309  	}
 20310  	// match: (ORshiftLL <t> [24] o0:(ORshiftLL [16] y0:(REV16W x0:(MOVHUload [2] {s} p mem)) y1:(MOVDnop x1:(MOVBUload [1] {s} p1:(ADD ptr1 idx1) mem))) y2:(MOVDnop x2:(MOVBUloadidx ptr0 idx0 mem)))
 20311  	// cond: s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b,x0,x1,x2) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0, x1, x2, y0, y1, y2, o0)
 20312  	// result: @mergePoint(b,x0,x1,x2) (REVW <t> (MOVWUloadidx <t> ptr0 idx0 mem))
 20313  	for {
 20314  		t := v.Type
 20315  		if auxIntToInt64(v.AuxInt) != 24 {
 20316  			break
 20317  		}
 20318  		o0 := v_0
 20319  		if o0.Op != OpARM64ORshiftLL || auxIntToInt64(o0.AuxInt) != 16 {
 20320  			break
 20321  		}
 20322  		_ = o0.Args[1]
 20323  		y0 := o0.Args[0]
 20324  		if y0.Op != OpARM64REV16W {
 20325  			break
 20326  		}
 20327  		x0 := y0.Args[0]
 20328  		if x0.Op != OpARM64MOVHUload || auxIntToInt32(x0.AuxInt) != 2 {
 20329  			break
 20330  		}
 20331  		s := auxToSym(x0.Aux)
 20332  		mem := x0.Args[1]
 20333  		p := x0.Args[0]
 20334  		y1 := o0.Args[1]
 20335  		if y1.Op != OpARM64MOVDnop {
 20336  			break
 20337  		}
 20338  		x1 := y1.Args[0]
 20339  		if x1.Op != OpARM64MOVBUload || auxIntToInt32(x1.AuxInt) != 1 || auxToSym(x1.Aux) != s {
 20340  			break
 20341  		}
 20342  		_ = x1.Args[1]
 20343  		p1 := x1.Args[0]
 20344  		if p1.Op != OpARM64ADD {
 20345  			break
 20346  		}
 20347  		_ = p1.Args[1]
 20348  		p1_0 := p1.Args[0]
 20349  		p1_1 := p1.Args[1]
 20350  		for _i0 := 0; _i0 <= 1; _i0, p1_0, p1_1 = _i0+1, p1_1, p1_0 {
 20351  			ptr1 := p1_0
 20352  			idx1 := p1_1
 20353  			if mem != x1.Args[1] {
 20354  				continue
 20355  			}
 20356  			y2 := v_1
 20357  			if y2.Op != OpARM64MOVDnop {
 20358  				continue
 20359  			}
 20360  			x2 := y2.Args[0]
 20361  			if x2.Op != OpARM64MOVBUloadidx {
 20362  				continue
 20363  			}
 20364  			_ = x2.Args[2]
 20365  			ptr0 := x2.Args[0]
 20366  			idx0 := x2.Args[1]
 20367  			if mem != x2.Args[2] || !(s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b, x0, x1, x2) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0, x1, x2, y0, y1, y2, o0)) {
 20368  				continue
 20369  			}
 20370  			b = mergePoint(b, x0, x1, x2)
 20371  			v0 := b.NewValue0(x1.Pos, OpARM64REVW, t)
 20372  			v.copyOf(v0)
 20373  			v1 := b.NewValue0(x1.Pos, OpARM64MOVWUloadidx, t)
 20374  			v1.AddArg3(ptr0, idx0, mem)
 20375  			v0.AddArg(v1)
 20376  			return true
 20377  		}
 20378  		break
 20379  	}
 20380  	// match: (ORshiftLL <t> [24] o0:(ORshiftLL [16] y0:(REV16W x0:(MOVHUloadidx ptr (ADDconst [2] idx) mem)) y1:(MOVDnop x1:(MOVBUloadidx ptr (ADDconst [1] idx) mem))) y2:(MOVDnop x2:(MOVBUloadidx ptr idx mem)))
 20381  	// cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b,x0,x1,x2) != nil && clobber(x0, x1, x2, y0, y1, y2, o0)
 20382  	// result: @mergePoint(b,x0,x1,x2) (REVW <t> (MOVWUloadidx <t> ptr idx mem))
 20383  	for {
 20384  		t := v.Type
 20385  		if auxIntToInt64(v.AuxInt) != 24 {
 20386  			break
 20387  		}
 20388  		o0 := v_0
 20389  		if o0.Op != OpARM64ORshiftLL || auxIntToInt64(o0.AuxInt) != 16 {
 20390  			break
 20391  		}
 20392  		_ = o0.Args[1]
 20393  		y0 := o0.Args[0]
 20394  		if y0.Op != OpARM64REV16W {
 20395  			break
 20396  		}
 20397  		x0 := y0.Args[0]
 20398  		if x0.Op != OpARM64MOVHUloadidx {
 20399  			break
 20400  		}
 20401  		mem := x0.Args[2]
 20402  		ptr := x0.Args[0]
 20403  		x0_1 := x0.Args[1]
 20404  		if x0_1.Op != OpARM64ADDconst || auxIntToInt64(x0_1.AuxInt) != 2 {
 20405  			break
 20406  		}
 20407  		idx := x0_1.Args[0]
 20408  		y1 := o0.Args[1]
 20409  		if y1.Op != OpARM64MOVDnop {
 20410  			break
 20411  		}
 20412  		x1 := y1.Args[0]
 20413  		if x1.Op != OpARM64MOVBUloadidx {
 20414  			break
 20415  		}
 20416  		_ = x1.Args[2]
 20417  		if ptr != x1.Args[0] {
 20418  			break
 20419  		}
 20420  		x1_1 := x1.Args[1]
 20421  		if x1_1.Op != OpARM64ADDconst || auxIntToInt64(x1_1.AuxInt) != 1 || idx != x1_1.Args[0] || mem != x1.Args[2] {
 20422  			break
 20423  		}
 20424  		y2 := v_1
 20425  		if y2.Op != OpARM64MOVDnop {
 20426  			break
 20427  		}
 20428  		x2 := y2.Args[0]
 20429  		if x2.Op != OpARM64MOVBUloadidx {
 20430  			break
 20431  		}
 20432  		_ = x2.Args[2]
 20433  		if ptr != x2.Args[0] || idx != x2.Args[1] || mem != x2.Args[2] || !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b, x0, x1, x2) != nil && clobber(x0, x1, x2, y0, y1, y2, o0)) {
 20434  			break
 20435  		}
 20436  		b = mergePoint(b, x0, x1, x2)
 20437  		v0 := b.NewValue0(v.Pos, OpARM64REVW, t)
 20438  		v.copyOf(v0)
 20439  		v1 := b.NewValue0(v.Pos, OpARM64MOVWUloadidx, t)
 20440  		v1.AddArg3(ptr, idx, mem)
 20441  		v0.AddArg(v1)
 20442  		return true
 20443  	}
 20444  	// match: (ORshiftLL <t> [56] o0:(ORshiftLL [48] o1:(ORshiftLL [40] o2:(ORshiftLL [32] y0:(REVW x0:(MOVWUload [i4] {s} p mem)) y1:(MOVDnop x1:(MOVBUload [i3] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i2] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [i1] {s} p mem))) y4:(MOVDnop x4:(MOVBUload [i0] {s} p mem)))
 20445  	// cond: i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4) != nil && clobber(x0, x1, x2, x3, x4, y0, y1, y2, y3, y4, o0, o1, o2)
 20446  	// result: @mergePoint(b,x0,x1,x2,x3,x4) (REV <t> (MOVDload <t> {s} (OffPtr <p.Type> [int64(i0)] p) mem))
 20447  	for {
 20448  		t := v.Type
 20449  		if auxIntToInt64(v.AuxInt) != 56 {
 20450  			break
 20451  		}
 20452  		o0 := v_0
 20453  		if o0.Op != OpARM64ORshiftLL || auxIntToInt64(o0.AuxInt) != 48 {
 20454  			break
 20455  		}
 20456  		_ = o0.Args[1]
 20457  		o1 := o0.Args[0]
 20458  		if o1.Op != OpARM64ORshiftLL || auxIntToInt64(o1.AuxInt) != 40 {
 20459  			break
 20460  		}
 20461  		_ = o1.Args[1]
 20462  		o2 := o1.Args[0]
 20463  		if o2.Op != OpARM64ORshiftLL || auxIntToInt64(o2.AuxInt) != 32 {
 20464  			break
 20465  		}
 20466  		_ = o2.Args[1]
 20467  		y0 := o2.Args[0]
 20468  		if y0.Op != OpARM64REVW {
 20469  			break
 20470  		}
 20471  		x0 := y0.Args[0]
 20472  		if x0.Op != OpARM64MOVWUload {
 20473  			break
 20474  		}
 20475  		i4 := auxIntToInt32(x0.AuxInt)
 20476  		s := auxToSym(x0.Aux)
 20477  		mem := x0.Args[1]
 20478  		p := x0.Args[0]
 20479  		y1 := o2.Args[1]
 20480  		if y1.Op != OpARM64MOVDnop {
 20481  			break
 20482  		}
 20483  		x1 := y1.Args[0]
 20484  		if x1.Op != OpARM64MOVBUload {
 20485  			break
 20486  		}
 20487  		i3 := auxIntToInt32(x1.AuxInt)
 20488  		if auxToSym(x1.Aux) != s {
 20489  			break
 20490  		}
 20491  		_ = x1.Args[1]
 20492  		if p != x1.Args[0] || mem != x1.Args[1] {
 20493  			break
 20494  		}
 20495  		y2 := o1.Args[1]
 20496  		if y2.Op != OpARM64MOVDnop {
 20497  			break
 20498  		}
 20499  		x2 := y2.Args[0]
 20500  		if x2.Op != OpARM64MOVBUload {
 20501  			break
 20502  		}
 20503  		i2 := auxIntToInt32(x2.AuxInt)
 20504  		if auxToSym(x2.Aux) != s {
 20505  			break
 20506  		}
 20507  		_ = x2.Args[1]
 20508  		if p != x2.Args[0] || mem != x2.Args[1] {
 20509  			break
 20510  		}
 20511  		y3 := o0.Args[1]
 20512  		if y3.Op != OpARM64MOVDnop {
 20513  			break
 20514  		}
 20515  		x3 := y3.Args[0]
 20516  		if x3.Op != OpARM64MOVBUload {
 20517  			break
 20518  		}
 20519  		i1 := auxIntToInt32(x3.AuxInt)
 20520  		if auxToSym(x3.Aux) != s {
 20521  			break
 20522  		}
 20523  		_ = x3.Args[1]
 20524  		if p != x3.Args[0] || mem != x3.Args[1] {
 20525  			break
 20526  		}
 20527  		y4 := v_1
 20528  		if y4.Op != OpARM64MOVDnop {
 20529  			break
 20530  		}
 20531  		x4 := y4.Args[0]
 20532  		if x4.Op != OpARM64MOVBUload {
 20533  			break
 20534  		}
 20535  		i0 := auxIntToInt32(x4.AuxInt)
 20536  		if auxToSym(x4.Aux) != s {
 20537  			break
 20538  		}
 20539  		_ = x4.Args[1]
 20540  		if p != x4.Args[0] || mem != x4.Args[1] || !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4) != nil && clobber(x0, x1, x2, x3, x4, y0, y1, y2, y3, y4, o0, o1, o2)) {
 20541  			break
 20542  		}
 20543  		b = mergePoint(b, x0, x1, x2, x3, x4)
 20544  		v0 := b.NewValue0(x4.Pos, OpARM64REV, t)
 20545  		v.copyOf(v0)
 20546  		v1 := b.NewValue0(x4.Pos, OpARM64MOVDload, t)
 20547  		v1.Aux = symToAux(s)
 20548  		v2 := b.NewValue0(x4.Pos, OpOffPtr, p.Type)
 20549  		v2.AuxInt = int64ToAuxInt(int64(i0))
 20550  		v2.AddArg(p)
 20551  		v1.AddArg2(v2, mem)
 20552  		v0.AddArg(v1)
 20553  		return true
 20554  	}
 20555  	// match: (ORshiftLL <t> [56] o0:(ORshiftLL [48] o1:(ORshiftLL [40] o2:(ORshiftLL [32] y0:(REVW x0:(MOVWUload [4] {s} p mem)) y1:(MOVDnop x1:(MOVBUload [3] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [2] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [1] {s} p1:(ADD ptr1 idx1) mem))) y4:(MOVDnop x4:(MOVBUloadidx ptr0 idx0 mem)))
 20556  	// cond: s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0, x1, x2, x3, x4, y0, y1, y2, y3, y4, o0, o1, o2)
 20557  	// result: @mergePoint(b,x0,x1,x2,x3,x4) (REV <t> (MOVDloadidx <t> ptr0 idx0 mem))
 20558  	for {
 20559  		t := v.Type
 20560  		if auxIntToInt64(v.AuxInt) != 56 {
 20561  			break
 20562  		}
 20563  		o0 := v_0
 20564  		if o0.Op != OpARM64ORshiftLL || auxIntToInt64(o0.AuxInt) != 48 {
 20565  			break
 20566  		}
 20567  		_ = o0.Args[1]
 20568  		o1 := o0.Args[0]
 20569  		if o1.Op != OpARM64ORshiftLL || auxIntToInt64(o1.AuxInt) != 40 {
 20570  			break
 20571  		}
 20572  		_ = o1.Args[1]
 20573  		o2 := o1.Args[0]
 20574  		if o2.Op != OpARM64ORshiftLL || auxIntToInt64(o2.AuxInt) != 32 {
 20575  			break
 20576  		}
 20577  		_ = o2.Args[1]
 20578  		y0 := o2.Args[0]
 20579  		if y0.Op != OpARM64REVW {
 20580  			break
 20581  		}
 20582  		x0 := y0.Args[0]
 20583  		if x0.Op != OpARM64MOVWUload || auxIntToInt32(x0.AuxInt) != 4 {
 20584  			break
 20585  		}
 20586  		s := auxToSym(x0.Aux)
 20587  		mem := x0.Args[1]
 20588  		p := x0.Args[0]
 20589  		y1 := o2.Args[1]
 20590  		if y1.Op != OpARM64MOVDnop {
 20591  			break
 20592  		}
 20593  		x1 := y1.Args[0]
 20594  		if x1.Op != OpARM64MOVBUload || auxIntToInt32(x1.AuxInt) != 3 || auxToSym(x1.Aux) != s {
 20595  			break
 20596  		}
 20597  		_ = x1.Args[1]
 20598  		if p != x1.Args[0] || mem != x1.Args[1] {
 20599  			break
 20600  		}
 20601  		y2 := o1.Args[1]
 20602  		if y2.Op != OpARM64MOVDnop {
 20603  			break
 20604  		}
 20605  		x2 := y2.Args[0]
 20606  		if x2.Op != OpARM64MOVBUload || auxIntToInt32(x2.AuxInt) != 2 || auxToSym(x2.Aux) != s {
 20607  			break
 20608  		}
 20609  		_ = x2.Args[1]
 20610  		if p != x2.Args[0] || mem != x2.Args[1] {
 20611  			break
 20612  		}
 20613  		y3 := o0.Args[1]
 20614  		if y3.Op != OpARM64MOVDnop {
 20615  			break
 20616  		}
 20617  		x3 := y3.Args[0]
 20618  		if x3.Op != OpARM64MOVBUload || auxIntToInt32(x3.AuxInt) != 1 || auxToSym(x3.Aux) != s {
 20619  			break
 20620  		}
 20621  		_ = x3.Args[1]
 20622  		p1 := x3.Args[0]
 20623  		if p1.Op != OpARM64ADD {
 20624  			break
 20625  		}
 20626  		_ = p1.Args[1]
 20627  		p1_0 := p1.Args[0]
 20628  		p1_1 := p1.Args[1]
 20629  		for _i0 := 0; _i0 <= 1; _i0, p1_0, p1_1 = _i0+1, p1_1, p1_0 {
 20630  			ptr1 := p1_0
 20631  			idx1 := p1_1
 20632  			if mem != x3.Args[1] {
 20633  				continue
 20634  			}
 20635  			y4 := v_1
 20636  			if y4.Op != OpARM64MOVDnop {
 20637  				continue
 20638  			}
 20639  			x4 := y4.Args[0]
 20640  			if x4.Op != OpARM64MOVBUloadidx {
 20641  				continue
 20642  			}
 20643  			_ = x4.Args[2]
 20644  			ptr0 := x4.Args[0]
 20645  			idx0 := x4.Args[1]
 20646  			if mem != x4.Args[2] || !(s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0, x1, x2, x3, x4, y0, y1, y2, y3, y4, o0, o1, o2)) {
 20647  				continue
 20648  			}
 20649  			b = mergePoint(b, x0, x1, x2, x3, x4)
 20650  			v0 := b.NewValue0(x3.Pos, OpARM64REV, t)
 20651  			v.copyOf(v0)
 20652  			v1 := b.NewValue0(x3.Pos, OpARM64MOVDloadidx, t)
 20653  			v1.AddArg3(ptr0, idx0, mem)
 20654  			v0.AddArg(v1)
 20655  			return true
 20656  		}
 20657  		break
 20658  	}
 20659  	// match: (ORshiftLL <t> [56] o0:(ORshiftLL [48] o1:(ORshiftLL [40] o2:(ORshiftLL [32] y0:(REVW x0:(MOVWUloadidx ptr (ADDconst [4] idx) mem)) y1:(MOVDnop x1:(MOVBUloadidx ptr (ADDconst [3] idx) mem))) y2:(MOVDnop x2:(MOVBUloadidx ptr (ADDconst [2] idx) mem))) y3:(MOVDnop x3:(MOVBUloadidx ptr (ADDconst [1] idx) mem))) y4:(MOVDnop x4:(MOVBUloadidx ptr idx mem)))
 20660  	// cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4) != nil && clobber(x0, x1, x2, x3, x4, y0, y1, y2, y3, y4, o0, o1, o2)
 20661  	// result: @mergePoint(b,x0,x1,x2,x3,x4) (REV <t> (MOVDloadidx <t> ptr idx mem))
 20662  	for {
 20663  		t := v.Type
 20664  		if auxIntToInt64(v.AuxInt) != 56 {
 20665  			break
 20666  		}
 20667  		o0 := v_0
 20668  		if o0.Op != OpARM64ORshiftLL || auxIntToInt64(o0.AuxInt) != 48 {
 20669  			break
 20670  		}
 20671  		_ = o0.Args[1]
 20672  		o1 := o0.Args[0]
 20673  		if o1.Op != OpARM64ORshiftLL || auxIntToInt64(o1.AuxInt) != 40 {
 20674  			break
 20675  		}
 20676  		_ = o1.Args[1]
 20677  		o2 := o1.Args[0]
 20678  		if o2.Op != OpARM64ORshiftLL || auxIntToInt64(o2.AuxInt) != 32 {
 20679  			break
 20680  		}
 20681  		_ = o2.Args[1]
 20682  		y0 := o2.Args[0]
 20683  		if y0.Op != OpARM64REVW {
 20684  			break
 20685  		}
 20686  		x0 := y0.Args[0]
 20687  		if x0.Op != OpARM64MOVWUloadidx {
 20688  			break
 20689  		}
 20690  		mem := x0.Args[2]
 20691  		ptr := x0.Args[0]
 20692  		x0_1 := x0.Args[1]
 20693  		if x0_1.Op != OpARM64ADDconst || auxIntToInt64(x0_1.AuxInt) != 4 {
 20694  			break
 20695  		}
 20696  		idx := x0_1.Args[0]
 20697  		y1 := o2.Args[1]
 20698  		if y1.Op != OpARM64MOVDnop {
 20699  			break
 20700  		}
 20701  		x1 := y1.Args[0]
 20702  		if x1.Op != OpARM64MOVBUloadidx {
 20703  			break
 20704  		}
 20705  		_ = x1.Args[2]
 20706  		if ptr != x1.Args[0] {
 20707  			break
 20708  		}
 20709  		x1_1 := x1.Args[1]
 20710  		if x1_1.Op != OpARM64ADDconst || auxIntToInt64(x1_1.AuxInt) != 3 || idx != x1_1.Args[0] || mem != x1.Args[2] {
 20711  			break
 20712  		}
 20713  		y2 := o1.Args[1]
 20714  		if y2.Op != OpARM64MOVDnop {
 20715  			break
 20716  		}
 20717  		x2 := y2.Args[0]
 20718  		if x2.Op != OpARM64MOVBUloadidx {
 20719  			break
 20720  		}
 20721  		_ = x2.Args[2]
 20722  		if ptr != x2.Args[0] {
 20723  			break
 20724  		}
 20725  		x2_1 := x2.Args[1]
 20726  		if x2_1.Op != OpARM64ADDconst || auxIntToInt64(x2_1.AuxInt) != 2 || idx != x2_1.Args[0] || mem != x2.Args[2] {
 20727  			break
 20728  		}
 20729  		y3 := o0.Args[1]
 20730  		if y3.Op != OpARM64MOVDnop {
 20731  			break
 20732  		}
 20733  		x3 := y3.Args[0]
 20734  		if x3.Op != OpARM64MOVBUloadidx {
 20735  			break
 20736  		}
 20737  		_ = x3.Args[2]
 20738  		if ptr != x3.Args[0] {
 20739  			break
 20740  		}
 20741  		x3_1 := x3.Args[1]
 20742  		if x3_1.Op != OpARM64ADDconst || auxIntToInt64(x3_1.AuxInt) != 1 || idx != x3_1.Args[0] || mem != x3.Args[2] {
 20743  			break
 20744  		}
 20745  		y4 := v_1
 20746  		if y4.Op != OpARM64MOVDnop {
 20747  			break
 20748  		}
 20749  		x4 := y4.Args[0]
 20750  		if x4.Op != OpARM64MOVBUloadidx {
 20751  			break
 20752  		}
 20753  		_ = x4.Args[2]
 20754  		if ptr != x4.Args[0] || idx != x4.Args[1] || mem != x4.Args[2] || !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4) != nil && clobber(x0, x1, x2, x3, x4, y0, y1, y2, y3, y4, o0, o1, o2)) {
 20755  			break
 20756  		}
 20757  		b = mergePoint(b, x0, x1, x2, x3, x4)
 20758  		v0 := b.NewValue0(v.Pos, OpARM64REV, t)
 20759  		v.copyOf(v0)
 20760  		v1 := b.NewValue0(v.Pos, OpARM64MOVDloadidx, t)
 20761  		v1.AddArg3(ptr, idx, mem)
 20762  		v0.AddArg(v1)
 20763  		return true
 20764  	}
 20765  	return false
 20766  }
 20767  func rewriteValueARM64_OpARM64ORshiftRA(v *Value) bool {
 20768  	v_1 := v.Args[1]
 20769  	v_0 := v.Args[0]
 20770  	b := v.Block
 20771  	// match: (ORshiftRA (MOVDconst [c]) x [d])
 20772  	// result: (ORconst [c] (SRAconst <x.Type> x [d]))
 20773  	for {
 20774  		d := auxIntToInt64(v.AuxInt)
 20775  		if v_0.Op != OpARM64MOVDconst {
 20776  			break
 20777  		}
 20778  		c := auxIntToInt64(v_0.AuxInt)
 20779  		x := v_1
 20780  		v.reset(OpARM64ORconst)
 20781  		v.AuxInt = int64ToAuxInt(c)
 20782  		v0 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type)
 20783  		v0.AuxInt = int64ToAuxInt(d)
 20784  		v0.AddArg(x)
 20785  		v.AddArg(v0)
 20786  		return true
 20787  	}
 20788  	// match: (ORshiftRA x (MOVDconst [c]) [d])
 20789  	// result: (ORconst x [c>>uint64(d)])
 20790  	for {
 20791  		d := auxIntToInt64(v.AuxInt)
 20792  		x := v_0
 20793  		if v_1.Op != OpARM64MOVDconst {
 20794  			break
 20795  		}
 20796  		c := auxIntToInt64(v_1.AuxInt)
 20797  		v.reset(OpARM64ORconst)
 20798  		v.AuxInt = int64ToAuxInt(c >> uint64(d))
 20799  		v.AddArg(x)
 20800  		return true
 20801  	}
 20802  	// match: (ORshiftRA y:(SRAconst x [c]) x [c])
 20803  	// result: y
 20804  	for {
 20805  		c := auxIntToInt64(v.AuxInt)
 20806  		y := v_0
 20807  		if y.Op != OpARM64SRAconst || auxIntToInt64(y.AuxInt) != c {
 20808  			break
 20809  		}
 20810  		x := y.Args[0]
 20811  		if x != v_1 {
 20812  			break
 20813  		}
 20814  		v.copyOf(y)
 20815  		return true
 20816  	}
 20817  	return false
 20818  }
// rewriteValueARM64_OpARM64ORshiftRL applies the generated ARM64.rules
// rewrites for ORshiftRL (OR whose second operand is logically
// right-shifted): constant folding on either operand, cancellation of a
// shift ORed with its own source, and recognition of bitfield insert (BFI)
// and bitfield insert-low (BFXIL) patterns. Reports whether v was rewritten.
func rewriteValueARM64_OpARM64ORshiftRL(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (ORshiftRL (MOVDconst [c]) x [d])
	// result: (ORconst [c] (SRLconst <x.Type> x [d]))
	for {
		d := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_1
		v.reset(OpARM64ORconst)
		v.AuxInt = int64ToAuxInt(c)
		v0 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type)
		v0.AuxInt = int64ToAuxInt(d)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (ORshiftRL x (MOVDconst [c]) [d])
	// result: (ORconst x [int64(uint64(c)>>uint64(d))])
	for {
		d := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64ORconst)
		// Fold the logical (unsigned) shift of the constant at compile time.
		v.AuxInt = int64ToAuxInt(int64(uint64(c) >> uint64(d)))
		v.AddArg(x)
		return true
	}
	// match: (ORshiftRL y:(SRLconst x [c]) x [c])
	// result: y
	// (x>>>c) | (x>>>c) is just x>>>c.
	for {
		c := auxIntToInt64(v.AuxInt)
		y := v_0
		if y.Op != OpARM64SRLconst || auxIntToInt64(y.AuxInt) != c {
			break
		}
		x := y.Args[0]
		if x != v_1 {
			break
		}
		v.copyOf(y)
		return true
	}
	// match: (ORshiftRL [rc] (ANDconst [ac] x) (SLLconst [lc] y))
	// cond: lc > rc && ac == ^((1<<uint(64-lc)-1) << uint64(lc-rc))
	// result: (BFI [armBFAuxInt(lc-rc, 64-lc)] x y)
	// The AND mask ac must clear exactly the bits the insert writes, so the
	// OR can become a single bitfield-insert instruction.
	for {
		rc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64ANDconst {
			break
		}
		ac := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if v_1.Op != OpARM64SLLconst {
			break
		}
		lc := auxIntToInt64(v_1.AuxInt)
		y := v_1.Args[0]
		if !(lc > rc && ac == ^((1<<uint(64-lc)-1)<<uint64(lc-rc))) {
			break
		}
		v.reset(OpARM64BFI)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(lc-rc, 64-lc))
		v.AddArg2(x, y)
		return true
	}
	// match: (ORshiftRL [rc] (ANDconst [ac] y) (SLLconst [lc] x))
	// cond: lc < rc && ac == ^((1<<uint(64-rc)-1))
	// result: (BFXIL [armBFAuxInt(rc-lc, 64-rc)] y x)
	// Same idea but the inserted field lands at bit 0 (insert-low form).
	for {
		rc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64ANDconst {
			break
		}
		ac := auxIntToInt64(v_0.AuxInt)
		y := v_0.Args[0]
		if v_1.Op != OpARM64SLLconst {
			break
		}
		lc := auxIntToInt64(v_1.AuxInt)
		x := v_1.Args[0]
		if !(lc < rc && ac == ^(1<<uint(64-rc)-1)) {
			break
		}
		v.reset(OpARM64BFXIL)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(rc-lc, 64-rc))
		v.AddArg2(y, x)
		return true
	}
	return false
}
 20917  func rewriteValueARM64_OpARM64ORshiftRO(v *Value) bool {
 20918  	v_1 := v.Args[1]
 20919  	v_0 := v.Args[0]
 20920  	b := v.Block
 20921  	// match: (ORshiftRO (MOVDconst [c]) x [d])
 20922  	// result: (ORconst [c] (RORconst <x.Type> x [d]))
 20923  	for {
 20924  		d := auxIntToInt64(v.AuxInt)
 20925  		if v_0.Op != OpARM64MOVDconst {
 20926  			break
 20927  		}
 20928  		c := auxIntToInt64(v_0.AuxInt)
 20929  		x := v_1
 20930  		v.reset(OpARM64ORconst)
 20931  		v.AuxInt = int64ToAuxInt(c)
 20932  		v0 := b.NewValue0(v.Pos, OpARM64RORconst, x.Type)
 20933  		v0.AuxInt = int64ToAuxInt(d)
 20934  		v0.AddArg(x)
 20935  		v.AddArg(v0)
 20936  		return true
 20937  	}
 20938  	// match: (ORshiftRO x (MOVDconst [c]) [d])
 20939  	// result: (ORconst x [rotateRight64(c, d)])
 20940  	for {
 20941  		d := auxIntToInt64(v.AuxInt)
 20942  		x := v_0
 20943  		if v_1.Op != OpARM64MOVDconst {
 20944  			break
 20945  		}
 20946  		c := auxIntToInt64(v_1.AuxInt)
 20947  		v.reset(OpARM64ORconst)
 20948  		v.AuxInt = int64ToAuxInt(rotateRight64(c, d))
 20949  		v.AddArg(x)
 20950  		return true
 20951  	}
 20952  	// match: (ORshiftRO y:(RORconst x [c]) x [c])
 20953  	// result: y
 20954  	for {
 20955  		c := auxIntToInt64(v.AuxInt)
 20956  		y := v_0
 20957  		if y.Op != OpARM64RORconst || auxIntToInt64(y.AuxInt) != c {
 20958  			break
 20959  		}
 20960  		x := y.Args[0]
 20961  		if x != v_1 {
 20962  			break
 20963  		}
 20964  		v.copyOf(y)
 20965  		return true
 20966  	}
 20967  	return false
 20968  }
 20969  func rewriteValueARM64_OpARM64REV(v *Value) bool {
 20970  	v_0 := v.Args[0]
 20971  	// match: (REV (REV p))
 20972  	// result: p
 20973  	for {
 20974  		if v_0.Op != OpARM64REV {
 20975  			break
 20976  		}
 20977  		p := v_0.Args[0]
 20978  		v.copyOf(p)
 20979  		return true
 20980  	}
 20981  	return false
 20982  }
 20983  func rewriteValueARM64_OpARM64REVW(v *Value) bool {
 20984  	v_0 := v.Args[0]
 20985  	// match: (REVW (REVW p))
 20986  	// result: p
 20987  	for {
 20988  		if v_0.Op != OpARM64REVW {
 20989  			break
 20990  		}
 20991  		p := v_0.Args[0]
 20992  		v.copyOf(p)
 20993  		return true
 20994  	}
 20995  	return false
 20996  }
 20997  func rewriteValueARM64_OpARM64ROR(v *Value) bool {
 20998  	v_1 := v.Args[1]
 20999  	v_0 := v.Args[0]
 21000  	// match: (ROR x (MOVDconst [c]))
 21001  	// result: (RORconst x [c&63])
 21002  	for {
 21003  		x := v_0
 21004  		if v_1.Op != OpARM64MOVDconst {
 21005  			break
 21006  		}
 21007  		c := auxIntToInt64(v_1.AuxInt)
 21008  		v.reset(OpARM64RORconst)
 21009  		v.AuxInt = int64ToAuxInt(c & 63)
 21010  		v.AddArg(x)
 21011  		return true
 21012  	}
 21013  	return false
 21014  }
 21015  func rewriteValueARM64_OpARM64RORW(v *Value) bool {
 21016  	v_1 := v.Args[1]
 21017  	v_0 := v.Args[0]
 21018  	// match: (RORW x (MOVDconst [c]))
 21019  	// result: (RORWconst x [c&31])
 21020  	for {
 21021  		x := v_0
 21022  		if v_1.Op != OpARM64MOVDconst {
 21023  			break
 21024  		}
 21025  		c := auxIntToInt64(v_1.AuxInt)
 21026  		v.reset(OpARM64RORWconst)
 21027  		v.AuxInt = int64ToAuxInt(c & 31)
 21028  		v.AddArg(x)
 21029  		return true
 21030  	}
 21031  	return false
 21032  }
// rewriteValueARM64_OpARM64SBCSflags applies the generated ARM64.rules
// rewrites for SBCSflags (subtract-with-borrow, setting flags): it elides a
// borrow that was round-tripped through the flags register, and degrades to
// a plain SUBSflags when the incoming borrow is the constant 0. Reports
// whether v was rewritten.
func rewriteValueARM64_OpARM64SBCSflags(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (SBCSflags x y (Select1 <types.TypeFlags> (NEGSflags (NEG <typ.UInt64> (NGCzerocarry <typ.UInt64> bo)))))
	// result: (SBCSflags x y bo)
	// Peel off the materialize-then-renegate sequence and feed the original
	// borrow value bo straight back into the subtract.
	for {
		x := v_0
		y := v_1
		if v_2.Op != OpSelect1 || v_2.Type != types.TypeFlags {
			break
		}
		v_2_0 := v_2.Args[0]
		if v_2_0.Op != OpARM64NEGSflags {
			break
		}
		v_2_0_0 := v_2_0.Args[0]
		if v_2_0_0.Op != OpARM64NEG || v_2_0_0.Type != typ.UInt64 {
			break
		}
		v_2_0_0_0 := v_2_0_0.Args[0]
		if v_2_0_0_0.Op != OpARM64NGCzerocarry || v_2_0_0_0.Type != typ.UInt64 {
			break
		}
		bo := v_2_0_0_0.Args[0]
		v.reset(OpARM64SBCSflags)
		v.AddArg3(x, y, bo)
		return true
	}
	// match: (SBCSflags x y (Select1 <types.TypeFlags> (NEGSflags (MOVDconst [0]))))
	// result: (SUBSflags x y)
	// With a known-zero borrow the borrowing subtract is an ordinary
	// flag-setting subtract.
	for {
		x := v_0
		y := v_1
		if v_2.Op != OpSelect1 || v_2.Type != types.TypeFlags {
			break
		}
		v_2_0 := v_2.Args[0]
		if v_2_0.Op != OpARM64NEGSflags {
			break
		}
		v_2_0_0 := v_2_0.Args[0]
		if v_2_0_0.Op != OpARM64MOVDconst || auxIntToInt64(v_2_0_0.AuxInt) != 0 {
			break
		}
		v.reset(OpARM64SUBSflags)
		v.AddArg2(x, y)
		return true
	}
	return false
}
 21086  func rewriteValueARM64_OpARM64SLL(v *Value) bool {
 21087  	v_1 := v.Args[1]
 21088  	v_0 := v.Args[0]
 21089  	// match: (SLL x (MOVDconst [c]))
 21090  	// result: (SLLconst x [c&63])
 21091  	for {
 21092  		x := v_0
 21093  		if v_1.Op != OpARM64MOVDconst {
 21094  			break
 21095  		}
 21096  		c := auxIntToInt64(v_1.AuxInt)
 21097  		v.reset(OpARM64SLLconst)
 21098  		v.AuxInt = int64ToAuxInt(c & 63)
 21099  		v.AddArg(x)
 21100  		return true
 21101  	}
 21102  	// match: (SLL x (ANDconst [63] y))
 21103  	// result: (SLL x y)
 21104  	for {
 21105  		x := v_0
 21106  		if v_1.Op != OpARM64ANDconst || auxIntToInt64(v_1.AuxInt) != 63 {
 21107  			break
 21108  		}
 21109  		y := v_1.Args[0]
 21110  		v.reset(OpARM64SLL)
 21111  		v.AddArg2(x, y)
 21112  		return true
 21113  	}
 21114  	return false
 21115  }
// rewriteValueARM64_OpARM64SLLconst applies the generated ARM64.rules
// rewrites for SLLconst (shift left by a constant): constant folding,
// shift/unshift cancellation, and recognition of sign/zero-extensions
// followed by a shift as bitfield insert-in-zero ops (SBFIZ/UBFIZ).
// Reports whether v was rewritten.
func rewriteValueARM64_OpARM64SLLconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SLLconst [c] (MOVDconst [d]))
	// result: (MOVDconst [d<<uint64(c)])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(d << uint64(c))
		return true
	}
	// match: (SLLconst [c] (SRLconst [c] x))
	// cond: 0 < c && c < 64
	// result: (ANDconst [^(1<<uint(c)-1)] x)
	// Shifting right then left by the same amount just clears the low c bits.
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64SRLconst || auxIntToInt64(v_0.AuxInt) != c {
			break
		}
		x := v_0.Args[0]
		if !(0 < c && c < 64) {
			break
		}
		v.reset(OpARM64ANDconst)
		v.AuxInt = int64ToAuxInt(^(1<<uint(c) - 1))
		v.AddArg(x)
		return true
	}
	// match: (SLLconst [lc] (MOVWreg x))
	// result: (SBFIZ [armBFAuxInt(lc, min(32, 64-lc))] x)
	// A sign-extension followed by a left shift is a signed bitfield
	// insert-in-zero; the field width is capped by what fits above lc.
	for {
		lc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVWreg {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64SBFIZ)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(lc, min(32, 64-lc)))
		v.AddArg(x)
		return true
	}
	// match: (SLLconst [lc] (MOVHreg x))
	// result: (SBFIZ [armBFAuxInt(lc, min(16, 64-lc))] x)
	for {
		lc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVHreg {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64SBFIZ)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(lc, min(16, 64-lc)))
		v.AddArg(x)
		return true
	}
	// match: (SLLconst [lc] (MOVBreg x))
	// result: (SBFIZ [armBFAuxInt(lc, min(8, 64-lc))] x)
	for {
		lc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVBreg {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64SBFIZ)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(lc, min(8, 64-lc)))
		v.AddArg(x)
		return true
	}
	// match: (SLLconst [lc] (MOVWUreg x))
	// result: (UBFIZ [armBFAuxInt(lc, min(32, 64-lc))] x)
	// The unsigned-extension variants become the unsigned bitfield op.
	for {
		lc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVWUreg {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64UBFIZ)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(lc, min(32, 64-lc)))
		v.AddArg(x)
		return true
	}
	// match: (SLLconst [lc] (MOVHUreg x))
	// result: (UBFIZ [armBFAuxInt(lc, min(16, 64-lc))] x)
	for {
		lc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVHUreg {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64UBFIZ)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(lc, min(16, 64-lc)))
		v.AddArg(x)
		return true
	}
	// match: (SLLconst [lc] (MOVBUreg x))
	// result: (UBFIZ [armBFAuxInt(lc, min(8, 64-lc))] x)
	for {
		lc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVBUreg {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64UBFIZ)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(lc, min(8, 64-lc)))
		v.AddArg(x)
		return true
	}
	// match: (SLLconst [sc] (ANDconst [ac] x))
	// cond: isARM64BFMask(sc, ac, 0)
	// result: (UBFIZ [armBFAuxInt(sc, arm64BFWidth(ac, 0))] x)
	// An AND with a contiguous low mask followed by a shift is UBFIZ.
	for {
		sc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64ANDconst {
			break
		}
		ac := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(isARM64BFMask(sc, ac, 0)) {
			break
		}
		v.reset(OpARM64UBFIZ)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(sc, arm64BFWidth(ac, 0)))
		v.AddArg(x)
		return true
	}
	// match: (SLLconst [sc] (UBFIZ [bfc] x))
	// cond: sc+bfc.getARM64BFwidth()+bfc.getARM64BFlsb() < 64
	// result: (UBFIZ [armBFAuxInt(bfc.getARM64BFlsb()+sc, bfc.getARM64BFwidth())] x)
	// Shifting an existing UBFIZ left just moves the field up, provided the
	// field still fits in 64 bits.
	for {
		sc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64UBFIZ {
			break
		}
		bfc := auxIntToArm64BitField(v_0.AuxInt)
		x := v_0.Args[0]
		if !(sc+bfc.getARM64BFwidth()+bfc.getARM64BFlsb() < 64) {
			break
		}
		v.reset(OpARM64UBFIZ)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(bfc.getARM64BFlsb()+sc, bfc.getARM64BFwidth()))
		v.AddArg(x)
		return true
	}
	return false
}
 21263  func rewriteValueARM64_OpARM64SRA(v *Value) bool {
 21264  	v_1 := v.Args[1]
 21265  	v_0 := v.Args[0]
 21266  	// match: (SRA x (MOVDconst [c]))
 21267  	// result: (SRAconst x [c&63])
 21268  	for {
 21269  		x := v_0
 21270  		if v_1.Op != OpARM64MOVDconst {
 21271  			break
 21272  		}
 21273  		c := auxIntToInt64(v_1.AuxInt)
 21274  		v.reset(OpARM64SRAconst)
 21275  		v.AuxInt = int64ToAuxInt(c & 63)
 21276  		v.AddArg(x)
 21277  		return true
 21278  	}
 21279  	// match: (SRA x (ANDconst [63] y))
 21280  	// result: (SRA x y)
 21281  	for {
 21282  		x := v_0
 21283  		if v_1.Op != OpARM64ANDconst || auxIntToInt64(v_1.AuxInt) != 63 {
 21284  			break
 21285  		}
 21286  		y := v_1.Args[0]
 21287  		v.reset(OpARM64SRA)
 21288  		v.AddArg2(x, y)
 21289  		return true
 21290  	}
 21291  	return false
 21292  }
// rewriteValueARM64_OpARM64SRAconst applies the generated ARM64.rules
// rewrites for SRAconst (arithmetic shift right by a constant): constant
// folding, and combining with a preceding left shift or sign-extension into
// signed bitfield ops (SBFIZ/SBFX). Reports whether v was rewritten.
func rewriteValueARM64_OpARM64SRAconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SRAconst [c] (MOVDconst [d]))
	// result: (MOVDconst [d>>uint64(c)])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(d >> uint64(c))
		return true
	}
	// match: (SRAconst [rc] (SLLconst [lc] x))
	// cond: lc > rc
	// result: (SBFIZ [armBFAuxInt(lc-rc, 64-lc)] x)
	// Left then right by less: net effect is a signed insert-in-zero.
	for {
		rc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64SLLconst {
			break
		}
		lc := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(lc > rc) {
			break
		}
		v.reset(OpARM64SBFIZ)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(lc-rc, 64-lc))
		v.AddArg(x)
		return true
	}
	// match: (SRAconst [rc] (SLLconst [lc] x))
	// cond: lc <= rc
	// result: (SBFX [armBFAuxInt(rc-lc, 64-rc)] x)
	// Left then right by at least as much: net effect is a signed extract.
	for {
		rc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64SLLconst {
			break
		}
		lc := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(lc <= rc) {
			break
		}
		v.reset(OpARM64SBFX)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(rc-lc, 64-rc))
		v.AddArg(x)
		return true
	}
	// match: (SRAconst [rc] (MOVWreg x))
	// cond: rc < 32
	// result: (SBFX [armBFAuxInt(rc, 32-rc)] x)
	// Arithmetic shift of a sign-extended value is a signed field extract.
	for {
		rc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVWreg {
			break
		}
		x := v_0.Args[0]
		if !(rc < 32) {
			break
		}
		v.reset(OpARM64SBFX)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(rc, 32-rc))
		v.AddArg(x)
		return true
	}
	// match: (SRAconst [rc] (MOVHreg x))
	// cond: rc < 16
	// result: (SBFX [armBFAuxInt(rc, 16-rc)] x)
	for {
		rc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVHreg {
			break
		}
		x := v_0.Args[0]
		if !(rc < 16) {
			break
		}
		v.reset(OpARM64SBFX)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(rc, 16-rc))
		v.AddArg(x)
		return true
	}
	// match: (SRAconst [rc] (MOVBreg x))
	// cond: rc < 8
	// result: (SBFX [armBFAuxInt(rc, 8-rc)] x)
	for {
		rc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVBreg {
			break
		}
		x := v_0.Args[0]
		if !(rc < 8) {
			break
		}
		v.reset(OpARM64SBFX)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(rc, 8-rc))
		v.AddArg(x)
		return true
	}
	// match: (SRAconst [sc] (SBFIZ [bfc] x))
	// cond: sc < bfc.getARM64BFlsb()
	// result: (SBFIZ [armBFAuxInt(bfc.getARM64BFlsb()-sc, bfc.getARM64BFwidth())] x)
	// Shifting an SBFIZ result right by less than its lsb just lowers the
	// field position.
	for {
		sc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64SBFIZ {
			break
		}
		bfc := auxIntToArm64BitField(v_0.AuxInt)
		x := v_0.Args[0]
		if !(sc < bfc.getARM64BFlsb()) {
			break
		}
		v.reset(OpARM64SBFIZ)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(bfc.getARM64BFlsb()-sc, bfc.getARM64BFwidth()))
		v.AddArg(x)
		return true
	}
	// match: (SRAconst [sc] (SBFIZ [bfc] x))
	// cond: sc >= bfc.getARM64BFlsb() && sc < bfc.getARM64BFlsb()+bfc.getARM64BFwidth()
	// result: (SBFX [armBFAuxInt(sc-bfc.getARM64BFlsb(), bfc.getARM64BFlsb()+bfc.getARM64BFwidth()-sc)] x)
	// Shifting into the middle of the inserted field turns the insert into
	// an extract of the remaining high part.
	for {
		sc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64SBFIZ {
			break
		}
		bfc := auxIntToArm64BitField(v_0.AuxInt)
		x := v_0.Args[0]
		if !(sc >= bfc.getARM64BFlsb() && sc < bfc.getARM64BFlsb()+bfc.getARM64BFwidth()) {
			break
		}
		v.reset(OpARM64SBFX)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(sc-bfc.getARM64BFlsb(), bfc.getARM64BFlsb()+bfc.getARM64BFwidth()-sc))
		v.AddArg(x)
		return true
	}
	return false
}
 21432  func rewriteValueARM64_OpARM64SRL(v *Value) bool {
 21433  	v_1 := v.Args[1]
 21434  	v_0 := v.Args[0]
 21435  	// match: (SRL x (MOVDconst [c]))
 21436  	// result: (SRLconst x [c&63])
 21437  	for {
 21438  		x := v_0
 21439  		if v_1.Op != OpARM64MOVDconst {
 21440  			break
 21441  		}
 21442  		c := auxIntToInt64(v_1.AuxInt)
 21443  		v.reset(OpARM64SRLconst)
 21444  		v.AuxInt = int64ToAuxInt(c & 63)
 21445  		v.AddArg(x)
 21446  		return true
 21447  	}
 21448  	// match: (SRL x (ANDconst [63] y))
 21449  	// result: (SRL x y)
 21450  	for {
 21451  		x := v_0
 21452  		if v_1.Op != OpARM64ANDconst || auxIntToInt64(v_1.AuxInt) != 63 {
 21453  			break
 21454  		}
 21455  		y := v_1.Args[0]
 21456  		v.reset(OpARM64SRL)
 21457  		v.AddArg2(x, y)
 21458  		return true
 21459  	}
 21460  	return false
 21461  }
// rewriteValueARM64_OpARM64SRLconst rewrites SRLconst (64-bit logical
// shift right by a constant) values: constant folding, shifts of
// zero-extensions, and combining with SLLconst/ANDconst/UBFX/UBFIZ
// into unsigned-bitfield operations (UBFIZ/UBFX).
// Rules are tried strictly in order — e.g. the rc>=32 MOVWUreg rule
// (result is all zeros) is checked before the rc<32 one (bitfield
// extract), so the conditions together cover every rc.
func rewriteValueARM64_OpARM64SRLconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SRLconst [c] (MOVDconst [d]))
	// result: (MOVDconst [int64(uint64(d)>>uint64(c))])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpARM64MOVDconst)
		// Unsigned conversion makes this a logical (zero-filling) shift.
		v.AuxInt = int64ToAuxInt(int64(uint64(d) >> uint64(c)))
		return true
	}
	// match: (SRLconst [c] (SLLconst [c] x))
	// cond: 0 < c && c < 64
	// result: (ANDconst [1<<uint(64-c)-1] x)
	// Shift left then right by the same amount just clears the top c bits.
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != c {
			break
		}
		x := v_0.Args[0]
		if !(0 < c && c < 64) {
			break
		}
		v.reset(OpARM64ANDconst)
		v.AuxInt = int64ToAuxInt(1<<uint(64-c) - 1)
		v.AddArg(x)
		return true
	}
	// match: (SRLconst [rc] (MOVWUreg x))
	// cond: rc >= 32
	// result: (MOVDconst [0])
	// Shifting a 32-bit zero-extended value right by >= 32 leaves nothing.
	for {
		rc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVWUreg {
			break
		}
		if !(rc >= 32) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SRLconst [rc] (MOVHUreg x))
	// cond: rc >= 16
	// result: (MOVDconst [0])
	for {
		rc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVHUreg {
			break
		}
		if !(rc >= 16) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SRLconst [rc] (MOVBUreg x))
	// cond: rc >= 8
	// result: (MOVDconst [0])
	for {
		rc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVBUreg {
			break
		}
		if !(rc >= 8) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SRLconst [rc] (SLLconst [lc] x))
	// cond: lc > rc
	// result: (UBFIZ [armBFAuxInt(lc-rc, 64-lc)] x)
	// Left shift dominates: net effect inserts a bitfield at lsb lc-rc.
	for {
		rc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64SLLconst {
			break
		}
		lc := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(lc > rc) {
			break
		}
		v.reset(OpARM64UBFIZ)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(lc-rc, 64-lc))
		v.AddArg(x)
		return true
	}
	// match: (SRLconst [rc] (SLLconst [lc] x))
	// cond: lc < rc
	// result: (UBFX [armBFAuxInt(rc-lc, 64-rc)] x)
	// Right shift dominates: net effect extracts a bitfield from lsb rc-lc.
	for {
		rc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64SLLconst {
			break
		}
		lc := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(lc < rc) {
			break
		}
		v.reset(OpARM64UBFX)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(rc-lc, 64-rc))
		v.AddArg(x)
		return true
	}
	// match: (SRLconst [rc] (MOVWUreg x))
	// cond: rc < 32
	// result: (UBFX [armBFAuxInt(rc, 32-rc)] x)
	// (Complements the rc >= 32 rule above.)
	for {
		rc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVWUreg {
			break
		}
		x := v_0.Args[0]
		if !(rc < 32) {
			break
		}
		v.reset(OpARM64UBFX)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(rc, 32-rc))
		v.AddArg(x)
		return true
	}
	// match: (SRLconst [rc] (MOVHUreg x))
	// cond: rc < 16
	// result: (UBFX [armBFAuxInt(rc, 16-rc)] x)
	for {
		rc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVHUreg {
			break
		}
		x := v_0.Args[0]
		if !(rc < 16) {
			break
		}
		v.reset(OpARM64UBFX)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(rc, 16-rc))
		v.AddArg(x)
		return true
	}
	// match: (SRLconst [rc] (MOVBUreg x))
	// cond: rc < 8
	// result: (UBFX [armBFAuxInt(rc, 8-rc)] x)
	for {
		rc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVBUreg {
			break
		}
		x := v_0.Args[0]
		if !(rc < 8) {
			break
		}
		v.reset(OpARM64UBFX)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(rc, 8-rc))
		v.AddArg(x)
		return true
	}
	// match: (SRLconst [sc] (ANDconst [ac] x))
	// cond: isARM64BFMask(sc, ac, sc)
	// result: (UBFX [armBFAuxInt(sc, arm64BFWidth(ac, sc))] x)
	// Mask-then-shift collapses to a single bitfield extract when the
	// mask (shifted by sc) is a valid contiguous bitfield mask.
	for {
		sc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64ANDconst {
			break
		}
		ac := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(isARM64BFMask(sc, ac, sc)) {
			break
		}
		v.reset(OpARM64UBFX)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(sc, arm64BFWidth(ac, sc)))
		v.AddArg(x)
		return true
	}
	// match: (SRLconst [sc] (UBFX [bfc] x))
	// cond: sc < bfc.getARM64BFwidth()
	// result: (UBFX [armBFAuxInt(bfc.getARM64BFlsb()+sc, bfc.getARM64BFwidth()-sc)] x)
	// Shifting an extracted field just narrows the extract.
	for {
		sc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64UBFX {
			break
		}
		bfc := auxIntToArm64BitField(v_0.AuxInt)
		x := v_0.Args[0]
		if !(sc < bfc.getARM64BFwidth()) {
			break
		}
		v.reset(OpARM64UBFX)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(bfc.getARM64BFlsb()+sc, bfc.getARM64BFwidth()-sc))
		v.AddArg(x)
		return true
	}
	// match: (SRLconst [sc] (UBFIZ [bfc] x))
	// cond: sc == bfc.getARM64BFlsb()
	// result: (ANDconst [1<<uint(bfc.getARM64BFwidth())-1] x)
	// The shift exactly undoes the insert's lsb offset; only the mask remains.
	for {
		sc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64UBFIZ {
			break
		}
		bfc := auxIntToArm64BitField(v_0.AuxInt)
		x := v_0.Args[0]
		if !(sc == bfc.getARM64BFlsb()) {
			break
		}
		v.reset(OpARM64ANDconst)
		v.AuxInt = int64ToAuxInt(1<<uint(bfc.getARM64BFwidth()) - 1)
		v.AddArg(x)
		return true
	}
	// match: (SRLconst [sc] (UBFIZ [bfc] x))
	// cond: sc < bfc.getARM64BFlsb()
	// result: (UBFIZ [armBFAuxInt(bfc.getARM64BFlsb()-sc, bfc.getARM64BFwidth())] x)
	for {
		sc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64UBFIZ {
			break
		}
		bfc := auxIntToArm64BitField(v_0.AuxInt)
		x := v_0.Args[0]
		if !(sc < bfc.getARM64BFlsb()) {
			break
		}
		v.reset(OpARM64UBFIZ)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(bfc.getARM64BFlsb()-sc, bfc.getARM64BFwidth()))
		v.AddArg(x)
		return true
	}
	// match: (SRLconst [sc] (UBFIZ [bfc] x))
	// cond: sc > bfc.getARM64BFlsb() && sc < bfc.getARM64BFlsb()+bfc.getARM64BFwidth()
	// result: (UBFX [armBFAuxInt(sc-bfc.getARM64BFlsb(), bfc.getARM64BFlsb()+bfc.getARM64BFwidth()-sc)] x)
	for {
		sc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64UBFIZ {
			break
		}
		bfc := auxIntToArm64BitField(v_0.AuxInt)
		x := v_0.Args[0]
		if !(sc > bfc.getARM64BFlsb() && sc < bfc.getARM64BFlsb()+bfc.getARM64BFwidth()) {
			break
		}
		v.reset(OpARM64UBFX)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(sc-bfc.getARM64BFlsb(), bfc.getARM64BFlsb()+bfc.getARM64BFwidth()-sc))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64STP rewrites STP (store-pair) values:
// constant and symbolic address arithmetic is folded into the store's
// offset/symbol aux fields, and a store of two zero constants becomes
// a MOVQstorezero. Rules are tried in order; the first match wins.
func rewriteValueARM64_OpARM64STP(v *Value) bool {
	v_3 := v.Args[3]
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (STP [off1] {sym} (ADDconst [off2] ptr) val1 val2 mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (STP [off1+int32(off2)] {sym} ptr val1 val2 mem)
	// Fold a constant pointer adjustment into the store offset, as long
	// as the combined offset fits in 32 bits. The ptr.Op != OpSB check
	// avoids folding SB-relative addresses when building in shared mode.
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		val1 := v_1
		val2 := v_2
		mem := v_3
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64STP)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg4(ptr, val1, val2, mem)
		return true
	}
	// match: (STP [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val1 val2 mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (STP [off1+off2] {mergeSym(sym1,sym2)} ptr val1 val2 mem)
	// Fold a symbolic address computation into the store, merging the
	// two symbols when canMergeSym allows it.
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		val1 := v_1
		val2 := v_2
		mem := v_3
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64STP)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg4(ptr, val1, val2, mem)
		return true
	}
	// match: (STP [off] {sym} ptr (MOVDconst [0]) (MOVDconst [0]) mem)
	// result: (MOVQstorezero [off] {sym} ptr mem)
	// Storing a pair of zeros is a 16-byte zeroing store.
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 || v_2.Op != OpARM64MOVDconst || auxIntToInt64(v_2.AuxInt) != 0 {
			break
		}
		mem := v_3
		v.reset(OpARM64MOVQstorezero)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64SUB rewrites SUB (64-bit subtract) values:
// it folds constant subtrahends into SUBconst, fuses a single-use
// multiply (or negated multiply) into MSUB/MADD(W), simplifies x-x and
// nested subtractions, and absorbs a constant shift of the subtrahend
// into the SUBshift* forms. Rules are tried in order; first match wins.
func rewriteValueARM64_OpARM64SUB(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (SUB x (MOVDconst [c]))
	// result: (SUBconst [c] x)
	for {
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64SUBconst)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(x)
		return true
	}
	// match: (SUB a l:(MUL x y))
	// cond: l.Uses==1 && clobber(l)
	// result: (MSUB a x y)
	// Fuse only when the multiply has no other users (Uses==1), since
	// the fused form replaces it entirely.
	for {
		a := v_0
		l := v_1
		if l.Op != OpARM64MUL {
			break
		}
		y := l.Args[1]
		x := l.Args[0]
		if !(l.Uses == 1 && clobber(l)) {
			break
		}
		v.reset(OpARM64MSUB)
		v.AddArg3(a, x, y)
		return true
	}
	// match: (SUB a l:(MNEG x y))
	// cond: l.Uses==1 && clobber(l)
	// result: (MADD a x y)
	// Subtracting a negated product is adding the product.
	for {
		a := v_0
		l := v_1
		if l.Op != OpARM64MNEG {
			break
		}
		y := l.Args[1]
		x := l.Args[0]
		if !(l.Uses == 1 && clobber(l)) {
			break
		}
		v.reset(OpARM64MADD)
		v.AddArg3(a, x, y)
		return true
	}
	// match: (SUB a l:(MULW x y))
	// cond: a.Type.Size() != 8 && l.Uses==1 && clobber(l)
	// result: (MSUBW a x y)
	// 32-bit variant: only applies when the result is not a full
	// 64-bit value (a.Type.Size() != 8).
	for {
		a := v_0
		l := v_1
		if l.Op != OpARM64MULW {
			break
		}
		y := l.Args[1]
		x := l.Args[0]
		if !(a.Type.Size() != 8 && l.Uses == 1 && clobber(l)) {
			break
		}
		v.reset(OpARM64MSUBW)
		v.AddArg3(a, x, y)
		return true
	}
	// match: (SUB a l:(MNEGW x y))
	// cond: a.Type.Size() != 8 && l.Uses==1 && clobber(l)
	// result: (MADDW a x y)
	for {
		a := v_0
		l := v_1
		if l.Op != OpARM64MNEGW {
			break
		}
		y := l.Args[1]
		x := l.Args[0]
		if !(a.Type.Size() != 8 && l.Uses == 1 && clobber(l)) {
			break
		}
		v.reset(OpARM64MADDW)
		v.AddArg3(a, x, y)
		return true
	}
	// match: (SUB x x)
	// result: (MOVDconst [0])
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SUB x (SUB y z))
	// result: (SUB (ADD <v.Type> x z) y)
	// Reassociate: x-(y-z) == (x+z)-y.
	for {
		x := v_0
		if v_1.Op != OpARM64SUB {
			break
		}
		z := v_1.Args[1]
		y := v_1.Args[0]
		v.reset(OpARM64SUB)
		v0 := b.NewValue0(v.Pos, OpARM64ADD, v.Type)
		v0.AddArg2(x, z)
		v.AddArg2(v0, y)
		return true
	}
	// match: (SUB (SUB x y) z)
	// result: (SUB x (ADD <y.Type> y z))
	// Reassociate: (x-y)-z == x-(y+z).
	for {
		if v_0.Op != OpARM64SUB {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		z := v_1
		v.reset(OpARM64SUB)
		v0 := b.NewValue0(v.Pos, OpARM64ADD, y.Type)
		v0.AddArg2(y, z)
		v.AddArg2(x, v0)
		return true
	}
	// match: (SUB x0 x1:(SLLconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (SUBshiftLL x0 y [c])
	// Absorb a constant shift of the subtrahend into the shifted-operand
	// form of SUB; clobberIfDead only permits this when x1 is otherwise dead.
	for {
		x0 := v_0
		x1 := v_1
		if x1.Op != OpARM64SLLconst {
			break
		}
		c := auxIntToInt64(x1.AuxInt)
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64SUBshiftLL)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg2(x0, y)
		return true
	}
	// match: (SUB x0 x1:(SRLconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (SUBshiftRL x0 y [c])
	for {
		x0 := v_0
		x1 := v_1
		if x1.Op != OpARM64SRLconst {
			break
		}
		c := auxIntToInt64(x1.AuxInt)
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64SUBshiftRL)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg2(x0, y)
		return true
	}
	// match: (SUB x0 x1:(SRAconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (SUBshiftRA x0 y [c])
	for {
		x0 := v_0
		x1 := v_1
		if x1.Op != OpARM64SRAconst {
			break
		}
		c := auxIntToInt64(x1.AuxInt)
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64SUBshiftRA)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg2(x0, y)
		return true
	}
	return false
}
 21978  func rewriteValueARM64_OpARM64SUBconst(v *Value) bool {
 21979  	v_0 := v.Args[0]
 21980  	// match: (SUBconst [0] x)
 21981  	// result: x
 21982  	for {
 21983  		if auxIntToInt64(v.AuxInt) != 0 {
 21984  			break
 21985  		}
 21986  		x := v_0
 21987  		v.copyOf(x)
 21988  		return true
 21989  	}
 21990  	// match: (SUBconst [c] (MOVDconst [d]))
 21991  	// result: (MOVDconst [d-c])
 21992  	for {
 21993  		c := auxIntToInt64(v.AuxInt)
 21994  		if v_0.Op != OpARM64MOVDconst {
 21995  			break
 21996  		}
 21997  		d := auxIntToInt64(v_0.AuxInt)
 21998  		v.reset(OpARM64MOVDconst)
 21999  		v.AuxInt = int64ToAuxInt(d - c)
 22000  		return true
 22001  	}
 22002  	// match: (SUBconst [c] (SUBconst [d] x))
 22003  	// result: (ADDconst [-c-d] x)
 22004  	for {
 22005  		c := auxIntToInt64(v.AuxInt)
 22006  		if v_0.Op != OpARM64SUBconst {
 22007  			break
 22008  		}
 22009  		d := auxIntToInt64(v_0.AuxInt)
 22010  		x := v_0.Args[0]
 22011  		v.reset(OpARM64ADDconst)
 22012  		v.AuxInt = int64ToAuxInt(-c - d)
 22013  		v.AddArg(x)
 22014  		return true
 22015  	}
 22016  	// match: (SUBconst [c] (ADDconst [d] x))
 22017  	// result: (ADDconst [-c+d] x)
 22018  	for {
 22019  		c := auxIntToInt64(v.AuxInt)
 22020  		if v_0.Op != OpARM64ADDconst {
 22021  			break
 22022  		}
 22023  		d := auxIntToInt64(v_0.AuxInt)
 22024  		x := v_0.Args[0]
 22025  		v.reset(OpARM64ADDconst)
 22026  		v.AuxInt = int64ToAuxInt(-c + d)
 22027  		v.AddArg(x)
 22028  		return true
 22029  	}
 22030  	return false
 22031  }
 22032  func rewriteValueARM64_OpARM64SUBshiftLL(v *Value) bool {
 22033  	v_1 := v.Args[1]
 22034  	v_0 := v.Args[0]
 22035  	// match: (SUBshiftLL x (MOVDconst [c]) [d])
 22036  	// result: (SUBconst x [int64(uint64(c)<<uint64(d))])
 22037  	for {
 22038  		d := auxIntToInt64(v.AuxInt)
 22039  		x := v_0
 22040  		if v_1.Op != OpARM64MOVDconst {
 22041  			break
 22042  		}
 22043  		c := auxIntToInt64(v_1.AuxInt)
 22044  		v.reset(OpARM64SUBconst)
 22045  		v.AuxInt = int64ToAuxInt(int64(uint64(c) << uint64(d)))
 22046  		v.AddArg(x)
 22047  		return true
 22048  	}
 22049  	// match: (SUBshiftLL (SLLconst x [c]) x [c])
 22050  	// result: (MOVDconst [0])
 22051  	for {
 22052  		c := auxIntToInt64(v.AuxInt)
 22053  		if v_0.Op != OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != c {
 22054  			break
 22055  		}
 22056  		x := v_0.Args[0]
 22057  		if x != v_1 {
 22058  			break
 22059  		}
 22060  		v.reset(OpARM64MOVDconst)
 22061  		v.AuxInt = int64ToAuxInt(0)
 22062  		return true
 22063  	}
 22064  	return false
 22065  }
 22066  func rewriteValueARM64_OpARM64SUBshiftRA(v *Value) bool {
 22067  	v_1 := v.Args[1]
 22068  	v_0 := v.Args[0]
 22069  	// match: (SUBshiftRA x (MOVDconst [c]) [d])
 22070  	// result: (SUBconst x [c>>uint64(d)])
 22071  	for {
 22072  		d := auxIntToInt64(v.AuxInt)
 22073  		x := v_0
 22074  		if v_1.Op != OpARM64MOVDconst {
 22075  			break
 22076  		}
 22077  		c := auxIntToInt64(v_1.AuxInt)
 22078  		v.reset(OpARM64SUBconst)
 22079  		v.AuxInt = int64ToAuxInt(c >> uint64(d))
 22080  		v.AddArg(x)
 22081  		return true
 22082  	}
 22083  	// match: (SUBshiftRA (SRAconst x [c]) x [c])
 22084  	// result: (MOVDconst [0])
 22085  	for {
 22086  		c := auxIntToInt64(v.AuxInt)
 22087  		if v_0.Op != OpARM64SRAconst || auxIntToInt64(v_0.AuxInt) != c {
 22088  			break
 22089  		}
 22090  		x := v_0.Args[0]
 22091  		if x != v_1 {
 22092  			break
 22093  		}
 22094  		v.reset(OpARM64MOVDconst)
 22095  		v.AuxInt = int64ToAuxInt(0)
 22096  		return true
 22097  	}
 22098  	return false
 22099  }
 22100  func rewriteValueARM64_OpARM64SUBshiftRL(v *Value) bool {
 22101  	v_1 := v.Args[1]
 22102  	v_0 := v.Args[0]
 22103  	// match: (SUBshiftRL x (MOVDconst [c]) [d])
 22104  	// result: (SUBconst x [int64(uint64(c)>>uint64(d))])
 22105  	for {
 22106  		d := auxIntToInt64(v.AuxInt)
 22107  		x := v_0
 22108  		if v_1.Op != OpARM64MOVDconst {
 22109  			break
 22110  		}
 22111  		c := auxIntToInt64(v_1.AuxInt)
 22112  		v.reset(OpARM64SUBconst)
 22113  		v.AuxInt = int64ToAuxInt(int64(uint64(c) >> uint64(d)))
 22114  		v.AddArg(x)
 22115  		return true
 22116  	}
 22117  	// match: (SUBshiftRL (SRLconst x [c]) x [c])
 22118  	// result: (MOVDconst [0])
 22119  	for {
 22120  		c := auxIntToInt64(v.AuxInt)
 22121  		if v_0.Op != OpARM64SRLconst || auxIntToInt64(v_0.AuxInt) != c {
 22122  			break
 22123  		}
 22124  		x := v_0.Args[0]
 22125  		if x != v_1 {
 22126  			break
 22127  		}
 22128  		v.reset(OpARM64MOVDconst)
 22129  		v.AuxInt = int64ToAuxInt(0)
 22130  		return true
 22131  	}
 22132  	return false
 22133  }
// rewriteValueARM64_OpARM64TST rewrites TST (test bits: flags from
// x & y) values. TST is commutative, so every rule is tried with the
// operands in both orders: the inner _i0 loop swaps v_0 and v_1 on
// its second iteration. Rules are tried in order; first match wins.
func rewriteValueARM64_OpARM64TST(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (TST x (MOVDconst [c]))
	// result: (TSTconst [c] x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			v.reset(OpARM64TSTconst)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (TST x0 x1:(SLLconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (TSTshiftLL x0 y [c])
	// Absorb a constant shift into the shifted-operand TST form;
	// clobberIfDead only permits this when x1 is otherwise dead.
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x0 := v_0
			x1 := v_1
			if x1.Op != OpARM64SLLconst {
				continue
			}
			c := auxIntToInt64(x1.AuxInt)
			y := x1.Args[0]
			if !(clobberIfDead(x1)) {
				continue
			}
			v.reset(OpARM64TSTshiftLL)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg2(x0, y)
			return true
		}
		break
	}
	// match: (TST x0 x1:(SRLconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (TSTshiftRL x0 y [c])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x0 := v_0
			x1 := v_1
			if x1.Op != OpARM64SRLconst {
				continue
			}
			c := auxIntToInt64(x1.AuxInt)
			y := x1.Args[0]
			if !(clobberIfDead(x1)) {
				continue
			}
			v.reset(OpARM64TSTshiftRL)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg2(x0, y)
			return true
		}
		break
	}
	// match: (TST x0 x1:(SRAconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (TSTshiftRA x0 y [c])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x0 := v_0
			x1 := v_1
			if x1.Op != OpARM64SRAconst {
				continue
			}
			c := auxIntToInt64(x1.AuxInt)
			y := x1.Args[0]
			if !(clobberIfDead(x1)) {
				continue
			}
			v.reset(OpARM64TSTshiftRA)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg2(x0, y)
			return true
		}
		break
	}
	// match: (TST x0 x1:(RORconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (TSTshiftRO x0 y [c])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x0 := v_0
			x1 := v_1
			if x1.Op != OpARM64RORconst {
				continue
			}
			c := auxIntToInt64(x1.AuxInt)
			y := x1.Args[0]
			if !(clobberIfDead(x1)) {
				continue
			}
			v.reset(OpARM64TSTshiftRO)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg2(x0, y)
			return true
		}
		break
	}
	return false
}
 22243  func rewriteValueARM64_OpARM64TSTW(v *Value) bool {
 22244  	v_1 := v.Args[1]
 22245  	v_0 := v.Args[0]
 22246  	// match: (TSTW x (MOVDconst [c]))
 22247  	// result: (TSTWconst [int32(c)] x)
 22248  	for {
 22249  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 22250  			x := v_0
 22251  			if v_1.Op != OpARM64MOVDconst {
 22252  				continue
 22253  			}
 22254  			c := auxIntToInt64(v_1.AuxInt)
 22255  			v.reset(OpARM64TSTWconst)
 22256  			v.AuxInt = int32ToAuxInt(int32(c))
 22257  			v.AddArg(x)
 22258  			return true
 22259  		}
 22260  		break
 22261  	}
 22262  	return false
 22263  }
 22264  func rewriteValueARM64_OpARM64TSTWconst(v *Value) bool {
 22265  	v_0 := v.Args[0]
 22266  	// match: (TSTWconst (MOVDconst [x]) [y])
 22267  	// result: (FlagConstant [logicFlags32(int32(x)&y)])
 22268  	for {
 22269  		y := auxIntToInt32(v.AuxInt)
 22270  		if v_0.Op != OpARM64MOVDconst {
 22271  			break
 22272  		}
 22273  		x := auxIntToInt64(v_0.AuxInt)
 22274  		v.reset(OpARM64FlagConstant)
 22275  		v.AuxInt = flagConstantToAuxInt(logicFlags32(int32(x) & y))
 22276  		return true
 22277  	}
 22278  	return false
 22279  }
 22280  func rewriteValueARM64_OpARM64TSTconst(v *Value) bool {
 22281  	v_0 := v.Args[0]
 22282  	// match: (TSTconst (MOVDconst [x]) [y])
 22283  	// result: (FlagConstant [logicFlags64(x&y)])
 22284  	for {
 22285  		y := auxIntToInt64(v.AuxInt)
 22286  		if v_0.Op != OpARM64MOVDconst {
 22287  			break
 22288  		}
 22289  		x := auxIntToInt64(v_0.AuxInt)
 22290  		v.reset(OpARM64FlagConstant)
 22291  		v.AuxInt = flagConstantToAuxInt(logicFlags64(x & y))
 22292  		return true
 22293  	}
 22294  	return false
 22295  }
 22296  func rewriteValueARM64_OpARM64TSTshiftLL(v *Value) bool {
 22297  	v_1 := v.Args[1]
 22298  	v_0 := v.Args[0]
 22299  	b := v.Block
 22300  	// match: (TSTshiftLL (MOVDconst [c]) x [d])
 22301  	// result: (TSTconst [c] (SLLconst <x.Type> x [d]))
 22302  	for {
 22303  		d := auxIntToInt64(v.AuxInt)
 22304  		if v_0.Op != OpARM64MOVDconst {
 22305  			break
 22306  		}
 22307  		c := auxIntToInt64(v_0.AuxInt)
 22308  		x := v_1
 22309  		v.reset(OpARM64TSTconst)
 22310  		v.AuxInt = int64ToAuxInt(c)
 22311  		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
 22312  		v0.AuxInt = int64ToAuxInt(d)
 22313  		v0.AddArg(x)
 22314  		v.AddArg(v0)
 22315  		return true
 22316  	}
 22317  	// match: (TSTshiftLL x (MOVDconst [c]) [d])
 22318  	// result: (TSTconst x [int64(uint64(c)<<uint64(d))])
 22319  	for {
 22320  		d := auxIntToInt64(v.AuxInt)
 22321  		x := v_0
 22322  		if v_1.Op != OpARM64MOVDconst {
 22323  			break
 22324  		}
 22325  		c := auxIntToInt64(v_1.AuxInt)
 22326  		v.reset(OpARM64TSTconst)
 22327  		v.AuxInt = int64ToAuxInt(int64(uint64(c) << uint64(d)))
 22328  		v.AddArg(x)
 22329  		return true
 22330  	}
 22331  	return false
 22332  }
 22333  func rewriteValueARM64_OpARM64TSTshiftRA(v *Value) bool {
 22334  	v_1 := v.Args[1]
 22335  	v_0 := v.Args[0]
 22336  	b := v.Block
 22337  	// match: (TSTshiftRA (MOVDconst [c]) x [d])
 22338  	// result: (TSTconst [c] (SRAconst <x.Type> x [d]))
 22339  	for {
 22340  		d := auxIntToInt64(v.AuxInt)
 22341  		if v_0.Op != OpARM64MOVDconst {
 22342  			break
 22343  		}
 22344  		c := auxIntToInt64(v_0.AuxInt)
 22345  		x := v_1
 22346  		v.reset(OpARM64TSTconst)
 22347  		v.AuxInt = int64ToAuxInt(c)
 22348  		v0 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type)
 22349  		v0.AuxInt = int64ToAuxInt(d)
 22350  		v0.AddArg(x)
 22351  		v.AddArg(v0)
 22352  		return true
 22353  	}
 22354  	// match: (TSTshiftRA x (MOVDconst [c]) [d])
 22355  	// result: (TSTconst x [c>>uint64(d)])
 22356  	for {
 22357  		d := auxIntToInt64(v.AuxInt)
 22358  		x := v_0
 22359  		if v_1.Op != OpARM64MOVDconst {
 22360  			break
 22361  		}
 22362  		c := auxIntToInt64(v_1.AuxInt)
 22363  		v.reset(OpARM64TSTconst)
 22364  		v.AuxInt = int64ToAuxInt(c >> uint64(d))
 22365  		v.AddArg(x)
 22366  		return true
 22367  	}
 22368  	return false
 22369  }
 22370  func rewriteValueARM64_OpARM64TSTshiftRL(v *Value) bool {
 22371  	v_1 := v.Args[1]
 22372  	v_0 := v.Args[0]
 22373  	b := v.Block
 22374  	// match: (TSTshiftRL (MOVDconst [c]) x [d])
 22375  	// result: (TSTconst [c] (SRLconst <x.Type> x [d]))
 22376  	for {
 22377  		d := auxIntToInt64(v.AuxInt)
 22378  		if v_0.Op != OpARM64MOVDconst {
 22379  			break
 22380  		}
 22381  		c := auxIntToInt64(v_0.AuxInt)
 22382  		x := v_1
 22383  		v.reset(OpARM64TSTconst)
 22384  		v.AuxInt = int64ToAuxInt(c)
 22385  		v0 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type)
 22386  		v0.AuxInt = int64ToAuxInt(d)
 22387  		v0.AddArg(x)
 22388  		v.AddArg(v0)
 22389  		return true
 22390  	}
 22391  	// match: (TSTshiftRL x (MOVDconst [c]) [d])
 22392  	// result: (TSTconst x [int64(uint64(c)>>uint64(d))])
 22393  	for {
 22394  		d := auxIntToInt64(v.AuxInt)
 22395  		x := v_0
 22396  		if v_1.Op != OpARM64MOVDconst {
 22397  			break
 22398  		}
 22399  		c := auxIntToInt64(v_1.AuxInt)
 22400  		v.reset(OpARM64TSTconst)
 22401  		v.AuxInt = int64ToAuxInt(int64(uint64(c) >> uint64(d)))
 22402  		v.AddArg(x)
 22403  		return true
 22404  	}
 22405  	return false
 22406  }
// rewriteValueARM64_OpARM64TSTshiftRO folds a MOVDconst operand of
// TSTshiftRO into a TSTconst immediate, rotating the constant right
// when it is the rotated operand.
func rewriteValueARM64_OpARM64TSTshiftRO(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (TSTshiftRO (MOVDconst [c]) x [d])
	// result: (TSTconst [c] (RORconst <x.Type> x [d]))
	for {
		d := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_1
		v.reset(OpARM64TSTconst)
		v.AuxInt = int64ToAuxInt(c)
		v0 := b.NewValue0(v.Pos, OpARM64RORconst, x.Type)
		v0.AuxInt = int64ToAuxInt(d)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (TSTshiftRO x (MOVDconst [c]) [d])
	// result: (TSTconst x [rotateRight64(c, d)])
	for {
		d := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64TSTconst)
		v.AuxInt = int64ToAuxInt(rotateRight64(c, d))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64UBFIZ absorbs an inner SLLconst into the
// UBFIZ bit-field parameters when the shift amount fits within the
// field width.
func rewriteValueARM64_OpARM64UBFIZ(v *Value) bool {
	v_0 := v.Args[0]
	// match: (UBFIZ [bfc] (SLLconst [sc] x))
	// cond: sc < bfc.getARM64BFwidth()
	// result: (UBFIZ [armBFAuxInt(bfc.getARM64BFlsb()+sc, bfc.getARM64BFwidth()-sc)] x)
	for {
		bfc := auxIntToArm64BitField(v.AuxInt)
		if v_0.Op != OpARM64SLLconst {
			break
		}
		sc := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(sc < bfc.getARM64BFwidth()) {
			break
		}
		v.reset(OpARM64UBFIZ)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(bfc.getARM64BFlsb()+sc, bfc.getARM64BFwidth()-sc))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64UBFX absorbs an inner ANDconst, SRLconst,
// or SLLconst into the UBFX bit-field parameters, degenerating to an
// ANDconst or a UBFIZ where the combined operation allows it.
func rewriteValueARM64_OpARM64UBFX(v *Value) bool {
	v_0 := v.Args[0]
	// match: (UBFX [bfc] (ANDconst [c] x))
	// cond: isARM64BFMask(0, c, 0) && bfc.getARM64BFlsb() + bfc.getARM64BFwidth() <= arm64BFWidth(c, 0)
	// result: (UBFX [bfc] x)
	for {
		bfc := auxIntToArm64BitField(v.AuxInt)
		if v_0.Op != OpARM64ANDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(isARM64BFMask(0, c, 0) && bfc.getARM64BFlsb()+bfc.getARM64BFwidth() <= arm64BFWidth(c, 0)) {
			break
		}
		v.reset(OpARM64UBFX)
		v.AuxInt = arm64BitFieldToAuxInt(bfc)
		v.AddArg(x)
		return true
	}
	// match: (UBFX [bfc] (SRLconst [sc] x))
	// cond: sc+bfc.getARM64BFwidth()+bfc.getARM64BFlsb() < 64
	// result: (UBFX [armBFAuxInt(bfc.getARM64BFlsb()+sc, bfc.getARM64BFwidth())] x)
	for {
		bfc := auxIntToArm64BitField(v.AuxInt)
		if v_0.Op != OpARM64SRLconst {
			break
		}
		sc := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(sc+bfc.getARM64BFwidth()+bfc.getARM64BFlsb() < 64) {
			break
		}
		v.reset(OpARM64UBFX)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(bfc.getARM64BFlsb()+sc, bfc.getARM64BFwidth()))
		v.AddArg(x)
		return true
	}
	// match: (UBFX [bfc] (SLLconst [sc] x))
	// cond: sc == bfc.getARM64BFlsb()
	// result: (ANDconst [1<<uint(bfc.getARM64BFwidth())-1] x)
	for {
		bfc := auxIntToArm64BitField(v.AuxInt)
		if v_0.Op != OpARM64SLLconst {
			break
		}
		sc := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(sc == bfc.getARM64BFlsb()) {
			break
		}
		v.reset(OpARM64ANDconst)
		v.AuxInt = int64ToAuxInt(1<<uint(bfc.getARM64BFwidth()) - 1)
		v.AddArg(x)
		return true
	}
	// match: (UBFX [bfc] (SLLconst [sc] x))
	// cond: sc < bfc.getARM64BFlsb()
	// result: (UBFX [armBFAuxInt(bfc.getARM64BFlsb()-sc, bfc.getARM64BFwidth())] x)
	for {
		bfc := auxIntToArm64BitField(v.AuxInt)
		if v_0.Op != OpARM64SLLconst {
			break
		}
		sc := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(sc < bfc.getARM64BFlsb()) {
			break
		}
		v.reset(OpARM64UBFX)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(bfc.getARM64BFlsb()-sc, bfc.getARM64BFwidth()))
		v.AddArg(x)
		return true
	}
	// match: (UBFX [bfc] (SLLconst [sc] x))
	// cond: sc > bfc.getARM64BFlsb() && sc < bfc.getARM64BFlsb()+bfc.getARM64BFwidth()
	// result: (UBFIZ [armBFAuxInt(sc-bfc.getARM64BFlsb(), bfc.getARM64BFlsb()+bfc.getARM64BFwidth()-sc)] x)
	for {
		bfc := auxIntToArm64BitField(v.AuxInt)
		if v_0.Op != OpARM64SLLconst {
			break
		}
		sc := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(sc > bfc.getARM64BFlsb() && sc < bfc.getARM64BFlsb()+bfc.getARM64BFwidth()) {
			break
		}
		v.reset(OpARM64UBFIZ)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(sc-bfc.getARM64BFlsb(), bfc.getARM64BFlsb()+bfc.getARM64BFwidth()-sc))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64UDIV strength-reduces unsigned 64-bit
// division: division by 1 disappears, division by a power of two
// becomes a logical right shift, and division of two constants is
// evaluated at compile time.
func rewriteValueARM64_OpARM64UDIV(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (UDIV x (MOVDconst [1]))
	// result: x
	for {
		x := v_0
		if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 1 {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (UDIV x (MOVDconst [c]))
	// cond: isPowerOfTwo64(c)
	// result: (SRLconst [log64(c)] x)
	for {
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(isPowerOfTwo64(c)) {
			break
		}
		v.reset(OpARM64SRLconst)
		v.AuxInt = int64ToAuxInt(log64(c))
		v.AddArg(x)
		return true
	}
	// match: (UDIV (MOVDconst [c]) (MOVDconst [d]))
	// cond: d != 0
	// result: (MOVDconst [int64(uint64(c)/uint64(d))])
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		d := auxIntToInt64(v_1.AuxInt)
		if !(d != 0) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(uint64(c) / uint64(d)))
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64UDIVW strength-reduces unsigned 32-bit
// division: division by 1 disappears, division by a 32-bit power of
// two becomes a logical right shift, and division of two constants is
// evaluated at compile time using 32-bit unsigned arithmetic.
func rewriteValueARM64_OpARM64UDIVW(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (UDIVW x (MOVDconst [c]))
	// cond: uint32(c)==1
	// result: x
	for {
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(uint32(c) == 1) {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (UDIVW x (MOVDconst [c]))
	// cond: isPowerOfTwo64(c) && is32Bit(c)
	// result: (SRLconst [log64(c)] x)
	for {
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(isPowerOfTwo64(c) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64SRLconst)
		v.AuxInt = int64ToAuxInt(log64(c))
		v.AddArg(x)
		return true
	}
	// match: (UDIVW (MOVDconst [c]) (MOVDconst [d]))
	// cond: d != 0
	// result: (MOVDconst [int64(uint32(c)/uint32(d))])
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		d := auxIntToInt64(v_1.AuxInt)
		if !(d != 0) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(uint32(c) / uint32(d)))
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64UMOD lowers unsigned 64-bit modulus to
// MSUB of a UDIV (x - (x/y)*y), and strength-reduces constant
// divisors: modulus by 1 is 0, modulus by a power of two becomes a
// mask, and modulus of two constants is evaluated at compile time.
func rewriteValueARM64_OpARM64UMOD(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (UMOD <typ.UInt64> x y)
	// result: (MSUB <typ.UInt64> x y (UDIV <typ.UInt64> x y))
	for {
		if v.Type != typ.UInt64 {
			break
		}
		x := v_0
		y := v_1
		v.reset(OpARM64MSUB)
		v.Type = typ.UInt64
		v0 := b.NewValue0(v.Pos, OpARM64UDIV, typ.UInt64)
		v0.AddArg2(x, y)
		v.AddArg3(x, y, v0)
		return true
	}
	// match: (UMOD _ (MOVDconst [1]))
	// result: (MOVDconst [0])
	for {
		if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 1 {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (UMOD x (MOVDconst [c]))
	// cond: isPowerOfTwo64(c)
	// result: (ANDconst [c-1] x)
	for {
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(isPowerOfTwo64(c)) {
			break
		}
		v.reset(OpARM64ANDconst)
		v.AuxInt = int64ToAuxInt(c - 1)
		v.AddArg(x)
		return true
	}
	// match: (UMOD (MOVDconst [c]) (MOVDconst [d]))
	// cond: d != 0
	// result: (MOVDconst [int64(uint64(c)%uint64(d))])
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		d := auxIntToInt64(v_1.AuxInt)
		if !(d != 0) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(uint64(c) % uint64(d)))
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64UMODW lowers unsigned 32-bit modulus to
// MSUBW of a UDIVW (x - (x/y)*y), and strength-reduces constant
// divisors: modulus by 1 is 0, modulus by a 32-bit power of two
// becomes a mask, and modulus of two constants is evaluated at
// compile time using 32-bit unsigned arithmetic.
func rewriteValueARM64_OpARM64UMODW(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (UMODW <typ.UInt32> x y)
	// result: (MSUBW <typ.UInt32> x y (UDIVW <typ.UInt32> x y))
	for {
		if v.Type != typ.UInt32 {
			break
		}
		x := v_0
		y := v_1
		v.reset(OpARM64MSUBW)
		v.Type = typ.UInt32
		v0 := b.NewValue0(v.Pos, OpARM64UDIVW, typ.UInt32)
		v0.AddArg2(x, y)
		v.AddArg3(x, y, v0)
		return true
	}
	// match: (UMODW _ (MOVDconst [c]))
	// cond: uint32(c)==1
	// result: (MOVDconst [0])
	for {
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(uint32(c) == 1) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (UMODW x (MOVDconst [c]))
	// cond: isPowerOfTwo64(c) && is32Bit(c)
	// result: (ANDconst [c-1] x)
	for {
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(isPowerOfTwo64(c) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64ANDconst)
		v.AuxInt = int64ToAuxInt(c - 1)
		v.AddArg(x)
		return true
	}
	// match: (UMODW (MOVDconst [c]) (MOVDconst [d]))
	// cond: d != 0
	// result: (MOVDconst [int64(uint32(c)%uint32(d))])
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		d := auxIntToInt64(v_1.AuxInt)
		if !(d != 0) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(uint32(c) % uint32(d)))
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64XOR canonicalizes XOR: a constant operand
// becomes XORconst, x^x folds to 0, x^(MVN y) becomes EON, and a
// shifted operand whose shift is otherwise dead is absorbed into one
// of the XORshift* forms. Commutative matches try both operand orders.
func rewriteValueARM64_OpARM64XOR(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (XOR x (MOVDconst [c]))
	// result: (XORconst [c] x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			v.reset(OpARM64XORconst)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (XOR x x)
	// result: (MOVDconst [0])
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (XOR x (MVN y))
	// result: (EON x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MVN {
				continue
			}
			y := v_1.Args[0]
			v.reset(OpARM64EON)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (XOR x0 x1:(SLLconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (XORshiftLL x0 y [c])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x0 := v_0
			x1 := v_1
			if x1.Op != OpARM64SLLconst {
				continue
			}
			c := auxIntToInt64(x1.AuxInt)
			y := x1.Args[0]
			if !(clobberIfDead(x1)) {
				continue
			}
			v.reset(OpARM64XORshiftLL)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg2(x0, y)
			return true
		}
		break
	}
	// match: (XOR x0 x1:(SRLconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (XORshiftRL x0 y [c])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x0 := v_0
			x1 := v_1
			if x1.Op != OpARM64SRLconst {
				continue
			}
			c := auxIntToInt64(x1.AuxInt)
			y := x1.Args[0]
			if !(clobberIfDead(x1)) {
				continue
			}
			v.reset(OpARM64XORshiftRL)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg2(x0, y)
			return true
		}
		break
	}
	// match: (XOR x0 x1:(SRAconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (XORshiftRA x0 y [c])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x0 := v_0
			x1 := v_1
			if x1.Op != OpARM64SRAconst {
				continue
			}
			c := auxIntToInt64(x1.AuxInt)
			y := x1.Args[0]
			if !(clobberIfDead(x1)) {
				continue
			}
			v.reset(OpARM64XORshiftRA)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg2(x0, y)
			return true
		}
		break
	}
	// match: (XOR x0 x1:(RORconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (XORshiftRO x0 y [c])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x0 := v_0
			x1 := v_1
			if x1.Op != OpARM64RORconst {
				continue
			}
			c := auxIntToInt64(x1.AuxInt)
			y := x1.Args[0]
			if !(clobberIfDead(x1)) {
				continue
			}
			v.reset(OpARM64XORshiftRO)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg2(x0, y)
			return true
		}
		break
	}
	return false
}
// rewriteValueARM64_OpARM64XORconst simplifies XORconst: xor with 0
// disappears, xor with -1 becomes MVN, and nested constants
// (MOVDconst operand or chained XORconst) are folded together.
func rewriteValueARM64_OpARM64XORconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (XORconst [0] x)
	// result: x
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		x := v_0
		v.copyOf(x)
		return true
	}
	// match: (XORconst [-1] x)
	// result: (MVN x)
	for {
		if auxIntToInt64(v.AuxInt) != -1 {
			break
		}
		x := v_0
		v.reset(OpARM64MVN)
		v.AddArg(x)
		return true
	}
	// match: (XORconst [c] (MOVDconst [d]))
	// result: (MOVDconst [c^d])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(c ^ d)
		return true
	}
	// match: (XORconst [c] (XORconst [d] x))
	// result: (XORconst [c^d] x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64XORconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		v.reset(OpARM64XORconst)
		v.AuxInt = int64ToAuxInt(c ^ d)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64XORshiftLL simplifies XORshiftLL: constant
// operands are folded into XORconst, x^(x<<c matching itself) folds
// to 0, several shift-and-mask shapes are recognized as byte-swap
// (REV16/REV16W) idioms, and shift pairs covering 64 or 32 bits
// become EXTRconst/EXTRWconst funnel shifts.
func rewriteValueARM64_OpARM64XORshiftLL(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (XORshiftLL (MOVDconst [c]) x [d])
	// result: (XORconst [c] (SLLconst <x.Type> x [d]))
	for {
		d := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_1
		v.reset(OpARM64XORconst)
		v.AuxInt = int64ToAuxInt(c)
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
		v0.AuxInt = int64ToAuxInt(d)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (XORshiftLL x (MOVDconst [c]) [d])
	// result: (XORconst x [int64(uint64(c)<<uint64(d))])
	for {
		d := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64XORconst)
		v.AuxInt = int64ToAuxInt(int64(uint64(c) << uint64(d)))
		v.AddArg(x)
		return true
	}
	// match: (XORshiftLL (SLLconst x [c]) x [c])
	// result: (MOVDconst [0])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != c {
			break
		}
		x := v_0.Args[0]
		if x != v_1 {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (XORshiftLL <typ.UInt16> [8] (UBFX <typ.UInt16> [armBFAuxInt(8, 8)] x) x)
	// result: (REV16W x)
	for {
		if v.Type != typ.UInt16 || auxIntToInt64(v.AuxInt) != 8 || v_0.Op != OpARM64UBFX || v_0.Type != typ.UInt16 || auxIntToArm64BitField(v_0.AuxInt) != armBFAuxInt(8, 8) {
			break
		}
		x := v_0.Args[0]
		if x != v_1 {
			break
		}
		v.reset(OpARM64REV16W)
		v.AddArg(x)
		return true
	}
	// match: (XORshiftLL [8] (UBFX [armBFAuxInt(8, 24)] (ANDconst [c1] x)) (ANDconst [c2] x))
	// cond: uint32(c1) == 0xff00ff00 && uint32(c2) == 0x00ff00ff
	// result: (REV16W x)
	for {
		if auxIntToInt64(v.AuxInt) != 8 || v_0.Op != OpARM64UBFX || auxIntToArm64BitField(v_0.AuxInt) != armBFAuxInt(8, 24) {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpARM64ANDconst {
			break
		}
		c1 := auxIntToInt64(v_0_0.AuxInt)
		x := v_0_0.Args[0]
		if v_1.Op != OpARM64ANDconst {
			break
		}
		c2 := auxIntToInt64(v_1.AuxInt)
		if x != v_1.Args[0] || !(uint32(c1) == 0xff00ff00 && uint32(c2) == 0x00ff00ff) {
			break
		}
		v.reset(OpARM64REV16W)
		v.AddArg(x)
		return true
	}
	// match: (XORshiftLL [8] (SRLconst [8] (ANDconst [c1] x)) (ANDconst [c2] x))
	// cond: (uint64(c1) == 0xff00ff00ff00ff00 && uint64(c2) == 0x00ff00ff00ff00ff)
	// result: (REV16 x)
	for {
		if auxIntToInt64(v.AuxInt) != 8 || v_0.Op != OpARM64SRLconst || auxIntToInt64(v_0.AuxInt) != 8 {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpARM64ANDconst {
			break
		}
		c1 := auxIntToInt64(v_0_0.AuxInt)
		x := v_0_0.Args[0]
		if v_1.Op != OpARM64ANDconst {
			break
		}
		c2 := auxIntToInt64(v_1.AuxInt)
		if x != v_1.Args[0] || !(uint64(c1) == 0xff00ff00ff00ff00 && uint64(c2) == 0x00ff00ff00ff00ff) {
			break
		}
		v.reset(OpARM64REV16)
		v.AddArg(x)
		return true
	}
	// match: (XORshiftLL [8] (SRLconst [8] (ANDconst [c1] x)) (ANDconst [c2] x))
	// cond: (uint64(c1) == 0xff00ff00 && uint64(c2) == 0x00ff00ff)
	// result: (REV16 (ANDconst <x.Type> [0xffffffff] x))
	for {
		if auxIntToInt64(v.AuxInt) != 8 || v_0.Op != OpARM64SRLconst || auxIntToInt64(v_0.AuxInt) != 8 {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpARM64ANDconst {
			break
		}
		c1 := auxIntToInt64(v_0_0.AuxInt)
		x := v_0_0.Args[0]
		if v_1.Op != OpARM64ANDconst {
			break
		}
		c2 := auxIntToInt64(v_1.AuxInt)
		if x != v_1.Args[0] || !(uint64(c1) == 0xff00ff00 && uint64(c2) == 0x00ff00ff) {
			break
		}
		v.reset(OpARM64REV16)
		v0 := b.NewValue0(v.Pos, OpARM64ANDconst, x.Type)
		v0.AuxInt = int64ToAuxInt(0xffffffff)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (XORshiftLL [c] (SRLconst x [64-c]) x2)
	// result: (EXTRconst [64-c] x2 x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64SRLconst || auxIntToInt64(v_0.AuxInt) != 64-c {
			break
		}
		x := v_0.Args[0]
		x2 := v_1
		v.reset(OpARM64EXTRconst)
		v.AuxInt = int64ToAuxInt(64 - c)
		v.AddArg2(x2, x)
		return true
	}
	// match: (XORshiftLL <t> [c] (UBFX [bfc] x) x2)
	// cond: c < 32 && t.Size() == 4 && bfc == armBFAuxInt(32-c, c)
	// result: (EXTRWconst [32-c] x2 x)
	for {
		t := v.Type
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64UBFX {
			break
		}
		bfc := auxIntToArm64BitField(v_0.AuxInt)
		x := v_0.Args[0]
		x2 := v_1
		if !(c < 32 && t.Size() == 4 && bfc == armBFAuxInt(32-c, c)) {
			break
		}
		v.reset(OpARM64EXTRWconst)
		v.AuxInt = int64ToAuxInt(32 - c)
		v.AddArg2(x2, x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64XORshiftRA simplifies XORshiftRA: constant
// operands are folded into XORconst (arithmetic shift applied to the
// constant), and x^(x>>c matching itself) folds to 0.
func rewriteValueARM64_OpARM64XORshiftRA(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (XORshiftRA (MOVDconst [c]) x [d])
	// result: (XORconst [c] (SRAconst <x.Type> x [d]))
	for {
		d := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_1
		v.reset(OpARM64XORconst)
		v.AuxInt = int64ToAuxInt(c)
		v0 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type)
		v0.AuxInt = int64ToAuxInt(d)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (XORshiftRA x (MOVDconst [c]) [d])
	// result: (XORconst x [c>>uint64(d)])
	for {
		d := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64XORconst)
		v.AuxInt = int64ToAuxInt(c >> uint64(d))
		v.AddArg(x)
		return true
	}
	// match: (XORshiftRA (SRAconst x [c]) x [c])
	// result: (MOVDconst [0])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64SRAconst || auxIntToInt64(v_0.AuxInt) != c {
			break
		}
		x := v_0.Args[0]
		if x != v_1 {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64XORshiftRL simplifies XORshiftRL: constant
// operands are folded into XORconst (logical shift applied to the
// constant), and x^(x>>>c matching itself) folds to 0.
func rewriteValueARM64_OpARM64XORshiftRL(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (XORshiftRL (MOVDconst [c]) x [d])
	// result: (XORconst [c] (SRLconst <x.Type> x [d]))
	for {
		d := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_1
		v.reset(OpARM64XORconst)
		v.AuxInt = int64ToAuxInt(c)
		v0 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type)
		v0.AuxInt = int64ToAuxInt(d)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (XORshiftRL x (MOVDconst [c]) [d])
	// result: (XORconst x [int64(uint64(c)>>uint64(d))])
	for {
		d := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64XORconst)
		v.AuxInt = int64ToAuxInt(int64(uint64(c) >> uint64(d)))
		v.AddArg(x)
		return true
	}
	// match: (XORshiftRL (SRLconst x [c]) x [c])
	// result: (MOVDconst [0])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64SRLconst || auxIntToInt64(v_0.AuxInt) != c {
			break
		}
		x := v_0.Args[0]
		if x != v_1 {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64XORshiftRO simplifies XORshiftRO: constant
// operands are folded into XORconst (rotate applied to the constant),
// and x^(ROR x [c] matching itself) folds to 0.
func rewriteValueARM64_OpARM64XORshiftRO(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (XORshiftRO (MOVDconst [c]) x [d])
	// result: (XORconst [c] (RORconst <x.Type> x [d]))
	for {
		d := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_1
		v.reset(OpARM64XORconst)
		v.AuxInt = int64ToAuxInt(c)
		v0 := b.NewValue0(v.Pos, OpARM64RORconst, x.Type)
		v0.AuxInt = int64ToAuxInt(d)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (XORshiftRO x (MOVDconst [c]) [d])
	// result: (XORconst x [rotateRight64(c, d)])
	for {
		d := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64XORconst)
		v.AuxInt = int64ToAuxInt(rotateRight64(c, d))
		v.AddArg(x)
		return true
	}
	// match: (XORshiftRO (RORconst x [c]) x [c])
	// result: (MOVDconst [0])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64RORconst || auxIntToInt64(v_0.AuxInt) != c {
			break
		}
		x := v_0.Args[0]
		if x != v_1 {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueARM64_OpAddr lowers the generic Addr op to the ARM64
// MOVDaddr op, carrying the symbol aux through unchanged.
func rewriteValueARM64_OpAddr(v *Value) bool {
	v_0 := v.Args[0]
	// match: (Addr {sym} base)
	// result: (MOVDaddr {sym} base)
	for {
		sym := auxToSym(v.Aux)
		base := v_0
		v.reset(OpARM64MOVDaddr)
		v.Aux = symToAux(sym)
		v.AddArg(base)
		return true
	}
}
// rewriteValueARM64_OpAtomicAnd32 lowers the generic AtomicAnd32 op
// to Select1 of LoweredAtomicAnd32, which produces a
// (UInt32, memory) tuple.
func rewriteValueARM64_OpAtomicAnd32(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (AtomicAnd32 ptr val mem)
	// result: (Select1 (LoweredAtomicAnd32 ptr val mem))
	for {
		ptr := v_0
		val := v_1
		mem := v_2
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpARM64LoweredAtomicAnd32, types.NewTuple(typ.UInt32, types.TypeMem))
		v0.AddArg3(ptr, val, mem)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpAtomicAnd32Variant lowers the generic
// AtomicAnd32Variant op to Select1 of LoweredAtomicAnd32Variant,
// which produces a (UInt32, memory) tuple.
func rewriteValueARM64_OpAtomicAnd32Variant(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (AtomicAnd32Variant ptr val mem)
	// result: (Select1 (LoweredAtomicAnd32Variant ptr val mem))
	for {
		ptr := v_0
		val := v_1
		mem := v_2
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpARM64LoweredAtomicAnd32Variant, types.NewTuple(typ.UInt32, types.TypeMem))
		v0.AddArg3(ptr, val, mem)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpAtomicAnd8 lowers the generic AtomicAnd8 op to
// Select1 of LoweredAtomicAnd8, which produces a (UInt8, memory)
// tuple.
func rewriteValueARM64_OpAtomicAnd8(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (AtomicAnd8 ptr val mem)
	// result: (Select1 (LoweredAtomicAnd8 ptr val mem))
	for {
		ptr := v_0
		val := v_1
		mem := v_2
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpARM64LoweredAtomicAnd8, types.NewTuple(typ.UInt8, types.TypeMem))
		v0.AddArg3(ptr, val, mem)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpAtomicAnd8Variant lowers the generic
// AtomicAnd8Variant op to Select1 of LoweredAtomicAnd8Variant, which
// produces a (UInt8, memory) tuple.
func rewriteValueARM64_OpAtomicAnd8Variant(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (AtomicAnd8Variant ptr val mem)
	// result: (Select1 (LoweredAtomicAnd8Variant ptr val mem))
	for {
		ptr := v_0
		val := v_1
		mem := v_2
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpARM64LoweredAtomicAnd8Variant, types.NewTuple(typ.UInt8, types.TypeMem))
		v0.AddArg3(ptr, val, mem)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpAtomicOr32 lowers the generic AtomicOr32 op to
// Select1 of LoweredAtomicOr32, which produces a (UInt32, memory)
// tuple.
func rewriteValueARM64_OpAtomicOr32(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (AtomicOr32 ptr val mem)
	// result: (Select1 (LoweredAtomicOr32 ptr val mem))
	for {
		ptr := v_0
		val := v_1
		mem := v_2
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpARM64LoweredAtomicOr32, types.NewTuple(typ.UInt32, types.TypeMem))
		v0.AddArg3(ptr, val, mem)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpAtomicOr32Variant lowers the generic
// AtomicOr32Variant op to Select1 of LoweredAtomicOr32Variant, which
// produces a (UInt32, memory) tuple.
func rewriteValueARM64_OpAtomicOr32Variant(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (AtomicOr32Variant ptr val mem)
	// result: (Select1 (LoweredAtomicOr32Variant ptr val mem))
	for {
		ptr := v_0
		val := v_1
		mem := v_2
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpARM64LoweredAtomicOr32Variant, types.NewTuple(typ.UInt32, types.TypeMem))
		v0.AddArg3(ptr, val, mem)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpAtomicOr8 lowers the generic AtomicOr8 op to the ARM64
// LoweredAtomicOr8 tuple op (uint8 result, memory), keeping the memory
// component (tuple element 1) via Select1.
func rewriteValueARM64_OpAtomicOr8(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (AtomicOr8 ptr val mem)
	// result: (Select1 (LoweredAtomicOr8 ptr val mem))
	for {
		ptr := v_0
		val := v_1
		mem := v_2
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpARM64LoweredAtomicOr8, types.NewTuple(typ.UInt8, types.TypeMem))
		v0.AddArg3(ptr, val, mem)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpAtomicOr8Variant lowers the generic AtomicOr8Variant op
// to the ARM64 LoweredAtomicOr8Variant tuple op, keeping the memory component
// (tuple element 1) via Select1.
func rewriteValueARM64_OpAtomicOr8Variant(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (AtomicOr8Variant ptr val mem)
	// result: (Select1 (LoweredAtomicOr8Variant ptr val mem))
	for {
		ptr := v_0
		val := v_1
		mem := v_2
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpARM64LoweredAtomicOr8Variant, types.NewTuple(typ.UInt8, types.TypeMem))
		v0.AddArg3(ptr, val, mem)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpAvg64u lowers Avg64u to ((x-y)>>1)+y, a form that cannot
// overflow, instead of the naive (x+y)>>1.
func rewriteValueARM64_OpAvg64u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Avg64u <t> x y)
	// result: (ADD (SRLconst <t> (SUB <t> x y) [1]) y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpARM64ADD)
		v0 := b.NewValue0(v.Pos, OpARM64SRLconst, t)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpARM64SUB, t)
		v1.AddArg2(x, y)
		v0.AddArg(v1)
		v.AddArg2(v0, y)
		return true
	}
}
// rewriteValueARM64_OpBitLen32 lowers BitLen32 to 32 minus the count of
// leading zeros (CLZW) of x.
func rewriteValueARM64_OpBitLen32(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (BitLen32 x)
	// result: (SUB (MOVDconst [32]) (CLZW <typ.Int> x))
	for {
		x := v_0
		v.reset(OpARM64SUB)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(32)
		v1 := b.NewValue0(v.Pos, OpARM64CLZW, typ.Int)
		v1.AddArg(x)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueARM64_OpBitLen64 lowers BitLen64 to 64 minus the count of
// leading zeros (CLZ) of x.
func rewriteValueARM64_OpBitLen64(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (BitLen64 x)
	// result: (SUB (MOVDconst [64]) (CLZ <typ.Int> x))
	for {
		x := v_0
		v.reset(OpARM64SUB)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(64)
		v1 := b.NewValue0(v.Pos, OpARM64CLZ, typ.Int)
		v1.AddArg(x)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueARM64_OpBitRev16 lowers BitRev16 by reversing all 64 bits with
// RBIT and then shifting right by 48 so the reversed low 16 bits land in the
// low half of the result.
func rewriteValueARM64_OpBitRev16(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (BitRev16 x)
	// result: (SRLconst [48] (RBIT <typ.UInt64> x))
	for {
		x := v_0
		v.reset(OpARM64SRLconst)
		v.AuxInt = int64ToAuxInt(48)
		v0 := b.NewValue0(v.Pos, OpARM64RBIT, typ.UInt64)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpBitRev8 lowers BitRev8 by reversing all 64 bits with
// RBIT and then shifting right by 56 so the reversed low 8 bits land in the
// low byte of the result.
func rewriteValueARM64_OpBitRev8(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (BitRev8 x)
	// result: (SRLconst [56] (RBIT <typ.UInt64> x))
	for {
		x := v_0
		v.reset(OpARM64SRLconst)
		v.AuxInt = int64ToAuxInt(56)
		v0 := b.NewValue0(v.Pos, OpARM64RBIT, typ.UInt64)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpCondSelect lowers CondSelect to the ARM64 CSEL op.
// If the boolean operand is itself derived from a flags-producing value
// (flagArg(boolval) != nil), the CSEL consumes those flags directly with the
// boolean's own op recorded in AuxInt as the condition; otherwise the boolean
// is materialized and tested with (TSTWconst [1] boolval) and CSEL selects on
// NotEqual.
func rewriteValueARM64_OpCondSelect(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (CondSelect x y boolval)
	// cond: flagArg(boolval) != nil
	// result: (CSEL [boolval.Op] x y flagArg(boolval))
	for {
		x := v_0
		y := v_1
		boolval := v_2
		if !(flagArg(boolval) != nil) {
			break
		}
		v.reset(OpARM64CSEL)
		v.AuxInt = opToAuxInt(boolval.Op)
		v.AddArg3(x, y, flagArg(boolval))
		return true
	}
	// match: (CondSelect x y boolval)
	// cond: flagArg(boolval) == nil
	// result: (CSEL [OpARM64NotEqual] x y (TSTWconst [1] boolval))
	for {
		x := v_0
		y := v_1
		boolval := v_2
		if !(flagArg(boolval) == nil) {
			break
		}
		v.reset(OpARM64CSEL)
		v.AuxInt = opToAuxInt(OpARM64NotEqual)
		v0 := b.NewValue0(v.Pos, OpARM64TSTWconst, types.TypeFlags)
		v0.AuxInt = int32ToAuxInt(1)
		v0.AddArg(boolval)
		v.AddArg3(x, y, v0)
		return true
	}
	return false
}
// rewriteValueARM64_OpConst16 lowers Const16 to a sign-extended 64-bit
// MOVDconst.
func rewriteValueARM64_OpConst16(v *Value) bool {
	// match: (Const16 [val])
	// result: (MOVDconst [int64(val)])
	for {
		val := auxIntToInt16(v.AuxInt)
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(val))
		return true
	}
}
// rewriteValueARM64_OpConst32 lowers Const32 to a sign-extended 64-bit
// MOVDconst.
func rewriteValueARM64_OpConst32(v *Value) bool {
	// match: (Const32 [val])
	// result: (MOVDconst [int64(val)])
	for {
		val := auxIntToInt32(v.AuxInt)
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(val))
		return true
	}
}
// rewriteValueARM64_OpConst32F lowers Const32F to FMOVSconst, widening the
// float32 aux value to float64 for storage in AuxInt.
func rewriteValueARM64_OpConst32F(v *Value) bool {
	// match: (Const32F [val])
	// result: (FMOVSconst [float64(val)])
	for {
		val := auxIntToFloat32(v.AuxInt)
		v.reset(OpARM64FMOVSconst)
		v.AuxInt = float64ToAuxInt(float64(val))
		return true
	}
}
// rewriteValueARM64_OpConst64 lowers Const64 to MOVDconst.
func rewriteValueARM64_OpConst64(v *Value) bool {
	// match: (Const64 [val])
	// result: (MOVDconst [int64(val)])
	for {
		val := auxIntToInt64(v.AuxInt)
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(val))
		return true
	}
}
// rewriteValueARM64_OpConst64F lowers Const64F to FMOVDconst.
func rewriteValueARM64_OpConst64F(v *Value) bool {
	// match: (Const64F [val])
	// result: (FMOVDconst [float64(val)])
	for {
		val := auxIntToFloat64(v.AuxInt)
		v.reset(OpARM64FMOVDconst)
		v.AuxInt = float64ToAuxInt(float64(val))
		return true
	}
}
// rewriteValueARM64_OpConst8 lowers Const8 to a sign-extended 64-bit
// MOVDconst.
func rewriteValueARM64_OpConst8(v *Value) bool {
	// match: (Const8 [val])
	// result: (MOVDconst [int64(val)])
	for {
		val := auxIntToInt8(v.AuxInt)
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(val))
		return true
	}
}
// rewriteValueARM64_OpConstBool lowers ConstBool to MOVDconst, converting the
// boolean aux value to 0/1 with b2i.
func rewriteValueARM64_OpConstBool(v *Value) bool {
	// match: (ConstBool [t])
	// result: (MOVDconst [b2i(t)])
	for {
		t := auxIntToBool(v.AuxInt)
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(b2i(t))
		return true
	}
}
// rewriteValueARM64_OpConstNil lowers ConstNil to (MOVDconst [0]).
func rewriteValueARM64_OpConstNil(v *Value) bool {
	// match: (ConstNil)
	// result: (MOVDconst [0])
	for {
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
}
// rewriteValueARM64_OpCtz16 lowers Ctz16 as CLZW(RBITW(x | 0x10000)).
// OR-ing in bit 16 guarantees a set bit just above the 16-bit input, so a
// zero input correctly yields 16 rather than an undefined count.
func rewriteValueARM64_OpCtz16(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Ctz16 <t> x)
	// result: (CLZW <t> (RBITW <typ.UInt32> (ORconst <typ.UInt32> [0x10000] x)))
	for {
		t := v.Type
		x := v_0
		v.reset(OpARM64CLZW)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpARM64RBITW, typ.UInt32)
		v1 := b.NewValue0(v.Pos, OpARM64ORconst, typ.UInt32)
		v1.AuxInt = int64ToAuxInt(0x10000)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpCtz32 lowers Ctz32 to counting leading zeros of the
// bit-reversed input: CLZW(RBITW(x)).
func rewriteValueARM64_OpCtz32(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (Ctz32 <t> x)
	// result: (CLZW (RBITW <t> x))
	for {
		t := v.Type
		x := v_0
		v.reset(OpARM64CLZW)
		v0 := b.NewValue0(v.Pos, OpARM64RBITW, t)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpCtz64 lowers Ctz64 to counting leading zeros of the
// bit-reversed input: CLZ(RBIT(x)).
func rewriteValueARM64_OpCtz64(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (Ctz64 <t> x)
	// result: (CLZ (RBIT <t> x))
	for {
		t := v.Type
		x := v_0
		v.reset(OpARM64CLZ)
		v0 := b.NewValue0(v.Pos, OpARM64RBIT, t)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpCtz8 lowers Ctz8 as CLZW(RBITW(x | 0x100)).
// OR-ing in bit 8 guarantees a set bit just above the 8-bit input, so a zero
// input correctly yields 8 rather than an undefined count.
func rewriteValueARM64_OpCtz8(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Ctz8 <t> x)
	// result: (CLZW <t> (RBITW <typ.UInt32> (ORconst <typ.UInt32> [0x100] x)))
	for {
		t := v.Type
		x := v_0
		v.reset(OpARM64CLZW)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpARM64RBITW, typ.UInt32)
		v1 := b.NewValue0(v.Pos, OpARM64ORconst, typ.UInt32)
		v1.AuxInt = int64ToAuxInt(0x100)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpDiv16 lowers Div16 (when the AuxInt "divide may
// overflow" flag is false) to a 32-bit signed divide of the sign-extended
// operands.
func rewriteValueARM64_OpDiv16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div16 [false] x y)
	// result: (DIVW (SignExt16to32 x) (SignExt16to32 y))
	for {
		if auxIntToBool(v.AuxInt) != false {
			break
		}
		x := v_0
		y := v_1
		v.reset(OpARM64DIVW)
		v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}
// rewriteValueARM64_OpDiv16u lowers Div16u to a 32-bit unsigned divide of the
// zero-extended operands.
func rewriteValueARM64_OpDiv16u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div16u x y)
	// result: (UDIVW (ZeroExt16to32 x) (ZeroExt16to32 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64UDIVW)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueARM64_OpDiv32 lowers Div32 (when the AuxInt "divide may
// overflow" flag is false) to the 32-bit signed DIVW instruction.
func rewriteValueARM64_OpDiv32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Div32 [false] x y)
	// result: (DIVW x y)
	for {
		if auxIntToBool(v.AuxInt) != false {
			break
		}
		x := v_0
		y := v_1
		v.reset(OpARM64DIVW)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueARM64_OpDiv64 lowers Div64 (when the AuxInt "divide may
// overflow" flag is false) to the 64-bit signed DIV instruction.
func rewriteValueARM64_OpDiv64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Div64 [false] x y)
	// result: (DIV x y)
	for {
		if auxIntToBool(v.AuxInt) != false {
			break
		}
		x := v_0
		y := v_1
		v.reset(OpARM64DIV)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueARM64_OpDiv8 lowers Div8 to a 32-bit signed divide of the
// sign-extended operands.
func rewriteValueARM64_OpDiv8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div8 x y)
	// result: (DIVW (SignExt8to32 x) (SignExt8to32 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64DIVW)
		v0 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueARM64_OpDiv8u lowers Div8u to a 32-bit unsigned divide of the
// zero-extended operands.
func rewriteValueARM64_OpDiv8u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div8u x y)
	// result: (UDIVW (ZeroExt8to32 x) (ZeroExt8to32 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64UDIVW)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueARM64_OpEq16 lowers Eq16 to an Equal condition over a 32-bit
// compare of the zero-extended operands.
func rewriteValueARM64_OpEq16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Eq16 x y)
	// result: (Equal (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpEq32 lowers Eq32 to (Equal (CMPW x y)).
func rewriteValueARM64_OpEq32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Eq32 x y)
	// result: (Equal (CMPW x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpEq32F lowers Eq32F to (Equal (FCMPS x y)), a
// single-precision floating-point compare.
func rewriteValueARM64_OpEq32F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Eq32F x y)
	// result: (Equal (FCMPS x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Pos, OpARM64FCMPS, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpEq64 lowers Eq64 to (Equal (CMP x y)).
func rewriteValueARM64_OpEq64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Eq64 x y)
	// result: (Equal (CMP x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpEq64F lowers Eq64F to (Equal (FCMPD x y)), a
// double-precision floating-point compare.
func rewriteValueARM64_OpEq64F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Eq64F x y)
	// result: (Equal (FCMPD x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Pos, OpARM64FCMPD, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpEq8 lowers Eq8 to an Equal condition over a 32-bit
// compare of the zero-extended operands.
func rewriteValueARM64_OpEq8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Eq8 x y)
	// result: (Equal (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpEqB lowers boolean equality as 1 XOR (x XOR y):
// x XOR y is 1 exactly when the booleans differ, and the outer XOR with 1
// inverts that into equality.
func rewriteValueARM64_OpEqB(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (EqB x y)
	// result: (XOR (MOVDconst [1]) (XOR <typ.Bool> x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64XOR)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpARM64XOR, typ.Bool)
		v1.AddArg2(x, y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueARM64_OpEqPtr lowers pointer equality to (Equal (CMP x y)).
func rewriteValueARM64_OpEqPtr(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (EqPtr x y)
	// result: (Equal (CMP x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpFMA lowers FMA x y z (x*y + z) to FMADDD, whose operand
// order puts the addend first: (FMADDD z x y).
func rewriteValueARM64_OpFMA(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FMA x y z)
	// result: (FMADDD z x y)
	for {
		x := v_0
		y := v_1
		z := v_2
		v.reset(OpARM64FMADDD)
		v.AddArg3(z, x, y)
		return true
	}
}
// rewriteValueARM64_OpHmul32 lowers Hmul32 (high 32 bits of a signed 32x32
// multiply) to a full 64-bit signed multiply followed by an arithmetic shift
// right by 32.
func rewriteValueARM64_OpHmul32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Hmul32 x y)
	// result: (SRAconst (MULL <typ.Int64> x y) [32])
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64SRAconst)
		v.AuxInt = int64ToAuxInt(32)
		v0 := b.NewValue0(v.Pos, OpARM64MULL, typ.Int64)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpHmul32u lowers Hmul32u (high 32 bits of an unsigned
// 32x32 multiply) to a full 64-bit unsigned multiply followed by a shift
// right by 32.
func rewriteValueARM64_OpHmul32u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Hmul32u x y)
	// result: (SRAconst (UMULL <typ.UInt64> x y) [32])
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64SRAconst)
		v.AuxInt = int64ToAuxInt(32)
		v0 := b.NewValue0(v.Pos, OpARM64UMULL, typ.UInt64)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpIsInBounds lowers the bounds check idx < len to an
// unsigned less-than over a CMP; the unsigned compare also rejects negative
// indices in one test.
func rewriteValueARM64_OpIsInBounds(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (IsInBounds idx len)
	// result: (LessThanU (CMP idx len))
	for {
		idx := v_0
		len := v_1
		v.reset(OpARM64LessThanU)
		v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
		v0.AddArg2(idx, len)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpIsNonNil lowers the nil check to comparing the pointer
// against zero: (NotEqual (CMPconst [0] ptr)).
func rewriteValueARM64_OpIsNonNil(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (IsNonNil ptr)
	// result: (NotEqual (CMPconst [0] ptr))
	for {
		ptr := v_0
		v.reset(OpARM64NotEqual)
		v0 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v0.AuxInt = int64ToAuxInt(0)
		v0.AddArg(ptr)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpIsSliceInBounds lowers the slice bounds check
// idx <= len to an unsigned less-or-equal over a CMP.
func rewriteValueARM64_OpIsSliceInBounds(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (IsSliceInBounds idx len)
	// result: (LessEqualU (CMP idx len))
	for {
		idx := v_0
		len := v_1
		v.reset(OpARM64LessEqualU)
		v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
		v0.AddArg2(idx, len)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpLeq16 lowers Leq16 to a signed less-or-equal over a
// 32-bit compare of the sign-extended operands.
func rewriteValueARM64_OpLeq16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq16 x y)
	// result: (LessEqual (CMPW (SignExt16to32 x) (SignExt16to32 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64LessEqual)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpLeq16U lowers unsigned 16-bit less-or-equal.
// Two special cases come first: unsigned x <= 0 is equivalent to x == 0, and
// unsigned 1 <= x is equivalent to x != 0. The general case zero-extends both
// operands and uses an unsigned 32-bit compare.
func rewriteValueARM64_OpLeq16U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq16U x zero:(MOVDconst [0]))
	// result: (Eq16 x zero)
	for {
		x := v_0
		zero := v_1
		if zero.Op != OpARM64MOVDconst || auxIntToInt64(zero.AuxInt) != 0 {
			break
		}
		v.reset(OpEq16)
		v.AddArg2(x, zero)
		return true
	}
	// match: (Leq16U (MOVDconst [1]) x)
	// result: (Neq16 (MOVDconst [0]) x)
	for {
		if v_0.Op != OpARM64MOVDconst || auxIntToInt64(v_0.AuxInt) != 1 {
			break
		}
		x := v_1
		v.reset(OpNeq16)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, x)
		return true
	}
	// match: (Leq16U x y)
	// result: (LessEqualU (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64LessEqualU)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpLeq32 lowers Leq32 to (LessEqual (CMPW x y)).
func rewriteValueARM64_OpLeq32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Leq32 x y)
	// result: (LessEqual (CMPW x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64LessEqual)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpLeq32F lowers Leq32F to the floating-point
// less-or-equal condition over a single-precision FCMPS.
func rewriteValueARM64_OpLeq32F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Leq32F x y)
	// result: (LessEqualF (FCMPS x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64LessEqualF)
		v0 := b.NewValue0(v.Pos, OpARM64FCMPS, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpLeq32U lowers unsigned 32-bit less-or-equal.
// Special cases: unsigned x <= 0 becomes x == 0, and unsigned 1 <= x becomes
// x != 0. The general case uses an unsigned 32-bit compare.
func rewriteValueARM64_OpLeq32U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq32U x zero:(MOVDconst [0]))
	// result: (Eq32 x zero)
	for {
		x := v_0
		zero := v_1
		if zero.Op != OpARM64MOVDconst || auxIntToInt64(zero.AuxInt) != 0 {
			break
		}
		v.reset(OpEq32)
		v.AddArg2(x, zero)
		return true
	}
	// match: (Leq32U (MOVDconst [1]) x)
	// result: (Neq32 (MOVDconst [0]) x)
	for {
		if v_0.Op != OpARM64MOVDconst || auxIntToInt64(v_0.AuxInt) != 1 {
			break
		}
		x := v_1
		v.reset(OpNeq32)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, x)
		return true
	}
	// match: (Leq32U x y)
	// result: (LessEqualU (CMPW x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64LessEqualU)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpLeq64 lowers Leq64 to (LessEqual (CMP x y)).
func rewriteValueARM64_OpLeq64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Leq64 x y)
	// result: (LessEqual (CMP x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64LessEqual)
		v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpLeq64F lowers Leq64F to the floating-point
// less-or-equal condition over a double-precision FCMPD.
func rewriteValueARM64_OpLeq64F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Leq64F x y)
	// result: (LessEqualF (FCMPD x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64LessEqualF)
		v0 := b.NewValue0(v.Pos, OpARM64FCMPD, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpLeq64U lowers unsigned 64-bit less-or-equal.
// Special cases: unsigned x <= 0 becomes x == 0, and unsigned 1 <= x becomes
// x != 0. The general case uses an unsigned 64-bit compare.
func rewriteValueARM64_OpLeq64U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq64U x zero:(MOVDconst [0]))
	// result: (Eq64 x zero)
	for {
		x := v_0
		zero := v_1
		if zero.Op != OpARM64MOVDconst || auxIntToInt64(zero.AuxInt) != 0 {
			break
		}
		v.reset(OpEq64)
		v.AddArg2(x, zero)
		return true
	}
	// match: (Leq64U (MOVDconst [1]) x)
	// result: (Neq64 (MOVDconst [0]) x)
	for {
		if v_0.Op != OpARM64MOVDconst || auxIntToInt64(v_0.AuxInt) != 1 {
			break
		}
		x := v_1
		v.reset(OpNeq64)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, x)
		return true
	}
	// match: (Leq64U x y)
	// result: (LessEqualU (CMP x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64LessEqualU)
		v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpLeq8 lowers Leq8 to a signed less-or-equal over a
// 32-bit compare of the sign-extended operands.
func rewriteValueARM64_OpLeq8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq8 x y)
	// result: (LessEqual (CMPW (SignExt8to32 x) (SignExt8to32 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64LessEqual)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpLeq8U lowers unsigned 8-bit less-or-equal.
// Special cases: unsigned x <= 0 becomes x == 0, and unsigned 1 <= x becomes
// x != 0. The general case zero-extends both operands and uses an unsigned
// 32-bit compare.
func rewriteValueARM64_OpLeq8U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq8U x zero:(MOVDconst [0]))
	// result: (Eq8 x zero)
	for {
		x := v_0
		zero := v_1
		if zero.Op != OpARM64MOVDconst || auxIntToInt64(zero.AuxInt) != 0 {
			break
		}
		v.reset(OpEq8)
		v.AddArg2(x, zero)
		return true
	}
	// match: (Leq8U (MOVDconst [1]) x)
	// result: (Neq8 (MOVDconst [0]) x)
	for {
		if v_0.Op != OpARM64MOVDconst || auxIntToInt64(v_0.AuxInt) != 1 {
			break
		}
		x := v_1
		v.reset(OpNeq8)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, x)
		return true
	}
	// match: (Leq8U x y)
	// result: (LessEqualU (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64LessEqualU)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpLess16 lowers Less16 to a signed less-than over a
// 32-bit compare of the sign-extended operands.
func rewriteValueARM64_OpLess16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less16 x y)
	// result: (LessThan (CMPW (SignExt16to32 x) (SignExt16to32 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64LessThan)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpLess16U lowers unsigned 16-bit less-than.
// Special cases: unsigned 0 < x becomes x != 0, and unsigned x < 1 becomes
// x == 0. The general case zero-extends both operands and uses an unsigned
// 32-bit compare.
func rewriteValueARM64_OpLess16U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less16U zero:(MOVDconst [0]) x)
	// result: (Neq16 zero x)
	for {
		zero := v_0
		if zero.Op != OpARM64MOVDconst || auxIntToInt64(zero.AuxInt) != 0 {
			break
		}
		x := v_1
		v.reset(OpNeq16)
		v.AddArg2(zero, x)
		return true
	}
	// match: (Less16U x (MOVDconst [1]))
	// result: (Eq16 x (MOVDconst [0]))
	for {
		x := v_0
		if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 1 {
			break
		}
		v.reset(OpEq16)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Less16U x y)
	// result: (LessThanU (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64LessThanU)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpLess32 lowers Less32 to (LessThan (CMPW x y)).
func rewriteValueARM64_OpLess32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Less32 x y)
	// result: (LessThan (CMPW x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64LessThan)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpLess32F lowers Less32F to the floating-point less-than
// condition over a single-precision FCMPS.
func rewriteValueARM64_OpLess32F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Less32F x y)
	// result: (LessThanF (FCMPS x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64LessThanF)
		v0 := b.NewValue0(v.Pos, OpARM64FCMPS, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpLess32U lowers unsigned 32-bit less-than.
// Special cases: unsigned 0 < x becomes x != 0, and unsigned x < 1 becomes
// x == 0. The general case uses an unsigned 32-bit compare.
func rewriteValueARM64_OpLess32U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less32U zero:(MOVDconst [0]) x)
	// result: (Neq32 zero x)
	for {
		zero := v_0
		if zero.Op != OpARM64MOVDconst || auxIntToInt64(zero.AuxInt) != 0 {
			break
		}
		x := v_1
		v.reset(OpNeq32)
		v.AddArg2(zero, x)
		return true
	}
	// match: (Less32U x (MOVDconst [1]))
	// result: (Eq32 x (MOVDconst [0]))
	for {
		x := v_0
		if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 1 {
			break
		}
		v.reset(OpEq32)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Less32U x y)
	// result: (LessThanU (CMPW x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64LessThanU)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpLess64 lowers the generic signed 64-bit comparison
// Less64 to ARM64: (LessThan (CMP x y)). Unconditional; always fires.
func rewriteValueARM64_OpLess64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Less64 x y)
	// result: (LessThan (CMP x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64LessThan)
		v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpLess64F lowers the generic 64-bit float comparison
// Less64F to ARM64: (LessThanF (FCMPD x y)). Unconditional; always fires.
func rewriteValueARM64_OpLess64F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Less64F x y)
	// result: (LessThanF (FCMPD x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64LessThanF)
		v0 := b.NewValue0(v.Pos, OpARM64FCMPD, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpLess64U lowers the generic unsigned 64-bit comparison
// Less64U. Algebraic special cases first (0 < x => x != 0; x < 1 => x == 0),
// then the general (LessThanU (CMP x y)). Always returns true.
func rewriteValueARM64_OpLess64U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less64U zero:(MOVDconst [0]) x)
	// result: (Neq64 zero x)
	for {
		zero := v_0
		if zero.Op != OpARM64MOVDconst || auxIntToInt64(zero.AuxInt) != 0 {
			break
		}
		x := v_1
		v.reset(OpNeq64)
		v.AddArg2(zero, x)
		return true
	}
	// match: (Less64U x (MOVDconst [1]))
	// result: (Eq64 x (MOVDconst [0]))
	for {
		x := v_0
		if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 1 {
			break
		}
		v.reset(OpEq64)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Less64U x y)
	// result: (LessThanU (CMP x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64LessThanU)
		v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpLess8 lowers the generic signed 8-bit comparison
// Less8. Both operands are sign-extended to 32 bits before the CMPW, since
// ARM64 has no 8-bit compare. Unconditional; always fires.
func rewriteValueARM64_OpLess8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less8 x y)
	// result: (LessThan (CMPW (SignExt8to32 x) (SignExt8to32 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64LessThan)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpLess8U lowers the generic unsigned 8-bit comparison
// Less8U. Algebraic special cases first (0 < x => x != 0; x < 1 => x == 0),
// then the general form, zero-extending both operands to 32 bits for the
// CMPW. Always returns true.
func rewriteValueARM64_OpLess8U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less8U zero:(MOVDconst [0]) x)
	// result: (Neq8 zero x)
	for {
		zero := v_0
		if zero.Op != OpARM64MOVDconst || auxIntToInt64(zero.AuxInt) != 0 {
			break
		}
		x := v_1
		v.reset(OpNeq8)
		v.AddArg2(zero, x)
		return true
	}
	// match: (Less8U x (MOVDconst [1]))
	// result: (Eq8 x (MOVDconst [0]))
	for {
		x := v_0
		if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 1 {
			break
		}
		v.reset(OpEq8)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Less8U x y)
	// result: (LessThanU (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64LessThanU)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpLoad selects the concrete ARM64 load instruction for a
// generic Load based on the loaded type t: booleans and unsigned small ints
// get the unsigned (zero-extending) variants (MOVBUload/MOVHUload/MOVWUload),
// signed ints the sign-extending ones (MOVBload/MOVHload/MOVWload), 64-bit
// ints and pointers MOVDload, and floats FMOVSload/FMOVDload. Returns false
// if no case matches (the type is not loadable by a single instruction here).
func rewriteValueARM64_OpLoad(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Load <t> ptr mem)
	// cond: t.IsBoolean()
	// result: (MOVBUload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(t.IsBoolean()) {
			break
		}
		v.reset(OpARM64MOVBUload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is8BitInt(t) && isSigned(t))
	// result: (MOVBload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is8BitInt(t) && isSigned(t)) {
			break
		}
		v.reset(OpARM64MOVBload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is8BitInt(t) && !isSigned(t))
	// result: (MOVBUload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is8BitInt(t) && !isSigned(t)) {
			break
		}
		v.reset(OpARM64MOVBUload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is16BitInt(t) && isSigned(t))
	// result: (MOVHload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is16BitInt(t) && isSigned(t)) {
			break
		}
		v.reset(OpARM64MOVHload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is16BitInt(t) && !isSigned(t))
	// result: (MOVHUload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is16BitInt(t) && !isSigned(t)) {
			break
		}
		v.reset(OpARM64MOVHUload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is32BitInt(t) && isSigned(t))
	// result: (MOVWload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is32BitInt(t) && isSigned(t)) {
			break
		}
		v.reset(OpARM64MOVWload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is32BitInt(t) && !isSigned(t))
	// result: (MOVWUload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is32BitInt(t) && !isSigned(t)) {
			break
		}
		v.reset(OpARM64MOVWUload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is64BitInt(t) || isPtr(t))
	// result: (MOVDload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is64BitInt(t) || isPtr(t)) {
			break
		}
		v.reset(OpARM64MOVDload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: is32BitFloat(t)
	// result: (FMOVSload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is32BitFloat(t)) {
			break
		}
		v.reset(OpARM64FMOVSload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: is64BitFloat(t)
	// result: (FMOVDload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is64BitFloat(t)) {
			break
		}
		v.reset(OpARM64FMOVDload)
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpLocalAddr lowers LocalAddr to MOVDaddr, carrying the
// symbol aux through and dropping the memory argument (the address of a
// stack slot does not depend on memory state). Unconditional; always fires.
func rewriteValueARM64_OpLocalAddr(v *Value) bool {
	v_0 := v.Args[0]
	// match: (LocalAddr {sym} base _)
	// result: (MOVDaddr {sym} base)
	for {
		sym := auxToSym(v.Aux)
		base := v_0
		v.reset(OpARM64MOVDaddr)
		v.Aux = symToAux(sym)
		v.AddArg(base)
		return true
	}
}
// rewriteValueARM64_OpLsh16x16 lowers Lsh16x16 (16-bit value shifted left by
// a 16-bit amount). If the shift amount is provably in range
// (shiftIsBounded), a bare SLL suffices; otherwise a CSEL yields 0 when the
// zero-extended amount is >= 64, matching Go's shift semantics.
func rewriteValueARM64_OpLsh16x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh16x16 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SLL <t> x y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SLL)
		v.Type = t
		v.AddArg2(x, y)
		return true
	}
	// match: (Lsh16x16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (CSEL [OpARM64LessThanU] (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64CSEL)
		v.AuxInt = opToAuxInt(OpARM64LessThanU)
		v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpConst64, t)
		v1.AuxInt = int64ToAuxInt(0)
		v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v.AddArg3(v0, v1, v2)
		return true
	}
	return false
}
// rewriteValueARM64_OpLsh16x32 lowers Lsh16x32 (16-bit value, 32-bit shift
// amount): bounded shifts become a bare SLL, unbounded ones a CSEL that
// selects 0 when the zero-extended amount is >= 64.
func rewriteValueARM64_OpLsh16x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh16x32 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SLL <t> x y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SLL)
		v.Type = t
		v.AddArg2(x, y)
		return true
	}
	// match: (Lsh16x32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (CSEL [OpARM64LessThanU] (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64CSEL)
		v.AuxInt = opToAuxInt(OpARM64LessThanU)
		v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpConst64, t)
		v1.AuxInt = int64ToAuxInt(0)
		v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v.AddArg3(v0, v1, v2)
		return true
	}
	return false
}
// rewriteValueARM64_OpLsh16x64 lowers Lsh16x64 (16-bit value, 64-bit shift
// amount): bounded shifts become a bare SLL; unbounded ones a CSEL selecting
// 0 for amounts >= 64. No zero-extension is needed — y is already 64-bit.
func rewriteValueARM64_OpLsh16x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Lsh16x64 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SLL <t> x y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SLL)
		v.Type = t
		v.AddArg2(x, y)
		return true
	}
	// match: (Lsh16x64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (CSEL [OpARM64LessThanU] (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64CSEL)
		v.AuxInt = opToAuxInt(OpARM64LessThanU)
		v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpConst64, t)
		v1.AuxInt = int64ToAuxInt(0)
		v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v2.AuxInt = int64ToAuxInt(64)
		v2.AddArg(y)
		v.AddArg3(v0, v1, v2)
		return true
	}
	return false
}
// rewriteValueARM64_OpLsh16x8 lowers Lsh16x8 (16-bit value, 8-bit shift
// amount): bounded shifts become a bare SLL, unbounded ones a CSEL that
// selects 0 when the zero-extended amount is >= 64.
func rewriteValueARM64_OpLsh16x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh16x8 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SLL <t> x y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SLL)
		v.Type = t
		v.AddArg2(x, y)
		return true
	}
	// match: (Lsh16x8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (CSEL [OpARM64LessThanU] (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64CSEL)
		v.AuxInt = opToAuxInt(OpARM64LessThanU)
		v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpConst64, t)
		v1.AuxInt = int64ToAuxInt(0)
		v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v.AddArg3(v0, v1, v2)
		return true
	}
	return false
}
// rewriteValueARM64_OpLsh32x16 lowers Lsh32x16 (32-bit value, 16-bit shift
// amount): bounded shifts become a bare SLL, unbounded ones a CSEL that
// selects 0 when the zero-extended amount is >= 64.
func rewriteValueARM64_OpLsh32x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh32x16 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SLL <t> x y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SLL)
		v.Type = t
		v.AddArg2(x, y)
		return true
	}
	// match: (Lsh32x16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (CSEL [OpARM64LessThanU] (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64CSEL)
		v.AuxInt = opToAuxInt(OpARM64LessThanU)
		v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpConst64, t)
		v1.AuxInt = int64ToAuxInt(0)
		v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v.AddArg3(v0, v1, v2)
		return true
	}
	return false
}
// rewriteValueARM64_OpLsh32x32 lowers Lsh32x32 (32-bit value, 32-bit shift
// amount): bounded shifts become a bare SLL, unbounded ones a CSEL that
// selects 0 when the zero-extended amount is >= 64.
func rewriteValueARM64_OpLsh32x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh32x32 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SLL <t> x y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SLL)
		v.Type = t
		v.AddArg2(x, y)
		return true
	}
	// match: (Lsh32x32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (CSEL [OpARM64LessThanU] (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64CSEL)
		v.AuxInt = opToAuxInt(OpARM64LessThanU)
		v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpConst64, t)
		v1.AuxInt = int64ToAuxInt(0)
		v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v.AddArg3(v0, v1, v2)
		return true
	}
	return false
}
// rewriteValueARM64_OpLsh32x64 lowers Lsh32x64 (32-bit value, 64-bit shift
// amount): bounded shifts become a bare SLL; unbounded ones a CSEL selecting
// 0 for amounts >= 64. No zero-extension is needed — y is already 64-bit.
func rewriteValueARM64_OpLsh32x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Lsh32x64 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SLL <t> x y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SLL)
		v.Type = t
		v.AddArg2(x, y)
		return true
	}
	// match: (Lsh32x64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (CSEL [OpARM64LessThanU] (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64CSEL)
		v.AuxInt = opToAuxInt(OpARM64LessThanU)
		v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpConst64, t)
		v1.AuxInt = int64ToAuxInt(0)
		v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v2.AuxInt = int64ToAuxInt(64)
		v2.AddArg(y)
		v.AddArg3(v0, v1, v2)
		return true
	}
	return false
}
// rewriteValueARM64_OpLsh32x8 lowers Lsh32x8 (32-bit value, 8-bit shift
// amount): bounded shifts become a bare SLL, unbounded ones a CSEL that
// selects 0 when the zero-extended amount is >= 64.
func rewriteValueARM64_OpLsh32x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh32x8 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SLL <t> x y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SLL)
		v.Type = t
		v.AddArg2(x, y)
		return true
	}
	// match: (Lsh32x8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (CSEL [OpARM64LessThanU] (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64CSEL)
		v.AuxInt = opToAuxInt(OpARM64LessThanU)
		v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpConst64, t)
		v1.AuxInt = int64ToAuxInt(0)
		v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v.AddArg3(v0, v1, v2)
		return true
	}
	return false
}
// rewriteValueARM64_OpLsh64x16 lowers Lsh64x16 (64-bit value, 16-bit shift
// amount): bounded shifts become a bare SLL, unbounded ones a CSEL that
// selects 0 when the zero-extended amount is >= 64.
func rewriteValueARM64_OpLsh64x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh64x16 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SLL <t> x y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SLL)
		v.Type = t
		v.AddArg2(x, y)
		return true
	}
	// match: (Lsh64x16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (CSEL [OpARM64LessThanU] (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64CSEL)
		v.AuxInt = opToAuxInt(OpARM64LessThanU)
		v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpConst64, t)
		v1.AuxInt = int64ToAuxInt(0)
		v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v.AddArg3(v0, v1, v2)
		return true
	}
	return false
}
// rewriteValueARM64_OpLsh64x32 lowers Lsh64x32 (64-bit value, 32-bit shift
// amount): bounded shifts become a bare SLL, unbounded ones a CSEL that
// selects 0 when the zero-extended amount is >= 64.
func rewriteValueARM64_OpLsh64x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh64x32 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SLL <t> x y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SLL)
		v.Type = t
		v.AddArg2(x, y)
		return true
	}
	// match: (Lsh64x32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (CSEL [OpARM64LessThanU] (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64CSEL)
		v.AuxInt = opToAuxInt(OpARM64LessThanU)
		v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpConst64, t)
		v1.AuxInt = int64ToAuxInt(0)
		v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v.AddArg3(v0, v1, v2)
		return true
	}
	return false
}
// rewriteValueARM64_OpLsh64x64 lowers Lsh64x64 (64-bit value, 64-bit shift
// amount): bounded shifts become a bare SLL; unbounded ones a CSEL selecting
// 0 for amounts >= 64. No zero-extension is needed — y is already 64-bit.
func rewriteValueARM64_OpLsh64x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Lsh64x64 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SLL <t> x y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SLL)
		v.Type = t
		v.AddArg2(x, y)
		return true
	}
	// match: (Lsh64x64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (CSEL [OpARM64LessThanU] (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64CSEL)
		v.AuxInt = opToAuxInt(OpARM64LessThanU)
		v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpConst64, t)
		v1.AuxInt = int64ToAuxInt(0)
		v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v2.AuxInt = int64ToAuxInt(64)
		v2.AddArg(y)
		v.AddArg3(v0, v1, v2)
		return true
	}
	return false
}
// rewriteValueARM64_OpLsh64x8 lowers Lsh64x8 (64-bit value, 8-bit shift
// amount): bounded shifts become a bare SLL, unbounded ones a CSEL that
// selects 0 when the zero-extended amount is >= 64.
func rewriteValueARM64_OpLsh64x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh64x8 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SLL <t> x y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SLL)
		v.Type = t
		v.AddArg2(x, y)
		return true
	}
	// match: (Lsh64x8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (CSEL [OpARM64LessThanU] (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64CSEL)
		v.AuxInt = opToAuxInt(OpARM64LessThanU)
		v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpConst64, t)
		v1.AuxInt = int64ToAuxInt(0)
		v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v.AddArg3(v0, v1, v2)
		return true
	}
	return false
}
// rewriteValueARM64_OpLsh8x16 lowers Lsh8x16 (8-bit value, 16-bit shift
// amount): bounded shifts become a bare SLL, unbounded ones a CSEL that
// selects 0 when the zero-extended amount is >= 64.
func rewriteValueARM64_OpLsh8x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh8x16 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SLL <t> x y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SLL)
		v.Type = t
		v.AddArg2(x, y)
		return true
	}
	// match: (Lsh8x16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (CSEL [OpARM64LessThanU] (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64CSEL)
		v.AuxInt = opToAuxInt(OpARM64LessThanU)
		v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpConst64, t)
		v1.AuxInt = int64ToAuxInt(0)
		v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v.AddArg3(v0, v1, v2)
		return true
	}
	return false
}
// rewriteValueARM64_OpLsh8x32 lowers Lsh8x32 (8-bit value, 32-bit shift
// amount): bounded shifts become a bare SLL, unbounded ones a CSEL that
// selects 0 when the zero-extended amount is >= 64.
func rewriteValueARM64_OpLsh8x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh8x32 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SLL <t> x y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SLL)
		v.Type = t
		v.AddArg2(x, y)
		return true
	}
	// match: (Lsh8x32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (CSEL [OpARM64LessThanU] (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64CSEL)
		v.AuxInt = opToAuxInt(OpARM64LessThanU)
		v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpConst64, t)
		v1.AuxInt = int64ToAuxInt(0)
		v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v.AddArg3(v0, v1, v2)
		return true
	}
	return false
}
// rewriteValueARM64_OpLsh8x64 lowers Lsh8x64 (8-bit value, 64-bit shift
// amount): bounded shifts become a bare SLL; unbounded ones a CSEL selecting
// 0 for amounts >= 64. No zero-extension is needed — y is already 64-bit.
func rewriteValueARM64_OpLsh8x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Lsh8x64 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SLL <t> x y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SLL)
		v.Type = t
		v.AddArg2(x, y)
		return true
	}
	// match: (Lsh8x64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (CSEL [OpARM64LessThanU] (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64CSEL)
		v.AuxInt = opToAuxInt(OpARM64LessThanU)
		v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpConst64, t)
		v1.AuxInt = int64ToAuxInt(0)
		v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v2.AuxInt = int64ToAuxInt(64)
		v2.AddArg(y)
		v.AddArg3(v0, v1, v2)
		return true
	}
	return false
}
// rewriteValueARM64_OpLsh8x8 lowers Lsh8x8 (8-bit value, 8-bit shift
// amount): bounded shifts become a bare SLL, unbounded ones a CSEL that
// selects 0 when the zero-extended amount is >= 64.
func rewriteValueARM64_OpLsh8x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh8x8 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SLL <t> x y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SLL)
		v.Type = t
		v.AddArg2(x, y)
		return true
	}
	// match: (Lsh8x8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (CSEL [OpARM64LessThanU] (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64CSEL)
		v.AuxInt = opToAuxInt(OpARM64LessThanU)
		v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpConst64, t)
		v1.AuxInt = int64ToAuxInt(0)
		v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v.AddArg3(v0, v1, v2)
		return true
	}
	return false
}
// rewriteValueARM64_OpMod16 lowers signed 16-bit modulo: both operands are
// sign-extended to 32 bits and the 32-bit MODW is used (ARM64 has no 16-bit
// divide/mod). Unconditional; always fires.
func rewriteValueARM64_OpMod16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod16 x y)
	// result: (MODW (SignExt16to32 x) (SignExt16to32 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64MODW)
		v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueARM64_OpMod16u lowers the generic unsigned 16-bit modulus to
// the 32-bit UMODW instruction, zero-extending both operands to 32 bits
// first. Always rewrites, so it always returns true.
func rewriteValueARM64_OpMod16u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod16u x y)
	// result: (UMODW (ZeroExt16to32 x) (ZeroExt16to32 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64UMODW)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueARM64_OpMod32 lowers the generic signed 32-bit modulus
// directly to MODW; the operands are already 32 bits wide, so no extension
// is needed. Always rewrites, so it always returns true.
func rewriteValueARM64_OpMod32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Mod32 x y)
	// result: (MODW x y)
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64MODW)
		v.AddArg2(x, y)
		return true
	}
}
// rewriteValueARM64_OpMod64 lowers the generic signed 64-bit modulus
// directly to the MOD instruction. Always rewrites, so it always returns
// true.
func rewriteValueARM64_OpMod64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Mod64 x y)
	// result: (MOD x y)
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64MOD)
		v.AddArg2(x, y)
		return true
	}
}
// rewriteValueARM64_OpMod8 lowers the generic signed 8-bit modulus to the
// 32-bit MODW instruction, sign-extending both operands to 32 bits first.
// Always rewrites, so it always returns true.
func rewriteValueARM64_OpMod8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod8 x y)
	// result: (MODW (SignExt8to32 x) (SignExt8to32 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64MODW)
		v0 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueARM64_OpMod8u lowers the generic unsigned 8-bit modulus to the
// 32-bit UMODW instruction, zero-extending both operands to 32 bits first.
// Always rewrites, so it always returns true.
func rewriteValueARM64_OpMod8u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod8u x y)
	// result: (UMODW (ZeroExt8to32 x) (ZeroExt8to32 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64UMODW)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueARM64_OpMove lowers the generic Move op (a memory copy of
// AuxInt bytes from src to dst) to ARM64 loads and stores. The strategy,
// keyed on the size:
//   - 0..16 bytes: fully unrolled single loads/stores. Sizes that are not a
//     power of two use two overlapping accesses (e.g. size 7 copies 4 bytes
//     at offset 0 and 4 bytes at offset 3), which is safe for a copy since
//     both loads are issued from src before the stores commit the result.
//   - 16/32/48/64 bytes: unrolled LDP/STP pairs (16 bytes per pair).
//   - other sizes > 16: peel off the unaligned tail (8 or 16 bytes, again
//     overlapping) and recurse on the 16-byte-multiple remainder.
//   - large multiples of 16: DUFFCOPY when permitted, otherwise the generic
//     LoweredMove loop.
// Returns true if a rewrite was applied.
func rewriteValueARM64_OpMove(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	typ := &b.Func.Config.Types
	// match: (Move [0] _ _ mem)
	// result: mem
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		mem := v_2
		v.copyOf(mem)
		return true
	}
	// match: (Move [1] dst src mem)
	// result: (MOVBstore dst (MOVBUload src mem) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 1 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpARM64MOVBstore)
		v0 := b.NewValue0(v.Pos, OpARM64MOVBUload, typ.UInt8)
		v0.AddArg2(src, mem)
		v.AddArg3(dst, v0, mem)
		return true
	}
	// match: (Move [2] dst src mem)
	// result: (MOVHstore dst (MOVHUload src mem) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 2 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpARM64MOVHstore)
		v0 := b.NewValue0(v.Pos, OpARM64MOVHUload, typ.UInt16)
		v0.AddArg2(src, mem)
		v.AddArg3(dst, v0, mem)
		return true
	}
	// match: (Move [3] dst src mem)
	// result: (MOVBstore [2] dst (MOVBUload [2] src mem) (MOVHstore dst (MOVHUload src mem) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 3 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpARM64MOVBstore)
		v.AuxInt = int32ToAuxInt(2)
		v0 := b.NewValue0(v.Pos, OpARM64MOVBUload, typ.UInt8)
		v0.AuxInt = int32ToAuxInt(2)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpARM64MOVHstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpARM64MOVHUload, typ.UInt16)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [4] dst src mem)
	// result: (MOVWstore dst (MOVWUload src mem) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 4 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpARM64MOVWstore)
		v0 := b.NewValue0(v.Pos, OpARM64MOVWUload, typ.UInt32)
		v0.AddArg2(src, mem)
		v.AddArg3(dst, v0, mem)
		return true
	}
	// match: (Move [5] dst src mem)
	// result: (MOVBstore [4] dst (MOVBUload [4] src mem) (MOVWstore dst (MOVWUload src mem) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 5 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpARM64MOVBstore)
		v.AuxInt = int32ToAuxInt(4)
		v0 := b.NewValue0(v.Pos, OpARM64MOVBUload, typ.UInt8)
		v0.AuxInt = int32ToAuxInt(4)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpARM64MOVWUload, typ.UInt32)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [6] dst src mem)
	// result: (MOVHstore [4] dst (MOVHUload [4] src mem) (MOVWstore dst (MOVWUload src mem) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 6 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpARM64MOVHstore)
		v.AuxInt = int32ToAuxInt(4)
		v0 := b.NewValue0(v.Pos, OpARM64MOVHUload, typ.UInt16)
		v0.AuxInt = int32ToAuxInt(4)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpARM64MOVWUload, typ.UInt32)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [7] dst src mem)
	// result: (MOVWstore [3] dst (MOVWUload [3] src mem) (MOVWstore dst (MOVWUload src mem) mem))
	// Note: the two 4-byte copies deliberately overlap (offsets 0 and 3).
	for {
		if auxIntToInt64(v.AuxInt) != 7 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpARM64MOVWstore)
		v.AuxInt = int32ToAuxInt(3)
		v0 := b.NewValue0(v.Pos, OpARM64MOVWUload, typ.UInt32)
		v0.AuxInt = int32ToAuxInt(3)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpARM64MOVWUload, typ.UInt32)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [8] dst src mem)
	// result: (MOVDstore dst (MOVDload src mem) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 8 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpARM64MOVDstore)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64)
		v0.AddArg2(src, mem)
		v.AddArg3(dst, v0, mem)
		return true
	}
	// match: (Move [9] dst src mem)
	// result: (MOVBstore [8] dst (MOVBUload [8] src mem) (MOVDstore dst (MOVDload src mem) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 9 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpARM64MOVBstore)
		v.AuxInt = int32ToAuxInt(8)
		v0 := b.NewValue0(v.Pos, OpARM64MOVBUload, typ.UInt8)
		v0.AuxInt = int32ToAuxInt(8)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [10] dst src mem)
	// result: (MOVHstore [8] dst (MOVHUload [8] src mem) (MOVDstore dst (MOVDload src mem) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 10 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpARM64MOVHstore)
		v.AuxInt = int32ToAuxInt(8)
		v0 := b.NewValue0(v.Pos, OpARM64MOVHUload, typ.UInt16)
		v0.AuxInt = int32ToAuxInt(8)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [11] dst src mem)
	// result: (MOVDstore [3] dst (MOVDload [3] src mem) (MOVDstore dst (MOVDload src mem) mem))
	// Note: overlapping 8-byte copies (offsets 0 and 3).
	for {
		if auxIntToInt64(v.AuxInt) != 11 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpARM64MOVDstore)
		v.AuxInt = int32ToAuxInt(3)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64)
		v0.AuxInt = int32ToAuxInt(3)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [12] dst src mem)
	// result: (MOVWstore [8] dst (MOVWUload [8] src mem) (MOVDstore dst (MOVDload src mem) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 12 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpARM64MOVWstore)
		v.AuxInt = int32ToAuxInt(8)
		v0 := b.NewValue0(v.Pos, OpARM64MOVWUload, typ.UInt32)
		v0.AuxInt = int32ToAuxInt(8)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [13] dst src mem)
	// result: (MOVDstore [5] dst (MOVDload [5] src mem) (MOVDstore dst (MOVDload src mem) mem))
	// Note: overlapping 8-byte copies (offsets 0 and 5).
	for {
		if auxIntToInt64(v.AuxInt) != 13 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpARM64MOVDstore)
		v.AuxInt = int32ToAuxInt(5)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64)
		v0.AuxInt = int32ToAuxInt(5)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [14] dst src mem)
	// result: (MOVDstore [6] dst (MOVDload [6] src mem) (MOVDstore dst (MOVDload src mem) mem))
	// Note: overlapping 8-byte copies (offsets 0 and 6).
	for {
		if auxIntToInt64(v.AuxInt) != 14 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpARM64MOVDstore)
		v.AuxInt = int32ToAuxInt(6)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64)
		v0.AuxInt = int32ToAuxInt(6)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [15] dst src mem)
	// result: (MOVDstore [7] dst (MOVDload [7] src mem) (MOVDstore dst (MOVDload src mem) mem))
	// Note: overlapping 8-byte copies (offsets 0 and 7).
	for {
		if auxIntToInt64(v.AuxInt) != 15 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpARM64MOVDstore)
		v.AuxInt = int32ToAuxInt(7)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64)
		v0.AuxInt = int32ToAuxInt(7)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [16] dst src mem)
	// result: (STP dst (Select0 <typ.UInt64> (LDP src mem)) (Select1 <typ.UInt64> (LDP src mem)) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 16 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpARM64STP)
		v0 := b.NewValue0(v.Pos, OpSelect0, typ.UInt64)
		v1 := b.NewValue0(v.Pos, OpARM64LDP, types.NewTuple(typ.UInt64, typ.UInt64))
		v1.AddArg2(src, mem)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpSelect1, typ.UInt64)
		v2.AddArg(v1)
		v.AddArg4(dst, v0, v2, mem)
		return true
	}
	// match: (Move [32] dst src mem)
	// result: (STP [16] dst (Select0 <typ.UInt64> (LDP [16] src mem)) (Select1 <typ.UInt64> (LDP [16] src mem)) (STP dst (Select0 <typ.UInt64> (LDP src mem)) (Select1 <typ.UInt64> (LDP src mem)) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 32 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpARM64STP)
		v.AuxInt = int32ToAuxInt(16)
		v0 := b.NewValue0(v.Pos, OpSelect0, typ.UInt64)
		v1 := b.NewValue0(v.Pos, OpARM64LDP, types.NewTuple(typ.UInt64, typ.UInt64))
		v1.AuxInt = int32ToAuxInt(16)
		v1.AddArg2(src, mem)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpSelect1, typ.UInt64)
		v2.AddArg(v1)
		v3 := b.NewValue0(v.Pos, OpARM64STP, types.TypeMem)
		v4 := b.NewValue0(v.Pos, OpSelect0, typ.UInt64)
		v5 := b.NewValue0(v.Pos, OpARM64LDP, types.NewTuple(typ.UInt64, typ.UInt64))
		v5.AddArg2(src, mem)
		v4.AddArg(v5)
		v6 := b.NewValue0(v.Pos, OpSelect1, typ.UInt64)
		v6.AddArg(v5)
		v3.AddArg4(dst, v4, v6, mem)
		v.AddArg4(dst, v0, v2, v3)
		return true
	}
	// match: (Move [48] dst src mem)
	// result: (STP [32] dst (Select0 <typ.UInt64> (LDP [32] src mem)) (Select1 <typ.UInt64> (LDP [32] src mem)) (STP [16] dst (Select0 <typ.UInt64> (LDP [16] src mem)) (Select1 <typ.UInt64> (LDP [16] src mem)) (STP dst (Select0 <typ.UInt64> (LDP src mem)) (Select1 <typ.UInt64> (LDP src mem)) mem)))
	for {
		if auxIntToInt64(v.AuxInt) != 48 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpARM64STP)
		v.AuxInt = int32ToAuxInt(32)
		v0 := b.NewValue0(v.Pos, OpSelect0, typ.UInt64)
		v1 := b.NewValue0(v.Pos, OpARM64LDP, types.NewTuple(typ.UInt64, typ.UInt64))
		v1.AuxInt = int32ToAuxInt(32)
		v1.AddArg2(src, mem)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpSelect1, typ.UInt64)
		v2.AddArg(v1)
		v3 := b.NewValue0(v.Pos, OpARM64STP, types.TypeMem)
		v3.AuxInt = int32ToAuxInt(16)
		v4 := b.NewValue0(v.Pos, OpSelect0, typ.UInt64)
		v5 := b.NewValue0(v.Pos, OpARM64LDP, types.NewTuple(typ.UInt64, typ.UInt64))
		v5.AuxInt = int32ToAuxInt(16)
		v5.AddArg2(src, mem)
		v4.AddArg(v5)
		v6 := b.NewValue0(v.Pos, OpSelect1, typ.UInt64)
		v6.AddArg(v5)
		v7 := b.NewValue0(v.Pos, OpARM64STP, types.TypeMem)
		v8 := b.NewValue0(v.Pos, OpSelect0, typ.UInt64)
		v9 := b.NewValue0(v.Pos, OpARM64LDP, types.NewTuple(typ.UInt64, typ.UInt64))
		v9.AddArg2(src, mem)
		v8.AddArg(v9)
		v10 := b.NewValue0(v.Pos, OpSelect1, typ.UInt64)
		v10.AddArg(v9)
		v7.AddArg4(dst, v8, v10, mem)
		v3.AddArg4(dst, v4, v6, v7)
		v.AddArg4(dst, v0, v2, v3)
		return true
	}
	// match: (Move [64] dst src mem)
	// result: (STP [48] dst (Select0 <typ.UInt64> (LDP [48] src mem)) (Select1 <typ.UInt64> (LDP [48] src mem)) (STP [32] dst (Select0 <typ.UInt64> (LDP [32] src mem)) (Select1 <typ.UInt64> (LDP [32] src mem)) (STP [16] dst (Select0 <typ.UInt64> (LDP [16] src mem)) (Select1 <typ.UInt64> (LDP [16] src mem)) (STP dst (Select0 <typ.UInt64> (LDP src mem)) (Select1 <typ.UInt64> (LDP src mem)) mem))))
	for {
		if auxIntToInt64(v.AuxInt) != 64 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpARM64STP)
		v.AuxInt = int32ToAuxInt(48)
		v0 := b.NewValue0(v.Pos, OpSelect0, typ.UInt64)
		v1 := b.NewValue0(v.Pos, OpARM64LDP, types.NewTuple(typ.UInt64, typ.UInt64))
		v1.AuxInt = int32ToAuxInt(48)
		v1.AddArg2(src, mem)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpSelect1, typ.UInt64)
		v2.AddArg(v1)
		v3 := b.NewValue0(v.Pos, OpARM64STP, types.TypeMem)
		v3.AuxInt = int32ToAuxInt(32)
		v4 := b.NewValue0(v.Pos, OpSelect0, typ.UInt64)
		v5 := b.NewValue0(v.Pos, OpARM64LDP, types.NewTuple(typ.UInt64, typ.UInt64))
		v5.AuxInt = int32ToAuxInt(32)
		v5.AddArg2(src, mem)
		v4.AddArg(v5)
		v6 := b.NewValue0(v.Pos, OpSelect1, typ.UInt64)
		v6.AddArg(v5)
		v7 := b.NewValue0(v.Pos, OpARM64STP, types.TypeMem)
		v7.AuxInt = int32ToAuxInt(16)
		v8 := b.NewValue0(v.Pos, OpSelect0, typ.UInt64)
		v9 := b.NewValue0(v.Pos, OpARM64LDP, types.NewTuple(typ.UInt64, typ.UInt64))
		v9.AuxInt = int32ToAuxInt(16)
		v9.AddArg2(src, mem)
		v8.AddArg(v9)
		v10 := b.NewValue0(v.Pos, OpSelect1, typ.UInt64)
		v10.AddArg(v9)
		v11 := b.NewValue0(v.Pos, OpARM64STP, types.TypeMem)
		v12 := b.NewValue0(v.Pos, OpSelect0, typ.UInt64)
		v13 := b.NewValue0(v.Pos, OpARM64LDP, types.NewTuple(typ.UInt64, typ.UInt64))
		v13.AddArg2(src, mem)
		v12.AddArg(v13)
		v14 := b.NewValue0(v.Pos, OpSelect1, typ.UInt64)
		v14.AddArg(v13)
		v11.AddArg4(dst, v12, v14, mem)
		v7.AddArg4(dst, v8, v10, v11)
		v3.AddArg4(dst, v4, v6, v7)
		v.AddArg4(dst, v0, v2, v3)
		return true
	}
	// match: (Move [s] dst src mem)
	// cond: s%16 != 0 && s%16 <= 8 && s > 16
	// result: (Move [8] (OffPtr <dst.Type> dst [s-8]) (OffPtr <src.Type> src [s-8]) (Move [s-s%16] dst src mem))
	for {
		s := auxIntToInt64(v.AuxInt)
		dst := v_0
		src := v_1
		mem := v_2
		if !(s%16 != 0 && s%16 <= 8 && s > 16) {
			break
		}
		v.reset(OpMove)
		v.AuxInt = int64ToAuxInt(8)
		v0 := b.NewValue0(v.Pos, OpOffPtr, dst.Type)
		v0.AuxInt = int64ToAuxInt(s - 8)
		v0.AddArg(dst)
		v1 := b.NewValue0(v.Pos, OpOffPtr, src.Type)
		v1.AuxInt = int64ToAuxInt(s - 8)
		v1.AddArg(src)
		v2 := b.NewValue0(v.Pos, OpMove, types.TypeMem)
		v2.AuxInt = int64ToAuxInt(s - s%16)
		v2.AddArg3(dst, src, mem)
		v.AddArg3(v0, v1, v2)
		return true
	}
	// match: (Move [s] dst src mem)
	// cond: s%16 != 0 && s%16 > 8 && s > 16
	// result: (Move [16] (OffPtr <dst.Type> dst [s-16]) (OffPtr <src.Type> src [s-16]) (Move [s-s%16] dst src mem))
	for {
		s := auxIntToInt64(v.AuxInt)
		dst := v_0
		src := v_1
		mem := v_2
		if !(s%16 != 0 && s%16 > 8 && s > 16) {
			break
		}
		v.reset(OpMove)
		v.AuxInt = int64ToAuxInt(16)
		v0 := b.NewValue0(v.Pos, OpOffPtr, dst.Type)
		v0.AuxInt = int64ToAuxInt(s - 16)
		v0.AddArg(dst)
		v1 := b.NewValue0(v.Pos, OpOffPtr, src.Type)
		v1.AuxInt = int64ToAuxInt(s - 16)
		v1.AddArg(src)
		v2 := b.NewValue0(v.Pos, OpMove, types.TypeMem)
		v2.AuxInt = int64ToAuxInt(s - s%16)
		v2.AddArg3(dst, src, mem)
		v.AddArg3(v0, v1, v2)
		return true
	}
	// match: (Move [s] dst src mem)
	// cond: s > 64 && s <= 16*64 && s%16 == 0 && !config.noDuffDevice && logLargeCopy(v, s)
	// result: (DUFFCOPY [8 * (64 - s/16)] dst src mem)
	for {
		s := auxIntToInt64(v.AuxInt)
		dst := v_0
		src := v_1
		mem := v_2
		if !(s > 64 && s <= 16*64 && s%16 == 0 && !config.noDuffDevice && logLargeCopy(v, s)) {
			break
		}
		v.reset(OpARM64DUFFCOPY)
		// The aux value is the entry offset into the Duff's-device copy
		// routine: 8 code bytes per 16-byte chunk skipped.
		v.AuxInt = int64ToAuxInt(8 * (64 - s/16))
		v.AddArg3(dst, src, mem)
		return true
	}
	// match: (Move [s] dst src mem)
	// cond: s%16 == 0 && (s > 16*64 || config.noDuffDevice) && logLargeCopy(v, s)
	// result: (LoweredMove dst src (ADDconst <src.Type> src [s-16]) mem)
	for {
		s := auxIntToInt64(v.AuxInt)
		dst := v_0
		src := v_1
		mem := v_2
		if !(s%16 == 0 && (s > 16*64 || config.noDuffDevice) && logLargeCopy(v, s)) {
			break
		}
		v.reset(OpARM64LoweredMove)
		v0 := b.NewValue0(v.Pos, OpARM64ADDconst, src.Type)
		v0.AuxInt = int64ToAuxInt(s - 16)
		v0.AddArg(src)
		v.AddArg4(dst, src, v0, mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpNeq16 lowers the generic 16-bit inequality comparison
// to NotEqual over a 32-bit CMPW, zero-extending both operands first.
// Always rewrites, so it always returns true.
func rewriteValueARM64_OpNeq16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Neq16 x y)
	// result: (NotEqual (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64NotEqual)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpNeq32 lowers the generic 32-bit inequality comparison
// to NotEqual over CMPW. Always rewrites, so it always returns true.
func rewriteValueARM64_OpNeq32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Neq32 x y)
	// result: (NotEqual (CMPW x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64NotEqual)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpNeq32F lowers the generic 32-bit float inequality to
// NotEqual over the single-precision FCMPS comparison. Always rewrites, so
// it always returns true.
func rewriteValueARM64_OpNeq32F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Neq32F x y)
	// result: (NotEqual (FCMPS x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64NotEqual)
		v0 := b.NewValue0(v.Pos, OpARM64FCMPS, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpNeq64 lowers the generic 64-bit inequality comparison
// to NotEqual over CMP. Always rewrites, so it always returns true.
func rewriteValueARM64_OpNeq64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Neq64 x y)
	// result: (NotEqual (CMP x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64NotEqual)
		v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpNeq64F lowers the generic 64-bit float inequality to
// NotEqual over the double-precision FCMPD comparison. Always rewrites, so
// it always returns true.
func rewriteValueARM64_OpNeq64F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Neq64F x y)
	// result: (NotEqual (FCMPD x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64NotEqual)
		v0 := b.NewValue0(v.Pos, OpARM64FCMPD, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpNeq8 lowers the generic 8-bit inequality comparison
// to NotEqual over a 32-bit CMPW, zero-extending both operands first.
// Always rewrites, so it always returns true.
func rewriteValueARM64_OpNeq8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Neq8 x y)
	// result: (NotEqual (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64NotEqual)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpNeqPtr lowers the generic pointer inequality
// comparison to NotEqual over a 64-bit CMP (pointers are 64 bits on arm64).
// Always rewrites, so it always returns true.
func rewriteValueARM64_OpNeqPtr(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (NeqPtr x y)
	// result: (NotEqual (CMP x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64NotEqual)
		v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpNot lowers boolean negation to XOR with the constant
// 1, flipping the low bit of the 0/1 boolean value. Always rewrites, so it
// always returns true.
func rewriteValueARM64_OpNot(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Not x)
	// result: (XOR (MOVDconst [1]) x)
	for {
		x := v_0
		v.reset(OpARM64XOR)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v.AddArg2(v0, x)
		return true
	}
}
// rewriteValueARM64_OpOffPtr lowers pointer-offset arithmetic: an offset
// from the stack pointer that fits in 32 bits becomes a MOVDaddr
// (address-of with displacement); any other base becomes ADDconst. The
// second rule is unconditional, so the function always returns true.
func rewriteValueARM64_OpOffPtr(v *Value) bool {
	v_0 := v.Args[0]
	// match: (OffPtr [off] ptr:(SP))
	// cond: is32Bit(off)
	// result: (MOVDaddr [int32(off)] ptr)
	for {
		off := auxIntToInt64(v.AuxInt)
		ptr := v_0
		if ptr.Op != OpSP || !(is32Bit(off)) {
			break
		}
		v.reset(OpARM64MOVDaddr)
		v.AuxInt = int32ToAuxInt(int32(off))
		v.AddArg(ptr)
		return true
	}
	// match: (OffPtr [off] ptr)
	// result: (ADDconst [off] ptr)
	for {
		off := auxIntToInt64(v.AuxInt)
		ptr := v_0
		v.reset(OpARM64ADDconst)
		v.AuxInt = int64ToAuxInt(off)
		v.AddArg(ptr)
		return true
	}
}
// rewriteValueARM64_OpPanicBounds lowers the generic bounds-check panic to
// one of three ABI-specific variants (A/B/C), chosen by boundsABI(kind),
// which determines in which registers x and y are passed to the runtime.
// Returns false only if boundsABI yields none of 0, 1, or 2.
func rewriteValueARM64_OpPanicBounds(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (PanicBounds [kind] x y mem)
	// cond: boundsABI(kind) == 0
	// result: (LoweredPanicBoundsA [kind] x y mem)
	for {
		kind := auxIntToInt64(v.AuxInt)
		x := v_0
		y := v_1
		mem := v_2
		if !(boundsABI(kind) == 0) {
			break
		}
		v.reset(OpARM64LoweredPanicBoundsA)
		v.AuxInt = int64ToAuxInt(kind)
		v.AddArg3(x, y, mem)
		return true
	}
	// match: (PanicBounds [kind] x y mem)
	// cond: boundsABI(kind) == 1
	// result: (LoweredPanicBoundsB [kind] x y mem)
	for {
		kind := auxIntToInt64(v.AuxInt)
		x := v_0
		y := v_1
		mem := v_2
		if !(boundsABI(kind) == 1) {
			break
		}
		v.reset(OpARM64LoweredPanicBoundsB)
		v.AuxInt = int64ToAuxInt(kind)
		v.AddArg3(x, y, mem)
		return true
	}
	// match: (PanicBounds [kind] x y mem)
	// cond: boundsABI(kind) == 2
	// result: (LoweredPanicBoundsC [kind] x y mem)
	for {
		kind := auxIntToInt64(v.AuxInt)
		x := v_0
		y := v_1
		mem := v_2
		if !(boundsABI(kind) == 2) {
			break
		}
		v.reset(OpARM64LoweredPanicBoundsC)
		v.AuxInt = int64ToAuxInt(kind)
		v.AddArg3(x, y, mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpPopCount16 lowers the generic 16-bit population count
// using SIMD: zero-extend to 64 bits, move to an FP/vector register
// (FMOVDgpfp), count bits per byte with VCNT, sum the byte counts with
// VUADDLV, and move the result back (FMOVDfpgp). Always rewrites, so it
// always returns true.
func rewriteValueARM64_OpPopCount16(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (PopCount16 <t> x)
	// result: (FMOVDfpgp <t> (VUADDLV <typ.Float64> (VCNT <typ.Float64> (FMOVDgpfp <typ.Float64> (ZeroExt16to64 x)))))
	for {
		t := v.Type
		x := v_0
		v.reset(OpARM64FMOVDfpgp)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpARM64VUADDLV, typ.Float64)
		v1 := b.NewValue0(v.Pos, OpARM64VCNT, typ.Float64)
		v2 := b.NewValue0(v.Pos, OpARM64FMOVDgpfp, typ.Float64)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(x)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpPopCount32 lowers the generic 32-bit population count
// using SIMD, the same way as PopCount16 but with a 32-to-64-bit zero
// extension: FMOVDgpfp, per-byte VCNT, VUADDLV sum, FMOVDfpgp back. Always
// rewrites, so it always returns true.
func rewriteValueARM64_OpPopCount32(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (PopCount32 <t> x)
	// result: (FMOVDfpgp <t> (VUADDLV <typ.Float64> (VCNT <typ.Float64> (FMOVDgpfp <typ.Float64> (ZeroExt32to64 x)))))
	for {
		t := v.Type
		x := v_0
		v.reset(OpARM64FMOVDfpgp)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpARM64VUADDLV, typ.Float64)
		v1 := b.NewValue0(v.Pos, OpARM64VCNT, typ.Float64)
		v2 := b.NewValue0(v.Pos, OpARM64FMOVDgpfp, typ.Float64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(x)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpPopCount64 lowers the generic 64-bit population count
// using SIMD; no extension is needed since the operand is already 64 bits:
// FMOVDgpfp, per-byte VCNT, VUADDLV sum, FMOVDfpgp back. Always rewrites,
// so it always returns true.
func rewriteValueARM64_OpPopCount64(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (PopCount64 <t> x)
	// result: (FMOVDfpgp <t> (VUADDLV <typ.Float64> (VCNT <typ.Float64> (FMOVDgpfp <typ.Float64> x))))
	for {
		t := v.Type
		x := v_0
		v.reset(OpARM64FMOVDfpgp)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpARM64VUADDLV, typ.Float64)
		v1 := b.NewValue0(v.Pos, OpARM64VCNT, typ.Float64)
		v2 := b.NewValue0(v.Pos, OpARM64FMOVDgpfp, typ.Float64)
		v2.AddArg(x)
		v1.AddArg(v2)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpPrefetchCache lowers the generic cache-prefetch op to
// the PRFM instruction with aux 0 (the "keep in cache" variant; contrast
// PrefetchCacheStreamed which uses aux 1). Always rewrites, so it always
// returns true.
func rewriteValueARM64_OpPrefetchCache(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (PrefetchCache addr mem)
	// result: (PRFM [0] addr mem)
	for {
		addr := v_0
		mem := v_1
		v.reset(OpARM64PRFM)
		v.AuxInt = int64ToAuxInt(0)
		v.AddArg2(addr, mem)
		return true
	}
}
// rewriteValueARM64_OpPrefetchCacheStreamed lowers PrefetchCacheStreamed to
// PRFM with AuxInt 1 (the streaming/non-temporal prefetch variant, as opposed
// to AuxInt 0 used by PrefetchCache). Generated from _gen/ARM64.rules; always fires.
func rewriteValueARM64_OpPrefetchCacheStreamed(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (PrefetchCacheStreamed addr mem)
	// result: (PRFM [1] addr mem)
	for {
		addr := v_0
		mem := v_1
		v.reset(OpARM64PRFM)
		v.AuxInt = int64ToAuxInt(1)
		v.AddArg2(addr, mem)
		return true
	}
}
// rewriteValueARM64_OpPubBarrier lowers PubBarrier (publication memory barrier)
// to a DMB instruction; 0xe is the barrier-option encoding carried in AuxInt
// (presumably DMB ST — confirm against the ARM64 assembler's DMB option table).
// Generated from _gen/ARM64.rules; always fires.
func rewriteValueARM64_OpPubBarrier(v *Value) bool {
	v_0 := v.Args[0]
	// match: (PubBarrier mem)
	// result: (DMB [0xe] mem)
	for {
		mem := v_0
		v.reset(OpARM64DMB)
		v.AuxInt = int64ToAuxInt(0xe)
		v.AddArg(mem)
		return true
	}
}
// rewriteValueARM64_OpRotateLeft16 lowers RotateLeft16. A constant rotate count
// becomes the shift-pair Or16(Lsh, Rsh) with the count masked to 4 bits; a
// variable count duplicates the 16-bit value into both halves of a 32-bit word
// (ORshiftLL by 16 of the zero-extended value with itself) and uses RORW with
// the negated count. Generated from _gen/ARM64.rules; always fires.
func rewriteValueARM64_OpRotateLeft16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (RotateLeft16 <t> x (MOVDconst [c]))
	// result: (Or16 (Lsh16x64 <t> x (MOVDconst [c&15])) (Rsh16Ux64 <t> x (MOVDconst [-c&15])))
	for {
		t := v.Type
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpOr16)
		v0 := b.NewValue0(v.Pos, OpLsh16x64, t)
		v1 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v1.AuxInt = int64ToAuxInt(c & 15)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpRsh16Ux64, t)
		v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(-c & 15)
		v2.AddArg2(x, v3)
		v.AddArg2(v0, v2)
		return true
	}
	// match: (RotateLeft16 <t> x y)
	// result: (RORW <t> (ORshiftLL <typ.UInt32> (ZeroExt16to32 x) (ZeroExt16to32 x) [16]) (NEG <typ.Int64> y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpARM64RORW)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpARM64ORshiftLL, typ.UInt32)
		v0.AuxInt = int64ToAuxInt(16)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v1.AddArg(x)
		v0.AddArg2(v1, v1)
		v2 := b.NewValue0(v.Pos, OpARM64NEG, typ.Int64)
		v2.AddArg(y)
		v.AddArg2(v0, v2)
		return true
	}
}
// rewriteValueARM64_OpRotateLeft32 lowers RotateLeft32 to a rotate-right
// (RORW) by the negated count — ARM64 has no rotate-left instruction.
// Generated from _gen/ARM64.rules; always fires.
func rewriteValueARM64_OpRotateLeft32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (RotateLeft32 x y)
	// result: (RORW x (NEG <y.Type> y))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64RORW)
		v0 := b.NewValue0(v.Pos, OpARM64NEG, y.Type)
		v0.AddArg(y)
		v.AddArg2(x, v0)
		return true
	}
}
// rewriteValueARM64_OpRotateLeft64 lowers RotateLeft64 to the 64-bit
// rotate-right (ROR) by the negated count, mirroring the 32-bit case.
// Generated from _gen/ARM64.rules; always fires.
func rewriteValueARM64_OpRotateLeft64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (RotateLeft64 x y)
	// result: (ROR x (NEG <y.Type> y))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64ROR)
		v0 := b.NewValue0(v.Pos, OpARM64NEG, y.Type)
		v0.AddArg(y)
		v.AddArg2(x, v0)
		return true
	}
}
// rewriteValueARM64_OpRotateLeft8 lowers RotateLeft8. A constant count becomes
// the shift pair Or8(Lsh, Rsh) with the count masked to 3 bits; a variable count
// becomes OR of a left shift by (y&7) and a right shift of the zero-extended
// value by ((-y)&7), since there is no 8-bit rotate instruction.
// Generated from _gen/ARM64.rules; always fires.
func rewriteValueARM64_OpRotateLeft8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (RotateLeft8 <t> x (MOVDconst [c]))
	// result: (Or8 (Lsh8x64 <t> x (MOVDconst [c&7])) (Rsh8Ux64 <t> x (MOVDconst [-c&7])))
	for {
		t := v.Type
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpOr8)
		v0 := b.NewValue0(v.Pos, OpLsh8x64, t)
		v1 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v1.AuxInt = int64ToAuxInt(c & 7)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpRsh8Ux64, t)
		v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(-c & 7)
		v2.AddArg2(x, v3)
		v.AddArg2(v0, v2)
		return true
	}
	// match: (RotateLeft8 <t> x y)
	// result: (OR <t> (SLL <t> x (ANDconst <typ.Int64> [7] y)) (SRL <t> (ZeroExt8to64 x) (ANDconst <typ.Int64> [7] (NEG <typ.Int64> y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpARM64OR)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
		v1 := b.NewValue0(v.Pos, OpARM64ANDconst, typ.Int64)
		v1.AuxInt = int64ToAuxInt(7)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpARM64SRL, t)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(x)
		v4 := b.NewValue0(v.Pos, OpARM64ANDconst, typ.Int64)
		v4.AuxInt = int64ToAuxInt(7)
		v5 := b.NewValue0(v.Pos, OpARM64NEG, typ.Int64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v2.AddArg2(v3, v4)
		v.AddArg2(v0, v2)
		return true
	}
}
// rewriteValueARM64_OpRsh16Ux16 lowers Rsh16Ux16 (unsigned 16-bit >> 16-bit).
// If the shift is statically known in-bounds, a bare SRL of the zero-extended
// value suffices; otherwise a CSEL selects 0 when the (zero-extended) shift
// amount is >= 64, matching Go's shift semantics. Generated from _gen/ARM64.rules.
func rewriteValueARM64_OpRsh16Ux16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16Ux16 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SRL <t> (ZeroExt16to64 x) y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRL)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh16Ux16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (CSEL [OpARM64LessThanU] (SRL <t> (ZeroExt16to64 x) y) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64CSEL)
		v.AuxInt = opToAuxInt(OpARM64LessThanU)
		v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpConst64, t)
		v2.AuxInt = int64ToAuxInt(0)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v.AddArg3(v0, v2, v3)
		return true
	}
	return false
}
// rewriteValueARM64_OpRsh16Ux32 lowers Rsh16Ux32: bare SRL when the shift is
// known in-bounds, otherwise CSEL to 0 when the 32-bit shift amount
// (zero-extended to 64) is >= 64. Generated from _gen/ARM64.rules.
func rewriteValueARM64_OpRsh16Ux32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16Ux32 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SRL <t> (ZeroExt16to64 x) y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRL)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh16Ux32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (CSEL [OpARM64LessThanU] (SRL <t> (ZeroExt16to64 x) y) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64CSEL)
		v.AuxInt = opToAuxInt(OpARM64LessThanU)
		v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpConst64, t)
		v2.AuxInt = int64ToAuxInt(0)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v.AddArg3(v0, v2, v3)
		return true
	}
	return false
}
// rewriteValueARM64_OpRsh16Ux64 lowers Rsh16Ux64: bare SRL when the shift is
// known in-bounds, otherwise CSEL to 0 when y >= 64 (the 64-bit shift amount
// needs no extension before the compare). Generated from _gen/ARM64.rules.
func rewriteValueARM64_OpRsh16Ux64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16Ux64 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SRL <t> (ZeroExt16to64 x) y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRL)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh16Ux64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (CSEL [OpARM64LessThanU] (SRL <t> (ZeroExt16to64 x) y) (Const64 <t> [0]) (CMPconst [64] y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64CSEL)
		v.AuxInt = opToAuxInt(OpARM64LessThanU)
		v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpConst64, t)
		v2.AuxInt = int64ToAuxInt(0)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = int64ToAuxInt(64)
		v3.AddArg(y)
		v.AddArg3(v0, v2, v3)
		return true
	}
	return false
}
// rewriteValueARM64_OpRsh16Ux8 lowers Rsh16Ux8: bare SRL when the shift is
// known in-bounds, otherwise CSEL to 0 when the 8-bit shift amount
// (zero-extended to 64) is >= 64. Generated from _gen/ARM64.rules.
func rewriteValueARM64_OpRsh16Ux8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16Ux8 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SRL <t> (ZeroExt16to64 x) y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRL)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh16Ux8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (CSEL [OpARM64LessThanU] (SRL <t> (ZeroExt16to64 x) y) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64CSEL)
		v.AuxInt = opToAuxInt(OpARM64LessThanU)
		v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpConst64, t)
		v2.AuxInt = int64ToAuxInt(0)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v.AddArg3(v0, v2, v3)
		return true
	}
	return false
}
// rewriteValueARM64_OpRsh16x16 lowers Rsh16x16 (signed 16-bit >> 16-bit).
// In-bounds shifts use a bare SRA of the sign-extended value. Otherwise the
// shift amount itself is clamped via CSEL to 63 when y >= 64, so an oversized
// shift yields all sign bits rather than 0. Generated from _gen/ARM64.rules.
func rewriteValueARM64_OpRsh16x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16x16 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SRA <t> (SignExt16to64 x) y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh16x16 x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA (SignExt16to64 x) (CSEL [OpARM64LessThanU] <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y))))
	for {
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type)
		v1.AuxInt = opToAuxInt(OpARM64LessThanU)
		v2 := b.NewValue0(v.Pos, OpConst64, y.Type)
		v2.AuxInt = int64ToAuxInt(63)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v1.AddArg3(y, v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}
// rewriteValueARM64_OpRsh16x32 lowers Rsh16x32: bare SRA of the sign-extended
// value when in-bounds; otherwise the shift amount is clamped to 63 via CSEL
// when the zero-extended 32-bit y is >= 64. Generated from _gen/ARM64.rules.
func rewriteValueARM64_OpRsh16x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16x32 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SRA <t> (SignExt16to64 x) y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh16x32 x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA (SignExt16to64 x) (CSEL [OpARM64LessThanU] <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y))))
	for {
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type)
		v1.AuxInt = opToAuxInt(OpARM64LessThanU)
		v2 := b.NewValue0(v.Pos, OpConst64, y.Type)
		v2.AuxInt = int64ToAuxInt(63)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v1.AddArg3(y, v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}
// rewriteValueARM64_OpRsh16x64 lowers Rsh16x64: bare SRA of the sign-extended
// value when in-bounds; otherwise the 64-bit shift amount is clamped to 63 via
// CSEL when y >= 64 (no extension needed). Generated from _gen/ARM64.rules.
func rewriteValueARM64_OpRsh16x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16x64 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SRA <t> (SignExt16to64 x) y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh16x64 x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA (SignExt16to64 x) (CSEL [OpARM64LessThanU] <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] y)))
	for {
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type)
		v1.AuxInt = opToAuxInt(OpARM64LessThanU)
		v2 := b.NewValue0(v.Pos, OpConst64, y.Type)
		v2.AuxInt = int64ToAuxInt(63)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = int64ToAuxInt(64)
		v3.AddArg(y)
		v1.AddArg3(y, v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}
// rewriteValueARM64_OpRsh16x8 lowers Rsh16x8: bare SRA of the sign-extended
// value when in-bounds; otherwise the shift amount is clamped to 63 via CSEL
// when the zero-extended 8-bit y is >= 64. Generated from _gen/ARM64.rules.
func rewriteValueARM64_OpRsh16x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16x8 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SRA <t> (SignExt16to64 x) y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh16x8 x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA (SignExt16to64 x) (CSEL [OpARM64LessThanU] <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt8to64 y))))
	for {
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type)
		v1.AuxInt = opToAuxInt(OpARM64LessThanU)
		v2 := b.NewValue0(v.Pos, OpConst64, y.Type)
		v2.AuxInt = int64ToAuxInt(63)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v1.AddArg3(y, v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}
// rewriteValueARM64_OpRsh32Ux16 lowers Rsh32Ux16: bare SRL of the zero-extended
// value when the shift is known in-bounds, otherwise CSEL to 0 when the
// zero-extended 16-bit shift amount is >= 64. Generated from _gen/ARM64.rules.
func rewriteValueARM64_OpRsh32Ux16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32Ux16 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SRL <t> (ZeroExt32to64 x) y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRL)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh32Ux16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (CSEL [OpARM64LessThanU] (SRL <t> (ZeroExt32to64 x) y) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64CSEL)
		v.AuxInt = opToAuxInt(OpARM64LessThanU)
		v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpConst64, t)
		v2.AuxInt = int64ToAuxInt(0)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v.AddArg3(v0, v2, v3)
		return true
	}
	return false
}
// rewriteValueARM64_OpRsh32Ux32 lowers Rsh32Ux32: bare SRL of the zero-extended
// value when the shift is known in-bounds, otherwise CSEL to 0 when the
// zero-extended 32-bit shift amount is >= 64. Generated from _gen/ARM64.rules.
func rewriteValueARM64_OpRsh32Ux32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32Ux32 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SRL <t> (ZeroExt32to64 x) y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRL)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh32Ux32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (CSEL [OpARM64LessThanU] (SRL <t> (ZeroExt32to64 x) y) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64CSEL)
		v.AuxInt = opToAuxInt(OpARM64LessThanU)
		v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpConst64, t)
		v2.AuxInt = int64ToAuxInt(0)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v.AddArg3(v0, v2, v3)
		return true
	}
	return false
}
// rewriteValueARM64_OpRsh32Ux64 lowers Rsh32Ux64: bare SRL of the zero-extended
// value when the shift is known in-bounds, otherwise CSEL to 0 when the 64-bit
// shift amount y is >= 64 (no extension needed). Generated from _gen/ARM64.rules.
func rewriteValueARM64_OpRsh32Ux64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32Ux64 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SRL <t> (ZeroExt32to64 x) y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRL)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh32Ux64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (CSEL [OpARM64LessThanU] (SRL <t> (ZeroExt32to64 x) y) (Const64 <t> [0]) (CMPconst [64] y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64CSEL)
		v.AuxInt = opToAuxInt(OpARM64LessThanU)
		v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpConst64, t)
		v2.AuxInt = int64ToAuxInt(0)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = int64ToAuxInt(64)
		v3.AddArg(y)
		v.AddArg3(v0, v2, v3)
		return true
	}
	return false
}
// rewriteValueARM64_OpRsh32Ux8 lowers Rsh32Ux8: bare SRL of the zero-extended
// value when the shift is known in-bounds, otherwise CSEL to 0 when the
// zero-extended 8-bit shift amount is >= 64. Generated from _gen/ARM64.rules.
func rewriteValueARM64_OpRsh32Ux8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32Ux8 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SRL <t> (ZeroExt32to64 x) y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRL)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh32Ux8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (CSEL [OpARM64LessThanU] (SRL <t> (ZeroExt32to64 x) y) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64CSEL)
		v.AuxInt = opToAuxInt(OpARM64LessThanU)
		v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpConst64, t)
		v2.AuxInt = int64ToAuxInt(0)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v.AddArg3(v0, v2, v3)
		return true
	}
	return false
}
// rewriteValueARM64_OpRsh32x16 lowers Rsh32x16: bare SRA of the sign-extended
// value when in-bounds; otherwise the shift amount is clamped to 63 via CSEL
// when the zero-extended 16-bit y is >= 64, so oversized shifts produce all
// sign bits. Generated from _gen/ARM64.rules.
func rewriteValueARM64_OpRsh32x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32x16 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SRA <t> (SignExt32to64 x) y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh32x16 x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA (SignExt32to64 x) (CSEL [OpARM64LessThanU] <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y))))
	for {
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type)
		v1.AuxInt = opToAuxInt(OpARM64LessThanU)
		v2 := b.NewValue0(v.Pos, OpConst64, y.Type)
		v2.AuxInt = int64ToAuxInt(63)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v1.AddArg3(y, v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}
// rewriteValueARM64_OpRsh32x32 lowers Rsh32x32: bare SRA of the sign-extended
// value when in-bounds; otherwise the shift amount is clamped to 63 via CSEL
// when the zero-extended 32-bit y is >= 64. Generated from _gen/ARM64.rules.
func rewriteValueARM64_OpRsh32x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32x32 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SRA <t> (SignExt32to64 x) y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh32x32 x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA (SignExt32to64 x) (CSEL [OpARM64LessThanU] <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y))))
	for {
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type)
		v1.AuxInt = opToAuxInt(OpARM64LessThanU)
		v2 := b.NewValue0(v.Pos, OpConst64, y.Type)
		v2.AuxInt = int64ToAuxInt(63)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v1.AddArg3(y, v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}
// rewriteValueARM64_OpRsh32x64 lowers Rsh32x64: bare SRA of the sign-extended
// value when in-bounds; otherwise the 64-bit shift amount is clamped to 63 via
// CSEL when y >= 64 (no extension needed). Generated from _gen/ARM64.rules.
func rewriteValueARM64_OpRsh32x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32x64 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SRA <t> (SignExt32to64 x) y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh32x64 x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA (SignExt32to64 x) (CSEL [OpARM64LessThanU] <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] y)))
	for {
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type)
		v1.AuxInt = opToAuxInt(OpARM64LessThanU)
		v2 := b.NewValue0(v.Pos, OpConst64, y.Type)
		v2.AuxInt = int64ToAuxInt(63)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = int64ToAuxInt(64)
		v3.AddArg(y)
		v1.AddArg3(y, v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}
// rewriteValueARM64_OpRsh32x8 lowers Rsh32x8: bare SRA of the sign-extended
// value when in-bounds; otherwise the shift amount is clamped to 63 via CSEL
// when the zero-extended 8-bit y is >= 64. Generated from _gen/ARM64.rules.
func rewriteValueARM64_OpRsh32x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32x8 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SRA <t> (SignExt32to64 x) y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh32x8 x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA (SignExt32to64 x) (CSEL [OpARM64LessThanU] <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt8to64 y))))
	for {
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type)
		v1.AuxInt = opToAuxInt(OpARM64LessThanU)
		v2 := b.NewValue0(v.Pos, OpConst64, y.Type)
		v2.AuxInt = int64ToAuxInt(63)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v1.AddArg3(y, v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}
// rewriteValueARM64_OpRsh64Ux16 lowers Rsh64Ux16: a 64-bit operand needs no
// extension, so an in-bounds shift is a bare SRL; otherwise CSEL selects 0 when
// the zero-extended 16-bit shift amount is >= 64. Generated from _gen/ARM64.rules.
func rewriteValueARM64_OpRsh64Ux16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64Ux16 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SRL <t> x y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRL)
		v.Type = t
		v.AddArg2(x, y)
		return true
	}
	// match: (Rsh64Ux16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (CSEL [OpARM64LessThanU] (SRL <t> x y) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64CSEL)
		v.AuxInt = opToAuxInt(OpARM64LessThanU)
		v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpConst64, t)
		v1.AuxInt = int64ToAuxInt(0)
		v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v.AddArg3(v0, v1, v2)
		return true
	}
	return false
}
// rewriteValueARM64_OpRsh64Ux32 lowers the generic Rsh64Ux32 op (64-bit
// unsigned right shift by a 32-bit amount) to ARM64. A bounded shift
// becomes a plain SRL; otherwise a CSEL yields 0 when the zero-extended
// shift amount is >= 64. Reports whether a rewrite was applied.
func rewriteValueARM64_OpRsh64Ux32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64Ux32 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SRL <t> x y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRL)
		v.Type = t
		v.AddArg2(x, y)
		return true
	}
	// match: (Rsh64Ux32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (CSEL [OpARM64LessThanU] (SRL <t> x y) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64CSEL)
		v.AuxInt = opToAuxInt(OpARM64LessThanU)
		v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpConst64, t)
		v1.AuxInt = int64ToAuxInt(0)
		v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v.AddArg3(v0, v1, v2)
		return true
	}
	return false
}
// rewriteValueARM64_OpRsh64Ux64 lowers the generic Rsh64Ux64 op (64-bit
// unsigned right shift by a 64-bit amount) to ARM64. A bounded shift
// becomes a plain SRL; otherwise a CSEL yields 0 when the shift amount is
// >= 64 (no extension of y is needed since it is already 64 bits).
// Reports whether a rewrite was applied.
func rewriteValueARM64_OpRsh64Ux64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Rsh64Ux64 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SRL <t> x y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRL)
		v.Type = t
		v.AddArg2(x, y)
		return true
	}
	// match: (Rsh64Ux64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (CSEL [OpARM64LessThanU] (SRL <t> x y) (Const64 <t> [0]) (CMPconst [64] y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64CSEL)
		v.AuxInt = opToAuxInt(OpARM64LessThanU)
		v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpConst64, t)
		v1.AuxInt = int64ToAuxInt(0)
		v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v2.AuxInt = int64ToAuxInt(64)
		v2.AddArg(y)
		v.AddArg3(v0, v1, v2)
		return true
	}
	return false
}
// rewriteValueARM64_OpRsh64Ux8 lowers the generic Rsh64Ux8 op (64-bit
// unsigned right shift by an 8-bit amount) to ARM64. A bounded shift
// becomes a plain SRL; otherwise a CSEL yields 0 when the zero-extended
// shift amount is >= 64. Reports whether a rewrite was applied.
func rewriteValueARM64_OpRsh64Ux8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64Ux8 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SRL <t> x y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRL)
		v.Type = t
		v.AddArg2(x, y)
		return true
	}
	// match: (Rsh64Ux8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (CSEL [OpARM64LessThanU] (SRL <t> x y) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64CSEL)
		v.AuxInt = opToAuxInt(OpARM64LessThanU)
		v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpConst64, t)
		v1.AuxInt = int64ToAuxInt(0)
		v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v.AddArg3(v0, v1, v2)
		return true
	}
	return false
}
// rewriteValueARM64_OpRsh64x16 lowers the generic Rsh64x16 op (64-bit
// signed right shift by a 16-bit amount) to ARM64. A bounded shift becomes
// a plain SRA; otherwise the shift amount is clamped to 63 via CSEL when
// the zero-extended amount is >= 64, so out-of-range shifts sign-fill as
// Go requires. Reports whether a rewrite was applied.
func rewriteValueARM64_OpRsh64x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64x16 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SRA <t> x y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRA)
		v.Type = t
		v.AddArg2(x, y)
		return true
	}
	// match: (Rsh64x16 x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA x (CSEL [OpARM64LessThanU] <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y))))
	for {
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRA)
		v0 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type)
		v0.AuxInt = opToAuxInt(OpARM64LessThanU)
		v1 := b.NewValue0(v.Pos, OpConst64, y.Type)
		v1.AuxInt = int64ToAuxInt(63)
		v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v0.AddArg3(y, v1, v2)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
// rewriteValueARM64_OpRsh64x32 lowers the generic Rsh64x32 op (64-bit
// signed right shift by a 32-bit amount) to ARM64. A bounded shift becomes
// a plain SRA; otherwise the shift amount is clamped to 63 via CSEL when
// the zero-extended amount is >= 64. Reports whether a rewrite was applied.
func rewriteValueARM64_OpRsh64x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64x32 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SRA <t> x y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRA)
		v.Type = t
		v.AddArg2(x, y)
		return true
	}
	// match: (Rsh64x32 x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA x (CSEL [OpARM64LessThanU] <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y))))
	for {
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRA)
		v0 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type)
		v0.AuxInt = opToAuxInt(OpARM64LessThanU)
		v1 := b.NewValue0(v.Pos, OpConst64, y.Type)
		v1.AuxInt = int64ToAuxInt(63)
		v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v0.AddArg3(y, v1, v2)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
// rewriteValueARM64_OpRsh64x64 lowers the generic Rsh64x64 op (64-bit
// signed right shift by a 64-bit amount) to ARM64. A bounded shift becomes
// a plain SRA; otherwise the shift amount is clamped to 63 via CSEL when
// the amount is >= 64 (y needs no extension since it is already 64 bits).
// Reports whether a rewrite was applied.
func rewriteValueARM64_OpRsh64x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Rsh64x64 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SRA <t> x y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRA)
		v.Type = t
		v.AddArg2(x, y)
		return true
	}
	// match: (Rsh64x64 x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA x (CSEL [OpARM64LessThanU] <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] y)))
	for {
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRA)
		v0 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type)
		v0.AuxInt = opToAuxInt(OpARM64LessThanU)
		v1 := b.NewValue0(v.Pos, OpConst64, y.Type)
		v1.AuxInt = int64ToAuxInt(63)
		v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v2.AuxInt = int64ToAuxInt(64)
		v2.AddArg(y)
		v0.AddArg3(y, v1, v2)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
// rewriteValueARM64_OpRsh64x8 lowers the generic Rsh64x8 op (64-bit signed
// right shift by an 8-bit amount) to ARM64. A bounded shift becomes a
// plain SRA; otherwise the shift amount is clamped to 63 via CSEL when the
// zero-extended amount is >= 64. Reports whether a rewrite was applied.
func rewriteValueARM64_OpRsh64x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64x8 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SRA <t> x y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRA)
		v.Type = t
		v.AddArg2(x, y)
		return true
	}
	// match: (Rsh64x8 x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA x (CSEL [OpARM64LessThanU] <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt8to64 y))))
	for {
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRA)
		v0 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type)
		v0.AuxInt = opToAuxInt(OpARM64LessThanU)
		v1 := b.NewValue0(v.Pos, OpConst64, y.Type)
		v1.AuxInt = int64ToAuxInt(63)
		v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v0.AddArg3(y, v1, v2)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
// rewriteValueARM64_OpRsh8Ux16 lowers the generic Rsh8Ux16 op (8-bit
// unsigned right shift by a 16-bit amount) to ARM64. The 8-bit operand is
// first zero-extended to 64 bits (ARM64 shifts operate on full registers).
// A bounded shift becomes a plain SRL; otherwise a CSEL yields 0 when the
// zero-extended shift amount is >= 64. Reports whether a rewrite was
// applied.
func rewriteValueARM64_OpRsh8Ux16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8Ux16 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SRL <t> (ZeroExt8to64 x) y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRL)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh8Ux16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (CSEL [OpARM64LessThanU] (SRL <t> (ZeroExt8to64 x) y) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64CSEL)
		v.AuxInt = opToAuxInt(OpARM64LessThanU)
		v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpConst64, t)
		v2.AuxInt = int64ToAuxInt(0)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v.AddArg3(v0, v2, v3)
		return true
	}
	return false
}
// rewriteValueARM64_OpRsh8Ux32 lowers the generic Rsh8Ux32 op (8-bit
// unsigned right shift by a 32-bit amount) to ARM64. The 8-bit operand is
// zero-extended to 64 bits first. A bounded shift becomes a plain SRL;
// otherwise a CSEL yields 0 when the zero-extended shift amount is >= 64.
// Reports whether a rewrite was applied.
func rewriteValueARM64_OpRsh8Ux32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8Ux32 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SRL <t> (ZeroExt8to64 x) y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRL)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh8Ux32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (CSEL [OpARM64LessThanU] (SRL <t> (ZeroExt8to64 x) y) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64CSEL)
		v.AuxInt = opToAuxInt(OpARM64LessThanU)
		v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpConst64, t)
		v2.AuxInt = int64ToAuxInt(0)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v.AddArg3(v0, v2, v3)
		return true
	}
	return false
}
// rewriteValueARM64_OpRsh8Ux64 lowers the generic Rsh8Ux64 op (8-bit
// unsigned right shift by a 64-bit amount) to ARM64. The 8-bit operand is
// zero-extended to 64 bits first. A bounded shift becomes a plain SRL;
// otherwise a CSEL yields 0 when the shift amount is >= 64 (y needs no
// extension since it is already 64 bits). Reports whether a rewrite was
// applied.
func rewriteValueARM64_OpRsh8Ux64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8Ux64 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SRL <t> (ZeroExt8to64 x) y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRL)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh8Ux64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (CSEL [OpARM64LessThanU] (SRL <t> (ZeroExt8to64 x) y) (Const64 <t> [0]) (CMPconst [64] y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64CSEL)
		v.AuxInt = opToAuxInt(OpARM64LessThanU)
		v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpConst64, t)
		v2.AuxInt = int64ToAuxInt(0)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = int64ToAuxInt(64)
		v3.AddArg(y)
		v.AddArg3(v0, v2, v3)
		return true
	}
	return false
}
// rewriteValueARM64_OpRsh8Ux8 lowers the generic Rsh8Ux8 op (8-bit
// unsigned right shift by an 8-bit amount) to ARM64. The 8-bit operand is
// zero-extended to 64 bits first. A bounded shift becomes a plain SRL;
// otherwise a CSEL yields 0 when the zero-extended shift amount is >= 64.
// Reports whether a rewrite was applied.
func rewriteValueARM64_OpRsh8Ux8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8Ux8 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SRL <t> (ZeroExt8to64 x) y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRL)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh8Ux8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (CSEL [OpARM64LessThanU] (SRL <t> (ZeroExt8to64 x) y) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64CSEL)
		v.AuxInt = opToAuxInt(OpARM64LessThanU)
		v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpConst64, t)
		v2.AuxInt = int64ToAuxInt(0)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v.AddArg3(v0, v2, v3)
		return true
	}
	return false
}
// rewriteValueARM64_OpRsh8x16 lowers the generic Rsh8x16 op (8-bit signed
// right shift by a 16-bit amount) to ARM64. The 8-bit operand is
// sign-extended to 64 bits first. A bounded shift becomes a plain SRA;
// otherwise the shift amount is clamped to 63 via CSEL when the
// zero-extended amount is >= 64, so out-of-range shifts sign-fill as Go
// requires. Reports whether a rewrite was applied.
func rewriteValueARM64_OpRsh8x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8x16 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SRA <t> (SignExt8to64 x) y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh8x16 x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA (SignExt8to64 x) (CSEL [OpARM64LessThanU] <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y))))
	for {
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type)
		v1.AuxInt = opToAuxInt(OpARM64LessThanU)
		v2 := b.NewValue0(v.Pos, OpConst64, y.Type)
		v2.AuxInt = int64ToAuxInt(63)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v1.AddArg3(y, v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}
// rewriteValueARM64_OpRsh8x32 lowers the generic Rsh8x32 op (8-bit signed
// right shift by a 32-bit amount) to ARM64. The 8-bit operand is
// sign-extended to 64 bits first. A bounded shift becomes a plain SRA;
// otherwise the shift amount is clamped to 63 via CSEL when the
// zero-extended amount is >= 64. Reports whether a rewrite was applied.
func rewriteValueARM64_OpRsh8x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8x32 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SRA <t> (SignExt8to64 x) y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh8x32 x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA (SignExt8to64 x) (CSEL [OpARM64LessThanU] <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y))))
	for {
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type)
		v1.AuxInt = opToAuxInt(OpARM64LessThanU)
		v2 := b.NewValue0(v.Pos, OpConst64, y.Type)
		v2.AuxInt = int64ToAuxInt(63)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v1.AddArg3(y, v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}
// rewriteValueARM64_OpRsh8x64 lowers the generic Rsh8x64 op (8-bit signed
// right shift by a 64-bit amount) to ARM64. The 8-bit operand is
// sign-extended to 64 bits first. A bounded shift becomes a plain SRA;
// otherwise the shift amount is clamped to 63 via CSEL when the amount is
// >= 64 (y needs no extension since it is already 64 bits). Reports
// whether a rewrite was applied.
func rewriteValueARM64_OpRsh8x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8x64 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SRA <t> (SignExt8to64 x) y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh8x64 x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA (SignExt8to64 x) (CSEL [OpARM64LessThanU] <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] y)))
	for {
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type)
		v1.AuxInt = opToAuxInt(OpARM64LessThanU)
		v2 := b.NewValue0(v.Pos, OpConst64, y.Type)
		v2.AuxInt = int64ToAuxInt(63)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = int64ToAuxInt(64)
		v3.AddArg(y)
		v1.AddArg3(y, v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}
// rewriteValueARM64_OpRsh8x8 lowers the generic Rsh8x8 op (8-bit signed
// right shift by an 8-bit amount) to ARM64. The 8-bit operand is
// sign-extended to 64 bits first. A bounded shift becomes a plain SRA;
// otherwise the shift amount is clamped to 63 via CSEL when the
// zero-extended amount is >= 64. Reports whether a rewrite was applied.
func rewriteValueARM64_OpRsh8x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8x8 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SRA <t> (SignExt8to64 x) y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh8x8 x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA (SignExt8to64 x) (CSEL [OpARM64LessThanU] <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt8to64 y))))
	for {
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type)
		v1.AuxInt = opToAuxInt(OpARM64LessThanU)
		v2 := b.NewValue0(v.Pos, OpConst64, y.Type)
		v2.AuxInt = int64ToAuxInt(63)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v1.AddArg3(y, v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}
// rewriteValueARM64_OpSelect0 lowers Select0 (the first result of a
// multi-result op) for ARM64: the high half of Mul64uhilo becomes UMULH,
// Add64carry/Sub64borrow sums become ADCSflags/SBCSflags chains whose
// incoming carry/borrow is materialized in the flags register, and the low
// result of Mul64uover becomes a plain MUL. Reports whether a rewrite was
// applied.
func rewriteValueARM64_OpSelect0(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Select0 (Mul64uhilo x y))
	// result: (UMULH x y)
	for {
		if v_0.Op != OpMul64uhilo {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		v.reset(OpARM64UMULH)
		v.AddArg2(x, y)
		return true
	}
	// match: (Select0 (Add64carry x y c))
	// result: (Select0 <typ.UInt64> (ADCSflags x y (Select1 <types.TypeFlags> (ADDSconstflags [-1] c))))
	for {
		if v_0.Op != OpAdd64carry {
			break
		}
		c := v_0.Args[2]
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpSelect0)
		v.Type = typ.UInt64
		v0 := b.NewValue0(v.Pos, OpARM64ADCSflags, types.NewTuple(typ.UInt64, types.TypeFlags))
		v1 := b.NewValue0(v.Pos, OpSelect1, types.TypeFlags)
		// ADDSconstflags [-1] c sets the carry flag iff c is nonzero,
		// converting the 0/1 carry value into a flags-register input for ADCS.
		v2 := b.NewValue0(v.Pos, OpARM64ADDSconstflags, types.NewTuple(typ.UInt64, types.TypeFlags))
		v2.AuxInt = int64ToAuxInt(-1)
		v2.AddArg(c)
		v1.AddArg(v2)
		v0.AddArg3(x, y, v1)
		v.AddArg(v0)
		return true
	}
	// match: (Select0 (Sub64borrow x y bo))
	// result: (Select0 <typ.UInt64> (SBCSflags x y (Select1 <types.TypeFlags> (NEGSflags bo))))
	for {
		if v_0.Op != OpSub64borrow {
			break
		}
		bo := v_0.Args[2]
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpSelect0)
		v.Type = typ.UInt64
		v0 := b.NewValue0(v.Pos, OpARM64SBCSflags, types.NewTuple(typ.UInt64, types.TypeFlags))
		v1 := b.NewValue0(v.Pos, OpSelect1, types.TypeFlags)
		// NEGSflags turns the 0/1 borrow value into the inverted-carry flag
		// convention that SBCS expects.
		v2 := b.NewValue0(v.Pos, OpARM64NEGSflags, types.NewTuple(typ.UInt64, types.TypeFlags))
		v2.AddArg(bo)
		v1.AddArg(v2)
		v0.AddArg3(x, y, v1)
		v.AddArg(v0)
		return true
	}
	// match: (Select0 (Mul64uover x y))
	// result: (MUL x y)
	for {
		if v_0.Op != OpMul64uover {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		v.reset(OpARM64MUL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueARM64_OpSelect1 lowers Select1 (the second result of a
// multi-result op) for ARM64: the low half of Mul64uhilo becomes MUL, the
// carry out of Add64carry is extracted with ADCzerocarry from the
// ADCSflags chain, the borrow out of Sub64borrow is recovered via
// NEG(NGCzerocarry ...), and the overflow bit of Mul64uover becomes a
// comparison of UMULH against zero. Reports whether a rewrite was applied.
func rewriteValueARM64_OpSelect1(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Select1 (Mul64uhilo x y))
	// result: (MUL x y)
	for {
		if v_0.Op != OpMul64uhilo {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		v.reset(OpARM64MUL)
		v.AddArg2(x, y)
		return true
	}
	// match: (Select1 (Add64carry x y c))
	// result: (ADCzerocarry <typ.UInt64> (Select1 <types.TypeFlags> (ADCSflags x y (Select1 <types.TypeFlags> (ADDSconstflags [-1] c)))))
	for {
		if v_0.Op != OpAdd64carry {
			break
		}
		c := v_0.Args[2]
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpARM64ADCzerocarry)
		v.Type = typ.UInt64
		v0 := b.NewValue0(v.Pos, OpSelect1, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpARM64ADCSflags, types.NewTuple(typ.UInt64, types.TypeFlags))
		v2 := b.NewValue0(v.Pos, OpSelect1, types.TypeFlags)
		// Same flags-producing chain as the Select0 rule, so CSE merges the
		// two into one ADCS when both results of Add64carry are used.
		v3 := b.NewValue0(v.Pos, OpARM64ADDSconstflags, types.NewTuple(typ.UInt64, types.TypeFlags))
		v3.AuxInt = int64ToAuxInt(-1)
		v3.AddArg(c)
		v2.AddArg(v3)
		v1.AddArg3(x, y, v2)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// match: (Select1 (Sub64borrow x y bo))
	// result: (NEG <typ.UInt64> (NGCzerocarry <typ.UInt64> (Select1 <types.TypeFlags> (SBCSflags x y (Select1 <types.TypeFlags> (NEGSflags bo))))))
	for {
		if v_0.Op != OpSub64borrow {
			break
		}
		bo := v_0.Args[2]
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpARM64NEG)
		v.Type = typ.UInt64
		v0 := b.NewValue0(v.Pos, OpARM64NGCzerocarry, typ.UInt64)
		v1 := b.NewValue0(v.Pos, OpSelect1, types.TypeFlags)
		v2 := b.NewValue0(v.Pos, OpARM64SBCSflags, types.NewTuple(typ.UInt64, types.TypeFlags))
		v3 := b.NewValue0(v.Pos, OpSelect1, types.TypeFlags)
		v4 := b.NewValue0(v.Pos, OpARM64NEGSflags, types.NewTuple(typ.UInt64, types.TypeFlags))
		v4.AddArg(bo)
		v3.AddArg(v4)
		v2.AddArg3(x, y, v3)
		v1.AddArg(v2)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// match: (Select1 (Mul64uover x y))
	// result: (NotEqual (CMPconst (UMULH <typ.UInt64> x y) [0]))
	for {
		if v_0.Op != OpMul64uover {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		v.reset(OpARM64NotEqual)
		// Overflow occurred iff the high 64 bits of the product are nonzero.
		v0 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpARM64UMULH, typ.UInt64)
		v1.AddArg2(x, y)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValueARM64_OpSelectN recognizes calls to runtime.memmove with a
// constant, inlinable size and replaces them with a generic Move op,
// eliminating the call. Two shapes are handled: the pre-regabi form where
// the three arguments arrive via a chain of MOVDstore spills, and the
// register-args form where dst/src/size are direct call arguments. The
// clobber conditions ensure the replaced values have no other uses.
// Reports whether a rewrite was applied.
func rewriteValueARM64_OpSelectN(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (SelectN [0] call:(CALLstatic {sym} s1:(MOVDstore _ (MOVDconst [sz]) s2:(MOVDstore _ src s3:(MOVDstore {t} _ dst mem)))))
	// cond: sz >= 0 && isSameCall(sym, "runtime.memmove") && s1.Uses == 1 && s2.Uses == 1 && s3.Uses == 1 && isInlinableMemmove(dst, src, sz, config) && clobber(s1, s2, s3, call)
	// result: (Move [sz] dst src mem)
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		call := v_0
		if call.Op != OpARM64CALLstatic || len(call.Args) != 1 {
			break
		}
		sym := auxToCall(call.Aux)
		s1 := call.Args[0]
		if s1.Op != OpARM64MOVDstore {
			break
		}
		_ = s1.Args[2]
		s1_1 := s1.Args[1]
		if s1_1.Op != OpARM64MOVDconst {
			break
		}
		sz := auxIntToInt64(s1_1.AuxInt)
		s2 := s1.Args[2]
		if s2.Op != OpARM64MOVDstore {
			break
		}
		_ = s2.Args[2]
		src := s2.Args[1]
		s3 := s2.Args[2]
		if s3.Op != OpARM64MOVDstore {
			break
		}
		mem := s3.Args[2]
		dst := s3.Args[1]
		if !(sz >= 0 && isSameCall(sym, "runtime.memmove") && s1.Uses == 1 && s2.Uses == 1 && s3.Uses == 1 && isInlinableMemmove(dst, src, sz, config) && clobber(s1, s2, s3, call)) {
			break
		}
		v.reset(OpMove)
		v.AuxInt = int64ToAuxInt(sz)
		v.AddArg3(dst, src, mem)
		return true
	}
	// match: (SelectN [0] call:(CALLstatic {sym} dst src (MOVDconst [sz]) mem))
	// cond: sz >= 0 && isSameCall(sym, "runtime.memmove") && call.Uses == 1 && isInlinableMemmove(dst, src, sz, config) && clobber(call)
	// result: (Move [sz] dst src mem)
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		call := v_0
		if call.Op != OpARM64CALLstatic || len(call.Args) != 4 {
			break
		}
		sym := auxToCall(call.Aux)
		mem := call.Args[3]
		dst := call.Args[0]
		src := call.Args[1]
		call_2 := call.Args[2]
		if call_2.Op != OpARM64MOVDconst {
			break
		}
		sz := auxIntToInt64(call_2.AuxInt)
		if !(sz >= 0 && isSameCall(sym, "runtime.memmove") && call.Uses == 1 && isInlinableMemmove(dst, src, sz, config) && clobber(call)) {
			break
		}
		v.reset(OpMove)
		v.AuxInt = int64ToAuxInt(sz)
		v.AddArg3(dst, src, mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpSlicemask lowers Slicemask (all-ones mask when the
// input is positive, zero when the input is zero) to SRAconst(NEG x, 63):
// negating makes the sign bit 1 for any nonzero x, and the arithmetic
// shift by 63 broadcasts that sign bit across the word. This rule always
// fires, so the function always returns true.
func rewriteValueARM64_OpSlicemask(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (Slicemask <t> x)
	// result: (SRAconst (NEG <t> x) [63])
	for {
		t := v.Type
		x := v_0
		v.reset(OpARM64SRAconst)
		v.AuxInt = int64ToAuxInt(63)
		v0 := b.NewValue0(v.Pos, OpARM64NEG, t)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpStore lowers the generic Store op to the
// size-appropriate ARM64 store instruction, dispatching on the stored
// type's size and (for 4- and 8-byte values) whether it is a float:
// MOVB/MOVH/MOVW/MOVDstore for 1/2/4/8-byte integers and FMOVS/FMOVDstore
// for 32-/64-bit floats. Reports whether a rewrite was applied.
func rewriteValueARM64_OpStore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 1
	// result: (MOVBstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 1) {
			break
		}
		v.reset(OpARM64MOVBstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 2
	// result: (MOVHstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 2) {
			break
		}
		v.reset(OpARM64MOVHstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 4 && !is32BitFloat(val.Type)
	// result: (MOVWstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 4 && !is32BitFloat(val.Type)) {
			break
		}
		v.reset(OpARM64MOVWstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 8 && !is64BitFloat(val.Type)
	// result: (MOVDstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 8 && !is64BitFloat(val.Type)) {
			break
		}
		v.reset(OpARM64MOVDstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 4 && is32BitFloat(val.Type)
	// result: (FMOVSstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 4 && is32BitFloat(val.Type)) {
			break
		}
		v.reset(OpARM64FMOVSstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 8 && is64BitFloat(val.Type)
	// result: (FMOVDstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 8 && is64BitFloat(val.Type)) {
			break
		}
		v.reset(OpARM64FMOVDstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpZero lowers the generic Zero op (zero AuxInt
// bytes at ptr) to ARM64 code. Rules are tried in order, so the
// specific-size rules must precede the general ones:
//   - sizes 0-16: direct MOVB/MOVH/MOVW/MOVD stores of a zero constant;
//     non-power-of-two sizes use two overlapping stores (see e.g. [7]);
//   - sizes 16/32/48/64: chains of STP (store-pair of two zero regs);
//   - other sizes > 16: peel the s%16 tail with an overlapping 8- or
//     16-byte Zero at the end, then re-emit Zero for the 16-aligned part;
//   - multiples of 16 up to 16*64: DUFFZERO (unless noDuffDevice);
//   - anything larger (or with Duff disabled): the LoweredZero loop.
// Reports whether a rewrite was applied.
func rewriteValueARM64_OpZero(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	typ := &b.Func.Config.Types
	// match: (Zero [0] _ mem)
	// result: mem
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		mem := v_1
		v.copyOf(mem)
		return true
	}
	// match: (Zero [1] ptr mem)
	// result: (MOVBstore ptr (MOVDconst [0]) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 1 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpARM64MOVBstore)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg3(ptr, v0, mem)
		return true
	}
	// match: (Zero [2] ptr mem)
	// result: (MOVHstore ptr (MOVDconst [0]) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 2 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpARM64MOVHstore)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg3(ptr, v0, mem)
		return true
	}
	// match: (Zero [4] ptr mem)
	// result: (MOVWstore ptr (MOVDconst [0]) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 4 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpARM64MOVWstore)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg3(ptr, v0, mem)
		return true
	}
	// match: (Zero [3] ptr mem)
	// result: (MOVBstore [2] ptr (MOVDconst [0]) (MOVHstore ptr (MOVDconst [0]) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 3 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpARM64MOVBstore)
		v.AuxInt = int32ToAuxInt(2)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVHstore, types.TypeMem)
		v1.AddArg3(ptr, v0, mem)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [5] ptr mem)
	// result: (MOVBstore [4] ptr (MOVDconst [0]) (MOVWstore ptr (MOVDconst [0]) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 5 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpARM64MOVBstore)
		v.AuxInt = int32ToAuxInt(4)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem)
		v1.AddArg3(ptr, v0, mem)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [6] ptr mem)
	// result: (MOVHstore [4] ptr (MOVDconst [0]) (MOVWstore ptr (MOVDconst [0]) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 6 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpARM64MOVHstore)
		v.AuxInt = int32ToAuxInt(4)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem)
		v1.AddArg3(ptr, v0, mem)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [7] ptr mem)
	// result: (MOVWstore [3] ptr (MOVDconst [0]) (MOVWstore ptr (MOVDconst [0]) mem))
	// The two 4-byte stores (offsets 0 and 3) overlap at byte 3; together
	// they cover bytes 0-6 without needing a byte store.
	for {
		if auxIntToInt64(v.AuxInt) != 7 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpARM64MOVWstore)
		v.AuxInt = int32ToAuxInt(3)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem)
		v1.AddArg3(ptr, v0, mem)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [8] ptr mem)
	// result: (MOVDstore ptr (MOVDconst [0]) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 8 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpARM64MOVDstore)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg3(ptr, v0, mem)
		return true
	}
	// match: (Zero [9] ptr mem)
	// result: (MOVBstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 9 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpARM64MOVBstore)
		v.AuxInt = int32ToAuxInt(8)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem)
		v1.AddArg3(ptr, v0, mem)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [10] ptr mem)
	// result: (MOVHstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 10 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpARM64MOVHstore)
		v.AuxInt = int32ToAuxInt(8)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem)
		v1.AddArg3(ptr, v0, mem)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [11] ptr mem)
	// result: (MOVDstore [3] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem))
	// Overlapping 8-byte stores at offsets 0 and 3 cover bytes 0-10.
	for {
		if auxIntToInt64(v.AuxInt) != 11 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpARM64MOVDstore)
		v.AuxInt = int32ToAuxInt(3)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem)
		v1.AddArg3(ptr, v0, mem)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [12] ptr mem)
	// result: (MOVWstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 12 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpARM64MOVWstore)
		v.AuxInt = int32ToAuxInt(8)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem)
		v1.AddArg3(ptr, v0, mem)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [13] ptr mem)
	// result: (MOVDstore [5] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 13 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpARM64MOVDstore)
		v.AuxInt = int32ToAuxInt(5)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem)
		v1.AddArg3(ptr, v0, mem)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [14] ptr mem)
	// result: (MOVDstore [6] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 14 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpARM64MOVDstore)
		v.AuxInt = int32ToAuxInt(6)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem)
		v1.AddArg3(ptr, v0, mem)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [15] ptr mem)
	// result: (MOVDstore [7] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 15 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpARM64MOVDstore)
		v.AuxInt = int32ToAuxInt(7)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem)
		v1.AddArg3(ptr, v0, mem)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// STP cases: each STP stores two zeroed 8-byte registers (16 bytes).
	// Note v0 (the zero constant) is shared across all stores in a chain.
	// match: (Zero [16] ptr mem)
	// result: (STP [0] ptr (MOVDconst [0]) (MOVDconst [0]) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 16 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpARM64STP)
		v.AuxInt = int32ToAuxInt(0)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg4(ptr, v0, v0, mem)
		return true
	}
	// match: (Zero [32] ptr mem)
	// result: (STP [16] ptr (MOVDconst [0]) (MOVDconst [0]) (STP [0] ptr (MOVDconst [0]) (MOVDconst [0]) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 32 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpARM64STP)
		v.AuxInt = int32ToAuxInt(16)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpARM64STP, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(0)
		v1.AddArg4(ptr, v0, v0, mem)
		v.AddArg4(ptr, v0, v0, v1)
		return true
	}
	// match: (Zero [48] ptr mem)
	// result: (STP [32] ptr (MOVDconst [0]) (MOVDconst [0]) (STP [16] ptr (MOVDconst [0]) (MOVDconst [0]) (STP [0] ptr (MOVDconst [0]) (MOVDconst [0]) mem)))
	for {
		if auxIntToInt64(v.AuxInt) != 48 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpARM64STP)
		v.AuxInt = int32ToAuxInt(32)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpARM64STP, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(16)
		v2 := b.NewValue0(v.Pos, OpARM64STP, types.TypeMem)
		v2.AuxInt = int32ToAuxInt(0)
		v2.AddArg4(ptr, v0, v0, mem)
		v1.AddArg4(ptr, v0, v0, v2)
		v.AddArg4(ptr, v0, v0, v1)
		return true
	}
	// match: (Zero [64] ptr mem)
	// result: (STP [48] ptr (MOVDconst [0]) (MOVDconst [0]) (STP [32] ptr (MOVDconst [0]) (MOVDconst [0]) (STP [16] ptr (MOVDconst [0]) (MOVDconst [0]) (STP [0] ptr (MOVDconst [0]) (MOVDconst [0]) mem))))
	for {
		if auxIntToInt64(v.AuxInt) != 64 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpARM64STP)
		v.AuxInt = int32ToAuxInt(48)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpARM64STP, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(32)
		v2 := b.NewValue0(v.Pos, OpARM64STP, types.TypeMem)
		v2.AuxInt = int32ToAuxInt(16)
		v3 := b.NewValue0(v.Pos, OpARM64STP, types.TypeMem)
		v3.AuxInt = int32ToAuxInt(0)
		v3.AddArg4(ptr, v0, v0, mem)
		v2.AddArg4(ptr, v0, v0, v3)
		v1.AddArg4(ptr, v0, v0, v2)
		v.AddArg4(ptr, v0, v0, v1)
		return true
	}
	// General cases: handle the s%16 tail with a final Zero that may
	// overlap the 16-byte-aligned body zeroed by the inner Zero [s-s%16].
	// match: (Zero [s] ptr mem)
	// cond: s%16 != 0 && s%16 <= 8 && s > 16
	// result: (Zero [8] (OffPtr <ptr.Type> ptr [s-8]) (Zero [s-s%16] ptr mem))
	for {
		s := auxIntToInt64(v.AuxInt)
		ptr := v_0
		mem := v_1
		if !(s%16 != 0 && s%16 <= 8 && s > 16) {
			break
		}
		v.reset(OpZero)
		v.AuxInt = int64ToAuxInt(8)
		v0 := b.NewValue0(v.Pos, OpOffPtr, ptr.Type)
		v0.AuxInt = int64ToAuxInt(s - 8)
		v0.AddArg(ptr)
		v1 := b.NewValue0(v.Pos, OpZero, types.TypeMem)
		v1.AuxInt = int64ToAuxInt(s - s%16)
		v1.AddArg2(ptr, mem)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Zero [s] ptr mem)
	// cond: s%16 != 0 && s%16 > 8 && s > 16
	// result: (Zero [16] (OffPtr <ptr.Type> ptr [s-16]) (Zero [s-s%16] ptr mem))
	for {
		s := auxIntToInt64(v.AuxInt)
		ptr := v_0
		mem := v_1
		if !(s%16 != 0 && s%16 > 8 && s > 16) {
			break
		}
		v.reset(OpZero)
		v.AuxInt = int64ToAuxInt(16)
		v0 := b.NewValue0(v.Pos, OpOffPtr, ptr.Type)
		v0.AuxInt = int64ToAuxInt(s - 16)
		v0.AddArg(ptr)
		v1 := b.NewValue0(v.Pos, OpZero, types.TypeMem)
		v1.AuxInt = int64ToAuxInt(s - s%16)
		v1.AddArg2(ptr, mem)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Zero [s] ptr mem)
	// cond: s%16 == 0 && s > 64 && s <= 16*64 && !config.noDuffDevice
	// result: (DUFFZERO [4 * (64 - s/16)] ptr mem)
	// The AuxInt is the byte offset to jump to inside the Duff's-device
	// routine: each 16-byte chunk of zeroing is 4 bytes of code.
	for {
		s := auxIntToInt64(v.AuxInt)
		ptr := v_0
		mem := v_1
		if !(s%16 == 0 && s > 64 && s <= 16*64 && !config.noDuffDevice) {
			break
		}
		v.reset(OpARM64DUFFZERO)
		v.AuxInt = int64ToAuxInt(4 * (64 - s/16))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Zero [s] ptr mem)
	// cond: s%16 == 0 && (s > 16*64 || config.noDuffDevice)
	// result: (LoweredZero ptr (ADDconst <ptr.Type> [s-16] ptr) mem)
	// LoweredZero takes the start pointer and a pointer to the last
	// 16-byte chunk as its loop bound.
	for {
		s := auxIntToInt64(v.AuxInt)
		ptr := v_0
		mem := v_1
		if !(s%16 == 0 && (s > 16*64 || config.noDuffDevice)) {
			break
		}
		v.reset(OpARM64LoweredZero)
		v0 := b.NewValue0(v.Pos, OpARM64ADDconst, ptr.Type)
		v0.AuxInt = int64ToAuxInt(s - 16)
		v0.AddArg(ptr)
		v.AddArg3(ptr, v0, mem)
		return true
	}
	return false
}
 28939  func rewriteBlockARM64(b *Block) bool {
 28940  	typ := &b.Func.Config.Types
 28941  	switch b.Kind {
 28942  	case BlockARM64EQ:
 28943  		// match: (EQ (CMPconst [0] z:(AND x y)) yes no)
 28944  		// cond: z.Uses == 1
 28945  		// result: (EQ (TST x y) yes no)
 28946  		for b.Controls[0].Op == OpARM64CMPconst {
 28947  			v_0 := b.Controls[0]
 28948  			if auxIntToInt64(v_0.AuxInt) != 0 {
 28949  				break
 28950  			}
 28951  			z := v_0.Args[0]
 28952  			if z.Op != OpARM64AND {
 28953  				break
 28954  			}
 28955  			_ = z.Args[1]
 28956  			z_0 := z.Args[0]
 28957  			z_1 := z.Args[1]
 28958  			for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 {
 28959  				x := z_0
 28960  				y := z_1
 28961  				if !(z.Uses == 1) {
 28962  					continue
 28963  				}
 28964  				v0 := b.NewValue0(v_0.Pos, OpARM64TST, types.TypeFlags)
 28965  				v0.AddArg2(x, y)
 28966  				b.resetWithControl(BlockARM64EQ, v0)
 28967  				return true
 28968  			}
 28969  			break
 28970  		}
 28971  		// match: (EQ (CMPconst [0] x:(ANDconst [c] y)) yes no)
 28972  		// cond: x.Uses == 1
 28973  		// result: (EQ (TSTconst [c] y) yes no)
 28974  		for b.Controls[0].Op == OpARM64CMPconst {
 28975  			v_0 := b.Controls[0]
 28976  			if auxIntToInt64(v_0.AuxInt) != 0 {
 28977  				break
 28978  			}
 28979  			x := v_0.Args[0]
 28980  			if x.Op != OpARM64ANDconst {
 28981  				break
 28982  			}
 28983  			c := auxIntToInt64(x.AuxInt)
 28984  			y := x.Args[0]
 28985  			if !(x.Uses == 1) {
 28986  				break
 28987  			}
 28988  			v0 := b.NewValue0(v_0.Pos, OpARM64TSTconst, types.TypeFlags)
 28989  			v0.AuxInt = int64ToAuxInt(c)
 28990  			v0.AddArg(y)
 28991  			b.resetWithControl(BlockARM64EQ, v0)
 28992  			return true
 28993  		}
 28994  		// match: (EQ (CMPWconst [0] z:(AND x y)) yes no)
 28995  		// cond: z.Uses == 1
 28996  		// result: (EQ (TSTW x y) yes no)
 28997  		for b.Controls[0].Op == OpARM64CMPWconst {
 28998  			v_0 := b.Controls[0]
 28999  			if auxIntToInt32(v_0.AuxInt) != 0 {
 29000  				break
 29001  			}
 29002  			z := v_0.Args[0]
 29003  			if z.Op != OpARM64AND {
 29004  				break
 29005  			}
 29006  			_ = z.Args[1]
 29007  			z_0 := z.Args[0]
 29008  			z_1 := z.Args[1]
 29009  			for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 {
 29010  				x := z_0
 29011  				y := z_1
 29012  				if !(z.Uses == 1) {
 29013  					continue
 29014  				}
 29015  				v0 := b.NewValue0(v_0.Pos, OpARM64TSTW, types.TypeFlags)
 29016  				v0.AddArg2(x, y)
 29017  				b.resetWithControl(BlockARM64EQ, v0)
 29018  				return true
 29019  			}
 29020  			break
 29021  		}
 29022  		// match: (EQ (CMPWconst [0] x:(ANDconst [c] y)) yes no)
 29023  		// cond: x.Uses == 1
 29024  		// result: (EQ (TSTWconst [int32(c)] y) yes no)
 29025  		for b.Controls[0].Op == OpARM64CMPWconst {
 29026  			v_0 := b.Controls[0]
 29027  			if auxIntToInt32(v_0.AuxInt) != 0 {
 29028  				break
 29029  			}
 29030  			x := v_0.Args[0]
 29031  			if x.Op != OpARM64ANDconst {
 29032  				break
 29033  			}
 29034  			c := auxIntToInt64(x.AuxInt)
 29035  			y := x.Args[0]
 29036  			if !(x.Uses == 1) {
 29037  				break
 29038  			}
 29039  			v0 := b.NewValue0(v_0.Pos, OpARM64TSTWconst, types.TypeFlags)
 29040  			v0.AuxInt = int32ToAuxInt(int32(c))
 29041  			v0.AddArg(y)
 29042  			b.resetWithControl(BlockARM64EQ, v0)
 29043  			return true
 29044  		}
 29045  		// match: (EQ (CMPconst [0] x:(ADDconst [c] y)) yes no)
 29046  		// cond: x.Uses == 1
 29047  		// result: (EQ (CMNconst [c] y) yes no)
 29048  		for b.Controls[0].Op == OpARM64CMPconst {
 29049  			v_0 := b.Controls[0]
 29050  			if auxIntToInt64(v_0.AuxInt) != 0 {
 29051  				break
 29052  			}
 29053  			x := v_0.Args[0]
 29054  			if x.Op != OpARM64ADDconst {
 29055  				break
 29056  			}
 29057  			c := auxIntToInt64(x.AuxInt)
 29058  			y := x.Args[0]
 29059  			if !(x.Uses == 1) {
 29060  				break
 29061  			}
 29062  			v0 := b.NewValue0(v_0.Pos, OpARM64CMNconst, types.TypeFlags)
 29063  			v0.AuxInt = int64ToAuxInt(c)
 29064  			v0.AddArg(y)
 29065  			b.resetWithControl(BlockARM64EQ, v0)
 29066  			return true
 29067  		}
 29068  		// match: (EQ (CMPWconst [0] x:(ADDconst [c] y)) yes no)
 29069  		// cond: x.Uses == 1
 29070  		// result: (EQ (CMNWconst [int32(c)] y) yes no)
 29071  		for b.Controls[0].Op == OpARM64CMPWconst {
 29072  			v_0 := b.Controls[0]
 29073  			if auxIntToInt32(v_0.AuxInt) != 0 {
 29074  				break
 29075  			}
 29076  			x := v_0.Args[0]
 29077  			if x.Op != OpARM64ADDconst {
 29078  				break
 29079  			}
 29080  			c := auxIntToInt64(x.AuxInt)
 29081  			y := x.Args[0]
 29082  			if !(x.Uses == 1) {
 29083  				break
 29084  			}
 29085  			v0 := b.NewValue0(v_0.Pos, OpARM64CMNWconst, types.TypeFlags)
 29086  			v0.AuxInt = int32ToAuxInt(int32(c))
 29087  			v0.AddArg(y)
 29088  			b.resetWithControl(BlockARM64EQ, v0)
 29089  			return true
 29090  		}
 29091  		// match: (EQ (CMPconst [0] z:(ADD x y)) yes no)
 29092  		// cond: z.Uses == 1
 29093  		// result: (EQ (CMN x y) yes no)
 29094  		for b.Controls[0].Op == OpARM64CMPconst {
 29095  			v_0 := b.Controls[0]
 29096  			if auxIntToInt64(v_0.AuxInt) != 0 {
 29097  				break
 29098  			}
 29099  			z := v_0.Args[0]
 29100  			if z.Op != OpARM64ADD {
 29101  				break
 29102  			}
 29103  			_ = z.Args[1]
 29104  			z_0 := z.Args[0]
 29105  			z_1 := z.Args[1]
 29106  			for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 {
 29107  				x := z_0
 29108  				y := z_1
 29109  				if !(z.Uses == 1) {
 29110  					continue
 29111  				}
 29112  				v0 := b.NewValue0(v_0.Pos, OpARM64CMN, types.TypeFlags)
 29113  				v0.AddArg2(x, y)
 29114  				b.resetWithControl(BlockARM64EQ, v0)
 29115  				return true
 29116  			}
 29117  			break
 29118  		}
 29119  		// match: (EQ (CMPWconst [0] z:(ADD x y)) yes no)
 29120  		// cond: z.Uses == 1
 29121  		// result: (EQ (CMNW x y) yes no)
 29122  		for b.Controls[0].Op == OpARM64CMPWconst {
 29123  			v_0 := b.Controls[0]
 29124  			if auxIntToInt32(v_0.AuxInt) != 0 {
 29125  				break
 29126  			}
 29127  			z := v_0.Args[0]
 29128  			if z.Op != OpARM64ADD {
 29129  				break
 29130  			}
 29131  			_ = z.Args[1]
 29132  			z_0 := z.Args[0]
 29133  			z_1 := z.Args[1]
 29134  			for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 {
 29135  				x := z_0
 29136  				y := z_1
 29137  				if !(z.Uses == 1) {
 29138  					continue
 29139  				}
 29140  				v0 := b.NewValue0(v_0.Pos, OpARM64CMNW, types.TypeFlags)
 29141  				v0.AddArg2(x, y)
 29142  				b.resetWithControl(BlockARM64EQ, v0)
 29143  				return true
 29144  			}
 29145  			break
 29146  		}
 29147  		// match: (EQ (CMP x z:(NEG y)) yes no)
 29148  		// cond: z.Uses == 1
 29149  		// result: (EQ (CMN x y) yes no)
 29150  		for b.Controls[0].Op == OpARM64CMP {
 29151  			v_0 := b.Controls[0]
 29152  			_ = v_0.Args[1]
 29153  			x := v_0.Args[0]
 29154  			z := v_0.Args[1]
 29155  			if z.Op != OpARM64NEG {
 29156  				break
 29157  			}
 29158  			y := z.Args[0]
 29159  			if !(z.Uses == 1) {
 29160  				break
 29161  			}
 29162  			v0 := b.NewValue0(v_0.Pos, OpARM64CMN, types.TypeFlags)
 29163  			v0.AddArg2(x, y)
 29164  			b.resetWithControl(BlockARM64EQ, v0)
 29165  			return true
 29166  		}
 29167  		// match: (EQ (CMPW x z:(NEG y)) yes no)
 29168  		// cond: z.Uses == 1
 29169  		// result: (EQ (CMNW x y) yes no)
 29170  		for b.Controls[0].Op == OpARM64CMPW {
 29171  			v_0 := b.Controls[0]
 29172  			_ = v_0.Args[1]
 29173  			x := v_0.Args[0]
 29174  			z := v_0.Args[1]
 29175  			if z.Op != OpARM64NEG {
 29176  				break
 29177  			}
 29178  			y := z.Args[0]
 29179  			if !(z.Uses == 1) {
 29180  				break
 29181  			}
 29182  			v0 := b.NewValue0(v_0.Pos, OpARM64CMNW, types.TypeFlags)
 29183  			v0.AddArg2(x, y)
 29184  			b.resetWithControl(BlockARM64EQ, v0)
 29185  			return true
 29186  		}
 29187  		// match: (EQ (CMPconst [0] x) yes no)
 29188  		// result: (Z x yes no)
 29189  		for b.Controls[0].Op == OpARM64CMPconst {
 29190  			v_0 := b.Controls[0]
 29191  			if auxIntToInt64(v_0.AuxInt) != 0 {
 29192  				break
 29193  			}
 29194  			x := v_0.Args[0]
 29195  			b.resetWithControl(BlockARM64Z, x)
 29196  			return true
 29197  		}
 29198  		// match: (EQ (CMPWconst [0] x) yes no)
 29199  		// result: (ZW x yes no)
 29200  		for b.Controls[0].Op == OpARM64CMPWconst {
 29201  			v_0 := b.Controls[0]
 29202  			if auxIntToInt32(v_0.AuxInt) != 0 {
 29203  				break
 29204  			}
 29205  			x := v_0.Args[0]
 29206  			b.resetWithControl(BlockARM64ZW, x)
 29207  			return true
 29208  		}
 29209  		// match: (EQ (CMPconst [0] z:(MADD a x y)) yes no)
 29210  		// cond: z.Uses==1
 29211  		// result: (EQ (CMN a (MUL <x.Type> x y)) yes no)
 29212  		for b.Controls[0].Op == OpARM64CMPconst {
 29213  			v_0 := b.Controls[0]
 29214  			if auxIntToInt64(v_0.AuxInt) != 0 {
 29215  				break
 29216  			}
 29217  			z := v_0.Args[0]
 29218  			if z.Op != OpARM64MADD {
 29219  				break
 29220  			}
 29221  			y := z.Args[2]
 29222  			a := z.Args[0]
 29223  			x := z.Args[1]
 29224  			if !(z.Uses == 1) {
 29225  				break
 29226  			}
 29227  			v0 := b.NewValue0(v_0.Pos, OpARM64CMN, types.TypeFlags)
 29228  			v1 := b.NewValue0(v_0.Pos, OpARM64MUL, x.Type)
 29229  			v1.AddArg2(x, y)
 29230  			v0.AddArg2(a, v1)
 29231  			b.resetWithControl(BlockARM64EQ, v0)
 29232  			return true
 29233  		}
 29234  		// match: (EQ (CMPconst [0] z:(MSUB a x y)) yes no)
 29235  		// cond: z.Uses==1
 29236  		// result: (EQ (CMP a (MUL <x.Type> x y)) yes no)
 29237  		for b.Controls[0].Op == OpARM64CMPconst {
 29238  			v_0 := b.Controls[0]
 29239  			if auxIntToInt64(v_0.AuxInt) != 0 {
 29240  				break
 29241  			}
 29242  			z := v_0.Args[0]
 29243  			if z.Op != OpARM64MSUB {
 29244  				break
 29245  			}
 29246  			y := z.Args[2]
 29247  			a := z.Args[0]
 29248  			x := z.Args[1]
 29249  			if !(z.Uses == 1) {
 29250  				break
 29251  			}
 29252  			v0 := b.NewValue0(v_0.Pos, OpARM64CMP, types.TypeFlags)
 29253  			v1 := b.NewValue0(v_0.Pos, OpARM64MUL, x.Type)
 29254  			v1.AddArg2(x, y)
 29255  			v0.AddArg2(a, v1)
 29256  			b.resetWithControl(BlockARM64EQ, v0)
 29257  			return true
 29258  		}
 29259  		// match: (EQ (CMPWconst [0] z:(MADDW a x y)) yes no)
 29260  		// cond: z.Uses==1
 29261  		// result: (EQ (CMNW a (MULW <x.Type> x y)) yes no)
 29262  		for b.Controls[0].Op == OpARM64CMPWconst {
 29263  			v_0 := b.Controls[0]
 29264  			if auxIntToInt32(v_0.AuxInt) != 0 {
 29265  				break
 29266  			}
 29267  			z := v_0.Args[0]
 29268  			if z.Op != OpARM64MADDW {
 29269  				break
 29270  			}
 29271  			y := z.Args[2]
 29272  			a := z.Args[0]
 29273  			x := z.Args[1]
 29274  			if !(z.Uses == 1) {
 29275  				break
 29276  			}
 29277  			v0 := b.NewValue0(v_0.Pos, OpARM64CMNW, types.TypeFlags)
 29278  			v1 := b.NewValue0(v_0.Pos, OpARM64MULW, x.Type)
 29279  			v1.AddArg2(x, y)
 29280  			v0.AddArg2(a, v1)
 29281  			b.resetWithControl(BlockARM64EQ, v0)
 29282  			return true
 29283  		}
 29284  		// match: (EQ (CMPWconst [0] z:(MSUBW a x y)) yes no)
 29285  		// cond: z.Uses==1
 29286  		// result: (EQ (CMPW a (MULW <x.Type> x y)) yes no)
 29287  		for b.Controls[0].Op == OpARM64CMPWconst {
 29288  			v_0 := b.Controls[0]
 29289  			if auxIntToInt32(v_0.AuxInt) != 0 {
 29290  				break
 29291  			}
 29292  			z := v_0.Args[0]
 29293  			if z.Op != OpARM64MSUBW {
 29294  				break
 29295  			}
 29296  			y := z.Args[2]
 29297  			a := z.Args[0]
 29298  			x := z.Args[1]
 29299  			if !(z.Uses == 1) {
 29300  				break
 29301  			}
 29302  			v0 := b.NewValue0(v_0.Pos, OpARM64CMPW, types.TypeFlags)
 29303  			v1 := b.NewValue0(v_0.Pos, OpARM64MULW, x.Type)
 29304  			v1.AddArg2(x, y)
 29305  			v0.AddArg2(a, v1)
 29306  			b.resetWithControl(BlockARM64EQ, v0)
 29307  			return true
 29308  		}
 29309  		// match: (EQ (TSTconst [c] x) yes no)
 29310  		// cond: oneBit(c)
 29311  		// result: (TBZ [int64(ntz64(c))] x yes no)
 29312  		for b.Controls[0].Op == OpARM64TSTconst {
 29313  			v_0 := b.Controls[0]
 29314  			c := auxIntToInt64(v_0.AuxInt)
 29315  			x := v_0.Args[0]
 29316  			if !(oneBit(c)) {
 29317  				break
 29318  			}
 29319  			b.resetWithControl(BlockARM64TBZ, x)
 29320  			b.AuxInt = int64ToAuxInt(int64(ntz64(c)))
 29321  			return true
 29322  		}
 29323  		// match: (EQ (TSTWconst [c] x) yes no)
 29324  		// cond: oneBit(int64(uint32(c)))
 29325  		// result: (TBZ [int64(ntz64(int64(uint32(c))))] x yes no)
 29326  		for b.Controls[0].Op == OpARM64TSTWconst {
 29327  			v_0 := b.Controls[0]
 29328  			c := auxIntToInt32(v_0.AuxInt)
 29329  			x := v_0.Args[0]
 29330  			if !(oneBit(int64(uint32(c)))) {
 29331  				break
 29332  			}
 29333  			b.resetWithControl(BlockARM64TBZ, x)
 29334  			b.AuxInt = int64ToAuxInt(int64(ntz64(int64(uint32(c)))))
 29335  			return true
 29336  		}
 29337  		// match: (EQ (FlagConstant [fc]) yes no)
 29338  		// cond: fc.eq()
 29339  		// result: (First yes no)
 29340  		for b.Controls[0].Op == OpARM64FlagConstant {
 29341  			v_0 := b.Controls[0]
 29342  			fc := auxIntToFlagConstant(v_0.AuxInt)
 29343  			if !(fc.eq()) {
 29344  				break
 29345  			}
 29346  			b.Reset(BlockFirst)
 29347  			return true
 29348  		}
 29349  		// match: (EQ (FlagConstant [fc]) yes no)
 29350  		// cond: !fc.eq()
 29351  		// result: (First no yes)
 29352  		for b.Controls[0].Op == OpARM64FlagConstant {
 29353  			v_0 := b.Controls[0]
 29354  			fc := auxIntToFlagConstant(v_0.AuxInt)
 29355  			if !(!fc.eq()) {
 29356  				break
 29357  			}
 29358  			b.Reset(BlockFirst)
 29359  			b.swapSuccessors()
 29360  			return true
 29361  		}
 29362  		// match: (EQ (InvertFlags cmp) yes no)
 29363  		// result: (EQ cmp yes no)
 29364  		for b.Controls[0].Op == OpARM64InvertFlags {
 29365  			v_0 := b.Controls[0]
 29366  			cmp := v_0.Args[0]
 29367  			b.resetWithControl(BlockARM64EQ, cmp)
 29368  			return true
 29369  		}
 29370  	case BlockARM64FGE:
 29371  		// match: (FGE (InvertFlags cmp) yes no)
 29372  		// result: (FLE cmp yes no)
 29373  		for b.Controls[0].Op == OpARM64InvertFlags {
 29374  			v_0 := b.Controls[0]
 29375  			cmp := v_0.Args[0]
 29376  			b.resetWithControl(BlockARM64FLE, cmp)
 29377  			return true
 29378  		}
 29379  	case BlockARM64FGT:
 29380  		// match: (FGT (InvertFlags cmp) yes no)
 29381  		// result: (FLT cmp yes no)
 29382  		for b.Controls[0].Op == OpARM64InvertFlags {
 29383  			v_0 := b.Controls[0]
 29384  			cmp := v_0.Args[0]
 29385  			b.resetWithControl(BlockARM64FLT, cmp)
 29386  			return true
 29387  		}
 29388  	case BlockARM64FLE:
 29389  		// match: (FLE (InvertFlags cmp) yes no)
 29390  		// result: (FGE cmp yes no)
 29391  		for b.Controls[0].Op == OpARM64InvertFlags {
 29392  			v_0 := b.Controls[0]
 29393  			cmp := v_0.Args[0]
 29394  			b.resetWithControl(BlockARM64FGE, cmp)
 29395  			return true
 29396  		}
 29397  	case BlockARM64FLT:
 29398  		// match: (FLT (InvertFlags cmp) yes no)
 29399  		// result: (FGT cmp yes no)
 29400  		for b.Controls[0].Op == OpARM64InvertFlags {
 29401  			v_0 := b.Controls[0]
 29402  			cmp := v_0.Args[0]
 29403  			b.resetWithControl(BlockARM64FGT, cmp)
 29404  			return true
 29405  		}
 29406  	case BlockARM64GE:
 29407  		// match: (GE (CMPconst [0] z:(AND x y)) yes no)
 29408  		// cond: z.Uses == 1
 29409  		// result: (GE (TST x y) yes no)
 29410  		for b.Controls[0].Op == OpARM64CMPconst {
 29411  			v_0 := b.Controls[0]
 29412  			if auxIntToInt64(v_0.AuxInt) != 0 {
 29413  				break
 29414  			}
 29415  			z := v_0.Args[0]
 29416  			if z.Op != OpARM64AND {
 29417  				break
 29418  			}
 29419  			_ = z.Args[1]
 29420  			z_0 := z.Args[0]
 29421  			z_1 := z.Args[1]
 29422  			for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 {
 29423  				x := z_0
 29424  				y := z_1
 29425  				if !(z.Uses == 1) {
 29426  					continue
 29427  				}
 29428  				v0 := b.NewValue0(v_0.Pos, OpARM64TST, types.TypeFlags)
 29429  				v0.AddArg2(x, y)
 29430  				b.resetWithControl(BlockARM64GE, v0)
 29431  				return true
 29432  			}
 29433  			break
 29434  		}
 29435  		// match: (GE (CMPconst [0] x:(ANDconst [c] y)) yes no)
 29436  		// cond: x.Uses == 1
 29437  		// result: (GE (TSTconst [c] y) yes no)
 29438  		for b.Controls[0].Op == OpARM64CMPconst {
 29439  			v_0 := b.Controls[0]
 29440  			if auxIntToInt64(v_0.AuxInt) != 0 {
 29441  				break
 29442  			}
 29443  			x := v_0.Args[0]
 29444  			if x.Op != OpARM64ANDconst {
 29445  				break
 29446  			}
 29447  			c := auxIntToInt64(x.AuxInt)
 29448  			y := x.Args[0]
 29449  			if !(x.Uses == 1) {
 29450  				break
 29451  			}
 29452  			v0 := b.NewValue0(v_0.Pos, OpARM64TSTconst, types.TypeFlags)
 29453  			v0.AuxInt = int64ToAuxInt(c)
 29454  			v0.AddArg(y)
 29455  			b.resetWithControl(BlockARM64GE, v0)
 29456  			return true
 29457  		}
 29458  		// match: (GE (CMPWconst [0] z:(AND x y)) yes no)
 29459  		// cond: z.Uses == 1
 29460  		// result: (GE (TSTW x y) yes no)
 29461  		for b.Controls[0].Op == OpARM64CMPWconst {
 29462  			v_0 := b.Controls[0]
 29463  			if auxIntToInt32(v_0.AuxInt) != 0 {
 29464  				break
 29465  			}
 29466  			z := v_0.Args[0]
 29467  			if z.Op != OpARM64AND {
 29468  				break
 29469  			}
 29470  			_ = z.Args[1]
 29471  			z_0 := z.Args[0]
 29472  			z_1 := z.Args[1]
 29473  			for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 {
 29474  				x := z_0
 29475  				y := z_1
 29476  				if !(z.Uses == 1) {
 29477  					continue
 29478  				}
 29479  				v0 := b.NewValue0(v_0.Pos, OpARM64TSTW, types.TypeFlags)
 29480  				v0.AddArg2(x, y)
 29481  				b.resetWithControl(BlockARM64GE, v0)
 29482  				return true
 29483  			}
 29484  			break
 29485  		}
 29486  		// match: (GE (CMPWconst [0] x:(ANDconst [c] y)) yes no)
 29487  		// cond: x.Uses == 1
 29488  		// result: (GE (TSTWconst [int32(c)] y) yes no)
 29489  		for b.Controls[0].Op == OpARM64CMPWconst {
 29490  			v_0 := b.Controls[0]
 29491  			if auxIntToInt32(v_0.AuxInt) != 0 {
 29492  				break
 29493  			}
 29494  			x := v_0.Args[0]
 29495  			if x.Op != OpARM64ANDconst {
 29496  				break
 29497  			}
 29498  			c := auxIntToInt64(x.AuxInt)
 29499  			y := x.Args[0]
 29500  			if !(x.Uses == 1) {
 29501  				break
 29502  			}
 29503  			v0 := b.NewValue0(v_0.Pos, OpARM64TSTWconst, types.TypeFlags)
 29504  			v0.AuxInt = int32ToAuxInt(int32(c))
 29505  			v0.AddArg(y)
 29506  			b.resetWithControl(BlockARM64GE, v0)
 29507  			return true
 29508  		}
 29509  		// match: (GE (CMPconst [0] x:(ADDconst [c] y)) yes no)
 29510  		// cond: x.Uses == 1
 29511  		// result: (GEnoov (CMNconst [c] y) yes no)
 29512  		for b.Controls[0].Op == OpARM64CMPconst {
 29513  			v_0 := b.Controls[0]
 29514  			if auxIntToInt64(v_0.AuxInt) != 0 {
 29515  				break
 29516  			}
 29517  			x := v_0.Args[0]
 29518  			if x.Op != OpARM64ADDconst {
 29519  				break
 29520  			}
 29521  			c := auxIntToInt64(x.AuxInt)
 29522  			y := x.Args[0]
 29523  			if !(x.Uses == 1) {
 29524  				break
 29525  			}
 29526  			v0 := b.NewValue0(v_0.Pos, OpARM64CMNconst, types.TypeFlags)
 29527  			v0.AuxInt = int64ToAuxInt(c)
 29528  			v0.AddArg(y)
 29529  			b.resetWithControl(BlockARM64GEnoov, v0)
 29530  			return true
 29531  		}
 29532  		// match: (GE (CMPWconst [0] x:(ADDconst [c] y)) yes no)
 29533  		// cond: x.Uses == 1
 29534  		// result: (GEnoov (CMNWconst [int32(c)] y) yes no)
 29535  		for b.Controls[0].Op == OpARM64CMPWconst {
 29536  			v_0 := b.Controls[0]
 29537  			if auxIntToInt32(v_0.AuxInt) != 0 {
 29538  				break
 29539  			}
 29540  			x := v_0.Args[0]
 29541  			if x.Op != OpARM64ADDconst {
 29542  				break
 29543  			}
 29544  			c := auxIntToInt64(x.AuxInt)
 29545  			y := x.Args[0]
 29546  			if !(x.Uses == 1) {
 29547  				break
 29548  			}
 29549  			v0 := b.NewValue0(v_0.Pos, OpARM64CMNWconst, types.TypeFlags)
 29550  			v0.AuxInt = int32ToAuxInt(int32(c))
 29551  			v0.AddArg(y)
 29552  			b.resetWithControl(BlockARM64GEnoov, v0)
 29553  			return true
 29554  		}
 29555  		// match: (GE (CMPconst [0] z:(ADD x y)) yes no)
 29556  		// cond: z.Uses == 1
 29557  		// result: (GEnoov (CMN x y) yes no)
 29558  		for b.Controls[0].Op == OpARM64CMPconst {
 29559  			v_0 := b.Controls[0]
 29560  			if auxIntToInt64(v_0.AuxInt) != 0 {
 29561  				break
 29562  			}
 29563  			z := v_0.Args[0]
 29564  			if z.Op != OpARM64ADD {
 29565  				break
 29566  			}
 29567  			_ = z.Args[1]
 29568  			z_0 := z.Args[0]
 29569  			z_1 := z.Args[1]
 29570  			for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 {
 29571  				x := z_0
 29572  				y := z_1
 29573  				if !(z.Uses == 1) {
 29574  					continue
 29575  				}
 29576  				v0 := b.NewValue0(v_0.Pos, OpARM64CMN, types.TypeFlags)
 29577  				v0.AddArg2(x, y)
 29578  				b.resetWithControl(BlockARM64GEnoov, v0)
 29579  				return true
 29580  			}
 29581  			break
 29582  		}
 29583  		// match: (GE (CMPWconst [0] z:(ADD x y)) yes no)
 29584  		// cond: z.Uses == 1
 29585  		// result: (GEnoov (CMNW x y) yes no)
 29586  		for b.Controls[0].Op == OpARM64CMPWconst {
 29587  			v_0 := b.Controls[0]
 29588  			if auxIntToInt32(v_0.AuxInt) != 0 {
 29589  				break
 29590  			}
 29591  			z := v_0.Args[0]
 29592  			if z.Op != OpARM64ADD {
 29593  				break
 29594  			}
 29595  			_ = z.Args[1]
 29596  			z_0 := z.Args[0]
 29597  			z_1 := z.Args[1]
 29598  			for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 {
 29599  				x := z_0
 29600  				y := z_1
 29601  				if !(z.Uses == 1) {
 29602  					continue
 29603  				}
 29604  				v0 := b.NewValue0(v_0.Pos, OpARM64CMNW, types.TypeFlags)
 29605  				v0.AddArg2(x, y)
 29606  				b.resetWithControl(BlockARM64GEnoov, v0)
 29607  				return true
 29608  			}
 29609  			break
 29610  		}
 29611  		// match: (GE (CMPconst [0] z:(MADD a x y)) yes no)
 29612  		// cond: z.Uses==1
 29613  		// result: (GEnoov (CMN a (MUL <x.Type> x y)) yes no)
 29614  		for b.Controls[0].Op == OpARM64CMPconst {
 29615  			v_0 := b.Controls[0]
 29616  			if auxIntToInt64(v_0.AuxInt) != 0 {
 29617  				break
 29618  			}
 29619  			z := v_0.Args[0]
 29620  			if z.Op != OpARM64MADD {
 29621  				break
 29622  			}
 29623  			y := z.Args[2]
 29624  			a := z.Args[0]
 29625  			x := z.Args[1]
 29626  			if !(z.Uses == 1) {
 29627  				break
 29628  			}
 29629  			v0 := b.NewValue0(v_0.Pos, OpARM64CMN, types.TypeFlags)
 29630  			v1 := b.NewValue0(v_0.Pos, OpARM64MUL, x.Type)
 29631  			v1.AddArg2(x, y)
 29632  			v0.AddArg2(a, v1)
 29633  			b.resetWithControl(BlockARM64GEnoov, v0)
 29634  			return true
 29635  		}
 29636  		// match: (GE (CMPconst [0] z:(MSUB a x y)) yes no)
 29637  		// cond: z.Uses==1
 29638  		// result: (GEnoov (CMP a (MUL <x.Type> x y)) yes no)
 29639  		for b.Controls[0].Op == OpARM64CMPconst {
 29640  			v_0 := b.Controls[0]
 29641  			if auxIntToInt64(v_0.AuxInt) != 0 {
 29642  				break
 29643  			}
 29644  			z := v_0.Args[0]
 29645  			if z.Op != OpARM64MSUB {
 29646  				break
 29647  			}
 29648  			y := z.Args[2]
 29649  			a := z.Args[0]
 29650  			x := z.Args[1]
 29651  			if !(z.Uses == 1) {
 29652  				break
 29653  			}
 29654  			v0 := b.NewValue0(v_0.Pos, OpARM64CMP, types.TypeFlags)
 29655  			v1 := b.NewValue0(v_0.Pos, OpARM64MUL, x.Type)
 29656  			v1.AddArg2(x, y)
 29657  			v0.AddArg2(a, v1)
 29658  			b.resetWithControl(BlockARM64GEnoov, v0)
 29659  			return true
 29660  		}
 29661  		// match: (GE (CMPWconst [0] z:(MADDW a x y)) yes no)
 29662  		// cond: z.Uses==1
 29663  		// result: (GEnoov (CMNW a (MULW <x.Type> x y)) yes no)
 29664  		for b.Controls[0].Op == OpARM64CMPWconst {
 29665  			v_0 := b.Controls[0]
 29666  			if auxIntToInt32(v_0.AuxInt) != 0 {
 29667  				break
 29668  			}
 29669  			z := v_0.Args[0]
 29670  			if z.Op != OpARM64MADDW {
 29671  				break
 29672  			}
 29673  			y := z.Args[2]
 29674  			a := z.Args[0]
 29675  			x := z.Args[1]
 29676  			if !(z.Uses == 1) {
 29677  				break
 29678  			}
 29679  			v0 := b.NewValue0(v_0.Pos, OpARM64CMNW, types.TypeFlags)
 29680  			v1 := b.NewValue0(v_0.Pos, OpARM64MULW, x.Type)
 29681  			v1.AddArg2(x, y)
 29682  			v0.AddArg2(a, v1)
 29683  			b.resetWithControl(BlockARM64GEnoov, v0)
 29684  			return true
 29685  		}
 29686  		// match: (GE (CMPWconst [0] z:(MSUBW a x y)) yes no)
 29687  		// cond: z.Uses==1
 29688  		// result: (GEnoov (CMPW a (MULW <x.Type> x y)) yes no)
 29689  		for b.Controls[0].Op == OpARM64CMPWconst {
 29690  			v_0 := b.Controls[0]
 29691  			if auxIntToInt32(v_0.AuxInt) != 0 {
 29692  				break
 29693  			}
 29694  			z := v_0.Args[0]
 29695  			if z.Op != OpARM64MSUBW {
 29696  				break
 29697  			}
 29698  			y := z.Args[2]
 29699  			a := z.Args[0]
 29700  			x := z.Args[1]
 29701  			if !(z.Uses == 1) {
 29702  				break
 29703  			}
 29704  			v0 := b.NewValue0(v_0.Pos, OpARM64CMPW, types.TypeFlags)
 29705  			v1 := b.NewValue0(v_0.Pos, OpARM64MULW, x.Type)
 29706  			v1.AddArg2(x, y)
 29707  			v0.AddArg2(a, v1)
 29708  			b.resetWithControl(BlockARM64GEnoov, v0)
 29709  			return true
 29710  		}
 29711  		// match: (GE (CMPWconst [0] x) yes no)
 29712  		// result: (TBZ [31] x yes no)
 29713  		for b.Controls[0].Op == OpARM64CMPWconst {
 29714  			v_0 := b.Controls[0]
 29715  			if auxIntToInt32(v_0.AuxInt) != 0 {
 29716  				break
 29717  			}
 29718  			x := v_0.Args[0]
 29719  			b.resetWithControl(BlockARM64TBZ, x)
 29720  			b.AuxInt = int64ToAuxInt(31)
 29721  			return true
 29722  		}
 29723  		// match: (GE (CMPconst [0] x) yes no)
 29724  		// result: (TBZ [63] x yes no)
 29725  		for b.Controls[0].Op == OpARM64CMPconst {
 29726  			v_0 := b.Controls[0]
 29727  			if auxIntToInt64(v_0.AuxInt) != 0 {
 29728  				break
 29729  			}
 29730  			x := v_0.Args[0]
 29731  			b.resetWithControl(BlockARM64TBZ, x)
 29732  			b.AuxInt = int64ToAuxInt(63)
 29733  			return true
 29734  		}
 29735  		// match: (GE (FlagConstant [fc]) yes no)
 29736  		// cond: fc.ge()
 29737  		// result: (First yes no)
 29738  		for b.Controls[0].Op == OpARM64FlagConstant {
 29739  			v_0 := b.Controls[0]
 29740  			fc := auxIntToFlagConstant(v_0.AuxInt)
 29741  			if !(fc.ge()) {
 29742  				break
 29743  			}
 29744  			b.Reset(BlockFirst)
 29745  			return true
 29746  		}
 29747  		// match: (GE (FlagConstant [fc]) yes no)
 29748  		// cond: !fc.ge()
 29749  		// result: (First no yes)
 29750  		for b.Controls[0].Op == OpARM64FlagConstant {
 29751  			v_0 := b.Controls[0]
 29752  			fc := auxIntToFlagConstant(v_0.AuxInt)
 29753  			if !(!fc.ge()) {
 29754  				break
 29755  			}
 29756  			b.Reset(BlockFirst)
 29757  			b.swapSuccessors()
 29758  			return true
 29759  		}
 29760  		// match: (GE (InvertFlags cmp) yes no)
 29761  		// result: (LE cmp yes no)
 29762  		for b.Controls[0].Op == OpARM64InvertFlags {
 29763  			v_0 := b.Controls[0]
 29764  			cmp := v_0.Args[0]
 29765  			b.resetWithControl(BlockARM64LE, cmp)
 29766  			return true
 29767  		}
 29768  	case BlockARM64GEnoov:
 29769  		// match: (GEnoov (FlagConstant [fc]) yes no)
 29770  		// cond: fc.geNoov()
 29771  		// result: (First yes no)
 29772  		for b.Controls[0].Op == OpARM64FlagConstant {
 29773  			v_0 := b.Controls[0]
 29774  			fc := auxIntToFlagConstant(v_0.AuxInt)
 29775  			if !(fc.geNoov()) {
 29776  				break
 29777  			}
 29778  			b.Reset(BlockFirst)
 29779  			return true
 29780  		}
 29781  		// match: (GEnoov (FlagConstant [fc]) yes no)
 29782  		// cond: !fc.geNoov()
 29783  		// result: (First no yes)
 29784  		for b.Controls[0].Op == OpARM64FlagConstant {
 29785  			v_0 := b.Controls[0]
 29786  			fc := auxIntToFlagConstant(v_0.AuxInt)
 29787  			if !(!fc.geNoov()) {
 29788  				break
 29789  			}
 29790  			b.Reset(BlockFirst)
 29791  			b.swapSuccessors()
 29792  			return true
 29793  		}
 29794  		// match: (GEnoov (InvertFlags cmp) yes no)
 29795  		// result: (LEnoov cmp yes no)
 29796  		for b.Controls[0].Op == OpARM64InvertFlags {
 29797  			v_0 := b.Controls[0]
 29798  			cmp := v_0.Args[0]
 29799  			b.resetWithControl(BlockARM64LEnoov, cmp)
 29800  			return true
 29801  		}
 29802  	case BlockARM64GT:
 29803  		// match: (GT (CMPconst [0] z:(AND x y)) yes no)
 29804  		// cond: z.Uses == 1
 29805  		// result: (GT (TST x y) yes no)
 29806  		for b.Controls[0].Op == OpARM64CMPconst {
 29807  			v_0 := b.Controls[0]
 29808  			if auxIntToInt64(v_0.AuxInt) != 0 {
 29809  				break
 29810  			}
 29811  			z := v_0.Args[0]
 29812  			if z.Op != OpARM64AND {
 29813  				break
 29814  			}
 29815  			_ = z.Args[1]
 29816  			z_0 := z.Args[0]
 29817  			z_1 := z.Args[1]
 29818  			for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 {
 29819  				x := z_0
 29820  				y := z_1
 29821  				if !(z.Uses == 1) {
 29822  					continue
 29823  				}
 29824  				v0 := b.NewValue0(v_0.Pos, OpARM64TST, types.TypeFlags)
 29825  				v0.AddArg2(x, y)
 29826  				b.resetWithControl(BlockARM64GT, v0)
 29827  				return true
 29828  			}
 29829  			break
 29830  		}
 29831  		// match: (GT (CMPconst [0] x:(ANDconst [c] y)) yes no)
 29832  		// cond: x.Uses == 1
 29833  		// result: (GT (TSTconst [c] y) yes no)
 29834  		for b.Controls[0].Op == OpARM64CMPconst {
 29835  			v_0 := b.Controls[0]
 29836  			if auxIntToInt64(v_0.AuxInt) != 0 {
 29837  				break
 29838  			}
 29839  			x := v_0.Args[0]
 29840  			if x.Op != OpARM64ANDconst {
 29841  				break
 29842  			}
 29843  			c := auxIntToInt64(x.AuxInt)
 29844  			y := x.Args[0]
 29845  			if !(x.Uses == 1) {
 29846  				break
 29847  			}
 29848  			v0 := b.NewValue0(v_0.Pos, OpARM64TSTconst, types.TypeFlags)
 29849  			v0.AuxInt = int64ToAuxInt(c)
 29850  			v0.AddArg(y)
 29851  			b.resetWithControl(BlockARM64GT, v0)
 29852  			return true
 29853  		}
 29854  		// match: (GT (CMPWconst [0] z:(AND x y)) yes no)
 29855  		// cond: z.Uses == 1
 29856  		// result: (GT (TSTW x y) yes no)
 29857  		for b.Controls[0].Op == OpARM64CMPWconst {
 29858  			v_0 := b.Controls[0]
 29859  			if auxIntToInt32(v_0.AuxInt) != 0 {
 29860  				break
 29861  			}
 29862  			z := v_0.Args[0]
 29863  			if z.Op != OpARM64AND {
 29864  				break
 29865  			}
 29866  			_ = z.Args[1]
 29867  			z_0 := z.Args[0]
 29868  			z_1 := z.Args[1]
 29869  			for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 {
 29870  				x := z_0
 29871  				y := z_1
 29872  				if !(z.Uses == 1) {
 29873  					continue
 29874  				}
 29875  				v0 := b.NewValue0(v_0.Pos, OpARM64TSTW, types.TypeFlags)
 29876  				v0.AddArg2(x, y)
 29877  				b.resetWithControl(BlockARM64GT, v0)
 29878  				return true
 29879  			}
 29880  			break
 29881  		}
 29882  		// match: (GT (CMPWconst [0] x:(ANDconst [c] y)) yes no)
 29883  		// cond: x.Uses == 1
 29884  		// result: (GT (TSTWconst [int32(c)] y) yes no)
 29885  		for b.Controls[0].Op == OpARM64CMPWconst {
 29886  			v_0 := b.Controls[0]
 29887  			if auxIntToInt32(v_0.AuxInt) != 0 {
 29888  				break
 29889  			}
 29890  			x := v_0.Args[0]
 29891  			if x.Op != OpARM64ANDconst {
 29892  				break
 29893  			}
 29894  			c := auxIntToInt64(x.AuxInt)
 29895  			y := x.Args[0]
 29896  			if !(x.Uses == 1) {
 29897  				break
 29898  			}
 29899  			v0 := b.NewValue0(v_0.Pos, OpARM64TSTWconst, types.TypeFlags)
 29900  			v0.AuxInt = int32ToAuxInt(int32(c))
 29901  			v0.AddArg(y)
 29902  			b.resetWithControl(BlockARM64GT, v0)
 29903  			return true
 29904  		}
 29905  		// match: (GT (CMPconst [0] x:(ADDconst [c] y)) yes no)
 29906  		// cond: x.Uses == 1
 29907  		// result: (GTnoov (CMNconst [c] y) yes no)
 29908  		for b.Controls[0].Op == OpARM64CMPconst {
 29909  			v_0 := b.Controls[0]
 29910  			if auxIntToInt64(v_0.AuxInt) != 0 {
 29911  				break
 29912  			}
 29913  			x := v_0.Args[0]
 29914  			if x.Op != OpARM64ADDconst {
 29915  				break
 29916  			}
 29917  			c := auxIntToInt64(x.AuxInt)
 29918  			y := x.Args[0]
 29919  			if !(x.Uses == 1) {
 29920  				break
 29921  			}
 29922  			v0 := b.NewValue0(v_0.Pos, OpARM64CMNconst, types.TypeFlags)
 29923  			v0.AuxInt = int64ToAuxInt(c)
 29924  			v0.AddArg(y)
 29925  			b.resetWithControl(BlockARM64GTnoov, v0)
 29926  			return true
 29927  		}
 29928  		// match: (GT (CMPWconst [0] x:(ADDconst [c] y)) yes no)
 29929  		// cond: x.Uses == 1
 29930  		// result: (GTnoov (CMNWconst [int32(c)] y) yes no)
 29931  		for b.Controls[0].Op == OpARM64CMPWconst {
 29932  			v_0 := b.Controls[0]
 29933  			if auxIntToInt32(v_0.AuxInt) != 0 {
 29934  				break
 29935  			}
 29936  			x := v_0.Args[0]
 29937  			if x.Op != OpARM64ADDconst {
 29938  				break
 29939  			}
 29940  			c := auxIntToInt64(x.AuxInt)
 29941  			y := x.Args[0]
 29942  			if !(x.Uses == 1) {
 29943  				break
 29944  			}
 29945  			v0 := b.NewValue0(v_0.Pos, OpARM64CMNWconst, types.TypeFlags)
 29946  			v0.AuxInt = int32ToAuxInt(int32(c))
 29947  			v0.AddArg(y)
 29948  			b.resetWithControl(BlockARM64GTnoov, v0)
 29949  			return true
 29950  		}
 29951  		// match: (GT (CMPconst [0] z:(ADD x y)) yes no)
 29952  		// cond: z.Uses == 1
 29953  		// result: (GTnoov (CMN x y) yes no)
 29954  		for b.Controls[0].Op == OpARM64CMPconst {
 29955  			v_0 := b.Controls[0]
 29956  			if auxIntToInt64(v_0.AuxInt) != 0 {
 29957  				break
 29958  			}
 29959  			z := v_0.Args[0]
 29960  			if z.Op != OpARM64ADD {
 29961  				break
 29962  			}
 29963  			_ = z.Args[1]
 29964  			z_0 := z.Args[0]
 29965  			z_1 := z.Args[1]
 29966  			for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 {
 29967  				x := z_0
 29968  				y := z_1
 29969  				if !(z.Uses == 1) {
 29970  					continue
 29971  				}
 29972  				v0 := b.NewValue0(v_0.Pos, OpARM64CMN, types.TypeFlags)
 29973  				v0.AddArg2(x, y)
 29974  				b.resetWithControl(BlockARM64GTnoov, v0)
 29975  				return true
 29976  			}
 29977  			break
 29978  		}
 29979  		// match: (GT (CMPWconst [0] z:(ADD x y)) yes no)
 29980  		// cond: z.Uses == 1
 29981  		// result: (GTnoov (CMNW x y) yes no)
 29982  		for b.Controls[0].Op == OpARM64CMPWconst {
 29983  			v_0 := b.Controls[0]
 29984  			if auxIntToInt32(v_0.AuxInt) != 0 {
 29985  				break
 29986  			}
 29987  			z := v_0.Args[0]
 29988  			if z.Op != OpARM64ADD {
 29989  				break
 29990  			}
 29991  			_ = z.Args[1]
 29992  			z_0 := z.Args[0]
 29993  			z_1 := z.Args[1]
 29994  			for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 {
 29995  				x := z_0
 29996  				y := z_1
 29997  				if !(z.Uses == 1) {
 29998  					continue
 29999  				}
 30000  				v0 := b.NewValue0(v_0.Pos, OpARM64CMNW, types.TypeFlags)
 30001  				v0.AddArg2(x, y)
 30002  				b.resetWithControl(BlockARM64GTnoov, v0)
 30003  				return true
 30004  			}
 30005  			break
 30006  		}
 30007  		// match: (GT (CMPconst [0] z:(MADD a x y)) yes no)
 30008  		// cond: z.Uses==1
 30009  		// result: (GTnoov (CMN a (MUL <x.Type> x y)) yes no)
 30010  		for b.Controls[0].Op == OpARM64CMPconst {
 30011  			v_0 := b.Controls[0]
 30012  			if auxIntToInt64(v_0.AuxInt) != 0 {
 30013  				break
 30014  			}
 30015  			z := v_0.Args[0]
 30016  			if z.Op != OpARM64MADD {
 30017  				break
 30018  			}
 30019  			y := z.Args[2]
 30020  			a := z.Args[0]
 30021  			x := z.Args[1]
 30022  			if !(z.Uses == 1) {
 30023  				break
 30024  			}
 30025  			v0 := b.NewValue0(v_0.Pos, OpARM64CMN, types.TypeFlags)
 30026  			v1 := b.NewValue0(v_0.Pos, OpARM64MUL, x.Type)
 30027  			v1.AddArg2(x, y)
 30028  			v0.AddArg2(a, v1)
 30029  			b.resetWithControl(BlockARM64GTnoov, v0)
 30030  			return true
 30031  		}
 30032  		// match: (GT (CMPconst [0] z:(MSUB a x y)) yes no)
 30033  		// cond: z.Uses==1
 30034  		// result: (GTnoov (CMP a (MUL <x.Type> x y)) yes no)
 30035  		for b.Controls[0].Op == OpARM64CMPconst {
 30036  			v_0 := b.Controls[0]
 30037  			if auxIntToInt64(v_0.AuxInt) != 0 {
 30038  				break
 30039  			}
 30040  			z := v_0.Args[0]
 30041  			if z.Op != OpARM64MSUB {
 30042  				break
 30043  			}
 30044  			y := z.Args[2]
 30045  			a := z.Args[0]
 30046  			x := z.Args[1]
 30047  			if !(z.Uses == 1) {
 30048  				break
 30049  			}
 30050  			v0 := b.NewValue0(v_0.Pos, OpARM64CMP, types.TypeFlags)
 30051  			v1 := b.NewValue0(v_0.Pos, OpARM64MUL, x.Type)
 30052  			v1.AddArg2(x, y)
 30053  			v0.AddArg2(a, v1)
 30054  			b.resetWithControl(BlockARM64GTnoov, v0)
 30055  			return true
 30056  		}
 30057  		// match: (GT (CMPWconst [0] z:(MADDW a x y)) yes no)
 30058  		// cond: z.Uses==1
 30059  		// result: (GTnoov (CMNW a (MULW <x.Type> x y)) yes no)
 30060  		for b.Controls[0].Op == OpARM64CMPWconst {
 30061  			v_0 := b.Controls[0]
 30062  			if auxIntToInt32(v_0.AuxInt) != 0 {
 30063  				break
 30064  			}
 30065  			z := v_0.Args[0]
 30066  			if z.Op != OpARM64MADDW {
 30067  				break
 30068  			}
 30069  			y := z.Args[2]
 30070  			a := z.Args[0]
 30071  			x := z.Args[1]
 30072  			if !(z.Uses == 1) {
 30073  				break
 30074  			}
 30075  			v0 := b.NewValue0(v_0.Pos, OpARM64CMNW, types.TypeFlags)
 30076  			v1 := b.NewValue0(v_0.Pos, OpARM64MULW, x.Type)
 30077  			v1.AddArg2(x, y)
 30078  			v0.AddArg2(a, v1)
 30079  			b.resetWithControl(BlockARM64GTnoov, v0)
 30080  			return true
 30081  		}
 30082  		// match: (GT (CMPWconst [0] z:(MSUBW a x y)) yes no)
 30083  		// cond: z.Uses==1
 30084  		// result: (GTnoov (CMPW a (MULW <x.Type> x y)) yes no)
 30085  		for b.Controls[0].Op == OpARM64CMPWconst {
 30086  			v_0 := b.Controls[0]
 30087  			if auxIntToInt32(v_0.AuxInt) != 0 {
 30088  				break
 30089  			}
 30090  			z := v_0.Args[0]
 30091  			if z.Op != OpARM64MSUBW {
 30092  				break
 30093  			}
 30094  			y := z.Args[2]
 30095  			a := z.Args[0]
 30096  			x := z.Args[1]
 30097  			if !(z.Uses == 1) {
 30098  				break
 30099  			}
 30100  			v0 := b.NewValue0(v_0.Pos, OpARM64CMPW, types.TypeFlags)
 30101  			v1 := b.NewValue0(v_0.Pos, OpARM64MULW, x.Type)
 30102  			v1.AddArg2(x, y)
 30103  			v0.AddArg2(a, v1)
 30104  			b.resetWithControl(BlockARM64GTnoov, v0)
 30105  			return true
 30106  		}
 30107  		// match: (GT (FlagConstant [fc]) yes no)
 30108  		// cond: fc.gt()
 30109  		// result: (First yes no)
 30110  		for b.Controls[0].Op == OpARM64FlagConstant {
 30111  			v_0 := b.Controls[0]
 30112  			fc := auxIntToFlagConstant(v_0.AuxInt)
 30113  			if !(fc.gt()) {
 30114  				break
 30115  			}
 30116  			b.Reset(BlockFirst)
 30117  			return true
 30118  		}
 30119  		// match: (GT (FlagConstant [fc]) yes no)
 30120  		// cond: !fc.gt()
 30121  		// result: (First no yes)
 30122  		for b.Controls[0].Op == OpARM64FlagConstant {
 30123  			v_0 := b.Controls[0]
 30124  			fc := auxIntToFlagConstant(v_0.AuxInt)
 30125  			if !(!fc.gt()) {
 30126  				break
 30127  			}
 30128  			b.Reset(BlockFirst)
 30129  			b.swapSuccessors()
 30130  			return true
 30131  		}
 30132  		// match: (GT (InvertFlags cmp) yes no)
 30133  		// result: (LT cmp yes no)
 30134  		for b.Controls[0].Op == OpARM64InvertFlags {
 30135  			v_0 := b.Controls[0]
 30136  			cmp := v_0.Args[0]
 30137  			b.resetWithControl(BlockARM64LT, cmp)
 30138  			return true
 30139  		}
 30140  	case BlockARM64GTnoov:
 30141  		// match: (GTnoov (FlagConstant [fc]) yes no)
 30142  		// cond: fc.gtNoov()
 30143  		// result: (First yes no)
 30144  		for b.Controls[0].Op == OpARM64FlagConstant {
 30145  			v_0 := b.Controls[0]
 30146  			fc := auxIntToFlagConstant(v_0.AuxInt)
 30147  			if !(fc.gtNoov()) {
 30148  				break
 30149  			}
 30150  			b.Reset(BlockFirst)
 30151  			return true
 30152  		}
 30153  		// match: (GTnoov (FlagConstant [fc]) yes no)
 30154  		// cond: !fc.gtNoov()
 30155  		// result: (First no yes)
 30156  		for b.Controls[0].Op == OpARM64FlagConstant {
 30157  			v_0 := b.Controls[0]
 30158  			fc := auxIntToFlagConstant(v_0.AuxInt)
 30159  			if !(!fc.gtNoov()) {
 30160  				break
 30161  			}
 30162  			b.Reset(BlockFirst)
 30163  			b.swapSuccessors()
 30164  			return true
 30165  		}
 30166  		// match: (GTnoov (InvertFlags cmp) yes no)
 30167  		// result: (LTnoov cmp yes no)
 30168  		for b.Controls[0].Op == OpARM64InvertFlags {
 30169  			v_0 := b.Controls[0]
 30170  			cmp := v_0.Args[0]
 30171  			b.resetWithControl(BlockARM64LTnoov, cmp)
 30172  			return true
 30173  		}
 30174  	case BlockIf:
 30175  		// match: (If (Equal cc) yes no)
 30176  		// result: (EQ cc yes no)
 30177  		for b.Controls[0].Op == OpARM64Equal {
 30178  			v_0 := b.Controls[0]
 30179  			cc := v_0.Args[0]
 30180  			b.resetWithControl(BlockARM64EQ, cc)
 30181  			return true
 30182  		}
 30183  		// match: (If (NotEqual cc) yes no)
 30184  		// result: (NE cc yes no)
 30185  		for b.Controls[0].Op == OpARM64NotEqual {
 30186  			v_0 := b.Controls[0]
 30187  			cc := v_0.Args[0]
 30188  			b.resetWithControl(BlockARM64NE, cc)
 30189  			return true
 30190  		}
 30191  		// match: (If (LessThan cc) yes no)
 30192  		// result: (LT cc yes no)
 30193  		for b.Controls[0].Op == OpARM64LessThan {
 30194  			v_0 := b.Controls[0]
 30195  			cc := v_0.Args[0]
 30196  			b.resetWithControl(BlockARM64LT, cc)
 30197  			return true
 30198  		}
 30199  		// match: (If (LessThanU cc) yes no)
 30200  		// result: (ULT cc yes no)
 30201  		for b.Controls[0].Op == OpARM64LessThanU {
 30202  			v_0 := b.Controls[0]
 30203  			cc := v_0.Args[0]
 30204  			b.resetWithControl(BlockARM64ULT, cc)
 30205  			return true
 30206  		}
 30207  		// match: (If (LessEqual cc) yes no)
 30208  		// result: (LE cc yes no)
 30209  		for b.Controls[0].Op == OpARM64LessEqual {
 30210  			v_0 := b.Controls[0]
 30211  			cc := v_0.Args[0]
 30212  			b.resetWithControl(BlockARM64LE, cc)
 30213  			return true
 30214  		}
 30215  		// match: (If (LessEqualU cc) yes no)
 30216  		// result: (ULE cc yes no)
 30217  		for b.Controls[0].Op == OpARM64LessEqualU {
 30218  			v_0 := b.Controls[0]
 30219  			cc := v_0.Args[0]
 30220  			b.resetWithControl(BlockARM64ULE, cc)
 30221  			return true
 30222  		}
 30223  		// match: (If (GreaterThan cc) yes no)
 30224  		// result: (GT cc yes no)
 30225  		for b.Controls[0].Op == OpARM64GreaterThan {
 30226  			v_0 := b.Controls[0]
 30227  			cc := v_0.Args[0]
 30228  			b.resetWithControl(BlockARM64GT, cc)
 30229  			return true
 30230  		}
 30231  		// match: (If (GreaterThanU cc) yes no)
 30232  		// result: (UGT cc yes no)
 30233  		for b.Controls[0].Op == OpARM64GreaterThanU {
 30234  			v_0 := b.Controls[0]
 30235  			cc := v_0.Args[0]
 30236  			b.resetWithControl(BlockARM64UGT, cc)
 30237  			return true
 30238  		}
 30239  		// match: (If (GreaterEqual cc) yes no)
 30240  		// result: (GE cc yes no)
 30241  		for b.Controls[0].Op == OpARM64GreaterEqual {
 30242  			v_0 := b.Controls[0]
 30243  			cc := v_0.Args[0]
 30244  			b.resetWithControl(BlockARM64GE, cc)
 30245  			return true
 30246  		}
 30247  		// match: (If (GreaterEqualU cc) yes no)
 30248  		// result: (UGE cc yes no)
 30249  		for b.Controls[0].Op == OpARM64GreaterEqualU {
 30250  			v_0 := b.Controls[0]
 30251  			cc := v_0.Args[0]
 30252  			b.resetWithControl(BlockARM64UGE, cc)
 30253  			return true
 30254  		}
 30255  		// match: (If (LessThanF cc) yes no)
 30256  		// result: (FLT cc yes no)
 30257  		for b.Controls[0].Op == OpARM64LessThanF {
 30258  			v_0 := b.Controls[0]
 30259  			cc := v_0.Args[0]
 30260  			b.resetWithControl(BlockARM64FLT, cc)
 30261  			return true
 30262  		}
 30263  		// match: (If (LessEqualF cc) yes no)
 30264  		// result: (FLE cc yes no)
 30265  		for b.Controls[0].Op == OpARM64LessEqualF {
 30266  			v_0 := b.Controls[0]
 30267  			cc := v_0.Args[0]
 30268  			b.resetWithControl(BlockARM64FLE, cc)
 30269  			return true
 30270  		}
 30271  		// match: (If (GreaterThanF cc) yes no)
 30272  		// result: (FGT cc yes no)
 30273  		for b.Controls[0].Op == OpARM64GreaterThanF {
 30274  			v_0 := b.Controls[0]
 30275  			cc := v_0.Args[0]
 30276  			b.resetWithControl(BlockARM64FGT, cc)
 30277  			return true
 30278  		}
 30279  		// match: (If (GreaterEqualF cc) yes no)
 30280  		// result: (FGE cc yes no)
 30281  		for b.Controls[0].Op == OpARM64GreaterEqualF {
 30282  			v_0 := b.Controls[0]
 30283  			cc := v_0.Args[0]
 30284  			b.resetWithControl(BlockARM64FGE, cc)
 30285  			return true
 30286  		}
 30287  		// match: (If cond yes no)
 30288  		// result: (TBNZ [0] cond yes no)
 30289  		for {
 30290  			cond := b.Controls[0]
 30291  			b.resetWithControl(BlockARM64TBNZ, cond)
 30292  			b.AuxInt = int64ToAuxInt(0)
 30293  			return true
 30294  		}
 30295  	case BlockJumpTable:
 30296  		// match: (JumpTable idx)
 30297  		// result: (JUMPTABLE {makeJumpTableSym(b)} idx (MOVDaddr <typ.Uintptr> {makeJumpTableSym(b)} (SB)))
 30298  		for {
 30299  			idx := b.Controls[0]
 30300  			v0 := b.NewValue0(b.Pos, OpARM64MOVDaddr, typ.Uintptr)
 30301  			v0.Aux = symToAux(makeJumpTableSym(b))
 30302  			v1 := b.NewValue0(b.Pos, OpSB, typ.Uintptr)
 30303  			v0.AddArg(v1)
 30304  			b.resetWithControl2(BlockARM64JUMPTABLE, idx, v0)
 30305  			b.Aux = symToAux(makeJumpTableSym(b))
 30306  			return true
 30307  		}
 30308  	case BlockARM64LE:
 30309  		// match: (LE (CMPconst [0] z:(AND x y)) yes no)
 30310  		// cond: z.Uses == 1
 30311  		// result: (LE (TST x y) yes no)
 30312  		for b.Controls[0].Op == OpARM64CMPconst {
 30313  			v_0 := b.Controls[0]
 30314  			if auxIntToInt64(v_0.AuxInt) != 0 {
 30315  				break
 30316  			}
 30317  			z := v_0.Args[0]
 30318  			if z.Op != OpARM64AND {
 30319  				break
 30320  			}
 30321  			_ = z.Args[1]
 30322  			z_0 := z.Args[0]
 30323  			z_1 := z.Args[1]
 30324  			for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 {
 30325  				x := z_0
 30326  				y := z_1
 30327  				if !(z.Uses == 1) {
 30328  					continue
 30329  				}
 30330  				v0 := b.NewValue0(v_0.Pos, OpARM64TST, types.TypeFlags)
 30331  				v0.AddArg2(x, y)
 30332  				b.resetWithControl(BlockARM64LE, v0)
 30333  				return true
 30334  			}
 30335  			break
 30336  		}
 30337  		// match: (LE (CMPconst [0] x:(ANDconst [c] y)) yes no)
 30338  		// cond: x.Uses == 1
 30339  		// result: (LE (TSTconst [c] y) yes no)
 30340  		for b.Controls[0].Op == OpARM64CMPconst {
 30341  			v_0 := b.Controls[0]
 30342  			if auxIntToInt64(v_0.AuxInt) != 0 {
 30343  				break
 30344  			}
 30345  			x := v_0.Args[0]
 30346  			if x.Op != OpARM64ANDconst {
 30347  				break
 30348  			}
 30349  			c := auxIntToInt64(x.AuxInt)
 30350  			y := x.Args[0]
 30351  			if !(x.Uses == 1) {
 30352  				break
 30353  			}
 30354  			v0 := b.NewValue0(v_0.Pos, OpARM64TSTconst, types.TypeFlags)
 30355  			v0.AuxInt = int64ToAuxInt(c)
 30356  			v0.AddArg(y)
 30357  			b.resetWithControl(BlockARM64LE, v0)
 30358  			return true
 30359  		}
 30360  		// match: (LE (CMPWconst [0] z:(AND x y)) yes no)
 30361  		// cond: z.Uses == 1
 30362  		// result: (LE (TSTW x y) yes no)
 30363  		for b.Controls[0].Op == OpARM64CMPWconst {
 30364  			v_0 := b.Controls[0]
 30365  			if auxIntToInt32(v_0.AuxInt) != 0 {
 30366  				break
 30367  			}
 30368  			z := v_0.Args[0]
 30369  			if z.Op != OpARM64AND {
 30370  				break
 30371  			}
 30372  			_ = z.Args[1]
 30373  			z_0 := z.Args[0]
 30374  			z_1 := z.Args[1]
 30375  			for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 {
 30376  				x := z_0
 30377  				y := z_1
 30378  				if !(z.Uses == 1) {
 30379  					continue
 30380  				}
 30381  				v0 := b.NewValue0(v_0.Pos, OpARM64TSTW, types.TypeFlags)
 30382  				v0.AddArg2(x, y)
 30383  				b.resetWithControl(BlockARM64LE, v0)
 30384  				return true
 30385  			}
 30386  			break
 30387  		}
 30388  		// match: (LE (CMPWconst [0] x:(ANDconst [c] y)) yes no)
 30389  		// cond: x.Uses == 1
 30390  		// result: (LE (TSTWconst [int32(c)] y) yes no)
 30391  		for b.Controls[0].Op == OpARM64CMPWconst {
 30392  			v_0 := b.Controls[0]
 30393  			if auxIntToInt32(v_0.AuxInt) != 0 {
 30394  				break
 30395  			}
 30396  			x := v_0.Args[0]
 30397  			if x.Op != OpARM64ANDconst {
 30398  				break
 30399  			}
 30400  			c := auxIntToInt64(x.AuxInt)
 30401  			y := x.Args[0]
 30402  			if !(x.Uses == 1) {
 30403  				break
 30404  			}
 30405  			v0 := b.NewValue0(v_0.Pos, OpARM64TSTWconst, types.TypeFlags)
 30406  			v0.AuxInt = int32ToAuxInt(int32(c))
 30407  			v0.AddArg(y)
 30408  			b.resetWithControl(BlockARM64LE, v0)
 30409  			return true
 30410  		}
 30411  		// match: (LE (CMPconst [0] x:(ADDconst [c] y)) yes no)
 30412  		// cond: x.Uses == 1
 30413  		// result: (LEnoov (CMNconst [c] y) yes no)
 30414  		for b.Controls[0].Op == OpARM64CMPconst {
 30415  			v_0 := b.Controls[0]
 30416  			if auxIntToInt64(v_0.AuxInt) != 0 {
 30417  				break
 30418  			}
 30419  			x := v_0.Args[0]
 30420  			if x.Op != OpARM64ADDconst {
 30421  				break
 30422  			}
 30423  			c := auxIntToInt64(x.AuxInt)
 30424  			y := x.Args[0]
 30425  			if !(x.Uses == 1) {
 30426  				break
 30427  			}
 30428  			v0 := b.NewValue0(v_0.Pos, OpARM64CMNconst, types.TypeFlags)
 30429  			v0.AuxInt = int64ToAuxInt(c)
 30430  			v0.AddArg(y)
 30431  			b.resetWithControl(BlockARM64LEnoov, v0)
 30432  			return true
 30433  		}
 30434  		// match: (LE (CMPWconst [0] x:(ADDconst [c] y)) yes no)
 30435  		// cond: x.Uses == 1
 30436  		// result: (LEnoov (CMNWconst [int32(c)] y) yes no)
 30437  		for b.Controls[0].Op == OpARM64CMPWconst {
 30438  			v_0 := b.Controls[0]
 30439  			if auxIntToInt32(v_0.AuxInt) != 0 {
 30440  				break
 30441  			}
 30442  			x := v_0.Args[0]
 30443  			if x.Op != OpARM64ADDconst {
 30444  				break
 30445  			}
 30446  			c := auxIntToInt64(x.AuxInt)
 30447  			y := x.Args[0]
 30448  			if !(x.Uses == 1) {
 30449  				break
 30450  			}
 30451  			v0 := b.NewValue0(v_0.Pos, OpARM64CMNWconst, types.TypeFlags)
 30452  			v0.AuxInt = int32ToAuxInt(int32(c))
 30453  			v0.AddArg(y)
 30454  			b.resetWithControl(BlockARM64LEnoov, v0)
 30455  			return true
 30456  		}
 30457  		// match: (LE (CMPconst [0] z:(ADD x y)) yes no)
 30458  		// cond: z.Uses == 1
 30459  		// result: (LEnoov (CMN x y) yes no)
 30460  		for b.Controls[0].Op == OpARM64CMPconst {
 30461  			v_0 := b.Controls[0]
 30462  			if auxIntToInt64(v_0.AuxInt) != 0 {
 30463  				break
 30464  			}
 30465  			z := v_0.Args[0]
 30466  			if z.Op != OpARM64ADD {
 30467  				break
 30468  			}
 30469  			_ = z.Args[1]
 30470  			z_0 := z.Args[0]
 30471  			z_1 := z.Args[1]
 30472  			for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 {
 30473  				x := z_0
 30474  				y := z_1
 30475  				if !(z.Uses == 1) {
 30476  					continue
 30477  				}
 30478  				v0 := b.NewValue0(v_0.Pos, OpARM64CMN, types.TypeFlags)
 30479  				v0.AddArg2(x, y)
 30480  				b.resetWithControl(BlockARM64LEnoov, v0)
 30481  				return true
 30482  			}
 30483  			break
 30484  		}
 30485  		// match: (LE (CMPWconst [0] z:(ADD x y)) yes no)
 30486  		// cond: z.Uses == 1
 30487  		// result: (LEnoov (CMNW x y) yes no)
 30488  		for b.Controls[0].Op == OpARM64CMPWconst {
 30489  			v_0 := b.Controls[0]
 30490  			if auxIntToInt32(v_0.AuxInt) != 0 {
 30491  				break
 30492  			}
 30493  			z := v_0.Args[0]
 30494  			if z.Op != OpARM64ADD {
 30495  				break
 30496  			}
 30497  			_ = z.Args[1]
 30498  			z_0 := z.Args[0]
 30499  			z_1 := z.Args[1]
 30500  			for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 {
 30501  				x := z_0
 30502  				y := z_1
 30503  				if !(z.Uses == 1) {
 30504  					continue
 30505  				}
 30506  				v0 := b.NewValue0(v_0.Pos, OpARM64CMNW, types.TypeFlags)
 30507  				v0.AddArg2(x, y)
 30508  				b.resetWithControl(BlockARM64LEnoov, v0)
 30509  				return true
 30510  			}
 30511  			break
 30512  		}
 30513  		// match: (LE (CMPconst [0] z:(MADD a x y)) yes no)
 30514  		// cond: z.Uses==1
 30515  		// result: (LEnoov (CMN a (MUL <x.Type> x y)) yes no)
 30516  		for b.Controls[0].Op == OpARM64CMPconst {
 30517  			v_0 := b.Controls[0]
 30518  			if auxIntToInt64(v_0.AuxInt) != 0 {
 30519  				break
 30520  			}
 30521  			z := v_0.Args[0]
 30522  			if z.Op != OpARM64MADD {
 30523  				break
 30524  			}
 30525  			y := z.Args[2]
 30526  			a := z.Args[0]
 30527  			x := z.Args[1]
 30528  			if !(z.Uses == 1) {
 30529  				break
 30530  			}
 30531  			v0 := b.NewValue0(v_0.Pos, OpARM64CMN, types.TypeFlags)
 30532  			v1 := b.NewValue0(v_0.Pos, OpARM64MUL, x.Type)
 30533  			v1.AddArg2(x, y)
 30534  			v0.AddArg2(a, v1)
 30535  			b.resetWithControl(BlockARM64LEnoov, v0)
 30536  			return true
 30537  		}
 30538  		// match: (LE (CMPconst [0] z:(MSUB a x y)) yes no)
 30539  		// cond: z.Uses==1
 30540  		// result: (LEnoov (CMP a (MUL <x.Type> x y)) yes no)
 30541  		for b.Controls[0].Op == OpARM64CMPconst {
 30542  			v_0 := b.Controls[0]
 30543  			if auxIntToInt64(v_0.AuxInt) != 0 {
 30544  				break
 30545  			}
 30546  			z := v_0.Args[0]
 30547  			if z.Op != OpARM64MSUB {
 30548  				break
 30549  			}
 30550  			y := z.Args[2]
 30551  			a := z.Args[0]
 30552  			x := z.Args[1]
 30553  			if !(z.Uses == 1) {
 30554  				break
 30555  			}
 30556  			v0 := b.NewValue0(v_0.Pos, OpARM64CMP, types.TypeFlags)
 30557  			v1 := b.NewValue0(v_0.Pos, OpARM64MUL, x.Type)
 30558  			v1.AddArg2(x, y)
 30559  			v0.AddArg2(a, v1)
 30560  			b.resetWithControl(BlockARM64LEnoov, v0)
 30561  			return true
 30562  		}
 30563  		// match: (LE (CMPWconst [0] z:(MADDW a x y)) yes no)
 30564  		// cond: z.Uses==1
 30565  		// result: (LEnoov (CMNW a (MULW <x.Type> x y)) yes no)
 30566  		for b.Controls[0].Op == OpARM64CMPWconst {
 30567  			v_0 := b.Controls[0]
 30568  			if auxIntToInt32(v_0.AuxInt) != 0 {
 30569  				break
 30570  			}
 30571  			z := v_0.Args[0]
 30572  			if z.Op != OpARM64MADDW {
 30573  				break
 30574  			}
 30575  			y := z.Args[2]
 30576  			a := z.Args[0]
 30577  			x := z.Args[1]
 30578  			if !(z.Uses == 1) {
 30579  				break
 30580  			}
 30581  			v0 := b.NewValue0(v_0.Pos, OpARM64CMNW, types.TypeFlags)
 30582  			v1 := b.NewValue0(v_0.Pos, OpARM64MULW, x.Type)
 30583  			v1.AddArg2(x, y)
 30584  			v0.AddArg2(a, v1)
 30585  			b.resetWithControl(BlockARM64LEnoov, v0)
 30586  			return true
 30587  		}
 30588  		// match: (LE (CMPWconst [0] z:(MSUBW a x y)) yes no)
 30589  		// cond: z.Uses==1
 30590  		// result: (LEnoov (CMPW a (MULW <x.Type> x y)) yes no)
 30591  		for b.Controls[0].Op == OpARM64CMPWconst {
 30592  			v_0 := b.Controls[0]
 30593  			if auxIntToInt32(v_0.AuxInt) != 0 {
 30594  				break
 30595  			}
 30596  			z := v_0.Args[0]
 30597  			if z.Op != OpARM64MSUBW {
 30598  				break
 30599  			}
 30600  			y := z.Args[2]
 30601  			a := z.Args[0]
 30602  			x := z.Args[1]
 30603  			if !(z.Uses == 1) {
 30604  				break
 30605  			}
 30606  			v0 := b.NewValue0(v_0.Pos, OpARM64CMPW, types.TypeFlags)
 30607  			v1 := b.NewValue0(v_0.Pos, OpARM64MULW, x.Type)
 30608  			v1.AddArg2(x, y)
 30609  			v0.AddArg2(a, v1)
 30610  			b.resetWithControl(BlockARM64LEnoov, v0)
 30611  			return true
 30612  		}
 30613  		// match: (LE (FlagConstant [fc]) yes no)
 30614  		// cond: fc.le()
 30615  		// result: (First yes no)
 30616  		for b.Controls[0].Op == OpARM64FlagConstant {
 30617  			v_0 := b.Controls[0]
 30618  			fc := auxIntToFlagConstant(v_0.AuxInt)
 30619  			if !(fc.le()) {
 30620  				break
 30621  			}
 30622  			b.Reset(BlockFirst)
 30623  			return true
 30624  		}
 30625  		// match: (LE (FlagConstant [fc]) yes no)
 30626  		// cond: !fc.le()
 30627  		// result: (First no yes)
 30628  		for b.Controls[0].Op == OpARM64FlagConstant {
 30629  			v_0 := b.Controls[0]
 30630  			fc := auxIntToFlagConstant(v_0.AuxInt)
 30631  			if !(!fc.le()) {
 30632  				break
 30633  			}
 30634  			b.Reset(BlockFirst)
 30635  			b.swapSuccessors()
 30636  			return true
 30637  		}
 30638  		// match: (LE (InvertFlags cmp) yes no)
 30639  		// result: (GE cmp yes no)
 30640  		for b.Controls[0].Op == OpARM64InvertFlags {
 30641  			v_0 := b.Controls[0]
 30642  			cmp := v_0.Args[0]
 30643  			b.resetWithControl(BlockARM64GE, cmp)
 30644  			return true
 30645  		}
 30646  	case BlockARM64LEnoov:
 30647  		// match: (LEnoov (FlagConstant [fc]) yes no)
 30648  		// cond: fc.leNoov()
 30649  		// result: (First yes no)
 30650  		for b.Controls[0].Op == OpARM64FlagConstant {
 30651  			v_0 := b.Controls[0]
 30652  			fc := auxIntToFlagConstant(v_0.AuxInt)
 30653  			if !(fc.leNoov()) {
 30654  				break
 30655  			}
 30656  			b.Reset(BlockFirst)
 30657  			return true
 30658  		}
 30659  		// match: (LEnoov (FlagConstant [fc]) yes no)
 30660  		// cond: !fc.leNoov()
 30661  		// result: (First no yes)
 30662  		for b.Controls[0].Op == OpARM64FlagConstant {
 30663  			v_0 := b.Controls[0]
 30664  			fc := auxIntToFlagConstant(v_0.AuxInt)
 30665  			if !(!fc.leNoov()) {
 30666  				break
 30667  			}
 30668  			b.Reset(BlockFirst)
 30669  			b.swapSuccessors()
 30670  			return true
 30671  		}
 30672  		// match: (LEnoov (InvertFlags cmp) yes no)
 30673  		// result: (GEnoov cmp yes no)
 30674  		for b.Controls[0].Op == OpARM64InvertFlags {
 30675  			v_0 := b.Controls[0]
 30676  			cmp := v_0.Args[0]
 30677  			b.resetWithControl(BlockARM64GEnoov, cmp)
 30678  			return true
 30679  		}
 30680  	case BlockARM64LT:
 30681  		// match: (LT (CMPconst [0] z:(AND x y)) yes no)
 30682  		// cond: z.Uses == 1
 30683  		// result: (LT (TST x y) yes no)
 30684  		for b.Controls[0].Op == OpARM64CMPconst {
 30685  			v_0 := b.Controls[0]
 30686  			if auxIntToInt64(v_0.AuxInt) != 0 {
 30687  				break
 30688  			}
 30689  			z := v_0.Args[0]
 30690  			if z.Op != OpARM64AND {
 30691  				break
 30692  			}
 30693  			_ = z.Args[1]
 30694  			z_0 := z.Args[0]
 30695  			z_1 := z.Args[1]
 30696  			for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 {
 30697  				x := z_0
 30698  				y := z_1
 30699  				if !(z.Uses == 1) {
 30700  					continue
 30701  				}
 30702  				v0 := b.NewValue0(v_0.Pos, OpARM64TST, types.TypeFlags)
 30703  				v0.AddArg2(x, y)
 30704  				b.resetWithControl(BlockARM64LT, v0)
 30705  				return true
 30706  			}
 30707  			break
 30708  		}
 30709  		// match: (LT (CMPconst [0] x:(ANDconst [c] y)) yes no)
 30710  		// cond: x.Uses == 1
 30711  		// result: (LT (TSTconst [c] y) yes no)
 30712  		for b.Controls[0].Op == OpARM64CMPconst {
 30713  			v_0 := b.Controls[0]
 30714  			if auxIntToInt64(v_0.AuxInt) != 0 {
 30715  				break
 30716  			}
 30717  			x := v_0.Args[0]
 30718  			if x.Op != OpARM64ANDconst {
 30719  				break
 30720  			}
 30721  			c := auxIntToInt64(x.AuxInt)
 30722  			y := x.Args[0]
 30723  			if !(x.Uses == 1) {
 30724  				break
 30725  			}
 30726  			v0 := b.NewValue0(v_0.Pos, OpARM64TSTconst, types.TypeFlags)
 30727  			v0.AuxInt = int64ToAuxInt(c)
 30728  			v0.AddArg(y)
 30729  			b.resetWithControl(BlockARM64LT, v0)
 30730  			return true
 30731  		}
 30732  		// match: (LT (CMPWconst [0] z:(AND x y)) yes no)
 30733  		// cond: z.Uses == 1
 30734  		// result: (LT (TSTW x y) yes no)
 30735  		for b.Controls[0].Op == OpARM64CMPWconst {
 30736  			v_0 := b.Controls[0]
 30737  			if auxIntToInt32(v_0.AuxInt) != 0 {
 30738  				break
 30739  			}
 30740  			z := v_0.Args[0]
 30741  			if z.Op != OpARM64AND {
 30742  				break
 30743  			}
 30744  			_ = z.Args[1]
 30745  			z_0 := z.Args[0]
 30746  			z_1 := z.Args[1]
 30747  			for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 {
 30748  				x := z_0
 30749  				y := z_1
 30750  				if !(z.Uses == 1) {
 30751  					continue
 30752  				}
 30753  				v0 := b.NewValue0(v_0.Pos, OpARM64TSTW, types.TypeFlags)
 30754  				v0.AddArg2(x, y)
 30755  				b.resetWithControl(BlockARM64LT, v0)
 30756  				return true
 30757  			}
 30758  			break
 30759  		}
 30760  		// match: (LT (CMPWconst [0] x:(ANDconst [c] y)) yes no)
 30761  		// cond: x.Uses == 1
 30762  		// result: (LT (TSTWconst [int32(c)] y) yes no)
 30763  		for b.Controls[0].Op == OpARM64CMPWconst {
 30764  			v_0 := b.Controls[0]
 30765  			if auxIntToInt32(v_0.AuxInt) != 0 {
 30766  				break
 30767  			}
 30768  			x := v_0.Args[0]
 30769  			if x.Op != OpARM64ANDconst {
 30770  				break
 30771  			}
 30772  			c := auxIntToInt64(x.AuxInt)
 30773  			y := x.Args[0]
 30774  			if !(x.Uses == 1) {
 30775  				break
 30776  			}
 30777  			v0 := b.NewValue0(v_0.Pos, OpARM64TSTWconst, types.TypeFlags)
 30778  			v0.AuxInt = int32ToAuxInt(int32(c))
 30779  			v0.AddArg(y)
 30780  			b.resetWithControl(BlockARM64LT, v0)
 30781  			return true
 30782  		}
 30783  		// match: (LT (CMPconst [0] x:(ADDconst [c] y)) yes no)
 30784  		// cond: x.Uses == 1
 30785  		// result: (LTnoov (CMNconst [c] y) yes no)
 30786  		for b.Controls[0].Op == OpARM64CMPconst {
 30787  			v_0 := b.Controls[0]
 30788  			if auxIntToInt64(v_0.AuxInt) != 0 {
 30789  				break
 30790  			}
 30791  			x := v_0.Args[0]
 30792  			if x.Op != OpARM64ADDconst {
 30793  				break
 30794  			}
 30795  			c := auxIntToInt64(x.AuxInt)
 30796  			y := x.Args[0]
 30797  			if !(x.Uses == 1) {
 30798  				break
 30799  			}
 30800  			v0 := b.NewValue0(v_0.Pos, OpARM64CMNconst, types.TypeFlags)
 30801  			v0.AuxInt = int64ToAuxInt(c)
 30802  			v0.AddArg(y)
 30803  			b.resetWithControl(BlockARM64LTnoov, v0)
 30804  			return true
 30805  		}
 30806  		// match: (LT (CMPWconst [0] x:(ADDconst [c] y)) yes no)
 30807  		// cond: x.Uses == 1
 30808  		// result: (LTnoov (CMNWconst [int32(c)] y) yes no)
 30809  		for b.Controls[0].Op == OpARM64CMPWconst {
 30810  			v_0 := b.Controls[0]
 30811  			if auxIntToInt32(v_0.AuxInt) != 0 {
 30812  				break
 30813  			}
 30814  			x := v_0.Args[0]
 30815  			if x.Op != OpARM64ADDconst {
 30816  				break
 30817  			}
 30818  			c := auxIntToInt64(x.AuxInt)
 30819  			y := x.Args[0]
 30820  			if !(x.Uses == 1) {
 30821  				break
 30822  			}
 30823  			v0 := b.NewValue0(v_0.Pos, OpARM64CMNWconst, types.TypeFlags)
 30824  			v0.AuxInt = int32ToAuxInt(int32(c))
 30825  			v0.AddArg(y)
 30826  			b.resetWithControl(BlockARM64LTnoov, v0)
 30827  			return true
 30828  		}
 30829  		// match: (LT (CMPconst [0] z:(ADD x y)) yes no)
 30830  		// cond: z.Uses == 1
 30831  		// result: (LTnoov (CMN x y) yes no)
 30832  		for b.Controls[0].Op == OpARM64CMPconst {
 30833  			v_0 := b.Controls[0]
 30834  			if auxIntToInt64(v_0.AuxInt) != 0 {
 30835  				break
 30836  			}
 30837  			z := v_0.Args[0]
 30838  			if z.Op != OpARM64ADD {
 30839  				break
 30840  			}
 30841  			_ = z.Args[1]
 30842  			z_0 := z.Args[0]
 30843  			z_1 := z.Args[1]
 30844  			for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 {
 30845  				x := z_0
 30846  				y := z_1
 30847  				if !(z.Uses == 1) {
 30848  					continue
 30849  				}
 30850  				v0 := b.NewValue0(v_0.Pos, OpARM64CMN, types.TypeFlags)
 30851  				v0.AddArg2(x, y)
 30852  				b.resetWithControl(BlockARM64LTnoov, v0)
 30853  				return true
 30854  			}
 30855  			break
 30856  		}
 30857  		// match: (LT (CMPWconst [0] z:(ADD x y)) yes no)
 30858  		// cond: z.Uses == 1
 30859  		// result: (LTnoov (CMNW x y) yes no)
 30860  		for b.Controls[0].Op == OpARM64CMPWconst {
 30861  			v_0 := b.Controls[0]
 30862  			if auxIntToInt32(v_0.AuxInt) != 0 {
 30863  				break
 30864  			}
 30865  			z := v_0.Args[0]
 30866  			if z.Op != OpARM64ADD {
 30867  				break
 30868  			}
 30869  			_ = z.Args[1]
 30870  			z_0 := z.Args[0]
 30871  			z_1 := z.Args[1]
 30872  			for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 {
 30873  				x := z_0
 30874  				y := z_1
 30875  				if !(z.Uses == 1) {
 30876  					continue
 30877  				}
 30878  				v0 := b.NewValue0(v_0.Pos, OpARM64CMNW, types.TypeFlags)
 30879  				v0.AddArg2(x, y)
 30880  				b.resetWithControl(BlockARM64LTnoov, v0)
 30881  				return true
 30882  			}
 30883  			break
 30884  		}
 30885  		// match: (LT (CMPconst [0] z:(MADD a x y)) yes no)
 30886  		// cond: z.Uses==1
 30887  		// result: (LTnoov (CMN a (MUL <x.Type> x y)) yes no)
 30888  		for b.Controls[0].Op == OpARM64CMPconst {
 30889  			v_0 := b.Controls[0]
 30890  			if auxIntToInt64(v_0.AuxInt) != 0 {
 30891  				break
 30892  			}
 30893  			z := v_0.Args[0]
 30894  			if z.Op != OpARM64MADD {
 30895  				break
 30896  			}
 30897  			y := z.Args[2]
 30898  			a := z.Args[0]
 30899  			x := z.Args[1]
 30900  			if !(z.Uses == 1) {
 30901  				break
 30902  			}
 30903  			v0 := b.NewValue0(v_0.Pos, OpARM64CMN, types.TypeFlags)
 30904  			v1 := b.NewValue0(v_0.Pos, OpARM64MUL, x.Type)
 30905  			v1.AddArg2(x, y)
 30906  			v0.AddArg2(a, v1)
 30907  			b.resetWithControl(BlockARM64LTnoov, v0)
 30908  			return true
 30909  		}
 30910  		// match: (LT (CMPconst [0] z:(MSUB a x y)) yes no)
 30911  		// cond: z.Uses==1
 30912  		// result: (LTnoov (CMP a (MUL <x.Type> x y)) yes no)
 30913  		for b.Controls[0].Op == OpARM64CMPconst {
 30914  			v_0 := b.Controls[0]
 30915  			if auxIntToInt64(v_0.AuxInt) != 0 {
 30916  				break
 30917  			}
 30918  			z := v_0.Args[0]
 30919  			if z.Op != OpARM64MSUB {
 30920  				break
 30921  			}
 30922  			y := z.Args[2]
 30923  			a := z.Args[0]
 30924  			x := z.Args[1]
 30925  			if !(z.Uses == 1) {
 30926  				break
 30927  			}
 30928  			v0 := b.NewValue0(v_0.Pos, OpARM64CMP, types.TypeFlags)
 30929  			v1 := b.NewValue0(v_0.Pos, OpARM64MUL, x.Type)
 30930  			v1.AddArg2(x, y)
 30931  			v0.AddArg2(a, v1)
 30932  			b.resetWithControl(BlockARM64LTnoov, v0)
 30933  			return true
 30934  		}
 30935  		// match: (LT (CMPWconst [0] z:(MADDW a x y)) yes no)
 30936  		// cond: z.Uses==1
 30937  		// result: (LTnoov (CMNW a (MULW <x.Type> x y)) yes no)
 30938  		for b.Controls[0].Op == OpARM64CMPWconst {
 30939  			v_0 := b.Controls[0]
 30940  			if auxIntToInt32(v_0.AuxInt) != 0 {
 30941  				break
 30942  			}
 30943  			z := v_0.Args[0]
 30944  			if z.Op != OpARM64MADDW {
 30945  				break
 30946  			}
 30947  			y := z.Args[2]
 30948  			a := z.Args[0]
 30949  			x := z.Args[1]
 30950  			if !(z.Uses == 1) {
 30951  				break
 30952  			}
 30953  			v0 := b.NewValue0(v_0.Pos, OpARM64CMNW, types.TypeFlags)
 30954  			v1 := b.NewValue0(v_0.Pos, OpARM64MULW, x.Type)
 30955  			v1.AddArg2(x, y)
 30956  			v0.AddArg2(a, v1)
 30957  			b.resetWithControl(BlockARM64LTnoov, v0)
 30958  			return true
 30959  		}
 30960  		// match: (LT (CMPWconst [0] z:(MSUBW a x y)) yes no)
 30961  		// cond: z.Uses==1
 30962  		// result: (LTnoov (CMPW a (MULW <x.Type> x y)) yes no)
 30963  		for b.Controls[0].Op == OpARM64CMPWconst {
 30964  			v_0 := b.Controls[0]
 30965  			if auxIntToInt32(v_0.AuxInt) != 0 {
 30966  				break
 30967  			}
 30968  			z := v_0.Args[0]
 30969  			if z.Op != OpARM64MSUBW {
 30970  				break
 30971  			}
 30972  			y := z.Args[2]
 30973  			a := z.Args[0]
 30974  			x := z.Args[1]
 30975  			if !(z.Uses == 1) {
 30976  				break
 30977  			}
 30978  			v0 := b.NewValue0(v_0.Pos, OpARM64CMPW, types.TypeFlags)
 30979  			v1 := b.NewValue0(v_0.Pos, OpARM64MULW, x.Type)
 30980  			v1.AddArg2(x, y)
 30981  			v0.AddArg2(a, v1)
 30982  			b.resetWithControl(BlockARM64LTnoov, v0)
 30983  			return true
 30984  		}
 30985  		// match: (LT (CMPWconst [0] x) yes no)
 30986  		// result: (TBNZ [31] x yes no)
 30987  		for b.Controls[0].Op == OpARM64CMPWconst {
 30988  			v_0 := b.Controls[0]
 30989  			if auxIntToInt32(v_0.AuxInt) != 0 {
 30990  				break
 30991  			}
 30992  			x := v_0.Args[0]
 30993  			b.resetWithControl(BlockARM64TBNZ, x)
 30994  			b.AuxInt = int64ToAuxInt(31)
 30995  			return true
 30996  		}
 30997  		// match: (LT (CMPconst [0] x) yes no)
 30998  		// result: (TBNZ [63] x yes no)
 30999  		for b.Controls[0].Op == OpARM64CMPconst {
 31000  			v_0 := b.Controls[0]
 31001  			if auxIntToInt64(v_0.AuxInt) != 0 {
 31002  				break
 31003  			}
 31004  			x := v_0.Args[0]
 31005  			b.resetWithControl(BlockARM64TBNZ, x)
 31006  			b.AuxInt = int64ToAuxInt(63)
 31007  			return true
 31008  		}
 31009  		// match: (LT (FlagConstant [fc]) yes no)
 31010  		// cond: fc.lt()
 31011  		// result: (First yes no)
 31012  		for b.Controls[0].Op == OpARM64FlagConstant {
 31013  			v_0 := b.Controls[0]
 31014  			fc := auxIntToFlagConstant(v_0.AuxInt)
 31015  			if !(fc.lt()) {
 31016  				break
 31017  			}
 31018  			b.Reset(BlockFirst)
 31019  			return true
 31020  		}
 31021  		// match: (LT (FlagConstant [fc]) yes no)
 31022  		// cond: !fc.lt()
 31023  		// result: (First no yes)
 31024  		for b.Controls[0].Op == OpARM64FlagConstant {
 31025  			v_0 := b.Controls[0]
 31026  			fc := auxIntToFlagConstant(v_0.AuxInt)
 31027  			if !(!fc.lt()) {
 31028  				break
 31029  			}
 31030  			b.Reset(BlockFirst)
 31031  			b.swapSuccessors()
 31032  			return true
 31033  		}
 31034  		// match: (LT (InvertFlags cmp) yes no)
 31035  		// result: (GT cmp yes no)
 31036  		for b.Controls[0].Op == OpARM64InvertFlags {
 31037  			v_0 := b.Controls[0]
 31038  			cmp := v_0.Args[0]
 31039  			b.resetWithControl(BlockARM64GT, cmp)
 31040  			return true
 31041  		}
 31042  	case BlockARM64LTnoov:
 31043  		// match: (LTnoov (FlagConstant [fc]) yes no)
 31044  		// cond: fc.ltNoov()
 31045  		// result: (First yes no)
 31046  		for b.Controls[0].Op == OpARM64FlagConstant {
 31047  			v_0 := b.Controls[0]
 31048  			fc := auxIntToFlagConstant(v_0.AuxInt)
 31049  			if !(fc.ltNoov()) {
 31050  				break
 31051  			}
 31052  			b.Reset(BlockFirst)
 31053  			return true
 31054  		}
 31055  		// match: (LTnoov (FlagConstant [fc]) yes no)
 31056  		// cond: !fc.ltNoov()
 31057  		// result: (First no yes)
 31058  		for b.Controls[0].Op == OpARM64FlagConstant {
 31059  			v_0 := b.Controls[0]
 31060  			fc := auxIntToFlagConstant(v_0.AuxInt)
 31061  			if !(!fc.ltNoov()) {
 31062  				break
 31063  			}
 31064  			b.Reset(BlockFirst)
 31065  			b.swapSuccessors()
 31066  			return true
 31067  		}
 31068  		// match: (LTnoov (InvertFlags cmp) yes no)
 31069  		// result: (GTnoov cmp yes no)
 31070  		for b.Controls[0].Op == OpARM64InvertFlags {
 31071  			v_0 := b.Controls[0]
 31072  			cmp := v_0.Args[0]
 31073  			b.resetWithControl(BlockARM64GTnoov, cmp)
 31074  			return true
 31075  		}
 31076  	case BlockARM64NE:
 31077  		// match: (NE (CMPconst [0] z:(AND x y)) yes no)
 31078  		// cond: z.Uses == 1
 31079  		// result: (NE (TST x y) yes no)
 31080  		for b.Controls[0].Op == OpARM64CMPconst {
 31081  			v_0 := b.Controls[0]
 31082  			if auxIntToInt64(v_0.AuxInt) != 0 {
 31083  				break
 31084  			}
 31085  			z := v_0.Args[0]
 31086  			if z.Op != OpARM64AND {
 31087  				break
 31088  			}
 31089  			_ = z.Args[1]
 31090  			z_0 := z.Args[0]
 31091  			z_1 := z.Args[1]
 31092  			for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 {
 31093  				x := z_0
 31094  				y := z_1
 31095  				if !(z.Uses == 1) {
 31096  					continue
 31097  				}
 31098  				v0 := b.NewValue0(v_0.Pos, OpARM64TST, types.TypeFlags)
 31099  				v0.AddArg2(x, y)
 31100  				b.resetWithControl(BlockARM64NE, v0)
 31101  				return true
 31102  			}
 31103  			break
 31104  		}
 31105  		// match: (NE (CMPconst [0] x:(ANDconst [c] y)) yes no)
 31106  		// cond: x.Uses == 1
 31107  		// result: (NE (TSTconst [c] y) yes no)
 31108  		for b.Controls[0].Op == OpARM64CMPconst {
 31109  			v_0 := b.Controls[0]
 31110  			if auxIntToInt64(v_0.AuxInt) != 0 {
 31111  				break
 31112  			}
 31113  			x := v_0.Args[0]
 31114  			if x.Op != OpARM64ANDconst {
 31115  				break
 31116  			}
 31117  			c := auxIntToInt64(x.AuxInt)
 31118  			y := x.Args[0]
 31119  			if !(x.Uses == 1) {
 31120  				break
 31121  			}
 31122  			v0 := b.NewValue0(v_0.Pos, OpARM64TSTconst, types.TypeFlags)
 31123  			v0.AuxInt = int64ToAuxInt(c)
 31124  			v0.AddArg(y)
 31125  			b.resetWithControl(BlockARM64NE, v0)
 31126  			return true
 31127  		}
 31128  		// match: (NE (CMPWconst [0] z:(AND x y)) yes no)
 31129  		// cond: z.Uses == 1
 31130  		// result: (NE (TSTW x y) yes no)
 31131  		for b.Controls[0].Op == OpARM64CMPWconst {
 31132  			v_0 := b.Controls[0]
 31133  			if auxIntToInt32(v_0.AuxInt) != 0 {
 31134  				break
 31135  			}
 31136  			z := v_0.Args[0]
 31137  			if z.Op != OpARM64AND {
 31138  				break
 31139  			}
 31140  			_ = z.Args[1]
 31141  			z_0 := z.Args[0]
 31142  			z_1 := z.Args[1]
 31143  			for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 {
 31144  				x := z_0
 31145  				y := z_1
 31146  				if !(z.Uses == 1) {
 31147  					continue
 31148  				}
 31149  				v0 := b.NewValue0(v_0.Pos, OpARM64TSTW, types.TypeFlags)
 31150  				v0.AddArg2(x, y)
 31151  				b.resetWithControl(BlockARM64NE, v0)
 31152  				return true
 31153  			}
 31154  			break
 31155  		}
 31156  		// match: (NE (CMPWconst [0] x:(ANDconst [c] y)) yes no)
 31157  		// cond: x.Uses == 1
 31158  		// result: (NE (TSTWconst [int32(c)] y) yes no)
 31159  		for b.Controls[0].Op == OpARM64CMPWconst {
 31160  			v_0 := b.Controls[0]
 31161  			if auxIntToInt32(v_0.AuxInt) != 0 {
 31162  				break
 31163  			}
 31164  			x := v_0.Args[0]
 31165  			if x.Op != OpARM64ANDconst {
 31166  				break
 31167  			}
 31168  			c := auxIntToInt64(x.AuxInt)
 31169  			y := x.Args[0]
 31170  			if !(x.Uses == 1) {
 31171  				break
 31172  			}
 31173  			v0 := b.NewValue0(v_0.Pos, OpARM64TSTWconst, types.TypeFlags)
 31174  			v0.AuxInt = int32ToAuxInt(int32(c))
 31175  			v0.AddArg(y)
 31176  			b.resetWithControl(BlockARM64NE, v0)
 31177  			return true
 31178  		}
 31179  		// match: (NE (CMPconst [0] x:(ADDconst [c] y)) yes no)
 31180  		// cond: x.Uses == 1
 31181  		// result: (NE (CMNconst [c] y) yes no)
 31182  		for b.Controls[0].Op == OpARM64CMPconst {
 31183  			v_0 := b.Controls[0]
 31184  			if auxIntToInt64(v_0.AuxInt) != 0 {
 31185  				break
 31186  			}
 31187  			x := v_0.Args[0]
 31188  			if x.Op != OpARM64ADDconst {
 31189  				break
 31190  			}
 31191  			c := auxIntToInt64(x.AuxInt)
 31192  			y := x.Args[0]
 31193  			if !(x.Uses == 1) {
 31194  				break
 31195  			}
 31196  			v0 := b.NewValue0(v_0.Pos, OpARM64CMNconst, types.TypeFlags)
 31197  			v0.AuxInt = int64ToAuxInt(c)
 31198  			v0.AddArg(y)
 31199  			b.resetWithControl(BlockARM64NE, v0)
 31200  			return true
 31201  		}
 31202  		// match: (NE (CMPWconst [0] x:(ADDconst [c] y)) yes no)
 31203  		// cond: x.Uses == 1
 31204  		// result: (NE (CMNWconst [int32(c)] y) yes no)
 31205  		for b.Controls[0].Op == OpARM64CMPWconst {
 31206  			v_0 := b.Controls[0]
 31207  			if auxIntToInt32(v_0.AuxInt) != 0 {
 31208  				break
 31209  			}
 31210  			x := v_0.Args[0]
 31211  			if x.Op != OpARM64ADDconst {
 31212  				break
 31213  			}
 31214  			c := auxIntToInt64(x.AuxInt)
 31215  			y := x.Args[0]
 31216  			if !(x.Uses == 1) {
 31217  				break
 31218  			}
 31219  			v0 := b.NewValue0(v_0.Pos, OpARM64CMNWconst, types.TypeFlags)
 31220  			v0.AuxInt = int32ToAuxInt(int32(c))
 31221  			v0.AddArg(y)
 31222  			b.resetWithControl(BlockARM64NE, v0)
 31223  			return true
 31224  		}
 31225  		// match: (NE (CMPconst [0] z:(ADD x y)) yes no)
 31226  		// cond: z.Uses == 1
 31227  		// result: (NE (CMN x y) yes no)
 31228  		for b.Controls[0].Op == OpARM64CMPconst {
 31229  			v_0 := b.Controls[0]
 31230  			if auxIntToInt64(v_0.AuxInt) != 0 {
 31231  				break
 31232  			}
 31233  			z := v_0.Args[0]
 31234  			if z.Op != OpARM64ADD {
 31235  				break
 31236  			}
 31237  			_ = z.Args[1]
 31238  			z_0 := z.Args[0]
 31239  			z_1 := z.Args[1]
 31240  			for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 {
 31241  				x := z_0
 31242  				y := z_1
 31243  				if !(z.Uses == 1) {
 31244  					continue
 31245  				}
 31246  				v0 := b.NewValue0(v_0.Pos, OpARM64CMN, types.TypeFlags)
 31247  				v0.AddArg2(x, y)
 31248  				b.resetWithControl(BlockARM64NE, v0)
 31249  				return true
 31250  			}
 31251  			break
 31252  		}
 31253  		// match: (NE (CMPWconst [0] z:(ADD x y)) yes no)
 31254  		// cond: z.Uses == 1
 31255  		// result: (NE (CMNW x y) yes no)
 31256  		for b.Controls[0].Op == OpARM64CMPWconst {
 31257  			v_0 := b.Controls[0]
 31258  			if auxIntToInt32(v_0.AuxInt) != 0 {
 31259  				break
 31260  			}
 31261  			z := v_0.Args[0]
 31262  			if z.Op != OpARM64ADD {
 31263  				break
 31264  			}
 31265  			_ = z.Args[1]
 31266  			z_0 := z.Args[0]
 31267  			z_1 := z.Args[1]
 31268  			for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 {
 31269  				x := z_0
 31270  				y := z_1
 31271  				if !(z.Uses == 1) {
 31272  					continue
 31273  				}
 31274  				v0 := b.NewValue0(v_0.Pos, OpARM64CMNW, types.TypeFlags)
 31275  				v0.AddArg2(x, y)
 31276  				b.resetWithControl(BlockARM64NE, v0)
 31277  				return true
 31278  			}
 31279  			break
 31280  		}
 31281  		// match: (NE (CMP x z:(NEG y)) yes no)
 31282  		// cond: z.Uses == 1
 31283  		// result: (NE (CMN x y) yes no)
 31284  		for b.Controls[0].Op == OpARM64CMP {
 31285  			v_0 := b.Controls[0]
 31286  			_ = v_0.Args[1]
 31287  			x := v_0.Args[0]
 31288  			z := v_0.Args[1]
 31289  			if z.Op != OpARM64NEG {
 31290  				break
 31291  			}
 31292  			y := z.Args[0]
 31293  			if !(z.Uses == 1) {
 31294  				break
 31295  			}
 31296  			v0 := b.NewValue0(v_0.Pos, OpARM64CMN, types.TypeFlags)
 31297  			v0.AddArg2(x, y)
 31298  			b.resetWithControl(BlockARM64NE, v0)
 31299  			return true
 31300  		}
 31301  		// match: (NE (CMPW x z:(NEG y)) yes no)
 31302  		// cond: z.Uses == 1
 31303  		// result: (NE (CMNW x y) yes no)
 31304  		for b.Controls[0].Op == OpARM64CMPW {
 31305  			v_0 := b.Controls[0]
 31306  			_ = v_0.Args[1]
 31307  			x := v_0.Args[0]
 31308  			z := v_0.Args[1]
 31309  			if z.Op != OpARM64NEG {
 31310  				break
 31311  			}
 31312  			y := z.Args[0]
 31313  			if !(z.Uses == 1) {
 31314  				break
 31315  			}
 31316  			v0 := b.NewValue0(v_0.Pos, OpARM64CMNW, types.TypeFlags)
 31317  			v0.AddArg2(x, y)
 31318  			b.resetWithControl(BlockARM64NE, v0)
 31319  			return true
 31320  		}
 31321  		// match: (NE (CMPconst [0] x) yes no)
 31322  		// result: (NZ x yes no)
 31323  		for b.Controls[0].Op == OpARM64CMPconst {
 31324  			v_0 := b.Controls[0]
 31325  			if auxIntToInt64(v_0.AuxInt) != 0 {
 31326  				break
 31327  			}
 31328  			x := v_0.Args[0]
 31329  			b.resetWithControl(BlockARM64NZ, x)
 31330  			return true
 31331  		}
 31332  		// match: (NE (CMPWconst [0] x) yes no)
 31333  		// result: (NZW x yes no)
 31334  		for b.Controls[0].Op == OpARM64CMPWconst {
 31335  			v_0 := b.Controls[0]
 31336  			if auxIntToInt32(v_0.AuxInt) != 0 {
 31337  				break
 31338  			}
 31339  			x := v_0.Args[0]
 31340  			b.resetWithControl(BlockARM64NZW, x)
 31341  			return true
 31342  		}
 31343  		// match: (NE (CMPconst [0] z:(MADD a x y)) yes no)
 31344  		// cond: z.Uses==1
 31345  		// result: (NE (CMN a (MUL <x.Type> x y)) yes no)
 31346  		for b.Controls[0].Op == OpARM64CMPconst {
 31347  			v_0 := b.Controls[0]
 31348  			if auxIntToInt64(v_0.AuxInt) != 0 {
 31349  				break
 31350  			}
 31351  			z := v_0.Args[0]
 31352  			if z.Op != OpARM64MADD {
 31353  				break
 31354  			}
 31355  			y := z.Args[2]
 31356  			a := z.Args[0]
 31357  			x := z.Args[1]
 31358  			if !(z.Uses == 1) {
 31359  				break
 31360  			}
 31361  			v0 := b.NewValue0(v_0.Pos, OpARM64CMN, types.TypeFlags)
 31362  			v1 := b.NewValue0(v_0.Pos, OpARM64MUL, x.Type)
 31363  			v1.AddArg2(x, y)
 31364  			v0.AddArg2(a, v1)
 31365  			b.resetWithControl(BlockARM64NE, v0)
 31366  			return true
 31367  		}
 31368  		// match: (NE (CMPconst [0] z:(MSUB a x y)) yes no)
 31369  		// cond: z.Uses==1
 31370  		// result: (NE (CMP a (MUL <x.Type> x y)) yes no)
 31371  		for b.Controls[0].Op == OpARM64CMPconst {
 31372  			v_0 := b.Controls[0]
 31373  			if auxIntToInt64(v_0.AuxInt) != 0 {
 31374  				break
 31375  			}
 31376  			z := v_0.Args[0]
 31377  			if z.Op != OpARM64MSUB {
 31378  				break
 31379  			}
 31380  			y := z.Args[2]
 31381  			a := z.Args[0]
 31382  			x := z.Args[1]
 31383  			if !(z.Uses == 1) {
 31384  				break
 31385  			}
 31386  			v0 := b.NewValue0(v_0.Pos, OpARM64CMP, types.TypeFlags)
 31387  			v1 := b.NewValue0(v_0.Pos, OpARM64MUL, x.Type)
 31388  			v1.AddArg2(x, y)
 31389  			v0.AddArg2(a, v1)
 31390  			b.resetWithControl(BlockARM64NE, v0)
 31391  			return true
 31392  		}
 31393  		// match: (NE (CMPWconst [0] z:(MADDW a x y)) yes no)
 31394  		// cond: z.Uses==1
 31395  		// result: (NE (CMNW a (MULW <x.Type> x y)) yes no)
 31396  		for b.Controls[0].Op == OpARM64CMPWconst {
 31397  			v_0 := b.Controls[0]
 31398  			if auxIntToInt32(v_0.AuxInt) != 0 {
 31399  				break
 31400  			}
 31401  			z := v_0.Args[0]
 31402  			if z.Op != OpARM64MADDW {
 31403  				break
 31404  			}
 31405  			y := z.Args[2]
 31406  			a := z.Args[0]
 31407  			x := z.Args[1]
 31408  			if !(z.Uses == 1) {
 31409  				break
 31410  			}
 31411  			v0 := b.NewValue0(v_0.Pos, OpARM64CMNW, types.TypeFlags)
 31412  			v1 := b.NewValue0(v_0.Pos, OpARM64MULW, x.Type)
 31413  			v1.AddArg2(x, y)
 31414  			v0.AddArg2(a, v1)
 31415  			b.resetWithControl(BlockARM64NE, v0)
 31416  			return true
 31417  		}
 31418  		// match: (NE (CMPWconst [0] z:(MSUBW a x y)) yes no)
 31419  		// cond: z.Uses==1
 31420  		// result: (NE (CMPW a (MULW <x.Type> x y)) yes no)
 31421  		for b.Controls[0].Op == OpARM64CMPWconst {
 31422  			v_0 := b.Controls[0]
 31423  			if auxIntToInt32(v_0.AuxInt) != 0 {
 31424  				break
 31425  			}
 31426  			z := v_0.Args[0]
 31427  			if z.Op != OpARM64MSUBW {
 31428  				break
 31429  			}
 31430  			y := z.Args[2]
 31431  			a := z.Args[0]
 31432  			x := z.Args[1]
 31433  			if !(z.Uses == 1) {
 31434  				break
 31435  			}
 31436  			v0 := b.NewValue0(v_0.Pos, OpARM64CMPW, types.TypeFlags)
 31437  			v1 := b.NewValue0(v_0.Pos, OpARM64MULW, x.Type)
 31438  			v1.AddArg2(x, y)
 31439  			v0.AddArg2(a, v1)
 31440  			b.resetWithControl(BlockARM64NE, v0)
 31441  			return true
 31442  		}
 31443  		// match: (NE (TSTconst [c] x) yes no)
 31444  		// cond: oneBit(c)
 31445  		// result: (TBNZ [int64(ntz64(c))] x yes no)
 31446  		for b.Controls[0].Op == OpARM64TSTconst {
 31447  			v_0 := b.Controls[0]
 31448  			c := auxIntToInt64(v_0.AuxInt)
 31449  			x := v_0.Args[0]
 31450  			if !(oneBit(c)) {
 31451  				break
 31452  			}
 31453  			b.resetWithControl(BlockARM64TBNZ, x)
 31454  			b.AuxInt = int64ToAuxInt(int64(ntz64(c)))
 31455  			return true
 31456  		}
 31457  		// match: (NE (TSTWconst [c] x) yes no)
 31458  		// cond: oneBit(int64(uint32(c)))
 31459  		// result: (TBNZ [int64(ntz64(int64(uint32(c))))] x yes no)
 31460  		for b.Controls[0].Op == OpARM64TSTWconst {
 31461  			v_0 := b.Controls[0]
 31462  			c := auxIntToInt32(v_0.AuxInt)
 31463  			x := v_0.Args[0]
 31464  			if !(oneBit(int64(uint32(c)))) {
 31465  				break
 31466  			}
 31467  			b.resetWithControl(BlockARM64TBNZ, x)
 31468  			b.AuxInt = int64ToAuxInt(int64(ntz64(int64(uint32(c)))))
 31469  			return true
 31470  		}
 31471  		// match: (NE (FlagConstant [fc]) yes no)
 31472  		// cond: fc.ne()
 31473  		// result: (First yes no)
 31474  		for b.Controls[0].Op == OpARM64FlagConstant {
 31475  			v_0 := b.Controls[0]
 31476  			fc := auxIntToFlagConstant(v_0.AuxInt)
 31477  			if !(fc.ne()) {
 31478  				break
 31479  			}
 31480  			b.Reset(BlockFirst)
 31481  			return true
 31482  		}
 31483  		// match: (NE (FlagConstant [fc]) yes no)
 31484  		// cond: !fc.ne()
 31485  		// result: (First no yes)
 31486  		for b.Controls[0].Op == OpARM64FlagConstant {
 31487  			v_0 := b.Controls[0]
 31488  			fc := auxIntToFlagConstant(v_0.AuxInt)
 31489  			if !(!fc.ne()) {
 31490  				break
 31491  			}
 31492  			b.Reset(BlockFirst)
 31493  			b.swapSuccessors()
 31494  			return true
 31495  		}
 31496  		// match: (NE (InvertFlags cmp) yes no)
 31497  		// result: (NE cmp yes no)
 31498  		for b.Controls[0].Op == OpARM64InvertFlags {
 31499  			v_0 := b.Controls[0]
 31500  			cmp := v_0.Args[0]
 31501  			b.resetWithControl(BlockARM64NE, cmp)
 31502  			return true
 31503  		}
 31504  	case BlockARM64NZ:
 31505  		// match: (NZ (Equal cc) yes no)
 31506  		// result: (EQ cc yes no)
 31507  		for b.Controls[0].Op == OpARM64Equal {
 31508  			v_0 := b.Controls[0]
 31509  			cc := v_0.Args[0]
 31510  			b.resetWithControl(BlockARM64EQ, cc)
 31511  			return true
 31512  		}
 31513  		// match: (NZ (NotEqual cc) yes no)
 31514  		// result: (NE cc yes no)
 31515  		for b.Controls[0].Op == OpARM64NotEqual {
 31516  			v_0 := b.Controls[0]
 31517  			cc := v_0.Args[0]
 31518  			b.resetWithControl(BlockARM64NE, cc)
 31519  			return true
 31520  		}
 31521  		// match: (NZ (LessThan cc) yes no)
 31522  		// result: (LT cc yes no)
 31523  		for b.Controls[0].Op == OpARM64LessThan {
 31524  			v_0 := b.Controls[0]
 31525  			cc := v_0.Args[0]
 31526  			b.resetWithControl(BlockARM64LT, cc)
 31527  			return true
 31528  		}
 31529  		// match: (NZ (LessThanU cc) yes no)
 31530  		// result: (ULT cc yes no)
 31531  		for b.Controls[0].Op == OpARM64LessThanU {
 31532  			v_0 := b.Controls[0]
 31533  			cc := v_0.Args[0]
 31534  			b.resetWithControl(BlockARM64ULT, cc)
 31535  			return true
 31536  		}
 31537  		// match: (NZ (LessEqual cc) yes no)
 31538  		// result: (LE cc yes no)
 31539  		for b.Controls[0].Op == OpARM64LessEqual {
 31540  			v_0 := b.Controls[0]
 31541  			cc := v_0.Args[0]
 31542  			b.resetWithControl(BlockARM64LE, cc)
 31543  			return true
 31544  		}
 31545  		// match: (NZ (LessEqualU cc) yes no)
 31546  		// result: (ULE cc yes no)
 31547  		for b.Controls[0].Op == OpARM64LessEqualU {
 31548  			v_0 := b.Controls[0]
 31549  			cc := v_0.Args[0]
 31550  			b.resetWithControl(BlockARM64ULE, cc)
 31551  			return true
 31552  		}
 31553  		// match: (NZ (GreaterThan cc) yes no)
 31554  		// result: (GT cc yes no)
 31555  		for b.Controls[0].Op == OpARM64GreaterThan {
 31556  			v_0 := b.Controls[0]
 31557  			cc := v_0.Args[0]
 31558  			b.resetWithControl(BlockARM64GT, cc)
 31559  			return true
 31560  		}
 31561  		// match: (NZ (GreaterThanU cc) yes no)
 31562  		// result: (UGT cc yes no)
 31563  		for b.Controls[0].Op == OpARM64GreaterThanU {
 31564  			v_0 := b.Controls[0]
 31565  			cc := v_0.Args[0]
 31566  			b.resetWithControl(BlockARM64UGT, cc)
 31567  			return true
 31568  		}
 31569  		// match: (NZ (GreaterEqual cc) yes no)
 31570  		// result: (GE cc yes no)
 31571  		for b.Controls[0].Op == OpARM64GreaterEqual {
 31572  			v_0 := b.Controls[0]
 31573  			cc := v_0.Args[0]
 31574  			b.resetWithControl(BlockARM64GE, cc)
 31575  			return true
 31576  		}
 31577  		// match: (NZ (GreaterEqualU cc) yes no)
 31578  		// result: (UGE cc yes no)
 31579  		for b.Controls[0].Op == OpARM64GreaterEqualU {
 31580  			v_0 := b.Controls[0]
 31581  			cc := v_0.Args[0]
 31582  			b.resetWithControl(BlockARM64UGE, cc)
 31583  			return true
 31584  		}
 31585  		// match: (NZ (LessThanF cc) yes no)
 31586  		// result: (FLT cc yes no)
 31587  		for b.Controls[0].Op == OpARM64LessThanF {
 31588  			v_0 := b.Controls[0]
 31589  			cc := v_0.Args[0]
 31590  			b.resetWithControl(BlockARM64FLT, cc)
 31591  			return true
 31592  		}
 31593  		// match: (NZ (LessEqualF cc) yes no)
 31594  		// result: (FLE cc yes no)
 31595  		for b.Controls[0].Op == OpARM64LessEqualF {
 31596  			v_0 := b.Controls[0]
 31597  			cc := v_0.Args[0]
 31598  			b.resetWithControl(BlockARM64FLE, cc)
 31599  			return true
 31600  		}
 31601  		// match: (NZ (GreaterThanF cc) yes no)
 31602  		// result: (FGT cc yes no)
 31603  		for b.Controls[0].Op == OpARM64GreaterThanF {
 31604  			v_0 := b.Controls[0]
 31605  			cc := v_0.Args[0]
 31606  			b.resetWithControl(BlockARM64FGT, cc)
 31607  			return true
 31608  		}
 31609  		// match: (NZ (GreaterEqualF cc) yes no)
 31610  		// result: (FGE cc yes no)
 31611  		for b.Controls[0].Op == OpARM64GreaterEqualF {
 31612  			v_0 := b.Controls[0]
 31613  			cc := v_0.Args[0]
 31614  			b.resetWithControl(BlockARM64FGE, cc)
 31615  			return true
 31616  		}
 31617  		// match: (NZ (ANDconst [c] x) yes no)
 31618  		// cond: oneBit(c)
 31619  		// result: (TBNZ [int64(ntz64(c))] x yes no)
 31620  		for b.Controls[0].Op == OpARM64ANDconst {
 31621  			v_0 := b.Controls[0]
 31622  			c := auxIntToInt64(v_0.AuxInt)
 31623  			x := v_0.Args[0]
 31624  			if !(oneBit(c)) {
 31625  				break
 31626  			}
 31627  			b.resetWithControl(BlockARM64TBNZ, x)
 31628  			b.AuxInt = int64ToAuxInt(int64(ntz64(c)))
 31629  			return true
 31630  		}
 31631  		// match: (NZ (MOVDconst [0]) yes no)
 31632  		// result: (First no yes)
 31633  		for b.Controls[0].Op == OpARM64MOVDconst {
 31634  			v_0 := b.Controls[0]
 31635  			if auxIntToInt64(v_0.AuxInt) != 0 {
 31636  				break
 31637  			}
 31638  			b.Reset(BlockFirst)
 31639  			b.swapSuccessors()
 31640  			return true
 31641  		}
 31642  		// match: (NZ (MOVDconst [c]) yes no)
 31643  		// cond: c != 0
 31644  		// result: (First yes no)
 31645  		for b.Controls[0].Op == OpARM64MOVDconst {
 31646  			v_0 := b.Controls[0]
 31647  			c := auxIntToInt64(v_0.AuxInt)
 31648  			if !(c != 0) {
 31649  				break
 31650  			}
 31651  			b.Reset(BlockFirst)
 31652  			return true
 31653  		}
 31654  	case BlockARM64NZW:
 31655  		// match: (NZW (ANDconst [c] x) yes no)
 31656  		// cond: oneBit(int64(uint32(c)))
 31657  		// result: (TBNZ [int64(ntz64(int64(uint32(c))))] x yes no)
 31658  		for b.Controls[0].Op == OpARM64ANDconst {
 31659  			v_0 := b.Controls[0]
 31660  			c := auxIntToInt64(v_0.AuxInt)
 31661  			x := v_0.Args[0]
 31662  			if !(oneBit(int64(uint32(c)))) {
 31663  				break
 31664  			}
 31665  			b.resetWithControl(BlockARM64TBNZ, x)
 31666  			b.AuxInt = int64ToAuxInt(int64(ntz64(int64(uint32(c)))))
 31667  			return true
 31668  		}
 31669  		// match: (NZW (MOVDconst [c]) yes no)
 31670  		// cond: int32(c) == 0
 31671  		// result: (First no yes)
 31672  		for b.Controls[0].Op == OpARM64MOVDconst {
 31673  			v_0 := b.Controls[0]
 31674  			c := auxIntToInt64(v_0.AuxInt)
 31675  			if !(int32(c) == 0) {
 31676  				break
 31677  			}
 31678  			b.Reset(BlockFirst)
 31679  			b.swapSuccessors()
 31680  			return true
 31681  		}
 31682  		// match: (NZW (MOVDconst [c]) yes no)
 31683  		// cond: int32(c) != 0
 31684  		// result: (First yes no)
 31685  		for b.Controls[0].Op == OpARM64MOVDconst {
 31686  			v_0 := b.Controls[0]
 31687  			c := auxIntToInt64(v_0.AuxInt)
 31688  			if !(int32(c) != 0) {
 31689  				break
 31690  			}
 31691  			b.Reset(BlockFirst)
 31692  			return true
 31693  		}
 31694  	case BlockARM64TBNZ:
 31695  		// match: (TBNZ [0] (Equal cc) yes no)
 31696  		// result: (EQ cc yes no)
 31697  		for b.Controls[0].Op == OpARM64Equal {
 31698  			v_0 := b.Controls[0]
 31699  			cc := v_0.Args[0]
 31700  			if auxIntToInt64(b.AuxInt) != 0 {
 31701  				break
 31702  			}
 31703  			b.resetWithControl(BlockARM64EQ, cc)
 31704  			return true
 31705  		}
 31706  		// match: (TBNZ [0] (NotEqual cc) yes no)
 31707  		// result: (NE cc yes no)
 31708  		for b.Controls[0].Op == OpARM64NotEqual {
 31709  			v_0 := b.Controls[0]
 31710  			cc := v_0.Args[0]
 31711  			if auxIntToInt64(b.AuxInt) != 0 {
 31712  				break
 31713  			}
 31714  			b.resetWithControl(BlockARM64NE, cc)
 31715  			return true
 31716  		}
 31717  		// match: (TBNZ [0] (LessThan cc) yes no)
 31718  		// result: (LT cc yes no)
 31719  		for b.Controls[0].Op == OpARM64LessThan {
 31720  			v_0 := b.Controls[0]
 31721  			cc := v_0.Args[0]
 31722  			if auxIntToInt64(b.AuxInt) != 0 {
 31723  				break
 31724  			}
 31725  			b.resetWithControl(BlockARM64LT, cc)
 31726  			return true
 31727  		}
 31728  		// match: (TBNZ [0] (LessThanU cc) yes no)
 31729  		// result: (ULT cc yes no)
 31730  		for b.Controls[0].Op == OpARM64LessThanU {
 31731  			v_0 := b.Controls[0]
 31732  			cc := v_0.Args[0]
 31733  			if auxIntToInt64(b.AuxInt) != 0 {
 31734  				break
 31735  			}
 31736  			b.resetWithControl(BlockARM64ULT, cc)
 31737  			return true
 31738  		}
 31739  		// match: (TBNZ [0] (LessEqual cc) yes no)
 31740  		// result: (LE cc yes no)
 31741  		for b.Controls[0].Op == OpARM64LessEqual {
 31742  			v_0 := b.Controls[0]
 31743  			cc := v_0.Args[0]
 31744  			if auxIntToInt64(b.AuxInt) != 0 {
 31745  				break
 31746  			}
 31747  			b.resetWithControl(BlockARM64LE, cc)
 31748  			return true
 31749  		}
 31750  		// match: (TBNZ [0] (LessEqualU cc) yes no)
 31751  		// result: (ULE cc yes no)
 31752  		for b.Controls[0].Op == OpARM64LessEqualU {
 31753  			v_0 := b.Controls[0]
 31754  			cc := v_0.Args[0]
 31755  			if auxIntToInt64(b.AuxInt) != 0 {
 31756  				break
 31757  			}
 31758  			b.resetWithControl(BlockARM64ULE, cc)
 31759  			return true
 31760  		}
 31761  		// match: (TBNZ [0] (GreaterThan cc) yes no)
 31762  		// result: (GT cc yes no)
 31763  		for b.Controls[0].Op == OpARM64GreaterThan {
 31764  			v_0 := b.Controls[0]
 31765  			cc := v_0.Args[0]
 31766  			if auxIntToInt64(b.AuxInt) != 0 {
 31767  				break
 31768  			}
 31769  			b.resetWithControl(BlockARM64GT, cc)
 31770  			return true
 31771  		}
 31772  		// match: (TBNZ [0] (GreaterThanU cc) yes no)
 31773  		// result: (UGT cc yes no)
 31774  		for b.Controls[0].Op == OpARM64GreaterThanU {
 31775  			v_0 := b.Controls[0]
 31776  			cc := v_0.Args[0]
 31777  			if auxIntToInt64(b.AuxInt) != 0 {
 31778  				break
 31779  			}
 31780  			b.resetWithControl(BlockARM64UGT, cc)
 31781  			return true
 31782  		}
 31783  		// match: (TBNZ [0] (GreaterEqual cc) yes no)
 31784  		// result: (GE cc yes no)
 31785  		for b.Controls[0].Op == OpARM64GreaterEqual {
 31786  			v_0 := b.Controls[0]
 31787  			cc := v_0.Args[0]
 31788  			if auxIntToInt64(b.AuxInt) != 0 {
 31789  				break
 31790  			}
 31791  			b.resetWithControl(BlockARM64GE, cc)
 31792  			return true
 31793  		}
 31794  		// match: (TBNZ [0] (GreaterEqualU cc) yes no)
 31795  		// result: (UGE cc yes no)
 31796  		for b.Controls[0].Op == OpARM64GreaterEqualU {
 31797  			v_0 := b.Controls[0]
 31798  			cc := v_0.Args[0]
 31799  			if auxIntToInt64(b.AuxInt) != 0 {
 31800  				break
 31801  			}
 31802  			b.resetWithControl(BlockARM64UGE, cc)
 31803  			return true
 31804  		}
 31805  		// match: (TBNZ [0] (LessThanF cc) yes no)
 31806  		// result: (FLT cc yes no)
 31807  		for b.Controls[0].Op == OpARM64LessThanF {
 31808  			v_0 := b.Controls[0]
 31809  			cc := v_0.Args[0]
 31810  			if auxIntToInt64(b.AuxInt) != 0 {
 31811  				break
 31812  			}
 31813  			b.resetWithControl(BlockARM64FLT, cc)
 31814  			return true
 31815  		}
 31816  		// match: (TBNZ [0] (LessEqualF cc) yes no)
 31817  		// result: (FLE cc yes no)
 31818  		for b.Controls[0].Op == OpARM64LessEqualF {
 31819  			v_0 := b.Controls[0]
 31820  			cc := v_0.Args[0]
 31821  			if auxIntToInt64(b.AuxInt) != 0 {
 31822  				break
 31823  			}
 31824  			b.resetWithControl(BlockARM64FLE, cc)
 31825  			return true
 31826  		}
 31827  		// match: (TBNZ [0] (GreaterThanF cc) yes no)
 31828  		// result: (FGT cc yes no)
 31829  		for b.Controls[0].Op == OpARM64GreaterThanF {
 31830  			v_0 := b.Controls[0]
 31831  			cc := v_0.Args[0]
 31832  			if auxIntToInt64(b.AuxInt) != 0 {
 31833  				break
 31834  			}
 31835  			b.resetWithControl(BlockARM64FGT, cc)
 31836  			return true
 31837  		}
 31838  		// match: (TBNZ [0] (GreaterEqualF cc) yes no)
 31839  		// result: (FGE cc yes no)
 31840  		for b.Controls[0].Op == OpARM64GreaterEqualF {
 31841  			v_0 := b.Controls[0]
 31842  			cc := v_0.Args[0]
 31843  			if auxIntToInt64(b.AuxInt) != 0 {
 31844  				break
 31845  			}
 31846  			b.resetWithControl(BlockARM64FGE, cc)
 31847  			return true
 31848  		}
 31849  	case BlockARM64UGE:
 31850  		// match: (UGE (FlagConstant [fc]) yes no)
 31851  		// cond: fc.uge()
 31852  		// result: (First yes no)
 31853  		for b.Controls[0].Op == OpARM64FlagConstant {
 31854  			v_0 := b.Controls[0]
 31855  			fc := auxIntToFlagConstant(v_0.AuxInt)
 31856  			if !(fc.uge()) {
 31857  				break
 31858  			}
 31859  			b.Reset(BlockFirst)
 31860  			return true
 31861  		}
 31862  		// match: (UGE (FlagConstant [fc]) yes no)
 31863  		// cond: !fc.uge()
 31864  		// result: (First no yes)
 31865  		for b.Controls[0].Op == OpARM64FlagConstant {
 31866  			v_0 := b.Controls[0]
 31867  			fc := auxIntToFlagConstant(v_0.AuxInt)
 31868  			if !(!fc.uge()) {
 31869  				break
 31870  			}
 31871  			b.Reset(BlockFirst)
 31872  			b.swapSuccessors()
 31873  			return true
 31874  		}
 31875  		// match: (UGE (InvertFlags cmp) yes no)
 31876  		// result: (ULE cmp yes no)
 31877  		for b.Controls[0].Op == OpARM64InvertFlags {
 31878  			v_0 := b.Controls[0]
 31879  			cmp := v_0.Args[0]
 31880  			b.resetWithControl(BlockARM64ULE, cmp)
 31881  			return true
 31882  		}
 31883  	case BlockARM64UGT:
 31884  		// match: (UGT (FlagConstant [fc]) yes no)
 31885  		// cond: fc.ugt()
 31886  		// result: (First yes no)
 31887  		for b.Controls[0].Op == OpARM64FlagConstant {
 31888  			v_0 := b.Controls[0]
 31889  			fc := auxIntToFlagConstant(v_0.AuxInt)
 31890  			if !(fc.ugt()) {
 31891  				break
 31892  			}
 31893  			b.Reset(BlockFirst)
 31894  			return true
 31895  		}
 31896  		// match: (UGT (FlagConstant [fc]) yes no)
 31897  		// cond: !fc.ugt()
 31898  		// result: (First no yes)
 31899  		for b.Controls[0].Op == OpARM64FlagConstant {
 31900  			v_0 := b.Controls[0]
 31901  			fc := auxIntToFlagConstant(v_0.AuxInt)
 31902  			if !(!fc.ugt()) {
 31903  				break
 31904  			}
 31905  			b.Reset(BlockFirst)
 31906  			b.swapSuccessors()
 31907  			return true
 31908  		}
 31909  		// match: (UGT (InvertFlags cmp) yes no)
 31910  		// result: (ULT cmp yes no)
 31911  		for b.Controls[0].Op == OpARM64InvertFlags {
 31912  			v_0 := b.Controls[0]
 31913  			cmp := v_0.Args[0]
 31914  			b.resetWithControl(BlockARM64ULT, cmp)
 31915  			return true
 31916  		}
 31917  	case BlockARM64ULE:
 31918  		// match: (ULE (FlagConstant [fc]) yes no)
 31919  		// cond: fc.ule()
 31920  		// result: (First yes no)
 31921  		for b.Controls[0].Op == OpARM64FlagConstant {
 31922  			v_0 := b.Controls[0]
 31923  			fc := auxIntToFlagConstant(v_0.AuxInt)
 31924  			if !(fc.ule()) {
 31925  				break
 31926  			}
 31927  			b.Reset(BlockFirst)
 31928  			return true
 31929  		}
 31930  		// match: (ULE (FlagConstant [fc]) yes no)
 31931  		// cond: !fc.ule()
 31932  		// result: (First no yes)
 31933  		for b.Controls[0].Op == OpARM64FlagConstant {
 31934  			v_0 := b.Controls[0]
 31935  			fc := auxIntToFlagConstant(v_0.AuxInt)
 31936  			if !(!fc.ule()) {
 31937  				break
 31938  			}
 31939  			b.Reset(BlockFirst)
 31940  			b.swapSuccessors()
 31941  			return true
 31942  		}
 31943  		// match: (ULE (InvertFlags cmp) yes no)
 31944  		// result: (UGE cmp yes no)
 31945  		for b.Controls[0].Op == OpARM64InvertFlags {
 31946  			v_0 := b.Controls[0]
 31947  			cmp := v_0.Args[0]
 31948  			b.resetWithControl(BlockARM64UGE, cmp)
 31949  			return true
 31950  		}
 31951  	case BlockARM64ULT:
 31952  		// match: (ULT (FlagConstant [fc]) yes no)
 31953  		// cond: fc.ult()
 31954  		// result: (First yes no)
 31955  		for b.Controls[0].Op == OpARM64FlagConstant {
 31956  			v_0 := b.Controls[0]
 31957  			fc := auxIntToFlagConstant(v_0.AuxInt)
 31958  			if !(fc.ult()) {
 31959  				break
 31960  			}
 31961  			b.Reset(BlockFirst)
 31962  			return true
 31963  		}
 31964  		// match: (ULT (FlagConstant [fc]) yes no)
 31965  		// cond: !fc.ult()
 31966  		// result: (First no yes)
 31967  		for b.Controls[0].Op == OpARM64FlagConstant {
 31968  			v_0 := b.Controls[0]
 31969  			fc := auxIntToFlagConstant(v_0.AuxInt)
 31970  			if !(!fc.ult()) {
 31971  				break
 31972  			}
 31973  			b.Reset(BlockFirst)
 31974  			b.swapSuccessors()
 31975  			return true
 31976  		}
 31977  		// match: (ULT (InvertFlags cmp) yes no)
 31978  		// result: (UGT cmp yes no)
 31979  		for b.Controls[0].Op == OpARM64InvertFlags {
 31980  			v_0 := b.Controls[0]
 31981  			cmp := v_0.Args[0]
 31982  			b.resetWithControl(BlockARM64UGT, cmp)
 31983  			return true
 31984  		}
 31985  	case BlockARM64Z:
 31986  		// match: (Z (ANDconst [c] x) yes no)
 31987  		// cond: oneBit(c)
 31988  		// result: (TBZ [int64(ntz64(c))] x yes no)
 31989  		for b.Controls[0].Op == OpARM64ANDconst {
 31990  			v_0 := b.Controls[0]
 31991  			c := auxIntToInt64(v_0.AuxInt)
 31992  			x := v_0.Args[0]
 31993  			if !(oneBit(c)) {
 31994  				break
 31995  			}
 31996  			b.resetWithControl(BlockARM64TBZ, x)
 31997  			b.AuxInt = int64ToAuxInt(int64(ntz64(c)))
 31998  			return true
 31999  		}
 32000  		// match: (Z (MOVDconst [0]) yes no)
 32001  		// result: (First yes no)
 32002  		for b.Controls[0].Op == OpARM64MOVDconst {
 32003  			v_0 := b.Controls[0]
 32004  			if auxIntToInt64(v_0.AuxInt) != 0 {
 32005  				break
 32006  			}
 32007  			b.Reset(BlockFirst)
 32008  			return true
 32009  		}
 32010  		// match: (Z (MOVDconst [c]) yes no)
 32011  		// cond: c != 0
 32012  		// result: (First no yes)
 32013  		for b.Controls[0].Op == OpARM64MOVDconst {
 32014  			v_0 := b.Controls[0]
 32015  			c := auxIntToInt64(v_0.AuxInt)
 32016  			if !(c != 0) {
 32017  				break
 32018  			}
 32019  			b.Reset(BlockFirst)
 32020  			b.swapSuccessors()
 32021  			return true
 32022  		}
 32023  	case BlockARM64ZW:
 32024  		// match: (ZW (ANDconst [c] x) yes no)
 32025  		// cond: oneBit(int64(uint32(c)))
 32026  		// result: (TBZ [int64(ntz64(int64(uint32(c))))] x yes no)
 32027  		for b.Controls[0].Op == OpARM64ANDconst {
 32028  			v_0 := b.Controls[0]
 32029  			c := auxIntToInt64(v_0.AuxInt)
 32030  			x := v_0.Args[0]
 32031  			if !(oneBit(int64(uint32(c)))) {
 32032  				break
 32033  			}
 32034  			b.resetWithControl(BlockARM64TBZ, x)
 32035  			b.AuxInt = int64ToAuxInt(int64(ntz64(int64(uint32(c)))))
 32036  			return true
 32037  		}
 32038  		// match: (ZW (MOVDconst [c]) yes no)
 32039  		// cond: int32(c) == 0
 32040  		// result: (First yes no)
 32041  		for b.Controls[0].Op == OpARM64MOVDconst {
 32042  			v_0 := b.Controls[0]
 32043  			c := auxIntToInt64(v_0.AuxInt)
 32044  			if !(int32(c) == 0) {
 32045  				break
 32046  			}
 32047  			b.Reset(BlockFirst)
 32048  			return true
 32049  		}
 32050  		// match: (ZW (MOVDconst [c]) yes no)
 32051  		// cond: int32(c) != 0
 32052  		// result: (First no yes)
 32053  		for b.Controls[0].Op == OpARM64MOVDconst {
 32054  			v_0 := b.Controls[0]
 32055  			c := auxIntToInt64(v_0.AuxInt)
 32056  			if !(int32(c) != 0) {
 32057  				break
 32058  			}
 32059  			b.Reset(BlockFirst)
 32060  			b.swapSuccessors()
 32061  			return true
 32062  		}
 32063  	}
 32064  	return false
 32065  }