github.com/euank/go@v0.0.0-20160829210321-495514729181/src/cmd/compile/internal/ssa/gen/PPC64.rules (about)

     1  // Copyright 2016 The Go Authors. All rights reserved.
     2  // Use of this source code is governed by a BSD-style
     3  // license that can be found in the LICENSE file.
     4  
     5  // Lowering arithmetic
        // Integer add/sub of every width lowers to the 64-bit ADD/SUB;
        // narrower results only need their low-order bits to be correct.
     6  (Add64  x y) -> (ADD  x y)
     7  (AddPtr x y) -> (ADD  x y)
     8  (Add32  x y) -> (ADD x y)
     9  (Add16  x y) -> (ADD x y)
    10  (Add8   x y) -> (ADD x y)
    11  (Add64F x y) -> (FADD x y)
    12  (Add32F x y) -> (FADDS x y)
    13  
    14  (Sub64  x y) -> (SUB  x y)
    15  (SubPtr x y) -> (SUB  x y)
    16  (Sub32  x y) -> (SUB x y)
    17  (Sub16  x y) -> (SUB x y)
    18  (Sub8   x y) -> (SUB x y)
    19  (Sub32F x y) -> (FSUBS x y)
    20  (Sub64F x y) -> (FSUB x y)
    21  
        // There is no hardware remainder instruction: 8/16-bit mod first
        // widens to the 32-bit case, which is computed as x - y*(x/y).
    22  (Mod16 x y) -> (Mod32 (SignExt16to32 x) (SignExt16to32 y))
    23  (Mod16u x y) -> (Mod32u (ZeroExt16to32 x) (ZeroExt16to32 y))
    24  (Mod8 x y) -> (Mod32 (SignExt8to32 x) (SignExt8to32 y))
    25  (Mod8u x y) -> (Mod32u (ZeroExt8to32 x) (ZeroExt8to32 y))
    26  (Mod64 x y) -> (SUB x (MULLD y (DIVD x y)))
    27  (Mod64u x y) -> (SUB x (MULLD y (DIVDU x y)))
    28  (Mod32 x y) -> (SUB x (MULLW y (DIVW x y)))
    29  (Mod32u x y) -> (SUB x (MULLW y (DIVWU x y)))
    30  
        // avg = (x>>1) + (y>>1) + (x&y&1); unlike (x+y)/2 this form
        // cannot overflow.
    31  (Avg64u <t> x y) -> (ADD (ADD <t> (SRD <t> x (MOVDconst <t> [1])) (SRD <t> y (MOVDconst <t> [1]))) (ANDconst <t> (AND <t> x y) [1]))
    32  
    33  (Mul64  x y) -> (MULLD  x y)
    34  (Mul32  x y) -> (MULLW  x y)
    35  (Mul16  x y) -> (MULLW x y)
    36  (Mul8   x y) -> (MULLW x y)
    37  
        // Sub-word division extends its operands first so the 32-bit
        // divide sees properly signed/unsigned values.
    38  (Div64  x y) -> (DIVD  x y)
    39  (Div64u x y) -> (DIVDU x y)
    40  (Div32  x y) -> (DIVW  x y)
    41  (Div32u x y) -> (DIVWU x y)
    42  (Div16  x y) -> (DIVW  (SignExt16to32 x) (SignExt16to32 y))
    43  (Div16u x y) -> (DIVWU (ZeroExt16to32 x) (ZeroExt16to32 y))
    44  (Div8   x y) -> (DIVW  (SignExt8to32 x) (SignExt8to32 y))
    45  (Div8u  x y) -> (DIVWU (ZeroExt8to32 x) (ZeroExt8to32 y))
    46  
        // High-word multiply of sub-word values: do a full 32-bit multiply
        // of the extended operands and shift the upper half into place.
    47  (Hmul64  x y) -> (MULHD  x y)
    48  (Hmul64u  x y) -> (MULHDU x y)
    49  (Hmul32  x y) -> (MULHW  x y)
    50  (Hmul32u  x y) -> (MULHWU x y)
    51  (Hmul16 x y) -> (SRAWconst (MULLW <config.fe.TypeInt32()> (SignExt16to32 x) (SignExt16to32 y)) [16])
    52  (Hmul16u x y) -> (SRWconst (MULLW <config.fe.TypeUInt32()> (ZeroExt16to32 x) (ZeroExt16to32 y)) [16])
    53  (Hmul8 x y) -> (SRAWconst (MULLW <config.fe.TypeInt16()> (SignExt8to32 x) (SignExt8to32 y)) [8])
    54  (Hmul8u x y) -> (SRWconst (MULLW <config.fe.TypeUInt16()> (ZeroExt8to32 x) (ZeroExt8to32 y)) [8])
    55  
    56  (Mul32F x y) -> (FMULS x y)
    57  (Mul64F x y) -> (FMUL x y)
    58  
    59  (Div32F x y) -> (FDIVS x y)
    60  (Div64F x y) -> (FDIV x y)
    61  
    62  // Lowering float <-> int
        // Xi2f64/Xf2i64 move raw bits between the integer and FP register
        // files; FCFID converts int64 -> float64, FRSP rounds a double to
        // single precision.
    63  (Cvt32to32F x) -> (FRSP (FCFID (Xi2f64 (SignExt32to64 x))))
    64  (Cvt32to64F x) -> (FCFID (Xi2f64 (SignExt32to64 x)))
    65  (Cvt64to32F x) -> (FRSP (FCFID (Xi2f64 x)))
    66  (Cvt64to64F x) -> (FCFID (Xi2f64 x))
    67  
        // FCTIWZ/FCTIDZ are the round-toward-zero conversions, matching
        // Go's truncating float->int conversion semantics.
    68  (Cvt32Fto32 x) -> (Xf2i64 (FCTIWZ x))
    69  (Cvt32Fto64 x) -> (Xf2i64 (FCTIDZ x))
    70  (Cvt64Fto32 x) -> (Xf2i64 (FCTIWZ x))
    71  (Cvt64Fto64 x) -> (Xf2i64 (FCTIDZ x))
    72  
    73  (Cvt32Fto64F x) -> x // Note x will have the wrong type for patterns dependent on Float32/Float64
    74  (Cvt64Fto32F x) -> (FRSP x)
    75  
    76  (Sqrt x) -> (FSQRT x)
    77  
        // Lowering shifts
        // Go requires a shift count >= the operand width to produce 0
        // (or all copies of the sign bit, for signed right shifts).
        // The pattern
        //   (ORN y (MaskIfNotCarry (ADDconstForCarry [-width] y)))
        // implements that: ADDconstForCarry [-width] y sets carry iff
        // y >= width (see the comment block below), MaskIfNotCarry is -1
        // when carry is clear, and ORN computes y | ^mask.  So the count
        // is left as y when y < width, and forced to -1 (an out-of-range
        // count for the hardware shift) when y >= width.
    78  (Rsh64x64 x y)  -> (SRAD x (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry (ADDconstForCarry [-64] y))))
    79  (Rsh64Ux64 x y) -> (SRD  x (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry (ADDconstForCarry [-64] y))))
    80  (Lsh64x64 x y)  -> (SLD  x (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry (ADDconstForCarry [-64] y))))
    81  
    82  (Rsh32x64 x y)  -> (SRAW x (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry (ADDconstForCarry [-32] y))))
    83  (Rsh32Ux64 x y) -> (SRW  x (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry (ADDconstForCarry [-32] y))))
    84  (Lsh32x64 x y)  -> (SLW  x (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry (ADDconstForCarry [-32] y))))
    85  
        // Sub-word right shifts must extend the value being shifted
        // (sign- or zero- as appropriate); left shifts need not.
    86  (Rsh16x64 x y)  -> (SRAW (SignExt16to32 x) (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry (ADDconstForCarry [-16] y))))
    87  (Rsh16Ux64 x y) -> (SRW  (ZeroExt16to32 x) (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry (ADDconstForCarry [-16] y))))
    88  (Lsh16x64 x y)  -> (SLW  x                 (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry (ADDconstForCarry [-16] y))))
    89  
    90  (Rsh8x64 x y)  -> (SRAW (SignExt8to32 x) (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry (ADDconstForCarry [-8] y))))
    91  (Rsh8Ux64 x y) -> (SRW  (ZeroExt8to32 x) (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry (ADDconstForCarry [-8] y))))
    92  (Lsh8x64 x y)  -> (SLW  x                (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry (ADDconstForCarry [-8] y))))
    93  
    94  
        // Shift counts narrower than 64 bits are zero-extended before the
        // carry trick so the comparison against the width is unsigned.
    95  (Rsh64x32 x y)  -> (SRAD x (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry (ADDconstForCarry [-64] (ZeroExt32to64 y)))))
    96  (Rsh64Ux32 x y) -> (SRD x  (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry (ADDconstForCarry [-64] (ZeroExt32to64 y)))))
    97  (Lsh64x32 x y)  -> (SLD x  (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry (ADDconstForCarry [-64] (ZeroExt32to64 y)))))
    98  
    99  (Rsh32x32 x y)  -> (SRAW x (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry (ADDconstForCarry [-32] (ZeroExt32to64 y)))))
   100  (Rsh32Ux32 x y) -> (SRW x  (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry (ADDconstForCarry [-32] (ZeroExt32to64 y)))))
   101  (Lsh32x32 x y)  -> (SLW x  (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry (ADDconstForCarry [-32] (ZeroExt32to64 y)))))
   102  
   103  (Rsh16x32 x y)  -> (SRAW (SignExt16to32 x) (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry (ADDconstForCarry [-16] (ZeroExt32to64 y)))))
   104  (Rsh16Ux32 x y) -> (SRW  (ZeroExt16to32 x) (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry (ADDconstForCarry [-16] (ZeroExt32to64 y)))))
   105  (Lsh16x32 x y)  -> (SLW  x                 (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry (ADDconstForCarry [-16] (ZeroExt32to64 y)))))
   106  
   107  (Rsh8x32 x y)  -> (SRAW (SignExt8to32 x) (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry (ADDconstForCarry [-8] (ZeroExt32to64 y)))))
   108  (Rsh8Ux32 x y) -> (SRW  (ZeroExt8to32 x) (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry (ADDconstForCarry [-8] (ZeroExt32to64 y)))))
   109  (Lsh8x32 x y)  -> (SLW  x                (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry (ADDconstForCarry [-8] (ZeroExt32to64 y)))))
   110  
   111  
   112  (Rsh64x16 x y)  -> (SRAD x (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry (ADDconstForCarry [-64] (ZeroExt16to64 y)))))
   113  (Rsh64Ux16 x y) -> (SRD x  (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry (ADDconstForCarry [-64] (ZeroExt16to64 y)))))
   114  (Lsh64x16 x y)  -> (SLD x  (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry (ADDconstForCarry [-64] (ZeroExt16to64 y)))))
   115  
   116  (Rsh32x16 x y)  -> (SRAW x (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry (ADDconstForCarry [-32] (ZeroExt16to64 y)))))
   117  (Rsh32Ux16 x y) -> (SRW x  (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry (ADDconstForCarry [-32] (ZeroExt16to64 y)))))
   118  (Lsh32x16 x y)  -> (SLW x  (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry (ADDconstForCarry [-32] (ZeroExt16to64 y)))))
   119  
   120  (Rsh16x16 x y)  -> (SRAW (SignExt16to32 x) (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry (ADDconstForCarry [-16] (ZeroExt16to64 y)))))
   121  (Rsh16Ux16 x y) -> (SRW  (ZeroExt16to32 x) (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry (ADDconstForCarry [-16] (ZeroExt16to64 y)))))
   122  (Lsh16x16 x y)  -> (SLW  x                 (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry (ADDconstForCarry [-16] (ZeroExt16to64 y)))))
   123  
   124  (Rsh8x16 x y)  -> (SRAW (SignExt8to32 x) (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry (ADDconstForCarry [-8] (ZeroExt16to64 y)))))
   125  (Rsh8Ux16 x y) -> (SRW  (ZeroExt8to32 x) (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry (ADDconstForCarry [-8] (ZeroExt16to64 y)))))
   126  (Lsh8x16 x y)  -> (SLW  x                (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry (ADDconstForCarry [-8] (ZeroExt16to64 y)))))
   127  
   128  
   129  (Rsh64x8 x y)  -> (SRAD x (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry (ADDconstForCarry [-64] (ZeroExt8to64 y)))))
   130  (Rsh64Ux8 x y) -> (SRD x  (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry (ADDconstForCarry [-64] (ZeroExt8to64 y)))))
   131  (Lsh64x8 x y)  -> (SLD x  (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry (ADDconstForCarry [-64] (ZeroExt8to64 y)))))
   132  
   133  (Rsh32x8 x y)  -> (SRAW x (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry (ADDconstForCarry [-32] (ZeroExt8to64 y)))))
   134  (Rsh32Ux8 x y) -> (SRW x  (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry (ADDconstForCarry [-32] (ZeroExt8to64 y)))))
   135  (Lsh32x8 x y)  -> (SLW x  (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry (ADDconstForCarry [-32] (ZeroExt8to64 y)))))
   136  
   137  (Rsh16x8 x y)  -> (SRAW (SignExt16to32 x) (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry (ADDconstForCarry [-16] (ZeroExt8to64 y)))))
   138  (Rsh16Ux8 x y) -> (SRW  (ZeroExt16to32 x) (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry (ADDconstForCarry [-16] (ZeroExt8to64 y)))))
   139  (Lsh16x8 x y)  -> (SLW  x                 (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry (ADDconstForCarry [-16] (ZeroExt8to64 y)))))
   140  
   141  (Rsh8x8 x y)  -> (SRAW (SignExt8to32 x) (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry (ADDconstForCarry [-8] (ZeroExt8to64 y)))))
   142  (Rsh8Ux8 x y) -> (SRW  (ZeroExt8to32 x) (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry (ADDconstForCarry [-8] (ZeroExt8to64 y)))))
   143  (Lsh8x8 x y)  -> (SLW  x                (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry (ADDconstForCarry [-8] (ZeroExt8to64 y)))))
   144  
   145  // Potentially useful optimizing rewrites.
   146  // (ADDconstForCarry [k] c), k < 0 && (c < 0 || k+c >= 0) -> CarrySet
   147  // (ADDconstForCarry [k] c), k < 0 && (c >= 0 && k+c < 0) -> CarryClear
   148  // (MaskIfNotCarry CarrySet) -> 0
   149  // (MaskIfNotCarry CarryClear) -> -1
   150  
   151  // Lowering constants
        // Constants of width <= 32 bits are materialized with MOVWconst;
        // only full 64-bit constants need MOVDconst.
   152  (Const8   [val]) -> (MOVWconst [val])
   153  (Const16  [val]) -> (MOVWconst [val])
   154  (Const32  [val]) -> (MOVWconst [val])
   155  (Const64  [val]) -> (MOVDconst [val])
   156  (Const32F [val]) -> (FMOVSconst [val])
   157  (Const64F [val]) -> (FMOVDconst [val])
   158  (ConstNil) -> (MOVDconst [0])
   159  (ConstBool [b]) -> (MOVWconst [b])
   160  
   161  (Addr {sym} base) -> (MOVDaddr {sym} base)
   162  // (Addr {sym} base) -> (ADDconst {sym} base)
   163  (OffPtr [off] ptr) -> (ADD (MOVDconst <config.Frontend().TypeInt64()> [off]) ptr)
   164  
        // Bitwise ops of all widths use the 64-bit instructions.
   165  (And64 x y) -> (AND x y)
   166  (And32 x y) -> (AND x y)
   167  (And16 x y) -> (AND x y)
   168  (And8  x y) -> (AND x y)
   169  
   170  (Or64 x y) -> (OR x y)
   171  (Or32 x y) -> (OR x y)
   172  (Or16 x y) -> (OR x y)
   173  (Or8  x y) -> (OR x y)
   174  
   175  (Xor64 x y) -> (XOR x y)
   176  (Xor32 x y) -> (XOR x y)
   177  (Xor16 x y) -> (XOR x y)
   178  (Xor8  x y) -> (XOR x y)
   179  
   180  (Neg64F x) -> (FNEG x)
   181  (Neg32F x) -> (FNEG x)
   182  (Neg64  x) -> (NEG x)
   183  (Neg32  x) -> (NEG x)
   184  (Neg16  x) -> (NEG x)
   185  (Neg8   x) -> (NEG x)
   186  
        // Bitwise complement ^x is x XOR -1.
   187  (Com64 x) -> (XORconst [-1] x)
   188  (Com32 x) -> (XORconst [-1] x)
   189  (Com16 x) -> (XORconst [-1] x)
   190  (Com8  x) -> (XORconst [-1] x)
   191  
   192  // Lowering boolean ops
   193  (AndB x y) -> (AND x y)
   194  (OrB x y) -> (OR x y)
        // Booleans are 0/1, so !x is x XOR 1.
   195  (Not x) -> (XORconst [1] x)
   196  
   197  // Lowering comparisons
        // Sub-word operands are extended before comparing: sign-extended
        // for signed compares, zero-extended for unsigned ones.
   198  (EqB x y)  -> (ANDconst [1] (EQV x y))
   199  (Eq8 x y)  -> (Equal (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y)))
   200  (Eq16 x y) -> (Equal (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y)))
   201  (Eq32 x y) -> (Equal (CMPW x y))
   202  (Eq64 x y) -> (Equal (CMP x y))
   203  (Eq32F x y) -> (Equal (FCMPU x y))
   204  (Eq64F x y) -> (Equal (FCMPU x y))
   205  (EqPtr x y) -> (Equal (CMP x y))
   206  
        // Booleans are 0/1, so != on booleans is just XOR.
   207  (NeqB x y)  -> (XOR x y)
   208  (Neq8 x y)  -> (NotEqual (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y)))
   209  (Neq16 x y) -> (NotEqual (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y)))
   210  (Neq32 x y) -> (NotEqual (CMPW x y))
   211  (Neq64 x y) -> (NotEqual (CMP x y))
   212  (Neq32F x y) -> (NotEqual (FCMPU x y))
   213  (Neq64F x y) -> (NotEqual (FCMPU x y))
   214  (NeqPtr x y) -> (NotEqual (CMP x y))
   215  
   216  (Less8 x y)  -> (LessThan (CMPW (SignExt8to32 x) (SignExt8to32 y)))
   217  (Less16 x y) -> (LessThan (CMPW (SignExt16to32 x) (SignExt16to32 y)))
   218  (Less32 x y) -> (LessThan (CMPW x y))
   219  (Less64 x y) -> (LessThan (CMP x y))
   220  (Less32F x y) -> (FLessThan (FCMPU x y))
   221  (Less64F x y) -> (FLessThan (FCMPU x y))
   222  
   223  (Less8U x y)  -> (LessThan (CMPWU (ZeroExt8to32 x) (ZeroExt8to32 y)))
   224  (Less16U x y) -> (LessThan (CMPWU (ZeroExt16to32 x) (ZeroExt16to32 y)))
   225  (Less32U x y) -> (LessThan (CMPWU x y))
   226  (Less64U x y) -> (LessThan (CMPU x y))
   227  
   228  (Leq8 x y)  -> (LessEqual (CMPW (SignExt8to32 x) (SignExt8to32 y)))
   229  (Leq16 x y) -> (LessEqual (CMPW (SignExt16to32 x) (SignExt16to32 y)))
   230  (Leq32 x y) -> (LessEqual (CMPW x y))
   231  (Leq64 x y) -> (LessEqual (CMP x y))
   232  (Leq32F x y) -> (FLessEqual (FCMPU x y))
   233  (Leq64F x y) -> (FLessEqual (FCMPU x y))
   234  
   235  (Leq8U x y)  -> (LessEqual (CMPWU (ZeroExt8to32 x) (ZeroExt8to32 y)))
   236  (Leq16U x y) -> (LessEqual (CMPWU (ZeroExt16to32 x) (ZeroExt16to32 y)))
   237  (Leq32U x y) -> (LessEqual (CMPWU x y))
   238  (Leq64U x y) -> (LessEqual (CMPU x y))
   239  
   240  (Greater8 x y)  -> (GreaterThan (CMPW (SignExt8to32 x) (SignExt8to32 y)))
   241  (Greater16 x y) -> (GreaterThan (CMPW (SignExt16to32 x) (SignExt16to32 y)))
   242  (Greater32 x y) -> (GreaterThan (CMPW x y))
   243  (Greater64 x y) -> (GreaterThan (CMP x y))
   244  (Greater32F x y) -> (FGreaterThan (FCMPU x y))
   245  (Greater64F x y) -> (FGreaterThan (FCMPU x y))
   246  
   247  (Greater8U x y)  -> (GreaterThan (CMPWU (ZeroExt8to32 x) (ZeroExt8to32 y)))
   248  (Greater16U x y) -> (GreaterThan (CMPWU (ZeroExt16to32 x) (ZeroExt16to32 y)))
   249  (Greater32U x y) -> (GreaterThan (CMPWU x y))
   250  (Greater64U x y) -> (GreaterThan (CMPU x y))
   251  
   252  (Geq8 x y)  -> (GreaterEqual (CMPW (SignExt8to32 x) (SignExt8to32 y)))
   253  (Geq16 x y) -> (GreaterEqual (CMPW (SignExt16to32 x) (SignExt16to32 y)))
   254  (Geq32 x y) -> (GreaterEqual (CMPW x y))
   255  (Geq64 x y) -> (GreaterEqual (CMP x y))
   256  (Geq32F x y) -> (FGreaterEqual (FCMPU x y))
   257  (Geq64F x y) -> (FGreaterEqual (FCMPU x y))
   258  
        // Use the 32-bit unsigned compare CMPWU for the sub-64-bit cases,
        // matching Less*U/Leq*U/Greater*U above.  In particular Geq32U was
        // using the 64-bit CMPU on 32-bit operands whose upper 32 bits are
        // not guaranteed to be zero, which could give wrong answers.
   259  (Geq8U x y)  -> (GreaterEqual (CMPWU (ZeroExt8to32 x) (ZeroExt8to32 y)))
   260  (Geq16U x y) -> (GreaterEqual (CMPWU (ZeroExt16to32 x) (ZeroExt16to32 y)))
   261  (Geq32U x y) -> (GreaterEqual (CMPWU x y))
   262  (Geq64U x y) -> (GreaterEqual (CMPU x y))
   263  
   264  // Absorb pseudo-ops into blocks.
   265  (If (Equal cc) yes no) -> (EQ cc yes no)
   266  (If (NotEqual cc) yes no) -> (NE cc yes no)
   267  (If (LessThan cc) yes no) -> (LT cc yes no)
   268  (If (LessEqual cc) yes no) -> (LE cc yes no)
   269  (If (GreaterThan cc) yes no) -> (GT cc yes no)
   270  (If (GreaterEqual cc) yes no) -> (GE cc yes no)
   271  (If (FLessThan cc) yes no) -> (FLT cc yes no)
   272  (If (FLessEqual cc) yes no) -> (FLE cc yes no)
   273  (If (FGreaterThan cc) yes no) -> (FGT cc yes no)
   274  (If (FGreaterEqual cc) yes no) -> (FGE cc yes no)
   275  
        // Any other boolean condition: branch on cond != 0.
   276  (If cond yes no) -> (NE (CMPWconst [0] cond) yes no)
   277  
   278  // Absorb boolean tests into block
   279  (NE (CMPWconst [0] (Equal cc)) yes no) -> (EQ cc yes no)
   280  (NE (CMPWconst [0] (NotEqual cc)) yes no) -> (NE cc yes no)
   281  (NE (CMPWconst [0] (LessThan cc)) yes no) -> (LT cc yes no)
   282  (NE (CMPWconst [0] (LessEqual cc)) yes no) -> (LE cc yes no)
   283  (NE (CMPWconst [0] (GreaterThan cc)) yes no) -> (GT cc yes no)
   284  (NE (CMPWconst [0] (GreaterEqual cc)) yes no) -> (GE cc yes no)
   285  // (NE (CMPWconst [0] (FLessThan cc)) yes no) -> (FLT cc yes no)
   286  // (NE (CMPWconst [0] (FLessEqual cc)) yes no) -> (FLE cc yes no)
   287  // (NE (CMPWconst [0] (FGreaterThan cc)) yes no) -> (FGT cc yes no)
   288  // (NE (CMPWconst [0] (FGreaterEqual cc)) yes no) -> (FGE cc yes no)
   289  
   290  // absorb flag constants into branches
        // When the flags are known at compile time the branch becomes
        // unconditional (First nil a b takes the first successor).
   291  (EQ (FlagEQ) yes no) -> (First nil yes no)
   292  (EQ (FlagLT) yes no) -> (First nil no yes)
   293  (EQ (FlagGT) yes no) -> (First nil no yes)
   294  
   295  (NE (FlagEQ) yes no) -> (First nil no yes)
   296  (NE (FlagLT) yes no) -> (First nil yes no)
   297  (NE (FlagGT) yes no) -> (First nil yes no)
   298  
   299  (LT (FlagEQ) yes no) -> (First nil no yes)
   300  (LT (FlagLT) yes no) -> (First nil yes no)
   301  (LT (FlagGT) yes no) -> (First nil no yes)
   302  
   303  (LE (FlagEQ) yes no) -> (First nil yes no)
   304  (LE (FlagLT) yes no) -> (First nil yes no)
   305  (LE (FlagGT) yes no) -> (First nil no yes)
   306  
   307  (GT (FlagEQ) yes no) -> (First nil no yes)
   308  (GT (FlagLT) yes no) -> (First nil no yes)
   309  (GT (FlagGT) yes no) -> (First nil yes no)
   310  
   311  (GE (FlagEQ) yes no) -> (First nil yes no)
   312  (GE (FlagLT) yes no) -> (First nil no yes)
   313  (GE (FlagGT) yes no) -> (First nil yes no)
   314  
   315  // absorb InvertFlags into branches
        // InvertFlags swaps the operands of the compare, so the branch
        // condition is mirrored (EQ/NE are symmetric and unchanged).
   316  (LT (InvertFlags cmp) yes no) -> (GT cmp yes no)
   317  (GT (InvertFlags cmp) yes no) -> (LT cmp yes no)
   318  (LE (InvertFlags cmp) yes no) -> (GE cmp yes no)
   319  (GE (InvertFlags cmp) yes no) -> (LE cmp yes no)
   320  (EQ (InvertFlags cmp) yes no) -> (EQ cmp yes no)
   321  (NE (InvertFlags cmp) yes no) -> (NE cmp yes no)
   322  
   323  // (FLT (InvertFlags cmp) yes no) -> (FGT cmp yes no)
   324  // (FGT (InvertFlags cmp) yes no) -> (FLT cmp yes no)
   325  // (FLE (InvertFlags cmp) yes no) -> (FGE cmp yes no)
   326  // (FGE (InvertFlags cmp) yes no) -> (FLE cmp yes no)
   327  
   328  // constant comparisons
        // Compares of two constants fold to a known flag value.  Equality
        // tests may use the signed casts even in the unsigned rules, since
        // equality is sign-agnostic.
   329  (CMPWconst (MOVWconst [x]) [y]) && int32(x)==int32(y) -> (FlagEQ)
   330  (CMPWconst (MOVWconst [x]) [y]) && int32(x)<int32(y)  -> (FlagLT)
   331  (CMPWconst (MOVWconst [x]) [y]) && int32(x)>int32(y)  -> (FlagGT)
   332  
   333  (CMPconst (MOVDconst [x]) [y]) && int64(x)==int64(y) -> (FlagEQ)
   334  (CMPconst (MOVDconst [x]) [y]) && int64(x)<int64(y)  -> (FlagLT)
   335  (CMPconst (MOVDconst [x]) [y]) && int64(x)>int64(y)  -> (FlagGT)
   336  
   337  (CMPWUconst (MOVWconst [x]) [y]) && int32(x)==int32(y)  -> (FlagEQ)
   338  (CMPWUconst (MOVWconst [x]) [y]) && uint32(x)<uint32(y) -> (FlagLT)
   339  (CMPWUconst (MOVWconst [x]) [y]) && uint32(x)>uint32(y) -> (FlagGT)
   340  
   341  (CMPUconst (MOVDconst [x]) [y]) && int64(x)==int64(y)  -> (FlagEQ)
   342  (CMPUconst (MOVDconst [x]) [y]) && uint64(x)<uint64(y) -> (FlagLT)
   343  (CMPUconst (MOVDconst [x]) [y]) && uint64(x)>uint64(y) -> (FlagGT)
   344  
   345  // other known comparisons
   346  //(CMPconst (MOVBUreg _) [c]) && 0xff < c -> (FlagLT)
   347  //(CMPconst (MOVHUreg _) [c]) && 0xffff < c -> (FlagLT)
   348  //(CMPconst (ANDconst _ [m]) [n]) && 0 <= int32(m) && int32(m) < int32(n) -> (FlagLT)
   349  //(CMPconst (SRLconst _ [c]) [n]) && 0 <= n && 0 < c && c <= 32 && (1<<uint32(32-c)) <= uint32(n) -> (FlagLT)
   350  
   351  // absorb flag constants into boolean values
        // A known flag value turns each flag-reading pseudo-op into the
        // constant 0 or 1.
   352  (Equal (FlagEQ)) -> (MOVWconst [1])
   353  (Equal (FlagLT)) -> (MOVWconst [0])
   354  (Equal (FlagGT)) -> (MOVWconst [0])
   355  
   356  (NotEqual (FlagEQ)) -> (MOVWconst [0])
   357  (NotEqual (FlagLT)) -> (MOVWconst [1])
   358  (NotEqual (FlagGT)) -> (MOVWconst [1])
   359  
   360  (LessThan (FlagEQ)) -> (MOVWconst [0])
   361  (LessThan (FlagLT)) -> (MOVWconst [1])
   362  (LessThan (FlagGT)) -> (MOVWconst [0])
   363  
   364  (LessEqual (FlagEQ)) -> (MOVWconst [1])
   365  (LessEqual (FlagLT)) -> (MOVWconst [1])
   366  (LessEqual (FlagGT)) -> (MOVWconst [0])
   367  
   368  (GreaterThan (FlagEQ)) -> (MOVWconst [0])
   369  (GreaterThan (FlagLT)) -> (MOVWconst [0])
   370  (GreaterThan (FlagGT)) -> (MOVWconst [1])
   371  
   372  (GreaterEqual (FlagEQ)) -> (MOVWconst [1])
   373  (GreaterEqual (FlagLT)) -> (MOVWconst [0])
   374  (GreaterEqual (FlagGT)) -> (MOVWconst [1])
   375  
   376  // absorb InvertFlags into boolean values
        // Swapped compare operands mirror the predicate.
   377  (Equal (InvertFlags x)) -> (Equal x)
   378  (NotEqual (InvertFlags x)) -> (NotEqual x)
   379  (LessThan (InvertFlags x)) -> (GreaterThan x)
   380  (GreaterThan (InvertFlags x)) -> (LessThan x)
   381  (LessEqual (InvertFlags x)) -> (GreaterEqual x)
   382  (GreaterEqual (InvertFlags x)) -> (LessEqual x)
   383  
   384  // (FLessThan (InvertFlags x)) -> (FGreaterThan x)
   385  // (FGreaterThan (InvertFlags x)) -> (FLessThan x)
   386  // (FLessEqual (InvertFlags x)) -> (FGreaterEqual x)
   387  // (FGreaterEqual (InvertFlags x)) -> (FLessEqual x)
   388  
   389  
   390  // Lowering loads
        // Loads are selected by element size and signedness; the Z
        // variants zero-extend, the others sign-extend.
   391  (Load <t> ptr mem) && (is64BitInt(t) || isPtr(t)) -> (MOVDload ptr mem)
   392  (Load <t> ptr mem) && is32BitInt(t) && isSigned(t) -> (MOVWload ptr mem)
   393  (Load <t> ptr mem) && is32BitInt(t) && !isSigned(t) -> (MOVWZload ptr mem)
   394  (Load <t> ptr mem) && is16BitInt(t) && isSigned(t) -> (MOVHload ptr mem)
   395  (Load <t> ptr mem) && is16BitInt(t) && !isSigned(t) -> (MOVHZload ptr mem)
   396  (Load <t> ptr mem) && (t.IsBoolean() || (is8BitInt(t) && isSigned(t))) -> (MOVBload ptr mem)
   397  (Load <t> ptr mem) && is8BitInt(t) && !isSigned(t) -> (MOVBZload ptr mem)
   398  
   399  (Load <t> ptr mem) && is32BitFloat(t) -> (FMOVSload ptr mem)
   400  (Load <t> ptr mem) && is64BitFloat(t) -> (FMOVDload ptr mem)
   401  
        // Stores are selected by the size argument and the value's type.
   402  (Store [8] ptr val mem) && is64BitFloat(val.Type) -> (FMOVDstore ptr val mem)
   403  (Store [8] ptr val mem) && is32BitFloat(val.Type) -> (FMOVDstore ptr val mem) // glitch from (Cvt32Fto64F x) -> x -- type is wrong
   404  (Store [4] ptr val mem) && is32BitFloat(val.Type) -> (FMOVSstore ptr val mem)
   405  (Store [8] ptr val mem) && (is64BitInt(val.Type) || isPtr(val.Type)) -> (MOVDstore ptr val mem)
   406  (Store [4] ptr val mem) && is32BitInt(val.Type) -> (MOVWstore ptr val mem)
   407  (Store [2] ptr val mem) -> (MOVHstore ptr val mem)
   408  (Store [1] ptr val mem) -> (MOVBstore ptr val mem)
   409  
        // Lowering Zero: small sizes are expanded inline into the widest
        // storezero ops the alignment allows; large or misaligned zeroing
        // falls through to the LoweredZero loop at the bottom.
   410  (Zero [s] _ mem) && SizeAndAlign(s).Size() == 0 -> mem
   411  (Zero [s] destptr mem) && SizeAndAlign(s).Size() == 1 -> (MOVBstorezero destptr mem)
   412  (Zero [s] destptr mem) && SizeAndAlign(s).Size() == 2 && SizeAndAlign(s).Align()%2 == 0 ->
   413  	(MOVHstorezero destptr mem)
   414  (Zero [s] destptr mem) && SizeAndAlign(s).Size() == 2 ->
   415  	(MOVBstorezero [1] destptr
   416  		(MOVBstorezero [0] destptr mem))
   417  (Zero [s] destptr mem) && SizeAndAlign(s).Size() == 4 && SizeAndAlign(s).Align()%4 == 0 ->
   418  	(MOVWstorezero destptr mem)
   419  (Zero [s] destptr mem) && SizeAndAlign(s).Size() == 4 && SizeAndAlign(s).Align()%2 == 0 ->
   420  	(MOVHstorezero [2] destptr
   421  		(MOVHstorezero [0] destptr mem))
   422  (Zero [s] destptr mem) && SizeAndAlign(s).Size() == 4 ->
   423  	(MOVBstorezero [3] destptr
   424  		(MOVBstorezero [2] destptr
   425  			(MOVBstorezero [1] destptr
   426  				(MOVBstorezero [0] destptr mem))))
   427  (Zero [s] destptr mem) && SizeAndAlign(s).Size() == 8 && SizeAndAlign(s).Align()%8 == 0 ->
   428  	(MOVDstorezero [0] destptr mem)
   429  (Zero [s] destptr mem) && SizeAndAlign(s).Size() == 8 && SizeAndAlign(s).Align()%4 == 0 ->
   430  	(MOVWstorezero [4] destptr
   431  		(MOVWstorezero [0] destptr mem))
   432  (Zero [s] destptr mem) && SizeAndAlign(s).Size() == 8 && SizeAndAlign(s).Align()%2 == 0 ->
   433  	(MOVHstorezero [6] destptr
   434  		(MOVHstorezero [4] destptr
   435  			(MOVHstorezero [2] destptr
   436  				(MOVHstorezero [0] destptr mem))))
   437  
   438  (Zero [s] destptr mem) && SizeAndAlign(s).Size() == 3 ->
   439  	(MOVBstorezero [2] destptr
   440  		(MOVBstorezero [1] destptr
   441  			(MOVBstorezero [0] destptr mem)))
   442  
   443  // Zero small numbers of words directly.
   444  (Zero [s] destptr mem) && SizeAndAlign(s).Size() == 16 && SizeAndAlign(s).Align()%8 == 0 ->
   445  	(MOVDstorezero [8] destptr
   446                  (MOVDstorezero [0] destptr mem))
   447  (Zero [s] destptr mem) && SizeAndAlign(s).Size() == 24 && SizeAndAlign(s).Align()%8 == 0 ->
   448  	(MOVDstorezero [16] destptr
   449  		(MOVDstorezero [8] destptr
   450  			(MOVDstorezero [0] destptr mem)))
   451  (Zero [s] destptr mem) && SizeAndAlign(s).Size() == 32 && SizeAndAlign(s).Align()%8 == 0 ->
   452  	(MOVDstorezero [24] destptr
   453  		(MOVDstorezero [16] destptr
   454  			(MOVDstorezero [8] destptr
   455  				(MOVDstorezero [0] destptr mem))))
   456  
   457  // Large zeroing uses a loop
   458  (Zero [s] ptr mem)
   459  	&& (SizeAndAlign(s).Size() > 512 || config.noDuffDevice) || SizeAndAlign(s).Align()%8 != 0 ->
   460  	(LoweredZero [SizeAndAlign(s).Align()]
   461  		ptr
   462  		(ADDconst <ptr.Type> ptr [SizeAndAlign(s).Size()-moveSize(SizeAndAlign(s).Align(), config)])
   463  		mem)
   464  
   465  // moves
        // Small moves are expanded inline as load/store pairs, widest ops
        // first when alignment allows; large or misaligned moves use the
        // LoweredMove loop at the bottom.
        // NOTE(review): the aligned 4-byte case uses the sign-extending
        // MOVWload while the 8-byte case uses MOVWZload; harmless here
        // since only the low 32 bits are stored, but asymmetric.
   466  (Move [s] _ _ mem) && SizeAndAlign(s).Size() == 0 -> mem
   467  (Move [s] dst src mem) && SizeAndAlign(s).Size() == 1 -> (MOVBstore dst (MOVBZload src mem) mem)
   468  (Move [s] dst src mem) && SizeAndAlign(s).Size() == 2 && SizeAndAlign(s).Align()%2 == 0 ->
   469  	(MOVHstore dst (MOVHZload src mem) mem)
   470  (Move [s] dst src mem) && SizeAndAlign(s).Size() == 2 ->
   471  	(MOVBstore [1] dst (MOVBZload [1] src mem)
   472  		(MOVBstore dst (MOVBZload src mem) mem))
   473  (Move [s] dst src mem) && SizeAndAlign(s).Size() == 4 && SizeAndAlign(s).Align()%4 == 0 ->
   474  	(MOVWstore dst (MOVWload src mem) mem)
   475  (Move [s] dst src mem) && SizeAndAlign(s).Size() == 4 && SizeAndAlign(s).Align()%2 == 0 ->
   476  	(MOVHstore [2] dst (MOVHZload [2] src mem)
   477  		(MOVHstore dst (MOVHZload src mem) mem))
   478  (Move [s] dst src mem) && SizeAndAlign(s).Size() == 4 ->
   479  	(MOVBstore [3] dst (MOVBZload [3] src mem)
   480  		(MOVBstore [2] dst (MOVBZload [2] src mem)
   481  			(MOVBstore [1] dst (MOVBZload [1] src mem)
   482  				(MOVBstore dst (MOVBZload src mem) mem))))
   483  
   484  (Move [s] dst src mem) && SizeAndAlign(s).Size() == 8 && SizeAndAlign(s).Align()%8 == 0 ->
   485  	(MOVDstore dst (MOVDload src mem) mem)
   486  (Move [s] dst src mem) && SizeAndAlign(s).Size() == 8 && SizeAndAlign(s).Align()%4 == 0 ->
   487  	(MOVWstore [4] dst (MOVWZload [4] src mem)
   488  		(MOVWstore dst (MOVWZload src mem) mem))
   489  (Move [s] dst src mem) && SizeAndAlign(s).Size() == 8 && SizeAndAlign(s).Align()%2 == 0->
   490  	(MOVHstore [6] dst (MOVHZload [6] src mem)
   491  		(MOVHstore [4] dst (MOVHZload [4] src mem)
   492  			(MOVHstore [2] dst (MOVHZload [2] src mem)
   493  				(MOVHstore dst (MOVHZload src mem) mem))))
   494  
   495  (Move [s] dst src mem) && SizeAndAlign(s).Size() == 3 ->
   496  	(MOVBstore [2] dst (MOVBZload [2] src mem)
   497  		(MOVBstore [1] dst (MOVBZload [1] src mem)
   498  			(MOVBstore dst (MOVBZload src mem) mem)))
   499  
   500  // Large move uses a loop
   501  (Move [s] dst src mem)
   502  	&& (SizeAndAlign(s).Size() > 512 || config.noDuffDevice) || SizeAndAlign(s).Align()%8 != 0 ->
   503  	(LoweredMove [SizeAndAlign(s).Align()]
   504  		dst
   505  		src
   506  		(ADDconst <src.Type> src [SizeAndAlign(s).Size()-moveSize(SizeAndAlign(s).Align(), config)])
   507  		mem)
   508  
   509  // Calls
   510  // Lowering calls
   511  (StaticCall [argwid] {target} mem) -> (CALLstatic [argwid] {target} mem)
   512  (ClosureCall [argwid] entry closure mem) -> (CALLclosure [argwid] entry closure mem)
   513  (DeferCall [argwid] mem) -> (CALLdefer [argwid] mem)
   514  (GoCall [argwid] mem) -> (CALLgo [argwid] mem)
   515  (InterCall [argwid] entry mem) -> (CALLinter [argwid] entry mem)
   516  
   517  // Miscellaneous
        // Bounds checks are unsigned compares: idx < len (in-bounds) and
        // idx <= len (slice bounds).
   518  (Convert <t> x mem) -> (MOVDconvert <t> x mem)
   519  (GetClosurePtr) -> (LoweredGetClosurePtr)
   520  (IsNonNil ptr) -> (NotEqual (CMPconst [0] ptr))
   521  (IsInBounds idx len) -> (LessThan (CMPU idx len))
   522  (IsSliceInBounds idx len) -> (LessEqual (CMPU idx len))
   523  (NilCheck ptr mem) -> (LoweredNilCheck ptr mem)
   524  
   525  // Optimizations
   526  
        // Fold small constants into ADD; the guard keeps the constant in
        // the signed 32-bit range ADDconst can encode.
   527  (ADD (MOVDconst [c]) x) && int64(int32(c)) == c -> (ADDconst [c] x)
   528  (ADD x (MOVDconst [c])) && int64(int32(c)) == c -> (ADDconst [c] x)
   529  
   530  // Fold offsets for stores.
        // is16Bit guards that the combined displacement fits the 16-bit
        // immediate field of the load/store forms.
   531  (MOVDstore [off1] {sym} (ADDconst [off2] x) val mem) && is16Bit(off1+off2) -> (MOVDstore [off1+off2] {sym} x val mem)
   532  (MOVWstore [off1] {sym} (ADDconst [off2] x) val mem) && is16Bit(off1+off2) -> (MOVWstore [off1+off2] {sym} x val mem)
   533  (MOVHstore [off1] {sym} (ADDconst [off2] x) val mem) && is16Bit(off1+off2) -> (MOVHstore [off1+off2] {sym} x val mem)
   534  (MOVBstore [off1] {sym} (ADDconst [off2] x) val mem) && is16Bit(off1+off2) -> (MOVBstore [off1+off2] {sym} x val mem)
   535  
   536  (FMOVSstore [off1] {sym} (ADDconst [off2] ptr) val mem) && is16Bit(off1+off2) -> (FMOVSstore [off1+off2] {sym} ptr val mem)
   537  (FMOVDstore [off1] {sym} (ADDconst [off2] ptr) val mem) && is16Bit(off1+off2) -> (FMOVDstore [off1+off2] {sym} ptr val mem)
   538  
        // Merge symbol addresses (MOVDaddr) into loads and stores.
   539  (MOVBstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem) && canMergeSym(sym1,sym2) ->
   540          (MOVBstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
   541  (MOVHstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem) && canMergeSym(sym1,sym2) ->
   542          (MOVHstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
   543  (MOVWstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem) && canMergeSym(sym1,sym2) ->
   544          (MOVWstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
   545  (MOVDstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem) && canMergeSym(sym1,sym2) ->
   546          (MOVDstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
   547  
   548  (FMOVSstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem) && canMergeSym(sym1,sym2) ->
   549          (FMOVSstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
   550  (FMOVDstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem) && canMergeSym(sym1,sym2) ->
   551          (FMOVDstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
   552  
   553  (MOVBload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) && canMergeSym(sym1,sym2) ->
   554          (MOVBload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
   555  (MOVBZload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) && canMergeSym(sym1,sym2) ->
   556          (MOVBZload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
   557  (MOVHload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) && canMergeSym(sym1,sym2) ->
   558          (MOVHload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
   559  (MOVHZload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) && canMergeSym(sym1,sym2) ->
   560          (MOVHZload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
   561  (MOVWload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) && canMergeSym(sym1,sym2) ->
   562          (MOVWload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
   563  (MOVWZload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) && canMergeSym(sym1,sym2) ->
   564          (MOVWZload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
   565  (MOVDload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) && canMergeSym(sym1,sym2) ->
   566          (MOVDload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
   567  (FMOVSload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) && canMergeSym(sym1,sym2) ->
   568          (FMOVSload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
   569  (FMOVDload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) && canMergeSym(sym1,sym2) ->
   570          (FMOVDload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
   571  
   572  // Fold offsets for loads.
   573  (FMOVSload [off1] {sym} (ADDconst [off2] ptr) mem) && is16Bit(off1+off2) -> (FMOVSload [off1+off2] {sym} ptr mem)
   574  (FMOVDload [off1] {sym} (ADDconst [off2] ptr) mem) && is16Bit(off1+off2) -> (FMOVDload [off1+off2] {sym} ptr mem)
   575  
   576  (MOVDload [off1] {sym} (ADDconst [off2] x) mem) && is16Bit(off1+off2) -> (MOVDload [off1+off2] {sym} x mem)
   577  (MOVWload [off1] {sym} (ADDconst [off2] x) mem) && is16Bit(off1+off2) -> (MOVWload [off1+off2] {sym} x mem)
   578  (MOVWZload [off1] {sym} (ADDconst [off2] x) mem) && is16Bit(off1+off2) -> (MOVWZload [off1+off2] {sym} x mem)
   579  (MOVHload [off1] {sym} (ADDconst [off2] x) mem) && is16Bit(off1+off2) -> (MOVHload [off1+off2] {sym} x mem)
   580  (MOVHZload [off1] {sym} (ADDconst [off2] x) mem) && is16Bit(off1+off2) -> (MOVHZload [off1+off2] {sym} x mem)
   581  (MOVBload [off1] {sym} (ADDconst [off2] x) mem) && is16Bit(off1+off2) -> (MOVBload [off1+off2] {sym} x mem)
   582  (MOVBZload [off1] {sym} (ADDconst [off2] x) mem) && is16Bit(off1+off2) -> (MOVBZload [off1+off2] {sym} x mem)
   583  
   584  // Store of zero -> storezero
   585  (MOVDstore [off] {sym} ptr (MOVDconst [c]) mem) && c == 0 -> (MOVDstorezero [off] {sym} ptr mem)
   586  (MOVWstore [off] {sym} ptr (MOVDconst [c]) mem) && c == 0 -> (MOVWstorezero [off] {sym} ptr mem)
   587  (MOVHstore [off] {sym} ptr (MOVDconst [c]) mem) && c == 0 -> (MOVHstorezero [off] {sym} ptr mem)
   588  (MOVBstore [off] {sym} ptr (MOVDconst [c]) mem) && c == 0 -> (MOVBstorezero [off] {sym} ptr mem)
   589  
   590  // Fold offsets for storezero
   591  (MOVDstorezero [off1] {sym} (ADDconst [off2] x) mem) && is16Bit(off1+off2) ->
   592      (MOVDstorezero [off1+off2] {sym} x mem)
   593  (MOVWstorezero [off1] {sym} (ADDconst [off2] x) mem) && is16Bit(off1+off2) ->
   594      (MOVWstorezero [off1+off2] {sym} x mem)
   595  (MOVHstorezero [off1] {sym} (ADDconst [off2] x) mem) && is16Bit(off1+off2) ->
   596      (MOVHstorezero [off1+off2] {sym} x mem)
   597  (MOVBstorezero [off1] {sym} (ADDconst [off2] x) mem) && is16Bit(off1+off2) ->
   598      (MOVBstorezero [off1+off2] {sym} x mem)
   599  
   600  // Lowering extension
   601  // Note: we always extend to 64 bits even though some ops don't need that many result bits.
   602  (SignExt8to16  x) -> (MOVBreg x)
   603  (SignExt8to32  x) -> (MOVBreg x)
   604  (SignExt8to64  x) -> (MOVBreg x)
   605  (SignExt16to32 x) -> (MOVHreg x)
   606  (SignExt16to64 x) -> (MOVHreg x)
   607  (SignExt32to64 x) -> (MOVWreg x)
   608  
   609  (ZeroExt8to16  x) -> (MOVBZreg x)
   610  (ZeroExt8to32  x) -> (MOVBZreg x)
   611  (ZeroExt8to64  x) -> (MOVBZreg x)
   612  (ZeroExt16to32 x) -> (MOVHZreg x)
   613  (ZeroExt16to64 x) -> (MOVHZreg x)
   614  (ZeroExt32to64 x) -> (MOVWZreg x)
   615  
        // Truncation is a sign-extending register move of the narrow part;
        // the low-order bits are all callers may rely on.
   616  (Trunc16to8  x) -> (MOVBreg x)
   617  (Trunc32to8  x) -> (MOVBreg x)
   618  (Trunc32to16 x) -> (MOVHreg x)
   619  (Trunc64to8  x) -> (MOVBreg x)
   620  (Trunc64to16 x) -> (MOVHreg x)
   621  (Trunc64to32 x) -> (MOVWreg x)
   622