github.com/sbinet/go@v0.0.0-20160827155028-54d7de7dd62b/src/cmd/compile/internal/ssa/gen/generic.rules (about)

     1  // Copyright 2015 The Go Authors. All rights reserved.
     2  // Use of this source code is governed by a BSD-style
     3  // license that can be found in the LICENSE file.
     4  
     5  // Simplifications that apply to all backend architectures. As an example, this
     6  // Go source code
     7  //
     8  // y := 0 * x
     9  //
    10  // can be translated into y := 0 without losing any information, which saves a
    11  // pointless multiplication instruction. Other .rules files in this directory
    12  // (for example AMD64.rules) contain rules specific to the architecture in the
    13  // filename. The rules here apply to every architecture.
    14  //
    15  // The code for parsing this file lives in rulegen.go; this file generates
    16  // ssa/rewritegeneric.go.
    17  
    18  // values are specified using the following format:
    19  // (op <type> [auxint] {aux} arg0 arg1 ...)
    20  // the type, aux, and auxint fields are optional
    21  // on the matching side
    22  //  - the type, aux, and auxint fields must match if they are specified.
    23  //  - the first occurrence of a variable defines that variable.  Subsequent
    24  //    uses must match (be == to) the first use.
    25  //  - v is defined to be the value matched.
    26  //  - an additional conditional can be provided after the match pattern with "&&".
    27  // on the generated side
    28  //  - the type of the top-level expression is the same as the one on the left-hand side.
    29  //  - the type of any subexpressions must be specified explicitly (or
    30  //    be specified in the op's type field).
    31  //  - auxint will be 0 if not specified.
    32  //  - aux will be nil if not specified.
    33  
    34  // blocks are specified using the following format:
    35  // (kind controlvalue succ0 succ1 ...)
    36  // controlvalue must be "nil" or a value expression
    37  // succ* fields must be variables
    38  // For now, the generated successors must be a permutation of the matched successors.
    39  
    40  // constant folding
        // Integer constants are stored sign-extended in a 64-bit auxint, so each
        // truncation reinterprets the low bits at the narrower width and then
        // sign-extends the result back to int64 for storage.
    41  (Trunc16to8  (Const16 [c]))  -> (Const8   [int64(int8(c))])
    42  (Trunc32to8  (Const32 [c]))  -> (Const8   [int64(int8(c))])
    43  (Trunc32to16 (Const32 [c]))  -> (Const16  [int64(int16(c))])
    44  (Trunc64to8  (Const64 [c]))  -> (Const8   [int64(int8(c))])
    45  (Trunc64to16 (Const64 [c]))  -> (Const16  [int64(int16(c))])
    46  (Trunc64to32 (Const64 [c]))  -> (Const32  [int64(int32(c))])
        // Float constants (both Const32F and Const64F) keep their value as float64
        // bits in the auxint; i2f32 rounds those bits to float32 precision and f2i
        // stores the widened result back as bits.
        // NOTE(review): assumes f2i/i2f32 are the bits<->float helpers from
        // rewrite.go — confirm against that file.
    47  (Cvt64Fto32F (Const64F [c])) -> (Const32F [f2i(float64(i2f32(c)))])
    48  (Cvt32Fto64F (Const32F [c])) -> (Const64F [c]) // c is already a 64 bit float
    49  
    50  // const negation is currently handled by frontend
    51  //(Neg8 (Const8 [c])) -> (Const8 [-c])
    52  //(Neg16 (Const16 [c])) -> (Const16 [-c])
    53  //(Neg32 (Const32 [c])) -> (Const32 [-c])
    54  //(Neg64 (Const64 [c])) -> (Const64 [-c])
    55  //(Neg32F (Const32F [c])) -> (Const32F [f2i(-i2f(c))])
    56  //(Neg64F (Const64F [c])) -> (Const64F [f2i(-i2f(c))])
    57  
    58  (Add8   (Const8 [c])   (Const8 [d]))   -> (Const8  [int64(int8(c+d))])
    59  (Add16  (Const16 [c])  (Const16 [d]))  -> (Const16 [int64(int16(c+d))])
    60  (Add32  (Const32 [c])  (Const32 [d]))  -> (Const32 [int64(int32(c+d))])
    61  (Add64  (Const64 [c])  (Const64 [d]))  -> (Const64 [c+d])
    62  (Add32F (Const32F [c]) (Const32F [d])) ->
    63          (Const32F [f2i(float64(i2f32(c) + i2f32(d)))]) // ensure we combine the operands with 32 bit precision
    64  (Add64F (Const64F [c]) (Const64F [d])) -> (Const64F [f2i(i2f(c) + i2f(d))])
    65  (AddPtr <t> x (Const64 [c])) -> (OffPtr <t> x [c])
    66  
    67  (Sub8   (Const8 [c]) (Const8 [d]))     -> (Const8 [int64(int8(c-d))])
    68  (Sub16  (Const16 [c]) (Const16 [d]))   -> (Const16 [int64(int16(c-d))])
    69  (Sub32  (Const32 [c]) (Const32 [d]))   -> (Const32 [int64(int32(c-d))])
    70  (Sub64  (Const64 [c]) (Const64 [d]))   -> (Const64 [c-d])
    71  (Sub32F (Const32F [c]) (Const32F [d])) ->
    72          (Const32F [f2i(float64(i2f32(c) - i2f32(d)))])
    73  (Sub64F (Const64F [c]) (Const64F [d])) -> (Const64F [f2i(i2f(c) - i2f(d))])
    74  
    75  (Mul8   (Const8 [c])   (Const8 [d]))   -> (Const8  [int64(int8(c*d))])
    76  (Mul16  (Const16 [c])  (Const16 [d]))  -> (Const16 [int64(int16(c*d))])
    77  (Mul32  (Const32 [c])  (Const32 [d]))  -> (Const32 [int64(int32(c*d))])
    78  (Mul64  (Const64 [c])  (Const64 [d]))  -> (Const64 [c*d])
    79  (Mul32F (Const32F [c]) (Const32F [d])) ->
    80          (Const32F [f2i(float64(i2f32(c) * i2f32(d)))])
    81  (Mul64F (Const64F [c]) (Const64F [d])) -> (Const64F [f2i(i2f(c) * i2f(d))])
    82  
    83  // Convert x * -1 to -x. The front-end catches some but not all of these.
    84  (Mul8  (Const8  [-1]) x) -> (Neg8  x)
    85  (Mul16 (Const16 [-1]) x) -> (Neg16 x)
    86  (Mul32 (Const32 [-1]) x) -> (Neg32 x)
    87  (Mul64 (Const64 [-1]) x) -> (Neg64 x)
    88  
    89  (Mod8  (Const8  [c]) (Const8  [d])) && d != 0 -> (Const8  [int64(int8(c % d))])
    90  (Mod16 (Const16 [c]) (Const16 [d])) && d != 0 -> (Const16 [int64(int16(c % d))])
    91  (Mod32 (Const32 [c]) (Const32 [d])) && d != 0 -> (Const32 [int64(int32(c % d))])
    92  (Mod64 (Const64 [c]) (Const64 [d])) && d != 0 -> (Const64 [c % d])
    93  
    94  (Mod8u  (Const8 [c])  (Const8  [d])) && d != 0 -> (Const8  [int64(uint8(c) % uint8(d))])
    95  (Mod16u (Const16 [c]) (Const16 [d])) && d != 0 -> (Const16 [int64(uint16(c) % uint16(d))])
    96  (Mod32u (Const32 [c]) (Const32 [d])) && d != 0 -> (Const32 [int64(uint32(c) % uint32(d))])
    97  (Mod64u (Const64 [c]) (Const64 [d])) && d != 0 -> (Const64 [int64(uint64(c) % uint64(d))])
    98  
    99  (Lsh64x64  (Const64 [c]) (Const64 [d])) -> (Const64 [c << uint64(d)])
   100  (Rsh64x64  (Const64 [c]) (Const64 [d])) -> (Const64 [c >> uint64(d)])
   101  (Rsh64Ux64 (Const64 [c]) (Const64 [d])) -> (Const64 [int64(uint64(c) >> uint64(d))])
   102  (Lsh32x64  (Const32 [c]) (Const64 [d])) -> (Const32 [int64(int32(c) << uint64(d))])
   103  (Rsh32x64  (Const32 [c]) (Const64 [d])) -> (Const32 [int64(int32(c) >> uint64(d))])
   104  (Rsh32Ux64 (Const32 [c]) (Const64 [d])) -> (Const32 [int64(int32(uint32(c) >> uint64(d)))])
   105  (Lsh16x64  (Const16 [c]) (Const64 [d])) -> (Const16 [int64(int16(c) << uint64(d))])
   106  (Rsh16x64  (Const16 [c]) (Const64 [d])) -> (Const16 [int64(int16(c) >> uint64(d))])
   107  (Rsh16Ux64 (Const16 [c]) (Const64 [d])) -> (Const16 [int64(int16(uint16(c) >> uint64(d)))])
   108  (Lsh8x64   (Const8  [c]) (Const64 [d])) -> (Const8  [int64(int8(c) << uint64(d))])
   109  (Rsh8x64   (Const8  [c]) (Const64 [d])) -> (Const8  [int64(int8(c) >> uint64(d))])
   110  (Rsh8Ux64  (Const8  [c]) (Const64 [d])) -> (Const8  [int64(int8(uint8(c) >> uint64(d)))])
   111  
   112  // Fold IsInBounds when the range of the index cannot exceed the limit.
   113  (IsInBounds (ZeroExt8to32  _) (Const32 [c])) && (1 << 8)  <= c -> (ConstBool [1])
   114  (IsInBounds (ZeroExt8to64  _) (Const64 [c])) && (1 << 8)  <= c -> (ConstBool [1])
   115  (IsInBounds (ZeroExt16to32 _) (Const32 [c])) && (1 << 16) <= c -> (ConstBool [1])
   116  (IsInBounds (ZeroExt16to64 _) (Const64 [c])) && (1 << 16) <= c -> (ConstBool [1])
   117  (IsInBounds x x) -> (ConstBool [0])
   118  (IsInBounds (And32 (Const32 [c]) _) (Const32 [d])) && 0 <= c && c < d -> (ConstBool [1])
   119  (IsInBounds (And64 (Const64 [c]) _) (Const64 [d])) && 0 <= c && c < d -> (ConstBool [1])
   120  (IsInBounds (Const32 [c]) (Const32 [d])) -> (ConstBool [b2i(0 <= c && c < d)])
   121  (IsInBounds (Const64 [c]) (Const64 [d])) -> (ConstBool [b2i(0 <= c && c < d)])
    122  // (Mod32u _ y) and (Mod64u _ y) are always between 0 (inclusive) and y (exclusive).
   123  (IsInBounds (Mod32u _ y) y) -> (ConstBool [1])
   124  (IsInBounds (Mod64u _ y) y) -> (ConstBool [1])
   125  
   126  (IsSliceInBounds x x) -> (ConstBool [1])
   127  (IsSliceInBounds (And32 (Const32 [c]) _) (Const32 [d])) && 0 <= c && c <= d -> (ConstBool [1])
   128  (IsSliceInBounds (And64 (Const64 [c]) _) (Const64 [d])) && 0 <= c && c <= d -> (ConstBool [1])
   129  (IsSliceInBounds (Const32 [0]) _) -> (ConstBool [1])
   130  (IsSliceInBounds (Const64 [0]) _) -> (ConstBool [1])
   131  (IsSliceInBounds (Const32 [c]) (Const32 [d])) -> (ConstBool [b2i(0 <= c && c <= d)])
   132  (IsSliceInBounds (Const64 [c]) (Const64 [d])) -> (ConstBool [b2i(0 <= c && c <= d)])
   133  (IsSliceInBounds (SliceLen x) (SliceCap x)) -> (ConstBool [1])
   134  
   135  (Eq64 x x) -> (ConstBool [1])
   136  (Eq32 x x) -> (ConstBool [1])
   137  (Eq16 x x) -> (ConstBool [1])
   138  (Eq8  x x) -> (ConstBool [1])
   139  (EqB (ConstBool [c]) (ConstBool [d])) -> (ConstBool [b2i(c == d)])
   140  (EqB (ConstBool [0]) x) -> (Not x)
   141  (EqB (ConstBool [1]) x) -> x
   142  
   143  (Neq64 x x) -> (ConstBool [0])
   144  (Neq32 x x) -> (ConstBool [0])
   145  (Neq16 x x) -> (ConstBool [0])
   146  (Neq8  x x) -> (ConstBool [0])
   147  (NeqB (ConstBool [c]) (ConstBool [d])) -> (ConstBool [b2i(c != d)])
   148  (NeqB (ConstBool [0]) x) -> x
   149  (NeqB (ConstBool [1]) x) -> (Not x)
   150  
   151  (Eq64 (Const64 <t> [c]) (Add64 (Const64 <t> [d]) x)) -> (Eq64 (Const64 <t> [c-d]) x)
   152  (Eq32 (Const32 <t> [c]) (Add32 (Const32 <t> [d]) x)) -> (Eq32 (Const32 <t> [int64(int32(c-d))]) x)
   153  (Eq16 (Const16 <t> [c]) (Add16 (Const16 <t> [d]) x)) -> (Eq16 (Const16 <t> [int64(int16(c-d))]) x)
   154  (Eq8  (Const8  <t> [c]) (Add8  (Const8  <t> [d]) x)) -> (Eq8  (Const8 <t> [int64(int8(c-d))]) x)
   155  
   156  (Neq64 (Const64 <t> [c]) (Add64 (Const64 <t> [d]) x)) -> (Neq64 (Const64 <t> [c-d]) x)
   157  (Neq32 (Const32 <t> [c]) (Add32 (Const32 <t> [d]) x)) -> (Neq32 (Const32 <t> [int64(int32(c-d))]) x)
   158  (Neq16 (Const16 <t> [c]) (Add16 (Const16 <t> [d]) x)) -> (Neq16 (Const16 <t> [int64(int16(c-d))]) x)
   159  (Neq8  (Const8  <t> [c]) (Add8  (Const8  <t> [d]) x)) -> (Neq8 (Const8 <t> [int64(int8(c-d))]) x)
   160  
    161  // canonicalize: swap arguments for commutative operations when one argument is a constant,
    161  // so later rules need only match the constant-first form. (Sub x c is likewise
    161  // rewritten into Add -c x to reach that form.)
   162  (Eq64 x (Const64 <t> [c])) && x.Op != OpConst64 -> (Eq64 (Const64 <t> [c]) x)
   163  (Eq32 x (Const32 <t> [c])) && x.Op != OpConst32 -> (Eq32 (Const32 <t> [c]) x)
   164  (Eq16 x (Const16 <t> [c])) && x.Op != OpConst16 -> (Eq16 (Const16 <t> [c]) x)
   165  (Eq8  x (Const8  <t> [c])) && x.Op != OpConst8  -> (Eq8  (Const8  <t> [c]) x)
   166  
   167  (Neq64 x (Const64 <t> [c])) && x.Op != OpConst64 -> (Neq64 (Const64 <t> [c]) x)
   168  (Neq32 x (Const32 <t> [c])) && x.Op != OpConst32 -> (Neq32 (Const32 <t> [c]) x)
   169  (Neq16 x (Const16 <t> [c])) && x.Op != OpConst16 -> (Neq16 (Const16 <t> [c]) x)
   170  (Neq8  x (Const8 <t>  [c])) && x.Op != OpConst8  -> (Neq8  (Const8  <t> [c]) x)
   171  
   172  // AddPtr is not canonicalized because nilcheck ptr checks the first argument to be non-nil.
   173  (Add64 x (Const64 <t> [c])) && x.Op != OpConst64 -> (Add64 (Const64 <t> [c]) x)
   174  (Add32 x (Const32 <t> [c])) && x.Op != OpConst32 -> (Add32 (Const32 <t> [c]) x)
   175  (Add16 x (Const16 <t> [c])) && x.Op != OpConst16 -> (Add16 (Const16 <t> [c]) x)
   176  (Add8  x (Const8  <t> [c])) && x.Op != OpConst8  -> (Add8  (Const8  <t> [c]) x)
   177  
   178  (Mul64 x (Const64 <t> [c])) && x.Op != OpConst64 -> (Mul64 (Const64 <t> [c]) x)
   179  (Mul32 x (Const32 <t> [c])) && x.Op != OpConst32 -> (Mul32 (Const32 <t> [c]) x)
   180  (Mul16 x (Const16 <t> [c])) && x.Op != OpConst16 -> (Mul16 (Const16 <t> [c]) x)
   181  (Mul8  x (Const8  <t> [c])) && x.Op != OpConst8  -> (Mul8  (Const8  <t> [c]) x)
   182  
   183  (Sub64 x (Const64 <t> [c])) && x.Op != OpConst64 -> (Add64 (Const64 <t> [-c]) x)
   184  (Sub32 x (Const32 <t> [c])) && x.Op != OpConst32 -> (Add32 (Const32 <t> [int64(int32(-c))]) x)
   185  (Sub16 x (Const16 <t> [c])) && x.Op != OpConst16 -> (Add16 (Const16 <t> [int64(int16(-c))]) x)
   186  (Sub8  x (Const8  <t> [c])) && x.Op != OpConst8  -> (Add8  (Const8  <t> [int64(int8(-c))]) x)
   187  
   188  (And64 x (Const64 <t> [c])) && x.Op != OpConst64 -> (And64 (Const64 <t> [c]) x)
   189  (And32 x (Const32 <t> [c])) && x.Op != OpConst32 -> (And32 (Const32 <t> [c]) x)
   190  (And16 x (Const16 <t> [c])) && x.Op != OpConst16 -> (And16 (Const16 <t> [c]) x)
   191  (And8  x (Const8  <t> [c])) && x.Op != OpConst8  -> (And8  (Const8  <t> [c]) x)
   192  
   193  (Or64 x (Const64 <t> [c])) && x.Op != OpConst64 -> (Or64 (Const64 <t> [c]) x)
   194  (Or32 x (Const32 <t> [c])) && x.Op != OpConst32 -> (Or32 (Const32 <t> [c]) x)
   195  (Or16 x (Const16 <t> [c])) && x.Op != OpConst16 -> (Or16 (Const16 <t> [c]) x)
   196  (Or8  x (Const8  <t> [c])) && x.Op != OpConst8  -> (Or8  (Const8  <t> [c]) x)
   197  
   198  (Xor64 x (Const64 <t> [c])) && x.Op != OpConst64 -> (Xor64 (Const64 <t> [c]) x)
   199  (Xor32 x (Const32 <t> [c])) && x.Op != OpConst32 -> (Xor32 (Const32 <t> [c]) x)
   200  (Xor16 x (Const16 <t> [c])) && x.Op != OpConst16 -> (Xor16 (Const16 <t> [c]) x)
   201  (Xor8  x (Const8  <t> [c])) && x.Op != OpConst8  -> (Xor8  (Const8  <t> [c]) x)
   202  
   203  // Distribute multiplication c * (d+x) -> c*d + c*x. Useful for:
   204  // a[i].b = ...; a[i+1].b = ...
   205  (Mul64 (Const64 <t> [c]) (Add64 <t> (Const64 <t> [d]) x)) ->
   206    (Add64 (Const64 <t> [c*d]) (Mul64 <t> (Const64 <t> [c]) x))
   207  (Mul32 (Const32 <t> [c]) (Add32 <t> (Const32 <t> [d]) x)) ->
   208    (Add32 (Const32 <t> [int64(int32(c*d))]) (Mul32 <t> (Const32 <t> [c]) x))
   209  
    210  // rewrite 8/16/32 bit constant shift amounts into 64 bit constants to reduce
    211  // the number of other rewrite rules needed for constant shifts
   212  (Lsh64x32  <t> x (Const32 [c])) -> (Lsh64x64  x (Const64 <t> [int64(uint32(c))]))
   213  (Lsh64x16  <t> x (Const16 [c])) -> (Lsh64x64  x (Const64 <t> [int64(uint16(c))]))
   214  (Lsh64x8   <t> x (Const8  [c])) -> (Lsh64x64  x (Const64 <t> [int64(uint8(c))]))
   215  (Rsh64x32  <t> x (Const32 [c])) -> (Rsh64x64  x (Const64 <t> [int64(uint32(c))]))
   216  (Rsh64x16  <t> x (Const16 [c])) -> (Rsh64x64  x (Const64 <t> [int64(uint16(c))]))
   217  (Rsh64x8   <t> x (Const8  [c])) -> (Rsh64x64  x (Const64 <t> [int64(uint8(c))]))
   218  (Rsh64Ux32 <t> x (Const32 [c])) -> (Rsh64Ux64 x (Const64 <t> [int64(uint32(c))]))
   219  (Rsh64Ux16 <t> x (Const16 [c])) -> (Rsh64Ux64 x (Const64 <t> [int64(uint16(c))]))
   220  (Rsh64Ux8  <t> x (Const8  [c])) -> (Rsh64Ux64 x (Const64 <t> [int64(uint8(c))]))
   221  
   222  (Lsh32x32  <t> x (Const32 [c])) -> (Lsh32x64  x (Const64 <t> [int64(uint32(c))]))
   223  (Lsh32x16  <t> x (Const16 [c])) -> (Lsh32x64  x (Const64 <t> [int64(uint16(c))]))
   224  (Lsh32x8   <t> x (Const8  [c])) -> (Lsh32x64  x (Const64 <t> [int64(uint8(c))]))
   225  (Rsh32x32  <t> x (Const32 [c])) -> (Rsh32x64  x (Const64 <t> [int64(uint32(c))]))
   226  (Rsh32x16  <t> x (Const16 [c])) -> (Rsh32x64  x (Const64 <t> [int64(uint16(c))]))
   227  (Rsh32x8   <t> x (Const8  [c])) -> (Rsh32x64  x (Const64 <t> [int64(uint8(c))]))
   228  (Rsh32Ux32 <t> x (Const32 [c])) -> (Rsh32Ux64 x (Const64 <t> [int64(uint32(c))]))
   229  (Rsh32Ux16 <t> x (Const16 [c])) -> (Rsh32Ux64 x (Const64 <t> [int64(uint16(c))]))
   230  (Rsh32Ux8  <t> x (Const8  [c])) -> (Rsh32Ux64 x (Const64 <t> [int64(uint8(c))]))
   231  
   232  (Lsh16x32  <t> x (Const32 [c])) -> (Lsh16x64  x (Const64 <t> [int64(uint32(c))]))
   233  (Lsh16x16  <t> x (Const16 [c])) -> (Lsh16x64  x (Const64 <t> [int64(uint16(c))]))
   234  (Lsh16x8   <t> x (Const8  [c])) -> (Lsh16x64  x (Const64 <t> [int64(uint8(c))]))
   235  (Rsh16x32  <t> x (Const32 [c])) -> (Rsh16x64  x (Const64 <t> [int64(uint32(c))]))
   236  (Rsh16x16  <t> x (Const16 [c])) -> (Rsh16x64  x (Const64 <t> [int64(uint16(c))]))
   237  (Rsh16x8   <t> x (Const8  [c])) -> (Rsh16x64  x (Const64 <t> [int64(uint8(c))]))
   238  (Rsh16Ux32 <t> x (Const32 [c])) -> (Rsh16Ux64 x (Const64 <t> [int64(uint32(c))]))
   239  (Rsh16Ux16 <t> x (Const16 [c])) -> (Rsh16Ux64 x (Const64 <t> [int64(uint16(c))]))
   240  (Rsh16Ux8  <t> x (Const8  [c])) -> (Rsh16Ux64 x (Const64 <t> [int64(uint8(c))]))
   241  
   242  (Lsh8x32  <t> x (Const32 [c])) -> (Lsh8x64  x (Const64 <t> [int64(uint32(c))]))
   243  (Lsh8x16  <t> x (Const16 [c])) -> (Lsh8x64  x (Const64 <t> [int64(uint16(c))]))
   244  (Lsh8x8   <t> x (Const8  [c])) -> (Lsh8x64  x (Const64 <t> [int64(uint8(c))]))
   245  (Rsh8x32  <t> x (Const32 [c])) -> (Rsh8x64  x (Const64 <t> [int64(uint32(c))]))
   246  (Rsh8x16  <t> x (Const16 [c])) -> (Rsh8x64  x (Const64 <t> [int64(uint16(c))]))
   247  (Rsh8x8   <t> x (Const8  [c])) -> (Rsh8x64  x (Const64 <t> [int64(uint8(c))]))
   248  (Rsh8Ux32 <t> x (Const32 [c])) -> (Rsh8Ux64 x (Const64 <t> [int64(uint32(c))]))
   249  (Rsh8Ux16 <t> x (Const16 [c])) -> (Rsh8Ux64 x (Const64 <t> [int64(uint16(c))]))
   250  (Rsh8Ux8  <t> x (Const8  [c])) -> (Rsh8Ux64 x (Const64 <t> [int64(uint8(c))]))
   251  
   252  // shifts by zero
   253  (Lsh64x64  x (Const64 [0])) -> x
   254  (Rsh64x64  x (Const64 [0])) -> x
   255  (Rsh64Ux64 x (Const64 [0])) -> x
   256  (Lsh32x64  x (Const64 [0])) -> x
   257  (Rsh32x64  x (Const64 [0])) -> x
   258  (Rsh32Ux64 x (Const64 [0])) -> x
   259  (Lsh16x64  x (Const64 [0])) -> x
   260  (Rsh16x64  x (Const64 [0])) -> x
   261  (Rsh16Ux64 x (Const64 [0])) -> x
   262  (Lsh8x64   x (Const64 [0])) -> x
   263  (Rsh8x64   x (Const64 [0])) -> x
   264  (Rsh8Ux64  x (Const64 [0])) -> x
   265  
    266  // shifting a zero value by any amount still yields zero.
   267  (Lsh64x64  (Const64 [0]) _) -> (Const64 [0])
   268  (Lsh64x32  (Const64 [0]) _) -> (Const64 [0])
   269  (Lsh64x16  (Const64 [0]) _) -> (Const64 [0])
   270  (Lsh64x8  (Const64 [0]) _) -> (Const64 [0])
   271  (Rsh64x64  (Const64 [0]) _) -> (Const64 [0])
   272  (Rsh64x32  (Const64 [0]) _) -> (Const64 [0])
   273  (Rsh64x16  (Const64 [0]) _) -> (Const64 [0])
   274  (Rsh64x8  (Const64 [0]) _) -> (Const64 [0])
   275  (Rsh64Ux64 (Const64 [0]) _) -> (Const64 [0])
   276  (Rsh64Ux32 (Const64 [0]) _) -> (Const64 [0])
   277  (Rsh64Ux16 (Const64 [0]) _) -> (Const64 [0])
   278  (Rsh64Ux8 (Const64 [0]) _) -> (Const64 [0])
   279  (Lsh32x64  (Const32 [0]) _) -> (Const32 [0])
   280  (Lsh32x32  (Const32 [0]) _) -> (Const32 [0])
   281  (Lsh32x16  (Const32 [0]) _) -> (Const32 [0])
   282  (Lsh32x8  (Const32 [0]) _) -> (Const32 [0])
   283  (Rsh32x64  (Const32 [0]) _) -> (Const32 [0])
   284  (Rsh32x32  (Const32 [0]) _) -> (Const32 [0])
   285  (Rsh32x16  (Const32 [0]) _) -> (Const32 [0])
   286  (Rsh32x8  (Const32 [0]) _) -> (Const32 [0])
   287  (Rsh32Ux64 (Const32 [0]) _) -> (Const32 [0])
   288  (Rsh32Ux32 (Const32 [0]) _) -> (Const32 [0])
   289  (Rsh32Ux16 (Const32 [0]) _) -> (Const32 [0])
   290  (Rsh32Ux8 (Const32 [0]) _) -> (Const32 [0])
   291  (Lsh16x64  (Const16 [0]) _) -> (Const16 [0])
   292  (Lsh16x32  (Const16 [0]) _) -> (Const16 [0])
   293  (Lsh16x16  (Const16 [0]) _) -> (Const16 [0])
   294  (Lsh16x8  (Const16 [0]) _) -> (Const16 [0])
   295  (Rsh16x64  (Const16 [0]) _) -> (Const16 [0])
   296  (Rsh16x32  (Const16 [0]) _) -> (Const16 [0])
   297  (Rsh16x16  (Const16 [0]) _) -> (Const16 [0])
   298  (Rsh16x8  (Const16 [0]) _) -> (Const16 [0])
   299  (Rsh16Ux64 (Const16 [0]) _) -> (Const16 [0])
   300  (Rsh16Ux32 (Const16 [0]) _) -> (Const16 [0])
   301  (Rsh16Ux16 (Const16 [0]) _) -> (Const16 [0])
   302  (Rsh16Ux8 (Const16 [0]) _) -> (Const16 [0])
   303  (Lsh8x64   (Const8 [0]) _) -> (Const8  [0])
   304  (Lsh8x32   (Const8 [0]) _) -> (Const8  [0])
   305  (Lsh8x16   (Const8 [0]) _) -> (Const8  [0])
   306  (Lsh8x8   (Const8 [0]) _) -> (Const8  [0])
   307  (Rsh8x64   (Const8 [0]) _) -> (Const8  [0])
   308  (Rsh8x32   (Const8 [0]) _) -> (Const8  [0])
   309  (Rsh8x16   (Const8 [0]) _) -> (Const8  [0])
   310  (Rsh8x8   (Const8 [0]) _) -> (Const8  [0])
   311  (Rsh8Ux64  (Const8 [0]) _) -> (Const8  [0])
   312  (Rsh8Ux32  (Const8 [0]) _) -> (Const8  [0])
   313  (Rsh8Ux16  (Const8 [0]) _) -> (Const8  [0])
   314  (Rsh8Ux8  (Const8 [0]) _) -> (Const8  [0])
   315  
    316  // large left shifts of all values, and large right shifts of unsigned values, yield zero
   317  (Lsh64x64  _ (Const64 [c])) && uint64(c) >= 64 -> (Const64 [0])
   318  (Rsh64Ux64 _ (Const64 [c])) && uint64(c) >= 64 -> (Const64 [0])
   319  (Lsh32x64  _ (Const64 [c])) && uint64(c) >= 32 -> (Const32 [0])
   320  (Rsh32Ux64 _ (Const64 [c])) && uint64(c) >= 32 -> (Const32 [0])
   321  (Lsh16x64  _ (Const64 [c])) && uint64(c) >= 16 -> (Const16 [0])
   322  (Rsh16Ux64 _ (Const64 [c])) && uint64(c) >= 16 -> (Const16 [0])
   323  (Lsh8x64   _ (Const64 [c])) && uint64(c) >= 8  -> (Const8  [0])
   324  (Rsh8Ux64  _ (Const64 [c])) && uint64(c) >= 8  -> (Const8  [0])
   325  
   326  // combine const shifts
   327  (Lsh64x64 <t> (Lsh64x64 x (Const64 [c])) (Const64 [d])) && !uaddOvf(c,d) -> (Lsh64x64 x (Const64 <t> [c+d]))
   328  (Lsh32x64 <t> (Lsh32x64 x (Const64 [c])) (Const64 [d])) && !uaddOvf(c,d) -> (Lsh32x64 x (Const64 <t> [c+d]))
   329  (Lsh16x64 <t> (Lsh16x64 x (Const64 [c])) (Const64 [d])) && !uaddOvf(c,d) -> (Lsh16x64 x (Const64 <t> [c+d]))
   330  (Lsh8x64  <t> (Lsh8x64  x (Const64 [c])) (Const64 [d])) && !uaddOvf(c,d) -> (Lsh8x64  x (Const64 <t> [c+d]))
   331  
   332  (Rsh64x64 <t> (Rsh64x64 x (Const64 [c])) (Const64 [d])) && !uaddOvf(c,d) -> (Rsh64x64 x (Const64 <t> [c+d]))
   333  (Rsh32x64 <t> (Rsh32x64 x (Const64 [c])) (Const64 [d])) && !uaddOvf(c,d) -> (Rsh32x64 x (Const64 <t> [c+d]))
   334  (Rsh16x64 <t> (Rsh16x64 x (Const64 [c])) (Const64 [d])) && !uaddOvf(c,d) -> (Rsh16x64 x (Const64 <t> [c+d]))
   335  (Rsh8x64  <t> (Rsh8x64  x (Const64 [c])) (Const64 [d])) && !uaddOvf(c,d) -> (Rsh8x64  x (Const64 <t> [c+d]))
   336  
   337  (Rsh64Ux64 <t> (Rsh64Ux64 x (Const64 [c])) (Const64 [d])) && !uaddOvf(c,d) -> (Rsh64Ux64 x (Const64 <t> [c+d]))
   338  (Rsh32Ux64 <t> (Rsh32Ux64 x (Const64 [c])) (Const64 [d])) && !uaddOvf(c,d) -> (Rsh32Ux64 x (Const64 <t> [c+d]))
   339  (Rsh16Ux64 <t> (Rsh16Ux64 x (Const64 [c])) (Const64 [d])) && !uaddOvf(c,d) -> (Rsh16Ux64 x (Const64 <t> [c+d]))
   340  (Rsh8Ux64  <t> (Rsh8Ux64  x (Const64 [c])) (Const64 [d])) && !uaddOvf(c,d) -> (Rsh8Ux64  x (Const64 <t> [c+d]))
   341  
   342  // ((x >> c1) << c2) >> c3
   343  (Rsh64Ux64 (Lsh64x64 (Rsh64Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
   344    && uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
   345    -> (Rsh64Ux64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3]))
   346  (Rsh32Ux64 (Lsh32x64 (Rsh32Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
   347    && uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
   348    -> (Rsh32Ux64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3]))
   349  (Rsh16Ux64 (Lsh16x64 (Rsh16Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
   350    && uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
   351    -> (Rsh16Ux64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3]))
   352  (Rsh8Ux64 (Lsh8x64 (Rsh8Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
   353    && uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
   354    -> (Rsh8Ux64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3]))
   355  
   356  // ((x << c1) >> c2) << c3
   357  (Lsh64x64 (Rsh64Ux64 (Lsh64x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
   358    && uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
   359    -> (Lsh64x64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3]))
   360  (Lsh32x64 (Rsh32Ux64 (Lsh32x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
   361    && uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
   362    -> (Lsh32x64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3]))
   363  (Lsh16x64 (Rsh16Ux64 (Lsh16x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
   364    && uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
   365    -> (Lsh16x64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3]))
   366  (Lsh8x64 (Rsh8Ux64 (Lsh8x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
   367    && uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
   368    -> (Lsh8x64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3]))
   369  
   370  // constant comparisons
   371  (Eq64 (Const64 [c]) (Const64 [d])) -> (ConstBool [b2i(c == d)])
   372  (Eq32 (Const32 [c]) (Const32 [d])) -> (ConstBool [b2i(c == d)])
   373  (Eq16 (Const16 [c]) (Const16 [d])) -> (ConstBool [b2i(c == d)])
   374  (Eq8  (Const8  [c]) (Const8  [d])) -> (ConstBool [b2i(c == d)])
   375  
   376  (Neq64 (Const64 [c]) (Const64 [d])) -> (ConstBool [b2i(c != d)])
   377  (Neq32 (Const32 [c]) (Const32 [d])) -> (ConstBool [b2i(c != d)])
   378  (Neq16 (Const16 [c]) (Const16 [d])) -> (ConstBool [b2i(c != d)])
   379  (Neq8  (Const8  [c]) (Const8  [d])) -> (ConstBool [b2i(c != d)])
   380  
   381  (Greater64 (Const64 [c]) (Const64 [d])) -> (ConstBool [b2i(c > d)])
   382  (Greater32 (Const32 [c]) (Const32 [d])) -> (ConstBool [b2i(c > d)])
   383  (Greater16 (Const16 [c]) (Const16 [d])) -> (ConstBool [b2i(c > d)])
   384  (Greater8  (Const8  [c]) (Const8  [d])) -> (ConstBool [b2i(c > d)])
   385  
   386  (Greater64U (Const64 [c]) (Const64 [d])) -> (ConstBool [b2i(uint64(c) > uint64(d))])
   387  (Greater32U (Const32 [c]) (Const32 [d])) -> (ConstBool [b2i(uint32(c) > uint32(d))])
   388  (Greater16U (Const16 [c]) (Const16 [d])) -> (ConstBool [b2i(uint16(c) > uint16(d))])
   389  (Greater8U  (Const8  [c]) (Const8  [d])) -> (ConstBool [b2i(uint8(c)  > uint8(d))])
   390  
   391  (Geq64 (Const64 [c]) (Const64 [d])) -> (ConstBool [b2i(c >= d)])
   392  (Geq32 (Const32 [c]) (Const32 [d])) -> (ConstBool [b2i(c >= d)])
   393  (Geq16 (Const16 [c]) (Const16 [d])) -> (ConstBool [b2i(c >= d)])
   394  (Geq8  (Const8  [c]) (Const8  [d])) -> (ConstBool [b2i(c >= d)])
   395  
   396  (Geq64U (Const64 [c]) (Const64 [d])) -> (ConstBool [b2i(uint64(c) >= uint64(d))])
   397  (Geq32U (Const32 [c]) (Const32 [d])) -> (ConstBool [b2i(uint32(c) >= uint32(d))])
   398  (Geq16U (Const16 [c]) (Const16 [d])) -> (ConstBool [b2i(uint16(c) >= uint16(d))])
   399  (Geq8U  (Const8  [c]) (Const8  [d])) -> (ConstBool [b2i(uint8(c)  >= uint8(d))])
   400  
   401  (Less64 (Const64 [c]) (Const64 [d])) -> (ConstBool [b2i(c < d)])
   402  (Less32 (Const32 [c]) (Const32 [d])) -> (ConstBool [b2i(c < d)])
   403  (Less16 (Const16 [c]) (Const16 [d])) -> (ConstBool [b2i(c < d)])
   404  (Less8  (Const8  [c]) (Const8  [d])) -> (ConstBool [b2i(c < d)])
   405  
   406  (Less64U (Const64 [c]) (Const64 [d])) -> (ConstBool [b2i(uint64(c) < uint64(d))])
   407  (Less32U (Const32 [c]) (Const32 [d])) -> (ConstBool [b2i(uint32(c) < uint32(d))])
   408  (Less16U (Const16 [c]) (Const16 [d])) -> (ConstBool [b2i(uint16(c) < uint16(d))])
   409  (Less8U  (Const8  [c]) (Const8  [d])) -> (ConstBool [b2i(uint8(c)  < uint8(d))])
   410  
   411  (Leq64 (Const64 [c]) (Const64 [d])) -> (ConstBool [b2i(c <= d)])
   412  (Leq32 (Const32 [c]) (Const32 [d])) -> (ConstBool [b2i(c <= d)])
   413  (Leq16 (Const16 [c]) (Const16 [d])) -> (ConstBool [b2i(c <= d)])
   414  (Leq8  (Const8  [c]) (Const8  [d])) -> (ConstBool [b2i(c <= d)])
   415  
   416  (Leq64U (Const64 [c]) (Const64 [d])) -> (ConstBool [b2i(uint64(c) <= uint64(d))])
   417  (Leq32U (Const32 [c]) (Const32 [d])) -> (ConstBool [b2i(uint32(c) <= uint32(d))])
   418  (Leq16U (Const16 [c]) (Const16 [d])) -> (ConstBool [b2i(uint16(c) <= uint16(d))])
   419  (Leq8U  (Const8  [c]) (Const8  [d])) -> (ConstBool [b2i(uint8(c)  <= uint8(d))])
   420  
   421  // simplifications
   422  (Or64 x x) -> x
   423  (Or32 x x) -> x
   424  (Or16 x x) -> x
   425  (Or8  x x) -> x
   426  (Or64 (Const64 [0]) x) -> x
   427  (Or32 (Const32 [0]) x) -> x
   428  (Or16 (Const16 [0]) x) -> x
   429  (Or8  (Const8  [0]) x) -> x
   430  (Or64 (Const64 [-1]) _) -> (Const64 [-1])
   431  (Or32 (Const32 [-1]) _) -> (Const32 [-1])
   432  (Or16 (Const16 [-1]) _) -> (Const16 [-1])
   433  (Or8  (Const8  [-1]) _) -> (Const8  [-1])
   434  (And64 x x) -> x
   435  (And32 x x) -> x
   436  (And16 x x) -> x
   437  (And8  x x) -> x
   438  (And64 (Const64 [-1]) x) -> x
   439  (And32 (Const32 [-1]) x) -> x
   440  (And16 (Const16 [-1]) x) -> x
   441  (And8  (Const8  [-1]) x) -> x
   442  (And64 (Const64 [0]) _) -> (Const64 [0])
   443  (And32 (Const32 [0]) _) -> (Const32 [0])
   444  (And16 (Const16 [0]) _) -> (Const16 [0])
   445  (And8  (Const8  [0]) _) -> (Const8  [0])
   446  (Xor64 x x) -> (Const64 [0])
   447  (Xor32 x x) -> (Const32 [0])
   448  (Xor16 x x) -> (Const16 [0])
   449  (Xor8  x x) -> (Const8  [0])
   450  (Xor64 (Const64 [0]) x) -> x
   451  (Xor32 (Const32 [0]) x) -> x
   452  (Xor16 (Const16 [0]) x) -> x
   453  (Xor8  (Const8  [0]) x) -> x
   454  (Add64 (Const64 [0]) x) -> x
   455  (Add32 (Const32 [0]) x) -> x
   456  (Add16 (Const16 [0]) x) -> x
   457  (Add8  (Const8  [0]) x) -> x
   458  (Sub64 x x) -> (Const64 [0])
   459  (Sub32 x x) -> (Const32 [0])
   460  (Sub16 x x) -> (Const16 [0])
   461  (Sub8  x x) -> (Const8  [0])
   462  (Mul64 (Const64 [0]) _) -> (Const64 [0])
   463  (Mul32 (Const32 [0]) _) -> (Const32 [0])
   464  (Mul16 (Const16 [0]) _) -> (Const16 [0])
   465  (Mul8  (Const8  [0]) _) -> (Const8  [0])
   466  (Com8  (Com8  x)) -> x
   467  (Com16 (Com16 x)) -> x
   468  (Com32 (Com32 x)) -> x
   469  (Com64 (Com64 x)) -> x
   470  (Neg8  (Sub8  x y)) -> (Sub8  y x)
   471  (Neg16 (Sub16 x y)) -> (Sub16 y x)
   472  (Neg32 (Sub32 x y)) -> (Sub32 y x)
   473  (Neg64 (Sub64 x y)) -> (Sub64 y x)
   474  
   475  (And64 x (And64 x y)) -> (And64 x y)
   476  (And32 x (And32 x y)) -> (And32 x y)
   477  (And16 x (And16 x y)) -> (And16 x y)
   478  (And8  x (And8  x y)) -> (And8  x y)
   479  (And64 x (And64 y x)) -> (And64 x y)
   480  (And32 x (And32 y x)) -> (And32 x y)
// Absorption for idempotent ops: if x is already an operand of the inner
// And/Or, applying it again changes nothing, so drop the duplicate.
(And16 x (And16 y x)) -> (And16 x y)
(And8  x (And8  y x)) -> (And8  x y)
(And64 (And64 x y) x) -> (And64 x y)
(And32 (And32 x y) x) -> (And32 x y)
(And16 (And16 x y) x) -> (And16 x y)
(And8  (And8  x y) x) -> (And8  x y)
(And64 (And64 x y) y) -> (And64 x y)
(And32 (And32 x y) y) -> (And32 x y)
(And16 (And16 x y) y) -> (And16 x y)
(And8  (And8  x y) y) -> (And8  x y)
(Or64 x (Or64 x y)) -> (Or64 x y)
(Or32 x (Or32 x y)) -> (Or32 x y)
(Or16 x (Or16 x y)) -> (Or16 x y)
(Or8  x (Or8  x y)) -> (Or8  x y)
(Or64 x (Or64 y x)) -> (Or64 x y)
(Or32 x (Or32 y x)) -> (Or32 x y)
(Or16 x (Or16 y x)) -> (Or16 x y)
(Or8  x (Or8  y x)) -> (Or8  x y)
(Or64 (Or64 x y) x) -> (Or64 x y)
(Or32 (Or32 x y) x) -> (Or32 x y)
(Or16 (Or16 x y) x) -> (Or16 x y)
(Or8  (Or8  x y) x) -> (Or8  x y)
(Or64 (Or64 x y) y) -> (Or64 x y)
(Or32 (Or32 x y) y) -> (Or32 x y)
(Or16 (Or16 x y) y) -> (Or16 x y)
(Or8  (Or8  x y) y) -> (Or8  x y)
// Xor cancellation: the duplicated operand xors with itself to zero,
// leaving just the other operand.
(Xor64 x (Xor64 x y)) -> y
(Xor32 x (Xor32 x y)) -> y
(Xor16 x (Xor16 x y)) -> y
(Xor8  x (Xor8  x y)) -> y
(Xor64 x (Xor64 y x)) -> y
(Xor32 x (Xor32 y x)) -> y
(Xor16 x (Xor16 y x)) -> y
(Xor8  x (Xor8  y x)) -> y
(Xor64 (Xor64 x y) x) -> y
(Xor32 (Xor32 x y) x) -> y
(Xor16 (Xor16 x y) x) -> y
(Xor8  (Xor8  x y) x) -> y
(Xor64 (Xor64 x y) y) -> x
(Xor32 (Xor32 x y) y) -> x
(Xor16 (Xor16 x y) y) -> x
(Xor8  (Xor8  x y) y) -> x

// A mask that keeps every bit surviving the truncation is a no-op;
// the condition checks that the constant covers all retained bits.
(Trunc64to8  (And64 (Const64 [y]) x)) && y&0xFF == 0xFF -> (Trunc64to8 x)
(Trunc64to16 (And64 (Const64 [y]) x)) && y&0xFFFF == 0xFFFF -> (Trunc64to16 x)
(Trunc64to32 (And64 (Const64 [y]) x)) && y&0xFFFFFFFF == 0xFFFFFFFF -> (Trunc64to32 x)
(Trunc32to8  (And32 (Const32 [y]) x)) && y&0xFF == 0xFF -> (Trunc32to8 x)
(Trunc32to16 (And32 (Const32 [y]) x)) && y&0xFFFF == 0xFFFF -> (Trunc32to16 x)
(Trunc16to8  (And16 (Const16 [y]) x)) && y&0xFF == 0xFF -> (Trunc16to8 x)

// Rewrite AND of consts as shifts if possible, slightly faster for 64 bit operands
// leading zeros can be shifted left, then right
(And64 <t> (Const64 [y]) x) && nlz(y) + nto(y) == 64 && nto(y) >= 32
  -> (Rsh64Ux64 (Lsh64x64 <t> x (Const64 <t> [nlz(y)])) (Const64 <t> [nlz(y)]))
// trailing zeros can be shifted right, then left
(And64 <t> (Const64 [y]) x) && nlo(y) + ntz(y) == 64 && ntz(y) >= 32
  -> (Lsh64x64 (Rsh64Ux64 <t> x (Const64 <t> [ntz(y)])) (Const64 <t> [ntz(y)]))

// simplifications often used for lengths.  e.g. len(s[i:i+5])==5
// (x+y)-x == y and (x+y)-y == x; exact for the wrapping integer ops.
(Sub64 (Add64 x y) x) -> y
(Sub64 (Add64 x y) y) -> x
(Sub32 (Add32 x y) x) -> y
(Sub32 (Add32 x y) y) -> x
(Sub16 (Add16 x y) x) -> y
(Sub16 (Add16 x y) y) -> x
(Sub8  (Add8  x y) x) -> y
(Sub8  (Add8  x y) y) -> x
   548  
// basic phi simplifications
// A two-predecessor Phi whose arguments are the same constant is that
// constant; other arities are left alone.
(Phi (Const8  [c]) (Const8  [c])) -> (Const8  [c])
(Phi (Const16 [c]) (Const16 [c])) -> (Const16 [c])
(Phi (Const32 [c]) (Const32 [c])) -> (Const32 [c])
(Phi (Const64 [c]) (Const64 [c])) -> (Const64 [c])

// user nil checks
// Turn pointer comparisons against the nil constant into the cheaper
// one-operand IsNonNil test (negated for equality).
(NeqPtr p (ConstNil)) -> (IsNonNil p)
(NeqPtr (ConstNil) p) -> (IsNonNil p)
(EqPtr p (ConstNil)) -> (Not (IsNonNil p))
(EqPtr (ConstNil) p) -> (Not (IsNonNil p))

// slice and interface comparisons
// The frontend ensures that we can only compare against nil,
// so we need only compare the first word (interface type or slice ptr).
(EqInter x y)  -> (EqPtr  (ITab x) (ITab y))
(NeqInter x y) -> (NeqPtr (ITab x) (ITab y))
(EqSlice x y)  -> (EqPtr  (SlicePtr x) (SlicePtr y))
(NeqSlice x y) -> (NeqPtr (SlicePtr x) (SlicePtr y))

// Load of store of same address, with compatibly typed value and same size
(Load <t1> p1 (Store [w] p2 x _)) && isSamePtr(p1,p2) && t1.Compare(x.Type)==CMPeq && w == t1.Size() -> x

// Collapse OffPtr
(OffPtr (OffPtr p [b]) [a]) -> (OffPtr p [a+b])
// A zero offset is a no-op when the pointer types agree exactly.
(OffPtr p [0]) && v.Type.Compare(p.Type) == CMPeq -> p

// indexing operations
// Note: bounds check has already been done
// "@x.Block" emits the replacement value in x's block rather than v's.
(ArrayIndex <t> [0] x:(Load ptr mem)) -> @x.Block (Load <t> ptr mem)
// Scale the index by the element size using the pointer-width multiply.
(PtrIndex <t> ptr idx) && config.PtrSize == 4 -> (AddPtr ptr (Mul32 <config.fe.TypeInt()> idx (Const32 <config.fe.TypeInt()> [t.ElemType().Size()])))
(PtrIndex <t> ptr idx) && config.PtrSize == 8 -> (AddPtr ptr (Mul64 <config.fe.TypeInt()> idx (Const64 <config.fe.TypeInt()> [t.ElemType().Size()])))
   581  
// struct operations
// Select a field straight out of an in-register struct construction;
// auxint is the field index.
(StructSelect (StructMake1 x)) -> x
(StructSelect [0] (StructMake2 x _)) -> x
(StructSelect [1] (StructMake2 _ x)) -> x
(StructSelect [0] (StructMake3 x _ _)) -> x
(StructSelect [1] (StructMake3 _ x _)) -> x
(StructSelect [2] (StructMake3 _ _ x)) -> x
(StructSelect [0] (StructMake4 x _ _ _)) -> x
(StructSelect [1] (StructMake4 _ x _ _)) -> x
(StructSelect [2] (StructMake4 _ _ x _)) -> x
(StructSelect [3] (StructMake4 _ _ _ x)) -> x

// Decompose an SSA-able struct load into one load per field, each at the
// field's offset from the base pointer (field 0 is at offset 0).
(Load <t> _ _) && t.IsStruct() && t.NumFields() == 0 && config.fe.CanSSA(t) ->
  (StructMake0)
(Load <t> ptr mem) && t.IsStruct() && t.NumFields() == 1 && config.fe.CanSSA(t) ->
  (StructMake1
    (Load <t.FieldType(0)> ptr mem))
(Load <t> ptr mem) && t.IsStruct() && t.NumFields() == 2 && config.fe.CanSSA(t) ->
  (StructMake2
    (Load <t.FieldType(0)> ptr mem)
    (Load <t.FieldType(1)> (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] ptr) mem))
(Load <t> ptr mem) && t.IsStruct() && t.NumFields() == 3 && config.fe.CanSSA(t) ->
  (StructMake3
    (Load <t.FieldType(0)> ptr mem)
    (Load <t.FieldType(1)> (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] ptr) mem)
    (Load <t.FieldType(2)> (OffPtr <t.FieldType(2).PtrTo()> [t.FieldOff(2)] ptr) mem))
(Load <t> ptr mem) && t.IsStruct() && t.NumFields() == 4 && config.fe.CanSSA(t) ->
  (StructMake4
    (Load <t.FieldType(0)> ptr mem)
    (Load <t.FieldType(1)> (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] ptr) mem)
    (Load <t.FieldType(2)> (OffPtr <t.FieldType(2).PtrTo()> [t.FieldOff(2)] ptr) mem)
    (Load <t.FieldType(3)> (OffPtr <t.FieldType(3).PtrTo()> [t.FieldOff(3)] ptr) mem))

// For a struct that cannot be SSA'd, load just the selected field from
// memory, in the load's block.
(StructSelect [i] x:(Load <t> ptr mem)) && !config.fe.CanSSA(t) ->
  @x.Block (Load <v.Type> (OffPtr <v.Type.PtrTo()> [t.FieldOff(int(i))] ptr) mem)

// Decompose a struct store into per-field stores. The stores are chained
// through memory with field 0 innermost, so field 0 is stored first.
(Store _ (StructMake0) mem) -> mem
(Store dst (StructMake1 <t> f0) mem) ->
  (Store [t.FieldType(0).Size()] dst f0 mem)
(Store dst (StructMake2 <t> f0 f1) mem) ->
  (Store [t.FieldType(1).Size()]
    (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] dst)
    f1
    (Store [t.FieldType(0).Size()] dst f0 mem))
(Store dst (StructMake3 <t> f0 f1 f2) mem) ->
  (Store [t.FieldType(2).Size()]
    (OffPtr <t.FieldType(2).PtrTo()> [t.FieldOff(2)] dst)
    f2
    (Store [t.FieldType(1).Size()]
      (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] dst)
      f1
      (Store [t.FieldType(0).Size()] dst f0 mem)))
(Store dst (StructMake4 <t> f0 f1 f2 f3) mem) ->
  (Store [t.FieldType(3).Size()]
    (OffPtr <t.FieldType(3).PtrTo()> [t.FieldOff(3)] dst)
    f3
    (Store [t.FieldType(2).Size()]
      (OffPtr <t.FieldType(2).PtrTo()> [t.FieldOff(2)] dst)
      f2
      (Store [t.FieldType(1).Size()]
        (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] dst)
        f1
        (Store [t.FieldType(0).Size()] dst f0 mem))))

// un-SSAable values use mem->mem copies
(Store [size] dst (Load <t> src mem) mem) && !config.fe.CanSSA(t) ->
	(Move [MakeSizeAndAlign(size, t.Alignment()).Int64()] dst src mem)
(Store [size] dst (Load <t> src mem) (VarDef {x} mem)) && !config.fe.CanSSA(t) ->
	(Move [MakeSizeAndAlign(size, t.Alignment()).Int64()] dst src (VarDef {x} mem))
   651  
// string ops
// Decomposing StringMake and lowering of StringPtr and StringLen
// happens in a later pass, dec, so that these operations are available
// to other passes for optimizations.
(StringPtr (StringMake (Const64 <t> [c]) _)) -> (Const64 <t> [c])
(StringLen (StringMake _ (Const64 <t> [c]))) -> (Const64 <t> [c])
// An empty-string constant is a nil pointer plus a zero length; a
// non-empty one points at static string data emitted by the frontend.
(ConstString {s}) && config.PtrSize == 4 && s.(string) == "" ->
  (StringMake (ConstNil) (Const32 <config.fe.TypeInt()> [0]))
(ConstString {s}) && config.PtrSize == 8 && s.(string) == "" ->
  (StringMake (ConstNil) (Const64 <config.fe.TypeInt()> [0]))
(ConstString {s}) && config.PtrSize == 4 && s.(string) != "" ->
  (StringMake
    (Addr <config.fe.TypeBytePtr()> {config.fe.StringData(s.(string))}
      (SB))
    (Const32 <config.fe.TypeInt()> [int64(len(s.(string)))]))
(ConstString {s}) && config.PtrSize == 8 && s.(string) != "" ->
  (StringMake
    (Addr <config.fe.TypeBytePtr()> {config.fe.StringData(s.(string))}
      (SB))
    (Const64 <config.fe.TypeInt()> [int64(len(s.(string)))]))

// slice ops
// Only a few slice rules are provided here.  See dec.rules for
// a more comprehensive set.
(SliceLen (SliceMake _ (Const64 <t> [c]) _)) -> (Const64 <t> [c])
(SliceCap (SliceMake _ _ (Const64 <t> [c]))) -> (Const64 <t> [c])
(SlicePtr (SliceMake (SlicePtr x) _ _)) -> (SlicePtr x)
(SliceLen (SliceMake _ (SliceLen x) _)) -> (SliceLen x)
(SliceCap (SliceMake _ _ (SliceCap x))) -> (SliceCap x)
(SliceCap (SliceMake _ _ (SliceLen x))) -> (SliceLen x)
// A constant slice is the zero slice: nil pointer, zero len, zero cap.
(ConstSlice) && config.PtrSize == 4 ->
  (SliceMake
    (ConstNil <v.Type.ElemType().PtrTo()>)
    (Const32 <config.fe.TypeInt()> [0])
    (Const32 <config.fe.TypeInt()> [0]))
(ConstSlice) && config.PtrSize == 8 ->
  (SliceMake
    (ConstNil <v.Type.ElemType().PtrTo()>)
    (Const64 <config.fe.TypeInt()> [0])
    (Const64 <config.fe.TypeInt()> [0]))

// interface ops
// A constant interface is the nil interface: nil type word, nil data word.
(ConstInterface) ->
  (IMake
    (ConstNil <config.fe.TypeBytePtr()>)
    (ConstNil <config.fe.TypeBytePtr()>))

// Drop a nil check of the g pointer: replace the Check block with a
// plain branch to next.
(Check (NilCheck (GetG _) _) next) -> (Plain nil next)

// Invert a branch on a negated condition by swapping its successors,
// and fold branches on constant conditions into unconditional ones.
(If (Not cond) yes no) -> (If cond no yes)
(If (ConstBool [c]) yes no) && c == 1 -> (First nil yes no)
(If (ConstBool [c]) yes no) && c == 0 -> (First nil no yes)

// Get rid of Convert ops for pointer arithmetic on unsafe.Pointer.
(Convert (Add64 (Convert ptr mem) off) mem) -> (Add64 ptr off)
(Convert (Add64 off (Convert ptr mem)) mem) -> (Add64 ptr off)
(Convert (Convert ptr mem) mem) -> ptr
   709  
// Decompose compound argument values
// Each component Arg keeps the same symbol {n} with the component's
// byte offset added to [off].
(Arg {n} [off]) && v.Type.IsString() ->
  (StringMake
    (Arg <config.fe.TypeBytePtr()> {n} [off])
    (Arg <config.fe.TypeInt()> {n} [off+config.PtrSize]))

(Arg {n} [off]) && v.Type.IsSlice() ->
  (SliceMake
    (Arg <v.Type.ElemType().PtrTo()> {n} [off])
    (Arg <config.fe.TypeInt()> {n} [off+config.PtrSize])
    (Arg <config.fe.TypeInt()> {n} [off+2*config.PtrSize]))

(Arg {n} [off]) && v.Type.IsInterface() ->
  (IMake
    (Arg <config.fe.TypeBytePtr()> {n} [off])
    (Arg <config.fe.TypeBytePtr()> {n} [off+config.PtrSize]))

// complex128: two float64 halves; complex64: two float32 halves.
(Arg {n} [off]) && v.Type.IsComplex() && v.Type.Size() == 16 ->
  (ComplexMake
    (Arg <config.fe.TypeFloat64()> {n} [off])
    (Arg <config.fe.TypeFloat64()> {n} [off+8]))

(Arg {n} [off]) && v.Type.IsComplex() && v.Type.Size() == 8 ->
  (ComplexMake
    (Arg <config.fe.TypeFloat32()> {n} [off])
    (Arg <config.fe.TypeFloat32()> {n} [off+4]))

// Struct arguments decompose field by field at their field offsets,
// only when the struct is SSA-able.
(Arg <t>) && t.IsStruct() && t.NumFields() == 0 && config.fe.CanSSA(t) ->
  (StructMake0)
(Arg <t> {n} [off]) && t.IsStruct() && t.NumFields() == 1 && config.fe.CanSSA(t) ->
  (StructMake1
    (Arg <t.FieldType(0)> {n} [off+t.FieldOff(0)]))
(Arg <t> {n} [off]) && t.IsStruct() && t.NumFields() == 2 && config.fe.CanSSA(t) ->
  (StructMake2
    (Arg <t.FieldType(0)> {n} [off+t.FieldOff(0)])
    (Arg <t.FieldType(1)> {n} [off+t.FieldOff(1)]))
(Arg <t> {n} [off]) && t.IsStruct() && t.NumFields() == 3 && config.fe.CanSSA(t) ->
  (StructMake3
    (Arg <t.FieldType(0)> {n} [off+t.FieldOff(0)])
    (Arg <t.FieldType(1)> {n} [off+t.FieldOff(1)])
    (Arg <t.FieldType(2)> {n} [off+t.FieldOff(2)]))
(Arg <t> {n} [off]) && t.IsStruct() && t.NumFields() == 4 && config.fe.CanSSA(t) ->
  (StructMake4
    (Arg <t.FieldType(0)> {n} [off+t.FieldOff(0)])
    (Arg <t.FieldType(1)> {n} [off+t.FieldOff(1)])
    (Arg <t.FieldType(2)> {n} [off+t.FieldOff(2)])
    (Arg <t.FieldType(3)> {n} [off+t.FieldOff(3)]))
   757  
// strength reduction of divide by a constant.
// Note: frontend does <=32 bits. We only need to do 64 bits here.
// TODO: Do them all here?

// Div/mod by 1.  Currently handled by frontend.
//(Div64 n (Const64 [1])) -> n
//(Div64u n (Const64 [1])) -> n
//(Mod64 n (Const64 [1])) -> (Const64 [0])
//(Mod64u n (Const64 [1])) -> (Const64 [0])

// Unsigned divide by power of 2.
(Div64u <t> n (Const64 [c])) && isPowerOfTwo(c) -> (Rsh64Ux64 n (Const64 <t> [log2(c)]))
(Mod64u <t> n (Const64 [c])) && isPowerOfTwo(c) -> (And64 n (Const64 <t> [c-1]))

// Signed divide by power of 2.  Currently handled by frontend.
// n / c = n >> log(c)       if n >= 0
//       = (n+c-1) >> log(c) if n < 0
// We conditionally add c-1 by adding n>>63>>(64-log(c)) (first shift signed, second shift unsigned).
//(Div64 <t> n (Const64 [c])) && isPowerOfTwo(c) ->
//  (Rsh64x64
//    (Add64 <t>
//      n
//      (Rsh64Ux64 <t>
//        (Rsh64x64 <t> n (Const64 <t> [63]))
//        (Const64 <t> [64-log2(c)])))
//    (Const64 <t> [log2(c)]))

// Unsigned divide, not a power of 2.  Strength reduce to a multiply.
// umagic64m/umagic64s supply the magic multiplier and shift; umagic64a
// selects the variant that needs the Avg64u correction step.
(Div64u <t> x (Const64 [c])) && umagic64ok(c) && !umagic64a(c) ->
  (Rsh64Ux64
    (Hmul64u <t>
      (Const64 <t> [umagic64m(c)])
      x)
    (Const64 <t> [umagic64s(c)]))
(Div64u <t> x (Const64 [c])) && umagic64ok(c) && umagic64a(c) ->
  (Rsh64Ux64
    (Avg64u <t>
      (Hmul64u <t>
        x
        (Const64 <t> [umagic64m(c)]))
      x)
    (Const64 <t> [umagic64s(c)-1]))

// Signed divide, not a power of 2.  Strength reduce to a multiply.
// Four cases by the signs of c and the magic multiplier smagic64m(c);
// the final Rsh64x64 by 63 adds back the sign correction, and negative
// divisors negate the positive-divisor result.
(Div64 <t> x (Const64 [c])) && c > 0 && smagic64ok(c) && smagic64m(c) > 0 ->
  (Sub64 <t>
    (Rsh64x64 <t>
      (Hmul64 <t>
        (Const64 <t> [smagic64m(c)])
        x)
      (Const64 <t> [smagic64s(c)]))
    (Rsh64x64 <t>
      x
      (Const64 <t> [63])))
(Div64 <t> x (Const64 [c])) && c > 0 && smagic64ok(c) && smagic64m(c) < 0 ->
  (Sub64 <t>
    (Rsh64x64 <t>
      (Add64 <t>
        (Hmul64 <t>
          (Const64 <t> [smagic64m(c)])
          x)
        x)
      (Const64 <t> [smagic64s(c)]))
    (Rsh64x64 <t>
      x
      (Const64 <t> [63])))
(Div64 <t> x (Const64 [c])) && c < 0 && smagic64ok(c) && smagic64m(c) > 0 ->
  (Neg64 <t>
    (Sub64 <t>
      (Rsh64x64 <t>
        (Hmul64 <t>
          (Const64 <t> [smagic64m(c)])
          x)
        (Const64 <t> [smagic64s(c)]))
      (Rsh64x64 <t>
        x
        (Const64 <t> [63]))))
(Div64 <t> x (Const64 [c])) && c < 0 && smagic64ok(c) && smagic64m(c) < 0 ->
  (Neg64 <t>
    (Sub64 <t>
      (Rsh64x64 <t>
        (Add64 <t>
          (Hmul64 <t>
            (Const64 <t> [smagic64m(c)])
            x)
          x)
        (Const64 <t> [smagic64s(c)]))
      (Rsh64x64 <t>
        x
        (Const64 <t> [63]))))

// A%B = A-(A/B*B).
// This implements % with two * and a bunch of ancillary ops.
// One of the * is free if the user's code also computes A/B.
// The x.Op != OpConst64 guard keeps constant operands for constant folding.
(Mod64  <t> x (Const64 [c])) && x.Op != OpConst64 && smagic64ok(c)
  -> (Sub64 x (Mul64 <t> (Div64  <t> x (Const64 <t> [c])) (Const64 <t> [c])))
(Mod64u <t> x (Const64 [c])) && x.Op != OpConst64 && umagic64ok(c)
  -> (Sub64 x (Mul64 <t> (Div64u <t> x (Const64 <t> [c])) (Const64 <t> [c])))
   856  
   857  // floating point optimizations
   858  (Add32F x (Const32F [0])) -> x
   859  (Add32F (Const32F [0]) x) -> x
   860  (Add64F x (Const64F [0])) -> x
   861  (Add64F (Const64F [0]) x) -> x
   862  (Sub32F x (Const32F [0])) -> x
   863  (Sub64F x (Const64F [0])) -> x
   864  (Mul32F x (Const32F [f2i(1)])) -> x
   865  (Mul32F (Const32F [f2i(1)]) x) -> x
   866  (Mul64F x (Const64F [f2i(1)])) -> x
   867  (Mul64F (Const64F [f2i(1)]) x) -> x
   868  (Mul32F x (Const32F [f2i(-1)])) -> (Neg32F x)
   869  (Mul32F (Const32F [f2i(-1)]) x) -> (Neg32F x)
   870  (Mul64F x (Const64F [f2i(-1)])) -> (Neg64F x)
   871  (Mul64F (Const64F [f2i(-1)]) x) -> (Neg64F x)
   872  (Div32F x (Const32F [f2i(1)])) -> x
   873  (Div64F x (Const64F [f2i(1)])) -> x
   874  (Div32F x (Const32F [f2i(-1)])) -> (Neg32F x)
   875  (Div64F x (Const64F [f2i(-1)])) -> (Neg32F x)
   876  
   877  (Sqrt (Const64F [c])) -> (Const64F [f2i(math.Sqrt(i2f(c)))])