github.com/FenixAra/go@v0.0.0-20170127160404-96ea0918e670/src/cmd/compile/internal/ssa/gen/generic.rules

     1  // Copyright 2015 The Go Authors. All rights reserved.
     2  // Use of this source code is governed by a BSD-style
     3  // license that can be found in the LICENSE file.
     4  
     5  // Simplifications that apply to all backend architectures. As an example, this
     6  // Go source code
     7  //
     8  // y := 0 * x
     9  //
    10  // can be translated into y := 0 without losing any information, which saves a
    11  // pointless multiplication instruction. Other .rules files in this directory
    12  // (for example AMD64.rules) contain rules specific to the architecture in the
    13  // filename. The rules here apply to every architecture.
    14  //
    15  // The code for parsing this file lives in rulegen.go; this file generates
    16  // ssa/rewritegeneric.go.
    17  
    18  // values are specified using the following format:
    19  // (op <type> [auxint] {aux} arg0 arg1 ...)
    20  // the type, aux, and auxint fields are optional
    21  // on the matching side
    22  //  - the type, aux, and auxint fields must match if they are specified.
    23  //  - the first occurrence of a variable defines that variable.  Subsequent
    24  //    uses must match (be == to) the first use.
    25  //  - v is defined to be the value matched.
    26  //  - an additional conditional can be provided after the match pattern with "&&".
    27  // on the generated side
    28  //  - the type of the top-level expression is the same as the one on the left-hand side.
    29  //  - the type of any subexpressions must be specified explicitly (or
    30  //    be specified in the op's type field).
    31  //  - auxint will be 0 if not specified.
    32  //  - aux will be nil if not specified.
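        // As a worked illustration of this syntax (the rule itself also appears
        // further down in this file):
        //
        //   (Eq64 (Const64 <t> [c]) (Add64 (Const64 <t> [d]) x)) -> (Eq64 (Const64 <t> [c-d]) x)
        //
        // The match side binds the shared type to t and the two auxints to c and d;
        // the generated side reuses t and computes the new auxint c-d. A "&&"
        // condition, when present, is an ordinary Go boolean expression over the
        // matched variables (and v, the matched value).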
    33  
    34  // blocks are specified using the following format:
    35  // (kind controlvalue succ0 succ1 ...)
    36  // controlvalue must be "nil" or a value expression
    37  // succ* fields must be variables
    38  // For now, the generated successors must be a permutation of the matched successors.
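        // As an illustration, a block rule that appears later in this file:
        //
        //   (If (Not cond) yes no) -> (If cond no yes)
        //
        // matches an If block whose control value is a Not, and regenerates the block
        // with the inner value as control and the two successors swapped.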
    39  
    40  // constant folding
    41  (Trunc16to8  (Const16 [c]))  -> (Const8   [int64(int8(c))])
    42  (Trunc32to8  (Const32 [c]))  -> (Const8   [int64(int8(c))])
    43  (Trunc32to16 (Const32 [c]))  -> (Const16  [int64(int16(c))])
    44  (Trunc64to8  (Const64 [c]))  -> (Const8   [int64(int8(c))])
    45  (Trunc64to16 (Const64 [c]))  -> (Const16  [int64(int16(c))])
    46  (Trunc64to32 (Const64 [c]))  -> (Const32  [int64(int32(c))])
    47  (Cvt64Fto32F (Const64F [c])) -> (Const32F [f2i(float64(i2f32(c)))])
    48  (Cvt32Fto64F (Const32F [c])) -> (Const64F [c]) // c is already a 64 bit float
    49  
    50  (Trunc16to8  (ZeroExt8to16  x)) -> x
    51  (Trunc32to8  (ZeroExt8to32  x)) -> x
    52  (Trunc32to16 (ZeroExt8to32  x)) -> (ZeroExt8to16  x)
    53  (Trunc32to16 (ZeroExt16to32 x)) -> x
    54  (Trunc64to8  (ZeroExt8to64  x)) -> x
    55  (Trunc64to16 (ZeroExt8to64  x)) -> (ZeroExt8to16  x)
    56  (Trunc64to16 (ZeroExt16to64 x)) -> x
    57  (Trunc64to32 (ZeroExt8to64  x)) -> (ZeroExt8to32  x)
    58  (Trunc64to32 (ZeroExt16to64 x)) -> (ZeroExt16to32 x)
    59  (Trunc64to32 (ZeroExt32to64 x)) -> x
    60  (Trunc16to8  (SignExt8to16  x)) -> x
    61  (Trunc32to8  (SignExt8to32  x)) -> x
    62  (Trunc32to16 (SignExt8to32  x)) -> (SignExt8to16  x)
    63  (Trunc32to16 (SignExt16to32 x)) -> x
    64  (Trunc64to8  (SignExt8to64  x)) -> x
    65  (Trunc64to16 (SignExt8to64  x)) -> (SignExt8to16  x)
    66  (Trunc64to16 (SignExt16to64 x)) -> x
    67  (Trunc64to32 (SignExt8to64  x)) -> (SignExt8to32  x)
    68  (Trunc64to32 (SignExt16to64 x)) -> (SignExt16to32 x)
    69  (Trunc64to32 (SignExt32to64 x)) -> x
    70  
    71  // const negation is currently handled by frontend
    72  //(Neg8 (Const8 [c])) -> (Const8 [-c])
    73  //(Neg16 (Const16 [c])) -> (Const16 [-c])
    74  //(Neg32 (Const32 [c])) -> (Const32 [-c])
    75  //(Neg64 (Const64 [c])) -> (Const64 [-c])
    76  //(Neg32F (Const32F [c])) -> (Const32F [f2i(-i2f(c))])
    77  //(Neg64F (Const64F [c])) -> (Const64F [f2i(-i2f(c))])
    78  
    79  (Add8   (Const8 [c])   (Const8 [d]))   -> (Const8  [int64(int8(c+d))])
    80  (Add16  (Const16 [c])  (Const16 [d]))  -> (Const16 [int64(int16(c+d))])
    81  (Add32  (Const32 [c])  (Const32 [d]))  -> (Const32 [int64(int32(c+d))])
    82  (Add64  (Const64 [c])  (Const64 [d]))  -> (Const64 [c+d])
    83  (Add32F (Const32F [c]) (Const32F [d])) ->
    84          (Const32F [f2i(float64(i2f32(c) + i2f32(d)))]) // ensure we combine the operands with 32 bit precision
    85  (Add64F (Const64F [c]) (Const64F [d])) -> (Const64F [f2i(i2f(c) + i2f(d))])
    86  (AddPtr <t> x (Const64 [c])) -> (OffPtr <t> x [c])
    87  
    88  (Sub8   (Const8 [c]) (Const8 [d]))     -> (Const8 [int64(int8(c-d))])
    89  (Sub16  (Const16 [c]) (Const16 [d]))   -> (Const16 [int64(int16(c-d))])
    90  (Sub32  (Const32 [c]) (Const32 [d]))   -> (Const32 [int64(int32(c-d))])
    91  (Sub64  (Const64 [c]) (Const64 [d]))   -> (Const64 [c-d])
    92  (Sub32F (Const32F [c]) (Const32F [d])) ->
    93          (Const32F [f2i(float64(i2f32(c) - i2f32(d)))])
    94  (Sub64F (Const64F [c]) (Const64F [d])) -> (Const64F [f2i(i2f(c) - i2f(d))])
    95  
    96  (Mul8   (Const8 [c])   (Const8 [d]))   -> (Const8  [int64(int8(c*d))])
    97  (Mul16  (Const16 [c])  (Const16 [d]))  -> (Const16 [int64(int16(c*d))])
    98  (Mul32  (Const32 [c])  (Const32 [d]))  -> (Const32 [int64(int32(c*d))])
    99  (Mul64  (Const64 [c])  (Const64 [d]))  -> (Const64 [c*d])
   100  (Mul32F (Const32F [c]) (Const32F [d])) ->
   101          (Const32F [f2i(float64(i2f32(c) * i2f32(d)))])
   102  (Mul64F (Const64F [c]) (Const64F [d])) -> (Const64F [f2i(i2f(c) * i2f(d))])
   103  
   104  // Convert x * -1 to -x. The front-end catches some but not all of these.
   105  (Mul8  (Const8  [-1]) x) -> (Neg8  x)
   106  (Mul16 (Const16 [-1]) x) -> (Neg16 x)
   107  (Mul32 (Const32 [-1]) x) -> (Neg32 x)
   108  (Mul64 (Const64 [-1]) x) -> (Neg64 x)
   109  
   110  (Mod8  (Const8  [c]) (Const8  [d])) && d != 0 -> (Const8  [int64(int8(c % d))])
   111  (Mod16 (Const16 [c]) (Const16 [d])) && d != 0 -> (Const16 [int64(int16(c % d))])
   112  (Mod32 (Const32 [c]) (Const32 [d])) && d != 0 -> (Const32 [int64(int32(c % d))])
   113  (Mod64 (Const64 [c]) (Const64 [d])) && d != 0 -> (Const64 [c % d])
   114  
   115  (Mod8u  (Const8 [c])  (Const8  [d])) && d != 0 -> (Const8  [int64(uint8(c) % uint8(d))])
   116  (Mod16u (Const16 [c]) (Const16 [d])) && d != 0 -> (Const16 [int64(uint16(c) % uint16(d))])
   117  (Mod32u (Const32 [c]) (Const32 [d])) && d != 0 -> (Const32 [int64(uint32(c) % uint32(d))])
   118  (Mod64u (Const64 [c]) (Const64 [d])) && d != 0 -> (Const64 [int64(uint64(c) % uint64(d))])
   119  
   120  (Lsh64x64  (Const64 [c]) (Const64 [d])) -> (Const64 [c << uint64(d)])
   121  (Rsh64x64  (Const64 [c]) (Const64 [d])) -> (Const64 [c >> uint64(d)])
   122  (Rsh64Ux64 (Const64 [c]) (Const64 [d])) -> (Const64 [int64(uint64(c) >> uint64(d))])
   123  (Lsh32x64  (Const32 [c]) (Const64 [d])) -> (Const32 [int64(int32(c) << uint64(d))])
   124  (Rsh32x64  (Const32 [c]) (Const64 [d])) -> (Const32 [int64(int32(c) >> uint64(d))])
   125  (Rsh32Ux64 (Const32 [c]) (Const64 [d])) -> (Const32 [int64(int32(uint32(c) >> uint64(d)))])
   126  (Lsh16x64  (Const16 [c]) (Const64 [d])) -> (Const16 [int64(int16(c) << uint64(d))])
   127  (Rsh16x64  (Const16 [c]) (Const64 [d])) -> (Const16 [int64(int16(c) >> uint64(d))])
   128  (Rsh16Ux64 (Const16 [c]) (Const64 [d])) -> (Const16 [int64(int16(uint16(c) >> uint64(d)))])
   129  (Lsh8x64   (Const8  [c]) (Const64 [d])) -> (Const8  [int64(int8(c) << uint64(d))])
   130  (Rsh8x64   (Const8  [c]) (Const64 [d])) -> (Const8  [int64(int8(c) >> uint64(d))])
   131  (Rsh8Ux64  (Const8  [c]) (Const64 [d])) -> (Const8  [int64(int8(uint8(c) >> uint64(d)))])
   132  
   133  // Fold IsInBounds when the range of the index cannot exceed the limit.
   134  (IsInBounds (ZeroExt8to32  _) (Const32 [c])) && (1 << 8)  <= c -> (ConstBool [1])
   135  (IsInBounds (ZeroExt8to64  _) (Const64 [c])) && (1 << 8)  <= c -> (ConstBool [1])
   136  (IsInBounds (ZeroExt16to32 _) (Const32 [c])) && (1 << 16) <= c -> (ConstBool [1])
   137  (IsInBounds (ZeroExt16to64 _) (Const64 [c])) && (1 << 16) <= c -> (ConstBool [1])
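        // For example (illustrative Go source, in the spirit of the example at the top
        // of this file), with i of type uint8:
        //
        //   var a [256]byte
        //   _ = a[i]
        //
        // the zero-extended index is always below 256, so the bounds check
        // constant-folds to true and is removed.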
   138  (IsInBounds x x) -> (ConstBool [0])
   139  (IsInBounds (And32 (Const32 [c]) _) (Const32 [d])) && 0 <= c && c < d -> (ConstBool [1])
   140  (IsInBounds (And64 (Const64 [c]) _) (Const64 [d])) && 0 <= c && c < d -> (ConstBool [1])
   141  (IsInBounds (Const32 [c]) (Const32 [d])) -> (ConstBool [b2i(0 <= c && c < d)])
   142  (IsInBounds (Const64 [c]) (Const64 [d])) -> (ConstBool [b2i(0 <= c && c < d)])
   143  // (Mod32u x y) and (Mod64u x y) are always between 0 (inclusive) and y (exclusive).
   144  (IsInBounds (Mod32u _ y) y) -> (ConstBool [1])
   145  (IsInBounds (Mod64u _ y) y) -> (ConstBool [1])
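        // Similarly (illustrative source, assuming a 64-bit target and i of type uint),
        // an index reduced modulo the length, as in
        //
        //   j := i % uint(len(s))
        //   _ = s[j]
        //
        // yields (IsInBounds (Mod64u _ len) len), which the rules above can recognize
        // as always in bounds.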
   146  
   147  (IsSliceInBounds x x) -> (ConstBool [1])
   148  (IsSliceInBounds (And32 (Const32 [c]) _) (Const32 [d])) && 0 <= c && c <= d -> (ConstBool [1])
   149  (IsSliceInBounds (And64 (Const64 [c]) _) (Const64 [d])) && 0 <= c && c <= d -> (ConstBool [1])
   150  (IsSliceInBounds (Const32 [0]) _) -> (ConstBool [1])
   151  (IsSliceInBounds (Const64 [0]) _) -> (ConstBool [1])
   152  (IsSliceInBounds (Const32 [c]) (Const32 [d])) -> (ConstBool [b2i(0 <= c && c <= d)])
   153  (IsSliceInBounds (Const64 [c]) (Const64 [d])) -> (ConstBool [b2i(0 <= c && c <= d)])
   154  (IsSliceInBounds (SliceLen x) (SliceCap x)) -> (ConstBool [1])
   155  
   156  (Eq64 x x) -> (ConstBool [1])
   157  (Eq32 x x) -> (ConstBool [1])
   158  (Eq16 x x) -> (ConstBool [1])
   159  (Eq8  x x) -> (ConstBool [1])
   160  (EqB (ConstBool [c]) (ConstBool [d])) -> (ConstBool [b2i(c == d)])
   161  (EqB (ConstBool [0]) x) -> (Not x)
   162  (EqB (ConstBool [1]) x) -> x
   163  
   164  (Neq64 x x) -> (ConstBool [0])
   165  (Neq32 x x) -> (ConstBool [0])
   166  (Neq16 x x) -> (ConstBool [0])
   167  (Neq8  x x) -> (ConstBool [0])
   168  (NeqB (ConstBool [c]) (ConstBool [d])) -> (ConstBool [b2i(c != d)])
   169  (NeqB (ConstBool [0]) x) -> x
   170  (NeqB (ConstBool [1]) x) -> (Not x)
   171  
   172  (Eq64 (Const64 <t> [c]) (Add64 (Const64 <t> [d]) x)) -> (Eq64 (Const64 <t> [c-d]) x)
   173  (Eq32 (Const32 <t> [c]) (Add32 (Const32 <t> [d]) x)) -> (Eq32 (Const32 <t> [int64(int32(c-d))]) x)
   174  (Eq16 (Const16 <t> [c]) (Add16 (Const16 <t> [d]) x)) -> (Eq16 (Const16 <t> [int64(int16(c-d))]) x)
   175  (Eq8  (Const8  <t> [c]) (Add8  (Const8  <t> [d]) x)) -> (Eq8  (Const8 <t> [int64(int8(c-d))]) x)
   176  
   177  (Neq64 (Const64 <t> [c]) (Add64 (Const64 <t> [d]) x)) -> (Neq64 (Const64 <t> [c-d]) x)
   178  (Neq32 (Const32 <t> [c]) (Add32 (Const32 <t> [d]) x)) -> (Neq32 (Const32 <t> [int64(int32(c-d))]) x)
   179  (Neq16 (Const16 <t> [c]) (Add16 (Const16 <t> [d]) x)) -> (Neq16 (Const16 <t> [int64(int16(c-d))]) x)
   180  (Neq8  (Const8  <t> [c]) (Add8  (Const8  <t> [d]) x)) -> (Neq8 (Const8 <t> [int64(int8(c-d))]) x)
   181  
   182  // canonicalize: swap arguments for commutative operations when one argument is a constant.
   183  (Eq64 x (Const64 <t> [c])) && x.Op != OpConst64 -> (Eq64 (Const64 <t> [c]) x)
   184  (Eq32 x (Const32 <t> [c])) && x.Op != OpConst32 -> (Eq32 (Const32 <t> [c]) x)
   185  (Eq16 x (Const16 <t> [c])) && x.Op != OpConst16 -> (Eq16 (Const16 <t> [c]) x)
   186  (Eq8  x (Const8  <t> [c])) && x.Op != OpConst8  -> (Eq8  (Const8  <t> [c]) x)
   187  
   188  (Neq64 x (Const64 <t> [c])) && x.Op != OpConst64 -> (Neq64 (Const64 <t> [c]) x)
   189  (Neq32 x (Const32 <t> [c])) && x.Op != OpConst32 -> (Neq32 (Const32 <t> [c]) x)
   190  (Neq16 x (Const16 <t> [c])) && x.Op != OpConst16 -> (Neq16 (Const16 <t> [c]) x)
   191  (Neq8  x (Const8 <t>  [c])) && x.Op != OpConst8  -> (Neq8  (Const8  <t> [c]) x)
   192  
   193  // AddPtr is not canonicalized because nilcheck ptr checks the first argument to be non-nil.
   194  (Add64 x (Const64 <t> [c])) && x.Op != OpConst64 -> (Add64 (Const64 <t> [c]) x)
   195  (Add32 x (Const32 <t> [c])) && x.Op != OpConst32 -> (Add32 (Const32 <t> [c]) x)
   196  (Add16 x (Const16 <t> [c])) && x.Op != OpConst16 -> (Add16 (Const16 <t> [c]) x)
   197  (Add8  x (Const8  <t> [c])) && x.Op != OpConst8  -> (Add8  (Const8  <t> [c]) x)
   198  
   199  (Mul64 x (Const64 <t> [c])) && x.Op != OpConst64 -> (Mul64 (Const64 <t> [c]) x)
   200  (Mul32 x (Const32 <t> [c])) && x.Op != OpConst32 -> (Mul32 (Const32 <t> [c]) x)
   201  (Mul16 x (Const16 <t> [c])) && x.Op != OpConst16 -> (Mul16 (Const16 <t> [c]) x)
   202  (Mul8  x (Const8  <t> [c])) && x.Op != OpConst8  -> (Mul8  (Const8  <t> [c]) x)
   203  
   204  (Sub64 x (Const64 <t> [c])) && x.Op != OpConst64 -> (Add64 (Const64 <t> [-c]) x)
   205  (Sub32 x (Const32 <t> [c])) && x.Op != OpConst32 -> (Add32 (Const32 <t> [int64(int32(-c))]) x)
   206  (Sub16 x (Const16 <t> [c])) && x.Op != OpConst16 -> (Add16 (Const16 <t> [int64(int16(-c))]) x)
   207  (Sub8  x (Const8  <t> [c])) && x.Op != OpConst8  -> (Add8  (Const8  <t> [int64(int8(-c))]) x)
   208  
   209  (And64 x (Const64 <t> [c])) && x.Op != OpConst64 -> (And64 (Const64 <t> [c]) x)
   210  (And32 x (Const32 <t> [c])) && x.Op != OpConst32 -> (And32 (Const32 <t> [c]) x)
   211  (And16 x (Const16 <t> [c])) && x.Op != OpConst16 -> (And16 (Const16 <t> [c]) x)
   212  (And8  x (Const8  <t> [c])) && x.Op != OpConst8  -> (And8  (Const8  <t> [c]) x)
   213  
   214  (Or64 x (Const64 <t> [c])) && x.Op != OpConst64 -> (Or64 (Const64 <t> [c]) x)
   215  (Or32 x (Const32 <t> [c])) && x.Op != OpConst32 -> (Or32 (Const32 <t> [c]) x)
   216  (Or16 x (Const16 <t> [c])) && x.Op != OpConst16 -> (Or16 (Const16 <t> [c]) x)
   217  (Or8  x (Const8  <t> [c])) && x.Op != OpConst8  -> (Or8  (Const8  <t> [c]) x)
   218  
   219  (Xor64 x (Const64 <t> [c])) && x.Op != OpConst64 -> (Xor64 (Const64 <t> [c]) x)
   220  (Xor32 x (Const32 <t> [c])) && x.Op != OpConst32 -> (Xor32 (Const32 <t> [c]) x)
   221  (Xor16 x (Const16 <t> [c])) && x.Op != OpConst16 -> (Xor16 (Const16 <t> [c]) x)
   222  (Xor8  x (Const8  <t> [c])) && x.Op != OpConst8  -> (Xor8  (Const8  <t> [c]) x)
   223  
   224  // fold negation into comparison operators
   225  (Not (Eq64 x y)) -> (Neq64 x y)
   226  (Not (Eq32 x y)) -> (Neq32 x y)
   227  (Not (Eq16 x y)) -> (Neq16 x y)
   228  (Not (Eq8  x y)) -> (Neq8  x y)
   229  (Not (EqB  x y)) -> (NeqB  x y)
   230  
   231  (Not (Neq64 x y)) -> (Eq64 x y)
   232  (Not (Neq32 x y)) -> (Eq32 x y)
   233  (Not (Neq16 x y)) -> (Eq16 x y)
   234  (Not (Neq8  x y)) -> (Eq8  x y)
   235  (Not (NeqB  x y)) -> (EqB  x y)
   236  
   237  (Not (Greater64 x y)) -> (Leq64 x y)
   238  (Not (Greater32 x y)) -> (Leq32 x y)
   239  (Not (Greater16 x y)) -> (Leq16 x y)
   240  (Not (Greater8  x y)) -> (Leq8  x y)
   241  
   242  (Not (Greater64U x y)) -> (Leq64U x y)
   243  (Not (Greater32U x y)) -> (Leq32U x y)
   244  (Not (Greater16U x y)) -> (Leq16U x y)
   245  (Not (Greater8U  x y)) -> (Leq8U  x y)
   246  
   247  (Not (Geq64 x y)) -> (Less64 x y)
   248  (Not (Geq32 x y)) -> (Less32 x y)
   249  (Not (Geq16 x y)) -> (Less16 x y)
   250  (Not (Geq8  x y)) -> (Less8  x y)
   251  
   252  (Not (Geq64U x y)) -> (Less64U x y)
   253  (Not (Geq32U x y)) -> (Less32U x y)
   254  (Not (Geq16U x y)) -> (Less16U x y)
   255  (Not (Geq8U  x y)) -> (Less8U  x y)
   256  
   257  (Not (Less64 x y)) -> (Geq64 x y)
   258  (Not (Less32 x y)) -> (Geq32 x y)
   259  (Not (Less16 x y)) -> (Geq16 x y)
   260  (Not (Less8  x y)) -> (Geq8  x y)
   261  
   262  (Not (Less64U x y)) -> (Geq64U x y)
   263  (Not (Less32U x y)) -> (Geq32U x y)
   264  (Not (Less16U x y)) -> (Geq16U x y)
   265  (Not (Less8U  x y)) -> (Geq8U  x y)
   266  
   267  (Not (Leq64 x y)) -> (Greater64 x y)
   268  (Not (Leq32 x y)) -> (Greater32 x y)
   269  (Not (Leq16 x y)) -> (Greater16 x y)
   270  (Not (Leq8  x y)) -> (Greater8 x y)
   271  
   272  (Not (Leq64U x y)) -> (Greater64U x y)
   273  (Not (Leq32U x y)) -> (Greater32U x y)
   274  (Not (Leq16U x y)) -> (Greater16U x y)
   275  (Not (Leq8U  x y)) -> (Greater8U  x y)
   276  
   277  // Distribute multiplication c * (d+x) -> c*d + c*x. Useful for:
   278  // a[i].b = ...; a[i+1].b = ...
   279  (Mul64 (Const64 <t> [c]) (Add64 <t> (Const64 <t> [d]) x)) ->
   280    (Add64 (Const64 <t> [c*d]) (Mul64 <t> (Const64 <t> [c]) x))
   281  (Mul32 (Const32 <t> [c]) (Add32 <t> (Const32 <t> [d]) x)) ->
   282    (Add32 (Const32 <t> [int64(int32(c*d))]) (Mul32 <t> (Const32 <t> [c]) x))
   283  
   284  // rewrite shifts by 8/16/32 bit constants into shifts by 64 bit constants
   285  // to reduce the number of other rewrite rules needed for constant shift counts
   286  (Lsh64x32  <t> x (Const32 [c])) -> (Lsh64x64  x (Const64 <t> [int64(uint32(c))]))
   287  (Lsh64x16  <t> x (Const16 [c])) -> (Lsh64x64  x (Const64 <t> [int64(uint16(c))]))
   288  (Lsh64x8   <t> x (Const8  [c])) -> (Lsh64x64  x (Const64 <t> [int64(uint8(c))]))
   289  (Rsh64x32  <t> x (Const32 [c])) -> (Rsh64x64  x (Const64 <t> [int64(uint32(c))]))
   290  (Rsh64x16  <t> x (Const16 [c])) -> (Rsh64x64  x (Const64 <t> [int64(uint16(c))]))
   291  (Rsh64x8   <t> x (Const8  [c])) -> (Rsh64x64  x (Const64 <t> [int64(uint8(c))]))
   292  (Rsh64Ux32 <t> x (Const32 [c])) -> (Rsh64Ux64 x (Const64 <t> [int64(uint32(c))]))
   293  (Rsh64Ux16 <t> x (Const16 [c])) -> (Rsh64Ux64 x (Const64 <t> [int64(uint16(c))]))
   294  (Rsh64Ux8  <t> x (Const8  [c])) -> (Rsh64Ux64 x (Const64 <t> [int64(uint8(c))]))
   295  
   296  (Lsh32x32  <t> x (Const32 [c])) -> (Lsh32x64  x (Const64 <t> [int64(uint32(c))]))
   297  (Lsh32x16  <t> x (Const16 [c])) -> (Lsh32x64  x (Const64 <t> [int64(uint16(c))]))
   298  (Lsh32x8   <t> x (Const8  [c])) -> (Lsh32x64  x (Const64 <t> [int64(uint8(c))]))
   299  (Rsh32x32  <t> x (Const32 [c])) -> (Rsh32x64  x (Const64 <t> [int64(uint32(c))]))
   300  (Rsh32x16  <t> x (Const16 [c])) -> (Rsh32x64  x (Const64 <t> [int64(uint16(c))]))
   301  (Rsh32x8   <t> x (Const8  [c])) -> (Rsh32x64  x (Const64 <t> [int64(uint8(c))]))
   302  (Rsh32Ux32 <t> x (Const32 [c])) -> (Rsh32Ux64 x (Const64 <t> [int64(uint32(c))]))
   303  (Rsh32Ux16 <t> x (Const16 [c])) -> (Rsh32Ux64 x (Const64 <t> [int64(uint16(c))]))
   304  (Rsh32Ux8  <t> x (Const8  [c])) -> (Rsh32Ux64 x (Const64 <t> [int64(uint8(c))]))
   305  
   306  (Lsh16x32  <t> x (Const32 [c])) -> (Lsh16x64  x (Const64 <t> [int64(uint32(c))]))
   307  (Lsh16x16  <t> x (Const16 [c])) -> (Lsh16x64  x (Const64 <t> [int64(uint16(c))]))
   308  (Lsh16x8   <t> x (Const8  [c])) -> (Lsh16x64  x (Const64 <t> [int64(uint8(c))]))
   309  (Rsh16x32  <t> x (Const32 [c])) -> (Rsh16x64  x (Const64 <t> [int64(uint32(c))]))
   310  (Rsh16x16  <t> x (Const16 [c])) -> (Rsh16x64  x (Const64 <t> [int64(uint16(c))]))
   311  (Rsh16x8   <t> x (Const8  [c])) -> (Rsh16x64  x (Const64 <t> [int64(uint8(c))]))
   312  (Rsh16Ux32 <t> x (Const32 [c])) -> (Rsh16Ux64 x (Const64 <t> [int64(uint32(c))]))
   313  (Rsh16Ux16 <t> x (Const16 [c])) -> (Rsh16Ux64 x (Const64 <t> [int64(uint16(c))]))
   314  (Rsh16Ux8  <t> x (Const8  [c])) -> (Rsh16Ux64 x (Const64 <t> [int64(uint8(c))]))
   315  
   316  (Lsh8x32  <t> x (Const32 [c])) -> (Lsh8x64  x (Const64 <t> [int64(uint32(c))]))
   317  (Lsh8x16  <t> x (Const16 [c])) -> (Lsh8x64  x (Const64 <t> [int64(uint16(c))]))
   318  (Lsh8x8   <t> x (Const8  [c])) -> (Lsh8x64  x (Const64 <t> [int64(uint8(c))]))
   319  (Rsh8x32  <t> x (Const32 [c])) -> (Rsh8x64  x (Const64 <t> [int64(uint32(c))]))
   320  (Rsh8x16  <t> x (Const16 [c])) -> (Rsh8x64  x (Const64 <t> [int64(uint16(c))]))
   321  (Rsh8x8   <t> x (Const8  [c])) -> (Rsh8x64  x (Const64 <t> [int64(uint8(c))]))
   322  (Rsh8Ux32 <t> x (Const32 [c])) -> (Rsh8Ux64 x (Const64 <t> [int64(uint32(c))]))
   323  (Rsh8Ux16 <t> x (Const16 [c])) -> (Rsh8Ux64 x (Const64 <t> [int64(uint16(c))]))
   324  (Rsh8Ux8  <t> x (Const8  [c])) -> (Rsh8Ux64 x (Const64 <t> [int64(uint8(c))]))
   325  
   326  // shifts by zero
   327  (Lsh64x64  x (Const64 [0])) -> x
   328  (Rsh64x64  x (Const64 [0])) -> x
   329  (Rsh64Ux64 x (Const64 [0])) -> x
   330  (Lsh32x64  x (Const64 [0])) -> x
   331  (Rsh32x64  x (Const64 [0])) -> x
   332  (Rsh32Ux64 x (Const64 [0])) -> x
   333  (Lsh16x64  x (Const64 [0])) -> x
   334  (Rsh16x64  x (Const64 [0])) -> x
   335  (Rsh16Ux64 x (Const64 [0])) -> x
   336  (Lsh8x64   x (Const64 [0])) -> x
   337  (Rsh8x64   x (Const64 [0])) -> x
   338  (Rsh8Ux64  x (Const64 [0])) -> x
   339  
   340  // zero shifted.
   341  (Lsh64x64  (Const64 [0]) _) -> (Const64 [0])
   342  (Lsh64x32  (Const64 [0]) _) -> (Const64 [0])
   343  (Lsh64x16  (Const64 [0]) _) -> (Const64 [0])
   344  (Lsh64x8  (Const64 [0]) _) -> (Const64 [0])
   345  (Rsh64x64  (Const64 [0]) _) -> (Const64 [0])
   346  (Rsh64x32  (Const64 [0]) _) -> (Const64 [0])
   347  (Rsh64x16  (Const64 [0]) _) -> (Const64 [0])
   348  (Rsh64x8  (Const64 [0]) _) -> (Const64 [0])
   349  (Rsh64Ux64 (Const64 [0]) _) -> (Const64 [0])
   350  (Rsh64Ux32 (Const64 [0]) _) -> (Const64 [0])
   351  (Rsh64Ux16 (Const64 [0]) _) -> (Const64 [0])
   352  (Rsh64Ux8 (Const64 [0]) _) -> (Const64 [0])
   353  (Lsh32x64  (Const32 [0]) _) -> (Const32 [0])
   354  (Lsh32x32  (Const32 [0]) _) -> (Const32 [0])
   355  (Lsh32x16  (Const32 [0]) _) -> (Const32 [0])
   356  (Lsh32x8  (Const32 [0]) _) -> (Const32 [0])
   357  (Rsh32x64  (Const32 [0]) _) -> (Const32 [0])
   358  (Rsh32x32  (Const32 [0]) _) -> (Const32 [0])
   359  (Rsh32x16  (Const32 [0]) _) -> (Const32 [0])
   360  (Rsh32x8  (Const32 [0]) _) -> (Const32 [0])
   361  (Rsh32Ux64 (Const32 [0]) _) -> (Const32 [0])
   362  (Rsh32Ux32 (Const32 [0]) _) -> (Const32 [0])
   363  (Rsh32Ux16 (Const32 [0]) _) -> (Const32 [0])
   364  (Rsh32Ux8 (Const32 [0]) _) -> (Const32 [0])
   365  (Lsh16x64  (Const16 [0]) _) -> (Const16 [0])
   366  (Lsh16x32  (Const16 [0]) _) -> (Const16 [0])
   367  (Lsh16x16  (Const16 [0]) _) -> (Const16 [0])
   368  (Lsh16x8  (Const16 [0]) _) -> (Const16 [0])
   369  (Rsh16x64  (Const16 [0]) _) -> (Const16 [0])
   370  (Rsh16x32  (Const16 [0]) _) -> (Const16 [0])
   371  (Rsh16x16  (Const16 [0]) _) -> (Const16 [0])
   372  (Rsh16x8  (Const16 [0]) _) -> (Const16 [0])
   373  (Rsh16Ux64 (Const16 [0]) _) -> (Const16 [0])
   374  (Rsh16Ux32 (Const16 [0]) _) -> (Const16 [0])
   375  (Rsh16Ux16 (Const16 [0]) _) -> (Const16 [0])
   376  (Rsh16Ux8 (Const16 [0]) _) -> (Const16 [0])
   377  (Lsh8x64   (Const8 [0]) _) -> (Const8  [0])
   378  (Lsh8x32   (Const8 [0]) _) -> (Const8  [0])
   379  (Lsh8x16   (Const8 [0]) _) -> (Const8  [0])
   380  (Lsh8x8   (Const8 [0]) _) -> (Const8  [0])
   381  (Rsh8x64   (Const8 [0]) _) -> (Const8  [0])
   382  (Rsh8x32   (Const8 [0]) _) -> (Const8  [0])
   383  (Rsh8x16   (Const8 [0]) _) -> (Const8  [0])
   384  (Rsh8x8   (Const8 [0]) _) -> (Const8  [0])
   385  (Rsh8Ux64  (Const8 [0]) _) -> (Const8  [0])
   386  (Rsh8Ux32  (Const8 [0]) _) -> (Const8  [0])
   387  (Rsh8Ux16  (Const8 [0]) _) -> (Const8  [0])
   388  (Rsh8Ux8  (Const8 [0]) _) -> (Const8  [0])
   389  
   390  // large left shifts of all values, and large right shifts of unsigned values, yield zero
   391  (Lsh64x64  _ (Const64 [c])) && uint64(c) >= 64 -> (Const64 [0])
   392  (Rsh64Ux64 _ (Const64 [c])) && uint64(c) >= 64 -> (Const64 [0])
   393  (Lsh32x64  _ (Const64 [c])) && uint64(c) >= 32 -> (Const32 [0])
   394  (Rsh32Ux64 _ (Const64 [c])) && uint64(c) >= 32 -> (Const32 [0])
   395  (Lsh16x64  _ (Const64 [c])) && uint64(c) >= 16 -> (Const16 [0])
   396  (Rsh16Ux64 _ (Const64 [c])) && uint64(c) >= 16 -> (Const16 [0])
   397  (Lsh8x64   _ (Const64 [c])) && uint64(c) >= 8  -> (Const8  [0])
   398  (Rsh8Ux64  _ (Const64 [c])) && uint64(c) >= 8  -> (Const8  [0])
   399  
   400  // combine const shifts
   401  (Lsh64x64 <t> (Lsh64x64 x (Const64 [c])) (Const64 [d])) && !uaddOvf(c,d) -> (Lsh64x64 x (Const64 <t> [c+d]))
   402  (Lsh32x64 <t> (Lsh32x64 x (Const64 [c])) (Const64 [d])) && !uaddOvf(c,d) -> (Lsh32x64 x (Const64 <t> [c+d]))
   403  (Lsh16x64 <t> (Lsh16x64 x (Const64 [c])) (Const64 [d])) && !uaddOvf(c,d) -> (Lsh16x64 x (Const64 <t> [c+d]))
   404  (Lsh8x64  <t> (Lsh8x64  x (Const64 [c])) (Const64 [d])) && !uaddOvf(c,d) -> (Lsh8x64  x (Const64 <t> [c+d]))
   405  
   406  (Rsh64x64 <t> (Rsh64x64 x (Const64 [c])) (Const64 [d])) && !uaddOvf(c,d) -> (Rsh64x64 x (Const64 <t> [c+d]))
   407  (Rsh32x64 <t> (Rsh32x64 x (Const64 [c])) (Const64 [d])) && !uaddOvf(c,d) -> (Rsh32x64 x (Const64 <t> [c+d]))
   408  (Rsh16x64 <t> (Rsh16x64 x (Const64 [c])) (Const64 [d])) && !uaddOvf(c,d) -> (Rsh16x64 x (Const64 <t> [c+d]))
   409  (Rsh8x64  <t> (Rsh8x64  x (Const64 [c])) (Const64 [d])) && !uaddOvf(c,d) -> (Rsh8x64  x (Const64 <t> [c+d]))
   410  
   411  (Rsh64Ux64 <t> (Rsh64Ux64 x (Const64 [c])) (Const64 [d])) && !uaddOvf(c,d) -> (Rsh64Ux64 x (Const64 <t> [c+d]))
   412  (Rsh32Ux64 <t> (Rsh32Ux64 x (Const64 [c])) (Const64 [d])) && !uaddOvf(c,d) -> (Rsh32Ux64 x (Const64 <t> [c+d]))
   413  (Rsh16Ux64 <t> (Rsh16Ux64 x (Const64 [c])) (Const64 [d])) && !uaddOvf(c,d) -> (Rsh16Ux64 x (Const64 <t> [c+d]))
   414  (Rsh8Ux64  <t> (Rsh8Ux64  x (Const64 [c])) (Const64 [d])) && !uaddOvf(c,d) -> (Rsh8Ux64  x (Const64 <t> [c+d]))
   415  
   416  // ((x >> c1) << c2) >> c3
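        // A sketch of why the combination below is sound: with c1 >= c2, the inner
        // (x >> c1) << c2 equals x >> (c1-c2) with its low c2 bits cleared; the outer
        // unsigned shift right by c3 >= c2 discards exactly those cleared bits, so the
        // whole expression is x >> (c1-c2+c3). The !uaddOvf condition guards the
        // addition in the combined shift count.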
   417  (Rsh64Ux64 (Lsh64x64 (Rsh64Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
   418    && uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
   419    -> (Rsh64Ux64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3]))
   420  (Rsh32Ux64 (Lsh32x64 (Rsh32Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
   421    && uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
   422    -> (Rsh32Ux64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3]))
   423  (Rsh16Ux64 (Lsh16x64 (Rsh16Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
   424    && uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
   425    -> (Rsh16Ux64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3]))
   426  (Rsh8Ux64 (Lsh8x64 (Rsh8Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
   427    && uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
   428    -> (Rsh8Ux64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3]))
   429  
   430  // ((x << c1) >> c2) << c3
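        // Symmetrically: with c1 >= c2, (x << c1) >> c2 equals x << (c1-c2) with its
        // high c2 bits cleared, and the outer shift left by c3 >= c2 discards exactly
        // those bits, leaving x << (c1-c2+c3).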
   431  (Lsh64x64 (Rsh64Ux64 (Lsh64x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
   432    && uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
   433    -> (Lsh64x64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3]))
   434  (Lsh32x64 (Rsh32Ux64 (Lsh32x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
   435    && uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
   436    -> (Lsh32x64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3]))
   437  (Lsh16x64 (Rsh16Ux64 (Lsh16x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
   438    && uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
   439    -> (Lsh16x64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3]))
   440  (Lsh8x64 (Rsh8Ux64 (Lsh8x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
   441    && uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
   442    -> (Lsh8x64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3]))
   443  
   444  // constant comparisons
   445  (Eq64 (Const64 [c]) (Const64 [d])) -> (ConstBool [b2i(c == d)])
   446  (Eq32 (Const32 [c]) (Const32 [d])) -> (ConstBool [b2i(c == d)])
   447  (Eq16 (Const16 [c]) (Const16 [d])) -> (ConstBool [b2i(c == d)])
   448  (Eq8  (Const8  [c]) (Const8  [d])) -> (ConstBool [b2i(c == d)])
   449  
   450  (Neq64 (Const64 [c]) (Const64 [d])) -> (ConstBool [b2i(c != d)])
   451  (Neq32 (Const32 [c]) (Const32 [d])) -> (ConstBool [b2i(c != d)])
   452  (Neq16 (Const16 [c]) (Const16 [d])) -> (ConstBool [b2i(c != d)])
   453  (Neq8  (Const8  [c]) (Const8  [d])) -> (ConstBool [b2i(c != d)])
   454  
   455  (Greater64 (Const64 [c]) (Const64 [d])) -> (ConstBool [b2i(c > d)])
   456  (Greater32 (Const32 [c]) (Const32 [d])) -> (ConstBool [b2i(c > d)])
   457  (Greater16 (Const16 [c]) (Const16 [d])) -> (ConstBool [b2i(c > d)])
   458  (Greater8  (Const8  [c]) (Const8  [d])) -> (ConstBool [b2i(c > d)])
   459  
   460  (Greater64U (Const64 [c]) (Const64 [d])) -> (ConstBool [b2i(uint64(c) > uint64(d))])
   461  (Greater32U (Const32 [c]) (Const32 [d])) -> (ConstBool [b2i(uint32(c) > uint32(d))])
   462  (Greater16U (Const16 [c]) (Const16 [d])) -> (ConstBool [b2i(uint16(c) > uint16(d))])
   463  (Greater8U  (Const8  [c]) (Const8  [d])) -> (ConstBool [b2i(uint8(c)  > uint8(d))])
   464  
   465  (Geq64 (Const64 [c]) (Const64 [d])) -> (ConstBool [b2i(c >= d)])
   466  (Geq32 (Const32 [c]) (Const32 [d])) -> (ConstBool [b2i(c >= d)])
   467  (Geq16 (Const16 [c]) (Const16 [d])) -> (ConstBool [b2i(c >= d)])
   468  (Geq8  (Const8  [c]) (Const8  [d])) -> (ConstBool [b2i(c >= d)])
   469  
   470  (Geq64U (Const64 [c]) (Const64 [d])) -> (ConstBool [b2i(uint64(c) >= uint64(d))])
   471  (Geq32U (Const32 [c]) (Const32 [d])) -> (ConstBool [b2i(uint32(c) >= uint32(d))])
   472  (Geq16U (Const16 [c]) (Const16 [d])) -> (ConstBool [b2i(uint16(c) >= uint16(d))])
   473  (Geq8U  (Const8  [c]) (Const8  [d])) -> (ConstBool [b2i(uint8(c)  >= uint8(d))])
   474  
   475  (Less64 (Const64 [c]) (Const64 [d])) -> (ConstBool [b2i(c < d)])
   476  (Less32 (Const32 [c]) (Const32 [d])) -> (ConstBool [b2i(c < d)])
   477  (Less16 (Const16 [c]) (Const16 [d])) -> (ConstBool [b2i(c < d)])
   478  (Less8  (Const8  [c]) (Const8  [d])) -> (ConstBool [b2i(c < d)])
   479  
   480  (Less64U (Const64 [c]) (Const64 [d])) -> (ConstBool [b2i(uint64(c) < uint64(d))])
   481  (Less32U (Const32 [c]) (Const32 [d])) -> (ConstBool [b2i(uint32(c) < uint32(d))])
   482  (Less16U (Const16 [c]) (Const16 [d])) -> (ConstBool [b2i(uint16(c) < uint16(d))])
   483  (Less8U  (Const8  [c]) (Const8  [d])) -> (ConstBool [b2i(uint8(c)  < uint8(d))])
   484  
   485  (Leq64 (Const64 [c]) (Const64 [d])) -> (ConstBool [b2i(c <= d)])
   486  (Leq32 (Const32 [c]) (Const32 [d])) -> (ConstBool [b2i(c <= d)])
   487  (Leq16 (Const16 [c]) (Const16 [d])) -> (ConstBool [b2i(c <= d)])
   488  (Leq8  (Const8  [c]) (Const8  [d])) -> (ConstBool [b2i(c <= d)])
   489  
   490  (Leq64U (Const64 [c]) (Const64 [d])) -> (ConstBool [b2i(uint64(c) <= uint64(d))])
   491  (Leq32U (Const32 [c]) (Const32 [d])) -> (ConstBool [b2i(uint32(c) <= uint32(d))])
   492  (Leq16U (Const16 [c]) (Const16 [d])) -> (ConstBool [b2i(uint16(c) <= uint16(d))])
   493  (Leq8U  (Const8  [c]) (Const8  [d])) -> (ConstBool [b2i(uint8(c)  <= uint8(d))])
   494  
   495  // simplifications
   496  (Or64 x x) -> x
   497  (Or32 x x) -> x
   498  (Or16 x x) -> x
   499  (Or8  x x) -> x
   500  (Or64 (Const64 [0]) x) -> x
   501  (Or32 (Const32 [0]) x) -> x
   502  (Or16 (Const16 [0]) x) -> x
   503  (Or8  (Const8  [0]) x) -> x
   504  (Or64 (Const64 [-1]) _) -> (Const64 [-1])
   505  (Or32 (Const32 [-1]) _) -> (Const32 [-1])
   506  (Or16 (Const16 [-1]) _) -> (Const16 [-1])
   507  (Or8  (Const8  [-1]) _) -> (Const8  [-1])
   508  (And64 x x) -> x
   509  (And32 x x) -> x
   510  (And16 x x) -> x
   511  (And8  x x) -> x
   512  (And64 (Const64 [-1]) x) -> x
   513  (And32 (Const32 [-1]) x) -> x
   514  (And16 (Const16 [-1]) x) -> x
   515  (And8  (Const8  [-1]) x) -> x
   516  (And64 (Const64 [0]) _) -> (Const64 [0])
   517  (And32 (Const32 [0]) _) -> (Const32 [0])
   518  (And16 (Const16 [0]) _) -> (Const16 [0])
   519  (And8  (Const8  [0]) _) -> (Const8  [0])
   520  (Xor64 x x) -> (Const64 [0])
   521  (Xor32 x x) -> (Const32 [0])
   522  (Xor16 x x) -> (Const16 [0])
   523  (Xor8  x x) -> (Const8  [0])
   524  (Xor64 (Const64 [0]) x) -> x
   525  (Xor32 (Const32 [0]) x) -> x
   526  (Xor16 (Const16 [0]) x) -> x
   527  (Xor8  (Const8  [0]) x) -> x
   528  (Add64 (Const64 [0]) x) -> x
   529  (Add32 (Const32 [0]) x) -> x
   530  (Add16 (Const16 [0]) x) -> x
   531  (Add8  (Const8  [0]) x) -> x
   532  (Sub64 x x) -> (Const64 [0])
   533  (Sub32 x x) -> (Const32 [0])
   534  (Sub16 x x) -> (Const16 [0])
   535  (Sub8  x x) -> (Const8  [0])
   536  (Mul64 (Const64 [0]) _) -> (Const64 [0])
   537  (Mul32 (Const32 [0]) _) -> (Const32 [0])
   538  (Mul16 (Const16 [0]) _) -> (Const16 [0])
   539  (Mul8  (Const8  [0]) _) -> (Const8  [0])
   540  (Com8  (Com8  x)) -> x
   541  (Com16 (Com16 x)) -> x
   542  (Com32 (Com32 x)) -> x
   543  (Com64 (Com64 x)) -> x
   544  (Neg8  (Sub8  x y)) -> (Sub8  y x)
   545  (Neg16 (Sub16 x y)) -> (Sub16 y x)
   546  (Neg32 (Sub32 x y)) -> (Sub32 y x)
   547  (Neg64 (Sub64 x y)) -> (Sub64 y x)
   548  
   549  (And64 x (And64 x y)) -> (And64 x y)
   550  (And32 x (And32 x y)) -> (And32 x y)
   551  (And16 x (And16 x y)) -> (And16 x y)
   552  (And8  x (And8  x y)) -> (And8  x y)
   553  (And64 x (And64 y x)) -> (And64 x y)
   554  (And32 x (And32 y x)) -> (And32 x y)
   555  (And16 x (And16 y x)) -> (And16 x y)
   556  (And8  x (And8  y x)) -> (And8  x y)
   557  (And64 (And64 x y) x) -> (And64 x y)
   558  (And32 (And32 x y) x) -> (And32 x y)
   559  (And16 (And16 x y) x) -> (And16 x y)
   560  (And8  (And8  x y) x) -> (And8  x y)
   561  (And64 (And64 x y) y) -> (And64 x y)
   562  (And32 (And32 x y) y) -> (And32 x y)
   563  (And16 (And16 x y) y) -> (And16 x y)
   564  (And8  (And8  x y) y) -> (And8  x y)
   565  (Or64 x (Or64 x y)) -> (Or64 x y)
   566  (Or32 x (Or32 x y)) -> (Or32 x y)
   567  (Or16 x (Or16 x y)) -> (Or16 x y)
   568  (Or8  x (Or8  x y)) -> (Or8  x y)
   569  (Or64 x (Or64 y x)) -> (Or64 x y)
   570  (Or32 x (Or32 y x)) -> (Or32 x y)
   571  (Or16 x (Or16 y x)) -> (Or16 x y)
   572  (Or8  x (Or8  y x)) -> (Or8  x y)
   573  (Or64 (Or64 x y) x) -> (Or64 x y)
   574  (Or32 (Or32 x y) x) -> (Or32 x y)
   575  (Or16 (Or16 x y) x) -> (Or16 x y)
   576  (Or8  (Or8  x y) x) -> (Or8  x y)
   577  (Or64 (Or64 x y) y) -> (Or64 x y)
   578  (Or32 (Or32 x y) y) -> (Or32 x y)
   579  (Or16 (Or16 x y) y) -> (Or16 x y)
   580  (Or8  (Or8  x y) y) -> (Or8  x y)
   581  (Xor64 x (Xor64 x y)) -> y
   582  (Xor32 x (Xor32 x y)) -> y
   583  (Xor16 x (Xor16 x y)) -> y
   584  (Xor8  x (Xor8  x y)) -> y
   585  (Xor64 x (Xor64 y x)) -> y
   586  (Xor32 x (Xor32 y x)) -> y
   587  (Xor16 x (Xor16 y x)) -> y
   588  (Xor8  x (Xor8  y x)) -> y
   589  (Xor64 (Xor64 x y) x) -> y
   590  (Xor32 (Xor32 x y) x) -> y
   591  (Xor16 (Xor16 x y) x) -> y
   592  (Xor8  (Xor8  x y) x) -> y
   593  (Xor64 (Xor64 x y) y) -> x
   594  (Xor32 (Xor32 x y) y) -> x
   595  (Xor16 (Xor16 x y) y) -> x
   596  (Xor8  (Xor8  x y) y) -> x
   597  
   598  (Trunc64to8  (And64 (Const64 [y]) x)) && y&0xFF == 0xFF -> (Trunc64to8 x)
   599  (Trunc64to16 (And64 (Const64 [y]) x)) && y&0xFFFF == 0xFFFF -> (Trunc64to16 x)
   600  (Trunc64to32 (And64 (Const64 [y]) x)) && y&0xFFFFFFFF == 0xFFFFFFFF -> (Trunc64to32 x)
   601  (Trunc32to8  (And32 (Const32 [y]) x)) && y&0xFF == 0xFF -> (Trunc32to8 x)
   602  (Trunc32to16 (And32 (Const32 [y]) x)) && y&0xFFFF == 0xFFFF -> (Trunc32to16 x)
   603  (Trunc16to8  (And16 (Const16 [y]) x)) && y&0xFF == 0xFF -> (Trunc16to8 x)
   604  
   605  (ZeroExt8to64  (Trunc64to8  x:(Rsh64Ux64 _ (Const64 [s])))) && s >= 56 -> x
   606  (ZeroExt16to64 (Trunc64to16 x:(Rsh64Ux64 _ (Const64 [s])))) && s >= 48 -> x
   607  (ZeroExt32to64 (Trunc64to32 x:(Rsh64Ux64 _ (Const64 [s])))) && s >= 32 -> x
   608  (ZeroExt8to32  (Trunc32to8  x:(Rsh32Ux64 _ (Const64 [s])))) && s >= 24 -> x
   609  (ZeroExt16to32 (Trunc32to16 x:(Rsh32Ux64 _ (Const64 [s])))) && s >= 16 -> x
   610  (ZeroExt8to16  (Trunc16to8  x:(Rsh16Ux64 _ (Const64 [s])))) && s >= 8 -> x
   611  
   612  (SignExt8to64  (Trunc64to8  x:(Rsh64x64 _ (Const64 [s])))) && s >= 56 -> x
   613  (SignExt16to64 (Trunc64to16 x:(Rsh64x64 _ (Const64 [s])))) && s >= 48 -> x
   614  (SignExt32to64 (Trunc64to32 x:(Rsh64x64 _ (Const64 [s])))) && s >= 32 -> x
   615  (SignExt8to32  (Trunc32to8  x:(Rsh32x64 _ (Const64 [s])))) && s >= 24 -> x
   616  (SignExt16to32 (Trunc32to16 x:(Rsh32x64 _ (Const64 [s])))) && s >= 16 -> x
   617  (SignExt8to16  (Trunc16to8  x:(Rsh16x64 _ (Const64 [s])))) && s >= 8 -> x
   618  
   619  (Slicemask (Const32 [x])) && x > 0 -> (Const32 [-1])
   620  (Slicemask (Const32 [0]))          -> (Const32 [0])
   621  (Slicemask (Const64 [x])) && x > 0 -> (Const64 [-1])
   622  (Slicemask (Const64 [0]))          -> (Const64 [0])
   623  
   624  // Rewrite AND with a constant as shifts if possible, slightly faster for 64 bit operands
   625  // leading zeros can be shifted left, then right
   626  (And64 <t> (Const64 [y]) x) && nlz(y) + nto(y) == 64 && nto(y) >= 32
   627    -> (Rsh64Ux64 (Lsh64x64 <t> x (Const64 <t> [nlz(y)])) (Const64 <t> [nlz(y)]))
   628  // trailing zeros can be shifted right, then left
   629  (And64 <t> (Const64 [y]) x) && nlo(y) + ntz(y) == 64 && ntz(y) >= 32
   630    -> (Lsh64x64 (Rsh64Ux64 <t> x (Const64 <t> [ntz(y)])) (Const64 <t> [ntz(y)]))
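        // For instance, y = 0x0000ffffffffffff has nlz(y) == 16 and nto(y) == 48, so
        // x & y is computed as (x << 16) >> 16 (unsigned) instead of an AND with the
        // wide mask constant.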
   631  
   632  // simplifications often used for lengths.  e.g. len(s[i:i+5])==5
   633  (Sub64 (Add64 x y) x) -> y
   634  (Sub64 (Add64 x y) y) -> x
   635  (Sub32 (Add32 x y) x) -> y
   636  (Sub32 (Add32 x y) y) -> x
   637  (Sub16 (Add16 x y) x) -> y
   638  (Sub16 (Add16 x y) y) -> x
   639  (Sub8  (Add8  x y) x) -> y
   640  (Sub8  (Add8  x y) y) -> x
   641  
   642  // basic phi simplifications
   643  (Phi (Const8  [c]) (Const8  [c])) -> (Const8  [c])
   644  (Phi (Const16 [c]) (Const16 [c])) -> (Const16 [c])
   645  (Phi (Const32 [c]) (Const32 [c])) -> (Const32 [c])
   646  (Phi (Const64 [c]) (Const64 [c])) -> (Const64 [c])
   647  
   648  // user nil checks
   649  (NeqPtr p (ConstNil)) -> (IsNonNil p)
   650  (NeqPtr (ConstNil) p) -> (IsNonNil p)
   651  (EqPtr p (ConstNil)) -> (Not (IsNonNil p))
   652  (EqPtr (ConstNil) p) -> (Not (IsNonNil p))
   653  
   654  // slice and interface comparisons
   655  // The frontend ensures that we can only compare against nil,
   656  // so we need only compare the first word (interface type or slice ptr).
   657  (EqInter x y)  -> (EqPtr  (ITab x) (ITab y))
   658  (NeqInter x y) -> (NeqPtr (ITab x) (ITab y))
   659  (EqSlice x y)  -> (EqPtr  (SlicePtr x) (SlicePtr y))
   660  (NeqSlice x y) -> (NeqPtr (SlicePtr x) (SlicePtr y))
   661  
   662  // Load of store of same address, with compatibly typed value and same size
   663  (Load <t1> p1 (Store [w] p2 x _)) && isSamePtr(p1,p2) && t1.Compare(x.Type)==CMPeq && w == t1.Size() -> x
   664  
   665  // Collapse OffPtr
   666  (OffPtr (OffPtr p [b]) [a]) -> (OffPtr p [a+b])
   667  (OffPtr p [0]) && v.Type.Compare(p.Type) == CMPeq -> p
   668  
   669  // indexing operations
   670  // Note: bounds check has already been done
   671  (PtrIndex <t> ptr idx) && config.PtrSize == 4 -> (AddPtr ptr (Mul32 <config.fe.TypeInt()> idx (Const32 <config.fe.TypeInt()> [t.ElemType().Size()])))
   672  (PtrIndex <t> ptr idx) && config.PtrSize == 8 -> (AddPtr ptr (Mul64 <config.fe.TypeInt()> idx (Const64 <config.fe.TypeInt()> [t.ElemType().Size()])))
   673  
   674  // struct operations
   675  (StructSelect (StructMake1 x)) -> x
   676  (StructSelect [0] (StructMake2 x _)) -> x
   677  (StructSelect [1] (StructMake2 _ x)) -> x
   678  (StructSelect [0] (StructMake3 x _ _)) -> x
   679  (StructSelect [1] (StructMake3 _ x _)) -> x
   680  (StructSelect [2] (StructMake3 _ _ x)) -> x
   681  (StructSelect [0] (StructMake4 x _ _ _)) -> x
   682  (StructSelect [1] (StructMake4 _ x _ _)) -> x
   683  (StructSelect [2] (StructMake4 _ _ x _)) -> x
   684  (StructSelect [3] (StructMake4 _ _ _ x)) -> x
   685  
   686  (Load <t> _ _) && t.IsStruct() && t.NumFields() == 0 && config.fe.CanSSA(t) ->
   687    (StructMake0)
   688  (Load <t> ptr mem) && t.IsStruct() && t.NumFields() == 1 && config.fe.CanSSA(t) ->
   689    (StructMake1
   690      (Load <t.FieldType(0)> ptr mem))
   691  (Load <t> ptr mem) && t.IsStruct() && t.NumFields() == 2 && config.fe.CanSSA(t) ->
   692    (StructMake2
   693      (Load <t.FieldType(0)> ptr mem)
   694      (Load <t.FieldType(1)> (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] ptr) mem))
   695  (Load <t> ptr mem) && t.IsStruct() && t.NumFields() == 3 && config.fe.CanSSA(t) ->
   696    (StructMake3
   697      (Load <t.FieldType(0)> ptr mem)
   698      (Load <t.FieldType(1)> (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] ptr) mem)
   699      (Load <t.FieldType(2)> (OffPtr <t.FieldType(2).PtrTo()> [t.FieldOff(2)] ptr) mem))
   700  (Load <t> ptr mem) && t.IsStruct() && t.NumFields() == 4 && config.fe.CanSSA(t) ->
   701    (StructMake4
   702      (Load <t.FieldType(0)> ptr mem)
   703      (Load <t.FieldType(1)> (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] ptr) mem)
   704      (Load <t.FieldType(2)> (OffPtr <t.FieldType(2).PtrTo()> [t.FieldOff(2)] ptr) mem)
   705      (Load <t.FieldType(3)> (OffPtr <t.FieldType(3).PtrTo()> [t.FieldOff(3)] ptr) mem))
   706  
   707  (StructSelect [i] x:(Load <t> ptr mem)) && !config.fe.CanSSA(t) ->
   708    @x.Block (Load <v.Type> (OffPtr <v.Type.PtrTo()> [t.FieldOff(int(i))] ptr) mem)
   709  
   710  (Store _ (StructMake0) mem) -> mem
   711  (Store dst (StructMake1 <t> f0) mem) ->
   712    (Store [t.FieldType(0).Size()] dst f0 mem)
   713  (Store dst (StructMake2 <t> f0 f1) mem) ->
   714    (Store [t.FieldType(1).Size()]
   715      (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] dst)
   716      f1
   717      (Store [t.FieldType(0).Size()] dst f0 mem))
   718  (Store dst (StructMake3 <t> f0 f1 f2) mem) ->
   719    (Store [t.FieldType(2).Size()]
   720      (OffPtr <t.FieldType(2).PtrTo()> [t.FieldOff(2)] dst)
   721      f2
   722      (Store [t.FieldType(1).Size()]
   723        (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] dst)
   724        f1
   725        (Store [t.FieldType(0).Size()] dst f0 mem)))
   726  (Store dst (StructMake4 <t> f0 f1 f2 f3) mem) ->
   727    (Store [t.FieldType(3).Size()]
   728      (OffPtr <t.FieldType(3).PtrTo()> [t.FieldOff(3)] dst)
   729      f3
   730      (Store [t.FieldType(2).Size()]
   731        (OffPtr <t.FieldType(2).PtrTo()> [t.FieldOff(2)] dst)
   732        f2
   733        (Store [t.FieldType(1).Size()]
   734          (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] dst)
   735          f1
   736          (Store [t.FieldType(0).Size()] dst f0 mem))))
   737  
   738  // Putting struct{*byte} and similar into direct interfaces.
   739  (IMake typ (StructMake1 val)) -> (IMake typ val)
   740  (StructSelect [0] x:(IData _)) -> x
   741  
   742  // un-SSAable values use mem->mem copies
   743  (Store [size] dst (Load <t> src mem) mem) && !config.fe.CanSSA(t) ->
   744  	(Move [MakeSizeAndAlign(size, t.Alignment()).Int64()] dst src mem)
   745  (Store [size] dst (Load <t> src mem) (VarDef {x} mem)) && !config.fe.CanSSA(t) ->
   746  	(Move [MakeSizeAndAlign(size, t.Alignment()).Int64()] dst src (VarDef {x} mem))
   747  
   748  // array ops
   749  (ArraySelect (ArrayMake1 x)) -> x
   750  
   751  (Load <t> _ _) && t.IsArray() && t.NumElem() == 0 ->
   752    (ArrayMake0)
   753  
   754  (Load <t> ptr mem) && t.IsArray() && t.NumElem() == 1 && config.fe.CanSSA(t) ->
   755    (ArrayMake1 (Load <t.ElemType()> ptr mem))
   756  
   757  (Store _ (ArrayMake0) mem) -> mem
   758  (Store [size] dst (ArrayMake1 e) mem) -> (Store [size] dst e mem)
   759  
   760  (ArraySelect [0] (Load ptr mem)) -> (Load ptr mem)
   761  
   762  // Putting [1]{*byte} and similar into direct interfaces.
   763  (IMake typ (ArrayMake1 val)) -> (IMake typ val)
   764  (ArraySelect [0] x:(IData _)) -> x
   765  
   766  // string ops
   767  // Decomposing StringMake and lowering of StringPtr and StringLen
   768  // happens in a later pass, dec, so that these operations are available
   769  // to other passes for optimizations.
   770  (StringPtr (StringMake (Const64 <t> [c]) _)) -> (Const64 <t> [c])
   771  (StringLen (StringMake _ (Const64 <t> [c]))) -> (Const64 <t> [c])
   772  (ConstString {s}) && config.PtrSize == 4 && s.(string) == "" ->
   773    (StringMake (ConstNil) (Const32 <config.fe.TypeInt()> [0]))
   774  (ConstString {s}) && config.PtrSize == 8 && s.(string) == "" ->
   775    (StringMake (ConstNil) (Const64 <config.fe.TypeInt()> [0]))
   776  (ConstString {s}) && config.PtrSize == 4 && s.(string) != "" ->
   777    (StringMake
   778      (Addr <config.fe.TypeBytePtr()> {config.fe.StringData(s.(string))}
   779        (SB))
   780      (Const32 <config.fe.TypeInt()> [int64(len(s.(string)))]))
   781  (ConstString {s}) && config.PtrSize == 8 && s.(string) != "" ->
   782    (StringMake
   783      (Addr <config.fe.TypeBytePtr()> {config.fe.StringData(s.(string))}
   784        (SB))
   785      (Const64 <config.fe.TypeInt()> [int64(len(s.(string)))]))
   786  
   787  // slice ops
   788  // Only a few slice rules are provided here.  See dec.rules for
   789  // a more comprehensive set.
   790  (SliceLen (SliceMake _ (Const64 <t> [c]) _)) -> (Const64 <t> [c])
   791  (SliceCap (SliceMake _ _ (Const64 <t> [c]))) -> (Const64 <t> [c])
   792  (SliceLen (SliceMake _ (Const32 <t> [c]) _)) -> (Const32 <t> [c])
   793  (SliceCap (SliceMake _ _ (Const32 <t> [c]))) -> (Const32 <t> [c])
   794  (SlicePtr (SliceMake (SlicePtr x) _ _)) -> (SlicePtr x)
   795  (SliceLen (SliceMake _ (SliceLen x) _)) -> (SliceLen x)
   796  (SliceCap (SliceMake _ _ (SliceCap x))) -> (SliceCap x)
   797  (SliceCap (SliceMake _ _ (SliceLen x))) -> (SliceLen x)
   798  (ConstSlice) && config.PtrSize == 4 ->
   799    (SliceMake
   800      (ConstNil <v.Type.ElemType().PtrTo()>)
   801      (Const32 <config.fe.TypeInt()> [0])
   802      (Const32 <config.fe.TypeInt()> [0]))
   803  (ConstSlice) && config.PtrSize == 8 ->
   804    (SliceMake
   805      (ConstNil <v.Type.ElemType().PtrTo()>)
   806      (Const64 <config.fe.TypeInt()> [0])
   807      (Const64 <config.fe.TypeInt()> [0]))
   808  
   809  // interface ops
   810  (ConstInterface) ->
   811    (IMake
   812      (ConstNil <config.fe.TypeBytePtr()>)
   813      (ConstNil <config.fe.TypeBytePtr()>))
   814  
   815  (NilCheck (GetG mem) mem) -> mem
   816  
   817  (If (Not cond) yes no) -> (If cond no yes)
   818  (If (ConstBool [c]) yes no) && c == 1 -> (First nil yes no)
   819  (If (ConstBool [c]) yes no) && c == 0 -> (First nil no yes)
   820  
   821  // Get rid of Convert ops for pointer arithmetic on unsafe.Pointer.
   822  (Convert (Add64 (Convert ptr mem) off) mem) -> (Add64 ptr off)
   823  (Convert (Add64 off (Convert ptr mem)) mem) -> (Add64 ptr off)
   824  (Convert (Convert ptr mem) mem) -> ptr
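        // The typical source pattern, in the spirit of the example at the top of this
        // file, is pointer arithmetic routed through uintptr:
        //
        //   q := unsafe.Pointer(uintptr(p) + off)
        //
        // The uintptr <-> unsafe.Pointer round trips show up as Convert ops, and the
        // rules above collapse the whole expression into a single pointer add.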
   825  
   826  // Decompose compound argument values
   827  (Arg {n} [off]) && v.Type.IsString() ->
   828    (StringMake
   829      (Arg <config.fe.TypeBytePtr()> {n} [off])
   830      (Arg <config.fe.TypeInt()> {n} [off+config.PtrSize]))
   831  
   832  (Arg {n} [off]) && v.Type.IsSlice() ->
   833    (SliceMake
   834      (Arg <v.Type.ElemType().PtrTo()> {n} [off])
   835      (Arg <config.fe.TypeInt()> {n} [off+config.PtrSize])
   836      (Arg <config.fe.TypeInt()> {n} [off+2*config.PtrSize]))
   837  
   838  (Arg {n} [off]) && v.Type.IsInterface() ->
   839    (IMake
   840      (Arg <config.fe.TypeBytePtr()> {n} [off])
   841      (Arg <config.fe.TypeBytePtr()> {n} [off+config.PtrSize]))
   842  
   843  (Arg {n} [off]) && v.Type.IsComplex() && v.Type.Size() == 16 ->
   844    (ComplexMake
   845      (Arg <config.fe.TypeFloat64()> {n} [off])
   846      (Arg <config.fe.TypeFloat64()> {n} [off+8]))
   847  
   848  (Arg {n} [off]) && v.Type.IsComplex() && v.Type.Size() == 8 ->
   849    (ComplexMake
   850      (Arg <config.fe.TypeFloat32()> {n} [off])
   851      (Arg <config.fe.TypeFloat32()> {n} [off+4]))
   852  
   853  (Arg <t>) && t.IsStruct() && t.NumFields() == 0 && config.fe.CanSSA(t) ->
   854    (StructMake0)
   855  (Arg <t> {n} [off]) && t.IsStruct() && t.NumFields() == 1 && config.fe.CanSSA(t) ->
   856    (StructMake1
   857      (Arg <t.FieldType(0)> {n} [off+t.FieldOff(0)]))
   858  (Arg <t> {n} [off]) && t.IsStruct() && t.NumFields() == 2 && config.fe.CanSSA(t) ->
   859    (StructMake2
   860      (Arg <t.FieldType(0)> {n} [off+t.FieldOff(0)])
   861      (Arg <t.FieldType(1)> {n} [off+t.FieldOff(1)]))
   862  (Arg <t> {n} [off]) && t.IsStruct() && t.NumFields() == 3 && config.fe.CanSSA(t) ->
   863    (StructMake3
   864      (Arg <t.FieldType(0)> {n} [off+t.FieldOff(0)])
   865      (Arg <t.FieldType(1)> {n} [off+t.FieldOff(1)])
   866      (Arg <t.FieldType(2)> {n} [off+t.FieldOff(2)]))
   867  (Arg <t> {n} [off]) && t.IsStruct() && t.NumFields() == 4 && config.fe.CanSSA(t) ->
   868    (StructMake4
   869      (Arg <t.FieldType(0)> {n} [off+t.FieldOff(0)])
   870      (Arg <t.FieldType(1)> {n} [off+t.FieldOff(1)])
   871      (Arg <t.FieldType(2)> {n} [off+t.FieldOff(2)])
   872      (Arg <t.FieldType(3)> {n} [off+t.FieldOff(3)]))
   873  
   874  (Arg <t>) && t.IsArray() && t.NumElem() == 0 ->
   875    (ArrayMake0)
   876  (Arg <t> {n} [off]) && t.IsArray() && t.NumElem() == 1 && config.fe.CanSSA(t) ->
   877    (ArrayMake1 (Arg <t.ElemType()> {n} [off]))
   878  
   879  // strength reduction of divide by a constant.
   880  // Note: frontend does <=32 bits. We only need to do 64 bits here.
   881  // TODO: Do them all here?
   882  
   883  // Div/mod by 1.  Currently handled by frontend.
   884  //(Div64 n (Const64 [1])) -> n
   885  //(Div64u n (Const64 [1])) -> n
   886  //(Mod64 n (Const64 [1])) -> (Const64 [0])
   887  //(Mod64u n (Const64 [1])) -> (Const64 [0])
   888  
   889  // Unsigned divide by power of 2.
   890  (Div64u <t> n (Const64 [c])) && isPowerOfTwo(c) -> (Rsh64Ux64 n (Const64 <t> [log2(c)]))
   891  (Mod64u <t> n (Const64 [c])) && isPowerOfTwo(c) -> (And64 n (Const64 <t> [c-1]))
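        // As a concrete instance: for c == 8 (log2(c) == 3), an unsigned n / 8 becomes
        // n >> 3 and n % 8 becomes n & 7.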
   892  
   893  // Signed divide by power of 2.  Currently handled by frontend.
   894  // n / c = n >> log(c)       if n >= 0
   895  //       = (n+c-1) >> log(c) if n < 0
   896  // We conditionally add c-1 by adding n>>63>>(64-log(c)) (first shift signed, second shift unsigned).
   897  //(Div64 <t> n (Const64 [c])) && isPowerOfTwo(c) ->
   898  //  (Rsh64x64
   899  //    (Add64 <t>
   900  //      n
   901  //      (Rsh64Ux64 <t>
   902  //        (Rsh64x64 <t> n (Const64 <t> [63]))
   903  //        (Const64 <t> [64-log2(c)])))
   904  //    (Const64 <t> [log2(c)]))
   905  
   906  // Unsigned divide, not a power of 2.  Strength reduce to a multiply.
   907  (Div64u <t> x (Const64 [c])) && umagic64ok(c) && !umagic64a(c) ->
   908    (Rsh64Ux64
   909      (Hmul64u <t>
   910        (Const64 <t> [umagic64m(c)])
   911        x)
   912      (Const64 <t> [umagic64s(c)]))
   913  (Div64u <t> x (Const64 [c])) && umagic64ok(c) && umagic64a(c) ->
   914    (Rsh64Ux64
   915      (Avg64u <t>
   916        (Hmul64u <t>
   917          x
   918          (Const64 <t> [umagic64m(c)]))
   919        x)
   920      (Const64 <t> [umagic64s(c)-1]))
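        // A sketch of the identity behind these rules (the exact constants come from
        // the umagic64* helpers elsewhere in this package): for a suitable
        // m ~ 2^(64+s)/c, x/c == (x*m) >> (64+s) for all unsigned 64-bit x, where
        // Hmul64u supplies the high 64 bits of the 128-bit product x*m and the final
        // Rsh64Ux64 shifts out the remaining s bits. The umagic64a form handles magic
        // constants that need 65 bits, folding the extra bit back in with Avg64u.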
   921  
   922  // Signed divide, not a power of 2.  Strength reduce to a multiply.
   923  (Div64 <t> x (Const64 [c])) && c > 0 && smagic64ok(c) && smagic64m(c) > 0 ->
   924    (Sub64 <t>
   925      (Rsh64x64 <t>
   926        (Hmul64 <t>
   927          (Const64 <t> [smagic64m(c)])
   928          x)
   929        (Const64 <t> [smagic64s(c)]))
   930      (Rsh64x64 <t>
   931        x
   932        (Const64 <t> [63])))
   933  (Div64 <t> x (Const64 [c])) && c > 0 && smagic64ok(c) && smagic64m(c) < 0 ->
   934    (Sub64 <t>
   935      (Rsh64x64 <t>
   936        (Add64 <t>
   937          (Hmul64 <t>
   938            (Const64 <t> [smagic64m(c)])
   939            x)
   940          x)
   941        (Const64 <t> [smagic64s(c)]))
   942      (Rsh64x64 <t>
   943        x
   944        (Const64 <t> [63])))
   945  (Div64 <t> x (Const64 [c])) && c < 0 && smagic64ok(c) && smagic64m(c) > 0 ->
   946    (Neg64 <t>
   947      (Sub64 <t>
   948        (Rsh64x64 <t>
   949          (Hmul64 <t>
   950            (Const64 <t> [smagic64m(c)])
   951            x)
   952          (Const64 <t> [smagic64s(c)]))
   953        (Rsh64x64 <t>
   954          x
   955          (Const64 <t> [63]))))
   956  (Div64 <t> x (Const64 [c])) && c < 0 && smagic64ok(c) && smagic64m(c) < 0 ->
   957    (Neg64 <t>
   958      (Sub64 <t>
   959        (Rsh64x64 <t>
   960          (Add64 <t>
   961            (Hmul64 <t>
   962              (Const64 <t> [smagic64m(c)])
   963              x)
   964            x)
   965          (Const64 <t> [smagic64s(c)]))
   966        (Rsh64x64 <t>
   967          x
   968          (Const64 <t> [63]))))
   969  
   970  // A%B = A-(A/B*B).
   971  // This implements % with two * and a bunch of ancillary ops.
   972  // One of the * is free if the user's code also computes A/B.
   973  (Mod64  <t> x (Const64 [c])) && x.Op != OpConst64 && smagic64ok(c)
   974    -> (Sub64 x (Mul64 <t> (Div64  <t> x (Const64 <t> [c])) (Const64 <t> [c])))
   975  (Mod64u <t> x (Const64 [c])) && x.Op != OpConst64 && umagic64ok(c)
   976    -> (Sub64 x (Mul64 <t> (Div64u <t> x (Const64 <t> [c])) (Const64 <t> [c])))
   977  
   978  // floating point optimizations
   979  (Add32F x (Const32F [0])) -> x
   980  (Add32F (Const32F [0]) x) -> x
   981  (Add64F x (Const64F [0])) -> x
   982  (Add64F (Const64F [0]) x) -> x
   983  (Sub32F x (Const32F [0])) -> x
   984  (Sub64F x (Const64F [0])) -> x
   985  (Mul32F x (Const32F [f2i(1)])) -> x
   986  (Mul32F (Const32F [f2i(1)]) x) -> x
   987  (Mul64F x (Const64F [f2i(1)])) -> x
   988  (Mul64F (Const64F [f2i(1)]) x) -> x
   989  (Mul32F x (Const32F [f2i(-1)])) -> (Neg32F x)
   990  (Mul32F (Const32F [f2i(-1)]) x) -> (Neg32F x)
   991  (Mul64F x (Const64F [f2i(-1)])) -> (Neg64F x)
   992  (Mul64F (Const64F [f2i(-1)]) x) -> (Neg64F x)
   993  (Div32F x (Const32F [f2i(1)])) -> x
   994  (Div64F x (Const64F [f2i(1)])) -> x
   995  (Div32F x (Const32F [f2i(-1)])) -> (Neg32F x)
   996  (Div64F x (Const64F [f2i(-1)])) -> (Neg64F x)
   997  
   998  (Sqrt (Const64F [c])) -> (Const64F [f2i(math.Sqrt(i2f(c)))])
   999  
  1000  // recognize runtime.newobject and don't Zero/Nilcheck it
  1001  (Zero (Load (OffPtr [c] (SP)) mem) mem)
  1002  	&& mem.Op == OpStaticCall
  1003  	&& isSameSym(mem.Aux, "runtime.newobject")
  1004  	&& c == config.ctxt.FixedFrameSize() + config.RegSize // offset of return value
  1005  	-> mem
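        // Allocations such as p := new(T) or p := &T{} in the source compile to a call
        // to runtime.newobject, which returns memory that is already zeroed and is
        // never nil, so an explicit Zero of the result (and, in the rules below, a nil
        // check of it) is redundant.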
  1006  // nil checks just need to rewrite to something useless.
  1007  // they will be deadcode eliminated soon afterwards.
  1008  (NilCheck (Load (OffPtr [c] (SP)) mem) mem)
  1009  	&& mem.Op == OpStaticCall
  1010  	&& isSameSym(mem.Aux, "runtime.newobject")
  1011  	&& c == config.ctxt.FixedFrameSize() + config.RegSize // offset of return value
  1012  	&& warnRule(config.Debug_checknil() && int(v.Line) > 1, v, "removed nil check")
  1013  	-> (Invalid)
  1014  (NilCheck (OffPtr (Load (OffPtr [c] (SP)) mem)) mem)
  1015  	&& mem.Op == OpStaticCall
  1016  	&& isSameSym(mem.Aux, "runtime.newobject")
  1017  	&& c == config.ctxt.FixedFrameSize() + config.RegSize // offset of return value
  1018  	&& warnRule(config.Debug_checknil() && int(v.Line) > 1, v, "removed nil check")
  1019  	-> (Invalid)