// Copyright 2015 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// values are specified using the following format:
// (op <type> [auxint] {aux} arg0 arg1 ...)
// the type, aux, and auxint fields are optional
// on the matching side
//  - the type, aux, and auxint fields must match if they are specified.
//  - the first occurrence of a variable defines that variable. Subsequent
//    uses must match (be == to) the first use.
//  - v is defined to be the value matched.
//  - an additional conditional can be provided after the match pattern with "&&".
// on the generated side
//  - the type of the top-level expression is the same as the one on the left-hand side.
//  - the type of any subexpressions must be specified explicitly.
//  - auxint will be 0 if not specified.
//  - aux will be nil if not specified.

// blocks are specified using the following format:
// (kind controlvalue succ0 succ1 ...)
// controlvalue must be "nil" or a value expression
// succ* fields must be variables
// For now, the generated successors must be a permutation of the matched successors.

// constant folding
(Trunc16to8 (Const16 [c])) -> (Const8 [int64(int8(c))])
(Trunc32to8 (Const32 [c])) -> (Const8 [int64(int8(c))])
(Trunc32to16 (Const32 [c])) -> (Const16 [int64(int16(c))])
(Trunc64to8 (Const64 [c])) -> (Const8 [int64(int8(c))])
(Trunc64to16 (Const64 [c])) -> (Const16 [int64(int16(c))])
(Trunc64to32 (Const64 [c])) -> (Const32 [int64(int32(c))])
(Cvt64Fto32F (Const64F [c])) -> (Const32F [f2i(float64(i2f32(c)))])
(Cvt32Fto64F (Const32F [c])) -> (Const64F [c]) // c is already a 64 bit float

// const negation is currently handled by frontend
//(Neg8 (Const8 [c])) -> (Const8 [-c])
//(Neg16 (Const16 [c])) -> (Const16 [-c])
//(Neg32 (Const32 [c])) -> (Const32 [-c])
//(Neg64 (Const64 [c])) -> (Const64 [-c])
//(Neg32F (Const32F [c])) -> (Const32F [f2i(-i2f(c))])
//(Neg64F (Const64F [c])) -> (Const64F [f2i(-i2f(c))])

(Add8 (Const8 [c]) (Const8 [d])) -> (Const8 [int64(int8(c+d))])
(Add16 (Const16 [c]) (Const16 [d])) -> (Const16 [int64(int16(c+d))])
(Add32 (Const32 [c]) (Const32 [d])) -> (Const32 [int64(int32(c+d))])
(Add64 (Const64 [c]) (Const64 [d])) -> (Const64 [c+d])
(Add32F (Const32F [c]) (Const32F [d])) ->
        (Const32F [f2i(float64(i2f32(c) + i2f32(d)))]) // ensure we combine the operands with 32 bit precision
(Add64F (Const64F [c]) (Const64F [d])) -> (Const64F [f2i(i2f(c) + i2f(d))])
(AddPtr <t> x (Const64 [c])) -> (OffPtr <t> x [c])

(Sub8 (Const8 [c]) (Const8 [d])) -> (Const8 [int64(int8(c-d))])
(Sub16 (Const16 [c]) (Const16 [d])) -> (Const16 [int64(int16(c-d))])
(Sub32 (Const32 [c]) (Const32 [d])) -> (Const32 [int64(int32(c-d))])
(Sub64 (Const64 [c]) (Const64 [d])) -> (Const64 [c-d])
(Sub32F (Const32F [c]) (Const32F [d])) ->
        (Const32F [f2i(float64(i2f32(c) - i2f32(d)))])
(Sub64F (Const64F [c]) (Const64F [d])) -> (Const64F [f2i(i2f(c) - i2f(d))])

(Mul8 (Const8 [c]) (Const8 [d])) -> (Const8 [int64(int8(c*d))])
(Mul16 (Const16 [c]) (Const16 [d])) -> (Const16 [int64(int16(c*d))])
(Mul32 (Const32 [c]) (Const32 [d])) -> (Const32 [int64(int32(c*d))])
(Mul64 (Const64 [c]) (Const64 [d])) -> (Const64 [c*d])
(Mul32F (Const32F [c]) (Const32F [d])) ->
        (Const32F [f2i(float64(i2f32(c) * i2f32(d)))])
(Mul64F (Const64F [c]) (Const64F [d])) -> (Const64F [f2i(i2f(c) * i2f(d))])
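// The f2i/i2f/i2f32 helpers used by the float folds above are defined with the
// other rule helpers (rewrite.go). As a rough sketch of their shape -- an
// illustrative assumption, not a copy of that file -- they reinterpret the
// float bits carried in the auxint field:
//
//	import "math"
//
//	func i2f(i int64) float64   { return math.Float64frombits(uint64(i)) }
//	func i2f32(i int64) float32 { return float32(i2f(i)) } // rounds to 32-bit precision
//	func f2i(f float64) int64   { return int64(math.Float64bits(f)) }
//
// A Const32F still stores its value as a float64 bit pattern in auxint, so the
// explicit round trip through float32 is what gives the 32-bit folds above
// their single-precision semantics.
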
(Mod8 (Const8 [c]) (Const8 [d])) && d != 0 -> (Const8 [int64(int8(c % d))])
(Mod16 (Const16 [c]) (Const16 [d])) && d != 0 -> (Const16 [int64(int16(c % d))])
(Mod32 (Const32 [c]) (Const32 [d])) && d != 0 -> (Const32 [int64(int32(c % d))])
(Mod64 (Const64 [c]) (Const64 [d])) && d != 0 -> (Const64 [c % d])

(Mod8u (Const8 [c]) (Const8 [d])) && d != 0 -> (Const8 [int64(uint8(c) % uint8(d))])
(Mod16u (Const16 [c]) (Const16 [d])) && d != 0 -> (Const16 [int64(uint16(c) % uint16(d))])
(Mod32u (Const32 [c]) (Const32 [d])) && d != 0 -> (Const32 [int64(uint32(c) % uint32(d))])
(Mod64u (Const64 [c]) (Const64 [d])) && d != 0 -> (Const64 [int64(uint64(c) % uint64(d))])

(Lsh64x64 (Const64 [c]) (Const64 [d])) -> (Const64 [c << uint64(d)])
(Rsh64x64 (Const64 [c]) (Const64 [d])) -> (Const64 [c >> uint64(d)])
(Rsh64Ux64 (Const64 [c]) (Const64 [d])) -> (Const64 [int64(uint64(c) >> uint64(d))])
(Lsh32x64 (Const32 [c]) (Const64 [d])) -> (Const32 [int64(int32(c) << uint64(d))])
(Rsh32x64 (Const32 [c]) (Const64 [d])) -> (Const32 [int64(int32(c) >> uint64(d))])
(Rsh32Ux64 (Const32 [c]) (Const64 [d])) -> (Const32 [int64(uint32(c) >> uint64(d))])
(Lsh16x64 (Const16 [c]) (Const64 [d])) -> (Const16 [int64(int16(c) << uint64(d))])
(Rsh16x64 (Const16 [c]) (Const64 [d])) -> (Const16 [int64(int16(c) >> uint64(d))])
(Rsh16Ux64 (Const16 [c]) (Const64 [d])) -> (Const16 [int64(uint16(c) >> uint64(d))])
(Lsh8x64 (Const8 [c]) (Const64 [d])) -> (Const8 [int64(int8(c) << uint64(d))])
(Rsh8x64 (Const8 [c]) (Const64 [d])) -> (Const8 [int64(int8(c) >> uint64(d))])
(Rsh8Ux64 (Const8 [c]) (Const64 [d])) -> (Const8 [int64(uint8(c) >> uint64(d))])

(Lsh64x64 (Const64 [0]) _) -> (Const64 [0])
(Rsh64x64 (Const64 [0]) _) -> (Const64 [0])
(Rsh64Ux64 (Const64 [0]) _) -> (Const64 [0])
(Lsh32x64 (Const32 [0]) _) -> (Const32 [0])
(Rsh32x64 (Const32 [0]) _) -> (Const32 [0])
(Rsh32Ux64 (Const32 [0]) _) -> (Const32 [0])
(Lsh16x64 (Const16 [0]) _) -> (Const16 [0])
(Rsh16x64 (Const16 [0]) _) -> (Const16 [0])
(Rsh16Ux64 (Const16 [0]) _) -> (Const16 [0])
(Lsh8x64 (Const8 [0]) _) -> (Const8 [0])
(Rsh8x64 (Const8 [0]) _) -> (Const8 [0])
(Rsh8Ux64 (Const8 [0]) _) -> (Const8 [0])

// ((x >> c1) << c2) >> c3
(Rsh64Ux64 (Lsh64x64 (Rsh64Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3])) && uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) -> (Rsh64Ux64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3]))
(Rsh32Ux32 (Lsh32x32 (Rsh32Ux32 x (Const32 [c1])) (Const32 [c2])) (Const32 [c3])) && uint32(c1) >= uint32(c2) && uint32(c3) >= uint32(c2) -> (Rsh32Ux32 x (Const32 <config.fe.TypeUInt32()> [int64(int32(c1-c2+c3))]))
(Rsh16Ux16 (Lsh16x16 (Rsh16Ux16 x (Const16 [c1])) (Const16 [c2])) (Const16 [c3])) && uint16(c1) >= uint16(c2) && uint16(c3) >= uint16(c2) -> (Rsh16Ux16 x (Const16 <config.fe.TypeUInt16()> [int64(int16(c1-c2+c3))]))
(Rsh8Ux8 (Lsh8x8 (Rsh8Ux8 x (Const8 [c1])) (Const8 [c2])) (Const8 [c3])) && uint8(c1) >= uint8(c2) && uint8(c3) >= uint8(c2) -> (Rsh8Ux8 x (Const8 <config.fe.TypeUInt8()> [int64(int8(c1-c2+c3))]))

// ((x << c1) >> c2) << c3
(Lsh64x64 (Rsh64Ux64 (Lsh64x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3])) && uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) -> (Lsh64x64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3]))
(Lsh32x32 (Rsh32Ux32 (Lsh32x32 x (Const32 [c1])) (Const32 [c2])) (Const32 [c3])) && uint32(c1) >= uint32(c2) && uint32(c3) >= uint32(c2) -> (Lsh32x32 x (Const32 <config.fe.TypeUInt32()> [int64(int32(c1-c2+c3))]))
(Lsh16x16 (Rsh16Ux16 (Lsh16x16 x (Const16 [c1])) (Const16 [c2])) (Const16 [c3])) && uint16(c1) >= uint16(c2) && uint16(c3) >= uint16(c2) -> (Lsh16x16 x (Const16 <config.fe.TypeUInt16()> [int64(int16(c1-c2+c3))]))
(Lsh8x8 (Rsh8Ux8 (Lsh8x8 x (Const8 [c1])) (Const8 [c2])) (Const8 [c3])) && uint8(c1) >= uint8(c2) && uint8(c3) >= uint8(c2) -> (Lsh8x8 x (Const8 <config.fe.TypeUInt8()> [int64(int8(c1-c2+c3))]))

// Fold IsInBounds when the range of the index cannot exceed the limit.
(IsInBounds (ZeroExt8to32 _) (Const32 [c])) && (1 << 8) <= c -> (ConstBool [1])
(IsInBounds (ZeroExt8to64 _) (Const64 [c])) && (1 << 8) <= c -> (ConstBool [1])
(IsInBounds (ZeroExt16to32 _) (Const32 [c])) && (1 << 16) <= c -> (ConstBool [1])
(IsInBounds (ZeroExt16to64 _) (Const64 [c])) && (1 << 16) <= c -> (ConstBool [1])
(IsInBounds x x) -> (ConstBool [0])
(IsInBounds (And32 (Const32 [c]) _) (Const32 [d])) && 0 <= c && c < d -> (ConstBool [1])
(IsInBounds (And64 (Const64 [c]) _) (Const64 [d])) && 0 <= c && c < d -> (ConstBool [1])
(IsInBounds (Const32 [c]) (Const32 [d])) -> (ConstBool [b2i(0 <= c && c < d)])
(IsInBounds (Const64 [c]) (Const64 [d])) -> (ConstBool [b2i(0 <= c && c < d)])
// (Mod64u x y) is always between 0 (inclusive) and y (exclusive).
(IsInBounds (Mod32u _ y) y) -> (ConstBool [1])
(IsInBounds (Mod64u _ y) y) -> (ConstBool [1])

(IsSliceInBounds x x) -> (ConstBool [1])
(IsSliceInBounds (And32 (Const32 [c]) _) (Const32 [d])) && 0 <= c && c <= d -> (ConstBool [1])
(IsSliceInBounds (And64 (Const64 [c]) _) (Const64 [d])) && 0 <= c && c <= d -> (ConstBool [1])
(IsSliceInBounds (Const32 [0]) _) -> (ConstBool [1])
(IsSliceInBounds (Const64 [0]) _) -> (ConstBool [1])
(IsSliceInBounds (Const32 [c]) (Const32 [d])) -> (ConstBool [b2i(0 <= c && c <= d)])
(IsSliceInBounds (Const64 [c]) (Const64 [d])) -> (ConstBool [b2i(0 <= c && c <= d)])
(IsSliceInBounds (SliceLen x) (SliceCap x)) -> (ConstBool [1])

(Eq64 x x) -> (ConstBool [1])
(Eq32 x x) -> (ConstBool [1])
(Eq16 x x) -> (ConstBool [1])
(Eq8 x x) -> (ConstBool [1])
(Eq8 (ConstBool [c]) (ConstBool [d])) -> (ConstBool [b2i(c == d)])
(Eq8 (ConstBool [0]) x) -> (Not x)
(Eq8 (ConstBool [1]) x) -> x

(Neq64 x x) -> (ConstBool [0])
(Neq32 x x) -> (ConstBool [0])
(Neq16 x x) -> (ConstBool [0])
(Neq8 x x) -> (ConstBool [0])
(Neq8 (ConstBool [c]) (ConstBool [d])) -> (ConstBool [b2i(c != d)])
(Neq8 (ConstBool [0]) x) -> x
(Neq8 (ConstBool [1]) x) -> (Not x)

(Eq64 (Const64 <t> [c]) (Add64 (Const64 <t> [d]) x)) -> (Eq64 (Const64 <t> [c-d]) x)
(Eq32 (Const32 <t> [c]) (Add32 (Const32 <t> [d]) x)) -> (Eq32 (Const32 <t> [int64(int32(c-d))]) x)
(Eq16 (Const16 <t> [c]) (Add16 (Const16 <t> [d]) x)) -> (Eq16 (Const16 <t> [int64(int16(c-d))]) x)
(Eq8 (Const8 <t> [c]) (Add8 (Const8 <t> [d]) x)) -> (Eq8 (Const8 <t> [int64(int8(c-d))]) x)

(Neq64 (Const64 <t> [c]) (Add64 (Const64 <t> [d]) x)) -> (Neq64 (Const64 <t> [c-d]) x)
(Neq32 (Const32 <t> [c]) (Add32 (Const32 <t> [d]) x)) -> (Neq32 (Const32 <t> [int64(int32(c-d))]) x)
(Neq16 (Const16 <t> [c]) (Add16 (Const16 <t> [d]) x)) -> (Neq16 (Const16 <t> [int64(int16(c-d))]) x)
(Neq8 (Const8 <t> [c]) (Add8 (Const8 <t> [d]) x)) -> (Neq8 (Const8 <t> [int64(int8(c-d))]) x)
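// Illustrative note (not from the original source): the Eq/Neq rewrites above use
// the fact that, for fixed-width integers, c == d+x exactly when c-d == x, since
// subtraction wraps the same way addition does; e.g. 10 == 3+x iff 7 == x.
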
// canonicalize: swap arguments for commutative operations when one argument is a constant.
(Eq64 x (Const64 <t> [c])) && x.Op != OpConst64 -> (Eq64 (Const64 <t> [c]) x)
(Eq32 x (Const32 <t> [c])) && x.Op != OpConst32 -> (Eq32 (Const32 <t> [c]) x)
(Eq16 x (Const16 <t> [c])) && x.Op != OpConst16 -> (Eq16 (Const16 <t> [c]) x)
(Eq8 x (Const8 <t> [c])) && x.Op != OpConst8 -> (Eq8 (Const8 <t> [c]) x)
(Eq8 x (ConstBool <t> [c])) && x.Op != OpConstBool -> (Eq8 (ConstBool <t> [c]) x)

(Neq64 x (Const64 <t> [c])) && x.Op != OpConst64 -> (Neq64 (Const64 <t> [c]) x)
(Neq32 x (Const32 <t> [c])) && x.Op != OpConst32 -> (Neq32 (Const32 <t> [c]) x)
(Neq16 x (Const16 <t> [c])) && x.Op != OpConst16 -> (Neq16 (Const16 <t> [c]) x)
(Neq8 x (Const8 <t> [c])) && x.Op != OpConst8 -> (Neq8 (Const8 <t> [c]) x)
(Neq8 x (ConstBool <t> [c])) && x.Op != OpConstBool -> (Neq8 (ConstBool <t> [c]) x)

// AddPtr is not canonicalized because nilcheck ptr checks the first argument to be non-nil.
(Add64 x (Const64 <t> [c])) && x.Op != OpConst64 -> (Add64 (Const64 <t> [c]) x)
(Add32 x (Const32 <t> [c])) && x.Op != OpConst32 -> (Add32 (Const32 <t> [c]) x)
(Add16 x (Const16 <t> [c])) && x.Op != OpConst16 -> (Add16 (Const16 <t> [c]) x)
(Add8 x (Const8 <t> [c])) && x.Op != OpConst8 -> (Add8 (Const8 <t> [c]) x)

(Mul64 x (Const64 <t> [c])) && x.Op != OpConst64 -> (Mul64 (Const64 <t> [c]) x)
(Mul32 x (Const32 <t> [c])) && x.Op != OpConst32 -> (Mul32 (Const32 <t> [c]) x)
(Mul16 x (Const16 <t> [c])) && x.Op != OpConst16 -> (Mul16 (Const16 <t> [c]) x)
(Mul8 x (Const8 <t> [c])) && x.Op != OpConst8 -> (Mul8 (Const8 <t> [c]) x)

(Sub64 x (Const64 <t> [c])) && x.Op != OpConst64 -> (Add64 (Const64 <t> [-c]) x)
(Sub32 x (Const32 <t> [c])) && x.Op != OpConst32 -> (Add32 (Const32 <t> [int64(int32(-c))]) x)
(Sub16 x (Const16 <t> [c])) && x.Op != OpConst16 -> (Add16 (Const16 <t> [int64(int16(-c))]) x)
(Sub8 x (Const8 <t> [c])) && x.Op != OpConst8 -> (Add8 (Const8 <t> [int64(int8(-c))]) x)

(And64 x (Const64 <t> [c])) && x.Op != OpConst64 -> (And64 (Const64 <t> [c]) x)
(And32 x (Const32 <t> [c])) && x.Op != OpConst32 -> (And32 (Const32 <t> [c]) x)
(And16 x (Const16 <t> [c])) && x.Op != OpConst16 -> (And16 (Const16 <t> [c]) x)
(And8 x (Const8 <t> [c])) && x.Op != OpConst8 -> (And8 (Const8 <t> [c]) x)

(Or64 x (Const64 <t> [c])) && x.Op != OpConst64 -> (Or64 (Const64 <t> [c]) x)
(Or32 x (Const32 <t> [c])) && x.Op != OpConst32 -> (Or32 (Const32 <t> [c]) x)
(Or16 x (Const16 <t> [c])) && x.Op != OpConst16 -> (Or16 (Const16 <t> [c]) x)
(Or8 x (Const8 <t> [c])) && x.Op != OpConst8 -> (Or8 (Const8 <t> [c]) x)

(Xor64 x (Const64 <t> [c])) && x.Op != OpConst64 -> (Xor64 (Const64 <t> [c]) x)
(Xor32 x (Const32 <t> [c])) && x.Op != OpConst32 -> (Xor32 (Const32 <t> [c]) x)
(Xor16 x (Const16 <t> [c])) && x.Op != OpConst16 -> (Xor16 (Const16 <t> [c]) x)
(Xor8 x (Const8 <t> [c])) && x.Op != OpConst8 -> (Xor8 (Const8 <t> [c]) x)
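// For example (illustrative constant), after canonicalization (Add64 x (Const64 [5]))
// becomes (Add64 (Const64 [5]) x), so the constant-folding and distribution rules
// elsewhere in this file only need to match the constant-in-first-position form.
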
// Distribute multiplication c * (d+x) -> c*d + c*x. Useful for:
// a[i].b = ...; a[i+1].b = ...
(Mul64 (Const64 <t> [c]) (Add64 <t> (Const64 <t> [d]) x)) -> (Add64 (Const64 <t> [c*d]) (Mul64 <t> (Const64 <t> [c]) x))
(Mul32 (Const32 <t> [c]) (Add32 <t> (Const32 <t> [d]) x)) -> (Add32 (Const32 <t> [int64(int32(c*d))]) (Mul32 <t> (Const32 <t> [c]) x))

// rewrite shifts of 8/16/32 bit consts into 64 bit consts to reduce
// the number of other rewrite rules needed for const shifts
(Lsh64x32 <t> x (Const32 [c])) -> (Lsh64x64 x (Const64 <t> [int64(uint32(c))]))
(Lsh64x16 <t> x (Const16 [c])) -> (Lsh64x64 x (Const64 <t> [int64(uint16(c))]))
(Lsh64x8 <t> x (Const8 [c])) -> (Lsh64x64 x (Const64 <t> [int64(uint8(c))]))
(Rsh64x32 <t> x (Const32 [c])) -> (Rsh64x64 x (Const64 <t> [int64(uint32(c))]))
(Rsh64x16 <t> x (Const16 [c])) -> (Rsh64x64 x (Const64 <t> [int64(uint16(c))]))
(Rsh64x8 <t> x (Const8 [c])) -> (Rsh64x64 x (Const64 <t> [int64(uint8(c))]))
(Rsh64Ux32 <t> x (Const32 [c])) -> (Rsh64Ux64 x (Const64 <t> [int64(uint32(c))]))
(Rsh64Ux16 <t> x (Const16 [c])) -> (Rsh64Ux64 x (Const64 <t> [int64(uint16(c))]))
(Rsh64Ux8 <t> x (Const8 [c])) -> (Rsh64Ux64 x (Const64 <t> [int64(uint8(c))]))

(Lsh32x32 <t> x (Const32 [c])) -> (Lsh32x64 x (Const64 <t> [int64(uint32(c))]))
(Lsh32x16 <t> x (Const16 [c])) -> (Lsh32x64 x (Const64 <t> [int64(uint16(c))]))
(Lsh32x8 <t> x (Const8 [c])) -> (Lsh32x64 x (Const64 <t> [int64(uint8(c))]))
(Rsh32x32 <t> x (Const32 [c])) -> (Rsh32x64 x (Const64 <t> [int64(uint32(c))]))
(Rsh32x16 <t> x (Const16 [c])) -> (Rsh32x64 x (Const64 <t> [int64(uint16(c))]))
(Rsh32x8 <t> x (Const8 [c])) -> (Rsh32x64 x (Const64 <t> [int64(uint8(c))]))
(Rsh32Ux32 <t> x (Const32 [c])) -> (Rsh32Ux64 x (Const64 <t> [int64(uint32(c))]))
(Rsh32Ux16 <t> x (Const16 [c])) -> (Rsh32Ux64 x (Const64 <t> [int64(uint16(c))]))
(Rsh32Ux8 <t> x (Const8 [c])) -> (Rsh32Ux64 x (Const64 <t> [int64(uint8(c))]))

(Lsh16x32 <t> x (Const32 [c])) -> (Lsh16x64 x (Const64 <t> [int64(uint32(c))]))
(Lsh16x16 <t> x (Const16 [c])) -> (Lsh16x64 x (Const64 <t> [int64(uint16(c))]))
(Lsh16x8 <t> x (Const8 [c])) -> (Lsh16x64 x (Const64 <t> [int64(uint8(c))]))
(Rsh16x32 <t> x (Const32 [c])) -> (Rsh16x64 x (Const64 <t> [int64(uint32(c))]))
(Rsh16x16 <t> x (Const16 [c])) -> (Rsh16x64 x (Const64 <t> [int64(uint16(c))]))
(Rsh16x8 <t> x (Const8 [c])) -> (Rsh16x64 x (Const64 <t> [int64(uint8(c))]))
(Rsh16Ux32 <t> x (Const32 [c])) -> (Rsh16Ux64 x (Const64 <t> [int64(uint32(c))]))
(Rsh16Ux16 <t> x (Const16 [c])) -> (Rsh16Ux64 x (Const64 <t> [int64(uint16(c))]))
(Rsh16Ux8 <t> x (Const8 [c])) -> (Rsh16Ux64 x (Const64 <t> [int64(uint8(c))]))

(Lsh8x32 <t> x (Const32 [c])) -> (Lsh8x64 x (Const64 <t> [int64(uint32(c))]))
(Lsh8x16 <t> x (Const16 [c])) -> (Lsh8x64 x (Const64 <t> [int64(uint16(c))]))
(Lsh8x8 <t> x (Const8 [c])) -> (Lsh8x64 x (Const64 <t> [int64(uint8(c))]))
(Rsh8x32 <t> x (Const32 [c])) -> (Rsh8x64 x (Const64 <t> [int64(uint32(c))]))
(Rsh8x16 <t> x (Const16 [c])) -> (Rsh8x64 x (Const64 <t> [int64(uint16(c))]))
(Rsh8x8 <t> x (Const8 [c])) -> (Rsh8x64 x (Const64 <t> [int64(uint8(c))]))
(Rsh8Ux32 <t> x (Const32 [c])) -> (Rsh8Ux64 x (Const64 <t> [int64(uint32(c))]))
(Rsh8Ux16 <t> x (Const16 [c])) -> (Rsh8Ux64 x (Const64 <t> [int64(uint16(c))]))
(Rsh8Ux8 <t> x (Const8 [c])) -> (Rsh8Ux64 x (Const64 <t> [int64(uint8(c))]))

// shifts by zero
(Lsh64x64 x (Const64 [0])) -> x
(Rsh64x64 x (Const64 [0])) -> x
(Rsh64Ux64 x (Const64 [0])) -> x
(Lsh32x64 x (Const64 [0])) -> x
(Rsh32x64 x (Const64 [0])) -> x
(Rsh32Ux64 x (Const64 [0])) -> x
(Lsh16x64 x (Const64 [0])) -> x
(Rsh16x64 x (Const64 [0])) -> x
(Rsh16Ux64 x (Const64 [0])) -> x
(Lsh8x64 x (Const64 [0])) -> x
(Rsh8x64 x (Const64 [0])) -> x
(Rsh8Ux64 x (Const64 [0])) -> x

// zero shifted.
// TODO: other bit sizes.
(Lsh64x64 (Const64 [0]) _) -> (Const64 [0])
(Rsh64x64 (Const64 [0]) _) -> (Const64 [0])
(Rsh64Ux64 (Const64 [0]) _) -> (Const64 [0])
(Lsh64x32 (Const64 [0]) _) -> (Const64 [0])
(Rsh64x32 (Const64 [0]) _) -> (Const64 [0])
(Rsh64Ux32 (Const64 [0]) _) -> (Const64 [0])
(Lsh64x16 (Const64 [0]) _) -> (Const64 [0])
(Rsh64x16 (Const64 [0]) _) -> (Const64 [0])
(Rsh64Ux16 (Const64 [0]) _) -> (Const64 [0])
(Lsh64x8 (Const64 [0]) _) -> (Const64 [0])
(Rsh64x8 (Const64 [0]) _) -> (Const64 [0])
(Rsh64Ux8 (Const64 [0]) _) -> (Const64 [0])

// large left shifts of all values, and right shifts of unsigned values
(Lsh64x64 _ (Const64 [c])) && uint64(c) >= 64 -> (Const64 [0])
(Rsh64Ux64 _ (Const64 [c])) && uint64(c) >= 64 -> (Const64 [0])
(Lsh32x64 _ (Const64 [c])) && uint64(c) >= 32 -> (Const32 [0])
(Rsh32Ux64 _ (Const64 [c])) && uint64(c) >= 32 -> (Const32 [0])
(Lsh16x64 _ (Const64 [c])) && uint64(c) >= 16 -> (Const16 [0])
(Rsh16Ux64 _ (Const64 [c])) && uint64(c) >= 16 -> (Const16 [0])
(Lsh8x64 _ (Const64 [c])) && uint64(c) >= 8 -> (Const8 [0])
(Rsh8Ux64 _ (Const64 [c])) && uint64(c) >= 8 -> (Const8 [0])

// combine const shifts
(Lsh64x64 <t> (Lsh64x64 x (Const64 [c])) (Const64 [d])) && !uaddOvf(c,d) -> (Lsh64x64 x (Const64 <t> [c+d]))
(Lsh32x64 <t> (Lsh32x64 x (Const64 [c])) (Const64 [d])) && !uaddOvf(c,d) -> (Lsh32x64 x (Const64 <t> [c+d]))
(Lsh16x64 <t> (Lsh16x64 x (Const64 [c])) (Const64 [d])) && !uaddOvf(c,d) -> (Lsh16x64 x (Const64 <t> [c+d]))
(Lsh8x64 <t> (Lsh8x64 x (Const64 [c])) (Const64 [d])) && !uaddOvf(c,d) -> (Lsh8x64 x (Const64 <t> [c+d]))

(Rsh64x64 <t> (Rsh64x64 x (Const64 [c])) (Const64 [d])) && !uaddOvf(c,d) -> (Rsh64x64 x (Const64 <t> [c+d]))
(Rsh32x64 <t> (Rsh32x64 x (Const64 [c])) (Const64 [d])) && !uaddOvf(c,d) -> (Rsh32x64 x (Const64 <t> [c+d]))
(Rsh16x64 <t> (Rsh16x64 x (Const64 [c])) (Const64 [d])) && !uaddOvf(c,d) -> (Rsh16x64 x (Const64 <t> [c+d]))
(Rsh8x64 <t> (Rsh8x64 x (Const64 [c])) (Const64 [d])) && !uaddOvf(c,d) -> (Rsh8x64 x (Const64 <t> [c+d]))

(Rsh64Ux64 <t> (Rsh64Ux64 x (Const64 [c])) (Const64 [d])) && !uaddOvf(c,d) -> (Rsh64Ux64 x (Const64 <t> [c+d]))
(Rsh32Ux64 <t> (Rsh32Ux64 x (Const64 [c])) (Const64 [d])) && !uaddOvf(c,d) -> (Rsh32Ux64 x (Const64 <t> [c+d]))
(Rsh16Ux64 <t> (Rsh16Ux64 x (Const64 [c])) (Const64 [d])) && !uaddOvf(c,d) -> (Rsh16Ux64 x (Const64 <t> [c+d]))
(Rsh8Ux64 <t> (Rsh8Ux64 x (Const64 [c])) (Const64 [d])) && !uaddOvf(c,d) -> (Rsh8Ux64 x (Const64 <t> [c+d]))

// constant comparisons
(Eq64 (Const64 [c]) (Const64 [d])) -> (ConstBool [b2i(c == d)])
(Eq32 (Const32 [c]) (Const32 [d])) -> (ConstBool [b2i(c == d)])
(Eq16 (Const16 [c]) (Const16 [d])) -> (ConstBool [b2i(c == d)])
(Eq8 (Const8 [c]) (Const8 [d])) -> (ConstBool [b2i(c == d)])

(Neq64 (Const64 [c]) (Const64 [d])) -> (ConstBool [b2i(c != d)])
(Neq32 (Const32 [c]) (Const32 [d])) -> (ConstBool [b2i(c != d)])
(Neq16 (Const16 [c]) (Const16 [d])) -> (ConstBool [b2i(c != d)])
(Neq8 (Const8 [c]) (Const8 [d])) -> (ConstBool [b2i(c != d)])
(Greater64 (Const64 [c]) (Const64 [d])) -> (ConstBool [b2i(c > d)])
(Greater32 (Const32 [c]) (Const32 [d])) -> (ConstBool [b2i(c > d)])
(Greater16 (Const16 [c]) (Const16 [d])) -> (ConstBool [b2i(c > d)])
(Greater8 (Const8 [c]) (Const8 [d])) -> (ConstBool [b2i(c > d)])

(Greater64U (Const64 [c]) (Const64 [d])) -> (ConstBool [b2i(uint64(c) > uint64(d))])
(Greater32U (Const32 [c]) (Const32 [d])) -> (ConstBool [b2i(uint32(c) > uint32(d))])
(Greater16U (Const16 [c]) (Const16 [d])) -> (ConstBool [b2i(uint16(c) > uint16(d))])
(Greater8U (Const8 [c]) (Const8 [d])) -> (ConstBool [b2i(uint8(c) > uint8(d))])

(Geq64 (Const64 [c]) (Const64 [d])) -> (ConstBool [b2i(c >= d)])
(Geq32 (Const32 [c]) (Const32 [d])) -> (ConstBool [b2i(c >= d)])
(Geq16 (Const16 [c]) (Const16 [d])) -> (ConstBool [b2i(c >= d)])
(Geq8 (Const8 [c]) (Const8 [d])) -> (ConstBool [b2i(c >= d)])

(Geq64U (Const64 [c]) (Const64 [d])) -> (ConstBool [b2i(uint64(c) >= uint64(d))])
(Geq32U (Const32 [c]) (Const32 [d])) -> (ConstBool [b2i(uint32(c) >= uint32(d))])
(Geq16U (Const16 [c]) (Const16 [d])) -> (ConstBool [b2i(uint16(c) >= uint16(d))])
(Geq8U (Const8 [c]) (Const8 [d])) -> (ConstBool [b2i(uint8(c) >= uint8(d))])

(Less64 (Const64 [c]) (Const64 [d])) -> (ConstBool [b2i(c < d)])
(Less32 (Const32 [c]) (Const32 [d])) -> (ConstBool [b2i(c < d)])
(Less16 (Const16 [c]) (Const16 [d])) -> (ConstBool [b2i(c < d)])
(Less8 (Const8 [c]) (Const8 [d])) -> (ConstBool [b2i(c < d)])

(Less64U (Const64 [c]) (Const64 [d])) -> (ConstBool [b2i(uint64(c) < uint64(d))])
(Less32U (Const32 [c]) (Const32 [d])) -> (ConstBool [b2i(uint32(c) < uint32(d))])
(Less16U (Const16 [c]) (Const16 [d])) -> (ConstBool [b2i(uint16(c) < uint16(d))])
(Less8U (Const8 [c]) (Const8 [d])) -> (ConstBool [b2i(uint8(c) < uint8(d))])

(Leq64 (Const64 [c]) (Const64 [d])) -> (ConstBool [b2i(c <= d)])
(Leq32 (Const32 [c]) (Const32 [d])) -> (ConstBool [b2i(c <= d)])
(Leq16 (Const16 [c]) (Const16 [d])) -> (ConstBool [b2i(c <= d)])
(Leq8 (Const8 [c]) (Const8 [d])) -> (ConstBool [b2i(c <= d)])

(Leq64U (Const64 [c]) (Const64 [d])) -> (ConstBool [b2i(uint64(c) <= uint64(d))])
(Leq32U (Const32 [c]) (Const32 [d])) -> (ConstBool [b2i(uint32(c) <= uint32(d))])
(Leq16U (Const16 [c]) (Const16 [d])) -> (ConstBool [b2i(uint16(c) <= uint16(d))])
(Leq8U (Const8 [c]) (Const8 [d])) -> (ConstBool [b2i(uint8(c) <= uint8(d))])

// simplifications
(Or64 x x) -> x
(Or32 x x) -> x
(Or16 x x) -> x
(Or8 x x) -> x
(Or64 (Const64 [0]) x) -> x
(Or32 (Const32 [0]) x) -> x
(Or16 (Const16 [0]) x) -> x
(Or8 (Const8 [0]) x) -> x
(Or64 (Const64 [-1]) _) -> (Const64 [-1])
(Or32 (Const32 [-1]) _) -> (Const32 [-1])
(Or16 (Const16 [-1]) _) -> (Const16 [-1])
(Or8 (Const8 [-1]) _) -> (Const8 [-1])
(And64 x x) -> x
(And32 x x) -> x
(And16 x x) -> x
(And8 x x) -> x
(And64 (Const64 [-1]) x) -> x
(And32 (Const32 [-1]) x) -> x
(And16 (Const16 [-1]) x) -> x
(And8 (Const8 [-1]) x) -> x
(And64 (Const64 [0]) _) -> (Const64 [0])
(And32 (Const32 [0]) _) -> (Const32 [0])
(And16 (Const16 [0]) _) -> (Const16 [0])
(And8 (Const8 [0]) _) -> (Const8 [0])
(Xor64 x x) -> (Const64 [0])
(Xor32 x x) -> (Const32 [0])
(Xor16 x x) -> (Const16 [0])
(Xor8 x x) -> (Const8 [0])
(Xor64 (Const64 [0]) x) -> x
(Xor32 (Const32 [0]) x) -> x
(Xor16 (Const16 [0]) x) -> x
(Xor8 (Const8 [0]) x) -> x
(Add64 (Const64 [0]) x) -> x
(Add32 (Const32 [0]) x) -> x
(Add16 (Const16 [0]) x) -> x
(Add8 (Const8 [0]) x) -> x
(Sub64 x x) -> (Const64 [0])
(Sub32 x x) -> (Const32 [0])
(Sub16 x x) -> (Const16 [0])
(Sub8 x x) -> (Const8 [0])
(Mul64 (Const64 [0]) _) -> (Const64 [0])
(Mul32 (Const32 [0]) _) -> (Const32 [0])
(Mul16 (Const16 [0]) _) -> (Const16 [0])
(Mul8 (Const8 [0]) _) -> (Const8 [0])
(Com8 (Com8 x)) -> x
(Com16 (Com16 x)) -> x
(Com32 (Com32 x)) -> x
(Com64 (Com64 x)) -> x
(Neg8 (Sub8 x y)) -> (Sub8 y x)
(Neg16 (Sub16 x y)) -> (Sub16 y x)
(Neg32 (Sub32 x y)) -> (Sub32 y x)
(Neg64 (Sub64 x y)) -> (Sub64 y x)

(Trunc64to8 (And64 (Const64 [y]) x)) && y&0xFF == 0xFF -> (Trunc64to8 x)
(Trunc64to16 (And64 (Const64 [y]) x)) && y&0xFFFF == 0xFFFF -> (Trunc64to16 x)
(Trunc64to32 (And64 (Const64 [y]) x)) && y&0xFFFFFFFF == 0xFFFFFFFF -> (Trunc64to32 x)
(Trunc32to8 (And32 (Const32 [y]) x)) && y&0xFF == 0xFF -> (Trunc32to8 x)
(Trunc32to16 (And32 (Const32 [y]) x)) && y&0xFFFF == 0xFFFF -> (Trunc32to16 x)
(Trunc16to8 (And16 (Const16 [y]) x)) && y&0xFF == 0xFF -> (Trunc16to8 x)

// Rewrite AND of consts as shifts if possible, slightly faster for 64 bit operands
// leading zeros can be shifted left, then right
(And64 <t> (Const64 [y]) x) && nlz(y) + nto(y) == 64 && nto(y) >= 32 -> (Rsh64Ux64 (Lsh64x64 <t> x (Const64 <t> [nlz(y)])) (Const64 <t> [nlz(y)]))
// trailing zeros can be shifted right, then left
(And64 <t> (Const64 [y]) x) && nlo(y) + ntz(y) == 64 && ntz(y) >= 32 -> (Lsh64x64 (Rsh64Ux64 <t> x (Const64 <t> [ntz(y)])) (Const64 <t> [ntz(y)]))

// simplifications often used for lengths. e.g. len(s[i:i+5])==5
(Sub64 (Add64 x y) x) -> y
(Sub64 (Add64 x y) y) -> x
(Sub32 (Add32 x y) x) -> y
(Sub32 (Add32 x y) y) -> x
(Sub16 (Add16 x y) x) -> y
(Sub16 (Add16 x y) y) -> x
(Sub8 (Add8 x y) x) -> y
(Sub8 (Add8 x y) y) -> x

// basic phi simplifications
(Phi (Const8 [c]) (Const8 [c])) -> (Const8 [c])
(Phi (Const16 [c]) (Const16 [c])) -> (Const16 [c])
(Phi (Const32 [c]) (Const32 [c])) -> (Const32 [c])
(Phi (Const64 [c]) (Const64 [c])) -> (Const64 [c])

// user nil checks
(NeqPtr p (ConstNil)) -> (IsNonNil p)
(NeqPtr (ConstNil) p) -> (IsNonNil p)
(EqPtr p (ConstNil)) -> (Not (IsNonNil p))
(EqPtr (ConstNil) p) -> (Not (IsNonNil p))

// slice and interface comparisons
// The frontend ensures that we can only compare against nil,
// so we need only compare the first word (interface type or slice ptr).
(EqInter x y) -> (EqPtr (ITab x) (ITab y))
(NeqInter x y) -> (NeqPtr (ITab x) (ITab y))
(EqSlice x y) -> (EqPtr (SlicePtr x) (SlicePtr y))
(NeqSlice x y) -> (NeqPtr (SlicePtr x) (SlicePtr y))

// Load of store of same address, with compatibly typed value and same size
(Load <t1> p1 (Store [w] p2 x _)) && isSamePtr(p1,p2) && t1.Compare(x.Type)==CMPeq && w == t1.Size() -> x

// Collapse OffPtr
(OffPtr (OffPtr p [b]) [a]) -> (OffPtr p [a+b])
(OffPtr p [0]) && v.Type.Compare(p.Type) == CMPeq -> p

// indexing operations
// Note: bounds check has already been done
(ArrayIndex <t> [0] x:(Load ptr mem)) -> @x.Block (Load <t> ptr mem)
(PtrIndex <t> ptr idx) && config.PtrSize == 4 -> (AddPtr ptr (Mul32 <config.fe.TypeInt()> idx (Const32 <config.fe.TypeInt()> [t.ElemType().Size()])))
(PtrIndex <t> ptr idx) && config.PtrSize == 8 -> (AddPtr ptr (Mul64 <config.fe.TypeInt()> idx (Const64 <config.fe.TypeInt()> [t.ElemType().Size()])))

// struct operations
(StructSelect (StructMake1 x)) -> x
(StructSelect [0] (StructMake2 x _)) -> x
(StructSelect [1] (StructMake2 _ x)) -> x
(StructSelect [0] (StructMake3 x _ _)) -> x
(StructSelect [1] (StructMake3 _ x _)) -> x
(StructSelect [2] (StructMake3 _ _ x)) -> x
(StructSelect [0] (StructMake4 x _ _ _)) -> x
(StructSelect [1] (StructMake4 _ x _ _)) -> x
(StructSelect [2] (StructMake4 _ _ x _)) -> x
(StructSelect [3] (StructMake4 _ _ _ x)) -> x

(Load <t> _ _) && t.IsStruct() && t.NumFields() == 0 && config.fe.CanSSA(t) ->
  (StructMake0)
(Load <t> ptr mem) && t.IsStruct() && t.NumFields() == 1 && config.fe.CanSSA(t) ->
  (StructMake1
    (Load <t.FieldType(0)> ptr mem))
(Load <t> ptr mem) && t.IsStruct() && t.NumFields() == 2 && config.fe.CanSSA(t) ->
  (StructMake2
    (Load <t.FieldType(0)> ptr mem)
    (Load <t.FieldType(1)> (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] ptr) mem))
(Load <t> ptr mem) && t.IsStruct() && t.NumFields() == 3 && config.fe.CanSSA(t) ->
  (StructMake3
    (Load <t.FieldType(0)> ptr mem)
    (Load <t.FieldType(1)> (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] ptr) mem)
    (Load <t.FieldType(2)> (OffPtr <t.FieldType(2).PtrTo()> [t.FieldOff(2)] ptr) mem))
(Load <t> ptr mem) && t.IsStruct() && t.NumFields() == 4 && config.fe.CanSSA(t) ->
  (StructMake4
    (Load <t.FieldType(0)> ptr mem)
    (Load <t.FieldType(1)> (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] ptr) mem)
    (Load <t.FieldType(2)> (OffPtr <t.FieldType(2).PtrTo()> [t.FieldOff(2)] ptr) mem)
    (Load <t.FieldType(3)> (OffPtr <t.FieldType(3).PtrTo()> [t.FieldOff(3)] ptr) mem))

(StructSelect [i] x:(Load <t> ptr mem)) && !config.fe.CanSSA(t) ->
  @x.Block (Load <v.Type> (OffPtr <v.Type.PtrTo()> [t.FieldOff(int(i))] ptr) mem)

(Store _ (StructMake0) mem) -> mem
(Store dst (StructMake1 <t> f0) mem) ->
  (Store [t.FieldType(0).Size()] dst f0 mem)
(Store dst (StructMake2 <t> f0 f1) mem) ->
  (Store [t.FieldType(1).Size()]
    (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] dst)
    f1
    (Store [t.FieldType(0).Size()] dst f0 mem))
(Store dst (StructMake3 <t> f0 f1 f2) mem) ->
  (Store [t.FieldType(2).Size()]
    (OffPtr <t.FieldType(2).PtrTo()> [t.FieldOff(2)] dst)
    f2
    (Store [t.FieldType(1).Size()]
      (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] dst)
      f1
      (Store [t.FieldType(0).Size()] dst f0 mem)))
(Store dst (StructMake4 <t> f0 f1 f2 f3) mem) ->
  (Store [t.FieldType(3).Size()]
    (OffPtr <t.FieldType(3).PtrTo()> [t.FieldOff(3)] dst)
    f3
    (Store [t.FieldType(2).Size()]
      (OffPtr <t.FieldType(2).PtrTo()> [t.FieldOff(2)] dst)
      f2
      (Store [t.FieldType(1).Size()]
        (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] dst)
        f1
        (Store [t.FieldType(0).Size()] dst f0 mem))))

// un-SSAable values use mem->mem copies
(Store [size] dst (Load <t> src mem) mem) && !config.fe.CanSSA(t) -> (Move [size] dst src mem)
(Store [size] dst (Load <t> src mem) (VarDef {x} mem)) && !config.fe.CanSSA(t) -> (Move [size] dst src (VarDef {x} mem))

// string ops
// Decomposing StringMake and lowering of StringPtr and StringLen
// happens in a later pass, dec, so that these operations are available
// to other passes for optimizations.
(StringPtr (StringMake (Const64 <t> [c]) _)) -> (Const64 <t> [c])
(StringLen (StringMake _ (Const64 <t> [c]))) -> (Const64 <t> [c])
(ConstString {s}) && config.PtrSize == 4 && s.(string) == "" ->
  (StringMake (ConstNil) (Const32 <config.fe.TypeInt()> [0]))
(ConstString {s}) && config.PtrSize == 8 && s.(string) == "" ->
  (StringMake (ConstNil) (Const64 <config.fe.TypeInt()> [0]))
(ConstString {s}) && config.PtrSize == 4 && s.(string) != "" ->
  (StringMake
    (Addr <config.fe.TypeBytePtr()> {config.fe.StringData(s.(string))}
      (SB))
    (Const32 <config.fe.TypeInt()> [int64(len(s.(string)))]))
(ConstString {s}) && config.PtrSize == 8 && s.(string) != "" ->
  (StringMake
    (Addr <config.fe.TypeBytePtr()> {config.fe.StringData(s.(string))}
      (SB))
    (Const64 <config.fe.TypeInt()> [int64(len(s.(string)))]))

// slice ops
// Only a few slice rules are provided here. See dec.rules for
// a more comprehensive set.
(SliceLen (SliceMake _ (Const64 <t> [c]) _)) -> (Const64 <t> [c])
(SliceCap (SliceMake _ _ (Const64 <t> [c]))) -> (Const64 <t> [c])
(SlicePtr (SliceMake (SlicePtr x) _ _)) -> (SlicePtr x)
(SliceLen (SliceMake _ (SliceLen x) _)) -> (SliceLen x)
(SliceCap (SliceMake _ _ (SliceCap x))) -> (SliceCap x)
(SliceCap (SliceMake _ _ (SliceLen x))) -> (SliceLen x)
(ConstSlice) && config.PtrSize == 4 ->
  (SliceMake
    (ConstNil <v.Type.ElemType().PtrTo()>)
    (Const32 <config.fe.TypeInt()> [0])
    (Const32 <config.fe.TypeInt()> [0]))
(ConstSlice) && config.PtrSize == 8 ->
  (SliceMake
    (ConstNil <v.Type.ElemType().PtrTo()>)
    (Const64 <config.fe.TypeInt()> [0])
    (Const64 <config.fe.TypeInt()> [0]))

// interface ops
(ConstInterface) ->
  (IMake
    (ConstNil <config.fe.TypeBytePtr()>)
    (ConstNil <config.fe.TypeBytePtr()>))

(Check (NilCheck (GetG _) _) next) -> (Plain nil next)

(If (Not cond) yes no) -> (If cond no yes)
(If (ConstBool [c]) yes no) && c == 1 -> (First nil yes no)
(If (ConstBool [c]) yes no) && c == 0 -> (First nil no yes)
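// Note (added for illustration): the If rules above are block rewrites in the format
// described at the top of this file. In (If (Not cond) yes no) -> (If cond no yes),
// If is the block kind, the control value is matched and unwrapped, and the generated
// successors no/yes are a permutation of the matched successors, as required.
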
// Get rid of Convert ops for pointer arithmetic on unsafe.Pointer.
(Convert (Add64 (Convert ptr mem) off) mem) -> (Add64 ptr off)
(Convert (Add64 off (Convert ptr mem)) mem) -> (Add64 ptr off)
(Convert (Convert ptr mem) mem) -> ptr

// Decompose compound argument values
(Arg {n} [off]) && v.Type.IsString() ->
  (StringMake
    (Arg <config.fe.TypeBytePtr()> {n} [off])
    (Arg <config.fe.TypeInt()> {n} [off+config.PtrSize]))

(Arg {n} [off]) && v.Type.IsSlice() ->
  (SliceMake
    (Arg <v.Type.ElemType().PtrTo()> {n} [off])
    (Arg <config.fe.TypeInt()> {n} [off+config.PtrSize])
    (Arg <config.fe.TypeInt()> {n} [off+2*config.PtrSize]))

(Arg {n} [off]) && v.Type.IsInterface() ->
  (IMake
    (Arg <config.fe.TypeBytePtr()> {n} [off])
    (Arg <config.fe.TypeBytePtr()> {n} [off+config.PtrSize]))

(Arg {n} [off]) && v.Type.IsComplex() && v.Type.Size() == 16 ->
  (ComplexMake
    (Arg <config.fe.TypeFloat64()> {n} [off])
    (Arg <config.fe.TypeFloat64()> {n} [off+8]))

(Arg {n} [off]) && v.Type.IsComplex() && v.Type.Size() == 8 ->
  (ComplexMake
    (Arg <config.fe.TypeFloat32()> {n} [off])
    (Arg <config.fe.TypeFloat32()> {n} [off+4]))

(Arg <t>) && t.IsStruct() && t.NumFields() == 0 && config.fe.CanSSA(t) ->
  (StructMake0)
(Arg <t> {n} [off]) && t.IsStruct() && t.NumFields() == 1 && config.fe.CanSSA(t) ->
  (StructMake1
    (Arg <t.FieldType(0)> {n} [off+t.FieldOff(0)]))
(Arg <t> {n} [off]) && t.IsStruct() && t.NumFields() == 2 && config.fe.CanSSA(t) ->
  (StructMake2
    (Arg <t.FieldType(0)> {n} [off+t.FieldOff(0)])
    (Arg <t.FieldType(1)> {n} [off+t.FieldOff(1)]))
(Arg <t> {n} [off]) && t.IsStruct() && t.NumFields() == 3 && config.fe.CanSSA(t) ->
  (StructMake3
    (Arg <t.FieldType(0)> {n} [off+t.FieldOff(0)])
    (Arg <t.FieldType(1)> {n} [off+t.FieldOff(1)])
    (Arg <t.FieldType(2)> {n} [off+t.FieldOff(2)]))
(Arg <t> {n} [off]) && t.IsStruct() && t.NumFields() == 4 && config.fe.CanSSA(t) ->
  (StructMake4
    (Arg <t.FieldType(0)> {n} [off+t.FieldOff(0)])
    (Arg <t.FieldType(1)> {n} [off+t.FieldOff(1)])
    (Arg <t.FieldType(2)> {n} [off+t.FieldOff(2)])
    (Arg <t.FieldType(3)> {n} [off+t.FieldOff(3)]))

// strength reduction of divide by a constant.
// Note: frontend does <=32 bits. We only need to do 64 bits here.
// TODO: Do them all here?

// Div/mod by 1. Currently handled by frontend.
//(Div64 n (Const64 [1])) -> n
//(Div64u n (Const64 [1])) -> n
//(Mod64 n (Const64 [1])) -> (Const64 [0])
//(Mod64u n (Const64 [1])) -> (Const64 [0])

// Unsigned divide by power of 2.
(Div64u <t> n (Const64 [c])) && isPowerOfTwo(c) -> (Rsh64Ux64 n (Const64 <t> [log2(c)]))
(Mod64u <t> n (Const64 [c])) && isPowerOfTwo(c) -> (And64 n (Const64 <t> [c-1]))

// Signed divide by power of 2. Currently handled by frontend.
// n / c = n >> log(c) if n >= 0
//       = (n+c-1) >> log(c) if n < 0
// We conditionally add c-1 by adding n>>63>>(64-log(c)) (first shift signed, second shift unsigned).
//(Div64 <t> n (Const64 [c])) && isPowerOfTwo(c) ->
//  (Rsh64x64
//    (Add64 <t>
//      n
//      (Rsh64Ux64 <t>
//        (Rsh64x64 <t> n (Const64 <t> [63]))
//        (Const64 <t> [64-log2(c)])))
//    (Const64 <t> [log2(c)]))
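// Worked example of the signed power-of-2 division above (added for illustration),
// with c = 4 and log2(c) = 2:
//   n = 7:  bias = 7>>63 = 0, 0>>62 = 0;   (7+0)>>2  = 1  = 7/4
//   n = -7: bias = -7>>63 = -1, then the unsigned shift -1>>62 = 3 = c-1;
//           (-7+3)>>2 = -1 = -7/4, matching Go's truncation toward zero.
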
// Unsigned divide, not a power of 2. Strength reduce to a multiply.
(Div64u <t> x (Const64 [c])) && umagic64ok(c) && !umagic64a(c) ->
  (Rsh64Ux64
    (Hmul64u <t>
      (Const64 <t> [umagic64m(c)])
      x)
    (Const64 <t> [umagic64s(c)]))
(Div64u <t> x (Const64 [c])) && umagic64ok(c) && umagic64a(c) ->
  (Rsh64Ux64
    (Avg64u <t>
      (Hmul64u <t>
        x
        (Const64 <t> [umagic64m(c)]))
      x)
    (Const64 <t> [umagic64s(c)-1]))

// Signed divide, not a power of 2. Strength reduce to a multiply.
(Div64 <t> x (Const64 [c])) && c > 0 && smagic64ok(c) && smagic64m(c) > 0 ->
  (Sub64 <t>
    (Rsh64x64 <t>
      (Hmul64 <t>
        (Const64 <t> [smagic64m(c)])
        x)
      (Const64 <t> [smagic64s(c)]))
    (Rsh64x64 <t>
      x
      (Const64 <t> [63])))
(Div64 <t> x (Const64 [c])) && c > 0 && smagic64ok(c) && smagic64m(c) < 0 ->
  (Sub64 <t>
    (Rsh64x64 <t>
      (Add64 <t>
        (Hmul64 <t>
          (Const64 <t> [smagic64m(c)])
          x)
        x)
      (Const64 <t> [smagic64s(c)]))
    (Rsh64x64 <t>
      x
      (Const64 <t> [63])))
(Div64 <t> x (Const64 [c])) && c < 0 && smagic64ok(c) && smagic64m(c) > 0 ->
  (Neg64 <t>
    (Sub64 <t>
      (Rsh64x64 <t>
        (Hmul64 <t>
          (Const64 <t> [smagic64m(c)])
          x)
        (Const64 <t> [smagic64s(c)]))
      (Rsh64x64 <t>
        x
        (Const64 <t> [63]))))
(Div64 <t> x (Const64 [c])) && c < 0 && smagic64ok(c) && smagic64m(c) < 0 ->
  (Neg64 <t>
    (Sub64 <t>
      (Rsh64x64 <t>
        (Add64 <t>
          (Hmul64 <t>
            (Const64 <t> [smagic64m(c)])
            x)
          x)
        (Const64 <t> [smagic64s(c)]))
      (Rsh64x64 <t>
        x
        (Const64 <t> [63]))))

// A%B = A-(A/B*B).
// This implements % with two * and a bunch of ancillary ops.
// One of the * is free if the user's code also computes A/B.
(Mod64 <t> x (Const64 [c])) && x.Op != OpConst64 && smagic64ok(c) -> (Sub64 x (Mul64 <t> (Div64 <t> x (Const64 <t> [c])) (Const64 <t> [c])))
(Mod64u <t> x (Const64 [c])) && x.Op != OpConst64 && umagic64ok(c) -> (Sub64 x (Mul64 <t> (Div64u <t> x (Const64 <t> [c])) (Const64 <t> [c])))
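// Worked example of the identity above (added for illustration): with A = 22 and B = 7,
// the rewrite computes 22 - (22/7)*7 = 22 - 3*7 = 1, which is exactly 22%7.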