// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// Lowering arithmetic
(Add(64|32|16|8|Ptr) x y) -> (I64Add x y)
(Add(64|32)F x y) -> (F64Add x y)

(Sub(64|32|16|8|Ptr) x y) -> (I64Sub x y)
(Sub(64|32)F x y) -> (F64Sub x y)

(Mul(64|32|16|8) x y) -> (I64Mul x y)
(Mul(64|32)F x y) -> (F64Mul x y)

// Narrow division/modulus must extend operands to 64 bits first:
// the high bits are otherwise junk and would affect the result.
(Div64 x y) -> (I64DivS x y)
(Div64u x y) -> (I64DivU x y)
(Div32 x y) -> (I64DivS (SignExt32to64 x) (SignExt32to64 y))
(Div32u x y) -> (I64DivU (ZeroExt32to64 x) (ZeroExt32to64 y))
(Div16 x y) -> (I64DivS (SignExt16to64 x) (SignExt16to64 y))
(Div16u x y) -> (I64DivU (ZeroExt16to64 x) (ZeroExt16to64 y))
(Div8 x y) -> (I64DivS (SignExt8to64 x) (SignExt8to64 y))
(Div8u x y) -> (I64DivU (ZeroExt8to64 x) (ZeroExt8to64 y))
(Div(64|32)F x y) -> (F64Div x y)

(Mod64 x y) -> (I64RemS x y)
(Mod64u x y) -> (I64RemU x y)
(Mod32 x y) -> (I64RemS (SignExt32to64 x) (SignExt32to64 y))
(Mod32u x y) -> (I64RemU (ZeroExt32to64 x) (ZeroExt32to64 y))
(Mod16 x y) -> (I64RemS (SignExt16to64 x) (SignExt16to64 y))
(Mod16u x y) -> (I64RemU (ZeroExt16to64 x) (ZeroExt16to64 y))
(Mod8 x y) -> (I64RemS (SignExt8to64 x) (SignExt8to64 y))
(Mod8u x y) -> (I64RemU (ZeroExt8to64 x) (ZeroExt8to64 y))

(And(64|32|16|8|B) x y) -> (I64And x y)

(Or(64|32|16|8|B) x y) -> (I64Or x y)

(Xor(64|32|16|8) x y) -> (I64Xor x y)

(Neg(64|32|16|8) x) -> (I64Sub (I64Const [0]) x)
(Neg32F x) -> (F64Neg x)
(Neg64F x) -> (F64Neg x)

(Com(64|32|16|8) x) -> (I64Xor x (I64Const [-1]))

(Not x) -> (I64Eqz x)

// Lowering pointer arithmetic
(OffPtr [off] ptr) -> (I64AddConst [off] ptr)

// Lowering extension
// It is unnecessary to extend loads
(SignExt32to64 x:(I64Load32S _ _)) -> x
(SignExt16to(64|32) x:(I64Load16S _ _)) -> x
(SignExt8to(64|32|16) x:(I64Load8S _ _)) -> x
(ZeroExt32to64 x:(I64Load32U _ _)) -> x
(ZeroExt16to(64|32) x:(I64Load16U _ _)) -> x
(ZeroExt8to(64|32|16) x:(I64Load8U _ _)) -> x
(SignExt32to64 x) -> (I64ShrS (I64Shl x (I64Const [32])) (I64Const [32]))
(SignExt16to(64|32) x) -> (I64ShrS (I64Shl x (I64Const [48])) (I64Const [48]))
(SignExt8to(64|32|16) x) -> (I64ShrS (I64Shl x (I64Const [56])) (I64Const [56]))
(ZeroExt32to64 x) -> (I64ShrU (I64Shl x (I64Const [32])) (I64Const [32]))
(ZeroExt16to(64|32) x) -> (I64ShrU (I64Shl x (I64Const [48])) (I64Const [48]))
(ZeroExt8to(64|32|16) x) -> (I64ShrU (I64Shl x (I64Const [56])) (I64Const [56]))

(Slicemask x) -> (I64ShrS (I64Sub (I64Const [0]) x) (I64Const [63]))

// Lowering truncation
// Because we ignore the high parts, truncates are just copies.
(Trunc64to(32|16|8) x) -> x
(Trunc32to(16|8) x) -> x
(Trunc16to8 x) -> x

// Lowering float <-> int
(Cvt32to32F x) -> (LoweredRound32F (F64ConvertSI64 (SignExt32to64 x)))
(Cvt32to64F x) -> (F64ConvertSI64 (SignExt32to64 x))
(Cvt64to32F x) -> (LoweredRound32F (F64ConvertSI64 x))
(Cvt64to64F x) -> (F64ConvertSI64 x)
(Cvt32Uto32F x) -> (LoweredRound32F (F64ConvertUI64 (ZeroExt32to64 x)))
(Cvt32Uto64F x) -> (F64ConvertUI64 (ZeroExt32to64 x))
(Cvt64Uto32F x) -> (LoweredRound32F (F64ConvertUI64 x))
(Cvt64Uto64F x) -> (F64ConvertUI64 x)

(Cvt32Fto32 x) -> (I64TruncSF64 x)
(Cvt32Fto64 x) -> (I64TruncSF64 x)
(Cvt64Fto32 x) -> (I64TruncSF64 x)
(Cvt64Fto64 x) -> (I64TruncSF64 x)
(Cvt32Fto32U x) -> (I64TruncUF64 x)
(Cvt32Fto64U x) -> (I64TruncUF64 x)
(Cvt64Fto32U x) -> (I64TruncUF64 x)
(Cvt64Fto64U x) -> (I64TruncUF64 x)

(Cvt32Fto64F x) -> x
(Cvt64Fto32F x) -> (LoweredRound32F x)

(Round32F x) -> (LoweredRound32F x)
(Round64F x) -> x

// Lowering shifts
// Unsigned shifts need to return 0 if shift amount is >= width of shifted value.

(Lsh64x64 x y) -> (Select (I64Shl x y) (I64Const [0]) (I64LtU y (I64Const [64])))
(Lsh64x32 x y) -> (Lsh64x64 x (ZeroExt32to64 y))
(Lsh64x16 x y) -> (Lsh64x64 x (ZeroExt16to64 y))
(Lsh64x8 x y) -> (Lsh64x64 x (ZeroExt8to64 y))

(Lsh32x64 x y) -> (Lsh64x64 x y)
(Lsh32x32 x y) -> (Lsh64x64 x (ZeroExt32to64 y))
(Lsh32x16 x y) -> (Lsh64x64 x (ZeroExt16to64 y))
(Lsh32x8 x y) -> (Lsh64x64 x (ZeroExt8to64 y))

(Lsh16x64 x y) -> (Lsh64x64 x y)
(Lsh16x32 x y) -> (Lsh64x64 x (ZeroExt32to64 y))
(Lsh16x16 x y) -> (Lsh64x64 x (ZeroExt16to64 y))
(Lsh16x8 x y) -> (Lsh64x64 x (ZeroExt8to64 y))

(Lsh8x64 x y) -> (Lsh64x64 x y)
(Lsh8x32 x y) -> (Lsh64x64 x (ZeroExt32to64 y))
(Lsh8x16 x y) -> (Lsh64x64 x (ZeroExt16to64 y))
(Lsh8x8 x y) -> (Lsh64x64 x (ZeroExt8to64 y))

(Rsh64Ux64 x y) -> (Select (I64ShrU x y) (I64Const [0]) (I64LtU y (I64Const [64])))
(Rsh64Ux32 x y) -> (Rsh64Ux64 x (ZeroExt32to64 y))
(Rsh64Ux16 x y) -> (Rsh64Ux64 x (ZeroExt16to64 y))
(Rsh64Ux8 x y) -> (Rsh64Ux64 x (ZeroExt8to64 y))

(Rsh32Ux64 x y) -> (Rsh64Ux64 (ZeroExt32to64 x) y)
(Rsh32Ux32 x y) -> (Rsh64Ux64 (ZeroExt32to64 x) (ZeroExt32to64 y))
(Rsh32Ux16 x y) -> (Rsh64Ux64 (ZeroExt32to64 x) (ZeroExt16to64 y))
(Rsh32Ux8 x y) -> (Rsh64Ux64 (ZeroExt32to64 x) (ZeroExt8to64 y))

(Rsh16Ux64 x y) -> (Rsh64Ux64 (ZeroExt16to64 x) y)
(Rsh16Ux32 x y) -> (Rsh64Ux64 (ZeroExt16to64 x) (ZeroExt32to64 y))
(Rsh16Ux16 x y) -> (Rsh64Ux64 (ZeroExt16to64 x) (ZeroExt16to64 y))
(Rsh16Ux8 x y) -> (Rsh64Ux64 (ZeroExt16to64 x) (ZeroExt8to64 y))

(Rsh8Ux64 x y) -> (Rsh64Ux64 (ZeroExt8to64 x) y)
(Rsh8Ux32 x y) -> (Rsh64Ux64 (ZeroExt8to64 x) (ZeroExt32to64 y))
(Rsh8Ux16 x y) -> (Rsh64Ux64 (ZeroExt8to64 x) (ZeroExt16to64 y))
(Rsh8Ux8 x y) -> (Rsh64Ux64 (ZeroExt8to64 x) (ZeroExt8to64 y))

// Signed right shift needs to return 0/-1 if shift amount is >= width of shifted value.
// We implement this by setting the shift value to (width - 1) if the shift value is >= width.

(Rsh64x64 x y) -> (I64ShrS x (Select <typ.Int64> y (I64Const [63]) (I64LtU y (I64Const [64]))))
(Rsh64x32 x y) -> (Rsh64x64 x (ZeroExt32to64 y))
(Rsh64x16 x y) -> (Rsh64x64 x (ZeroExt16to64 y))
(Rsh64x8 x y) -> (Rsh64x64 x (ZeroExt8to64 y))

(Rsh32x64 x y) -> (Rsh64x64 (SignExt32to64 x) y)
(Rsh32x32 x y) -> (Rsh64x64 (SignExt32to64 x) (ZeroExt32to64 y))
(Rsh32x16 x y) -> (Rsh64x64 (SignExt32to64 x) (ZeroExt16to64 y))
(Rsh32x8 x y) -> (Rsh64x64 (SignExt32to64 x) (ZeroExt8to64 y))

(Rsh16x64 x y) -> (Rsh64x64 (SignExt16to64 x) y)
(Rsh16x32 x y) -> (Rsh64x64 (SignExt16to64 x) (ZeroExt32to64 y))
(Rsh16x16 x y) -> (Rsh64x64 (SignExt16to64 x) (ZeroExt16to64 y))
(Rsh16x8 x y) -> (Rsh64x64 (SignExt16to64 x) (ZeroExt8to64 y))

(Rsh8x64 x y) -> (Rsh64x64 (SignExt8to64 x) y)
(Rsh8x32 x y) -> (Rsh64x64 (SignExt8to64 x) (ZeroExt32to64 y))
(Rsh8x16 x y) -> (Rsh64x64 (SignExt8to64 x) (ZeroExt16to64 y))
(Rsh8x8 x y) -> (Rsh64x64 (SignExt8to64 x) (ZeroExt8to64 y))

// Lowering comparisons
(Less64 x y) -> (I64LtS x y)
(Less32 x y) -> (I64LtS (SignExt32to64 x) (SignExt32to64 y))
(Less16 x y) -> (I64LtS (SignExt16to64 x) (SignExt16to64 y))
(Less8 x y) -> (I64LtS (SignExt8to64 x) (SignExt8to64 y))
(Less64U x y) -> (I64LtU x y)
(Less32U x y) -> (I64LtU (ZeroExt32to64 x) (ZeroExt32to64 y))
(Less16U x y) -> (I64LtU (ZeroExt16to64 x) (ZeroExt16to64 y))
(Less8U x y) -> (I64LtU (ZeroExt8to64 x) (ZeroExt8to64 y))
(Less64F x y) -> (F64Lt x y)
(Less32F x y) -> (F64Lt (LoweredRound32F x) (LoweredRound32F y))

(Leq64 x y) -> (I64LeS x y)
(Leq32 x y) -> (I64LeS (SignExt32to64 x) (SignExt32to64 y))
(Leq16 x y) -> (I64LeS (SignExt16to64 x) (SignExt16to64 y))
(Leq8 x y) -> (I64LeS (SignExt8to64 x) (SignExt8to64 y))
(Leq64U x y) -> (I64LeU x y)
(Leq32U x y) -> (I64LeU (ZeroExt32to64 x) (ZeroExt32to64 y))
(Leq16U x y) -> (I64LeU (ZeroExt16to64 x) (ZeroExt16to64 y))
(Leq8U x y) -> (I64LeU (ZeroExt8to64 x) (ZeroExt8to64 y))
(Leq64F x y) -> (F64Le x y)
(Leq32F x y) -> (F64Le (LoweredRound32F x) (LoweredRound32F y))

(Greater64 x y) -> (I64GtS x y)
(Greater32 x y) -> (I64GtS (SignExt32to64 x) (SignExt32to64 y))
(Greater16 x y) -> (I64GtS (SignExt16to64 x) (SignExt16to64 y))
(Greater8 x y) -> (I64GtS (SignExt8to64 x) (SignExt8to64 y))
(Greater64U x y) -> (I64GtU x y)
(Greater32U x y) -> (I64GtU (ZeroExt32to64 x) (ZeroExt32to64 y))
(Greater16U x y) -> (I64GtU (ZeroExt16to64 x) (ZeroExt16to64 y))
(Greater8U x y) -> (I64GtU (ZeroExt8to64 x) (ZeroExt8to64 y))
(Greater64F x y) -> (F64Gt x y)
(Greater32F x y) -> (F64Gt (LoweredRound32F x) (LoweredRound32F y))

(Geq64 x y) -> (I64GeS x y)
(Geq32 x y) -> (I64GeS (SignExt32to64 x) (SignExt32to64 y))
(Geq16 x y) -> (I64GeS (SignExt16to64 x) (SignExt16to64 y))
(Geq8 x y) -> (I64GeS (SignExt8to64 x) (SignExt8to64 y))
(Geq64U x y) -> (I64GeU x y)
(Geq32U x y) -> (I64GeU (ZeroExt32to64 x) (ZeroExt32to64 y))
(Geq16U x y) -> (I64GeU (ZeroExt16to64 x) (ZeroExt16to64 y))
(Geq8U x y) -> (I64GeU (ZeroExt8to64 x) (ZeroExt8to64 y))
(Geq64F x y) -> (F64Ge x y)
(Geq32F x y) -> (F64Ge (LoweredRound32F x) (LoweredRound32F y))

// Equality may zero-extend (rather than sign-extend) the narrow operands:
// equal values stay equal under either extension.
(Eq64 x y) -> (I64Eq x y)
(Eq32 x y) -> (I64Eq (ZeroExt32to64 x) (ZeroExt32to64 y))
(Eq16 x y) -> (I64Eq (ZeroExt16to64 x) (ZeroExt16to64 y))
(Eq8 x y) -> (I64Eq (ZeroExt8to64 x) (ZeroExt8to64 y))
(EqB x y) -> (I64Eq x y)
(EqPtr x y) -> (I64Eq x y)
(Eq64F x y) -> (F64Eq x y)
(Eq32F x y) -> (F64Eq (LoweredRound32F x) (LoweredRound32F y))

(Neq64 x y) -> (I64Ne x y)
(Neq32 x y) -> (I64Ne (ZeroExt32to64 x) (ZeroExt32to64 y))
(Neq16 x y) -> (I64Ne (ZeroExt16to64 x) (ZeroExt16to64 y))
(Neq8 x y) -> (I64Ne (ZeroExt8to64 x) (ZeroExt8to64 y))
(NeqB x y) -> (I64Ne x y)
(NeqPtr x y) -> (I64Ne x y)
(Neq64F x y) -> (F64Ne x y)
(Neq32F x y) -> (F64Ne (LoweredRound32F x) (LoweredRound32F y))

// Lowering loads
(Load <t> ptr mem) && is32BitFloat(t) -> (F32Load ptr mem)
(Load <t> ptr mem) && is64BitFloat(t) -> (F64Load ptr mem)
(Load <t> ptr mem) && t.Size() == 8 -> (I64Load ptr mem)
(Load <t> ptr mem) && t.Size() == 4 && !t.IsSigned() -> (I64Load32U ptr mem)
(Load <t> ptr mem) && t.Size() == 4 && t.IsSigned() -> (I64Load32S ptr mem)
(Load <t> ptr mem) && t.Size() == 2 && !t.IsSigned() -> (I64Load16U ptr mem)
(Load <t> ptr mem) && t.Size() == 2 && t.IsSigned() -> (I64Load16S ptr mem)
(Load <t> ptr mem) && t.Size() == 1 && !t.IsSigned() -> (I64Load8U ptr mem)
(Load <t> ptr mem) && t.Size() == 1 && t.IsSigned() -> (I64Load8S ptr mem)

// Lowering stores
(Store {t} ptr val mem) && is64BitFloat(t.(*types.Type)) -> (F64Store ptr val mem)
(Store {t} ptr val mem) && is32BitFloat(t.(*types.Type)) -> (F32Store ptr val mem)
(Store {t} ptr val mem) && t.(*types.Type).Size() == 8 -> (I64Store ptr val mem)
(Store {t} ptr val mem) && t.(*types.Type).Size() == 4 -> (I64Store32 ptr val mem)
(Store {t} ptr val mem) && t.(*types.Type).Size() == 2 -> (I64Store16 ptr val mem)
(Store {t} ptr val mem) && t.(*types.Type).Size() == 1 -> (I64Store8 ptr val mem)

// Lowering moves
(Move [0] _ _ mem) -> mem
(Move [1] dst src mem) -> (I64Store8 dst (I64Load8U src mem) mem)
(Move [2] dst src mem) -> (I64Store16 dst (I64Load16U src mem) mem)
(Move [4] dst src mem) -> (I64Store32 dst (I64Load32U src mem) mem)
(Move [8] dst src mem) -> (I64Store dst (I64Load src mem) mem)
(Move [16] dst src mem) ->
	(I64Store [8] dst (I64Load [8] src mem)
		(I64Store dst (I64Load src mem) mem))
(Move [3] dst src mem) ->
	(I64Store8 [2] dst (I64Load8U [2] src mem)
		(I64Store16 dst (I64Load16U src mem) mem))
(Move [5] dst src mem) ->
	(I64Store8 [4] dst (I64Load8U [4] src mem)
		(I64Store32 dst (I64Load32U src mem) mem))
(Move [6] dst src mem) ->
	(I64Store16 [4] dst (I64Load16U [4] src mem)
		(I64Store32 dst (I64Load32U src mem) mem))
(Move [7] dst src mem) ->
	(I64Store32 [3] dst (I64Load32U [3] src mem)
		(I64Store32 dst (I64Load32U src mem) mem))
(Move [s] dst src mem) && s > 8 && s < 16 ->
	(I64Store [s-8] dst (I64Load [s-8] src mem)
		(I64Store dst (I64Load src mem) mem))

// Adjust moves to be a multiple of 16 bytes.
(Move [s] dst src mem)
	&& s > 16 && s%16 != 0 && s%16 <= 8 ->
	(Move [s-s%16]
		(OffPtr <dst.Type> dst [s%16])
		(OffPtr <src.Type> src [s%16])
		(I64Store dst (I64Load src mem) mem))
(Move [s] dst src mem)
	&& s > 16 && s%16 != 0 && s%16 > 8 ->
	(Move [s-s%16]
		(OffPtr <dst.Type> dst [s%16])
		(OffPtr <src.Type> src [s%16])
		(I64Store [8] dst (I64Load [8] src mem)
			(I64Store dst (I64Load src mem) mem)))

// Large copying uses helper.
(Move [s] dst src mem) && s%8 == 0 ->
	(LoweredMove [s/8] dst src mem)

// Lowering Zero instructions
(Zero [0] _ mem) -> mem
(Zero [1] destptr mem) -> (I64Store8 destptr (I64Const [0]) mem)
(Zero [2] destptr mem) -> (I64Store16 destptr (I64Const [0]) mem)
(Zero [4] destptr mem) -> (I64Store32 destptr (I64Const [0]) mem)
(Zero [8] destptr mem) -> (I64Store destptr (I64Const [0]) mem)

(Zero [3] destptr mem) ->
	(I64Store8 [2] destptr (I64Const [0])
		(I64Store16 destptr (I64Const [0]) mem))
(Zero [5] destptr mem) ->
	(I64Store8 [4] destptr (I64Const [0])
		(I64Store32 destptr (I64Const [0]) mem))
(Zero [6] destptr mem) ->
	(I64Store16 [4] destptr (I64Const [0])
		(I64Store32 destptr (I64Const [0]) mem))
(Zero [7] destptr mem) ->
	(I64Store32 [3] destptr (I64Const [0])
		(I64Store32 destptr (I64Const [0]) mem))

// Strip off any fractional word zeroing.
(Zero [s] destptr mem) && s%8 != 0 && s > 8 ->
	(Zero [s-s%8] (OffPtr <destptr.Type> destptr [s%8])
		(I64Store destptr (I64Const [0]) mem))

// Zero small numbers of words directly.
(Zero [16] destptr mem) ->
	(I64Store [8] destptr (I64Const [0])
		(I64Store destptr (I64Const [0]) mem))
(Zero [24] destptr mem) ->
	(I64Store [16] destptr (I64Const [0])
		(I64Store [8] destptr (I64Const [0])
			(I64Store destptr (I64Const [0]) mem)))
(Zero [32] destptr mem) ->
	(I64Store [24] destptr (I64Const [0])
		(I64Store [16] destptr (I64Const [0])
			(I64Store [8] destptr (I64Const [0])
				(I64Store destptr (I64Const [0]) mem))))

// Large zeroing uses helper.
(Zero [s] destptr mem) && s%8 == 0 && s > 32 ->
	(LoweredZero [s/8] destptr mem)

// Lowering constants
(Const(64|32|16|8) [val]) -> (I64Const [val])
(Const(64|32)F [val]) -> (F64Const [val])
(ConstNil) -> (I64Const [0])
(ConstBool [b]) -> (I64Const [b])

// Lowering calls
(StaticCall [argwid] {target} mem) -> (LoweredStaticCall [argwid] {target} mem)
(ClosureCall [argwid] entry closure mem) -> (LoweredClosureCall [argwid] entry closure mem)
(InterCall [argwid] entry mem) -> (LoweredInterCall [argwid] entry mem)

// Miscellaneous
(Convert <t> x mem) -> (LoweredConvert <t> x mem)
(IsNonNil p) -> (I64Eqz (I64Eqz p))
(IsInBounds idx len) -> (I64LtU idx len)
(IsSliceInBounds idx len) -> (I64LeU idx len)
(NilCheck ptr mem) -> (LoweredNilCheck ptr mem)
(GetClosurePtr) -> (LoweredGetClosurePtr)
(GetCallerPC) -> (LoweredGetCallerPC)
(GetCallerSP) -> (LoweredGetCallerSP)
(Addr {sym} base) -> (LoweredAddr {sym} base)
(LocalAddr {sym} base _) -> (LoweredAddr {sym} base)

// Write barrier.
(WB {fn} destptr srcptr mem) -> (LoweredWB {fn} destptr srcptr mem)

// --- Optimizations ---

// Constant folding.
(I64Add (I64Const [x]) (I64Const [y])) -> (I64Const [x + y])
(I64Mul (I64Const [x]) (I64Const [y])) -> (I64Const [x * y])
(I64And (I64Const [x]) (I64Const [y])) -> (I64Const [x & y])
(I64Or (I64Const [x]) (I64Const [y])) -> (I64Const [x | y])
(I64Xor (I64Const [x]) (I64Const [y])) -> (I64Const [x ^ y])
(F64Add (F64Const [x]) (F64Const [y])) -> (F64Const [auxFrom64F(auxTo64F(x) + auxTo64F(y))])
(F64Mul (F64Const [x]) (F64Const [y])) -> (F64Const [auxFrom64F(auxTo64F(x) * auxTo64F(y))])
(I64Eq (I64Const [x]) (I64Const [y])) && x == y -> (I64Const [1])
(I64Eq (I64Const [x]) (I64Const [y])) && x != y -> (I64Const [0])
(I64Ne (I64Const [x]) (I64Const [y])) && x == y -> (I64Const [0])
(I64Ne (I64Const [x]) (I64Const [y])) && x != y -> (I64Const [1])

(I64Shl (I64Const [x]) (I64Const [y])) -> (I64Const [x << uint64(y)])
(I64ShrU (I64Const [x]) (I64Const [y])) -> (I64Const [int64(uint64(x) >> uint64(y))])
(I64ShrS (I64Const [x]) (I64Const [y])) -> (I64Const [x >> uint64(y)])

// Canonicalize: put the constant operand second for commutative ops.
(I64Add (I64Const [x]) y) -> (I64Add y (I64Const [x]))
(I64Mul (I64Const [x]) y) -> (I64Mul y (I64Const [x]))
(I64And (I64Const [x]) y) -> (I64And y (I64Const [x]))
(I64Or (I64Const [x]) y) -> (I64Or y (I64Const [x]))
(I64Xor (I64Const [x]) y) -> (I64Xor y (I64Const [x]))
(F64Add (F64Const [x]) y) -> (F64Add y (F64Const [x]))
(F64Mul (F64Const [x]) y) -> (F64Mul y (F64Const [x]))
(I64Eq (I64Const [x]) y) -> (I64Eq y (I64Const [x]))
(I64Ne (I64Const [x]) y) -> (I64Ne y (I64Const [x]))

(I64Eq x (I64Const [0])) -> (I64Eqz x)
(I64Ne x (I64Const [0])) -> (I64Eqz (I64Eqz x))

(I64Add x (I64Const [y])) -> (I64AddConst [y] x)
(I64AddConst [0] x) -> x
(I64Eqz (I64Eqz (I64Eqz x))) -> (I64Eqz x)

// folding offset into load/store
((I64Load|I64Load32U|I64Load32S|I64Load16U|I64Load16S|I64Load8U|I64Load8S) [off] (I64AddConst [off2] ptr) mem)
	&& isU32Bit(off+off2) ->
	((I64Load|I64Load32U|I64Load32S|I64Load16U|I64Load16S|I64Load8U|I64Load8S) [off+off2] ptr mem)

((I64Store|I64Store32|I64Store16|I64Store8) [off] (I64AddConst [off2] ptr) val mem)
	&& isU32Bit(off+off2) ->
	((I64Store|I64Store32|I64Store16|I64Store8) [off+off2] ptr val mem)

// folding offset into address
(I64AddConst [off] (LoweredAddr {sym} [off2] base)) && isU32Bit(off+off2) ->
	(LoweredAddr {sym} [off+off2] base)