github.com/gagliardetto/golang-go@v0.0.0-20201020153340-53909ea70814/cmd/compile/internal/ssa/gen/Wasm.rules (about)

     1  // Copyright 2018 The Go Authors. All rights reserved.
     2  // Use of this source code is governed by a BSD-style
     3  // license that can be found in the LICENSE file.
     4  
     5  // Lowering arithmetic
        // All integer values live in 64-bit locals, so one I64 op serves every
        // integer width; the unused high bits are ignored (see the truncation
        // rules below, which are plain copies for the same reason).
     6  (Add(64|32|16|8|Ptr) x y) -> (I64Add x y)
     7  (Add(64|32)F x y) -> (F(64|32)Add x y)
     8
     9  (Sub(64|32|16|8|Ptr) x y) -> (I64Sub x y)
    10  (Sub(64|32)F x y) -> (F(64|32)Sub x y)
    11
    12  (Mul(64|32|16|8) x y) -> (I64Mul x y)
    13  (Mul(64|32)F x y) -> (F(64|32)Mul x y)
    14
        // Division and remainder depend on the full operand value, so
        // sub-64-bit operands are sign- or zero-extended first.
    15  (Div64  x y) -> (I64DivS x y)
    16  (Div64u x y) -> (I64DivU x y)
    17  (Div32  x y) -> (I64DivS (SignExt32to64 x) (SignExt32to64 y))
    18  (Div32u x y) -> (I64DivU (ZeroExt32to64 x) (ZeroExt32to64 y))
    19  (Div16  x y) -> (I64DivS (SignExt16to64 x) (SignExt16to64 y))
    20  (Div16u x y) -> (I64DivU (ZeroExt16to64 x) (ZeroExt16to64 y))
    21  (Div8   x y) -> (I64DivS (SignExt8to64 x) (SignExt8to64 y))
    22  (Div8u  x y) -> (I64DivU (ZeroExt8to64 x) (ZeroExt8to64 y))
    23  (Div(64|32)F x y) -> (F(64|32)Div x y)
    24
    25  (Mod64  x y) -> (I64RemS x y)
    26  (Mod64u x y) -> (I64RemU x y)
    27  (Mod32  x y) -> (I64RemS (SignExt32to64 x) (SignExt32to64 y))
    28  (Mod32u x y) -> (I64RemU (ZeroExt32to64 x) (ZeroExt32to64 y))
    29  (Mod16  x y) -> (I64RemS (SignExt16to64 x) (SignExt16to64 y))
    30  (Mod16u x y) -> (I64RemU (ZeroExt16to64 x) (ZeroExt16to64 y))
    31  (Mod8   x y) -> (I64RemS (SignExt8to64  x) (SignExt8to64  y))
    32  (Mod8u  x y) -> (I64RemU (ZeroExt8to64  x) (ZeroExt8to64  y))
    33
    34  (And(64|32|16|8|B) x y) -> (I64And x y)
    35
    36  (Or(64|32|16|8|B) x y) -> (I64Or x y)
    37
    38  (Xor(64|32|16|8) x y) -> (I64Xor x y)
    39
        // Negation and bitwise complement are synthesized from sub/xor.
    40  (Neg(64|32|16|8) x) -> (I64Sub (I64Const [0]) x)
    41  (Neg(64|32)F x) -> (F(64|32)Neg x)
    42
    43  (Com(64|32|16|8) x) -> (I64Xor x (I64Const [-1]))
    44
        // Booleans are 0/1, so logical-not is equality-with-zero.
    45  (Not x) -> (I64Eqz x)
    46  
    47  // Lowering pointer arithmetic
    48  (OffPtr [off] ptr) -> (I64AddConst [off] ptr)
    49
    50  // Lowering extension
    51  // It is unnecessary to extend loads
        // (the sized loads below already produce a correctly extended value).
    52  (SignExt32to64        x:(I64Load32S _ _)) -> x
    53  (SignExt16to(64|32)   x:(I64Load16S _ _)) -> x
    54  (SignExt8to(64|32|16) x:(I64Load8S  _ _)) -> x
    55  (ZeroExt32to64        x:(I64Load32U _ _)) -> x
    56  (ZeroExt16to(64|32)   x:(I64Load16U _ _)) -> x
    57  (ZeroExt8to(64|32|16) x:(I64Load8U  _ _)) -> x
        // Prefer the dedicated sign-extension instructions when enabled
        // (objabi.GOWASM.SignExt); otherwise fall back to a shl/sar pair
        // for sign extension and a mask for zero extension.
    58  (SignExt32to64        x) && objabi.GOWASM.SignExt -> (I64Extend32S x)
    59  (SignExt8to(64|32|16) x) && objabi.GOWASM.SignExt -> (I64Extend8S x)
    60  (SignExt16to(64|32)   x) && objabi.GOWASM.SignExt -> (I64Extend16S x)
    61  (SignExt32to64        x) -> (I64ShrS (I64Shl x (I64Const [32])) (I64Const [32]))
    62  (SignExt16to(64|32)   x) -> (I64ShrS (I64Shl x (I64Const [48])) (I64Const [48]))
    63  (SignExt8to(64|32|16) x) -> (I64ShrS (I64Shl x (I64Const [56])) (I64Const [56]))
    64  (ZeroExt32to64        x) -> (I64And x (I64Const [0xffffffff]))
    65  (ZeroExt16to(64|32)   x) -> (I64And x (I64Const [0xffff]))
    66  (ZeroExt8to(64|32|16) x) -> (I64And x (I64Const [0xff]))
    67
        // (0 - x) is negative iff x > 0, so the arithmetic shift by 63
        // spreads that sign bit into an all-zeros or all-ones mask.
    68  (Slicemask x) -> (I64ShrS (I64Sub (I64Const [0]) x) (I64Const [63]))
    69
    70  // Lowering truncation
    71  // Because we ignore the high parts, truncates are just copies.
    72  (Trunc64to(32|16|8) x) -> x
    73  (Trunc32to(16|8)    x) -> x
    74  (Trunc16to8         x) -> x
    75  
    76  // Lowering float <-> int
        // 32-bit integer sources are extended to 64 bits before conversion.
    77  (Cvt32to(64|32)F x) -> (F(64|32)ConvertI64S (SignExt32to64 x))
    78  (Cvt64to(64|32)F x) -> (F(64|32)ConvertI64S x)
    79  (Cvt32Uto(64|32)F x) -> (F(64|32)ConvertI64U (ZeroExt32to64 x))
    80  (Cvt64Uto(64|32)F x) -> (F(64|32)ConvertI64U x)
    81
        // Float-to-int conversions use the saturating truncation forms, so
        // out-of-range inputs do not trap.
    82  (Cvt32Fto32 x) -> (I64TruncSatF32S x)
    83  (Cvt32Fto64 x) -> (I64TruncSatF32S x)
    84  (Cvt64Fto32 x) -> (I64TruncSatF64S x)
    85  (Cvt64Fto64 x) -> (I64TruncSatF64S x)
    86  (Cvt32Fto32U x) -> (I64TruncSatF32U x)
    87  (Cvt32Fto64U x) -> (I64TruncSatF32U x)
    88  (Cvt64Fto32U x) -> (I64TruncSatF64U x)
    89  (Cvt64Fto64U x) -> (I64TruncSatF64U x)
    90
    91  (Cvt32Fto64F x) -> (F64PromoteF32 x)
    92  (Cvt64Fto32F x) -> (F32DemoteF64 x)
    93
        // Round32F/Round64F are no-ops here.
    94  (Round32F x) -> x
    95  (Round64F x) -> x
    96  
    97  // Lowering shifts
    98  // Unsigned shifts need to return 0 if shift amount is >= width of shifted value.
    99
        // Fast paths first: the count is provably bounded, or a constant.
        // The general case selects 0 when the count is out of range.
   100  (Lsh64x64 x y) && shiftIsBounded(v) -> (I64Shl x y)
   101  (Lsh64x64 x (I64Const [c])) && uint64(c) < 64 -> (I64Shl x (I64Const [c]))
   102  (Lsh64x64 x (I64Const [c])) && uint64(c) >= 64 -> (I64Const [0])
   103  (Lsh64x64 x y) -> (Select (I64Shl x y) (I64Const [0]) (I64LtU y (I64Const [64])))
        // Narrower shift counts are zero-extended and funneled through the
        // 64-bit rules above.
   104  (Lsh64x(32|16|8) x y) -> (Lsh64x64 x (ZeroExt(32|16|8)to64 y))
   105
   106  (Lsh32x64 x y) -> (Lsh64x64 x y)
   107  (Lsh32x(32|16|8) x y) -> (Lsh64x64 x (ZeroExt(32|16|8)to64 y))
   108
   109  (Lsh16x64 x y) -> (Lsh64x64 x y)
   110  (Lsh16x(32|16|8) x y) -> (Lsh64x64 x (ZeroExt(32|16|8)to64 y))
   111
   112  (Lsh8x64  x y) -> (Lsh64x64 x y)
   113  (Lsh8x(32|16|8)  x y) -> (Lsh64x64 x (ZeroExt(32|16|8)to64 y))
   114
   115  (Rsh64Ux64 x y) && shiftIsBounded(v) -> (I64ShrU x y)
   116  (Rsh64Ux64 x (I64Const [c])) && uint64(c) < 64 -> (I64ShrU x (I64Const [c]))
   117  (Rsh64Ux64 x (I64Const [c])) && uint64(c) >= 64 -> (I64Const [0])
   118  (Rsh64Ux64 x y) -> (Select (I64ShrU x y) (I64Const [0]) (I64LtU y (I64Const [64])))
   119  (Rsh64Ux(32|16|8) x y) -> (Rsh64Ux64 x (ZeroExt(32|16|8)to64 y))
   120
        // Narrower unsigned sources are zero-extended so the high bits do not
        // leak into the shifted result.
   121  (Rsh32Ux64 x y) -> (Rsh64Ux64 (ZeroExt32to64 x) y)
   122  (Rsh32Ux(32|16|8) x y) -> (Rsh64Ux64 (ZeroExt32to64 x) (ZeroExt(32|16|8)to64 y))
   123
   124  (Rsh16Ux64 x y) -> (Rsh64Ux64 (ZeroExt16to64 x) y)
   125  (Rsh16Ux(32|16|8) x y) -> (Rsh64Ux64 (ZeroExt16to64 x) (ZeroExt(32|16|8)to64 y))
   126
   127  (Rsh8Ux64  x y) -> (Rsh64Ux64 (ZeroExt8to64 x) y)
   128  (Rsh8Ux(32|16|8)  x y) -> (Rsh64Ux64 (ZeroExt8to64 x) (ZeroExt(32|16|8)to64 y))
   129
   130  // Signed right shift needs to return 0/-1 if shift amount is >= width of shifted value.
   131  // We implement this by setting the shift value to (width - 1) if the shift value is >= width.
   132
   133  (Rsh64x64 x y) && shiftIsBounded(v) -> (I64ShrS x y)
   134  (Rsh64x64 x (I64Const [c])) && uint64(c) < 64 -> (I64ShrS x (I64Const [c]))
   135  (Rsh64x64 x (I64Const [c])) && uint64(c) >= 64 -> (I64ShrS x (I64Const [63]))
        // General case: clamp the count to 63 when it is out of range.
   136  (Rsh64x64 x y) -> (I64ShrS x (Select <typ.Int64> y (I64Const [63]) (I64LtU y (I64Const [64]))))
   137  (Rsh64x(32|16|8) x y) -> (Rsh64x64 x (ZeroExt(32|16|8)to64 y))
   138
        // Narrower signed sources are sign-extended so the sign bit is in
        // position 63 before shifting.
   139  (Rsh32x64 x y) -> (Rsh64x64 (SignExt32to64 x) y)
   140  (Rsh32x(32|16|8) x y) -> (Rsh64x64 (SignExt32to64 x) (ZeroExt(32|16|8)to64 y))
   141
   142  (Rsh16x64 x y) -> (Rsh64x64 (SignExt16to64 x) y)
   143  (Rsh16x(32|16|8) x y) -> (Rsh64x64 (SignExt16to64 x) (ZeroExt(32|16|8)to64 y))
   144
   145  (Rsh8x64 x y)  -> (Rsh64x64 (SignExt8to64 x) y)
   146  (Rsh8x(32|16|8) x y)  -> (Rsh64x64 (SignExt8to64 x) (ZeroExt(32|16|8)to64 y))
   147
   148  // Lowering rotates
        // 8/16-bit rotates are only lowered for constant counts (as a
        // shift-and-or pair); 32/64-bit rotates map to native rotl ops.
   149  (RotateLeft8 <t> x (I64Const [c])) -> (Or8 (Lsh8x64 <t> x (I64Const [c&7])) (Rsh8Ux64 <t> x (I64Const [-c&7])))
   150  (RotateLeft16 <t> x (I64Const [c])) -> (Or16 (Lsh16x64 <t> x (I64Const [c&15])) (Rsh16Ux64 <t> x (I64Const [-c&15])))
   151  (RotateLeft32 x y) -> (I32Rotl x y)
   152  (RotateLeft64 x y) -> (I64Rotl x y)
   153  
   154  // Lowering comparisons
        // Ordered compares must see the full value: sub-64-bit operands are
        // sign-extended for signed compares and zero-extended for unsigned.
   155  (Less64  x y) -> (I64LtS x y)
   156  (Less32  x y) -> (I64LtS (SignExt32to64 x) (SignExt32to64 y))
   157  (Less16  x y) -> (I64LtS (SignExt16to64 x) (SignExt16to64 y))
   158  (Less8   x y) -> (I64LtS (SignExt8to64  x) (SignExt8to64  y))
   159  (Less64U x y) -> (I64LtU x y)
   160  (Less32U x y) -> (I64LtU (ZeroExt32to64 x) (ZeroExt32to64 y))
   161  (Less16U x y) -> (I64LtU (ZeroExt16to64 x) (ZeroExt16to64 y))
   162  (Less8U  x y) -> (I64LtU (ZeroExt8to64  x) (ZeroExt8to64  y))
   163  (Less(64|32)F x y) -> (F(64|32)Lt x y)
   164
   165  (Leq64  x y) -> (I64LeS x y)
   166  (Leq32  x y) -> (I64LeS (SignExt32to64 x) (SignExt32to64 y))
   167  (Leq16  x y) -> (I64LeS (SignExt16to64 x) (SignExt16to64 y))
   168  (Leq8   x y) -> (I64LeS (SignExt8to64  x) (SignExt8to64  y))
   169  (Leq64U x y) -> (I64LeU x y)
   170  (Leq32U x y) -> (I64LeU (ZeroExt32to64 x) (ZeroExt32to64 y))
   171  (Leq16U x y) -> (I64LeU (ZeroExt16to64 x) (ZeroExt16to64 y))
   172  (Leq8U  x y) -> (I64LeU (ZeroExt8to64  x) (ZeroExt8to64  y))
   173  (Leq(64|32)F x y) -> (F(64|32)Le x y)
   174
   175  (Greater64  x y) -> (I64GtS x y)
   176  (Greater32  x y) -> (I64GtS (SignExt32to64 x) (SignExt32to64 y))
   177  (Greater16  x y) -> (I64GtS (SignExt16to64 x) (SignExt16to64 y))
   178  (Greater8   x y) -> (I64GtS (SignExt8to64  x) (SignExt8to64  y))
   179  (Greater64U x y) -> (I64GtU x y)
   180  (Greater32U x y) -> (I64GtU (ZeroExt32to64 x) (ZeroExt32to64 y))
   181  (Greater16U x y) -> (I64GtU (ZeroExt16to64 x) (ZeroExt16to64 y))
   182  (Greater8U  x y) -> (I64GtU (ZeroExt8to64  x) (ZeroExt8to64  y))
   183  (Greater(64|32)F x y) -> (F(64|32)Gt x y)
   184
   185  (Geq64  x y) -> (I64GeS x y)
   186  (Geq32  x y) -> (I64GeS (SignExt32to64 x) (SignExt32to64 y))
   187  (Geq16  x y) -> (I64GeS (SignExt16to64 x) (SignExt16to64 y))
   188  (Geq8   x y) -> (I64GeS (SignExt8to64  x) (SignExt8to64  y))
   189  (Geq64U x y) -> (I64GeU x y)
   190  (Geq32U x y) -> (I64GeU (ZeroExt32to64 x) (ZeroExt32to64 y))
   191  (Geq16U x y) -> (I64GeU (ZeroExt16to64 x) (ZeroExt16to64 y))
   192  (Geq8U  x y) -> (I64GeU (ZeroExt8to64  x) (ZeroExt8to64  y))
   193  (Geq(64|32)F x y) -> (F(64|32)Ge x y)
   194
        // Equality has no signedness; sub-64-bit operands are zero-extended.
   195  (Eq64  x y) -> (I64Eq x y)
   196  (Eq32  x y) -> (I64Eq (ZeroExt32to64 x) (ZeroExt32to64 y))
   197  (Eq16  x y) -> (I64Eq (ZeroExt16to64 x) (ZeroExt16to64 y))
   198  (Eq8   x y) -> (I64Eq (ZeroExt8to64  x) (ZeroExt8to64  y))
   199  (EqB   x y) -> (I64Eq x y)
   200  (EqPtr x y) -> (I64Eq x y)
   201  (Eq(64|32)F x y) -> (F(64|32)Eq x y)
   202
   203  (Neq64  x y) -> (I64Ne x y)
   204  (Neq32  x y) -> (I64Ne (ZeroExt32to64 x) (ZeroExt32to64 y))
   205  (Neq16  x y) -> (I64Ne (ZeroExt16to64 x) (ZeroExt16to64 y))
   206  (Neq8   x y) -> (I64Ne (ZeroExt8to64  x) (ZeroExt8to64  y))
   207  (NeqB   x y) -> (I64Ne x y)
   208  (NeqPtr x y) -> (I64Ne x y)
   209  (Neq(64|32)F x y) -> (F(64|32)Ne x y)
   210  
   211  // Lowering loads
        // Dispatch on the type: floats to F32/F64 loads; integers by size,
        // with sub-64-bit loads choosing the signed or unsigned variant so
        // the loaded value is already correctly extended.
   212  (Load <t> ptr mem) && is32BitFloat(t) -> (F32Load ptr mem)
   213  (Load <t> ptr mem) && is64BitFloat(t) -> (F64Load ptr mem)
   214  (Load <t> ptr mem) && t.Size() == 8 -> (I64Load ptr mem)
   215  (Load <t> ptr mem) && t.Size() == 4 && !t.IsSigned() -> (I64Load32U ptr mem)
   216  (Load <t> ptr mem) && t.Size() == 4 &&  t.IsSigned() -> (I64Load32S ptr mem)
   217  (Load <t> ptr mem) && t.Size() == 2 && !t.IsSigned() -> (I64Load16U ptr mem)
   218  (Load <t> ptr mem) && t.Size() == 2 &&  t.IsSigned() -> (I64Load16S ptr mem)
   219  (Load <t> ptr mem) && t.Size() == 1 && !t.IsSigned() -> (I64Load8U ptr mem)
   220  (Load <t> ptr mem) && t.Size() == 1 &&  t.IsSigned() -> (I64Load8S ptr mem)
   221
   222  // Lowering stores
        // Stores need only the size; signedness is irrelevant when writing.
   223  (Store {t} ptr val mem) && is64BitFloat(t.(*types.Type)) -> (F64Store ptr val mem)
   224  (Store {t} ptr val mem) && is32BitFloat(t.(*types.Type)) -> (F32Store ptr val mem)
   225  (Store {t} ptr val mem) && t.(*types.Type).Size() == 8 -> (I64Store ptr val mem)
   226  (Store {t} ptr val mem) && t.(*types.Type).Size() == 4 -> (I64Store32 ptr val mem)
   227  (Store {t} ptr val mem) && t.(*types.Type).Size() == 2 -> (I64Store16 ptr val mem)
   228  (Store {t} ptr val mem) && t.(*types.Type).Size() == 1 -> (I64Store8 ptr val mem)
   229  
   230  // Lowering moves
        // Small fixed sizes become one or two load/store pairs. Both loads
        // use the original mem, so they read the pre-store memory state.
   231  (Move [0] _ _ mem) -> mem
   232  (Move [1] dst src mem) -> (I64Store8 dst (I64Load8U src mem) mem)
   233  (Move [2] dst src mem) -> (I64Store16 dst (I64Load16U src mem) mem)
   234  (Move [4] dst src mem) -> (I64Store32 dst (I64Load32U src mem) mem)
   235  (Move [8] dst src mem) -> (I64Store dst (I64Load src mem) mem)
   236  (Move [16] dst src mem) ->
   237  	(I64Store [8] dst (I64Load [8] src mem)
   238  		(I64Store dst (I64Load src mem) mem))
        // Sizes 3/5/6 use two adjacent accesses; size 7 uses two 4-byte
        // accesses that overlap at byte 3, which is harmless for a copy.
   239  (Move [3] dst src mem) ->
   240  	(I64Store8 [2] dst (I64Load8U [2] src mem)
   241  		(I64Store16 dst (I64Load16U src mem) mem))
   242  (Move [5] dst src mem) ->
   243  	(I64Store8 [4] dst (I64Load8U [4] src mem)
   244  		(I64Store32 dst (I64Load32U src mem) mem))
   245  (Move [6] dst src mem) ->
   246  	(I64Store16 [4] dst (I64Load16U [4] src mem)
   247  		(I64Store32 dst (I64Load32U src mem) mem))
   248  (Move [7] dst src mem) ->
   249  	(I64Store32 [3] dst (I64Load32U [3] src mem)
   250  		(I64Store32 dst (I64Load32U src mem) mem))
        // 9..15 bytes: two possibly-overlapping 8-byte accesses.
   251  (Move [s] dst src mem) && s > 8 && s < 16 ->
   252  	(I64Store [s-8] dst (I64Load [s-8] src mem)
   253  		(I64Store dst (I64Load src mem) mem))
   254
   255  // Adjust moves to be a multiple of 16 bytes.
        // Copy the ragged head (up to 16 bytes) eagerly, then recurse on the
        // remaining 16-byte-multiple tail via advanced OffPtrs.
   256  (Move [s] dst src mem)
   257  	&& s > 16 && s%16 != 0 && s%16 <= 8 ->
   258  	(Move [s-s%16]
   259  		(OffPtr <dst.Type> dst [s%16])
   260  		(OffPtr <src.Type> src [s%16])
   261  		(I64Store dst (I64Load src mem) mem))
   262  (Move [s] dst src mem)
   263  	&& s > 16 && s%16 != 0 && s%16 > 8 ->
   264  	(Move [s-s%16]
   265  		(OffPtr <dst.Type> dst [s%16])
   266  		(OffPtr <src.Type> src [s%16])
   267  		(I64Store [8] dst (I64Load [8] src mem)
   268  			(I64Store dst (I64Load src mem) mem)))
   269
   270  // Large copying uses helper.
        // By this point s is a multiple of 8; the aux value is a word count.
   271  (Move [s] dst src mem) && s%8 == 0 ->
   272  	(LoweredMove [s/8] dst src mem)
   273  
   274  // Lowering Zero instructions
        // Small fixed sizes store zero constants directly.
   275  (Zero [0] _ mem) -> mem
   276  (Zero [1] destptr mem) -> (I64Store8 destptr (I64Const [0]) mem)
   277  (Zero [2] destptr mem) -> (I64Store16 destptr (I64Const [0]) mem)
   278  (Zero [4] destptr mem) -> (I64Store32 destptr (I64Const [0]) mem)
   279  (Zero [8] destptr mem) -> (I64Store destptr (I64Const [0]) mem)
   280
        // Sizes 3/5/6 use two adjacent stores; size 7 uses two 4-byte stores
        // overlapping at byte 3 (harmless when writing zero), as in Move.
   281  (Zero [3] destptr mem) ->
   282  	(I64Store8 [2] destptr (I64Const [0])
   283  		(I64Store16 destptr (I64Const [0]) mem))
   284  (Zero [5] destptr mem) ->
   285  	(I64Store8 [4] destptr (I64Const [0])
   286  		(I64Store32 destptr (I64Const [0]) mem))
   287  (Zero [6] destptr mem) ->
   288  	(I64Store16 [4] destptr (I64Const [0])
   289  		(I64Store32 destptr (I64Const [0]) mem))
   290  (Zero [7] destptr mem) ->
   291  	(I64Store32 [3] destptr (I64Const [0])
   292  		(I64Store32 destptr (I64Const [0]) mem))
   293
   294  // Strip off any fractional word zeroing.
        // Zero the ragged head, then recurse on the 8-byte-multiple remainder.
   295  (Zero [s] destptr mem) && s%8 != 0 && s > 8 ->
   296  	(Zero [s-s%8] (OffPtr <destptr.Type> destptr [s%8])
   297  		(I64Store destptr (I64Const [0]) mem))
   298
   299  // Zero small numbers of words directly.
   300  (Zero [16] destptr mem) ->
   301  	(I64Store [8] destptr (I64Const [0])
   302  		(I64Store destptr (I64Const [0]) mem))
   303  (Zero [24] destptr mem) ->
   304  	(I64Store [16] destptr (I64Const [0])
   305  		(I64Store [8] destptr (I64Const [0])
   306  			(I64Store destptr (I64Const [0]) mem)))
   307  (Zero [32] destptr mem) ->
   308  	(I64Store [24] destptr (I64Const [0])
   309  		(I64Store [16] destptr (I64Const [0])
   310  			(I64Store [8] destptr (I64Const [0])
   311  				(I64Store destptr (I64Const [0]) mem))))
   312
   313  // Large zeroing uses helper.
        // The aux value passed to LoweredZero is a word count, not bytes.
   314  (Zero [s] destptr mem) && s%8 == 0 && s > 32 ->
   315  	(LoweredZero [s/8] destptr mem)
   316  
   317  // Lowering constants
        // All integer constants (and nil/bool) become I64Const.
   318  (Const(64|32|16|8) [val]) -> (I64Const [val])
   319  (Const(64|32)F [val]) -> (F(64|32)Const [val])
   320  (ConstNil) -> (I64Const [0])
   321  (ConstBool [b]) -> (I64Const [b])
   322
   323  // Lowering calls
   324  (StaticCall [argwid] {target} mem) -> (LoweredStaticCall [argwid] {target} mem)
   325  (ClosureCall [argwid] entry closure mem) -> (LoweredClosureCall [argwid] entry closure mem)
   326  (InterCall [argwid] entry mem) -> (LoweredInterCall [argwid] entry mem)
   327
   328  // Miscellaneous
   329  (Convert <t> x mem) -> (LoweredConvert <t> x mem)
        // Double I64Eqz normalizes any non-zero value to exactly 1.
   330  (IsNonNil p) -> (I64Eqz (I64Eqz p))
        // Unsigned compare covers negative indices as well (they wrap large).
   331  (IsInBounds idx len) -> (I64LtU idx len)
   332  (IsSliceInBounds idx len) -> (I64LeU idx len)
   333  (NilCheck ptr mem) -> (LoweredNilCheck ptr mem)
   334  (GetClosurePtr) -> (LoweredGetClosurePtr)
   335  (GetCallerPC) -> (LoweredGetCallerPC)
   336  (GetCallerSP) -> (LoweredGetCallerSP)
   337  (Addr {sym} base) -> (LoweredAddr {sym} base)
   338  (LocalAddr {sym} base _) -> (LoweredAddr {sym} base)
   339
   340  // Write barrier.
   341  (WB {fn} destptr srcptr mem) -> (LoweredWB {fn} destptr srcptr mem)
   342  
   343  // --- Intrinsics ---
        // Float math intrinsics map 1:1 onto F64 instructions.
   344  (Sqrt x) -> (F64Sqrt x)
   345  (Trunc x) -> (F64Trunc x)
   346  (Ceil x) -> (F64Ceil x)
   347  (Floor x) -> (F64Floor x)
   348  (RoundToEven x) -> (F64Nearest x)
   349  (Abs x) -> (F64Abs x)
   350  (Copysign x y) -> (F64Copysign x y)
   351
        // For narrow Ctz, OR in a bit just above the operand's width so the
        // 64-bit count is capped at the width when the input is zero.
   352  (Ctz64 x) -> (I64Ctz x)
   353  (Ctz32 x) -> (I64Ctz (I64Or x (I64Const [0x100000000])))
   354  (Ctz16 x) -> (I64Ctz (I64Or x (I64Const [0x10000])))
   355  (Ctz8  x) -> (I64Ctz (I64Or x (I64Const [0x100])))
   356
        // The NonZero variants may skip the guard bit.
   357  (Ctz(64|32|16|8)NonZero x) -> (I64Ctz x)
   358
   359  (BitLen64 x) -> (I64Sub (I64Const [64]) (I64Clz x))
   360
        // Popcount of narrow values zero-extends so stale high bits are not counted.
   361  (PopCount64 x) -> (I64Popcnt x)
   362  (PopCount32 x) -> (I64Popcnt (ZeroExt32to64 x))
   363  (PopCount16 x) -> (I64Popcnt (ZeroExt16to64 x))
   364  (PopCount8  x) -> (I64Popcnt (ZeroExt8to64  x))
   365
   366  (CondSelect <t> x y cond) -> (Select <t> x y cond)
   367  
   368  // --- Optimizations ---
        // Constant folding.
   369  (I64Add (I64Const [x]) (I64Const [y])) -> (I64Const [x + y])
   370  (I64Mul (I64Const [x]) (I64Const [y])) -> (I64Const [x * y])
   371  (I64And (I64Const [x]) (I64Const [y])) -> (I64Const [x & y])
   372  (I64Or  (I64Const [x]) (I64Const [y])) -> (I64Const [x | y])
   373  (I64Xor (I64Const [x]) (I64Const [y])) -> (I64Const [x ^ y])
   374  (F64Add (F64Const [x]) (F64Const [y])) -> (F64Const [auxFrom64F(auxTo64F(x) + auxTo64F(y))])
   375  (F64Mul (F64Const [x]) (F64Const [y])) -> (F64Const [auxFrom64F(auxTo64F(x) * auxTo64F(y))])
   376  (I64Eq  (I64Const [x]) (I64Const [y])) && x == y -> (I64Const [1])
   377  (I64Eq  (I64Const [x]) (I64Const [y])) && x != y -> (I64Const [0])
   378  (I64Ne  (I64Const [x]) (I64Const [y])) && x == y -> (I64Const [0])
   379  (I64Ne  (I64Const [x]) (I64Const [y])) && x != y -> (I64Const [1])
   380
   381  (I64Shl (I64Const [x]) (I64Const [y])) -> (I64Const [x << uint64(y)])
   382  (I64ShrU (I64Const [x]) (I64Const [y])) -> (I64Const [int64(uint64(x) >> uint64(y))])
   383  (I64ShrS (I64Const [x]) (I64Const [y])) -> (I64Const [x >> uint64(y)])
   384
        // Canonicalize: move a lone constant operand to the second position
        // so the following rules only need to match one orientation.
   385  (I64Add (I64Const [x]) y) -> (I64Add y (I64Const [x]))
   386  (I64Mul (I64Const [x]) y) -> (I64Mul y (I64Const [x]))
   387  (I64And (I64Const [x]) y) -> (I64And y (I64Const [x]))
   388  (I64Or  (I64Const [x]) y) -> (I64Or  y (I64Const [x]))
   389  (I64Xor (I64Const [x]) y) -> (I64Xor y (I64Const [x]))
   390  (F64Add (F64Const [x]) y) -> (F64Add y (F64Const [x]))
   391  (F64Mul (F64Const [x]) y) -> (F64Mul y (F64Const [x]))
   392  (I64Eq  (I64Const [x]) y) -> (I64Eq y  (I64Const [x]))
   393  (I64Ne  (I64Const [x]) y) -> (I64Ne y  (I64Const [x]))
   394
   395  (I64Eq x (I64Const [0])) -> (I64Eqz x)
   396  (I64Ne x (I64Const [0])) -> (I64Eqz (I64Eqz x))
   397
   398  (I64Add x (I64Const [y])) -> (I64AddConst [y] x)
   399  (I64AddConst [0] x) -> x
        // Collapse a triple Eqz: the inner pair is a no-op on a 0/1 value.
   400  (I64Eqz (I64Eqz (I64Eqz x))) -> (I64Eqz x)
   401
   402  // folding offset into load/store
        // The combined offset must fit the 32-bit unsigned offset field.
   403  ((I64Load|I64Load32U|I64Load32S|I64Load16U|I64Load16S|I64Load8U|I64Load8S) [off] (I64AddConst [off2] ptr) mem)
   404  	&& isU32Bit(off+off2) ->
   405  	((I64Load|I64Load32U|I64Load32S|I64Load16U|I64Load16S|I64Load8U|I64Load8S) [off+off2] ptr mem)
   406
   407  ((I64Store|I64Store32|I64Store16|I64Store8) [off] (I64AddConst [off2] ptr) val mem)
   408  	&& isU32Bit(off+off2) ->
   409  	((I64Store|I64Store32|I64Store16|I64Store8) [off+off2] ptr val mem)
   410
   411  // folding offset into address
   412  (I64AddConst [off] (LoweredAddr {sym} [off2] base)) && isU32Bit(off+off2) ->
   413  	(LoweredAddr {sym} [off+off2] base)
   414
   415  // transforming readonly globals into constants
        // Loads from read-only symbols are folded to constants by reading the
        // symbol's data at compile time (endianness taken from config).
   416  (I64Load [off] (LoweredAddr {sym} [off2] (SB)) _) && symIsRO(sym) && isU32Bit(off+off2) -> (I64Const [int64(read64(sym, off+off2, config.BigEndian))])
   417  (I64Load32U [off] (LoweredAddr {sym} [off2] (SB)) _) && symIsRO(sym) && isU32Bit(off+off2) -> (I64Const [int64(read32(sym, off+off2, config.BigEndian))])
   418  (I64Load16U [off] (LoweredAddr {sym} [off2] (SB)) _) && symIsRO(sym) && isU32Bit(off+off2) -> (I64Const [int64(read16(sym, off+off2, config.BigEndian))])
   419  (I64Load8U [off] (LoweredAddr {sym} [off2] (SB)) _) && symIsRO(sym) && isU32Bit(off+off2) -> (I64Const [int64(read8(sym, off+off2))])