// Copyright 2015 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package ssa

import (
	"cmd/compile/internal/types"
	"cmd/internal/src"
	"testing"
)

// TestLiveControlOps builds a function in which two flag-producing values
// ("a" and "b", both OpAMD64TESTB) are computed in the entry block, but "b"
// is not consumed until the later "if" block — so a flags value must stay
// live across a block boundary while another flags value controls a branch.
// It then runs flagalloc and regalloc over the function and relies on
// checkFunc to validate the result (the test passes if no invariant is
// violated).
func TestLiveControlOps(t *testing.T) {
	c := testConfig(t)
	f := c.Fun("entry",
		Bloc("entry",
			Valu("mem", OpInitMem, types.TypeMem, 0, nil),
			Valu("x", OpAMD64MOVLconst, c.config.Types.Int8, 1, nil),
			Valu("y", OpAMD64MOVLconst, c.config.Types.Int8, 2, nil),
			// Two flag values; "a" controls the entry branch, "b" is
			// kept live into the "if" block below.
			Valu("a", OpAMD64TESTB, types.TypeFlags, 0, nil, "x", "y"),
			Valu("b", OpAMD64TESTB, types.TypeFlags, 0, nil, "y", "x"),
			Eq("a", "if", "exit"),
		),
		Bloc("if",
			Eq("b", "plain", "exit"),
		),
		Bloc("plain",
			Goto("exit"),
		),
		Bloc("exit",
			Exit("mem"),
		),
	)
	flagalloc(f.f)
	regalloc(f.f)
	checkFunc(f.f)
}

// Test to make sure we don't push spills into loops.
// See issue #19595.
// TestSpillWithLoop builds a function where "ld" (a load in the entry block)
// needs a spill because its value is clobbered by the call inside the "loop"
// block, yet "ld" is still needed in "exit". After regalloc it asserts that
// no OpStoreReg (spill) was placed inside the loop body.
func TestSpillWithLoop(t *testing.T) {
	c := testConfig(t)
	f := c.Fun("entry",
		Bloc("entry",
			Valu("mem", OpInitMem, types.TypeMem, 0, nil),
			Valu("ptr", OpArg, c.config.Types.Int64.PtrTo(), 0, c.Frontend().Auto(src.NoXPos, c.config.Types.Int64)),
			Valu("cond", OpArg, c.config.Types.Bool, 0, c.Frontend().Auto(src.NoXPos, c.config.Types.Bool)),
			Valu("ld", OpAMD64MOVQload, c.config.Types.Int64, 0, nil, "ptr", "mem"), // this value needs a spill
			Goto("loop"),
		),
		Bloc("loop",
			Valu("memphi", OpPhi, types.TypeMem, 0, nil, "mem", "call"),
			// The call clobbers registers, forcing "ld" to be spilled
			// somewhere; the assertion below requires "somewhere" to be
			// outside this loop.
			Valu("call", OpAMD64CALLstatic, types.TypeMem, 0, nil, "memphi"),
			Valu("test", OpAMD64CMPBconst, types.TypeFlags, 0, nil, "cond"),
			Eq("test", "next", "exit"),
		),
		Bloc("next",
			Goto("loop"),
		),
		Bloc("exit",
			// "ld" is used here, after the loop's call.
			Valu("store", OpAMD64MOVQstore, types.TypeMem, 0, nil, "ptr", "ld", "call"),
			Exit("store"),
		),
	)
	regalloc(f.f)
	checkFunc(f.f)
	for _, v := range f.blocks["loop"].Values {
		if v.Op == OpStoreReg {
			t.Errorf("spill inside loop %s", v.LongString())
		}
	}
}

// TestSpillMove1 checks spill placement when "y" (computed in loop1) is used
// in both exits, but only exit2 needs it reloaded after a call. The spill of
// "y" should be sunk out of the loop into exit2, the one block that actually
// needs it, and appear nowhere else.
func TestSpillMove1(t *testing.T) {
	c := testConfig(t)
	f := c.Fun("entry",
		Bloc("entry",
			Valu("mem", OpInitMem, types.TypeMem, 0, nil),
			Valu("x", OpArg, c.config.Types.Int64, 0, c.Frontend().Auto(src.NoXPos, c.config.Types.Int64)),
			Valu("p", OpArg, c.config.Types.Int64.PtrTo(), 0, c.Frontend().Auto(src.NoXPos, c.config.Types.Int64.PtrTo())),
			Valu("a", OpAMD64TESTQ, types.TypeFlags, 0, nil, "x", "x"),
			Goto("loop1"),
		),
		Bloc("loop1",
			Valu("y", OpAMD64MULQ, c.config.Types.Int64, 0, nil, "x", "x"),
			Eq("a", "loop2", "exit1"),
		),
		Bloc("loop2",
			Eq("a", "loop1", "exit2"),
		),
		Bloc("exit1",
			// store before call, y is available in a register
			Valu("mem2", OpAMD64MOVQstore, types.TypeMem, 0, nil, "p", "y", "mem"),
			Valu("mem3", OpAMD64CALLstatic, types.TypeMem, 0, nil, "mem2"),
			Exit("mem3"),
		),
		Bloc("exit2",
			// store after call, y must be loaded from a spill location
			Valu("mem4", OpAMD64CALLstatic, types.TypeMem, 0, nil, "mem"),
			Valu("mem5", OpAMD64MOVQstore, types.TypeMem, 0, nil, "p", "y", "mem4"),
			Exit("mem5"),
		),
	)
	flagalloc(f.f)
	regalloc(f.f)
	checkFunc(f.f)
	// Spill should be moved to exit2.
	if numSpills(f.blocks["loop1"]) != 0 {
		t.Errorf("spill present from loop1")
	}
	if numSpills(f.blocks["loop2"]) != 0 {
		t.Errorf("spill present in loop2")
	}
	if numSpills(f.blocks["exit1"]) != 0 {
		t.Errorf("spill present in exit1")
	}
	if numSpills(f.blocks["exit2"]) != 1 {
		t.Errorf("spill missing in exit2")
	}

}

// TestSpillMove2 is the counterpart of TestSpillMove1: here BOTH exits need
// "y" reloaded after a call, so the spill cannot be sunk into a single exit.
// The expected placement is a single spill in loop1 (where "y" is defined)
// and none elsewhere.
func TestSpillMove2(t *testing.T) {
	c := testConfig(t)
	f := c.Fun("entry",
		Bloc("entry",
			Valu("mem", OpInitMem, types.TypeMem, 0, nil),
			Valu("x", OpArg, c.config.Types.Int64, 0, c.Frontend().Auto(src.NoXPos, c.config.Types.Int64)),
			Valu("p", OpArg, c.config.Types.Int64.PtrTo(), 0, c.Frontend().Auto(src.NoXPos, c.config.Types.Int64.PtrTo())),
			Valu("a", OpAMD64TESTQ, types.TypeFlags, 0, nil, "x", "x"),
			Goto("loop1"),
		),
		Bloc("loop1",
			Valu("y", OpAMD64MULQ, c.config.Types.Int64, 0, nil, "x", "x"),
			Eq("a", "loop2", "exit1"),
		),
		Bloc("loop2",
			Eq("a", "loop1", "exit2"),
		),
		Bloc("exit1",
			// store after call, y must be loaded from a spill location
			Valu("mem2", OpAMD64CALLstatic, types.TypeMem, 0, nil, "mem"),
			Valu("mem3", OpAMD64MOVQstore, types.TypeMem, 0, nil, "p", "y", "mem2"),
			Exit("mem3"),
		),
		Bloc("exit2",
			// store after call, y must be loaded from a spill location
			Valu("mem4", OpAMD64CALLstatic, types.TypeMem, 0, nil, "mem"),
			Valu("mem5", OpAMD64MOVQstore, types.TypeMem, 0, nil, "p", "y", "mem4"),
			Exit("mem5"),
		),
	)
	flagalloc(f.f)
	regalloc(f.f)
	checkFunc(f.f)
	// There should be a spill in loop1, and nowhere else.
	// TODO: resurrect moving spills out of loops?
	// We could put spills at the start of both exit1 and exit2.
	if numSpills(f.blocks["loop1"]) != 1 {
		t.Errorf("spill missing from loop1")
	}
	if numSpills(f.blocks["loop2"]) != 0 {
		t.Errorf("spill present in loop2")
	}
	if numSpills(f.blocks["exit1"]) != 0 {
		t.Errorf("spill present in exit1")
	}
	if numSpills(f.blocks["exit2"]) != 0 {
		t.Errorf("spill present in exit2")
	}

}

// numSpills returns the number of spill (OpStoreReg) values in block b.
func numSpills(b *Block) int {
	n := 0
	for _, v := range b.Values {
		if v.Op == OpStoreReg {
			n++
		}
	}
	return n
}