// Copyright 2014 Elliott Stoneham and The TARDIS Go Authors
// Use of this source code is governed by an MIT-style
// license that can be found in the LICENSE file.

package haxe

import (
	"fmt"
	"go/token"
	"go/types"
	"sort"
	"strings"

	"golang.org/x/tools/go/ssa"
)

// phiEntry pairs a target register name with the Haxe expression that should
// be assigned to it on a particular predecessor-block edge of a phi node.
// An empty reg means "value not needed" (optimised away or unreferenced).
type phiEntry struct{ reg, val string }

// PeepholeOpt implements the optimisations spotted by pogo.peephole.
// It emits Haxe source text (returned as a string) for a short run of SSA
// instructions `code` that the peephole pass has matched as pattern `opt`:
//   - "pointerChain": a chain of IndexAddr/FieldAddr address calculations,
//     collapsed into a single base-pointer + summed-offset expression;
//   - "loadObject": a pointer load (UnOp) followed by Index/Field extraction,
//     collapsed into one .get…() call on the pointer's object;
//   - "phiList": currently disabled (commented out below).
// `register` is the Haxe register receiving the final value; `errorInfo`
// is positional context passed through to IndirectValue for diagnostics.
func (l langType) PeepholeOpt(opt, register string, code []ssa.Instruction, errorInfo string) string {
	ret := ""
	switch opt {
	case "pointerChain":
		// Emit the matched SSA instructions as comments for debugging.
		for _, cod := range code {
			switch cod.(type) {
			case *ssa.IndexAddr:
				ret += fmt.Sprintf("// %s=%s\n", cod.(*ssa.IndexAddr).Name(), cod.String())
			case *ssa.FieldAddr:
				ret += fmt.Sprintf("// %s=%s\n", cod.(*ssa.FieldAddr).Name(), cod.String())
			}
		}
		// The base pointer is the X operand of the first instruction in the chain.
		basePointer := ""
		switch code[0].(type) {
		case *ssa.IndexAddr:
			basePointer = l.IndirectValue(code[0].(*ssa.IndexAddr).X, errorInfo)
		case *ssa.FieldAddr:
			basePointer = l.IndirectValue(code[0].(*ssa.FieldAddr).X, errorInfo)
		default:
			panic(fmt.Errorf("unexpected type %T", code[0]))
		}
		// Build the summed offset expression: one "+"-joined term per link.
		chainGang := ""
		for c, cod := range code {
			if c > 0 {
				chainGang += "+"
			}
			switch cod.(type) {
			case *ssa.IndexAddr:
				// Array element: (index * element-size), index coerced to unsigned.
				idxString := wrapForceToUInt(l.IndirectValue(cod.(*ssa.IndexAddr).Index, errorInfo),
					cod.(*ssa.IndexAddr).Index.(ssa.Value).Type().Underlying().(*types.Basic).Kind())
				ele := cod.(*ssa.IndexAddr).X.Type().Underlying().(*types.Pointer).Elem().Underlying().(*types.Array).Elem().Underlying()
				chainGang += "(" + idxString + arrayOffsetCalc(ele) + ")"
			case *ssa.FieldAddr:
				// Struct field: a compile-time constant byte offset.
				off := fieldOffset(cod.(*ssa.FieldAddr).X.Type().Underlying().(*types.Pointer).Elem().Underlying().(*types.Struct), cod.(*ssa.FieldAddr).Field)
				chainGang += fmt.Sprintf(`%d`, off)
			}
		}
		if l.is1usePtr(code[len(code)-1]) {
			// Single-use pointer: no Haxe Pointer object is materialised; the
			// obj/off pair is recorded virtually in map1usePtr for the consumer.
			ret += l.set1usePtr(code[len(code)-1].(ssa.Value), oneUsePtr{obj: basePointer + ".obj", off: chainGang + "+" + basePointer + ".off"})
			ret += "// virtual oneUsePtr " + register + "=" + l.hc.map1usePtr[code[len(code)-1].(ssa.Value)].obj + ":" + l.hc.map1usePtr[code[len(code)-1].(ssa.Value)].off
			ret += " PEEPHOLE OPTIMIZATION pointerChain\n"
		} else {
			// Otherwise emit a real pointer expression, optionally wrapped in
			// Pointer.check() when the compiler is in debug mode.
			ret += register + "="
			if l.PogoComp().DebugFlag {
				ret += "Pointer.check("
			}
			ret += basePointer
			if l.PogoComp().DebugFlag {
				ret += ")"
			}
			ret += ".addr(" + chainGang
			ret += "); // PEEPHOLE OPTIMIZATION pointerChain\n"
		}

	case "loadObject":
		// code[0] is the pointer load (*ssa.UnOp); code[1:] are Index/Field
		// extractions applied to the loaded value.
		idx := ""
		ret += fmt.Sprintf("// %s=%s\n", code[0].(*ssa.UnOp).Name(), code[0].String())
		for _, cod := range code[1:] {
			switch cod.(type) {
			case *ssa.Index:
				ret += fmt.Sprintf("// %s=%s\n", cod.(*ssa.Index).Name(), cod.String())
			case *ssa.Field:
				ret += fmt.Sprintf("// %s=%s\n", cod.(*ssa.Field).Name(), cod.String())
			}
		}
		// Compute the offset expression for the (last) extraction.
		// NOTE(review): each iteration overwrites idx, so only the final
		// Index/Field in code[1:] contributes — presumably the matcher only
		// supplies one such instruction; confirm against pogo.peephole.
		for _, cod := range code[1:] {
			switch cod.(type) {
			case *ssa.Index:
				idx = wrapForceToUInt(l.IndirectValue(cod.(*ssa.Index).Index, errorInfo),
					cod.(*ssa.Index).Index.Type().Underlying().(*types.Basic).Kind())
				//if idx != "0" {
				//	ret += fmt.Sprintf(".addr(%s%s)",
				//		idx,
				//		arrayOffsetCalc(cod.(*ssa.Index).Type().Underlying()))
				//}
			case *ssa.Field:
				fo := fieldOffset(cod.(*ssa.Field).X.Type().Underlying().(*types.Struct), cod.(*ssa.Field).Field)
				idx = fmt.Sprintf("%d", fo)
				//if idx != "0" {
				//	ret += fmt.Sprintf(".fieldAddr(%d)", fo)
				//}
			}
		}
		// Type-specific load suffix, e.g. selecting the right typed getter.
		suffix := ""
		switch code[len(code)-1].(type) {
		case *ssa.Index:
			suffix = loadStoreSuffix(code[len(code)-1].(*ssa.Index).Type().Underlying(), true)
			//ret += fmt.Sprintf(".load%s); // PEEPHOLE OPTIMIZATION loadObject (Index)\n",
			//	loadStoreSuffix(code[len(code)-1].(*ssa.Index).Type().Underlying(), false))
		case *ssa.Field:
			suffix = loadStoreSuffix(code[len(code)-1].(*ssa.Field).Type().Underlying(), true)
			//ret += fmt.Sprintf(".load%s); // PEEPHOLE OPTIMIZATION loadObject (Field)\n",
			//	loadStoreSuffix(code[len(code)-1].(*ssa.Field).Type().Underlying(), false))
		}
		if l.is1usePtr(code[0].(*ssa.UnOp).X) {
			// The pointer was virtualised by "pointerChain" above; fetch its
			// recorded obj/off pair rather than emitting a pointer dereference.
			oup, found := l.hc.map1usePtr[code[0].(*ssa.UnOp).X]
			if !found {
				panic("unable to find virtual 1usePtr")
			}
			ret += register + "=" + oup.obj + ".get" + suffix + idx + "+" + oup.off + "); // PEEPHOLE OPTIMIZATION loadObject\n"
		} else {
			ptrString := l.IndirectValue(code[0].(*ssa.UnOp).X, errorInfo)
			ret += fmt.Sprintf("%s=%s", register, ptrString)
			ret += fmt.Sprintf(".obj.get%s%s+%s.off); // PEEPHOLE OPTIMIZATION loadObject\n",
				suffix, idx, ptrString)
		}

	case "phiList":
		// Disabled: kept for reference.
		//ret += "// PEEPHOLE OPTIMIZATION phiList\n"
		//ret += l.PhiCode(true, 0, code, errorInfo)
	default:
		panic("Unhandled peephole optimization: " + opt)
	}
	return ret
}

// PhiCode emits the Haxe assignments that realise the phi nodes in `code`.
// When allTargets is true it emits a full "switch(_Phi)" over every
// predecessor-block index; otherwise it emits only the assignments for the
// single predecessor `targetPhi`. Registers that the function-level
// optimisation map says are unused (or that have no referrers) are dropped.
func (l langType) PhiCode(allTargets bool, targetPhi int, code []ssa.Instruction, errorInfo string) string {
	ret := ""
	// opts maps predecessor-block index -> list of (register, value) pairs.
	opts := make(map[int][]phiEntry)
	for _, cod := range code {
		_, isPhi := cod.(*ssa.Phi)
		if isPhi {
			operands := cod.(*ssa.Phi).Operands([]*ssa.Value{})
			phiEntries := make([]int, len(operands))
			valEntries := make([]string, len(operands))
			thisReg := cod.(*ssa.Phi).Name()
			ret += "// " + thisReg + "=" + cod.String() + "\n"
			for o := range operands {
				// Operand o arrives via predecessor block Preds[o].
				phiEntries[o] = cod.(*ssa.Phi).Block().Preds[o].Index
				if _, ok := opts[phiEntries[o]]; !ok {
					opts[phiEntries[o]] = make([]phiEntry, 0)
				}
				valEntries[o] = l.IndirectValue(*operands[o], errorInfo)
				// Blank the register if it is optimised away or never read.
				if l.hc.fnCanOptMap[thisReg] || len(*(cod.(*ssa.Phi).Referrers())) == 0 {
					thisReg = ""
				}
				opts[phiEntries[o]] = append(opts[phiEntries[o]], phiEntry{thisReg, valEntries[o]})
			}
		}
	}
	if allTargets {
		ret += "switch(_Phi) { \n"
	}
	// Sort predecessor indices so the emitted Haxe is deterministic.
	idxs := make([]int, 0, len(opts))
	for phi := range opts {
		idxs = append(idxs, phi)
	}
	sort.Ints(idxs)
	for _, phi := range idxs {
		if allTargets || phi == targetPhi {
			opt := opts[phi]
			if allTargets {
				ret += fmt.Sprintf("\tcase %d:\n", phi)
			}
			// Detect a "crossover": one phi's target register appearing as
			// another phi's source value (the a,b = b,a case), which would be
			// corrupted by naive sequential assignment.
			crossover := false
			for x1, ent1 := range opt {
				for x2, ent2 := range opt {
					if x1 != x2 {
						if "_"+ent1.reg == ent2.val {
							crossover = true
							goto foundCrossover
						}
					}
				}
			}
		foundCrossover:
			if crossover {
				for _, ent := range opt {
					if ent.reg != "" {
						ret += fmt.Sprintf("\t\tvar tmp_%s=%s;\n", ent.reg, ent.val) // need temp vars for a,b = b,a
					}
				}
			}
			for _, ent := range opt {
				if ent.reg != "" {
					rn := "_" + ent.reg
					if l.hc.useRegisterArray {
						// Registers live in an array: rewrite "_r12" as "_r[12]".
						rn = rn[:2] + "[" + rn[2:] + "]"
					}
					if crossover {
						ret += fmt.Sprintf("\t\t%s=tmp_%s;\n", rn, ent.reg)
					} else {
						// Skip no-op self-assignments.
						if rn != ent.val {
							ret += fmt.Sprintf("\t\t%s=%s;\n", rn, ent.val)
						}
					}
				}
			}
		}
	}
	if allTargets {
		ret += "}\n"
	}
	return ret
}

// CanInline reports whether the SSA value vi may be inlined into its single
// consumer's expression rather than assigned to a register first. Only
// Convert, BinOp and some UnOp values qualify, and only when the one
// consumer is in the same basic block with no intervening instruction that
// could break expression-order semantics (see blockContainsBreaks).
func (l langType) CanInline(vi interface{}) bool {
	//if l.PogoComp.DebugFlag {
	//	return false
	//}
	val, isVal := vi.(ssa.Value)
	if !isVal {
		return false
	}
	switch l.LangType(val.Type(), false, "CanInline()") {
	case "Dynamic": // so a uintptr
		return false // this can yield un-expected results & mess up the type checking
	}
	var refs *[]ssa.Instruction
	var thisBlock *ssa.BasicBlock
	switch vi.(type) {
	default:
		return false
	case *ssa.Convert:
		/* this slows things down for cpp and does not speed things up for js
		if l.LangType(val.Type(), false, "CanInline()") !=
			l.LangType(vi.(*ssa.Convert).X.Type(), false, "CanInline()") {
			return false // can't have different Haxe types
		}
		*/
		refs = vi.(*ssa.Convert).Referrers()
		thisBlock = vi.(*ssa.Convert).Block()
	case *ssa.BinOp:
		refs = vi.(*ssa.BinOp).Referrers()
		thisBlock = vi.(*ssa.BinOp).Block()
	case *ssa.UnOp:
		// Channel receives and pointer loads must not be reordered by inlining.
		switch vi.(*ssa.UnOp).Op {
		case token.ARROW, token.MUL: // NOTE token.MUL does not work because of a[4],a[5]=a[5],a[4] crossover
			return false
		}
		refs = vi.(*ssa.UnOp).Referrers()
		thisBlock = vi.(*ssa.UnOp).Block()
		/*
			case *ssa.IndexAddr: // NOTE optimising this instruction means it's pointer leaks, but it does give a speed-up
				_, isSlice := vi.(*ssa.IndexAddr).X.Type().Underlying().(*types.Slice)
				if !isSlice {
					return false // only slices handled in the general in-line code, rather than pointerChain above
				}
				refs = vi.(*ssa.IndexAddr).Referrers()
				thisBlock = vi.(*ssa.IndexAddr).Block()
		*/
	}

	if thisBlock == nil {
		return false
	}
	if len(*refs) != 1 {
		return false // must have exactly one consumer
	}
	if (*refs)[0].Block() != thisBlock {
		return false // consumer is not in the same block
	}
	if blockContainsBreaks((*refs)[0].Block(), vi.(ssa.Instruction), (*refs)[0]) {
		return false
	}
	return true
}

// inlineRegisterName returns the Haxe text to use for value vi: either its
// register name, or — when CanInline allows and the inline map has an entry —
// the inlined expression previously recorded for that register.
// Accepts either an ssa.Value or a *ssa.Value; panics on anything else.
func (l langType) inlineRegisterName(vi interface{}) string {
	vp, okPtr := vi.(*ssa.Value)
	if !okPtr {
		v, ok := vi.(ssa.Value)
		if !ok {
			panic(fmt.Sprintf("inlineRegisterName not a pointer to a value, or a value; it is a %T", vi))
		}
		vp = &v
	}
	nm := strings.TrimSpace(l.PogoComp().RegisterName(*vp))
	if l.CanInline(vi) {
		code, found := l.PogoComp().InlineMap(nm)
		if !found {
			return nm
		}
		return code
	}
	return nm
}

// blockContainsBreaks reports whether, between producer and consumer inside
// block b, there is any instruction that could interrupt straight-line
// execution or have observable side effects (call, select, send, defer,
// run-defers, return, or a channel receive). Such a "break" makes inlining
// the producer's expression into the consumer unsafe.
func blockContainsBreaks(b *ssa.BasicBlock, producer, consumer ssa.Instruction) bool {
	hadProducer := false
	for ii, i := range b.Instrs {
		_ = ii
		if hadProducer {
			switch i.(type) {
			case *ssa.Call, *ssa.Select, *ssa.Send, *ssa.Defer, *ssa.RunDefers, *ssa.Return:
				return true
			case *ssa.UnOp:
				if i.(*ssa.UnOp).Op == token.ARROW {
					return true // channel receive is a scheduling point
				}
			}
			if i == consumer {
				//println("DEBUG consumer is ", ii)
				return false // if there is no break before the var is consumed, then no problem
			}
		} else {
			if i == producer {
				//println("DEBUG producer is ", ii)
				hadProducer = true
			}
		}
	}
	return false
}

// oneUsePtr records a virtualised single-use pointer as an object/offset
// expression pair. obj/off are the (possibly temp-var) expressions used when
// emitting code; objOrig/offOrig are the original expressions, kept so later
// pointers can reuse an already-created temp var; varObj/varOff flag whether
// a temp var was created for the respective part.
type oneUsePtr struct {
	obj, off, objOrig, offOrig string
	varObj, varOff             bool
}

// reset1useMap clears the per-function map of virtualised one-use pointers.
func (l langType) reset1useMap() {
	l.hc.map1usePtr = make(map[ssa.Value]oneUsePtr)
}

// set1usePtr records v as a virtual one-use pointer with the given obj/off
// expressions and returns any Haxe "var ..." declarations that need emitting.
// Temp vars are created (and registered in tempVarList) unless an existing
// entry already holds the same original expression, in which case that temp
// is reused. In register-array mode the expressions are stored verbatim with
// no temp vars.
func (l langType) set1usePtr(v ssa.Value, oup oneUsePtr) string {
	if l.hc.useRegisterArray {
		l.hc.map1usePtr[v] = oneUsePtr{obj: oup.obj, off: oup.off}
		return ""
	}
	nam := v.Name()
	newObj := ""
	newOff := ""
	ret := ""
	madeVarObj := false
	madeVarOff := false
	for _, eoup := range l.hc.map1usePtr { // TODO speed this up with another map or two
		if oup.obj == eoup.objOrig && eoup.varObj {
			newObj = eoup.obj
		}
		if oup.off == eoup.offOrig && eoup.varOff {
			newOff = eoup.off
		}
	}
	if newObj == "" {
		ret += "var " + nam + "obj=" + oup.obj + ";\n"
		newObj = nam + "obj"
		madeVarObj = true
		l.hc.tempVarList = append(l.hc.tempVarList, regToFree{nam + "obj", "Dynamic"})
	}
	if newOff == "" {
		ret += "var " + nam + "off=" + oup.off + ";\n"
		newOff = nam + "off"
		madeVarOff = true
	}
	l.hc.map1usePtr[v] = oneUsePtr{newObj, newOff, oup.obj, oup.off, madeVarObj, madeVarOff}
	return ret
}

// is1usePtr reports whether v is a pointer-typed FieldAddr/IndexAddr whose
// uses all sit in the same basic block within the current sub-function, and
// whose every use is either a store through it or a load from it — i.e. the
// pointer itself never escapes, so it can be virtualised as obj/off rather
// than materialised as a Haxe Pointer object.
func (l langType) is1usePtr(v interface{}) bool {
	var bl *ssa.BasicBlock
	switch v.(type) {
	case *ssa.FieldAddr:
		bl = v.(*ssa.FieldAddr).Block()
	case *ssa.IndexAddr:
		bl = v.(*ssa.IndexAddr).Block()
	default:
		return false
	}
	val := v.(ssa.Value)
	if l.hc.fnCanOptMap[val.Name()] {
		switch val.Type().Underlying().(type) {
		case *types.Pointer:
			refs := val.Referrers()
			for ll := range *refs {
				if (*refs)[ll].Block() == bl {
					// The referrer must belong to the current sub-function.
					for _, i := range l.hc.subFnInstrs {
						if i == (*refs)[ll].(ssa.Instruction) {
							goto foundInSubFn
						}
					}
					return false
				foundInSubFn:
					switch (*refs)[ll].(type) {
					case *ssa.Store:
						// Storing THROUGH the pointer is fine; storing the
						// pointer itself (as the value) would let it escape.
						if (*refs)[ll].(*ssa.Store).Addr == val {
							return true
						}
					case *ssa.UnOp:
						if (*refs)[ll].(*ssa.UnOp).Op.String() == "*" {
							return true // a plain load through the pointer
						}
					default: // something we don't know how to handle!
						return false
					}
				}
			}
		}
	}
	return false
}