github.com/4ad/go@v0.0.0-20161219182952-69a12818b605/src/cmd/compile/internal/gc/pgen.go

// Copyright 2011 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package gc

import (
	"cmd/compile/internal/ssa"
	"cmd/internal/obj"
	"cmd/internal/sys"
	"fmt"
	"sort"
	"strings"
)

// "Portable" code generation.

var makefuncdatasym_nsym int

func makefuncdatasym(nameprefix string, funcdatakind int64) *Sym {
	var nod Node

	sym := LookupN(nameprefix, makefuncdatasym_nsym)
	makefuncdatasym_nsym++
	pnod := newname(sym)
	pnod.Class = PEXTERN
	Nodconst(&nod, Types[TINT32], funcdatakind)
	Thearch.Gins(obj.AFUNCDATA, &nod, pnod)
	return sym
}

// gvardef inserts a VARDEF for n into the instruction stream.
// VARDEF is an annotation for the liveness analysis, marking a place
// where a complete initialization (definition) of a variable begins.
// Since the liveness analysis can see initialization of single-word
// variables quite easily, gvardef is usually only called for multi-word
// or 'fat' variables, those satisfying isfat(n.Type).
// However, gvardef is also called when a non-fat variable is initialized
// via a block move; the only time this happens is when you have
//	return f()
// for a function with multiple return values exactly matching the return
// types of the current function.
//
// A 'VARDEF x' annotation in the instruction stream tells the liveness
// analysis to behave as though the variable x is being initialized at that
// point in the instruction stream. The VARDEF must appear before the
// actual (multi-instruction) initialization, and it must also appear after
// any uses of the previous value, if any. For example, if compiling:
//
//	x = x[1:]
//
// it is important to generate code like:
//
//	base, len, cap = pieces of x[1:]
//	VARDEF x
//	x = {base, len, cap}
//
// If instead the generated code looked like:
//
//	VARDEF x
//	base, len, cap = pieces of x[1:]
//	x = {base, len, cap}
//
// then the liveness analysis would decide the previous value of x was
// unnecessary even though it is about to be used by the x[1:] computation.
// Similarly, if the generated code looked like:
//
//	base, len, cap = pieces of x[1:]
//	x = {base, len, cap}
//	VARDEF x
//
// then the liveness analysis will not preserve the new value of x, because
// the VARDEF appears to have "overwritten" it.
//
// VARDEF is a bit of a kludge to work around the fact that the instruction
// stream is working on single-word values but the liveness analysis
// wants to work on individual variables, which might be multi-word
// aggregates. It might make sense at some point to look into letting
// the liveness analysis work on single-word values as well, although
// there are complications around interface values, slices, and strings,
// all of which cannot be treated as individual words.
//
// VARKILL is the opposite of VARDEF: it marks a value as no longer needed,
// even if its address has been taken. That is, a VARKILL annotation asserts
// that its argument is certainly dead, for use when the liveness analysis
// would not otherwise be able to deduce that fact.
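//
// As a rough editorial sketch (autotmp_0 is a made-up name; this is not
// taken from real compiler output), a compiler temporary holding a fat
// value such as a slice header is typically bracketed by the two
// annotations, with its entire live range in between:
//
//	VARDEF autotmp_0
//	... instructions initializing autotmp_0 ...
//	... last use of autotmp_0 ...
//	VARKILL autotmp_0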

func gvardefx(n *Node, as obj.As) {
	if n == nil {
		Fatalf("gvardef nil")
	}
	if n.Op != ONAME {
		Yyerror("gvardef %#v; %v", n.Op, n)
		return
	}

	switch n.Class {
	case PAUTO, PPARAM, PPARAMOUT:
		if as == obj.AVARLIVE {
			Thearch.Gins(as, n, nil)
		} else {
			Thearch.Gins(as, nil, n)
		}
	}
}

func Gvardef(n *Node) {
	gvardefx(n, obj.AVARDEF)
}

func Gvarkill(n *Node) {
	gvardefx(n, obj.AVARKILL)
}

func Gvarlive(n *Node) {
	gvardefx(n, obj.AVARLIVE)
}

func removevardef(firstp *obj.Prog) {
	for p := firstp; p != nil; p = p.Link {
		for p.Link != nil && (p.Link.As == obj.AVARDEF || p.Link.As == obj.AVARKILL || p.Link.As == obj.AVARLIVE) {
			p.Link = p.Link.Link
		}
		if p.To.Type == obj.TYPE_BRANCH {
			for p.To.Val.(*obj.Prog) != nil && (p.To.Val.(*obj.Prog).As == obj.AVARDEF || p.To.Val.(*obj.Prog).As == obj.AVARKILL || p.To.Val.(*obj.Prog).As == obj.AVARLIVE) {
				p.To.Val = p.To.Val.(*obj.Prog).Link
			}
		}
	}
}

func emitptrargsmap() {
	if Curfn.Func.Nname.Sym.Name == "_" {
		return
	}
	sym := Lookup(fmt.Sprintf("%s.args_stackmap", Curfn.Func.Nname.Sym.Name))

	nptr := int(Curfn.Type.ArgWidth() / int64(Widthptr))
	bv := bvalloc(int32(nptr) * 2)
	nbitmap := 1
	if Curfn.Type.Results().NumFields() > 0 {
		nbitmap = 2
	}
	off := duint32(sym, 0, uint32(nbitmap))
	off = duint32(sym, off, uint32(bv.n))
	var xoffset int64
	if Curfn.Type.Recv() != nil {
		xoffset = 0
		onebitwalktype1(Curfn.Type.Recvs(), &xoffset, bv)
	}

	if Curfn.Type.Params().NumFields() > 0 {
		xoffset = 0
		onebitwalktype1(Curfn.Type.Params(), &xoffset, bv)
	}

	off = dbvec(sym, off, bv)
	if Curfn.Type.Results().NumFields() > 0 {
		xoffset = 0
		onebitwalktype1(Curfn.Type.Results(), &xoffset, bv)
		off = dbvec(sym, off, bv)
	}

	ggloblsym(sym, int32(off), obj.RODATA|obj.LOCAL)
}

// cmpstackvarlt reports whether the stack variable a sorts before b.
//
// Sort the list of stack variables: autos after anything else;
// within autos, unused after used; within used, things with
// pointers first, then zeroed things first, and then decreasing size.
// Because autos are laid out at decreasing addresses
// on the stack, pointers first, zeroed things first and decreasing size
// really means, in memory, things with pointers needing zeroing at
// the top of the stack and increasing in size.
// Non-autos sort on offset.
func cmpstackvarlt(a, b *Node) bool {
	if (a.Class == PAUTO) != (b.Class == PAUTO) {
		return b.Class == PAUTO
	}

	if a.Class != PAUTO {
		return a.Xoffset < b.Xoffset
	}

	if a.Used != b.Used {
		return a.Used
	}

	ap := haspointers(a.Type)
	bp := haspointers(b.Type)
	if ap != bp {
		return ap
	}

	ap = a.Name.Needzero
	bp = b.Name.Needzero
	if ap != bp {
		return ap
	}

	if a.Type.Width != b.Type.Width {
		return a.Type.Width > b.Type.Width
	}

	return a.Sym.Name < b.Sym.Name
}
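
// As a concrete illustration of this ordering (example constructed here,
// not from the original source): given three ONAME/PAUTO locals, a used
// 8-byte pointer, a used 16-byte pointer-free scalar, and an unused
// 8-byte scalar, cmpstackvarlt sorts them in exactly that sequence,
// since used locals come before unused ones and, among used locals,
// pointer-carrying types come first regardless of size.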

// byStackVar implements sort.Interface for []*Node using cmpstackvarlt.
type byStackVar []*Node

func (s byStackVar) Len() int           { return len(s) }
func (s byStackVar) Less(i, j int) bool { return cmpstackvarlt(s[i], s[j]) }
func (s byStackVar) Swap(i, j int)      { s[i], s[j] = s[j], s[i] }

// stkdelta records the stack offset delta for a node
// during the compaction of the stack frame to remove
// unused stack slots.
var stkdelta = map[*Node]int64{}

// TODO(lvd) find out where the PAUTO/OLITERAL nodes come from.
func allocauto(ptxt *obj.Prog) {
	Stksize = 0
	stkptrsize = 0

	if len(Curfn.Func.Dcl) == 0 {
		return
	}

	// Mark the PAUTOs unused.
	for _, ln := range Curfn.Func.Dcl {
		if ln.Class == PAUTO {
			ln.Used = false
		}
	}

	markautoused(ptxt)

	sort.Sort(byStackVar(Curfn.Func.Dcl))

	// Unused autos are at the end, chop 'em off.
	n := Curfn.Func.Dcl[0]
	if n.Class == PAUTO && n.Op == ONAME && !n.Used {
		// No locals used at all.
		Curfn.Func.Dcl = nil

		fixautoused(ptxt)
		return
	}

	for i := 1; i < len(Curfn.Func.Dcl); i++ {
		n = Curfn.Func.Dcl[i]
		if n.Class == PAUTO && n.Op == ONAME && !n.Used {
			Curfn.Func.Dcl = Curfn.Func.Dcl[:i]
			break
		}
	}

	// Reassign stack offsets of the locals that are still there.
	var w int64
	for _, n := range Curfn.Func.Dcl {
		if n.Class != PAUTO || n.Op != ONAME {
			continue
		}

		dowidth(n.Type)
		w = n.Type.Width
		if w >= Thearch.MAXWIDTH || w < 0 {
			Fatalf("bad width")
		}
		Stksize += w
		Stksize = Rnd(Stksize, int64(n.Type.Align))
		if haspointers(n.Type) {
			stkptrsize = Stksize
		}
		if Thearch.LinkArch.InFamily(sys.MIPS64, sys.ARM, sys.ARM64, sys.PPC64, sys.S390X, sys.SPARC64) {
			Stksize = Rnd(Stksize, int64(Widthptr))
		}
		if Stksize >= 1<<31 {
			setlineno(Curfn)
			Yyerror("stack frame too large (>2GB)")
		}

		stkdelta[n] = -Stksize - n.Xoffset
	}

	Stksize = Rnd(Stksize, int64(Widthreg))
	stkptrsize = Rnd(stkptrsize, int64(Widthreg))

	fixautoused(ptxt)

	// The debug information needs accurate offsets on the symbols.
	for _, ln := range Curfn.Func.Dcl {
		if ln.Class != PAUTO || ln.Op != ONAME {
			continue
		}
		ln.Xoffset += stkdelta[ln]
		delete(stkdelta, ln)
	}
}

func Cgen_checknil(n *Node) {
	if Disable_checknil != 0 {
		return
	}

	// Ideally we wouldn't see any integer types here, but we do.
	if n.Type == nil || (!n.Type.IsPtr() && !n.Type.IsInteger() && n.Type.Etype != TUNSAFEPTR) {
		Dump("checknil", n)
		Fatalf("bad checknil")
	}

	// Most architectures require that the address to be checked be in a
	// register; on x86 (AMD64 and 386) the operand may instead be in memory.
	needsReg := !Thearch.LinkArch.InFamily(sys.AMD64, sys.I386)

	// Move the address to be checked into a register if necessary.
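	// A constant (OLITERAL) or otherwise non-addressable operand has no
	// usable memory form either, so it too is first materialized in a
	// register before the check is emitted.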
	if (needsReg && n.Op != OREGISTER) || !n.Addable || n.Op == OLITERAL {
		var reg Node
		Regalloc(&reg, Types[Tptr], n)
		Cgen(n, &reg)
		Thearch.Gins(obj.ACHECKNIL, &reg, nil)
		Regfree(&reg)
		return
	}

	Thearch.Gins(obj.ACHECKNIL, n, nil)
}

func compile(fn *Node) {
	if Newproc == nil {
		Newproc = Sysfunc("newproc")
		Deferproc = Sysfunc("deferproc")
		Deferreturn = Sysfunc("deferreturn")
		Panicindex = Sysfunc("panicindex")
		panicslice = Sysfunc("panicslice")
		panicdivide = Sysfunc("panicdivide")
		throwreturn = Sysfunc("throwreturn")
		growslice = Sysfunc("growslice")
		writebarrierptr = Sysfunc("writebarrierptr")
		typedmemmove = Sysfunc("typedmemmove")
		panicdottype = Sysfunc("panicdottype")
	}

	defer func(lno int32) {
		lineno = lno
	}(setlineno(fn))

	Curfn = fn
	dowidth(Curfn.Type)

	if fn.Nbody.Len() == 0 {
		if pure_go || strings.HasPrefix(fn.Func.Nname.Sym.Name, "init.") {
			Yyerror("missing function body for %q", fn.Func.Nname.Sym.Name)
			return
		}

		if Debug['A'] != 0 {
			return
		}
		emitptrargsmap()
		return
	}

	saveerrors()

	// set up domain for labels
	clearlabels()

	if Curfn.Type.FuncType().Outnamed {
		// add clearing of the output parameters
		for _, t := range Curfn.Type.Results().Fields().Slice() {
			if t.Nname != nil {
				n := Nod(OAS, t.Nname, nil)
				n = typecheck(n, Etop)
				Curfn.Nbody.Set(append([]*Node{n}, Curfn.Nbody.Slice()...))
			}
		}
	}

	order(Curfn)
	if nerrors != 0 {
		return
	}

	hasdefer = false
	walk(Curfn)
	if nerrors != 0 {
		return
	}
	if instrumenting {
		instrument(Curfn)
	}
	if nerrors != 0 {
		return
	}

	// Build an SSA backend function.
	var ssafn *ssa.Func
	if shouldssa(Curfn) {
		ssafn = buildssa(Curfn)
	}

	continpc = nil
	breakpc = nil

	pl := newplist()
	pl.Name = Linksym(Curfn.Func.Nname.Sym)

	setlineno(Curfn)

	var nod1 Node
	Nodconst(&nod1, Types[TINT32], 0)
	nam := Curfn.Func.Nname
	if isblank(nam) {
		nam = nil
	}
	ptxt := Thearch.Gins(obj.ATEXT, nam, &nod1)
	Afunclit(&ptxt.From, Curfn.Func.Nname)
	ptxt.From3 = new(obj.Addr)
	if fn.Func.Dupok {
		ptxt.From3.Offset |= obj.DUPOK
	}
	if fn.Func.Wrapper {
		ptxt.From3.Offset |= obj.WRAPPER
	}
	if fn.Func.Needctxt {
		ptxt.From3.Offset |= obj.NEEDCTXT
	}
	if fn.Func.Pragma&Nosplit != 0 {
		ptxt.From3.Offset |= obj.NOSPLIT
	}
	if fn.Func.ReflectMethod {
		ptxt.From3.Offset |= obj.REFLECTMETHOD
	}
	if fn.Func.Pragma&Systemstack != 0 {
		ptxt.From.Sym.Cfunc = true
	}

	// Clumsy but important.
	// See test/recover.go for test cases and src/reflect/value.go
	// for the actual functions being considered.
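	// Roughly speaking, the WRAPPER bit makes the panic/recover machinery
	// treat these reflect call frames like compiler-generated method
	// wrappers, so a deferred recover in the function they invoke behaves
	// as if that function had been called directly.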
	if myimportpath == "reflect" {
		if Curfn.Func.Nname.Sym.Name == "callReflect" || Curfn.Func.Nname.Sym.Name == "callMethod" {
			ptxt.From3.Offset |= obj.WRAPPER
		}
	}

	ginit()

	gcargs := makefuncdatasym("gcargs·", obj.FUNCDATA_ArgsPointerMaps)
	gclocals := makefuncdatasym("gclocals·", obj.FUNCDATA_LocalsPointerMaps)

	if obj.Fieldtrack_enabled != 0 && len(Curfn.Func.FieldTrack) > 0 {
		trackSyms := make([]*Sym, 0, len(Curfn.Func.FieldTrack))
		for sym := range Curfn.Func.FieldTrack {
			trackSyms = append(trackSyms, sym)
		}
		sort.Sort(symByName(trackSyms))
		for _, sym := range trackSyms {
			gtrack(sym)
		}
	}

	for _, n := range fn.Func.Dcl {
		if n.Op != ONAME { // might be OTYPE or OLITERAL
			continue
		}
		switch n.Class {
		case PAUTO, PPARAM, PPARAMOUT:
			Nodconst(&nod1, Types[TUINTPTR], n.Type.Width)
			p := Thearch.Gins(obj.ATYPE, n, &nod1)
			p.From.Gotype = Linksym(ngotype(n))
		}
	}

	if ssafn != nil {
		genssa(ssafn, ptxt, gcargs, gclocals)
		ssafn.Free()
	} else {
		genlegacy(ptxt, gcargs, gclocals)
	}
}

type symByName []*Sym

func (a symByName) Len() int           { return len(a) }
func (a symByName) Less(i, j int) bool { return a[i].Name < a[j].Name }
func (a symByName) Swap(i, j int)      { a[i], a[j] = a[j], a[i] }

// genlegacy compiles Curfn using the legacy non-SSA code generator.
func genlegacy(ptxt *obj.Prog, gcargs, gclocals *Sym) {
	Genlist(Curfn.Func.Enter)
	Genlist(Curfn.Nbody)
	gclean()
	checklabels()
	if nerrors != 0 {
		return
	}
	if Curfn.Func.Endlineno != 0 {
		lineno = Curfn.Func.Endlineno
	}

	if Curfn.Type.Results().NumFields() != 0 {
		Ginscall(throwreturn, 0)
	}

	ginit()

	// TODO: Determine when the final cgen_ret can be omitted. Perhaps always?
	cgen_ret(nil)

	if hasdefer {
		// deferreturn pretends to have one uintptr argument.
		// Reserve space for it so the stack scanner is happy.
		if Maxarg < int64(Widthptr) {
			Maxarg = int64(Widthptr)
		}
	}

	gclean()
	if nerrors != 0 {
		return
	}

	Pc.As = obj.ARET // overwrite AEND
	Pc.Lineno = lineno

	fixjmp(ptxt)
	if Debug['N'] == 0 || Debug['R'] != 0 || Debug['P'] != 0 {
		regopt(ptxt)
		nilopt(ptxt)
	}

	Thearch.Expandchecks(ptxt)

	allocauto(ptxt)

	setlineno(Curfn)
	if Stksize+Maxarg > 1<<31 {
		Yyerror("stack frame too large (>2GB)")
		return
	}

	// Emit garbage collection symbols.
	liveness(Curfn, ptxt, gcargs, gclocals)

	Thearch.Defframe(ptxt)

	if Debug['f'] != 0 {
		frame(0)
	}

	// Remove leftover instrumentation from the instruction stream.
	removevardef(ptxt)
}