// Copyright 2011 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package gc

import (
	"cmd/compile/internal/ssa"
	"cmd/internal/obj"
	"cmd/internal/sys"
	"fmt"
	"sort"
	"strings"
)

// "Portable" code generation.

// makefuncdatasym_nsym is a global counter used to generate a unique
// symbol name for each FUNCDATA symbol created by makefuncdatasym.
var makefuncdatasym_nsym int

// makefuncdatasym allocates a fresh, uniquely named symbol with the
// given name prefix and emits an AFUNCDATA pseudo-instruction
// associating the symbol with the FUNCDATA slot funcdatakind
// (e.g. obj.FUNCDATA_ArgsPointerMaps). The symbol is returned so the
// caller can later fill in its data (see compile/liveness).
func makefuncdatasym(nameprefix string, funcdatakind int64) *Sym {
	var nod Node

	sym := LookupN(nameprefix, makefuncdatasym_nsym)
	makefuncdatasym_nsym++
	pnod := newname(sym)
	pnod.Class = PEXTERN
	Nodconst(&nod, Types[TINT32], funcdatakind)
	Thearch.Gins(obj.AFUNCDATA, &nod, pnod)
	return sym
}

// gvardef inserts a VARDEF for n into the instruction stream.
// VARDEF is an annotation for the liveness analysis, marking a place
// where a complete initialization (definition) of a variable begins.
// Since the liveness analysis can see initialization of single-word
// variables quite easily, gvardef is usually only called for multi-word
// or 'fat' variables, those satisfying isfat(n->type).
// However, gvardef is also called when a non-fat variable is initialized
// via a block move; the only time this happens is when you have
//	return f()
// for a function with multiple return values exactly matching the return
// types of the current function.
//
// A 'VARDEF x' annotation in the instruction stream tells the liveness
// analysis to behave as though the variable x is being initialized at that
// point in the instruction stream. The VARDEF must appear before the
// actual (multi-instruction) initialization, and it must also appear after
// any uses of the previous value, if any. For example, if compiling:
//
//	x = x[1:]
//
// it is important to generate code like:
//
//	base, len, cap = pieces of x[1:]
//	VARDEF x
//	x = {base, len, cap}
//
// If instead the generated code looked like:
//
//	VARDEF x
//	base, len, cap = pieces of x[1:]
//	x = {base, len, cap}
//
// then the liveness analysis would decide the previous value of x was
// unnecessary even though it is about to be used by the x[1:] computation.
// Similarly, if the generated code looked like:
//
//	base, len, cap = pieces of x[1:]
//	x = {base, len, cap}
//	VARDEF x
//
// then the liveness analysis will not preserve the new value of x, because
// the VARDEF appears to have "overwritten" it.
//
// VARDEF is a bit of a kludge to work around the fact that the instruction
// stream is working on single-word values but the liveness analysis
// wants to work on individual variables, which might be multi-word
// aggregates. It might make sense at some point to look into letting
// the liveness analysis work on single-word values as well, although
// there are complications around interface values, slices, and strings,
// all of which cannot be treated as individual words.
//
// VARKILL is the opposite of VARDEF: it marks a value as no longer needed,
// even if its address has been taken. That is, a VARKILL annotation asserts
// that its argument is certainly dead, for use when the liveness analysis
// would not otherwise be able to deduce that fact.

// gvardefx emits the liveness pseudo-instruction as (AVARDEF, AVARKILL,
// or AVARLIVE) for n. Only stack-resident variables (PAUTO, PPARAM,
// PPARAMOUT) receive an annotation; other classes are silently ignored.
// AVARLIVE carries the node in the from-operand, the others in the
// to-operand.
func gvardefx(n *Node, as obj.As) {
	if n == nil {
		Fatalf("gvardef nil")
	}
	if n.Op != ONAME {
		Yyerror("gvardef %#v; %v", n.Op, n)
		return
	}

	switch n.Class {
	case PAUTO, PPARAM, PPARAMOUT:
		if as == obj.AVARLIVE {
			Thearch.Gins(as, n, nil)
		} else {
			Thearch.Gins(as, nil, n)
		}
	}
}

// Gvardef inserts a VARDEF annotation for n (see the commentary above).
func Gvardef(n *Node) {
	gvardefx(n, obj.AVARDEF)
}

// Gvarkill inserts a VARKILL annotation for n, asserting n is dead.
func Gvarkill(n *Node) {
	gvardefx(n, obj.AVARKILL)
}

// Gvarlive inserts a VARLIVE annotation for n, asserting n is live.
func Gvarlive(n *Node) {
	gvardefx(n, obj.AVARLIVE)
}

// removevardef unlinks every VARDEF/VARKILL/VARLIVE pseudo-instruction
// from the instruction list starting at firstp, and retargets any branch
// that pointed at a removed instruction to the next remaining one.
func removevardef(firstp *obj.Prog) {
	for p := firstp; p != nil; p = p.Link {
		// Skip runs of liveness annotations following p.
		for p.Link != nil && (p.Link.As == obj.AVARDEF || p.Link.As == obj.AVARKILL || p.Link.As == obj.AVARLIVE) {
			p.Link = p.Link.Link
		}
		// A branch target may itself be an annotation about to vanish;
		// walk it forward to the first real instruction.
		if p.To.Type == obj.TYPE_BRANCH {
			for p.To.Val.(*obj.Prog) != nil && (p.To.Val.(*obj.Prog).As == obj.AVARDEF || p.To.Val.(*obj.Prog).As == obj.AVARKILL || p.To.Val.(*obj.Prog).As == obj.AVARLIVE) {
				p.To.Val = p.To.Val.(*obj.Prog).Link
			}
		}
	}
}

// emitptrargsmap emits the read-only symbol "<fn>.args_stackmap" for
// Curfn: one or two bitmaps (arguments, and results when present)
// describing which words of the argument area hold pointers. It is
// called from compile for functions that have no Go body. Blank
// functions ("_") are skipped.
func emitptrargsmap() {
	if Curfn.Func.Nname.Sym.Name == "_" {
		return
	}
	sym := Lookup(fmt.Sprintf("%s.args_stackmap", Curfn.Func.Nname.Sym.Name))

	nptr := int(Curfn.Type.ArgWidth() / int64(Widthptr))
	bv := bvalloc(int32(nptr) * 2)
	nbitmap := 1
	if Curfn.Type.Results().NumFields() > 0 {
		nbitmap = 2
	}
	// Header: bitmap count, then bits per bitmap.
	off := duint32(sym, 0, uint32(nbitmap))
	off = duint32(sym, off, uint32(bv.n))
	var xoffset int64
	if Curfn.Type.Recv() != nil {
		xoffset = 0
		onebitwalktype1(Curfn.Type.Recvs(), &xoffset, bv)
	}

	if Curfn.Type.Params().NumFields() > 0 {
		xoffset = 0
		onebitwalktype1(Curfn.Type.Params(), &xoffset, bv)
	}

	// First bitmap: receiver + parameters.
	for j := 0; int32(j) < bv.n; j += 32 {
		off = duint32(sym, off, bv.b[j/32])
	}
	// Second bitmap (if any): results OR'd onto the same bit vector.
	if Curfn.Type.Results().NumFields() > 0 {
		xoffset = 0
		onebitwalktype1(Curfn.Type.Results(), &xoffset, bv)
		for j := 0; int32(j) < bv.n; j += 32 {
			off = duint32(sym, off, bv.b[j/32])
		}
	}

	ggloblsym(sym, int32(off), obj.RODATA|obj.LOCAL)
}

// cmpstackvarlt reports whether the stack variable a sorts before b.
//
// Sort the list of stack variables. Autos after anything else,
// within autos, unused after used, within used, things with
// pointers first, zeroed things first, and then decreasing size.
// Because autos are laid out in decreasing addresses
// on the stack, pointers first, zeroed things first and decreasing size
// really means, in memory, things with pointers needing zeroing at
// the top of the stack and increasing in size.
// Non-autos sort on offset.
func cmpstackvarlt(a, b *Node) bool {
	if (a.Class == PAUTO) != (b.Class == PAUTO) {
		return b.Class == PAUTO
	}

	if a.Class != PAUTO {
		return a.Xoffset < b.Xoffset
	}

	if a.Used != b.Used {
		return a.Used
	}

	ap := haspointers(a.Type)
	bp := haspointers(b.Type)
	if ap != bp {
		return ap
	}

	ap = a.Name.Needzero
	bp = b.Name.Needzero
	if ap != bp {
		return ap
	}

	if a.Type.Width != b.Type.Width {
		return a.Type.Width > b.Type.Width
	}

	// Tie-break on name for a deterministic order.
	return a.Sym.Name < b.Sym.Name
}

// byStackVar implements sort.Interface for []*Node using cmpstackvarlt.
type byStackVar []*Node

func (s byStackVar) Len() int           { return len(s) }
func (s byStackVar) Less(i, j int) bool { return cmpstackvarlt(s[i], s[j]) }
func (s byStackVar) Swap(i, j int)      { s[i], s[j] = s[j], s[i] }

// stkdelta records the stack offset delta for a node
// during the compaction of the stack frame to remove
// unused stack slots.
var stkdelta = map[*Node]int64{}

// allocauto assigns final stack offsets to the function's local (PAUTO)
// variables: it marks and drops unused autos, sorts the remainder with
// byStackVar, lays them out (updating Stksize and stkptrsize), and then
// applies the recorded offset deltas so debug info sees real offsets.
// TODO(lvd) find out where the PAUTO/OLITERAL nodes come from.
func allocauto(ptxt *obj.Prog) {
	Stksize = 0
	stkptrsize = 0

	if len(Curfn.Func.Dcl) == 0 {
		return
	}

	// Mark the PAUTO's unused.
	for _, ln := range Curfn.Func.Dcl {
		if ln.Class == PAUTO {
			ln.Used = false
		}
	}

	markautoused(ptxt)

	sort.Sort(byStackVar(Curfn.Func.Dcl))

	// Unused autos are at the end, chop 'em off.
	n := Curfn.Func.Dcl[0]
	if n.Class == PAUTO && n.Op == ONAME && !n.Used {
		// No locals used at all
		Curfn.Func.Dcl = nil

		fixautoused(ptxt)
		return
	}

	for i := 1; i < len(Curfn.Func.Dcl); i++ {
		n = Curfn.Func.Dcl[i]
		if n.Class == PAUTO && n.Op == ONAME && !n.Used {
			Curfn.Func.Dcl = Curfn.Func.Dcl[:i]
			break
		}
	}

	// Reassign stack offsets of the locals that are still there.
	var w int64
	for _, n := range Curfn.Func.Dcl {
		if n.Class != PAUTO || n.Op != ONAME {
			continue
		}

		dowidth(n.Type)
		w = n.Type.Width
		if w >= Thearch.MAXWIDTH || w < 0 {
			Fatalf("bad width")
		}
		Stksize += w
		Stksize = Rnd(Stksize, int64(n.Type.Align))
		// Pointer-containing slots extend the scannable prefix of the frame.
		if haspointers(n.Type) {
			stkptrsize = Stksize
		}
		if Thearch.LinkArch.InFamily(sys.MIPS64, sys.ARM, sys.ARM64, sys.PPC64, sys.S390X) {
			Stksize = Rnd(Stksize, int64(Widthptr))
		}
		if Stksize >= 1<<31 {
			setlineno(Curfn)
			Yyerror("stack frame too large (>2GB)")
		}

		stkdelta[n] = -Stksize - n.Xoffset
	}

	Stksize = Rnd(Stksize, int64(Widthreg))
	stkptrsize = Rnd(stkptrsize, int64(Widthreg))

	fixautoused(ptxt)

	// The debug information needs accurate offsets on the symbols.
	for _, ln := range Curfn.Func.Dcl {
		if ln.Class != PAUTO || ln.Op != ONAME {
			continue
		}
		ln.Xoffset += stkdelta[ln]
		delete(stkdelta, ln)
	}
}

// Cgen_checknil emits an ACHECKNIL pseudo-instruction for n, first
// materializing n into a register when the target architecture (anything
// other than AMD64/386) requires the checked address to be in a register,
// or when n is not directly addressable. No-op when nil checks are
// globally disabled.
func Cgen_checknil(n *Node) {
	if Disable_checknil != 0 {
		return
	}

	// Ideally we wouldn't see any integer types here, but we do.
	if n.Type == nil || (!n.Type.IsPtr() && !n.Type.IsInteger() && n.Type.Etype != TUNSAFEPTR) {
		Dump("checknil", n)
		Fatalf("bad checknil")
	}

	// Most architectures require that the address to be checked is
	// in a register (it could be in memory).
	needsReg := !Thearch.LinkArch.InFamily(sys.AMD64, sys.I386)

	// Move the address to be checked into a register if necessary.
	if (needsReg && n.Op != OREGISTER) || !n.Addable || n.Op == OLITERAL {
		var reg Node
		Regalloc(&reg, Types[Tptr], n)
		Cgen(n, &reg)
		Thearch.Gins(obj.ACHECKNIL, &reg, nil)
		Regfree(&reg)
		return
	}

	Thearch.Gins(obj.ACHECKNIL, n, nil)
}

// compile generates machine code for fn: it runs order/walk (and
// instrumentation when enabled), emits the TEXT prologue with its flags,
// creates the FUNCDATA symbols for the GC bitmaps, and hands off to
// either the SSA backend (genssa) or the legacy generator (genlegacy).
// Functions with no body instead get only an args_stackmap symbol.
func compile(fn *Node) {
	// Lazily resolve the runtime support functions on first use.
	if Newproc == nil {
		Newproc = Sysfunc("newproc")
		Deferproc = Sysfunc("deferproc")
		Deferreturn = Sysfunc("deferreturn")
		Panicindex = Sysfunc("panicindex")
		panicslice = Sysfunc("panicslice")
		panicdivide = Sysfunc("panicdivide")
		throwreturn = Sysfunc("throwreturn")
		growslice = Sysfunc("growslice")
		writebarrierptr = Sysfunc("writebarrierptr")
		typedmemmove = Sysfunc("typedmemmove")
		panicdottype = Sysfunc("panicdottype")
	}

	// Restore the global line number on exit.
	defer func(lno int32) {
		lineno = lno
	}(setlineno(fn))

	Curfn = fn
	dowidth(Curfn.Type)

	if fn.Nbody.Len() == 0 {
		if pure_go || strings.HasPrefix(fn.Func.Nname.Sym.Name, "init.") {
			Yyerror("missing function body for %q", fn.Func.Nname.Sym.Name)
			return
		}

		if Debug['A'] != 0 {
			return
		}
		emitptrargsmap()
		return
	}

	saveerrors()

	// set up domain for labels
	clearlabels()

	if Curfn.Type.FuncType().Outnamed {
		// add clearing of the output parameters
		for _, t := range Curfn.Type.Results().Fields().Slice() {
			if t.Nname != nil {
				n := Nod(OAS, t.Nname, nil)
				n = typecheck(n, Etop)
				Curfn.Nbody.Set(append([]*Node{n}, Curfn.Nbody.Slice()...))
			}
		}
	}

	order(Curfn)
	if nerrors != 0 {
		return
	}

	hasdefer = false
	walk(Curfn)
	if nerrors != 0 {
		return
	}
	if instrumenting {
		instrument(Curfn)
	}
	if nerrors != 0 {
		return
	}

	// Build an SSA backend function.
	var ssafn *ssa.Func
	if shouldssa(Curfn) {
		ssafn = buildssa(Curfn)
	}

	continpc = nil
	breakpc = nil

	pl := newplist()
	pl.Name = Linksym(Curfn.Func.Nname.Sym)

	setlineno(Curfn)

	var nod1 Node
	Nodconst(&nod1, Types[TINT32], 0)
	nam := Curfn.Func.Nname
	if isblank(nam) {
		nam = nil
	}
	ptxt := Thearch.Gins(obj.ATEXT, nam, &nod1)
	Afunclit(&ptxt.From, Curfn.Func.Nname)
	// Accumulate TEXT flags in From3.Offset.
	ptxt.From3 = new(obj.Addr)
	if fn.Func.Dupok {
		ptxt.From3.Offset |= obj.DUPOK
	}
	if fn.Func.Wrapper {
		ptxt.From3.Offset |= obj.WRAPPER
	}
	if fn.Func.Needctxt {
		ptxt.From3.Offset |= obj.NEEDCTXT
	}
	if fn.Func.Pragma&Nosplit != 0 {
		ptxt.From3.Offset |= obj.NOSPLIT
	}
	if fn.Func.ReflectMethod {
		ptxt.From3.Offset |= obj.REFLECTMETHOD
	}
	if fn.Func.Pragma&Systemstack != 0 {
		ptxt.From.Sym.Cfunc = true
	}

	// Clumsy but important.
	// See test/recover.go for test cases and src/reflect/value.go
	// for the actual functions being considered.
	if myimportpath == "reflect" {
		if Curfn.Func.Nname.Sym.Name == "callReflect" || Curfn.Func.Nname.Sym.Name == "callMethod" {
			ptxt.From3.Offset |= obj.WRAPPER
		}
	}

	ginit()

	gcargs := makefuncdatasym("gcargs·", obj.FUNCDATA_ArgsPointerMaps)
	gclocals := makefuncdatasym("gclocals·", obj.FUNCDATA_LocalsPointerMaps)

	if obj.Fieldtrack_enabled != 0 && len(Curfn.Func.FieldTrack) > 0 {
		// Emit field-tracking symbols in a deterministic (sorted) order.
		trackSyms := make([]*Sym, 0, len(Curfn.Func.FieldTrack))
		for sym := range Curfn.Func.FieldTrack {
			trackSyms = append(trackSyms, sym)
		}
		sort.Sort(symByName(trackSyms))
		for _, sym := range trackSyms {
			gtrack(sym)
		}
	}

	// Emit ATYPE annotations for the function's stack variables.
	for _, n := range fn.Func.Dcl {
		if n.Op != ONAME { // might be OTYPE or OLITERAL
			continue
		}
		switch n.Class {
		case PAUTO, PPARAM, PPARAMOUT:
			Nodconst(&nod1, Types[TUINTPTR], n.Type.Width)
			p := Thearch.Gins(obj.ATYPE, n, &nod1)
			p.From.Gotype = Linksym(ngotype(n))
		}
	}

	if ssafn != nil {
		genssa(ssafn, ptxt, gcargs, gclocals)
		ssafn.Free()
	} else {
		genlegacy(ptxt, gcargs, gclocals)
	}
}

// symByName implements sort.Interface for []*Sym, ordering by Name.
type symByName []*Sym

func (a symByName) Len() int           { return len(a) }
func (a symByName) Less(i, j int) bool { return a[i].Name < a[j].Name }
func (a symByName) Swap(i, j int)      { a[i], a[j] = a[j], a[i] }

// genlegacy compiles Curfn using the legacy non-SSA code generator.
func genlegacy(ptxt *obj.Prog, gcargs, gclocals *Sym) {
	Genlist(Curfn.Func.Enter)
	Genlist(Curfn.Nbody)
	gclean()
	checklabels()
	if nerrors != 0 {
		return
	}
	if Curfn.Func.Endlineno != 0 {
		lineno = Curfn.Func.Endlineno
	}

	// Guard against falling off the end of a function that must return values.
	if Curfn.Type.Results().NumFields() != 0 {
		Ginscall(throwreturn, 0)
	}

	ginit()

	// TODO: Determine when the final cgen_ret can be omitted. Perhaps always?
	cgen_ret(nil)

	if hasdefer {
		// deferreturn pretends to have one uintptr argument.
		// Reserve space for it so stack scanner is happy.
		if Maxarg < int64(Widthptr) {
			Maxarg = int64(Widthptr)
		}
	}

	gclean()
	if nerrors != 0 {
		return
	}

	Pc.As = obj.ARET // overwrite AEND
	Pc.Lineno = lineno

	fixjmp(ptxt)
	// Run the optimizers unless -N (but -R/-P force them back on).
	if Debug['N'] == 0 || Debug['R'] != 0 || Debug['P'] != 0 {
		regopt(ptxt)
		nilopt(ptxt)
	}

	Thearch.Expandchecks(ptxt)

	allocauto(ptxt)

	setlineno(Curfn)
	if Stksize+Maxarg > 1<<31 {
		Yyerror("stack frame too large (>2GB)")
		return
	}

	// Emit garbage collection symbols.
	liveness(Curfn, ptxt, gcargs, gclocals)

	Thearch.Defframe(ptxt)

	if Debug['f'] != 0 {
		frame(0)
	}

	// Remove leftover instrumentation from the instruction stream.
	removevardef(ptxt)
}