github.com/goproxy0/go@v0.0.0-20171111080102-49cc0c489d2c/src/cmd/compile/internal/gc/align.go

// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package gc

import (
	"cmd/compile/internal/types"
	"sort"
)

// sizeCalculationDisabled indicates whether it is safe
// to calculate Types' widths and alignments. See dowidth.
var sizeCalculationDisabled bool

// machine size and rounding alignment is dictated around
// the size of a pointer, set in betypeinit (see ../amd64/galign.go).
var defercalc int

func Rnd(o int64, r int64) int64 {
	if r < 1 || r > 8 || r&(r-1) != 0 {
		Fatalf("rnd %d", r)
	}
	return (o + r - 1) &^ (r - 1)
}

// expandiface computes the method set for interface type t by
// expanding embedded interfaces.
func expandiface(t *types.Type) {
	var fields []*types.Field
	for _, m := range t.Methods().Slice() {
		if m.Sym != nil {
			fields = append(fields, m)
			checkwidth(m.Type)
			continue
		}

		if !m.Type.IsInterface() {
			yyerrorl(asNode(m.Nname).Pos, "interface contains embedded non-interface %v", m.Type)
			m.SetBroke(true)
			t.SetBroke(true)
			// Add to fields so that error messages
			// include the broken embedded type when
			// printing t.
			// TODO(mdempsky): Revisit this.
			fields = append(fields, m)
			continue
		}

		// Embedded interface: duplicate all methods
		// (including broken ones, if any) and add to t's
		// method set.
		for _, t1 := range m.Type.Fields().Slice() {
			f := types.NewField()
			f.Type = t1.Type
			f.SetBroke(t1.Broke())
			f.Sym = t1.Sym
			f.Nname = m.Nname // preserve embedding position
			fields = append(fields, f)
		}
	}
	sort.Sort(methcmp(fields))

	// Access fields directly to avoid recursively calling dowidth
	// within Type.Fields().
	t.Extra.(*types.Interface).Fields.Set(fields)
}

func offmod(t *types.Type) {
	o := int32(0)
	for _, f := range t.Fields().Slice() {
		f.Offset = int64(o)
		o += int32(Widthptr)
		if int64(o) >= thearch.MAXWIDTH {
			yyerror("interface too large")
			o = int32(Widthptr)
		}
	}
}

func widstruct(errtype *types.Type, t *types.Type, o int64, flag int) int64 {
	starto := o
	maxalign := int32(flag)
	if maxalign < 1 {
		maxalign = 1
	}
	lastzero := int64(0)
	for _, f := range t.Fields().Slice() {
		if f.Type == nil {
			// broken field, just skip it so that other valid fields
			// get a width.
			continue
		}

		dowidth(f.Type)
		if int32(f.Type.Align) > maxalign {
			maxalign = int32(f.Type.Align)
		}
		if f.Type.Align > 0 {
			o = Rnd(o, int64(f.Type.Align))
		}
		f.Offset = o
		if asNode(f.Nname) != nil {
			// addrescapes has similar code to update these offsets.
			// Usually addrescapes runs after widstruct,
			// in which case we could drop this,
			// but function closure functions are the exception.
			// NOTE(rsc): This comment may be stale.
			// It's possible the ordering has changed and this is
			// now the common case. I'm not sure.
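			// (Added note: when the parameter has been moved to the
			// heap, the frame offset belongs to its on-stack copy,
			// Stackcopy, and the heap node's own Xoffset is cleared.)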
			if asNode(f.Nname).Name.Param.Stackcopy != nil {
				asNode(f.Nname).Name.Param.Stackcopy.Xoffset = o
				asNode(f.Nname).Xoffset = 0
			} else {
				asNode(f.Nname).Xoffset = o
			}
		}

		w := f.Type.Width
		if w < 0 {
			Fatalf("invalid width %d", f.Type.Width)
		}
		if w == 0 {
			lastzero = o
		}
		o += w
		maxwidth := thearch.MAXWIDTH
		// On 32-bit systems, reflect tables impose an additional constraint
		// that each field start offset must fit in 31 bits.
		if maxwidth < 1<<32 {
			maxwidth = 1<<31 - 1
		}
		if o >= maxwidth {
			yyerror("type %L too large", errtype)
			o = 8 // small but nonzero
		}
	}

	// For nonzero-sized structs which end in a zero-sized thing, we add
	// an extra byte of padding to the type. This padding ensures that
	// taking the address of the zero-sized thing can't manufacture a
	// pointer to the next object in the heap. See issue 9401.
	if flag == 1 && o > starto && o == lastzero {
		o++
	}

	// final width is rounded
	if flag != 0 {
		o = Rnd(o, int64(maxalign))
	}
	t.Align = uint8(maxalign)

	// type width only includes back to first field's offset
	t.Width = o - starto

	return o
}

// dowidth calculates and stores the size and alignment for t.
// If sizeCalculationDisabled is set, and the size/alignment
// have not already been calculated, it calls Fatal.
// This is used to prevent data races in the back end.
func dowidth(t *types.Type) {
	if Widthptr == 0 {
		Fatalf("dowidth without betypeinit")
	}

	if t == nil {
		return
	}

	if t.Width == -2 {
		if !t.Broke() {
			t.SetBroke(true)
			yyerrorl(asNode(t.Nod).Pos, "invalid recursive type %v", t)
		}

		t.Width = 0
		t.Align = 1
		return
	}

	if t.WidthCalculated() {
		return
	}

	if sizeCalculationDisabled {
		if t.Broke() {
			// break infinite recursion from Fatal call below
			return
		}
		t.SetBroke(true)
		Fatalf("width not calculated: %v", t)
	}

	// break infinite recursion if the broken recursive type
	// is referenced again
	if t.Broke() && t.Width == 0 {
		return
	}

	// defer checkwidth calls until after we're done
	defercalc++

	lno := lineno
	if asNode(t.Nod) != nil {
		lineno = asNode(t.Nod).Pos
	}

	t.Width = -2
	t.Align = 0

	et := t.Etype
	switch et {
	case TFUNC, TCHAN, TMAP, TSTRING:
		break

	// simtype == 0 during bootstrap
	default:
		if simtype[t.Etype] != 0 {
			et = simtype[t.Etype]
		}
	}

	w := int64(0)
	switch et {
	default:
		Fatalf("dowidth: unknown type: %v", t)

	// compiler-specific stuff
	case TINT8, TUINT8, TBOOL:
		// bool is int8
		w = 1

	case TINT16, TUINT16:
		w = 2

	case TINT32, TUINT32, TFLOAT32:
		w = 4

	case TINT64, TUINT64, TFLOAT64:
		w = 8
		t.Align = uint8(Widthreg)

	case TCOMPLEX64:
		w = 8
		t.Align = 4

	case TCOMPLEX128:
		w = 16
		t.Align = uint8(Widthreg)

	case TPTR32:
		w = 4
		checkwidth(t.Elem())

	case TPTR64:
		w = 8
		checkwidth(t.Elem())

	case TUNSAFEPTR:
		w = int64(Widthptr)

	case TINTER: // implemented as 2 pointers
		w = 2 * int64(Widthptr)
		t.Align = uint8(Widthptr)
		expandiface(t)

	case TCHAN: // implemented as pointer
		w = int64(Widthptr)

		checkwidth(t.Elem())

		// make fake type to check later to
		// trigger channel argument check.
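		// (Added note: the element width may not be known yet, e.g.
		// for a recursive element type, so the 64kB element check in
		// the TCHANARGS case runs only after element widths resolve.)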
		t1 := types.NewChanArgs(t)
		checkwidth(t1)

	case TCHANARGS:
		t1 := t.ChanArgs()
		dowidth(t1) // just in case
		if t1.Elem().Width >= 1<<16 {
			yyerror("channel element type too large (>64kB)")
		}
		w = 1 // anything will do

	case TMAP: // implemented as pointer
		w = int64(Widthptr)
		checkwidth(t.Val())
		checkwidth(t.Key())

	case TFORW: // should have been filled in
		if !t.Broke() {
			t.SetBroke(true)
			yyerror("invalid recursive type %v", t)
		}
		w = 1 // anything will do

	case TANY:
		// dummy type; should be replaced before use.
		Fatalf("dowidth any")

	case TSTRING:
		if sizeof_String == 0 {
			Fatalf("early dowidth string")
		}
		w = int64(sizeof_String)
		t.Align = uint8(Widthptr)

	case TARRAY:
		if t.Elem() == nil {
			break
		}
		if t.IsDDDArray() {
			if !t.Broke() {
				yyerror("use of [...] array outside of array literal")
				t.SetBroke(true)
			}
			break
		}

		dowidth(t.Elem())
		if t.Elem().Width != 0 {
			cap := (uint64(thearch.MAXWIDTH) - 1) / uint64(t.Elem().Width)
			if uint64(t.NumElem()) > cap {
				yyerror("type %L larger than address space", t)
			}
		}
		w = t.NumElem() * t.Elem().Width
		t.Align = t.Elem().Align

	case TSLICE:
		if t.Elem() == nil {
			break
		}
		w = int64(sizeof_Array)
		checkwidth(t.Elem())
		t.Align = uint8(Widthptr)

	case TSTRUCT:
		if t.IsFuncArgStruct() {
			Fatalf("dowidth fn struct %v", t)
		}
		w = widstruct(t, t, 0, 1)

	// make fake type to check later to
	// trigger function argument computation.
	case TFUNC:
		t1 := types.NewFuncArgs(t)
		checkwidth(t1)
		w = int64(Widthptr) // width of func type is pointer

	// function is 3 concatenated structures;
	// compute their widths as side-effect.
	case TFUNCARGS:
		t1 := t.FuncArgs()
		w = widstruct(t1, t1.Recvs(), 0, 0)
		w = widstruct(t1, t1.Params(), w, Widthreg)
		w = widstruct(t1, t1.Results(), w, Widthreg)
		t1.Extra.(*types.Func).Argwid = w
		if w%int64(Widthreg) != 0 {
			Warn("bad type %v %d\n", t1, w)
		}
		t.Align = 1
	}

	if Widthptr == 4 && w != int64(int32(w)) {
		yyerror("type %v too large", t)
	}

	t.Width = w
	if t.Align == 0 {
		if w > 8 || w&(w-1) != 0 || w == 0 {
			Fatalf("invalid alignment for %v", t)
		}
		t.Align = uint8(w)
	}

	if t.Etype == TINTER {
		// We defer calling these functions until after
		// setting t.Width and t.Align so the recursive calls
		// to dowidth within t.Fields() will succeed.
		checkdupfields("method", t)
		offmod(t)
	}

	lineno = lno

	if defercalc == 1 {
		resumecheckwidth()
	} else {
		defercalc--
	}
}

// when a type's width should be known, we call checkwidth
// to compute it. during a declaration like
//
//	type T *struct { next T }
//
// it is necessary to defer the calculation of the struct width
// until after T has been initialized to be a pointer to that struct.
// similarly, during import processing structs may be used
// before their definition. in those situations, calling
// defercheckwidth() stops width calculations until
// resumecheckwidth() is called, at which point all the
// checkwidths that were deferred are executed.
// dowidth should only be called when the type's size
// is needed immediately. checkwidth makes sure the
// size is evaluated eventually.
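//
// a rough sketch of the intended usage (added for illustration, not part of
// the original comment):
//
//	defercheckwidth()
//	// ... declare possibly recursive types; checkwidth(t) only queues t ...
//	resumecheckwidth() // drains deferredTypeStack, calling dowidth on each type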

var deferredTypeStack []*types.Type

func checkwidth(t *types.Type) {
	if t == nil {
		return
	}

	// function arg structs should not be checked
	// outside of the enclosing function.
	if t.IsFuncArgStruct() {
		Fatalf("checkwidth %v", t)
	}

	if defercalc == 0 {
		dowidth(t)
		return
	}

	if t.Deferwidth() {
		return
	}
	t.SetDeferwidth(true)

	deferredTypeStack = append(deferredTypeStack, t)
}

func defercheckwidth() {
	// we get out of sync on syntax errors, so don't be pedantic.
	if defercalc != 0 && nerrors == 0 {
		Fatalf("defercheckwidth")
	}
	defercalc = 1
}

func resumecheckwidth() {
	if defercalc == 0 {
		Fatalf("resumecheckwidth")
	}
	for len(deferredTypeStack) > 0 {
		t := deferredTypeStack[len(deferredTypeStack)-1]
		deferredTypeStack = deferredTypeStack[:len(deferredTypeStack)-1]
		t.SetDeferwidth(false)
		dowidth(t)
	}

	defercalc = 0
}
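
// Worked example (added for illustration, not part of the original source):
// with Widthptr = Widthreg = 8, widstruct lays out struct{ a byte; b int64; c byte }
// by placing a at offset 0, rounding up for b (Rnd(1, 8) == 8), placing c at
// offset 16, and rounding the final width to Rnd(17, 8) == 24 with alignment 8.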