github.com/tidwall/go@v0.0.0-20170415222209-6694a6888b7d/src/cmd/compile/internal/gc/align.go

// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package gc

import (
	"cmd/compile/internal/types"
	"sort"
)

// Machine size and rounding alignment are dictated by
// the size of a pointer, set in betypeinit (see ../amd64/galign.go).
var defercalc int

// Rnd rounds o up to the next multiple of r, which must be a
// power of two in the range [1, 8].
func Rnd(o int64, r int64) int64 {
	if r < 1 || r > 8 || r&(r-1) != 0 {
		Fatalf("rnd %d", r)
	}
	return (o + r - 1) &^ (r - 1)
}

// expandiface computes the method set for interface type t by
// expanding embedded interfaces.
func expandiface(t *types.Type) {
	var fields []*types.Field
	for _, m := range t.Methods().Slice() {
		if m.Sym != nil {
			fields = append(fields, m)
			continue
		}

		if !m.Type.IsInterface() {
			yyerrorl(asNode(m.Nname).Pos, "interface contains embedded non-interface %v", m.Type)
			m.SetBroke(true)
			t.SetBroke(true)
			// Add to fields so that error messages
			// include the broken embedded type when
			// printing t.
			// TODO(mdempsky): Revisit this.
			fields = append(fields, m)
			continue
		}

		// Embedded interface: duplicate all methods
		// (including broken ones, if any) and add to t's
		// method set.
		for _, t1 := range m.Type.Fields().Slice() {
			f := types.NewField()
			f.Type = t1.Type
			f.SetBroke(t1.Broke())
			f.Sym = t1.Sym
			f.Nname = m.Nname // preserve embedding position
			fields = append(fields, f)
		}
	}
	sort.Sort(methcmp(fields))

	// Access fields directly to avoid recursively calling dowidth
	// within Type.Fields().
	t.Extra.(*types.Interface).Fields.Set(fields)
}

func offmod(t *types.Type) {
	o := int32(0)
	for _, f := range t.Fields().Slice() {
		f.Offset = int64(o)
		o += int32(Widthptr)
		if int64(o) >= thearch.MAXWIDTH {
			yyerror("interface too large")
			o = int32(Widthptr)
		}
	}
}

func widstruct(errtype *types.Type, t *types.Type, o int64, flag int) int64 {
	starto := o
	maxalign := int32(flag)
	if maxalign < 1 {
		maxalign = 1
	}
	lastzero := int64(0)
	for _, f := range t.Fields().Slice() {
		if f.Type == nil {
			// broken field, just skip it so that other valid fields
			// get a width.
			continue
		}

		dowidth(f.Type)
		if int32(f.Type.Align) > maxalign {
			maxalign = int32(f.Type.Align)
		}
		if f.Type.Align > 0 {
			o = Rnd(o, int64(f.Type.Align))
		}
		f.Offset = o
		if asNode(f.Nname) != nil {
			// addrescapes has similar code to update these offsets.
			// Usually addrescapes runs after widstruct,
			// in which case we could drop this,
			// but function closure functions are the exception.
			// NOTE(rsc): This comment may be stale.
			// It's possible the ordering has changed and this is
			// now the common case. I'm not sure.
			if asNode(f.Nname).Name.Param.Stackcopy != nil {
				asNode(f.Nname).Name.Param.Stackcopy.Xoffset = o
				asNode(f.Nname).Xoffset = 0
			} else {
				asNode(f.Nname).Xoffset = o
			}
		}

		w := f.Type.Width
		if w < 0 {
			Fatalf("invalid width %d", f.Type.Width)
		}
		if w == 0 {
			lastzero = o
		}
		o += w
		maxwidth := thearch.MAXWIDTH
		// On 32-bit systems, reflect tables impose an additional constraint
		// that each field start offset must fit in 31 bits.
		if maxwidth < 1<<32 {
			maxwidth = 1<<31 - 1
		}
		if o >= maxwidth {
			yyerror("type %L too large", errtype)
			o = 8 // small but nonzero
		}
	}

	// For nonzero-sized structs which end in a zero-sized thing, we add
	// an extra byte of padding to the type. This padding ensures that
	// taking the address of the zero-sized thing can't manufacture a
	// pointer to the next object in the heap. See issue 9401.
	if flag == 1 && o > starto && o == lastzero {
		o++
	}

	// final width is rounded
	if flag != 0 {
		o = Rnd(o, int64(maxalign))
	}
	t.Align = uint8(maxalign)

	// type width only includes back to first field's offset
	t.Width = o - starto

	return o
}

func dowidth(t *types.Type) {
	if Widthptr == 0 {
		Fatalf("dowidth without betypeinit")
	}

	if t == nil {
		return
	}

	if t.Width == -2 {
		if !t.Broke() {
			t.SetBroke(true)
			yyerrorl(asNode(t.Nod).Pos, "invalid recursive type %v", t)
		}

		t.Width = 0
		return
	}

	if t.WidthCalculated() {
		return
	}

	// break infinite recursion if the broken recursive type
	// is referenced again
	if t.Broke() && t.Width == 0 {
		return
	}

	// defer checkwidth calls until after we're done
	defercalc++

	lno := lineno
	if asNode(t.Nod) != nil {
		lineno = asNode(t.Nod).Pos
	}

	t.Width = -2
	t.Align = 0

	et := t.Etype
	switch et {
	case TFUNC, TCHAN, TMAP, TSTRING:
		break

	// simtype == 0 during bootstrap
	default:
		if simtype[t.Etype] != 0 {
			et = simtype[t.Etype]
		}
	}

	w := int64(0)
	switch et {
	default:
		Fatalf("dowidth: unknown type: %v", t)

	// compiler-specific stuff
	case TINT8, TUINT8, TBOOL:
		// bool is int8
		w = 1

	case TINT16, TUINT16:
		w = 2

	case TINT32, TUINT32, TFLOAT32:
		w = 4

	case TINT64, TUINT64, TFLOAT64:
		w = 8
		t.Align = uint8(Widthreg)

	case TCOMPLEX64:
		w = 8
		t.Align = 4

	case TCOMPLEX128:
		w = 16
		t.Align = uint8(Widthreg)

	case TPTR32:
		w = 4
		checkwidth(t.Elem())

	case TPTR64:
		w = 8
		checkwidth(t.Elem())

	case TUNSAFEPTR:
		w = int64(Widthptr)

	case TINTER: // implemented as 2 pointers
		w = 2 * int64(Widthptr)
		t.Align = uint8(Widthptr)
		expandiface(t)

	case TCHAN: // implemented as pointer
		w = int64(Widthptr)

		checkwidth(t.Elem())

		// make fake type to check later to
		// trigger channel argument check.
		t1 := types.NewChanArgs(t)
		checkwidth(t1)

	case TCHANARGS:
		t1 := t.ChanArgs()
		dowidth(t1) // just in case
		if t1.Elem().Width >= 1<<16 {
			yyerror("channel element type too large (>64kB)")
		}
		w = 1 // anything will do

	case TMAP: // implemented as pointer
		w = int64(Widthptr)
		checkwidth(t.Val())
		checkwidth(t.Key())

	case TFORW: // should have been filled in
		if !t.Broke() {
			yyerror("invalid recursive type %v", t)
		}
		w = 1 // anything will do

	case TANY:
		// dummy type; should be replaced before use.
		Fatalf("dowidth any")

	case TSTRING:
		if sizeof_String == 0 {
			Fatalf("early dowidth string")
		}
		w = int64(sizeof_String)
		t.Align = uint8(Widthptr)

	case TARRAY:
		if t.Elem() == nil {
			break
		}
		if t.IsDDDArray() {
			if !t.Broke() {
				yyerror("use of [...] array outside of array literal")
				t.SetBroke(true)
			}
			break
		}

		dowidth(t.Elem())
		if t.Elem().Width != 0 {
			cap := (uint64(thearch.MAXWIDTH) - 1) / uint64(t.Elem().Width)
			if uint64(t.NumElem()) > cap {
				yyerror("type %L larger than address space", t)
			}
		}
		w = t.NumElem() * t.Elem().Width
		t.Align = t.Elem().Align

	case TSLICE:
		if t.Elem() == nil {
			break
		}
		w = int64(sizeof_Array)
		checkwidth(t.Elem())
		t.Align = uint8(Widthptr)

	case TSTRUCT:
		if t.IsFuncArgStruct() {
			Fatalf("dowidth fn struct %v", t)
		}
		w = widstruct(t, t, 0, 1)

	// make fake type to check later to
	// trigger function argument computation.
	case TFUNC:
		t1 := types.NewFuncArgs(t)
		checkwidth(t1)
		w = int64(Widthptr) // width of func type is pointer

	// a function is represented as 3 concatenated structures
	// (receiver, params, results); compute their widths as a side effect.
	case TFUNCARGS:
		t1 := t.FuncArgs()
		w = widstruct(t1, t1.Recvs(), 0, 0)
		w = widstruct(t1, t1.Params(), w, Widthreg)
		w = widstruct(t1, t1.Results(), w, Widthreg)
		t1.Extra.(*types.Func).Argwid = w
		if w%int64(Widthreg) != 0 {
			Warn("bad type %v %d\n", t1, w)
		}
		t.Align = 1
	}

	if Widthptr == 4 && w != int64(int32(w)) {
		yyerror("type %v too large", t)
	}

	t.Width = w
	if t.Align == 0 {
		if w > 8 || w&(w-1) != 0 || w == 0 {
			Fatalf("invalid alignment for %v", t)
		}
		t.Align = uint8(w)
	}

	if t.Etype == TINTER {
		// We defer calling these functions until after
		// setting t.Width and t.Align so the recursive calls
		// to dowidth within t.Fields() will succeed.
		checkdupfields("method", t)
		offmod(t)
	}

	lineno = lno

	if defercalc == 1 {
		resumecheckwidth()
	} else {
		defercalc--
	}
}

// when a type's width should be known, we call checkwidth
// to compute it. during a declaration like
//
//	type T *struct { next T }
//
// it is necessary to defer the calculation of the struct width
// until after T has been initialized to be a pointer to that struct.
// similarly, during import processing structs may be used
// before their definition. in those situations, calling
// defercheckwidth() stops width calculations until
// resumecheckwidth() is called, at which point all the
// checkwidths that were deferred are executed.
// dowidth should only be called when the type's size
// is needed immediately. checkwidth makes sure the
// size is evaluated eventually.

var deferredTypeStack []*types.Type

func checkwidth(t *types.Type) {
	if t == nil {
		return
	}

	// function arg structs should not be checked
	// outside of the enclosing function.
	if t.IsFuncArgStruct() {
		Fatalf("checkwidth %v", t)
	}

	if defercalc == 0 {
		dowidth(t)
		return
	}

	if t.Deferwidth() {
		return
	}
	t.SetDeferwidth(true)

	deferredTypeStack = append(deferredTypeStack, t)
}

func defercheckwidth() {
	// we get out of sync on syntax errors, so don't be pedantic.
	if defercalc != 0 && nerrors == 0 {
		Fatalf("defercheckwidth")
	}
	defercalc = 1
}

func resumecheckwidth() {
	if defercalc == 0 {
		Fatalf("resumecheckwidth")
	}
	for len(deferredTypeStack) > 0 {
		t := deferredTypeStack[len(deferredTypeStack)-1]
		deferredTypeStack = deferredTypeStack[:len(deferredTypeStack)-1]
		t.SetDeferwidth(false)
		dowidth(t)
	}

	defercalc = 0
}
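
// A worked example of the layout computed by widstruct and Rnd above, as a
// minimal sketch assuming an amd64-style target (Widthptr == Widthreg == 8):
//
//	struct { a int8; b int64; c int8 }
//
// places a at offset 0, b at offset Rnd(1, 8) == 8, and c at offset 16; the
// final Rnd(17, 8) gives Width 24 and Align 8. For the issue-9401 case,
//
//	struct { a int32; b struct{} }
//
// ends at a zero-sized field (o == lastzero), so a byte of padding is added
// before the final rounding, yielding Rnd(5, 4) == 8 rather than 4.
//
// The deferral protocol is used by bracketing a region that may mention
// not-yet-complete types (an illustrative pattern, not a specific call site):
//
//	defercheckwidth()
//	... declare types, calling checkwidth(t) wherever a size will be needed ...
//	resumecheckwidth() // pops deferredTypeStack and runs dowidth on each entry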