github.com/slayercat/go@v0.0.0-20170428012452-c51559813f61/src/cmd/compile/internal/gc/align.go

// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package gc

import (
	"cmd/compile/internal/types"
	"sort"
)

// sizeCalculationDisabled reports whether calculating Types' widths
// and alignments is currently disallowed. See dowidth.
var sizeCalculationDisabled bool

// Machine size and rounding alignment are dictated by
// the size of a pointer, set in betypeinit (see ../amd64/galign.go).
var defercalc int

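// Rnd rounds o up to the nearest multiple of r; r must be a power of
// two no larger than 8. For example (illustrative): Rnd(6, 4) == 8 and
// Rnd(8, 4) == 8.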
func Rnd(o int64, r int64) int64 {
	if r < 1 || r > 8 || r&(r-1) != 0 {
		Fatalf("rnd %d", r)
	}
	return (o + r - 1) &^ (r - 1)
}

// expandiface computes the method set for interface type t by
// expanding embedded interfaces.
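// For example (illustrative), an interface embedding io.Reader and also
// declaring Close() error ends up with both Read and Close in its
// expanded method set.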
func expandiface(t *types.Type) {
	var fields []*types.Field
	for _, m := range t.Methods().Slice() {
		if m.Sym != nil {
			fields = append(fields, m)
			continue
		}

		if !m.Type.IsInterface() {
			yyerrorl(asNode(m.Nname).Pos, "interface contains embedded non-interface %v", m.Type)
			m.SetBroke(true)
			t.SetBroke(true)
			// Add to fields so that error messages
			// include the broken embedded type when
			// printing t.
			// TODO(mdempsky): Revisit this.
			fields = append(fields, m)
			continue
		}

		// Embedded interface: duplicate all methods
		// (including broken ones, if any) and add to t's
		// method set.
		for _, t1 := range m.Type.Fields().Slice() {
			f := types.NewField()
			f.Type = t1.Type
			f.SetBroke(t1.Broke())
			f.Sym = t1.Sym
			f.Nname = m.Nname // preserve embedding position
			fields = append(fields, f)
		}
	}
	sort.Sort(methcmp(fields))

	// Access fields directly to avoid recursively calling dowidth
	// within Type.Fields().
	t.Extra.(*types.Interface).Fields.Set(fields)
}

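// offmod lays out the methods of interface type t, assigning each one
// an offset Widthptr bytes after the previous one.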
func offmod(t *types.Type) {
	o := int32(0)
	for _, f := range t.Fields().Slice() {
		f.Offset = int64(o)
		o += int32(Widthptr)
		if int64(o) >= thearch.MAXWIDTH {
			yyerror("interface too large")
			o = int32(Widthptr)
		}
	}
}

func widstruct(errtype *types.Type, t *types.Type, o int64, flag int) int64 {
	starto := o
	maxalign := int32(flag)
	if maxalign < 1 {
		maxalign = 1
	}
	lastzero := int64(0)
	for _, f := range t.Fields().Slice() {
		if f.Type == nil {
			// broken field, just skip it so that other valid fields
			// get a width.
			continue
		}

		dowidth(f.Type)
		if int32(f.Type.Align) > maxalign {
			maxalign = int32(f.Type.Align)
		}
		if f.Type.Align > 0 {
			o = Rnd(o, int64(f.Type.Align))
		}
		f.Offset = o
		if asNode(f.Nname) != nil {
			// addrescapes has similar code to update these offsets.
			// Usually addrescapes runs after widstruct,
			// in which case we could drop this,
			// but function closure functions are the exception.
			// NOTE(rsc): This comment may be stale.
			// It's possible the ordering has changed and this is
			// now the common case. I'm not sure.
			if asNode(f.Nname).Name.Param.Stackcopy != nil {
				asNode(f.Nname).Name.Param.Stackcopy.Xoffset = o
				asNode(f.Nname).Xoffset = 0
			} else {
				asNode(f.Nname).Xoffset = o
			}
		}

		w := f.Type.Width
		if w < 0 {
			Fatalf("invalid width %d", f.Type.Width)
		}
		if w == 0 {
			lastzero = o
		}
		o += w
		maxwidth := thearch.MAXWIDTH
		// On 32-bit systems, reflect tables impose an additional constraint
		// that each field start offset must fit in 31 bits.
		if maxwidth < 1<<32 {
			maxwidth = 1<<31 - 1
		}
		if o >= maxwidth {
			yyerror("type %L too large", errtype)
			o = 8 // small but nonzero
		}
	}

	// For nonzero-sized structs which end in a zero-sized thing, we add
	// an extra byte of padding to the type. This padding ensures that
	// taking the address of the zero-sized thing can't manufacture a
	// pointer to the next object in the heap. See issue 9401.
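	// For example (illustrative): in struct{ a int32; b struct{} }, the
	// trailing zero-sized field b would otherwise end exactly where the
	// struct ends, so the extra byte keeps &x.b from pointing one past it.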
	if flag == 1 && o > starto && o == lastzero {
		o++
	}

	// final width is rounded
	if flag != 0 {
		o = Rnd(o, int64(maxalign))
	}
	t.Align = uint8(maxalign)

	// type width only includes back to first field's offset
	t.Width = o - starto

	return o
}

// dowidth calculates and stores the size and alignment for t.
// If sizeCalculationDisabled is set and the size/alignment
// have not already been calculated, it calls Fatalf.
// This is used to prevent data races in the back end.
func dowidth(t *types.Type) {
	if Widthptr == 0 {
		Fatalf("dowidth without betypeinit")
	}

	if t == nil {
		return
	}

	if t.Width == -2 {
		if !t.Broke() {
			t.SetBroke(true)
			yyerrorl(asNode(t.Nod).Pos, "invalid recursive type %v", t)
		}

		t.Width = 0
		return
	}

	if t.WidthCalculated() {
		return
	}

	if sizeCalculationDisabled {
		Fatalf("width not calculated: %v", t)
	}

	// break infinite recursion if the broken recursive type
	// is referenced again
	if t.Broke() && t.Width == 0 {
		return
	}

	// defer checkwidth calls until after we're done
	defercalc++

	lno := lineno
	if asNode(t.Nod) != nil {
		lineno = asNode(t.Nod).Pos
	}

	t.Width = -2
	t.Align = 0

	et := t.Etype
	switch et {
	case TFUNC, TCHAN, TMAP, TSTRING:
		break

	// simtype == 0 during bootstrap
	default:
		if simtype[t.Etype] != 0 {
			et = simtype[t.Etype]
		}
	}

	w := int64(0)
	switch et {
	default:
		Fatalf("dowidth: unknown type: %v", t)

	// compiler-specific stuff
	case TINT8, TUINT8, TBOOL:
		// bool is int8
		w = 1

	case TINT16, TUINT16:
		w = 2

	case TINT32, TUINT32, TFLOAT32:
		w = 4

	case TINT64, TUINT64, TFLOAT64:
		w = 8
		t.Align = uint8(Widthreg)

	case TCOMPLEX64:
		w = 8
		t.Align = 4

	case TCOMPLEX128:
		w = 16
		t.Align = uint8(Widthreg)

	case TPTR32:
		w = 4
		checkwidth(t.Elem())

	case TPTR64:
		w = 8
		checkwidth(t.Elem())

	case TUNSAFEPTR:
		w = int64(Widthptr)

	case TINTER: // implemented as 2 pointers
		w = 2 * int64(Widthptr)
		t.Align = uint8(Widthptr)
		expandiface(t)

	case TCHAN: // implemented as pointer
		w = int64(Widthptr)

		checkwidth(t.Elem())

		// make fake type to check later to
		// trigger channel argument check.
		t1 := types.NewChanArgs(t)
		checkwidth(t1)

	case TCHANARGS:
		t1 := t.ChanArgs()
		dowidth(t1) // just in case
		if t1.Elem().Width >= 1<<16 {
			yyerror("channel element type too large (>64kB)")
		}
		w = 1 // anything will do

	case TMAP: // implemented as pointer
		w = int64(Widthptr)
		checkwidth(t.Val())
		checkwidth(t.Key())

	case TFORW: // should have been filled in
		if !t.Broke() {
			yyerror("invalid recursive type %v", t)
		}
		w = 1 // anything will do

	case TANY:
		// dummy type; should be replaced before use.
		Fatalf("dowidth any")

	case TSTRING:
		if sizeof_String == 0 {
			Fatalf("early dowidth string")
		}
		w = int64(sizeof_String)
		t.Align = uint8(Widthptr)

	case TARRAY:
		if t.Elem() == nil {
			break
		}
		if t.IsDDDArray() {
			if !t.Broke() {
				yyerror("use of [...] array outside of array literal")
				t.SetBroke(true)
			}
			break
		}

		dowidth(t.Elem())
		if t.Elem().Width != 0 {
			cap := (uint64(thearch.MAXWIDTH) - 1) / uint64(t.Elem().Width)
			if uint64(t.NumElem()) > cap {
				yyerror("type %L larger than address space", t)
			}
		}
		w = t.NumElem() * t.Elem().Width
		t.Align = t.Elem().Align

	case TSLICE:
		if t.Elem() == nil {
			break
		}
		w = int64(sizeof_Array)
		checkwidth(t.Elem())
		t.Align = uint8(Widthptr)

	case TSTRUCT:
		if t.IsFuncArgStruct() {
			Fatalf("dowidth fn struct %v", t)
		}
		w = widstruct(t, t, 0, 1)

	// make fake type to check later to
	// trigger function argument computation.
	case TFUNC:
		t1 := types.NewFuncArgs(t)
		checkwidth(t1)
		w = int64(Widthptr) // width of func type is pointer

	// function is 3 concatenated structures;
	// compute their widths as side-effect.
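	// Illustratively, for func (r T) m(a int) (s string) the receiver,
	// parameter, and result structs are laid out back to back and their
	// combined width is recorded in Argwid.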
	case TFUNCARGS:
		t1 := t.FuncArgs()
		w = widstruct(t1, t1.Recvs(), 0, 0)
		w = widstruct(t1, t1.Params(), w, Widthreg)
		w = widstruct(t1, t1.Results(), w, Widthreg)
		t1.Extra.(*types.Func).Argwid = w
		if w%int64(Widthreg) != 0 {
			Warn("bad type %v %d\n", t1, w)
		}
		t.Align = 1
	}

	if Widthptr == 4 && w != int64(int32(w)) {
		yyerror("type %v too large", t)
	}

	t.Width = w
	if t.Align == 0 {
		if w > 8 || w&(w-1) != 0 || w == 0 {
			Fatalf("invalid alignment for %v", t)
		}
		t.Align = uint8(w)
	}

	if t.Etype == TINTER {
		// We defer calling these functions until after
		// setting t.Width and t.Align so the recursive calls
		// to dowidth within t.Fields() will succeed.
		checkdupfields("method", t)
		offmod(t)
	}

	lineno = lno

	if defercalc == 1 {
		resumecheckwidth()
	} else {
		defercalc--
	}
}

// when a type's width should be known, we call checkwidth
// to compute it.  during a declaration like
//
//	type T *struct { next T }
//
// it is necessary to defer the calculation of the struct width
// until after T has been initialized to be a pointer to that struct.
// similarly, during import processing structs may be used
// before their definition.  in those situations, calling
// defercheckwidth() stops width calculations until
// resumecheckwidth() is called, at which point all the
// checkwidths that were deferred are executed.
// dowidth should only be called when the type's size
// is needed immediately.  checkwidth makes sure the
// size is evaluated eventually.
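//
// A typical bracketing (illustrative sketch, not a verbatim call site):
//
//	defercheckwidth()
//	// ... process declarations, calling checkwidth on types as needed ...
//	resumecheckwidth() // deferred widths are computed here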

var deferredTypeStack []*types.Type

func checkwidth(t *types.Type) {
	if t == nil {
		return
	}

	// function arg structs should not be checked
	// outside of the enclosing function.
	if t.IsFuncArgStruct() {
		Fatalf("checkwidth %v", t)
	}

	if defercalc == 0 {
		dowidth(t)
		return
	}

	if t.Deferwidth() {
		return
	}
	t.SetDeferwidth(true)

	deferredTypeStack = append(deferredTypeStack, t)
}

func defercheckwidth() {
	// we get out of sync on syntax errors, so don't be pedantic.
	if defercalc != 0 && nerrors == 0 {
		Fatalf("defercheckwidth")
	}
	defercalc = 1
}

func resumecheckwidth() {
	if defercalc == 0 {
		Fatalf("resumecheckwidth")
	}
	for len(deferredTypeStack) > 0 {
		t := deferredTypeStack[len(deferredTypeStack)-1]
		deferredTypeStack = deferredTypeStack[:len(deferredTypeStack)-1]
		t.SetDeferwidth(false)
		dowidth(t)
	}

	defercalc = 0
}