github.com/megatontech/mynoteforgo@v0.0.0-20200507084910-5d0c6ea6e890/源码/cmd/compile/internal/gc/align.go (about)

     1  // Copyright 2009 The Go Authors. All rights reserved.
     2  // Use of this source code is governed by a BSD-style
     3  // license that can be found in the LICENSE file.
     4  
     5  package gc
     6  
     7  import (
     8  	"cmd/compile/internal/types"
     9  	"sort"
    10  )
    11  
// sizeCalculationDisabled indicates whether it is safe
// to calculate Types' widths and alignments. See dowidth.
// While set, dowidth turns an uncalculated width into a Fatalf
// instead of computing it, preventing data races in the back end.
var sizeCalculationDisabled bool

// machine size and rounding alignment is dictated around
// the size of a pointer, set in betypeinit (see ../amd64/galign.go).
// NOTE(review): the comment above appears stale for this variable —
// defercalc is the nesting count of deferred width calculations
// (see defercheckwidth/resumecheckwidth and dowidth); confirm.
var defercalc int
    19  
    20  func Rnd(o int64, r int64) int64 {
    21  	if r < 1 || r > 8 || r&(r-1) != 0 {
    22  		Fatalf("rnd %d", r)
    23  	}
    24  	return (o + r - 1) &^ (r - 1)
    25  }
    26  
    27  // expandiface computes the method set for interface type t by
    28  // expanding embedded interfaces.
    29  func expandiface(t *types.Type) {
    30  	var fields []*types.Field
    31  	for _, m := range t.Methods().Slice() {
    32  		if m.Sym != nil {
    33  			fields = append(fields, m)
    34  			checkwidth(m.Type)
    35  			continue
    36  		}
    37  
    38  		if !m.Type.IsInterface() {
    39  			yyerrorl(m.Pos, "interface contains embedded non-interface %v", m.Type)
    40  			m.SetBroke(true)
    41  			t.SetBroke(true)
    42  			// Add to fields so that error messages
    43  			// include the broken embedded type when
    44  			// printing t.
    45  			// TODO(mdempsky): Revisit this.
    46  			fields = append(fields, m)
    47  			continue
    48  		}
    49  
    50  		// Embedded interface: duplicate all methods
    51  		// (including broken ones, if any) and add to t's
    52  		// method set.
    53  		for _, t1 := range m.Type.Fields().Slice() {
    54  			f := types.NewField()
    55  			f.Pos = m.Pos // preserve embedding position
    56  			f.Sym = t1.Sym
    57  			f.Type = t1.Type
    58  			f.SetBroke(t1.Broke())
    59  			fields = append(fields, f)
    60  		}
    61  	}
    62  	sort.Sort(methcmp(fields))
    63  
    64  	// Access fields directly to avoid recursively calling dowidth
    65  	// within Type.Fields().
    66  	t.Extra.(*types.Interface).Fields.Set(fields)
    67  }
    68  
    69  func offmod(t *types.Type) {
    70  	o := int32(0)
    71  	for _, f := range t.Fields().Slice() {
    72  		f.Offset = int64(o)
    73  		o += int32(Widthptr)
    74  		if int64(o) >= thearch.MAXWIDTH {
    75  			yyerror("interface too large")
    76  			o = int32(Widthptr)
    77  		}
    78  	}
    79  }
    80  
// widstruct lays out the fields of struct type t starting at byte
// offset o and returns the offset just past the last field.
//
// errtype is the type named in "too large" diagnostics (for function
// argument structs the function type reads better than the synthesized
// struct). flag selects the layout mode:
//
//	0 — no minimum alignment, no trailing padding, no final rounding
//	    (used for receiver blocks);
//	1 — ordinary struct: add trailing padding after a final zero-size
//	    field and round the total size to the struct alignment;
//	>1 — additionally acts as a minimum alignment in bytes (callers
//	    pass Widthreg for parameter/result blocks).
//
// Side effects: each field's Offset is set, offsets are mirrored into
// the fields' name nodes (and their Stackcopy, if present), and
// t.Align and t.Width are stored.
func widstruct(errtype *types.Type, t *types.Type, o int64, flag int) int64 {
	starto := o
	// maxalign accumulates the largest field alignment seen; flag
	// doubles as the starting minimum (clamped up to at least 1).
	maxalign := int32(flag)
	if maxalign < 1 {
		maxalign = 1
	}
	// lastzero remembers the offset of the most recent zero-width
	// field, for the issue-9401 padding check below.
	lastzero := int64(0)
	for _, f := range t.Fields().Slice() {
		if f.Type == nil {
			// broken field, just skip it so that other valid fields
			// get a width.
			continue
		}

		// Size the field's type first; layout depends on its
		// Width and Align being final.
		dowidth(f.Type)
		if int32(f.Type.Align) > maxalign {
			maxalign = int32(f.Type.Align)
		}
		if f.Type.Align > 0 {
			o = Rnd(o, int64(f.Type.Align))
		}
		f.Offset = o
		if n := asNode(f.Nname); n != nil {
			// addrescapes has similar code to update these offsets.
			// Usually addrescapes runs after widstruct,
			// in which case we could drop this,
			// but function closure functions are the exception.
			// NOTE(rsc): This comment may be stale.
			// It's possible the ordering has changed and this is
			// now the common case. I'm not sure.
			if n.Name.Param.Stackcopy != nil {
				n.Name.Param.Stackcopy.Xoffset = o
				n.Xoffset = 0
			} else {
				n.Xoffset = o
			}
		}

		w := f.Type.Width
		if w < 0 {
			Fatalf("invalid width %d", f.Type.Width)
		}
		if w == 0 {
			lastzero = o
		}
		o += w
		maxwidth := thearch.MAXWIDTH
		// On 32-bit systems, reflect tables impose an additional constraint
		// that each field start offset must fit in 31 bits.
		if maxwidth < 1<<32 {
			maxwidth = 1<<31 - 1
		}
		if o >= maxwidth {
			yyerror("type %L too large", errtype)
			o = 8 // small but nonzero
		}
	}

	// For nonzero-sized structs which end in a zero-sized thing, we add
	// an extra byte of padding to the type. This padding ensures that
	// taking the address of the zero-sized thing can't manufacture a
	// pointer to the next object in the heap. See issue 9401.
	if flag == 1 && o > starto && o == lastzero {
		o++
	}

	// final width is rounded
	if flag != 0 {
		o = Rnd(o, int64(maxalign))
	}
	t.Align = uint8(maxalign)

	// type width only includes back to first field's offset
	t.Width = o - starto

	return o
}
   158  
// dowidth calculates and stores the size and alignment for t.
// If sizeCalculationDisabled is set, and the size/alignment
// have not already been calculated, it calls Fatal.
// This is used to prevent data races in the back end.
//
// While t is being laid out, t.Width holds the sentinel -2; re-entering
// dowidth on such a type means the type is invalidly recursive.
// checkwidth calls made during layout are deferred (via defercalc) and
// flushed on the way out.
func dowidth(t *types.Type) {
	if Widthptr == 0 {
		Fatalf("dowidth without betypeinit")
	}

	if t == nil {
		return
	}

	// -2 marks a type whose layout is currently in progress;
	// seeing it again means t contains itself without indirection.
	if t.Width == -2 {
		if !t.Broke() {
			t.SetBroke(true)
			yyerrorl(asNode(t.Nod).Pos, "invalid recursive type %v", t)
		}

		t.Width = 0
		t.Align = 1
		return
	}

	if t.WidthCalculated() {
		return
	}

	if sizeCalculationDisabled {
		if t.Broke() {
			// break infinite recursion from Fatal call below
			return
		}
		t.SetBroke(true)
		Fatalf("width not calculated: %v", t)
	}

	// break infinite recursion if the broken recursive type
	// is referenced again
	if t.Broke() && t.Width == 0 {
		return
	}

	// defer checkwidth calls until after we're done
	defercalc++

	// Point diagnostics at the type's declaration while sizing it;
	// lineno is restored before returning.
	lno := lineno
	if asNode(t.Nod) != nil {
		lineno = asNode(t.Nod).Pos
	}

	t.Width = -2
	t.Align = 0 // 0 means use t.Width, below

	// Map t to its simple (machine) type, except for the kinds
	// handled structurally below.
	et := t.Etype
	switch et {
	case TFUNC, TCHAN, TMAP, TSTRING:
		break

	// simtype == 0 during bootstrap
	default:
		if simtype[t.Etype] != 0 {
			et = simtype[t.Etype]
		}
	}

	var w int64
	switch et {
	default:
		Fatalf("dowidth: unknown type: %v", t)

	// compiler-specific stuff
	case TINT8, TUINT8, TBOOL:
		// bool is int8
		w = 1

	case TINT16, TUINT16:
		w = 2

	case TINT32, TUINT32, TFLOAT32:
		w = 4

	case TINT64, TUINT64, TFLOAT64:
		w = 8
		t.Align = uint8(Widthreg)

	case TCOMPLEX64:
		w = 8
		t.Align = 4

	case TCOMPLEX128:
		w = 16
		t.Align = uint8(Widthreg)

	case TPTR:
		w = int64(Widthptr)
		checkwidth(t.Elem())

	case TUNSAFEPTR:
		w = int64(Widthptr)

	case TINTER: // implemented as 2 pointers
		w = 2 * int64(Widthptr)
		t.Align = uint8(Widthptr)
		expandiface(t)

	case TCHAN: // implemented as pointer
		w = int64(Widthptr)

		checkwidth(t.Elem())

		// make fake type to check later to
		// trigger channel argument check.
		t1 := types.NewChanArgs(t)
		checkwidth(t1)

	case TCHANARGS:
		t1 := t.ChanArgs()
		dowidth(t1) // just in case
		if t1.Elem().Width >= 1<<16 {
			yyerror("channel element type too large (>64kB)")
		}
		w = 1 // anything will do

	case TMAP: // implemented as pointer
		w = int64(Widthptr)
		checkwidth(t.Elem())
		checkwidth(t.Key())

	case TFORW: // should have been filled in
		if !t.Broke() {
			t.SetBroke(true)
			yyerror("invalid recursive type %v", t)
		}
		w = 1 // anything will do

	case TANY:
		// dummy type; should be replaced before use.
		Fatalf("dowidth any")

	case TSTRING:
		if sizeof_String == 0 {
			Fatalf("early dowidth string")
		}
		w = int64(sizeof_String)
		t.Align = uint8(Widthptr)

	case TARRAY:
		if t.Elem() == nil {
			break
		}
		if t.IsDDDArray() {
			if !t.Broke() {
				yyerror("use of [...] array outside of array literal")
				t.SetBroke(true)
			}
			break
		}

		dowidth(t.Elem())
		if t.Elem().Width != 0 {
			// cap is the largest element count that keeps the
			// array within the address space.
			cap := (uint64(thearch.MAXWIDTH) - 1) / uint64(t.Elem().Width)
			if uint64(t.NumElem()) > cap {
				yyerror("type %L larger than address space", t)
			}
		}
		w = t.NumElem() * t.Elem().Width
		t.Align = t.Elem().Align

	case TSLICE:
		if t.Elem() == nil {
			break
		}
		w = int64(sizeof_Array)
		checkwidth(t.Elem())
		t.Align = uint8(Widthptr)

	case TSTRUCT:
		if t.IsFuncArgStruct() {
			Fatalf("dowidth fn struct %v", t)
		}
		w = widstruct(t, t, 0, 1)

	// make fake type to check later to
	// trigger function argument computation.
	case TFUNC:
		t1 := types.NewFuncArgs(t)
		checkwidth(t1)
		w = int64(Widthptr) // width of func type is pointer

	// function is 3 cated structures;
	// compute their widths as side-effect.
	case TFUNCARGS:
		t1 := t.FuncArgs()
		w = widstruct(t1, t1.Recvs(), 0, 0)
		w = widstruct(t1, t1.Params(), w, Widthreg)
		w = widstruct(t1, t1.Results(), w, Widthreg)
		t1.Extra.(*types.Func).Argwid = w
		if w%int64(Widthreg) != 0 {
			Warn("bad type %v %d\n", t1, w)
		}
		t.Align = 1
	}

	// On 32-bit targets the width must also fit in an int32.
	if Widthptr == 4 && w != int64(int32(w)) {
		yyerror("type %v too large", t)
	}

	t.Width = w
	if t.Align == 0 {
		// Alignment defaults to the width itself, which must then
		// be a power of two no larger than 8.
		if w == 0 || w > 8 || w&(w-1) != 0 {
			Fatalf("invalid alignment for %v", t)
		}
		t.Align = uint8(w)
	}

	if t.Etype == TINTER {
		// We defer calling these functions until after
		// setting t.Width and t.Align so the recursive calls
		// to dowidth within t.Fields() will succeed.
		checkdupfields("method", t)
		offmod(t)
	}

	lineno = lno

	// If this was the outermost dowidth, flush the types whose
	// checkwidth calls were deferred; otherwise just unwind one level.
	if defercalc == 1 {
		resumecheckwidth()
	} else {
		defercalc--
	}
}
   391  
// when a type's width should be known, we call checkwidth
// to compute it.  during a declaration like
//
//	type T *struct { next T }
//
// it is necessary to defer the calculation of the struct width
// until after T has been initialized to be a pointer to that struct.
// similarly, during import processing structs may be used
// before their definition.  in those situations, calling
// defercheckwidth() stops width calculations until
// resumecheckwidth() is called, at which point all the
// checkwidths that were deferred are executed.
// dowidth should only be called when the type's size
// is needed immediately.  checkwidth makes sure the
// size is evaluated eventually.

// deferredTypeStack holds the types queued by checkwidth while
// width calculation is deferred; resumecheckwidth drains it.
var deferredTypeStack []*types.Type
   409  
   410  func checkwidth(t *types.Type) {
   411  	if t == nil {
   412  		return
   413  	}
   414  
   415  	// function arg structs should not be checked
   416  	// outside of the enclosing function.
   417  	if t.IsFuncArgStruct() {
   418  		Fatalf("checkwidth %v", t)
   419  	}
   420  
   421  	if defercalc == 0 {
   422  		dowidth(t)
   423  		return
   424  	}
   425  
   426  	// if type has not yet been pushed on deferredTypeStack yet, do it now
   427  	if !t.Deferwidth() {
   428  		t.SetDeferwidth(true)
   429  		deferredTypeStack = append(deferredTypeStack, t)
   430  	}
   431  }
   432  
   433  func defercheckwidth() {
   434  	// we get out of sync on syntax errors, so don't be pedantic.
   435  	if defercalc != 0 && nerrors == 0 {
   436  		Fatalf("defercheckwidth")
   437  	}
   438  	defercalc = 1
   439  }
   440  
   441  func resumecheckwidth() {
   442  	if defercalc == 0 {
   443  		Fatalf("resumecheckwidth")
   444  	}
   445  
   446  	for len(deferredTypeStack) > 0 {
   447  		t := deferredTypeStack[len(deferredTypeStack)-1]
   448  		deferredTypeStack = deferredTypeStack[:len(deferredTypeStack)-1]
   449  		t.SetDeferwidth(false)
   450  		dowidth(t)
   451  	}
   452  
   453  	defercalc = 0
   454  }