github.com/gagliardetto/golang-go@v0.0.0-20201020153340-53909ea70814/cmd/compile/internal/gc/align.go

// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package gc

import (
	"github.com/gagliardetto/golang-go/cmd/compile/internal/types"
	"sort"
)

// sizeCalculationDisabled reports whether it is currently unsafe
// to calculate Types' widths and alignments; when set, dowidth
// Fatals if a size has not already been calculated. See dowidth.
var sizeCalculationDisabled bool

// Machine size and rounding alignment are dictated by the size of a
// pointer, set in betypeinit (see ../amd64/galign.go).
// defercalc tracks the nesting depth of defercheckwidth calls; while
// it is nonzero, checkwidth defers size calculations to resumecheckwidth.
var defercalc int

// Rnd rounds o up to the next multiple of r, where r must be a
// power of two between 1 and 8.
func Rnd(o int64, r int64) int64 {
	if r < 1 || r > 8 || r&(r-1) != 0 {
		Fatalf("rnd %d", r)
	}
	return (o + r - 1) &^ (r - 1)
}
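
// Worked example (editorial sketch, not part of the original file): Rnd
// rounds up with the usual power-of-two trick, adding r-1 and clearing the
// low bits. The helper rndExample is hypothetical and exists only to show
// the arithmetic.
func rndExample() {
	_ = Rnd(0, 8) // 0:  already a multiple of 8
	_ = Rnd(5, 4) // 8:  (5+3) &^ 3
	_ = Rnd(8, 4) // 8:  exact multiples are unchanged
	_ = Rnd(9, 8) // 16: (9+7) &^ 7
}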

// expandiface computes the method set for interface type t by
// expanding embedded interfaces.
func expandiface(t *types.Type) {
	seen := make(map[*types.Sym]*types.Field)
	var methods []*types.Field

	addMethod := func(m *types.Field, explicit bool) {
		switch prev := seen[m.Sym]; {
		case prev == nil:
			seen[m.Sym] = m
		case langSupported(1, 14, t.Pkg()) && !explicit && types.Identical(m.Type, prev.Type):
			return
		default:
			yyerrorl(m.Pos, "duplicate method %s", m.Sym.Name)
		}
		methods = append(methods, m)
	}

	for _, m := range t.Methods().Slice() {
		if m.Sym == nil {
			continue
		}

		checkwidth(m.Type)
		addMethod(m, true)
	}

	for _, m := range t.Methods().Slice() {
		if m.Sym != nil {
			continue
		}

		if !m.Type.IsInterface() {
			yyerrorl(m.Pos, "interface contains embedded non-interface %v", m.Type)
			m.SetBroke(true)
			t.SetBroke(true)
			// Add to fields so that error messages
			// include the broken embedded type when
			// printing t.
			// TODO(mdempsky): Revisit this.
			methods = append(methods, m)
			continue
		}

		// Embedded interface: duplicate all methods
		// (including broken ones, if any) and add to t's
		// method set.
		for _, t1 := range m.Type.Fields().Slice() {
			f := types.NewField()
			f.Pos = m.Pos // preserve embedding position
			f.Sym = t1.Sym
			f.Type = t1.Type
			f.SetBroke(t1.Broke())
			addMethod(f, false)
		}
	}

	sort.Sort(methcmp(methods))

	if int64(len(methods)) >= thearch.MAXWIDTH/int64(Widthptr) {
		yyerror("interface too large")
	}
	for i, m := range methods {
		m.Offset = int64(i) * int64(Widthptr)
	}

	// Access fields directly to avoid recursively calling dowidth
	// within Type.Fields().
	t.Extra.(*types.Interface).Fields.Set(methods)
}
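
// Editorial illustration (not part of the original file): for source such as
//
//	type ReadCloser interface {
//		io.Reader
//		io.Closer
//	}
//
// expandiface copies Read and Close out of the embedded interfaces into
// ReadCloser's own method set, sorts the result with methcmp, and assigns
// each method an Offset of i*Widthptr. Under -lang=go1.14 or later, a method
// reached more than once through embedding is silently deduplicated when its
// signature is identical to the earlier copy; otherwise a "duplicate method"
// error is reported.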

// widstruct lays out the fields of struct type t, starting at offset o,
// and returns the offset of the first byte past the laid-out fields.
// It also sets t.Width and t.Align. errtype is the type named in any
// "too large" error. flag supplies the minimum alignment (0 also
// suppresses the final size rounding; 1 additionally requests the
// trailing padding for zero-sized tails described below).
func widstruct(errtype *types.Type, t *types.Type, o int64, flag int) int64 {
	starto := o
	maxalign := int32(flag)
	if maxalign < 1 {
		maxalign = 1
	}
	lastzero := int64(0)
	for _, f := range t.Fields().Slice() {
		if f.Type == nil {
			// broken field, just skip it so that other valid fields
			// get a width.
			continue
		}

		dowidth(f.Type)
		if int32(f.Type.Align) > maxalign {
			maxalign = int32(f.Type.Align)
		}
		if f.Type.Align > 0 {
			o = Rnd(o, int64(f.Type.Align))
		}
		f.Offset = o
		if n := asNode(f.Nname); n != nil {
			// addrescapes has similar code to update these offsets.
			// Usually addrescapes runs after widstruct,
			// in which case we could drop this,
			// but function closure functions are the exception.
			// NOTE(rsc): This comment may be stale.
			// It's possible the ordering has changed and this is
			// now the common case. I'm not sure.
			if n.Name.Param.Stackcopy != nil {
				n.Name.Param.Stackcopy.Xoffset = o
				n.Xoffset = 0
			} else {
				n.Xoffset = o
			}
		}

		w := f.Type.Width
		if w < 0 {
			Fatalf("invalid width %d", f.Type.Width)
		}
		if w == 0 {
			lastzero = o
		}
		o += w
		maxwidth := thearch.MAXWIDTH
		// On 32-bit systems, reflect tables impose an additional constraint
		// that each field start offset must fit in 31 bits.
		if maxwidth < 1<<32 {
			maxwidth = 1<<31 - 1
		}
		if o >= maxwidth {
			yyerror("type %L too large", errtype)
			o = 8 // small but nonzero
		}
	}

	// For nonzero-sized structs which end in a zero-sized thing, we add
	// an extra byte of padding to the type. This padding ensures that
	// taking the address of the zero-sized thing can't manufacture a
	// pointer to the next object in the heap. See issue 9401.
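	// For instance (editorial note, not part of the original source),
	// struct{ a bool; b struct{} } would otherwise have size 1 with b at
	// offset 1, one byte past the end of the object; the extra byte gives
	// it size 2, so &b still points inside it. Empty structs (o == starto)
	// and structs ending in a nonzero-sized field (o != lastzero) are
	// unaffected.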
	if flag == 1 && o > starto && o == lastzero {
		o++
	}

	// final width is rounded
	if flag != 0 {
		o = Rnd(o, int64(maxalign))
	}
	t.Align = uint8(maxalign)

	// type width only includes back to first field's offset
	t.Width = o - starto

	return o
}
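
// Worked example (editorial sketch, not part of the original file): on a
// 64-bit target (Widthptr = Widthreg = 8), widstruct lays out
//
//	var s struct {
//		a int8  // offset 0
//		b int64 // offset 8 (o rounded up to b's alignment)
//		c int16 // offset 16
//	}
//
// reaching o = 18 after c; the final size is Rnd(18, 8) = 24 with t.Align
// set to 8, which matches unsafe.Sizeof(s) and unsafe.Alignof(s) for such
// a struct in ordinary Go code.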

// dowidth calculates and stores the size and alignment for t.
// If sizeCalculationDisabled is set and the size/alignment
// have not already been calculated, it calls Fatalf.
// This is used to prevent data races in the back end.
func dowidth(t *types.Type) {
	// Calling dowidth when typecheck tracing enabled is not safe.
	// See issue #33658.
	if enableTrace && skipDowidthForTracing {
		return
	}
	if Widthptr == 0 {
		Fatalf("dowidth without betypeinit")
	}

	if t == nil {
		return
	}

	if t.Width == -2 {
		if !t.Broke() {
			t.SetBroke(true)
			yyerrorl(asNode(t.Nod).Pos, "invalid recursive type %v", t)
		}

		t.Width = 0
		t.Align = 1
		return
	}

	if t.WidthCalculated() {
		return
	}

	if sizeCalculationDisabled {
		if t.Broke() {
			// break infinite recursion from Fatal call below
			return
		}
		t.SetBroke(true)
		Fatalf("width not calculated: %v", t)
	}

	// break infinite recursion if the broken recursive type
	// is referenced again
	if t.Broke() && t.Width == 0 {
		return
	}

	// defer checkwidth calls until after we're done
	defercheckwidth()

	lno := lineno
	if asNode(t.Nod) != nil {
		lineno = asNode(t.Nod).Pos
	}

	t.Width = -2
	t.Align = 0 // 0 means use t.Width, below

	et := t.Etype
	switch et {
	case TFUNC, TCHAN, TMAP, TSTRING:
		break

	// simtype == 0 during bootstrap
	default:
		if simtype[t.Etype] != 0 {
			et = simtype[t.Etype]
		}
	}

	var w int64
	switch et {
	default:
		Fatalf("dowidth: unknown type: %v", t)

	// compiler-specific stuff
	case TINT8, TUINT8, TBOOL:
		// bool is int8
		w = 1

	case TINT16, TUINT16:
		w = 2

	case TINT32, TUINT32, TFLOAT32:
		w = 4

	case TINT64, TUINT64, TFLOAT64:
		w = 8
		t.Align = uint8(Widthreg)

	case TCOMPLEX64:
		w = 8
		t.Align = 4

	case TCOMPLEX128:
		w = 16
		t.Align = uint8(Widthreg)

	case TPTR:
		w = int64(Widthptr)
		checkwidth(t.Elem())

	case TUNSAFEPTR:
		w = int64(Widthptr)

	case TINTER: // implemented as 2 pointers
		w = 2 * int64(Widthptr)
		t.Align = uint8(Widthptr)
		expandiface(t)

	case TCHAN: // implemented as pointer
		w = int64(Widthptr)

		checkwidth(t.Elem())

		// make fake type to check later to
		// trigger channel argument check.
		t1 := types.NewChanArgs(t)
		checkwidth(t1)

	case TCHANARGS:
		t1 := t.ChanArgs()
		dowidth(t1) // just in case
		if t1.Elem().Width >= 1<<16 {
			yyerror("channel element type too large (>64kB)")
		}
		w = 1 // anything will do

	case TMAP: // implemented as pointer
		w = int64(Widthptr)
		checkwidth(t.Elem())
		checkwidth(t.Key())

	case TFORW: // should have been filled in
		if !t.Broke() {
			t.SetBroke(true)
			yyerror("invalid recursive type %v", t)
		}
		w = 1 // anything will do

	case TANY:
		// dummy type; should be replaced before use.
		Fatalf("dowidth any")

	case TSTRING:
		if sizeof_String == 0 {
			Fatalf("early dowidth string")
		}
		w = int64(sizeof_String)
		t.Align = uint8(Widthptr)

	case TARRAY:
		if t.Elem() == nil {
			break
		}

		dowidth(t.Elem())
		if t.Elem().Width != 0 {
			cap := (uint64(thearch.MAXWIDTH) - 1) / uint64(t.Elem().Width)
			if uint64(t.NumElem()) > cap {
				yyerror("type %L larger than address space", t)
			}
		}
		w = t.NumElem() * t.Elem().Width
		t.Align = t.Elem().Align

	case TSLICE:
		if t.Elem() == nil {
			break
		}
		w = int64(sizeof_Slice)
		checkwidth(t.Elem())
		t.Align = uint8(Widthptr)

	case TSTRUCT:
		if t.IsFuncArgStruct() {
			Fatalf("dowidth fn struct %v", t)
		}
		w = widstruct(t, t, 0, 1)

	// make fake type to check later to
	// trigger function argument computation.
	case TFUNC:
		t1 := types.NewFuncArgs(t)
		checkwidth(t1)
		w = int64(Widthptr) // width of func type is pointer

	// A function's arguments are three concatenated structures
	// (receiver, parameters, results); compute their widths as a
	// side effect.
	case TFUNCARGS:
		t1 := t.FuncArgs()
		w = widstruct(t1, t1.Recvs(), 0, 0)
		w = widstruct(t1, t1.Params(), w, Widthreg)
		w = widstruct(t1, t1.Results(), w, Widthreg)
		t1.Extra.(*types.Func).Argwid = w
		if w%int64(Widthreg) != 0 {
			Warn("bad type %v %d\n", t1, w)
		}
		t.Align = 1
	}

	if Widthptr == 4 && w != int64(int32(w)) {
		yyerror("type %v too large", t)
	}

	t.Width = w
	if t.Align == 0 {
		if w == 0 || w > 8 || w&(w-1) != 0 {
			Fatalf("invalid alignment for %v", t)
		}
		t.Align = uint8(w)
	}

	lineno = lno

	resumecheckwidth()
}
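
// Editorial summary (not part of the original file), assuming a 64-bit
// target where Widthptr = Widthreg = 8: dowidth gives int64 and float64
// size 8 and alignment 8, complex64 size 8 and alignment 4, pointers,
// maps, channels, and funcs size 8, interfaces 2*Widthptr = 16, strings
// sizeof_String bytes, and slices sizeof_Slice bytes. Structs and arrays
// are sized via widstruct and element-count multiplication respectively,
// while a TFUNC's call-argument layout (receiver, parameters, results) is
// deferred to a TFUNCARGS pseudo-type whose total size, expected to be a
// multiple of Widthreg, is recorded in Argwid.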

// When a type's width should be known, we call checkwidth
// to compute it. During a declaration like
//
//	type T *struct { next T }
//
// it is necessary to defer the calculation of the struct width
// until after T has been initialized to be a pointer to that struct.
// Similarly, during import processing structs may be used
// before their definition. In those situations, calling
// defercheckwidth() stops width calculations until
// resumecheckwidth() is called, at which point all the
// checkwidths that were deferred are executed.
// dowidth should only be called when the type's size
// is needed immediately; checkwidth makes sure the
// size is evaluated eventually.

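// Typical usage (editorial sketch, not part of the original file):
//
//	defercheckwidth()
//	// ... declare or import types; while deferral is active, checkwidth(t)
//	// pushes t onto deferredTypeStack instead of calling dowidth ...
//	resumecheckwidth() // the outermost resume drains the stack via dowidth
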
var deferredTypeStack []*types.Type

func checkwidth(t *types.Type) {
	if t == nil {
		return
	}

	// function arg structs should not be checked
	// outside of the enclosing function.
	if t.IsFuncArgStruct() {
		Fatalf("checkwidth %v", t)
	}

	if defercalc == 0 {
		dowidth(t)
		return
	}

	// if t has not been pushed on deferredTypeStack yet, do it now
	if !t.Deferwidth() {
		t.SetDeferwidth(true)
		deferredTypeStack = append(deferredTypeStack, t)
	}
}

func defercheckwidth() {
	defercalc++
}

func resumecheckwidth() {
	if defercalc == 1 {
		for len(deferredTypeStack) > 0 {
			t := deferredTypeStack[len(deferredTypeStack)-1]
			deferredTypeStack = deferredTypeStack[:len(deferredTypeStack)-1]
			t.SetDeferwidth(false)
			dowidth(t)
		}
	}

	defercalc--
}
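
// Editorial note (not part of the original file): these calls nest, and
// only the outermost resumecheckwidth, the one that observes defercalc == 1,
// drains deferredTypeStack. For example:
//
//	defercheckwidth()  // defercalc: 0 -> 1
//	defercheckwidth()  // defercalc: 1 -> 2
//	checkwidth(t)      // t is pushed onto deferredTypeStack
//	resumecheckwidth() // defercalc == 2: no drain; defercalc -> 1
//	resumecheckwidth() // defercalc == 1: dowidth(t) runs; defercalc -> 0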