github.com/alash3al/go@v0.0.0-20150827002835-d497eeb00540/src/runtime/gc_test.go

// Copyright 2011 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package runtime_test

import (
	"io"
	"os"
	"reflect"
	"runtime"
	"runtime/debug"
	"testing"
	"time"
	"unsafe"
)

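// TestGcSys runs the testGCSysSource program below in a child process.
// The child allocates roughly 1 GB (100 MB in short mode) of short-lived
// objects and then checks that the process's Sys memory grew by at most
// 16 MB, i.e. that the collector reclaims and reuses memory.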
func TestGcSys(t *testing.T) {
	if os.Getenv("GOGC") == "off" {
		t.Skip("skipping test; GOGC=off in environment")
	}
	data := struct{ Short bool }{testing.Short()}
	got := executeTest(t, testGCSysSource, &data)
	want := "OK\n"
	if got != want {
		t.Fatalf("expected %q, but got %q", want, got)
	}
}

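// testGCSysSource is the program run by TestGcSys; the {{if .Short}}
// directive is filled in by executeTest's template expansion using the
// data struct passed above.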
const testGCSysSource = `
package main

import (
	"fmt"
	"runtime"
)

func main() {
	runtime.GOMAXPROCS(1)
	memstats := new(runtime.MemStats)
	runtime.GC()
	runtime.ReadMemStats(memstats)
	sys := memstats.Sys

	runtime.MemProfileRate = 0 // disable profiler

	itercount := 1000000
{{if .Short}}
	itercount = 100000
{{end}}
	for i := 0; i < itercount; i++ {
		workthegc()
	}

	// Should only be using a few MB.
	// We allocated 100 MB or (if not short) 1 GB.
	runtime.ReadMemStats(memstats)
	if sys > memstats.Sys {
		sys = 0
	} else {
		sys = memstats.Sys - sys
	}
	if sys > 16<<20 {
		fmt.Printf("using too much memory: %d bytes\n", sys)
		return
	}
	fmt.Printf("OK\n")
}

func workthegc() []byte {
	return make([]byte, 1029)
}
`

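// TestGcDeepNesting checks that a value stored through ten levels of
// nested arrays is still reachable and intact after an explicit GC.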
func TestGcDeepNesting(t *testing.T) {
	type T [2][2][2][2][2][2][2][2][2][2]*int
	a := new(T)

	// Prevent the compiler from applying escape analysis.
	// This makes sure new(T) is allocated on the heap, not on the stack.
	t.Logf("%p", a)

	a[0][0][0][0][0][0][0][0][0][0] = new(int)
	*a[0][0][0][0][0][0][0][0][0][0] = 13
	runtime.GC()
	if *a[0][0][0][0][0][0][0][0][0][0] != 13 {
		t.Fail()
	}
}

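// TestGcHashmapIndirection grows a map whose large ([256]int) keys and
// values are too big to be stored inline in the hash buckets, with GOGC=1
// forcing frequent collections while the map is being populated.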
func TestGcHashmapIndirection(t *testing.T) {
	defer debug.SetGCPercent(debug.SetGCPercent(1))
	runtime.GC()
	type T struct {
		a [256]int
	}
	m := make(map[T]T)
	for i := 0; i < 2000; i++ {
		var a T
		a.a[0] = i
		m[a] = T{}
	}
}

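// TestGcArraySlice builds a linked list in which each node's nextbuf slice
// aliases the previous node's buf array, collecting after every insertion,
// and then verifies that no node's byte was corrupted.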
func TestGcArraySlice(t *testing.T) {
	type X struct {
		buf     [1]byte
		nextbuf []byte
		next    *X
	}
	var head *X
	for i := 0; i < 10; i++ {
		p := &X{}
		p.buf[0] = 42
		p.next = head
		if head != nil {
			p.nextbuf = head.buf[:]
		}
		head = p
		runtime.GC()
	}
	for p := head; p != nil; p = p.next {
		if p.buf[0] != 42 {
			t.Fatal("corrupted heap")
		}
	}
}

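// TestGcRescan links nodes both through the embedded X (nextx) and through
// the outer Y (nexty), collecting after every insertion, and then walks
// the list to check that the pointed-to values survived.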
func TestGcRescan(t *testing.T) {
	type X struct {
		c     chan error
		nextx *X
	}
	type Y struct {
		X
		nexty *Y
		p     *int
	}
	var head *Y
	for i := 0; i < 10; i++ {
		p := &Y{}
		p.c = make(chan error)
		if head != nil {
			p.nextx = &head.X
		}
		p.nexty = head
		p.p = new(int)
		*p.p = 42
		head = p
		runtime.GC()
	}
	for p := head; p != nil; p = p.nexty {
		if *p.p != 42 {
			t.Fatal("corrupted heap")
		}
	}
}

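// TestGcLastTime checks that MemStats.LastGC falls between timestamps
// taken just before and after an explicit collection, and sanity-checks
// the pause recorded for that collection.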
func TestGcLastTime(t *testing.T) {
	ms := new(runtime.MemStats)
	t0 := time.Now().UnixNano()
	runtime.GC()
	t1 := time.Now().UnixNano()
	runtime.ReadMemStats(ms)
	last := int64(ms.LastGC)
	if t0 > last || last > t1 {
		t.Fatalf("bad last GC time: got %v, want [%v, %v]", last, t0, t1)
	}
	pause := ms.PauseNs[(ms.NumGC+255)%256]
	// Due to timer granularity, pause can actually be 0 on windows
	// or on virtualized environments.
	if pause == 0 {
		t.Logf("last GC pause was 0")
	} else if pause > 10e9 {
		t.Logf("bad last GC pause: got %v, want [0, 10e9]", pause)
	}
}

var hugeSink interface{}

func TestHugeGCInfo(t *testing.T) {
	// The test ensures that the compiler can chew these huge types even on the weakest machines.
	// The types are not allocated at runtime.
	if hugeSink != nil {
		// 400MB on 32-bit systems, 4TB on 64-bit systems.
		const n = (400 << 20) + (unsafe.Sizeof(uintptr(0))-4)<<40
		hugeSink = new([n]*byte)
		hugeSink = new([n]uintptr)
		hugeSink = new(struct {
			x float64
			y [n]*byte
			z []string
		})
		hugeSink = new(struct {
			x float64
			y [n]uintptr
			z []string
		})
	}
}

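// The BenchmarkSetType* benchmarks time runtime.BenchSetType (a hook
// exported for testing) on objects and slices with varying mixes of
// pointer and scalar fields; see benchSetType below.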
func BenchmarkSetTypePtr(b *testing.B) {
	benchSetType(b, new(*byte))
}

func BenchmarkSetTypePtr8(b *testing.B) {
	benchSetType(b, new([8]*byte))
}

func BenchmarkSetTypePtr16(b *testing.B) {
	benchSetType(b, new([16]*byte))
}

func BenchmarkSetTypePtr32(b *testing.B) {
	benchSetType(b, new([32]*byte))
}

func BenchmarkSetTypePtr64(b *testing.B) {
	benchSetType(b, new([64]*byte))
}

func BenchmarkSetTypePtr126(b *testing.B) {
	benchSetType(b, new([126]*byte))
}

func BenchmarkSetTypePtr128(b *testing.B) {
	benchSetType(b, new([128]*byte))
}

func BenchmarkSetTypePtrSlice(b *testing.B) {
	benchSetType(b, make([]*byte, 1<<10))
}

type Node1 struct {
	Value       [1]uintptr
	Left, Right *byte
}

func BenchmarkSetTypeNode1(b *testing.B) {
	benchSetType(b, new(Node1))
}

func BenchmarkSetTypeNode1Slice(b *testing.B) {
	benchSetType(b, make([]Node1, 32))
}

type Node8 struct {
	Value       [8]uintptr
	Left, Right *byte
}

func BenchmarkSetTypeNode8(b *testing.B) {
	benchSetType(b, new(Node8))
}

func BenchmarkSetTypeNode8Slice(b *testing.B) {
	benchSetType(b, make([]Node8, 32))
}

type Node64 struct {
	Value       [64]uintptr
	Left, Right *byte
}

func BenchmarkSetTypeNode64(b *testing.B) {
	benchSetType(b, new(Node64))
}

func BenchmarkSetTypeNode64Slice(b *testing.B) {
	benchSetType(b, make([]Node64, 32))
}

type Node64Dead struct {
	Left, Right *byte
	Value       [64]uintptr
}

func BenchmarkSetTypeNode64Dead(b *testing.B) {
	benchSetType(b, new(Node64Dead))
}

func BenchmarkSetTypeNode64DeadSlice(b *testing.B) {
	benchSetType(b, make([]Node64Dead, 32))
}

type Node124 struct {
	Value       [124]uintptr
	Left, Right *byte
}

func BenchmarkSetTypeNode124(b *testing.B) {
	benchSetType(b, new(Node124))
}

func BenchmarkSetTypeNode124Slice(b *testing.B) {
	benchSetType(b, make([]Node124, 32))
}

type Node126 struct {
	Value       [126]uintptr
	Left, Right *byte
}

func BenchmarkSetTypeNode126(b *testing.B) {
	benchSetType(b, new(Node126))
}

func BenchmarkSetTypeNode126Slice(b *testing.B) {
	benchSetType(b, make([]Node126, 32))
}

type Node128 struct {
	Value       [128]uintptr
	Left, Right *byte
}

func BenchmarkSetTypeNode128(b *testing.B) {
	benchSetType(b, new(Node128))
}

func BenchmarkSetTypeNode128Slice(b *testing.B) {
	benchSetType(b, make([]Node128, 32))
}

type Node130 struct {
	Value       [130]uintptr
	Left, Right *byte
}

func BenchmarkSetTypeNode130(b *testing.B) {
	benchSetType(b, new(Node130))
}

func BenchmarkSetTypeNode130Slice(b *testing.B) {
	benchSetType(b, make([]Node130, 32))
}

type Node1024 struct {
	Value       [1024]uintptr
	Left, Right *byte
}

func BenchmarkSetTypeNode1024(b *testing.B) {
	benchSetType(b, new(Node1024))
}

func BenchmarkSetTypeNode1024Slice(b *testing.B) {
	benchSetType(b, make([]Node1024, 32))
}

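// benchSetType reports bytes processed per operation based on the element
// size (and, for slices, the length) of x, then asks the runtime to record
// x's type information b.N times.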
func benchSetType(b *testing.B, x interface{}) {
	v := reflect.ValueOf(x)
	t := v.Type()
	switch t.Kind() {
	case reflect.Ptr:
		b.SetBytes(int64(t.Elem().Size()))
	case reflect.Slice:
		b.SetBytes(int64(t.Elem().Size()) * int64(v.Len()))
	}
	b.ResetTimer()
	runtime.BenchSetType(b.N, x)
}

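// BenchmarkAllocation measures small-object allocation throughput:
// GOMAXPROCS worker goroutines drain the work channel, allocating 1000
// two-pointer structs per item, with b.N items in total; the false
// sentinels stop the workers.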
func BenchmarkAllocation(b *testing.B) {
	type T struct {
		x, y *byte
	}
	ngo := runtime.GOMAXPROCS(0)
	work := make(chan bool, b.N+ngo)
	result := make(chan *T)
	for i := 0; i < b.N; i++ {
		work <- true
	}
	for i := 0; i < ngo; i++ {
		work <- false
	}
	for i := 0; i < ngo; i++ {
		go func() {
			var x *T
			for <-work {
				for i := 0; i < 1000; i++ {
					x = &T{}
				}
			}
			result <- x
		}()
	}
	for i := 0; i < ngo; i++ {
		<-result
	}
}

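// TestPrintGC runs garbage collections in a loop on one goroutine while
// the main goroutine repeatedly defers a print call, exercising the
// interaction between the GC and deferred print calls.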
func TestPrintGC(t *testing.T) {
	if testing.Short() {
		t.Skip("Skipping in short mode")
	}
	defer runtime.GOMAXPROCS(runtime.GOMAXPROCS(2))
	done := make(chan bool)
	go func() {
		for {
			select {
			case <-done:
				return
			default:
				runtime.GC()
			}
		}
	}()
	for i := 0; i < 1e4; i++ {
		func() {
			defer print("")
		}()
	}
	close(done)
}

// The implicit y, ok := x.(error) for the case error
// in testTypeSwitch used to not initialize the result y
// before passing &y to assertE2I2GC.
// Catch this by making assertE2I2 call runtime.GC,
// which will force a stack scan and failure if there are
// bad pointers, and then fill the stack with bad pointers
// and run the type switch.
func TestAssertE2I2Liveness(t *testing.T) {
	// Note that this flag is defined in export_test.go
	// and is not available to ordinary imports of runtime.
	*runtime.TestingAssertE2I2GC = true
	defer func() {
		*runtime.TestingAssertE2I2GC = false
	}()

	poisonStack()
	testTypeSwitch(io.EOF)
	poisonStack()
	testAssert(io.EOF)
	poisonStack()
	testAssertVar(io.EOF)
}

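// poisonStack fills a large stack frame with the non-pointer value 0xff so
// that a subsequent stack scan will fail if any of these slots are
// mistakenly treated as live pointers.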
func poisonStack() uintptr {
	var x [1000]uintptr
	for i := range x {
		x[i] = 0xff
	}
	return x[123]
}

func testTypeSwitch(x interface{}) error {
	switch y := x.(type) {
	case nil:
		// ok
	case error:
		return y
	}
	return nil
}

func testAssert(x interface{}) error {
	if y, ok := x.(error); ok {
		return y
	}
	return nil
}

func testAssertVar(x interface{}) error {
	var y, ok = x.(error)
	if ok {
		return y
	}
	return nil
}

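// TestAssertE2T2Liveness is the analogue of TestAssertE2I2Liveness for the
// two-result interface-to-concrete assertion implicit in testIfaceEqual's
// x == "abc" comparison: it forces a GC during the assertion and runs it
// over a poisoned stack.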
func TestAssertE2T2Liveness(t *testing.T) {
	*runtime.TestingAssertE2T2GC = true
	defer func() {
		*runtime.TestingAssertE2T2GC = false
	}()

	poisonStack()
	testIfaceEqual(io.EOF)
}

func testIfaceEqual(x interface{}) {
	if x == "abc" {
		// Prevent inlining
		panic("")
	}
}