modernc.org/memory@v1.8.0/all_test.go (about)

     1  // Copyright 2017 The Memory Authors. All rights reserved.
     2  // Use of this source code is governed by a BSD-style
     3  // license that can be found in the LICENSE file.
     4  
     5  package memory // import "modernc.org/memory"
     6  
     7  import (
     8  	"bytes"
     9  	"fmt"
    10  	"math"
    11  	"os"
    12  	"path"
    13  	"runtime"
    14  	"strings"
    15  	"testing"
    16  	"unsafe"
    17  
    18  	"modernc.org/mathutil"
    19  )
    20  
    21  func caller(s string, va ...interface{}) {
    22  	if s == "" {
    23  		s = strings.Repeat("%v ", len(va))
    24  	}
    25  	_, fn, fl, _ := runtime.Caller(2)
    26  	fmt.Fprintf(os.Stderr, "# caller: %s:%d: ", path.Base(fn), fl)
    27  	fmt.Fprintf(os.Stderr, s, va...)
    28  	fmt.Fprintln(os.Stderr)
    29  	_, fn, fl, _ = runtime.Caller(1)
    30  	fmt.Fprintf(os.Stderr, "# \tcallee: %s:%d: ", path.Base(fn), fl)
    31  	fmt.Fprintln(os.Stderr)
    32  	os.Stderr.Sync()
    33  }
    34  
    35  func dbg(s string, va ...interface{}) {
    36  	if s == "" {
    37  		s = strings.Repeat("%v ", len(va))
    38  	}
    39  	_, fn, fl, _ := runtime.Caller(1)
    40  	fmt.Fprintf(os.Stderr, "# dbg %s:%d: ", path.Base(fn), fl)
    41  	fmt.Fprintf(os.Stderr, s, va...)
    42  	fmt.Fprintln(os.Stderr)
    43  	os.Stderr.Sync()
    44  }
    45  
    46  func TODO(...interface{}) string { //TODOOK
    47  	_, fn, fl, _ := runtime.Caller(1)
    48  	return fmt.Sprintf("# TODO: %s:%d:\n", path.Base(fn), fl) //TODOOK
    49  }
    50  
    51  func use(...interface{}) {}
    52  
// init references the debug helpers so the compiler does not flag them as
// unused when no test happens to call them.
func init() {
	use(caller, dbg, TODO) //TODOOK
}
    56  
    57  // ============================================================================
    58  
// quota is the total number of bytes each stress test requests before it
// starts verifying and freeing (128 MB).
const quota = 128 << 20

var (
	// max bounds the size of "Small" test allocations.
	max = 2 * osPageSize
	// bigMax bounds the size of "Big" test allocations.
	bigMax = 2 * pageSize
)
    65  
// block records one allocation made through the uintptr API: its base
// address p and the originally requested size in bytes.
type block struct {
	p    uintptr
	size int
}
    70  
// test1u stress-tests the uintptr-based API. It allocates random-sized
// blocks (1..max bytes) until quota bytes have been requested, filling each
// block with a deterministic pseudo-random pattern, then rewinds both
// generators and replays the identical sequences to verify every block's
// size and contents, frees all blocks in shuffled order, and finally
// asserts the allocator is completely empty.
func test1u(t *testing.T, max int) {
	var alloc Allocator

	defer alloc.Close()

	rem := quota // bytes still to request
	var a []block
	// Deterministic full-cycle generators: srng drives allocation sizes,
	// vrng drives byte values. Seek(0) later replays the same sequences.
	srng, err := mathutil.NewFC32(0, math.MaxInt32, true)
	if err != nil {
		t.Fatal(err)
	}

	vrng, err := mathutil.NewFC32(0, math.MaxInt32, true)
	if err != nil {
		t.Fatal(err)
	}

	// Allocate
	for rem > 0 {
		size := srng.Next()%max + 1
		rem -= size
		p, err := alloc.UintptrMalloc(size)
		if err != nil {
			t.Fatal(err)
		}

		a = append(a, block{p, size})
		for i := 0; i < size; i++ {
			*(*byte)(unsafe.Pointer(p + uintptr(i))) = byte(vrng.Next())
		}
	}
	if counters {
		t.Logf("allocs %v, mmaps %v, bytes %v, overhead %v (%.2f%%).", alloc.Allocs, alloc.Mmaps, alloc.Bytes, alloc.Bytes-quota, 100*float64(alloc.Bytes-quota)/quota)
	}
	srng.Seek(0)
	vrng.Seek(0)
	// Verify: replaying srng reproduces each block's size and replaying
	// vrng reproduces its contents; blocks are zeroed as they are checked.
	for i, b := range a {
		if g, e := b.size, srng.Next()%max+1; g != e {
			t.Fatal(i, g, e)
		}

		// The reported usable size must cover the original request.
		if a, b := b.size, UintptrUsableSize(b.p); a > b {
			t.Fatal(i, a, b)
		}

		for j := 0; j < b.size; j++ {
			g := *(*byte)(unsafe.Pointer(b.p + uintptr(j)))
			if e := byte(vrng.Next()); g != e {
				t.Fatalf("%v,%v %#x: %#02x %#02x", i, j, b.p+uintptr(j), g, e)
			}

			*(*byte)(unsafe.Pointer(b.p + uintptr(j))) = 0
		}
	}
	// Shuffle so blocks are freed in a different order than allocated.
	for i := range a {
		j := srng.Next() % len(a)
		a[i], a[j] = a[j], a[i]
	}
	// Free
	for _, b := range a {
		if err := alloc.UintptrFree(b.p); err != nil {
			t.Fatal(err)
		}
	}
	// All counters and the region map must be back to zero/empty.
	if alloc.Allocs != 0 || alloc.Mmaps != 0 || alloc.Bytes != 0 || len(alloc.regs) != 0 {
		t.Fatalf("%+v", alloc)
	}
}
   141  
// Small variants use slot-sized allocations, Big variants page-sized ones.
func Test1USmall(t *testing.T) { test1u(t, max) }
func Test1UBig(t *testing.T)   { test1u(t, bigMax) }
   144  
// test2u is the same stress test as test1u for the uintptr API, except that
// each block is freed immediately after it is verified (in allocation
// order) instead of in a separate shuffled pass.
func test2u(t *testing.T, max int) {
	var alloc Allocator

	defer alloc.Close()

	rem := quota // bytes still to request
	var a []block
	// Deterministic full-cycle generators; Seek(0) replays the sequences.
	srng, err := mathutil.NewFC32(0, math.MaxInt32, true)
	if err != nil {
		t.Fatal(err)
	}

	vrng, err := mathutil.NewFC32(0, math.MaxInt32, true)
	if err != nil {
		t.Fatal(err)
	}

	// Allocate
	for rem > 0 {
		size := srng.Next()%max + 1
		rem -= size
		p, err := alloc.UintptrMalloc(size)
		if err != nil {
			t.Fatal(err)
		}

		a = append(a, block{p, size})
		for i := 0; i < size; i++ {
			*(*byte)(unsafe.Pointer(p + uintptr(i))) = byte(vrng.Next())
		}
	}
	if counters {
		t.Logf("allocs %v, mmaps %v, bytes %v, overhead %v (%.2f%%).", alloc.Allocs, alloc.Mmaps, alloc.Bytes, alloc.Bytes-quota, 100*float64(alloc.Bytes-quota)/quota)
	}
	srng.Seek(0)
	vrng.Seek(0)
	// Verify & free
	for i, b := range a {
		if g, e := b.size, srng.Next()%max+1; g != e {
			t.Fatal(i, g, e)
		}

		// The reported usable size must cover the original request.
		if a, b := b.size, UintptrUsableSize(b.p); a > b {
			t.Fatal(i, a, b)
		}

		for j := 0; j < b.size; j++ {
			g := *(*byte)(unsafe.Pointer(b.p + uintptr(j)))
			if e := byte(vrng.Next()); g != e {
				t.Fatalf("%v,%v %#x: %#02x %#02x", i, j, b.p+uintptr(j), g, e)
			}

			*(*byte)(unsafe.Pointer(b.p + uintptr(j))) = 0
		}
		if err := alloc.UintptrFree(b.p); err != nil {
			t.Fatal(err)
		}
	}
	// All counters and the region map must be back to zero/empty.
	if alloc.Allocs != 0 || alloc.Mmaps != 0 || alloc.Bytes != 0 || len(alloc.regs) != 0 {
		t.Fatalf("%+v", alloc)
	}
}
   207  
// Small variants use slot-sized allocations, Big variants page-sized ones.
func Test2USmall(t *testing.T) { test2u(t, max) }
func Test2UBig(t *testing.T)   { test2u(t, bigMax) }
   210  
   211  func test3u(t *testing.T, max int) {
   212  	var alloc Allocator
   213  
   214  	defer alloc.Close()
   215  
   216  	rem := quota
   217  	m := map[block][]byte{}
   218  	srng, err := mathutil.NewFC32(1, max, true)
   219  	if err != nil {
   220  		t.Fatal(err)
   221  	}
   222  
   223  	vrng, err := mathutil.NewFC32(1, max, true)
   224  	if err != nil {
   225  		t.Fatal(err)
   226  	}
   227  
   228  	for rem > 0 {
   229  		switch srng.Next() % 3 {
   230  		case 0, 1: // 2/3 allocate
   231  			size := srng.Next()
   232  			rem -= size
   233  			p, err := alloc.UintptrMalloc(size)
   234  			if err != nil {
   235  				t.Fatal(err)
   236  			}
   237  
   238  			b := make([]byte, size)
   239  			for i := range b {
   240  				b[i] = byte(vrng.Next())
   241  				*(*byte)(unsafe.Pointer(p + uintptr(i))) = b[i]
   242  			}
   243  			m[block{p, size}] = append([]byte(nil), b...)
   244  		default: // 1/3 free
   245  			for b, v := range m {
   246  				for i, v := range v {
   247  					if *(*byte)(unsafe.Pointer(b.p + uintptr(i))) != v {
   248  						t.Fatal("corrupted heap")
   249  					}
   250  				}
   251  
   252  				if a, b := b.size, UintptrUsableSize(b.p); a > b {
   253  					t.Fatal(a, b)
   254  				}
   255  
   256  				for j := 0; j < b.size; j++ {
   257  					*(*byte)(unsafe.Pointer(b.p + uintptr(j))) = 0
   258  				}
   259  				rem += b.size
   260  				alloc.UintptrFree(b.p)
   261  				delete(m, b)
   262  				break
   263  			}
   264  		}
   265  	}
   266  	if counters {
   267  		t.Logf("allocs %v, mmaps %v, bytes %v, overhead %v (%.2f%%).", alloc.Allocs, alloc.Mmaps, alloc.Bytes, alloc.Bytes-quota, 100*float64(alloc.Bytes-quota)/quota)
   268  	}
   269  	for b, v := range m {
   270  		for i, v := range v {
   271  			if *(*byte)(unsafe.Pointer(b.p + uintptr(i))) != v {
   272  				t.Fatal("corrupted heap")
   273  			}
   274  		}
   275  
   276  		if a, b := b.size, UintptrUsableSize(b.p); a > b {
   277  			t.Fatal(a, b)
   278  		}
   279  
   280  		for j := 0; j < b.size; j++ {
   281  			*(*byte)(unsafe.Pointer(b.p + uintptr(j))) = 0
   282  		}
   283  		alloc.UintptrFree(b.p)
   284  	}
   285  	if alloc.Allocs != 0 || alloc.Mmaps != 0 || alloc.Bytes != 0 || len(alloc.regs) != 0 {
   286  		t.Fatalf("%+v", alloc)
   287  	}
   288  }
   289  
// Small variants use slot-sized allocations, Big variants page-sized ones.
func Test3USmall(t *testing.T) { test3u(t, max) }
func Test3UBig(t *testing.T)   { test3u(t, bigMax) }
   292  
   293  func TestUFree(t *testing.T) {
   294  	var alloc Allocator
   295  
   296  	defer alloc.Close()
   297  
   298  	p, err := alloc.UintptrMalloc(1)
   299  	if err != nil {
   300  		t.Fatal(err)
   301  	}
   302  
   303  	if err := alloc.UintptrFree(p); err != nil {
   304  		t.Fatal(err)
   305  	}
   306  
   307  	if alloc.Allocs != 0 || alloc.Mmaps != 0 || alloc.Bytes != 0 || len(alloc.regs) != 0 {
   308  		t.Fatalf("%+v", alloc)
   309  	}
   310  }
   311  
   312  func TestUMalloc(t *testing.T) {
   313  	var alloc Allocator
   314  
   315  	defer alloc.Close()
   316  
   317  	p, err := alloc.UintptrMalloc(maxSlotSize)
   318  	if err != nil {
   319  		t.Fatal(err)
   320  	}
   321  
   322  	pg := (*page)(unsafe.Pointer(p &^ uintptr(osPageMask)))
   323  	if 1<<pg.log > maxSlotSize {
   324  		t.Fatal(1<<pg.log, maxSlotSize)
   325  	}
   326  
   327  	if err := alloc.UintptrFree(p); err != nil {
   328  		t.Fatal(err)
   329  	}
   330  
   331  	if alloc.Allocs != 0 || alloc.Mmaps != 0 || alloc.Bytes != 0 || len(alloc.regs) != 0 {
   332  		t.Fatalf("%+v", alloc)
   333  	}
   334  }
   335  
// test1 is the []byte-API counterpart of test1u: allocate random-sized
// blocks until quota bytes are requested, fill them with a deterministic
// pattern, replay the generators to verify sizes and contents, free in
// shuffled order, and assert the allocator ends up empty.
func test1(t *testing.T, max int) {
	var alloc Allocator

	defer alloc.Close()

	rem := quota // bytes still to request
	var a [][]byte
	// Deterministic full-cycle generators; Seek(0) replays the sequences.
	srng, err := mathutil.NewFC32(0, math.MaxInt32, true)
	if err != nil {
		t.Fatal(err)
	}

	vrng, err := mathutil.NewFC32(0, math.MaxInt32, true)
	if err != nil {
		t.Fatal(err)
	}

	// Allocate
	for rem > 0 {
		size := srng.Next()%max + 1
		rem -= size
		b, err := alloc.Malloc(size)
		if err != nil {
			t.Fatal(err)
		}

		a = append(a, b)
		for i := range b {
			b[i] = byte(vrng.Next())
		}
	}
	if counters {
		t.Logf("allocs %v, mmaps %v, bytes %v, overhead %v (%.2f%%).", alloc.Allocs, alloc.Mmaps, alloc.Bytes, alloc.Bytes-quota, 100*float64(alloc.Bytes-quota)/quota)
	}
	srng.Seek(0)
	vrng.Seek(0)
	// Verify: replaying the generators reproduces sizes and contents.
	for i, b := range a {
		if g, e := len(b), srng.Next()%max+1; g != e {
			t.Fatal(i, g, e)
		}

		// The reported usable size must cover the original request.
		if a, b := len(b), UsableSize(&b[0]); a > b {
			t.Fatal(i, a, b)
		}

		for i, g := range b {
			if e := byte(vrng.Next()); g != e {
				t.Fatalf("%v %p: %#02x %#02x", i, &b[i], g, e)
			}

			b[i] = 0
		}
	}
	// Shuffle so blocks are freed in a different order than allocated.
	for i := range a {
		j := srng.Next() % len(a)
		a[i], a[j] = a[j], a[i]
	}
	// Free
	for _, b := range a {
		if err := alloc.Free(b); err != nil {
			t.Fatal(err)
		}
	}
	// All counters and the region map must be back to zero/empty.
	if alloc.Allocs != 0 || alloc.Mmaps != 0 || alloc.Bytes != 0 || len(alloc.regs) != 0 {
		t.Fatalf("%+v", alloc)
	}
}
   405  
// Small variants use slot-sized allocations, Big variants page-sized ones.
func Test1Small(t *testing.T) { test1(t, max) }
func Test1Big(t *testing.T)   { test1(t, bigMax) }
   408  
// test2 is the same stress test as test1 for the []byte API, except that
// each block is freed immediately after it is verified (in allocation
// order) instead of in a separate shuffled pass.
func test2(t *testing.T, max int) {
	var alloc Allocator

	defer alloc.Close()

	rem := quota // bytes still to request
	var a [][]byte
	// Deterministic full-cycle generators; Seek(0) replays the sequences.
	srng, err := mathutil.NewFC32(0, math.MaxInt32, true)
	if err != nil {
		t.Fatal(err)
	}

	vrng, err := mathutil.NewFC32(0, math.MaxInt32, true)
	if err != nil {
		t.Fatal(err)
	}

	// Allocate
	for rem > 0 {
		size := srng.Next()%max + 1
		rem -= size
		b, err := alloc.Malloc(size)
		if err != nil {
			t.Fatal(err)
		}

		a = append(a, b)
		for i := range b {
			b[i] = byte(vrng.Next())
		}
	}
	if counters {
		t.Logf("allocs %v, mmaps %v, bytes %v, overhead %v (%.2f%%).", alloc.Allocs, alloc.Mmaps, alloc.Bytes, alloc.Bytes-quota, 100*float64(alloc.Bytes-quota)/quota)
	}
	srng.Seek(0)
	vrng.Seek(0)
	// Verify & free
	for i, b := range a {
		if g, e := len(b), srng.Next()%max+1; g != e {
			t.Fatal(i, g, e)
		}

		// The reported usable size must cover the original request.
		if a, b := len(b), UsableSize(&b[0]); a > b {
			t.Fatal(i, a, b)
		}

		for i, g := range b {
			if e := byte(vrng.Next()); g != e {
				t.Fatalf("%v %p: %#02x %#02x", i, &b[i], g, e)
			}

			b[i] = 0
		}
		if err := alloc.Free(b); err != nil {
			t.Fatal(err)
		}
	}
	// All counters and the region map must be back to zero/empty.
	if alloc.Allocs != 0 || alloc.Mmaps != 0 || alloc.Bytes != 0 || len(alloc.regs) != 0 {
		t.Fatalf("%+v", alloc)
	}
}
   470  
// Small variants use slot-sized allocations, Big variants page-sized ones.
func Test2Small(t *testing.T) { test2(t, max) }
func Test2Big(t *testing.T)   { test2(t, bigMax) }
   473  
   474  func test3(t *testing.T, max int) {
   475  	var alloc Allocator
   476  
   477  	defer alloc.Close()
   478  
   479  	rem := quota
   480  	m := map[*[]byte][]byte{}
   481  	srng, err := mathutil.NewFC32(1, max, true)
   482  	if err != nil {
   483  		t.Fatal(err)
   484  	}
   485  
   486  	vrng, err := mathutil.NewFC32(1, max, true)
   487  	if err != nil {
   488  		t.Fatal(err)
   489  	}
   490  
   491  	for rem > 0 {
   492  		switch srng.Next() % 3 {
   493  		case 0, 1: // 2/3 allocate
   494  			size := srng.Next()
   495  			rem -= size
   496  			b, err := alloc.Malloc(size)
   497  			if err != nil {
   498  				t.Fatal(err)
   499  			}
   500  
   501  			for i := range b {
   502  				b[i] = byte(vrng.Next())
   503  			}
   504  			m[&b] = append([]byte(nil), b...)
   505  		default: // 1/3 free
   506  			for k, v := range m {
   507  				b := *k
   508  				if !bytes.Equal(b, v) {
   509  					t.Fatal("corrupted heap")
   510  				}
   511  
   512  				if a, b := len(b), UsableSize(&b[0]); a > b {
   513  					t.Fatal(a, b)
   514  				}
   515  
   516  				for i := range b {
   517  					b[i] = 0
   518  				}
   519  				rem += len(b)
   520  				alloc.Free(b)
   521  				delete(m, k)
   522  				break
   523  			}
   524  		}
   525  	}
   526  	if counters {
   527  		t.Logf("allocs %v, mmaps %v, bytes %v, overhead %v (%.2f%%).", alloc.Allocs, alloc.Mmaps, alloc.Bytes, alloc.Bytes-quota, 100*float64(alloc.Bytes-quota)/quota)
   528  	}
   529  	for k, v := range m {
   530  		b := *k
   531  		if !bytes.Equal(b, v) {
   532  			t.Fatal("corrupted heap")
   533  		}
   534  
   535  		if a, b := len(b), UsableSize(&b[0]); a > b {
   536  			t.Fatal(a, b)
   537  		}
   538  
   539  		for i := range b {
   540  			b[i] = 0
   541  		}
   542  		alloc.Free(b)
   543  	}
   544  	if alloc.Allocs != 0 || alloc.Mmaps != 0 || alloc.Bytes != 0 || len(alloc.regs) != 0 {
   545  		t.Fatalf("%+v", alloc)
   546  	}
   547  }
   548  
// Small variants use slot-sized allocations, Big variants page-sized ones.
func Test3Small(t *testing.T) { test3(t, max) }
func Test3Big(t *testing.T)   { test3(t, bigMax) }
   551  
   552  func TestFree(t *testing.T) {
   553  	var alloc Allocator
   554  
   555  	defer alloc.Close()
   556  
   557  	b, err := alloc.Malloc(1)
   558  	if err != nil {
   559  		t.Fatal(err)
   560  	}
   561  
   562  	if err := alloc.Free(b[:0]); err != nil {
   563  		t.Fatal(err)
   564  	}
   565  
   566  	if alloc.Allocs != 0 || alloc.Mmaps != 0 || alloc.Bytes != 0 || len(alloc.regs) != 0 {
   567  		t.Fatalf("%+v", alloc)
   568  	}
   569  }
   570  
   571  func TestMalloc(t *testing.T) {
   572  	var alloc Allocator
   573  
   574  	defer alloc.Close()
   575  
   576  	b, err := alloc.Malloc(maxSlotSize)
   577  	if err != nil {
   578  		t.Fatal(err)
   579  	}
   580  
   581  	p := (*page)(unsafe.Pointer(uintptr(unsafe.Pointer(&b[0])) &^ uintptr(osPageMask)))
   582  	if 1<<p.log > maxSlotSize {
   583  		t.Fatal(1<<p.log, maxSlotSize)
   584  	}
   585  
   586  	if err := alloc.Free(b[:0]); err != nil {
   587  		t.Fatal(err)
   588  	}
   589  
   590  	if alloc.Allocs != 0 || alloc.Mmaps != 0 || alloc.Bytes != 0 || len(alloc.regs) != 0 {
   591  		t.Fatalf("%+v", alloc)
   592  	}
   593  }
   594  
// benchmarkFree measures Allocator.Free for b.N blocks of the given size.
// The blocks are allocated before the timer starts; only the frees are
// timed. The Free error is deliberately ignored inside the timed loop to
// keep it tight; a failed free would surface in the counter check below.
func benchmarkFree(b *testing.B, size int) {
	var alloc Allocator

	defer alloc.Close()

	a := make([][]byte, b.N)
	for i := range a {
		p, err := alloc.Malloc(size)
		if err != nil {
			b.Fatal(err)
		}

		a[i] = p
	}
	b.ResetTimer()
	for _, b := range a {
		alloc.Free(b)
	}
	b.StopTimer()
	// All counters and the region map must be back to zero/empty.
	if alloc.Allocs != 0 || alloc.Mmaps != 0 || alloc.Bytes != 0 || len(alloc.regs) != 0 {
		b.Fatalf("%+v", alloc)
	}
}
   618  
// Free benchmarks for 16-, 32- and 64-byte blocks.
func BenchmarkFree16(b *testing.B) { benchmarkFree(b, 1<<4) }
func BenchmarkFree32(b *testing.B) { benchmarkFree(b, 1<<5) }
func BenchmarkFree64(b *testing.B) { benchmarkFree(b, 1<<6) }
   622  
   623  func benchmarkCalloc(b *testing.B, size int) {
   624  	var alloc Allocator
   625  
   626  	defer alloc.Close()
   627  
   628  	a := make([][]byte, b.N)
   629  	b.ResetTimer()
   630  	for i := range a {
   631  		p, err := alloc.Calloc(size)
   632  		if err != nil {
   633  			b.Fatal(err)
   634  		}
   635  
   636  		a[i] = p
   637  	}
   638  	b.StopTimer()
   639  	for _, b := range a {
   640  		alloc.Free(b)
   641  	}
   642  	if alloc.Allocs != 0 || alloc.Mmaps != 0 || alloc.Bytes != 0 || len(alloc.regs) != 0 {
   643  		b.Fatalf("%+v", alloc)
   644  	}
   645  }
   646  
// Calloc benchmarks for 16-, 32- and 64-byte blocks.
func BenchmarkCalloc16(b *testing.B) { benchmarkCalloc(b, 1<<4) }
func BenchmarkCalloc32(b *testing.B) { benchmarkCalloc(b, 1<<5) }
func BenchmarkCalloc64(b *testing.B) { benchmarkCalloc(b, 1<<6) }
   650  
   651  func benchmarkGoCalloc(b *testing.B, size int) {
   652  	a := make([][]byte, b.N)
   653  	b.ResetTimer()
   654  	for i := range a {
   655  		a[i] = make([]byte, size)
   656  	}
   657  	b.StopTimer()
   658  	use(a)
   659  }
   660  
// Go-runtime baseline benchmarks for 16-, 32- and 64-byte blocks.
func BenchmarkGoCalloc16(b *testing.B) { benchmarkGoCalloc(b, 1<<4) }
func BenchmarkGoCalloc32(b *testing.B) { benchmarkGoCalloc(b, 1<<5) }
func BenchmarkGoCalloc64(b *testing.B) { benchmarkGoCalloc(b, 1<<6) }
   664  
   665  func benchmarkMalloc(b *testing.B, size int) {
   666  	var alloc Allocator
   667  
   668  	defer alloc.Close()
   669  
   670  	a := make([][]byte, b.N)
   671  	b.ResetTimer()
   672  	for i := range a {
   673  		p, err := alloc.Malloc(size)
   674  		if err != nil {
   675  			b.Fatal(err)
   676  		}
   677  
   678  		a[i] = p
   679  	}
   680  	b.StopTimer()
   681  	for _, b := range a {
   682  		alloc.Free(b)
   683  	}
   684  	if alloc.Allocs != 0 || alloc.Mmaps != 0 || alloc.Bytes != 0 || len(alloc.regs) != 0 {
   685  		b.Fatalf("%+v", alloc)
   686  	}
   687  }
   688  
// Malloc benchmarks for 16-, 32- and 64-byte blocks.
func BenchmarkMalloc16(b *testing.B) { benchmarkMalloc(b, 1<<4) }
func BenchmarkMalloc32(b *testing.B) { benchmarkMalloc(b, 1<<5) }
func BenchmarkMalloc64(b *testing.B) { benchmarkMalloc(b, 1<<6) }
   692  
// benchmarkUintptrFree measures Allocator.UintptrFree for b.N blocks of the
// given size. The blocks are allocated before the timer starts; only the
// frees are timed. The UintptrFree error is deliberately ignored inside the
// timed loop to keep it tight; a failed free would surface in the counter
// check below.
func benchmarkUintptrFree(b *testing.B, size int) {
	var alloc Allocator

	defer alloc.Close()

	a := make([]uintptr, b.N)
	for i := range a {
		p, err := alloc.UintptrMalloc(size)
		if err != nil {
			b.Fatal(err)
		}

		a[i] = p
	}
	b.ResetTimer()
	for _, p := range a {
		alloc.UintptrFree(p)
	}
	b.StopTimer()
	// All counters and the region map must be back to zero/empty.
	if alloc.Allocs != 0 || alloc.Mmaps != 0 || alloc.Bytes != 0 || len(alloc.regs) != 0 {
		b.Fatalf("%+v", alloc)
	}
}
   716  
// UintptrFree benchmarks for 16-, 32- and 64-byte blocks.
func BenchmarkUintptrFree16(b *testing.B) { benchmarkUintptrFree(b, 1<<4) }
func BenchmarkUintptrFree32(b *testing.B) { benchmarkUintptrFree(b, 1<<5) }
func BenchmarkUintptrFree64(b *testing.B) { benchmarkUintptrFree(b, 1<<6) }
   720  
   721  func benchmarkUintptrCalloc(b *testing.B, size int) {
   722  	var alloc Allocator
   723  
   724  	defer alloc.Close()
   725  
   726  	a := make([]uintptr, b.N)
   727  	b.ResetTimer()
   728  	for i := range a {
   729  		p, err := alloc.UintptrCalloc(size)
   730  		if err != nil {
   731  			b.Fatal(err)
   732  		}
   733  
   734  		a[i] = p
   735  	}
   736  	b.StopTimer()
   737  	for _, p := range a {
   738  		alloc.UintptrFree(p)
   739  	}
   740  	if alloc.Allocs != 0 || alloc.Mmaps != 0 || alloc.Bytes != 0 || len(alloc.regs) != 0 {
   741  		b.Fatalf("%+v", alloc)
   742  	}
   743  }
   744  
// UintptrCalloc benchmarks for 16-, 32- and 64-byte blocks.
func BenchmarkUintptrCalloc16(b *testing.B) { benchmarkUintptrCalloc(b, 1<<4) }
func BenchmarkUintptrCalloc32(b *testing.B) { benchmarkUintptrCalloc(b, 1<<5) }
func BenchmarkUintptrCalloc64(b *testing.B) { benchmarkUintptrCalloc(b, 1<<6) }
   748  
   749  func benchmarkUintptrMalloc(b *testing.B, size int) {
   750  	var alloc Allocator
   751  
   752  	defer alloc.Close()
   753  
   754  	a := make([]uintptr, b.N)
   755  	b.ResetTimer()
   756  	for i := range a {
   757  		p, err := alloc.UintptrMalloc(size)
   758  		if err != nil {
   759  			b.Fatal(err)
   760  		}
   761  
   762  		a[i] = p
   763  	}
   764  	b.StopTimer()
   765  	for _, p := range a {
   766  		alloc.UintptrFree(p)
   767  	}
   768  	if alloc.Allocs != 0 || alloc.Mmaps != 0 || alloc.Bytes != 0 || len(alloc.regs) != 0 {
   769  		b.Fatalf("%+v", alloc)
   770  	}
   771  }
   772  
// UintptrMalloc benchmarks for 16-, 32- and 64-byte blocks.
func BenchmarkUintptrMalloc16(b *testing.B) { benchmarkUintptrMalloc(b, 1<<4) }
func BenchmarkUintptrMalloc32(b *testing.B) { benchmarkUintptrMalloc(b, 1<<5) }
func BenchmarkUintptrMalloc64(b *testing.B) { benchmarkUintptrMalloc(b, 1<<6) }