gonum.org/v1/gonum@v0.13.0/internal/asm/c64/bench_test.go

// Copyright ©2016 The Gonum Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package c64

import "testing"

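// Shared benchmark data: a is the complex scalar used by the unitary benchmarks,
// x and y are the input vectors, and z receives results in the *To benchmarks.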
var (
	a = complex64(2 + 2i)
	x = make([]complex64, 1000000)
	y = make([]complex64, 1000000)
	z = make([]complex64, 1000000)
)

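// init fills x and y with deterministic values so the benchmarked kernels
// operate on varied, non-trivial data.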
func init() {
	for n := range x {
		x[n] = complex(float32(n), float32(n))
		y[n] = complex(float32(n), float32(n))
	}
}

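// benchaxpyu benchmarks a unit-stride axpy implementation f on the first n elements of x and y.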
func benchaxpyu(t *testing.B, n int, f func(a complex64, x, y []complex64)) {
	x, y := x[:n], y[:n]
	for i := 0; i < t.N; i++ {
		f(a, x, y)
	}
}

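// naiveaxpyu is a pure Go axpy loop used as the baseline against the optimized AxpyUnitary kernel.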
func naiveaxpyu(a complex64, x, y []complex64) {
	for i, v := range x {
		y[i] += a * v
	}
}

func BenchmarkC64AxpyUnitary1(t *testing.B)     { benchaxpyu(t, 1, AxpyUnitary) }
func BenchmarkC64AxpyUnitary2(t *testing.B)     { benchaxpyu(t, 2, AxpyUnitary) }
func BenchmarkC64AxpyUnitary3(t *testing.B)     { benchaxpyu(t, 3, AxpyUnitary) }
func BenchmarkC64AxpyUnitary4(t *testing.B)     { benchaxpyu(t, 4, AxpyUnitary) }
func BenchmarkC64AxpyUnitary5(t *testing.B)     { benchaxpyu(t, 5, AxpyUnitary) }
func BenchmarkC64AxpyUnitary10(t *testing.B)    { benchaxpyu(t, 10, AxpyUnitary) }
func BenchmarkC64AxpyUnitary100(t *testing.B)   { benchaxpyu(t, 100, AxpyUnitary) }
func BenchmarkC64AxpyUnitary1000(t *testing.B)  { benchaxpyu(t, 1000, AxpyUnitary) }
func BenchmarkC64AxpyUnitary5000(t *testing.B)  { benchaxpyu(t, 5000, AxpyUnitary) }
func BenchmarkC64AxpyUnitary10000(t *testing.B) { benchaxpyu(t, 10000, AxpyUnitary) }
func BenchmarkC64AxpyUnitary50000(t *testing.B) { benchaxpyu(t, 50000, AxpyUnitary) }

func BenchmarkLC64AxpyUnitary1(t *testing.B)     { benchaxpyu(t, 1, naiveaxpyu) }
func BenchmarkLC64AxpyUnitary2(t *testing.B)     { benchaxpyu(t, 2, naiveaxpyu) }
func BenchmarkLC64AxpyUnitary3(t *testing.B)     { benchaxpyu(t, 3, naiveaxpyu) }
func BenchmarkLC64AxpyUnitary4(t *testing.B)     { benchaxpyu(t, 4, naiveaxpyu) }
func BenchmarkLC64AxpyUnitary5(t *testing.B)     { benchaxpyu(t, 5, naiveaxpyu) }
func BenchmarkLC64AxpyUnitary10(t *testing.B)    { benchaxpyu(t, 10, naiveaxpyu) }
func BenchmarkLC64AxpyUnitary100(t *testing.B)   { benchaxpyu(t, 100, naiveaxpyu) }
func BenchmarkLC64AxpyUnitary1000(t *testing.B)  { benchaxpyu(t, 1000, naiveaxpyu) }
func BenchmarkLC64AxpyUnitary5000(t *testing.B)  { benchaxpyu(t, 5000, naiveaxpyu) }
func BenchmarkLC64AxpyUnitary10000(t *testing.B) { benchaxpyu(t, 10000, naiveaxpyu) }
func BenchmarkLC64AxpyUnitary50000(t *testing.B) { benchaxpyu(t, 50000, naiveaxpyu) }

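// benchaxpyut benchmarks a unit-stride axpy-to implementation f that writes a*x + y into the separate destination z.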
func benchaxpyut(t *testing.B, n int, f func(d []complex64, a complex64, x, y []complex64)) {
	x, y, z := x[:n], y[:n], z[:n]
	for i := 0; i < t.N; i++ {
		f(z, a, x, y)
	}
}

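// naiveaxpyut is a pure Go axpy-to loop used as the baseline against the optimized AxpyUnitaryTo kernel.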
func naiveaxpyut(d []complex64, a complex64, x, y []complex64) {
	for i, v := range x {
		d[i] = y[i] + a*v
	}
}

func BenchmarkC64AxpyUnitaryTo1(t *testing.B)     { benchaxpyut(t, 1, AxpyUnitaryTo) }
func BenchmarkC64AxpyUnitaryTo2(t *testing.B)     { benchaxpyut(t, 2, AxpyUnitaryTo) }
func BenchmarkC64AxpyUnitaryTo3(t *testing.B)     { benchaxpyut(t, 3, AxpyUnitaryTo) }
func BenchmarkC64AxpyUnitaryTo4(t *testing.B)     { benchaxpyut(t, 4, AxpyUnitaryTo) }
func BenchmarkC64AxpyUnitaryTo5(t *testing.B)     { benchaxpyut(t, 5, AxpyUnitaryTo) }
func BenchmarkC64AxpyUnitaryTo10(t *testing.B)    { benchaxpyut(t, 10, AxpyUnitaryTo) }
func BenchmarkC64AxpyUnitaryTo100(t *testing.B)   { benchaxpyut(t, 100, AxpyUnitaryTo) }
func BenchmarkC64AxpyUnitaryTo1000(t *testing.B)  { benchaxpyut(t, 1000, AxpyUnitaryTo) }
func BenchmarkC64AxpyUnitaryTo5000(t *testing.B)  { benchaxpyut(t, 5000, AxpyUnitaryTo) }
func BenchmarkC64AxpyUnitaryTo10000(t *testing.B) { benchaxpyut(t, 10000, AxpyUnitaryTo) }
func BenchmarkC64AxpyUnitaryTo50000(t *testing.B) { benchaxpyut(t, 50000, AxpyUnitaryTo) }

func BenchmarkLC64AxpyUnitaryTo1(t *testing.B)     { benchaxpyut(t, 1, naiveaxpyut) }
func BenchmarkLC64AxpyUnitaryTo2(t *testing.B)     { benchaxpyut(t, 2, naiveaxpyut) }
func BenchmarkLC64AxpyUnitaryTo3(t *testing.B)     { benchaxpyut(t, 3, naiveaxpyut) }
func BenchmarkLC64AxpyUnitaryTo4(t *testing.B)     { benchaxpyut(t, 4, naiveaxpyut) }
func BenchmarkLC64AxpyUnitaryTo5(t *testing.B)     { benchaxpyut(t, 5, naiveaxpyut) }
func BenchmarkLC64AxpyUnitaryTo10(t *testing.B)    { benchaxpyut(t, 10, naiveaxpyut) }
func BenchmarkLC64AxpyUnitaryTo100(t *testing.B)   { benchaxpyut(t, 100, naiveaxpyut) }
func BenchmarkLC64AxpyUnitaryTo1000(t *testing.B)  { benchaxpyut(t, 1000, naiveaxpyut) }
func BenchmarkLC64AxpyUnitaryTo5000(t *testing.B)  { benchaxpyut(t, 5000, naiveaxpyut) }
func BenchmarkLC64AxpyUnitaryTo10000(t *testing.B) { benchaxpyut(t, 10000, naiveaxpyut) }
func BenchmarkLC64AxpyUnitaryTo50000(t *testing.B) { benchaxpyut(t, 50000, naiveaxpyut) }

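// benchaxpyinc benchmarks a strided axpy implementation f over ln elements with stride t_inc.
// For negative strides the start index is set to (ln-1)*|t_inc| so that all accesses stay in bounds.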
func benchaxpyinc(t *testing.B, ln, t_inc int, f func(alpha complex64, x, y []complex64, n, incX, incY, ix, iy uintptr)) {
	n, inc := uintptr(ln), uintptr(t_inc)
	var idx int
	if t_inc < 0 {
		idx = (-ln + 1) * t_inc
	}
	for i := 0; i < t.N; i++ {
		f(1+1i, x, y, n, inc, inc, uintptr(idx), uintptr(idx))
	}
}

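// naiveaxpyinc is a pure Go strided axpy loop used as the baseline against the optimized AxpyInc kernel.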
func naiveaxpyinc(alpha complex64, x, y []complex64, n, incX, incY, ix, iy uintptr) {
	for i := 0; i < int(n); i++ {
		y[iy] += alpha * x[ix]
		ix += incX
		iy += incY
	}
}

func BenchmarkC64AxpyIncN1Inc1(b *testing.B) { benchaxpyinc(b, 1, 1, AxpyInc) }

func BenchmarkC64AxpyIncN2Inc1(b *testing.B)  { benchaxpyinc(b, 2, 1, AxpyInc) }
func BenchmarkC64AxpyIncN2Inc2(b *testing.B)  { benchaxpyinc(b, 2, 2, AxpyInc) }
func BenchmarkC64AxpyIncN2Inc4(b *testing.B)  { benchaxpyinc(b, 2, 4, AxpyInc) }
func BenchmarkC64AxpyIncN2Inc10(b *testing.B) { benchaxpyinc(b, 2, 10, AxpyInc) }

func BenchmarkC64AxpyIncN3Inc1(b *testing.B)  { benchaxpyinc(b, 3, 1, AxpyInc) }
func BenchmarkC64AxpyIncN3Inc2(b *testing.B)  { benchaxpyinc(b, 3, 2, AxpyInc) }
func BenchmarkC64AxpyIncN3Inc4(b *testing.B)  { benchaxpyinc(b, 3, 4, AxpyInc) }
func BenchmarkC64AxpyIncN3Inc10(b *testing.B) { benchaxpyinc(b, 3, 10, AxpyInc) }

func BenchmarkC64AxpyIncN4Inc1(b *testing.B)  { benchaxpyinc(b, 4, 1, AxpyInc) }
func BenchmarkC64AxpyIncN4Inc2(b *testing.B)  { benchaxpyinc(b, 4, 2, AxpyInc) }
func BenchmarkC64AxpyIncN4Inc4(b *testing.B)  { benchaxpyinc(b, 4, 4, AxpyInc) }
func BenchmarkC64AxpyIncN4Inc10(b *testing.B) { benchaxpyinc(b, 4, 10, AxpyInc) }

func BenchmarkC64AxpyIncN10Inc1(b *testing.B)  { benchaxpyinc(b, 10, 1, AxpyInc) }
func BenchmarkC64AxpyIncN10Inc2(b *testing.B)  { benchaxpyinc(b, 10, 2, AxpyInc) }
func BenchmarkC64AxpyIncN10Inc4(b *testing.B)  { benchaxpyinc(b, 10, 4, AxpyInc) }
func BenchmarkC64AxpyIncN10Inc10(b *testing.B) { benchaxpyinc(b, 10, 10, AxpyInc) }

func BenchmarkC64AxpyIncN1000Inc1(b *testing.B)  { benchaxpyinc(b, 1000, 1, AxpyInc) }
func BenchmarkC64AxpyIncN1000Inc2(b *testing.B)  { benchaxpyinc(b, 1000, 2, AxpyInc) }
func BenchmarkC64AxpyIncN1000Inc4(b *testing.B)  { benchaxpyinc(b, 1000, 4, AxpyInc) }
func BenchmarkC64AxpyIncN1000Inc10(b *testing.B) { benchaxpyinc(b, 1000, 10, AxpyInc) }

func BenchmarkC64AxpyIncN100000Inc1(b *testing.B)  { benchaxpyinc(b, 100000, 1, AxpyInc) }
func BenchmarkC64AxpyIncN100000Inc2(b *testing.B)  { benchaxpyinc(b, 100000, 2, AxpyInc) }
func BenchmarkC64AxpyIncN100000Inc4(b *testing.B)  { benchaxpyinc(b, 100000, 4, AxpyInc) }
func BenchmarkC64AxpyIncN100000Inc10(b *testing.B) { benchaxpyinc(b, 100000, 10, AxpyInc) }

func BenchmarkC64AxpyIncN100000IncM1(b *testing.B)  { benchaxpyinc(b, 100000, -1, AxpyInc) }
func BenchmarkC64AxpyIncN100000IncM2(b *testing.B)  { benchaxpyinc(b, 100000, -2, AxpyInc) }
func BenchmarkC64AxpyIncN100000IncM4(b *testing.B)  { benchaxpyinc(b, 100000, -4, AxpyInc) }
func BenchmarkC64AxpyIncN100000IncM10(b *testing.B) { benchaxpyinc(b, 100000, -10, AxpyInc) }

func BenchmarkLC64AxpyIncN1Inc1(b *testing.B) { benchaxpyinc(b, 1, 1, naiveaxpyinc) }

func BenchmarkLC64AxpyIncN2Inc1(b *testing.B)  { benchaxpyinc(b, 2, 1, naiveaxpyinc) }
func BenchmarkLC64AxpyIncN2Inc2(b *testing.B)  { benchaxpyinc(b, 2, 2, naiveaxpyinc) }
func BenchmarkLC64AxpyIncN2Inc4(b *testing.B)  { benchaxpyinc(b, 2, 4, naiveaxpyinc) }
func BenchmarkLC64AxpyIncN2Inc10(b *testing.B) { benchaxpyinc(b, 2, 10, naiveaxpyinc) }

func BenchmarkLC64AxpyIncN3Inc1(b *testing.B)  { benchaxpyinc(b, 3, 1, naiveaxpyinc) }
func BenchmarkLC64AxpyIncN3Inc2(b *testing.B)  { benchaxpyinc(b, 3, 2, naiveaxpyinc) }
func BenchmarkLC64AxpyIncN3Inc4(b *testing.B)  { benchaxpyinc(b, 3, 4, naiveaxpyinc) }
func BenchmarkLC64AxpyIncN3Inc10(b *testing.B) { benchaxpyinc(b, 3, 10, naiveaxpyinc) }

func BenchmarkLC64AxpyIncN4Inc1(b *testing.B)  { benchaxpyinc(b, 4, 1, naiveaxpyinc) }
func BenchmarkLC64AxpyIncN4Inc2(b *testing.B)  { benchaxpyinc(b, 4, 2, naiveaxpyinc) }
func BenchmarkLC64AxpyIncN4Inc4(b *testing.B)  { benchaxpyinc(b, 4, 4, naiveaxpyinc) }
func BenchmarkLC64AxpyIncN4Inc10(b *testing.B) { benchaxpyinc(b, 4, 10, naiveaxpyinc) }

func BenchmarkLC64AxpyIncN10Inc1(b *testing.B)  { benchaxpyinc(b, 10, 1, naiveaxpyinc) }
func BenchmarkLC64AxpyIncN10Inc2(b *testing.B)  { benchaxpyinc(b, 10, 2, naiveaxpyinc) }
func BenchmarkLC64AxpyIncN10Inc4(b *testing.B)  { benchaxpyinc(b, 10, 4, naiveaxpyinc) }
func BenchmarkLC64AxpyIncN10Inc10(b *testing.B) { benchaxpyinc(b, 10, 10, naiveaxpyinc) }

func BenchmarkLC64AxpyIncN1000Inc1(b *testing.B)  { benchaxpyinc(b, 1000, 1, naiveaxpyinc) }
func BenchmarkLC64AxpyIncN1000Inc2(b *testing.B)  { benchaxpyinc(b, 1000, 2, naiveaxpyinc) }
func BenchmarkLC64AxpyIncN1000Inc4(b *testing.B)  { benchaxpyinc(b, 1000, 4, naiveaxpyinc) }
func BenchmarkLC64AxpyIncN1000Inc10(b *testing.B) { benchaxpyinc(b, 1000, 10, naiveaxpyinc) }

func BenchmarkLC64AxpyIncN100000Inc1(b *testing.B)  { benchaxpyinc(b, 100000, 1, naiveaxpyinc) }
func BenchmarkLC64AxpyIncN100000Inc2(b *testing.B)  { benchaxpyinc(b, 100000, 2, naiveaxpyinc) }
func BenchmarkLC64AxpyIncN100000Inc4(b *testing.B)  { benchaxpyinc(b, 100000, 4, naiveaxpyinc) }
func BenchmarkLC64AxpyIncN100000Inc10(b *testing.B) { benchaxpyinc(b, 100000, 10, naiveaxpyinc) }

func BenchmarkLC64AxpyIncN100000IncM1(b *testing.B)  { benchaxpyinc(b, 100000, -1, naiveaxpyinc) }
func BenchmarkLC64AxpyIncN100000IncM2(b *testing.B)  { benchaxpyinc(b, 100000, -2, naiveaxpyinc) }
func BenchmarkLC64AxpyIncN100000IncM4(b *testing.B)  { benchaxpyinc(b, 100000, -4, naiveaxpyinc) }
func BenchmarkLC64AxpyIncN100000IncM10(b *testing.B) { benchaxpyinc(b, 100000, -10, naiveaxpyinc) }

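// benchaxpyincto benchmarks a strided axpy-to implementation f that writes into the destination z,
// using the same start-index adjustment for negative strides as benchaxpyinc.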
func benchaxpyincto(t *testing.B, ln, t_inc int, f func(dst []complex64, incDst, idst uintptr, alpha complex64, x, y []complex64, n, incX, incY, ix, iy uintptr)) {
	n, inc := uintptr(ln), uintptr(t_inc)
	var idx int
	if t_inc < 0 {
		idx = (-ln + 1) * t_inc
	}
	for i := 0; i < t.N; i++ {
		f(z, inc, uintptr(idx), 1+1i, x, y, n, inc, inc, uintptr(idx), uintptr(idx))
	}
}

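// naiveaxpyincto is a pure Go strided axpy-to loop used as the baseline against the optimized AxpyIncTo kernel.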
func naiveaxpyincto(dst []complex64, incDst, idst uintptr, alpha complex64, x, y []complex64, n, incX, incY, ix, iy uintptr) {
	for i := 0; i < int(n); i++ {
		dst[idst] = alpha*x[ix] + y[iy]
		ix += incX
		iy += incY
		idst += incDst
	}
}

func BenchmarkC64AxpyIncToN1Inc1(b *testing.B) { benchaxpyincto(b, 1, 1, AxpyIncTo) }

func BenchmarkC64AxpyIncToN2Inc1(b *testing.B)  { benchaxpyincto(b, 2, 1, AxpyIncTo) }
func BenchmarkC64AxpyIncToN2Inc2(b *testing.B)  { benchaxpyincto(b, 2, 2, AxpyIncTo) }
func BenchmarkC64AxpyIncToN2Inc4(b *testing.B)  { benchaxpyincto(b, 2, 4, AxpyIncTo) }
func BenchmarkC64AxpyIncToN2Inc10(b *testing.B) { benchaxpyincto(b, 2, 10, AxpyIncTo) }

func BenchmarkC64AxpyIncToN3Inc1(b *testing.B)  { benchaxpyincto(b, 3, 1, AxpyIncTo) }
func BenchmarkC64AxpyIncToN3Inc2(b *testing.B)  { benchaxpyincto(b, 3, 2, AxpyIncTo) }
func BenchmarkC64AxpyIncToN3Inc4(b *testing.B)  { benchaxpyincto(b, 3, 4, AxpyIncTo) }
func BenchmarkC64AxpyIncToN3Inc10(b *testing.B) { benchaxpyincto(b, 3, 10, AxpyIncTo) }

func BenchmarkC64AxpyIncToN4Inc1(b *testing.B)  { benchaxpyincto(b, 4, 1, AxpyIncTo) }
func BenchmarkC64AxpyIncToN4Inc2(b *testing.B)  { benchaxpyincto(b, 4, 2, AxpyIncTo) }
func BenchmarkC64AxpyIncToN4Inc4(b *testing.B)  { benchaxpyincto(b, 4, 4, AxpyIncTo) }
func BenchmarkC64AxpyIncToN4Inc10(b *testing.B) { benchaxpyincto(b, 4, 10, AxpyIncTo) }

func BenchmarkC64AxpyIncToN10Inc1(b *testing.B)  { benchaxpyincto(b, 10, 1, AxpyIncTo) }
func BenchmarkC64AxpyIncToN10Inc2(b *testing.B)  { benchaxpyincto(b, 10, 2, AxpyIncTo) }
func BenchmarkC64AxpyIncToN10Inc4(b *testing.B)  { benchaxpyincto(b, 10, 4, AxpyIncTo) }
func BenchmarkC64AxpyIncToN10Inc10(b *testing.B) { benchaxpyincto(b, 10, 10, AxpyIncTo) }

func BenchmarkC64AxpyIncToN1000Inc1(b *testing.B)  { benchaxpyincto(b, 1000, 1, AxpyIncTo) }
func BenchmarkC64AxpyIncToN1000Inc2(b *testing.B)  { benchaxpyincto(b, 1000, 2, AxpyIncTo) }
func BenchmarkC64AxpyIncToN1000Inc4(b *testing.B)  { benchaxpyincto(b, 1000, 4, AxpyIncTo) }
func BenchmarkC64AxpyIncToN1000Inc10(b *testing.B) { benchaxpyincto(b, 1000, 10, AxpyIncTo) }

func BenchmarkC64AxpyIncToN100000Inc1(b *testing.B)  { benchaxpyincto(b, 100000, 1, AxpyIncTo) }
func BenchmarkC64AxpyIncToN100000Inc2(b *testing.B)  { benchaxpyincto(b, 100000, 2, AxpyIncTo) }
func BenchmarkC64AxpyIncToN100000Inc4(b *testing.B)  { benchaxpyincto(b, 100000, 4, AxpyIncTo) }
func BenchmarkC64AxpyIncToN100000Inc10(b *testing.B) { benchaxpyincto(b, 100000, 10, AxpyIncTo) }

func BenchmarkC64AxpyIncToN100000IncM1(b *testing.B)  { benchaxpyincto(b, 100000, -1, AxpyIncTo) }
func BenchmarkC64AxpyIncToN100000IncM2(b *testing.B)  { benchaxpyincto(b, 100000, -2, AxpyIncTo) }
func BenchmarkC64AxpyIncToN100000IncM4(b *testing.B)  { benchaxpyincto(b, 100000, -4, AxpyIncTo) }
func BenchmarkC64AxpyIncToN100000IncM10(b *testing.B) { benchaxpyincto(b, 100000, -10, AxpyIncTo) }

func BenchmarkLC64AxpyIncToN1Inc1(b *testing.B) { benchaxpyincto(b, 1, 1, naiveaxpyincto) }

func BenchmarkLC64AxpyIncToN2Inc1(b *testing.B)  { benchaxpyincto(b, 2, 1, naiveaxpyincto) }
func BenchmarkLC64AxpyIncToN2Inc2(b *testing.B)  { benchaxpyincto(b, 2, 2, naiveaxpyincto) }
func BenchmarkLC64AxpyIncToN2Inc4(b *testing.B)  { benchaxpyincto(b, 2, 4, naiveaxpyincto) }
func BenchmarkLC64AxpyIncToN2Inc10(b *testing.B) { benchaxpyincto(b, 2, 10, naiveaxpyincto) }

func BenchmarkLC64AxpyIncToN3Inc1(b *testing.B)  { benchaxpyincto(b, 3, 1, naiveaxpyincto) }
func BenchmarkLC64AxpyIncToN3Inc2(b *testing.B)  { benchaxpyincto(b, 3, 2, naiveaxpyincto) }
func BenchmarkLC64AxpyIncToN3Inc4(b *testing.B)  { benchaxpyincto(b, 3, 4, naiveaxpyincto) }
func BenchmarkLC64AxpyIncToN3Inc10(b *testing.B) { benchaxpyincto(b, 3, 10, naiveaxpyincto) }

func BenchmarkLC64AxpyIncToN4Inc1(b *testing.B)  { benchaxpyincto(b, 4, 1, naiveaxpyincto) }
func BenchmarkLC64AxpyIncToN4Inc2(b *testing.B)  { benchaxpyincto(b, 4, 2, naiveaxpyincto) }
func BenchmarkLC64AxpyIncToN4Inc4(b *testing.B)  { benchaxpyincto(b, 4, 4, naiveaxpyincto) }
func BenchmarkLC64AxpyIncToN4Inc10(b *testing.B) { benchaxpyincto(b, 4, 10, naiveaxpyincto) }

func BenchmarkLC64AxpyIncToN10Inc1(b *testing.B)  { benchaxpyincto(b, 10, 1, naiveaxpyincto) }
func BenchmarkLC64AxpyIncToN10Inc2(b *testing.B)  { benchaxpyincto(b, 10, 2, naiveaxpyincto) }
func BenchmarkLC64AxpyIncToN10Inc4(b *testing.B)  { benchaxpyincto(b, 10, 4, naiveaxpyincto) }
func BenchmarkLC64AxpyIncToN10Inc10(b *testing.B) { benchaxpyincto(b, 10, 10, naiveaxpyincto) }

func BenchmarkLC64AxpyIncToN1000Inc1(b *testing.B)  { benchaxpyincto(b, 1000, 1, naiveaxpyincto) }
func BenchmarkLC64AxpyIncToN1000Inc2(b *testing.B)  { benchaxpyincto(b, 1000, 2, naiveaxpyincto) }
func BenchmarkLC64AxpyIncToN1000Inc4(b *testing.B)  { benchaxpyincto(b, 1000, 4, naiveaxpyincto) }
func BenchmarkLC64AxpyIncToN1000Inc10(b *testing.B) { benchaxpyincto(b, 1000, 10, naiveaxpyincto) }

func BenchmarkLC64AxpyIncToN100000Inc1(b *testing.B)  { benchaxpyincto(b, 100000, 1, naiveaxpyincto) }
func BenchmarkLC64AxpyIncToN100000Inc2(b *testing.B)  { benchaxpyincto(b, 100000, 2, naiveaxpyincto) }
func BenchmarkLC64AxpyIncToN100000Inc4(b *testing.B)  { benchaxpyincto(b, 100000, 4, naiveaxpyincto) }
func BenchmarkLC64AxpyIncToN100000Inc10(b *testing.B) { benchaxpyincto(b, 100000, 10, naiveaxpyincto) }

func BenchmarkLC64AxpyIncToN100000IncM1(b *testing.B) { benchaxpyincto(b, 100000, -1, naiveaxpyincto) }
func BenchmarkLC64AxpyIncToN100000IncM2(b *testing.B) { benchaxpyincto(b, 100000, -2, naiveaxpyincto) }
func BenchmarkLC64AxpyIncToN100000IncM4(b *testing.B) { benchaxpyincto(b, 100000, -4, naiveaxpyincto) }
func BenchmarkLC64AxpyIncToN100000IncM10(b *testing.B) {
	benchaxpyincto(b, 100000, -10, naiveaxpyincto)
}