github.com/primecitizens/pcz/std@v0.2.1/core/bits/bits_test.go (about) 1 // SPDX-License-Identifier: Apache-2.0 2 // Copyright 2023 The Prime Citizens 3 // 4 // Copyright 2017 The Go Authors. All rights reserved. 5 // Use of this source code is governed by a BSD-style 6 // license that can be found in the LICENSE file. 7 8 package bits_test 9 10 import ( 11 "runtime" 12 "testing" 13 14 "github.com/primecitizens/pcz/std/core/arch" 15 "github.com/primecitizens/pcz/std/core/assert" 16 . "github.com/primecitizens/pcz/std/core/bits" 17 ) 18 19 func TestLeadingZeros(t *testing.T) { 20 for i := 0; i < 256; i++ { 21 nlz := tab[i].nlz 22 for k := 0; k < 64-8; k++ { 23 x := uint64(i) << uint(k) 24 if x <= 1<<8-1 { 25 got := LeadingZeros8(uint8(x)) 26 want := nlz - k + (8 - 8) 27 if x == 0 { 28 want = 8 29 } 30 if got != want { 31 t.Fatalf("LeadingZeros8(%#02x) == %d; want %d", x, got, want) 32 } 33 } 34 35 if x <= 1<<16-1 { 36 got := LeadingZeros16(uint16(x)) 37 want := nlz - k + (16 - 8) 38 if x == 0 { 39 want = 16 40 } 41 if got != want { 42 t.Fatalf("LeadingZeros16(%#04x) == %d; want %d", x, got, want) 43 } 44 } 45 46 if x <= 1<<32-1 { 47 got := LeadingZeros32(uint32(x)) 48 want := nlz - k + (32 - 8) 49 if x == 0 { 50 want = 32 51 } 52 if got != want { 53 t.Fatalf("LeadingZeros32(%#08x) == %d; want %d", x, got, want) 54 } 55 if arch.UintBits == 32 { 56 got = LeadingZeros(uint(x)) 57 if got != want { 58 t.Fatalf("LeadingZeros(%#08x) == %d; want %d", x, got, want) 59 } 60 } 61 } 62 63 if x <= 1<<64-1 { 64 got := LeadingZeros64(uint64(x)) 65 want := nlz - k + (64 - 8) 66 if x == 0 { 67 want = 64 68 } 69 if got != want { 70 t.Fatalf("LeadingZeros64(%#016x) == %d; want %d", x, got, want) 71 } 72 if arch.UintBits == 64 { 73 got = LeadingZeros(uint(x)) 74 if got != want { 75 t.Fatalf("LeadingZeros(%#016x) == %d; want %d", x, got, want) 76 } 77 } 78 } 79 } 80 } 81 } 82 83 // Exported (global) variable serving as input for some 84 // of the benchmarks to ensure side-effect free calls 85 // are not optimized away. 86 var Input uint64 = DeBruijn64 87 88 // Exported (global) variable to store function results 89 // during benchmarking to ensure side-effect free calls 90 // are not optimized away. 91 var Output int 92 93 func BenchmarkLeadingZeros(b *testing.B) { 94 var s int 95 for i := 0; i < b.N; i++ { 96 s += LeadingZeros(uint(Input) >> (uint(i) % arch.UintBits)) 97 } 98 Output = s 99 } 100 101 func BenchmarkLeadingZeros8(b *testing.B) { 102 var s int 103 for i := 0; i < b.N; i++ { 104 s += LeadingZeros8(uint8(Input) >> (uint(i) % 8)) 105 } 106 Output = s 107 } 108 109 func BenchmarkLeadingZeros16(b *testing.B) { 110 var s int 111 for i := 0; i < b.N; i++ { 112 s += LeadingZeros16(uint16(Input) >> (uint(i) % 16)) 113 } 114 Output = s 115 } 116 117 func BenchmarkLeadingZeros32(b *testing.B) { 118 var s int 119 for i := 0; i < b.N; i++ { 120 s += LeadingZeros32(uint32(Input) >> (uint(i) % 32)) 121 } 122 Output = s 123 } 124 125 func BenchmarkLeadingZeros64(b *testing.B) { 126 var s int 127 for i := 0; i < b.N; i++ { 128 s += LeadingZeros64(uint64(Input) >> (uint(i) % 64)) 129 } 130 Output = s 131 } 132 133 func TestTrailingZeros(t *testing.T) { 134 for i := 0; i < 256; i++ { 135 ntz := tab[i].ntz 136 for k := 0; k < 64-8; k++ { 137 x := uint64(i) << uint(k) 138 want := ntz + k 139 if x <= 1<<8-1 { 140 got := TrailingZeros8(uint8(x)) 141 if x == 0 { 142 want = 8 143 } 144 if got != want { 145 t.Fatalf("TrailingZeros8(%#02x) == %d; want %d", x, got, want) 146 } 147 } 148 149 if x <= 1<<16-1 { 150 got := TrailingZeros16(uint16(x)) 151 if x == 0 { 152 want = 16 153 } 154 if got != want { 155 t.Fatalf("TrailingZeros16(%#04x) == %d; want %d", x, got, want) 156 } 157 } 158 159 if x <= 1<<32-1 { 160 got := TrailingZeros32(uint32(x)) 161 if x == 0 { 162 want = 32 163 } 164 if got != want { 165 t.Fatalf("TrailingZeros32(%#08x) == %d; want %d", x, got, want) 166 } 167 if arch.UintBits == 32 { 168 got = TrailingZeros(uint(x)) 169 if got != want { 170 t.Fatalf("TrailingZeros(%#08x) == %d; want %d", x, got, want) 171 } 172 } 173 } 174 175 if x <= 1<<64-1 { 176 got := TrailingZeros64(uint64(x)) 177 if x == 0 { 178 want = 64 179 } 180 if got != want { 181 t.Fatalf("TrailingZeros64(%#016x) == %d; want %d", x, got, want) 182 } 183 if arch.UintBits == 64 { 184 got = TrailingZeros(uint(x)) 185 if got != want { 186 t.Fatalf("TrailingZeros(%#016x) == %d; want %d", x, got, want) 187 } 188 } 189 } 190 } 191 } 192 } 193 194 func BenchmarkTrailingZeros(b *testing.B) { 195 var s int 196 for i := 0; i < b.N; i++ { 197 s += TrailingZeros(uint(Input) << (uint(i) % arch.UintBits)) 198 } 199 Output = s 200 } 201 202 func BenchmarkTrailingZeros8(b *testing.B) { 203 var s int 204 for i := 0; i < b.N; i++ { 205 s += TrailingZeros8(uint8(Input) << (uint(i) % 8)) 206 } 207 Output = s 208 } 209 210 func BenchmarkTrailingZeros16(b *testing.B) { 211 var s int 212 for i := 0; i < b.N; i++ { 213 s += TrailingZeros16(uint16(Input) << (uint(i) % 16)) 214 } 215 Output = s 216 } 217 218 func BenchmarkTrailingZeros32(b *testing.B) { 219 var s int 220 for i := 0; i < b.N; i++ { 221 s += TrailingZeros32(uint32(Input) << (uint(i) % 32)) 222 } 223 Output = s 224 } 225 226 func BenchmarkTrailingZeros64(b *testing.B) { 227 var s int 228 for i := 0; i < b.N; i++ { 229 s += TrailingZeros64(uint64(Input) << (uint(i) % 64)) 230 } 231 Output = s 232 } 233 234 func TestOnesCount(t *testing.T) { 235 var x uint64 236 for i := 0; i <= 64; i++ { 237 testOnesCount(t, x, i) 238 x = x<<1 | 1 239 } 240 241 for i := 64; i >= 0; i-- { 242 testOnesCount(t, x, i) 243 x = x << 1 244 } 245 246 for i := 0; i < 256; i++ { 247 for k := 0; k < 64-8; k++ { 248 testOnesCount(t, uint64(i)<<uint(k), tab[i].pop) 249 } 250 } 251 } 252 253 func testOnesCount(t *testing.T, x uint64, want int) { 254 if x <= 1<<8-1 { 255 got := OnesCount8(uint8(x)) 256 if got != want { 257 t.Fatalf("OnesCount8(%#02x) == %d; want %d", uint8(x), got, want) 258 } 259 } 260 261 if x <= 1<<16-1 { 262 got := OnesCount16(uint16(x)) 263 if got != want { 264 t.Fatalf("OnesCount16(%#04x) == %d; want %d", uint16(x), got, want) 265 } 266 } 267 268 if x <= 1<<32-1 { 269 got := OnesCount32(uint32(x)) 270 if got != want { 271 t.Fatalf("OnesCount32(%#08x) == %d; want %d", uint32(x), got, want) 272 } 273 if arch.UintBits == 32 { 274 got = OnesCount(uint(x)) 275 if got != want { 276 t.Fatalf("OnesCount(%#08x) == %d; want %d", uint32(x), got, want) 277 } 278 } 279 } 280 281 if x <= 1<<64-1 { 282 got := OnesCount64(uint64(x)) 283 if got != want { 284 t.Fatalf("OnesCount64(%#016x) == %d; want %d", x, got, want) 285 } 286 if arch.UintBits == 64 { 287 got = OnesCount(uint(x)) 288 if got != want { 289 t.Fatalf("OnesCount(%#016x) == %d; want %d", x, got, want) 290 } 291 } 292 } 293 } 294 295 func BenchmarkOnesCount(b *testing.B) { 296 var s int 297 for i := 0; i < b.N; i++ { 298 s += OnesCount(uint(Input)) 299 } 300 Output = s 301 } 302 303 func BenchmarkOnesCount8(b *testing.B) { 304 var s int 305 for i := 0; i < b.N; i++ { 306 s += OnesCount8(uint8(Input)) 307 } 308 Output = s 309 } 310 311 func BenchmarkOnesCount16(b *testing.B) { 312 var s int 313 for i := 0; i < b.N; i++ { 314 s += OnesCount16(uint16(Input)) 315 } 316 Output = s 317 } 318 319 func BenchmarkOnesCount32(b *testing.B) { 320 var s int 321 for i := 0; i < b.N; i++ { 322 s += OnesCount32(uint32(Input)) 323 } 324 Output = s 325 } 326 327 func BenchmarkOnesCount64(b *testing.B) { 328 var s int 329 for i := 0; i < b.N; i++ { 330 s += OnesCount64(uint64(Input)) 331 } 332 Output = s 333 } 334 335 func TestRotateLeft(t *testing.T) { 336 var m uint64 = DeBruijn64 337 338 for k := uint(0); k < 128; k++ { 339 x8 := uint8(m) 340 got8 := RotateLeft8(x8, int(k)) 341 want8 := x8<<(k&0x7) | x8>>(8-k&0x7) 342 if got8 != want8 { 343 t.Fatalf("RotateLeft8(%#02x, %d) == %#02x; want %#02x", x8, k, got8, want8) 344 } 345 got8 = RotateLeft8(want8, -int(k)) 346 if got8 != x8 { 347 t.Fatalf("RotateLeft8(%#02x, -%d) == %#02x; want %#02x", want8, k, got8, x8) 348 } 349 350 x16 := uint16(m) 351 got16 := RotateLeft16(x16, int(k)) 352 want16 := x16<<(k&0xf) | x16>>(16-k&0xf) 353 if got16 != want16 { 354 t.Fatalf("RotateLeft16(%#04x, %d) == %#04x; want %#04x", x16, k, got16, want16) 355 } 356 got16 = RotateLeft16(want16, -int(k)) 357 if got16 != x16 { 358 t.Fatalf("RotateLeft16(%#04x, -%d) == %#04x; want %#04x", want16, k, got16, x16) 359 } 360 361 x32 := uint32(m) 362 got32 := RotateLeft32(x32, int(k)) 363 want32 := x32<<(k&0x1f) | x32>>(32-k&0x1f) 364 if got32 != want32 { 365 t.Fatalf("RotateLeft32(%#08x, %d) == %#08x; want %#08x", x32, k, got32, want32) 366 } 367 got32 = RotateLeft32(want32, -int(k)) 368 if got32 != x32 { 369 t.Fatalf("RotateLeft32(%#08x, -%d) == %#08x; want %#08x", want32, k, got32, x32) 370 } 371 if arch.UintBits == 32 { 372 x := uint(m) 373 got := RotateLeft(x, int(k)) 374 want := x<<(k&0x1f) | x>>(32-k&0x1f) 375 if got != want { 376 t.Fatalf("RotateLeft(%#08x, %d) == %#08x; want %#08x", x, k, got, want) 377 } 378 got = RotateLeft(want, -int(k)) 379 if got != x { 380 t.Fatalf("RotateLeft(%#08x, -%d) == %#08x; want %#08x", want, k, got, x) 381 } 382 } 383 384 x64 := uint64(m) 385 got64 := RotateLeft64(x64, int(k)) 386 want64 := x64<<(k&0x3f) | x64>>(64-k&0x3f) 387 if got64 != want64 { 388 t.Fatalf("RotateLeft64(%#016x, %d) == %#016x; want %#016x", x64, k, got64, want64) 389 } 390 got64 = RotateLeft64(want64, -int(k)) 391 if got64 != x64 { 392 t.Fatalf("RotateLeft64(%#016x, -%d) == %#016x; want %#016x", want64, k, got64, x64) 393 } 394 if arch.UintBits == 64 { 395 x := uint(m) 396 got := RotateLeft(x, int(k)) 397 want := x<<(k&0x3f) | x>>(64-k&0x3f) 398 if got != want { 399 t.Fatalf("RotateLeft(%#016x, %d) == %#016x; want %#016x", x, k, got, want) 400 } 401 got = RotateLeft(want, -int(k)) 402 if got != x { 403 t.Fatalf("RotateLeft(%#08x, -%d) == %#08x; want %#08x", want, k, got, x) 404 } 405 } 406 } 407 } 408 409 func BenchmarkRotateLeft(b *testing.B) { 410 var s uint 411 for i := 0; i < b.N; i++ { 412 s += RotateLeft(uint(Input), i) 413 } 414 Output = int(s) 415 } 416 417 func BenchmarkRotateLeft8(b *testing.B) { 418 var s uint8 419 for i := 0; i < b.N; i++ { 420 s += RotateLeft8(uint8(Input), i) 421 } 422 Output = int(s) 423 } 424 425 func BenchmarkRotateLeft16(b *testing.B) { 426 var s uint16 427 for i := 0; i < b.N; i++ { 428 s += RotateLeft16(uint16(Input), i) 429 } 430 Output = int(s) 431 } 432 433 func BenchmarkRotateLeft32(b *testing.B) { 434 var s uint32 435 for i := 0; i < b.N; i++ { 436 s += RotateLeft32(uint32(Input), i) 437 } 438 Output = int(s) 439 } 440 441 func BenchmarkRotateLeft64(b *testing.B) { 442 var s uint64 443 for i := 0; i < b.N; i++ { 444 s += RotateLeft64(uint64(Input), i) 445 } 446 Output = int(s) 447 } 448 449 func TestReverse(t *testing.T) { 450 // test each bit 451 for i := uint(0); i < 64; i++ { 452 testReverse(t, uint64(1)<<i, uint64(1)<<(63-i)) 453 } 454 455 // test a few patterns 456 for _, test := range []struct { 457 x, r uint64 458 }{ 459 {0, 0}, 460 {0x1, 0x8 << 60}, 461 {0x2, 0x4 << 60}, 462 {0x3, 0xc << 60}, 463 {0x4, 0x2 << 60}, 464 {0x5, 0xa << 60}, 465 {0x6, 0x6 << 60}, 466 {0x7, 0xe << 60}, 467 {0x8, 0x1 << 60}, 468 {0x9, 0x9 << 60}, 469 {0xa, 0x5 << 60}, 470 {0xb, 0xd << 60}, 471 {0xc, 0x3 << 60}, 472 {0xd, 0xb << 60}, 473 {0xe, 0x7 << 60}, 474 {0xf, 0xf << 60}, 475 {0x5686487, 0xe12616a000000000}, 476 {0x0123456789abcdef, 0xf7b3d591e6a2c480}, 477 } { 478 testReverse(t, test.x, test.r) 479 testReverse(t, test.r, test.x) 480 } 481 } 482 483 func testReverse(t *testing.T, x64, want64 uint64) { 484 x8 := uint8(x64) 485 got8 := Reverse8(x8) 486 want8 := uint8(want64 >> (64 - 8)) 487 if got8 != want8 { 488 t.Fatalf("Reverse8(%#02x) == %#02x; want %#02x", x8, got8, want8) 489 } 490 491 x16 := uint16(x64) 492 got16 := Reverse16(x16) 493 want16 := uint16(want64 >> (64 - 16)) 494 if got16 != want16 { 495 t.Fatalf("Reverse16(%#04x) == %#04x; want %#04x", x16, got16, want16) 496 } 497 498 x32 := uint32(x64) 499 got32 := Reverse32(x32) 500 want32 := uint32(want64 >> (64 - 32)) 501 if got32 != want32 { 502 t.Fatalf("Reverse32(%#08x) == %#08x; want %#08x", x32, got32, want32) 503 } 504 if arch.UintBits == 32 { 505 x := uint(x32) 506 got := Reverse(x) 507 want := uint(want32) 508 if got != want { 509 t.Fatalf("Reverse(%#08x) == %#08x; want %#08x", x, got, want) 510 } 511 } 512 513 got64 := Reverse64(x64) 514 if got64 != want64 { 515 t.Fatalf("Reverse64(%#016x) == %#016x; want %#016x", x64, got64, want64) 516 } 517 if arch.UintBits == 64 { 518 x := uint(x64) 519 got := Reverse(x) 520 want := uint(want64) 521 if got != want { 522 t.Fatalf("Reverse(%#08x) == %#016x; want %#016x", x, got, want) 523 } 524 } 525 } 526 527 func BenchmarkReverse(b *testing.B) { 528 var s uint 529 for i := 0; i < b.N; i++ { 530 s += Reverse(uint(i)) 531 } 532 Output = int(s) 533 } 534 535 func BenchmarkReverse8(b *testing.B) { 536 var s uint8 537 for i := 0; i < b.N; i++ { 538 s += Reverse8(uint8(i)) 539 } 540 Output = int(s) 541 } 542 543 func BenchmarkReverse16(b *testing.B) { 544 var s uint16 545 for i := 0; i < b.N; i++ { 546 s += Reverse16(uint16(i)) 547 } 548 Output = int(s) 549 } 550 551 func BenchmarkReverse32(b *testing.B) { 552 var s uint32 553 for i := 0; i < b.N; i++ { 554 s += Reverse32(uint32(i)) 555 } 556 Output = int(s) 557 } 558 559 func BenchmarkReverse64(b *testing.B) { 560 var s uint64 561 for i := 0; i < b.N; i++ { 562 s += Reverse64(uint64(i)) 563 } 564 Output = int(s) 565 } 566 567 func TestReverseBytes(t *testing.T) { 568 for _, test := range []struct { 569 x, r uint64 570 }{ 571 {0, 0}, 572 {0x01, 0x01 << 56}, 573 {0x0123, 0x2301 << 48}, 574 {0x012345, 0x452301 << 40}, 575 {0x01234567, 0x67452301 << 32}, 576 {0x0123456789, 0x8967452301 << 24}, 577 {0x0123456789ab, 0xab8967452301 << 16}, 578 {0x0123456789abcd, 0xcdab8967452301 << 8}, 579 {0x0123456789abcdef, 0xefcdab8967452301 << 0}, 580 } { 581 testReverseBytes(t, test.x, test.r) 582 testReverseBytes(t, test.r, test.x) 583 } 584 } 585 586 func testReverseBytes(t *testing.T, x64, want64 uint64) { 587 x16 := uint16(x64) 588 got16 := ReverseBytes16(x16) 589 want16 := uint16(want64 >> (64 - 16)) 590 if got16 != want16 { 591 t.Fatalf("ReverseBytes16(%#04x) == %#04x; want %#04x", x16, got16, want16) 592 } 593 594 x32 := uint32(x64) 595 got32 := ReverseBytes32(x32) 596 want32 := uint32(want64 >> (64 - 32)) 597 if got32 != want32 { 598 t.Fatalf("ReverseBytes32(%#08x) == %#08x; want %#08x", x32, got32, want32) 599 } 600 if arch.UintBits == 32 { 601 x := uint(x32) 602 got := ReverseBytes(x) 603 want := uint(want32) 604 if got != want { 605 t.Fatalf("ReverseBytes(%#08x) == %#08x; want %#08x", x, got, want) 606 } 607 } 608 609 got64 := ReverseBytes64(x64) 610 if got64 != want64 { 611 t.Fatalf("ReverseBytes64(%#016x) == %#016x; want %#016x", x64, got64, want64) 612 } 613 if arch.UintBits == 64 { 614 x := uint(x64) 615 got := ReverseBytes(x) 616 want := uint(want64) 617 if got != want { 618 t.Fatalf("ReverseBytes(%#016x) == %#016x; want %#016x", x, got, want) 619 } 620 } 621 } 622 623 func BenchmarkReverseBytes(b *testing.B) { 624 var s uint 625 for i := 0; i < b.N; i++ { 626 s += ReverseBytes(uint(i)) 627 } 628 Output = int(s) 629 } 630 631 func BenchmarkReverseBytes16(b *testing.B) { 632 var s uint16 633 for i := 0; i < b.N; i++ { 634 s += ReverseBytes16(uint16(i)) 635 } 636 Output = int(s) 637 } 638 639 func BenchmarkReverseBytes32(b *testing.B) { 640 var s uint32 641 for i := 0; i < b.N; i++ { 642 s += ReverseBytes32(uint32(i)) 643 } 644 Output = int(s) 645 } 646 647 func BenchmarkReverseBytes64(b *testing.B) { 648 var s uint64 649 for i := 0; i < b.N; i++ { 650 s += ReverseBytes64(uint64(i)) 651 } 652 Output = int(s) 653 } 654 655 func TestLen(t *testing.T) { 656 for i := 0; i < 256; i++ { 657 len := 8 - tab[i].nlz 658 for k := 0; k < 64-8; k++ { 659 x := uint64(i) << uint(k) 660 want := 0 661 if x != 0 { 662 want = len + k 663 } 664 if x <= 1<<8-1 { 665 got := Len8(uint8(x)) 666 if got != want { 667 t.Fatalf("Len8(%#02x) == %d; want %d", x, got, want) 668 } 669 } 670 671 if x <= 1<<16-1 { 672 got := Len16(uint16(x)) 673 if got != want { 674 t.Fatalf("Len16(%#04x) == %d; want %d", x, got, want) 675 } 676 } 677 678 if x <= 1<<32-1 { 679 got := Len32(uint32(x)) 680 if got != want { 681 t.Fatalf("Len32(%#08x) == %d; want %d", x, got, want) 682 } 683 if arch.UintBits == 32 { 684 got := Len(uint(x)) 685 if got != want { 686 t.Fatalf("Len(%#08x) == %d; want %d", x, got, want) 687 } 688 } 689 } 690 691 if x <= 1<<64-1 { 692 got := Len64(uint64(x)) 693 if got != want { 694 t.Fatalf("Len64(%#016x) == %d; want %d", x, got, want) 695 } 696 if arch.UintBits == 64 { 697 got := Len(uint(x)) 698 if got != want { 699 t.Fatalf("Len(%#016x) == %d; want %d", x, got, want) 700 } 701 } 702 } 703 } 704 } 705 } 706 707 const ( 708 _M = 1<<arch.UintBits - 1 709 _M32 = 1<<32 - 1 710 _M64 = 1<<64 - 1 711 ) 712 713 func TestAddSubUint(t *testing.T) { 714 test := func(msg string, f func(x, y, c uint) (z, cout uint), x, y, c, z, cout uint) { 715 z1, cout1 := f(x, y, c) 716 if z1 != z || cout1 != cout { 717 t.Errorf("%s: got z:cout = %#x:%#x; want %#x:%#x", msg, z1, cout1, z, cout) 718 } 719 } 720 for _, a := range []struct{ x, y, c, z, cout uint }{ 721 {0, 0, 0, 0, 0}, 722 {0, 1, 0, 1, 0}, 723 {0, 0, 1, 1, 0}, 724 {0, 1, 1, 2, 0}, 725 {12345, 67890, 0, 80235, 0}, 726 {12345, 67890, 1, 80236, 0}, 727 {_M, 1, 0, 0, 1}, 728 {_M, 0, 1, 0, 1}, 729 {_M, 1, 1, 1, 1}, 730 {_M, _M, 0, _M - 1, 1}, 731 {_M, _M, 1, _M, 1}, 732 } { 733 test("Add", Add, a.x, a.y, a.c, a.z, a.cout) 734 test("Add symmetric", Add, a.y, a.x, a.c, a.z, a.cout) 735 test("Sub", Sub, a.z, a.x, a.c, a.y, a.cout) 736 test("Sub symmetric", Sub, a.z, a.y, a.c, a.x, a.cout) 737 // The above code can't test intrinsic implementation, because the passed function is not called directly. 738 // The following code uses a closure to test the intrinsic version in case the function is intrinsified. 739 test("Add intrinsic", func(x, y, c uint) (uint, uint) { return Add(x, y, c) }, a.x, a.y, a.c, a.z, a.cout) 740 test("Add intrinsic symmetric", func(x, y, c uint) (uint, uint) { return Add(x, y, c) }, a.y, a.x, a.c, a.z, a.cout) 741 test("Sub intrinsic", func(x, y, c uint) (uint, uint) { return Sub(x, y, c) }, a.z, a.x, a.c, a.y, a.cout) 742 test("Sub intrinsic symmetric", func(x, y, c uint) (uint, uint) { return Sub(x, y, c) }, a.z, a.y, a.c, a.x, a.cout) 743 744 } 745 } 746 747 func TestAddSubUint32(t *testing.T) { 748 test := func(msg string, f func(x, y, c uint32) (z, cout uint32), x, y, c, z, cout uint32) { 749 z1, cout1 := f(x, y, c) 750 if z1 != z || cout1 != cout { 751 t.Errorf("%s: got z:cout = %#x:%#x; want %#x:%#x", msg, z1, cout1, z, cout) 752 } 753 } 754 for _, a := range []struct{ x, y, c, z, cout uint32 }{ 755 {0, 0, 0, 0, 0}, 756 {0, 1, 0, 1, 0}, 757 {0, 0, 1, 1, 0}, 758 {0, 1, 1, 2, 0}, 759 {12345, 67890, 0, 80235, 0}, 760 {12345, 67890, 1, 80236, 0}, 761 {_M32, 1, 0, 0, 1}, 762 {_M32, 0, 1, 0, 1}, 763 {_M32, 1, 1, 1, 1}, 764 {_M32, _M32, 0, _M32 - 1, 1}, 765 {_M32, _M32, 1, _M32, 1}, 766 } { 767 test("Add32", Add32, a.x, a.y, a.c, a.z, a.cout) 768 test("Add32 symmetric", Add32, a.y, a.x, a.c, a.z, a.cout) 769 test("Sub32", Sub32, a.z, a.x, a.c, a.y, a.cout) 770 test("Sub32 symmetric", Sub32, a.z, a.y, a.c, a.x, a.cout) 771 } 772 } 773 774 func TestAddSubUint64(t *testing.T) { 775 test := func(msg string, f func(x, y, c uint64) (z, cout uint64), x, y, c, z, cout uint64) { 776 z1, cout1 := f(x, y, c) 777 if z1 != z || cout1 != cout { 778 t.Errorf("%s: got z:cout = %#x:%#x; want %#x:%#x", msg, z1, cout1, z, cout) 779 } 780 } 781 for _, a := range []struct{ x, y, c, z, cout uint64 }{ 782 {0, 0, 0, 0, 0}, 783 {0, 1, 0, 1, 0}, 784 {0, 0, 1, 1, 0}, 785 {0, 1, 1, 2, 0}, 786 {12345, 67890, 0, 80235, 0}, 787 {12345, 67890, 1, 80236, 0}, 788 {_M64, 1, 0, 0, 1}, 789 {_M64, 0, 1, 0, 1}, 790 {_M64, 1, 1, 1, 1}, 791 {_M64, _M64, 0, _M64 - 1, 1}, 792 {_M64, _M64, 1, _M64, 1}, 793 } { 794 test("Add64", Add64, a.x, a.y, a.c, a.z, a.cout) 795 test("Add64 symmetric", Add64, a.y, a.x, a.c, a.z, a.cout) 796 test("Sub64", Sub64, a.z, a.x, a.c, a.y, a.cout) 797 test("Sub64 symmetric", Sub64, a.z, a.y, a.c, a.x, a.cout) 798 // The above code can't test intrinsic implementation, because the passed function is not called directly. 799 // The following code uses a closure to test the intrinsic version in case the function is intrinsified. 800 test("Add64 intrinsic", func(x, y, c uint64) (uint64, uint64) { return Add64(x, y, c) }, a.x, a.y, a.c, a.z, a.cout) 801 test("Add64 intrinsic symmetric", func(x, y, c uint64) (uint64, uint64) { return Add64(x, y, c) }, a.y, a.x, a.c, a.z, a.cout) 802 test("Sub64 intrinsic", func(x, y, c uint64) (uint64, uint64) { return Sub64(x, y, c) }, a.z, a.x, a.c, a.y, a.cout) 803 test("Sub64 intrinsic symmetric", func(x, y, c uint64) (uint64, uint64) { return Sub64(x, y, c) }, a.z, a.y, a.c, a.x, a.cout) 804 } 805 } 806 807 func TestAdd64OverflowPanic(t *testing.T) { 808 // Test that 64-bit overflow panics fire correctly. 809 // These are designed to improve coverage of compiler intrinsics. 810 tests := []func(uint64, uint64) uint64{ 811 func(a, b uint64) uint64 { 812 x, c := Add64(a, b, 0) 813 if c > 0 { 814 assert.Throw("overflow") 815 return 0 816 } 817 return x 818 }, 819 func(a, b uint64) uint64 { 820 x, c := Add64(a, b, 0) 821 if c != 0 { 822 assert.Throw("overflow") 823 return 0 824 } 825 return x 826 }, 827 func(a, b uint64) uint64 { 828 x, c := Add64(a, b, 0) 829 if c == 1 { 830 assert.Throw("overflow") 831 return 0 832 } 833 return x 834 }, 835 func(a, b uint64) uint64 { 836 x, c := Add64(a, b, 0) 837 if c != 1 { 838 return x 839 } 840 assert.Throw("overflow") 841 return 0 842 }, 843 func(a, b uint64) uint64 { 844 x, c := Add64(a, b, 0) 845 if c == 0 { 846 return x 847 } 848 assert.Throw("overflow") 849 return 0 850 }, 851 } 852 for _, test := range tests { 853 shouldPanic := func(f func()) { 854 defer func() { 855 if err := recover(); err == nil { 856 t.Fatalf("expected panic") 857 } 858 }() 859 f() 860 } 861 862 // overflow 863 shouldPanic(func() { test(_M64, 1) }) 864 shouldPanic(func() { test(1, _M64) }) 865 shouldPanic(func() { test(_M64, _M64) }) 866 867 // no overflow 868 test(_M64, 0) 869 test(0, 0) 870 test(1, 1) 871 } 872 } 873 874 func TestSub64OverflowPanic(t *testing.T) { 875 // Test that 64-bit overflow panics fire correctly. 876 // These are designed to improve coverage of compiler intrinsics. 877 tests := []func(uint64, uint64) uint64{ 878 func(a, b uint64) uint64 { 879 x, c := Sub64(a, b, 0) 880 if c > 0 { 881 assert.Throw("overflow") 882 return 0 883 } 884 return x 885 }, 886 func(a, b uint64) uint64 { 887 x, c := Sub64(a, b, 0) 888 if c != 0 { 889 assert.Throw("overflow") 890 return 0 891 } 892 return x 893 }, 894 func(a, b uint64) uint64 { 895 x, c := Sub64(a, b, 0) 896 if c == 1 { 897 assert.Throw("overflow") 898 return 0 899 } 900 return x 901 }, 902 func(a, b uint64) uint64 { 903 x, c := Sub64(a, b, 0) 904 if c != 1 { 905 return x 906 } 907 assert.Throw("overflow") 908 return 0 909 }, 910 func(a, b uint64) uint64 { 911 x, c := Sub64(a, b, 0) 912 if c == 0 { 913 return x 914 } 915 assert.Throw("overflow") 916 return 0 917 }, 918 } 919 for _, test := range tests { 920 shouldPanic := func(f func()) { 921 defer func() { 922 if err := recover(); err == nil { 923 t.Fatalf("expected panic") 924 } 925 }() 926 f() 927 } 928 929 // overflow 930 shouldPanic(func() { test(0, 1) }) 931 shouldPanic(func() { test(1, _M64) }) 932 shouldPanic(func() { test(_M64-1, _M64) }) 933 934 // no overflow 935 test(_M64, 0) 936 test(0, 0) 937 test(1, 1) 938 } 939 } 940 941 func TestMulDiv(t *testing.T) { 942 testMul := func(msg string, f func(x, y uint) (hi, lo uint), x, y, hi, lo uint) { 943 hi1, lo1 := f(x, y) 944 if hi1 != hi || lo1 != lo { 945 t.Errorf("%s: got hi:lo = %#x:%#x; want %#x:%#x", msg, hi1, lo1, hi, lo) 946 } 947 } 948 testDiv := func(msg string, f func(hi, lo, y uint) (q, r uint), hi, lo, y, q, r uint) { 949 q1, r1 := f(hi, lo, y) 950 if q1 != q || r1 != r { 951 t.Errorf("%s: got q:r = %#x:%#x; want %#x:%#x", msg, q1, r1, q, r) 952 } 953 } 954 for _, a := range []struct { 955 x, y uint 956 hi, lo, r uint 957 }{ 958 {1 << (arch.UintBits - 1), 2, 1, 0, 1}, 959 {_M, _M, _M - 1, 1, 42}, 960 } { 961 testMul("Mul", Mul, a.x, a.y, a.hi, a.lo) 962 testMul("Mul symmetric", Mul, a.y, a.x, a.hi, a.lo) 963 testDiv("Div", Div, a.hi, a.lo+a.r, a.y, a.x, a.r) 964 testDiv("Div symmetric", Div, a.hi, a.lo+a.r, a.x, a.y, a.r) 965 // The above code can't test intrinsic implementation, because the passed function is not called directly. 966 // The following code uses a closure to test the intrinsic version in case the function is intrinsified. 967 testMul("Mul intrinsic", func(x, y uint) (uint, uint) { return Mul(x, y) }, a.x, a.y, a.hi, a.lo) 968 testMul("Mul intrinsic symmetric", func(x, y uint) (uint, uint) { return Mul(x, y) }, a.y, a.x, a.hi, a.lo) 969 testDiv("Div intrinsic", func(hi, lo, y uint) (uint, uint) { return Div(hi, lo, y) }, a.hi, a.lo+a.r, a.y, a.x, a.r) 970 testDiv("Div intrinsic symmetric", func(hi, lo, y uint) (uint, uint) { return Div(hi, lo, y) }, a.hi, a.lo+a.r, a.x, a.y, a.r) 971 } 972 } 973 974 func TestMulDiv32(t *testing.T) { 975 testMul := func(msg string, f func(x, y uint32) (hi, lo uint32), x, y, hi, lo uint32) { 976 hi1, lo1 := f(x, y) 977 if hi1 != hi || lo1 != lo { 978 t.Errorf("%s: got hi:lo = %#x:%#x; want %#x:%#x", msg, hi1, lo1, hi, lo) 979 } 980 } 981 testDiv := func(msg string, f func(hi, lo, y uint32) (q, r uint32), hi, lo, y, q, r uint32) { 982 q1, r1 := f(hi, lo, y) 983 if q1 != q || r1 != r { 984 t.Errorf("%s: got q:r = %#x:%#x; want %#x:%#x", msg, q1, r1, q, r) 985 } 986 } 987 for _, a := range []struct { 988 x, y uint32 989 hi, lo, r uint32 990 }{ 991 {1 << 31, 2, 1, 0, 1}, 992 {0xc47dfa8c, 50911, 0x98a4, 0x998587f4, 13}, 993 {_M32, _M32, _M32 - 1, 1, 42}, 994 } { 995 testMul("Mul32", Mul32, a.x, a.y, a.hi, a.lo) 996 testMul("Mul32 symmetric", Mul32, a.y, a.x, a.hi, a.lo) 997 testDiv("Div32", Div32, a.hi, a.lo+a.r, a.y, a.x, a.r) 998 testDiv("Div32 symmetric", Div32, a.hi, a.lo+a.r, a.x, a.y, a.r) 999 } 1000 } 1001 1002 func TestMulDiv64(t *testing.T) { 1003 testMul := func(msg string, f func(x, y uint64) (hi, lo uint64), x, y, hi, lo uint64) { 1004 hi1, lo1 := f(x, y) 1005 if hi1 != hi || lo1 != lo { 1006 t.Errorf("%s: got hi:lo = %#x:%#x; want %#x:%#x", msg, hi1, lo1, hi, lo) 1007 } 1008 } 1009 testDiv := func(msg string, f func(hi, lo, y uint64) (q, r uint64), hi, lo, y, q, r uint64) { 1010 q1, r1 := f(hi, lo, y) 1011 if q1 != q || r1 != r { 1012 t.Errorf("%s: got q:r = %#x:%#x; want %#x:%#x", msg, q1, r1, q, r) 1013 } 1014 } 1015 for _, a := range []struct { 1016 x, y uint64 1017 hi, lo, r uint64 1018 }{ 1019 {1 << 63, 2, 1, 0, 1}, 1020 {0x3626229738a3b9, 0xd8988a9f1cc4a61, 0x2dd0712657fe8, 0x9dd6a3364c358319, 13}, 1021 {_M64, _M64, _M64 - 1, 1, 42}, 1022 } { 1023 testMul("Mul64", Mul64, a.x, a.y, a.hi, a.lo) 1024 testMul("Mul64 symmetric", Mul64, a.y, a.x, a.hi, a.lo) 1025 testDiv("Div64", Div64, a.hi, a.lo+a.r, a.y, a.x, a.r) 1026 testDiv("Div64 symmetric", Div64, a.hi, a.lo+a.r, a.x, a.y, a.r) 1027 // The above code can't test intrinsic implementation, because the passed function is not called directly. 1028 // The following code uses a closure to test the intrinsic version in case the function is intrinsified. 1029 testMul("Mul64 intrinsic", func(x, y uint64) (uint64, uint64) { return Mul64(x, y) }, a.x, a.y, a.hi, a.lo) 1030 testMul("Mul64 intrinsic symmetric", func(x, y uint64) (uint64, uint64) { return Mul64(x, y) }, a.y, a.x, a.hi, a.lo) 1031 testDiv("Div64 intrinsic", func(hi, lo, y uint64) (uint64, uint64) { return Div64(hi, lo, y) }, a.hi, a.lo+a.r, a.y, a.x, a.r) 1032 testDiv("Div64 intrinsic symmetric", func(hi, lo, y uint64) (uint64, uint64) { return Div64(hi, lo, y) }, a.hi, a.lo+a.r, a.x, a.y, a.r) 1033 } 1034 } 1035 1036 const ( 1037 divZeroError = "divide by zero" 1038 overflowError = "overflow" 1039 ) 1040 1041 func TestDivPanicOverflow(t *testing.T) { 1042 // Expect a panic 1043 defer func() { 1044 if err := recover(); err == nil { 1045 t.Error("Div should have panicked when y<=hi") 1046 } else if e, ok := err.(runtime.Error); !ok || e.Error() != overflowError { 1047 t.Errorf("Div expected panic: %q, got: %q ", overflowError, e.Error()) 1048 } 1049 }() 1050 q, r := Div(1, 0, 1) 1051 t.Errorf("undefined q, r = %v, %v calculated when Div should have panicked", q, r) 1052 } 1053 1054 func TestDiv32PanicOverflow(t *testing.T) { 1055 // Expect a panic 1056 defer func() { 1057 if err := recover(); err == nil { 1058 t.Error("Div32 should have panicked when y<=hi") 1059 } else if e, ok := err.(runtime.Error); !ok || e.Error() != overflowError { 1060 t.Errorf("Div32 expected panic: %q, got: %q ", overflowError, e.Error()) 1061 } 1062 }() 1063 q, r := Div32(1, 0, 1) 1064 t.Errorf("undefined q, r = %v, %v calculated when Div32 should have panicked", q, r) 1065 } 1066 1067 func TestDiv64PanicOverflow(t *testing.T) { 1068 // Expect a panic 1069 defer func() { 1070 if err := recover(); err == nil { 1071 t.Error("Div64 should have panicked when y<=hi") 1072 } else if e, ok := err.(runtime.Error); !ok || e.Error() != overflowError { 1073 t.Errorf("Div64 expected panic: %q, got: %q ", overflowError, e.Error()) 1074 } 1075 }() 1076 q, r := Div64(1, 0, 1) 1077 t.Errorf("undefined q, r = %v, %v calculated when Div64 should have panicked", q, r) 1078 } 1079 1080 func TestDivPanicZero(t *testing.T) { 1081 // Expect a panic 1082 defer func() { 1083 if err := recover(); err == nil { 1084 t.Error("Div should have panicked when y==0") 1085 } else if e, ok := err.(runtime.Error); !ok || e.Error() != divZeroError { 1086 t.Errorf("Div expected panic: %q, got: %q ", divZeroError, e.Error()) 1087 } 1088 }() 1089 q, r := Div(1, 1, 0) 1090 t.Errorf("undefined q, r = %v, %v calculated when Div should have panicked", q, r) 1091 } 1092 1093 func TestDiv32PanicZero(t *testing.T) { 1094 // Expect a panic 1095 defer func() { 1096 if err := recover(); err == nil { 1097 t.Error("Div32 should have panicked when y==0") 1098 } else if e, ok := err.(runtime.Error); !ok || e.Error() != divZeroError { 1099 t.Errorf("Div32 expected panic: %q, got: %q ", divZeroError, e.Error()) 1100 } 1101 }() 1102 q, r := Div32(1, 1, 0) 1103 t.Errorf("undefined q, r = %v, %v calculated when Div32 should have panicked", q, r) 1104 } 1105 1106 func TestDiv64PanicZero(t *testing.T) { 1107 // Expect a panic 1108 defer func() { 1109 if err := recover(); err == nil { 1110 t.Error("Div64 should have panicked when y==0") 1111 } else if e, ok := err.(runtime.Error); !ok || e.Error() != divZeroError { 1112 t.Errorf("Div64 expected panic: %q, got: %q ", divZeroError, e.Error()) 1113 } 1114 }() 1115 q, r := Div64(1, 1, 0) 1116 t.Errorf("undefined q, r = %v, %v calculated when Div64 should have panicked", q, r) 1117 } 1118 1119 func TestRem32(t *testing.T) { 1120 // Sanity check: for non-oveflowing dividends, the result is the 1121 // same as the rem returned by Div32 1122 hi, lo, y := uint32(510510), uint32(9699690), uint32(510510+1) // ensure hi < y 1123 for i := 0; i < 1000; i++ { 1124 r := Rem32(hi, lo, y) 1125 _, r2 := Div32(hi, lo, y) 1126 if r != r2 { 1127 t.Errorf("Rem32(%v, %v, %v) returned %v, but Div32 returned rem %v", hi, lo, y, r, r2) 1128 } 1129 y += 13 1130 } 1131 } 1132 1133 func TestRem32Overflow(t *testing.T) { 1134 // To trigger a quotient overflow, we need y <= hi 1135 hi, lo, y := uint32(510510), uint32(9699690), uint32(7) 1136 for i := 0; i < 1000; i++ { 1137 r := Rem32(hi, lo, y) 1138 _, r2 := Div64(0, uint64(hi)<<32|uint64(lo), uint64(y)) 1139 if r != uint32(r2) { 1140 t.Errorf("Rem32(%v, %v, %v) returned %v, but Div64 returned rem %v", hi, lo, y, r, r2) 1141 } 1142 y += 13 1143 } 1144 } 1145 1146 func TestRem64(t *testing.T) { 1147 // Sanity check: for non-oveflowing dividends, the result is the 1148 // same as the rem returned by Div64 1149 hi, lo, y := uint64(510510), uint64(9699690), uint64(510510+1) // ensure hi < y 1150 for i := 0; i < 1000; i++ { 1151 r := Rem64(hi, lo, y) 1152 _, r2 := Div64(hi, lo, y) 1153 if r != r2 { 1154 t.Errorf("Rem64(%v, %v, %v) returned %v, but Div64 returned rem %v", hi, lo, y, r, r2) 1155 } 1156 y += 13 1157 } 1158 } 1159 1160 func TestRem64Overflow(t *testing.T) { 1161 Rem64Tests := []struct { 1162 hi, lo, y uint64 1163 rem uint64 1164 }{ 1165 // Testcases computed using Python 3, as: 1166 // >>> hi = 42; lo = 1119; y = 42 1167 // >>> ((hi<<64)+lo) % y 1168 {42, 1119, 42, 27}, 1169 {42, 1119, 38, 9}, 1170 {42, 1119, 26, 23}, 1171 {469, 0, 467, 271}, 1172 {469, 0, 113, 58}, 1173 {111111, 111111, 1171, 803}, 1174 {3968194946088682615, 3192705705065114702, 1000037, 56067}, 1175 } 1176 1177 for _, rt := range Rem64Tests { 1178 if rt.hi < rt.y { 1179 t.Fatalf("Rem64(%v, %v, %v) is not a test with quo overflow", rt.hi, rt.lo, rt.y) 1180 } 1181 rem := Rem64(rt.hi, rt.lo, rt.y) 1182 if rem != rt.rem { 1183 t.Errorf("Rem64(%v, %v, %v) returned %v, wanted %v", 1184 rt.hi, rt.lo, rt.y, rem, rt.rem) 1185 } 1186 } 1187 } 1188 1189 func BenchmarkAdd(b *testing.B) { 1190 var z, c uint 1191 for i := 0; i < b.N; i++ { 1192 z, c = Add(uint(Input), uint(i), c) 1193 } 1194 Output = int(z + c) 1195 } 1196 1197 func BenchmarkAdd32(b *testing.B) { 1198 var z, c uint32 1199 for i := 0; i < b.N; i++ { 1200 z, c = Add32(uint32(Input), uint32(i), c) 1201 } 1202 Output = int(z + c) 1203 } 1204 1205 func BenchmarkAdd64(b *testing.B) { 1206 var z, c uint64 1207 for i := 0; i < b.N; i++ { 1208 z, c = Add64(uint64(Input), uint64(i), c) 1209 } 1210 Output = int(z + c) 1211 } 1212 1213 func BenchmarkAdd64multiple(b *testing.B) { 1214 var z0 = uint64(Input) 1215 var z1 = uint64(Input) 1216 var z2 = uint64(Input) 1217 var z3 = uint64(Input) 1218 for i := 0; i < b.N; i++ { 1219 var c uint64 1220 z0, c = Add64(z0, uint64(i), c) 1221 z1, c = Add64(z1, uint64(i), c) 1222 z2, c = Add64(z2, uint64(i), c) 1223 z3, _ = Add64(z3, uint64(i), c) 1224 } 1225 Output = int(z0 + z1 + z2 + z3) 1226 } 1227 1228 func BenchmarkSub(b *testing.B) { 1229 var z, c uint 1230 for i := 0; i < b.N; i++ { 1231 z, c = Sub(uint(Input), uint(i), c) 1232 } 1233 Output = int(z + c) 1234 } 1235 1236 func BenchmarkSub32(b *testing.B) { 1237 var z, c uint32 1238 for i := 0; i < b.N; i++ { 1239 z, c = Sub32(uint32(Input), uint32(i), c) 1240 } 1241 Output = int(z + c) 1242 } 1243 1244 func BenchmarkSub64(b *testing.B) { 1245 var z, c uint64 1246 for i := 0; i < b.N; i++ { 1247 z, c = Sub64(uint64(Input), uint64(i), c) 1248 } 1249 Output = int(z + c) 1250 } 1251 1252 func BenchmarkSub64multiple(b *testing.B) { 1253 var z0 = uint64(Input) 1254 var z1 = uint64(Input) 1255 var z2 = uint64(Input) 1256 var z3 = uint64(Input) 1257 for i := 0; i < b.N; i++ { 1258 var c uint64 1259 z0, c = Sub64(z0, uint64(i), c) 1260 z1, c = Sub64(z1, uint64(i), c) 1261 z2, c = Sub64(z2, uint64(i), c) 1262 z3, _ = Sub64(z3, uint64(i), c) 1263 } 1264 Output = int(z0 + z1 + z2 + z3) 1265 } 1266 1267 func BenchmarkMul(b *testing.B) { 1268 var hi, lo uint 1269 for i := 0; i < b.N; i++ { 1270 hi, lo = Mul(uint(Input), uint(i)) 1271 } 1272 Output = int(hi + lo) 1273 } 1274 1275 func BenchmarkMul32(b *testing.B) { 1276 var hi, lo uint32 1277 for i := 0; i < b.N; i++ { 1278 hi, lo = Mul32(uint32(Input), uint32(i)) 1279 } 1280 Output = int(hi + lo) 1281 } 1282 1283 func BenchmarkMul64(b *testing.B) { 1284 var hi, lo uint64 1285 for i := 0; i < b.N; i++ { 1286 hi, lo = Mul64(uint64(Input), uint64(i)) 1287 } 1288 Output = int(hi + lo) 1289 } 1290 1291 func BenchmarkDiv(b *testing.B) { 1292 var q, r uint 1293 for i := 0; i < b.N; i++ { 1294 q, r = Div(1, uint(i), uint(Input)) 1295 } 1296 Output = int(q + r) 1297 } 1298 1299 func BenchmarkDiv32(b *testing.B) { 1300 var q, r uint32 1301 for i := 0; i < b.N; i++ { 1302 q, r = Div32(1, uint32(i), uint32(Input)) 1303 } 1304 Output = int(q + r) 1305 } 1306 1307 func BenchmarkDiv64(b *testing.B) { 1308 var q, r uint64 1309 for i := 0; i < b.N; i++ { 1310 q, r = Div64(1, uint64(i), uint64(Input)) 1311 } 1312 Output = int(q + r) 1313 } 1314 1315 // ---------------------------------------------------------------------------- 1316 // Testing support 1317 1318 type entry = struct { 1319 nlz, ntz, pop int 1320 } 1321 1322 // tab contains results for all uint8 values 1323 var tab [256]entry 1324 1325 func init() { 1326 tab[0] = entry{8, 8, 0} 1327 for i := 1; i < len(tab); i++ { 1328 // nlz 1329 x := i // x != 0 1330 n := 0 1331 for x&0x80 == 0 { 1332 n++ 1333 x <<= 1 1334 } 1335 tab[i].nlz = n 1336 1337 // ntz 1338 x = i // x != 0 1339 n = 0 1340 for x&1 == 0 { 1341 n++ 1342 x >>= 1 1343 } 1344 tab[i].ntz = n 1345 1346 // pop 1347 x = i // x != 0 1348 n = 0 1349 for x != 0 { 1350 n += int(x & 1) 1351 x >>= 1 1352 } 1353 tab[i].pop = n 1354 } 1355 }