github.com/xushiwei/go@v0.0.0-20130601165731-2b9d83f45bc9/src/pkg/sync/atomic/atomic_test.go

// Copyright 2011 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package atomic_test

import (
	"runtime"
	. "sync/atomic"
	"testing"
	"unsafe"
)

// Tests of correct behavior, without contention.
// (Does the function work as advertised?)
//
// Test that the Add functions add correctly.
// Test that the CompareAndSwap functions actually
// do the comparison and the swap correctly.
//
// The loop over power-of-two values is meant to
// ensure that the operations apply to the full word size.
// The struct fields x.before and x.after check that the
// operations do not extend past the full word size.

const (
	magic32 = 0xdedbeef
	magic64 = 0xdeddeadbeefbeef
)

// Do the 64-bit functions panic? If so, don't bother testing.
var test64err = func() (err interface{}) {
	defer func() {
		err = recover()
	}()
	var x int64
	AddInt64(&x, 1)
	return nil
}()

func TestAddInt32(t *testing.T) {
	var x struct {
		before int32
		i      int32
		after  int32
	}
	x.before = magic32
	x.after = magic32
	var j int32
	for delta := int32(1); delta+delta > delta; delta += delta {
		k := AddInt32(&x.i, delta)
		j += delta
		if x.i != j || k != j {
			t.Fatalf("delta=%d i=%d j=%d k=%d", delta, x.i, j, k)
		}
	}
	if x.before != magic32 || x.after != magic32 {
		t.Fatalf("wrong magic: %#x _ %#x != %#x _ %#x", x.before, x.after, magic32, magic32)
	}
}

func TestAddUint32(t *testing.T) {
	var x struct {
		before uint32
		i      uint32
		after  uint32
	}
	x.before = magic32
	x.after = magic32
	var j uint32
	for delta := uint32(1); delta+delta > delta; delta += delta {
		k := AddUint32(&x.i, delta)
		j += delta
		if x.i != j || k != j {
			t.Fatalf("delta=%d i=%d j=%d k=%d", delta, x.i, j, k)
		}
	}
	if x.before != magic32 || x.after != magic32 {
		t.Fatalf("wrong magic: %#x _ %#x != %#x _ %#x", x.before, x.after, magic32, magic32)
	}
}

func TestAddInt64(t *testing.T) {
	if test64err != nil {
		t.Skipf("Skipping 64-bit tests: %v", test64err)
	}
	var x struct {
		before int64
		i      int64
		after  int64
	}
	x.before = magic64
	x.after = magic64
	var j int64
	for delta := int64(1); delta+delta > delta; delta += delta {
		k := AddInt64(&x.i, delta)
		j += delta
		if x.i != j || k != j {
			t.Fatalf("delta=%d i=%d j=%d k=%d", delta, x.i, j, k)
		}
	}
	if x.before != magic64 || x.after != magic64 {
		t.Fatalf("wrong magic: %#x _ %#x != %#x _ %#x", x.before, x.after, int64(magic64), int64(magic64))
	}
}

func TestAddUint64(t *testing.T) {
	if test64err != nil {
		t.Skipf("Skipping 64-bit tests: %v", test64err)
	}
	var x struct {
		before uint64
		i      uint64
		after  uint64
	}
	x.before = magic64
	x.after = magic64
	var j uint64
	for delta := uint64(1); delta+delta > delta; delta += delta {
		k := AddUint64(&x.i, delta)
		j += delta
		if x.i != j || k != j {
			t.Fatalf("delta=%d i=%d j=%d k=%d", delta, x.i, j, k)
		}
	}
	if x.before != magic64 || x.after != magic64 {
		t.Fatalf("wrong magic: %#x _ %#x != %#x _ %#x", x.before, x.after, uint64(magic64), uint64(magic64))
	}
}

func TestAddUintptr(t *testing.T) {
	var x struct {
		before uintptr
		i      uintptr
		after  uintptr
	}
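	// Note (added comment): magic64 does not fit in a 32-bit word; the uintptr
	// conversion below truncates it, so magicptr is a full-word sentinel on
	// both 32- and 64-bit systems.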
	var m uint64 = magic64
	magicptr := uintptr(m)
	x.before = magicptr
	x.after = magicptr
	var j uintptr
	for delta := uintptr(1); delta+delta > delta; delta += delta {
		k := AddUintptr(&x.i, delta)
		j += delta
		if x.i != j || k != j {
			t.Fatalf("delta=%d i=%d j=%d k=%d", delta, x.i, j, k)
		}
	}
	if x.before != magicptr || x.after != magicptr {
		t.Fatalf("wrong magic: %#x _ %#x != %#x _ %#x", x.before, x.after, magicptr, magicptr)
	}
}

func TestCompareAndSwapInt32(t *testing.T) {
	var x struct {
		before int32
		i      int32
		after  int32
	}
	x.before = magic32
	x.after = magic32
	for val := int32(1); val+val > val; val += val {
		x.i = val
		if !CompareAndSwapInt32(&x.i, val, val+1) {
			t.Fatalf("should have swapped %#x %#x", val, val+1)
		}
		if x.i != val+1 {
			t.Fatalf("wrong x.i after swap: x.i=%#x val+1=%#x", x.i, val+1)
		}
		x.i = val + 1
		if CompareAndSwapInt32(&x.i, val, val+2) {
			t.Fatalf("should not have swapped %#x %#x", val, val+2)
		}
		if x.i != val+1 {
			t.Fatalf("wrong x.i after swap: x.i=%#x val+1=%#x", x.i, val+1)
		}
	}
	if x.before != magic32 || x.after != magic32 {
		t.Fatalf("wrong magic: %#x _ %#x != %#x _ %#x", x.before, x.after, magic32, magic32)
	}
}

func TestCompareAndSwapUint32(t *testing.T) {
	var x struct {
		before uint32
		i      uint32
		after  uint32
	}
	x.before = magic32
	x.after = magic32
	for val := uint32(1); val+val > val; val += val {
		x.i = val
		if !CompareAndSwapUint32(&x.i, val, val+1) {
			t.Fatalf("should have swapped %#x %#x", val, val+1)
		}
		if x.i != val+1 {
			t.Fatalf("wrong x.i after swap: x.i=%#x val+1=%#x", x.i, val+1)
		}
		x.i = val + 1
		if CompareAndSwapUint32(&x.i, val, val+2) {
			t.Fatalf("should not have swapped %#x %#x", val, val+2)
		}
		if x.i != val+1 {
			t.Fatalf("wrong x.i after swap: x.i=%#x val+1=%#x", x.i, val+1)
		}
	}
	if x.before != magic32 || x.after != magic32 {
		t.Fatalf("wrong magic: %#x _ %#x != %#x _ %#x", x.before, x.after, magic32, magic32)
	}
}

func TestCompareAndSwapInt64(t *testing.T) {
	if test64err != nil {
		t.Skipf("Skipping 64-bit tests: %v", test64err)
	}
	var x struct {
		before int64
		i      int64
		after  int64
	}
	x.before = magic64
	x.after = magic64
	for val := int64(1); val+val > val; val += val {
		x.i = val
		if !CompareAndSwapInt64(&x.i, val, val+1) {
			t.Fatalf("should have swapped %#x %#x", val, val+1)
		}
		if x.i != val+1 {
			t.Fatalf("wrong x.i after swap: x.i=%#x val+1=%#x", x.i, val+1)
		}
		x.i = val + 1
		if CompareAndSwapInt64(&x.i, val, val+2) {
			t.Fatalf("should not have swapped %#x %#x", val, val+2)
		}
		if x.i != val+1 {
			t.Fatalf("wrong x.i after swap: x.i=%#x val+1=%#x", x.i, val+1)
		}
	}
	if x.before != magic64 || x.after != magic64 {
		t.Fatalf("wrong magic: %#x _ %#x != %#x _ %#x", x.before, x.after, uint64(magic64), uint64(magic64))
	}
}

func TestCompareAndSwapUint64(t *testing.T) {
	if test64err != nil {
		t.Skipf("Skipping 64-bit tests: %v", test64err)
	}
	var x struct {
		before uint64
		i      uint64
		after  uint64
	}
	x.before = magic64
	x.after = magic64
	for val := uint64(1); val+val > val; val += val {
		x.i = val
		if !CompareAndSwapUint64(&x.i, val, val+1) {
			t.Fatalf("should have swapped %#x %#x", val, val+1)
		}
		if x.i != val+1 {
			t.Fatalf("wrong x.i after swap: x.i=%#x val+1=%#x", x.i, val+1)
		}
		x.i = val + 1
		if CompareAndSwapUint64(&x.i, val, val+2) {
			t.Fatalf("should not have swapped %#x %#x", val, val+2)
		}
		if x.i != val+1 {
			t.Fatalf("wrong x.i after swap: x.i=%#x val+1=%#x", x.i, val+1)
		}
	}
	if x.before != magic64 || x.after != magic64 {
		t.Fatalf("wrong magic: %#x _ %#x != %#x _ %#x", x.before, x.after, uint64(magic64), uint64(magic64))
	}
}

func TestCompareAndSwapUintptr(t *testing.T) {
	var x struct {
		before uintptr
		i      uintptr
		after  uintptr
	}
	var m uint64 = magic64
	magicptr := uintptr(m)
	x.before = magicptr
	x.after = magicptr
	for val := uintptr(1); val+val > val; val += val {
		x.i = val
		if !CompareAndSwapUintptr(&x.i, val, val+1) {
			t.Fatalf("should have swapped %#x %#x", val, val+1)
		}
		if x.i != val+1 {
			t.Fatalf("wrong x.i after swap: x.i=%#x val+1=%#x", x.i, val+1)
		}
		x.i = val + 1
		if CompareAndSwapUintptr(&x.i, val, val+2) {
			t.Fatalf("should not have swapped %#x %#x", val, val+2)
		}
		if x.i != val+1 {
			t.Fatalf("wrong x.i after swap: x.i=%#x val+1=%#x", x.i, val+1)
		}
	}
	if x.before != magicptr || x.after != magicptr {
		t.Fatalf("wrong magic: %#x _ %#x != %#x _ %#x", x.before, x.after, magicptr, magicptr)
	}
}

func TestCompareAndSwapPointer(t *testing.T) {
	var x struct {
		before uintptr
		i      unsafe.Pointer
		after  uintptr
	}
	var m uint64 = magic64
	magicptr := uintptr(m)
	x.before = magicptr
	x.after = magicptr
	for val := uintptr(1); val+val > val; val += val {
		x.i = unsafe.Pointer(val)
		if !CompareAndSwapPointer(&x.i, unsafe.Pointer(val), unsafe.Pointer(val+1)) {
			t.Fatalf("should have swapped %#x %#x", val, val+1)
		}
		if x.i != unsafe.Pointer(val+1) {
			t.Fatalf("wrong x.i after swap: x.i=%#x val+1=%#x", x.i, val+1)
		}
		x.i = unsafe.Pointer(val + 1)
		if CompareAndSwapPointer(&x.i, unsafe.Pointer(val), unsafe.Pointer(val+2)) {
			t.Fatalf("should not have swapped %#x %#x", val, val+2)
		}
		if x.i != unsafe.Pointer(val+1) {
			t.Fatalf("wrong x.i after swap: x.i=%#x val+1=%#x", x.i, val+1)
		}
	}
	if x.before != magicptr || x.after != magicptr {
		t.Fatalf("wrong magic: %#x _ %#x != %#x _ %#x", x.before, x.after, magicptr, magicptr)
	}
}

func TestLoadInt32(t *testing.T) {
	var x struct {
		before int32
		i      int32
		after  int32
	}
	x.before = magic32
	x.after = magic32
	for delta := int32(1); delta+delta > delta; delta += delta {
		k := LoadInt32(&x.i)
		if k != x.i {
			t.Fatalf("delta=%d i=%d k=%d", delta, x.i, k)
		}
		x.i += delta
	}
	if x.before != magic32 || x.after != magic32 {
		t.Fatalf("wrong magic: %#x _ %#x != %#x _ %#x", x.before, x.after, magic32, magic32)
	}
}

func TestLoadUint32(t *testing.T) {
	var x struct {
		before uint32
		i      uint32
		after  uint32
	}
	x.before = magic32
	x.after = magic32
	for delta := uint32(1); delta+delta > delta; delta += delta {
		k := LoadUint32(&x.i)
		if k != x.i {
			t.Fatalf("delta=%d i=%d k=%d", delta, x.i, k)
		}
		x.i += delta
	}
	if x.before != magic32 || x.after != magic32 {
		t.Fatalf("wrong magic: %#x _ %#x != %#x _ %#x", x.before, x.after, magic32, magic32)
	}
}

func TestLoadInt64(t *testing.T) {
	if test64err != nil {
		t.Skipf("Skipping 64-bit tests: %v", test64err)
	}
	var x struct {
		before int64
		i      int64
		after  int64
	}
	x.before = magic64
	x.after = magic64
	for delta := int64(1); delta+delta > delta; delta += delta {
		k := LoadInt64(&x.i)
		if k != x.i {
			t.Fatalf("delta=%d i=%d k=%d", delta, x.i, k)
		}
		x.i += delta
	}
	if x.before != magic64 || x.after != magic64 {
		t.Fatalf("wrong magic: %#x _ %#x != %#x _ %#x", x.before, x.after, uint64(magic64), uint64(magic64))
	}
}

func TestLoadUint64(t *testing.T) {
	if test64err != nil {
		t.Skipf("Skipping 64-bit tests: %v", test64err)
	}
	var x struct {
		before uint64
		i      uint64
		after  uint64
	}
	x.before = magic64
	x.after = magic64
	for delta := uint64(1); delta+delta > delta; delta += delta {
		k := LoadUint64(&x.i)
		if k != x.i {
			t.Fatalf("delta=%d i=%d k=%d", delta, x.i, k)
		}
		x.i += delta
	}
	if x.before != magic64 || x.after != magic64 {
		t.Fatalf("wrong magic: %#x _ %#x != %#x _ %#x", x.before, x.after, uint64(magic64), uint64(magic64))
	}
}

func TestLoadUintptr(t *testing.T) {
	var x struct {
		before uintptr
		i      uintptr
		after  uintptr
	}
	var m uint64 = magic64
	magicptr := uintptr(m)
	x.before = magicptr
	x.after = magicptr
	for delta := uintptr(1); delta+delta > delta; delta += delta {
		k := LoadUintptr(&x.i)
		if k != x.i {
			t.Fatalf("delta=%d i=%d k=%d", delta, x.i, k)
		}
		x.i += delta
	}
	if x.before != magicptr || x.after != magicptr {
		t.Fatalf("wrong magic: %#x _ %#x != %#x _ %#x", x.before, x.after, magicptr, magicptr)
	}
}

func TestLoadPointer(t *testing.T) {
	var x struct {
		before uintptr
		i      unsafe.Pointer
		after  uintptr
	}
	var m uint64 = magic64
	magicptr := uintptr(m)
	x.before = magicptr
	x.after = magicptr
	for delta := uintptr(1); delta+delta > delta; delta += delta {
		k := LoadPointer(&x.i)
		if k != x.i {
			t.Fatalf("delta=%d i=%d k=%d", delta, x.i, k)
		}
		x.i = unsafe.Pointer(uintptr(x.i) + delta)
	}
	if x.before != magicptr || x.after != magicptr {
		t.Fatalf("wrong magic: %#x _ %#x != %#x _ %#x", x.before, x.after, magicptr, magicptr)
	}
}

func TestStoreInt32(t *testing.T) {
	var x struct {
		before int32
		i      int32
		after  int32
	}
	x.before = magic32
	x.after = magic32
	v := int32(0)
	for delta := int32(1); delta+delta > delta; delta += delta {
		StoreInt32(&x.i, v)
		if x.i != v {
			t.Fatalf("delta=%d i=%d v=%d", delta, x.i, v)
		}
		v += delta
	}
	if x.before != magic32 || x.after != magic32 {
		t.Fatalf("wrong magic: %#x _ %#x != %#x _ %#x", x.before, x.after, magic32, magic32)
	}
}

func TestStoreUint32(t *testing.T) {
	var x struct {
		before uint32
		i      uint32
		after  uint32
	}
	x.before = magic32
	x.after = magic32
	v := uint32(0)
	for delta := uint32(1); delta+delta > delta; delta += delta {
		StoreUint32(&x.i, v)
		if x.i != v {
			t.Fatalf("delta=%d i=%d v=%d", delta, x.i, v)
		}
		v += delta
	}
	if x.before != magic32 || x.after != magic32 {
		t.Fatalf("wrong magic: %#x _ %#x != %#x _ %#x", x.before, x.after, magic32, magic32)
	}
}

func TestStoreInt64(t *testing.T) {
	if test64err != nil {
		t.Skipf("Skipping 64-bit tests: %v", test64err)
	}
	var x struct {
		before int64
		i      int64
		after  int64
	}
	x.before = magic64
	x.after = magic64
	v := int64(0)
	for delta := int64(1); delta+delta > delta; delta += delta {
		StoreInt64(&x.i, v)
		if x.i != v {
			t.Fatalf("delta=%d i=%d v=%d", delta, x.i, v)
		}
		v += delta
	}
	if x.before != magic64 || x.after != magic64 {
		t.Fatalf("wrong magic: %#x _ %#x != %#x _ %#x", x.before, x.after, uint64(magic64), uint64(magic64))
	}
}

func TestStoreUint64(t *testing.T) {
	if test64err != nil {
		t.Skipf("Skipping 64-bit tests: %v", test64err)
	}
	var x struct {
		before uint64
		i      uint64
		after  uint64
	}
	x.before = magic64
	x.after = magic64
	v := uint64(0)
	for delta := uint64(1); delta+delta > delta; delta += delta {
		StoreUint64(&x.i, v)
		if x.i != v {
			t.Fatalf("delta=%d i=%d v=%d", delta, x.i, v)
		}
		v += delta
	}
	if x.before != magic64 || x.after != magic64 {
		t.Fatalf("wrong magic: %#x _ %#x != %#x _ %#x", x.before, x.after, uint64(magic64), uint64(magic64))
	}
}

func TestStoreUintptr(t *testing.T) {
	var x struct {
		before uintptr
		i      uintptr
		after  uintptr
	}
	var m uint64 = magic64
	magicptr := uintptr(m)
	x.before = magicptr
	x.after = magicptr
	v := uintptr(0)
	for delta := uintptr(1); delta+delta > delta; delta += delta {
		StoreUintptr(&x.i, v)
		if x.i != v {
			t.Fatalf("delta=%d i=%d v=%d", delta, x.i, v)
		}
		v += delta
	}
	if x.before != magicptr || x.after != magicptr {
		t.Fatalf("wrong magic: %#x _ %#x != %#x _ %#x", x.before, x.after, magicptr, magicptr)
	}
}

func TestStorePointer(t *testing.T) {
	var x struct {
		before uintptr
		i      unsafe.Pointer
		after  uintptr
	}
	var m uint64 = magic64
	magicptr := uintptr(m)
	x.before = magicptr
	x.after = magicptr
	v := unsafe.Pointer(uintptr(0))
	for delta := uintptr(1); delta+delta > delta; delta += delta {
		StorePointer(&x.i, unsafe.Pointer(v))
		if x.i != v {
			t.Fatalf("delta=%d i=%d v=%d", delta, x.i, v)
		}
		v = unsafe.Pointer(uintptr(v) + delta)
	}
	if x.before != magicptr || x.after != magicptr {
		t.Fatalf("wrong magic: %#x _ %#x != %#x _ %#x", x.before, x.after, magicptr, magicptr)
	}
}

// Tests of correct behavior, with contention.
// (Is the function atomic?)
//
// For each function, we write a "hammer" function that repeatedly
// uses the atomic operation to add 1 to a value. After running
// multiple hammers in parallel, check that we end with the correct
// total.

var hammer32 = []struct {
	name string
	f    func(*uint32, int)
}{
	{"AddInt32", hammerAddInt32},
	{"AddUint32", hammerAddUint32},
	{"AddUintptr", hammerAddUintptr32},
	{"CompareAndSwapInt32", hammerCompareAndSwapInt32},
	{"CompareAndSwapUint32", hammerCompareAndSwapUint32},
	{"CompareAndSwapUintptr", hammerCompareAndSwapUintptr32},
	{"CompareAndSwapPointer", hammerCompareAndSwapPointer32},
}

func init() {
	var v uint64 = 1 << 50
	if uintptr(v) != 0 {
		// 64-bit system; clear uintptr tests
		hammer32[2].f = nil
		hammer32[5].f = nil
		hammer32[6].f = nil
	}
}

func hammerAddInt32(uaddr *uint32, count int) {
	addr := (*int32)(unsafe.Pointer(uaddr))
	for i := 0; i < count; i++ {
		AddInt32(addr, 1)
	}
}

func hammerAddUint32(addr *uint32, count int) {
	for i := 0; i < count; i++ {
		AddUint32(addr, 1)
	}
}

func hammerAddUintptr32(uaddr *uint32, count int) {
	// only safe when uintptr is 32-bit.
	// not called on 64-bit systems.
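	// (Added comment) Reinterpreting *uint32 as *uintptr is valid here only
	// because both types are 4 bytes wide; the init above removes this hammer
	// from the table on 64-bit systems.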
	addr := (*uintptr)(unsafe.Pointer(uaddr))
	for i := 0; i < count; i++ {
		AddUintptr(addr, 1)
	}
}

func hammerCompareAndSwapInt32(uaddr *uint32, count int) {
	addr := (*int32)(unsafe.Pointer(uaddr))
	for i := 0; i < count; i++ {
		for {
			v := *addr
			if CompareAndSwapInt32(addr, v, v+1) {
				break
			}
		}
	}
}

func hammerCompareAndSwapUint32(addr *uint32, count int) {
	for i := 0; i < count; i++ {
		for {
			v := *addr
			if CompareAndSwapUint32(addr, v, v+1) {
				break
			}
		}
	}
}

func hammerCompareAndSwapUintptr32(uaddr *uint32, count int) {
	// only safe when uintptr is 32-bit.
	// not called on 64-bit systems.
	addr := (*uintptr)(unsafe.Pointer(uaddr))
	for i := 0; i < count; i++ {
		for {
			v := *addr
			if CompareAndSwapUintptr(addr, v, v+1) {
				break
			}
		}
	}
}

func hammerCompareAndSwapPointer32(uaddr *uint32, count int) {
	// only safe when uintptr is 32-bit.
	// not called on 64-bit systems.
	addr := (*unsafe.Pointer)(unsafe.Pointer(uaddr))
	for i := 0; i < count; i++ {
		for {
			v := *addr
			if CompareAndSwapPointer(addr, v, unsafe.Pointer(uintptr(v)+1)) {
				break
			}
		}
	}
}

func TestHammer32(t *testing.T) {
	const p = 4
	n := 100000
	if testing.Short() {
		n = 1000
	}
	defer runtime.GOMAXPROCS(runtime.GOMAXPROCS(p))

	for _, tt := range hammer32 {
		if tt.f == nil {
			continue
		}
		c := make(chan int)
		var val uint32
		for i := 0; i < p; i++ {
			go func() {
				tt.f(&val, n)
				c <- 1
			}()
		}
		for i := 0; i < p; i++ {
			<-c
		}
		if val != uint32(n)*p {
			t.Fatalf("%s: val=%d want %d", tt.name, val, n*p)
		}
	}
}

var hammer64 = []struct {
	name string
	f    func(*uint64, int)
}{
	{"AddInt64", hammerAddInt64},
	{"AddUint64", hammerAddUint64},
	{"AddUintptr", hammerAddUintptr64},
	{"CompareAndSwapInt64", hammerCompareAndSwapInt64},
	{"CompareAndSwapUint64", hammerCompareAndSwapUint64},
	{"CompareAndSwapUintptr", hammerCompareAndSwapUintptr64},
	{"CompareAndSwapPointer", hammerCompareAndSwapPointer64},
}

func init() {
	var v uint64 = 1 << 50
	if uintptr(v) == 0 {
		// 32-bit system; clear uintptr tests
		hammer64[2].f = nil
		hammer64[5].f = nil
		hammer64[6].f = nil
	}
}

func hammerAddInt64(uaddr *uint64, count int) {
	addr := (*int64)(unsafe.Pointer(uaddr))
	for i := 0; i < count; i++ {
		AddInt64(addr, 1)
	}
}

func hammerAddUint64(addr *uint64, count int) {
	for i := 0; i < count; i++ {
		AddUint64(addr, 1)
	}
}

func hammerAddUintptr64(uaddr *uint64, count int) {
	// only safe when uintptr is 64-bit.
	// not called on 32-bit systems.
	addr := (*uintptr)(unsafe.Pointer(uaddr))
	for i := 0; i < count; i++ {
		AddUintptr(addr, 1)
	}
}

func hammerCompareAndSwapInt64(uaddr *uint64, count int) {
	addr := (*int64)(unsafe.Pointer(uaddr))
	for i := 0; i < count; i++ {
		for {
			v := *addr
			if CompareAndSwapInt64(addr, v, v+1) {
				break
			}
		}
	}
}

func hammerCompareAndSwapUint64(addr *uint64, count int) {
	for i := 0; i < count; i++ {
		for {
			v := *addr
			if CompareAndSwapUint64(addr, v, v+1) {
				break
			}
		}
	}
}

func hammerCompareAndSwapUintptr64(uaddr *uint64, count int) {
	// only safe when uintptr is 64-bit.
	// not called on 32-bit systems.
	addr := (*uintptr)(unsafe.Pointer(uaddr))
	for i := 0; i < count; i++ {
		for {
			v := *addr
			if CompareAndSwapUintptr(addr, v, v+1) {
				break
			}
		}
	}
}

func hammerCompareAndSwapPointer64(uaddr *uint64, count int) {
	// only safe when uintptr is 64-bit.
	// not called on 32-bit systems.
	addr := (*unsafe.Pointer)(unsafe.Pointer(uaddr))
	for i := 0; i < count; i++ {
		for {
			v := *addr
			if CompareAndSwapPointer(addr, v, unsafe.Pointer(uintptr(v)+1)) {
				break
			}
		}
	}
}

func TestHammer64(t *testing.T) {
	if test64err != nil {
		t.Skipf("Skipping 64-bit tests: %v", test64err)
	}
	const p = 4
	n := 100000
	if testing.Short() {
		n = 1000
	}
	defer runtime.GOMAXPROCS(runtime.GOMAXPROCS(p))

	for _, tt := range hammer64 {
		if tt.f == nil {
			continue
		}
		c := make(chan int)
		var val uint64
		for i := 0; i < p; i++ {
			go func() {
				tt.f(&val, n)
				c <- 1
			}()
		}
		for i := 0; i < p; i++ {
			<-c
		}
		if val != uint64(n)*p {
			t.Fatalf("%s: val=%d want %d", tt.name, val, n*p)
		}
	}
}

func hammerStoreLoadInt32(t *testing.T, paddr unsafe.Pointer) {
	addr := (*int32)(paddr)
	v := LoadInt32(addr)
	vlo := v & ((1 << 16) - 1)
	vhi := v >> 16
	if vlo != vhi {
		t.Fatalf("Int32: %#x != %#x", vlo, vhi)
	}
	new := v + 1 + 1<<16
	if vlo == 1e4 {
		new = 0
	}
	StoreInt32(addr, new)
}

func hammerStoreLoadUint32(t *testing.T, paddr unsafe.Pointer) {
	addr := (*uint32)(paddr)
	v := LoadUint32(addr)
	vlo := v & ((1 << 16) - 1)
	vhi := v >> 16
	if vlo != vhi {
		t.Fatalf("Uint32: %#x != %#x", vlo, vhi)
	}
	new := v + 1 + 1<<16
	if vlo == 1e4 {
		new = 0
	}
	StoreUint32(addr, new)
}

func hammerStoreLoadInt64(t *testing.T, paddr unsafe.Pointer) {
	addr := (*int64)(paddr)
	v := LoadInt64(addr)
	vlo := v & ((1 << 32) - 1)
	vhi := v >> 32
	if vlo != vhi {
		t.Fatalf("Int64: %#x != %#x", vlo, vhi)
	}
	new := v + 1 + 1<<32
	StoreInt64(addr, new)
}

func hammerStoreLoadUint64(t *testing.T, paddr unsafe.Pointer) {
	addr := (*uint64)(paddr)
	v := LoadUint64(addr)
	vlo := v & ((1 << 32) - 1)
	vhi := v >> 32
	if vlo != vhi {
		t.Fatalf("Uint64: %#x != %#x", vlo, vhi)
	}
	new := v + 1 + 1<<32
	StoreUint64(addr, new)
}

func hammerStoreLoadUintptr(t *testing.T, paddr unsafe.Pointer) {
	addr := (*uintptr)(paddr)
	var test64 uint64 = 1 << 50
	arch32 := uintptr(test64) == 0
	v := LoadUintptr(addr)
	new := v
	if arch32 {
		vlo := v & ((1 << 16) - 1)
		vhi := v >> 16
		if vlo != vhi {
			t.Fatalf("Uintptr: %#x != %#x", vlo, vhi)
		}
		new = v + 1 + 1<<16
		if vlo == 1e4 {
			new = 0
		}
	} else {
		vlo := v & ((1 << 32) - 1)
		vhi := v >> 32
		if vlo != vhi {
			t.Fatalf("Uintptr: %#x != %#x", vlo, vhi)
		}
		inc := uint64(1 + 1<<32)
		new = v + uintptr(inc)
	}
	StoreUintptr(addr, new)
}

func hammerStoreLoadPointer(t *testing.T, paddr unsafe.Pointer) {
	addr := (*unsafe.Pointer)(paddr)
	var test64 uint64 = 1 << 50
	arch32 := uintptr(test64) == 0
	v := uintptr(LoadPointer(addr))
	new := v
	if arch32 {
		vlo := v & ((1 << 16) - 1)
		vhi := v >> 16
		if vlo != vhi {
			t.Fatalf("Pointer: %#x != %#x", vlo, vhi)
		}
		new = v + 1 + 1<<16
		if vlo == 1e4 {
			new = 0
		}
	} else {
		vlo := v & ((1 << 32) - 1)
		vhi := v >> 32
		if vlo != vhi {
			t.Fatalf("Pointer: %#x != %#x", vlo, vhi)
		}
		inc := uint64(1 + 1<<32)
		new = v + uintptr(inc)
	}
	StorePointer(addr, unsafe.Pointer(new))
}

func TestHammerStoreLoad(t *testing.T) {
	var tests []func(*testing.T, unsafe.Pointer)
	tests = append(tests, hammerStoreLoadInt32, hammerStoreLoadUint32,
		hammerStoreLoadUintptr, hammerStoreLoadPointer)
	if test64err == nil {
		tests = append(tests, hammerStoreLoadInt64, hammerStoreLoadUint64)
	}
	n := int(1e6)
	if testing.Short() {
		n = int(1e4)
	}
	const procs = 8
	defer runtime.GOMAXPROCS(runtime.GOMAXPROCS(procs))
	for _, tt := range tests {
		c := make(chan int)
		var val uint64
		for p := 0; p < procs; p++ {
			go func() {
				for i := 0; i < n; i++ {
					tt(t, unsafe.Pointer(&val))
				}
				c <- 1
			}()
		}
		for p := 0; p < procs; p++ {
			<-c
		}
	}
}

func TestStoreLoadSeqCst32(t *testing.T) {
	if runtime.NumCPU() == 1 {
		t.Skipf("Skipping test on %v processor machine", runtime.NumCPU())
	}
	defer runtime.GOMAXPROCS(runtime.GOMAXPROCS(4))
	N := int32(1e3)
	if testing.Short() {
		N = int32(1e2)
	}
	c := make(chan bool, 2)
	X := [2]int32{}
	ack := [2][3]int32{{-1, -1, -1}, {-1, -1, -1}}
	for p := 0; p < 2; p++ {
		go func(me int) {
			he := 1 - me
			for i := int32(1); i < N; i++ {
				StoreInt32(&X[me], i)
				my := LoadInt32(&X[he])
				StoreInt32(&ack[me][i%3], my)
				for w := 1; LoadInt32(&ack[he][i%3]) == -1; w++ {
					if w%1000 == 0 {
						runtime.Gosched()
					}
				}
				his := LoadInt32(&ack[he][i%3])
				if (my != i && my != i-1) || (his != i && his != i-1) {
					t.Fatalf("invalid values: %d/%d (%d)", my, his, i)
				}
				if my != i && his != i {
					t.Fatalf("store/load are not sequentially consistent: %d/%d (%d)", my, his, i)
				}
				StoreInt32(&ack[me][(i-1)%3], -1)
			}
			c <- true
		}(p)
	}
	<-c
	<-c
}

func TestStoreLoadSeqCst64(t *testing.T) {
	if runtime.NumCPU() == 1 {
		t.Skipf("Skipping test on %v processor machine", runtime.NumCPU())
	}
	if test64err != nil {
		t.Skipf("Skipping 64-bit tests: %v", test64err)
	}
	defer runtime.GOMAXPROCS(runtime.GOMAXPROCS(4))
	N := int64(1e3)
	if testing.Short() {
		N = int64(1e2)
	}
	c := make(chan bool, 2)
	X := [2]int64{}
	ack := [2][3]int64{{-1, -1, -1}, {-1, -1, -1}}
	for p := 0; p < 2; p++ {
		go func(me int) {
			he := 1 - me
			for i := int64(1); i < N; i++ {
				StoreInt64(&X[me], i)
				my := LoadInt64(&X[he])
				StoreInt64(&ack[me][i%3], my)
				for w := 1; LoadInt64(&ack[he][i%3]) == -1; w++ {
					if w%1000 == 0 {
						runtime.Gosched()
					}
				}
				his := LoadInt64(&ack[he][i%3])
				if (my != i && my != i-1) || (his != i && his != i-1) {
					t.Fatalf("invalid values: %d/%d (%d)", my, his, i)
				}
				if my != i && his != i {
					t.Fatalf("store/load are not sequentially consistent: %d/%d (%d)", my, his, i)
				}
				StoreInt64(&ack[me][(i-1)%3], -1)
			}
			c <- true
		}(p)
	}
	<-c
	<-c
}

func TestStoreLoadRelAcq32(t *testing.T) {
	if runtime.NumCPU() == 1 {
		t.Skipf("Skipping test on %v processor machine", runtime.NumCPU())
	}
	defer runtime.GOMAXPROCS(runtime.GOMAXPROCS(4))
	N := int32(1e3)
	if testing.Short() {
		N = int32(1e2)
	}
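	// (Added comment) The two goroutines alternate roles each iteration: the
	// writer fills data1/data2 and then publishes i via StoreInt32(&X.signal, i);
	// the reader spins on LoadInt32 until it sees i and then checks the data,
	// so a store or load reordered across the signal shows up as incorrect data.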
	c := make(chan bool, 2)
	type Data struct {
		signal int32
		pad1   [128]int8
		data1  int32
		pad2   [128]int8
		data2  float32
	}
	var X Data
	for p := int32(0); p < 2; p++ {
		go func(p int32) {
			for i := int32(1); i < N; i++ {
				if (i+p)%2 == 0 {
					X.data1 = i
					X.data2 = float32(i)
					StoreInt32(&X.signal, i)
				} else {
					for w := 1; LoadInt32(&X.signal) != i; w++ {
						if w%1000 == 0 {
							runtime.Gosched()
						}
					}
					d1 := X.data1
					d2 := X.data2
					if d1 != i || d2 != float32(i) {
						t.Fatalf("incorrect data: %d/%g (%d)", d1, d2, i)
					}
				}
			}
			c <- true
		}(p)
	}
	<-c
	<-c
}

func TestStoreLoadRelAcq64(t *testing.T) {
	if runtime.NumCPU() == 1 {
		t.Skipf("Skipping test on %v processor machine", runtime.NumCPU())
	}
	if test64err != nil {
		t.Skipf("Skipping 64-bit tests: %v", test64err)
	}
	defer runtime.GOMAXPROCS(runtime.GOMAXPROCS(4))
	N := int64(1e3)
	if testing.Short() {
		N = int64(1e2)
	}
	c := make(chan bool, 2)
	type Data struct {
		signal int64
		pad1   [128]int8
		data1  int64
		pad2   [128]int8
		data2  float64
	}
	var X Data
	for p := int64(0); p < 2; p++ {
		go func(p int64) {
			for i := int64(1); i < N; i++ {
				if (i+p)%2 == 0 {
					X.data1 = i
					X.data2 = float64(i)
					StoreInt64(&X.signal, i)
				} else {
					for w := 1; LoadInt64(&X.signal) != i; w++ {
						if w%1000 == 0 {
							runtime.Gosched()
						}
					}
					d1 := X.data1
					d2 := X.data2
					if d1 != i || d2 != float64(i) {
						t.Fatalf("incorrect data: %d/%g (%d)", d1, d2, i)
					}
				}
			}
			c <- true
		}(p)
	}
	<-c
	<-c
}

func shouldPanic(t *testing.T, name string, f func()) {
	defer func() {
		if recover() == nil {
			t.Errorf("%s did not panic", name)
		}
	}()
	f()
}

func TestUnaligned64(t *testing.T) {
	// Unaligned 64-bit atomics on 32-bit systems are
	// a continual source of pain. Test that on 32-bit systems they crash
	// instead of failing silently.
	if unsafe.Sizeof(int(0)) != 4 {
		t.Skip("test only runs on 32-bit systems")
	}

	x := make([]uint32, 4)
	p := (*uint64)(unsafe.Pointer(&x[1])) // misaligned

	shouldPanic(t, "LoadUint64", func() { LoadUint64(p) })
	shouldPanic(t, "StoreUint64", func() { StoreUint64(p, 1) })
	shouldPanic(t, "CompareAndSwapUint64", func() { CompareAndSwapUint64(p, 1, 2) })
	shouldPanic(t, "AddUint64", func() { AddUint64(p, 3) })
}