github.com/hlts2/go@v0.0.0-20170904000733-812b34efaed8/src/runtime/internal/atomic/atomic_test.go

// Copyright 2015 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package atomic_test

import (
	"runtime"
	"runtime/internal/atomic"
	"runtime/internal/sys"
	"testing"
	"unsafe"
)

func runParallel(N, iter int, f func()) {
	defer runtime.GOMAXPROCS(runtime.GOMAXPROCS(int(N)))
	done := make(chan bool)
	for i := 0; i < N; i++ {
		go func() {
			for j := 0; j < iter; j++ {
				f()
			}
			done <- true
		}()
	}
	for i := 0; i < N; i++ {
		<-done
	}
}

func TestXadduintptr(t *testing.T) {
	const N = 20
	const iter = 100000
	inc := uintptr(100)
	total := uintptr(0)
	runParallel(N, iter, func() {
		atomic.Xadduintptr(&total, inc)
	})
	if want := uintptr(N * iter * inc); want != total {
		t.Fatalf("xadduintptr error, want %d, got %d", want, total)
	}
	total = 0
	runParallel(N, iter, func() {
		atomic.Xadduintptr(&total, inc)
		atomic.Xadduintptr(&total, uintptr(-int64(inc)))
	})
	if total != 0 {
		t.Fatalf("xadduintptr total error, want %d, got %d", 0, total)
	}
}

// Tests that xadduintptr correctly updates 64-bit values. The place where
// we actually do so is mstats.go, functions mSysStat{Inc,Dec}.
func TestXadduintptrOnUint64(t *testing.T) {
	if sys.BigEndian != 0 {
		// On big endian architectures, we never use xadduintptr to update
		// 64-bit values and hence we skip the test. (Note that functions
		// mSysStat{Inc,Dec} in mstats.go have explicit checks for
		// big-endianness.)
		t.Skip("skip xadduintptr on big endian architecture")
	}
	const inc = 100
	val := uint64(0)
	atomic.Xadduintptr((*uintptr)(unsafe.Pointer(&val)), inc)
	if inc != val {
		t.Fatalf("xadduintptr should increase lower-order bits, want %d, got %d", inc, val)
	}
}

func shouldPanic(t *testing.T, name string, f func()) {
	defer func() {
		if recover() == nil {
			t.Errorf("%s did not panic", name)
		}
	}()
	f()
}

// Variant of sync/atomic's TestUnaligned64:
func TestUnaligned64(t *testing.T) {
	// Unaligned 64-bit atomics on 32-bit systems are
	// a continual source of pain. Test that on 32-bit systems they crash
	// instead of failing silently.

	switch runtime.GOARCH {
	default:
		if unsafe.Sizeof(int(0)) != 4 {
			t.Skip("test only runs on 32-bit systems")
		}
	case "amd64p32":
		// amd64p32 can handle unaligned atomics.
		t.Skipf("test not needed on %v", runtime.GOARCH)
	}

	x := make([]uint32, 4)
	up64 := (*uint64)(unsafe.Pointer(&x[1])) // misaligned
	p64 := (*int64)(unsafe.Pointer(&x[1]))   // misaligned

	shouldPanic(t, "Load64", func() { atomic.Load64(up64) })
	shouldPanic(t, "Loadint64", func() { atomic.Loadint64(p64) })
	shouldPanic(t, "Store64", func() { atomic.Store64(up64, 0) })
	shouldPanic(t, "Xadd64", func() { atomic.Xadd64(up64, 1) })
	shouldPanic(t, "Xchg64", func() { atomic.Xchg64(up64, 1) })
	shouldPanic(t, "Cas64", func() { atomic.Cas64(up64, 1, 2) })
}
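
// The helper below is not part of the original test file; it is a minimal,
// hypothetical sketch of the little-endian pattern that the comment on
// TestXadduintptrOnUint64 refers to (mSysStat{Inc,Dec} in mstats.go). On a
// little-endian machine the low-order half of a uint64 is its first word, so
// a uintptr-wide add through an unsafe cast updates the 64-bit counter
// correctly as long as the counter's value fits in a uintptr. The name
// sysStatIncSketch is invented here for illustration only.
func sysStatIncSketch(stat *uint64, n uintptr) {
	if sys.BigEndian != 0 {
		// Big endian: the first word is the high-order half, so a
		// uintptr-wide add would corrupt the value; use a full 64-bit add.
		atomic.Xadd64(stat, int64(n))
		return
	}
	// Little endian: adding to the first (low-order) word updates the whole
	// 64-bit counter as long as the sum does not overflow a uintptr, which
	// is the behavior TestXadduintptrOnUint64 exercises.
	atomic.Xadduintptr((*uintptr)(unsafe.Pointer(stat)), n)
}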