github.com/c12o16h1/go/src@v0.0.0-20200114212001-5a151c0f00ed/runtime/internal/atomic/atomic_test.go

// Copyright 2015 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package atomic_test

import (
	"runtime"
	"runtime/internal/atomic"
	"runtime/internal/sys"
	"testing"
	"unsafe"
)

func runParallel(N, iter int, f func()) {
	defer runtime.GOMAXPROCS(runtime.GOMAXPROCS(int(N)))
	done := make(chan bool)
	for i := 0; i < N; i++ {
		go func() {
			for j := 0; j < iter; j++ {
				f()
			}
			done <- true
		}()
	}
	for i := 0; i < N; i++ {
		<-done
	}
}

func TestXadduintptr(t *testing.T) {
	N := 20
	iter := 100000
	if testing.Short() {
		N = 10
		iter = 10000
	}
	inc := uintptr(100)
	total := uintptr(0)
	runParallel(N, iter, func() {
		atomic.Xadduintptr(&total, inc)
	})
	if want := uintptr(N*iter) * inc; want != total {
		t.Fatalf("xadduintptr error, want %d, got %d", want, total)
	}
	total = 0
	runParallel(N, iter, func() {
		atomic.Xadduintptr(&total, inc)
		atomic.Xadduintptr(&total, uintptr(-int64(inc)))
	})
	if total != 0 {
		t.Fatalf("xadduintptr total error, want %d, got %d", 0, total)
	}
}

// Tests that xadduintptr correctly updates 64-bit values. The place where
// we actually do so is mstats.go, functions mSysStat{Inc,Dec}.
func TestXadduintptrOnUint64(t *testing.T) {
	if sys.BigEndian {
		// On big endian architectures, we never use xadduintptr to update
		// 64-bit values and hence we skip the test. (Note that functions
		// mSysStat{Inc,Dec} in mstats.go have explicit checks for
		// big-endianness.)
		t.Skip("skip xadduintptr on big endian architecture")
	}
	const inc = 100
	val := uint64(0)
	atomic.Xadduintptr((*uintptr)(unsafe.Pointer(&val)), inc)
	if inc != val {
		t.Fatalf("xadduintptr should increase lower-order bits, want %d, got %d", inc, val)
	}
}

func shouldPanic(t *testing.T, name string, f func()) {
	defer func() {
		if recover() == nil {
			t.Errorf("%s did not panic", name)
		}
	}()
	f()
}

// Variant of sync/atomic's TestUnaligned64:
func TestUnaligned64(t *testing.T) {
	// Unaligned 64-bit atomics on 32-bit systems are
	// a continual source of pain. Test that on 32-bit systems they crash
	// instead of failing silently.

	if unsafe.Sizeof(int(0)) != 4 {
		t.Skip("test only runs on 32-bit systems")
	}

	x := make([]uint32, 4)
	u := unsafe.Pointer(uintptr(unsafe.Pointer(&x[0])) | 4) // force alignment to 4

	up64 := (*uint64)(u) // misaligned
	p64 := (*int64)(u)   // misaligned

	shouldPanic(t, "Load64", func() { atomic.Load64(up64) })
	shouldPanic(t, "Loadint64", func() { atomic.Loadint64(p64) })
	shouldPanic(t, "Store64", func() { atomic.Store64(up64, 0) })
	shouldPanic(t, "Xadd64", func() { atomic.Xadd64(up64, 1) })
	shouldPanic(t, "Xchg64", func() { atomic.Xchg64(up64, 1) })
	shouldPanic(t, "Cas64", func() { atomic.Cas64(up64, 1, 2) })
}

func TestAnd8(t *testing.T) {
	// Basic sanity check.
	x := uint8(0xff)
	for i := uint8(0); i < 8; i++ {
		atomic.And8(&x, ^(1 << i))
		if r := uint8(0xff) << (i + 1); x != r {
			t.Fatalf("clearing bit %#x: want %#x, got %#x", uint8(1<<i), r, x)
		}
	}

	// Set every bit in array to 1.
	a := make([]uint8, 1<<12)
	for i := range a {
		a[i] = 0xff
	}

	// Clear array bit-by-bit in different goroutines.
	done := make(chan bool)
	for i := 0; i < 8; i++ {
		m := ^uint8(1 << i)
		go func() {
			for i := range a {
				atomic.And8(&a[i], m)
			}
			done <- true
		}()
	}
	for i := 0; i < 8; i++ {
		<-done
	}

	// Check that the array has been totally cleared.
	for i, v := range a {
		if v != 0 {
			t.Fatalf("a[%v] not cleared: want %#x, got %#x", i, uint8(0), v)
		}
	}
}

func TestOr8(t *testing.T) {
	// Basic sanity check.
	x := uint8(0)
	for i := uint8(0); i < 8; i++ {
		atomic.Or8(&x, 1<<i)
		if r := (uint8(1) << (i + 1)) - 1; x != r {
			t.Fatalf("setting bit %#x: want %#x, got %#x", uint8(1)<<i, r, x)
		}
	}

	// Start with every bit in array set to 0.
	a := make([]uint8, 1<<12)

	// Set every bit in array bit-by-bit in different goroutines.
	done := make(chan bool)
	for i := 0; i < 8; i++ {
		m := uint8(1 << i)
		go func() {
			for i := range a {
				atomic.Or8(&a[i], m)
			}
			done <- true
		}()
	}
	for i := 0; i < 8; i++ {
		<-done
	}

	// Check that the array has been totally set.
	for i, v := range a {
		if v != 0xff {
			t.Fatalf("a[%v] not fully set: want %#x, got %#x", i, uint8(0xff), v)
		}
	}
}

func TestBitwiseContended(t *testing.T) {
	// Start with every bit in array set to 0.
	a := make([]uint8, 16)

	// Iterations to try.
	N := 1 << 16
	if testing.Short() {
		N = 1 << 10
	}

	// Set and then clear every bit in the array bit-by-bit in different goroutines.
	done := make(chan bool)
	for i := 0; i < 8; i++ {
		m := uint8(1 << i)
		go func() {
			for n := 0; n < N; n++ {
				for i := range a {
					atomic.Or8(&a[i], m)
					if atomic.Load8(&a[i])&m != m {
						t.Errorf("a[%v] bit %#x not set", i, m)
					}
					atomic.And8(&a[i], ^m)
					if atomic.Load8(&a[i])&m != 0 {
						t.Errorf("a[%v] bit %#x not clear", i, m)
					}
				}
			}
			done <- true
		}()
	}
	for i := 0; i < 8; i++ {
		<-done
	}

	// Check that the array has been totally cleared.
	for i, v := range a {
		if v != 0 {
			t.Fatalf("a[%v] not cleared: want %#x, got %#x", i, uint8(0), v)
		}
	}
}
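
Note (not part of the file above): TestAnd8, TestOr8, and TestBitwiseContended exercise byte-wide atomic AND/OR operations that runtime/internal/atomic provides to the runtime and that ordinary programs cannot import. As a minimal sketch of the same concurrent set-then-clear technique using only the public sync/atomic package, the standalone program below emulates Or8/And8 with a compare-and-swap loop on uint32 words. The helper names orUint32 and andUint32 are illustrative and not part of any standard library API, and the CAS loop is one portable way to express the read-modify-write, not necessarily how the runtime implements it on a given architecture.

package main

import (
	"fmt"
	"sync"
	"sync/atomic"
)

// orUint32 atomically sets the bits of mask in *addr via a compare-and-swap loop.
func orUint32(addr *uint32, mask uint32) {
	for {
		old := atomic.LoadUint32(addr)
		if atomic.CompareAndSwapUint32(addr, old, old|mask) {
			return
		}
	}
}

// andUint32 atomically clears the bits that are zero in mask, mirroring And8.
func andUint32(addr *uint32, mask uint32) {
	for {
		old := atomic.LoadUint32(addr)
		if atomic.CompareAndSwapUint32(addr, old, old&mask) {
			return
		}
	}
}

func main() {
	a := make([]uint32, 1<<10)
	var wg sync.WaitGroup

	// Set every bit of every word, one bit position per goroutine (cf. TestOr8).
	for i := 0; i < 32; i++ {
		m := uint32(1) << i
		wg.Add(1)
		go func() {
			defer wg.Done()
			for j := range a {
				orUint32(&a[j], m)
			}
		}()
	}
	wg.Wait()

	// Clear every bit again, one bit position per goroutine (cf. TestAnd8).
	for i := 0; i < 32; i++ {
		m := ^(uint32(1) << i)
		wg.Add(1)
		go func() {
			defer wg.Done()
			for j := range a {
				andUint32(&a[j], m)
			}
		}()
	}
	wg.Wait()

	// Every word should be zero only if the AND/OR emulation is truly atomic.
	for j, v := range a {
		if v != 0 {
			fmt.Printf("a[%d] not cleared: %#x\n", j, v)
			return
		}
	}
	fmt.Println("all words set and then cleared")
}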