github.com/aigarnetwork/aigar@v0.0.0-20191115204914-d59a6eb70f8e/metrics/sample_test.go

// Copyright 2018 The go-ethereum Authors
// Copyright 2019 The go-aigar Authors
// This file is part of the go-aigar library.
//
// The go-aigar library is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// The go-aigar library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with the go-aigar library. If not, see <http://www.gnu.org/licenses/>.

package metrics

import (
	"math"
	"math/rand"
	"runtime"
	"testing"
	"time"
)

// Benchmark{Compute,Copy}{1000,1000000} demonstrate that, even for relatively
// expensive computations like Variance, the cost of copying the Sample, as
// approximated by a make and copy, is much greater than the cost of the
// computation for small samples and only slightly less for large samples.
func BenchmarkCompute1000(b *testing.B) {
	s := make([]int64, 1000)
	for i := 0; i < len(s); i++ {
		s[i] = int64(i)
	}
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		SampleVariance(s)
	}
}
func BenchmarkCompute1000000(b *testing.B) {
	s := make([]int64, 1000000)
	for i := 0; i < len(s); i++ {
		s[i] = int64(i)
	}
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		SampleVariance(s)
	}
}
func BenchmarkCopy1000(b *testing.B) {
	s := make([]int64, 1000)
	for i := 0; i < len(s); i++ {
		s[i] = int64(i)
	}
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		sCopy := make([]int64, len(s))
		copy(sCopy, s)
	}
}
func BenchmarkCopy1000000(b *testing.B) {
	s := make([]int64, 1000000)
	for i := 0; i < len(s); i++ {
		s[i] = int64(i)
	}
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		sCopy := make([]int64, len(s))
		copy(sCopy, s)
	}
}

func BenchmarkExpDecaySample257(b *testing.B) {
	benchmarkSample(b, NewExpDecaySample(257, 0.015))
}

func BenchmarkExpDecaySample514(b *testing.B) {
	benchmarkSample(b, NewExpDecaySample(514, 0.015))
}

func BenchmarkExpDecaySample1028(b *testing.B) {
	benchmarkSample(b, NewExpDecaySample(1028, 0.015))
}

func BenchmarkUniformSample257(b *testing.B) {
	benchmarkSample(b, NewUniformSample(257))
}

func BenchmarkUniformSample514(b *testing.B) {
	benchmarkSample(b, NewUniformSample(514))
}

func BenchmarkUniformSample1028(b *testing.B) {
	benchmarkSample(b, NewUniformSample(1028))
}
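
// The tests below seed math/rand so that the reservoir contents are
// deterministic. Each checks Count (total updates observed), Size (current
// reservoir occupancy), and that every retained value falls within the range
// of values fed to Update.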
func TestExpDecaySample10(t *testing.T) {
	rand.Seed(1)
	s := NewExpDecaySample(100, 0.99)
	for i := 0; i < 10; i++ {
		s.Update(int64(i))
	}
	if size := s.Count(); 10 != size {
		t.Errorf("s.Count(): 10 != %v\n", size)
	}
	if size := s.Size(); 10 != size {
		t.Errorf("s.Size(): 10 != %v\n", size)
	}
	if l := len(s.Values()); 10 != l {
		t.Errorf("len(s.Values()): 10 != %v\n", l)
	}
	for _, v := range s.Values() {
		if v > 10 || v < 0 {
			t.Errorf("out of range [0, 10): %v\n", v)
		}
	}
}

func TestExpDecaySample100(t *testing.T) {
	rand.Seed(1)
	s := NewExpDecaySample(1000, 0.01)
	for i := 0; i < 100; i++ {
		s.Update(int64(i))
	}
	if size := s.Count(); 100 != size {
		t.Errorf("s.Count(): 100 != %v\n", size)
	}
	if size := s.Size(); 100 != size {
		t.Errorf("s.Size(): 100 != %v\n", size)
	}
	if l := len(s.Values()); 100 != l {
		t.Errorf("len(s.Values()): 100 != %v\n", l)
	}
	for _, v := range s.Values() {
		if v > 100 || v < 0 {
			t.Errorf("out of range [0, 100): %v\n", v)
		}
	}
}

func TestExpDecaySample1000(t *testing.T) {
	rand.Seed(1)
	s := NewExpDecaySample(100, 0.99)
	for i := 0; i < 1000; i++ {
		s.Update(int64(i))
	}
	if size := s.Count(); 1000 != size {
		t.Errorf("s.Count(): 1000 != %v\n", size)
	}
	if size := s.Size(); 100 != size {
		t.Errorf("s.Size(): 100 != %v\n", size)
	}
	if l := len(s.Values()); 100 != l {
		t.Errorf("len(s.Values()): 100 != %v\n", l)
	}
	for _, v := range s.Values() {
		if v > 1000 || v < 0 {
			t.Errorf("out of range [0, 1000): %v\n", v)
		}
	}
}

// This test makes sure that the sample's priority is not amplified by using
// nanosecond duration since start rather than second duration since start.
// The priority becomes +Inf quickly after starting if this is done,
// effectively freezing the set of samples until a rescale step happens.
func TestExpDecaySampleNanosecondRegression(t *testing.T) {
	rand.Seed(1)
	s := NewExpDecaySample(100, 0.99)
	for i := 0; i < 100; i++ {
		s.Update(10)
	}
	time.Sleep(1 * time.Millisecond)
	for i := 0; i < 100; i++ {
		s.Update(20)
	}
	v := s.Values()
	avg := float64(0)
	for i := 0; i < len(v); i++ {
		avg += float64(v[i])
	}
	avg /= float64(len(v))
	if avg > 16 || avg < 14 {
		t.Errorf("out of range [14, 16]: %v\n", avg)
	}
}

func TestExpDecaySampleRescale(t *testing.T) {
	s := NewExpDecaySample(2, 0.001).(*ExpDecaySample)
	s.update(time.Now(), 1)
	s.update(time.Now().Add(time.Hour+time.Microsecond), 1)
	for _, v := range s.values.Values() {
		if v.k == 0.0 {
			t.Fatal("v.k == 0.0")
		}
	}
}

func TestExpDecaySampleSnapshot(t *testing.T) {
	now := time.Now()
	rand.Seed(1)
	s := NewExpDecaySample(100, 0.99)
	for i := 1; i <= 10000; i++ {
		s.(*ExpDecaySample).update(now.Add(time.Duration(i)), int64(i))
	}
	snapshot := s.Snapshot()
	s.Update(1)
	testExpDecaySampleStatistics(t, snapshot)
}

func TestExpDecaySampleStatistics(t *testing.T) {
	now := time.Now()
	rand.Seed(1)
	s := NewExpDecaySample(100, 0.99)
	for i := 1; i <= 10000; i++ {
		s.(*ExpDecaySample).update(now.Add(time.Duration(i)), int64(i))
	}
	testExpDecaySampleStatistics(t, s)
}

func TestUniformSample(t *testing.T) {
	rand.Seed(1)
	s := NewUniformSample(100)
	for i := 0; i < 1000; i++ {
		s.Update(int64(i))
	}
	if size := s.Count(); 1000 != size {
		t.Errorf("s.Count(): 1000 != %v\n", size)
	}
	if size := s.Size(); 100 != size {
		t.Errorf("s.Size(): 100 != %v\n", size)
	}
	if l := len(s.Values()); 100 != l {
		t.Errorf("len(s.Values()): 100 != %v\n", l)
	}
	for _, v := range s.Values() {
		if v > 1000 || v < 0 {
			t.Errorf("out of range [0, 1000): %v\n", v)
		}
	}
}

func TestUniformSampleIncludesTail(t *testing.T) {
	rand.Seed(1)
	s := NewUniformSample(100)
	max := 100
	for i := 0; i < max; i++ {
		s.Update(int64(i))
	}
	v := s.Values()
	sum := 0
	exp := (max - 1) * max / 2
	for i := 0; i < len(v); i++ {
		sum += int(v[i])
	}
	if exp != sum {
		t.Errorf("sum: %v != %v\n", exp, sum)
	}
}

func TestUniformSampleSnapshot(t *testing.T) {
	s := NewUniformSample(100)
	for i := 1; i <= 10000; i++ {
		s.Update(int64(i))
	}
	snapshot := s.Snapshot()
	s.Update(1)
	testUniformSampleStatistics(t, snapshot)
}

func TestUniformSampleStatistics(t *testing.T) {
	rand.Seed(1)
	s := NewUniformSample(100)
	for i := 1; i <= 10000; i++ {
		s.Update(int64(i))
	}
	testUniformSampleStatistics(t, s)
}
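
// benchmarkSample drives b.N Updates into the given Sample and, in addition
// to the usual timing output, logs an approximate GC cost per operation by
// diffing runtime.MemStats.PauseTotalNs from before the timed loop to after
// a forced GC.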
func benchmarkSample(b *testing.B, s Sample) {
	var memStats runtime.MemStats
	runtime.ReadMemStats(&memStats)
	pauseTotalNs := memStats.PauseTotalNs
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		s.Update(1)
	}
	b.StopTimer()
	runtime.GC()
	runtime.ReadMemStats(&memStats)
	b.Logf("GC cost: %d ns/op", int(memStats.PauseTotalNs-pauseTotalNs)/b.N)
}
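
// The exact values asserted by the two helpers below are tied to the seeded
// math/rand sequence and the fixed update patterns used in the tests above;
// changing either will shift the sampled reservoirs and break these
// expectations.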
func testExpDecaySampleStatistics(t *testing.T, s Sample) {
	if count := s.Count(); 10000 != count {
		t.Errorf("s.Count(): 10000 != %v\n", count)
	}
	if min := s.Min(); 107 != min {
		t.Errorf("s.Min(): 107 != %v\n", min)
	}
	if max := s.Max(); 10000 != max {
		t.Errorf("s.Max(): 10000 != %v\n", max)
	}
	if mean := s.Mean(); 4965.98 != mean {
		t.Errorf("s.Mean(): 4965.98 != %v\n", mean)
	}
	if stdDev := s.StdDev(); 2959.825156930727 != stdDev {
		t.Errorf("s.StdDev(): 2959.825156930727 != %v\n", stdDev)
	}
	ps := s.Percentiles([]float64{0.5, 0.75, 0.99})
	if 4615 != ps[0] {
		t.Errorf("median: 4615 != %v\n", ps[0])
	}
	if 7672 != ps[1] {
		t.Errorf("75th percentile: 7672 != %v\n", ps[1])
	}
	if 9998.99 != ps[2] {
		t.Errorf("99th percentile: 9998.99 != %v\n", ps[2])
	}
}

func testUniformSampleStatistics(t *testing.T, s Sample) {
	if count := s.Count(); 10000 != count {
		t.Errorf("s.Count(): 10000 != %v\n", count)
	}
	if min := s.Min(); 37 != min {
		t.Errorf("s.Min(): 37 != %v\n", min)
	}
	if max := s.Max(); 9989 != max {
		t.Errorf("s.Max(): 9989 != %v\n", max)
	}
	if mean := s.Mean(); 4748.14 != mean {
		t.Errorf("s.Mean(): 4748.14 != %v\n", mean)
	}
	if stdDev := s.StdDev(); 2826.684117548333 != stdDev {
		t.Errorf("s.StdDev(): 2826.684117548333 != %v\n", stdDev)
	}
	ps := s.Percentiles([]float64{0.5, 0.75, 0.99})
	if 4599 != ps[0] {
		t.Errorf("median: 4599 != %v\n", ps[0])
	}
	if 7380.5 != ps[1] {
		t.Errorf("75th percentile: 7380.5 != %v\n", ps[1])
	}
	if math.Abs(9986.429999999998-ps[2]) > epsilonPercentile {
		t.Errorf("99th percentile: 9986.429999999998 != %v\n", ps[2])
	}
}

// TestUniformSampleConcurrentUpdateCount would expose data race problems with
// concurrent Update and Count calls on a Sample when the test is run with the
// -race flag.
func TestUniformSampleConcurrentUpdateCount(t *testing.T) {
	if testing.Short() {
		t.Skip("skipping in short mode")
	}
	s := NewUniformSample(100)
	for i := 0; i < 100; i++ {
		s.Update(int64(i))
	}
	quit := make(chan struct{})
	go func() {
		ticker := time.NewTicker(10 * time.Millisecond)
		for {
			select {
			case <-ticker.C:
				s.Update(rand.Int63())
			case <-quit:
				ticker.Stop()
				return
			}
		}
	}()
	for i := 0; i < 1000; i++ {
		s.Count()
		time.Sleep(5 * time.Millisecond)
	}
	quit <- struct{}{}
}