github.com/gopherd/gonum@v0.0.4/stat/distuv/statdist.go

// Copyright ©2018 The Gonum Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package distuv

import (
	"math"

	"github.com/gopherd/gonum/mathext"
)

// Bhattacharyya is a type for computing the Bhattacharyya distance between
// probability distributions.
//
// The Bhattacharyya distance is defined as
//  D_B = -ln(BC(l,r))
//  BC(l,r) = \int_-∞^∞ (l(x)r(x))^(1/2) dx
// Where BC is known as the Bhattacharyya coefficient.
// The Bhattacharyya distance is related to the Hellinger distance by
//  H(l,r) = sqrt(1-BC(l,r))
// For more information, see
//  https://en.wikipedia.org/wiki/Bhattacharyya_distance
type Bhattacharyya struct{}

// DistBeta returns the Bhattacharyya distance between Beta distributions l and r.
// For Beta distributions, the Bhattacharyya distance is given by
//  -ln(B((α_l + α_r)/2, (β_l + β_r)/2) / sqrt(B(α_l,β_l) * B(α_r,β_r)))
// Where B is the Beta function.
func (Bhattacharyya) DistBeta(l, r Beta) float64 {
	// Reference: https://en.wikipedia.org/wiki/Hellinger_distance#Examples
	return -mathext.Lbeta((l.Alpha+r.Alpha)/2, (l.Beta+r.Beta)/2) +
		0.5*mathext.Lbeta(l.Alpha, l.Beta) + 0.5*mathext.Lbeta(r.Alpha, r.Beta)
}

// DistNormal returns the Bhattacharyya distance between Normal distributions l and r.
// For Normal distributions, the Bhattacharyya distance is given by
//  s = (σ_l^2 + σ_r^2)/2
//  D_B = 1/8 (μ_l-μ_r)^2/s + 1/2 ln(s/(σ_l*σ_r))
func (Bhattacharyya) DistNormal(l, r Normal) float64 {
	// Reference: https://en.wikipedia.org/wiki/Bhattacharyya_distance
	m := l.Mu - r.Mu
	s := (l.Sigma*l.Sigma + r.Sigma*r.Sigma) / 2
	return 0.125*m*m/s + 0.5*math.Log(s) - 0.5*math.Log(l.Sigma) - 0.5*math.Log(r.Sigma)
}

// Hellinger is a type for computing the Hellinger distance between probability
// distributions.
//
// The Hellinger distance is defined as
//  H^2(l,r) = 1/2 * \int_x (sqrt(l(x)) - sqrt(r(x)))^2 dx
// and is bounded between 0 and 1. Note that the above formula defines the squared
// Hellinger distance, while the methods of this type return the Hellinger distance itself.
// The Hellinger distance is related to the Bhattacharyya distance by
//  H^2 = 1 - exp(-D_B)
// For more information, see
//  https://en.wikipedia.org/wiki/Hellinger_distance
type Hellinger struct{}

// DistBeta computes the Hellinger distance between Beta distributions l and r.
// See the documentation of Bhattacharyya.DistBeta for the distance formula.
func (Hellinger) DistBeta(l, r Beta) float64 {
	db := Bhattacharyya{}.DistBeta(l, r)
	return math.Sqrt(-math.Expm1(-db))
}

// DistNormal computes the Hellinger distance between Normal distributions l and r.
// See the documentation of Bhattacharyya.DistNormal for the distance formula.
func (Hellinger) DistNormal(l, r Normal) float64 {
	db := Bhattacharyya{}.DistNormal(l, r)
	return math.Sqrt(-math.Expm1(-db))
}

// KullbackLeibler is a type for computing the Kullback-Leibler divergence from l to r.
//
// The Kullback-Leibler divergence is defined as
//  D_KL(l || r) = \int_x l(x) log(l(x)/r(x)) dx
// Note that the Kullback-Leibler divergence is not symmetric with respect to
// the order of the input arguments.
type KullbackLeibler struct{}

// DistBeta returns the Kullback-Leibler divergence between Beta distributions
// l and r.
//
// For two Beta distributions, the KL divergence is computed as
//  D_KL(l || r) = log Γ(α_l+β_l) - log Γ(α_l) - log Γ(β_l)
//                 - log Γ(α_r+β_r) + log Γ(α_r) + log Γ(β_r)
//                 + (α_l-α_r)(ψ(α_l)-ψ(α_l+β_l)) + (β_l-β_r)(ψ(β_l)-ψ(α_l+β_l))
// Where Γ is the gamma function and ψ is the digamma function.
func (KullbackLeibler) DistBeta(l, r Beta) float64 {
	// http://bariskurt.com/kullback-leibler-divergence-between-two-dirichlet-and-beta-distributions/
	if l.Alpha <= 0 || l.Beta <= 0 {
		panic("distuv: bad parameters for left distribution")
	}
	if r.Alpha <= 0 || r.Beta <= 0 {
		panic("distuv: bad parameters for right distribution")
	}
	lab := l.Alpha + l.Beta
	l1, _ := math.Lgamma(lab)
	l2, _ := math.Lgamma(l.Alpha)
	l3, _ := math.Lgamma(l.Beta)
	lt := l1 - l2 - l3

	r1, _ := math.Lgamma(r.Alpha + r.Beta)
	r2, _ := math.Lgamma(r.Alpha)
	r3, _ := math.Lgamma(r.Beta)
	rt := r1 - r2 - r3

	d0 := mathext.Digamma(l.Alpha + l.Beta)
	ct := (l.Alpha-r.Alpha)*(mathext.Digamma(l.Alpha)-d0) + (l.Beta-r.Beta)*(mathext.Digamma(l.Beta)-d0)

	return lt - rt + ct
}

// DistNormal returns the Kullback-Leibler divergence between Normal distributions
// l and r.
//
// For two Normal distributions, the KL divergence is computed as
//  D_KL(l || r) = log(σ_r / σ_l) + (σ_l^2 + (μ_l-μ_r)^2)/(2 * σ_r^2) - 0.5
func (KullbackLeibler) DistNormal(l, r Normal) float64 {
	d := l.Mu - r.Mu
	v := (l.Sigma*l.Sigma + d*d) / (2 * r.Sigma * r.Sigma)
	return math.Log(r.Sigma) - math.Log(l.Sigma) + v - 0.5
}
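
// The function below is not part of the original gonum source; it is a minimal
// usage sketch showing how the Bhattacharyya and Hellinger distances between
// two Normal distributions relate via H = sqrt(1 - exp(-D_B)). The function
// name and the chosen parameters are illustrative only; Normal is the distuv
// Normal distribution type with Mu and Sigma fields.
func exampleBhattacharyyaHellingerNormal() (db, h float64) {
	l := Normal{Mu: 0, Sigma: 1}
	r := Normal{Mu: 2, Sigma: 3}

	db = Bhattacharyya{}.DistNormal(l, r) // Bhattacharyya distance D_B.
	h = Hellinger{}.DistNormal(l, r)      // Hellinger distance H.

	// Up to floating-point error, h equals math.Sqrt(1 - math.Exp(-db)).
	return db, h
}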
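
// The function below is likewise a sketch rather than part of the original
// source: it evaluates the Kullback-Leibler divergence between two Beta
// distributions in both argument orders to illustrate that D_KL is not
// symmetric. The function name and parameters are illustrative only; Beta is
// the distuv Beta distribution type with Alpha and Beta fields.
func exampleKullbackLeiblerBeta() (lr, rl float64) {
	l := Beta{Alpha: 2, Beta: 5}
	r := Beta{Alpha: 3, Beta: 3}

	kl := KullbackLeibler{}
	lr = kl.DistBeta(l, r) // D_KL(l || r)
	rl = kl.DistBeta(r, l) // D_KL(r || l), generally a different value.
	return lr, rl
}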