github.com/decred/dcrlnd@v0.7.6/routing/probability_estimator.go

package routing

import (
	"errors"
	"math"
	"time"

	"github.com/decred/dcrlnd/lnwire"
	"github.com/decred/dcrlnd/routing/route"
)

var (
	// ErrInvalidHalflife is returned when we get an invalid half life.
	ErrInvalidHalflife = errors.New("penalty half life must be >= 0")

	// ErrInvalidHopProbability is returned when we get an invalid hop
	// probability.
	ErrInvalidHopProbability = errors.New("hop probability must be in [0;1]")

	// ErrInvalidAprioriWeight is returned when we get an apriori weight
	// that is out of range.
	ErrInvalidAprioriWeight = errors.New("apriori weight must be in [0;1]")
)

// ProbabilityEstimatorCfg contains configuration for our probability estimator.
type ProbabilityEstimatorCfg struct {
	// PenaltyHalfLife defines after how much time a penalized node or
	// channel is back at 50% probability.
	PenaltyHalfLife time.Duration

	// AprioriHopProbability is the assumed success probability of a hop in
	// a route when no other information is available.
	AprioriHopProbability float64

	// AprioriWeight is a value in the range [0, 1] that defines to what
	// extent historical results should be extrapolated to untried
	// connections. Setting it to one will completely ignore historical
	// results and always assume the configured a priori probability for
	// untried connections. A value of zero will ignore the a priori
	// probability completely and only base the probability on historical
	// results, unless there are none available.
	AprioriWeight float64
}

func (p ProbabilityEstimatorCfg) validate() error {
	if p.PenaltyHalfLife < 0 {
		return ErrInvalidHalflife
	}

	if p.AprioriHopProbability < 0 || p.AprioriHopProbability > 1 {
		return ErrInvalidHopProbability
	}

	if p.AprioriWeight < 0 || p.AprioriWeight > 1 {
		return ErrInvalidAprioriWeight
	}

	return nil
}

// probabilityEstimator returns node and pair probabilities based on historical
// payment results.
type probabilityEstimator struct {
	// ProbabilityEstimatorCfg contains configuration options for our
	// estimator.
	ProbabilityEstimatorCfg

	// prevSuccessProbability is the assumed probability for node pairs that
	// successfully relayed the previous attempt.
	prevSuccessProbability float64
}
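// exampleEstimator is a hypothetical helper, not part of the package API,
// sketching how a probabilityEstimator could be built from a validated
// configuration. The concrete values (one hour half life, 0.6 a priori
// probability, 0.5 apriori weight, 0.95 previous-success probability) are
// assumptions chosen purely for illustration.
func exampleEstimator() (*probabilityEstimator, error) {
	cfg := ProbabilityEstimatorCfg{
		PenaltyHalfLife:       time.Hour,
		AprioriHopProbability: 0.6,
		AprioriWeight:         0.5,
	}

	// Reject out-of-range values before using the config.
	if err := cfg.validate(); err != nil {
		return nil, err
	}

	return &probabilityEstimator{
		ProbabilityEstimatorCfg: cfg,
		prevSuccessProbability:  0.95,
	}, nil
}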
// getNodeProbability calculates the probability for connections from a node
// that have not been tried before. The results parameter is a list of last
// payment results for that node.
func (p *probabilityEstimator) getNodeProbability(now time.Time,
	results NodeResults, amt lnwire.MilliAtom) float64 {

	// If the channel history is not to be taken into account, we can return
	// early here with the configured a priori probability.
	if p.AprioriWeight == 1 {
		return p.AprioriHopProbability
	}

	// If there is no channel history, our best estimate is still the a
	// priori probability.
	if len(results) == 0 {
		return p.AprioriHopProbability
	}

	// The value of the apriori weight is in the range [0, 1]. Convert it to
	// a factor that properly expresses the intention of the weight in the
	// following weighted average calculation. When the apriori weight is 0,
	// the apriori factor is also 0. This means it won't have any effect on
	// the weighted average calculation below. When the apriori weight
	// approaches 1, the apriori factor goes to infinity. It will heavily
	// outweigh any observations that have been collected.
	aprioriFactor := 1/(1-p.AprioriWeight) - 1

	// Calculate a weighted average consisting of the apriori probability
	// and historical observations. This is the part that incentivizes nodes
	// to make sure that all (not just some) of their channels are in good
	// shape. Senders will steer around nodes that have shown a few
	// failures, even though there may be many channels still untried.
	//
	// If there is just a single observation and the apriori weight is 0,
	// this single observation will totally determine the node probability.
	// The node probability is returned for all other channels of the node.
	// This means that one failure will lead to the success probability
	// estimates for all other channels being 0 too. The probability for the
	// channel that was tried will not even recover, because it is
	// recovering to the node probability (which is zero). So one failure
	// effectively prunes all channels of the node forever. This is the most
	// aggressive way in which we can penalize nodes and is unlikely to
	// yield good results in a real network.
	probabilitiesTotal := p.AprioriHopProbability * aprioriFactor
	totalWeight := aprioriFactor

	for _, result := range results {
		switch {

		// Weigh success with a constant high weight of 1. There is no
		// decay. Amt is never zero, so this clause is never executed
		// when result.SuccessAmt is zero.
		case amt <= result.SuccessAmt:
			totalWeight++
			probabilitiesTotal += p.prevSuccessProbability

		// Weigh failures in accordance with their age. The base
		// probability of a failure is considered zero, so nothing needs
		// to be added to probabilitiesTotal.
		case !result.FailTime.IsZero() && amt >= result.FailAmt:
			age := now.Sub(result.FailTime)
			totalWeight += p.getWeight(age)
		}
	}

	return probabilitiesTotal / totalWeight
}

// getWeight calculates a weight in the range [0, 1] that should be assigned to
// a payment result. Weight follows an exponential curve that starts at 1 when
// the result is fresh and asymptotically approaches zero over time. The rate at
// which this happens is controlled by the penaltyHalfLife parameter.
func (p *probabilityEstimator) getWeight(age time.Duration) float64 {
	exp := -age.Hours() / p.PenaltyHalfLife.Hours()
	return math.Pow(2, exp)
}
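// exampleNodeProbability is a hypothetical helper, not part of the package
// API, that mirrors the weighted average computed in getNodeProbability for
// one assumed scenario: an apriori probability of 0.6, an apriori weight of
// 0.5 and a single fresh success observed at probability 0.95. With a weight
// of 0.5, the apriori factor is 1/(1-0.5)-1 = 1, so the a priori probability
// counts as much as one fresh observation and the result becomes
// (0.6*1 + 0.95*1) / (1+1) = 0.775 rather than 0.95.
func exampleNodeProbability() float64 {
	p := &probabilityEstimator{
		ProbabilityEstimatorCfg: ProbabilityEstimatorCfg{
			AprioriHopProbability: 0.6,
			AprioriWeight:         0.5,
		},
		prevSuccessProbability: 0.95,
	}

	// Seed the average with the a priori probability, scaled by the
	// apriori factor.
	aprioriFactor := 1/(1-p.AprioriWeight) - 1
	probabilitiesTotal := p.AprioriHopProbability * aprioriFactor
	totalWeight := aprioriFactor

	// Add a single fresh success with a constant weight of 1.
	totalWeight++
	probabilitiesTotal += p.prevSuccessProbability

	return probabilitiesTotal / totalWeight
}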
// getPairProbability estimates the probability of successfully traversing to
// toNode based on historical payment outcomes for the from node. Those outcomes
// are passed in via the results parameter.
func (p *probabilityEstimator) getPairProbability(
	now time.Time, results NodeResults,
	toNode route.Vertex, amt lnwire.MilliAtom) float64 {

	nodeProbability := p.getNodeProbability(now, results, amt)

	return p.calculateProbability(
		now, results, nodeProbability, toNode, amt,
	)
}

// getLocalPairProbability estimates the probability of successfully traversing
// our own local channels to toNode.
func (p *probabilityEstimator) getLocalPairProbability(
	now time.Time, results NodeResults, toNode route.Vertex) float64 {

	// For local channels that have never been tried before, we assume them
	// to be successful. We have accurate balance and online status
	// information on our own channels, so when we select them in a route it
	// is close to certain that those channels will work.
	nodeProbability := p.prevSuccessProbability

	return p.calculateProbability(
		now, results, nodeProbability, toNode, lnwire.MaxMilliAtom,
	)
}

// calculateProbability estimates the probability of successfully traversing to
// toNode based on historical payment outcomes and a fall-back node probability.
func (p *probabilityEstimator) calculateProbability(
	now time.Time, results NodeResults,
	nodeProbability float64, toNode route.Vertex,
	amt lnwire.MilliAtom) float64 {

	// Retrieve the last pair outcome.
	lastPairResult, ok := results[toNode]

	// If there is no history for this pair, return the node probability,
	// which is the probability estimate for an untried channel.
	if !ok {
		return nodeProbability
	}

	// For successes, we have a fixed (high) probability. Those pairs will
	// be assumed good until proven otherwise. Amt is never zero, so this
	// clause is never executed when lastPairResult.SuccessAmt is zero.
	if amt <= lastPairResult.SuccessAmt {
		return p.prevSuccessProbability
	}

	// Take into account a minimum penalization amount. For balance errors,
	// a failure may be reported with such a minimum to prevent too
	// aggressive penalization. If the current amount is smaller than the
	// amount that previously triggered a failure, we act as if this is an
	// untried channel.
	if lastPairResult.FailTime.IsZero() || amt < lastPairResult.FailAmt {
		return nodeProbability
	}

	timeSinceLastFailure := now.Sub(lastPairResult.FailTime)

	// Calculate the success probability based on the weight of the last
	// failure. When the failure is fresh, its weight is 1 and we'll return
	// probability 0. Over time the probability recovers to the node
	// probability. It would be as if this channel was never tried before.
	weight := p.getWeight(timeSinceLastFailure)
	probability := nodeProbability * (1 - weight)

	return probability
}
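// exampleRecovery is a hypothetical helper, not part of the package API,
// illustrating how the pair probability recovers towards the node probability
// as the last failure ages, using getWeight with an assumed one hour half
// life and an assumed node probability of 0.6. At ages of 0h, 1h and 2h the
// failure weights are 1, 0.5 and 0.25, so the recovered probabilities are 0,
// 0.3 and 0.45, asymptotically approaching 0.6.
func exampleRecovery() []float64 {
	p := &probabilityEstimator{
		ProbabilityEstimatorCfg: ProbabilityEstimatorCfg{
			PenaltyHalfLife: time.Hour,
		},
	}

	const nodeProbability = 0.6

	var probs []float64
	for _, age := range []time.Duration{0, time.Hour, 2 * time.Hour} {
		// Same formula as the tail of calculateProbability: a fresh
		// failure (weight 1) yields 0, an old one recovers to the
		// node probability.
		weight := p.getWeight(age)
		probs = append(probs, nodeProbability*(1-weight))
	}

	return probs
}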