gonum.org/v1/gonum@v0.14.0/graph/community/bisect.go

// Copyright ©2016 The Gonum Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package community

import (
	"errors"
	"fmt"
	"math"

	"golang.org/x/exp/rand"

	"gonum.org/v1/gonum/graph"
)

// Interval is an interval of resolutions with a common score.
type Interval struct {
	// Low and High delimit the interval
	// such that the interval is [low, high).
	Low, High float64

	// Score is the score of the interval.
	Score float64

	// Reduced is the best scoring
	// community membership found for the
	// interval.
	Reduced
}

// Reduced is a graph reduction.
type Reduced interface {
	// Communities returns the community
	// structure of the reduction.
	Communities() [][]graph.Node
}

// Size is a score function that is the reciprocal of the number of communities.
func Size(g ReducedGraph) float64 { return 1 / float64(len(g.Structure())) }

// Weight is a score function that is the sum of community weights. The concrete
// type of g must be a pointer to a ReducedUndirected or a ReducedDirected, otherwise
// Weight will panic.
func Weight(g ReducedGraph) float64 {
	var w float64
	switch g := g.(type) {
	case *ReducedUndirected:
		for _, n := range g.nodes {
			w += n.weight
		}
	case *ReducedDirected:
		for _, n := range g.nodes {
			w += n.weight
		}
	default:
		panic(fmt.Sprintf("community: invalid graph type: %T", g))
	}
	return w
}
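
// weightOfModularizationSketch is an illustrative sketch, not part of the
// package API, showing the kind of value a score function is applied to:
// Modularize reduces g at a single resolution and Weight sums the community
// weights of that reduction. The resolution of 1 is an arbitrary example value.
func weightOfModularizationSketch(g graph.Graph, src rand.Source) float64 {
	// Modularize returns a ReducedGraph whose concrete type is accepted by Weight.
	return Weight(Modularize(g, 1, src))
}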

// ModularScore returns a modularized scoring function for Profile based on the
// graph g and the given score function. The effort parameter determines how
// many attempts will be made to get an improved score for any given resolution.
func ModularScore(g graph.Graph, score func(ReducedGraph) float64, effort int, src rand.Source) func(float64) (float64, Reduced) {
	return func(resolution float64) (float64, Reduced) {
		max := math.Inf(-1)
		var best Reduced
		for i := 0; i < effort; i++ {
			r := Modularize(g, resolution, src)
			s := score(r)
			if s > max {
				max = s
				best = r
			}
		}
		return max, best
	}
}
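
// profileWeightSketch is an illustrative sketch of how a score function such
// as Weight, ModularScore and Profile fit together. The resolution domain
// [0.1, 10), the grain of 1e-3 and the effort of 10 are arbitrary example
// values, not recommendations.
func profileWeightSketch(g graph.Graph, src rand.Source) ([]Interval, error) {
	// Build a scoring closure that modularizes g at each requested
	// resolution, keeping the best of 10 attempts.
	fn := ModularScore(g, Weight, 10, src)
	// Profile the closure over [0.1, 10) using logarithmic bisection.
	return Profile(fn, true, 1e-3, 0.1, 10)
}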

// SizeMultiplex is a score function that is the reciprocal of the number of communities.
func SizeMultiplex(g ReducedMultiplex) float64 { return 1 / float64(len(g.Structure())) }

// WeightMultiplex is a score function that is the sum of community weights. The concrete
// type of g must be a pointer to a ReducedUndirectedMultiplex or a ReducedDirectedMultiplex,
// otherwise WeightMultiplex will panic.
func WeightMultiplex(g ReducedMultiplex) float64 {
	var w float64
	switch g := g.(type) {
	case *ReducedUndirectedMultiplex:
		for _, n := range g.nodes {
			for _, lw := range n.weights {
				w += lw
			}
		}
	case *ReducedDirectedMultiplex:
		for _, n := range g.nodes {
			for _, lw := range n.weights {
				w += lw
			}
		}
	default:
		panic(fmt.Sprintf("community: invalid graph type: %T", g))
	}
	return w
}
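
// weightOfMultiplexModularizationSketch is an illustrative sketch of the
// multiplex analogue of weightOfModularizationSketch: WeightMultiplex is
// applied to a reduction produced by ModularizeMultiplex at a single
// resolution. The resolution of 1 and all=true are arbitrary example values;
// weights is passed through unchanged.
func weightOfMultiplexModularizationSketch(g Multiplex, weights []float64, src rand.Source) float64 {
	return WeightMultiplex(ModularizeMultiplex(g, weights, []float64{1}, true, src))
}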

// ModularMultiplexScore returns a modularized scoring function for Profile based
// on the graph g and the given score function. The effort parameter determines how
// many attempts will be made to get an improved score for any given resolution.
func ModularMultiplexScore(g Multiplex, weights []float64, all bool, score func(ReducedMultiplex) float64, effort int, src rand.Source) func(float64) (float64, Reduced) {
	return func(resolution float64) (float64, Reduced) {
		max := math.Inf(-1)
		var best Reduced
		for i := 0; i < effort; i++ {
			r := ModularizeMultiplex(g, weights, []float64{resolution}, all, src)
			s := score(r)
			if s > max {
				max = s
				best = r
			}
		}
		return max, best
	}
}
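
// profileWeightMultiplexSketch is an illustrative sketch of the multiplex
// analogue of profileWeightSketch: ModularMultiplexScore builds the closure
// that Profile bisects, here scoring with WeightMultiplex. The weights slice
// is passed through to ModularizeMultiplex unchanged, and the resolution
// domain, grain and effort are arbitrary example values.
func profileWeightMultiplexSketch(g Multiplex, weights []float64, src rand.Source) ([]Interval, error) {
	fn := ModularMultiplexScore(g, weights, true, WeightMultiplex, 10, src)
	return Profile(fn, true, 1e-3, 0.1, 10)
}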

// Profile returns an approximate profile of score values in the resolution domain [low,high)
// at the given granularity. The score is calculated by bisecting calls to fn. If log is true,
// log space bisection is used, otherwise bisection is linear. The function fn should be
// monotonically decreasing in at least 1/grain evaluations. Profile will attempt to detect
// non-monotonicity during the bisection.
//
// Since exact modularity optimization is known to be NP-hard and Profile calls modularization
// routines repeatedly, it is unlikely to return the exact resolution profile.
func Profile(fn func(float64) (float64, Reduced), log bool, grain, low, high float64) (profile []Interval, err error) {
	if low >= high {
		return nil, errors.New("community: zero or negative width domain")
	}

	// Recover a nonDecreasing panic from bisect and report it as an
	// error; any other panic is re-raised.
	defer func() {
		r := recover()
		e, ok := r.(nonDecreasing)
		if ok {
			err = e
			return
		}
		if r != nil {
			panic(r)
		}
	}()
	left, comm := fn(low)
	// Take the best score found at the high end of the domain over roughly
	// 1/grain evaluations as the reference for the monotonicity checks in bisect.
	right, _ := fn(high)
	for i := 1; i < int(1/grain); i++ {
		rt, _ := fn(high)
		right = math.Max(right, rt)
	}
	profile = bisect(fn, log, grain, low, left, high, right, comm)

	// We may have missed some non-monotonicity,
	// so merge low score discordant domains into
	// their lower resolution neighbours.
	return fixUp(profile), nil
}
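
// printProfileSketch is an illustrative sketch of consuming Profile's result:
// each Interval covers the half-open resolution range [Low, High) with a
// common score, and the embedded Reduced value gives the best community
// structure found for that range.
func printProfileSketch(profile []Interval) {
	for _, iv := range profile {
		fmt.Printf("[%v, %v) score=%v communities=%d\n",
			iv.Low, iv.High, iv.Score, len(iv.Communities()))
	}
}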

// nonDecreasing is the error reported when the score profile fails to
// decrease monotonically despite repeated re-evaluation; its value records
// how many attempts were made.
type nonDecreasing int

func (n nonDecreasing) Error() string {
	return fmt.Sprintf("community: profile does not reliably monotonically decrease: tried %d times", n)
}

// bisect recursively subdivides the resolution interval [low, high), scoring
// its midpoint and merging adjacent sub-intervals that share a score.
func bisect(fn func(float64) (float64, Reduced), log bool, grain, low, scoreLow, high, scoreHigh float64, comm Reduced) []Interval {
	if low >= high {
		panic("community: zero or negative width domain")
	}
	if math.IsNaN(scoreLow) || math.IsNaN(scoreHigh) {
		return nil
	}

	// Heuristically determine a reasonable number
	// of times to try to get a higher value.
	maxIter := int(1 / grain)

	// Re-evaluate the low end until its score is at least the high-end
	// score, panicking with nonDecreasing after maxIter attempts.
	lowComm := comm
	for n := 0; scoreLow < scoreHigh; n++ {
		if n > maxIter {
			panic(nonDecreasing(n))
		}
		scoreLow, lowComm = fn(low)
	}

	if scoreLow == scoreHigh || tooSmall(low, high, grain, log) {
		return []Interval{{Low: low, High: high, Score: scoreLow, Reduced: lowComm}}
	}

	// Split at the geometric or arithmetic midpoint depending on whether
	// bisection is in log or linear space.
	var mid float64
	if log {
		mid = math.Sqrt(low * high)
	} else {
		mid = (low + high) / 2
	}

	// Score the midpoint, retrying in the same way until it is at least
	// the high-end score.
	scoreMid := math.Inf(-1)
	var midComm Reduced
	for n := 0; scoreMid < scoreHigh; n++ {
		if n > maxIter {
			panic(nonDecreasing(n))
		}
		scoreMid, midComm = fn(mid)
	}

	lower := bisect(fn, log, grain, low, scoreLow, mid, scoreMid, lowComm)
	higher := bisect(fn, log, grain, mid, scoreMid, high, scoreHigh, midComm)
	// Ensure the score still decreases across the boundary between the two
	// halves, re-evaluating at low if necessary.
	for n := 0; lower[len(lower)-1].Score < higher[0].Score; n++ {
		if n > maxIter {
			panic(nonDecreasing(n))
		}
		lower[len(lower)-1].Score, lower[len(lower)-1].Reduced = fn(low)
	}

	// Merge the halves' adjoining intervals when they share a score.
	if lower[len(lower)-1].Score == higher[0].Score {
		higher[0].Low = lower[len(lower)-1].Low
		lower = lower[:len(lower)-1]
		if len(lower) == 0 {
			return higher
		}
	}
	return append(lower, higher...)
}

// fixUp merges intervals whose scores do not fit the expected monotonic
// decrease into their neighbouring intervals, working from the high
// resolution end of the profile.
func fixUp(profile []Interval) []Interval {
	max := profile[len(profile)-1].Score
	for i := len(profile) - 2; i >= 0; i-- {
		if profile[i].Score > max {
			max = profile[i].Score
			continue
		}
		// This interval's score is discordant with the best score seen at
		// higher resolutions, so fold it into its neighbour.
		profile[i+1].Low = profile[i].Low
		profile = append(profile[:i], profile[i+1:]...)
	}
	return profile
}

// tooSmall returns whether the [low, high) interval is narrower than the
// requested grain, in log or linear space.
func tooSmall(low, high, grain float64, log bool) bool {
	if log {
		return math.Log(high/low) < grain
	}
	return high-low < grain
}