github.com/gopherd/gonum@v0.0.4/optimize/bisection.go (about)

     1  // Copyright ©2014 The Gonum Authors. All rights reserved.
     2  // Use of this source code is governed by a BSD-style
     3  // license that can be found in the LICENSE file.
     4  
     5  package optimize
     6  
     7  import "math"
     8  
// defaultBisectionCurvature is the curvature factor used by Bisection when
// CurvatureFactor is left at its zero value (see Bisection.Init).
const defaultBisectionCurvature = 0.9

// Compile-time assertion that *Bisection satisfies the Linesearcher interface.
var _ Linesearcher = (*Bisection)(nil)
    12  
// Bisection is a Linesearcher that uses a bisection to find a point that
// satisfies the strong Wolfe conditions with the given curvature factor and
// a decrease factor of zero.
type Bisection struct {
	// CurvatureFactor is the constant factor in the curvature condition.
	// Smaller values result in a more exact line search.
	// A set value must be in the interval (0, 1), otherwise Init will panic.
	// If it is zero, it will be defaulted to 0.9.
	CurvatureFactor float64

	// Bracket on the step size: the line minimizer is believed to lie in
	// [minStep, maxStep]. currStep is the trial step most recently sent out
	// for evaluation. maxStep is +Inf until an upper bound has been found.
	minStep  float64
	maxStep  float64
	currStep float64

	// Function values at the initial point (initF) and at the bracket
	// endpoints (minF at minStep, maxF at maxStep; maxF is NaN until the
	// upper endpoint has been evaluated). lastF holds the function value at
	// currStep while waiting for the matching gradient evaluation.
	initF float64
	minF  float64
	maxF  float64
	lastF float64

	// initGrad is the directional derivative at the start of the line
	// search; Init requires it to be negative (descent direction).
	initGrad float64

	// lastOp records the most recently returned operation; Iterate uses it
	// to validate the call sequence and to interpret its arguments.
	lastOp Operation
}
    36  
    37  func (b *Bisection) Init(f, g float64, step float64) Operation {
    38  	if step <= 0 {
    39  		panic("bisection: bad step size")
    40  	}
    41  	if g >= 0 {
    42  		panic("bisection: initial derivative is non-negative")
    43  	}
    44  
    45  	if b.CurvatureFactor == 0 {
    46  		b.CurvatureFactor = defaultBisectionCurvature
    47  	}
    48  	if b.CurvatureFactor <= 0 || b.CurvatureFactor >= 1 {
    49  		panic("bisection: CurvatureFactor not between 0 and 1")
    50  	}
    51  
    52  	b.minStep = 0
    53  	b.maxStep = math.Inf(1)
    54  	b.currStep = step
    55  
    56  	b.initF = f
    57  	b.minF = f
    58  	b.maxF = math.NaN()
    59  
    60  	b.initGrad = g
    61  
    62  	// Only evaluate the gradient when necessary.
    63  	b.lastOp = FuncEvaluation
    64  	return b.lastOp
    65  }
    66  
// Iterate consumes the result of the previously requested evaluation — f for
// a FuncEvaluation, g for a GradEvaluation — and returns the next operation
// together with the step at which to perform it. It panics if Init has not
// been called first.
func (b *Bisection) Iterate(f, g float64) (Operation, float64, error) {
	if b.lastOp != FuncEvaluation && b.lastOp != GradEvaluation {
		panic("bisection: Init has not been called")
	}
	// minF is the lowest function value observed so far, over the initial
	// point and both bracket endpoints. While b.maxF is still NaN the
	// comparison is false, so an unevaluated upper endpoint is skipped.
	minF := b.initF
	if b.maxF < minF {
		minF = b.maxF
	}
	if b.minF < minF {
		minF = b.minF
	}
	if b.lastOp == FuncEvaluation {
		// See if the function value is good enough to make progress. If it is,
		// evaluate the gradient. If not, set it to the upper bound if the bound
		// has not yet been found, otherwise iterate toward the minimum location.
		if f <= minF {
			// Promising value: remember it and ask for the gradient at the
			// same step before deciding anything.
			b.lastF = f
			b.lastOp = GradEvaluation
			return b.lastOp, b.currStep, nil
		}
		if math.IsInf(b.maxStep, 1) {
			// f increased relative to the best seen value, so the current
			// step bounds the minimizer from above.
			b.maxStep = b.currStep
			b.maxF = f
			return b.nextStep((b.minStep + b.maxStep) / 2)
		}
		// Bracketed already: replace the endpoint with the higher function
		// value so the interval keeps shrinking toward the minimum.
		if b.minF <= b.maxF {
			b.maxStep = b.currStep
			b.maxF = f
		} else {
			b.minStep = b.currStep
			b.minF = f
		}
		return b.nextStep((b.minStep + b.maxStep) / 2)
	}
	// lastOp == GradEvaluation: the f argument is not meaningful for a
	// gradient-only evaluation, so restore the function value stored when
	// the gradient was requested.
	f = b.lastF
	// The function value was lower. Check if this location is sufficient to
	// converge the linesearch, otherwise iterate.
	if StrongWolfeConditionsMet(f, g, minF, b.initGrad, b.currStep, 0, b.CurvatureFactor) {
		b.lastOp = MajorIteration
		return b.lastOp, b.currStep, nil
	}
	if math.IsInf(b.maxStep, 1) {
		// The function value is lower. If the gradient is positive, an upper bound
		// of the minimum has been found. If the gradient is negative, search farther
		// in that direction.
		if g > 0 {
			b.maxStep = b.currStep
			b.maxF = f
			return b.nextStep((b.minStep + b.maxStep) / 2)
		}
		// Still descending with no upper bound: double the step.
		b.minStep = b.currStep
		b.minF = f
		return b.nextStep(b.currStep * 2)
	}
	// The interval has been bounded, and we have found a new lowest value. Use
	// the gradient to decide which direction.
	if g < 0 {
		b.minStep = b.currStep
		b.minF = f
	} else {
		b.maxStep = b.currStep
		b.maxF = f
	}
	return b.nextStep((b.minStep + b.maxStep) / 2)
}
   132  
   133  // nextStep checks if the new step is equal to the old step.
   134  // This can happen if min and max are the same, or if the step size is infinity,
   135  // both of which indicate the minimization must stop. If the steps are different,
   136  // it sets the new step size and returns the evaluation type and the step. If the steps
   137  // are the same, it returns an error.
   138  func (b *Bisection) nextStep(step float64) (Operation, float64, error) {
   139  	if b.currStep == step {
   140  		b.lastOp = NoOperation
   141  		return b.lastOp, b.currStep, ErrLinesearcherFailure
   142  	}
   143  	b.currStep = step
   144  	b.lastOp = FuncEvaluation
   145  	return b.lastOp, b.currStep, nil
   146  }