gonum.org/v1/gonum@v0.14.0/optimize/errors.go

// Copyright ©2014 The Gonum Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package optimize

import (
	"errors"
	"fmt"
	"math"
)

var (
	// ErrZeroDimensional signifies that an optimization was called with an input of length 0.
	ErrZeroDimensional = errors.New("optimize: zero dimensional input")

	// ErrLinesearcherFailure signifies that a Linesearcher has iterated too
	// many times. This may occur if the gradient tolerance is set too low.
	ErrLinesearcherFailure = errors.New("linesearch: failed to converge")

	// ErrNonDescentDirection signifies that LinesearchMethod has received a
	// search direction from a NextDirectioner in which the function is not
	// decreasing.
	ErrNonDescentDirection = errors.New("linesearch: non-descent search direction")

	// ErrNoProgress signifies that LinesearchMethod cannot make further
	// progress because there is no change in location after a Linesearcher
	// step due to floating-point arithmetic.
	ErrNoProgress = errors.New("linesearch: no change in location after Linesearcher step")

	// ErrLinesearcherBound signifies that a Linesearcher reached a step that
	// lies outside the allowed bounds.
	ErrLinesearcherBound = errors.New("linesearch: step out of bounds")

	// ErrMissingGrad signifies that a Method requires a Gradient function that
	// is not supplied by the Problem.
	ErrMissingGrad = errors.New("optimize: problem does not provide needed Grad function")

	// ErrMissingHess signifies that a Method requires a Hessian function that
	// is not supplied by the Problem.
	ErrMissingHess = errors.New("optimize: problem does not provide needed Hess function")
)
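
A minimal caller-side sketch of how these sentinel values might be handled: the snippet below (an illustration of typical use, not part of errors.go) minimizes a toy quadratic with optimize.Minimize and distinguishes a couple of the sentinels with errors.Is. The objective function, starting point, and the particular cases checked are assumptions made for the example.

// Hypothetical caller of optimize.Minimize checking the package's
// sentinel errors with errors.Is.
package main

import (
	"errors"
	"fmt"
	"log"

	"gonum.org/v1/gonum/optimize"
)

func main() {
	p := optimize.Problem{
		// Toy quadratic objective; chosen only for illustration.
		Func: func(x []float64) float64 { return x[0]*x[0] + x[1]*x[1] },
	}

	result, err := optimize.Minimize(p, []float64{1, 1}, nil, nil)
	switch {
	case err == nil:
		fmt.Println("minimum found at", result.X)
	case errors.Is(err, optimize.ErrMissingGrad):
		log.Fatal("the chosen Method needs Grad, but the Problem does not provide it")
	case errors.Is(err, optimize.ErrLinesearcherFailure):
		log.Fatal("line search failed to converge; the tolerances may be too tight")
	default:
		log.Fatal(err)
	}
}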

// ErrFunc is returned when an initial function value is invalid. The invalid
// value may be either +Inf or NaN. ErrFunc satisfies the error interface.
type ErrFunc float64

func (err ErrFunc) Error() string {
	switch {
	case math.IsInf(float64(err), 1):
		return "optimize: initial function value is infinite"
	case math.IsNaN(float64(err)):
		return "optimize: initial function value is NaN"
	default:
		panic("optimize: bad ErrFunc")
	}
}
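
A hedged sketch of recovering an ErrFunc with errors.As. The deliberately NaN-returning objective and the expectation that Minimize surfaces the invalid initial value as an ErrFunc follow from the doc comment above; they are assumptions for illustration, not behavior guaranteed by this file alone.

// Hypothetical detection of an invalid initial function value.
package main

import (
	"errors"
	"fmt"
	"math"

	"gonum.org/v1/gonum/optimize"
)

func main() {
	p := optimize.Problem{
		// Objective that is NaN everywhere, so the initial value is invalid.
		Func: func(x []float64) float64 { return math.NaN() },
	}

	_, err := optimize.Minimize(p, []float64{1}, nil, nil)

	var ef optimize.ErrFunc
	if errors.As(err, &ef) {
		fmt.Printf("invalid initial function value: %v (%v)\n", float64(ef), ef)
	}
}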

// ErrGrad is returned when an initial gradient is invalid. The invalid
// gradient value may be either ±Inf or NaN. ErrGrad satisfies the error
// interface.
type ErrGrad struct {
	Grad  float64 // Grad is the invalid gradient value.
	Index int     // Index is the position at which the invalid gradient was found.
}

func (err ErrGrad) Error() string {
	switch {
	case math.IsInf(err.Grad, 0):
		return fmt.Sprintf("optimize: initial gradient is infinite at position %d", err.Index)
	case math.IsNaN(err.Grad):
		return fmt.Sprintf("optimize: initial gradient is NaN at position %d", err.Index)
	default:
		panic("optimize: bad ErrGrad")
	}
}
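
A similar sketch for ErrGrad: the gradient below deliberately writes NaN into its first component, and, per the doc comment above, an invalid initial gradient should surface as an ErrGrad carrying the offending value and index. The use of LBFGS as a gradient-based method and the broken gradient are illustrative assumptions.

// Hypothetical detection of an invalid initial gradient.
package main

import (
	"errors"
	"fmt"
	"math"

	"gonum.org/v1/gonum/optimize"
)

func main() {
	p := optimize.Problem{
		Func: func(x []float64) float64 { return x[0] * x[0] },
		Grad: func(grad, x []float64) {
			grad[0] = math.NaN() // invalid on purpose
		},
	}

	_, err := optimize.Minimize(p, []float64{1}, nil, &optimize.LBFGS{})

	var eg optimize.ErrGrad
	if errors.As(err, &eg) {
		fmt.Printf("invalid gradient %v at index %d\n", eg.Grad, eg.Index)
	}
}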

// badProblem is a shared panic string reporting an undefined objective function.
const badProblem = "optimize: objective function is undefined"