gonum.org/v1/gonum@v0.14.0/optimize/types.go

// Copyright ©2014 The Gonum Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package optimize

import (
	"fmt"
	"time"

	"gonum.org/v1/gonum/mat"
)

const defaultGradientAbsTol = 1e-12

// Operation represents the set of operations commanded by Method at each
// iteration. It is a bitmap of various Iteration and Evaluation constants.
// Individual constants must NOT be combined together by the binary OR operator
// except for the Evaluation operations.
type Operation uint64

// Supported Operations.
const (
	// NoOperation specifies that no evaluation or convergence check should
	// take place.
	NoOperation Operation = 0
	// InitIteration is sent to Recorder to indicate the initial location.
	// All fields of the location to record must be valid.
	// Method must not return it.
	InitIteration Operation = 1 << (iota - 1)
	// PostIteration is sent to Recorder to indicate the final location
	// reached during an optimization run.
	// All fields of the location to record must be valid.
	// Method must not return it.
	PostIteration
	// MajorIteration indicates that the next candidate location for
	// an optimum has been found and convergence should be checked.
	MajorIteration
	// MethodDone declares that the method is done running. A method must
	// be a Statuser in order to use this iteration, and after returning
	// MethodDone, Status must return a value other than NotTerminated.
	MethodDone
	// FuncEvaluation specifies that the objective function
	// should be evaluated.
	FuncEvaluation
	// GradEvaluation specifies that the gradient
	// of the objective function should be evaluated.
	GradEvaluation
	// HessEvaluation specifies that the Hessian
	// of the objective function should be evaluated.
	HessEvaluation
	// signalDone is used internally to signal completion.
	signalDone

	// Mask for the evaluating operations.
	evalMask = FuncEvaluation | GradEvaluation | HessEvaluation
)

func (op Operation) isEvaluation() bool {
	return op&evalMask != 0 && op&^evalMask == 0
}

func (op Operation) String() string {
	if op&evalMask != 0 {
		return fmt.Sprintf("Evaluation(Func: %t, Grad: %t, Hess: %t, Extra: 0b%b)",
			op&FuncEvaluation != 0,
			op&GradEvaluation != 0,
			op&HessEvaluation != 0,
			op&^(evalMask))
	}
	s, ok := operationNames[op]
	if ok {
		return s
	}
	return fmt.Sprintf("Operation(%d)", op)
}

var operationNames = map[Operation]string{
	NoOperation:    "NoOperation",
	InitIteration:  "InitIteration",
	MajorIteration: "MajorIteration",
	PostIteration:  "PostIteration",
	MethodDone:     "MethodDone",
	signalDone:     "signalDone",
}

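// Illustrative note, not part of the original file: the evaluation
// Operations are the only constants that may be combined with OR. For
// example, a Method that needs both the function value and the gradient
// at a trial point can request
//
//	op := FuncEvaluation | GradEvaluation
//
// for which op.isEvaluation() reports true and op.String() renders as
// "Evaluation(Func: true, Grad: true, Hess: false, Extra: 0b0)". A
// combination such as MajorIteration|FuncEvaluation violates the
// Operation contract documented above.
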
// Result represents the answer of an optimization run. It contains the optimum
// function value, X location, and gradient as well as the Status at convergence
// and Statistics taken during the run.
type Result struct {
	Location
	Stats
	Status Status
}

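// Illustrative note, not part of the original file: because Location and
// Stats are embedded, a caller of Minimize can read the optimum directly,
// e.g. result.F, result.X, result.FuncEvaluations, and result.Status.
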
// Stats contains the statistics of the run.
type Stats struct {
	MajorIterations int           // Total number of major iterations
	FuncEvaluations int           // Number of evaluations of Func
	GradEvaluations int           // Number of evaluations of Grad
	HessEvaluations int           // Number of evaluations of Hess
	Runtime         time.Duration // Total runtime of the optimization
}

// complementEval returns an evaluating operation that evaluates fields of loc
// not evaluated by eval.
func complementEval(loc *Location, eval Operation) (complEval Operation) {
	if eval&FuncEvaluation == 0 {
		complEval = FuncEvaluation
	}
	if loc.Gradient != nil && eval&GradEvaluation == 0 {
		complEval |= GradEvaluation
	}
	if loc.Hessian != nil && eval&HessEvaluation == 0 {
		complEval |= HessEvaluation
	}
	return complEval
}

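// Illustrative note, not part of the original file: if loc carries a
// non-nil Gradient and Hessian and eval is GradEvaluation, then
// complementEval returns FuncEvaluation|HessEvaluation, so that every
// allocated field of loc ends up evaluated at the same point.
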
// Problem describes the optimization problem to be solved.
type Problem struct {
	// Func evaluates the objective function at the given location. Func
	// must not modify x.
	Func func(x []float64) float64

	// Grad evaluates the gradient at x and stores the result in grad, which
	// will have the same length as x. Grad must not modify x.
	Grad func(grad, x []float64)

	// Hess evaluates the Hessian at x and stores the result in-place in hess,
	// which will have dimensions matching the length of x. Hess must not
	// modify x.
	Hess func(hess *mat.SymDense, x []float64)

	// Status reports the status of the objective function being optimized and any
	// error. This can be used to terminate early, for example when the function is
	// not able to evaluate itself. The user can use one of the pre-provided Status
	// constants, or may call NewStatus to create a custom Status value.
	Status func() (Status, error)
}

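// Illustrative sketch, not part of the original file: a Problem for the
// two-dimensional Rosenbrock function with an analytic gradient. The
// variable name exampleRosenbrock is hypothetical.
var exampleRosenbrock = Problem{
	Func: func(x []float64) float64 {
		a, b := 1-x[0], x[1]-x[0]*x[0]
		return a*a + 100*b*b
	},
	Grad: func(grad, x []float64) {
		// Write the partial derivatives into grad; x is read-only.
		grad[0] = -2*(1-x[0]) - 400*x[0]*(x[1]-x[0]*x[0])
		grad[1] = 200 * (x[1] - x[0]*x[0])
	},
}
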
// Available describes the functions available to call in Problem.
type Available struct {
	Grad bool
	Hess bool
}

func availFromProblem(prob Problem) Available {
	return Available{Grad: prob.Grad != nil, Hess: prob.Hess != nil}
}

// function tests if the Problem described by the receiver is suitable for an
// unconstrained Method that only calls the function, and returns the result.
func (has Available) function() (uses Available, err error) {
	// TODO(btracey): This needs to be modified when optimize supports
	// constrained optimization.
	return Available{}, nil
}

// gradient tests if the Problem described by the receiver is suitable for an
// unconstrained gradient-based Method, and returns the result.
func (has Available) gradient() (uses Available, err error) {
	// TODO(btracey): This needs to be modified when optimize supports
	// constrained optimization.
	if !has.Grad {
		return Available{}, ErrMissingGrad
	}
	return Available{Grad: true}, nil
}

// hessian tests if the Problem described by the receiver is suitable for an
// unconstrained Hessian-based Method, and returns the result.
func (has Available) hessian() (uses Available, err error) {
	// TODO(btracey): This needs to be modified when optimize supports
	// constrained optimization.
	if !has.Grad {
		return Available{}, ErrMissingGrad
	}
	if !has.Hess {
		return Available{}, ErrMissingHess
	}
	return Available{Grad: true, Hess: true}, nil
}

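// Illustrative sketch, not part of the original file: how a gradient-based
// Method might validate a Problem before running. The function name
// checkGradProblem is hypothetical.
func checkGradProblem(prob Problem) (Available, error) {
	has := availFromProblem(prob)
	// gradient reports ErrMissingGrad when prob.Grad is nil.
	return has.gradient()
}
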
// Settings represents settings of the optimization run. It contains initial
// settings, convergence information, and Recorder information. Convergence
// settings are only checked at MajorIterations, while Evaluation thresholds
// are checked at every Operation. See the field comments for default values.
type Settings struct {
	// InitValues specifies properties (function value, gradient, etc.) known
	// at the initial location passed to Minimize. If InitValues is non-nil, then
	// the function value F must be provided, the location X must not be specified,
	// and other fields may be specified. The values in Location may be modified
	// during the call to Minimize.
	InitValues *Location

	// GradientThreshold stops optimization with GradientThreshold status if the
	// infinity norm of the gradient is less than this value. This defaults to
	// a value of 0 (and so gradient convergence is not checked); however, note
	// that many Methods (LBFGS, CG, etc.) will converge with a small value of
	// the gradient, so to fully disable this setting the Method may need to
	// be modified.
	// This setting has no effect if the gradient is not used by the Method.
	GradientThreshold float64

	// Converger checks if the optimization has converged based on the (history
	// of) locations found during the optimization. Minimize will pass the
	// Location at every MajorIteration to the Converger.
	//
	// If the Converger is nil, a default value of
	//	FunctionConverge{
	//		Absolute:   1e-10,
	//		Iterations: 100,
	//	}
	// will be used. NeverTerminated can be used to always return a
	// NotTerminated status.
	Converger Converger

	// MajorIterations is the maximum number of iterations allowed.
	// IterationLimit status is returned if the number of major iterations
	// equals or exceeds this value.
	// If it equals zero, this setting has no effect.
	// The default value is 0.
	MajorIterations int

	// Runtime is the maximum runtime allowed. RuntimeLimit status is returned
	// if the duration of the run is longer than this value. Runtime is only
	// checked at MajorIterations of the Method.
	// If it equals zero, this setting has no effect.
	// The default value is 0.
	Runtime time.Duration

	// FuncEvaluations is the maximum allowed number of function evaluations.
	// FunctionEvaluationLimit status is returned if the total number of calls
	// to Func equals or exceeds this number.
	// If it equals zero, this setting has no effect.
	// The default value is 0.
	FuncEvaluations int

	// GradEvaluations is the maximum allowed number of gradient evaluations.
	// GradientEvaluationLimit status is returned if the total number of calls
	// to Grad equals or exceeds this number.
	// If it equals zero, this setting has no effect.
	// The default value is 0.
	GradEvaluations int

	// HessEvaluations is the maximum allowed number of Hessian evaluations.
	// HessianEvaluationLimit status is returned if the total number of calls
	// to Hess equals or exceeds this number.
	// If it equals zero, this setting has no effect.
	// The default value is 0.
	HessEvaluations int

	// Recorder records the progress of the optimization run, if non-nil.
	// If Recorder is nil, no recording is done.
	Recorder Recorder

	// Concurrent represents how many concurrent evaluations are possible.
	Concurrent int
}

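// Illustrative sketch, not part of the original file: a Settings value that
// caps the run at 1000 major iterations and 30 seconds and stops once the
// gradient infinity norm drops below 1e-6. The numbers are arbitrary
// examples, not recommended defaults; exampleSettings is a hypothetical name.
var exampleSettings = Settings{
	GradientThreshold: 1e-6,
	MajorIterations:   1000,
	Runtime:           30 * time.Second,
	Converger: &FunctionConverge{
		Absolute:   1e-10,
		Iterations: 100,
	},
}
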
// resize takes x and returns a slice of length dim. It returns a resliced x
// if cap(x) >= dim, and a new slice otherwise.
func resize(x []float64, dim int) []float64 {
	if dim > cap(x) {
		return make([]float64, dim)
	}
	return x[:dim]
}

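// Illustrative note, not part of the original file: when resize reslices a
// previously used x, the returned slice may still hold stale values from the
// old backing array, so callers are expected to overwrite every element.
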
func resizeSymDense(m *mat.SymDense, dim int) *mat.SymDense {
	if m == nil || cap(m.RawSymmetric().Data) < dim*dim {
		return mat.NewSymDense(dim, nil)
	}
	return mat.NewSymDense(dim, m.RawSymmetric().Data[:dim*dim])
}