public abstract class BaseOptimizer extends Object implements ConvexOptimizer
Modifier and Type | Field and Description |
---|---|
protected org.nd4j.linalg.learning.AdaGrad |
adaGrad |
protected Map<String,org.nd4j.linalg.learning.AdaGrad> |
adaGradForVariable |
protected NeuralNetConfiguration |
conf |
static String |
GRADIENT_KEY |
protected int |
iteration |
protected Collection<IterationListener> |
iterationListeners |
protected BackTrackLineSearch |
lineMaximizer |
protected static org.slf4j.Logger |
log |
protected Model |
model |
protected double |
oldScore |
static String |
PARAMS_KEY |
protected double |
score |
static String |
SCORE_KEY |
protected Map<String,Object> |
searchState |
protected double |
step |
protected StepFunction |
stepFunction |
protected double |
stpMax |
protected Collection<TerminationCondition> |
terminationConditions |
Constructor and Description |
---|
BaseOptimizer(NeuralNetConfiguration conf,
StepFunction stepFunction,
Collection<IterationListener> iterationListeners,
Collection<TerminationCondition> terminationConditions,
Model model) |
BaseOptimizer(NeuralNetConfiguration conf,
StepFunction stepFunction,
Collection<IterationListener> iterationListeners,
Model model) |
Modifier and Type | Method and Description |
---|---|
Map<String,org.nd4j.linalg.learning.AdaGrad> |
adaGradForVariables()
Return the AdaGrad lookup table
|
int |
batchSize()
The batch size for the optimizer
|
org.nd4j.linalg.learning.AdaGrad |
getAdaGrad()
The adagrad in this model
|
org.nd4j.linalg.learning.AdaGrad |
getAdaGradForVariable(String variable)
Get adagrad for a variable
|
Pair<Gradient,Double> |
gradientAndScore()
The gradient and score for this optimizer
|
boolean |
optimize()
Optimize call.
|
protected void |
postFirstStep(org.nd4j.linalg.api.ndarray.INDArray gradient) |
void |
postStep()
Post step (conjugate gradient among other methods needs this)
|
protected boolean |
preFirstStepProcess(org.nd4j.linalg.api.ndarray.INDArray gradient) |
void |
preProcessLine(org.nd4j.linalg.api.ndarray.INDArray line)
Pre process the line (scaling and the like)
|
double |
score()
The score for the optimizer so far
|
void |
setBatchSize(int batchSize)
Set the batch size for the optimizer
|
void |
setupSearchState(Pair<Gradient,Double> pair)
Setup the initial search state
|
void |
updateGradientAccordingToParams(Gradient gradient,
Model params,
int batchSize)
Update the gradient according to the configuration such as adagrad, momentum, and sparsity
|
void |
updateGradientAccordingToParams(org.nd4j.linalg.api.ndarray.INDArray gradient,
org.nd4j.linalg.api.ndarray.INDArray params,
int batchSize)
Update the gradient according to the configuration such as adagrad, momentum, and sparsity
|
protected NeuralNetConfiguration conf
protected org.nd4j.linalg.learning.AdaGrad adaGrad
protected int iteration
protected static final org.slf4j.Logger log
protected StepFunction stepFunction
protected Collection<IterationListener> iterationListeners
protected Collection<TerminationCondition> terminationConditions
protected Model model
protected BackTrackLineSearch lineMaximizer
protected double step
protected double score
protected double oldScore
protected double stpMax
public static final String GRADIENT_KEY
public static final String SCORE_KEY
public static final String PARAMS_KEY
public BaseOptimizer(NeuralNetConfiguration conf, StepFunction stepFunction, Collection<IterationListener> iterationListeners, Model model)
conf
- stepFunction
- iterationListeners
- model
public BaseOptimizer(NeuralNetConfiguration conf, StepFunction stepFunction, Collection<IterationListener> iterationListeners, Collection<TerminationCondition> terminationConditions, Model model)
conf
- stepFunction
- iterationListeners
- terminationConditions
- model
public void updateGradientAccordingToParams(org.nd4j.linalg.api.ndarray.INDArray gradient, org.nd4j.linalg.api.ndarray.INDArray params, int batchSize)
updateGradientAccordingToParams
in interface ConvexOptimizer
gradient
- the gradient to modify
public double score()
ConvexOptimizer
score
in interface ConvexOptimizer
public Pair<Gradient,Double> gradientAndScore()
ConvexOptimizer
gradientAndScore
in interface ConvexOptimizer
public boolean optimize()
optimize
in interface ConvexOptimizer
protected void postFirstStep(org.nd4j.linalg.api.ndarray.INDArray gradient)
protected boolean preFirstStepProcess(org.nd4j.linalg.api.ndarray.INDArray gradient)
public int batchSize()
ConvexOptimizer
batchSize
in interface ConvexOptimizer
public void setBatchSize(int batchSize)
ConvexOptimizer
setBatchSize
in interface ConvexOptimizer
public void preProcessLine(org.nd4j.linalg.api.ndarray.INDArray line)
preProcessLine
in interface ConvexOptimizer
line
- the line to pre process
public void postStep()
postStep
in interface ConvexOptimizer
public org.nd4j.linalg.learning.AdaGrad getAdaGrad()
ConvexOptimizer
getAdaGrad
in interface ConvexOptimizer
public Map<String,org.nd4j.linalg.learning.AdaGrad> adaGradForVariables()
ConvexOptimizer
adaGradForVariables
in interface ConvexOptimizer
public org.nd4j.linalg.learning.AdaGrad getAdaGradForVariable(String variable)
ConvexOptimizer
getAdaGradForVariable
in interface ConvexOptimizer
public void updateGradientAccordingToParams(Gradient gradient, Model params, int batchSize)
ConvexOptimizer
updateGradientAccordingToParams
in interface ConvexOptimizer
gradient
- the gradient to modify
params
- the parameters to update
public void setupSearchState(Pair<Gradient,Double> pair)
setupSearchState
in interface ConvexOptimizer
pair
- the gradient and score pair used to set up the initial search state
Copyright © 2015. All Rights Reserved.