public static class NeuralNetConfiguration.Builder extends Object implements Cloneable
Modifier and Type | Field and Description |
---|---|
protected String |
activationFunction |
protected double |
biasInit |
protected Distribution |
dist |
protected double |
dropOut |
protected Updater |
updater |
protected WeightInit |
weightInit |
Constructor and Description |
---|
Builder() |
Modifier and Type | Method and Description |
---|---|
NeuralNetConfiguration.Builder |
activation(String activationFunction)
Activation function / neuron non-linearity
Typical values include:
"relu" (rectified linear), "tanh", "sigmoid", "softmax", "hardtanh", "leakyrelu", "maxout", "softsign", "softplus" |
NeuralNetConfiguration.Builder |
adamMeanDecay(double adamMeanDecay)
Mean decay rate for Adam updater.
|
NeuralNetConfiguration.Builder |
adamVarDecay(double adamVarDecay)
Variance decay rate for Adam updater.
|
NeuralNetConfiguration.Builder |
biasInit(double biasInit) |
NeuralNetConfiguration |
build()
Return a configuration based on this builder
|
NeuralNetConfiguration.Builder |
clone() |
NeuralNetConfiguration.Builder |
constrainGradientToUnitNorm(boolean constrainGradientToUnitNorm)
Deprecated.
|
NeuralNetConfiguration.Builder |
dist(Distribution dist)
Distribution to sample initial weights from.
|
NeuralNetConfiguration.Builder |
dropOut(double dropOut) |
NeuralNetConfiguration.Builder |
gradientNormalization(GradientNormalization gradientNormalization)
Gradient normalization strategy.
|
NeuralNetConfiguration.Builder |
gradientNormalizationThreshold(double threshold)
Threshold for gradient normalization, only used for GradientNormalization.ClipL2PerLayer,
GradientNormalization.ClipL2PerParamType, and GradientNormalization.ClipElementWiseAbsoluteValue
Not used otherwise. L2 threshold for first two types of clipping, or absolute value threshold for last type of clipping. |
NeuralNetConfiguration.Builder |
iterations(int numIterations)
Number of optimization iterations.
|
NeuralNetConfiguration.Builder |
l1(double l1)
L1 regularization coefficient.
|
NeuralNetConfiguration.Builder |
l2(double l2)
L2 regularization coefficient.
|
NeuralNetConfiguration.Builder |
layer(Layer layer)
Layer class.
|
NeuralNetConfiguration.Builder |
learningRate(double learningRate)
Learning rate.
|
NeuralNetConfiguration.Builder |
learningRateAfter(Map<Integer,Double> learningRateAfter)
Learning rate schedule.
|
NeuralNetConfiguration.Builder |
learningRateScoreBasedDecayRate(double lrScoreBasedDecay)
Rate to decrease learningRate by when the score stops improving.
|
NeuralNetConfiguration.ListBuilder |
list(int size)
Number of layers not including input.
|
NeuralNetConfiguration.Builder |
maxNumLineSearchIterations(int maxNumLineSearchIterations)
Maximum number of line search iterations.
|
NeuralNetConfiguration.Builder |
miniBatch(boolean miniBatch)
Process input as minibatch vs full dataset.
|
NeuralNetConfiguration.Builder |
minimize(boolean minimize)
Whether to minimize or maximize the objective (cost) function.
Default is to minimize (true).
|
NeuralNetConfiguration.Builder |
momentum(double momentum)
Momentum rate.
|
NeuralNetConfiguration.Builder |
momentumAfter(Map<Integer,Double> momentumAfter)
Momentum schedule.
|
NeuralNetConfiguration.Builder |
optimizationAlgo(OptimizationAlgorithm optimizationAlgo) |
NeuralNetConfiguration.Builder |
regularization(boolean useRegularization)
Whether to use regularization (l1, l2, dropout, etc.).
|
NeuralNetConfiguration.Builder |
rho(double rho)
AdaDelta coefficient.
|
NeuralNetConfiguration.Builder |
rmsDecay(double rmsDecay)
Decay rate for RMSProp.
|
NeuralNetConfiguration.Builder |
schedules(boolean schedules)
Whether to use schedules, learningRateAfter and momentumAfter
|
NeuralNetConfiguration.Builder |
seed(int seed)
Random number generator seed.
|
NeuralNetConfiguration.Builder |
seed(long seed)
Random number generator seed.
|
NeuralNetConfiguration.Builder |
stepFunction(StepFunction stepFunction)
Step function to apply for back track line search.
|
NeuralNetConfiguration.Builder |
timeSeriesLength(int timeSeriesLength)
Deprecated.
|
NeuralNetConfiguration.Builder |
updater(Updater updater)
Gradient updater.
|
NeuralNetConfiguration.Builder |
useDropConnect(boolean useDropConnect)
Use drop connect: multiply the coefficients
by a binomial sampling with respect to the dropout probability
|
NeuralNetConfiguration.Builder |
weightInit(WeightInit weightInit)
Weight initialization scheme.
|
protected String activationFunction
protected WeightInit weightInit
protected double biasInit
protected Distribution dist
protected double dropOut
protected Updater updater
@Deprecated public NeuralNetConfiguration.Builder timeSeriesLength(int timeSeriesLength)
public NeuralNetConfiguration.Builder miniBatch(boolean miniBatch)
public NeuralNetConfiguration.Builder useDropConnect(boolean useDropConnect)
useDropConnect
- whether to use drop connect or not
public NeuralNetConfiguration.Builder minimize(boolean minimize)
public NeuralNetConfiguration.Builder maxNumLineSearchIterations(int maxNumLineSearchIterations)
maxNumLineSearchIterations
- must be > 0
public NeuralNetConfiguration.Builder layer(Layer layer)
public NeuralNetConfiguration.Builder stepFunction(StepFunction stepFunction)
public NeuralNetConfiguration.ListBuilder list(int size)
public NeuralNetConfiguration.Builder iterations(int numIterations)
public NeuralNetConfiguration.Builder seed(int seed)
public NeuralNetConfiguration.Builder seed(long seed)
public NeuralNetConfiguration.Builder optimizationAlgo(OptimizationAlgorithm optimizationAlgo)
@Deprecated public NeuralNetConfiguration.Builder constrainGradientToUnitNorm(boolean constrainGradientToUnitNorm)
GradientNormalization
public NeuralNetConfiguration.Builder regularization(boolean useRegularization)
public NeuralNetConfiguration.Builder schedules(boolean schedules)
public NeuralNetConfiguration.Builder clone()
public NeuralNetConfiguration.Builder activation(String activationFunction)
public NeuralNetConfiguration.Builder weightInit(WeightInit weightInit)
WeightInit
public NeuralNetConfiguration.Builder biasInit(double biasInit)
public NeuralNetConfiguration.Builder dist(Distribution dist)
public NeuralNetConfiguration.Builder learningRate(double learningRate)
public NeuralNetConfiguration.Builder learningRateAfter(Map<Integer,Double> learningRateAfter)
public NeuralNetConfiguration.Builder learningRateScoreBasedDecayRate(double lrScoreBasedDecay)
public NeuralNetConfiguration.Builder l1(double l1)
public NeuralNetConfiguration.Builder l2(double l2)
public NeuralNetConfiguration.Builder dropOut(double dropOut)
public NeuralNetConfiguration.Builder momentum(double momentum)
public NeuralNetConfiguration.Builder momentumAfter(Map<Integer,Double> momentumAfter)
public NeuralNetConfiguration.Builder updater(Updater updater)
Updater
public NeuralNetConfiguration.Builder rho(double rho)
rho
- the AdaDelta coefficient
public NeuralNetConfiguration.Builder rmsDecay(double rmsDecay)
public NeuralNetConfiguration.Builder adamMeanDecay(double adamMeanDecay)
public NeuralNetConfiguration.Builder adamVarDecay(double adamVarDecay)
public NeuralNetConfiguration.Builder gradientNormalization(GradientNormalization gradientNormalization)
gradientNormalization
- Type of normalization to use. Defaults to None.
GradientNormalization
public NeuralNetConfiguration.Builder gradientNormalizationThreshold(double threshold)
public NeuralNetConfiguration build()
Copyright © 2015. All Rights Reserved.