public abstract static class BaseLayer.Builder<T extends BaseLayer.Builder<T>> extends Layer.Builder<T>
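The self-referential type parameter T extends BaseLayer.Builder&lt;T&gt; is the standard Java self-type idiom for fluent builders: every setter returns T, the concrete subclass builder type, so subclass-specific setters remain reachable after calls to inherited ones. A minimal sketch, assuming the concrete DenseLayer.Builder subclass and its nIn/nOut setters, which are not part of this page:

```java
import org.deeplearning4j.nn.conf.layers.DenseLayer;
import org.nd4j.linalg.activations.Activation;

// activation(...) is declared on BaseLayer.Builder but returns
// DenseLayer.Builder (T), so the chain can continue with nOut(...).
DenseLayer layer = new DenseLayer.Builder()
        .activation(Activation.RELU)
        .nIn(784)
        .nOut(256)
        .build();
```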
Modifier and Type | Field and Description |
---|---|
protected org.nd4j.linalg.activations.IActivation | activationFn |
protected double | adamMeanDecay Deprecated. |
protected double | adamVarDecay Deprecated. |
protected double | biasInit |
protected double | biasLearningRate |
protected Distribution | dist |
protected double | epsilon Deprecated. |
protected GradientNormalization | gradientNormalization |
protected double | gradientNormalizationThreshold |
protected org.nd4j.linalg.learning.config.IUpdater | iupdater |
protected double | l1 |
protected double | l1Bias |
protected double | l2 |
protected double | l2Bias |
protected double | learningRate |
protected LearningRatePolicy | learningRatePolicy |
protected Map<Integer,Double> | learningRateSchedule |
protected double | momentum Deprecated. |
protected Map<Integer,Double> | momentumAfter Deprecated. |
protected double | rho Deprecated. |
protected double | rmsDecay Deprecated. |
protected Updater | updater Deprecated. |
protected WeightInit | weightInit |

Fields inherited from class Layer.Builder: dropOut, layerName
Constructor and Description |
---|
Builder() |
Modifier and Type | Method and Description |
---|---|
T | activation(org.nd4j.linalg.activations.Activation activation) |
T | activation(org.nd4j.linalg.activations.IActivation activationFunction) |
T | activation(String activationFunction) Deprecated. Use activation(Activation) or activation(IActivation) instead. |
T | adamMeanDecay(double adamMeanDecay) Deprecated. Use .updater(Adam.builder().beta1(adamMeanDecay).build()) instead. |
T | adamVarDecay(double adamVarDecay) Deprecated. Use .updater(Adam.builder().beta2(adamVarDecay).build()) instead. |
T | biasInit(double biasInit) |
T | biasLearningRate(double biasLearningRate) Bias learning rate. |
T | dist(Distribution dist) Distribution to sample initial weights from. |
T | epsilon(double epsilon) Deprecated. Use .updater(Adam.builder().epsilon(epsilon).build()) or similar instead. |
T | gradientNormalization(GradientNormalization gradientNormalization) Gradient normalization strategy. |
T | gradientNormalizationThreshold(double threshold) Threshold for gradient normalization. Used only for GradientNormalization.ClipL2PerLayer, GradientNormalization.ClipL2PerParamType, and GradientNormalization.ClipElementWiseAbsoluteValue; not used otherwise. It is the L2 threshold for the first two types of clipping, and the absolute-value threshold for the last. |
T | l1(double l1) L1 regularization coefficient (weights only). |
T | l1Bias(double l1Bias) L1 regularization coefficient for the bias. |
T | l2(double l2) L2 regularization coefficient (weights only). |
T | l2Bias(double l2Bias) L2 regularization coefficient for the bias. |
T | learningRate(double learningRate) Learning rate. |
T | learningRateDecayPolicy(LearningRatePolicy policy) Learning rate decay policy. |
T | learningRateSchedule(Map<Integer,Double> learningRateSchedule) Learning rate schedule. |
T | momentum(double momentum) Deprecated. Use .updater(new Nesterovs(momentum)) instead. |
T | momentumAfter(Map<Integer,Double> momentumAfter) Deprecated. Use .updater(Nesterovs.builder().momentumSchedule(schedule).build()) instead. |
T | rho(double rho) Deprecated. Use .updater(new AdaDelta(rho,epsilon)) instead. |
T | rmsDecay(double rmsDecay) Deprecated. Use .updater(new RmsProp(rmsDecay)) instead. |
T | updater(org.nd4j.linalg.learning.config.IUpdater updater) Gradient updater. |
T | updater(Updater updater) Gradient updater. |
T | weightInit(WeightInit weightInit) Weight initialization scheme. |

Methods inherited from class Layer.Builder: build, dropOut, name
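Putting the summary above together, a hedged sketch of a typical configuration through this builder. It again assumes the DenseLayer.Builder subclass and the single-argument Adam(learningRate) constructor, and uses the non-deprecated IUpdater overload in place of the deprecated per-hyperparameter setters:

```java
import org.deeplearning4j.nn.conf.GradientNormalization;
import org.deeplearning4j.nn.conf.layers.DenseLayer;
import org.deeplearning4j.nn.weights.WeightInit;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.learning.config.Adam;

DenseLayer layer = new DenseLayer.Builder()
        .nIn(784).nOut(256)
        .activation(Activation.RELU)
        .weightInit(WeightInit.XAVIER)
        .biasInit(0.0)
        .l2(1e-4)                        // weights only; see l2Bias(double) for the bias
        .updater(new Adam(1e-3))         // one IUpdater instead of the deprecated setters
        .gradientNormalization(GradientNormalization.ClipL2PerLayer)
        .gradientNormalizationThreshold(1.0)
        .build();
```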
protected org.nd4j.linalg.activations.IActivation activationFn
protected WeightInit weightInit
protected double biasInit
protected Distribution dist
protected double learningRate
protected double biasLearningRate
@Deprecated protected double momentum
@Deprecated protected Map<Integer,Double> momentumAfter
protected double l1
protected double l2
protected double l1Bias
protected double l2Bias
@Deprecated protected Updater updater
protected org.nd4j.linalg.learning.config.IUpdater iupdater
@Deprecated protected double rho
@Deprecated protected double epsilon
@Deprecated protected double rmsDecay
@Deprecated protected double adamMeanDecay
@Deprecated protected double adamVarDecay
protected GradientNormalization gradientNormalization
protected double gradientNormalizationThreshold
protected LearningRatePolicy learningRatePolicy
@Deprecated public T activation(String activationFunction)
Deprecated. Use activation(Activation) or activation(IActivation) instead.
public T activation(org.nd4j.linalg.activations.IActivation activationFunction)
public T activation(org.nd4j.linalg.activations.Activation activation)
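The two non-deprecated overloads differ in flexibility: the Activation enum covers the standard functions by name, while the IActivation overload accepts a configured instance. A short sketch, assuming the DenseLayer.Builder subclass and the ActivationLReLU implementation from ND4J, neither of which is documented on this page:

```java
import org.deeplearning4j.nn.conf.layers.DenseLayer;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.activations.impl.ActivationLReLU;

DenseLayer.Builder builder = new DenseLayer.Builder();

// Enum overload: a standard activation function by name.
builder.activation(Activation.TANH);

// IActivation overload: a configured instance, here leaky ReLU with alpha = 0.1.
builder.activation(new ActivationLReLU(0.1));
```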
public T weightInit(WeightInit weightInit)
Weight initialization scheme. See Also: WeightInit
public T biasInit(double biasInit)
public T dist(Distribution dist)
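In DL4J, dist(Distribution) supplies the sampling distribution and takes effect when the weight initialization scheme is set to WeightInit.DISTRIBUTION; that pairing is an assumption here, as this page documents the two methods separately. A minimal sketch:

```java
import org.deeplearning4j.nn.conf.distribution.NormalDistribution;
import org.deeplearning4j.nn.conf.layers.DenseLayer;
import org.deeplearning4j.nn.weights.WeightInit;

DenseLayer.Builder builder = new DenseLayer.Builder();

// Sample initial weights from N(0, 0.01^2); DISTRIBUTION tells the
// initializer to use the distribution passed to dist(...).
builder.weightInit(WeightInit.DISTRIBUTION)
       .dist(new NormalDistribution(0.0, 0.01));
```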
public T learningRate(double learningRate)
public T biasLearningRate(double biasLearningRate)
public T learningRateSchedule(Map<Integer,Double> learningRateSchedule)
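learningRateSchedule(Map&lt;Integer,Double&gt;) pairs with learningRateDecayPolicy(LearningRatePolicy.Schedule): each map key marks a point in training (assumed here to be an iteration count) at which the learning rate switches to the mapped value. A sketch under those assumptions:

```java
import java.util.HashMap;
import java.util.Map;
import org.deeplearning4j.nn.conf.LearningRatePolicy;
import org.deeplearning4j.nn.conf.layers.DenseLayer;

DenseLayer.Builder builder = new DenseLayer.Builder();

// Step the learning rate down over the course of training.
Map<Integer, Double> schedule = new HashMap<>();
schedule.put(0, 1e-2);
schedule.put(1_000, 1e-3);
schedule.put(5_000, 1e-4);

builder.learningRate(1e-2)
       .learningRateDecayPolicy(LearningRatePolicy.Schedule)
       .learningRateSchedule(schedule);
```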
public T l1(double l1)
L1 regularization coefficient (weights only). Use l1Bias(double) to configure the L1 regularization coefficient for the bias.

public T l2(double l2)
L2 regularization coefficient (weights only). Use l2Bias(double) to configure the L2 regularization coefficient for the bias.

public T l1Bias(double l1Bias)
L1 regularization coefficient for the bias. See Also: l1(double)

public T l2Bias(double l2Bias)
L2 regularization coefficient for the bias. See Also: l2(double)
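Weight and bias penalties are configured independently, as the four methods above note: l1/l2 never touch the bias, and l1Bias/l2Bias never touch the weights. A short sketch (DenseLayer.Builder is assumed as the concrete subclass):

```java
import org.deeplearning4j.nn.conf.layers.DenseLayer;

DenseLayer.Builder builder = new DenseLayer.Builder();

builder.l2(1e-4)       // L2 penalty on the weights only
       .l2Bias(1e-5)   // separate, typically smaller, L2 penalty on the bias
       .l1(0.0)        // no L1 penalty on the weights
       .l1Bias(0.0);   // no L1 penalty on the bias
```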
@Deprecated public T momentum(double momentum)
Deprecated. Use .updater(new Nesterovs(momentum)) instead.

@Deprecated public T momentumAfter(Map<Integer,Double> momentumAfter)
Deprecated. Use .updater(Nesterovs.builder().momentumSchedule(schedule).build()) instead.

public T updater(Updater updater)
Gradient updater. See Also: Updater

public T updater(org.nd4j.linalg.learning.config.IUpdater updater)
Gradient updater. For example, Adam or Nesterovs.
Parameters: updater - Updater to use

@Deprecated public T rho(double rho)
Deprecated. Use .updater(new AdaDelta(rho,epsilon)) instead.
Parameters: rho -

@Deprecated public T rmsDecay(double rmsDecay)
Deprecated. Use .updater(new RmsProp(rmsDecay)) instead.

@Deprecated public T epsilon(double epsilon)
Deprecated. Use .updater(Adam.builder().epsilon(epsilon).build()) or similar instead.
Parameters: epsilon - Epsilon value to use

@Deprecated public T adamMeanDecay(double adamMeanDecay)
Deprecated. Use .updater(Adam.builder().beta1(adamMeanDecay).build()) instead.

@Deprecated public T adamVarDecay(double adamVarDecay)
Deprecated. Use .updater(Adam.builder().beta2(adamVarDecay).build()) instead.
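Each deprecated setter above corresponds to one hyperparameter of an IUpdater; migrating means bundling them into a single updater instance, exactly as the deprecation notes suggest. A sketch of the mapping (DenseLayer.Builder is assumed as the concrete subclass):

```java
import org.deeplearning4j.nn.conf.layers.DenseLayer;
import org.nd4j.linalg.learning.config.Adam;
import org.nd4j.linalg.learning.config.RmsProp;

DenseLayer.Builder builder = new DenseLayer.Builder();

// Before (deprecated): builder.adamMeanDecay(0.9).adamVarDecay(0.999).epsilon(1e-8);
// After: the same three hyperparameters on one Adam instance.
builder.updater(Adam.builder()
        .beta1(0.9)      // was adamMeanDecay
        .beta2(0.999)    // was adamVarDecay
        .epsilon(1e-8)
        .build());

// Likewise, builder.rmsDecay(0.95) becomes:
builder.updater(new RmsProp(0.95));
```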
public T gradientNormalization(GradientNormalization gradientNormalization)
Gradient normalization strategy.
Parameters: gradientNormalization - Type of normalization to use. Defaults to None.
See Also: GradientNormalization
public T gradientNormalizationThreshold(double threshold)
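The threshold only matters for the three clipping strategies listed in the method summary above; renormalization strategies ignore it. For example, to clip each layer's gradient to an L2 norm of at most 1.0 (DenseLayer.Builder is again an assumed concrete subclass):

```java
import org.deeplearning4j.nn.conf.GradientNormalization;
import org.deeplearning4j.nn.conf.layers.DenseLayer;

DenseLayer.Builder builder = new DenseLayer.Builder();

// The threshold is read only by the Clip* strategies.
builder.gradientNormalization(GradientNormalization.ClipL2PerLayer)
       .gradientNormalizationThreshold(1.0);
```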
public T learningRateDecayPolicy(LearningRatePolicy policy)
Learning rate decay policy.
Parameters: policy - Type of policy to use. Defaults to None.