public class EmbeddingLayer extends BaseLayer<EmbeddingLayer>
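EmbeddingLayer is a feed-forward layer that takes a single integer class index per example (input shape [miniBatch, 1]) rather than a one-hot vector; it is mathematically equivalent to a DenseLayer applied to the one-hot encoding, but far cheaper for a large number of classes, and it can only be used as the first layer of a network. This page documents the layer implementation; networks are built with the separate configuration class org.deeplearning4j.nn.conf.layers.EmbeddingLayer. A minimal configuration sketch, assuming the DL4J 0.9.x builder API (vocabSize, embeddingDim, and numLabels are illustrative values, not from this Javadoc):

```java
import org.deeplearning4j.nn.conf.MultiLayerConfiguration;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.layers.EmbeddingLayer;
import org.deeplearning4j.nn.conf.layers.OutputLayer;
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.lossfunctions.LossFunctions;

public class EmbeddingConfigSketch {
    public static void main(String[] args) {
        int vocabSize = 10_000;  // number of input classes (illustrative)
        int embeddingDim = 128;  // size of each embedding vector (illustrative)
        int numLabels = 5;       // output classes (illustrative)

        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                .list()
                // Embedding layers must come first: input is a column of integer
                // class indices with shape [miniBatch, 1], not a one-hot matrix.
                .layer(0, new EmbeddingLayer.Builder()
                        .nIn(vocabSize)
                        .nOut(embeddingDim)
                        .activation(Activation.IDENTITY)
                        .build())
                .layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                        .nIn(embeddingDim)
                        .nOut(numLabels)
                        .activation(Activation.SOFTMAX)
                        .build())
                .build();

        MultiLayerNetwork net = new MultiLayerNetwork(conf);
        net.init();
    }
}
```

After init(), the layer's weight matrix has one row per class; training learns one embedding vector per class index.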
Nested classes/interfaces inherited from interface org.deeplearning4j.nn.api.Layer:
Layer.TrainingMode, Layer.Type

Fields inherited from class org.deeplearning4j.nn.layers.BaseLayer:
gradient, gradientsFlattened, gradientViews, optimizer, params, paramsFlattened, score, solver

Fields inherited from class org.deeplearning4j.nn.layers.AbstractLayer:
cacheMode, conf, dropoutApplied, dropoutMask, index, input, iterationListeners, maskArray, maskState, preOutput
| Constructor and Description |
| --- |
| EmbeddingLayer(NeuralNetConfiguration conf) |
| Modifier and Type | Method and Description |
| --- | --- |
| org.nd4j.linalg.api.ndarray.INDArray | activate(boolean training): Trigger an activation with the last specified input |
| protected void | applyDropOutIfNecessary(boolean training) |
| org.nd4j.linalg.primitives.Pair<Gradient,org.nd4j.linalg.api.ndarray.INDArray> | backpropGradient(org.nd4j.linalg.api.ndarray.INDArray epsilon): Calculate the gradient relative to the error in the next layer |
| boolean | isPretrainLayer(): Returns true if the layer can be trained in an unsupervised/pretrain manner (VAE, RBMs etc.) |
| org.nd4j.linalg.api.ndarray.INDArray | preOutput(boolean training) |
Methods inherited from class org.deeplearning4j.nn.layers.BaseLayer:
accumulateScore, activate, activate, activationMean, applyLearningRateScoreDecay, calcGradient, calcL1, calcL2, clone, computeGradientAndScore, error, fit, fit, getGradientsViewArray, getOptimizer, getParam, gradient, initParams, iterate, layerConf, merge, numParams, params, paramTable, paramTable, preOutput, score, setBackpropGradientsViewArray, setParam, setParams, setParams, setParamsViewArray, setParamTable, setScoreWithZ, toString, transpose, update, update
Methods inherited from class org.deeplearning4j.nn.layers.AbstractLayer:
activate, activate, activate, addListeners, applyMask, batchSize, clear, conf, derivativeActivation, feedForwardMaskArray, getIndex, getInput, getInputMiniBatchSize, getListeners, getMaskArray, gradientAndScore, init, input, layerId, numParams, preOutput, preOutput, setCacheMode, setConf, setIndex, setInput, setInputMiniBatchSize, setListeners, setListeners, setMaskArray, type, validateInput
public EmbeddingLayer(NeuralNetConfiguration conf)
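The constructor takes the full NeuralNetConfiguration for the layer. In normal use it is not called directly; it is invoked when MultiLayerNetwork.init() instantiates each configured layer. A hypothetical direct-construction sketch (nIn/nOut values are illustrative):

```java
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.layers.feedforward.embedding.EmbeddingLayer;

public class DirectConstructionSketch {
    public static void main(String[] args) {
        // Single-layer configuration wrapping the embedding layer config.
        NeuralNetConfiguration layerConf = new NeuralNetConfiguration.Builder()
                .layer(new org.deeplearning4j.nn.conf.layers.EmbeddingLayer.Builder()
                        .nIn(1000).nOut(64).build())
                .build();

        // Direct construction stores the configuration only; parameters are not
        // yet allocated, so setParams(...) (or instantiation through
        // MultiLayerNetwork.init()) must happen before activate() or
        // backpropGradient() can be used.
        EmbeddingLayer layer = new EmbeddingLayer(layerConf);
    }
}
```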
public org.nd4j.linalg.primitives.Pair<Gradient,org.nd4j.linalg.api.ndarray.INDArray> backpropGradient(org.nd4j.linalg.api.ndarray.INDArray epsilon)

Description copied from interface: Layer
Calculate the gradient relative to the error in the next layer.

Specified by: backpropGradient in interface Layer
Overrides: backpropGradient in class BaseLayer<EmbeddingLayer>

Parameters:
epsilon - w^(L+1)*delta^(L+1). Or, equivalently, dC/da = (dC/dz)*(dz/da), where C is the cost function and a = sigma(z) is the activation.

public org.nd4j.linalg.api.ndarray.INDArray preOutput(boolean training)

Overrides: preOutput in class BaseLayer<EmbeddingLayer>
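For this layer, preOutput is effectively an index lookup rather than a full matrix multiply: for an input holding class index i, the pre-activation is row i of the weight matrix plus the bias, which is exactly what a DenseLayer would compute on the one-hot encoding of i. A standalone ND4J sketch of that equivalence (illustrative names; this is not the layer's internal code):

```java
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;

public class EmbeddingLookupSketch {
    public static void main(String[] args) {
        int numClasses = 5, nOut = 3;
        INDArray W = Nd4j.rand(numClasses, nOut); // hypothetical weight matrix
        INDArray b = Nd4j.zeros(1, nOut);         // hypothetical bias row

        int i = 2; // example class index

        // Embedding-style lookup: row i of W, plus bias.
        INDArray lookup = W.getRow(i).add(b);

        // Equivalent dense computation: one-hot(i) times W, plus bias.
        INDArray oneHot = Nd4j.zeros(1, numClasses);
        oneHot.putScalar(new int[]{0, i}, 1.0);
        INDArray dense = oneHot.mmul(W).add(b);

        System.out.println(lookup.equalsWithEps(dense, 1e-6)); // prints: true
    }
}
```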
public org.nd4j.linalg.api.ndarray.INDArray activate(boolean training)

Description copied from interface: Layer
Trigger an activation with the last specified input.

Specified by: activate in interface Layer
Overrides: activate in class BaseLayer<EmbeddingLayer>

Parameters:
training - training or test mode

public boolean isPretrainLayer()

Returns true if the layer can be trained in an unsupervised/pretrain manner (VAE, RBMs etc.)

Specified by: isPretrainLayer in interface Layer
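Continuing the configuration sketch near the top of this page (net initialized), a forward pass through the embedding layer looks like the fragment below; note that the input is a [miniBatch, 1] column of class indices, not a one-hot matrix:

```java
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;

// 'net' is the initialized MultiLayerNetwork from the configuration sketch above.
org.deeplearning4j.nn.api.Layer layer = net.getLayer(0);

// Three examples, one class index each: shape [3, 1].
INDArray indices = Nd4j.create(new double[][]{{3}, {7}, {1}});
layer.setInput(indices);

// false = test/inference mode (dropout, if configured, is not applied).
INDArray embeddings = layer.activate(false); // shape [3, embeddingDim]
```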
protected void applyDropOutIfNecessary(boolean training)

Overrides: applyDropOutIfNecessary in class AbstractLayer<EmbeddingLayer>