#ifndef MLPACK_METHODS_ANN_RNN_HPP
#define MLPACK_METHODS_ANN_RNN_HPP

template<
    typename OutputLayerType = NegativeLogLikelihood<>,
    typename InitializationRuleType = RandomInitialization
>
class RNN
template<template<typename> class OptimizerType>
void Train(const arma::mat& predictors,
           const arma::mat& responses,
           OptimizerType<NetworkType>& optimizer);
void Train(const arma::mat& predictors, const arma::mat& responses);
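A minimal end-to-end training sketch, assuming the mlpack 2.x ANN API: the layer names (Linear, LSTM, SigmoidLayer), the LSTM and SGD constructor arguments, and the one-column-per-sequence data layout are assumptions drawn from that era's examples rather than from this header.

    #include <mlpack/core.hpp>
    #include <mlpack/methods/ann/rnn.hpp>
    #include <mlpack/methods/ann/layer/layer.hpp>
    #include <mlpack/core/optimizers/sgd/sgd.hpp>

    using namespace mlpack;
    using namespace mlpack::ann;
    using namespace mlpack::optimization;

    int main()
    {
      const size_t rho = 10;         // time steps per sequence (== BPTT depth)
      const size_t inputSize = 4;    // features per time step
      const size_t outputSize = 3;   // outputs per time step

      // One column per sequence; the rho time steps of a sequence are stacked
      // vertically inside the column (assumed layout).
      arma::mat predictors(inputSize * rho, 100, arma::fill::randu);
      arma::mat responses(outputSize * rho, 100, arma::fill::randu);

      // Regression-style setup with a mean squared error output layer.
      RNN<MeanSquaredError<> > model(rho);
      model.Add<Linear<> >(inputSize, 16);
      model.Add<LSTM<> >(16, 16, rho);
      model.Add<Linear<> >(16, outputSize);
      model.Add<SigmoidLayer<> >();

      // Train with an explicitly constructed optimizer ...
      SGD<decltype(model)> opt(model, 0.01, 50 * predictors.n_cols, 1e-5);
      model.Train(predictors, responses, opt);

      // ... or let Train() build a default optimizer internally.
      model.Train(predictors, responses);
    }

The optimizer-taking overload is the one to use when step size, iteration count, or shuffling need tuning; the two-argument overload is the convenience form.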
void Predict(arma::mat& predictors, arma::mat& responses);
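Prediction uses the same data layout; a short continuation of the sketch above (the meaning of the single flag comes from the member documentation below):

    arma::mat testSequences(inputSize * rho, 10, arma::fill::randu);
    arma::mat predictions;
    model.Predict(testSequences, predictions);
    // With single == false every time step gets an output; with single == true
    // only the last element of each input sequence is predicted.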
void Gradient(const arma::mat& parameters, arma::mat& gradient);
template<typename LayerType>
void Add(const LayerType& layer) { network.push_back(new LayerType(layer)); }
template<class LayerType, class... Args>
void Add(Args... args) { network.push_back(new LayerType(args...)); }
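The two overloads differ only in how the layer object comes into existence: the first copies an already-configured layer, the second constructs the layer in place from forwarded constructor arguments. A brief sketch continuing the example above (the Dropout constructor argument is an assumption):

    // Construct in place; the explicit template argument names the layer type
    // and the remaining arguments go to its constructor.
    model.Add<Linear<> >(16, 16);

    // Copy an existing, already-configured layer object.
    Dropout<> dropout(0.3);
    model.Add(dropout);

In both cases the network stores a heap-allocated copy (pushed onto the network vector), so the caller's layer object does not need to outlive the model.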
template<typename Archive>
void Serialize(Archive& ar, const unsigned int /* version */);
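Serialize() is what lets the model pass through mlpack's boost::serialization-based model I/O; a hedged sketch using data::Save / data::Load (file and object names are arbitrary, and whether the layer structure must be rebuilt before loading is version-dependent):

    // Persist the trained model (XML picked for readability; binary works too).
    mlpack::data::Save("rnn_model.xml", "rnn", model);

    // Restore into a freshly constructed network (depending on the version,
    // the same layers may need to be Add()ed before loading).
    RNN<MeanSquaredError<> > restored(rho);
    mlpack::data::Load("rnn_model.xml", "rnn", restored);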
void Forward(arma::mat&& input);
void SinglePredict(const arma::mat& predictors, arma::mat& responses);
#include "rnn_impl.hpp"

DeleteVisitor executes the destructor of the instantiated object.
DeleteVisitor deleteVisitor
Locally-stored delete visitor.
size_t numFunctions
The number of separable functions (the number of predictor points).
arma::mat predictors
The matrix of data points (predictors).
arma::mat error
The current error for the backward pass.
~RNN()
Destructor to release allocated memory.
Linear algebra utility functions, generally performed on matrices or vectors.
bool reset
Indicator if we already trained the model.
void Gradient()
Iterate through all layer modules and update the gradient using the layer-defined optimizer.
RNN(const size_t rho, const bool single=false, OutputLayerType outputLayer=OutputLayerType(), InitializationRuleType initializeRule=InitializationRuleType())
Create the RNN object with the given predictors and responses set (this is the set that is used to train the network).
void Forward(arma::mat &&input)
The Forward algorithm (part of the Forward-Backward algorithm).
The core includes that mlpack expects; standard C++ includes and Armadillo.
bool deterministic
The current evaluation mode (training or testing).
arma::mat & Parameters()
Modify the initial point for the optimization.
WeightSizeVisitor returns the number of weights of the given module.
ResetVisitor resetVisitor
Locally-stored reset visitor.
arma::mat currentInput
The current input of the forward/backward pass.
OutputParameterVisitor outputParameterVisitor
Locally-stored output parameter visitor.
OutputLayerType outputLayer
Instantiated output layer used to evaluate the network.
Implementation of a standard recurrent neural network container.
size_t inputSize
The input size.
void ResetGradients(arma::mat &gradient)
Reset the gradient for all modules that implement the Gradient function.
size_t targetSize
The target size.
ResetVisitor executes the Reset() function.
size_t NumFunctions() const
Return the number of separable functions (the number of predictor points).
OutputParameterVisitor exposes the output parameter of the given module.
void Predict(arma::mat &predictors, arma::mat &responses)
Predict the responses to a given set of predictors.
void ResetDeterministic()
Reset the module status by setting the current deterministic parameter for all modules that implement the Deterministic function.
boost::variant< Add< arma::mat, arma::mat > *, AddMerge< arma::mat, arma::mat > *, BaseLayer< LogisticFunction, arma::mat, arma::mat > *, BaseLayer< IdentityFunction, arma::mat, arma::mat > *, BaseLayer< TanhFunction, arma::mat, arma::mat > *, BaseLayer< RectifierFunction, arma::mat, arma::mat > *, Concat< arma::mat, arma::mat > *, ConcatPerformance< NegativeLogLikelihood< arma::mat, arma::mat >, arma::mat, arma::mat > *, Constant< arma::mat, arma::mat > *, Convolution< NaiveConvolution< ValidConvolution >, NaiveConvolution< FullConvolution >, NaiveConvolution< ValidConvolution >, arma::mat, arma::mat > *, DropConnect< arma::mat, arma::mat > *, Dropout< arma::mat, arma::mat > *, Glimpse< arma::mat, arma::mat > *, HardTanH< arma::mat, arma::mat > *, Join< arma::mat, arma::mat > *, LeakyReLU< arma::mat, arma::mat > *, Linear< arma::mat, arma::mat > *, LinearNoBias< arma::mat, arma::mat > *, LogSoftMax< arma::mat, arma::mat > *, Lookup< arma::mat, arma::mat > *, LSTM< arma::mat, arma::mat > *, MaxPooling< arma::mat, arma::mat > *, MeanPooling< arma::mat, arma::mat > *, MeanSquaredError< arma::mat, arma::mat > *, MultiplyConstant< arma::mat, arma::mat > *, NegativeLogLikelihood< arma::mat, arma::mat > *, PReLU< arma::mat, arma::mat > *, Recurrent< arma::mat, arma::mat > *, RecurrentAttention< arma::mat, arma::mat > *, ReinforceNormal< arma::mat, arma::mat > *, Select< arma::mat, arma::mat > *, Sequential< arma::mat, arma::mat > *, VRClassReward< arma::mat, arma::mat > * > LayerTypes
void Backward()
The Backward algorithm (part of the Forward-Backward algorithm).
arma::mat responses
The matrix of responses to the input data points.
void Serialize(Archive &ar, const unsigned int)
Serialize the model.
void Train(const arma::mat &predictors, const arma::mat &responses, OptimizerType< NetworkType > &optimizer)
Train the recurrent neural network on the given input data using the given optimizer.
WeightSizeVisitor weightSizeVisitor
Locally-stored weight size visitor.
std::vector< arma::mat > moduleOutputParameter
List of all module parameters for the backward pass (BPTT).
DeltaVisitor deltaVisitor
Locally-stored delta visitor.
Stochastic Gradient Descent is a technique for minimizing a function which can be expressed as a sum of other functions.
arma::mat parameter
Matrix of (trained) parameters.
DeltaVisitor exposes the delta parameter of the given module.
size_t outputSize
The output size.
size_t rho
Number of steps to backpropagate through time (BPTT).
std::vector< LayerTypes > network
Locally-stored model modules.
void SinglePredict(const arma::mat &predictors, arma::mat &responses)
double Evaluate(const arma::mat &, const size_t i, const bool deterministic=true)
Evaluate the recurrent neural network with the given parameters.
bool single
Only predict the last element of the input sequence.
void Add(const LayerType &layer)
InitializationRuleType initializeRule
Instantiated InitializationRule object for initializing the network parameter.
const arma::mat & Parameters() const
Return the initial point for the optimization.
void Add(LayerTypes layer)
void ResetParameters()
Reset the module information (weights/parameters).