mlpack master
rnn.hpp
1 
12 #ifndef MLPACK_METHODS_ANN_RNN_HPP
13 #define MLPACK_METHODS_ANN_RNN_HPP
14 
15 #include <mlpack/prereqs.hpp>
16 
22 
26 
27 namespace mlpack {
28 namespace ann {
29 
36 template<
37  typename OutputLayerType = NegativeLogLikelihood<>,
38  typename InitializationRuleType = RandomInitialization
39 >
40 class RNN
41 {
42  public:
44  using NetworkType = RNN<OutputLayerType, InitializationRuleType>;
45 
58  RNN(const size_t rho,
59  const bool single = false,
60  OutputLayerType outputLayer = OutputLayerType(),
61  InitializationRuleType initializeRule = InitializationRuleType());
62 
77  RNN(const arma::mat& predictors,
78  const arma::mat& responses,
79  const size_t rho,
80  const bool single = false,
81  OutputLayerType outputLayer = OutputLayerType(),
82  InitializationRuleType initializeRule = InitializationRuleType());
83 
85  ~RNN();
86 
100  template<
101  template<typename> class OptimizerType = mlpack::optimization::SGD
102  >
103  void Train(const arma::mat& predictors,
104  const arma::mat& responses,
105  OptimizerType<NetworkType>& optimizer);
106 
120  template<
121  template<typename> class OptimizerType = mlpack::optimization::SGD
122  >
123  void Train(const arma::mat& predictors, const arma::mat& responses);
124 
133  void Predict(arma::mat& predictors, arma::mat& responses);
134 
144  double Evaluate(const arma::mat& /* parameters */,
145  const size_t i,
146  const bool deterministic = true);
147 
158  void Gradient(const arma::mat& parameters,
159  const size_t i,
160  arma::mat& gradient);
161 
162  /*
163  * Add a new module to the model.
164  *
165  * @param layer The Layer to be added to the model.
166  */
167  template<typename LayerType>
168  void Add(const LayerType& layer) { network.push_back(new LayerType(layer)); }
169 
170  /*
171  * Add a new module to the model.
172  *
173  * @param args The layer parameter.
174  */
175  template <class LayerType, class... Args>
176  void Add(Args... args) { network.push_back(new LayerType(args...)); }
177 
178  /*
179  * Add a new module to the model.
180  *
181  * @param layer The Layer to be added to the model.
182  */
183  void Add(LayerTypes layer) { network.push_back(layer); }
184 
186  size_t NumFunctions() const { return numFunctions; }
187 
189  const arma::mat& Parameters() const { return parameter; }
191  arma::mat& Parameters() { return parameter; }
192 
194  template<typename Archive>
195  void Serialize(Archive& ar, const unsigned int /* version */);
196 
197  private:
198  // Helper functions.
205  void Forward(arma::mat&& input);
206 
211  void Backward();
212 
217  void Gradient();
218 
219  /*
220  * Predict the response of the given input sequence.
221  *
222  * @param predictors Input predictors.
223  * @param responses Vector to put output prediction of a response into.
224  */
225  void SinglePredict(const arma::mat& predictors, arma::mat& responses);
226 
230  void ResetParameters();
231 
236  void ResetDeterministic();
237 
241  void ResetGradients(arma::mat& gradient);
242 
244  size_t rho;
245 
247  OutputLayerType outputLayer;
248 
251  InitializationRuleType initializeRule;
252 
254  size_t inputSize;
255 
257  size_t outputSize;
258 
260  size_t targetSize;
261 
263  bool reset;
264 
266  bool single;
267 
269  std::vector<LayerTypes> network;
270 
272  arma::mat predictors;
273 
275  arma::mat responses;
276 
278  arma::mat parameter;
279 
281  size_t numFunctions;
282 
284  arma::mat error;
285 
287  arma::mat currentInput;
288 
290  DeltaVisitor deltaVisitor;
291 
293  OutputParameterVisitor outputParameterVisitor;
294 
296  std::vector<arma::mat> moduleOutputParameter;
297 
299  WeightSizeVisitor weightSizeVisitor;
300 
302  ResetVisitor resetVisitor;
303 
305  DeleteVisitor deleteVisitor;
306 
308  bool deterministic;
309 }; // class RNN
310 
311 } // namespace ann
312 } // namespace mlpack
313 
314 // Include implementation.
315 #include "rnn_impl.hpp"
316 
317 #endif
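For orientation, here is a minimal usage sketch of this class. It assumes the mlpack 2.x layer headers and the layer aliases IdentityLayer<>, Linear<>, LSTM<>, and LogSoftMax<>; the layer sizes, the rho value, and the data contents are illustrative only and not taken from this header.

// Sketch only: build, train, and query a small RNN (sizes are hypothetical).
#include <mlpack/core.hpp>
#include <mlpack/methods/ann/rnn.hpp>
#include <mlpack/methods/ann/layer/layer.hpp>

using namespace mlpack::ann;

int main()
{
  const size_t rho = 10;  // Steps to backpropagate through time (BPTT).

  // Default template parameters: NegativeLogLikelihood<> output layer and
  // RandomInitialization.
  RNN<> model(rho);

  // Each Add<LayerType>(args...) forwards args to the layer constructor and
  // appends the module to the network.
  model.Add<IdentityLayer<> >();
  model.Add<Linear<> >(1, 16);
  model.Add<LSTM<> >(16, 16, rho);
  model.Add<Linear<> >(16, 4);
  model.Add<LogSoftMax<> >();

  // predictors/responses hold the training sequences; see the Train()
  // documentation for the expected data layout.
  arma::mat predictors, responses;
  // ... fill predictors and responses here ...

  model.Train(predictors, responses);  // Default mlpack::optimization::SGD.

  arma::mat predictions;
  model.Predict(predictors, predictions);

  return 0;
}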
DeleteVisitor executes the destructor of the instantiated object.
DeleteVisitor deleteVisitor
Locally-stored delete visitor.
Definition: rnn.hpp:305
size_t numFunctions
The number of separable functions (the number of predictor points).
Definition: rnn.hpp:281
arma::mat predictors
The matrix of data points (predictors).
Definition: rnn.hpp:272
arma::mat error
The current error for the backward pass.
Definition: rnn.hpp:284
~RNN()
Destructor to release allocated memory.
Linear algebra utility functions, generally performed on matrices or vectors.
Definition: binarize.hpp:18
bool reset
Indicator of whether the model has already been trained.
Definition: rnn.hpp:263
void Gradient()
Iterate through all layer modules and update the gradient using the layer-defined optimizer.
RNN(const size_t rho, const bool single=false, OutputLayerType outputLayer=OutputLayerType(), InitializationRuleType initializeRule=InitializationRuleType())
Create the RNN object with the given number of BPTT steps (rho); the predictors and responses used to train the network are supplied later through Train(). Optionally, specify the output layer and initialization rule to use.
void Forward(arma::mat &&input)
The Forward algorithm (part of the Forward-Backward algorithm).
The core includes that mlpack expects; standard C++ includes and Armadillo.
bool deterministic
The current evaluation mode (training or testing).
Definition: rnn.hpp:308
arma::mat & Parameters()
Modify the initial point for the optimization.
Definition: rnn.hpp:191
WeightSizeVisitor returns the number of weights of the given module.
ResetVisitor resetVisitor
Locally-stored reset visitor.
Definition: rnn.hpp:302
arma::mat currentInput
The current input of the forward/backward pass.
Definition: rnn.hpp:287
OutputParameterVisitor outputParameterVisitor
Locally-stored output parameter visitor.
Definition: rnn.hpp:293
OutputLayerType outputLayer
Instantiated output layer used to evaluate the network.
Definition: rnn.hpp:247
Implementation of a standard recurrent neural network container.
Definition: rnn.hpp:40
size_t inputSize
The input size.
Definition: rnn.hpp:254
void ResetGradients(arma::mat &gradient)
Reset the gradient for all modules that implement the Gradient function.
size_t targetSize
The target size.
Definition: rnn.hpp:260
void Add(Args...args)
Definition: rnn.hpp:176
ResetVisitor executes the Reset() function.
size_t NumFunctions() const
Return the number of separable functions (the number of predictor points).
Definition: rnn.hpp:186
OutputParameterVisitor exposes the output parameter of the given module.
void Predict(arma::mat &predictors, arma::mat &responses)
Predict the responses to a given set of predictors.
void ResetDeterministic()
Reset the module status by setting the current deterministic parameter for all modules that implement the Deterministic function.
boost::variant<
    Add<arma::mat, arma::mat>*, AddMerge<arma::mat, arma::mat>*,
    BaseLayer<LogisticFunction, arma::mat, arma::mat>*,
    BaseLayer<IdentityFunction, arma::mat, arma::mat>*,
    BaseLayer<TanhFunction, arma::mat, arma::mat>*,
    BaseLayer<RectifierFunction, arma::mat, arma::mat>*,
    Concat<arma::mat, arma::mat>*,
    ConcatPerformance<NegativeLogLikelihood<arma::mat, arma::mat>, arma::mat, arma::mat>*,
    Constant<arma::mat, arma::mat>*,
    Convolution<NaiveConvolution<ValidConvolution>, NaiveConvolution<FullConvolution>,
        NaiveConvolution<ValidConvolution>, arma::mat, arma::mat>*,
    DropConnect<arma::mat, arma::mat>*, Dropout<arma::mat, arma::mat>*,
    Glimpse<arma::mat, arma::mat>*, HardTanH<arma::mat, arma::mat>*,
    Join<arma::mat, arma::mat>*, LeakyReLU<arma::mat, arma::mat>*,
    Linear<arma::mat, arma::mat>*, LinearNoBias<arma::mat, arma::mat>*,
    LogSoftMax<arma::mat, arma::mat>*, Lookup<arma::mat, arma::mat>*,
    LSTM<arma::mat, arma::mat>*, MaxPooling<arma::mat, arma::mat>*,
    MeanPooling<arma::mat, arma::mat>*, MeanSquaredError<arma::mat, arma::mat>*,
    MultiplyConstant<arma::mat, arma::mat>*,
    NegativeLogLikelihood<arma::mat, arma::mat>*, PReLU<arma::mat, arma::mat>*,
    Recurrent<arma::mat, arma::mat>*, RecurrentAttention<arma::mat, arma::mat>*,
    ReinforceNormal<arma::mat, arma::mat>*, Select<arma::mat, arma::mat>*,
    Sequential<arma::mat, arma::mat>*, VRClassReward<arma::mat, arma::mat>*
> LayerTypes
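LayerTypes is the variant of module pointers that the network vector stores, and each of the three Add() overloads ends up pushing one of these pointers. A hedged sketch of how the overloads differ (the layer sizes are made up for illustration):

#include <mlpack/methods/ann/rnn.hpp>
#include <mlpack/methods/ann/layer/layer.hpp>

using namespace mlpack::ann;

void AddModules(RNN<>& model, const size_t rho)
{
  // Add(const LayerType& layer): copy an existing layer object.
  Linear<> linear(10, 16);
  model.Add(linear);

  // Add<LayerType>(args...): construct the module in place from its
  // constructor arguments.
  model.Add<LSTM<> >(16, 16, rho);

  // Add(LayerTypes layer): hand over an already-allocated module wrapped in
  // the variant; the destructor releases it via the delete visitor.
  LayerTypes logSoftMax = new LogSoftMax<>();
  model.Add(logSoftMax);
}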
void Backward()
The Backward algorithm (part of the Forward-Backward algorithm).
arma::mat responses
The matrix of responses to the input data points.
Definition: rnn.hpp:275
void Serialize(Archive &ar, const unsigned int)
Serialize the model.
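Serialize() hooks the model into boost::serialization, so the usual mlpack data::Save()/data::Load() model helpers should work with it; a sketch under that assumption (the file name and constructor argument below are placeholders):

#include <mlpack/core.hpp>
#include <mlpack/methods/ann/rnn.hpp>

using namespace mlpack;

void SaveAndReload(ann::RNN<>& trained)
{
  // Write the model state through boost serialization (binary archive).
  data::Save("rnn_model.bin", "rnn_model", trained, false);

  // Reload into a fresh object (the constructor argument is a placeholder).
  ann::RNN<> restored(10);
  data::Load("rnn_model.bin", "rnn_model", restored, false);
}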
void Train(const arma::mat &predictors, const arma::mat &responses, OptimizerType< NetworkType > &optimizer)
Train the recurrent neural network on the given input data using the given optimizer.
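To control the optimization, an optimizer templated on the network type is constructed on the model and passed to Train(). A sketch using the mlpack 2.x optimization::SGD class; the step size, iteration budget, and tolerance are arbitrary:

#include <mlpack/core.hpp>
#include <mlpack/core/optimizers/sgd/sgd.hpp>
#include <mlpack/methods/ann/rnn.hpp>

using namespace mlpack::ann;
using namespace mlpack::optimization;

void TrainWithSGD(RNN<>& model,
                  const arma::mat& predictors,
                  const arma::mat& responses)
{
  // SGD<RNN<>> matches OptimizerType<NetworkType> in the Train() signature.
  SGD<RNN<> > opt(model, 0.01, 100000, 1e-5, true);
  model.Train(predictors, responses, opt);
}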
WeightSizeVisitor weightSizeVisitor
Locally-stored weight size visitor.
Definition: rnn.hpp:299
std::vector< arma::mat > moduleOutputParameter
List of all module parameters for the backward pass (BPTT).
Definition: rnn.hpp:296
DeltaVisitor deltaVisitor
Locally-stored delta visitor.
Definition: rnn.hpp:290
Stochastic Gradient Descent is a technique for minimizing a function which can be expressed as a sum of other functions.
Definition: sgd.hpp:76
arma::mat parameter
Matrix of (trained) parameters.
Definition: rnn.hpp:278
DeltaVisitor exposes the delta parameter of the given module.
size_t outputSize
The output size.
Definition: rnn.hpp:257
size_t rho
Number of steps to backpropagate through time (BPTT).
Definition: rnn.hpp:244
std::vector< LayerTypes > network
Locally-stored model modules.
Definition: rnn.hpp:269
void SinglePredict(const arma::mat &predictors, arma::mat &responses)
Predict the response of the given input sequence.
double Evaluate(const arma::mat &, const size_t i, const bool deterministic=true)
Evaluate the recurrent neural network with the given parameters.
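Evaluate() and Gradient() expose the network as a decomposable objective, a sum of one term per predictor point, which is what the SGD-style optimizers consume. Note that the parameters argument is unused (its name is commented out in the signature); the model's current Parameters() are evaluated. A small illustrative loop:

#include <mlpack/methods/ann/rnn.hpp>

using namespace mlpack::ann;

// Sum the per-sequence objective values, the same quantity an optimizer
// accumulates one term at a time.
double TotalObjective(RNN<>& model)
{
  double objective = 0.0;
  for (size_t i = 0; i < model.NumFunctions(); ++i)
    objective += model.Evaluate(model.Parameters(), i);
  return objective;
}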
bool single
Only predict the last element of the input sequence.
Definition: rnn.hpp:266
void Add(const LayerType &layer)
Definition: rnn.hpp:168
InitializationRuleType initializeRule
Instantiated InitializationRule object for initializing the network parameter.
Definition: rnn.hpp:251
const arma::mat & Parameters() const
Return the initial point for the optimization.
Definition: rnn.hpp:189
void Add(LayerTypes layer)
Definition: rnn.hpp:183
void ResetParameters()
Reset the module information (weights/parameters).