mlpack  master
parametric_relu.hpp
Go to the documentation of this file.
1 
15 #ifndef MLPACK_METHODS_ANN_LAYER_PReLU_HPP
16 #define MLPACK_METHODS_ANN_LAYER_PReLU_HPP
17 
18 #include <mlpack/prereqs.hpp>
19 
20 namespace mlpack {
21 namespace ann {
22 
41 template <
42  typename InputDataType = arma::mat,
43  typename OutputDataType = arma::mat
44 >
45 class PReLU
46 {
47  public:
56  PReLU(const double user_alpha = 0.03);
57 
58  /*
59  * Reset the layer parameter.
60  */
61  void Reset();
62 
70  template<typename InputType, typename OutputType>
71  void Forward(const InputType&& input, OutputType&& output);
72 
82  template<typename DataType>
83  void Backward(const DataType&& input, DataType&& gy, DataType&& g);
84 
92  template<typename eT>
93  void Gradient(const arma::Mat<eT>&& input,
94  arma::Mat<eT>&& error,
95  arma::Mat<eT>&& gradient);
96 
98  OutputDataType const& Parameters() const { return alpha; }
100  OutputDataType& Parameters() { return alpha; }
101 
103  InputDataType const& InputParameter() const { return inputParameter; }
105  InputDataType& InputParameter() { return inputParameter; }
106 
108  OutputDataType const& OutputParameter() const { return outputParameter; }
110  OutputDataType& OutputParameter() { return outputParameter; }
111 
113  OutputDataType const& Delta() const { return delta; }
115  OutputDataType& Delta() { return delta; }
116 
118  OutputDataType const& Gradient() const { return gradient; }
120  OutputDataType& Gradient() { return gradient; }
121 
123  double const& Alpha() const { return alpha(0); }
125  double& Alpha() { return alpha(0); }
126 
130  template<typename Archive>
131  void Serialize(Archive& ar, const unsigned int /* version */);
132 
133  private:
140  double Fn(const double x)
141  {
142  return std::max(x, alpha(0) * x);
143  }
144 
151  template<typename eT>
152  void Fn(const arma::Mat<eT>& x, arma::Mat<eT>& y)
153  {
154  y = x;
155  arma::uvec negative = arma::find(x < 0);
156  y(negative) = x(negative) * alpha(0);
157  }
158 
165  double Deriv(const double x)
166  {
167  return (x >= 0) ? 1 : alpha(0);
168  }
169 
177  template<typename InputType, typename OutputType>
178  void Deriv(const InputType& x, OutputType& y)
179  {
180  y = x;
181 
182  for (size_t i = 0; i < x.n_elem; i++)
183  {
184  y(i) = Deriv(x(i));
185  }
186  }
187 
189  OutputDataType delta;
190 
192  InputDataType inputParameter;
193 
195  OutputDataType outputParameter;
196 
198  OutputDataType alpha;
199 
201  OutputDataType gradient;
202 
204  double user_alpha;
205 
206 }; // class PReLU
207 
208 } // namespace ann
209 } // namespace mlpack
210 
211 // Include implementation.
212 #include "parametric_relu_impl.hpp"
213 
214 #endif
OutputDataType const & OutputParameter() const
Get the output parameter.
double Fn(const double x)
Computes the parametric ReLU function.
OutputDataType gradient
Locally-stored gradient object.
void Deriv(const InputType &x, OutputType &y)
Computes the first derivative of the PReLU function.
OutputDataType const & Parameters() const
Get the parameters.
Linear algebra utility functions, generally performed on matrices or vectors.
Definition: binarize.hpp:18
void Serialize(Archive &ar, const unsigned int)
Serialize the layer.
The core includes that mlpack expects; standard C++ includes and Armadillo.
double user_alpha
Leakiness parameter given by the user, in the range 0 &lt; alpha &lt; 1.
OutputDataType delta
Locally-stored delta object.
InputDataType & InputParameter()
Modify the input parameter.
void Fn(const arma::Mat< eT > &x, arma::Mat< eT > &y)
Computes the parametric ReLU function using a dense matrix as input.
The PReLU activation function, defined by (where alpha is trainable)
void Forward(const InputType &&input, OutputType &&output)
Ordinary feed forward pass of a neural network, evaluating the function f(x) by propagating the activity forward through f.
OutputDataType & Parameters()
Modify the parameters.
OutputDataType & Gradient()
Modify the gradient.
OutputDataType const & Delta() const
Get the delta.
InputDataType const & InputParameter() const
Get the input parameter.
double & Alpha()
Modify the non-zero gradient (alpha).
OutputDataType & Delta()
Modify the delta.
OutputDataType outputParameter
Locally-stored output parameter object.
PReLU(const double user_alpha=0.03)
Create the PReLU object using the specified parameters.
double const & Alpha() const
Get the non-zero gradient (alpha).
OutputDataType alpha
Leakiness parameter object (trainable alpha).
OutputDataType const & Gradient() const
Get the gradient.
InputDataType inputParameter
Locally-stored input parameter object.
double Deriv(const double x)
Computes the first derivative of the parametric ReLU function.
void Backward(const DataType &&input, DataType &&gy, DataType &&g)
Ordinary feed backward pass of a neural network, calculating the function f(x) by propagating x backwards through f, using the results from the feed forward pass.
OutputDataType & OutputParameter()
Modify the output parameter.