#ifndef MLPACK_METHODS_ANN_LAYER_PReLU_HPP
#define MLPACK_METHODS_ANN_LAYER_PReLU_HPP

#include <mlpack/prereqs.hpp> // Core includes that mlpack expects: standard C++ and Armadillo.

template<typename InputDataType = arma::mat,
         typename OutputDataType = arma::mat>
class PReLU
{
 public:
  /**
   * Ordinary feed forward pass of a neural network, evaluating the function
   * f(x) by propagating the activity forward through f.
   */
  template<typename InputType, typename OutputType>
  void Forward(const InputType&& input, OutputType&& output);
  /**
   * Ordinary feed backward pass of a neural network, calculating the function
   * f(x) by propagating x backwards through f, using the results from the
   * feed forward pass.
   */
  template<typename DataType>
  void Backward(const DataType&& input, DataType&& gy, DataType&& g);
  /**
   * Calculate the gradient using the output delta and the input activation.
   */
  template<typename eT>
  void Gradient(const arma::Mat<eT>&& input,
                arma::Mat<eT>&& error,
                arma::Mat<eT>&& gradient);
  //! Serialize the layer.
  template<typename Archive>
  void Serialize(Archive& ar, const unsigned int /* version */);
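  // Editorial note (not part of the original header): Backward() applies the
  // chain rule element-wise, g = gy % f'(x), where f'(x) = 1 for x >= 0 and
  // alpha for x < 0; Gradient() accumulates the derivative with respect to
  // the trainable alpha, which is x for x < 0 and 0 otherwise.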
 private:
  //! Computes the parametric ReLU function.
  double Fn(const double x)
  {
    return std::max(x, alpha(0) * x);
  }
  //! Computes the parametric ReLU function using a dense matrix as input.
  template<typename eT>
  void Fn(const arma::Mat<eT>& x, arma::Mat<eT>& y)
  {
    y = x;
    arma::uvec negative = arma::find(x < 0);
    y(negative) = x(negative) * alpha(0);
  }
  //! Computes the first derivative of the parametric ReLU function.
  double Deriv(const double x)
  {
    return (x >= 0) ? 1 : alpha(0);
  }
  //! Computes the first derivative of the PReLU function, element-wise.
  template<typename InputType, typename OutputType>
  void Deriv(const InputType& x, OutputType& y)
  {
    y = x;
    for (size_t i = 0; i < x.n_elem; i++)
      y(i) = Deriv(x(i));
  }

  // Accessor methods and data members are omitted here; see the member
  // summary below.
}; // class PReLU

#include "parametric_relu_impl.hpp"

#endif
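A minimal standalone sketch (not part of the header) that reproduces the
element-wise rule above with a hypothetical fixed alpha = 0.03, using plain
Armadillo instead of the layer class, so the numbers can be checked by hand:

  #include <armadillo>
  #include <iostream>

  int main()
  {
    const double alpha = 0.03;
    arma::rowvec x = {-2.0, -0.5, 0.0, 1.5};

    // Forward: f(x) = max(x, alpha * x), applied element-wise.
    arma::rowvec y = x;
    arma::uvec negative = arma::find(x < 0);
    y(negative) = x(negative) * alpha;        // y = [-0.06, -0.015, 0, 1.5]

    // Derivative w.r.t. the input: 1 for x >= 0, alpha otherwise.
    arma::rowvec dydx = arma::ones<arma::rowvec>(x.n_elem);
    dydx(negative).fill(alpha);               // dydx = [0.03, 0.03, 1, 1]

    // Contribution to the alpha gradient: df/dalpha = x for x < 0, else 0.
    const double dalpha = arma::accu(x(negative));  // -2.5

    y.print("f(x):");
    dydx.print("f'(x):");
    std::cout << "sum of df/dalpha: " << dalpha << std::endl;
    return 0;
  }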
The PReLU activation function is defined by f(x) = max(x, alpha * x), where
alpha is a trainable parameter. The remaining members of the PReLU class are
its constructor, accessors, and locally-stored state, summarized below (a
hedged usage sketch follows the list):

PReLU(const double user_alpha = 0.03)
    Create the PReLU object using the specified parameters.

InputDataType const& InputParameter() const / InputDataType& InputParameter()
    Get / modify the input parameter.

OutputDataType const& OutputParameter() const / OutputDataType& OutputParameter()
    Get / modify the output parameter.
OutputDataType const& Parameters() const / OutputDataType& Parameters()
    Get / modify the parameters.

OutputDataType const& Delta() const / OutputDataType& Delta()
    Get / modify the delta.
OutputDataType const& Gradient() const / OutputDataType& Gradient()
    Get / modify the gradient.

double const& Alpha() const / double& Alpha()
    Get / modify the non-zero gradient (alpha).

OutputDataType alpha
    Leakiness parameter object (holds the trainable alpha).

double user_alpha
    Leakiness parameter given by the user, in the range 0 < alpha < 1.

InputDataType inputParameter
    Locally-stored input parameter object.

OutputDataType outputParameter
    Locally-stored output parameter object.

OutputDataType delta
    Locally-stored delta object.

OutputDataType gradient
    Locally-stored gradient object.
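For orientation, a hedged usage sketch, assuming the mlpack 3.x ANN API that
this header belongs to; FFN, Linear, LogSoftMax, NegativeLogLikelihood and
RandomInitialization are drawn from that API rather than from this file, so
treat the exact names and signatures as assumptions:

  #include <mlpack/core.hpp>
  #include <mlpack/methods/ann/ffn.hpp>
  #include <mlpack/methods/ann/layer/layer.hpp>

  using namespace mlpack::ann;

  int main()
  {
    // Toy data: 10-dimensional inputs, all labelled as class 1.
    arma::mat data = arma::randu<arma::mat>(10, 100);
    arma::mat labels = arma::ones<arma::mat>(1, 100);

    FFN<NegativeLogLikelihood<>, RandomInitialization> model;
    model.Add<Linear<>>(10, 16);
    model.Add<PReLU<>>(0.03);   // alpha starts at 0.03 and is trained.
    model.Add<Linear<>>(16, 2);
    model.Add<LogSoftMax<>>();

    model.Train(data, labels);
    return 0;
  }

The alpha parameter is trained along with the layer weights, so the 0.03
passed to the constructor is only the initial value of the non-zero gradient.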