Go to the documentation of this file.
15 #ifndef MLPACK_METHODS_ANN_LAYER_PReLU_HPP
16 #define MLPACK_METHODS_ANN_LAYER_PReLU_HPP
42 typename InputDataType = arma::mat,
43 typename OutputDataType = arma::mat
56 PReLU(
const double userAlpha = 0.03);
70 template<
typename InputType,
typename OutputType>
71 void Forward(
const InputType& input, OutputType& output);
82 template<
typename DataType>
83 void Backward(
const DataType& input,
const DataType& gy, DataType& g);
94 const arma::Mat<eT>& error,
95 arma::Mat<eT>& gradient);
98 OutputDataType
const&
Parameters()
const {
return alpha; }
108 OutputDataType
const&
Delta()
const {
return delta; }
110 OutputDataType&
Delta() {
return delta; }
113 OutputDataType
const&
Gradient()
const {
return gradient; }
118 double const&
Alpha()
const {
return alpha(0); }
120 double&
Alpha() {
return alpha(0); }
125 template<
typename Archive>
130 OutputDataType delta;
133 OutputDataType outputParameter;
136 OutputDataType alpha;
139 OutputDataType gradient;
149 #include "parametric_relu_impl.hpp"
OutputDataType const & Delta() const
Get the delta.
double const & Alpha() const
Get the non-zero gradient (alpha).
The core includes that mlpack expects; standard C++ includes and Armadillo.
OutputDataType & Gradient()
Modify the gradient.
OutputDataType & Delta()
Modify the delta.
void Forward(const InputType &input, OutputType &output)
Ordinary feed forward pass of a neural network, evaluating the function f(x) by propagating the activity forward through f.
Linear algebra utility functions, generally performed on matrices or vectors.
The PReLU activation function, defined by f(x) = max(x, 0) + alpha * min(x, 0) (where alpha is trainable).
double & Alpha()
Modify the non zero gradient.
OutputDataType const & Parameters() const
Get the parameters.
PReLU(const double userAlpha=0.03)
Create the PReLU object using the specified parameters.
OutputDataType & Parameters()
Modify the parameters.
void Gradient(const arma::Mat< eT > &input, const arma::Mat< eT > &error, arma::Mat< eT > &gradient)
Calculate the gradient using the output delta and the input activation.
OutputDataType & OutputParameter()
Modify the output parameter.
void serialize(Archive &ar, const unsigned int)
Serialize the layer.
void Backward(const DataType &input, const DataType &gy, DataType &g)
Ordinary feed backward pass of a neural network, calculating the function f(x) by propagating x backwards through f.
OutputDataType const & OutputParameter() const
Get the output parameter.
OutputDataType const & Gradient() const
Get the gradient.