RMSProp(
    double lr = 1e-3,
    double decay = 1e-3,
    double epsilon = 1e-7,
    double rho = .9) :
    Optimizer(lr, decay),
    _epsilon(epsilon),
    _rho(rho) {}
void update_params(std::shared_ptr<Dense> &layer)
{
    Eigen::MatrixXd weights = layer->weights();
    Eigen::MatrixXd biases = layer->biases();
    Eigen::MatrixXd dweights = layer->dweights();
    Eigen::MatrixXd dbiases = layer->dbiases();

    Eigen::MatrixXd weights_cache = layer->weights_optimizer();
    Eigen::MatrixXd biases_cache = layer->biases_optimizer();

    // Exponentially decaying average of the squared gradients (the RMSProp cache).
    weights_cache = _rho * weights_cache + (1 - _rho) * dweights.cwisePow(2);
    biases_cache = _rho * biases_cache + (1 - _rho) * dbiases.cwisePow(2);

    // Gradient step scaled by the root of the cache; _epsilon guards against division by zero.
    weights += (-_current_lr * dweights.array() / (weights_cache.cwisePow(.5).array() + _epsilon)).matrix();
    biases += (-_current_lr * dbiases.array() / (biases_cache.cwisePow(.5).array() + _epsilon)).matrix();

    // Persist the updated caches and parameters back to the layer.
    layer->weights_optimizer(weights_cache);
    layer->biases_optimizer(biases_cache);

    layer->weights(weights);
    layer->biases(biases);
}
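Written out, update_params applies the standard RMSProp rule coefficient-wise to the weights and the biases. With g_t the gradient (dweights or dbiases), c_t the running cache, eta_t the decayed learning rate _current_lr, rho the cache decay rate and epsilon the stability constant:

$$c_t = \rho\, c_{t-1} + (1 - \rho)\, g_t^{2}$$
$$\theta_t = \theta_{t-1} - \frac{\eta_t\, g_t}{\sqrt{c_t} + \epsilon}$$

The squaring, square root and division are all element-wise, which is what the .cwisePow() and .array() expressions in the listing implement.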
Referenced symbols:
Optimizer: base class for all optimizers (Optimizer.hpp:15); its member _current_lr is declared at Optimizer.hpp:78.
RMSProp: Root Mean Square Propagation optimizer (RMSProp.hpp:14).
RMSProp::RMSProp(double lr = 1e-3, double decay = 1e-3, double epsilon = 1e-7, double rho = .9): constructs a new RMSProp object (RMSProp.hpp:24).
void RMSProp::update_params(std::shared_ptr<Dense> &layer): updates the parameters of the layer (RMSProp.hpp:35).
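A minimal usage sketch follows. Only the RMSProp constructor and the update_params() call come from this header; the train_step helper, the Dense layer's construction and the backward pass that fills its gradients are assumptions introduced here for illustration.

#include <memory>
#include "RMSProp.hpp"  // the header documented above; the header defining Dense is assumed to be included as well

// Hypothetical helper: applies one RMSProp step to a Dense layer whose gradients
// (dweights/dbiases) are assumed to have been filled by a preceding backward pass.
void train_step(RMSProp &optimizer, std::shared_ptr<Dense> &layer)
{
    optimizer.update_params(layer);  // updates the layer's weights, biases and optimizer caches
}

// The optimizer would typically be constructed once, e.g. RMSProp optimizer(1e-3, 1e-3, 1e-7, 0.9);,
// and reused across iterations.

Reusing a single optimizer instance across steps keeps whatever learning-rate decay state the Optimizer base class tracks (it exposes _current_lr) consistent from one iteration to the next.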