NNFS
Neural network library from scratch
Loading...
Searching...
No Matches
Loss.hpp
Go to the documentation of this file.
#pragma once

#include <memory>

#include <Eigen/Dense>

#include "../Utilities/clue.hpp"
#include "../Layer/Dense.hpp"
6
7namespace NNFS
8{
/**
 * @brief Enum class for loss types.
 *
 * Identifies which concrete loss function a Loss instance implements.
 */
enum class LossType
{
    CCE, ///< Categorical cross-entropy
};
17
23 class Loss
24 {
25 public:
26 LossType type; // Type of loss function
27
28 public:
35
39 virtual ~Loss() = default;
40
48 virtual void forward(Eigen::MatrixXd &sample_losses, const Eigen::MatrixXd &predictions, const Eigen::MatrixXd &labels) const = 0;
49
57 virtual void backward(Eigen::MatrixXd &out, const Eigen::MatrixXd &predictions, const Eigen::MatrixXd &labels) const = 0;
58
66 void calculate(double &loss, const Eigen::MatrixXd &predictions, const Eigen::MatrixXd &labels)
67 {
68 Eigen::MatrixXd sample_losses;
69 forward(sample_losses, predictions, labels);
70 loss = sample_losses.mean();
71 }
72
80 double regularization_loss(const std::shared_ptr<Dense> &layer)
81 {
82 double regularization_loss = 0;
83 const double weight_regularizer_l1 = layer->l1_weights_regularizer();
84 const double weight_regularizer_l2 = layer->l2_weights_regularizer();
85 const double bias_regularizer_l1 = layer->l1_biases_regularizer();
86 const double bias_regularizer_l2 = layer->l2_biases_regularizer();
87
88 if (weight_regularizer_l1 > 0)
89 {
90 regularization_loss += weight_regularizer_l1 * layer->weights().array().abs().sum();
91 }
92
93 if (weight_regularizer_l2 > 0)
94 {
95 regularization_loss += weight_regularizer_l2 * (layer->weights().array() * layer->weights().array()).sum();
96 }
97
98 if (bias_regularizer_l1 > 0)
99 {
100 regularization_loss += bias_regularizer_l1 * layer->weights().array().abs().sum();
101 }
102
103 if (bias_regularizer_l2 > 0)
104 {
105 regularization_loss += bias_regularizer_l2 * (layer->biases().array() * layer->biases().array()).sum();
106 }
107
108 return regularization_loss;
109 }
110 };
111} // namespace NNFS
Cross-entropy loss function.
Definition CCE.hpp:13
Base class for all loss functions.
Definition Loss.hpp:24
virtual void forward(Eigen::MatrixXd &sample_losses, const Eigen::MatrixXd &predictions, const Eigen::MatrixXd &labels) const =0
Forward pass of the loss function.
void calculate(double &loss, const Eigen::MatrixXd &predictions, const Eigen::MatrixXd &labels)
Calculate the loss.
Definition Loss.hpp:66
virtual void backward(Eigen::MatrixXd &out, const Eigen::MatrixXd &predictions, const Eigen::MatrixXd &labels) const =0
Backward pass of the loss function.
virtual ~Loss()=default
Virtual default destructor; ensures subclasses are destroyed correctly through a base pointer.
Loss(LossType type)
Construct a new Loss object.
Definition Loss.hpp:34
double regularization_loss(const std::shared_ptr< Dense > &layer)
Calculate the L1 and L2 regularization loss for a layer's weights and biases.
Definition Loss.hpp:80
LossType type
Definition Loss.hpp:26
Definition Activation.hpp:6
LossType
Enum class for loss types.
Definition Loss.hpp:13