-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathCostFunc.h
More file actions
104 lines (90 loc) · 2.8 KB
/
CostFunc.h
File metadata and controls
104 lines (90 loc) · 2.8 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
/**
* Cost functions used for backpropagation
*
* Author: Rob Lyerly
* Date: 9/1/2014
*/
#ifndef _COST_FUNC_H
#define _COST_FUNC_H
#include "matrix.h"
#include "vector.h"
namespace ML
{
/**
 * Abstract cost-function interface which concrete cost functions (e.g. the
 * MSE class below) implement.
 *
 * Provides the hooks backpropagation needs: computing the error of the output
 * layer, propagating error backwards through hidden layers, and producing the
 * weight gradients for a layer.  The hidden-layer errors double as the bias
 * gradients (see calcHiddenError).
 *
 * NOTE(review): `std::matrix` used below is NOT a standard-library type --
 * presumably it comes from the project's "matrix.h"; verify the namespace
 * (declaring project types inside namespace std is undefined behavior).
 * NOTE(review): `Neuron` is used here but neither included nor
 * forward-declared in this header -- confirm an including file declares it.
 */
class CostFunc
{
public:
/**
 * Enumeration of the implemented cost functions.  Presumably used by
 * factory/selection code elsewhere to choose a concrete subclass -- only the
 * enum itself is visible in this header.
 */
typedef enum CostFuncType {
MSE
} CostFuncType;
/**
 * Construct a cost function around the given neuron (activation) model.
 *
 * @param neuron activation model used when computing derivatives
 *
 * NOTE(review): the pointer is stored but nothing in this header deletes it,
 * so the caller appears to retain ownership -- confirm against the .cpp.
 */
CostFunc(Neuron* neuron)
: _neuron(neuron) {}
virtual ~CostFunc() {};
/**
 * Calculate the error for the output layer L.
 *
 * @param labels expected outputs of the neural network
 * @param nnOutputs actual outputs of the neural network
 * @param sums weighted sums (pre-activation values) for L
 * @param errors [out] vector in which the errors for L are stored
 */
virtual void calcOutputError(const std::vector<double>& labels,
const std::vector<double>& nnOutputs,
const std::vector<double>& sums,
std::vector<double>& errors) = 0;
/**
 * Calculate the error for a hidden layer l.  Note: these errors are equal
 * to the gradients for the biases of l.
 *
 * @param weights weight matrix connecting layer l to layer l+1
 * @param nextErrors previously computed errors for the next layer, l+1
 * @param sums weighted sums (pre-activation values) for l
 * @param errors [out] vector in which the errors for l are stored
 */
virtual void calcHiddenError(const std::matrix<double>& weights,
const std::vector<double>& nextErrors,
const std::vector<double>& sums,
std::vector<double>& errors) = 0;
/**
 * Calculate the weight gradients for a given layer l.
 *
 * @param errors previously computed errors for layer l
 * @param prevActivations activations of the previous layer, l-1
 * @param gradients [out] matrix in which the weight gradients are stored
 */
virtual void calcWeightGradients(const std::vector<double>& errors,
const std::vector<double>& prevActivations,
std::matrix<double>& gradients) = 0;
protected:
// Activation model supplied at construction; not owned (see ctor note).
Neuron* _neuron;
};
/**
 * Mean-squared error (quadratic) cost function.
 *
 * Concrete CostFunc implementation.  Only the declarations are visible in
 * this header; the definitions live in the corresponding source file, so the
 * exact formulas cannot be confirmed from here.
 */
class MSE : public CostFunc
{
public:
/**
 * @param neuron activation model, forwarded to the CostFunc base class
 */
MSE(Neuron* neuron)
: CostFunc(neuron) {}
// Output-layer error; see CostFunc::calcOutputError for the contract.
virtual void calcOutputError(const std::vector<double>& labels,
const std::vector<double>& nnOutputs,
const std::vector<double>& sums,
std::vector<double>& errors);
// Hidden-layer error (== bias gradients); see CostFunc::calcHiddenError.
virtual void calcHiddenError(const std::matrix<double>& weights,
const std::vector<double>& nextErrors,
const std::vector<double>& sums,
std::vector<double>& errors);
// Weight gradients for a layer; see CostFunc::calcWeightGradients.
virtual void calcWeightGradients(const std::vector<double>& errors,
const std::vector<double>& prevActivations,
std::matrix<double>& gradients);
};
}
#endif /* _COST_FUNC_H */