PolynomialRegressor.hpp
/*
Part of the Fluid Corpus Manipulation Project (http://www.flucoma.org/)
Copyright University of Huddersfield.
Licensed under the BSD-3 License.
See license.md file in the project root for full license information.
This project has received funding from the European Research Council (ERC)
under the European Union’s Horizon 2020 research and innovation programme
(grant agreement No 725899).
*/
#pragma once

#include "../util/AlgorithmUtils.hpp"
#include "../util/FluidEigenMappings.hpp"
#include "../../data/FluidIndex.hpp"
#include "../../data/FluidMemory.hpp"
#include "../../data/TensorTypes.hpp"
#include <Eigen/Core>
#include <Eigen/Dense>
#include <cassert>
#include <cmath>

namespace fluid {
namespace algorithm {

class PolynomialRegressor
{
public:
  explicit PolynomialRegressor() = default;
  ~PolynomialRegressor() = default;

  void init(index degree, index dims, double tikhonov = 0.0)
  {
    mInitialized = true;
    setDegree(degree);
    setDims(dims);
    setTikhonov(tikhonov);
  };

  index  degree() const { return mInitialized ? asSigned(mDegree) : 0; };
  double tihkonov() const { return mInitialized ? mTikhonovFactor : 0.0; };
  index  dims() const { return mInitialized ? asSigned(mDims) : 0; };
  index  size() const { return mInitialized ? asSigned(mDegree) : 0; };

  void clear() { mRegressed = false; }

  bool regressed() const { return mRegressed; };
  bool initialized() const { return mInitialized; };

  void setDegree(index degree)
  {
    if (mDegree == degree) return;
    mDegree = degree;
    mRegressed = false;
  }

  void setDims(index dims)
  {
    if (mDims == dims) return;
    mDims = dims;
    mRegressed = false;
  }

  void setTikhonov(double tikhonov)
  {
    if (mTikhonovFactor == tikhonov) return;
    mTikhonovFactor = tikhonov;
    mRegressed = false;
  }

  void regress(InputRealMatrixView in, InputRealMatrixView out,
               Allocator& alloc = FluidDefaultAllocator())
  {
    using namespace _impl;
    using namespace Eigen;

    ScopedEigenMap<MatrixXd> input(in.rows(), in.cols(), alloc),
        output(out.rows(), out.cols(), alloc),
        transposeProduct(mDegree + 1, mDegree + 1, alloc);

    input = asEigen<Matrix>(in);
    output = asEigen<Matrix>(out);

    mCoefficients.resize(mDegree + 1, mDims);
    mTikhonovMatrix.resize(mDegree + 1, mDegree + 1);

    asEigen<Matrix>(mTikhonovMatrix) =
        mTikhonovFactor * MatrixXd::Identity(mDegree + 1, mDegree + 1);

    for (index i = 0; i < mDims; ++i)
    {
      generateDesignMatrix(input.col(i));

      // Tikhonov/ridge regularisation: given Ax = y, where the estimate of x
      // could be noisy, optimise x = (A^T.A + R^T.R)^-1 . A^T.y,
      // where R is a Tikhonov filter matrix, which for ridge regression takes
      // the form a.I
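      // (with mTikhonovMatrix set to a.I above, R^T.R is just a^2.I, so the
      // expression below is the usual ridge-regression normal equation
      // x = (A^T.A + a^2.I)^-1 . A^T.y)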
      transposeProduct = asEigen<Matrix>(mDesignMatrix).transpose() *
                             asEigen<Matrix>(mDesignMatrix) +
                         asEigen<Matrix>(mTikhonovMatrix).transpose() *
                             asEigen<Matrix>(mTikhonovMatrix);

      asEigen<Matrix>(mCoefficients.col(i)) =
          transposeProduct.inverse() *
          asEigen<Matrix>(mDesignMatrix).transpose() * output.col(i);
    }

    mRegressed = true;
  };

  void getCoefficients(RealMatrixView coefficients) const
  {
    if (mInitialized) coefficients <<= mCoefficients;
  };

  void setCoefficients(InputRealMatrixView coefficients)
  {
    if (!mInitialized) mInitialized = true;

    setDegree(coefficients.rows() - 1);
    setDims(coefficients.cols());

    mCoefficients <<= coefficients;
    mRegressed = true;
  }

  void process(InputRealMatrixView in, RealMatrixView out,
               Allocator& alloc = FluidDefaultAllocator()) const
  {
    using namespace _impl;
    using namespace Eigen;

    ScopedEigenMap<VectorXd> coefficientsColumn(mCoefficients.rows(), alloc),
        inputColumn(in.rows(), alloc);

    for (index i = 0; i < mDims; ++i)
    {
      inputColumn = asEigen<Matrix>(in.col(i));
      coefficientsColumn = asEigen<Matrix>(mCoefficients.col(i));

      generateDesignMatrix(inputColumn);

      asEigen<Matrix>(out.col(i)) =
          asEigen<Matrix>(mDesignMatrix) * coefficientsColumn;
    }
  }

private:
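  // Fills mDesignMatrix with successive powers of the input column,
  // i.e. a Vandermonde matrix [1, x, x^2, ..., x^degree]; it is shared by
  // regress() and process(), hence the mutable members below.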
  void generateDesignMatrix(Eigen::Ref<Eigen::VectorXd> in,
                            Allocator& alloc = FluidDefaultAllocator()) const
  {
    using namespace _impl;
    using namespace Eigen;

    ScopedEigenMap<ArrayXd> designColumn(in.size(), alloc),
        inArray(in.size(), alloc);

    designColumn = VectorXd::Ones(in.size());
    inArray = in.array();

    mDesignMatrix.resize(in.size(), mDegree + 1);

    for (index i = 0; i < mDegree + 1;
         ++i, designColumn = designColumn * inArray)
      asEigen<Matrix>(mDesignMatrix.col(i)) = designColumn;
  }

  index mDegree{2};
  index mDims{1};

  bool mRegressed{false};
  bool mInitialized{false};

  double mTikhonovFactor{0};

  RealMatrix mCoefficients;

  mutable RealMatrix mDesignMatrix;
  mutable RealMatrix mTikhonovMatrix;
};
} // namespace algorithm
} // namespace fluid
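
// Example usage (illustrative sketch only, not part of the original header).
// `inputPoints`, `targetPoints`, `newPoints` and `predictions` are hypothetical
// RealMatrix / RealMatrixView objects of shape N x dims, filled elsewhere:
//
//   fluid::algorithm::PolynomialRegressor reg;
//   reg.init(2, 1, 0.001);                  // quadratic model, 1 dim, ridge factor 0.001
//   reg.regress(inputPoints, targetPoints); // fit coefficients per dimension
//   reg.process(newPoints, predictions);    // evaluate the fitted polynomial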