-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathmain.cpp
More file actions
46 lines (37 loc) · 1.19 KB
/
main.cpp
File metadata and controls
46 lines (37 loc) · 1.19 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
#include <algorithm>
#include <random>
#include <vector>
#include "Include/Evolutionary/EvolutionSim.h"
#include "Include/NeuralNetwork/NeuralNetwork.h"
#include "Include/Utilities/Printer.h"
#include "Include/Utilities/Random.h"
/// Target function the models learn to approximate: y = 3x + 2.
/// Takes and returns single-element vectors to match the network I/O shape.
std::vector<double> f(std::vector<double> x) {
    const double y = 3.0 * x[0] + 2.0;
    return {y};
}
/// Generate data to train/test on
std::vector<data> generateData(int size){
std::vector<data> output;
output.reserve(size);
for (int i = 0; i < size; ++i) {
data d;
d.input = {i/static_cast<double>(size)};
d.output = f(d.input);
output.emplace_back(d);
}
auto rd = std::random_device {};
auto rng = std::default_random_engine { rd() };
std::shuffle(std::begin(output), std::end(output), rng);
return output;
}
void runEvolutionSim(){
auto evolutionSim = EvolutionSim({1, 1, 1}, f);
evolutionSim.runEvolution(200, 500);
auto hist = evolutionSim.getCostHistory();
print(hist);
}
void runGradientDescent(){
auto data = generateData(1000);
auto nn = NeuralNetwork({1,2,2,2,1}, functions::Activation::LINEAR);
nn.gradientDescent(data, 800, 0.05);
nn.storeOutput({0,1});
}
/// Entry point. Both experiments are intentionally disabled; uncomment
/// exactly one of the calls below to run it.
int main() {
    //runGradientDescent();
    //runEvolutionSim();
    return 0;
}