-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathmain.cpp
More file actions
63 lines (47 loc) · 3.09 KB
/
main.cpp
File metadata and controls
63 lines (47 loc) · 3.09 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
#include "AKML.hpp"
#include "GeneticAlgorithm.hpp"
#include "NeuralFunctions.hpp"

#include <array>
#include <chrono>
#include <iostream>
#include <memory>
#include <utility>
#include <vector>
int main (){
using SINGLETON = std::array <std::array <float, 1>, 1>;
std::cout << "Hello, terrible world!" << std::endl;
akml::NeuralNetwork *bestnet;
// XOR DIM 2
//akml::NeuralNetwork::initialize_list_type xor2_initList = {{ std::make_pair(2, nullptr), std::make_pair(2, &akml::ActivationFunctions::RELU), std::make_pair(1, &akml::ActivationFunctions::SIGMOID) }};
/*akml::NeuralNetwork::initialize_list_type xor2_initList = {{ std::make_pair(2, nullptr), std::make_pair(2, &akml::ActivationFunctions::SIGMOID), std::make_pair(1, &akml::ActivationFunctions::SIGMOID) }};
std::vector<akml::DynamicMatrix <float>> inputs = {{ akml::make_dynamic_vector<float>(1.f,0.f), akml::make_dynamic_vector<float>(0.f,0.f), akml::make_dynamic_vector<float>(0.f,1.f), akml::make_dynamic_vector<float>(1.f,1.f) }};
std::vector<akml::DynamicMatrix <float>> outputs = {{ (SINGLETON){{ {1} }}, (SINGLETON){{ {0} }}, (SINGLETON){{ {1} }}, (SINGLETON){{ {0} }} }};*/
/*akml::GeneticAlgorithm ga (100, xor2_initList);
bestnet = ga.trainNetworks(5000, inputs, outputs);*/
// XOR DIM 3
akml::NeuralNetwork::initialize_list_type xor3_initList = {{ std::make_pair(3, nullptr), std::make_pair(3, &akml::ActivationFunctions::RELU), std::make_pair(3, &akml::ActivationFunctions::RELU), std::make_pair(1, &akml::ActivationFunctions::SIGMOID) }};
std::vector<akml::DynamicMatrix <float>> inputs = {{
akml::make_dynamic_vector<float>(1.f,0.f,0.f), akml::make_dynamic_vector<float>(0.f,0.f, 1.f),
akml::make_dynamic_vector<float>(0.f,1.f,0.f), akml::make_dynamic_vector<float>(1.f,1.f,0.f),
akml::make_dynamic_vector<float>(1.f,0.f,1.f), akml::make_dynamic_vector<float>(0.f,1.f,1.f),
akml::make_dynamic_vector<float>(1.f,1.f,1.f) }};
std::vector<akml::DynamicMatrix <float>> outputs = {{ (SINGLETON){{ {1} }}, (SINGLETON){{ {1} }},
(SINGLETON){{ {1} }}, (SINGLETON){{ {0} }},
(SINGLETON){{ {0} }}, (SINGLETON){{ {0} }}, (SINGLETON){{ {0} }} }};
bestnet = new akml::NeuralNetwork (xor3_initList);
auto start = std::chrono::high_resolution_clock::now();
// Genetic algorithm
/*akml::GeneticAlgorithm ga (100, xor3_initList);
bestnet = ga.trainNetworks(5000, inputs, outputs);*/
// Gradient descent
bestnet = new akml::NeuralNetwork (xor3_initList);
bestnet->adamGradientTraining(inputs, outputs, 3, 5000);
auto end = std::chrono::high_resolution_clock::now();
std::chrono::duration<double> duration = end - start;
std::cout << "\nExecutionTime=" << duration.count() << "\n";
for (std::size_t inputid(0); inputid<inputs.size(); inputid++){
std::cout << "\nTesting with " << std::endl;
std::cout << inputs[inputid];
std::cout << "Output :" << std::endl;
std::cout << bestnet->process(inputs[inputid]);
std::cout << "Output expected :" << std::endl;
std::cout << outputs[inputid];
}
return 0;
}