Neuron.cpp
#pragma once
#include "ACT_FUNC_ENUM.cpp"
#include "ActivationFunctions.h"
#include "vector_utils.h"
#include <vector>
#include <stdexcept>
using namespace std;

// A single neuron: stores its incoming weights and bias, and computes
// f(net), where net = sum(w_i * x_i) + bias.
class Neuron
{
private:
    double net_i = 0.0;                  // weighted sum of inputs plus bias
    double f_net_i = 0.0;                // activation applied to net_i
    bool is_input_layer_neuron = false;  // input-layer neurons pass net_i through unchanged
    double bias = 0.0;
    vector<double> weights;
    bool init = true;                    // false when default-constructed (weights not yet set)

public:
    Neuron() {
        init = false;
    }

    Neuron(vector<double> weights, double bias, bool is_input_layer_neuron = false)
        : weights(weights), bias(bias), is_input_layer_neuron(is_input_layer_neuron) {}

    void set_weights(vector<double> weights)
    {
        this->weights = weights;
    }

    // Update the weight of the connection from one neuron in the previous layer.
    void update_weight(int prev_neuron, double weight) {
        this->weights[prev_neuron] = weight;
    }

    void set_bias(double bias) {
        this->bias = bias;
    }

    double get_bias() const {
        return this->bias;
    }

    const vector<double>& get_weights() const {
        return this->weights;
    }

    // Forward pass: compute the weighted sum of the inputs plus the bias,
    // then apply the chosen activation function (lambda is its steepness parameter).
    double compute(const vector<double>& inputs, ACTIVATION_FUNCTIONS func = UNIPOLAR_SIGMOID, int lambda = 1)
    {
        if (weights.size() != inputs.size())
            throw invalid_argument("The number of inputs must be equal to the number of weights");

        net_i = 0;
        f_net_i = 0;
        for (size_t i = 0; i < inputs.size(); i++)
        {
            net_i += inputs[i] * weights[i];
        }
        net_i += bias;

        // Input-layer neurons simply forward the weighted sum.
        if (is_input_layer_neuron) return net_i;

        if (func == ACTIVATION_FUNCTIONS::UNIPOLAR_SIGMOID)
        {
            f_net_i = unipolar_sigmoid(net_i, lambda);
        }
        else if (func == ACTIVATION_FUNCTIONS::BIPOLAR_SIGMOID)
        {
            f_net_i = bipolar_sigmoid(net_i, lambda);
        }
        else
        {
            throw invalid_argument("Invalid activation function");
        }
        return f_net_i;
    }
};
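
Example usage: a minimal sketch of constructing a neuron and running a forward pass. The weight, bias, and input values below are illustrative only, and including Neuron.cpp directly mirrors how this file includes ACT_FUNC_ENUM.cpp; the repository's real build wiring may differ.

#include "Neuron.cpp"
#include <iostream>
#include <vector>

int main() {
    // A neuron with two incoming weights and a bias (illustrative values).
    Neuron n({0.5, -0.25}, 0.1);

    // Forward pass with the default unipolar sigmoid activation and lambda = 1.
    double out = n.compute({1.0, 2.0});
    std::cout << "f(net) = " << out << std::endl;
    return 0;
}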