 #include <type_traits>
+#include <cmath>   // std::sqrt / std::exp used below (in case it is not already pulled in above)
+#include <random>  // std::random_device, std::mt19937, std::normal_distribution
 namespace NN {
 
+
+/* MAE = Mean Absolute Error
+ * MSE = Mean Squared Error
+ */
 enum LOSS_TYPE {
   MAE,
   MSE,
 };
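+/* For a single prediction: MAE = |ypred - y| and MSE = (ypred - y)^2;
+ * the MSE helper further down carries an extra 1/2 factor so that its derivative is simply ypred - y. */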
 enum ACTIVATION_TYPE { RELU, SIGMOID, TANH };
 
-// the sizes need to be put into typename because all the arrays inside are static
+// the sizes need to go into the template parameters because all the arrays inside are static
 template <typename FP, size_t inputSize, size_t hidden1Size, size_t hidden2Size,
           size_t outputSize,
           typename = std::enable_if_t<std::is_floating_point_v<FP>, FP>>
@@ -43,8 +47,10 @@ class FFN {
   FP resIn[hidden1Size], resHid1[hidden2Size], res[outputSize];
   bool xavier = false;
 
+  // each layer gets its own weight distribution
   template <size_t inSize, size_t outSize>
   void init_layer (FP k, FP (&w)[inSize][outSize], FP (&b)[outSize]) { // one bias per output neuron
+    // set up the random number generator
     std::random_device rd;
     std::mt19937 gen (rd ());
+    // stddev: He => sqrt(2 / fan_in), Xavier => sqrt(2 / (fan_in + fan_out))
     std::normal_distribution<FP> dis (0, std::sqrt (FP (2) / k));
@@ -55,6 +61,9 @@ class FFN {
     }
   }
 
+  /* He initialization only uses k = fan-in (the layer's input size),
+   * whereas Xavier initialization uses k = fan-in + fan-out.
+   */
   void init_wb () {
     FP k0 = inputSize, k1 = hidden1Size, k2 = hidden2Size;
     if (xavier) {
@@ -67,6 +76,16 @@ class FFN {
     init_layer<hidden2Size, outputSize>(k2, wHid2, bHid2);
   }
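+
+  /* Worked example of the resulting standard deviations (sizes here are illustrative only):
+   * for a layer with fan-in 4 and fan-out 8, stddev = sqrt(2 / k) gives
+   *   He:     k = 4      -> sqrt(2 / 4)  ~ 0.71
+   *   Xavier: k = 4 + 8  -> sqrt(2 / 12) ~ 0.41
+   * so Xavier also accounts for the fan-out and starts with slightly smaller weights. */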
 
+  // Loss functions
+  // (MSE here is the per-sample squared error with a 1/2 factor, so its derivative is simply ypred - y)
+  static FP MSE (FP ypred, FP y) {
+    FP semi_loss = ypred - y;
+    return 0.5 * semi_loss * semi_loss;
+  }
+  static FP MSE_deriv (FP ypred, FP y) {
+    return ypred - y;
+  }
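+
+  /* Minimal sketch of the MAE counterpart listed in LOSS_TYPE, assuming it is not
+   * already defined elsewhere in this file: MAE = |ypred - y|, with the
+   * subgradient sign(ypred - y). */
+  static FP MAE (FP ypred, FP y) { return std::abs (ypred - y); }
+  static FP MAE_deriv (FP ypred, FP y) { return ypred > y ? FP (1) : (ypred < y ? FP (-1) : FP (0)); }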
+
+  // Activation functions
+  // ("ReLU" is implemented as a leaky ReLU: a tiny slope for x <= 0 keeps the gradient from dying)
   static FP ReLU (FP x) { return x > 0 ? x : FP (1e-6) * x; }
   static FP ReLU_deriv (FP y) { return y > 0 ? 1 : 1e-6; }
   static FP sigmoid (FP x) { return 1 / (1 + std::exp (-x)); }
@@ -76,6 +95,7 @@ class FFN {
   }
   static FP tanh_deriv (FP y) { return 1 - y * y; }
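+
+  /* Note: the *_deriv helpers take the activation OUTPUT y rather than the input x
+   * (e.g. tanh'(x) = 1 - y^2 with y = tanh(x)), so the backward pass can reuse the
+   * values stored during the forward pass instead of recomputing each activation. */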
 
+
   template <size_t inSize, size_t outSize, bool hidden>
   void forwardlayer (FP (&w)[inSize][outSize], FP (&b)[outSize],
                      FP (&in)[inSize], FP (&res)[outSize],
@@ -87,11 +107,15 @@ class FFN {
     }
   }
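+
+  /* Presumably (the body is partly elided in this diff) each dense layer computes
+   *   res[j] = b[j] + sum_i in[i] * w[i][j]
+   * with the activation applied on top when the `hidden` template flag is set. */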
 
+
+
 public:
+  // the default activation is ReLU
   FFN (ACTIVATION_TYPE act_t = ACTIVATION_TYPE::RELU) : act_t (act_t) {
     init_wb ();
   }
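+
+  /* Illustrative usage from outside the class (sizes and values are made up):
+   *   NN::FFN<float, 4, 8, 8, 2> net (NN::ACTIVATION_TYPE::TANH);
+   *   float sample[4] = {0.1f, 0.2f, 0.3f, 0.4f};
+   *   net.forward (sample);   // outputs land in the internal `res` buffer
+   */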
-  FP (&forward (FP (&data)[inputSize]))[outputSize] {
+
+  void forward (FP (&data)[inputSize]) {
     // reset layer data first (raw arrays cannot be assigned with "= {}", so zero them element-wise)
     for (FP &v : resIn) v = 0;
     for (FP &v : resHid1) v = 0;
@@ -111,6 +135,8 @@ class FFN {
-    return res;
   }
 
+  // gradient descent update: w = w - eta * dL/dw
+  //                          b = b - eta * dL/db
   void backward (FP (&data)[outputSize]) {
     auto actDerivFuncFromType = [](ACTIVATION_TYPE act_t) {
       switch (act_t) {