
Commit 554f4b8: remove useless dIn
Parent: b0ffdf6

2 files changed: +7, -6 lines

NN/FFN/basicFFN/include/ffn.hxx

Lines changed: 5 additions & 4 deletions
@@ -40,7 +40,7 @@ class BasicFFN {
   FP wIn[inputSize][hidden1Size], wHid1[hidden1Size][hidden2Size], wHid2[hidden2Size][outputSize];
   FP bIn[hidden1Size], bHid1[hidden2Size], bHid2[outputSize];
   FP toHid1[hidden1Size], toHid2[hidden2Size], out[outputSize];
-  FP dOut[outputSize], dHid2[hidden2Size], dHid1[hidden1Size], dIn[inputSize];
+  FP dOut[outputSize], dHid2[hidden2Size], dHid1[hidden1Size];
   inline static FP epsilon = 1e-6; // to prevent dead neurons when using ReLU
   bool xavier = false;
   FP eta = 1e-2;
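
The dropped dIn buffer held the backpropagated delta for the input layer. A layer's delta is only consumed to push gradients one layer further down, and nothing sits below the input layer, so dIn was filled but never read. The two hunks below make the buffer optional at the call site instead; a sketch of the resulting pattern follows the ffn.hxx diff.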
@@ -107,15 +107,16 @@ class BasicFFN {
     @param actFuncDeriv pointer to the derivative of the activation function
   */
   template <size_t inSize, size_t outSize>
-  void backward_layer(FP (&w)[outSize][inSize], FP (&b)[inSize], FP (&dataIn)[inSize], FP (&deltaIn)[inSize], FP (&deltaOut)[outSize], FP (*actFuncDeriv)(FP),
+  void backward_layer(FP (&w)[outSize][inSize], FP (&b)[inSize], FP (&dataIn)[inSize], FP (&deltaIn)[inSize], FP (*deltaOut)[outSize], FP (*actFuncDeriv)(FP),
                       FP eta) {
     // update the weights and biases from the incoming delta
     for (size_t i = 0; i < inSize; ++i) {
       for (size_t j = 0; j < outSize; ++j) w[j][i] -= eta * deltaIn[i] * dataIn[i];
       b[i] -= eta * deltaIn[i];
     }

-    // update the outgoing delta (useless for the hidden-to-input layer)
+    if (!deltaOut) return;
+    // update the outgoing delta only when one is provided
     for (size_t i = 0; i < outSize; ++i) {
       for (size_t j = 0; j < inSize; ++j) (*deltaOut)[i] += w[i][j] * deltaIn[j];
       // chain rule
@@ -198,7 +199,7 @@ class BasicFFN {

     backward_layer<outputSize, hidden2Size>(wHid2, bHid2, dOut, dHid2, actFuncDeriv, eta);
     backward_layer<hidden2Size, hidden1Size>(wHid1, bHid1, dHid2, dHid1, actFuncDeriv, eta);
-    backward_layer<hidden1Size, inputSize>(wIn, bIn, dHid1, dIn, actFuncDeriv, eta);
+    backward_layer<hidden1Size, inputSize>(wIn, bIn, dHid1, 0, actFuncDeriv, eta);
   }
 };
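The signature change above turns deltaOut from an array reference into a pointer to an array, so the bottom layer can hand in a null pointer instead of a scratch buffer it never needs. Below is a minimal, self-contained sketch of that pattern; FP, inSize, outSize, and the propagation loop mirror the diff, while the name propagate_delta and the toy sizes are illustrative only, not the repo's code.

#include <cstddef>

using FP = float; // assumption: the real header aliases FP to some floating-point type

// deltaOut is a pointer to an array rather than a reference, so a caller with
// no lower layer to feed (the input layer) can simply pass a null pointer.
template <std::size_t inSize, std::size_t outSize>
void propagate_delta(FP (&w)[outSize][inSize], FP (&deltaIn)[inSize], FP (*deltaOut)[outSize]) {
  if (!deltaOut) return; // input layer: nothing below, skip the propagation
  for (std::size_t i = 0; i < outSize; ++i)
    for (std::size_t j = 0; j < inSize; ++j) (*deltaOut)[i] += w[i][j] * deltaIn[j];
}

int main() {
  FP w[2][3] = {{1, 2, 3}, {4, 5, 6}};
  FP dIn[3] = {0.1f, 0.2f, 0.3f};
  FP dOut[2] = {};
  propagate_delta<3, 2>(w, dIn, &dOut);   // hidden layer: delta flows downward
  propagate_delta<3, 2>(w, dIn, nullptr); // input layer: returns immediately, as the 0 in the diff does
}

The alternative would have been an overload or a separate terminal function; a nullable pointer keeps one template for all three layers at the cost of a runtime check.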

number_system/include/big_int.hxx

Lines changed: 2 additions & 2 deletions
@@ -127,15 +127,15 @@ class Big_int {
   static const std::string two_pow_64;
   explicit Big_int(std::vector<uint64_t> values, bool negative) noexcept : values(values), negative(negative) {}

-  explicit Big_int(uint64_t value) {
+  Big_int(uint64_t value) {
     if (value < (1ULL << 63)) values = {value};
     else {
       values = {~value + 1};
       this->negative = true;
     }
   }

-  explicit Big_int(std::string value) {
+  Big_int(std::string value) {
     std::string rem = "0";
     std::vector<uint64_t> chunks;
     while (!value.empty()) {
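
The big_int.hxx change removes explicit from the uint64_t and std::string constructors, so both now double as implicit conversions. Below is a minimal sketch of what that enables at call sites, using a hypothetical Toy_int stand-in (Big_int's storage and operators are not reproduced):

#include <cstdint>
#include <string>

// Toy_int is a hypothetical stand-in for Big_int, reduced to the two
// constructors this commit touches.
struct Toy_int {
  Toy_int(uint64_t value) : v(value) {}                 // implicit after the commit
  Toy_int(std::string value) : v(std::stoull(value)) {} // implicit after the commit
  uint64_t v;
};

Toy_int add(Toy_int a, Toy_int b) { return Toy_int(a.v + b.v); }

int main() {
  Toy_int a = 42;               // ok now; ill-formed while the constructor was explicit
  Toy_int b = std::string("7"); // ok now; ill-formed while the constructor was explicit
  Toy_int c = add(a, 100);      // a plain integer converts right at the call site
  // Toy_int d = "7";           // still ill-formed: two user-defined conversions
}

The trade-off is the usual one: mixed expressions such as add(a, 100) read naturally, but conversions now happen silently wherever a Big_int is expected.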
