Make softmax optional when the output is calculated
Resolves: (no issue referenced — add the issue/ticket ID or remove this trailer)
**Self evaluation:**
1. Build test: [X]Passed [ ]Failed [ ]Skipped
2. Run test: [X]Passed [ ]Failed [ ]Skipped
Signed-off-by: jijoong.moon <jijoong.moon@samsung.com>
Input = input;
hidden = input.dot(Weight).add(Bias).applyFunction(activation);
Tensor Y2 = output;
- Tensor Y = hidden.softmax();
+ Tensor Y = hidden;
+ if (softmax)
+ Y = Y.softmax();
float lossSum = 0.0;
switch (cost) {
Tensor OutputLayer::backwarding(Tensor label, int iteration) {
float lossSum = 0.0;
Tensor Y2 = label;
- Tensor Y;
+ Tensor Y = hidden;
if (softmax)
- Y = hidden.softmax();
- else
- Y = hidden;
+ Y = Y.softmax();
+
Tensor ret;
Tensor dJdB;
}
}
+ for (int k = 0; k < batch; ++k) {
+ int index = k * height;
+ for (int i = 1; i < height; ++i) {
+ divisor.data[index] += divisor.data[index + i];
+ }
+ }
+
for (int k = 0; k < batch; k++) {
int index = k * height;
for (int i = 0; i < height; i++) {
for (int j = 0; j < width; j++) {
int id = k * height * width + i * width + j;
- result.data[id] = exp(this->data[id]) / divisor.data[index + i];
+ result.data[id] = exp(this->data[id]) / divisor.data[index];
}
}
}