projects
/
platform
/
upstream
/
caffeonacl.git
/ commitdiff
commit
grep
author
committer
pickaxe
?
search:
re
summary
|
shortlog
|
log
|
commit
| commitdiff |
tree
raw
|
patch
|
inline
| side by side (parent:
d54833e
)
revert unnecessary reordering of lines in softmaxwithlosslayer backward
author
Jeff Donahue
<jeff.donahue@gmail.com>
Fri, 14 Mar 2014 01:16:21 +0000
(18:16 -0700)
committer
Jeff Donahue
<jeff.donahue@gmail.com>
Wed, 19 Mar 2014 19:37:31 +0000
(12:37 -0700)
src/caffe/layers/softmax_loss_layer.cpp
patch
|
blob
|
history
diff --git a/src/caffe/layers/softmax_loss_layer.cpp b/src/caffe/layers/softmax_loss_layer.cpp
index 4238cf6..f9bd82e 100644 (file)
--- a/src/caffe/layers/softmax_loss_layer.cpp
+++ b/src/caffe/layers/softmax_loss_layer.cpp
@@ -48,8 +48,8 @@ void SoftmaxWithLossLayer<Dtype>::Backward_cpu(const vector<Blob<Dtype>*>& top,
   // Compute the diff
   Dtype* bottom_diff = (*bottom)[0]->mutable_cpu_diff();
   const Dtype* prob_data = prob_.cpu_data();
-  const Dtype* label = (*bottom)[1]->cpu_data();
   memcpy(bottom_diff, prob_data, sizeof(Dtype) * prob_.count());
+  const Dtype* label = (*bottom)[1]->cpu_data();
   int num = prob_.num();
   int dim = prob_.count() / num;
   for (int i = 0; i < num; ++i) {