*.lo
*.o
*.cuo
+*.pyc
# Compiled Dynamic libraries
*.so
}
template <typename Dtype>
-void Blob<Dtype>::ToProto(BlobProto* proto) {
+void Blob<Dtype>::ToProto(BlobProto* proto, bool write_diff) {
  proto->set_num(num_);
  proto->set_channels(channels_);
  proto->set_height(height_);
  proto->set_width(width_);
  const Dtype* data_vec = cpu_data();
  for (int i = 0; i < count_; ++i) {
    proto->add_data(data_vec[i]);
  }
-  const Dtype* diff_vec = cpu_diff();
-  for (int i = 0; i < count_; ++i) {
-    proto->add_diff(diff_vec[i]);
+  if (write_diff) {
+    const Dtype* diff_vec = cpu_diff();
+    for (int i = 0; i < count_; ++i) {
+      proto->add_diff(diff_vec[i]);
+    }
  }
}
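The write_diff flag makes gradient serialization opt-in: by default ToProto writes only the data values, and passing true also records the diff. A minimal usage sketch (assuming an already-filled Blob<float> named blob; Blob, BlobProto, ToProto, and FromProto all come from the surrounding sources):

    BlobProto proto;
    blob.ToProto(&proto, true);   // serialize data and diff; omit the flag to write data only
    Blob<float> restored;
    restored.FromProto(proto);    // reconstruct a blob from the message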
  Dtype* mutable_gpu_diff();
  void Update();
  void FromProto(const BlobProto& proto);
-  void ToProto(BlobProto* proto);
+  void ToProto(BlobProto* proto, bool write_diff = false);
 private:
  shared_ptr<SyncedMemory> data_;
  repeated float diff = 6 [packed=true];
}
+message Datum {
+  optional BlobProto blob = 1;
+  optional int32 label = 2;
+}
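The new Datum message pairs a BlobProto with an integer label, giving a single record type for labeled examples. A hedged sketch of filling and serializing one through the protoc-generated C++ API (the header name follows protoc's convention; all values are placeholders):

    #include <string>
    #include "caffe.pb.h"  // generated from caffe.proto

    Datum datum;
    datum.set_label(7);                      // class label
    BlobProto* blob = datum.mutable_blob();  // the example's data
    blob->set_num(1);
    blob->set_channels(1);
    blob->set_height(1);
    blob->set_width(1);
    blob->add_data(0.5f);
    std::string bytes;
    datum.SerializeToString(&bytes);         // e.g. to store in a database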
+
message FillerParameter {
  // The filler type. By default we set it to Gaussian for easy
  // debugging.
  optional string type = 1 [default = 'gaussian'];
}
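Because the default is declared in the proto itself, the generated accessor reports 'gaussian' whenever no type has been set explicitly; a brief sketch using the proto2-generated C++ API:

    FillerParameter filler;
    const std::string& t = filler.type();  // "gaussian" while unset
    filler.set_type("uniform");            // override the filler type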
message NetParameter {
  optional string name = 1; // consider giving the network a name
  repeated LayerConnection layers = 2; // a bunch of layers.
+  repeated string bottom = 3; // The input to the network.
+  repeated string top = 4; // The output of the network.
}
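Taken together, a network is a name, a list of layer connections, and the blob names it consumes and produces. A hedged sketch of assembling one programmatically ("toy_net", "data", and "prob" are made-up illustrative names; LayerConnection's own fields are not shown in this excerpt, so the appended layer is left empty):

    NetParameter net;
    net.set_name("toy_net");
    net.add_bottom("data");   // blob the network expects as input
    net.add_top("prob");      // blob the network produces
    LayerConnection* layer = net.add_layers();  // append a layer to the list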