Revert "Merge pull request #114 from jeffdonahue/dags-by-split"
authorEvan Shelhamer <shelhamer@imaginarynumber.net>
Tue, 18 Feb 2014 01:03:57 +0000 (17:03 -0800)
committerEvan Shelhamer <shelhamer@imaginarynumber.net>
Tue, 18 Feb 2014 01:03:57 +0000 (17:03 -0800)
Revert dags-by-split merge because split layers were not quite ready.
Sorry for the bug!

We are adopting a `master`/`dev` split development
model, where new commits and pull requests will be merged to `dev` for
testing before integration to `master`. This will keep `master` clean and
stable.

This reverts commit d339d242b43ec5bec9dceb657ee0d665d524b1eb, reversing
changes made to 55198262d79944fa7382868f7ecd108ed4f238a2.

include/caffe/util/insert_splits.hpp [deleted file]
include/caffe/vision_layers.hpp
src/caffe/layer_factory.cpp
src/caffe/layers/split_layer.cpp [deleted file]
src/caffe/net.cpp
src/caffe/test/test_split_layer.cpp [deleted file]
src/caffe/util/insert_splits.cpp [deleted file]

diff --git a/include/caffe/util/insert_splits.hpp b/include/caffe/util/insert_splits.hpp
deleted file mode 100644 (file)
index 2224c71..0000000
+++ /dev/null
@@ -1,23 +0,0 @@
-// Copyright 2014 Jeff Donahue
-
-#ifndef _CAFFE_UTIL_INSERT_SPLITS_HPP_
-#define _CAFFE_UTIL_INSERT_SPLITS_HPP_
-
-#include "caffe/proto/caffe.pb.h"
-
-using std::string;
-
-namespace caffe {
-
-// Copy NetParameters with SplitLayers added to replace any shared bottom
-// blobs with unique bottom blobs provided by the SplitLayer.
-void insert_splits(const NetParameter& param, NetParameter* param_split);
-
-void configure_split_layer(const string& blob_name,
-    const int split_count, LayerConnection* split_layer_connection);
-
-string get_split_blob_name(const string& blob_name, const int split_index);
-
-}  // namespace caffe
-
-#endif  // CAFFE_UTIL_INSERT_SPLITS_HPP_
index 4db2556..82e52cd 100644 (file)
@@ -109,27 +109,6 @@ class DropoutLayer : public NeuronLayer<Dtype> {
 
 
 template <typename Dtype>
-class SplitLayer : public Layer<Dtype> {
- public:
-  explicit SplitLayer(const LayerParameter& param)
-      : Layer<Dtype>(param) {}
-  virtual void SetUp(const vector<Blob<Dtype>*>& bottom,
-      vector<Blob<Dtype>*>* top);
-
- protected:
-  virtual void Forward_cpu(const vector<Blob<Dtype>*>& bottom,
-      vector<Blob<Dtype>*>* top);
-  virtual void Forward_gpu(const vector<Blob<Dtype>*>& bottom,
-      vector<Blob<Dtype>*>* top);
-  virtual Dtype Backward_cpu(const vector<Blob<Dtype>*>& top,
-      const bool propagate_down, vector<Blob<Dtype>*>* bottom);
-  virtual Dtype Backward_gpu(const vector<Blob<Dtype>*>& top,
-      const bool propagate_down, vector<Blob<Dtype>*>* bottom);
-  int count_;
-};
-
-
-template <typename Dtype>
 class FlattenLayer : public Layer<Dtype> {
  public:
   explicit FlattenLayer(const LayerParameter& param)
index 48d6edf..b62ba38 100644 (file)
@@ -53,8 +53,6 @@ Layer<Dtype>* GetLayer(const LayerParameter& param) {
     return new SoftmaxLayer<Dtype>(param);
   } else if (type == "softmax_loss") {
     return new SoftmaxWithLossLayer<Dtype>(param);
-  } else if (type == "split") {
-    return new SplitLayer<Dtype>(param);
   } else if (type == "multinomial_logistic_loss") {
     return new MultinomialLogisticLossLayer<Dtype>(param);
   } else {
diff --git a/src/caffe/layers/split_layer.cpp b/src/caffe/layers/split_layer.cpp
deleted file mode 100644 (file)
index 5accdd0..0000000
+++ /dev/null
@@ -1,101 +0,0 @@
-// Copyright 2014 Jeff Donahue
-
-#include <vector>
-
-#include "caffe/layer.hpp"
-#include "caffe/vision_layers.hpp"
-#include "caffe/util/math_functions.hpp"
-
-namespace caffe {
-
-template <typename Dtype>
-void SplitLayer<Dtype>::SetUp(const vector<Blob<Dtype>*>& bottom,
-      vector<Blob<Dtype>*>* top) {
-  CHECK_EQ(bottom.size(), 1) << "Split Layer takes a single blob as input.";
-  CHECK_GE(top->size(), 1) << "Split Layer takes at least one blob as output.";
-  count_ = bottom[0]->count();
-  for (int i = 0; i < top->size(); ++i) {
-    // Allow the 0th top blob to be 'in-place', but no others.
-    if (i == 0 && (*top)[i] == bottom[0]) {
-      continue;
-    } else {
-      CHECK_NE((*top)[i], bottom[0]) << "Only 0th top blob may be in place.";
-    }
-    (*top)[i]->Reshape(bottom[0]->num(), bottom[0]->channels(),
-                       bottom[0]->height(), bottom[0]->width());
-    CHECK_EQ(count_, (*top)[i]->count());
-  }
-};
-
-template <typename Dtype>
-void SplitLayer<Dtype>::Forward_cpu(const vector<Blob<Dtype>*>& bottom,
-      vector<Blob<Dtype>*>* top) {
-  const Dtype* bottom_data = bottom[0]->cpu_data();
-  for (int i = 0; i < top->size(); ++i) {
-    if (i == 0 && (*top)[i] == bottom[0]) {
-      continue;
-    }
-    Dtype* top_data = (*top)[i]->mutable_cpu_data();
-    caffe_copy(count_, bottom_data, top_data);
-  }
-}
-
-template <typename Dtype>
-void SplitLayer<Dtype>::Forward_gpu(const vector<Blob<Dtype>*>& bottom,
-      vector<Blob<Dtype>*>* top) {
-  const Dtype* bottom_data = bottom[0]->gpu_data();
-  for (int i = 0; i < top->size(); ++i) {
-    if (i == 0 && (*top)[i] == bottom[0]) {
-      continue;
-    }
-    Dtype* top_data = (*top)[i]->mutable_gpu_data();
-    caffe_gpu_copy(count_, bottom_data, top_data);
-  }
-}
-
-template <typename Dtype>
-Dtype SplitLayer<Dtype>::Backward_cpu(const vector<Blob<Dtype>*>& top,
-      const bool propagate_down, vector<Blob<Dtype>*>* bottom) {
-  if (propagate_down) {
-    const Dtype* top_diff = top[0]->cpu_diff();
-    Dtype* bottom_diff = (*bottom)[0]->mutable_cpu_diff();
-    // Initialize by copying first top blob diff to our diff, unless we're
-    // doing in-place computation for the first blob, in which case the diff is
-    // already initialized.
-    if (top[0] != (*bottom)[0]) {
-      caffe_copy(count_, top_diff, bottom_diff);
-    }
-    // Add remaining top blob diffs.
-    for (int i = 1; i < top.size(); ++i) {
-      top_diff = top[i]->cpu_diff();
-      caffe_axpy(count_, Dtype(1.), top_diff, bottom_diff);
-    }
-  }
-  return Dtype(0.);
-}
-
-
-template <typename Dtype>
-Dtype SplitLayer<Dtype>::Backward_gpu(const vector<Blob<Dtype>*>& top,
-      const bool propagate_down, vector<Blob<Dtype>*>* bottom) {
-  if (propagate_down) {
-    const Dtype* top_diff = top[0]->gpu_diff();
-    Dtype* bottom_diff = (*bottom)[0]->mutable_gpu_diff();
-    // Initialize by copying first top blob diff to our diff, unless we're
-    // doing in-place computation for the first blob, in which case the diff is
-    // already initialized.
-    if (top[0] != (*bottom)[0]) {
-      caffe_gpu_copy(count_, top_diff, bottom_diff);
-    }
-    // Add remaining top blob diffs.
-    for (int i = 1; i < top.size(); ++i) {
-      top_diff = top[i]->gpu_diff();
-      caffe_gpu_axpy(count_, Dtype(1.), top_diff, bottom_diff);
-    }
-  }
-  return Dtype(0.);
-}
-
-INSTANTIATE_CLASS(SplitLayer);
-
-}  // namespace caffe
index e976dfd..f265cd3 100644 (file)
@@ -9,7 +9,6 @@
 #include "caffe/layer.hpp"
 #include "caffe/net.hpp"
 #include "caffe/util/io.hpp"
-#include "caffe/util/insert_splits.hpp"
 
 using std::pair;
 using std::map;
@@ -30,10 +29,7 @@ Net<Dtype>::Net(const string& param_file) {
 }
 
 template <typename Dtype>
-void Net<Dtype>::Init(const NetParameter& in_param) {
-  // Create a copy of in_param with splits added where necessary.
-  NetParameter param;
-  insert_splits(in_param, &param);
+void Net<Dtype>::Init(const NetParameter& param) {
   // Basically, build all the layers and set up its connections.
   name_ = param.name();
   map<string, int> blob_name_to_idx;
diff --git a/src/caffe/test/test_split_layer.cpp b/src/caffe/test/test_split_layer.cpp
deleted file mode 100644 (file)
index 002b787..0000000
+++ /dev/null
@@ -1,591 +0,0 @@
-// Copyright 2014 Jeff Donahue
-
-#include <cstring>
-#include <cuda_runtime.h>
-#include <google/protobuf/text_format.h>
-
-#include "gtest/gtest.h"
-#include "caffe/blob.hpp"
-#include "caffe/common.hpp"
-#include "caffe/filler.hpp"
-#include "caffe/vision_layers.hpp"
-#include "caffe/test/test_gradient_check_util.hpp"
-#include "caffe/util/insert_splits.hpp"
-
-#include "caffe/test/test_caffe_main.hpp"
-
-namespace caffe {
-
-extern cudaDeviceProp CAFFE_TEST_CUDA_PROP;
-
-template <typename Dtype>
-class SplitLayerTest : public ::testing::Test {
- protected:
-  SplitLayerTest()
-      : blob_bottom_(new Blob<Dtype>(2, 3, 6, 5)),
-        blob_top_a_(new Blob<Dtype>()),
-        blob_top_b_(new Blob<Dtype>()) {
-    // fill the values
-    FillerParameter filler_param;
-    GaussianFiller<Dtype> filler(filler_param);
-    filler.Fill(this->blob_bottom_);
-    blob_bottom_vec_.push_back(blob_bottom_);
-    blob_top_vec_.push_back(blob_top_a_);
-    blob_top_vec_.push_back(blob_top_b_);
-  };
-  virtual ~SplitLayerTest() {
-    delete blob_bottom_;
-    delete blob_top_a_;
-    delete blob_top_b_;
-  }
-  Blob<Dtype>* const blob_bottom_;
-  Blob<Dtype>* const blob_top_a_;
-  Blob<Dtype>* const blob_top_b_;
-  vector<Blob<Dtype>*> blob_bottom_vec_;
-  vector<Blob<Dtype>*> blob_top_vec_;
-};
-
-typedef ::testing::Types<float, double> Dtypes;
-TYPED_TEST_CASE(SplitLayerTest, Dtypes);
-
-TYPED_TEST(SplitLayerTest, TestSetup) {
-  LayerParameter layer_param;
-  SplitLayer<TypeParam> layer(layer_param);
-  layer.SetUp(this->blob_bottom_vec_, &(this->blob_top_vec_));
-  EXPECT_EQ(this->blob_top_a_->num(), 2);
-  EXPECT_EQ(this->blob_top_a_->channels(), 3);
-  EXPECT_EQ(this->blob_top_a_->height(), 6);
-  EXPECT_EQ(this->blob_top_a_->width(), 5);
-  EXPECT_EQ(this->blob_top_b_->num(), 2);
-  EXPECT_EQ(this->blob_top_b_->channels(), 3);
-  EXPECT_EQ(this->blob_top_b_->height(), 6);
-  EXPECT_EQ(this->blob_top_b_->width(), 5);
-}
-
-TYPED_TEST(SplitLayerTest, TestCPU) {
-  LayerParameter layer_param;
-  SplitLayer<TypeParam> layer(layer_param);
-  Caffe::set_mode(Caffe::CPU);
-  layer.SetUp(this->blob_bottom_vec_, &(this->blob_top_vec_));
-  layer.Forward(this->blob_bottom_vec_, &(this->blob_top_vec_));
-  for (int i = 0; i < this->blob_bottom_->count(); ++i) {
-    TypeParam bottom_value = this->blob_bottom_->cpu_data()[i];
-    EXPECT_EQ(bottom_value, this->blob_top_a_->cpu_data()[i]);
-    EXPECT_EQ(bottom_value, this->blob_top_b_->cpu_data()[i]);
-  }
-}
-
-TYPED_TEST(SplitLayerTest, TestGPU) {
-  LayerParameter layer_param;
-  SplitLayer<TypeParam> layer(layer_param);
-  Caffe::set_mode(Caffe::GPU);
-  layer.SetUp(this->blob_bottom_vec_, &(this->blob_top_vec_));
-  layer.Forward(this->blob_bottom_vec_, &(this->blob_top_vec_));
-  for (int i = 0; i < this->blob_bottom_->count(); ++i) {
-    TypeParam bottom_value = this->blob_bottom_->cpu_data()[i];
-    EXPECT_EQ(bottom_value, this->blob_top_a_->cpu_data()[i]);
-    EXPECT_EQ(bottom_value, this->blob_top_b_->cpu_data()[i]);
-  }
-}
-
-TYPED_TEST(SplitLayerTest, TestCPUInPlace) {
-  LayerParameter layer_param;
-  SplitLayer<TypeParam> layer(layer_param);
-  Caffe::set_mode(Caffe::CPU);
-  this->blob_top_vec_[0] = this->blob_bottom_vec_[0];
-  layer.SetUp(this->blob_bottom_vec_, &(this->blob_top_vec_));
-  layer.Forward(this->blob_bottom_vec_, &(this->blob_top_vec_));
-  for (int i = 0; i < this->blob_bottom_->count(); ++i) {
-    TypeParam bottom_value = this->blob_bottom_->cpu_data()[i];
-    EXPECT_EQ(bottom_value, this->blob_top_b_->cpu_data()[i]);
-  }
-}
-
-TYPED_TEST(SplitLayerTest, TestGPUInPlace) {
-  LayerParameter layer_param;
-  SplitLayer<TypeParam> layer(layer_param);
-  Caffe::set_mode(Caffe::GPU);
-  this->blob_top_vec_[0] = this->blob_bottom_vec_[0];
-  layer.SetUp(this->blob_bottom_vec_, &(this->blob_top_vec_));
-  layer.Forward(this->blob_bottom_vec_, &(this->blob_top_vec_));
-  for (int i = 0; i < this->blob_bottom_->count(); ++i) {
-    TypeParam bottom_value = this->blob_bottom_->cpu_data()[i];
-    EXPECT_EQ(bottom_value, this->blob_top_b_->cpu_data()[i]);
-  }
-}
-
-TYPED_TEST(SplitLayerTest, TestCPUGradient) {
-  LayerParameter layer_param;
-  Caffe::set_mode(Caffe::CPU);
-  SplitLayer<TypeParam> layer(layer_param);
-  GradientChecker<TypeParam> checker(1e-2, 1e-2);
-  checker.CheckGradientExhaustive(layer, this->blob_bottom_vec_,
-      this->blob_top_vec_);
-}
-
-TYPED_TEST(SplitLayerTest, TestGPUGradient) {
-  LayerParameter layer_param;
-  Caffe::set_mode(Caffe::GPU);
-  SplitLayer<TypeParam> layer(layer_param);
-  GradientChecker<TypeParam> checker(1e-2, 1e-2);
-  checker.CheckGradientExhaustive(layer, this->blob_bottom_vec_,
-      this->blob_top_vec_);
-}
-
-TYPED_TEST(SplitLayerTest, TestCPUGradientInPlace) {
-  LayerParameter layer_param;
-  Caffe::set_mode(Caffe::CPU);
-  SplitLayer<TypeParam> layer(layer_param);
-  GradientChecker<TypeParam> checker(1e-2, 1e-2);
-  this->blob_top_vec_[0] = this->blob_bottom_vec_[0];
-  checker.CheckGradientExhaustive(layer, this->blob_bottom_vec_,
-      this->blob_top_vec_);
-}
-
-TYPED_TEST(SplitLayerTest, TestGPUGradientInPlace) {
-  LayerParameter layer_param;
-  Caffe::set_mode(Caffe::GPU);
-  SplitLayer<TypeParam> layer(layer_param);
-  GradientChecker<TypeParam> checker(1e-2, 1e-2);
-  this->blob_top_vec_[0] = this->blob_bottom_vec_[0];
-  checker.CheckGradientExhaustive(layer, this->blob_bottom_vec_,
-      this->blob_top_vec_);
-}
-
-
-template <typename Dtype>
-class SplitLayerInsertionTest : public ::testing::Test {
- protected:
- SplitLayerInsertionTest() { };
-  void RunInsertionTest(
-      const string& input_param_string, const string& output_param_string) {
-    NetParameter input_param;
-    CHECK(google::protobuf::TextFormat::ParseFromString(
-        input_param_string, &input_param));
-    NetParameter expected_output_param;
-    CHECK(google::protobuf::TextFormat::ParseFromString(
-        output_param_string, &expected_output_param));
-    NetParameter actual_output_param;
-    insert_splits(input_param, &actual_output_param);
-    EXPECT_EQ(expected_output_param.DebugString(),
-        actual_output_param.DebugString());
-  }
-};
-
-typedef ::testing::Types<float> InsertionDtypes;
-TYPED_TEST_CASE(SplitLayerInsertionTest, InsertionDtypes);
-
-TYPED_TEST(SplitLayerInsertionTest, TestNoInsertion1) {
-  const string& input_proto =
-      "name: 'TestNetwork' "
-      "layers: { "
-      "  layer { "
-      "    name: 'data' "
-      "    type: 'data' "
-      "  } "
-      "  top: 'data' "
-      "  top: 'label' "
-      "} "
-      "layers: { "
-      "  layer { "
-      "    name: 'innerprod' "
-      "    type: 'inner_product' "
-      "  } "
-      "  bottom: 'data' "
-      "  top: 'innerprod' "
-      "} "
-      "layers: { "
-      "  layer { "
-      "    name: 'loss' "
-      "    type: 'softmax_with_loss' "
-      "  } "
-      "  bottom: 'innerprod' "
-      "  bottom: 'label' "
-      "} ";
-  this->RunInsertionTest(input_proto, input_proto);
-}
-
-TYPED_TEST(SplitLayerInsertionTest, TestNoInsertion2) {
-  const string& input_proto =
-      "name: 'TestNetwork' "
-      "layers: { "
-      "  layer { "
-      "    name: 'data' "
-      "    type: 'data' "
-      "  } "
-      "  top: 'data' "
-      "  top: 'label' "
-      "} "
-      "layers: { "
-      "  layer { "
-      "    name: 'data_split' "
-      "    type: 'split' "
-      "  } "
-      "  bottom: 'data' "
-      "  top: 'data_split_0' "
-      "  top: 'data_split_1' "
-      "} "
-      "layers: { "
-      "  layer { "
-      "    name: 'innerprod1' "
-      "    type: 'inner_product' "
-      "  } "
-      "  bottom: 'data_split_0' "
-      "  top: 'innerprod1' "
-      "} "
-      "layers: { "
-      "  layer { "
-      "    name: 'innerprod2' "
-      "    type: 'inner_product' "
-      "  } "
-      "  bottom: 'data_split_1' "
-      "  top: 'innerprod2' "
-      "} "
-      "layers: { "
-      "  layer { "
-      "    name: 'loss' "
-      "    type: 'euclidean_loss' "
-      "  } "
-      "  bottom: 'innerprod1' "
-      "  bottom: 'innerprod2' "
-      "} ";
-  this->RunInsertionTest(input_proto, input_proto);
-}
-
-TYPED_TEST(SplitLayerInsertionTest, TestInsertion) {
-  const string& input_proto =
-      "name: 'TestNetwork' "
-      "layers: { "
-      "  layer { "
-      "    name: 'data' "
-      "    type: 'data' "
-      "  } "
-      "  top: 'data' "
-      "  top: 'label' "
-      "} "
-      "layers: { "
-      "  layer { "
-      "    name: 'innerprod1' "
-      "    type: 'inner_product' "
-      "  } "
-      "  bottom: 'data' "
-      "  top: 'innerprod1' "
-      "} "
-      "layers: { "
-      "  layer { "
-      "    name: 'innerprod2' "
-      "    type: 'inner_product' "
-      "  } "
-      "  bottom: 'data' "
-      "  top: 'innerprod2' "
-      "} "
-      "layers: { "
-      "  layer { "
-      "    name: 'innerprod3' "
-      "    type: 'inner_product' "
-      "  } "
-      "  bottom: 'data' "
-      "  top: 'innerprod3' "
-      "} "
-      "layers: { "
-      "  layer { "
-      "    name: 'loss1' "
-      "    type: 'euclidean_loss' "
-      "  } "
-      "  bottom: 'innerprod1' "
-      "  bottom: 'innerprod2' "
-      "} "
-      "layers: { "
-      "  layer { "
-      "    name: 'loss2' "
-      "    type: 'euclidean_loss' "
-      "  } "
-      "  bottom: 'innerprod2' "
-      "  bottom: 'innerprod3' "
-      "} ";
-  const string& expected_output_proto =
-      "name: 'TestNetwork' "
-      "layers: { "
-      "  layer { "
-      "    name: 'data' "
-      "    type: 'data' "
-      "  } "
-      "  top: 'data' "
-      "  top: 'label' "
-      "} "
-      "layers: { "
-      "  layer { "
-      "    name: 'data_split' "
-      "    type: 'split' "
-      "  } "
-      "  bottom: 'data' "
-      "  top: 'data' "
-      "  top: 'data_split_1' "
-      "  top: 'data_split_2' "
-      "} "
-      "layers: { "
-      "  layer { "
-      "    name: 'innerprod1' "
-      "    type: 'inner_product' "
-      "  } "
-      "  bottom: 'data' "
-      "  top: 'innerprod1' "
-      "} "
-      "layers: { "
-      "  layer { "
-      "    name: 'innerprod2' "
-      "    type: 'inner_product' "
-      "  } "
-      "  bottom: 'data_split_1' "
-      "  top: 'innerprod2' "
-      "} "
-      "layers: { "
-      "  layer { "
-      "    name: 'innerprod2_split' "
-      "    type: 'split' "
-      "  } "
-      "  bottom: 'innerprod2' "
-      "  top: 'innerprod2' "
-      "  top: 'innerprod2_split_1' "
-      "} "
-      "layers: { "
-      "  layer { "
-      "    name: 'innerprod3' "
-      "    type: 'inner_product' "
-      "  } "
-      "  bottom: 'data_split_2' "
-      "  top: 'innerprod3' "
-      "} "
-      "layers: { "
-      "  layer { "
-      "    name: 'loss1' "
-      "    type: 'euclidean_loss' "
-      "  } "
-      "  bottom: 'innerprod1' "
-      "  bottom: 'innerprod2' "
-      "} "
-      "layers: { "
-      "  layer { "
-      "    name: 'loss2' "
-      "    type: 'euclidean_loss' "
-      "  } "
-      "  bottom: 'innerprod2_split_1' "
-      "  bottom: 'innerprod3' "
-      "} ";
-  this->RunInsertionTest(input_proto, expected_output_proto);
-}
-
-TYPED_TEST(SplitLayerInsertionTest, TestInsertionTwoTop) {
-  const string& input_proto =
-      "name: 'TestNetwork' "
-      "layers: { "
-      "  layer { "
-      "    name: 'data' "
-      "    type: 'data' "
-      "  } "
-      "  top: 'data' "
-      "  top: 'label' "
-      "} "
-      "layers: { "
-      "  layer { "
-      "    name: 'innerprod1' "
-      "    type: 'inner_product' "
-      "  } "
-      "  bottom: 'data' "
-      "  top: 'innerprod1' "
-      "} "
-      "layers: { "
-      "  layer { "
-      "    name: 'innerprod2' "
-      "    type: 'inner_product' "
-      "  } "
-      "  bottom: 'label' "
-      "  top: 'innerprod2' "
-      "} "
-      "layers: { "
-      "  layer { "
-      "    name: 'innerprod3' "
-      "    type: 'inner_product' "
-      "  } "
-      "  bottom: 'data' "
-      "  top: 'innerprod3' "
-      "} "
-      "layers: { "
-      "  layer { "
-      "    name: 'innerprod4' "
-      "    type: 'inner_product' "
-      "  } "
-      "  bottom: 'label' "
-      "  top: 'innerprod4' "
-      "} "
-      "layers: { "
-      "  layer { "
-      "    name: 'loss1' "
-      "    type: 'euclidean_loss' "
-      "  } "
-      "  bottom: 'innerprod1' "
-      "  bottom: 'innerprod3' "
-      "} "
-      "layers: { "
-      "  layer { "
-      "    name: 'loss2' "
-      "    type: 'euclidean_loss' "
-      "  } "
-      "  bottom: 'innerprod2' "
-      "  bottom: 'innerprod4' "
-      "} ";
-  const string& expected_output_proto =
-      "name: 'TestNetwork' "
-      "layers: { "
-      "  layer { "
-      "    name: 'data' "
-      "    type: 'data' "
-      "  } "
-      "  top: 'data' "
-      "  top: 'label' "
-      "} "
-      "layers: { "
-      "  layer { "
-      "    name: 'data_split' "
-      "    type: 'split' "
-      "  } "
-      "  bottom: 'data' "
-      "  top: 'data' "
-      "  top: 'data_split_1' "
-      "} "
-      "layers: { "
-      "  layer { "
-      "    name: 'label_split' "
-      "    type: 'split' "
-      "  } "
-      "  bottom: 'label' "
-      "  top: 'label' "
-      "  top: 'label_split_1' "
-      "} "
-      "layers: { "
-      "  layer { "
-      "    name: 'innerprod1' "
-      "    type: 'inner_product' "
-      "  } "
-      "  bottom: 'data' "
-      "  top: 'innerprod1' "
-      "} "
-      "layers: { "
-      "  layer { "
-      "    name: 'innerprod2' "
-      "    type: 'inner_product' "
-      "  } "
-      "  bottom: 'label' "
-      "  top: 'innerprod2' "
-      "} "
-      "layers: { "
-      "  layer { "
-      "    name: 'innerprod3' "
-      "    type: 'inner_product' "
-      "  } "
-      "  bottom: 'data_split_1' "
-      "  top: 'innerprod3' "
-      "} "
-      "layers: { "
-      "  layer { "
-      "    name: 'innerprod4' "
-      "    type: 'inner_product' "
-      "  } "
-      "  bottom: 'label_split_1' "
-      "  top: 'innerprod4' "
-      "} "
-      "layers: { "
-      "  layer { "
-      "    name: 'loss1' "
-      "    type: 'euclidean_loss' "
-      "  } "
-      "  bottom: 'innerprod1' "
-      "  bottom: 'innerprod3' "
-      "} "
-      "layers: { "
-      "  layer { "
-      "    name: 'loss2' "
-      "    type: 'euclidean_loss' "
-      "  } "
-      "  bottom: 'innerprod2' "
-      "  bottom: 'innerprod4' "
-      "} ";
-  this->RunInsertionTest(input_proto, expected_output_proto);
-}
-
-TYPED_TEST(SplitLayerInsertionTest, TestInputInsertion) {
-  const string& input_proto =
-      "name: 'TestNetwork' "
-      "input: 'data' "
-      "input_dim: 10 "
-      "input_dim: 3 "
-      "input_dim: 227 "
-      "input_dim: 227 "
-      "layers: { "
-      "  layer { "
-      "    name: 'innerprod1' "
-      "    type: 'inner_product' "
-      "  } "
-      "  bottom: 'data' "
-      "  top: 'innerprod1' "
-      "} "
-      "layers: { "
-      "  layer { "
-      "    name: 'innerprod2' "
-      "    type: 'inner_product' "
-      "  } "
-      "  bottom: 'data' "
-      "  top: 'innerprod2' "
-      "} "
-      "layers: { "
-      "  layer { "
-      "    name: 'loss' "
-      "    type: 'euclidean_loss' "
-      "  } "
-      "  bottom: 'innerprod1' "
-      "  bottom: 'innerprod2' "
-      "} ";
-  const string& expected_output_proto =
-      "name: 'TestNetwork' "
-      "input: 'data' "
-      "input_dim: 10 "
-      "input_dim: 3 "
-      "input_dim: 227 "
-      "input_dim: 227 "
-      "layers: { "
-      "  layer { "
-      "    name: 'data_split' "
-      "    type: 'split' "
-      "  } "
-      "  bottom: 'data' "
-      "  top: 'data' "
-      "  top: 'data_split_1' "
-      "} "
-      "layers: { "
-      "  layer { "
-      "    name: 'innerprod1' "
-      "    type: 'inner_product' "
-      "  } "
-      "  bottom: 'data' "
-      "  top: 'innerprod1' "
-      "} "
-      "layers: { "
-      "  layer { "
-      "    name: 'innerprod2' "
-      "    type: 'inner_product' "
-      "  } "
-      "  bottom: 'data_split_1' "
-      "  top: 'innerprod2' "
-      "} "
-      "layers: { "
-      "  layer { "
-      "    name: 'loss' "
-      "    type: 'euclidean_loss' "
-      "  } "
-      "  bottom: 'innerprod1' "
-      "  bottom: 'innerprod2' "
-      "} ";
-  this->RunInsertionTest(input_proto, expected_output_proto);
-}
-
-}
diff --git a/src/caffe/util/insert_splits.cpp b/src/caffe/util/insert_splits.cpp
deleted file mode 100644 (file)
index 48c7fb1..0000000
+++ /dev/null
@@ -1,86 +0,0 @@
-// Copyright 2014 Jeff Donahue
-
-#include <map>
-#include <string>
-#include <sstream>
-
-#include "caffe/util/insert_splits.hpp"
-
-using std::map;
-using std::ostringstream;
-
-namespace caffe {
-
-void insert_splits(const NetParameter& param, NetParameter* param_split) {
-  // Initialize by copying from the input NetParameter.
-  param_split->CopyFrom(param);
-  param_split->clear_layers();
-  map<string, int> blob_name_to_bottom_count;
-  map<string, int> blob_name_to_bottom_split_idx;
-  // Determine the number of times each blob is used as an input (bottom) blob.
-  for (int i = 0; i < param.layers_size(); ++i) {
-    const LayerConnection& layer_connection = param.layers(i);
-    for (int j = 0; j < layer_connection.bottom_size(); ++j) {
-      const string& blob_name = layer_connection.bottom(j);
-      ++blob_name_to_bottom_count[blob_name];
-    }
-  }
-  // Create split layer for any input blobs used by other layers as bottom
-  // blobs more than once.
-  for (int i = 0; i < param.input_size(); ++i) {
-    const string& blob_name = param.input(i);
-    const int split_count = blob_name_to_bottom_count[blob_name];
-    if (split_count > 1) {
-      LayerConnection* split_layer_connection = param_split->add_layers();
-      configure_split_layer(blob_name, split_count, split_layer_connection);
-    }
-  }
-  for (int i = 0; i < param.layers_size(); ++i) {
-    LayerConnection* layer_connection = param_split->add_layers();
-    layer_connection->CopyFrom(param.layers(i));
-    // Replace any shared bottom blobs with split layer outputs.
-    for (int j = 0; j < layer_connection->bottom_size(); ++j) {
-      const string& blob_name = layer_connection->bottom(j);
-      const int split_count = blob_name_to_bottom_count[blob_name];
-      if (split_count > 1) {
-        layer_connection->set_bottom(j, get_split_blob_name(blob_name,
-            blob_name_to_bottom_split_idx[blob_name]++));
-      }
-    }
-    // Create split layer for any top blobs used by other layers as bottom
-    // blobs more than once.
-    for (int j = 0; j < layer_connection->top_size(); ++j) {
-      const string& blob_name = layer_connection->top(j);
-      const int split_count = blob_name_to_bottom_count[blob_name];
-      if (split_count > 1) {
-        LayerConnection* split_layer_connection = param_split->add_layers();
-        configure_split_layer(blob_name, split_count, split_layer_connection);
-      }
-    }
-  }
-}
-
-void configure_split_layer(const string& blob_name,
-    const int split_count, LayerConnection* split_layer_connection) {
-  split_layer_connection->Clear();
-  split_layer_connection->add_bottom(blob_name);
-  LayerParameter* split_layer_param = split_layer_connection->mutable_layer();
-  split_layer_param->set_name(blob_name + "_split");
-  split_layer_param->set_type("split");
-  for (int k = 0; k < split_count; ++k) {
-    split_layer_connection->add_top(get_split_blob_name(blob_name, k));
-  }
-}
-
-string get_split_blob_name(const string& blob_name, const int split_index) {
-  // 0th split top blob is given the same name as the bottom blob so that
-  // computation is done 'in-place', saving a bit of time and memory.
-  if (split_index == 0) {
-    return blob_name;
-  }
-  ostringstream split_blob_name;
-  split_blob_name << blob_name << "_split_" << split_index;
-  return split_blob_name.str();
-}
-
-}  // namespace caffe