[caffegen] Remove deprecated Fill command (#1567)
author Jonghyun Park / 동작제어Lab (SR) / Staff Engineer / Samsung Electronics <jh1302.park@samsung.com>
Wed, 19 Sep 2018 08:48:41 +0000 (17:48 +0900)
committer GitHub Enterprise <noreply-CODE@samsung.com>
Wed, 19 Sep 2018 08:48:41 +0000 (17:48 +0900)
* [caffegen] Remove deprecated Fill command

This commit removes the deprecated Fill command and its related internal
implementation from caffegen.

Signed-off-by: Jonghyun Park <jh1302.park@samsung.com>
* Fix build error

26 files changed:
contrib/caffegen/CMakeLists.txt
contrib/caffegen/include/internal/BlobContext.h [deleted file]
contrib/caffegen/include/internal/BlobShape.h [deleted file]
contrib/caffegen/include/internal/ConvolutionLayer.h [deleted file]
contrib/caffegen/include/internal/InputLayer.h [deleted file]
contrib/caffegen/include/internal/Layer.h [deleted file]
contrib/caffegen/include/internal/LayerAnalysisPass.h [deleted file]
contrib/caffegen/include/internal/LayerContext.h [deleted file]
contrib/caffegen/include/internal/LayerFactory.h [deleted file]
contrib/caffegen/include/internal/LayerResolver.h [deleted file]
contrib/caffegen/include/internal/LayerTransformPass.h [deleted file]
contrib/caffegen/include/internal/Network.h [deleted file]
contrib/caffegen/include/internal/NetworkBuilder.h [deleted file]
contrib/caffegen/src/Driver.cpp
contrib/caffegen/src/FillCommand.cpp [deleted file]
contrib/caffegen/src/FillCommand.h [deleted file]
contrib/caffegen/src/internal/BlobContext.cpp [deleted file]
contrib/caffegen/src/internal/ConvolutionLayer.cpp [deleted file]
contrib/caffegen/src/internal/InputLayer.cpp [deleted file]
contrib/caffegen/src/internal/LayerAnalysisPass.cpp [deleted file]
contrib/caffegen/src/internal/LayerContext.cpp [deleted file]
contrib/caffegen/src/internal/LayerResolver.cpp [deleted file]
contrib/caffegen/src/internal/Network.cpp [deleted file]
contrib/caffegen/src/internal/NetworkBuilder.cpp [deleted file]
contrib/caffegen/src/internal/ParameterRandomizePass.cpp [deleted file]
contrib/caffegen/src/internal/ParameterRandomizePass.h [deleted file]

diff --git a/contrib/caffegen/CMakeLists.txt b/contrib/caffegen/CMakeLists.txt
index ce8565e..2875937 100644 (file)
@@ -13,7 +13,6 @@ endif(NOT Caffe_FOUND)
 file(GLOB_RECURSE SOURCES "src/*.cpp")
 
 add_executable(caffegen ${SOURCES})
-target_include_directories(caffegen PRIVATE include)
 target_link_libraries(caffegen stdex)
 target_link_libraries(caffegen cli)
 target_link_libraries(caffegen caffeproto)
diff --git a/contrib/caffegen/include/internal/BlobContext.h b/contrib/caffegen/include/internal/BlobContext.h
deleted file mode 100644 (file)
index 7bcbf7c..0000000
+++ /dev/null
@@ -1,21 +0,0 @@
-#ifndef __BLOB_CONTEXT_H__
-#define __BLOB_CONTEXT_H__
-
-#include "BlobShape.h"
-
-#include <string>
-#include <map>
-
-class BlobContext
-{
-public:
-  const BlobShape &at(const std::string &name) const;
-
-public:
-  BlobContext &insert(const std::string &name, const BlobShape &shape);
-
-private:
-  std::map<std::string, BlobShape> _shapes;
-};
-
-#endif // __BLOB_CONTEXT_H__
diff --git a/contrib/caffegen/include/internal/BlobShape.h b/contrib/caffegen/include/internal/BlobShape.h
deleted file mode 100644 (file)
index 94bacc4..0000000
+++ /dev/null
@@ -1,27 +0,0 @@
-#ifndef __BLOB_SHAPE_H__
-#define __BLOB_SHAPE_H__
-
-#include <vector>
-#include <cstdint>
-
-class BlobShape
-{
-public:
-  uint32_t rank(void) const { return _dims.size(); }
-
-public:
-  BlobShape &resize(uint32_t size)
-  {
-    _dims.resize(size);
-    return (*this);
-  }
-
-public:
-  int64_t dim(uint32_t axe) const { return _dims.at(axe); }
-  int64_t &dim(uint32_t axe) { return _dims.at(axe); }
-
-private:
-  std::vector<int64_t> _dims;
-};
-
-#endif // __BLOB_SHAPE_H__
diff --git a/contrib/caffegen/include/internal/ConvolutionLayer.h b/contrib/caffegen/include/internal/ConvolutionLayer.h
deleted file mode 100644 (file)
index aa612b9..0000000
+++ /dev/null
@@ -1,66 +0,0 @@
-#ifndef __CONVOLUTION_LAYER_H__
-#define __CONVOLUTION_LAYER_H__
-
-#include "Layer.h"
-#include "Network.h"
-
-#include "BlobShape.h"
-
-#include <caffe/proto/caffe.pb.h>
-
-class ConvolutionLayer final : public Layer
-{
-public:
-  ConvolutionLayer(const Network *net, caffe::LayerParameter *p);
-
-public:
-  uint32_t bottom_size(void) const override;
-  const std::string &bottom_name(uint32_t n) const override;
-  const BlobShape &bottom_shape(uint32_t n) const override;
-
-public:
-  uint32_t top_size(void) const override;
-  const std::string &top_name(uint32_t n) const override;
-  BlobShape top_shape(uint32_t n) const override;
-
-public:
-  void accept(LayerAnalysisPass &&) const override;
-  void accept(LayerTransformPass &&) override;
-
-public:
-  const caffe::LayerParameter &param(void) const { return *_param; }
-  caffe::LayerParameter &param(void) { return *_param; }
-
-public:
-  caffe::ConvolutionParameter &conv_param(void) { return *param().mutable_convolution_param(); }
-
-  const caffe::ConvolutionParameter &conv_param(void) const { return param().convolution_param(); }
-
-public:
-  const std::string &input_name(void) const;
-  const BlobShape &input_shape(void) const;
-
-public:
-  const std::string &output_name(void) const;
-  BlobShape output_shape(void) const;
-
-public:
-  uint32_t channel_axis(void) const;
-  uint32_t num_effective_output(void) const;
-
-public:
-  uint32_t num_spatial_axes(void) const;
-  uint32_t num_batch_axes(void) const;
-
-public:
-  uint32_t pad(uint32_t spatial_axe) const;
-  uint32_t kernel_size(uint32_t spatial_axe) const;
-  uint32_t stride(uint32_t spatial_axe) const;
-  uint32_t dilation(uint32_t spatial_axe) const;
-
-private:
-  const Network *const _net;
-  caffe::LayerParameter *const _param;
-};
-
-#endif // __CONVOLUTION_LAYER_H__
diff --git a/contrib/caffegen/include/internal/InputLayer.h b/contrib/caffegen/include/internal/InputLayer.h
deleted file mode 100644 (file)
index 3738c00..0000000
+++ /dev/null
@@ -1,38 +0,0 @@
-#ifndef __INPUT_LAYER_H__
-#define __INPUT_LAYER_H__
-
-#include "Layer.h"
-#include "Network.h"
-
-#include "BlobShape.h"
-
-#include <caffe/proto/caffe.pb.h>
-
-class InputLayer final : public Layer
-{
-public:
-  InputLayer(const Network *net, caffe::LayerParameter *p);
-
-public:
-  uint32_t bottom_size(void) const override;
-  const std::string &bottom_name(uint32_t n) const override;
-  const BlobShape &bottom_shape(uint32_t n) const override;
-
-public:
-  uint32_t top_size(void) const override;
-  const std::string &top_name(uint32_t n) const override;
-  BlobShape top_shape(uint32_t n) const override;
-
-public:
-  void accept(LayerAnalysisPass &&) const override;
-  void accept(LayerTransformPass &&) override;
-
-public:
-  const caffe::LayerParameter &param(void) const { return *_param; }
-  caffe::LayerParameter &param(void) { return *_param; }
-
-private:
-  caffe::LayerParameter *const _param;
-};
-
-#endif // __INPUT_LAYER_H__
diff --git a/contrib/caffegen/include/internal/Layer.h b/contrib/caffegen/include/internal/Layer.h
deleted file mode 100644 (file)
index 753ab9a..0000000
+++ /dev/null
@@ -1,27 +0,0 @@
-#ifndef __LAYER_H__
-#define __LAYER_H__
-
-#include "BlobShape.h"
-
-#include <string>
-
-struct LayerAnalysisPass;
-struct LayerTransformPass;
-
-struct Layer
-{
-  virtual ~Layer() = default;
-
-  virtual uint32_t bottom_size(void) const = 0;
-  virtual const std::string &bottom_name(uint32_t n) const = 0;
-  virtual const BlobShape &bottom_shape(uint32_t n) const = 0;
-
-  virtual uint32_t top_size(void) const = 0;
-  virtual const std::string &top_name(uint32_t n) const = 0;
-  virtual BlobShape top_shape(uint32_t n) const = 0;
-
-  virtual void accept(LayerAnalysisPass &&) const = 0;
-  virtual void accept(LayerTransformPass &&) = 0;
-};
-
-#endif // __LAYER_H__
diff --git a/contrib/caffegen/include/internal/LayerAnalysisPass.h b/contrib/caffegen/include/internal/LayerAnalysisPass.h
deleted file mode 100644 (file)
index 40244ef..0000000
+++ /dev/null
@@ -1,15 +0,0 @@
-#ifndef __LAYER_ANALYSIS_PASS_H__
-#define __LAYER_ANALYSIS_PASS_H__
-
-#include "InputLayer.h"
-#include "ConvolutionLayer.h"
-
-struct LayerAnalysisPass
-{
-  virtual ~LayerAnalysisPass() = default;
-
-  virtual void visit(const InputLayer &) = 0;
-  virtual void visit(const ConvolutionLayer &) = 0;
-};
-
-#endif // __LAYER_ANALYSIS_PASS_H__
diff --git a/contrib/caffegen/include/internal/LayerContext.h b/contrib/caffegen/include/internal/LayerContext.h
deleted file mode 100644 (file)
index b00b6f6..0000000
+++ /dev/null
@@ -1,26 +0,0 @@
-#ifndef __LAYER_CONTEXT_H__
-#define __LAYER_CONTEXT_H__
-
-#include "Layer.h"
-
-#include <vector>
-#include <memory>
-#include <cstdint>
-
-class LayerContext
-{
-public:
-  uint32_t size(void) const;
-
-public:
-  Layer &at(uint32_t n);
-  const Layer &at(uint32_t n) const;
-
-public:
-  LayerContext &append(std::unique_ptr<Layer> &&l);
-
-private:
-  std::vector<std::unique_ptr<Layer>> _layers;
-};
-
-#endif // __LAYER_CONTEXT_H__
diff --git a/contrib/caffegen/include/internal/LayerFactory.h b/contrib/caffegen/include/internal/LayerFactory.h
deleted file mode 100644 (file)
index 8e894be..0000000
+++ /dev/null
@@ -1,17 +0,0 @@
-#ifndef __LAYER_FACTORY_H__
-#define __LAYER_FACTORY_H__
-
-#include "Network.h"
-
-#include <caffe/proto/caffe.pb.h>
-
-#include <memory>
-
-struct LayerFactory
-{
-  virtual ~LayerFactory() = default;
-
-  virtual std::unique_ptr<Layer> make(Network *, caffe::LayerParameter *) const = 0;
-};
-
-#endif // __LAYER_FACTORY_H__
diff --git a/contrib/caffegen/include/internal/LayerResolver.h b/contrib/caffegen/include/internal/LayerResolver.h
deleted file mode 100644 (file)
index 2775cfa..0000000
+++ /dev/null
@@ -1,25 +0,0 @@
-#ifndef __LAYER_RESOLVER_H__
-#define __LAYER_RESOLVER_H__
-
-#include "LayerFactory.h"
-
-#include <map>
-#include <memory>
-#include <string>
-
-class LayerResolver
-{
-public:
-  LayerResolver();
-
-public:
-  const LayerFactory &resolve(const std::string &type) const;
-
-private:
-  // NOTE Here std::shared_ptr<LayerFactory> is used instead of std::unique_ptr<LayerFactory>
-  //      as GCC 4.8.3 complains about the use of copy constructor even when _factories field
-  //      is never used.
-  std::map<std::string, std::shared_ptr<LayerFactory>> _factories;
-};
-
-#endif // __LAYER_RESOLVER_H__
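The NOTE in the removed LayerResolver.h explains why std::shared_ptr was chosen over std::unique_ptr for the factory map. The underlying issue is general: a std::map of std::unique_ptr cannot be copied, so copying the enclosing class fails to compile, while a map of std::shared_ptr copies fine. A minimal, self-contained illustration with hypothetical types (not part of this patch; the exact GCC 4.8.3 diagnostic described in the NOTE is not reproduced here):

    #include <map>
    #include <memory>
    #include <string>

    struct Widget
    {
      // placeholder element type
    };

    struct UniqueHolder
    {
      std::map<std::string, std::unique_ptr<Widget>> items; // copying UniqueHolder will not compile
    };

    struct SharedHolder
    {
      std::map<std::string, std::shared_ptr<Widget>> items; // copying SharedHolder is fine
    };

    int main()
    {
      UniqueHolder a;
      // UniqueHolder b = a; // error: copying the map copies its unique_ptr elements
      SharedHolder c;
      SharedHolder d = c; // OK: shared_ptr elements are copyable
      (void)a;
      (void)d;
      return 0;
    }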
diff --git a/contrib/caffegen/include/internal/LayerTransformPass.h b/contrib/caffegen/include/internal/LayerTransformPass.h
deleted file mode 100644 (file)
index 4afbc02..0000000
+++ /dev/null
@@ -1,15 +0,0 @@
-#ifndef __LAYER_TRANSFORM_PASS_H__
-#define __LAYER_TRANSFORM_PASS_H__
-
-#include "InputLayer.h"
-#include "ConvolutionLayer.h"
-
-struct LayerTransformPass
-{
-  virtual ~LayerTransformPass() = default;
-
-  virtual void visit(InputLayer &) = 0;
-  virtual void visit(ConvolutionLayer &) = 0;
-};
-
-#endif // __LAYER_TRANSFORM_PASS_H__
diff --git a/contrib/caffegen/include/internal/Network.h b/contrib/caffegen/include/internal/Network.h
deleted file mode 100644 (file)
index a34d7a4..0000000
+++ /dev/null
@@ -1,36 +0,0 @@
-#ifndef __NETWORK_H__
-#define __NETWORK_H__
-
-#include "BlobContext.h"
-#include "LayerContext.h"
-
-#include <caffe/proto/caffe.pb.h>
-
-#include <memory>
-
-class Network
-{
-public:
-  explicit Network(std::unique_ptr<::caffe::NetParameter> &&param);
-
-public:
-  LayerContext &layers(void) { return _layers; }
-  const LayerContext &layers(void) const { return _layers; }
-
-public:
-  BlobContext &blobs(void) { return _blobs; }
-  const BlobContext &blobs(void) const { return _blobs; }
-
-public:
-  ::caffe::NetParameter &param(void) { return *_param; }
-  const ::caffe::NetParameter &param(void) const { return *_param; }
-
-private:
-  const std::unique_ptr<::caffe::NetParameter> _param;
-
-private:
-  BlobContext _blobs;
-  LayerContext _layers;
-};
-
-#endif // __NETWORK_H__
diff --git a/contrib/caffegen/include/internal/NetworkBuilder.h b/contrib/caffegen/include/internal/NetworkBuilder.h
deleted file mode 100644 (file)
index dff1796..0000000
+++ /dev/null
@@ -1,22 +0,0 @@
-#ifndef __NETWORK_BUILDER_H__
-#define __NETWORK_BUILDER_H__
-
-#include "LayerResolver.h"
-
-#include <caffe/proto/caffe.pb.h>
-
-#include <memory>
-
-class NetworkBuilder
-{
-public:
-  NetworkBuilder(const LayerResolver &resolver);
-
-public:
-  std::unique_ptr<Network> build(std::unique_ptr<caffe::NetParameter> &&);
-
-private:
-  const LayerResolver &_resolver;
-};
-
-#endif // __NETWORK_BUILDER_H__
diff --git a/contrib/caffegen/src/Driver.cpp b/contrib/caffegen/src/Driver.cpp
index 9f8e78f..21a7803 100644 (file)
@@ -1,5 +1,4 @@
 #include "InitCommand.h"
-#include "FillCommand.h"
 #include "EncodeCommand.h"
 #include "DecodeCommand.h"
 #include "MergeCommand.h"
@@ -18,7 +17,6 @@ int main(int argc, char **argv)
 
   // all receive data from stdin
   app.insert("init", make_unique<InitCommand>());
-  app.insert("fill", make_unique<FillCommand>());
   app.insert("encode", make_unique<EncodeCommand>());
   app.insert("decode", make_unique<DecodeCommand>());
   // takes 2 args: prototxt model and caffemodel weights in that order
diff --git a/contrib/caffegen/src/FillCommand.cpp b/contrib/caffegen/src/FillCommand.cpp
deleted file mode 100644 (file)
index d599977..0000000
+++ /dev/null
@@ -1,62 +0,0 @@
-#include "FillCommand.h"
-#include "internal/LayerResolver.h"
-#include "internal/NetworkBuilder.h"
-#include "internal/ParameterRandomizePass.h"
-
-#include <caffe/proto/caffe.pb.h>
-
-#include <stdex/Memory.h>
-
-#include <google/protobuf/io/coded_stream.h>
-#include <google/protobuf/io/zero_copy_stream_impl.h>
-#include <google/protobuf/text_format.h>
-
-#include <chrono>
-#include <random>
-#include <iostream>
-
-using stdex::make_unique;
-
-int FillCommand::run(int, const char *const *) const
-{
-  auto param = make_unique<::caffe::NetParameter>();
-
-  // Read from standard input
-  google::protobuf::io::FileInputStream is{0};
-  if (!google::protobuf::TextFormat::Parse(&is, param.get()))
-  {
-    std::cerr << "ERROR: Failed to parse prototxt" << std::endl;
-    return 255;
-  }
-
-  auto net = NetworkBuilder{LayerResolver{}}.build(std::move(param));
-
-  uint32_t seed = std::chrono::system_clock::now().time_since_epoch().count();
-
-  // Allow users to override seed
-  {
-    char *env = std::getenv("SEED");
-
-    if (env)
-    {
-      seed = std::stoi(env);
-    }
-  }
-
-  std::cerr << "Use '" << seed << "' as seed" << std::endl;
-
-  // Create a random number generator
-  std::default_random_engine generator{seed};
-
-  // Randomize parameters
-  for (uint32_t n = 0; n < net->layers().size(); ++n)
-  {
-    net->layers().at(n).accept(ParameterRandomizePass{generator});
-  }
-
-  // Write to standard output
-  google::protobuf::io::FileOutputStream output(1);
-  google::protobuf::TextFormat::Print(net->param(), &output);
-
-  return 0;
-}
diff --git a/contrib/caffegen/src/FillCommand.h b/contrib/caffegen/src/FillCommand.h
deleted file mode 100644 (file)
index 2b2f0dd..0000000
+++ /dev/null
@@ -1,11 +0,0 @@
-#ifndef __FILL_COMMAND_H__
-#define __FILL_COMMAND_H__
-
-#include <cli/Command.h>
-
-struct FillCommand final : public cli::Command
-{
-  int run(int argc, const char *const *argv) const override;
-};
-
-#endif // __FILL_COMMAND_H__
diff --git a/contrib/caffegen/src/internal/BlobContext.cpp b/contrib/caffegen/src/internal/BlobContext.cpp
deleted file mode 100644 (file)
index 545ee19..0000000
+++ /dev/null
@@ -1,9 +0,0 @@
-#include "internal/BlobContext.h"
-
-const BlobShape &BlobContext::at(const std::string &name) const { return _shapes.at(name); }
-
-BlobContext &BlobContext::insert(const std::string &name, const BlobShape &shape)
-{
-  _shapes[name] = shape;
-  return (*this);
-}
diff --git a/contrib/caffegen/src/internal/ConvolutionLayer.cpp b/contrib/caffegen/src/internal/ConvolutionLayer.cpp
deleted file mode 100644 (file)
index 85223c6..0000000
+++ /dev/null
@@ -1,132 +0,0 @@
-#include "internal/ConvolutionLayer.h"
-#include "internal/LayerAnalysisPass.h"
-#include "internal/LayerTransformPass.h"
-
-#include <cassert>
-
-ConvolutionLayer::ConvolutionLayer(const Network *net, caffe::LayerParameter *p)
-    : _net{net}, _param{p}
-{
-  assert(param().type() == "Convolution");
-  assert(param().mutable_convolution_param() != nullptr);
-
-  // These constraints come from Convolution layer's definition
-  assert(param().bottom_size() == 1);
-  assert(param().top_size() == 1);
-  assert(num_batch_axes() + 1 /*channel axis*/ + num_spatial_axes() == input_shape().rank());
-
-  // TODO Support force_nd_im2col option
-  assert(!conv_param().force_nd_im2col());
-  // TODO Support negative axis
-  assert(conv_param().axis() > 0);
-  // TODO Support multi-group convolution
-  assert(conv_param().group() == 1);
-
-  // Comment on ConvolutionParameter (in caffe.proto)
-  //   Pad, kernel size, and stride are all given as a single value for equal
-  //   dimensions in all spatial dimensions, or once per spatial dimension.
-  //
-  // NOTE 'equal dimensions in all spatial dimensions' schema is supported
-  // TODO Support 'once per spatial dimension'
-  assert(conv_param().pad_size() == 1);
-  assert(conv_param().kernel_size_size() == 1);
-  assert(conv_param().stride_size() == 1);
-
-  // NOTE 'dilation' is not supported yet
-  // TODO Support 'dilation'
-  assert(conv_param().dilation_size() == 0);
-}
-
-uint32_t ConvolutionLayer::bottom_size(void) const { return 1; }
-
-const std::string &ConvolutionLayer::bottom_name(uint32_t n) const
-{
-  assert(n == 0);
-  return input_name();
-}
-
-const BlobShape &ConvolutionLayer::bottom_shape(uint32_t n) const
-{
-  assert(n == 0);
-  return input_shape();
-}
-
-uint32_t ConvolutionLayer::top_size(void) const { return 1; }
-
-const std::string &ConvolutionLayer::top_name(uint32_t n) const
-{
-  assert(n == 0);
-  return output_name();
-}
-
-BlobShape ConvolutionLayer::top_shape(uint32_t n) const
-{
-  assert(n == 0);
-  return output_shape();
-}
-
-void ConvolutionLayer::accept(LayerAnalysisPass &&v) const { v.visit(*this); }
-void ConvolutionLayer::accept(LayerTransformPass &&v) { v.visit(*this); }
-
-const std::string &ConvolutionLayer::input_name(void) const { return param().bottom(0); }
-const BlobShape &ConvolutionLayer::input_shape(void) const
-{
-  return _net->blobs().at(input_name());
-}
-
-const std::string &ConvolutionLayer::output_name(void) const { return param().top(0); }
-BlobShape ConvolutionLayer::output_shape(void) const
-{
-  // The code below is derived from Caffe
-  //  - Please refer to 'compute_output_shape' method in 'caffe::ConvolutionLayer' for details
-  BlobShape res{};
-
-  res.resize(num_batch_axes() + 1 + num_spatial_axes());
-
-  for (uint32_t batch_axis = 0; batch_axis < num_batch_axes(); ++batch_axis)
-  {
-    res.dim(batch_axis) = input_shape().dim(batch_axis);
-  }
-
-  res.dim(num_batch_axes()) = num_effective_output();
-
-  for (uint32_t spatial_axis = 0; spatial_axis < num_spatial_axes(); ++spatial_axis)
-  {
-    const uint32_t axis = num_batch_axes() + 1 + spatial_axis;
-    const int64_t kernel_ext = dilation(spatial_axis) * (kernel_size(spatial_axis) - 1) + 1;
-
-    res.dim(axis) =
-        (input_shape().dim(axis) + 2 * pad(spatial_axis) - kernel_ext) / stride(spatial_axis);
-  }
-
-  return res;
-}
-
-uint32_t ConvolutionLayer::channel_axis(void) const { return conv_param().axis(); }
-
-uint32_t ConvolutionLayer::num_effective_output(void) const { return conv_param().num_output(); }
-
-uint32_t ConvolutionLayer::num_spatial_axes(void) const
-{
-  assert(input_shape().rank() > channel_axis());
-  return input_shape().rank() - channel_axis() - 1;
-}
-
-uint32_t ConvolutionLayer::num_batch_axes(void) const
-{
-  return input_shape().rank() - num_spatial_axes() - 1;
-}
-
-uint32_t ConvolutionLayer::pad(uint32_t /*spatial_axis*/) const { return conv_param().pad(0); }
-
-uint32_t ConvolutionLayer::kernel_size(uint32_t /*spatial_axis*/) const
-{
-  return conv_param().kernel_size(0);
-}
-
-uint32_t ConvolutionLayer::stride(uint32_t /*spatial_axis*/) const
-{
-  return conv_param().stride(0);
-}
-
-uint32_t ConvolutionLayer::dilation(uint32_t /*spatial_axis*/) const { return 1; }
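For readers tracing the removed shape logic: output_shape above computes each spatial output dimension as (input + 2 * pad - kernel_ext) / stride, where kernel_ext = dilation * (kernel_size - 1) + 1 and dilation is fixed to 1. Note that the removed code performs the plain integer division, without the trailing + 1 that upstream Caffe's compute_output_shape applies. A standalone sketch of the arithmetic with hypothetical values (not taken from this patch):

    #include <cstdint>
    #include <cstdio>

    int main()
    {
      // Hypothetical configuration: 224x224 input, kernel 3, pad 1, stride 2, dilation 1
      const int64_t input = 224, pad = 1, kernel = 3, stride = 2, dilation = 1;

      const int64_t kernel_ext = dilation * (kernel - 1) + 1;         // = 3
      const int64_t output = (input + 2 * pad - kernel_ext) / stride; // (224 + 2 - 3) / 2 = 111

      std::printf("output dim = %lld\n", static_cast<long long>(output));
      return 0;
    }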
diff --git a/contrib/caffegen/src/internal/InputLayer.cpp b/contrib/caffegen/src/internal/InputLayer.cpp
deleted file mode 100644 (file)
index 655fc5e..0000000
+++ /dev/null
@@ -1,43 +0,0 @@
-#include "internal/InputLayer.h"
-#include "internal/LayerAnalysisPass.h"
-#include "internal/LayerTransformPass.h"
-
-#include <cassert>
-
-InputLayer::InputLayer(const Network *, caffe::LayerParameter *p) : _param{p}
-{
-  assert(_param != nullptr);
-  assert(param().type() == "Input");
-  assert(param().bottom_size() == 0);
-  assert(param().top_size() == param().input_param().shape_size());
-}
-
-uint32_t InputLayer::bottom_size(void) const { return 0; }
-
-const std::string &InputLayer::bottom_name(uint32_t) const { throw std::invalid_argument{"n"}; }
-
-const BlobShape &InputLayer::bottom_shape(uint32_t) const { throw std::invalid_argument{"n"}; }
-
-uint32_t InputLayer::top_size(void) const { return param().top_size(); }
-
-const std::string &InputLayer::top_name(uint32_t n) const { return param().top(n); }
-
-BlobShape InputLayer::top_shape(uint32_t n) const
-{
-  BlobShape shape;
-
-  const auto &shape_param = param().input_param().shape(n);
-  const auto num_axes = shape_param.dim_size();
-
-  shape.resize(num_axes);
-
-  for (int axe = 0; axe < num_axes; ++axe)
-  {
-    shape.dim(axe) = shape_param.dim(axe);
-  }
-
-  return shape;
-}
-
-void InputLayer::accept(LayerAnalysisPass &&v) const { v.visit(*this); }
-void InputLayer::accept(LayerTransformPass &&v) { v.visit(*this); }
diff --git a/contrib/caffegen/src/internal/LayerAnalysisPass.cpp b/contrib/caffegen/src/internal/LayerAnalysisPass.cpp
deleted file mode 100644 (file)
index 9a330bf..0000000
+++ /dev/null
@@ -1,3 +0,0 @@
-#include "internal/LayerAnalysisPass.h"
-
-// NOTE This file is introduced to ensure that 'LayerAnalysisPass.h' is self-complete.
diff --git a/contrib/caffegen/src/internal/LayerContext.cpp b/contrib/caffegen/src/internal/LayerContext.cpp
deleted file mode 100644 (file)
index 6c4c5c2..0000000
+++ /dev/null
@@ -1,12 +0,0 @@
-#include "internal/LayerContext.h"
-
-uint32_t LayerContext::size(void) const { return _layers.size(); };
-
-Layer &LayerContext::at(uint32_t n) { return *(_layers.at(n)); }
-const Layer &LayerContext::at(uint32_t n) const { return *(_layers.at(n)); }
-
-LayerContext &LayerContext::append(std::unique_ptr<Layer> &&l)
-{
-  _layers.emplace_back(std::move(l));
-  return (*this);
-}
diff --git a/contrib/caffegen/src/internal/LayerResolver.cpp b/contrib/caffegen/src/internal/LayerResolver.cpp
deleted file mode 100644 (file)
index 46cfa0e..0000000
+++ /dev/null
@@ -1,31 +0,0 @@
-#include "internal/LayerResolver.h"
-#include "internal/InputLayer.h"
-#include "internal/ConvolutionLayer.h"
-
-#include <stdex/Memory.h>
-
-using stdex::make_unique;
-
-template <typename T> std::shared_ptr<LayerFactory> make_factory(void)
-{
-  struct LayerFactoryImpl final : public LayerFactory
-  {
-    std::unique_ptr<Layer> make(Network *net, caffe::LayerParameter *p) const override
-    {
-      return make_unique<T>(net, p);
-    }
-  };
-
-  return make_unique<LayerFactoryImpl>();
-}
-
-LayerResolver::LayerResolver()
-{
-  _factories["Input"] = make_factory<InputLayer>();
-  _factories["Convolution"] = make_factory<ConvolutionLayer>();
-}
-
-const LayerFactory &LayerResolver::resolve(const std::string &type) const
-{
-  return *(_factories.at(type));
-}
diff --git a/contrib/caffegen/src/internal/Network.cpp b/contrib/caffegen/src/internal/Network.cpp
deleted file mode 100644 (file)
index cb015ae..0000000
+++ /dev/null
@@ -1,6 +0,0 @@
-#include "internal/Network.h"
-
-Network::Network(std::unique_ptr<::caffe::NetParameter> &&param) : _param{std::move(param)}
-{
-  // DO NOTHING
-}
diff --git a/contrib/caffegen/src/internal/NetworkBuilder.cpp b/contrib/caffegen/src/internal/NetworkBuilder.cpp
deleted file mode 100644 (file)
index 20b0263..0000000
+++ /dev/null
@@ -1,32 +0,0 @@
-#include "internal/NetworkBuilder.h"
-
-#include <stdex/Memory.h>
-
-using stdex::make_unique;
-
-NetworkBuilder::NetworkBuilder(const LayerResolver &resolver) : _resolver{resolver}
-{
-  // DO NOTHING
-}
-
-std::unique_ptr<Network> NetworkBuilder::build(std::unique_ptr<caffe::NetParameter> &&p)
-{
-  auto res = make_unique<Network>(std::move(p));
-
-  for (int n = 0; n < res->param().layer_size(); ++n)
-  {
-    caffe::LayerParameter *layer_param = res->param().mutable_layer(n);
-    const std::string &layer_type = layer_param->type();
-
-    auto l = _resolver.resolve(layer_type).make(res.get(), layer_param);
-
-    for (uint32_t n = 0; n < l->top_size(); ++n)
-    {
-      res->blobs().insert(l->top_name(n), l->top_shape(n));
-    }
-
-    res->layers().append(std::move(l));
-  }
-
-  return res;
-}
diff --git a/contrib/caffegen/src/internal/ParameterRandomizePass.cpp b/contrib/caffegen/src/internal/ParameterRandomizePass.cpp
deleted file mode 100644 (file)
index 6b0b3e6..0000000
+++ /dev/null
@@ -1,77 +0,0 @@
-#include "ParameterRandomizePass.h"
-
-#include <stdexcept>
-
-// NOTE GCC 4.8.3 emits the following error with brace-initialization on r-value reference
-//   error: invalid initialization of non-const reference of type '..' from an rvalue of type '..'
-ParameterRandomizePass::ParameterRandomizePass(std::default_random_engine &generator)
-    : _generator(generator)
-{
-  // DO NOTHING
-}
-
-void ParameterRandomizePass::visit(InputLayer &)
-{
-  // InputLayer has no parameter to be randomized
-  return;
-}
-
-void ParameterRandomizePass::visit(ConvolutionLayer &l)
-{
-  assert(l.param().blobs_size() == 0);
-
-  caffe::ConvolutionParameter *conv_param = l.param().mutable_convolution_param();
-
-  auto element_count = [](caffe::BlobShape &shape) {
-    assert(shape.dim_size() > 0);
-
-    int64_t count = 1;
-
-    for (int axis = 0; axis < shape.dim_size(); ++axis)
-    {
-      count *= shape.dim(axis);
-    }
-
-    return count;
-  };
-
-  //
-  // Fill Kernel
-  //
-  caffe::BlobProto *weight_blob = l.param().add_blobs();
-  caffe::BlobShape *weight_shape = weight_blob->mutable_shape();
-
-  weight_shape->add_dim(l.num_effective_output());
-  weight_shape->add_dim(l.input_shape().dim(l.channel_axis()));
-
-  for (uint32_t spatial_axis = 0; spatial_axis < l.num_spatial_axes(); ++spatial_axis)
-  {
-    const auto kernel_dim = l.kernel_size(spatial_axis);
-    weight_shape->add_dim(kernel_dim);
-  }
-
-  // TODO Allow users to set mean and stddev
-  std::normal_distribution<float> weight_distribution(0.0f, 2.0f);
-
-  for (int64_t n = 0; n < element_count(*weight_shape); ++n)
-  {
-    weight_blob->add_data(weight_distribution(_generator));
-  }
-
-  //
-  // Fill Bias
-  //
-  assert(conv_param->bias_term());
-  caffe::BlobProto *bias_blob = l.param().add_blobs();
-  caffe::BlobShape *bias_shape = bias_blob->mutable_shape();
-
-  bias_shape->add_dim(l.num_effective_output());
-
-  // TODO Allow users to set mean and stddev
-  std::normal_distribution<float> bias_distribution(0.0f, 16.0f);
-
-  for (int64_t n = 0; n < element_count(*bias_shape); ++n)
-  {
-    bias_blob->add_data(bias_distribution(_generator));
-  }
-}
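For reference, the removed pass appended two blobs per Convolution layer: a weight blob of shape num_output x input_channels x kernel dims and a bias blob of shape num_output, both filled from std::normal_distribution, whose second constructor argument is the standard deviation (so weights came from N(mean 0, stddev 2) and biases from N(mean 0, stddev 16)). With hypothetical values of num_output 16, 3 input channels, and a 3x3 kernel, that is 16 * 3 * 3 * 3 = 432 weight values and 16 bias values. A minimal sketch of the sampling (hypothetical seed, not part of this patch):

    #include <cstdio>
    #include <random>

    int main()
    {
      // The removed command seeded this from the SEED environment variable or the clock
      std::default_random_engine generator(42);

      std::normal_distribution<float> weight_dist(0.0f, 2.0f); // mean 0, stddev 2
      std::normal_distribution<float> bias_dist(0.0f, 16.0f);  // mean 0, stddev 16

      std::printf("weight sample: %f, bias sample: %f\n", weight_dist(generator), bias_dist(generator));
      return 0;
    }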
diff --git a/contrib/caffegen/src/internal/ParameterRandomizePass.h b/contrib/caffegen/src/internal/ParameterRandomizePass.h
deleted file mode 100644 (file)
index b9089e7..0000000
+++ /dev/null
@@ -1,21 +0,0 @@
-#ifndef __PARAMETER_RANDOMIZE_PASS_H__
-#define __PARAMETER_RANDOMIZE_PASS_H__
-
-#include "internal/LayerTransformPass.h"
-
-#include <random>
-
-class ParameterRandomizePass : public LayerTransformPass
-{
-public:
-  ParameterRandomizePass(std::default_random_engine &generator);
-
-public:
-  void visit(InputLayer &) override;
-  void visit(ConvolutionLayer &) override;
-
-private:
-  std::default_random_engine &_generator;
-};
-
-#endif // __PARAMETER_RANDOMIZE_PASS_H__