[caffegen] Add 'InputLayer' class (#101)
author박종현/동작제어Lab(SR)/Senior Engineer/삼성전자 <jh1302.park@samsung.com>
Thu, 19 Apr 2018 08:36:52 +0000 (17:36 +0900)
committerGitHub Enterprise <noreply-CODE@samsung.com>
Thu, 19 Apr 2018 08:36:52 +0000 (17:36 +0900)
This commit introduces the 'InputLayer' class, which helps us to process
the 'Input' layer of a caffe model.

Signed-off-by: Jonghyun Park <jh1302.park@samsung.com>
contrib/caffegen/include/InputLayer.h [new file with mode: 0644]
contrib/caffegen/include/LayerAnalysisPass.h
contrib/caffegen/include/LayerTransformPass.h
contrib/caffegen/src/InputLayer.cpp [new file with mode: 0644]

diff --git a/contrib/caffegen/include/InputLayer.h b/contrib/caffegen/include/InputLayer.h
new file mode 100644 (file)
index 0000000..1a39d88
--- /dev/null
@@ -0,0 +1,39 @@
+#ifndef __INPUT_LAYER_H__
+#define __INPUT_LAYER_H__
+
+#include "Layer.h"
+#include "Network.h"
+
+#include "BlobShape.h"
+
+#include <caffe.pb.h>
+
+// InputLayer adapts a caffe 'Input' LayerParameter to the common Layer
+// interface. It borrows (does not own) both the Network and the
+// LayerParameter; both must outlive this object.
+class InputLayer final : public Layer
+{
+public:
+  // 'p' must be non-null and describe a layer whose type() is "Input"
+  // (enforced via assert in the constructor).
+  InputLayer(const Network *net, caffe::LayerParameter *p);
+
+public:
+  // An 'Input' layer consumes nothing: bottom_size() is always 0, and the
+  // per-index accessors below reject every index.
+  uint32_t bottom_size(void) const override;
+  const std::string &bottom_name(uint32_t n) const override;
+  const BlobShape &bottom_shape(uint32_t n) const override;
+
+public:
+  // Tops mirror param().top(...) and param().input_param().shape(...).
+  uint32_t top_size(void) const override;
+  const std::string &top_name(uint32_t n) const override;
+  BlobShape top_shape(uint32_t n) const override;
+
+public:
+  // Visitor entry points for the analysis/transform pass hierarchies.
+  void accept(LayerAnalysisPass &&) const override;
+  void accept(LayerTransformPass &&) override;
+
+public:
+  // Direct access to the wrapped caffe parameter message.
+  const caffe::LayerParameter &param(void) const { return *_param; }
+  caffe::LayerParameter &param(void) { return *_param; }
+
+private:
+  const Network * const _net;
+  caffe::LayerParameter * const _param;
+};
+
+#endif // __INPUT_LAYER_H__
index 60f9d24..ba28129 100644 (file)
@@ -4,6 +4,8 @@
 struct LayerAnalysisPass
 {
   virtual ~LayerAnalysisPass() = default;
+
+  // Read-only visitor hook, dispatched from InputLayer::accept(). NOTE(review):
+  // InputLayer must be declared before this point — confirm the (unseen) top of
+  // this header includes or forward-declares it.
+  virtual void visit(const InputLayer &) = 0;
 };
 
 #endif // __LAYER_ANALYSIS_PASS_H__
index 13f0cdc..18ad0f9 100644 (file)
@@ -1,9 +1,13 @@
 #ifndef __LAYER_TRANSFORM_PASS_H__
 #define __LAYER_TRANSFORM_PASS_H__
 
+#include "InputLayer.h"
+
 struct LayerTransformPass
 {
   virtual ~LayerTransformPass() = default;
+
+  // Mutating visitor hook, dispatched from InputLayer::accept().
+  virtual void visit(InputLayer &) = 0;
 };
 
 #endif // __LAYER_TRANSFORM_PASS_H__
diff --git a/contrib/caffegen/src/InputLayer.cpp b/contrib/caffegen/src/InputLayer.cpp
new file mode 100644 (file)
index 0000000..1247651
--- /dev/null
@@ -0,0 +1,49 @@
+#include "InputLayer.h"
+
+#include "LayerAnalysisPass.h"
+#include "LayerTransformPass.h"
+
+#include <cassert>
+#include <stdexcept>
+
+// Stores the borrowed pointers and sanity-checks (debug builds only, via
+// assert) that 'p' really is an 'Input' layer: non-null, type "Input",
+// no bottoms, and exactly one declared shape per top blob.
+InputLayer::InputLayer(const Network *net, caffe::LayerParameter *p) : _net{net}, _param{p}
+{
+  assert(_param != nullptr);
+  assert(param().type() == "Input");
+  assert(param().bottom_size() == 0);
+  assert(param().top_size() == param().input_param().shape_size());
+}
+
+uint32_t InputLayer::bottom_size(void) const { return 0; }
+
+// There are no bottom blobs, so every index is invalid by definition.
+// The parameter is intentionally unnamed: it is never read, and naming it
+// would trigger -Wunused-parameter. Throws std::invalid_argument (requires
+// <stdexcept>, added to the include list above).
+const std::string &InputLayer::bottom_name(uint32_t) const
+{
+  throw std::invalid_argument{"n"};
+}
+
+const BlobShape &InputLayer::bottom_shape(uint32_t) const
+{
+  throw std::invalid_argument{"n"};
+}
+
+// Tops come straight from the wrapped LayerParameter.
+uint32_t InputLayer::top_size(void) const { return param().top_size(); }
+
+// Returns the name of the n-th top blob; 'n' is forwarded unchecked to the
+// protobuf accessor.
+const std::string &InputLayer::top_name(uint32_t n) const { return param().top(n); }
+
+// Builds a BlobShape by value, copying the dimensions of the n-th shape
+// entry of input_param. 'n' is not range-checked here; the constructor
+// asserts top_size() == shape_size(), so valid top indices map 1:1 onto
+// shape entries.
+BlobShape InputLayer::top_shape(uint32_t n) const
+{
+  BlobShape shape;
+
+  const auto &shape_param = param().input_param().shape(n);
+  const auto num_axes = shape_param.dim_size();
+
+  shape.resize(num_axes);
+
+  // Copy each axis extent ('axe' is an axis index; int matches protobuf's
+  // repeated-field size type).
+  for (int axe = 0; axe < num_axes; ++axe)
+  {
+    shape.dim(axe) = shape_param.dim(axe);
+  }
+
+  return shape;
+}
+
+// Double-dispatch entry points: forward *this to the visitor's InputLayer
+// overload (const path for analysis, mutable path for transforms).
+void InputLayer::accept(LayerAnalysisPass &&v) const { v.visit(*this); }
+void InputLayer::accept(LayerTransformPass &&v) { v.visit(*this); }