From 3959c5f7e2d327b824f8f4707ce2964878d0d316 Mon Sep 17 00:00:00 2001
From: Jonghyun Park/Motion Control Lab(SR)/Senior Engineer/Samsung Electronics
Date: Thu, 19 Apr 2018 17:36:52 +0900
Subject: [PATCH] [caffegen] Add 'InputLayer' class (#101)

This commit introduces the 'InputLayer' class, which helps us process
the 'Input' layer of a Caffe model.

Signed-off-by: Jonghyun Park
---
 contrib/caffegen/include/InputLayer.h         | 39 +++++++++++++++++++++
 contrib/caffegen/include/LayerAnalysisPass.h  |  2 ++
 contrib/caffegen/include/LayerTransformPass.h |  4 +++
 contrib/caffegen/src/InputLayer.cpp           | 49 +++++++++++++++++++++++++++
 4 files changed, 94 insertions(+)
 create mode 100644 contrib/caffegen/include/InputLayer.h
 create mode 100644 contrib/caffegen/src/InputLayer.cpp

diff --git a/contrib/caffegen/include/InputLayer.h b/contrib/caffegen/include/InputLayer.h
new file mode 100644
index 0000000..1a39d88
--- /dev/null
+++ b/contrib/caffegen/include/InputLayer.h
@@ -0,0 +1,39 @@
+#ifndef __INPUT_LAYER_H__
+#define __INPUT_LAYER_H__
+
+#include "Layer.h"
+#include "Network.h"
+
+#include "BlobShape.h"
+
+#include <caffe/proto/caffe.pb.h>
+
+class InputLayer final : public Layer
+{
+public:
+  InputLayer(const Network *net, caffe::LayerParameter *p);
+
+public:
+  uint32_t bottom_size(void) const override;
+  const std::string &bottom_name(uint32_t n) const override;
+  const BlobShape &bottom_shape(uint32_t n) const override;
+
+public:
+  uint32_t top_size(void) const override;
+  const std::string &top_name(uint32_t n) const override;
+  BlobShape top_shape(uint32_t n) const override;
+
+public:
+  void accept(LayerAnalysisPass &&) const override;
+  void accept(LayerTransformPass &&) override;
+
+public:
+  const caffe::LayerParameter &param(void) const { return *_param; }
+  caffe::LayerParameter &param(void) { return *_param; }
+
+private:
+  const Network * const _net;
+  caffe::LayerParameter * const _param;
+};
+
+#endif // __INPUT_LAYER_H__
diff --git a/contrib/caffegen/include/LayerAnalysisPass.h b/contrib/caffegen/include/LayerAnalysisPass.h
index 60f9d24..ba28129 100644
--- a/contrib/caffegen/include/LayerAnalysisPass.h
+++ b/contrib/caffegen/include/LayerAnalysisPass.h
@@ -4,6 +4,8 @@
 struct LayerAnalysisPass
 {
   virtual ~LayerAnalysisPass() = default;
+
+  virtual void visit(const InputLayer &) = 0;
 };
 
 #endif // __LAYER_ANALYSIS_PASS_H__
diff --git a/contrib/caffegen/include/LayerTransformPass.h b/contrib/caffegen/include/LayerTransformPass.h
index 13f0cdc..18ad0f9 100644
--- a/contrib/caffegen/include/LayerTransformPass.h
+++ b/contrib/caffegen/include/LayerTransformPass.h
@@ -1,9 +1,13 @@
 #ifndef __LAYER_TRANSFORM_PASS_H__
 #define __LAYER_TRANSFORM_PASS_H__
 
+#include "InputLayer.h"
+
 struct LayerTransformPass
 {
   virtual ~LayerTransformPass() = default;
+
+  virtual void visit(InputLayer &) = 0;
 };
 
 #endif // __LAYER_TRANSFORM_PASS_H__
diff --git a/contrib/caffegen/src/InputLayer.cpp b/contrib/caffegen/src/InputLayer.cpp
new file mode 100644
index 0000000..1247651
--- /dev/null
+++ b/contrib/caffegen/src/InputLayer.cpp
@@ -0,0 +1,49 @@
+#include "InputLayer.h"
+#include "LayerAnalysisPass.h"
+#include "LayerTransformPass.h"
+
+#include <cassert>
+
+InputLayer::InputLayer(const Network *net, caffe::LayerParameter *p) : _net{net}, _param{p}
+{
+  assert(_param != nullptr);
+  assert(param().type() == "Input");
+  assert(param().bottom_size() == 0);
+  assert(param().top_size() == param().input_param().shape_size());
+}
+
+uint32_t InputLayer::bottom_size(void) const { return 0; }
+
+const std::string &InputLayer::bottom_name(uint32_t n) const
+{
+  throw std::invalid_argument{"n"};
+}
+
+const BlobShape &InputLayer::bottom_shape(uint32_t n) const
+{
+  throw std::invalid_argument{"n"};
+}
+
+uint32_t InputLayer::top_size(void) const { return param().top_size(); }
+
+const std::string &InputLayer::top_name(uint32_t n) const { return param().top(n); }
+
+BlobShape InputLayer::top_shape(uint32_t n) const
+{
+  BlobShape shape;
+
+  const auto &shape_param = param().input_param().shape(n);
+  const auto num_axes = shape_param.dim_size();
+
+  shape.resize(num_axes);
+
+  for (int axe = 0; axe < num_axes; ++axe)
+  {
+    shape.dim(axe) = shape_param.dim(axe);
+  }
+
+  return shape;
+}
+
+void InputLayer::accept(LayerAnalysisPass &&v) const { v.visit(*this); }
+void InputLayer::accept(LayerTransformPass &&v) { v.visit(*this); }
-- 
2.7.4
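
For reference, below is a minimal usage sketch for the new 'visit' hooks. The
'InputNameDumper' pass is hypothetical (it is not part of this patch) and
assumes only the interfaces added above.

  // Hypothetical analysis pass that dumps the top blob names of an 'Input' layer.
  #include "InputLayer.h"
  #include "LayerAnalysisPass.h"

  #include <cstdint>
  #include <iostream>

  struct InputNameDumper final : public LayerAnalysisPass
  {
    void visit(const InputLayer &input) override
    {
      // An 'Input' layer has no bottoms; enumerate its tops as declared in the prototxt.
      for (uint32_t n = 0; n < input.top_size(); ++n)
      {
        std::cout << "top #" << n << ": " << input.top_name(n) << std::endl;
      }
    }
  };

  // Usage: the rvalue-reference 'accept' overload accepts a temporary pass object,
  // and 'accept' forwards '*this' to 'visit', completing the double dispatch.
  //
  //   const InputLayer &layer = ...;   // built from a caffe::LayerParameter
  //   layer.accept(InputNameDumper{});

The same pattern applies to 'LayerTransformPass', except that its 'visit'
receives a mutable 'InputLayer &' and may rewrite the underlying
'caffe::LayerParameter' through 'param()'.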