[caffegen] Fix compiler warnings (#591)
author Jonghyun Park / Motion Control Lab (SR) / Staff Engineer / Samsung Electronics <jh1302.park@samsung.com>
Wed, 11 Jul 2018 23:48:51 +0000 (08:48 +0900)
committer GitHub Enterprise <noreply-CODE@samsung.com>
Wed, 11 Jul 2018 23:48:51 +0000 (08:48 +0900)
This commit fixes all the compiler warnings (reported by clang 7.0)
in the caffegen implementation.
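
The warnings fall into four groups: non-void functions that fall off the
end without returning (-Wreturn-type), unused parameters and an unused
private field (-Wunused-parameter, -Wunused-private-field), signed/unsigned
comparisons in loop conditions (-Wsign-compare), and a std::move on a
returned local that blocks copy elision (-Wpessimizing-move). The flag
names are inferred from the diffs below rather than quoted from the clang
output.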

Signed-off-by: Jonghyun Park <jh1302.park@samsung.com>
contrib/caffegen/include/internal/BlobShape.h
contrib/caffegen/include/internal/InputLayer.h
contrib/caffegen/src/internal/ConvolutionLayer.cpp
contrib/caffegen/src/internal/InputLayer.cpp
contrib/caffegen/src/internal/LayerContext.cpp
contrib/caffegen/src/internal/NetworkBuilder.cpp
contrib/caffegen/src/internal/ParameterRandomizePass.cpp

index 9f30384..794f341 100644 (file)
--- a/contrib/caffegen/include/internal/BlobShape.h
+++ b/contrib/caffegen/include/internal/BlobShape.h
@@ -10,7 +10,11 @@ public:
   uint32_t rank(void) const { return _dims.size(); }
 
 public:
-  BlobShape &resize(uint32_t size) { _dims.resize(size); }
+  BlobShape &resize(uint32_t size)
+  {
+    _dims.resize(size);
+    return (*this);
+  }
 
 public:
   int64_t dim(uint32_t axe) const { return _dims.at(axe); }
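
resize is declared to return BlobShape & (so calls can be chained) but had
no return statement; in C++, flowing off the end of a non-void function is
undefined behavior, and clang reports it as -Wreturn-type. A minimal
self-contained sketch of the fixed pattern (not the project's actual
header):

#include <cstdint>
#include <vector>

class BlobShape
{
public:
  uint32_t rank(void) const { return _dims.size(); }

  // Without 'return (*this);', clang warns:
  //   non-void function does not return a value [-Wreturn-type]
  BlobShape &resize(uint32_t size)
  {
    _dims.resize(size);
    return (*this);
  }

  int64_t dim(uint32_t axis) const { return _dims.at(axis); }

private:
  std::vector<int64_t> _dims;
};

// Chaining enabled by the returned reference (hypothetical usage):
//   BlobShape shape;
//   shape.resize(4).rank(); // 4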
index c535fae..6523cbe 100644 (file)
--- a/contrib/caffegen/include/internal/InputLayer.h
+++ b/contrib/caffegen/include/internal/InputLayer.h
@@ -32,7 +32,6 @@ public:
   caffe::LayerParameter &param(void) { return *_param; }
 
 private:
-  const Network * const _net;
   caffe::LayerParameter * const _param;
 };
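
InputLayer stored the Network pointer but never read it, which clang
reports as -Wunused-private-field; the constructor change in InputLayer.cpp
below drops the matching initializer. A minimal reproduction, assuming a
clang build with -Wall:

class Network;

class Example
{
public:
  explicit Example(const Network *net) : _net{net} {}

private:
  // warning: private field '_net' is not used [-Wunused-private-field]
  const Network *const _net;
};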
 
index 8efaaf6..ceb17a3 100644 (file)
--- a/contrib/caffegen/src/internal/ConvolutionLayer.cpp
+++ b/contrib/caffegen/src/internal/ConvolutionLayer.cpp
@@ -90,7 +90,7 @@ BlobShape ConvolutionLayer::output_shape(void) const
 
   res.dim(num_batch_axes()) = num_effective_output();
 
-  for (int spatial_axis = 0; spatial_axis < num_spatial_axes(); ++spatial_axis)
+  for (uint32_t spatial_axis = 0; spatial_axis < num_spatial_axes(); ++spatial_axis)
   {
     const uint32_t axis = num_batch_axes() + 1 + spatial_axis;
     const int64_t kernel_ext =
@@ -124,22 +124,22 @@ uint32_t ConvolutionLayer::num_batch_axes(void) const
   return input_shape().rank() - num_spatial_axes() - 1;
 }
 
-uint32_t ConvolutionLayer::pad(uint32_t spatial_axis) const
+uint32_t ConvolutionLayer::pad(uint32_t /*spatial_axis*/) const
 {
   return conv_param().pad(0);
 }
 
-uint32_t ConvolutionLayer::kernel_size(uint32_t spatial_axis) const
+uint32_t ConvolutionLayer::kernel_size(uint32_t /*spatial_axis*/) const
 {
   return conv_param().kernel_size(0);
 }
 
-uint32_t ConvolutionLayer::stride(uint32_t spatial_axis) const
+uint32_t ConvolutionLayer::stride(uint32_t /*spatial_axis*/) const
 {
   return conv_param().stride(0);
 }
 
-uint32_t ConvolutionLayer::dilation(uint32_t spatial_axis) const
+uint32_t ConvolutionLayer::dilation(uint32_t /*spatial_axis*/) const
 {
   return 1;
 }
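
Two separate diagnostics are fixed in this file. The loop counter becomes
uint32_t because num_spatial_axes() returns an unsigned type, and comparing
it against an int triggers -Wsign-compare. The accessor parameters are
genuinely unused: only element 0 of the repeated proto field is read (the
same pad/kernel/stride is assumed on every spatial axis) and dilation is
hard-coded to 1. A sketch of both patterns, with stand-in definitions
chosen for illustration:

#include <cstdint>

// Stand-in for the real accessor; assumed to return an unsigned count.
uint32_t num_spatial_axes(void) { return 2; }

void example(void)
{
  // With 'int spatial_axis', clang reports:
  //   comparison of integers of different signs: 'int' and 'uint32_t'
  //   [-Wsign-compare]
  for (uint32_t spatial_axis = 0; spatial_axis < num_spatial_axes(); ++spatial_axis)
  {
    // ... per-axis work ...
  }
}

// Commenting out the name silences -Wunused-parameter while keeping the
// signature self-documenting; '(void)spatial_axis;' in the body or C++17's
// '[[maybe_unused]]' attribute are alternatives.
uint32_t dilation(uint32_t /*spatial_axis*/) { return 1; }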
index b7eb930..e19e05e 100644 (file)
--- a/contrib/caffegen/src/internal/InputLayer.cpp
+++ b/contrib/caffegen/src/internal/InputLayer.cpp
@@ -4,7 +4,7 @@
 
 #include <cassert>
 
-InputLayer::InputLayer(const Network *net, caffe::LayerParameter *p) : _net{net}, _param{p}
+InputLayer::InputLayer(const Network *, caffe::LayerParameter *p) : _param{p}
 {
   assert(_param != nullptr);
   assert(param().type() == "Input");
@@ -14,12 +14,12 @@ InputLayer::InputLayer(const Network *net, caffe::LayerParameter *p) : _net{net}
 
 uint32_t InputLayer::bottom_size(void) const { return 0; }
 
-const std::string &InputLayer::bottom_name(uint32_t n) const
+const std::string &InputLayer::bottom_name(uint32_t) const
 {
   throw std::invalid_argument{"n"};
 }
 
-const BlobShape &InputLayer::bottom_shape(uint32_t n) const
+const BlobShape &InputLayer::bottom_shape(uint32_t) const
 {
   throw std::invalid_argument{"n"};
 }
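
Here the parameter name is dropped entirely, the other accepted spelling
for silencing -Wunused-parameter. The parameters really are unused: an
Input layer has no bottoms (bottom_size() returns 0), so every index is
invalid and the accessors throw unconditionally. A standalone sketch of the
pattern:

#include <cstdint>
#include <stdexcept>
#include <string>

// No value of the index is valid, so the parameter is never read; leaving
// it unnamed avoids -Wunused-parameter without a comment.
const std::string &bottom_name(uint32_t)
{
  throw std::invalid_argument{"n"};
}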
index bc2475d..6c4c5c2 100644 (file)
--- a/contrib/caffegen/src/internal/LayerContext.cpp
+++ b/contrib/caffegen/src/internal/LayerContext.cpp
@@ -8,4 +8,5 @@ const Layer &LayerContext::at(uint32_t n) const { return *(_layers.at(n)); }
 LayerContext &LayerContext::append(std::unique_ptr<Layer> &&l)
 {
   _layers.emplace_back(std::move(l));
+  return (*this);
 }
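
Same -Wreturn-type fix as BlobShape::resize: append is declared to return
LayerContext & but fell off the end. The returned reference lets call sites
chain appends; a minimal self-contained sketch (Layer reduced to an empty
base for illustration):

#include <memory>
#include <utility>
#include <vector>

struct Layer
{
  virtual ~Layer() = default;
};

class LayerContext
{
public:
  LayerContext &append(std::unique_ptr<Layer> &&l)
  {
    _layers.emplace_back(std::move(l));
    return (*this); // omitting this line is what clang flagged
  }

private:
  std::vector<std::unique_ptr<Layer>> _layers;
};

// Hypothetical chained usage:
//   LayerContext ctx;
//   ctx.append(std::make_unique<Layer>()).append(std::make_unique<Layer>());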
index d13df2a..f9613e4 100644 (file)
--- a/contrib/caffegen/src/internal/NetworkBuilder.cpp
+++ b/contrib/caffegen/src/internal/NetworkBuilder.cpp
@@ -26,5 +26,5 @@ std::unique_ptr<Network> NetworkBuilder::build(std::unique_ptr<caffe::NetParamet
     res->layers().append(std::move(l));
   }
 
-  return std::move(res);
+  return res;
 }
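
Returning std::move(res) from a function that returns the local by value is
what clang calls a pessimizing move (part of the -Wmove group): the explicit
move prevents copy elision, and a returned local is treated as an rvalue
anyway, so the std::unique_ptr still moves with a plain return. A standalone
illustration:

#include <memory>

std::unique_ptr<int> make(void)
{
  auto res = std::make_unique<int>(42);
  // return std::move(res); // warning: moving a local object in a return
  //                        //   statement prevents copy elision
  //                        //   [-Wpessimizing-move]
  return res; // copy elision may apply; otherwise the local is moved
}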
index c98dd53..9130379 100644 (file)
--- a/contrib/caffegen/src/internal/ParameterRandomizePass.cpp
+++ b/contrib/caffegen/src/internal/ParameterRandomizePass.cpp
@@ -45,7 +45,7 @@ void ParameterRandomizePass::visit(ConvolutionLayer &l)
   weight_shape->add_dim(l.num_effective_output());
   weight_shape->add_dim(l.input_shape().dim(l.channel_axis()));
 
-  for (int spatial_axis = 0; spatial_axis < l.num_spatial_axes(); ++spatial_axis)
+  for (uint32_t spatial_axis = 0; spatial_axis < l.num_spatial_axes(); ++spatial_axis)
   {
     const auto kernel_dim = l.kernel_size(spatial_axis);
     weight_shape->add_dim(kernel_dim);
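
The same -Wsign-compare fix as in ConvolutionLayer::output_shape above:
l.num_spatial_axes() returns uint32_t, so the loop's induction variable is
made unsigned instead of comparing an int against an unsigned count.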