assert(spatial_axis < num_spatial_axes());
if (_param.kernel_size().size() == 0)
{
+ if (_param.has_kernel_h() && (spatial_axis == 0))
+ {
+ assert(num_spatial_axes() == 2);
+ return _param.kernel_h();
+ }
+
+ if (_param.has_kernel_w() && (spatial_axis == 1))
+ {
+ assert(num_spatial_axes() == 2);
+ return _param.kernel_w();
+ }
+
return 0;
}
ASSERT_EQ(expected, obtained);
}
}
+
+namespace
+{
+// Prototxt fixture: a 1x3x16x16 input feeding a Convolution layer that
+// specifies its kernel through the scalar 'kernel_h'/'kernel_w' fields
+// (3x1) instead of the repeated 'kernel_size' field.
+// clang-format off
+const char *conv_ker_hw = STRING(
+layer {
+ name: "data"
+ type: "Input"
+ top: "data"
+ input_param {
+ shape: { dim: 1 dim: 3 dim: 16 dim: 16 }
+ }
+}
+layer {
+ name: "conv"
+ type: "Convolution"
+ bottom: "data"
+ top: "conv"
+ convolution_param {
+ bias_term: false
+ num_output: 2
+ kernel_h: 3
+ kernel_w: 1
+ }
+}
+);
+// clang-format on
+} // namespace
+
+// Verify that ConvolutionSpec honors 'kernel_h'/'kernel_w' when
+// 'kernel_size' is absent, and that the resulting output shape matches
+// the one Caffe itself computes for the same network.
+TEST_F(ConvolutionSpecTest, conv_ker_hw)
+{
+ ::caffe::NetParameter param;
+
+ ASSERT_TRUE(load(conv_ker_hw, param));
+
+ ::caffe::Net<float> net{param};
+
+ const tensor::Shape ifm_shape{1, 3, 16, 16};
+ ConvolutionSpec spec{param.layer(1).convolution_param()};
+
+ spec.ifm_shape(ifm_shape);
+
+ // Check 'ker_dim' (kernel_h -> spatial axis 0, kernel_w -> spatial axis 1)
+ ASSERT_EQ(spec.ker_dim(0), 3);
+ ASSERT_EQ(spec.ker_dim(1), 1);
+
+ // Check 'ofm_shape' against the blob shape Caffe derives
+ {
+ auto expected = as_tensor_shape(net.blob_by_name("conv")->shape());
+ auto obtained = spec.ofm_shape();
+
+ ASSERT_EQ(expected, obtained);
+ }
+}