Disable fusion into output layers
author Dmitry Kurtaev <dmitry.kurtaev+github@gmail.com>
Wed, 4 Jul 2018 12:50:39 +0000 (15:50 +0300)
committer Dmitry Kurtaev <dmitry.kurtaev+github@gmail.com>
Wed, 4 Jul 2018 12:53:47 +0000 (15:53 +0300)
modules/dnn/src/dnn.cpp
modules/dnn/test/test_layers.cpp

index 0177b31..bffcee3 100644
@@ -2075,7 +2075,8 @@ Mat Net::forward(const String& outputName)
     if (layerName.empty())
         layerName = getLayerNames().back();
 
-    impl->setUpNet();
+    std::vector<LayerPin> pins(1, impl->getPinByAlias(layerName));
+    impl->setUpNet(pins);
     impl->forwardToLayer(impl->getLayerData(layerName));
 
     return impl->getBlob(layerName);
@@ -2085,13 +2086,13 @@ void Net::forward(OutputArrayOfArrays outputBlobs, const String& outputName)
 {
     CV_TRACE_FUNCTION();
 
-    impl->setUpNet();
-
     String layerName = outputName;
 
     if (layerName.empty())
         layerName = getLayerNames().back();
 
+    std::vector<LayerPin> pins(1, impl->getPinByAlias(layerName));
+    impl->setUpNet(pins);
     impl->forwardToLayer(impl->getLayerData(layerName));
 
     LayerPin pin = impl->getPinByAlias(layerName);
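
For context: both overloads now build the list of requested pins and hand it to setUpNet(), so the fusion pass knows which outputs must stay observable and does not fold a following in-place activation (e.g. ReLU) into those layers. The self-contained toy pass below illustrates only that rule; Layer, fuseLayers and outputsToKeep are illustrative names, not the actual OpenCV internals, which operate on LayerPin values.

#include <algorithm>
#include <iostream>
#include <string>
#include <vector>

// Toy layer record: a fusion pass would normally fold a following
// in-place activation into the layer that produces its input.
struct Layer
{
    std::string name;
    bool hasActivation;    // an in-place ReLU directly follows this layer
    bool fusedActivation;  // set to true by the fusion pass
};

// Fuse activations everywhere except into layers whose raw output the
// caller requested; fusing there would overwrite the pre-activation blob.
static void fuseLayers(std::vector<Layer>& layers,
                       const std::vector<std::string>& outputsToKeep)
{
    for (size_t i = 0; i < layers.size(); ++i)
    {
        bool requested = std::find(outputsToKeep.begin(), outputsToKeep.end(),
                                   layers[i].name) != outputsToKeep.end();
        if (layers[i].hasActivation && !requested)
            layers[i].fusedActivation = true;
    }
}

int main()
{
    Layer conv = { "testConv", true, false };
    std::vector<Layer> net(1, conv);

    // "testConv" is a requested output, so its ReLU must not be fused.
    fuseLayers(net, std::vector<std::string>(1, "testConv"));
    std::cout << std::boolalpha << net[0].fusedActivation << std::endl;  // false
    return 0;
}

In the real network the same check is what lets forward("testConv") in the test below observe the convolution's un-fused output.
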
index 963206b..fd52cae 100644
@@ -1240,4 +1240,36 @@ INSTANTIATE_TEST_CASE_P(/**/, Layer_Test_ShuffleChannel, Combine(
 /*group*/        Values(1, 2, 3, 6)
 ));
 
+// Check that ReLU is not fused into the convolution when its output is requested
+TEST(Layer_Test_Convolution, relu_fusion)
+{
+    Net net;
+    {
+        LayerParams lp;
+        lp.set("kernel_size", 1);
+        lp.set("num_output", 1);
+        lp.set("bias_term", false);
+        lp.type = "Convolution";
+        lp.name = "testConv";
+
+        int weightsShape[] = {1, 1, 1, 1};
+        Mat weights(4, &weightsShape[0], CV_32F, Scalar(1));  // single 1x1 unit weight: identity convolution
+        lp.blobs.push_back(weights);
+        net.addLayerToPrev(lp.name, lp.type, lp);
+    }
+    {
+        LayerParams lp;
+        lp.type = "ReLU";
+        lp.name = "testReLU";
+        net.addLayerToPrev(lp.name, lp.type, lp);
+    }
+    int sz[] = {1, 1, 2, 3};
+    Mat input(4, &sz[0], CV_32F);
+    randu(input, -1.0, -0.1);  // all-negative input: a fused ReLU would zero it
+    net.setInput(input);
+    net.setPreferableBackend(DNN_BACKEND_OPENCV);
+    Mat output = net.forward("testConv");  // request the conv output explicitly
+    normAssert(input, output);  // identity conv: raw (pre-ReLU) output equals the input
+}
+
 }} // namespace
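
From the caller's side, the effect is that Net::forward(name) now returns the raw output of the named layer even when an in-place activation follows it. A minimal usage sketch, assuming a Caffe model in which a ReLU directly follows a convolution layer named "conv1" (the file names and layer name are hypothetical placeholders):

#include <opencv2/core.hpp>
#include <opencv2/dnn.hpp>
using namespace cv;

int main()
{
    // Hypothetical model files; any net with a ReLU right after "conv1" works.
    dnn::Net net = dnn::readNetFromCaffe("deploy.prototxt", "weights.caffemodel");
    net.setPreferableBackend(dnn::DNN_BACKEND_OPENCV);

    int sz[] = {1, 3, 224, 224};
    Mat input(4, &sz[0], CV_32F);
    randu(input, -1.0, 1.0);
    net.setInput(input);

    // "conv1" is passed as the requested output, so setUpNet() keeps the
    // following ReLU un-fused and this blob may contain negative values.
    Mat preActivation = net.forward("conv1");
    return 0;
}
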