Fix Inference Engine graphs with fused output layers
author Dmitry Kurtaev <dmitry.kurtaev+github@gmail.com>
Thu, 31 May 2018 13:16:34 +0000 (16:16 +0300)
committer Dmitry Kurtaev <dmitry.kurtaev+github@gmail.com>
Thu, 31 May 2018 13:21:08 +0000 (16:21 +0300)
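
When a layer is not supported by the Inference Engine backend, the blobs it
reads have to be registered as outputs of the IE graph so that the fallback
default-backend layer can access them. If the producing layer was fused (in
the new test, a Scale fused into a Convolution), the graph contains only the
fused layer's name, so addOutput() has to receive the name of the layer kept
in the backend node rather than the name of the source LayerData.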
modules/dnn/src/dnn.cpp
modules/dnn/test/test_layers.cpp

diff --git a/modules/dnn/src/dnn.cpp b/modules/dnn/src/dnn.cpp
index 973c98a..a565682 100644
--- a/modules/dnn/src/dnn.cpp
+++ b/modules/dnn/src/dnn.cpp
@@ -1132,7 +1132,9 @@ struct Net::Impl
                 if (layerNet != ieInpNode->net)
                 {
                     // layerNet is empty or nodes are from different graphs.
-                    ieInpNode->net->addOutput(inpLd.name);
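+                    // The node may wrap a fused layer whose name differs from
+                    // inpLd.name; use the name that exists in the IE graph.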
+                    ieInpNode->net->addOutput(ieInpNode->layer->name);
                 }
             }
         }
diff --git a/modules/dnn/test/test_layers.cpp b/modules/dnn/test/test_layers.cpp
index 89c6ed8..5cbfba5 100644
--- a/modules/dnn/test/test_layers.cpp
+++ b/modules/dnn/test/test_layers.cpp
@@ -834,6 +834,65 @@ TEST(Test_DLDT, two_inputs)
 
     normAssert(out, firstInp + secondInp);
 }
+
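+// A layer which claims support for the default backend only. Adding it to a
+// network forces a fallback from the Inference Engine backend at that point.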
+class UnsupportedLayer : public Layer
+{
+public:
+    UnsupportedLayer(const LayerParams &params) {}
+
+    static Ptr<Layer> create(const LayerParams& params)
+    {
+        return Ptr<Layer>(new UnsupportedLayer(params));
+    }
+
+    virtual bool supportBackend(int backendId) CV_OVERRIDE
+    {
+        return backendId == DNN_BACKEND_DEFAULT;
+    }
+
+    virtual void forward(std::vector<cv::Mat*> &inputs, std::vector<cv::Mat> &outputs, std::vector<cv::Mat> &internals) CV_OVERRIDE {}
+
+    virtual void forward(cv::InputArrayOfArrays inputs, cv::OutputArrayOfArrays outputs, cv::OutputArrayOfArrays internals) CV_OVERRIDE {}
+};
+
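+// Regression test for fused output layers: Scale is fused into Convolution,
+// and the fused output then feeds a layer the IE backend cannot run.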
+TEST(Test_DLDT, fused_output)
+{
+    static const int kNumChannels = 3;
+    CV_DNN_REGISTER_LAYER_CLASS(Unsupported, UnsupportedLayer);
+    Net net;
+    {
+        LayerParams lp;
+        lp.set("kernel_size", 1);
+        lp.set("num_output", kNumChannels);
+        lp.set("bias_term", false);
+        lp.type = "Convolution";
+        lp.name = "testConv";
+        lp.blobs.push_back(Mat({kNumChannels, 1, 1, 1}, CV_32F, Scalar(1)));
+        net.addLayerToPrev(lp.name, lp.type, lp);
+    }
+    {
+        LayerParams lp;
+        lp.set("bias_term", false);
+        lp.type = "Scale";
+        lp.name = "testScale";
+        lp.blobs.push_back(Mat({kNumChannels}, CV_32F, Scalar(1)));
+        net.addLayerToPrev(lp.name, lp.type, lp);
+    }
+    {
+        LayerParams lp;
+        net.addLayerToPrev("unsupported_layer", "Unsupported", lp);
+    }
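+    // Run with the IE backend: the unsupported layer falls back to the default
+    // backend and reads the fused convolution's output.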
+    net.setPreferableBackend(DNN_BACKEND_INFERENCE_ENGINE);
+    net.setInput(Mat({1, 1, 1, 1}, CV_32FC1, Scalar(1)));
+    ASSERT_NO_THROW(net.forward());
+    LayerFactory::unregisterLayer("Unsupported");
+}
 #endif  // HAVE_INF_ENGINE
 
 // Test a custom layer.