From f25a01bb5a3a1ca4950f0d496fc70164b0627e59 Mon Sep 17 00:00:00 2001
From: Dmitry Kurtaev
Date: Wed, 4 Jul 2018 15:50:39 +0300
Subject: [PATCH] Disable fusion to output layers

---
 modules/dnn/src/dnn.cpp          |  7 ++++---
 modules/dnn/test/test_layers.cpp | 32 ++++++++++++++++++++++++++++++++
 2 files changed, 36 insertions(+), 3 deletions(-)

diff --git a/modules/dnn/src/dnn.cpp b/modules/dnn/src/dnn.cpp
index 0177b31..bffcee3 100644
--- a/modules/dnn/src/dnn.cpp
+++ b/modules/dnn/src/dnn.cpp
@@ -2075,7 +2075,8 @@ Mat Net::forward(const String& outputName)
     if (layerName.empty())
         layerName = getLayerNames().back();
 
-    impl->setUpNet();
+    std::vector<LayerPin> pins(1, impl->getPinByAlias(layerName));
+    impl->setUpNet(pins);
     impl->forwardToLayer(impl->getLayerData(layerName));
 
     return impl->getBlob(layerName);
@@ -2085,13 +2086,13 @@ void Net::forward(OutputArrayOfArrays outputBlobs, const String& outputName)
 {
     CV_TRACE_FUNCTION();
 
-    impl->setUpNet();
-
     String layerName = outputName;
 
     if (layerName.empty())
         layerName = getLayerNames().back();
 
+    std::vector<LayerPin> pins(1, impl->getPinByAlias(layerName));
+    impl->setUpNet(pins);
     impl->forwardToLayer(impl->getLayerData(layerName));
 
     LayerPin pin = impl->getPinByAlias(layerName);
diff --git a/modules/dnn/test/test_layers.cpp b/modules/dnn/test/test_layers.cpp
index 963206b..fd52cae 100644
--- a/modules/dnn/test/test_layers.cpp
+++ b/modules/dnn/test/test_layers.cpp
@@ -1240,4 +1240,36 @@ INSTANTIATE_TEST_CASE_P(/**/, Layer_Test_ShuffleChannel, Combine(
 /*group*/        Values(1, 2, 3, 6)
 ));
 
+// Check that ReLU is not fused into the convolution if its output is requested.
+TEST(Layer_Test_Convolution, relu_fusion)
+{
+    Net net;
+    {
+        LayerParams lp;
+        lp.set("kernel_size", 1);
+        lp.set("num_output", 1);
+        lp.set("bias_term", false);
+        lp.type = "Convolution";
+        lp.name = "testConv";
+
+        int weightsShape[] = {1, 1, 1, 1};
+        Mat weights(4, &weightsShape[0], CV_32F, Scalar(1));
+        lp.blobs.push_back(weights);
+        net.addLayerToPrev(lp.name, lp.type, lp);
+    }
+    {
+        LayerParams lp;
+        lp.type = "ReLU";
+        lp.name = "testReLU";
+        net.addLayerToPrev(lp.name, lp.type, lp);
+    }
+    int sz[] = {1, 1, 2, 3};
+    Mat input(4, &sz[0], CV_32F);
+    randu(input, -1.0, -0.1);  // all-negative input: ReLU would zero it out
+    net.setInput(input);
+    net.setPreferableBackend(DNN_BACKEND_OPENCV);
+    Mat output = net.forward("testConv");
+    normAssert(input, output);
+}
+
 }}  // namespace
-- 
2.7.4
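
Note on the behavioral change: before this patch, `Net::forward(outputName)` set up the whole net, so layer fusion could merge an activation (e.g. ReLU) into a preceding convolution; asking for the convolution's blob then silently returned post-activation values. By passing the requested pin to `setUpNet()`, fusion into the requested output layer is skipped. The following is a minimal caller-side sketch of the difference, assuming a build with this patch applied; the `main` wrapper and includes are mine, and the layer names mirror the test above:

```cpp
#include <opencv2/dnn.hpp>

using namespace cv;
using namespace cv::dnn;

int main()
{
    // conv(1x1, weight = 1, no bias) -> ReLU, as in the test above.
    Net net;

    LayerParams conv;
    conv.set("kernel_size", 1);
    conv.set("num_output", 1);
    conv.set("bias_term", false);
    conv.type = "Convolution";
    conv.name = "testConv";
    int wshape[] = {1, 1, 1, 1};
    conv.blobs.push_back(Mat(4, wshape, CV_32F, Scalar(1)));
    net.addLayerToPrev(conv.name, conv.type, conv);

    LayerParams relu;
    relu.type = "ReLU";
    relu.name = "testReLU";
    net.addLayerToPrev(relu.name, relu.type, relu);

    int sz[] = {1, 1, 2, 3};
    Mat input(4, sz, CV_32F);
    randu(input, -1.0, -0.1);  // all negative, so conv and ReLU outputs differ
    net.setPreferableBackend(DNN_BACKEND_OPENCV);

    // With this patch, requesting "testConv" disables fusing the ReLU
    // into it: the 1x1 identity convolution returns the input unchanged.
    net.setInput(input);
    Mat convOut = net.forward("testConv");  // equals input (negative values kept)

    // Requesting the ReLU's own output still goes through the activation.
    net.setInput(input);
    Mat reluOut = net.forward("testReLU");  // all zeros for this input
    return 0;
}
```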