support ReduceLayer without an extra Reshape layer.
author     Zihao Mu <zihaomu@outlook.com>
           Tue, 2 Aug 2022 02:32:31 +0000 (10:32 +0800)
committer  Zihao Mu <zihaomu@outlook.com>
           Tue, 2 Aug 2022 02:32:31 +0000 (10:32 +0800)
modules/dnn/include/opencv2/dnn/all_layers.hpp
modules/dnn/src/int8layers/reduce_layer.cpp
modules/dnn/src/layers/reduce_layer.cpp
modules/dnn/src/onnx/onnx_importer.cpp
modules/dnn/test/test_onnx_importer.cpp

modules/dnn/include/opencv2/dnn/all_layers.hpp
index 5c86da2..26fa66e 100644
@@ -334,7 +334,8 @@ CV__DNN_INLINE_NS_BEGIN
     {
     public:
         int reduceType;
-        std::vector<size_t> reduceDims;
+        // reduceDims holds the dimensions to be reduced; targetDims holds the target output shape.
+        std::vector<size_t> reduceDims, targetDims;
         static Ptr<ReduceLayer> create(const LayerParams& params);
     };
 
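Note: as the header change shows, reduceDims describes the dimensions that are reduced and targetDims the full output shape computed at import time. Below is a standalone sketch (not OpenCV code; function and variable names are illustrative) of how the two lists correspond to the "deleted_dims" and "target_dims" values set by the ONNX importer, assuming the reduced axes and a keepdims flag are known:

    #include <cstdio>
    #include <set>
    #include <vector>

    // Split an input shape into the sizes of the reduced axes ("deleted_dims")
    // and the resulting output shape ("target_dims").
    static void splitShape(const std::vector<int>& inShape,
                           const std::set<int>& axes, bool keepdims,
                           std::vector<int>& deletedDims,
                           std::vector<int>& targetDims)
    {
        for (int i = 0; i < (int)inShape.size(); i++)
        {
            if (axes.count(i))
            {
                deletedDims.push_back(inShape[i]);
                if (keepdims)
                    targetDims.push_back(1);  // reduced axis kept as size 1
            }
            else
                targetDims.push_back(inShape[i]);  // axis kept unchanged
        }
    }

    int main()
    {
        std::vector<int> deletedDims, targetDims;
        splitShape({1, 3, 4, 5}, {2, 3}, /*keepdims=*/true, deletedDims, targetDims);
        // deletedDims = {4, 5}, targetDims = {1, 3, 1, 1}
        for (int d : targetDims) printf("%d ", d);
        printf("\n");
        return 0;
    }
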
modules/dnn/src/int8layers/reduce_layer.cpp
index 935bdc0..9ffb489 100644
@@ -38,6 +38,15 @@ public:
         {
             reduceDims[i] = tempDims.get<int>(i);
         }
+
+        CV_Assert(params.has("target_dims"));
+        tempDims = params.get("target_dims");
+        n = tempDims.size();
+        targetDims.resize(n);
+        for (i = 0; i < n; i++)
+        {
+            targetDims[i] = tempDims.get<int>(i);
+        }
     }
 
     virtual bool supportBackend(int backendId) CV_OVERRIDE
@@ -161,18 +170,30 @@ public:
                          std::vector<MatShape> &internals) const CV_OVERRIDE
     {
         CV_Assert(inputs.size() > 0);
-        CV_Assert(reduceDims.size() != 0 && inputs[0].size() >= reduceDims.size());
+        CV_Assert(reduceDims.size() != 0 && targetDims.size() != 0 && inputs[0].size() >= reduceDims.size());
 
-        std::vector<int> outShape;
+        // outShapeTmp holds the non-reduced input dims, so total(outShapeTmp) gives the correct element count; outShape holds the final output shape.
+        std::vector<int> outShapeTmp, outShape;
+        outShape.assign(targetDims.begin(), targetDims.end());
         if (inputs[0].size() == reduceDims.size())
-            outShape.push_back(1);
+            outShapeTmp.push_back(1);
         else
         {
             for (int i = 0; i < inputs[0].size() - reduceDims.size(); i++)
             {
-                outShape.push_back(inputs[0][i]);
+                outShapeTmp.push_back(inputs[0][i]);
             }
         }
+
+        // Support a dynamic batch size.
+        // Note: if more than one dimension is dynamic, the assert below will fail.
+        if (total(outShape) != total(outShapeTmp))
+        {
+            if (outShape[0] != outShapeTmp[0])
+                outShape[0] = outShapeTmp[0];
+        }
+
+        CV_Assert(total(outShape) == total(outShapeTmp));
         outputs.assign(1, outShape);
 
         return false;
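
The block above only ever rewrites the batch dimension of the imported target shape: outShapeTmp is rebuilt from the leading, non-reduced dims of the actual input, and if the element counts disagree, dim 0 of outShape is patched before the final assert. A standalone sketch of that rule (helper names are illustrative, not OpenCV code):

    #include <cassert>
    #include <cstdio>
    #include <vector>

    static size_t totalOf(const std::vector<int>& shape)
    {
        size_t t = 1;
        for (int d : shape) t *= (size_t)d;
        return t;
    }

    // outShape starts as target_dims (fixed batch from import time); outShapeTmp
    // holds the non-reduced dims of the real input. Only dim 0 may differ.
    static std::vector<int> patchBatch(std::vector<int> outShape,
                                       const std::vector<int>& outShapeTmp)
    {
        if (totalOf(outShape) != totalOf(outShapeTmp) && outShape[0] != outShapeTmp[0])
            outShape[0] = outShapeTmp[0];
        // Fails if a dimension other than the batch was dynamic.
        assert(totalOf(outShape) == totalOf(outShapeTmp));
        return outShape;
    }

    int main()
    {
        // Exported with batch 1 ({1, 3, 1, 1}), run with batch 4 ({4, 3} kept dims).
        std::vector<int> out = patchBatch({1, 3, 1, 1}, {4, 3});
        for (int d : out) printf("%d ", d);  // prints: 4 3 1 1
        printf("\n");
        return 0;
    }
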
modules/dnn/src/layers/reduce_layer.cpp
index 47aec23..c1f74f1 100644
@@ -61,6 +61,15 @@ public:
         {
             reduceDims[i] = tempDims.get<int>(i);
         }
+
+        CV_Assert(params.has("target_dims"));
+        tempDims = params.get("target_dims");
+        n = tempDims.size();
+        targetDims.resize(n);
+        for (i = 0; i < n; i++)
+        {
+            targetDims[i] = tempDims.get<int>(i);
+        }
     }
 
     virtual bool supportBackend(int backendId) CV_OVERRIDE
@@ -325,18 +334,29 @@ public:
                          std::vector<MatShape> &internals) const CV_OVERRIDE
     {
         CV_Assert(inputs.size() > 0);
-        CV_Assert(reduceDims.size() != 0 && inputs[0].size() >= reduceDims.size());
+        CV_Assert(reduceDims.size() != 0 && targetDims.size() != 0 && inputs[0].size() >= reduceDims.size());
 
-        std::vector<int> outShape;
+        // outShapeTmp holds the non-reduced input dims, so total(outShapeTmp) gives the correct element count; outShape holds the final output shape.
+        std::vector<int> outShapeTmp, outShape;
+        outShape.assign(targetDims.begin(), targetDims.end());
         if (inputs[0].size() == reduceDims.size())
-            outShape.push_back(1);
+            outShapeTmp.push_back(1);
         else
         {
             for (int i = 0; i < inputs[0].size() - reduceDims.size(); i++)
             {
-                outShape.push_back(inputs[0][i]);
+                outShapeTmp.push_back(inputs[0][i]);
             }
         }
+
+        // Support a dynamic batch size.
+        // Note: if more than one dimension is dynamic, the assert below will fail.
+        if (total(outShape) != total(outShapeTmp) && outShape[0] != outShapeTmp[0])
+        {
+            outShape[0] = outShapeTmp[0];
+        }
+
+        CV_Assert(total(outShape) == total(outShapeTmp));
         outputs.assign(1, outShape);
 
         return false;
modules/dnn/src/onnx/onnx_importer.cpp
index e90581e..25d4ed9 100644
@@ -1191,7 +1191,7 @@ void ONNXImporter::parseReduce(LayerParams& layerParams, const opencv_onnx::Node
             int axesNum = axesMat.total();
             for (int i = 0; i < axesNum; i++)
             {
-                int axis = normalize_axis(static_cast<int>(axesMat.at<float>(i)), inpShape.size());
+                int axis = normalize_axis(axesMat.at<int>(i), inpShape.size());
                 shouldDelete[axis] = true;
             }
         }
@@ -1220,7 +1220,7 @@ void ONNXImporter::parseReduce(LayerParams& layerParams, const opencv_onnx::Node
         }
     }
 
-    MatShape targetShape;
+    std::vector<int> targetShape;
     for (int i = 0; i < inpShape.size(); ++i)
     {
         if (!shouldDelete[i])
@@ -1290,30 +1290,10 @@ void ONNXImporter::parseReduce(LayerParams& layerParams, const opencv_onnx::Node
         }
     }
 
-    LayerParams reduceLp = layerParams;
-    reduceLp.name = layerParams.name + "/reduce";
-    CV_Assert(layer_id.find(reduceLp.name) == layer_id.end());
-    reduceLp.set("deleted_dims", DictValue::arrayInt(&deletedDims[0], deletedDims.size()));
+    layerParams.set("deleted_dims", DictValue::arrayInt(&deletedDims[0], deletedDims.size()));
+    layerParams.set("target_dims", DictValue::arrayInt(&targetShape[0], targetShape.size()));
 
     node_proto.set_input(0, inputString);
-    node_proto.set_output(0, reduceLp.name);
-    addLayer(reduceLp, node_proto);
-
-    layerParams.type = (depth == CV_8S) ? "ReshapeInt8" : "Reshape";
-    layerParams.set("dim", DictValue::arrayInt(&targetShape[0], targetShape.size()));
-
-    // Set batchsize dim as dynamic to be compatible with batch size >= 2.
-    if (targetShape.size() > 1)
-    {
-        std::vector<int> dynamicAxes = {0};  // The index of batchsize dim is 0.
-        std::vector<int> inputIndices = {0};
-
-        layerParams.set("has_dynamic_shapes", true);
-        layerParams.set("dynamic_axes", DictValue::arrayInt(dynamicAxes.data(), dynamicAxes.size()));
-        layerParams.set("input_indices", DictValue::arrayInt(inputIndices.data(), inputIndices.size()));
-    }
-
-    node_proto.set_input(0, node_proto.output(0));
     node_proto.set_output(0, output_name);
 
     addLayer(layerParams, node_proto);
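
With target_dims stored on the Reduce layer itself, the importer now emits a single layer and no longer needs the trailing Reshape with dynamic axes. A minimal usage sketch ("reduce_sum.onnx" is a placeholder file name): feed a batch larger than the one the model was exported with; the layer adjusts the batch dimension during shape inference.

    #include <opencv2/core.hpp>
    #include <opencv2/dnn.hpp>
    #include <iostream>

    int main()
    {
        // Placeholder model: any ONNX graph containing a Reduce* node exercises this path.
        cv::dnn::Net net = cv::dnn::readNetFromONNX("reduce_sum.onnx");

        // Batch of 4 even if the model was exported with batch 1.
        cv::Mat input(std::vector<int>{4, 3, 4, 4}, CV_32F);
        cv::randu(input, cv::Scalar(0), cv::Scalar(1));

        net.setInput(input);
        cv::Mat out = net.forward();
        std::cout << "output dims: " << out.dims << std::endl;
        return 0;
    }
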
modules/dnn/test/test_onnx_importer.cpp
index 5f94f98..6a0de29 100644
@@ -411,7 +411,6 @@ TEST_P(Test_ONNX_layers, ReduceMean)
 TEST_P(Test_ONNX_layers, ReduceSum)
 {
     testONNXModels("reduce_sum");
-    testONNXModels("reduce_sum_axis");
     testONNXModels("reduce_sum_axis_dynamic_batch");
 }