[CPU] Add check quantize ranges (#850)
author Maxim Andronov <maxim.andronov@intel.com>
Mon, 13 Jul 2020 09:48:00 +0000 (12:48 +0300)
committer GitHub <noreply@github.com>
Mon, 13 Jul 2020 09:48:00 +0000 (12:48 +0300)
inference-engine/src/mkldnn_plugin/nodes/mkldnn_quantize_node.cpp
inference-engine/tests_deprecated/functional/shared_tests/transformations/concat_test.cpp
inference-engine/thirdparty/mkl-dnn

index cd2f9fe..5602187 100644 (file)
@@ -221,6 +221,11 @@ void MKLDNNQuantizeNode::init() {
             float il = inputLowData[isInputLowBroadcasted ? 0 : i];
             float ih = inputHighData[isInputHighBroadcasted ? 0 : i];
 
+            if (il == ih) {
+                if (levels != 2)
+                    THROW_IE_EXCEPTION << "Quantize layer with name '" << getName() << "' has wrong input quantize ranges";
+            }
+
             inputScale[i] = (levels - 1) / (ih - il);
             inputShift[i] = -il * (levels - 1) / (ih - il);
         }
@@ -229,6 +234,11 @@ void MKLDNNQuantizeNode::init() {
             float ol = outputLowData[isOutputLowBroadcasted ? 0 : i];
             float oh = outputHighData[isOutputHighBroadcasted ? 0 : i];
 
+            if (ol == oh) {
+                if (levels != 2)
+                    THROW_IE_EXCEPTION << "Quantize layer with name '" << getName() << "' has wrong output quantize ranges";
+            }
+
             outputScale[i] = (oh - ol) / (levels - 1);
 
             if (outputScale[i] != 1.f)
index a371c54..aa9326e 100644 (file)
@@ -40,17 +40,18 @@ std::string ConcatTestModel::getModel(SingleLayerTransformationsTestParams& p) c
         {"10,15", "12,22"}, {"11,21", "12,23"} // FakeQuantize to Concat
     };
 
+    size_t constSize = std::accumulate(constInputDimentions.begin(), constInputDimentions.end(), 1lu, std::multiplies<size_t>());
     return CommonTestUtils::DefaultNetBuilder::buildNetworkWithOneInput(
             "Concat_transformations_", p.inputDimensions[0], p._network_precision)
         .addInputLayer(p._network_precision, p.inputDimensions[1])
-        .addLayer("Const", p._network_precision, &const_params, {{}, {constInputDimentions}}, type_size, 0)
-        .addLayer("Const", p._network_precision, &const_params, {{}, {constInputDimentions}}, type_size, 0)
-        .addLayer("Const", p._network_precision, &const_params, {{}, {constInputDimentions}}, type_size, 0)
-        .addLayer("Const", p._network_precision, &const_params, {{}, {constInputDimentions}}, type_size, 0)
-        .addLayer("Const", p._network_precision, &const_params, {{}, {constInputDimentions}}, type_size, 0)
-        .addLayer("Const", p._network_precision, &const_params, {{}, {constInputDimentions}}, type_size, 0)
-        .addLayer("Const", p._network_precision, &const_params, {{}, {constInputDimentions}}, type_size, 0)
-        .addLayer("Const", p._network_precision, &const_params, {{}, {constInputDimentions}}, type_size, 0)
+        .addLayer("Const", p._network_precision, &const_params, {{}, {constInputDimentions}}, type_size*constSize, 0)
+        .addLayer("Const", p._network_precision, &const_params, {{}, {constInputDimentions}}, type_size*constSize, 0)
+        .addLayer("Const", p._network_precision, &const_params, {{}, {constInputDimentions}}, type_size*constSize, 0)
+        .addLayer("Const", p._network_precision, &const_params, {{}, {constInputDimentions}}, type_size*constSize, 0)
+        .addLayer("Const", p._network_precision, &const_params, {{}, {constInputDimentions}}, type_size*constSize, 0)
+        .addLayer("Const", p._network_precision, &const_params, {{}, {constInputDimentions}}, type_size*constSize, 0)
+        .addLayer("Const", p._network_precision, &const_params, {{}, {constInputDimentions}}, type_size*constSize, 0)
+        .addLayer("Const", p._network_precision, &const_params, {{}, {constInputDimentions}}, type_size*constSize, 0)
         .addLayer(
             "FakeQuantize",
             p._network_precision,
index 4e339e9..36f650a 160000 (submodule)
@@ -1 +1 @@
-Subproject commit 4e339e93098cf9448209db8c7d2b86356fc67ff5
+Subproject commit 36f650aac835b5ef8ab2459eda337ed881a1d3c4