Publishing 2019 R1 content
[platform/upstream/dldt.git] / inference-engine / src / inference_engine / cpp_interfaces / impl / ie_plugin_internal.hpp
index d9bee35..bb261db 100644 (file)
@@ -1,4 +1,4 @@
-// Copyright (C) 2018 Intel Corporation
+// Copyright (C) 2018-2019 Intel Corporation
 // SPDX-License-Identifier: Apache-2.0
 //
 
@@ -13,6 +13,7 @@
 #include <map>
 #include <string>
 #include <blob_factory.hpp>
+#include "graph_transformer.h"
 #include "cpp_interfaces/interface/ie_iplugin_internal.hpp"
 #include "cpp_interfaces/base/ie_executable_network_base.hpp"
 #include "cpp_interfaces/impl/ie_executable_network_internal.hpp"
@@ -47,6 +48,19 @@ public:
         StatusCode sts = _loadedNetwork->CreateInferRequest(_createdInferRequest, &resp);
         if (sts != OK) THROW_IE_EXCEPTION << resp.msg;
     }
+    /**
+     * @brief Most plugins successfully consume non-reshapable networks, so const-layer trimming is done here in the base class.
+     * WARNING: this function modifies layers of the input network in place and may therefore affect the application that still uses it.
+     */
+    virtual ICNNNetwork&  RemoveConstLayers(ICNNNetwork &network) {
+        auto* implNetwork = dynamic_cast<details::CNNNetworkImpl*>(&network);
+        if (implNetwork) {
+            // Trimming is only possible for CNNNetworkImpl: ICNNNetwork exposes no mutation API, so other implementations pass through untouched
+            ConstTransformer transformator(implNetwork);
+            transformator.fullTrim();
+        }
+        return network;
+    }
 
     /**
     * @brief Creates an executable network from a parsed network object; users can create as many networks as they need and use
@@ -101,7 +115,7 @@ public:
             }
             _networkOutputs[it.first] = newData;
         }
-        auto impl = LoadExeNetworkImpl(network, config);
+        auto impl = LoadExeNetworkImpl(RemoveConstLayers(network), config);
         impl->setNetworkInputs(_networkInputs);
         impl->setNetworkOutputs(_networkOutputs);
         // skip setting shared ptr to avoid curricular dependency: ExecutableNetworkBase -> IExecutableNetworkInternal -> InferencePluginInternal