Removed PluginDispatcher and IEPlugin from the Python API (#920)
author    Ilya Lavrenov <ilya.lavrenov@intel.com>
Tue, 16 Jun 2020 12:03:32 +0000 (15:03 +0300)
committer    GitHub <noreply@github.com>
Tue, 16 Jun 2020 12:03:32 +0000 (15:03 +0300)
13 files changed:
inference-engine/ie_bridges/python/src/openvino/inference_engine/ie_api.pxd
inference-engine/ie_bridges/python/src/openvino/inference_engine/ie_api.pyx
inference-engine/ie_bridges/python/src/openvino/inference_engine/ie_api_impl.cpp
inference-engine/ie_bridges/python/src/openvino/inference_engine/ie_api_impl.hpp
inference-engine/ie_bridges/python/src/openvino/inference_engine/ie_api_impl_defs.pxd
inference-engine/ie_bridges/python/tests/test_IEPlugin.py [deleted file]
inference-engine/include/ie_plugin_dispatcher.hpp [deleted file]
inference-engine/include/inference_engine.hpp
inference-engine/src/inference_engine/ie_plugin_dispatcher.cpp [deleted file]
inference-engine/src/multi_device/multi_device.hpp
inference-engine/tests/ie_test_utils/unit_test_utils/empty.cpp
inference-engine/tests/ie_test_utils/unit_test_utils/mocks/mock_plugin_dispatcher.hpp [deleted file]
inference-engine/tests/unit/inference_engine/ie_plugin_dispatcher_tests.cpp [deleted file]
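
This change drops the long-deprecated IEPlugin class (and the PluginDispatcher it relied on) from the Python bindings, leaving IECore as the single entry point. A minimal migration sketch, assuming an IR model on disk ("model.xml"/"model.bin" are placeholder paths) and using the IECore methods the deprecation warnings in this diff already point to (read_network, load_network):

```python
from openvino.inference_engine import IECore

ie = IECore()

# Replaces IENetwork(model=..., weights=...) followed by IEPlugin(device).load(net):
# IECore resolves the matching plugin from the device name automatically.
net = ie.read_network(model="model.xml", weights="model.bin")
exec_net = ie.load_network(network=net, device_name="CPU", num_requests=2)
```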

index 874435d..7b50eec 100644 (file)
@@ -42,21 +42,12 @@ cdef class IENetwork:
 
 cdef class ExecutableNetwork:
     cdef unique_ptr[C.IEExecNetwork] impl
-    cdef C.IEPlugin plugin_impl
     cdef C.IECore ie_core_impl
     cpdef wait(self, num_requests = ?, timeout = ?)
     cpdef get_idle_request_id(self)
     cdef public:
         _requests, _infer_requests
 
-cdef class IEPlugin:
-    cdef C.IEPlugin impl
-    cpdef ExecutableNetwork load(self, IENetwork network, int num_requests = ?, config = ?)
-    cpdef void set_config(self, config)
-    cpdef void add_cpu_extension(self, str extension_path) except *
-    cpdef void set_initial_affinity(self, IENetwork network) except *
-    cpdef set get_supported_layers(self, IENetwork net)
-
 cdef class LayersStatsMap(dict):
     cdef C.IENetwork net_impl
 
index 5fa5955..5815093 100644 (file)
@@ -1699,119 +1699,6 @@ cdef class IENetwork:
     # def get_function(self):
     #     return self.impl.getFunction()
 
-## This class is the main plugin interface and serves to initialize and configure the plugin.
-#
-#  \note This class is deprecated: Use IECore instead
-#
-cdef class IEPlugin:
-    ##  Class constructor
-    #
-    #  @param device: Target device name. Supported devices: CPU, GPU, FPGA, MYRIAD, HETERO, MULTI
-    #  @param plugin_dirs: List of paths to plugin directories
-    #  @return IEPlugin instance
-    def __cinit__(self, device: str, plugin_dirs=None):
-        warnings.warn("IEPlugin class is deprecated. "
-                      "Please use IECore class instead.",
-                      DeprecationWarning)
-        plugin_base = device.split(':')[0]
-        if plugin_base not in known_plugins:
-            raise ValueError("Unknown plugin: {}, expected one of: {}"
-                             .format(plugin_base, ",".join(known_plugins)))
-        if plugin_dirs is None:
-            plugin_dirs = [""]
-        elif isinstance(plugin_dirs, str):
-            plugin_dirs = [plugin_dirs]
-
-        # add package directory to plugin_dirs
-        lib_location = os.path.dirname(os.path.realpath(__file__))
-        plugin_dirs.append(lib_location)
-
-        cpdef string device_ = <string> device.encode()
-        cdef vector[string] dirs_
-        for d in plugin_dirs:
-            dirs_.push_back(<string> d.encode())
-
-        self.impl = C.IEPlugin(device_, dirs_)
-
-    ## Loads a network that was read from the IR to the plugin and creates an executable network from a network object.
-    #  You can create as many networks as you need and use them simultaneously (up to the limitation of the hardware
-    #  resources).
-    #
-    #  @param network:  A valid `IENetwork` instance
-    #  @param num_requests: A positive integer value of infer requests to be created. Number of infer
-    #                       requests may be limited by device capabilities.
-    #  @param config: A dictionary of plugin configuration keys and their values
-    #  @return  Valid instance of ExecutableNetwork class
-    #
-    #  Usage example:\n
-    #  ```python
-    #  net = IENetwork(model=path_to_xml_file, weights=path_to_bin_file)
-    #  ie = IECore()
-    #  exec_net = ie.load_network(network=net, device_name="CPU", num_requests=2)
-    #  ```
-    cpdef ExecutableNetwork load(self, IENetwork network, int num_requests=1, config=None):
-        cdef ExecutableNetwork exec_net = ExecutableNetwork()
-        cdef map[string, string] c_config
-        if num_requests < 0:
-            raise ValueError("Incorrect number of requests specified: {}. Expected positive integer number "
-                             "or zero for auto detection".format(num_requests))
-        if config:
-            for k, v in config.items():
-                c_config[to_std_string(k)] = to_std_string(v)
-        exec_net.plugin_impl = self.impl
-        exec_net.impl = move(self.impl.load(network.impl, num_requests, c_config))
-        return exec_net
-
-    ## Sets initial affinity for model layers according to the HETERO plugin logic. Applicable only if
-    #  `IEPlugin` was initialized for a HETERO device.
-    #
-    #  @param net: A valid instance of IENetwork
-    #  @return None
-    #
-    #  Usage example: See `affinity` attribute of the `IENetLayer` class.
-    cpdef void set_initial_affinity(self, IENetwork net) except *:
-        if self.device.find("HETERO") == -1:
-            raise RuntimeError("set_initial_affinity method applicable only for HETERO device")
-        self.impl.setInitialAffinity(net.impl)
-
-    cpdef set get_supported_layers(self, IENetwork net):
-        return set([l.decode() for l in self.impl.queryNetwork(net.impl)])
-
-    ## A name of the device that was specified to initialize IEPlugin
-    @property
-    def device(self):
-        device_name = bytes(self.impl.device_name)
-        return to_py_string(device_name)
-
-    ## A version of the plugin
-    @property
-    def version(self):
-        version = bytes(self.impl.version)
-        return version.decode()
-
-    ## Loads extensions library to the plugin. Applicable only for a CPU device and a HETERO device with CPU
-    #
-    #  @param extension_path: A full path to CPU extensions library
-    #  @return None
-    cpdef void add_cpu_extension(self, str extension_path) except *:
-        if self.device.find("CPU") == -1:
-            raise RuntimeError("add_cpu_extension method applicable only for CPU or HETERO devices")
-        cdef string extension_str = extension_path.encode()
-        self.impl.addCpuExtension(extension_str)
-
-    ## Sets a configuration for the plugin. Refer to `SetConfig()` in Inference Engine C++ documentation for acceptable
-    #  keys and values list.
-    #
-    #  @param config: A dictionary of keys and values of acceptable configuration parameters
-    #  @return None
-    cpdef void set_config(self, config):
-        cdef map[string, string] c_config
-        for k, v in config.items():
-            c_config[to_std_string(k)] = to_std_string(v)
-        self.impl.setConfig(c_config)
-
-    # TODO: Add export compiled network functionality
-
 cdef class BlobBuffer:
     """Copy-less accessor for Inference Engine Blob"""
 
index 4b934c1..61e165d 100644 (file)
@@ -348,83 +348,6 @@ void InferenceEnginePython::IENetwork::setStats(const std::map<std::string, std:
     IE_SUPPRESS_DEPRECATED_END
 }
 
-
-IE_SUPPRESS_DEPRECATED_START
-InferenceEnginePython::IEPlugin::IEPlugin(const std::string &device, const std::vector <std::string> &plugin_dirs) {
-
-    InferenceEngine::PluginDispatcher dispatcher{plugin_dirs};
-    actual = dispatcher.getPluginByDevice(device);
-    auto pluginVersion = actual.GetVersion();
-    version = std::to_string(pluginVersion->apiVersion.major) + ".";
-    version += std::to_string(pluginVersion->apiVersion.minor) + ".";
-    version += pluginVersion->buildNumber;
-    device_name = device;
-}
-IE_SUPPRESS_DEPRECATED_END
-
-void InferenceEnginePython::IEPlugin::setInitialAffinity(const InferenceEnginePython::IENetwork &net) {
-    IE_SUPPRESS_DEPRECATED_START
-    InferenceEngine::InferenceEnginePluginPtr hetero_plugin(actual);
-    InferenceEngine::QueryNetworkResult queryRes;
-    auto &network = net.actual;
-
-    hetero_plugin->QueryNetwork(*network, {}, queryRes);
-    IE_SUPPRESS_DEPRECATED_END
-
-    if (queryRes.rc != InferenceEngine::StatusCode::OK) {
-        THROW_IE_EXCEPTION << queryRes.resp.msg;
-    }
-    for (auto &&layer : queryRes.supportedLayersMap) {
-        IE_SUPPRESS_DEPRECATED_START
-        network->getLayerByName(layer.first.c_str())->affinity = layer.second;
-        IE_SUPPRESS_DEPRECATED_END
-    }
-}
-
-std::set <std::string> InferenceEnginePython::IEPlugin::queryNetwork(const InferenceEnginePython::IENetwork &net) {
-    const std::shared_ptr<InferenceEngine::CNNNetwork> &network = net.actual;
-    InferenceEngine::QueryNetworkResult queryRes;
-    IE_SUPPRESS_DEPRECATED_START
-    actual.QueryNetwork(*network, {}, queryRes);
-    IE_SUPPRESS_DEPRECATED_END
-
-    std::set <std::string> supportedLayers;
-    for (auto &&layer : queryRes.supportedLayersMap) {
-        supportedLayers.insert(layer.first);
-    }
-
-    return supportedLayers;
-}
-
-
-void InferenceEnginePython::IEPlugin::addCpuExtension(const std::string &extension_path) {
-    auto extension_ptr = InferenceEngine::make_so_pointer<InferenceEngine::IExtension>(extension_path);
-    auto extension = std::dynamic_pointer_cast<InferenceEngine::IExtension>(extension_ptr);
-    IE_SUPPRESS_DEPRECATED_START
-    actual.AddExtension(extension);
-    IE_SUPPRESS_DEPRECATED_END
-}
-
-std::unique_ptr <InferenceEnginePython::IEExecNetwork>
-InferenceEnginePython::IEPlugin::load(const InferenceEnginePython::IENetwork &net,
-                                      int num_requests,
-                                      const std::map <std::string, std::string> &config) {
-    auto exec_network = InferenceEnginePython::make_unique<InferenceEnginePython::IEExecNetwork>(net.name,
-                                                                                                 num_requests);
-    IE_SUPPRESS_DEPRECATED_START
-    exec_network->actual = actual.LoadNetwork(*net.actual, config);
-    IE_SUPPRESS_DEPRECATED_END
-    exec_network->createInferRequests(num_requests);
-
-    return exec_network;
-}
-
-void InferenceEnginePython::IEPlugin::setConfig(const std::map<std::string, std::string> &config) {
-    IE_SUPPRESS_DEPRECATED_START
-    actual.SetConfig(config);
-    IE_SUPPRESS_DEPRECATED_END
-}
-
 InferenceEnginePython::IEExecNetwork::IEExecNetwork(const std::string &name, size_t num_requests) :
         infer_requests(num_requests), name(name) {
     request_queue_ptr = std::make_shared<IdleInferRequestQueue>();
index 38f6975..0cbdf90 100644 (file)
@@ -150,31 +150,6 @@ struct IEExecNetwork {
 };
 
 
-struct IEPlugin {
-    std::unique_ptr<InferenceEnginePython::IEExecNetwork> load(const InferenceEnginePython::IENetwork &net,
-                                                               int num_requests,
-                                                               const std::map<std::string, std::string> &config);
-
-    std::string device_name;
-    std::string version;
-
-    void setConfig(const std::map<std::string, std::string> &);
-
-    void addCpuExtension(const std::string &extension_path);
-
-    void setInitialAffinity(const InferenceEnginePython::IENetwork &net);
-
-    IEPlugin(const std::string &device, const std::vector<std::string> &plugin_dirs);
-
-    IEPlugin() = default;
-
-    std::set<std::string> queryNetwork(const InferenceEnginePython::IENetwork &net);
-
-    IE_SUPPRESS_DEPRECATED_START
-    InferenceEngine::InferencePlugin actual;
-    IE_SUPPRESS_DEPRECATED_END
-};
-
 struct IECore {
     InferenceEngine::Core actual;
     explicit IECore(const std::string & xmlConfigFile = std::string());
index f044ee9..2d40b6f 100644 (file)
@@ -194,17 +194,6 @@ cdef extern from "ie_api_impl.hpp" namespace "InferenceEnginePython":
         void load_from_buffer(const char*xml, size_t xml_size, uint8_t*bin, size_t bin_size) except +
         object getFunction() except +
 
-    cdef cppclass IEPlugin:
-        IEPlugin() except +
-        IEPlugin(const string &, const vector[string] &) except +
-        unique_ptr[IEExecNetwork] load(IENetwork & net, int num_requests, const map[string, string]& config) except +
-        void addCpuExtension(const string &) except +
-        void setConfig(const map[string, string] &) except +
-        void setInitialAffinity(IENetwork & net) except +
-        set[string] queryNetwork(const IENetwork & net) except +
-        string device_name
-        string version
-
     cdef cppclass InferRequestWrap:
         double exec_time;
         int index;
diff --git a/inference-engine/ie_bridges/python/tests/test_IEPlugin.py b/inference-engine/ie_bridges/python/tests/test_IEPlugin.py
deleted file mode 100644 (file)
index 70c9aae..0000000
+++ /dev/null
@@ -1,136 +0,0 @@
-import warnings
-import pytest
-
-
-from openvino.inference_engine import IENetwork, IEPlugin, ExecutableNetwork
-from conftest import model_path
-
-test_net_xml, test_net_bin = model_path()
-
-
-def test_init_plugin(device):
-    with warnings.catch_warnings(record=True) as w:
-        plugin = IEPlugin(device, None)
-        assert isinstance(plugin, IEPlugin)
-    assert len(w) == 1
-    assert "IEPlugin class is deprecated. " \
-                "Please use IECore class instead." in str(w[0].message)
-
-
-def test_device_attr(device):
-    with warnings.catch_warnings(record=True) as w:
-        plugin = IEPlugin(device, None)
-        assert plugin.device == device
-    assert len(w) == 1
-    assert "IEPlugin class is deprecated. " \
-               "Please use IECore class instead." in str(w[0].message)
-
-
-def test_get_version(device):
-    with warnings.catch_warnings(record=True) as w:
-        plugin = IEPlugin(device, None)
-        assert not len(plugin.version) == 0
-    assert len(w) == 1
-    assert "IEPlugin class is deprecated. " \
-               "Please use IECore class instead." in str(w[0].message)
-
-
-def test_load_network(device):
-    with warnings.catch_warnings(record=True) as w:
-        plugin = IEPlugin(device, None)
-        net = IENetwork(model=test_net_xml, weights=test_net_bin)
-        exec_net = plugin.load(net)
-        assert isinstance(exec_net, ExecutableNetwork)
-    assert len(w) == 2
-    assert "IEPlugin class is deprecated. " \
-               "Please use IECore class instead." in str(w[0].message)
-    assert "Reading network using constructor is deprecated. " \
-            "Please, use IECore.read_network() method instead"  in str(w[1].message)
-
-
-def test_load_network_many_requests(device):
-    with warnings.catch_warnings(record=True) as w:
-        plugin = IEPlugin(device)
-        net = IENetwork(model=test_net_xml, weights=test_net_bin)
-        exec_net = plugin.load(net, num_requests=5)
-        assert len(exec_net.requests) == 5
-    assert len(w) == 2
-    assert "IEPlugin class is deprecated. " \
-               "Please use IECore class instead." in str(w[0].message)
-    assert "Reading network using constructor is deprecated. " \
-            "Please, use IECore.read_network() method instead"  in str(w[1].message)
-
-
-def test_get_supported_layers(device):
-    with warnings.catch_warnings(record=True) as w:
-        plugin = IEPlugin(device)
-        net = IENetwork(model=test_net_xml, weights=test_net_bin)
-        supported = plugin.get_supported_layers(net)
-        layers = ['19/Fused_Add_', '21', '22', '23', '24/Fused_Add_', '26', '27', '29', 'data', 'fc_out']
-        if device == "GPU":
-            layers.remove("data")
-        assert sorted(supported) == layers
-    assert len(w) == 2
-    assert "IEPlugin class is deprecated. " \
-               "Please use IECore class instead." in str(w[0].message)
-    assert "Reading network using constructor is deprecated. " \
-            "Please, use IECore.read_network() method instead"  in str(w[1].message)
-
-
-@pytest.mark.skip(reason="Plugin-specific test.")
-def test_set_config(device):
-    with warnings.catch_warnings(record=True) as w:
-        plugin = IEPlugin("HETERO:CPU")
-        plugin.set_config({"TARGET_FALLBACK": "CPU,GPU"})
-    assert len(w) == 1
-    assert "IEPlugin class is deprecated. " \
-               "Please use IECore class instead." in str(w[0].message)
-
-
-@pytest.mark.skip(reason="Sporadically fail in CI, not reproducible locally")
-def test_set_initial_affinity():
-    with warnings.catch_warnings(record=True) as w:
-        plugin = IEPlugin("HETERO:CPU", None)
-        net = IENetwork(model=test_net_xml, weights=test_net_bin)
-        plugin.set_initial_affinity(net)
-        for l, params in net.layers.items():
-            assert params.affinity == "CPU", "Incorrect affinity for {}".format(l)
-    assert len(w) == 1
-    assert "IEPlugin class is deprecated. " \
-               "Please use IECore class instead." in str(w[0].message)
-
-
-def test_set_initial_affinity_wrong_device(device):
-    with pytest.raises(RuntimeError) as e:
-        with warnings.catch_warnings(record=True) as w:
-            plugin = IEPlugin("CPU", None)
-            net = IENetwork(model=test_net_xml, weights=test_net_bin)
-            plugin.set_initial_affinity(net)
-        assert len(w) == 1
-        assert "IEPlugin class is deprecated. " \
-               "Please use IECore class instead." in str(w[0].message)
-    assert "set_initial_affinity method applicable only for HETERO device" in str(e.value)
-
-
-def test_add_cpu_extenstion_wrong_device():
-    with pytest.raises(RuntimeError) as e:
-        with warnings.catch_warnings(record=True) as w:
-            plugin = IEPlugin("GPU", None)
-            plugin.add_cpu_extension("./")
-        assert len(w) == 1
-        assert "IEPlugin class is deprecated. " \
-               "Please use IECore class instead." in str(w[0].message)
-    if "Cannot find plugin to use" in str(e.value):
-        pytest.skip("No GPU found. Skipping test")
-    else:
-        assert "add_cpu_extension method applicable only for CPU or HETERO devices" in str(e.value)
-
-
-def test_unknown_plugin():
-    with pytest.raises(ValueError) as e:
-        with warnings.catch_warnings(record=True) as w:
-            IEPlugin("BLA")
-        assert len(w) == 1
-        assert "IEPlugin class is deprecated. " \
-               "Please use IECore class instead." in str(w[0].message)
-    assert "Unknown plugin: BLA, expected one of:" in str(e.value)
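
The deleted tests exercised set_config, add_cpu_extension, get_supported_layers and set_initial_affinity; IECore offers rough equivalents for each. A hedged sketch (device names, config keys and file paths are illustrative only, and the IECore signatures are assumed from the Python API of this era rather than taken from this diff):

```python
from openvino.inference_engine import IECore

ie = IECore()
net = ie.read_network(model="model.xml", weights="model.bin")  # placeholder IR files

# IEPlugin.set_config({...})         -> IECore.set_config({...}, device_name)
ie.set_config({"PERF_COUNT": "YES"}, "CPU")

# IEPlugin.add_cpu_extension(path)   -> IECore.add_extension(path, "CPU")
# ie.add_extension("/path/to/libcpu_extension.so", "CPU")

# IEPlugin.get_supported_layers(net) -> keys of IECore.query_network(net, device)
supported = set(ie.query_network(network=net, device_name="CPU").keys())

# IEPlugin.set_initial_affinity(net) -> query the HETERO device and assign affinities yourself
# for layer, device in ie.query_network(network=net, device_name="HETERO:CPU").items():
#     net.layers[layer].affinity = device
```

Where IEPlugin bound one device at construction time, IECore takes the target per call, which is why each equivalent carries an explicit device_name argument.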
diff --git a/inference-engine/include/ie_plugin_dispatcher.hpp b/inference-engine/include/ie_plugin_dispatcher.hpp
deleted file mode 100644 (file)
index 178b709..0000000
+++ /dev/null
@@ -1,68 +0,0 @@
-// Copyright (C) 2018-2020 Intel Corporation
-// SPDX-License-Identifier: Apache-2.0
-//
-
-/**
- * @brief A header for a class to handle plugin loading.
- *
- * @file ie_plugin_dispatcher.hpp
- */
-#pragma once
-
-#include <cpp/ie_plugin_cpp.hpp>
-#include <string>
-#include <vector>
-
-#include "ie_plugin_ptr.hpp"
-
-namespace InferenceEngine {
-
-/**
- * @deprecated Use InferenceEngine::Core instead. Will be removed in 2021.1
- * @brief This is a class to load a suitable plugin
- */
-class INFERENCE_ENGINE_DEPRECATED("Use InferenceEngine::Core instead which dispatches plugin automatically."
-                                  "Will be removed in 2021.1") INFERENCE_ENGINE_API_CLASS(PluginDispatcher) {
-public:
-    /**
-     * @brief A constructor
-     *
-     * @param pp Vector of paths to plugin directories
-     */
-    explicit PluginDispatcher(const std::vector<file_name_t>& pp = {file_name_t()});
-
-    IE_SUPPRESS_DEPRECATED_START
-
-    /**
-     * @brief Loads a plugin from plugin directories
-     *
-     * @param name Plugin name
-     * @return A pointer to the loaded plugin
-     */
-    virtual InferencePlugin getPluginByName(const file_name_t& name) const;
-
-    /**
-     * @deprecated Use InferenceEngine::Core to work with devices by name
-     * @brief Loads a plugin from directories that is suitable for the device string
-     *
-     * @param deviceName A string value representing target device
-     * @return A pointer to the plugin
-     */
-    InferencePlugin getPluginByDevice(const std::string& deviceName) const;
-
-    IE_SUPPRESS_DEPRECATED_END
-
-protected:
-    /**
-     * @brief Creates path to the plugin
-     *
-     * @param path Path to the plugin
-     * @param input Plugin name
-     * @return The path to the plugin
-     */
-    file_name_t make_plugin_name(const file_name_t& path, const file_name_t& input) const;
-
-private:
-    std::vector<file_name_t> pluginDirs;
-};
-}  // namespace InferenceEngine
index 70bbb27..b3bcf19 100644 (file)
@@ -19,5 +19,4 @@
 #include <ie_icnn_network.hpp>
 #include <ie_icnn_network_stats.hpp>
 #include <ie_plugin_config.hpp>
-#include <ie_plugin_dispatcher.hpp>
 #include <ie_version.hpp>
diff --git a/inference-engine/src/inference_engine/ie_plugin_dispatcher.cpp b/inference-engine/src/inference_engine/ie_plugin_dispatcher.cpp
deleted file mode 100644 (file)
index 505221c..0000000
+++ /dev/null
@@ -1,94 +0,0 @@
-// Copyright (C) 2018-2020 Intel Corporation
-// SPDX-License-Identifier: Apache-2.0
-//
-
-#include "ie_plugin_dispatcher.hpp"
-
-#include <map>
-#include <multi-device/multi_device_config.hpp>
-#include <string>
-#include <vector>
-
-#include "file_utils.h"
-
-using namespace InferenceEngine;
-
-IE_SUPPRESS_DEPRECATED_START
-
-PluginDispatcher::PluginDispatcher(const std::vector<file_name_t>& pp): pluginDirs(pp) {}
-
-InferencePlugin PluginDispatcher::getPluginByName(const file_name_t& name) const {
-    std::stringstream err;
-    for (auto& pluginPath : pluginDirs) {
-        try {
-            return InferencePlugin(InferenceEnginePluginPtr(FileUtils::makeSharedLibraryName(pluginPath, name)));
-        } catch (const std::exception& ex) {
-            err << "cannot load plugin: " << fileNameToString(name) << " from " << fileNameToString(pluginPath) << ": "
-                << ex.what() << ", skipping\n";
-        }
-    }
-    THROW_IE_EXCEPTION << "Plugin " << fileNameToString(name) << " cannot be loaded: " << err.str() << "\n";
-}
-
-namespace {
-
-std::string getPluginName(const std::string& deviceName) {
-    static std::map<std::string, std::string> plugunFromDeviceMap = {
-        {"CPU", "MKLDNNPlugin"},    {"GPU", "clDNNPlugin"},         {"FPGA", "dliaPlugin"},
-        {"MYRIAD", "myriadPlugin"}, {"HDDL", "HDDLPlugin"},         {"GNA", "GNAPlugin"},
-        {"HETERO", "HeteroPlugin"}, {"MULTI", "MultiDevicePlugin"}, {"KMB", "kmbPlugin"}};
-    auto val = plugunFromDeviceMap.find(deviceName);
-
-    if (val == plugunFromDeviceMap.end()) {
-        THROW_IE_EXCEPTION << "Cannot find plugin name for device " << deviceName;
-    }
-
-    return val->second;
-}
-
-}  // namespace
-
-InferencePlugin PluginDispatcher::getPluginByDevice(const std::string& deviceName) const {
-    auto createPluginByDevice = [&](const std::string& deviceName) {
-        std::string pluginName = getPluginName(deviceName);
-
-        std::stringstream err;
-        try {
-            return getPluginByName(stringToFileName(pluginName));
-        } catch (const std::exception& ex) {
-            err << "Tried load plugin : " << pluginName << " for device " << deviceName << ",  error: " << ex.what()
-                << "\n";
-        }
-
-        THROW_IE_EXCEPTION << "Cannot find plugin to use: " << err.str() << "\n";
-    };
-
-    InferenceEnginePluginPtr ptr;
-    // looking for HETERO: if can find, add everything after ':' to the options of hetero plugin
-    if (deviceName.find("HETERO:") == 0) {
-        ptr = createPluginByDevice("HETERO");
-        if (ptr) {
-            InferenceEngine::ResponseDesc response;
-            ptr->SetConfig({{"TARGET_FALLBACK", deviceName.substr(7, deviceName.length() - 7)}}, &response);
-        }
-    } else if (deviceName.find("MULTI:") == 0) {
-        // MULTI found: everything after ':' to the options of the multi-device plugin
-        ptr = createPluginByDevice("MULTI");
-        if (ptr) {
-            InferenceEngine::ResponseDesc response;
-            if (deviceName.length() < 6) THROW_IE_EXCEPTION << "Missing devices priorities for the multi-device case";
-            ptr->SetConfig({{InferenceEngine::MultiDeviceConfigParams::KEY_MULTI_DEVICE_PRIORITIES,
-                             deviceName.substr(6, deviceName.length() - 6)}},
-                           &response);
-        }
-    } else {
-        ptr = createPluginByDevice(deviceName);
-    }
-    return InferencePlugin(ptr);
-}
-
-file_name_t PluginDispatcher::make_plugin_name(const file_name_t& path, const file_name_t& input) const {
-    return FileUtils::makeSharedLibraryName(path, input);
-}
-
-IE_SUPPRESS_DEPRECATED_END
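
The dispatcher removed here also translated the HETERO: and MULTI: prefixes into TARGET_FALLBACK and multi-device priority settings before loading a plugin. InferenceEngine::Core performs that dispatch internally, so composite device names are simply passed through to load_network. A sketch from the Python side, with the device combinations chosen purely for illustration:

```python
from openvino.inference_engine import IECore

ie = IECore()
net = ie.read_network(model="model.xml", weights="model.bin")  # placeholder IR files

# Core parses the prefix itself, deriving the fallback order or the
# multi-device priorities from everything after the colon.
hetero_exec = ie.load_network(network=net, device_name="HETERO:GPU,CPU")
multi_exec = ie.load_network(network=net, device_name="MULTI:CPU,GPU")
```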
index 29a4021..20b0207 100644 (file)
@@ -16,7 +16,6 @@
 #include <string>
 
 #include <cpp/ie_plugin_cpp.hpp>
-#include <ie_plugin_dispatcher.hpp>
 #include <cpp_interfaces/impl/ie_plugin_internal.hpp>
 #include <cpp_interfaces/impl/ie_executable_network_thread_safe_default.hpp>
 #include <cpp_interfaces/impl/ie_infer_async_request_thread_safe_default.hpp>
index bd96884..3f131e8 100644 (file)
@@ -11,7 +11,6 @@
 #include "unit_test_utils/mocks/mock_iinfer_request.hpp"
 #include "unit_test_utils/mocks/mock_iinference_plugin.hpp"
 #include "unit_test_utils/mocks/mock_not_empty_icnn_network.hpp"
-#include "unit_test_utils/mocks/mock_plugin_dispatcher.hpp"
 
 #include "unit_test_utils/mocks/cpp_interfaces/mock_plugin_impl.hpp"
 #include "unit_test_utils/mocks/cpp_interfaces/mock_task_executor.hpp"
diff --git a/inference-engine/tests/ie_test_utils/unit_test_utils/mocks/mock_plugin_dispatcher.hpp b/inference-engine/tests/ie_test_utils/unit_test_utils/mocks/mock_plugin_dispatcher.hpp
deleted file mode 100644 (file)
index 4890a18..0000000
+++ /dev/null
@@ -1,20 +0,0 @@
-// Copyright (C) 2018-2020 Intel Corporation
-// SPDX-License-Identifier: Apache-2.0
-//
-
-#pragma once
-
-#include <gmock/gmock.h>
-
-#include <string>
-#include <vector>
-
-#include "ie_plugin_dispatcher.hpp"
-
-IE_SUPPRESS_DEPRECATED_START
-class MockDispatcher : public InferenceEngine::PluginDispatcher {
-public:
-    explicit MockDispatcher(const std::vector<std::string>& pp) : PluginDispatcher(pp) {}
-    MOCK_CONST_METHOD1(getPluginByName, InferenceEngine::InferencePlugin(const std::string& name));
-};
-IE_SUPPRESS_DEPRECATED_END
\ No newline at end of file
diff --git a/inference-engine/tests/unit/inference_engine/ie_plugin_dispatcher_tests.cpp b/inference-engine/tests/unit/inference_engine/ie_plugin_dispatcher_tests.cpp
deleted file mode 100644 (file)
index 824115f..0000000
+++ /dev/null
@@ -1,145 +0,0 @@
-// Copyright (C) 2018-2020 Intel Corporation
-// SPDX-License-Identifier: Apache-2.0
-//
-
-#if defined _WIN32
-// Avoidance of Windows.h to define min/max.
-#ifndef NOMINMAX
-#define NOMINMAX
-#endif
-#include <windows.h>
-#else
-#include <unistd.h>
-#endif  // _WIN32
-
-#include <gtest/gtest.h>
-#include <gmock/gmock.h>
-
-#include <string>
-#include <fstream>
-
-#include "ie_plugin_dispatcher.hpp"
-
-#include "unit_test_utils/mocks/mock_plugin_dispatcher.hpp"
-
-using namespace InferenceEngine;
-using namespace ::testing;
-
-class PluginDispatcherTests : public ::testing::Test {
-public:
-    const std::string nameExt(const std::string& name) { return name + IE_BUILD_POSTFIX;}
-};
-
-IE_SUPPRESS_DEPRECATED_START
-TEST_F(PluginDispatcherTests, canLoadMockPlugin) {
-    PluginDispatcher dispatcher({ "", "./", "./lib" });
-    ASSERT_NO_THROW(dispatcher.getPluginByName(nameExt("mock_engine")));
-}
-
-#if defined _WIN32
-
-class SetDllDirectoryCaller {
-public:
-    /// Call SetDllDirectory if not called before
-    SetDllDirectoryCaller(const char* path) {
-        // Check if user already called SetDllDirectory with actual directory
-        call_setdlldirectory = (1 >= GetDllDirectory(0, nullptr));
-        if (call_setdlldirectory) {
-            SetDllDirectory(path);
-        }
-    }
-    /// Restore search path order to default
-    ~SetDllDirectoryCaller() {
-        if (call_setdlldirectory)
-            SetDllDirectory(nullptr);
-    }
-
-    bool call_setdlldirectory;
-
-    // Non copyable or movable
-    SetDllDirectoryCaller(const SetDllDirectoryCaller&) = delete;
-    SetDllDirectoryCaller& operator=(const SetDllDirectoryCaller&) = delete;
-};
-
-TEST_F(PluginDispatcherTests, canLoadMockPluginAndRetainSetDllDirectory) {
-    // a test pre-requisite that SetDllDirectory is not configured
-    ASSERT_EQ(1, GetDllDirectory(0, nullptr));
-
-    // try modify DLL search order with SetDllDirectory
-    const char *set_dir = "12345";
-    char get_dir[6] = {0};
-    SetDllDirectoryCaller set_dll_directory_caller(set_dir);
-
-    PluginDispatcher dispatcher({ "", "./", "./lib" });
-    ASSERT_NO_THROW(dispatcher.getPluginByName(nameExt("mock_engine")));
-
-    // verify DLL search order retained
-    ASSERT_EQ(sizeof(get_dir), GetDllDirectory(0, nullptr));
-    ASSERT_NE(0, GetDllDirectory(sizeof(get_dir), get_dir));
-    ASSERT_EQ(std::string(get_dir), std::string(set_dir));
-}
-
-TEST_F(PluginDispatcherTests, canLoadMockPluginAndKeepDefaultDLLSearch) {
-    // a test pre-requisite that SetDllDirectory is not configured
-    ASSERT_EQ(1, GetDllDirectory(0, nullptr));
-
-    PluginDispatcher dispatcher({ "", "./", "./lib" });
-    ASSERT_NO_THROW(dispatcher.getPluginByName(nameExt("mock_engine")));
-
-    // verify DLL search order is still default
-    ASSERT_EQ(1, GetDllDirectory(0, nullptr));
-}
-#endif
-
-TEST_F(PluginDispatcherTests, throwsOnUnknownPlugin) {
-    PluginDispatcher dispatcher({ "./", "./lib" });
-    ASSERT_THROW(dispatcher.getPluginByName(nameExt("unknown_plugin")), InferenceEngine::details::InferenceEngineException);
-}
-
-ACTION(ThrowException) {
-    THROW_IE_EXCEPTION << "Exception!";
-}
-
-#if defined(ENABLE_MKL_DNN)
-TEST_F(PluginDispatcherTests, returnsIfLoadSuccessfull) {
-    MockDispatcher disp({ "./", "./lib" });
-    PluginDispatcher dispatcher({ "", "./", "./lib" });
-    auto ptr = dispatcher.getPluginByName(nameExt("mock_engine"));
-
-    EXPECT_CALL(disp, getPluginByName(_)).WillOnce(Return(ptr));
-    ASSERT_NO_THROW(disp.getPluginByName(nameExt("MKLDNNPlugin")));
-}
-
-#if defined ENABLE_MKL_DNN && !defined _WIN32 && !defined __CYGWIN__ && !defined __APPLE__
-TEST_F(PluginDispatcherTests, libMKLDNNPluginSymbolsExposure) {
-    std::vector<std::string> locations = {"/libMKLDNNPlugin.so", "/lib/libMKLDNNPlugin.so"};
-    char path[PATH_MAX];
-    if (readlink("/proc/self/exe", path, sizeof(path)/sizeof(path[0])) < 0) {
-        return;
-    }
-
-    std::string Path = path;
-    for (auto location : locations) {
-        std::string fullPath = Path.substr(0, Path.find_last_of("/")) + location;
-        if (std::ifstream(fullPath.c_str()).good()) {
-            std::string command = "readelf --dyn-syms ";
-            command += fullPath;
-            command += " | grep UNIQUE | c++filt";
-            char buffer[128];
-            std::string result;
-            std::unique_ptr<FILE, decltype(&pclose)> pipe(popen(command.c_str(), "r"), pclose);
-            if (pipe) {
-                while (fgets(buffer, sizeof(buffer), pipe.get()) != nullptr) {
-                    result += buffer;
-                }
-            }
-            if (result.size())
-                FAIL() << " Visibility is not hidden and there are symbols exposure:\n" << result << std::endl;
-        }
-    }
-}
-#endif
-
-#endif
-
-IE_SUPPRESS_DEPRECATED_END