1 // Copyright (C) 2018 Intel Corporation
2 // SPDX-License-Identifier: Apache-2.0
11 #include <unordered_map>
12 #include <unordered_set>
14 #include <ie_common.h>
15 #include <cpp/ie_plugin_cpp.hpp>
16 #include <cpp_interfaces/impl/ie_executable_network_thread_safe_default.hpp>
18 #include "hetero_infer_request.h"
19 #include "cnn_network_impl.hpp"
20 #include "hetero_async_infer_request.h"
22 namespace HeteroPlugin {
 * @class HeteroExecutableNetwork
 * @brief Executable network created by the HETERO plugin: the network is split into per-device subgraphs
class HeteroExecutableNetwork : public InferenceEngine::ExecutableNetworkThreadSafeDefault {
    typedef std::shared_ptr<HeteroExecutableNetwork> Ptr;

    /**
     * @brief Builds a hetero executable network from the given network.
     * NOTE(review): presumably delegates to load() — confirm in the implementation file.
     * @param network       Network to be split across devices and loaded
     * @param config        Plugin configuration (key/value options)
     * @param extensions    Extensions made available to the per-device loaders
     * @param deviceLoaders Map of per-device loaders; kept by reference (see _deviceLoaders below)
     * @param listener      Error listener passed down to device loaders — nullability not established here, verify
     */
    HeteroExecutableNetwork(InferenceEngine::ICNNNetwork &network,
                            const std::map<std::string, std::string> &config,
                            const std::vector<InferenceEngine::IExtensionPtr> &extensions,
                            InferenceEngine::MapDeviceLoaders &deviceLoaders,
                            InferenceEngine::IErrorListener *listener);

    virtual ~HeteroExecutableNetwork() = default;

    /**
     * @brief Implements the loading of a hetero network: performs the split into
     *        per-device subgraphs and prepares the intermediate blobs between them.
     * @param network    Network to split and load
     * @param config     Plugin configuration (key/value options)
     * @param extensions Extensions made available to the per-device loaders
     * @param listener   Error listener forwarded to device loaders
     */
    void load(InferenceEngine::ICNNNetwork &network,
              const std::map<std::string, std::string> &config,
              const std::vector<InferenceEngine::IExtensionPtr> &extensions,
              InferenceEngine::IErrorListener *listener);

    // Creates the internal synchronous infer request for this network
    // (base-class hook; presumably returns a HeteroInferRequest — see hetero_infer_request.h).
    InferenceEngine::InferRequestInternal::Ptr CreateInferRequestImpl(InferenceEngine::InputsDataMap networkInputs,
                                                                      InferenceEngine::OutputsDataMap networkOutputs) override;

    // Creates the asynchronous request exposed to the caller
    // (presumably wraps the sync request in a HeteroAsyncInferRequest — confirm in the .cpp).
    void CreateInferRequest(InferenceEngine::IInferRequest::Ptr &asyncRequest) override;

    // Local clone of the input network used for splitting.
    InferenceEngine::details::CNNNetworkImplPtr _clonedNetwork;
    // Loader for a single device — relationship to the per-network loaders below is not visible here; verify.
    InferenceEngine::IHeteroDeviceLoader::Ptr _deviceLoader;
    // Underlying executable network handle.
    InferenceEngine::ExecutableNetwork::Ptr network;
    std::unordered_set<std::string> _oNames;  // presumably output blob names — verify against load()
    std::unordered_set<std::string> _iNames;  // presumably input blob names — verify against load()
    // One descriptor per device subgraph (NetworkDesc is declared elsewhere in the plugin).
    std::vector<NetworkDesc> networks;

    // Non-owning reference to the loader map passed into the constructor;
    // the referenced map must outlive this object.
    InferenceEngine::MapDeviceLoaders &_deviceLoaders;
74 } // namespace HeteroPlugin