// Copyright (C) 2018 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//

#pragma once

#include <memory>
#include <string>
#include <vector>
#include <map>
#include <unordered_map>
#include <unordered_set>

#include <ie_common.h>
#include <cpp/ie_plugin_cpp.hpp>
#include <cpp_interfaces/impl/ie_executable_network_thread_safe_default.hpp>

#include "hetero_infer_request.h"
#include "cnn_network_impl.hpp"
#include "hetero_async_infer_request.h"

namespace HeteroPlugin {

/**
 * @class HeteroExecutableNetwork
 * @brief Executable network of the HETERO plugin
 */
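// A rough usage sketch (illustrative only): `network`, `config`, `extensions`,
// `loaders`, and `listener` are assumed to be prepared by the caller, as the
// HETERO plugin would when building this executable network.
//
//   HeteroExecutableNetwork::Ptr exec = std::make_shared<HeteroExecutableNetwork>(
//       network, config, extensions, loaders, listener);
//   InferenceEngine::IInferRequest::Ptr request;
//   exec->CreateInferRequest(request);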
class HeteroExecutableNetwork : public InferenceEngine::ExecutableNetworkThreadSafeDefault {
public:
    typedef std::shared_ptr<HeteroExecutableNetwork> Ptr;

    /**
     * @brief Constructor
     *
     * @param network       network to load
     * @param config        plugin configuration as key/value pairs
     * @param extensions    extensions available during loading
     * @param deviceLoaders map of per-device loaders
     * @param listener      error listener
     */
    HeteroExecutableNetwork(InferenceEngine::ICNNNetwork &network,
                            const std::map<std::string, std::string> &config,
                            const std::vector<InferenceEngine::IExtensionPtr> &extensions,
                            InferenceEngine::MapDeviceLoaders &deviceLoaders,
                            InferenceEngine::IErrorListener *listener);

    virtual ~HeteroExecutableNetwork() = default;

    /**
     * @brief Loads the heterogeneous network: splits it into per-device subgraphs
     *        and prepares the intermediate blobs that connect them
     *
     * @param network    network to load
     * @param config     plugin configuration as key/value pairs
     * @param extensions extensions available during loading
     * @param listener   error listener
     */
    void load(InferenceEngine::ICNNNetwork &network,
              const std::map<std::string, std::string> &config,
              const std::vector<InferenceEngine::IExtensionPtr> &extensions,
              InferenceEngine::IErrorListener *listener);

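    /**
     * @brief Creates a synchronous infer request over the loaded subnetworks
     *        (assumed to be a HeteroInferRequest; see hetero_infer_request.h)
     */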
    InferenceEngine::InferRequestInternal::Ptr CreateInferRequestImpl(InferenceEngine::InputsDataMap networkInputs,
                                                                      InferenceEngine::OutputsDataMap networkOutputs) override;

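    /**
     * @brief Creates an asynchronous infer request wrapping the synchronous one
     *        (assumed to be a HeteroAsyncInferRequest; see hetero_async_infer_request.h)
     */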
    void CreateInferRequest(InferenceEngine::IInferRequest::Ptr &asyncRequest) override;

private:
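    /**
     * @brief Describes one device-specific subnetwork produced by load():
     *        the target device name, the cloned subgraph, the loader used to
     *        compile it, the resulting executable network, and the names of
     *        the subgraph's output (_oNames) and input (_iNames) blobs
     */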
    struct NetworkDesc {
        std::string _device;
        InferenceEngine::details::CNNNetworkImplPtr _clonedNetwork;
        InferenceEngine::IHeteroDeviceLoader::Ptr _deviceLoader;
        InferenceEngine::ExecutableNetwork::Ptr network;
        std::unordered_set<std::string> _oNames;
        std::unordered_set<std::string> _iNames;
    };
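    // Subnetworks produced by load(), one entry per device-specific subgraph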
    std::vector<NetworkDesc> networks;

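    // Device loaders shared with the plugin (passed by reference in the constructor)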
    InferenceEngine::MapDeviceLoaders &_deviceLoaders;
};

}  // namespace HeteroPlugin