inference-engine/src/hetero_plugin/hetero_executable_network.h
//
// Copyright (C) 2018-2019 Intel Corporation.
//
// This software and the related documents are Intel copyrighted materials,
// and your use of them is governed by the express license under which they
// were provided to you (End User License Agreement for the Intel(R) Software
// Development Products (Version May 2017)). Unless the License provides
// otherwise, you may not use, modify, copy, publish, distribute, disclose or
// transmit this software or the related documents without Intel's prior
// written permission.
//
// This software and the related documents are provided as is, with no
// express or implied warranties, other than those that are expressly
// stated in the License.
//

/**
 * @brief A header file for the HeteroExecutableNetwork
 * @file hetero_executable_network.h
 */
#pragma once

#include <memory>
#include <string>
#include <vector>
#include <map>
#include <unordered_map>
#include <unordered_set>

#include <ie_common.h>
#include <cpp/ie_plugin_cpp.hpp>
#include <cpp_interfaces/impl/ie_executable_network_thread_safe_default.hpp>

#include "hetero_infer_request.h"
#include "cnn_network_impl.hpp"
#include "hetero_async_infer_request.h"

namespace HeteroPlugin {

/**
 * @class HeteroExecutableNetwork
 * @brief Executable network backed by per-device subnetworks created by the hetero plugin
 */
class HeteroExecutableNetwork : public InferenceEngine::ExecutableNetworkThreadSafeDefault {
public:
    typedef std::shared_ptr<HeteroExecutableNetwork> Ptr;

    /**
     * @brief Constructor
     *
     * @param network        network to be loaded across the devices
     * @param config         plugin configuration key/value pairs
     * @param extensions     extensions to register with the device loaders
     * @param deviceLoaders  map of device loaders shared between executable networks
     * @param listener       error listener forwarded to the device plugins
     */
    HeteroExecutableNetwork(InferenceEngine::ICNNNetwork &network,
                            const std::map<std::string, std::string> &config,
                            const std::vector<InferenceEngine::IExtensionPtr> &extensions,
                            InferenceEngine::MapDeviceLoaders &deviceLoaders,
                            InferenceEngine::IErrorListener *listener);

    virtual ~HeteroExecutableNetwork() = default;

    /**
     * @brief Loads the hetero network: splits it into per-device subgraphs
     * and prepares the intermediate blobs that connect them
     *
     * @param network     network to split and load
     * @param config      plugin configuration key/value pairs
     * @param extensions  extensions to register with the device loaders
     * @param listener    error listener forwarded to the device plugins
     */
    void load(InferenceEngine::ICNNNetwork &network,
              const std::map<std::string, std::string> &config,
              const std::vector<InferenceEngine::IExtensionPtr> &extensions,
              InferenceEngine::IErrorListener *listener);

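    // How the split works, conceptually (a sketch of the intent, not a
    // line-by-line description of the implementation): each layer of the
    // incoming network carries a device affinity; load() groups connected
    // layers with the same affinity into one subgraph per device, clones
    // every subgraph, and hands each clone to the matching device loader.
    // A blob produced at a cut point becomes an output of one subgraph and
    // an input of the next, which is how intermediate results travel
    // between devices at inference time.
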
    InferenceEngine::InferRequestInternal::Ptr CreateInferRequestImpl(InferenceEngine::InputsDataMap networkInputs,
                                                                      InferenceEngine::OutputsDataMap networkOutputs) override;

    void CreateInferRequest(InferenceEngine::IInferRequest::Ptr &asyncRequest) override;

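    // A minimal usage sketch (illustrative only: `network`, `extensions`,
    // and `errorListener` are hypothetical variables, and in the real code
    // path the hetero plugin constructs this class itself while loading a
    // network):
    //
    //   std::map<std::string, std::string> config;  // e.g. device fallback order
    //   InferenceEngine::MapDeviceLoaders loaders;
    //   HeteroExecutableNetwork::Ptr execNetwork =
    //       std::make_shared<HeteroExecutableNetwork>(
    //           network, config, extensions, loaders, errorListener);
    //
    //   InferenceEngine::IInferRequest::Ptr request;
    //   execNetwork->CreateInferRequest(request);
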
private:
    // Descriptor of one loaded subgraph: the device that runs it, the cloned
    // subnetwork, the loader that compiled it, and the names of the blobs it
    // consumes and produces.
    struct NetworkDesc {
        std::string _device;                                          // target device name
        InferenceEngine::details::CNNNetworkImplPtr _clonedNetwork;   // cloned subgraph
        InferenceEngine::IHeteroDeviceLoader::Ptr _deviceLoader;      // loader that compiled the subgraph
        InferenceEngine::ExecutableNetwork::Ptr network;              // compiled subnetwork
        std::unordered_set<std::string> _oNames;                      // names of the subgraph's outputs
        std::unordered_set<std::string> _iNames;                      // names of the subgraph's inputs
    };
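
    // Example of how the descriptors chain (hypothetical two-device split):
    // for a network divided into a GPU subgraph followed by a CPU subgraph,
    // `networks` holds two entries, and the names of the intermediate blobs
    // appear both in networks[0]._oNames and in networks[1]._iNames.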
    std::vector<NetworkDesc> networks;                 // one entry per device subgraph

    InferenceEngine::MapDeviceLoaders &_deviceLoaders; // loader map owned by the caller
};

}  // namespace HeteroPlugin