std::map<std::string, std::vector<std::size_t>> reshape_table;
std::unordered_set<std::string> layer_names_to_reshape;
+ // NB: Number of asynchronous infer requests
size_t nireq;
+
+ // NB: An optional config to set up RemoteContext for IE
+ cv::util::any context_config;
};
} // namespace detail
, {}
, {}
, {}
- , 1u} {
+ , 1u
+ , {}} {
};
/** @overload
, {}
, {}
, {}
- , 1u} {
+ , 1u
+ , {}} {
};
/** @brief Specifies sequence of network input layer names for inference.
return *this;
}
+ /** @brief Specifies configuration for RemoteContext in InferenceEngine.
+
+ When RemoteContext is configured, the backend imports networks using this context.
+ It also expects the incoming cv::MediaFrame objects to be actually remote, so that
+ their blobs can be accessed via the context.
+
+ @param ctx_cfg cv::util::any value which holds an InferenceEngine::ParamMap.
+ @return reference to this parameter structure.
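+
+ Example (a sketch, not part of this API: the remote context keys below come
+ from the GPU plugin's <gpu/gpu_params.hpp>, `Net` is a network type declared
+ with G_API_NET, `xml_path`/`bin_path` are placeholder model paths, and
+ `va_dpy` stands for an already created VADisplay):
+ @code
+ InferenceEngine::ParamMap ctx_params{
+     {GPU_PARAM_KEY(CONTEXT_TYPE), GPU_PARAM_VALUE(VA_SHARED)},
+     {GPU_PARAM_KEY(VA_DEVICE),    va_dpy}
+ };
+ auto pp = cv::gapi::ie::Params<Net>{xml_path, bin_path, "GPU"}
+               .cfgContextParams(cv::util::any{ctx_params});
+ @endcode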
+ */
+ Params& cfgContextParams(const cv::util::any& ctx_cfg) {
+ desc.context_config = ctx_cfg;
+ return *this;
+ }
+
+ /** @overload
+ This overload takes the config by rvalue and moves it into the parameter structure.
+
+ @param ctx_cfg cv::util::any value which holds an InferenceEngine::ParamMap.
+ @return reference to this parameter structure.
+ */
+ Params& cfgContextParams(cv::util::any&& ctx_cfg) {
+ desc.context_config = std::move(ctx_cfg);
+ return *this;
+ }
+
/** @brief Specifies number of asynchronous inference requests.
@param nireq Number of asynchronous inference requests.
const std::string &model,
const std::string &weights,
const std::string &device)
- : desc{ model, weights, device, {}, {}, {}, 0u, 0u, detail::ParamDesc::Kind::Load, true, {}, {}, {}, 1u}, m_tag(tag) {
+ : desc{ model, weights, device, {}, {}, {}, 0u, 0u,
+ detail::ParamDesc::Kind::Load, true, {}, {}, {}, 1u,
+ {}},
+ m_tag(tag) {
};
/** @overload
Params(const std::string &tag,
const std::string &model,
const std::string &device)
- : desc{ model, {}, device, {}, {}, {}, 0u, 0u, detail::ParamDesc::Kind::Import, true, {}, {}, {}, 1u}, m_tag(tag) {
+ : desc{ model, {}, device, {}, {}, {}, 0u, 0u,
+ detail::ParamDesc::Kind::Import, true, {}, {}, {}, 1u,
+ {}},
+ m_tag(tag) {
};
/** @see ie::Params::pluginConfig. */
IE::ExecutableNetwork this_network;
cv::gimpl::ie::wrap::Plugin this_plugin;
+ InferenceEngine::RemoteContext::Ptr rctx = nullptr;
+
explicit IEUnit(const cv::gapi::ie::detail::ParamDesc &pp)
: params(pp) {
+ // NB: The pointer form of any_cast returns nullptr instead of throwing
+ // when the config is empty or holds a different type.
+ InferenceEngine::ParamMap* ctx_params =
+ cv::util::any_cast<InferenceEngine::ParamMap>(&params.context_config);
+ if (ctx_params != nullptr) {
+ auto ie_core = cv::gimpl::ie::wrap::getCore();
+ rctx = ie_core.CreateContext(params.device_id, *ctx_params);
+ }
+
if (params.kind == cv::gapi::ie::detail::ParamDesc::Kind::Load) {
net = cv::gimpl::ie::wrap::readNetwork(params);
inputs = net.getInputsInfo();
} else if (params.kind == cv::gapi::ie::detail::ParamDesc::Kind::Import) {
this_plugin = cv::gimpl::ie::wrap::getPlugin(params);
this_plugin.SetConfig(params.config);
- this_network = cv::gimpl::ie::wrap::importNetwork(this_plugin, params);
+ this_network = cv::gimpl::ie::wrap::importNetwork(this_plugin, params, rctx);
// FIXME: ICNNetwork returns InputsDataMap/OutputsDataMap,
// but ExecutableNetwork returns ConstInputsDataMap/ConstOutputsDataMap
inputs = cv::gimpl::ie::wrap::toInputsDataMap(this_network.GetInputsInfo());
// for loadNetwork they can be obtained by using readNetwork
non_const_this->this_plugin = cv::gimpl::ie::wrap::getPlugin(params);
non_const_this->this_plugin.SetConfig(params.config);
- non_const_this->this_network = cv::gimpl::ie::wrap::loadNetwork(non_const_this->this_plugin, net, params);
+ non_const_this->this_network = cv::gimpl::ie::wrap::loadNetwork(non_const_this->this_plugin,
+ net, params, rctx);
}
return {params, this_plugin, this_network};
, IECallable
>;
+inline IE::Blob::Ptr extractRemoteBlob(IECallContext& ctx, std::size_t i) {
+ GAPI_Assert(ctx.inShape(i) == cv::GShape::GFRAME &&
+ "Remote blob is supported for MediaFrame only");
+
+ cv::util::any any_blob_params = ctx.inFrame(i).blobParams();
+ auto ie_core = cv::gimpl::ie::wrap::getCore();
+
+ using ParamType = std::pair<InferenceEngine::TensorDesc,
+ InferenceEngine::ParamMap>;
+
+ ParamType* blob_params = cv::util::any_cast<ParamType>(&any_blob_params);
+ if (blob_params == nullptr) {
+ GAPI_Assert(false && "Incorrect type of blobParams: "
+ "expected std::pair<InferenceEngine::TensorDesc, "
+ "InferenceEngine::ParamMap>");
+ }
+
+ return ctx.uu.rctx->CreateBlob(blob_params->first,
+ blob_params->second);
+}
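+
+// A producer-side sketch of what extractRemoteBlob() expects from a remote
+// frame (hedged: the adapter type and its members are hypothetical, and the
+// sharing keys are assumed to come from the GPU plugin's <gpu/gpu_params.hpp>):
+//
+//     cv::util::any VAFrameAdapter::blobParams() const {
+//         InferenceEngine::TensorDesc desc(InferenceEngine::Precision::U8,
+//                                          {1, 3, m_height, m_width},
+//                                          InferenceEngine::Layout::NCHW);
+//         InferenceEngine::ParamMap params{
+//             {GPU_PARAM_KEY(SHARED_MEM_TYPE),   GPU_PARAM_VALUE(VA_SURFACE)},
+//             {GPU_PARAM_KEY(DEV_OBJECT_HANDLE), m_va_surface}
+//         };
+//         return std::make_pair(desc, params);
+//     }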
+
inline IE::Blob::Ptr extractBlob(IECallContext& ctx, std::size_t i) {
+ if (ctx.uu.rctx != nullptr) {
+ return extractRemoteBlob(ctx, i);
+ }
+
switch (ctx.inShape(i)) {
case cv::GShape::GFRAME: {
const auto& frame = ctx.inFrame(i);
}
IE::Blob::Ptr this_blob = extractBlob(*ctx, 1);
+
std::vector<std::vector<int>> cached_dims(ctx->uu.params.num_out);
for (auto i : ade::util::iota(ctx->uu.params.num_out)) {
const IE::DataPtr& ie_out = ctx->uu.outputs.at(ctx->uu.params.output_names[i]);
#include <vector>
#include <string>
+#include <fstream>
#include "opencv2/gapi/infer/ie.hpp"
GAPI_EXPORTS IE::Core getPlugin(const GIEParam& params);
GAPI_EXPORTS inline IE::ExecutableNetwork loadNetwork( IE::Core& core,
const IE::CNNNetwork& net,
- const GIEParam& params) {
- return core.LoadNetwork(net, params.device_id);
+ const GIEParam& params,
+ IE::RemoteContext::Ptr rctx = nullptr) {
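+ // NB: With a RemoteContext the target device is defined by the context
+ // itself, so params.device_id only matters on the fallback path.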
+ if (rctx != nullptr) {
+ return core.LoadNetwork(net, rctx);
+ } else {
+ return core.LoadNetwork(net, params.device_id);
+ }
}
GAPI_EXPORTS inline IE::ExecutableNetwork importNetwork( IE::Core& core,
- const GIEParam& param) {
- return core.ImportNetwork(param.model_path, param.device_id, {});
+ const GIEParam& params,
+ IE::RemoteContext::Ptr rctx = nullptr) {
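+ // NB: Core::ImportNetwork() has no file-path overload taking a RemoteContext,
+ // so the pre-compiled blob is read here and passed in as a stream.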
+ if (rctx != nullptr) {
+ std::filebuf blobFile;
+ if (!blobFile.open(params.model_path, std::ios::in | std::ios::binary)) {
+ throw std::runtime_error("Could not open file: " + params.model_path);
+ }
+ std::istream graphBlob(&blobFile);
+ return core.ImportNetwork(graphBlob, rctx);
+ } else {
+ return core.ImportNetwork(params.model_path, params.device_id, {});
+ }
}
#endif // INF_ENGINE_RELEASE < 2019020000
}}}}