return precision;
}
+InferenceEngine::Precision InfEngineBackendNet::getPrecision() const noexcept
+{
+ return precision;
+}
+
// Assume that outputs of the network are unconnected blobs.
void InfEngineBackendNet::getOutputsInfo(InferenceEngine::OutputsDataMap &outputs_) noexcept
{
- outputs_ = outputs;
+ const_cast<const InfEngineBackendNet*>(this)->getOutputsInfo(outputs_);
}
void InfEngineBackendNet::getOutputsInfo(InferenceEngine::OutputsDataMap &outputs_) const noexcept
{
// Returns input references that aren't connected to internal outputs.
void InfEngineBackendNet::getInputsInfo(InferenceEngine::InputsDataMap &inputs_) noexcept
{
- inputs_ = inputs;
+ const_cast<const InfEngineBackendNet*>(this)->getInputsInfo(inputs_);
}
// Returns input references that aren't connected to internal outputs.
InferenceEngine::InputInfo::Ptr InfEngineBackendNet::getInput(const std::string &inputName) noexcept
{
- getInputsInfo(inputs);
+ return const_cast<const InfEngineBackendNet*>(this)->getInput(inputName);
+}
+
+InferenceEngine::InputInfo::Ptr InfEngineBackendNet::getInput(const std::string &inputName) const noexcept
+{
const auto& it = inputs.find(inputName);
CV_Assert(it != inputs.end());
return it->second;
{
}
+const std::string& InfEngineBackendNet::getName() const noexcept
+{
+ return name;
+}
+
size_t InfEngineBackendNet::layerCount() noexcept
{
+ return const_cast<const InfEngineBackendNet*>(this)->layerCount();
+}
+
+size_t InfEngineBackendNet::layerCount() const noexcept
+{
return layers.size();
}
InfEngineBackendNet::getLayerByName(const char *layerName, InferenceEngine::CNNLayerPtr &out,
InferenceEngine::ResponseDesc *resp) noexcept
{
+ return const_cast<const InfEngineBackendNet*>(this)->getLayerByName(layerName, out, resp);
+}
+
+InferenceEngine::StatusCode InfEngineBackendNet::getLayerByName(const char *layerName,
+ InferenceEngine::CNNLayerPtr &out,
+ InferenceEngine::ResponseDesc *resp) const noexcept
+{
for (auto& l : layers)
{
if (l->name == layerName)
return targetDevice;
}
-InferenceEngine::StatusCode InfEngineBackendNet::setBatchSize(const size_t size) noexcept
+InferenceEngine::TargetDevice InfEngineBackendNet::getTargetDevice() const noexcept
+{
+ return targetDevice;
+}
+
+InferenceEngine::StatusCode InfEngineBackendNet::setBatchSize(const size_t) noexcept
{
CV_Error(Error::StsNotImplemented, "");
return InferenceEngine::StatusCode::OK;
switch (targetId)
{
case DNN_TARGET_CPU: setTargetDevice(InferenceEngine::TargetDevice::eCPU); break;
- case DNN_TARGET_OPENCL_FP16: setPrecision(InferenceEngine::Precision::FP16); // Fallback to the next.
+ case DNN_TARGET_OPENCL_FP16:
+ setPrecision(InferenceEngine::Precision::FP16);
+ /* Falls through. */
case DNN_TARGET_OPENCL: setTargetDevice(InferenceEngine::TargetDevice::eGPU); break;
case DNN_TARGET_MYRIAD:
{
#ifndef __OPENCV_DNN_OP_INF_ENGINE_HPP__
#define __OPENCV_DNN_OP_INF_ENGINE_HPP__
+#include "opencv2/core/cvdef.h"
+
#ifdef HAVE_INF_ENGINE
#if defined(__GNUC__) && __GNUC__ >= 5
//#pragma GCC diagnostic push
void setPrecision(InferenceEngine::Precision p) noexcept;
- virtual InferenceEngine::Precision getPrecision() noexcept CV_OVERRIDE;
+ virtual InferenceEngine::Precision getPrecision() noexcept;
+
+ virtual InferenceEngine::Precision getPrecision() const noexcept;
virtual void getOutputsInfo(InferenceEngine::OutputsDataMap &out) noexcept /*CV_OVERRIDE*/;
virtual void getInputsInfo(InferenceEngine::InputsDataMap &inputs) const noexcept /*CV_OVERRIDE*/;
- virtual InferenceEngine::InputInfo::Ptr getInput(const std::string &inputName) noexcept CV_OVERRIDE;
+ virtual InferenceEngine::InputInfo::Ptr getInput(const std::string &inputName) noexcept;
+
+ virtual InferenceEngine::InputInfo::Ptr getInput(const std::string &inputName) const noexcept;
virtual void getName(char *pName, size_t len) noexcept;
virtual void getName(char *pName, size_t len) const noexcept;
- virtual size_t layerCount() noexcept CV_OVERRIDE;
+ virtual const std::string& getName() const noexcept;
+
+ virtual size_t layerCount() noexcept;
+
+ virtual size_t layerCount() const noexcept;
virtual InferenceEngine::DataPtr& getData(const char *dname) noexcept CV_OVERRIDE;
virtual InferenceEngine::StatusCode addOutput(const std::string &layerName,
size_t outputIndex = 0,
- InferenceEngine::ResponseDesc *resp = nullptr) noexcept CV_OVERRIDE;
+ InferenceEngine::ResponseDesc *resp = nullptr) noexcept;
virtual InferenceEngine::StatusCode getLayerByName(const char *layerName,
InferenceEngine::CNNLayerPtr &out,
- InferenceEngine::ResponseDesc *resp) noexcept CV_OVERRIDE;
+ InferenceEngine::ResponseDesc *resp) noexcept;
+
+ virtual InferenceEngine::StatusCode getLayerByName(const char *layerName,
+ InferenceEngine::CNNLayerPtr &out,
+ InferenceEngine::ResponseDesc *resp) const noexcept;
virtual void setTargetDevice(InferenceEngine::TargetDevice device) noexcept CV_OVERRIDE;
- virtual InferenceEngine::TargetDevice getTargetDevice() noexcept CV_OVERRIDE;
+ virtual InferenceEngine::TargetDevice getTargetDevice() noexcept;
+
+ virtual InferenceEngine::TargetDevice getTargetDevice() const noexcept;
virtual InferenceEngine::StatusCode setBatchSize(const size_t size) noexcept CV_OVERRIDE;
InferenceEngine::ExecutableNetwork netExec;
InferenceEngine::InferRequest infRequest;
+ std::string name;
+
void initPlugin(InferenceEngine::ICNNNetwork& net);
};
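
The recurring pattern in this patch is to keep the non-const ICNNNetwork-style entry points but have each of them forward to a new const overload via const_cast, so the real logic exists once and both signatures stay available regardless of which one a given Inference Engine release declares (presumably also why CV_OVERRIDE is dropped from the affected declarations: the matching base signature is not guaranteed to exist in every release). Below is a minimal, self-contained sketch of that delegation idiom; ToyNet, its members, and main() are illustrative placeholders, not the OpenCV or Inference Engine types.

#include <cstddef>
#include <iostream>
#include <map>
#include <string>

class ToyNet
{
public:
    std::size_t layerCount() noexcept
    {
        // Non-const signature (older interface): forward to the const implementation.
        return const_cast<const ToyNet*>(this)->layerCount();
    }

    std::size_t layerCount() const noexcept
    {
        // Const signature (newer interface): the single real implementation.
        return layers.size();
    }

    void getInputsInfo(std::map<std::string, int>& inputs_) noexcept
    {
        // Adding constness with const_cast is always well defined.
        const_cast<const ToyNet*>(this)->getInputsInfo(inputs_);
    }

    void getInputsInfo(std::map<std::string, int>& inputs_) const noexcept
    {
        inputs_ = inputs;
    }

private:
    std::map<std::string, int> inputs{ { "data", 0 } };
    std::map<std::string, int> layers{ { "conv1", 0 }, { "relu1", 1 } };
};

int main()
{
    ToyNet net;
    const ToyNet& cnet = net;

    std::map<std::string, int> in;
    net.getInputsInfo(in);  // non-const call path, ends up in the const body

    std::cout << in.size() << " input(s), "
              << net.layerCount() << " == " << cnet.layerCount()
              << " layer(s)" << std::endl;
    return 0;
}

Since the cast only adds constness it cannot invoke undefined behavior; the alternative, duplicating every method body in both overloads, is exactly what the delegating bodies in this patch remove.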