/**
 * @brief Default constructor.
 *
 * Value-initializes all members via the member-initializer list.
 * NOTE(review): the original text contained a stray diff marker ('+') on the
 * mBackendCapacity initializer and a `return MEDIA_VISION_ERROR_INVALID_OPERATION;`
 * statement inside the constructor body — a constructor cannot return a value,
 * so that statement (residue from another function's patch hunk) is removed.
 */
Inference::Inference() :
	mCanRun(),
	mConfig(),
	mBackendCapacity(),
	mSupportedInferenceBackend()
{
	LOGI("ENTER");
}
+ // Get capacity information from a backend.
+ ret = mBackend->GetBackendCapacity(&mBackendCapacity);
+ if (ret != MEDIA_VISION_ERROR_NONE) {
+ LOGE("Fail to get backend capacity.");
+ return ret;
+ }
+
LOGI("LEAVE");
return MEDIA_VISION_ERROR_NONE;
mBackend->SetOutputTensorParamNodes(mConfig.mOutputNodeNames);
+ // Check if backend supports a given target device/devices or not.
+ if (mConfig.mTargetTypes & MV_INFERENCE_TARGET_CPU) {
+ if (!(mBackendCapacity.supported_accel_devices & INFERENCE_TARGET_CPU)) {
+ LOGE("Backend doesn't support CPU device as an accelerator.");
+ return MEDIA_VISION_ERROR_INVALID_PARAMETER;
+ }
+ }
+
+ if (mConfig.mTargetTypes & MV_INFERENCE_TARGET_GPU) {
+ if (!(mBackendCapacity.supported_accel_devices & INFERENCE_TARGET_GPU)) {
+ LOGE("Backend doesn't support CPU device as an accelerator.");
+ return MEDIA_VISION_ERROR_INVALID_PARAMETER;
+ }
+ }
+
+ if (mConfig.mTargetTypes & MV_INFERENCE_TARGET_CUSTOM) {
+ if (!(mBackendCapacity.supported_accel_devices & INFERENCE_TARGET_CUSTOM)) {
+ LOGE("Backend doesn't support CPU device as an accelerator.");
+ return MEDIA_VISION_ERROR_INVALID_PARAMETER;
+ }
+ }
+
mBackend->SetTargetDevices(ConvertTargetTypes(mConfig.mTargetTypes));
LOGI("LEAVE");