return MEDIA_VISION_ERROR_NONE;
}
- int Inference::ConfigureTargetTypes(const int targetType)
+ int Inference::ConfigureTargetTypes(int targetType, bool isNewVersion)
{
- // Check if given target types are valid or not.
- if (MV_INFERENCE_TARGET_NONE >= targetType ||
- MV_INFERENCE_TARGET_MAX <= targetType) {
- LOGE("Invalid target device.");
- return MEDIA_VISION_ERROR_INVALID_PARAMETER;
- }
+ if (isNewVersion) {
+ if (MV_INFERENCE_TARGET_DEVICE_NONE >= targetType ||
+ MV_INFERENCE_TARGET_DEVICE_MAX <= targetType) {
+ LOGE("Invalid target device.");
+ return MEDIA_VISION_ERROR_INVALID_PARAMETER;
+ }
+ } else {
+ if (MV_INFERENCE_TARGET_NONE >= targetType ||
+ MV_INFERENCE_TARGET_MAX <= targetType) {
+ LOGE("Invalid target device.");
+ return MEDIA_VISION_ERROR_INVALID_PARAMETER;
+ }
- LOGI("Before converting target types : %d", targetType);
+ LOGI("Before converting target types : %d", targetType);
- unsigned int new_type = MV_INFERENCE_TARGET_DEVICE_NONE;
+ // Convert old type to new one.
+ switch (targetType) {
+ case MV_INFERENCE_TARGET_CPU:
+ targetType = MV_INFERENCE_TARGET_DEVICE_CPU;
+ break;
+ case MV_INFERENCE_TARGET_GPU:
- // Convert old type to new one.
- switch (targetType) {
- case MV_INFERENCE_TARGET_CPU:
- if (!(mBackendCapacity.supported_accel_devices & INFERENCE_TARGET_CPU)) {
- LOGE("Backend doesn't support CPU acceleration.");
- return MEDIA_VISION_ERROR_NOT_SUPPORTED;
- }
- new_type = MV_INFERENCE_TARGET_DEVICE_CPU;
- break;
- case MV_INFERENCE_TARGET_GPU:
- if (!(mBackendCapacity.supported_accel_devices & INFERENCE_TARGET_GPU)) {
- LOGE("Backend doesn't support GPU acceleration.");
- return MEDIA_VISION_ERROR_NOT_SUPPORTED;
- }
- new_type = MV_INFERENCE_TARGET_DEVICE_GPU;
- break;
- case MV_INFERENCE_TARGET_CUSTOM:
- if (!(mBackendCapacity.supported_accel_devices & INFERENCE_TARGET_CUSTOM)) {
- LOGE("Backend doesn't support custom device acceleration.");
- return MEDIA_VISION_ERROR_NOT_SUPPORTED;
+ targetType = MV_INFERENCE_TARGET_DEVICE_GPU;
+ break;
+ case MV_INFERENCE_TARGET_CUSTOM:
+ targetType = MV_INFERENCE_TARGET_DEVICE_CUSTOM;
+ break;
}
- new_type = MV_INFERENCE_TARGET_DEVICE_CUSTOM;
- break;
- }
- LOGI("After converting target types : %d", new_type);
+ LOGI("After converting target types : %d", targetType);
+ }
- mConfig.mTargetTypes = new_type;
+ mConfig.mTargetTypes = targetType;
return MEDIA_VISION_ERROR_NONE;
}
return MEDIA_VISION_ERROR_NONE;
}
+ // @brief Check whether the bound backend supports the requested target device(s).
+ //
+ // Tests @a targetDevices against the capability mask the backend reported
+ // (mBackendCapacity.supported_accel_devices). Per the note at the call site,
+ // mBackendCapacity is valid only after the backend engine has been bound,
+ // so this must be called after Bind.
+ //
+ // @param targetDevices Target device flag value(s) to verify against the
+ //                      backend capability bitmask (presumably
+ //                      MV_INFERENCE_TARGET_DEVICE_* flags — TODO confirm
+ //                      that callers always pass the new-style values).
+ // @return MEDIA_VISION_ERROR_NONE when supported,
+ //         MEDIA_VISION_ERROR_NOT_SUPPORTED otherwise.
+ //
+ // NOTE(review): with multiple device bits set, the bitwise AND makes this
+ // succeed when ANY one requested bit is supported, not when all are —
+ // confirm that is the intended contract.
+ int Inference::CheckSupportedTargetDevice(const int targetDevices)
+ {
+ if (!(mBackendCapacity.supported_accel_devices & targetDevices)) {
+ LOGE("Backend doesn't support a given %d device acceleration.", targetDevices);
+ return MEDIA_VISION_ERROR_NOT_SUPPORTED;
+ }
+
+ return MEDIA_VISION_ERROR_NONE;
+ }
+
void Inference::ConfigureOutput(const int maxOutputNumbers)
{
mConfig.mMaxOutputNumbers = std::max(
if (ret != MEDIA_VISION_ERROR_NONE)
goto _ERROR_;
+ // When is_new_version is false, ConfigureTargetTypes converts the legacy target type to the new device type before storing it.
+ if (pInfer->ConfigureTargetTypes(targetTypes, is_new_version) !=
+ MEDIA_VISION_ERROR_NONE) {
+ LOGE("Tried to configure invalid target types.");
+ goto _ERROR_;
+ }
+
// Create a inference-engine-common class object and load its corresponding library.
// Ps. Inference engine gets a capability from a given backend by Bind call
// so access to mBackendCapacity should be done after Bind.
LOGE("Fail to bind a backend engine.");
}
- if (is_new_version) {
- // Use new type.
- if (pInfer->ConfigureTargetDevices(targetTypes) !=
- MEDIA_VISION_ERROR_NONE) {
- LOGE("Tried to configure invalid target types.");
- goto _ERROR_;
- }
- } else {
- // Convert old type to new one and then use it.
- if (pInfer->ConfigureTargetTypes(targetTypes) !=
- MEDIA_VISION_ERROR_NONE) {
- LOGE("Tried to configure invalid target types.");
- goto _ERROR_;
- }
+ if (pInfer->CheckSupportedTargetDevice(targetTypes) !=
+ MEDIA_VISION_ERROR_NONE) {
+ LOGE("Tried to configure invalid target types.");
+ goto _ERROR_;
}
LOGI("LEAVE");