From: Inki Dae
Date: Thu, 11 Jun 2020 07:16:42 +0000 (+0900)
Subject: Set supported_device_types according to MLAPI backend type
X-Git-Tag: submit/tizen/20200626.050805~4
X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=f911018186326850d1f3c883e3f0d4a63ec25cbe;p=platform%2Fcore%2Fmultimedia%2Finference-engine-mlapi.git

Set supported_device_types according to MLAPI backend type

NNFW supports only CPU and GPU accelerated NN runtime, so report
INFERENCE_TARGET_CPU and INFERENCE_TARGET_GPU as the supported
acceleration devices when the NNFW tensor filter plugin of NNStreamer
is used, and keep INFERENCE_TARGET_CUSTOM for the MLAPI backend type.

Change-Id: I3ed4ae5018b984c812f8bad69eebbfdae69dd030
Signed-off-by: Inki Dae
---

diff --git a/src/inference_engine_mlapi.cpp b/src/inference_engine_mlapi.cpp
index ab36c9b..d6d4706 100644
--- a/src/inference_engine_mlapi.cpp
+++ b/src/inference_engine_mlapi.cpp
@@ -366,7 +366,12 @@ namespace MLAPIImpl
 	}
 
 	// TODO. flag supported accel device types according to a given ML Single API of nnstreamer backend.
-	capacity->supported_accel_devices = INFERENCE_TARGET_CUSTOM;
+	if (mPluginType == INFERENCE_BACKEND_MLAPI) {
+		capacity->supported_accel_devices = INFERENCE_TARGET_CUSTOM;
+	} else {
+		capacity->supported_accel_devices = INFERENCE_TARGET_GPU |
+				INFERENCE_TARGET_CPU;
+	}
 
 	LOGI("LEAVE");
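
For context, below is a minimal sketch of how a consumer of this capacity
information might pick a target device from the flags the backend now
reports. It is illustrative only: of the names used, only the field
supported_accel_devices and the INFERENCE_TARGET_* identifiers appear in
the patch above; the flag values, the local struct and the helper
PickTargetDevice() are assumptions made for the example, not part of
inference-engine-mlapi.

// Illustrative sketch only. The enum values and struct below are local
// stand-ins; only supported_accel_devices and the INFERENCE_TARGET_*
// names come from the patch above.
#include <cstdio>

enum {
	INFERENCE_TARGET_CPU    = 1 << 0,
	INFERENCE_TARGET_GPU    = 1 << 1,
	INFERENCE_TARGET_CUSTOM = 1 << 2,
};

struct inference_engine_capacity {
	int supported_accel_devices;
};

// Prefer GPU if the backend reports it, fall back to CPU, and use the
// backend-specific (CUSTOM) device otherwise.
static int PickTargetDevice(const inference_engine_capacity &capacity)
{
	if (capacity.supported_accel_devices & INFERENCE_TARGET_GPU)
		return INFERENCE_TARGET_GPU;
	if (capacity.supported_accel_devices & INFERENCE_TARGET_CPU)
		return INFERENCE_TARGET_CPU;
	return INFERENCE_TARGET_CUSTOM;
}

int main(void)
{
	// With this patch an NNFW-backed plugin reports GPU | CPU, while the
	// MLAPI backend type keeps reporting CUSTOM.
	inference_engine_capacity capacity = { INFERENCE_TARGET_GPU | INFERENCE_TARGET_CPU };
	printf("selected target: 0x%x\n", PickTargetDevice(capacity));
	return 0;
}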