From 8ee8ca35a1072d9397e5a43ed932795eab33265d Mon Sep 17 00:00:00 2001
From: Inki Dae
Date: Tue, 2 Jun 2020 18:36:54 +0900
Subject: [PATCH] Change postfix of file name to "mlapi"

Signed-off-by: Inki Dae
---
 CMakeLists.txt                                      |  2 +-
 ...e_nnstreamer.cpp => inference_engine_mlapi.cpp}  | 14 +++++++-------
 ..._private.h => inference_engine_mlapi_private.h}  |  0
 3 files changed, 8 insertions(+), 8 deletions(-)
 rename src/{inference_engine_nnstreamer.cpp => inference_engine_mlapi.cpp} (96%)
 rename src/{inference_engine_nnstreamer_private.h => inference_engine_mlapi_private.h} (100%)

diff --git a/CMakeLists.txt b/CMakeLists.txt
index d7aab7a..010a06f 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -1,6 +1,6 @@
 CMAKE_MINIMUM_REQUIRED(VERSION 2.6)
-SET(fw_name "inference-engine-nnstreamer")
+SET(fw_name "inference-engine-mlapi")
 PROJECT(${fw_name})

diff --git a/src/inference_engine_nnstreamer.cpp b/src/inference_engine_mlapi.cpp
similarity index 96%
rename from src/inference_engine_nnstreamer.cpp
rename to src/inference_engine_mlapi.cpp
index ddfb784..4a418ec 100644
--- a/src/inference_engine_nnstreamer.cpp
+++ b/src/inference_engine_mlapi.cpp
@@ -15,7 +15,7 @@
  */
 #include
-#include "inference_engine_nnstreamer_private.h"
+#include "inference_engine_mlapi_private.h"
 #include
 #include
@@ -159,7 +159,7 @@ int InferenceMLAPI::GetInputTensorBuffers(std::vector
 	().swap(mDesignated_inputs);
-	// TODO. Request input property information to a given nnstreamer backend,
+	// TODO. Request input property information to a given ML Single API of nnstreamer backend,
 	// and set it instead of user-given one,
 	mDesignated_inputs = property.layer_names;
@@ -325,7 +325,7 @@ int InferenceMLAPI::SetOutputLayerProperty(inference_engine_layer_property &prop
 	mDesignated_outputs.clear();
 	std::vector().swap(mDesignated_outputs);
-	// TODO. Request output property information to a given nnstreamer backend,
+	// TODO. Request output property information to a given ML Single API of nnstreamer backend,
 	// and set it instead of user-given one,
 	mDesignated_outputs = property.layer_names;
@@ -345,7 +345,7 @@ int InferenceMLAPI::GetBackendCapacity(inference_engine_capacity *capacity)
 		return INFERENCE_ENGINE_ERROR_INVALID_PARAMETER;
 	}
-	// TODO. flag supported accel device types according to a given nnstreamer backend.
+	// TODO. flag supported accel device types according to a given ML Single API of nnstreamer backend.
 	capacity->supported_accel_devices = INFERENCE_TARGET_CUSTOM;
 	LOGI("LEAVE");
diff --git a/src/inference_engine_nnstreamer_private.h b/src/inference_engine_mlapi_private.h
similarity index 100%
rename from src/inference_engine_nnstreamer_private.h
rename to src/inference_engine_mlapi_private.h
--
2.34.1
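
The TODO comments in the hunks above point at querying layer properties from the
ML Single API of NNStreamer instead of trusting the user-given layer names. A
minimal sketch of such a query, assuming a TensorFlow Lite model at a hypothetical
path and the standard capi-nnstreamer headers, could look like the following
(illustrative only, not code from this patch):

#include <cstdio>
#include <glib.h>                // g_free() for strings returned by the ML API
#include <nnstreamer.h>          // tensors-info handles; header path may differ per platform
#include <nnstreamer-single.h>   // ML Single API (ml_single_*)

int main()
{
	ml_single_h single = nullptr;
	ml_tensors_info_h in_info = nullptr;

	// "model.tflite" and the TFLite framework type are assumptions for this example.
	int ret = ml_single_open(&single, "model.tflite", nullptr, nullptr,
				 ML_NNFW_TYPE_TENSORFLOW_LITE, ML_NNFW_HW_ANY);
	if (ret != ML_ERROR_NONE)
		return 1;

	// Ask the backend itself for the input tensor properties instead of
	// taking them from the caller, which is what the TODO suggests.
	if (ml_single_get_input_info(single, &in_info) == ML_ERROR_NONE) {
		unsigned int count = 0;
		ml_tensors_info_get_count(in_info, &count);

		for (unsigned int i = 0; i < count; ++i) {
			char *name = nullptr;
			ml_tensor_dimension dim = { 0, };

			ml_tensors_info_get_tensor_name(in_info, i, &name);
			ml_tensors_info_get_tensor_dimension(in_info, i, dim);
			printf("input %u: %s [%u:%u:%u:%u]\n", i,
			       name ? name : "(unnamed)", dim[0], dim[1], dim[2], dim[3]);
			g_free(name);
		}

		ml_tensors_info_destroy(in_info);
	}

	ml_single_close(single);
	return 0;
}

The same pattern would apply to the output-side TODO via ml_single_get_output_info().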