From: Inki Dae
Date: Wed, 25 Mar 2020 01:44:22 +0000 (+0900)
Subject: Add the use of private CL kernel cache file path
X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=refs%2Fheads%2Fsandbox%2Finki.dae%2Fcl_kernel_cache;p=platform%2Fcore%2Fmultimedia%2Finference-engine-armnn.git

Add the use of private CL kernel cache file path

This change lets the location of the CL kernel cache files be designated
at build time through the USE_PRIVATE_CACHE_PATH and PRIVATE_CACHE_PATH
cmake flags; by default the cache files are written to the current
directory, "./".

Change-Id: I3bf58fbeb6c09c1d97880edc8b51d1fe48518d7c
Signed-off-by: Inki Dae
---

diff --git a/CMakeLists.txt b/CMakeLists.txt
index ea4cc23..95fc77f 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -38,6 +38,12 @@ ADD_DEFINITIONS("-DTIZEN_DEBUG")
 
 SET(CMAKE_EXE_LINKER_FLAGS "-Wl,--as-needed -Wl,--rpath=${LIB_INSTALL_DIR}")
 
+IF(USE_PRIVATE_CACHE_PATH)
+    ADD_DEFINITIONS(-DCACHE_PATH="${PRIVATE_CACHE_PATH}")
+ELSE()
+    ADD_DEFINITIONS(-DCACHE_PATH="./")
+ENDIF()
+
 aux_source_directory(src SOURCES)
 
 ADD_LIBRARY(${fw_name} SHARED ${SOURCES})
diff --git a/packaging/inference-engine-armnn.spec b/packaging/inference-engine-armnn.spec
index 1d01dd2..fcc411d 100644
--- a/packaging/inference-engine-armnn.spec
+++ b/packaging/inference-engine-armnn.spec
@@ -28,6 +28,9 @@ export CXXFLAGS="$CXXFLAGS -DTIZEN_DEBUG_ENABLE"
 export FFLAGS="$FFLAGS -DTIZEN_DEBUG_ENABLE"
 %endif
 
+# To designate the location of the CL kernel cache files, pass the
+# following flags to cmake:
+# -DUSE_PRIVATE_CACHE_PATH=ON -DPRIVATE_CACHE_PATH=/path/to/
 %cmake .
 
 make %{?jobs:-j%jobs}
diff --git a/src/inference_engine_armnn.cpp b/src/inference_engine_armnn.cpp
index 9486c73..e600a08 100644
--- a/src/inference_engine_armnn.cpp
+++ b/src/inference_engine_armnn.cpp
@@ -33,6 +33,15 @@
 #include "arm_compute/runtime/CL/Utils.h"
 #include "openssl/sha.h"
 
+// CACHE_PATH holds the user-designated location of the CL kernel
+// cache files, which is set at build time.
+// Refer to the following cmake flags:
+// USE_PRIVATE_CACHE_PATH=ON makes inference-engine-armnn use the
+// user-designated location, which should be assigned to the
+// PRIVATE_CACHE_PATH flag, e.g., PRIVATE_CACHE_PATH=/path/to/.
+// By default, CACHE_PATH is the current directory, "./".
+#define USER_CACHE_PATH CACHE_PATH
+
 namespace InferenceEngineImpl {
 namespace ARMNNImpl {
 
@@ -155,7 +164,10 @@ std::string InferenceARMNN::GetHashKey(std::string &model_path)
 
 	close(fd);
 
-	return s.str();
+	std::string cache_file_path(USER_CACHE_PATH);
+	cache_file_path += s.str();
+
+	return cache_file_path;
 }
 
 int InferenceARMNN::SetTargetDevices(int types)
@@ -325,8 +337,6 @@ int InferenceARMNN::Load(std::vector model_paths, inference_model_f
 	LOGI("Optimized Network.");
 
 	if (mUseCache == true) {
-		LOGI("cache file count = %d", mCacheFile.size());
-
 		// restoring CL kernel binaries to memory needs CLBackendContext's instance of ARMCL and
 		// the instance is created at armnn::IRuntime::Create function above so make sure to restore
 		// CL kernel binaries after creating runtime and before loading model.
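
The change to GetHashKey() above prepends the compile-time CACHE_PATH
definition to the SHA-based hash string, so the CL kernel cache file ends up
under the designated directory rather than the working directory. Below is a
minimal, self-contained C++ sketch of that pattern, assuming CACHE_PATH is
passed on the compiler command line as the CMakeLists.txt change does; the
helper make_cache_file_path() and the fake_hash value are hypothetical
stand-ins used only to illustrate the idea and are not part of
inference-engine-armnn.

// Minimal sketch: prefix a build-time cache directory to a hash-based file name.
// Build, for example, with: g++ -DCACHE_PATH='"/opt/cache/"' cache_path_sketch.cpp
#include <iostream>
#include <string>

// Fall back to the current directory when no private cache path was designated,
// mirroring the ELSE() branch added to CMakeLists.txt above.
#ifndef CACHE_PATH
#define CACHE_PATH "./"
#endif

#define USER_CACHE_PATH CACHE_PATH

// Hypothetical helper: combine the designated cache directory with a hash
// string, as the patched GetHashKey() does with s.str().
static std::string make_cache_file_path(const std::string &hash)
{
	std::string cache_file_path(USER_CACHE_PATH);
	cache_file_path += hash;
	return cache_file_path;
}

int main()
{
	// Stand-in for the SHA-based hash key computed from the model file.
	const std::string fake_hash = "0123456789abcdef";

	std::cout << make_cache_file_path(fake_hash) << std::endl;
	return 0;
}

Built without -DCACHE_PATH the sketch prints "./0123456789abcdef"; built with
the private path shown in the comment it prints "/opt/cache/0123456789abcdef",
which mirrors how the patch selects between the default and the private cache
location.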