SET(CMAKE_EXE_LINKER_FLAGS "-Wl,--as-needed -Wl,--rpath=${LIB_INSTALL_DIR}")
+IF(USE_PRIVATE_CACHE_PATH)
+ ADD_DEFINITIONS(-DCACHE_PATH="${PRIVATE_CACHE_PATH}")
+ELSE()
+ ADD_DEFINITIONS(-DCACHE_PATH="./")
+ENDIF()
+
aux_source_directory(src SOURCES)
ADD_LIBRARY(${fw_name} SHARED ${SOURCES})
export FFLAGS="$FFLAGS -DTIZEN_DEBUG_ENABLE"
%endif
+# To designate the location of the CL kernel cache files, add the
+# flags below as CMake flags:
+# -DUSE_PRIVATE_CACHE_PATH=ON -DPRIVATE_CACHE_PATH=/path/to/
%cmake .
make %{?jobs:-j%jobs}
#include "arm_compute/runtime/CL/Utils.h"
#include "openssl/sha.h"
+// CACHE_PATH holds the user-designated location of the CL kernel
+// cache files, which can be set at build time.
+// Refer to the flags below:
+// USE_PRIVATE_CACHE_PATH=ON means that inference-engine-armnn will use
+// the user-designated location, and that location should be assigned via
+// the PRIVATE_CACHE_PATH flag, e.g. PRIVATE_CACHE_PATH=/path/to/.
+// By default, CACHE_PATH is the current directory, "./".
+#define USER_CACHE_PATH CACHE_PATH
+
namespace InferenceEngineImpl {
namespace ARMNNImpl {
close(fd);
- return s.str();
+ std::string cache_file_path(USER_CACHE_PATH);
+ cache_file_path += s.str();
+
+ return cache_file_path;
}
int InferenceARMNN::SetTargetDevices(int types)
LOGI("Optimized Network.");
if (mUseCache == true) {
- LOGI("cache file count = %d", mCacheFile.size());
-
// restoring CL kernel binaries to memory needs CLBackendContext's instance of ARMCL and
// the instance is created at armnn::IRuntime::Create function above so make sure to restore
// CL kernel binaries after creating runtime and before loading model.