Do not skip updating the profile data if profiling failed.
Instead, update all profile data to 0 for the visualizer.
Change-Id: I2f145e462716ad221bfa57a9831c606025059f54
Signed-off-by: Inki Dae <inki.dae@samsung.com>
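Below is a minimal standalone sketch of the idea (class and member names are illustrative, not the actual InferenceEngineProfiler API): once a failure marks the profiler invalid, entries are still recorded, just with zeroed values, so the visualizer receives one row per expected function instead of a truncated data set.

#include <iostream>
#include <string>
#include <vector>

// Sketch only: a simplified stand-in for the real profiler, showing the
// zero-fill behaviour this patch introduces.
struct Entry {
    std::string function_name;
    unsigned int elapsed_ms;
};

class MiniProfiler {
public:
    void SetInvalid(void) { mInvalid = true; }
    bool IsInvalid(void) { return mInvalid; }

    void Stop(const std::string &func_name, unsigned int measured_ms)
    {
        // On failure, keep the entry but record 0 instead of dropping it.
        mEntries.push_back({ func_name, mInvalid ? 0u : measured_ms });
    }

    void Dump(void)
    {
        for (const auto &e : mEntries)
            std::cout << e.function_name << "|" << e.elapsed_ms << std::endl;
    }

private:
    bool mInvalid = false;
    std::vector<Entry> mEntries;
};

int main(void)
{
    MiniProfiler profiler;
    profiler.Stop("Load", 12); // normal measurement
    profiler.SetInvalid();     // e.g. the backend's Run() failed
    profiler.Stop("Run", 34);  // recorded as 0, not skipped
    profiler.Dump();
    return 0;
}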
mDumpFilename = filename;
}
+ /**
+ * @brief Mark this profile data as invalid.
+ * @details Call this function when profiling has failed. In that case, all profile data will be written as 0.
+ *
+ * @since_tizen 6.5
+ */
+ void SetInvalid(void) { mInvalid = true; }
+
+ /**
+ * @brief Check whether this profile data is invalid.
+ *
+ * @since_tizen 6.5
+ */
+ bool IsInvalid(void) { return mInvalid; }
+
private:
void PushData(ProfileData &data);
struct timespec GetTimeDiff(struct timespec &start,
std::string mDumpFilename;
MemoryData mStartMemoryData;
MemoryData mEndMemoryData;
+ bool mInvalid;
};
} /* Profiler */
} /* InferenceEngineInterface */
int ret = mBackendHandle->Load(model_paths, model_format);
if (ret != INFERENCE_ENGINE_ERROR_NONE) {
- LOGE("Fail to load InferenceEngineVision");
- mUseProfiler = false;
+ LOGE("Fail to load InferenceEngineVision. All profile data will be set to 0.");
+ mProfiler.SetInvalid();
}
if (mUseProfiler == true) {
mProfiler.Stop(IE_PROFILER_LATENCY, "Load");
+
+ // If profiling failed, record a zeroed 'Run' entry as well so the visualizer still gets a complete data set.
+ if (mProfiler.IsInvalid() == true) {
+ mProfiler.Start(IE_PROFILER_LATENCY);
+ mProfiler.Stop(IE_PROFILER_LATENCY, "Run");
+ }
}
LOGI("LEAVE");
int ret = mBackendHandle->Run(input_buffers, output_buffers);
if (ret != INFERENCE_ENGINE_ERROR_NONE) {
- LOGE("Failed to inference.");
- mUseProfiler = false;
+ LOGE("Failed to inference. All profile data will be set to 0.");
+ mProfiler.SetInvalid();
}
if (mUseProfiler == true) {
static const std::string sTitleMarkdown(
"backend|target devices|model name|Function name|Latency(ms)\n--|--|--|--|--\n");
- InferenceEngineProfiler::InferenceEngineProfiler()
+ InferenceEngineProfiler::InferenceEngineProfiler() : mInvalid(false)
{
mStartTime = {
0,
void InferenceEngineProfiler::PushData(ProfileData &data)
{
+ // If profiling failed, push the already-zeroed data as-is and skip the averaging below.
+ if (mInvalid == true) {
+ v_mProfileData.push_back(data);
+ return;
+ }
+
std::string key = std::to_string(mEnvNum - 1) + data.function_name;
// In case of multiple 'Run' per one 'Load', update just average value of measured ones instead of adding new one.
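The comment above refers to code elided from this hunk: entries are keyed by environment index plus function name, and repeated 'Run' measurements for the same key are folded into an average instead of being appended. A generic running-average update of that kind (illustrative only, not the project's actual code) would be:

// Fold a new latency sample into an existing average; sample_count is the
// number of samples already covered by current_avg.
static unsigned int UpdateAverage(unsigned int current_avg,
                                  unsigned int sample_count,
                                  unsigned int new_sample)
{
    return (current_avg * sample_count + new_sample) / (sample_count + 1);
}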
ProfileData data = { mEnvNum - 1, func_name, 0 };
+ // If profiling failed, zero the memory data as well and push the zeroed entry.
+ if (mInvalid == true) {
+ mEndMemoryData.rss = 0;
+ mStartMemoryData.rss = 0;
+
+ PushData(data);
+ return;
+ }
+
switch (type) {
case IE_PROFILER_LATENCY: {
clock_gettime(CLOCK_MONOTONIC, &mEndTime);