Set profile data to 0 at failure
author	Inki Dae <inki.dae@samsung.com>
Wed, 9 Dec 2020 10:42:40 +0000 (19:42 +0900)
committer	Inki Dae <inki.dae@samsung.com>
Wed, 9 Dec 2020 10:42:40 +0000 (19:42 +0900)
Do not skip updating the profile data when profiling fails.
Instead, set all profile data to 0 for the visualizer.

Change-Id: I2f145e462716ad221bfa57a9831c606025059f54
Signed-off-by: Inki Dae <inki.dae@samsung.com>
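
In short, the change replaces "disable profiling on failure" with an "invalid" flag on the profiler, so every stage still gets an entry, just with zeroed values. A minimal, self-contained sketch of that pattern (the toy class, values and output format are illustrative; only SetInvalid()/IsInvalid() mirror the patch):

#include <iostream>
#include <string>
#include <vector>

// Toy stand-in for InferenceEngineProfiler (illustrative only).
class ToyProfiler {
public:
	void SetInvalid(void) { mInvalid = true; }
	bool IsInvalid(void) { return mInvalid; }

	void Stop(const std::string &func_name, unsigned int measured_ms)
	{
		// Once profiling has failed, record 0 instead of the measured value.
		mEntries.push_back({ func_name, mInvalid ? 0u : measured_ms });
	}

	void Dump(void)
	{
		for (const auto &e : mEntries)
			std::cout << e.name << "|" << e.latency_ms << "\n";
	}

private:
	struct Entry {
		std::string name;
		unsigned int latency_ms;
	};
	std::vector<Entry> mEntries;
	bool mInvalid = false;
};

int main()
{
	ToyProfiler profiler;
	bool load_failed = true; // pretend mBackendHandle->Load() returned an error

	if (load_failed)
		profiler.SetInvalid(); // from now on every entry is zeroed

	profiler.Stop("Load", 42); // recorded as 0 because profiling failed
	profiler.Stop("Run", 17);  // also 0, so the visualizer still gets a 'Run' row
	profiler.Dump();
	return 0;
}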
include/inference_engine_common_profiler.h
src/inference_engine_common_impl.cpp
src/inference_engine_common_profiler.cpp

diff --git a/include/inference_engine_common_profiler.h b/include/inference_engine_common_profiler.h
index d185f5f..5cd5ba4 100644
@@ -214,6 +214,21 @@ namespace Profiler
                        mDumpFilename = filename;
                }
 
+               /**
+                * @brief Mark this profile data as invalid.
+                * @details This function is called when profiling fails. In that case, all profile data will be written as 0.
+                *
+                * @since_tizen 6.5
+                */
+               void SetInvalid(void) { mInvalid = true; }
+
+               /**
+                * @brief Indicate whether this profile data is invalid.
+                *
+                * @since_tizen 6.5
+                */
+               bool IsInvalid(void) { return mInvalid; }
+
        private:
                void PushData(ProfileData &data);
                struct timespec GetTimeDiff(struct timespec &start,
@@ -232,6 +247,7 @@ namespace Profiler
                std::string mDumpFilename;
                MemoryData mStartMemoryData;
                MemoryData mEndMemoryData;
+               bool mInvalid;
        };
 } /* Profiler */
 } /* InferenceEngineInterface */
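
The flag itself is declared here without an initializer and is set to false in the constructor's initializer list (last hunk below). A small, purely illustrative sketch of the C++11 alternative, a default member initializer, which keeps the flag well defined no matter which constructor runs:

#include <cassert>

class FlaggedProfileData {
public:
	void SetInvalid(void) { mInvalid = true; }
	bool IsInvalid(void) { return mInvalid; }

private:
	// Default member initializer instead of an entry in every constructor's
	// initializer list; either way the flag starts out as 'valid'.
	bool mInvalid = false;
};

int main()
{
	FlaggedProfileData data;
	assert(!data.IsInvalid()); // valid until something fails
	data.SetInvalid();
	assert(data.IsInvalid());  // stays invalid once set
	return 0;
}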
diff --git a/src/inference_engine_common_impl.cpp b/src/inference_engine_common_impl.cpp
index a39cb17..347e71c 100644
@@ -457,12 +457,18 @@ out:
 
                int ret = mBackendHandle->Load(model_paths, model_format);
                if (ret != INFERENCE_ENGINE_ERROR_NONE) {
-                       LOGE("Fail to load InferenceEngineVision");
-                       mUseProfiler = false;
+                       LOGE("Failed to load InferenceEngineVision. All profile data will be set to 0.");
+                       mProfiler.SetInvalid();
                }
 
                if (mUseProfiler == true) {
                        mProfiler.Stop(IE_PROFILER_LATENCY, "Load");
+
+                       // If profiling failed, also push a zeroed 'Run' entry, which the visualizer needs.
+                       if (mProfiler.IsInvalid() == true) {
+                               mProfiler.Start(IE_PROFILER_LATENCY);
+                               mProfiler.Stop(IE_PROFILER_LATENCY, "Run");
+                       }
                }
 
                LOGI("LEAVE");
@@ -617,8 +623,8 @@ out:
 
                int ret = mBackendHandle->Run(input_buffers, output_buffers);
                if (ret != INFERENCE_ENGINE_ERROR_NONE) {
-                       LOGE("Failed to inference.");
-                       mUseProfiler = false;
+                       LOGE("Failed to run inference. All profile data will be set to 0.");
+                       mProfiler.SetInvalid();
                }
 
                if (mUseProfiler == true) {
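
The practical effect in this file: a backend failure no longer drops rows from the dump, it only zeroes their values. A rough before/after sketch of what the visualizer receives when Run() fails (stage names and the 12 ms figure are made up):

#include <iostream>
#include <string>
#include <vector>

struct Row {
	std::string stage;
	unsigned int latency_ms;
};

int main()
{
	// Before: 'mUseProfiler = false' on failure, so the failed stage is
	// simply missing from the dump.
	std::vector<Row> before = { { "Load", 12 } };

	// After: profiling stays enabled, the profiler is marked invalid and the
	// failed stage is still reported, just with a latency of 0.
	std::vector<Row> after = { { "Load", 12 }, { "Run", 0 } };

	std::cout << "rows before: " << before.size()
		  << ", rows after: " << after.size() << "\n";
	return 0;
}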
diff --git a/src/inference_engine_common_profiler.cpp b/src/inference_engine_common_profiler.cpp
index 21a6b92..1aed520 100644
@@ -44,7 +44,7 @@ namespace Profiler
        static const std::string sTitleMarkdown(
                        "backend|target devices|model name|Function name|Latency(ms)\n--|--|--|--|--\n");
 
-       InferenceEngineProfiler::InferenceEngineProfiler()
+       InferenceEngineProfiler::InferenceEngineProfiler() : mInvalid(false)
        {
                mStartTime = {
                        0,
@@ -75,6 +75,12 @@ namespace Profiler
 
        void InferenceEngineProfiler::PushData(ProfileData &data)
        {
+               // If profiling failed, just push the zeroed data without averaging.
+               if (mInvalid == true) {
+                       v_mProfileData.push_back(data);
+                       return;
+               }
+
                std::string key = std::to_string(mEnvNum - 1) + data.function_name;
 
                // In case of multiple 'Run's per 'Load', just update the average of the measured values instead of adding a new entry.
@@ -176,6 +182,15 @@ namespace Profiler
 
                ProfileData data = { mEnvNum - 1, func_name, 0 };
 
+               // If profiling failed, set all data to 0.
+               if (mInvalid == true) {
+                       mEndMemoryData.rss = 0;
+                       mStartMemoryData.rss = 0;
+
+                       PushData(data);
+                       return;
+               }
+
                switch (type) {
                case IE_PROFILER_LATENCY: {
                        clock_gettime(CLOCK_MONOTONIC, &mEndTime);
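
For reference, sTitleMarkdown above is the table header the visualizer parses; with the profiler marked invalid, the rows beneath it simply carry 0. A sketch of writing such a dump (the backend, device, model and file names are made up for illustration):

#include <fstream>
#include <string>
#include <utility>
#include <vector>

int main()
{
	// Same header string as sTitleMarkdown in this file.
	const std::string title =
		"backend|target devices|model name|Function name|Latency(ms)\n--|--|--|--|--\n";

	// With mInvalid set, every entry pushed by Stop() carries a latency of 0.
	const std::vector<std::pair<std::string, unsigned int>> entries = {
		{ "Load", 0 }, { "Run", 0 }
	};

	std::ofstream dump("profile_latency.md"); // hypothetical output file name
	dump << title;
	for (const auto &entry : entries)
		dump << "tflite|CPU|model.tflite|" << entry.first << "|"
		     << entry.second << "\n";
	return 0;
}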