Fix prefix of profiler enumerations
authorInki Dae <inki.dae@samsung.com>
Sun, 5 Apr 2020 23:51:46 +0000 (08:51 +0900)
committerInki Dae <inki.dae@samsung.com>
Tue, 14 Apr 2020 00:42:53 +0000 (09:42 +0900)
Change-Id: I81d5639e46b99111e79b7b21acae55ef6f8d3393
Signed-off-by: Inki Dae <inki.dae@samsung.com>
include/inference_engine_profiler.h
src/inference_engine_common_impl.cpp
src/inference_engine_profiler.cpp

index 924d649..134499c 100644 (file)
@@ -34,12 +34,12 @@ namespace Profiler {
  *
  */
 enum {
-       IR_PROFILER_DUMP_MIN,
+       IE_PROFILER_DUMP_MIN,
        // Profile data will be printed out on console screen.
-       IR_PROFILER_DUMP_CONSOLE,
+       IE_PROFILER_DUMP_CONSOLE,
        // Profile data will be stored on a given file.
-       IR_PROFILER_DUMP_FILE,
-       IR_PROFILER_DUMP_MAX
+       IE_PROFILER_DUMP_FILE,
+       IE_PROFILER_DUMP_MAX
 };
 
 /**
@@ -49,12 +49,12 @@ enum {
  *
  */
 enum {
-       IR_PROFILER_MIN,
+       IE_PROFILER_MIN,
        // Measure performance in millisecond.
-       IR_PROFILER_LATENCY,
+       IE_PROFILER_LATENCY,
        // Measure physical memory usage.
-       IR_PROFILER_MEMORY,
-       IR_PROFILER_MAX
+       IE_PROFILER_MEMORY,
+       IE_PROFILER_MAX
 };
 
 /**
@@ -64,11 +64,11 @@ enum {
  *
  */
 enum {
-       IR_PROFILER_DUMP_FORMAT_MIN,
+       IE_PROFILER_DUMP_FORMAT_MIN,
        // Store profiling data to a given file in Markdown syntax[1]
        // [1] https://daringfireball.net/projects/markdown/syntax
-       IR_PROFILER_DUMP_FORMAT_MARKDOWN,
-       IR_PROFILER_DUMP_FORMAT_MAX
+       IE_PROFILER_DUMP_FORMAT_MARKDOWN,
+       IE_PROFILER_DUMP_FORMAT_MAX
 };
 
 /**
index 6b2923f..73d75f2 100755 (executable)
@@ -167,7 +167,7 @@ int InferenceEngineCommon::EnableProfiler(bool enable)
                mProfiler = new InferenceEngineProfiler();
 
                // In default, profile data will be stored to a given file.
-               mProfilerDumpType = IR_PROFILER_DUMP_FILE;
+               mProfilerDumpType = IE_PROFILER_DUMP_FILE;
        }
 
        return INFERENCE_ENGINE_ERROR_NONE;
@@ -180,7 +180,7 @@ int InferenceEngineCommon::DumpProfileToConsole(void)
                return INFERENCE_ENGINE_ERROR_INVALID_PARAMETER;
        }
 
-       mProfilerDumpType = IR_PROFILER_DUMP_CONSOLE;
+       mProfilerDumpType = IE_PROFILER_DUMP_CONSOLE;
        return INFERENCE_ENGINE_ERROR_NONE;
 }
 
@@ -191,7 +191,7 @@ int InferenceEngineCommon::DumpProfileToFile(const std::string filename)
                return INFERENCE_ENGINE_ERROR_INVALID_PARAMETER;
        }
 
-       mProfilerDumpType = IR_PROFILER_DUMP_FILE;
+       mProfilerDumpType = IE_PROFILER_DUMP_FILE;
        mProfiler->SetDumpFilename(filename);
 
        return INFERENCE_ENGINE_ERROR_NONE;
@@ -274,7 +274,7 @@ int InferenceEngineCommon::Load(std::vector<std::string> model_paths, inference_
        if (mUseProfiler == true) {
                mProfiler->AddModelName(model_paths[0]);
                mProfiler->PushEnv();
-               mProfiler->Start(IR_PROFILER_LATENCY);
+               mProfiler->Start(IE_PROFILER_LATENCY);
        }
 
     int ret = mBackendHandle->Load(model_paths, model_format);
@@ -282,7 +282,7 @@ int InferenceEngineCommon::Load(std::vector<std::string> model_paths, inference_
         LOGE("Fail to load InferenceEngineVision");
 
        if (mUseProfiler == true) {
-               mProfiler->Stop(IR_PROFILER_LATENCY, "Load");
+               mProfiler->Stop(IE_PROFILER_LATENCY, "Load");
        }
 
     LOGI("LEAVE");
@@ -391,13 +391,13 @@ int InferenceEngineCommon::Run(std::vector<inference_engine_tensor_buffer> &inpu
                                 std::vector<inference_engine_tensor_buffer> &output_buffers)
 {
        if (mUseProfiler == true) {
-               mProfiler->Start(IR_PROFILER_LATENCY);
+               mProfiler->Start(IE_PROFILER_LATENCY);
        }
 
     int ret = mBackendHandle->Run(input_buffers, output_buffers);
 
        if (mUseProfiler == true) {
-               mProfiler->Stop(IR_PROFILER_LATENCY, "Run");
+               mProfiler->Stop(IE_PROFILER_LATENCY, "Run");
        }
 
        return ret;
index a934e5c..aba71f3 100644 (file)
@@ -108,16 +108,16 @@ unsigned long InferenceEngineProfiler::ConvertMillisec(const struct timespec &ti
 
 void InferenceEngineProfiler::Start(const unsigned int type)
 {
-       if (IR_PROFILER_MIN >= type && IR_PROFILER_MAX <= type) {
+       if (IE_PROFILER_MIN >= type && IE_PROFILER_MAX <= type) {
                LOGE("Invalid profiler type.");
                return;
        }
 
        switch (type) {
-       case IR_PROFILER_LATENCY:
+       case IE_PROFILER_LATENCY:
                clock_gettime(CLOCK_MONOTONIC, &mStartTime);
                break;
-       case IR_PROFILER_MEMORY:
+       case IE_PROFILER_MEMORY:
                break;
        /* TODO */
        }
@@ -125,7 +125,7 @@ void InferenceEngineProfiler::Start(const unsigned int type)
 
 void InferenceEngineProfiler::Stop(const unsigned int type, const char *func_name)
 {
-       if (IR_PROFILER_MIN >= type && IR_PROFILER_MAX <= type) {
+       if (IE_PROFILER_MIN >= type && IE_PROFILER_MAX <= type) {
                LOGE("Invalid profiler type.");
                return;
        }
@@ -133,12 +133,12 @@ void InferenceEngineProfiler::Stop(const unsigned int type, const char *func_nam
        ProfileData data = { mEnvNum - 1, func_name, 0, 0 };
 
        switch (type) {
-       case IR_PROFILER_LATENCY: {
+       case IE_PROFILER_LATENCY: {
                clock_gettime(CLOCK_MONOTONIC, &mEndTime);
                data.elapsed_time = ConvertMillisec(GetTimeDiff(mStartTime, mEndTime));
                break;
        }
-       case IR_PROFILER_MEMORY:
+       case IE_PROFILER_MEMORY:
                break;
                /* TODO */
        }
@@ -199,15 +199,15 @@ void InferenceEngineProfiler::DumpToFile(const unsigned int dump_type, std::stri
 
 void InferenceEngineProfiler::Dump(const unsigned int dump_type)
 {
-       if (IR_PROFILER_DUMP_MIN >= dump_type && IR_PROFILER_DUMP_MAX <= dump_type) {
+       if (IE_PROFILER_DUMP_MIN >= dump_type && IE_PROFILER_DUMP_MAX <= dump_type) {
                LOGE("Invalid profiler dump type.");
                return;
        }
 
-       if (dump_type == IR_PROFILER_DUMP_CONSOLE) {
+       if (dump_type == IE_PROFILER_DUMP_CONSOLE) {
                DumpToConsole();
        } else {
-               DumpToFile(IR_PROFILER_DUMP_FORMAT_MARKDOWN, mDumpFilename);
+               DumpToFile(IE_PROFILER_DUMP_FORMAT_MARKDOWN, mDumpFilename);
        }
 }