*
*/
enum {
- IR_PROFILER_DUMP_MIN,
+ IE_PROFILER_DUMP_MIN,
// Profile data will be printed out on console screen.
- IR_PROFILER_DUMP_CONSOLE,
+ IE_PROFILER_DUMP_CONSOLE,
// Profile data will be stored on a given file.
- IR_PROFILER_DUMP_FILE,
- IR_PROFILER_DUMP_MAX
+ IE_PROFILER_DUMP_FILE,
+ IE_PROFILER_DUMP_MAX
};
/**
*
*/
enum {
- IR_PROFILER_MIN,
+ IE_PROFILER_MIN,
// Measure performance in millisecond.
- IR_PROFILER_LATENCY,
+ IE_PROFILER_LATENCY,
// Measure physical memory usage.
- IR_PROFILER_MEMORY,
- IR_PROFILER_MAX
+ IE_PROFILER_MEMORY,
+ IE_PROFILER_MAX
};
/**
*
*/
enum {
- IR_PROFILER_DUMP_FORMAT_MIN,
+ IE_PROFILER_DUMP_FORMAT_MIN,
// Store profiling data to a given file in Markdown syntax[1]
// [1] https://daringfireball.net/projects/markdown/syntax
- IR_PROFILER_DUMP_FORMAT_MARKDOWN,
- IR_PROFILER_DUMP_FORMAT_MAX
+ IE_PROFILER_DUMP_FORMAT_MARKDOWN,
+ IE_PROFILER_DUMP_FORMAT_MAX
};
/**
mProfiler = new InferenceEngineProfiler();
// In default, profile data will be stored to a given file.
- mProfilerDumpType = IR_PROFILER_DUMP_FILE;
+ mProfilerDumpType = IE_PROFILER_DUMP_FILE;
}
return INFERENCE_ENGINE_ERROR_NONE;
return INFERENCE_ENGINE_ERROR_INVALID_PARAMETER;
}
- mProfilerDumpType = IR_PROFILER_DUMP_CONSOLE;
+ mProfilerDumpType = IE_PROFILER_DUMP_CONSOLE;
return INFERENCE_ENGINE_ERROR_NONE;
}
return INFERENCE_ENGINE_ERROR_INVALID_PARAMETER;
}
- mProfilerDumpType = IR_PROFILER_DUMP_FILE;
+ mProfilerDumpType = IE_PROFILER_DUMP_FILE;
mProfiler->SetDumpFilename(filename);
return INFERENCE_ENGINE_ERROR_NONE;
if (mUseProfiler == true) {
mProfiler->AddModelName(model_paths[0]);
mProfiler->PushEnv();
- mProfiler->Start(IR_PROFILER_LATENCY);
+ mProfiler->Start(IE_PROFILER_LATENCY);
}
int ret = mBackendHandle->Load(model_paths, model_format);
LOGE("Fail to load InferenceEngineVision");
if (mUseProfiler == true) {
- mProfiler->Stop(IR_PROFILER_LATENCY, "Load");
+ mProfiler->Stop(IE_PROFILER_LATENCY, "Load");
}
LOGI("LEAVE");
std::vector<inference_engine_tensor_buffer> &output_buffers)
{
if (mUseProfiler == true) {
- mProfiler->Start(IR_PROFILER_LATENCY);
+ mProfiler->Start(IE_PROFILER_LATENCY);
}
int ret = mBackendHandle->Run(input_buffers, output_buffers);
if (mUseProfiler == true) {
- mProfiler->Stop(IR_PROFILER_LATENCY, "Run");
+ mProfiler->Stop(IE_PROFILER_LATENCY, "Run");
}
return ret;
void InferenceEngineProfiler::Start(const unsigned int type)
{
- if (IR_PROFILER_MIN >= type && IR_PROFILER_MAX <= type) {
+ if (IE_PROFILER_MIN >= type || IE_PROFILER_MAX <= type) {
LOGE("Invalid profiler type.");
return;
}
switch (type) {
- case IR_PROFILER_LATENCY:
+ case IE_PROFILER_LATENCY:
clock_gettime(CLOCK_MONOTONIC, &mStartTime);
break;
- case IR_PROFILER_MEMORY:
+ case IE_PROFILER_MEMORY:
break;
/* TODO */
}
void InferenceEngineProfiler::Stop(const unsigned int type, const char *func_name)
{
- if (IR_PROFILER_MIN >= type && IR_PROFILER_MAX <= type) {
+ if (IE_PROFILER_MIN >= type || IE_PROFILER_MAX <= type) {
LOGE("Invalid profiler type.");
return;
}
ProfileData data = { mEnvNum - 1, func_name, 0, 0 };
switch (type) {
- case IR_PROFILER_LATENCY: {
+ case IE_PROFILER_LATENCY: {
clock_gettime(CLOCK_MONOTONIC, &mEndTime);
data.elapsed_time = ConvertMillisec(GetTimeDiff(mStartTime, mEndTime));
break;
}
- case IR_PROFILER_MEMORY:
+ case IE_PROFILER_MEMORY:
break;
/* TODO */
}
void InferenceEngineProfiler::Dump(const unsigned int dump_type)
{
- if (IR_PROFILER_DUMP_MIN >= dump_type && IR_PROFILER_DUMP_MAX <= dump_type) {
+ if (IE_PROFILER_DUMP_MIN >= dump_type || IE_PROFILER_DUMP_MAX <= dump_type) {
LOGE("Invalid profiler dump type.");
return;
}
- if (dump_type == IR_PROFILER_DUMP_CONSOLE) {
+ if (dump_type == IE_PROFILER_DUMP_CONSOLE) {
DumpToConsole();
} else {
- DumpToFile(IR_PROFILER_DUMP_FORMAT_MARKDOWN, mDumpFilename);
+ DumpToFile(IE_PROFILER_DUMP_FORMAT_MARKDOWN, mDumpFilename);
}
}