Extend timetest_infer pipeline with an infer track and blob support (#2298)
author    Vitaliy Urusovskij <vitaliy.urusovskij@intel.com>
Sun, 20 Sep 2020 23:09:41 +0000 (02:09 +0300)
committer GitHub <noreply@github.com>
Sun, 20 Sep 2020 23:09:41 +0000 (02:09 +0300)
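
The previous pipeline timed everything under a single first_time_to_inference
track. It is now split into finer-grained tracks: load_plugin (ie.GetVersions
forces the device plugin to load), create_exenetwork (read_network +
load_network for IR models, or import_network when the model file is a
precompiled .blob), both wrapped in first_inference_latency, plus a separate
first_inference track that times CreateInferRequest + Infer. A new helper
header, timetests_helper/utils.h, provides TimeTest::fileExt() for the
extension-based dispatch.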
tests/time_tests/include/timetests_helper/utils.h [new file with mode: 0644]
tests/time_tests/src/timetests/timetest_infer.cpp

diff --git a/tests/time_tests/include/timetests_helper/utils.h b/tests/time_tests/include/timetests_helper/utils.h
new file mode 100644
index 0000000..5c4370d
--- /dev/null
+++ b/tests/time_tests/include/timetests_helper/utils.h
@@ -0,0 +1,20 @@
+// Copyright (C) 2020 Intel Corporation
+// SPDX-License-Identifier: Apache-2.0
+//
+
+#pragma once
+
+#include <string>
+
+namespace TimeTest {
+/**
+ * @brief Get extension from a filename
+ * @param filename - name of the file whose extension should be extracted
+ * @return the extracted file extension, or an empty string if there is none
+ */
+inline std::string fileExt(const std::string& filename) {
+    auto pos = filename.rfind('.');
+    if (pos == std::string::npos) return "";
+    return filename.substr(pos + 1);
+}
+}  // namespace TimeTest
\ No newline at end of file
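
For reference, a minimal standalone usage sketch of the new helper (not part
of the patch; the sample filenames are hypothetical):

#include <iostream>
#include "timetests_helper/utils.h"

int main() {
  // "blob" selects the ImportNetwork path in the pipeline below; anything
  // else, including a missing extension (fileExt() returns ""), falls back
  // to the ReadNetwork + LoadNetwork path.
  const std::string samples[] = {"resnet50.xml", "resnet50.blob", "model"};
  for (const auto &path : samples)
    std::cout << path << " -> '" << TimeTest::fileExt(path) << "'\n";
  return 0;
}
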
diff --git a/tests/time_tests/src/timetests/timetest_infer.cpp b/tests/time_tests/src/timetests/timetest_infer.cpp
index a6e4502..1ba0b24 100644
--- a/tests/time_tests/src/timetests/timetest_infer.cpp
+++ b/tests/time_tests/src/timetests/timetest_infer.cpp
@@ -6,6 +6,7 @@
 #include <iostream>
 
 #include "timetests_helper/timer.h"
+#include "timetests_helper/utils.h"
 using namespace InferenceEngine;
 
 /**
@@ -15,20 +16,41 @@ using namespace InferenceEngine;
  */
 int runPipeline(const std::string &model, const std::string &device) {
   auto pipeline = [](const std::string &model, const std::string &device) {
-    SCOPED_TIMER(first_time_to_inference);
-
     Core ie;
-    CNNNetwork cnnNetwork;
     ExecutableNetwork exeNetwork;
+    InferRequest inferRequest;
 
     {
-      SCOPED_TIMER(read_network);
-      cnnNetwork = ie.ReadNetwork(model);
+      SCOPED_TIMER(first_inference_latency);
+      {
+        SCOPED_TIMER(load_plugin);
+        ie.GetVersions(device);
+      }
+      {
+        SCOPED_TIMER(create_exenetwork);
+        if (TimeTest::fileExt(model) == "blob") {
+          SCOPED_TIMER(import_network);
+          exeNetwork = ie.ImportNetwork(model, device);
+        }
+        else {
+          CNNNetwork cnnNetwork;
+          {
+            SCOPED_TIMER(read_network);
+            cnnNetwork = ie.ReadNetwork(model);
+          }
+
+          {
+            SCOPED_TIMER(load_network);
+            exeNetwork = ie.LoadNetwork(cnnNetwork, device);
+          }
+        }
+      }
     }
 
     {
-      SCOPED_TIMER(load_network);
-      ExecutableNetwork exeNetwork = ie.LoadNetwork(cnnNetwork, device);
+      SCOPED_TIMER(first_inference);
+      inferRequest = exeNetwork.CreateInferRequest();
+      inferRequest.Infer();
     }
   };
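
The hunk above reorganizes the timers around the blob-vs-IR dispatch. Below is
a minimal standalone sketch of that dispatch, not the test itself: it drops
the repo's SCOPED_TIMER plumbing and assumes the 2020-era Inference Engine API
(Core::ImportNetwork taking a file path, plus Core::ReadNetwork and
Core::LoadNetwork); the model path and device name are hypothetical.

#include <inference_engine.hpp>
#include <string>

#include "timetests_helper/utils.h"

using namespace InferenceEngine;

int main() {
  const std::string model = "model.blob";  // hypothetical path
  const std::string device = "CPU";

  Core ie;
  ie.GetVersions(device);  // forces the device plugin to load, as in the test

  ExecutableNetwork exeNetwork;
  if (TimeTest::fileExt(model) == "blob") {
    // Precompiled blob: skip IR parsing and import directly.
    exeNetwork = ie.ImportNetwork(model, device);
  } else {
    // IR (or another readable format): read, then compile for the device.
    CNNNetwork cnnNetwork = ie.ReadNetwork(model);
    exeNetwork = ie.LoadNetwork(cnnNetwork, device);
  }

  InferRequest inferRequest = exeNetwork.CreateInferRequest();
  inferRequest.Infer();  // the first inference, the last stage the test times
  return 0;
}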