Publishing 2019 R1 content
diff --git a/inference-engine/samples/hello_request_classification/main.cpp b/inference-engine/samples/hello_request_classification/main.cpp
index d5fabb2..e03142b 100644
--- a/inference-engine/samples/hello_request_classification/main.cpp
+++ b/inference-engine/samples/hello_request_classification/main.cpp
@@ -1,4 +1,4 @@
-// Copyright (C) 2018 Intel Corporation
+// Copyright (C) 2018-2019 Intel Corporation
 // SPDX-License-Identifier: Apache-2.0
 //
 
@@ -10,6 +10,7 @@
 
 #include <opencv2/opencv.hpp>
 #include <inference_engine.hpp>
+#include <samples/classification_results.h>
 
 using namespace InferenceEngine;
 
@@ -28,7 +29,7 @@ int main(int argc, char *argv[]) {
         // -----------------------------------------------------------------------------------------------------
 
         // --------------------------- 1. Load Plugin for inference engine -------------------------------------
-        InferencePlugin plugin = PluginDispatcher({"../../../lib/intel64", ""}).getPluginByDevice(device_name);
+        InferencePlugin plugin = PluginDispatcher().getPluginByDevice(device_name);
         // -----------------------------------------------------------------------------------------------------
 
         // --------------------------- 2. Read IR Generated by ModelOptimizer (.xml and .bin files) ------------
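With 2019 R1 the sample no longer hard-codes a plugin search directory: a default-constructed PluginDispatcher falls back to the loader's built-in search paths. A minimal sketch of the updated call, assuming the 2019 R1 InferenceEngine API and using "CPU" as an illustrative device name (any device with a registered plugin works):

    #include <inference_engine.hpp>

    int main() {
        using namespace InferenceEngine;
        // Default-constructed dispatcher: no explicit plugin directories needed.
        InferencePlugin plugin = PluginDispatcher().getPluginByDevice("CPU");
        return 0;
    }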
@@ -123,18 +124,10 @@ int main(int argc, char *argv[]) {
         // --------------------------- 8. Process output -------------------------------------------------------
         for (auto &item : output_info) {
             auto output_name = item.first;
-            Blob::Ptr output = async_infer_request.GetBlob(output_name);
-            auto output_buffer = output->buffer().as<PrecisionTrait<Precision::FP32>::value_type *>();
-            std::vector<unsigned> results;
-            /**  This is to sort output probabilities and put it to results vector **/
-            TopResults(10, *output, results);
-
-            std::cout << std::endl << "Top 10 results:" << std::endl << std::endl;
-            for (size_t id = 0; id < 10; ++id) {
-                std::cout.precision(7);
-                auto result = output_buffer[results[id]];
-                std::cout << std::left << std::fixed << result << " label #" << results[id] << std::endl;
-            }
+            Blob::Ptr output = async_infer_request.GetBlob(output_name);
+            // Print classification results
+            ClassificationResult classificationResult(output, {input_image_path});
+            classificationResult.print();
         }
         // -----------------------------------------------------------------------------------------------------
     } catch (const std::exception & ex) {
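The hand-rolled Top-10 loop is replaced by the ClassificationResult helper from samples/classification_results.h, which sorts the output blob and prints the top results per image. A standalone sketch of the helper, assuming the 2019 R1 signature (output blob, image names, batch size, number of top results) and using a hand-filled dummy blob in place of a real network output; "cat.bmp" is an illustrative image name:

    #include <inference_engine.hpp>
    #include <samples/classification_results.h>

    int main() {
        using namespace InferenceEngine;
        // Dummy 1x4 FP32 blob standing in for a real classification output.
        TensorDesc desc(Precision::FP32, {1, 4}, Layout::NC);
        Blob::Ptr output = make_shared_blob<float>(desc);
        output->allocate();
        auto data = output->buffer().as<float *>();
        data[0] = 0.10f; data[1] = 0.70f; data[2] = 0.15f; data[3] = 0.05f;

        // Batch size 1, print the top 4 scores with their label indices.
        ClassificationResult result(output, {"cat.bmp"}, 1, 4);
        result.print();
        return 0;
    }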