// Publishing 2019 R1 content
// [platform/upstream/dldt.git] / inference-engine / samples / validation_app / ClassificationProcessor.cpp
1 // Copyright (C) 2018-2019 Intel Corporation
2 // SPDX-License-Identifier: Apache-2.0
3 //
4
5 #include <string>
6 #include <vector>
7 #include <memory>
8
9 #include "ClassificationProcessor.hpp"
10 #include "Processor.hpp"
11
12 using InferenceEngine::details::InferenceEngineException;
13
14 ClassificationProcessor::ClassificationProcessor(const std::string& flags_m, const std::string& flags_d, const std::string& flags_i, int flags_b,
15         InferencePlugin plugin, CsvDumper& dumper, const std::string& flags_l,
16         PreprocessingOptions preprocessingOptions, bool zeroBackground)
17     : Processor(flags_m, flags_d, flags_i, flags_b, plugin, dumper, "Classification network", preprocessingOptions), zeroBackground(zeroBackground) {
18
19     // Change path to labels file if necessary
20     if (flags_l.empty()) {
21         labelFileName = fileNameNoExt(modelFileName) + ".labels";
22     } else {
23         labelFileName = flags_l;
24     }
25 }
26
27 ClassificationProcessor::ClassificationProcessor(const std::string& flags_m, const std::string& flags_d, const std::string& flags_i, int flags_b,
28         InferencePlugin plugin, CsvDumper& dumper, const std::string& flags_l, bool zeroBackground)
29     : ClassificationProcessor(flags_m, flags_d, flags_i, flags_b, plugin, dumper, flags_l,
30             PreprocessingOptions(false, ResizeCropPolicy::ResizeThenCrop, 256, 256), zeroBackground) {
31 }
32
33 std::shared_ptr<Processor::InferenceMetrics> ClassificationProcessor::Process(bool stream_output) {
34      slog::info << "Collecting labels" << slog::endl;
35      ClassificationSetGenerator generator;
36      try {
37          generator.readLabels(labelFileName);
38      } catch (InferenceEngine::details::InferenceEngineException& ex) {
39          slog::warn << "Can't read labels file " << labelFileName << slog::endl;
40          slog::warn << "Error: " << ex.what() << slog::endl;
41      }
42
43      auto validationMap = generator.getValidationMap(imagesPath);
44      ImageDecoder decoder;
45
46      // ----------------------------Do inference-------------------------------------------------------------
47      slog::info << "Starting inference" << slog::endl;
48
49      std::vector<int> expected(batch);
50      std::vector<std::string> files(batch);
51
52      ConsoleProgress progress(validationMap.size(), stream_output);
53
54      ClassificationInferenceMetrics im;
55
56      std::string firstInputName = this->inputInfo.begin()->first;
57      std::string firstOutputName = this->outInfo.begin()->first;
58      auto firstInputBlob = inferRequest.GetBlob(firstInputName);
59      auto firstOutputBlob = inferRequest.GetBlob(firstOutputName);
60
61      auto iter = validationMap.begin();
62      while (iter != validationMap.end()) {
63          size_t b = 0;
64          int filesWatched = 0;
65          for (; b < batch && iter != validationMap.end(); b++, iter++, filesWatched++) {
66              expected[b] = iter->first;
67              try {
68                  decoder.insertIntoBlob(iter->second, b, *firstInputBlob, preprocessingOptions);
69                  files[b] = iter->second;
70              } catch (const InferenceEngineException& iex) {
71                  slog::warn << "Can't read file " << iter->second << slog::endl;
72                  slog::warn << "Error: " << iex.what() << slog::endl;
73                  // Could be some non-image file in directory
74                  b--;
75                  continue;
76              }
77          }
78
79          Infer(progress, filesWatched, im);
80
81          std::vector<unsigned> results;
82          auto firstOutputData = firstOutputBlob->buffer().as<PrecisionTrait<Precision::FP32>::value_type*>();
83          InferenceEngine::TopResults(TOP_COUNT, *firstOutputBlob, results);
84
85          for (size_t i = 0; i < b; i++) {
86              int expc = expected[i];
87              if (zeroBackground) expc++;
88
89              bool top1Scored = (static_cast<int>(results[0 + TOP_COUNT * i]) == expc);
90              dumper << "\"" + files[i] + "\"" << top1Scored;
91              if (top1Scored) im.top1Result++;
92              for (int j = 0; j < TOP_COUNT; j++) {
93                  unsigned classId = results[j + TOP_COUNT * i];
94                  if (static_cast<int>(classId) == expc) {
95                      im.topCountResult++;
96                  }
97                  dumper << classId << firstOutputData[classId + i * (firstOutputBlob->size() / batch)];
98              }
99              dumper.endLine();
100              im.total++;
101          }
102      }
103      progress.finish();
104
105      return std::shared_ptr<Processor::InferenceMetrics>(new ClassificationInferenceMetrics(im));
106 }
107
108 void ClassificationProcessor::Report(const Processor::InferenceMetrics& im) {
109     Processor::Report(im);
110     if (im.nRuns > 0) {
111         const ClassificationInferenceMetrics& cim = dynamic_cast<const ClassificationInferenceMetrics&>(im);
112
113         cout << "Top1 accuracy: " << OUTPUT_FLOATING(100.0 * cim.top1Result / cim.total) << "% (" << cim.top1Result << " of "
114                 << cim.total << " images were detected correctly, top class is correct)" << "\n";
115         cout << "Top5 accuracy: " << OUTPUT_FLOATING(100.0 * cim.topCountResult / cim.total) << "% (" << cim.topCountResult << " of "
116             << cim.total << " images were detected correctly, top five classes contain required class)" << "\n";
117     }
118 }
119