// Copyright (C) 2018-2019 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//

#pragma once

#include <gtest/gtest.h>
#include <gmock/gmock.h>

#include "inference_engine.hpp"
#include "test_model_path.hpp"
#include <tests_file_utils.hpp>

#include <algorithm>
#include <cassert>
#include <cctype>
#include <chrono>
#include <cmath>
#include <iostream>
#include <map>
#include <memory>
#include <string>
#include <vector>

#ifdef _WIN32
#define UNUSED
#else
#define UNUSED  __attribute__((unused))
#endif

#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#ifdef _WIN32
    #include <windows.h>
    #include <psapi.h>
#endif

class BaseTestCreator {
protected:
    std::string _type;
public:
    explicit BaseTestCreator(const std::string& type) : _type(type) {}
    virtual ~BaseTestCreator() = default;

    virtual InferenceEngine::CNNLayerPtr create(const std::string& type) = 0;

    virtual bool shouldCreate(const std::string& type) = 0;
};

template<class LT>
class LayerTestCreator : public BaseTestCreator {
public:
    explicit LayerTestCreator(const std::string& type) : BaseTestCreator(type) {}

    InferenceEngine::CNNLayerPtr create(const std::string& type) override {
        InferenceEngine::LayerParams params;
        params.type = type;
        return std::make_shared<LT>(params);
    }

    bool shouldCreate(const std::string& type) override {
        return type == _type;
    }
};

class TestsCommon : public ::testing::Test {
private:
    static std::vector<std::shared_ptr<BaseTestCreator>>& getCreators() {
        // these should be unique_ptr, but unique_ptr cannot be used with initializer lists
        static std::vector<std::shared_ptr<BaseTestCreator>> creators = {
                std::make_shared<LayerTestCreator<InferenceEngine::PowerLayer>>("Power"),
                std::make_shared<LayerTestCreator<InferenceEngine::ConvolutionLayer>>("Convolution"),
                std::make_shared<LayerTestCreator<InferenceEngine::DeconvolutionLayer>>("Deconvolution"),
                std::make_shared<LayerTestCreator<InferenceEngine::PoolingLayer>>("Pooling"),
                std::make_shared<LayerTestCreator<InferenceEngine::FullyConnectedLayer>>("InnerProduct"),
                std::make_shared<LayerTestCreator<InferenceEngine::FullyConnectedLayer>>("FullyConnected"),
                std::make_shared<LayerTestCreator<InferenceEngine::NormLayer>>("LRN"),
                std::make_shared<LayerTestCreator<InferenceEngine::NormLayer>>("Norm"),
                std::make_shared<LayerTestCreator<InferenceEngine::SoftMaxLayer>>("Softmax"),
                std::make_shared<LayerTestCreator<InferenceEngine::SoftMaxLayer>>("SoftMax"),
                std::make_shared<LayerTestCreator<InferenceEngine::GRNLayer>>("GRN"),
                std::make_shared<LayerTestCreator<InferenceEngine::MVNLayer>>("MVN"),
                std::make_shared<LayerTestCreator<InferenceEngine::ReLULayer>>("ReLU"),
                std::make_shared<LayerTestCreator<InferenceEngine::ClampLayer>>("Clamp"),
                std::make_shared<LayerTestCreator<InferenceEngine::SplitLayer>>("Split"),
                std::make_shared<LayerTestCreator<InferenceEngine::SplitLayer>>("Slice"),
                std::make_shared<LayerTestCreator<InferenceEngine::ConcatLayer>>("Concat"),
                std::make_shared<LayerTestCreator<InferenceEngine::EltwiseLayer>>("Eltwise"),
                std::make_shared<LayerTestCreator<InferenceEngine::ScaleShiftLayer>>("ScaleShift"),
                std::make_shared<LayerTestCreator<InferenceEngine::PReLULayer>>("PReLU"),
                std::make_shared<LayerTestCreator<InferenceEngine::CropLayer>>("Crop"),
                std::make_shared<LayerTestCreator<InferenceEngine::ReshapeLayer>>("Reshape"),
                std::make_shared<LayerTestCreator<InferenceEngine::TileLayer>>("Tile"),
                std::make_shared<LayerTestCreator<InferenceEngine::BatchNormalizationLayer>>("BatchNormalization"),
                std::make_shared<LayerTestCreator<InferenceEngine::GemmLayer>>("Gemm"),
                std::make_shared<LayerTestCreator<InferenceEngine::PadLayer>>("Pad"),
                std::make_shared<LayerTestCreator<InferenceEngine::GatherLayer>>("Gather"),
                std::make_shared<LayerTestCreator<InferenceEngine::StridedSliceLayer>>("StridedSlice"),
                std::make_shared<LayerTestCreator<InferenceEngine::ShuffleChannelsLayer>>("ShuffleChannels"),
                std::make_shared<LayerTestCreator<InferenceEngine::DepthToSpaceLayer>>("DepthToSpace"),
                std::make_shared<LayerTestCreator<InferenceEngine::ReverseSequenceLayer>>("ReverseSequence")
        };
        return creators;
    }
public:
    static InferenceEngine::CNNLayer::Ptr createLayer(const std::string& type) {
        for (auto& creator : getCreators()) {
            if (!creator->shouldCreate(type))
                continue;
            return creator->create(type);
        }
        static LayerTestCreator<InferenceEngine::GenericLayer> genericCreator("");
        return genericCreator.create(type);
    }
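
    // Example (illustrative): createLayer() returns a layer object for any of the
    // type strings registered above and falls back to GenericLayer for anything
    // else, so both calls below yield a non-null CNNLayerPtr whose type field
    // matches the argument ("MyCustomOp" is a hypothetical custom type):
    //   InferenceEngine::CNNLayerPtr conv    = TestsCommon::createLayer("Convolution");
    //   InferenceEngine::CNNLayerPtr unknown = TestsCommon::createLayer("MyCustomOp");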

    static size_t parseLine(char* line) {
        // This assumes that a digit will be found and the line ends in " kB".
        size_t i = strlen(line);
        const char* p = line;
        while (*p < '0' || *p > '9') p++;
        line[i - 3] = '\0';
        i = (size_t)atoi(p);
        return i;
    }
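
    // Illustrative note on parseLine() above: the lines it is meant to consume
    // come from /proc/self/status and look roughly like
    //   "VmSize:\t  123456 kB\n"
    // It skips ahead to the first digit and truncates the buffer before the unit
    // suffix, so for the line above it returns 123456.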

    static size_t getVmSizeInKB() {
        FILE* file = fopen("/proc/self/status", "r");
        size_t result = 0;
        if (file != nullptr) {
            char line[128];

            while (fgets(line, 128, file) != NULL) {
                if (strncmp(line, "VmSize:", 7) == 0) {
                    result = parseLine(line);
                    break;
                }
            }
            fclose(file);
        }
        return result;
    }
#ifdef _WIN32
    static size_t getVmSizeInKBWin() {
        PROCESS_MEMORY_COUNTERS pmc;
        pmc.cb = sizeof(PROCESS_MEMORY_COUNTERS);
        GetProcessMemoryInfo(GetCurrentProcess(), &pmc, pmc.cb);
        // WorkingSetSize is reported in bytes; convert to KB to match the function name.
        return pmc.WorkingSetSize / 1024;
    }
#endif

public:
#ifdef _WIN32
    static std::string library_path() { return "."; }
#else
    static std::string library_path() { return "./lib"; }
#endif  // _WIN32

    static std::string archPath() {
        if (sizeof(void*) == 8) {
            return "../../lib/intel64";
        } else {
            return "../../lib/ia32";
        }
    }

protected:
    void TearDown() override {}

    void SetUp() override {
        auto memsize = getVmSizeInKB();
        if (memsize != 0) {
            std::cout << "\nMEM_USAGE=" << memsize << "KB\n";
        }
    }

public:
    inline std::string get_mock_engine_name() {
        return make_plugin_name("mock_engine");
    }

    inline std::string get_mock_extension_name() {
        return make_plugin_name("mock_extensions");
    }

    static std::string get_data_path() {
        const char* data_path = std::getenv("DATA_PATH");

        if (data_path == nullptr) {
            if (DATA_PATH != NULL) {
                data_path = DATA_PATH;
            } else {
                ::testing::AssertionFailure() << "DATA_PATH not defined";
                return std::string();
            }
        }
        return std::string(data_path);
    }

    static std::string make_so_name(const std::string & input) {
#ifdef _WIN32
    #ifdef __MINGW32__
        std::string pre = "lib";
        std::string ext = ".dll";
    #else
        std::string pre = "";
        std::string ext = ".dll";
    #endif
#elif defined(__APPLE__)
        std::string pre = "lib";
        std::string ext = ".dylib";
#else
        std::string pre = "lib";
        std::string ext = ".so";
#endif
        return pre + input + IE_BUILD_POSTFIX + ext;
    }

    static std::string make_plugin_name(const std::string & input) {
        return make_so_name(input);
    }
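
    // Example (illustrative, assuming IE_BUILD_POSTFIX expands to an empty string):
    //   make_plugin_name("mock_engine") resolves to
    //     "libmock_engine.so"     on Linux,
    //     "libmock_engine.dylib"  on macOS,
    //     "mock_engine.dll"       on Windows (MSVC), "libmock_engine.dll" under MinGW.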

    static void fill_data(InferenceEngine::Blob::Ptr& blob) {
        fill_data(blob->buffer().as<float*>(), blob->byteSize() / sizeof(float));
    }

    static void fill_data(float *data, size_t size, size_t duty_ratio = 10) {
        for (size_t i = 0; i < size; i++) {
            if ((i / duty_ratio) % 2 == 1) {
                data[i] = 0.0;
            } else {
                data[i] = sin((float)i);
            }
        }
    }

    static void fill_data_non_zero(int32_t *data, size_t size, int n) {
        for (size_t i = 0; i < size; i++) {
            data[i] = static_cast<int32_t>(n * i % 254 + 1);
        }
    }

    static void fill_data_bin(float *data, size_t size) {
        for (size_t i = 0; i < size; i++) {
            data[i] = sinf((float)i) > 0.f ? 1.f : -1.f;
        }
    }

    static void fill_data_bin_packed(int8_t *data, size_t size) {
        int nbits = 8;
        for (size_t i = 0; i < div_up(size, nbits); i++) {
            data[i] = static_cast<int8_t>(i % 255);
        }
    }

    static void fill_data_sine(float *data, size_t size, float center, float ampl, float omega) {
        for (size_t i = 0; i < size; i++) {
            data[i] = center + ampl * sin((float)i * omega);
        }
    }

    static void fill_data_const(float *data, size_t size, float value) {
        for (size_t i = 0; i < size; i++) {
            data[i] = value;
        }
    }

    static void fill_data_dbgval(float *data, size_t size) {
        for (size_t i = 0; i < size; i++) {
            data[i] = static_cast<float>(i);
        }
    }

    static void compare(InferenceEngine::Blob &res, InferenceEngine::Blob &ref, float max_diff = 0.01f) {
        float *res_ptr = res.buffer().as<float*>();
        size_t res_size = res.size();

        float *ref_ptr = ref.buffer().as<float*>();
        size_t ref_size = ref.size();

        ASSERT_EQ(res_size, ref_size);

        for (size_t i = 0; i < ref_size; i++) {
            ASSERT_NEAR(res_ptr[i], ref_ptr[i], max_diff);
        }
    }

    static void compare_NRMSD(InferenceEngine::Blob &res, InferenceEngine::Blob &ref, float max_nrmsd = 0.01f) {
        float *res_ptr = res.buffer().as<float*>();
        size_t res_size = res.size();

        float *ref_ptr = ref.buffer().as<float*>();
        size_t ref_size = ref.size();

        ASSERT_EQ(res_size, ref_size);

        float sum = 0;

        float mmin = ref_ptr[0], mmax = ref_ptr[0];

        for (size_t i = 0; i < ref_size; i++) {
            float sqr = ref_ptr[i] - res_ptr[i];
            sqr *= sqr;
            sum += sqr;

            mmin = (std::min)(mmin, ref_ptr[i]);
            mmax = (std::max)(mmax, ref_ptr[i]);

            if (i % 10007 == 0) {
                std::cout << i << ": " << res_ptr[i] << "\t" << ref_ptr[i] << "\tdiv: " << ref_ptr[i] / res_ptr[i] << std::endl;
            }
        }
        sum /= ref_size;

        sum = std::sqrt(sum);

        sum /= mmax - mmin;

        ASSERT_LE(sum, max_nrmsd);
    }
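
    // compare_NRMSD above checks the normalized root-mean-square deviation:
    //   NRMSD = sqrt( (1/N) * sum_i (ref_i - res_i)^2 ) / (max(ref) - min(ref))
    // Worked example (illustrative): ref = {0, 1}, res = {0, 0.9} gives
    //   sqrt(0.01 / 2) / 1 ≈ 0.0707, which exceeds the default 0.01 threshold.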

    static void compare(float* res, float* ref, size_t size, float max_diff = 0.01f) {
        for (size_t i = 0; i < size; i++) {
            ASSERT_NEAR(res[i], ref[i], max_diff);
        }
    }

    void replace(std::string& str, const std::string& from, const std::string& to) {
        std::string::size_type pos = 0;

        while ((pos = str.find(from, pos)) != std::string::npos) {
            str.replace(pos, from.length(), to);
            pos += to.length();
        }
    }

    std::string replace(std::string& str, const std::string& from, const int& to) {
        replace(str, from, std::to_string(to));
        return str;
    }

    std::string replace(std::string& str, const std::string& from, const size_t& to) {
        replace(str, from, std::to_string(to));
        return str;
    }

    std::string replace(std::string& str, const std::string& from, const float& to) {
        replace(str, from, std::to_string(to));
        return str;
    }

    // trim from both ends (in place)
    static inline std::string &trim(std::string &s) {
        // std::not1/std::ptr_fun are deprecated in C++11 and removed in C++17, so use lambdas instead
        s.erase(s.begin(), std::find_if(s.begin(), s.end(), [](unsigned char c) { return !std::isspace(c); }));
        s.erase(std::find_if(s.rbegin(), s.rend(), [](unsigned char c) { return !std::isspace(c); }).base(), s.end());
        return s;
    }

    template <class T>
    static InferenceEngine::StatusCode measurePerformance(const T & callInfer) {
        bool isPerformance = nullptr != getenv("DLSDK_performance_test");
        if (!isPerformance) {
            return callInfer();
        }

        typedef std::chrono::high_resolution_clock Time;
        typedef std::chrono::nanoseconds ns;
        typedef std::chrono::duration<float> fsec;

        // DLSDK_ITER_NUM may be unset; fall back to a single iteration instead of passing nullptr to atoi
        const char* iter_env = getenv("DLSDK_ITER_NUM");
        size_t niter = (iter_env != nullptr) ? (size_t)atoi(iter_env) : 1;
        std::vector<double> times(niter);
        InferenceEngine::StatusCode sts = InferenceEngine::OK;

        for (size_t i = 0; i < niter; ++i) {
            auto t0 = Time::now();
            sts = callInfer();
            auto t1 = Time::now();
            fsec fs = t1 - t0;
            ns d = std::chrono::duration_cast<ns>(fs);
            double total = static_cast<double>(d.count());

            // convert nanoseconds to milliseconds
            times[i] = total * 0.000001;
        }

        for (size_t i = 0; i < times.size(); i++)
            std::cout << "Iteration: " << i << " | infer time: " << times[i] << " ms" << std::endl;

        std::sort(times.begin(), times.end());

        size_t first_index = (size_t)floor(times.size() * 0.25);
        size_t last_index = (size_t)floor(times.size() * 0.75);
        size_t num = last_index - first_index;

        std::cout << "Q25: " << times[first_index] << std::endl;
        std::cout << "Q75: " << times[last_index]  << std::endl;

        // with fewer than 4 iterations the interquartile clipping is meaningless, so keep all samples
        if (niter < 4) {
            first_index = 0;
            last_index = times.size();
            num = times.size();
        }

        std::vector<double> clipped_times;
        double mean = 0;
        for (auto i = first_index; i < last_index; i++) {
            clipped_times.push_back(times[i]);
            mean += times[i];
        }

        mean = mean / clipped_times.size();

        double median = 0;
        if (clipped_times.size() % 2 != 0)
            median = clipped_times[clipped_times.size() / 2];
        else
            median = (clipped_times[clipped_times.size() / 2] + clipped_times[clipped_times.size() / 2 - 1]) / 2;

        std::cout << "mean: " << mean << std::endl;
        std::cout << "median: " << median << std::endl;

        times.clear();
        clipped_times.clear();

        return sts;
    }
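
    // Example (illustrative sketch) for measurePerformance above: any callable
    // returning an InferenceEngine::StatusCode can be wrapped; `request` and
    // `resp` below are hypothetical names for an IInferRequest pointer and a
    // ResponseDesc. Timing statistics are printed only when the
    // DLSDK_performance_test environment variable is set:
    //   InferenceEngine::ResponseDesc resp;
    //   auto sts = TestsCommon::measurePerformance([&]() {
    //       return request->Infer(&resp);
    //   });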

    template <typename T, typename U>
    static inline T div_up(const T a, const U b) {
        assert(b);
        return (a + b - 1) / b;
    }
};


// Check bitness
#include <stdint.h>
#if UINTPTR_MAX == 0xffffffff
    /* 32-bit */
    #define ENVIRONMENT32
#elif UINTPTR_MAX == 0xffffffffffffffff
    /* 64-bit */
    #define ENVIRONMENT64
#else
    #error Unsupported architecture
#endif


template <typename T, typename S>
std::shared_ptr<InferenceEngine::TBlob<T>> to_tblob(const std::shared_ptr<S> &obj) {
    return std::dynamic_pointer_cast<InferenceEngine::TBlob<T>>(obj);
}
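
// Example (illustrative): to_tblob performs a dynamic_pointer_cast, so it returns
// an empty shared_ptr when the blob's element type does not match T
// (`blob` below is a hypothetical Blob::Ptr):
//   auto fp32 = to_tblob<float>(blob);
//   ASSERT_NE(fp32, nullptr);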

inline InferenceEngine::InputInfo::Ptr getFirstInput(InferenceEngine::ICNNNetwork *pNet) {
    InferenceEngine::InputsDataMap inputs;
    pNet->getInputsInfo(inputs);
    // ASSERT_GT(inputs.size(), 0);
    return inputs.begin()->second;
}

using OptionsMap = std::map<std::string, std::string>;