1 // Copyright (C) 2018-2019 Intel Corporation
2 // SPDX-License-Identifier: Apache-2.0
8 #include <gtest/gtest.h>
9 #include <gmock/gmock.h>
11 #include "inference_engine.hpp"
12 #include "test_model_path.hpp"
13 #include <tests_file_utils.hpp>
20 #define UNUSED __attribute__((unused))
30 class BaseTestCreator {
34 explicit BaseTestCreator(const std::string& type) : _type(type) {}
36 virtual InferenceEngine::CNNLayerPtr create(const std::string& type) = 0;
38 virtual bool shouldCreate(const std::string& type) = 0;
42 class LayerTestCreator : public BaseTestCreator {
44 explicit LayerTestCreator(const std::string& type) : BaseTestCreator(type) {}
46 InferenceEngine::CNNLayerPtr create(const std::string& type) override {
47 InferenceEngine::LayerParams params;
49 return std::make_shared<LT>(params);
52 bool shouldCreate(const std::string& type) override {
// Common gtest fixture shared by the IE unit tests: layer-factory helpers,
// memory-usage probes, plugin path/name utilities, blob fill and compare
// helpers (members follow).
57 class TestsCommon : public ::testing::Test {
// Registry mapping IR layer-type names to their creators. Several aliases
// ("InnerProduct"/"FullyConnected", "LRN"/"Norm", "Softmax"/"SoftMax",
// "Split"/"Slice") deliberately share one layer class.
59 static std::vector<std::shared_ptr<BaseTestCreator>>& getCreators() {
60 // there should be unique_ptr but it cant be used with initializer lists
61 static std::vector<std::shared_ptr<BaseTestCreator> > creators = {
62 std::make_shared<LayerTestCreator<InferenceEngine::PowerLayer>>("Power"),
63 std::make_shared<LayerTestCreator<InferenceEngine::ConvolutionLayer>>("Convolution"),
64 std::make_shared<LayerTestCreator<InferenceEngine::DeconvolutionLayer>>("Deconvolution"),
65 std::make_shared<LayerTestCreator<InferenceEngine::PoolingLayer>>("Pooling"),
66 std::make_shared<LayerTestCreator<InferenceEngine::FullyConnectedLayer>>("InnerProduct"),
67 std::make_shared<LayerTestCreator<InferenceEngine::FullyConnectedLayer>>("FullyConnected"),
68 std::make_shared<LayerTestCreator<InferenceEngine::NormLayer>>("LRN"),
69 std::make_shared<LayerTestCreator<InferenceEngine::NormLayer>>("Norm"),
70 std::make_shared<LayerTestCreator<InferenceEngine::SoftMaxLayer>>("Softmax"),
71 std::make_shared<LayerTestCreator<InferenceEngine::SoftMaxLayer>>("SoftMax"),
72 std::make_shared<LayerTestCreator<InferenceEngine::GRNLayer>>("GRN"),
73 std::make_shared<LayerTestCreator<InferenceEngine::MVNLayer>>("MVN"),
74 std::make_shared<LayerTestCreator<InferenceEngine::ReLULayer>>("ReLU"),
75 std::make_shared<LayerTestCreator<InferenceEngine::ClampLayer>>("Clamp"),
76 std::make_shared<LayerTestCreator<InferenceEngine::SplitLayer>>("Split"),
77 std::make_shared<LayerTestCreator<InferenceEngine::SplitLayer>>("Slice"),
78 std::make_shared<LayerTestCreator<InferenceEngine::ConcatLayer>>("Concat"),
79 std::make_shared<LayerTestCreator<InferenceEngine::EltwiseLayer>>("Eltwise"),
80 std::make_shared<LayerTestCreator<InferenceEngine::ScaleShiftLayer>>("ScaleShift"),
81 std::make_shared<LayerTestCreator<InferenceEngine::PReLULayer>>("PReLU"),
82 std::make_shared<LayerTestCreator<InferenceEngine::CropLayer>>("Crop"),
83 std::make_shared<LayerTestCreator<InferenceEngine::ReshapeLayer>>("Reshape"),
84 std::make_shared<LayerTestCreator<InferenceEngine::TileLayer>>("Tile"),
85 std::make_shared<LayerTestCreator<InferenceEngine::BatchNormalizationLayer>>("BatchNormalization"),
86 std::make_shared<LayerTestCreator<InferenceEngine::GemmLayer>>("Gemm"),
87 std::make_shared<LayerTestCreator<InferenceEngine::PadLayer>>("Pad"),
88 std::make_shared<LayerTestCreator<InferenceEngine::GatherLayer>>("Gather"),
89 std::make_shared<LayerTestCreator<InferenceEngine::StridedSliceLayer>>("StridedSlice"),
90 std::make_shared<LayerTestCreator<InferenceEngine::ShuffleChannelsLayer>>("ShuffleChannels"),
91 std::make_shared<LayerTestCreator<InferenceEngine::DepthToSpaceLayer>>("DepthToSpace"),
92 std::make_shared<LayerTestCreator<InferenceEngine::ReverseSequenceLayer>>("ReverseSequence")
97 static InferenceEngine::CNNLayer::Ptr createLayer(const std::string& type) {
98 for (auto& creator : getCreators()) {
99 if (!creator->shouldCreate(type))
101 return creator->create(type);
103 static LayerTestCreator<InferenceEngine::GenericLayer> genericCreator("");
104 return genericCreator.create(type);
// Extracts the numeric KB value from a /proc/self/status line such as
// "VmSize:    123456 kB\n".
// This assumes that a digit will be found and the line ends in " Kb".
static size_t parseLine(char* line) {
    size_t i = strlen(line);
    const char* p = line;
    // Skip the field label up to the first digit.
    while (*p < '0' || *p > '9') p++;
    // Cut off the trailing unit suffix before converting.
    line[i - 3] = '\0';
    i = (size_t)atoi(p);
    return i;
}
117 static size_t getVmSizeInKB(){
118 FILE* file = fopen("/proc/self/status", "r");
120 if (file != nullptr) {
123 while (fgets(line, 128, file) != NULL) {
124 if (strncmp(line, "VmSize:", 7) == 0) {
125 result = parseLine(line);
134 static size_t getVmSizeInKBWin() {
135 PROCESS_MEMORY_COUNTERS pmc;
136 pmc.cb = sizeof(PROCESS_MEMORY_COUNTERS);
137 GetProcessMemoryInfo(GetCurrentProcess(),&pmc, pmc.cb);
138 return pmc.WorkingSetSize;
// Plugin-library search path. The two alternative one-line definitions below
// are selected by platform/build #if guards that are elided from this view —
// TODO confirm which guard picks "." vs "./lib".
144 static std::string library_path() {return ".";};
146 static std::string library_path() { return "./lib";};
// Relative path to the architecture-specific library directory, chosen by
// the pointer width of the current build (64- vs 32-bit).
static std::string archPath() {
    if (sizeof(void*) == 8) {
        return "../../lib/intel64";
    } else {
        return "../../lib/ia32";
    }
}
// No per-test cleanup is required; memory usage is only logged in SetUp().
158 void TearDown() override {}
160 void SetUp() override {
161 auto memsize = getVmSizeInKB();
163 std::cout << "\nMEM_USAGE=" << getVmSizeInKB() << "KB\n";
// Shared-library file name of the mock inference-engine plugin.
169 inline std::string get_mock_engine_name() {
170 return make_plugin_name("mock_engine");
// Shared-library file name of the mock extensions plugin.
173 inline std::string get_mock_extension_name() {
174 return make_plugin_name("mock_extensions");
176 static std::string get_data_path(){
177 const char* data_path = std::getenv("DATA_PATH");
179 if (data_path == NULL){
180 if(DATA_PATH != NULL){
181 data_path = DATA_PATH;
183 ::testing::AssertionFailure()<<"DATA_PATH not defined";
186 return std::string(data_path);
// Builds the platform-specific shared-library file name for `input`:
// prefix + input + IE_BUILD_POSTFIX + extension.
// NOTE(review): the pre/ext pairs below sit in alternative platform #if
// branches whose guards are elided from this view — presumably
// MinGW (lib*.dll), MSVC (*.dll), macOS (lib*.dylib), Linux (lib*.so);
// confirm against the full header.
189 static std::string make_so_name(const std::string & input) {
192 std::string pre = "lib";
193 std::string ext = ".dll";
195 std::string pre = "";
196 std::string ext = ".dll";
199 std::string pre = "lib";
200 std::string ext = ".dylib";
202 std::string pre = "lib";
203 std::string ext = ".so";
205 return pre + input + IE_BUILD_POSTFIX + ext;
// Plugin names use the same decoration scheme as any other shared library.
209 static std::string make_plugin_name(const std::string & input) {
210 return make_so_name(input);
// Fills a blob with the default sin-based test pattern; assumes the blob
// holds FP32 data (byteSize is divided by sizeof(float)).
213 static void fill_data(InferenceEngine::Blob::Ptr& blob) {
214 fill_data(blob->buffer().as<float*>(), blob->byteSize() / sizeof(float));
// Fills `data` with a duty-cycle test pattern: runs of `duty_ratio`
// elements alternate between zeros ("off" periods) and a sin(i) waveform,
// so both exact zeros and non-trivial values are exercised.
static void fill_data(float *data, size_t size, size_t duty_ratio = 10) {
    for (size_t i = 0; i < size; i++) {
        if ((i / duty_ratio) % 2 == 1) {
            data[i] = 0.0f;   // "off" half of the duty cycle
        } else {
            data[i] = sin((float)i);
        }
    }
}
// Fills an int32 buffer with non-zero values derived from the index and `n`.
// NOTE(review): the per-element assignment statement is elided from this
// view — confirm the value formula against the full header.
227 static void fill_data_non_zero(int32_t *data, size_t size, int n) {
228 for (size_t i = 0; i < size; i++) {
// Fills `data` with a deterministic binary (+1/-1) pattern keyed on sinf(i):
// strictly positive sine samples map to +1, everything else to -1.
static void fill_data_bin(float *data, size_t size) {
    for (size_t pos = 0; pos < size; ++pos) {
        const float wave = sinf((float)pos);
        data[pos] = (wave > 0.f) ? 1.f : -1.f;
    }
}
// Fills a bit-packed binary buffer: `size` is the number of binary values,
// each byte packs 8 of them, so only ceil(size/8) bytes are written.
static void fill_data_bin_packed(int8_t *data, size_t size) {
    const size_t nbits = 8;
    // Hoist the packed length out of the loop condition (div_up per
    // iteration in the original); same value, computed once.
    const size_t packed = (size + nbits - 1) / nbits;
    for (size_t i = 0; i < packed; i++) {
        data[i] = static_cast<int8_t>(i % 255);
    }
}
// Fills `data` with a sampled sine wave: data[i] = center + ampl*sin(i*omega).
static void fill_data_sine(float *data, size_t size, float center, float ampl, float omega) {
    for (size_t n = 0; n < size; ++n) {
        const float phase = (float)n * omega;
        data[n] = center + ampl * sin(phase);
    }
}
// Fills the first `size` elements of `data` with the constant `value`
// (equivalent to std::fill_n; kept as a loop to avoid extra includes).
static void fill_data_const(float *data, size_t size, float value) {
    for (size_t i = 0; i < size; i++) {
        data[i] = value;
    }
}
// Fills `data` with easily recognizable per-index debug values.
// NOTE(review): the per-element assignment is elided from this view —
// confirm the exact value expression against the full header.
258 static void fill_data_dbgval(float *data, size_t size) {
259 for (size_t i = 0; i < size; i++) {
// Element-wise comparison of two FP32 blobs: sizes must match and every
// element pair must agree within max_diff (gtest fatal assertions).
264 static void compare(InferenceEngine::Blob &res, InferenceEngine::Blob &ref, float max_diff = 0.01f) {
265 float *res_ptr = res.buffer().as<float*>();
266 size_t res_size = res.size();
268 float *ref_ptr = ref.buffer().as<float*>();
269 size_t ref_size = ref.size();
271 ASSERT_EQ(res_size, ref_size);
273 for (size_t i = 0; i < ref_size; i++) {
274 ASSERT_NEAR(res_ptr[i], ref_ptr[i], max_diff);
// Compares two FP32 blobs by normalized root-mean-square deviation: the RMS
// of element differences is normalized by the reference value range
// (mmax - mmin) and must not exceed max_nrmsd.
// NOTE(review): the accumulation/normalization statements between the
// visible lines are elided from this view.
278 static void compare_NRMSD(InferenceEngine::Blob &res, InferenceEngine::Blob &ref, float max_nrmsd = 0.01f) {
279 float *res_ptr = res.buffer().as<float*>();
280 size_t res_size = res.size();
282 float *ref_ptr = ref.buffer().as<float*>();
283 size_t ref_size = ref.size();
285 ASSERT_EQ(res_size, ref_size);
// Track the reference value range used for normalization.
289 float mmin = ref_ptr[0], mmax = ref_ptr[0];
291 for (size_t i = 0; i < ref_size; i++) {
292 float sqr = (ref_ptr[i] - res_ptr[i]);
// (std::min)/(std::max) are parenthesized to dodge Windows min/max macros.
296 mmin = (std::min)(mmin, ref_ptr[i]);
297 mmax = (std::max)(mmax, ref_ptr[i]);
// Periodic progress print for large blobs (every 10007th element).
299 if (i % 10007 == 0) {
300 std::cout << i << ": " << res_ptr[i] << "\t" << ref_ptr[i] << "\t" << "\tdiv: " << ref_ptr[i] / res_ptr[i] << std::endl;
310 ASSERT_LE(sum, max_nrmsd);
// Raw-pointer variant: asserts the first `size` elements of res and ref
// agree within max_diff.
313 static void compare(float* res, float* ref, size_t size, float max_diff = 0.01f) {
314 for (size_t i = 0; i < size; i++) {
315 ASSERT_NEAR(res[i], ref[i], max_diff);
// Replaces every occurrence of `from` in `str` with `to`, in place.
void replace(std::string& str, const std::string& from, const std::string& to) {
    std::string::size_type pos = 0;

    while ((pos = str.find(from, pos)) != std::string::npos) {
        str.replace(pos, from.length(), to);
        // Resume the search after the inserted text; without this the loop
        // re-scans the replacement and never terminates when `to` contains
        // `from` (e.g. replacing "_" with "__").
        pos += to.length();
    }
}
328 std::string replace(std::string& str, const std::string& from, const int& to) {
329 replace(str, from, std::to_string(to));
333 std::string replace(std::string& str, const std::string& from, const size_t& to) {
334 replace(str, from, std::to_string(to));
338 std::string replace(std::string& str, const std::string& from, const float& to) {
339 replace(str, from, std::to_string(to));
// trim from both ends (in place); returns the same string for chaining.
static inline std::string &trim(std::string &s) {
    // std::not1/std::ptr_fun (used originally) are deprecated since C++11
    // and removed in C++17 — use a lambda. Taking the character as
    // unsigned char also avoids UB in std::isspace for negative chars.
    auto not_space = [](unsigned char c) { return !std::isspace(c); };
    s.erase(s.begin(), std::find_if(s.begin(), s.end(), not_space));
    s.erase(std::find_if(s.rbegin(), s.rend(), not_space).base(), s.end());
    return s;
}
// Optional performance harness around an inference callable.
// When the DLSDK_performance_test env var is set, invokes `callInfer`
// DLSDK_ITER_NUM times, printing per-iteration latency plus interquartile
// statistics (Q25/Q75, mean, median) in milliseconds; the status code of
// the calls is tracked in `sts`.
// NOTE(review): the template header and several loop/statistics statements
// are elided from this view; comments describe only the visible lines.
351 static InferenceEngine::StatusCode measurePerformance(const T & callInfer) {
352 bool isPerformance = nullptr != getenv("DLSDK_performance_test");
353 if (!isPerformance) {
// Clock/duration aliases used for the measurements below.
357 typedef std::chrono::high_resolution_clock Time;
358 typedef std::chrono::nanoseconds ns;
359 typedef std::chrono::duration<float> fsec;
// NOTE(review): atoi(getenv(...)) dereferences a null pointer when
// DLSDK_ITER_NUM is unset — perf runs must export that variable.
361 size_t niter = atoi(getenv("DLSDK_ITER_NUM"));
362 std::vector<double> times(niter);
363 InferenceEngine::StatusCode sts = InferenceEngine::OK;
365 for (size_t i = 0; i < niter; ++i)
367 auto t0 = Time::now();
369 auto t1 = Time::now();
371 ns d = std::chrono::duration_cast<ns>(fs);
372 double total = static_cast<double>(d.count());
// nanoseconds -> milliseconds.
374 times[i] = total*0.000001;
377 for (size_t i = 0; i < times.size(); i++)
378 std::cout << "Iteration: " << i << " | infer time: " << times[i] << " ms" << std::endl;
// Sort so the quantile indices below address the ordered sample.
380 std::sort(times.begin(), times.end());
382 size_t first_index = (size_t)floor(times.size() * 0.25);
383 size_t last_index = (size_t)floor(times.size() * 0.75);
384 size_t num = last_index - first_index;
386 std::cout << "Q25: " << times[first_index] << std::endl;
387 std::cout << "Q75: " << times[last_index] << std::endl;
392 last_index = times.size();
// Interquartile subsample used for the mean/median reported below.
396 std::vector<double> clipped_times;
398 for (auto i = first_index; i < last_index; i++)
400 clipped_times.push_back(times[i]);
404 mean = mean/clipped_times.size();
// Median: middle element for odd counts, average of the two middles otherwise.
407 if (clipped_times.size()%2 != 0)
408 median = clipped_times[int(clipped_times.size()/2)];
409 else median = (clipped_times[int(clipped_times.size()/2)] + clipped_times[int(clipped_times.size()/2)-1])/2;
411 std::cout << "mean: " << mean << std::endl;
412 std::cout << "median: " << median << std::endl;
415 clipped_times.clear();
// Ceiling integer division: number of b-sized chunks needed to cover a.
// Precondition: b != 0; intended for non-negative integral operands.
template <typename T, typename U>
static inline T div_up(const T a, const U b) {
    return (a + b - 1) / b;
}
// Pointer-width based architecture detection: defines ENVIRONMENT32 on
// 32-bit builds and ENVIRONMENT64 on 64-bit builds, failing the build
// otherwise. NOTE(review): parts of the guard structure (#else/#endif)
// are elided from this view.
430 #if UINTPTR_MAX == 0xffffffff
432 #define ENVIRONMENT32
433 #elif UINTPTR_MAX == 0xffffffffffffffff
435 #define ENVIRONMENT64
437 # error Unsupported architecture
// Downcast helper: views a Blob-like shared_ptr as TBlob<T>; yields an
// empty shared_ptr when the runtime type does not match (dynamic cast).
441 template <typename T,typename S>
442 std::shared_ptr<InferenceEngine::TBlob<T>> to_tblob(const std::shared_ptr<S> &obj)
444 return std::dynamic_pointer_cast<InferenceEngine::TBlob<T>>(obj);
// Returns the first input descriptor of the network.
// NOTE(review): inputs.begin() is dereferenced without checking that the
// map is non-empty (the ASSERT below is commented out) — calling this on a
// network without inputs is undefined behavior.
447 inline InferenceEngine::InputInfo::Ptr getFirstInput(InferenceEngine::ICNNNetwork *pNet)
449 InferenceEngine::InputsDataMap inputs;
450 pNet->getInputsInfo(inputs);
451 //ASSERT_GT(inputs.size(), 0);
452 return inputs.begin()->second;
// Plugin/configuration options: string key -> string value.
455 using OptionsMap = std::map<std::string, std::string>;