1 // Copyright (C) 2018 Intel Corporation
3 // SPDX-License-Identifier: Apache-2.0
7 #include "ie_layer_validators.hpp"
9 #include "xml_parse_utils.h"
14 #include <ie_iextension.h>
15 #include <v2_format_parser.h>
17 using namespace InferenceEngine;
18 using namespace details;
// Full validation pipeline for one layer: look up the validator registered
// for this layer's `type`, parse and check its parameters, then validate the
// input shapes.
// NOTE(review): this extract is missing lines (e.g. the declaration of
// `shapes` and the closing brace) — the visible text is not compilable as-is.
21 void CNNLayer::validateLayer() {
22 LayerValidator::Ptr validator = LayerValidators::getInstance()->getValidator(type);
23 validator->parseParams(this);
24 validator->checkParams(this);
26 getInOutShapes(this, shapes);  // fills `shapes` (declared in a line not visible here)
27 validator->checkShapes(this, shapes.inDims);
// Parameter bundle describing the geometry of a weightable layer
// (convolution / deconvolution / fully-connected), consumed by
// checkWeightable() below.
// isKernelFromInput == true means kernel H/W are derived from the 4D input
// shape instead of the explicit _kernel_h/_kernel_w arguments.
// NOTE(review): the constructor's member-initializer list is truncated in
// this extract — the `groups` initializer and closing brace are not visible.
30 struct WeightableParams {
31 size_t kernel_w, kernel_h, outputs, groups;
32 bool isKernelFromInput;
34 WeightableParams(size_t _outputs, bool _isKernelFromInput, size_t _groups = 0, size_t _kernel_h = 0,
35 size_t _kernel_w = 0) : outputs(_outputs), isKernelFromInput(_isKernelFromInput),
36 kernel_h(_kernel_h), kernel_w(_kernel_w),
// Validates the "weights" and "biases" blobs of a weightable layer against
// the expected geometry (kernel size, input channels, output depth, groups)
// derived from the single input shape and `params`.
// Throws IE exceptions on any mismatch.
// NOTE(review): several lines are missing from this extract — in particular
// no visible statement assigns OC, and loop/brace closures are dropped.
// Verify against the full source before relying on the visible control flow.
40 void checkWeightable(const std::map<std::string, Blob::Ptr>& blobs,
41 const vector<SizeVector>& inShapes, WeightableParams params,
42 const SizeVector& numDims) {
43 if (inShapes.size() != 1)
44 THROW_IE_EXCEPTION << "Number of inputs (" << inShapes.size() << ") is not equal to expected ones (1)";
45 SizeVector firstInputShape = inShapes[0];
46 size_t inputSize = firstInputShape.size();
// Input rank must be one of the sizes listed in numDims.
49 for (auto dim : numDims) {
50 if (inputSize == dim) {
56 THROW_IE_EXCEPTION << "Input shape " << details::dumpVec(firstInputShape)
57 << " has unexpected size, supported sizes: " << details::dumpVec(numDims);
60 if (firstInputShape.empty()) THROW_IE_EXCEPTION << "Input shape can't be empty";
// Kernel defaults to 1x1; IC is the channel dimension (NCHW layout assumed
// — TODO confirm), OC is the output depth (assignment not visible here).
62 size_t KW = 1, KH = 1, IC, OC;
63 IC = firstInputShape[1];
64 if (params.isKernelFromInput) {
65 if (firstInputShape.size() == 4) {
66 KH = firstInputShape[2];
67 KW = firstInputShape[3];
// Weights check: total element count must equal OC*KW*KH*IC (divided by
// group count for grouped convolutions).
75 auto it = blobs.find("weights");
77 blobs.end()) { // TODO: return with fixing shape infer tests: THROW_IE_EXCEPTION << "Invalid blobs: no weights";
78 auto weights = it->second;
79 if (weights == nullptr || weights->dims().empty()) THROW_IE_EXCEPTION << "Weights can't be empty";
81 auto weightsSize = details::product(weights->dims());
82 size_t expectedWeightsSize = OC * KW * KH * IC;
83 if (params.groups) expectedWeightsSize /= params.groups;
84 if (expectedWeightsSize != weightsSize) {
85 THROW_IE_EXCEPTION << "New shapes " << details::dumpVec(firstInputShape) << " make Kernels(" << KH << "x"
86 << KW << "), Channels(" << IC << "), Output depth(" << OC << "), Groups("
87 << params.groups << ") not matching weights size: " << weightsSize;
// Biases check (optional blob): element count must equal the output depth.
91 it = blobs.find("biases");
92 if (it != blobs.end()) {
93 auto biases = it->second;
94 if (biases == nullptr || biases->dims().empty()) THROW_IE_EXCEPTION << "Biases can't be empty";
95 auto biasesSize = details::product(biases->dims());
96 if (OC != biasesSize) {
97 THROW_IE_EXCEPTION << "Number of outputs (" << OC << ") don't match biases size: " << biasesSize;
// Lazily-created process-wide singleton registry of per-type validators.
// NOTE(review): the null-check guard around `new` is not visible in this
// extract; as written the raw static pointer is never deleted and the
// creation is not thread-safe — confirm against the full source.
102 LayerValidators* LayerValidators::getInstance() {
104 _instance = new LayerValidators();
// Returns the validator registered for `type`, or a fresh GeneralValidator
// (no type-specific checks) for unknown types.
109 LayerValidator::Ptr LayerValidators::getValidator(const std::string& type) {
110 if (_validators.find(type) == _validators.end()) {
111 return std::make_shared<GeneralValidator>(type);
113 return _validators[type];
// Registers (or overwrites) the validator for a layer type.
116 void LayerValidators::addImpl(const std::string& type, const LayerValidator::Ptr& validator) {
117 _validators[type] = validator;
// Singleton storage; starts null until first getInstance() call.
120 LayerValidators* LayerValidators::_instance = nullptr;
// Fallback validator used for layer types with no registered implementation;
// inherits the base LayerValidator behavior unchanged.
122 GeneralValidator::GeneralValidator(const std::string& _type) : LayerValidator(_type) {}
// Parses FullyConnected-specific parameters: the mandatory "out-size"
// attribute becomes _out_num.
124 void FullyConnectedValidator::parseParams(CNNLayer* layer) {
125 auto casted = dynamic_cast<FullyConnectedLayer*>(layer);
127 THROW_IE_EXCEPTION << "Layer is not instance of FullyConnectedLayer class";
129 casted->_out_num = casted->GetParamAsUInt("out-size");
// Parameter-range checks are not implemented yet (see TODO).
132 void FullyConnectedValidator::checkParams(const CNNLayer* layer) {
133 // TODO: check that values belong to the scope of the definition according to spec
// Cross-checks weights/biases blobs against the layer geometry; FC kernels
// are taken from the input shape (isKernelFromInput = true), group = 1,
// and 2D or 4D inputs are accepted.
136 void FullyConnectedValidator::checkCorrespondence(const CNNLayer* layer,
137 const std::map<std::string, Blob::Ptr>& blobs,
138 const vector<SizeVector>& inShapes) const {
139 const auto casted = dynamic_cast<const FullyConnectedLayer*>(layer);
140 if (!casted) THROW_IE_EXCEPTION << "Layer is not instance of FullyConnectedLayer class";
141 checkWeightable(blobs, inShapes, {casted->_out_num, true, 1}, {4, 2});
144 FullyConnectedValidator::FullyConnectedValidator(const std::string& _type) : LayerValidator(_type) {}
// Parses Crop parameters. Each of "axis", "offset", "dim" and "crop_begin"
// is a comma-separated integer list converted via the local getArray lambda.
// Parsing only runs when `axis` has not been filled yet (idempotence guard).
146 void CropValidator::parseParams(CNNLayer* layer) {
147 auto casted = dynamic_cast<CropLayer*>(layer);
149 THROW_IE_EXCEPTION << "Layer is not instance of CropLayer class";
151 if (casted->axis.empty()) {
152 auto getArray = [](std::string param, vector<int>& array) {
153 std::istringstream stream(param);
155 while (getline(stream, str, ',')) {
156 int val = std::stoi(str);
157 array.push_back(val);
160 getArray(layer->GetParamAsString("axis"), casted->axis);
161 if (casted->params.find("offset") != casted->params.end()) {
162 getArray(layer->GetParamAsString("offset"), casted->offset);
164 if (casted->params.find("dim") != casted->params.end()) {
165 getArray(layer->GetParamAsString("dim"), casted->dim);
167 if (casted->params.find("crop_begin") != casted->params.end()) {
// NOTE(review): "crop_begin" is deliberately parsed into `offset` (the
// crop start IS the offset) — confirm against the full source/spec.
168 getArray(layer->GetParamAsString("crop_begin"), casted->offset);
// Parameter consistency: each axis entry must have a matching offset entry.
173 void CropValidator::checkParams(const CNNLayer* layer) {
174 auto casted = dynamic_cast<const CropLayer*>(layer);
176 THROW_IE_EXCEPTION << "Layer is not instance of CropLayer class";
178 if (casted->axis.size() != casted->offset.size()) {
179 THROW_IE_EXCEPTION << "Incorrect format of the Crop layer: number of axis doesn't match number of offset - ("
180 << casted->axis.size() << " vs. " << casted->offset.size() << ")";
184 CropValidator::CropValidator(const std::string& _type) : LayerValidator(_type) {}
// Shape validation: with 2 inputs the second input dictates the cropped
// size per axis; with 1 input the explicit `dim` list does.
186 void CropValidator::checkShapes(const CNNLayer* layer, const vector<SizeVector>& inShapes) const {
187 auto casted = dynamic_cast<const CropLayer*>(layer);
189 THROW_IE_EXCEPTION << "Layer is not instance of CropLayer class";
191 size_t numInputs = inShapes.size();
192 if (numInputs != 1 && numInputs != 2) {
193 THROW_IE_EXCEPTION << "Crop can take only 1 or 2 inputs, but actually it has: " << numInputs;
195 auto firstShape = inShapes[0];
196 size_t shapeSize = firstShape.size();
197 for (size_t i = 0; i < casted->axis.size(); i++) {
198 int axis = casted->axis[i];
199 int offset = casted->offset[i];
// NOTE(review): signed/unsigned comparison — a negative `axis` converts
// to a huge size_t here and would not be rejected; confirm intent.
200 if (shapeSize <= axis)
201 THROW_IE_EXCEPTION << "Crop axis(" << casted->axis[i]
202 << ") should be less the number of dimensions of first input ("
203 << firstShape.size() << ")";
204 if (numInputs == 2) {
// crop_begin/crop_end syntax is only meaningful for the 1-input form.
205 if (casted->params.find("crop_begin") != casted->params.end()) {
207 << "Incorrect format of the Crop layer: `crop_begin` and `crop_end` attributes are valid for single input only";
209 auto secondShape = inShapes[1];
210 if (secondShape.size() <= axis)
211 THROW_IE_EXCEPTION << "Crop axis(" << axis
212 << ") should be less the number of dimensions of second input ("
213 << secondShape.size() << ")";
214 size_t newSize = secondShape[axis];
215 if (firstShape[axis] < static_cast<size_t>(offset + newSize)) {
216 THROW_IE_EXCEPTION << "Incorrect crop data! Offset(" << offset << ") + result size of output("
217 << newSize << ") should be less then input size(" << firstShape[axis]
218 << ") for axis(" << axis << ")";
220 } else if (!casted->dim.empty()) {
221 int dim = casted->dim[i];
222 if (firstShape[axis] < static_cast<size_t>(offset + dim)) {
223 THROW_IE_EXCEPTION << "Incorrect crop data! Offset(" << offset << ") + result size of output("
224 << dim << ") should be less then input size(" << firstShape[axis]
225 << ") for axis(" << axis << ")";
231 ConvolutionValidator::ConvolutionValidator(const std::string& _type) : LayerValidator(_type) {}
// Parses Convolution parameters for two IR versions:
//  - pre-v3 IR: per-axis attributes ("kernel-x"/"kernel-y", "stride-x", ...)
//  - v3 IR: vector attributes ("kernel", "strides", "pads_begin", ...),
//    stored reversed so index 0 is the innermost (X) axis.
// Zero strides are silently coerced to 1 with a warning in the legacy path
// but rejected with an exception in the v3 path (see TODO about this).
233 void ConvolutionValidator::parseParams(CNNLayer* layer) {
234 auto convLayer = dynamic_cast<ConvolutionLayer*>(layer);
236 THROW_IE_EXCEPTION << "Layer is not instance of ConvolutionLayer class";
238 auto version = BaseCreator::version_;
239 convLayer->_out_depth = convLayer->GetParamAsUInt("output");
242 convLayer->_kernel.clear();
243 convLayer->_kernel.insert(X_AXIS, convLayer->GetParamAsUInt("kernel-x"));
244 convLayer->_kernel.insert(Y_AXIS, convLayer->GetParamAsUInt("kernel-y"));
246 convLayer->_stride.clear();
247 convLayer->_stride.insert(X_AXIS, convLayer->GetParamAsUInt("stride-x", 1u));
248 convLayer->_stride.insert(Y_AXIS, convLayer->GetParamAsUInt("stride-y", 1u));
249 // TODO: maybe just throw exception, why do we change IR?
250 if (0 == convLayer->_stride[X_AXIS]) {
251 convLayer->_stride[X_AXIS] = 1u;
252 LogError("Warning! in layer %s: Stride x is 0, setting to 1 ", convLayer->name.c_str());
254 if (0 == convLayer->_stride[Y_AXIS]) {
255 convLayer->_stride[Y_AXIS] = 1u;
256 LogError("Warning! in layer %s: Stride y is 0, setting to 1", convLayer->name.c_str());
259 convLayer->_padding.clear();
260 convLayer->_padding.insert(X_AXIS, convLayer->GetParamAsUInt("pad-x", 0u));
261 convLayer->_padding.insert(Y_AXIS, convLayer->GetParamAsUInt("pad-y", 0u));
// End padding defaults to the begin padding when "pad-r"/"pad-b" absent.
263 convLayer->_pads_end.clear();
264 convLayer->_pads_end.insert(X_AXIS, convLayer->GetParamAsUInt("pad-r", convLayer->_padding[X_AXIS]));
265 convLayer->_pads_end.insert(Y_AXIS, convLayer->GetParamAsUInt("pad-b", convLayer->_padding[Y_AXIS]));
267 convLayer->_dilation.clear();
268 convLayer->_dilation.insert(X_AXIS, convLayer->GetParamAsUInt("dilation-x", 1u));
269 convLayer->_dilation.insert(Y_AXIS, convLayer->GetParamAsUInt("dilation-y", 1u));
271 // TODO: checks for presence of all required attributes, and that there's no extraneous parameters only.
272 } else if (version == 3) {
273 vector<unsigned int> kernels = convLayer->GetParamAsUInts("kernel");
274 if (kernels.empty()) {
275 THROW_IE_EXCEPTION << "Invalid kernel field in layer " << convLayer->name;
// Vectors are stored reversed: IR lists outermost-first, _kernel wants
// innermost (X) at index 0.
277 convLayer->_kernel.clear();
278 for (int i = 1; i <= kernels.size(); i++) {
279 convLayer->_kernel.insert(i - 1, kernels[kernels.size() - i]);
// Defaults sized to the kernel rank: 0 for pads, 1 for strides/dilations.
282 vector<unsigned int> default_0 = vector<unsigned int> (convLayer->_kernel.size(), 0u);
283 vector<unsigned int> default_1 = vector<unsigned int> (convLayer->_kernel.size(), 1u);
285 vector<unsigned int> strides = convLayer->GetParamAsUInts("strides", default_1);
286 convLayer->_stride.clear();
287 for (int i = 1; i <= strides.size(); i++) {
288 if (strides[strides.size() - i] == 0) {
289 THROW_IE_EXCEPTION << "Stride could not be 0.\nIn layer " << convLayer->name;
291 convLayer->_stride.insert(i - 1, strides[strides.size() - i]);
294 vector<unsigned int> pads_begin = convLayer->GetParamAsUInts("pads_begin", default_0);
295 convLayer->_padding.clear();
296 for (int i = 1; i <= pads_begin.size(); i++) {
297 convLayer->_padding.insert(i - 1, pads_begin[pads_begin.size() - i]);
300 vector<unsigned int> pads_end = convLayer->GetParamAsUInts("pads_end", default_0);
301 convLayer->_pads_end.clear();
302 for (int i = 1; i <= pads_end.size(); i++) {
303 convLayer->_pads_end.insert(i - 1, pads_end[pads_end.size() - i]);
306 vector<unsigned int> dilations = convLayer->GetParamAsUInts("dilations", default_1);
307 convLayer->_dilation.clear();
308 for (int i = 1; i <= dilations.size(); i++) {
309 convLayer->_dilation.insert(i - 1, dilations[dilations.size() - i]);
313 convLayer->_group = convLayer->GetParamAsUInt("group", 1u);
// Parameter-range checks are not implemented yet (see TODO).
316 void ConvolutionValidator::checkParams(const CNNLayer* layer) {
317 auto casted = dynamic_cast<const ConvolutionLayer*>(layer);
319 THROW_IE_EXCEPTION << "Layer is not instance of ConvolutionLayer class";
321 // TODO: check that values belong to the scope of the definition according to spec
// Weights/biases validation; only implemented for the legacy IR path —
// the v3 branch is an empty TODO stub.
324 void ConvolutionValidator::checkCorrespondence(const CNNLayer* layer,
325 const std::map<std::string, Blob::Ptr>& blobs,
326 const vector<SizeVector>& inShapes) const {
327 auto convLayer = dynamic_cast<const ConvolutionLayer*>(layer);
328 if (!convLayer) THROW_IE_EXCEPTION << "Layer is not instance of ConvolutionLayer class";
329 auto version = BaseCreator::version_;
331 checkWeightable(blobs, inShapes, {convLayer->_out_depth, false, convLayer->_group, convLayer->_kernel[Y_AXIS], convLayer->_kernel[X_AXIS]},
333 } else if (version == 3) {
334 // TODO: implement v2 convolution valitation
// Parses Deconvolution parameters; structurally identical to
// ConvolutionValidator::parseParams (legacy per-axis attributes vs. v3
// vector attributes, vectors stored reversed so X is index 0).
// NOTE(review): this near-duplicate of the convolution parser is a strong
// candidate for a shared helper in a future refactor.
338 void DeconvolutionValidator::parseParams(CNNLayer* layer) {
339 auto deconvLayer = dynamic_cast<DeconvolutionLayer*>(layer);
341 THROW_IE_EXCEPTION << "Layer is not instance of DeconvolutionLayer class";
344 auto version = BaseCreator::version_;
346 deconvLayer->_out_depth = deconvLayer->GetParamAsUInt("output");
349 deconvLayer->_kernel.clear();
350 deconvLayer->_kernel.insert(X_AXIS, deconvLayer->GetParamAsUInt("kernel-x"));
351 deconvLayer->_kernel.insert(Y_AXIS, deconvLayer->GetParamAsUInt("kernel-y"));
353 deconvLayer->_stride.clear();
354 deconvLayer->_stride.insert(X_AXIS, deconvLayer->GetParamAsUInt("stride-x", 1u));
355 deconvLayer->_stride.insert(Y_AXIS, deconvLayer->GetParamAsUInt("stride-y", 1u));
356 // TODO: maybe just throw exception, why do we change IR?
357 if (0 == deconvLayer->_stride[X_AXIS]) {
358 deconvLayer->_stride[X_AXIS] = 1u;
359 LogError("Warning! in layer %s: Stride x is 0, setting to 1 ", deconvLayer->name.c_str());
361 if (0 == deconvLayer->_stride[Y_AXIS]) {
362 deconvLayer->_stride[Y_AXIS] = 1u;
363 LogError("Warning! in layer %s: Stride y is 0, setting to 1", deconvLayer->name.c_str());
366 deconvLayer->_padding.clear();
367 deconvLayer->_padding.insert(X_AXIS, deconvLayer->GetParamAsUInt("pad-x", 0u));
368 deconvLayer->_padding.insert(Y_AXIS, deconvLayer->GetParamAsUInt("pad-y", 0u));
// End padding defaults to the begin padding when "pad-r"/"pad-b" absent.
370 deconvLayer->_pads_end.clear();
371 deconvLayer->_pads_end.insert(X_AXIS, deconvLayer->GetParamAsUInt("pad-r", deconvLayer->_padding[X_AXIS]));
372 deconvLayer->_pads_end.insert(Y_AXIS, deconvLayer->GetParamAsUInt("pad-b", deconvLayer->_padding[Y_AXIS]));
374 deconvLayer->_dilation.clear();
375 deconvLayer->_dilation.insert(X_AXIS, deconvLayer->GetParamAsUInt("dilation-x", 1u));
376 deconvLayer->_dilation.insert(Y_AXIS, deconvLayer->GetParamAsUInt("dilation-y", 1u));
377 } else if (version == 3) {
378 vector<unsigned int> kernels = deconvLayer->GetParamAsUInts("kernel");
379 if (kernels.empty()) {
380 THROW_IE_EXCEPTION << "Invalid kernel field in layer " << deconvLayer->name;
382 deconvLayer->_kernel.clear();
383 for (int i = 1; i <= kernels.size(); i++) {
384 deconvLayer->_kernel.insert(i - 1, kernels[kernels.size() - i]);
387 vector<unsigned int> default_0 = vector<unsigned int> (deconvLayer->_kernel.size(), 0u);
388 vector<unsigned int> default_1 = vector<unsigned int> (deconvLayer->_kernel.size(), 1u);
390 vector<unsigned int> strides = deconvLayer->GetParamAsUInts("strides", default_1);
391 deconvLayer->_stride.clear();
392 for (int i = 1; i <= strides.size(); i++) {
393 if (strides[strides.size() - i] == 0) {
394 THROW_IE_EXCEPTION << "Stride could not be 0.\nIn layer " << deconvLayer->name;
396 deconvLayer->_stride.insert(i - 1, strides[strides.size() - i]);
399 vector<unsigned int> pads_begin = deconvLayer->GetParamAsUInts("pads_begin", default_0);
400 deconvLayer->_padding.clear();
401 for (int i = 1; i <= pads_begin.size(); i++) {
402 deconvLayer->_padding.insert(i - 1, pads_begin[pads_begin.size() - i]);
405 vector<unsigned int> pads_end = deconvLayer->GetParamAsUInts("pads_end", default_0);
406 deconvLayer->_pads_end.clear();
407 for (int i = 1; i <= pads_end.size(); i++) {
408 deconvLayer->_pads_end.insert(i - 1, pads_end[pads_end.size() - i]);
411 vector<unsigned int> dilations = deconvLayer->GetParamAsUInts("dilations", default_1);
412 deconvLayer->_dilation.clear();
413 for (int i = 1; i <= dilations.size(); i++) {
414 deconvLayer->_dilation.insert(i - 1, dilations[dilations.size() - i]);
418 deconvLayer->_group = deconvLayer->GetParamAsUInt("group", 1u);
// Delegates to the base-class parameter check (no deconv-specific rules).
421 void DeconvolutionValidator::checkParams(const CNNLayer* layer) {
422 LayerValidator::checkParams(layer);
425 DeconvolutionValidator::DeconvolutionValidator(const std::string& _type) : LayerValidator(_type) {}
// Weights/biases validation via checkWeightable (explicit kernel geometry).
428 void DeconvolutionValidator::checkCorrespondence(const CNNLayer* layer,
429 const std::map<std::string, Blob::Ptr>& blobs,
430 const vector<SizeVector>& inShapes) const {
431 auto casted = dynamic_cast<const DeconvolutionLayer*>(layer);
// NOTE(review): copy-paste defect — the message below says
// "ConvolutionLayer" but the cast is to DeconvolutionLayer; the error
// string should name DeconvolutionLayer.
432 if (!casted) THROW_IE_EXCEPTION << "Layer is not instance of ConvolutionLayer class";
433 checkWeightable(blobs, inShapes, {casted->_out_depth, false, casted->_group, casted->_kernel[Y_AXIS], casted->_kernel[X_AXIS]},
437 PoolingValidator::PoolingValidator(const std::string& _type) : LayerValidator(_type) {}
// Parses Pooling parameters across three attribute dialects:
//  1. "custom layer" Caffe-style names (kernel_size/kernel_w/stride_h/...),
//     detected by the absence of "kernel-x";
//  2. legacy IR per-axis names ("kernel-x", "stride-y", "pad-r", ...);
//  3. v3 IR vector attributes ("kernel", "strides", ...), stored reversed
//     so index 0 is the innermost (X) axis.
439 void PoolingValidator::parseParams(CNNLayer* layer) {
440 auto poolLayer = dynamic_cast<PoolingLayer*>(layer);
442 THROW_IE_EXCEPTION << "Layer is not instance of PoolingLayer class";
445 auto version = BaseCreator::version_;
447 int kernel_x = poolLayer->GetParamAsInt("kernel-x", -1);
448 /** Pooling as custom layer */
449 if (kernel_x == -1) {
// Per-axis _w/_h values override the scalar kernel_size/stride/pad when
// they are non-zero.
451 unsigned int kernel_size = poolLayer->GetParamAsUInt("kernel_size");
452 unsigned int kernel_w = poolLayer->GetParamAsUInt("kernel_w", 0u);
453 unsigned int kernel_h = poolLayer->GetParamAsUInt("kernel_h", 0u);
454 poolLayer->_kernel.clear();
455 poolLayer->_kernel.insert(X_AXIS, kernel_w == 0u ? kernel_size : kernel_w);
456 poolLayer->_kernel.insert(Y_AXIS, kernel_h == 0u ? kernel_size : kernel_h);
458 unsigned int stride = poolLayer->GetParamAsUInt("stride", 1u);
459 unsigned int stride_w = poolLayer->GetParamAsUInt("stride_w", 0u);
460 unsigned int stride_h = poolLayer->GetParamAsUInt("stride_h", 0u);
461 poolLayer->_stride.clear();
462 poolLayer->_stride.insert(X_AXIS, stride_w == 0u ? stride : stride_w);
463 poolLayer->_stride.insert(Y_AXIS, stride_h == 0u ? stride : stride_h);
465 unsigned int pad = poolLayer->GetParamAsUInt("pad", 0u);
466 unsigned int pad_w = poolLayer->GetParamAsUInt("pad_w", 0u);
467 unsigned int pad_h = poolLayer->GetParamAsUInt("pad_h", 0u);
469 poolLayer->_padding.clear();
470 poolLayer->_padding.insert(X_AXIS, pad_w == 0u ? pad : pad_w);
471 poolLayer->_padding.insert(Y_AXIS, pad_h == 0u ? pad : pad_h);
// Caffe dialect has no end-padding attributes; default to 0.
473 poolLayer->_pads_end.clear();
474 poolLayer->_pads_end.insert(X_AXIS, 0u);
475 poolLayer->_pads_end.insert(Y_AXIS, 0u);
// Anything other than the Caffe MAX enum string is treated as AVG.
479 std::string alg = poolLayer->GetParamAsString("pool", "caffe.PoolingParameter.MAX");
480 poolLayer->_type = alg == "caffe.PoolingParameter.MAX" ? PoolingLayer::MAX : PoolingLayer::AVG;
481 } else /** Default behavior */ {
482 poolLayer->_kernel.clear();
483 poolLayer->_kernel.insert(X_AXIS, poolLayer->GetParamAsUInt("kernel-x"));
484 poolLayer->_kernel.insert(Y_AXIS, poolLayer->GetParamAsUInt("kernel-y"));
486 poolLayer->_stride.clear();
487 poolLayer->_stride.insert(X_AXIS, poolLayer->GetParamAsUInt("stride-x", 1u));
488 poolLayer->_stride.insert(Y_AXIS, poolLayer->GetParamAsUInt("stride-y", 1u));
489 // TODO: maybe just throw exception, why do we change IR?
490 if (0 == poolLayer->_stride[X_AXIS]) {
491 poolLayer->_stride[X_AXIS] = 1u;
492 LogError("Warning! in layer %s: Stride x is 0, setting to 1 ", poolLayer->name.c_str());
494 if (0 == poolLayer->_stride[Y_AXIS]) {
495 poolLayer->_stride[Y_AXIS] = 1u;
496 LogError("Warning! in layer %s: Stride y is 0, setting to 1", poolLayer->name.c_str());
499 poolLayer->_padding.clear();
500 poolLayer->_padding.insert(X_AXIS, poolLayer->GetParamAsUInt("pad-x", 0u));
501 poolLayer->_padding.insert(Y_AXIS, poolLayer->GetParamAsUInt("pad-y", 0u));
// End padding defaults to the begin padding when "pad-r"/"pad-b" absent.
503 poolLayer->_pads_end.clear();
504 poolLayer->_pads_end.insert(X_AXIS, poolLayer->GetParamAsUInt("pad-r", poolLayer->_padding[X_AXIS]));
505 poolLayer->_pads_end.insert(Y_AXIS, poolLayer->GetParamAsUInt("pad-b", poolLayer->_padding[Y_AXIS]));
507 // TODO: All kind of pool methods
508 poolLayer->_exclude_pad = poolLayer->GetParamsAsBool("exclude-pad", false);
509 std::string alg = poolLayer->GetParamAsString("pool-method", "max");
510 poolLayer->_type = alg == "avg" ? PoolingLayer::AVG : PoolingLayer::MAX;
511 if (alg != "max" && alg != "avg") {
512 THROW_IE_EXCEPTION << "Layer with type `" << _type << "` has incorrect pad-type!";
515 } else if (version == 3) {
516 vector<unsigned int> kernels = poolLayer->GetParamAsUInts("kernel");
517 if (kernels.empty()) {
518 THROW_IE_EXCEPTION << "Invalid kernel field in layer " << poolLayer->name;
// Vector attributes are stored reversed: X (innermost) ends up at index 0.
520 poolLayer->_kernel.clear();
521 for (int i = 1; i <= kernels.size(); i++) {
522 poolLayer->_kernel.insert(i - 1, kernels[kernels.size() - i]);
525 vector<unsigned int> default_0 = vector<unsigned int> (poolLayer->_kernel.size(), 0u);
526 vector<unsigned int> default_1 = vector<unsigned int> (poolLayer->_kernel.size(), 1u);
528 vector<unsigned int> strides = poolLayer->GetParamAsUInts("strides", default_1);
529 poolLayer->_stride.clear();
530 for (int i = 1; i <= strides.size(); i++) {
531 if (strides[strides.size() - i] == 0) {
532 THROW_IE_EXCEPTION << "Stride could not be 0.\nIn layer " << poolLayer->name;
534 poolLayer->_stride.insert(i - 1, strides[strides.size() - i]);
537 vector<unsigned int> pads_begin = poolLayer->GetParamAsUInts("pads_begin", default_0);
538 poolLayer->_padding.clear();
539 for (int i = 1; i <= pads_begin.size(); i++) {
540 poolLayer->_padding.insert(i - 1, pads_begin[pads_begin.size() - i]);
543 vector<unsigned int> pads_end = poolLayer->GetParamAsUInts("pads_end", default_0);
544 poolLayer->_pads_end.clear();
545 for (int i = 1; i <= pads_end.size(); i++) {
546 poolLayer->_pads_end.insert(i - 1, pads_end[pads_end.size() - i]);
549 poolLayer->_exclude_pad = poolLayer->GetParamsAsBool("exclude-pad", false);
550 std::string alg = poolLayer->GetParamAsString("pool-method", "max");
551 poolLayer->_type = alg == "avg" ? PoolingLayer::AVG : PoolingLayer::MAX;
552 if (alg != "max" && alg != "avg") {
553 THROW_IE_EXCEPTION << "Layer with type `" << _type << "` has incorrect pad-type!";
556 // TODO: checks for presence of all required attributes, and that there's no extraneous parameters only.
// Parameter-range checks are not implemented yet (see TODO).
559 void PoolingValidator::checkParams(const CNNLayer* layer) {
560 // TODO: check that values belong to the scope of the definition according to spec
// Parses BatchNormalization parameters: the mandatory "epsilon" attribute.
563 void BatchNormalizationValidator::parseParams(CNNLayer* layer) {
564 auto casted = dynamic_cast<BatchNormalizationLayer*>(layer);
566 THROW_IE_EXCEPTION << "Layer is not instance of BatchNormalizationLayer class";
568 casted->epsilon = casted->GetParamAsFloat("epsilon");
// Delegates to the base-class parameter check (no extra rules).
571 void BatchNormalizationValidator::checkParams(const CNNLayer* layer) {
572 LayerValidator::checkParams(layer);
575 BatchNormalizationValidator::BatchNormalizationValidator(const std::string& _type) : LayerValidator(_type) {}
// Parses Power parameters: mandatory "shift", "power" and "scale" attributes
// (the "shift" attribute is stored in the `offset` member).
577 void PowerValidator::parseParams(CNNLayer* layer) {
578 auto casted = dynamic_cast<PowerLayer*>(layer);
580 THROW_IE_EXCEPTION << "Layer is not instance of PowerLayer class";
582 casted->offset = casted->GetParamAsFloat("shift");
583 casted->power = casted->GetParamAsFloat("power");
584 casted->scale = casted->GetParamAsFloat("scale");
// Delegates to the base-class parameter check (no extra rules).
587 void PowerValidator::checkParams(const CNNLayer* layer) {
588 LayerValidator::checkParams(layer);
591 PowerValidator::PowerValidator(const std::string& _type) : LayerValidator(_type) {}
// Parses PReLU parameters: optional "channel_shared" flag (default false).
593 void PReLUValidator::parseParams(CNNLayer* layer) {
594 auto casted = dynamic_cast<PReLULayer*>(layer);
596 THROW_IE_EXCEPTION << "Layer is not instance of PReLULayer class";
598 casted->_channel_shared = casted->GetParamsAsBool("channel_shared", false);
// Delegates to the base-class parameter check (no extra rules).
601 void PReLUValidator::checkParams(const CNNLayer* layer) {
602 LayerValidator::checkParams(layer);
605 PReLUValidator::PReLUValidator(const std::string& _type) : LayerValidator(_type) {}
// Parses ScaleShift parameters: optional "broadcast" mode. Only read when
// the layer has any params at all; defaults to 2 — TODO confirm the meaning
// of the value 2 against the layer spec.
607 void ScaleShiftValidator::parseParams(CNNLayer* layer) {
608 auto casted = dynamic_cast<ScaleShiftLayer*>(layer);
610 THROW_IE_EXCEPTION << "Layer is not instance of ScaleShiftLayer class";
612 if (!casted->params.empty()) {
613 casted->_broadcast = casted->GetParamAsUInt("broadcast", 2);
// Delegates to the base-class parameter check (no extra rules).
617 void ScaleShiftValidator::checkParams(const CNNLayer* layer) {
618 LayerValidator::checkParams(layer);
621 ScaleShiftValidator::ScaleShiftValidator(const std::string& _type) : LayerValidator(_type) {}
// Parses Tile parameters: "axis" and "tiles", both defaulting to -1
// (meaning "not specified").
623 void TileValidator::parseParams(CNNLayer* layer) {
624 auto casted = dynamic_cast<TileLayer*>(layer);
626 THROW_IE_EXCEPTION << "Layer is not instance of TileLayer class";
628 casted->axis = casted->GetParamAsInt("axis", -1);
629 casted->tiles = casted->GetParamAsInt("tiles", -1);
// Delegates to the base-class parameter check (no extra rules).
632 void TileValidator::checkParams(const CNNLayer* layer) {
633 LayerValidator::checkParams(layer);
636 TileValidator::TileValidator(const std::string& _type) : LayerValidator(_type) {}
638 ReshapeValidator::ReshapeValidator(const std::string& _type) : LayerValidator(_type) {}
// Parses Reshape/Flatten parameters. Flatten reuses the Reshape layer class
// but its end axis comes from "end_axis" instead of "num_axes". After
// parsing, calculateIn2Out() records the inferred input→output dimension
// mapping in the "in2out" param.
640 void ReshapeValidator::parseParams(CNNLayer* layer) {
641 auto casted = dynamic_cast<ReshapeLayer*>(layer);
643 THROW_IE_EXCEPTION << "Layer is not instance of ReshapeLayer class";
646 if (!casted->params.empty()) {
647 casted->num_axes = casted->GetParamAsInt(casted->type == "Flatten" ? "end_axis" : "num_axes", -1);
648 casted->axis = casted->GetParamAsInt("axis", 1);
649 casted->shape = casted->GetParamAsInts("dim", {});
650 calculateIn2Out(casted);
// Delegates to the base-class parameter check (no extra rules).
655 void ReshapeValidator::checkParams(const CNNLayer* layer) {
656 LayerValidator::checkParams(layer);
// Heuristically maps each input dimension index to an output dimension
// index and stores the result as a comma-separated "i-o" list in
// params["in2out"]. Matching strategy per input dim: first an exact-value
// match against a not-yet-mapped output dim, then a divisibility match,
// then (for negative entries in `shape`) a positional fallback.
// Skipped entirely when in/out data is absent or `shape` contains a 0.
// NOTE(review): lines are missing from this extract (e.g. the `mapped`
// flag's declaration/updates and several brace closures) — do not infer
// the full control flow from the visible text alone.
659 void ReshapeValidator::calculateIn2Out(ReshapeLayer* layer) {
660 if (layer->outData.empty() || layer->insData.empty())
663 if (!layer->shape.empty() && std::find(layer->shape.begin(), layer->shape.end(), 0) != layer->shape.end())
666 SizeVector inDims = layer->input()->getTensorDesc().getDims();
667 SizeVector outDims = layer->outData[0]->getTensorDesc().getDims();
669 vector<size_t> inMapped;
670 vector<size_t> outMapped;
671 for (size_t i = 0; i < inDims.size(); i++) {
673 inMapped.push_back(i);
// Pass 1: exact size match against an unclaimed output dimension.
674 for (size_t j = 0; !mapped && j < outDims.size(); j++) {
675 if (outDims[j] == inDims[i] && std::find(outMapped.begin(), outMapped.end(), j) == outMapped.end()) {
676 outMapped.push_back(j);
// Pass 2 (scans from the back): output dim divisible by the input dim.
681 for (size_t j = 1; !mapped && j <= outDims.size(); j++) {
682 if (outDims[outDims.size() - j] != inDims[i] && (outDims[outDims.size() - j] % inDims[i] == 0)) {
683 outMapped.push_back(outDims.size() - j);
// Pass 3: fall back to the position of a negative (inferred) shape entry.
688 size_t outIndex = outDims.size() - 1;
689 for (size_t k = 0; k < layer->shape.size(); k++) {
690 if (layer->shape[k] < 0) {
695 outMapped.push_back(outIndex);
// Serialize the pairs as "in-out,in-out,..." into the layer params.
698 std::string mapped_params;
699 for (size_t i = 0; i < inMapped.size(); i++) {
700 if (!mapped_params.empty())
701 mapped_params += ",";
702 mapped_params += std::to_string(inMapped[i]) + "-" + std::to_string(outMapped[i]);
705 layer->params["in2out"] = mapped_params;
// Parses Eltwise parameters: the "operation" attribute (sum/mul/prod/max,
// with "" treated as sum for legacy IRs — see TODOs) and the optional
// comma-separated "coeff" list of per-input float coefficients.
708 void EltwiseValidator::parseParams(CNNLayer* layer) {
709 auto casted = dynamic_cast<EltwiseLayer*>(layer);
711 THROW_IE_EXCEPTION << "Layer is not instance of EltwiseLayer class";
713 // TODO: fix this onece we switched to IR v2.x also enable dedicated unit tests
714 // @details: need to remove sum
715 std::string op = casted->GetParamAsString("operation", "sum");
716 // TODO: remove empty value case in IRv2.x
717 if (op == "sum" || op == "") {
718 casted->_operation = EltwiseLayer::Sum;
719 } else if (op == "mul" || op == "prod") {
720 casted->_operation = EltwiseLayer::Prod;
721 } else if (op == "max") {
722 casted->_operation = EltwiseLayer::Max;
724 THROW_IE_EXCEPTION << "Unsupported element wise operation: " << op;
// Splits a comma-separated string of floats into `array`.
// NOTE(review): std::stof throws on non-numeric tokens; malformed "coeff"
// values surface as std::invalid_argument rather than an IE exception.
727 auto getArray = [](std::string param, vector<float>& array) {
728 std::istringstream stream(param);
730 while (getline(stream, str, ',')) {
731 float val = std::stof(str);
732 array.push_back(val);
735 getArray(casted->GetParamAsString("coeff", ""), casted->coeff);
// Delegates to the base-class parameter check (no extra rules).
738 void EltwiseValidator::checkParams(const CNNLayer* layer) {
739 LayerValidator::checkParams(layer);
742 EltwiseValidator::EltwiseValidator(const std::string& _type) : LayerValidator(_type) {}
// Parses Clamp parameters: mandatory "min" and "max" bounds.
744 void ClampValidator::parseParams(CNNLayer* layer) {
745 auto casted = dynamic_cast<ClampLayer*>(layer);
747 THROW_IE_EXCEPTION << "Layer is not instance of ClampLayer class";
749 casted->min_value = casted->GetParamAsFloat("min");
750 casted->max_value = casted->GetParamAsFloat("max");
// Delegates to the base-class parameter check (no extra rules);
// NOTE(review): min <= max is not verified here.
753 void ClampValidator::checkParams(const CNNLayer* layer) {
754 LayerValidator::checkParams(layer);
757 ClampValidator::ClampValidator(const std::string& _type) : LayerValidator(_type) {}
// Parses ReLU parameters: "negative_slope", read only when the layer has
// any params (plain ReLU layers may carry none).
759 void ReLUValidator::parseParams(CNNLayer* layer) {
760 auto casted = dynamic_cast<ReLULayer*>(layer);
762 THROW_IE_EXCEPTION << "Layer is not instance of ReLULayer class";
764 if (!casted->params.empty()) {
765 casted->negative_slope = casted->GetParamAsFloat("negative_slope");
// Delegates to the base-class parameter check (no extra rules).
769 void ReLUValidator::checkParams(const CNNLayer* layer) {
770 LayerValidator::checkParams(layer);
773 ReLUValidator::ReLUValidator(const std::string& _type) : LayerValidator(_type) {}
// Parses MVN parameters: "across_channels" (default 0 = per-channel) and
// "normalize_variance" (default 1 = enabled).
775 void MVNValidator::parseParams(CNNLayer* layer) {
776 auto casted = dynamic_cast<MVNLayer*>(layer);
778 THROW_IE_EXCEPTION << "Layer is not instance of MVNLayer class";
780 casted->across_channels = casted->GetParamAsInt("across_channels", 0);
781 casted->normalize = casted->GetParamAsInt("normalize_variance", 1);
// Delegates to the base-class parameter check (no extra rules).
784 void MVNValidator::checkParams(const CNNLayer* layer) {
785 LayerValidator::checkParams(layer);
788 MVNValidator::MVNValidator(const std::string& _type) : LayerValidator(_type) {}
// Parses GRN parameters: optional "bias" attribute (default 0).
790 void GRNValidator::parseParams(CNNLayer* layer) {
791 auto casted = dynamic_cast<GRNLayer*>(layer);
793 THROW_IE_EXCEPTION << "Layer is not instance of GRNLayer class";
795 casted->bias = casted->GetParamAsFloat("bias", 0.f);
// Delegates to the base-class parameter check (no extra rules).
798 void GRNValidator::checkParams(const CNNLayer* layer) {
799 LayerValidator::checkParams(layer);
802 GRNValidator::GRNValidator(const std::string& _type) : LayerValidator(_type) {}
// Parses SoftMax parameters: optional "axis" attribute (default 1, i.e. the
// channel dimension in NCHW).
804 void SoftMaxValidator::parseParams(CNNLayer* layer) {
805 auto casted = dynamic_cast<SoftMaxLayer*>(layer);
807 THROW_IE_EXCEPTION << "Layer is not instance of SoftMaxLayer class";
809 casted->axis = casted->GetParamAsInt("axis", 1);
// Delegates to the base-class parameter check (no extra rules).
812 void SoftMaxValidator::checkParams(const CNNLayer* layer) {
813 LayerValidator::checkParams(layer);
816 SoftMaxValidator::SoftMaxValidator(const std::string& _type) : LayerValidator(_type) {}
// Parses Norm (LRN) parameters: local size, k, alpha, beta and region flag.
818 void NormValidator::parseParams(CNNLayer* layer) {
819 auto casted = dynamic_cast<NormLayer*>(layer);
821 THROW_IE_EXCEPTION << "Layer is not instance of NormLayer class";
// Both "local_size" and "local-size" spellings are accepted; since each
// defaults to 0 and the values are summed, exactly one is expected to be
// present in the IR — TODO confirm both never appear together.
823 casted->_size = casted->GetParamAsUInt("local_size", 0);
824 casted->_size += casted->GetParamAsUInt("local-size", 0);
825 casted->_k = casted->GetParamAsUInt("k", 1);
826 casted->_alpha = casted->GetParamAsFloat("alpha");
827 casted->_beta = casted->GetParamAsFloat("beta");
828 casted->_isAcrossMaps = casted->GetParamsAsBool("region", false);
// Delegates to the base-class parameter check (no extra rules).
831 void NormValidator::checkParams(const CNNLayer* layer) {
832 LayerValidator::checkParams(layer);
835 NormValidator::NormValidator(const std::string& _type) : LayerValidator(_type) {}
837 SplitValidator::SplitValidator(const std::string& _type) : LayerValidator(_type) {}
// Parses Split parameters: optional "axis" (default 1) and derives the
// "out_sizes" param as a comma-separated list of each output's size along
// the split axis (read from the already-attached output tensor descs).
839 void SplitValidator::parseParams(CNNLayer* layer) {
840 auto casted = dynamic_cast<SplitLayer*>(layer);
842 THROW_IE_EXCEPTION << "Layer is not instance of SplitLayer class";
844 casted->_axis = casted->GetParamAsUInt("axis", 1);
846 std::string out_sizes;
847 for (auto& i : layer->outData) {
848 if (!out_sizes.empty())
// NOTE(review): getDims()[casted->_axis] is unchecked — an axis >= the
// output rank would be out-of-bounds; confirm upstream validation.
850 out_sizes += std::to_string(i->getTensorDesc().getDims()[casted->_axis]);
852 if (!out_sizes.empty())
853 casted->params["out_sizes"] = out_sizes;
// Delegates to the base-class parameter check (no extra rules).
856 void SplitValidator::checkParams(const CNNLayer* layer) {
857 LayerValidator::checkParams(layer);
860 ConcatValidator::ConcatValidator(const std::string& _type) : LayerValidator(_type) {}
// Parses Concat parameters: optional "axis" attribute (default 1).
862 void ConcatValidator::parseParams(CNNLayer* layer) {
863 auto casted = dynamic_cast<ConcatLayer*>(layer);
865 THROW_IE_EXCEPTION << "Layer is not instance of ConcatLayer class";
867 casted->_axis = casted->GetParamAsUInt("axis", 1);
// Delegates to the base-class parameter check (no extra rules).
// NOTE(review): this definition continues past the end of this extract.
870 void ConcatValidator::checkParams(const CNNLayer* layer) {
871 LayerValidator::checkParams(layer);