// pattern matched - let's substitute
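+ // SoftSign(x) = x / (1 + |x|); the matched layers are collapsed into a single SoftSign activation layer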
gnalog() << "SoftSign subgraph found consits of: \n"
<< "\t" << abs->name << "\n";
- if (addition == nullptr) gnalog() << "\t" << addition->name << "\n";
+ if (addition != nullptr) gnalog() << "\t" << addition->name << "\n";
gnalog() << "\t" << mul->name << "\n"
<< std::endl;
CNNLayerPtr activationLayer =
        std::make_shared<GenericLayer>(LayerParams({layerName, "SoftSign", Precision::FP32}));
+ IE_ASSERT(activationLayer != nullptr);
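+ // for quantized networks, injectData presumably attaches QuantizedLayerParams so later quantization passes can handle the new layer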
auto activationLayerWithQuant = quantized ?
        InferenceEngine::injectData<QuantizedLayerParams>(activationLayer) :
        activationLayer;
// sum
auto sum = getNext(negate);
+ IE_ASSERT(sum != nullptr);
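+ // the substitution requires the next layer to be an Eltwise sum with two valid inputs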
if (!LayerInfo(sum).isEltwiseSum()) continue;
if (sum->insData.size() != 2
        || sum->insData[0].lock() == nullptr
        || sum->insData[1].lock() == nullptr) continue;
if (!info.isConcat()) continue;
size_t offset = 0;
auto concatLayer = info.as<ConcatLayer*>();
+ IE_ASSERT(concatLayer != nullptr);
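+ // iterate over the concat inputs; offset presumably accumulates each input's position in the concatenated output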
for (size_t input_idx = 0; input_idx != concatLayer->insData.size(); input_idx++) {
auto getLayerByIndex = [&concatLayer](int idx) {
continue;
}
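+ // dynamic_pointer_cast yields nullptr if l is not an EltwiseLayer, hence the assert before any dereference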
auto masterEltwise = std::dynamic_pointer_cast<EltwiseLayer>(l);
+ IE_ASSERT(masterEltwise != nullptr);
+
if (l->outData.size() != 1) {
    THROW_GNA_LAYER_EXCEPTION(l) << "number of outputs expected to be 1";
}
}
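+ // LayerInfo::as<> is assumed to return nullptr on a type mismatch, so the cast result is asserted before use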
auto scaleShift = layerInfo.as<ScaleShiftLayer*>();
+ IE_ASSERT(scaleShift != nullptr);
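+ // insData holds weak references; lock() returns nullptr when the producer data is gone, handled by the check below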
auto insData = scaleShift->insData.front().lock();
if (!insData) {