return Mat(size, CV_32F, (void*)blob->buffer());
}
-InfEngineBackendLayer::InfEngineBackendLayer(const InferenceEngine::DataPtr& output_)
-{
- output = output_;
-}
-
bool InfEngineBackendLayer::getMemoryShapes(const std::vector<MatShape> &inputs,
const int requiredOutputs,
std::vector<MatShape> &outputs,
std::vector<MatShape> &internals) const
{
-    std::vector<size_t> dims = output->dims;
-    std::vector<int> shape(dims.rbegin(), dims.rend());
-    outputs.assign(1, shape);
+    // Compare the input shapes requested by OpenCV with the shapes currently
+    // set on the wrapped network. inShapes is keyed by input name; its
+    // iteration order is assumed to match the order of `inputs`.
+    InferenceEngine::ICNNNetwork::InputShapes inShapes = t_net.getInputShapes();
+    InferenceEngine::ICNNNetwork::InputShapes::iterator itr;
+    bool equal_flag = true;
+    size_t i = 0;
+    for (itr = inShapes.begin(); itr != inShapes.end(); ++itr)
+    {
+        InferenceEngine::SizeVector currentInShape(inputs[i].begin(), inputs[i].end());
+        if (itr->second != currentInShape)
+        {
+            itr->second = currentInShape;
+            equal_flag = false;
+        }
+        i++;
+    }
+
+    // If any shape changed, reshape the network. reshape() is non-const, so it
+    // is called on a copy; the copy shares the underlying network, so the dims
+    // queried below already reflect the new input shapes.
+    if (!equal_flag)
+    {
+        InferenceEngine::CNNNetwork curr_t_net(t_net);
+        curr_t_net.reshape(inShapes);
+    }
+    std::vector<size_t> dims = t_net.getOutputsInfo()[name]->getDims();
+    outputs.push_back(MatShape(dims.begin(), dims.end()));
return false;
}
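For context, a minimal sketch of how this reshape path is exercised from the
user side (not part of the patch; the model paths and input size below are
placeholders):

    #include <opencv2/dnn.hpp>

    void exampleDynamicInputShape()
    {
        // Feeding an input whose shape differs from the IR's default makes the
        // DNN engine call getMemoryShapes() with the new dims, which reshapes
        // the wrapped InferenceEngine network as implemented above.
        cv::dnn::Net net = cv::dnn::readNet("model.xml", "model.bin");
        int sz[] = {5, 5, 5};
        cv::Mat inp(3, sz, CV_32F);
        cv::randu(inp, 0, 255);
        net.setInput(inp);
        cv::Mat out = net.forward();
    }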
class InfEngineBackendLayer : public Layer
{
public:
-    InfEngineBackendLayer(const InferenceEngine::DataPtr& output);
+    InfEngineBackendLayer(const InferenceEngine::CNNNetwork &t_net_) : t_net(t_net_) {}
    virtual bool getMemoryShapes(const std::vector<MatShape> &inputs,
                                 const int requiredOutputs,
                                 std::vector<MatShape> &outputs,
                                 std::vector<MatShape> &internals) const CV_OVERRIDE;
    virtual bool supportBackend(int backendId) CV_OVERRIDE;
private:
-    InferenceEngine::DataPtr output;
+    InferenceEngine::CNNNetwork t_net;
};
#endif // HAVE_INF_ENGINE
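A sketch of where construction is expected to happen, assuming this helper sits
alongside the class above and the importer holds the parsed IR as an
InferenceEngine::CNNNetwork (the helper name is illustrative, not part of the
patch):

    // Wrap a parsed IE network into a cv::dnn backend layer so that shape
    // inference defers to the network, as implemented in getMemoryShapes().
    cv::Ptr<cv::dnn::Layer> wrapIENetwork(const InferenceEngine::CNNNetwork& ieNet)
    {
        return cv::Ptr<cv::dnn::Layer>(new InfEngineBackendLayer(ieNet));
    }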
// net.save('/path/to/caffemodel')
//
// 3. Convert using ModelOptimizer.
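//    A typical invocation for this step might look like (paths are
//    placeholders; --input_model and --input_proto are the Model Optimizer
//    flags for Caffe models):
//      python mo.py --input_model /path/to/caffemodel --input_proto /path/to/prototxt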
-typedef testing::TestWithParam<tuple<int, int, Target> > Test_DLDT_two_inputs;
-TEST_P(Test_DLDT_two_inputs, as_IR)
+// The extra std::vector<int> parameter carries the 3-D input size, so the test
+// also covers inputs whose shape differs from the IR's default.
+typedef testing::TestWithParam<tuple<int, int, Target, std::vector<int> > > Test_DLDT_two_inputs_3dim;
+TEST_P(Test_DLDT_two_inputs_3dim, as_IR)
{
int firstInpType = get<0>(GetParam());
int secondInpType = get<1>(GetParam());
#endif
Net net = readNet(_tf("net_two_inputs.xml"), _tf("net_two_inputs.bin"));
- int inpSize[] = {1, 2, 3};
- Mat firstInp(3, &inpSize[0], firstInpType);
- Mat secondInp(3, &inpSize[0], secondInpType);
+ std::vector<int> inpSize = get<3>(GetParam());
+ Mat firstInp(3, inpSize.data(), firstInpType);
+ Mat secondInp(3, inpSize.data(), secondInpType);
randu(firstInp, 0, 255);
randu(secondInp, 0, 255);
}
}
+std::vector< std::vector<int> > list_sizes{ {1, 2, 3}, {3, 2, 1}, {5, 5, 5}, {13, 7, 11} };
+
+INSTANTIATE_TEST_CASE_P(/*nothing*/, Test_DLDT_two_inputs_3dim, Combine(
+ Values(CV_8U, CV_32F), Values(CV_8U, CV_32F),
+ testing::ValuesIn(getAvailableTargets(DNN_BACKEND_INFERENCE_ENGINE)),
+ testing::ValuesIn(list_sizes)
+));
+
+typedef testing::TestWithParam<tuple<int, int, Target> > Test_DLDT_two_inputs;
TEST_P(Test_DLDT_two_inputs, as_backend)
{
static const float kScale = 0.5f;