InferenceEngine::Precision netPrecision;
InferenceEngine::SizeVector inputShape0;
InferenceEngine::SizeVector inputShape1;
+ bool transpose_a;
+ bool transpose_b;
ngraph::helpers::InputLayerType secondaryInputType;
std::string targetDevice;
- std::tie(netPrecision, inputShape0, inputShape1, secondaryInputType, targetDevice) = obj.param;
+ std::tie(netPrecision, inputShape0, inputShape1, transpose_a, transpose_b, secondaryInputType, targetDevice) = obj.param;
std::ostringstream result;
result << "IS0=" << CommonTestUtils::vec2str(inputShape0) << "_";
result << "IS1=" << CommonTestUtils::vec2str(inputShape1) << "_";
+ result << "transpose_a=" << transpose_a << "_";
+ result << "transpose_b=" << transpose_b << "_";
result << "secondaryInputType=" << secondaryInputType << "_";
result << "netPRC=" << netPrecision.name() << "_";
result << "targetDevice=" << targetDevice;
// Builds the test function: a single opset3::MatMul node over two parameterized
// input shapes. This diff hunk extends the test-parameter tuple with
// transpose_a/transpose_b flags and forwards them to the MatMul builder so the
// transposed variants of MatMul are covered by the parameterized tests.
void MatMulTest::SetUp() {
InferenceEngine::SizeVector inputShape0;
InferenceEngine::SizeVector inputShape1;
+ bool transpose_a;
+ bool transpose_b;
ngraph::helpers::InputLayerType secondaryInputType;
auto netPrecision = InferenceEngine::Precision::UNSPECIFIED;
// The tuple layout here must stay in sync with the one unpacked in
// getTestCaseName above — the patch updates both call sites identically.
- std::tie(netPrecision, inputShape0, inputShape1, secondaryInputType, targetDevice) = this->GetParam();
+ std::tie(netPrecision, inputShape0, inputShape1, transpose_a, transpose_b, secondaryInputType, targetDevice) = this->GetParam();
auto ngPrc = FuncTestUtils::PrecisionUtils::convertIE2nGraphPrc(netPrecision);
// Only inputShape0 becomes a graph Parameter; the second operand
// (secondaryInput) is built separately depending on secondaryInputType.
auto params = ngraph::builder::makeParams(ngPrc, {inputShape0});
auto paramOuts = ngraph::helpers::convert2OutputVector(
ngraph::helpers::castOps2Nodes<ngraph::op::Parameter>(params));
// NOTE(review): `secondaryInput` is used below but is created in context
// lines elided from this diff hunk (presumably from secondaryInputType and
// inputShape1) — confirm against the full file when applying this patch.
auto MatMul = std::dynamic_pointer_cast<ngraph::opset3::MatMul>(
- ngraph::builder::makeMatMul(paramOuts[0], secondaryInput));
// The new transpose flags are forwarded to the builder, which passes them to
// the MatMul op's transpose_a/transpose_b attributes.
+ ngraph::builder::makeMatMul(paramOuts[0], secondaryInput, transpose_a, transpose_b));
ngraph::ResultVector results{std::make_shared<ngraph::opset1::Result>(MatMul)};
function = std::make_shared<ngraph::Function>(results, params, "MatMul");
}