// Copyright © 2017 Arm Ltd. All rights reserved.
// See LICENSE file in the project root for full license information.

#include "../InferenceTest.hpp"
#include "../MnistDatabase.hpp"
#include "armnnOnnxParser/IOnnxParser.hpp"
int main(int argc, char* argv[])
{
    armnn::TensorShape inputTensorShape({ 1, 1, 28, 28 });

    int retVal = EXIT_FAILURE;
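    // Note: the 1x1x28x28 shape above is the NCHW input layout of the MNIST ONNX model,
    // i.e. one single-channel (greyscale) 28x28 digit image per inference.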
    try
    {
        using DataType = float;
        using DatabaseType = MnistDatabase;
        using ParserType = armnnOnnxParser::IOnnxParser;
        using ModelType = InferenceModel<ParserType, DataType>;
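        // Note: "Input3" and "Plus214_Output_0" are the input/output tensor names of the
        // MNIST ONNX model, 'true' marks the model file as a binary protobuf, and
        // { 0, 1, 2, 3, 4 } are the default test-case ids (dataset image indices) to run.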
        // Coverity fix: ClassifierInferenceTestMain() may throw uncaught exceptions.
        retVal = armnn::test::ClassifierInferenceTestMain<DatabaseType, ParserType>(
            argc, argv, "mnist_onnx.onnx", true,
            "Input3", "Plus214_Output_0", { 0, 1, 2, 3, 4 },
            [](const char* dataDir, const ModelType&) {
                return DatabaseType(dataDir, true);
            },
            &inputTensorShape);
    }
    catch (const std::exception& e)
    {
        // Coverity fix: BOOST_LOG_TRIVIAL (typically used to report errors) may throw an
        // exception of type std::length_error.
        // Using stderr instead in this context as there is no point in nesting try-catch blocks here.
        std::cerr << "WARNING: OnnxMnist-Armnn: An error has occurred when running "
                     "the classifier inference tests: " << e.what() << std::endl;
    }
    return retVal;
}