// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
#include <armnn/IRuntime.hpp>
#include <armnn/test/TensorHelpers.hpp>

#include <armnnOnnxParser/IOnnxParser.hpp>

#include <armnnUtils/VerificationHelpers.hpp>

#include <backends/BackendRegistry.hpp>

#include <boost/format.hpp>

#include <map>
#include <memory>
#include <string>
#include <utility>
#include <vector>
24 template<typename TParser>
25 struct ParserPrototxtFixture
27 ParserPrototxtFixture()
28 : m_Parser(TParser::Create())
29 , m_NetworkIdentifier(-1)
31 armnn::IRuntime::CreationOptions options;
33 // Create runtimes for each available backend
34 const armnn::BackendIdSet availableBackendIds = armnn::BackendRegistryInstance().GetBackendIds();
35 for (auto& backendId : availableBackendIds)
37 m_Runtimes.push_back(std::make_pair(armnn::IRuntime::Create(options), backendId));
41 /// Parses and loads the network defined by the m_Prototext string.
43 void SetupSingleInputSingleOutput(const std::string& inputName, const std::string& outputName);
44 void SetupSingleInputSingleOutput(const armnn::TensorShape& inputTensorShape,
45 const std::string& inputName,
46 const std::string& outputName);
47 void Setup(const std::map<std::string, armnn::TensorShape>& inputShapes,
48 const std::vector<std::string>& requestedOutputs);
52 /// Executes the network with the given input tensor and checks the result against the given output tensor.
53 /// This overload assumes that the network has a single input and a single output.
54 template <std::size_t NumOutputDimensions>
55 void RunTest(const std::vector<float>& inputData, const std::vector<float>& expectedOutputData);
57 /// Executes the network with the given input tensors and checks the results against the given output tensors.
58 /// This overload supports multiple inputs and multiple outputs, identified by name.
59 template <std::size_t NumOutputDimensions>
60 void RunTest(const std::map<std::string, std::vector<float>>& inputData,
61 const std::map<std::string, std::vector<float>>& expectedOutputData);
63 std::string m_Prototext;
64 std::unique_ptr<TParser, void(*)(TParser* parser)> m_Parser;
65 std::vector<std::pair<armnn::IRuntimePtr, armnn::BackendId>> m_Runtimes;
66 armnn::NetworkId m_NetworkIdentifier;
68 /// If the single-input-single-output overload of Setup() is called, these will store the input and output name
69 /// so they don't need to be passed to the single-input-single-output overload of RunTest().
71 std::string m_SingleInputName;
72 std::string m_SingleOutputName;
76 template<typename TParser>
77 void ParserPrototxtFixture<TParser>::SetupSingleInputSingleOutput(const std::string& inputName,
78 const std::string& outputName)
80 // Stores the input and output name so they don't need to be passed to the single-input-single-output RunTest().
81 m_SingleInputName = inputName;
82 m_SingleOutputName = outputName;
83 Setup({ }, { outputName });
86 template<typename TParser>
87 void ParserPrototxtFixture<TParser>::SetupSingleInputSingleOutput(const armnn::TensorShape& inputTensorShape,
88 const std::string& inputName,
89 const std::string& outputName)
91 // Stores the input and output name so they don't need to be passed to the single-input-single-output RunTest().
92 m_SingleInputName = inputName;
93 m_SingleOutputName = outputName;
94 Setup({ { inputName, inputTensorShape } }, { outputName });
/// Parses m_Prototext with the supplied input shapes and requested outputs,
/// then optimizes and loads the resulting network into each runtime.
/// @throws armnn::Exception if loading the network into any runtime fails.
template<typename TParser>
void ParserPrototxtFixture<TParser>::Setup(const std::map<std::string, armnn::TensorShape>& inputShapes,
    const std::vector<std::string>& requestedOutputs)
{
    for (auto&& runtime : m_Runtimes)
    {
        std::string errorMessage;

        armnn::INetworkPtr network =
            m_Parser->CreateNetworkFromString(m_Prototext.c_str(), inputShapes, requestedOutputs);
        auto optimized = Optimize(*network,
            { runtime.second, armnn::Compute::CpuRef }, runtime.first->GetDeviceSpec());
        armnn::Status ret = runtime.first->LoadNetwork(m_NetworkIdentifier, std::move(optimized), errorMessage);
        if (ret != armnn::Status::Success)
        {
            // The format string consumes two arguments: %1% = the runtime's error
            // message, %2% = the code location. Feeding fewer would make
            // boost::format throw too_few_args instead of this diagnostic.
            throw armnn::Exception(boost::str(
                boost::format("LoadNetwork failed with error: '%1%' %2%")
                              % errorMessage
                              % CHECK_LOCATION().AsString()));
        }
    }
}
/// Parses m_Prototext without explicit input shapes or requested outputs,
/// then optimizes and loads the resulting network into each runtime.
/// @throws armnn::Exception if loading the network into any runtime fails.
template<typename TParser>
void ParserPrototxtFixture<TParser>::Setup()
{
    for (auto&& runtime : m_Runtimes)
    {
        std::string errorMessage;

        armnn::INetworkPtr network =
            m_Parser->CreateNetworkFromString(m_Prototext.c_str());
        auto optimized = Optimize(*network,
            { runtime.second, armnn::Compute::CpuRef }, runtime.first->GetDeviceSpec());
        armnn::Status ret = runtime.first->LoadNetwork(m_NetworkIdentifier, std::move(optimized), errorMessage);
        if (ret != armnn::Status::Success)
        {
            // The format string consumes two arguments: %1% = the runtime's error
            // message, %2% = the code location. Feeding fewer would make
            // boost::format throw too_few_args instead of this diagnostic.
            throw armnn::Exception(boost::str(
                boost::format("LoadNetwork failed with error: '%1%' %2%")
                              % errorMessage
                              % CHECK_LOCATION().AsString()));
        }
    }
}
142 template<typename TParser>
143 template <std::size_t NumOutputDimensions>
144 void ParserPrototxtFixture<TParser>::RunTest(const std::vector<float>& inputData,
145 const std::vector<float>& expectedOutputData)
147 RunTest<NumOutputDimensions>({ { m_SingleInputName, inputData } }, { { m_SingleOutputName, expectedOutputData } });
/// Executes the network on every runtime with the given named input tensors and
/// compares each named output tensor against the expected values.
/// @throws armnn::Exception if an expected output's element count does not match
///         the network's output binding.
template<typename TParser>
template <std::size_t NumOutputDimensions>
void ParserPrototxtFixture<TParser>::RunTest(const std::map<std::string, std::vector<float>>& inputData,
    const std::map<std::string, std::vector<float>>& expectedOutputData)
{
    for (auto&& runtime : m_Runtimes)
    {
        using BindingPointInfo = std::pair<armnn::LayerBindingId, armnn::TensorInfo>;

        // Sets up the armnn input tensors from the given vectors.
        armnn::InputTensors inputTensors;
        for (auto&& it : inputData)
        {
            BindingPointInfo bindingInfo = m_Parser->GetNetworkInputBindingInfo(it.first);
            inputTensors.push_back({ bindingInfo.first, armnn::ConstTensor(bindingInfo.second, it.second.data()) });
        }

        // Allocates storage for the output tensors to be written to and sets up the armnn output tensors.
        std::map<std::string, boost::multi_array<float, NumOutputDimensions>> outputStorage;
        armnn::OutputTensors outputTensors;
        for (auto&& it : expectedOutputData)
        {
            BindingPointInfo bindingInfo = m_Parser->GetNetworkOutputBindingInfo(it.first);
            outputStorage.emplace(it.first, MakeTensor<float, NumOutputDimensions>(bindingInfo.second));
            outputTensors.push_back(
                { bindingInfo.first, armnn::Tensor(bindingInfo.second, outputStorage.at(it.first).data()) });
        }

        runtime.first->EnqueueWorkload(m_NetworkIdentifier, inputTensors, outputTensors);

        // Compares each output tensor to the expected values.
        for (auto&& it : expectedOutputData)
        {
            BindingPointInfo bindingInfo = m_Parser->GetNetworkOutputBindingInfo(it.first);
            if (bindingInfo.second.GetNumElements() != it.second.size())
            {
                // All four placeholders (%1%..%4%) must be fed, otherwise
                // boost::format throws too_few_args instead of this diagnostic.
                throw armnn::Exception(boost::str(
                    boost::format("Output tensor %1% is expected to have %2% elements. "
                                  "%3% elements supplied. %4%")
                                  % it.first
                                  % bindingInfo.second.GetNumElements()
                                  % it.second.size()
                                  % CHECK_LOCATION().AsString()));
            }

            auto outputExpected = MakeTensor<float, NumOutputDimensions>(bindingInfo.second, it.second);
            BOOST_TEST(CompareTensors(outputExpected, outputStorage[it.first]));
        }
    }
}
201 } // namespace armnnUtils