// IVGCVSW-5301: Remove all boost::numeric_cast from armnn/src/profiling
// Source: platform/upstream/armnn.git — src/profiling/test/FileOnlyProfilingDecoratorTests.cpp
//
// Copyright © 2019 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include "PrintPacketHeaderHandler.hpp"
#include "ProfilingTestUtils.hpp"
#include "TestTimelinePacketHandler.hpp"

#include <Filesystem.hpp>
#include <LabelsAndEventClasses.hpp>
#include <ProfilingService.hpp>
#include <Runtime.hpp>

#include <boost/test/unit_test.hpp>

#include <cstdio>
#include <iostream>
#include <memory>
#include <sstream>
#include <string>
#include <vector>

#include <sys/stat.h>
20 using namespace armnn::profiling;
21 using namespace armnn;
22
23 using namespace std::chrono_literals;
24
25 class FileOnlyHelperService : public ProfilingService
26 {
27     public:
28     // Wait for a notification from the send thread
29     bool WaitForPacketsSent(uint32_t timeout = 1000)
30     {
31         return ProfilingService::WaitForPacketSent(m_ProfilingService, timeout);
32     }
33     armnn::profiling::ProfilingService m_ProfilingService;
34 };
35
36 BOOST_AUTO_TEST_SUITE(FileOnlyProfilingDecoratorTests)
37
38 BOOST_AUTO_TEST_CASE(TestFileOnlyProfiling)
39 {
40     // Get all registered backends
41     std::vector<BackendId> suitableBackends = GetSuitableBackendRegistered();
42
43     // Run test for each backend separately
44     for (auto const& backend : suitableBackends)
45     {
46         // Enable m_FileOnly but also provide ILocalPacketHandler which should consume the packets.
47         // This won't dump anything to file.
48         armnn::Runtime::CreationOptions creationOptions;
49         creationOptions.m_ProfilingOptions.m_EnableProfiling     = true;
50         creationOptions.m_ProfilingOptions.m_FileOnly            = true;
51         creationOptions.m_ProfilingOptions.m_CapturePeriod       = 100;
52         creationOptions.m_ProfilingOptions.m_TimelineEnabled     = true;
53         ILocalPacketHandlerSharedPtr localPacketHandlerPtr = std::make_shared<TestTimelinePacketHandler>();
54         creationOptions.m_ProfilingOptions.m_LocalPacketHandlers.push_back(localPacketHandlerPtr);
55
56         armnn::Runtime runtime(creationOptions);
57         // ensure the GUID generator is reset to zero
58         GetProfilingService(&runtime).ResetGuidGenerator();
59
60         // Load a simple network
61         // build up the structure of the network
62         INetworkPtr net(INetwork::Create());
63
64         IConnectableLayer* input = net->AddInputLayer(0, "input");
65
66         ElementwiseUnaryDescriptor descriptor(UnaryOperation::Rsqrt);
67         IConnectableLayer* Rsqrt = net->AddElementwiseUnaryLayer(descriptor, "Rsqrt");
68
69         IConnectableLayer* output = net->AddOutputLayer(0, "output");
70
71         input->GetOutputSlot(0).Connect(Rsqrt->GetInputSlot(0));
72         Rsqrt->GetOutputSlot(0).Connect(output->GetInputSlot(0));
73
74         input->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 4, 4 }, DataType::Float32));
75         Rsqrt->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 4, 4 }, DataType::Float32));
76
77         std::vector<armnn::BackendId> backendsVec {backend};
78         IOptimizedNetworkPtr optNet = Optimize(*net, backendsVec, runtime.GetDeviceSpec());
79
80         // Load it into the runtime. It should succeed.
81         armnn::NetworkId netId;
82         BOOST_TEST(runtime.LoadNetwork(netId, std::move(optNet)) == Status::Success);
83
84         // Creates structures for input & output.
85         std::vector<float> inputData(16);
86         std::vector<float> outputData(16);
87         for (unsigned int i = 0; i < 16; ++i) {
88             inputData[i] = 9.0;
89             outputData[i] = 3.0;
90         }
91
92         InputTensors inputTensors
93         {
94             {0, ConstTensor(runtime.GetInputTensorInfo(netId, 0), inputData.data())}
95         };
96         OutputTensors outputTensors
97         {
98             {0, Tensor(runtime.GetOutputTensorInfo(netId, 0), outputData.data())}
99         };
100
101         // Does the inference.
102         runtime.EnqueueWorkload(netId, inputTensors, outputTensors);
103
104         static_cast<TestTimelinePacketHandler *>(localPacketHandlerPtr.get())->WaitOnInferenceCompletion(3000);
105
106         const TimelineModel &model =
107                 static_cast<TestTimelinePacketHandler *>(localPacketHandlerPtr.get())->GetTimelineModel();
108
109         for (auto &error : model.GetErrors()) {
110             std::cout << error.what() << std::endl;
111         }
112         BOOST_TEST(model.GetErrors().empty());
113         std::vector<std::string> desc = GetModelDescription(model);
114         std::vector<std::string> expectedOutput;
115         expectedOutput.push_back("Entity [0] name = input type = layer");
116         expectedOutput.push_back("   connection [17] from entity [0] to entity [1]");
117         expectedOutput.push_back("   child: Entity [26] backendId = " + backend.Get() + " type = workload");
118         expectedOutput.push_back("Entity [1] name = Rsqrt type = layer");
119         expectedOutput.push_back("   connection [25] from entity [1] to entity [2]");
120         expectedOutput.push_back("   child: Entity [18] backendId = " + backend.Get() + " type = workload");
121         expectedOutput.push_back("Entity [2] name = output type = layer");
122         expectedOutput.push_back("   child: Entity [30] backendId = " + backend.Get() + " type = workload");
123         expectedOutput.push_back("Entity [6] processId = [processId] type = network");
124         expectedOutput.push_back("   child: Entity [0] name = input type = layer");
125         expectedOutput.push_back("   child: Entity [1] name = Rsqrt type = layer");
126         expectedOutput.push_back("   child: Entity [2] name = output type = layer");
127         expectedOutput.push_back("   execution: Entity [34] type = inference");
128         expectedOutput.push_back("   event: [8] class [start_of_life]");
129         expectedOutput.push_back("Entity [18] backendId = " + backend.Get() + " type = workload");
130         expectedOutput.push_back("   execution: Entity [47] type = workload_execution");
131         expectedOutput.push_back("Entity [26] backendId = " + backend.Get() + " type = workload");
132         expectedOutput.push_back("   execution: Entity [39] type = workload_execution");
133         expectedOutput.push_back("Entity [30] backendId = " + backend.Get() + " type = workload");
134         expectedOutput.push_back("   execution: Entity [55] type = workload_execution");
135         expectedOutput.push_back("Entity [34] type = inference");
136         expectedOutput.push_back("   child: Entity [39] type = workload_execution");
137         expectedOutput.push_back("   child: Entity [47] type = workload_execution");
138         expectedOutput.push_back("   child: Entity [55] type = workload_execution");
139         expectedOutput.push_back("   event: [37] class [start_of_life]");
140         expectedOutput.push_back("   event: [63] class [end_of_life]");
141         expectedOutput.push_back("Entity [39] type = workload_execution");
142         expectedOutput.push_back("   event: [43] class [start_of_life]");
143         expectedOutput.push_back("   event: [45] class [end_of_life]");
144         expectedOutput.push_back("Entity [47] type = workload_execution");
145         expectedOutput.push_back("   event: [51] class [start_of_life]");
146         expectedOutput.push_back("   event: [53] class [end_of_life]");
147         expectedOutput.push_back("Entity [55] type = workload_execution");
148         expectedOutput.push_back("   event: [59] class [start_of_life]");
149         expectedOutput.push_back("   event: [61] class [end_of_life]");
150         BOOST_TEST(CompareOutput(desc, expectedOutput));
151     }
152 }
153
154 BOOST_AUTO_TEST_CASE(DumpOutgoingValidFileEndToEnd)
155 {
156     // Get all registered backends
157     std::vector<BackendId> suitableBackends = GetSuitableBackendRegistered();
158
159     // Run test for each backend separately
160     for (auto const& backend : suitableBackends)
161     {
162         // Create a temporary file name.
163         fs::path tempPath = armnnUtils::Filesystem::NamedTempFile("DumpOutgoingValidFileEndToEnd_CaptureFile.txt");
164         // Make sure the file does not exist at this point
165         BOOST_CHECK(!fs::exists(tempPath));
166
167         armnn::Runtime::CreationOptions options;
168         options.m_ProfilingOptions.m_EnableProfiling     = true;
169         options.m_ProfilingOptions.m_FileOnly            = true;
170         options.m_ProfilingOptions.m_IncomingCaptureFile = "";
171         options.m_ProfilingOptions.m_OutgoingCaptureFile = tempPath.string();
172         options.m_ProfilingOptions.m_CapturePeriod       = 100;
173         options.m_ProfilingOptions.m_TimelineEnabled     = true;
174
175         ILocalPacketHandlerSharedPtr localPacketHandlerPtr = std::make_shared<TestTimelinePacketHandler>();
176         options.m_ProfilingOptions.m_LocalPacketHandlers.push_back(localPacketHandlerPtr);
177
178         armnn::Runtime runtime(options);
179         // ensure the GUID generator is reset to zero
180         GetProfilingService(&runtime).ResetGuidGenerator();
181
182         // Load a simple network
183         // build up the structure of the network
184         INetworkPtr net(INetwork::Create());
185
186         IConnectableLayer* input = net->AddInputLayer(0, "input");
187
188         ElementwiseUnaryDescriptor descriptor(UnaryOperation::Rsqrt);
189         IConnectableLayer* Rsqrt = net->AddElementwiseUnaryLayer(descriptor, "Rsqrt");
190
191         IConnectableLayer* output = net->AddOutputLayer(0, "output");
192
193         input->GetOutputSlot(0).Connect(Rsqrt->GetInputSlot(0));
194         Rsqrt->GetOutputSlot(0).Connect(output->GetInputSlot(0));
195
196         input->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 4, 4 }, DataType::Float32));
197         Rsqrt->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 4, 4 }, DataType::Float32));
198
199
200         std::vector<BackendId> backendsVec{backend};
201         IOptimizedNetworkPtr optNet = Optimize(*net, backendsVec, runtime.GetDeviceSpec());
202
203         // Load it into the runtime. It should succeed.
204         armnn::NetworkId netId;
205         BOOST_TEST(runtime.LoadNetwork(netId, std::move(optNet)) == Status::Success);
206
207         // Creates structures for input & output.
208         std::vector<float> inputData(16);
209         std::vector<float> outputData(16);
210         for (unsigned int i = 0; i < 16; ++i) {
211             inputData[i] = 9.0;
212             outputData[i] = 3.0;
213         }
214
215         InputTensors inputTensors
216         {
217             {0, ConstTensor(runtime.GetInputTensorInfo(netId, 0), inputData.data())}
218         };
219         OutputTensors outputTensors
220         {
221             {0, Tensor(runtime.GetOutputTensorInfo(netId, 0), outputData.data())}
222         };
223
224         // Does the inference.
225         runtime.EnqueueWorkload(netId, inputTensors, outputTensors);
226
227         static_cast<TestTimelinePacketHandler *>(localPacketHandlerPtr.get())->WaitOnInferenceCompletion(3000);
228
229         // In order to flush the files we need to gracefully close the profiling service.
230         options.m_ProfilingOptions.m_EnableProfiling = false;
231         GetProfilingService(&runtime).ResetExternalProfilingOptions(options.m_ProfilingOptions, true);
232
233         // The output file size should be greater than 0.
234         BOOST_CHECK(fs::file_size(tempPath) > 0);
235
236         // NOTE: would be an interesting exercise to take this file and decode it
237
238         // Delete the tmp file.
239         BOOST_CHECK(fs::remove(tempPath));
240     }
241 }
242
243 BOOST_AUTO_TEST_SUITE_END()