// src/armnn/test/Network_test.cpp
//
// Copyright © 2017 Arm Ltd. All rights reserved.
// See LICENSE file in the project root for full license information.
//
#include <boost/test/unit_test.hpp>

#include <sstream>
#include <vector>

#include "armnn/ArmNN.hpp"
#include "Network.hpp"
#include "Graph.hpp"
#include "backends/RefWorkloadFactory.hpp"

#include "GraphUtils.hpp"

namespace
{

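// Returns true only if every input slot of the given layer has an incoming connection.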
bool AreAllLayerInputSlotsConnected(const armnn::IConnectableLayer& layer)
{
    bool allConnected = true;
    for (unsigned int i = 0; i < layer.GetNumInputSlots(); ++i)
    {
        const bool inputConnected = layer.GetInputSlot(i).GetConnection() != nullptr;
        allConnected &= inputConnected;
    }
    return allConnected;
}

}

BOOST_AUTO_TEST_SUITE(Network)

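// Every layer added to a network should be assigned its own unique GUID.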
BOOST_AUTO_TEST_CASE(LayerGuids)
{
    armnn::Network net;
    armnn::LayerGuid inputId = net.AddInputLayer(0)->GetGuid();
    armnn::LayerGuid addId = net.AddAdditionLayer()->GetGuid();
    armnn::LayerGuid outputId = net.AddOutputLayer(0)->GetGuid();

    BOOST_TEST(inputId != addId);
    BOOST_TEST(addId != outputId);
    BOOST_TEST(inputId != outputId);
}

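// Builds a small input->addition->output network, optimizes it for CpuAcc and checks that
// SerializeToDot produces the expected Graphviz text, with each edge labelled by the shape
// of the tensor it carries.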
BOOST_AUTO_TEST_CASE(SerializeToDot)
{
    armnn::Network net;

    // Define the layers
    auto input = net.AddInputLayer(0);
    auto add = net.AddAdditionLayer();
    auto output = net.AddOutputLayer(0);

    // Connect the layers
    input->GetOutputSlot(0).Connect(add->GetInputSlot(0));
    input->GetOutputSlot(0).Connect(add->GetInputSlot(1));
    add->GetOutputSlot(0).Connect(output->GetInputSlot(0));

    armnn::TensorShape shape({4});
    armnn::TensorInfo info(shape, armnn::DataType::Float32);
    input->GetOutputSlot(0).SetTensorInfo(info);
    add->GetOutputSlot(0).SetTensorInfo(info);

    armnn::DeviceSpec spec;
    spec.DefaultComputeDevice = armnn::Compute::CpuAcc;
    armnn::IOptimizedNetworkPtr optimizedNet = armnn::Optimize(net, spec);

    std::ostringstream ss;
    optimizedNet->SerializeToDot(ss);

    auto inputId = input->GetGuid();
    auto addId = add->GetGuid();
    auto outputId = output->GetGuid();

    std::stringstream expected;
    expected <<
        "digraph Optimized {\n"
        "    node [shape=\"record\"];\n"
        "    edge [fontsize=8 fontcolor=\"blue\" fontname=\"arial-bold\"];\n"
        "    " << inputId << " [label=\"{Input}\"];\n"
        "    " << addId << " [label=\"{Addition}\"];\n"
        "    " << outputId << " [label=\"{Output}\"];\n"
        "    " << inputId << " -> " << addId << " [label=< [4] >];\n"
        "    " << inputId << " -> " << addId << " [label=< [4] >];\n"
        "    " << addId << " -> " << outputId << " [label=< [4] >];\n"
        "}\n";

    BOOST_TEST(ss.str() == expected.str());
}

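// PrintGraph on a newly constructed (empty) network should report success.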
BOOST_AUTO_TEST_CASE(NetworkBasic)
{
    armnn::Network net;
    BOOST_TEST(net.PrintGraph() == armnn::Status::Success);
}

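// The name argument of the Add*Layer methods is optional and may be omitted when calling
// through the INetwork interface.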
BOOST_AUTO_TEST_CASE(LayerNamesAreOptionalForINetwork)
{
    armnn::Network net;
    armnn::INetwork& inet = net;
    inet.AddInputLayer(0);
    inet.AddAdditionLayer();
    inet.AddActivationLayer(armnn::ActivationDescriptor());
    inet.AddOutputLayer(0);
}

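// The same holds when calling the concrete Network class directly.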
BOOST_AUTO_TEST_CASE(LayerNamesAreOptionalForNetwork)
{
    armnn::Network net;
    net.AddInputLayer(0);
    net.AddAdditionLayer();
    net.AddActivationLayer(armnn::ActivationDescriptor());
    net.AddOutputLayer(0);
}

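// Builds a linear chain of eleven layers (with the addition layer fed twice from the batch
// normalization output, and the multiplication layer fed twice from the addition output),
// then checks that every layer is present by name and that all connections are wired up
// as expected.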
BOOST_AUTO_TEST_CASE(NetworkModification)
{
    armnn::Network net;

    armnn::IConnectableLayer* const inputLayer = net.AddInputLayer(0, "input layer");
    BOOST_TEST(inputLayer);

    unsigned int dims[] = { 10,1,1,1 };
    std::vector<float> convWeightsData(10);
    armnn::ConstTensor weights(armnn::TensorInfo(4, dims, armnn::DataType::Float32), convWeightsData);

    armnn::Convolution2dDescriptor convDesc2d;
    armnn::IConnectableLayer* const convLayer = net.AddConvolution2dLayer(convDesc2d, weights, "conv layer");
    BOOST_TEST(convLayer);

    inputLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(0));

    armnn::FullyConnectedDescriptor fullyConnectedDesc;
    armnn::IConnectableLayer* const fullyConnectedLayer = net.AddFullyConnectedLayer(fullyConnectedDesc,
                                                                                     weights,
                                                                                     "fully connected");
    BOOST_TEST(fullyConnectedLayer);

    convLayer->GetOutputSlot(0).Connect(fullyConnectedLayer->GetInputSlot(0));

    armnn::Pooling2dDescriptor pooling2dDesc;
    armnn::IConnectableLayer* const poolingLayer = net.AddPooling2dLayer(pooling2dDesc, "pooling2d");
    BOOST_TEST(poolingLayer);

    fullyConnectedLayer->GetOutputSlot(0).Connect(poolingLayer->GetInputSlot(0));

    armnn::ActivationDescriptor activationDesc;
    armnn::IConnectableLayer* const activationLayer = net.AddActivationLayer(activationDesc, "activation");
    BOOST_TEST(activationLayer);

    poolingLayer->GetOutputSlot(0).Connect(activationLayer->GetInputSlot(0));

    armnn::NormalizationDescriptor normalizationDesc;
    armnn::IConnectableLayer* const normalizationLayer = net.AddNormalizationLayer(normalizationDesc, "normalization");
    BOOST_TEST(normalizationLayer);

    activationLayer->GetOutputSlot(0).Connect(normalizationLayer->GetInputSlot(0));

    armnn::SoftmaxDescriptor softmaxDesc;
    armnn::IConnectableLayer* const softmaxLayer = net.AddSoftmaxLayer(softmaxDesc, "softmax");
    BOOST_TEST(softmaxLayer);

    normalizationLayer->GetOutputSlot(0).Connect(softmaxLayer->GetInputSlot(0));

    armnn::BatchNormalizationDescriptor batchNormDesc;

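    // The values in these constant tensors do not matter here: the test only checks the
    // resulting graph structure, so one placeholder tensor is reused for mean, variance,
    // beta and gamma.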
    armnn::TensorInfo tensorInfo({ 1 }, armnn::DataType::Float32);
    std::vector<float> data(tensorInfo.GetNumBytes() / sizeof(float));
    armnn::ConstTensor invalidTensor(tensorInfo, data);

    armnn::IConnectableLayer* const batchNormalizationLayer = net.AddBatchNormalizationLayer(batchNormDesc,
        invalidTensor,
        invalidTensor,
        invalidTensor,
        invalidTensor,
        "batch norm");
    BOOST_TEST(batchNormalizationLayer);

    softmaxLayer->GetOutputSlot(0).Connect(batchNormalizationLayer->GetInputSlot(0));

    armnn::IConnectableLayer* const additionLayer = net.AddAdditionLayer("addition");
    BOOST_TEST(additionLayer);

    batchNormalizationLayer->GetOutputSlot(0).Connect(additionLayer->GetInputSlot(0));
    batchNormalizationLayer->GetOutputSlot(0).Connect(additionLayer->GetInputSlot(1));

    armnn::IConnectableLayer* const multiplicationLayer = net.AddMultiplicationLayer("multiplication");
    BOOST_TEST(multiplicationLayer);

    additionLayer->GetOutputSlot(0).Connect(multiplicationLayer->GetInputSlot(0));
    additionLayer->GetOutputSlot(0).Connect(multiplicationLayer->GetInputSlot(1));

    armnn::IConnectableLayer* const outputLayer = net.AddOutputLayer(0, "output layer");
    BOOST_TEST(outputLayer);

    multiplicationLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));

    // Test that all layers are present in the graph
    BOOST_TEST(net.GetGraph().GetNumLayers() == 11);

    // Test that the vertices exist and have correct names
    BOOST_TEST(GraphHasNamedLayer(net.GetGraph(), "input layer"));
    BOOST_TEST(GraphHasNamedLayer(net.GetGraph(), "conv layer"));
    BOOST_TEST(GraphHasNamedLayer(net.GetGraph(), "fully connected"));
    BOOST_TEST(GraphHasNamedLayer(net.GetGraph(), "pooling2d"));
    BOOST_TEST(GraphHasNamedLayer(net.GetGraph(), "activation"));
    BOOST_TEST(GraphHasNamedLayer(net.GetGraph(), "normalization"));
    BOOST_TEST(GraphHasNamedLayer(net.GetGraph(), "softmax"));
    BOOST_TEST(GraphHasNamedLayer(net.GetGraph(), "batch norm"));
    BOOST_TEST(GraphHasNamedLayer(net.GetGraph(), "addition"));
    BOOST_TEST(GraphHasNamedLayer(net.GetGraph(), "multiplication"));
    BOOST_TEST(GraphHasNamedLayer(net.GetGraph(), "output layer"));

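    // Helper lambdas that verify a source layer's single output is wired to the target
    // layer's input slot(s), in both directions, along with the expected slot counts.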
    auto checkOneOutputToOneInputConnection = []
        (const armnn::IConnectableLayer* const srcLayer,
         const armnn::IConnectableLayer* const tgtLayer,
         int expectedSrcNumInputs = 1,
         int expectedDstNumOutputs = 1)
        {
            BOOST_TEST(srcLayer->GetNumInputSlots() == expectedSrcNumInputs);
            BOOST_TEST(srcLayer->GetNumOutputSlots() == 1);
            BOOST_TEST(tgtLayer->GetNumInputSlots() == 1);
            BOOST_TEST(tgtLayer->GetNumOutputSlots() == expectedDstNumOutputs);

            BOOST_TEST(srcLayer->GetOutputSlot(0).GetNumConnections() == 1);
            BOOST_TEST(srcLayer->GetOutputSlot(0).GetConnection(0) == &tgtLayer->GetInputSlot(0));
            BOOST_TEST(&srcLayer->GetOutputSlot(0) == tgtLayer->GetInputSlot(0).GetConnection());
        };
    auto checkOneOutputToTwoInputsConnections = []
        (const armnn::IConnectableLayer* const srcLayer,
         const armnn::IConnectableLayer* const tgtLayer,
         int expectedSrcNumInputs,
         int expectedDstNumOutputs = 1)
        {
            BOOST_TEST(srcLayer->GetNumInputSlots() == expectedSrcNumInputs);
            BOOST_TEST(srcLayer->GetNumOutputSlots() == 1);
            BOOST_TEST(tgtLayer->GetNumInputSlots() == 2);
            BOOST_TEST(tgtLayer->GetNumOutputSlots() == expectedDstNumOutputs);

            BOOST_TEST(srcLayer->GetOutputSlot(0).GetNumConnections() == 2);
            for (unsigned int i = 0; i < srcLayer->GetOutputSlot(0).GetNumConnections(); ++i)
            {
                BOOST_TEST(srcLayer->GetOutputSlot(0).GetConnection(i) == &tgtLayer->GetInputSlot(i));
                BOOST_TEST(&srcLayer->GetOutputSlot(0) == tgtLayer->GetInputSlot(i).GetConnection());
            }
        };

    BOOST_TEST(AreAllLayerInputSlotsConnected(*convLayer));
    BOOST_TEST(AreAllLayerInputSlotsConnected(*fullyConnectedLayer));
    BOOST_TEST(AreAllLayerInputSlotsConnected(*poolingLayer));
    BOOST_TEST(AreAllLayerInputSlotsConnected(*activationLayer));
    BOOST_TEST(AreAllLayerInputSlotsConnected(*normalizationLayer));
    BOOST_TEST(AreAllLayerInputSlotsConnected(*softmaxLayer));
    BOOST_TEST(AreAllLayerInputSlotsConnected(*batchNormalizationLayer));
    BOOST_TEST(AreAllLayerInputSlotsConnected(*additionLayer));
    BOOST_TEST(AreAllLayerInputSlotsConnected(*multiplicationLayer));
    BOOST_TEST(AreAllLayerInputSlotsConnected(*outputLayer));

    // Check connectivity
    checkOneOutputToOneInputConnection(inputLayer, convLayer, 0);
    checkOneOutputToOneInputConnection(convLayer, fullyConnectedLayer);
    checkOneOutputToOneInputConnection(fullyConnectedLayer, poolingLayer);
    checkOneOutputToOneInputConnection(poolingLayer, activationLayer);
    checkOneOutputToOneInputConnection(activationLayer, normalizationLayer);
    checkOneOutputToOneInputConnection(normalizationLayer, softmaxLayer);
    checkOneOutputToOneInputConnection(softmaxLayer, batchNormalizationLayer);
    checkOneOutputToTwoInputsConnections(batchNormalizationLayer, additionLayer, 1);
    checkOneOutputToTwoInputsConnections(additionLayer, multiplicationLayer, 2);
    checkOneOutputToOneInputConnection(multiplicationLayer, outputLayer, 2, 0);
}

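// Splits the input into two views, applies a softmax to each, merges them back together
// and checks that the splitter and merger slots are connected in both directions.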
BOOST_AUTO_TEST_CASE(NetworkModification_SplitterMerger)
{
    armnn::Network net;

    // Add an input layer
    armnn::IConnectableLayer* inputLayer = net.AddInputLayer(0, "input layer");
    BOOST_TEST(inputLayer);

    // Add a splitter layer
    armnn::ViewsDescriptor splitterDesc(2, 4);

    armnn::IConnectableLayer* splitterLayer = net.AddSplitterLayer(splitterDesc, "splitter layer");
    BOOST_TEST(splitterLayer);

    inputLayer->GetOutputSlot(0).Connect(splitterLayer->GetInputSlot(0));

    // Add the first softmax layer
    armnn::SoftmaxDescriptor softmaxDescriptor;
    armnn::IConnectableLayer* softmaxLayer1 = net.AddSoftmaxLayer(softmaxDescriptor, "softmax_1");
    BOOST_TEST(softmaxLayer1);

    splitterLayer->GetOutputSlot(0).Connect(softmaxLayer1->GetInputSlot(0));

    // Add the second softmax layer
    armnn::IConnectableLayer* softmaxLayer2 = net.AddSoftmaxLayer(softmaxDescriptor, "softmax_2");
    BOOST_TEST(softmaxLayer2);

    splitterLayer->GetOutputSlot(1).Connect(softmaxLayer2->GetInputSlot(0));

    // Add a merger layer
    armnn::OriginsDescriptor mergerDesc(2, 4);

    armnn::IConnectableLayer* mergerLayer = net.AddMergerLayer(mergerDesc, "merger layer");
    BOOST_TEST(mergerLayer);

    softmaxLayer1->GetOutputSlot(0).Connect(mergerLayer->GetInputSlot(0));
    softmaxLayer2->GetOutputSlot(0).Connect(mergerLayer->GetInputSlot(1));

    // Add an output layer
    armnn::IConnectableLayer* outputLayer = net.AddOutputLayer(0, "output layer");
    BOOST_TEST(outputLayer);

    mergerLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));

    BOOST_TEST(splitterLayer->GetNumOutputSlots() == 2);
    BOOST_TEST(splitterLayer->GetOutputSlot(0).GetConnection(0) == &softmaxLayer1->GetInputSlot(0));
    BOOST_TEST(&splitterLayer->GetOutputSlot(0) == softmaxLayer1->GetInputSlot(0).GetConnection());
    BOOST_TEST(splitterLayer->GetOutputSlot(1).GetConnection(0) == &softmaxLayer2->GetInputSlot(0));
    BOOST_TEST(&splitterLayer->GetOutputSlot(1) == softmaxLayer2->GetInputSlot(0).GetConnection());

    BOOST_TEST(mergerLayer->GetNumInputSlots() == 2);
    BOOST_TEST(softmaxLayer1->GetOutputSlot(0).GetConnection(0) == &mergerLayer->GetInputSlot(0));
    BOOST_TEST(&softmaxLayer1->GetOutputSlot(0) == mergerLayer->GetInputSlot(0).GetConnection());
    BOOST_TEST(softmaxLayer2->GetOutputSlot(0).GetConnection(0) == &mergerLayer->GetInputSlot(1));
    BOOST_TEST(&softmaxLayer2->GetOutputSlot(0) == mergerLayer->GetInputSlot(1).GetConnection());
}

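// Same splitter topology as above, but the two softmax outputs are combined with an
// addition layer instead of a merger.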
BOOST_AUTO_TEST_CASE(NetworkModification_SplitterAddition)
{
    armnn::Network net;

    // Add an input layer
    armnn::IConnectableLayer* layer = net.AddInputLayer(0, "input layer");
    BOOST_TEST(layer);

    // Add a splitter layer
    armnn::ViewsDescriptor splitterDesc(2, 4);

    armnn::IConnectableLayer* const splitterLayer = net.AddSplitterLayer(splitterDesc, "splitter layer");
    BOOST_TEST(splitterLayer);

    layer->GetOutputSlot(0).Connect(splitterLayer->GetInputSlot(0));

    // Add the first softmax layer
    armnn::SoftmaxDescriptor softmaxDescriptor;
    armnn::IConnectableLayer* const softmax1Layer = net.AddSoftmaxLayer(softmaxDescriptor, "softmax_1");
    BOOST_TEST(softmax1Layer);

    splitterLayer->GetOutputSlot(0).Connect(softmax1Layer->GetInputSlot(0));

    // Add the second softmax layer
    armnn::IConnectableLayer* const softmax2Layer = net.AddSoftmaxLayer(softmaxDescriptor, "softmax_2");
    BOOST_TEST(softmax2Layer);

    splitterLayer->GetOutputSlot(1).Connect(softmax2Layer->GetInputSlot(0));

    // Add an addition layer
    layer = net.AddAdditionLayer("add layer");
    BOOST_TEST(layer);

    softmax1Layer->GetOutputSlot(0).Connect(layer->GetInputSlot(0));
    softmax2Layer->GetOutputSlot(0).Connect(layer->GetInputSlot(1));

    // Add an output layer
    armnn::IConnectableLayer* prevLayer = layer;
    layer = net.AddOutputLayer(0, "output layer");
    BOOST_TEST(layer);

    prevLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(0));
}

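// Same splitter topology again, this time combining the two softmax outputs with a
// multiplication layer.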
BOOST_AUTO_TEST_CASE(NetworkModification_SplitterMultiplication)
{
    armnn::Network net;

    // Add an input layer
    armnn::IConnectableLayer* layer = net.AddInputLayer(0, "input layer");
    BOOST_TEST(layer);

    // Add a splitter layer
    armnn::ViewsDescriptor splitterDesc(2, 4);
    armnn::IConnectableLayer* const splitterLayer = net.AddSplitterLayer(splitterDesc, "splitter layer");
    BOOST_TEST(splitterLayer);

    layer->GetOutputSlot(0).Connect(splitterLayer->GetInputSlot(0));

    // Add the first softmax layer
    armnn::SoftmaxDescriptor softmaxDescriptor;
    armnn::IConnectableLayer* const softmax1Layer = net.AddSoftmaxLayer(softmaxDescriptor, "softmax_1");
    BOOST_TEST(softmax1Layer);

    splitterLayer->GetOutputSlot(0).Connect(softmax1Layer->GetInputSlot(0));

    // Add the second softmax layer
    armnn::IConnectableLayer* const softmax2Layer = net.AddSoftmaxLayer(softmaxDescriptor, "softmax_2");
    BOOST_TEST(softmax2Layer);

    splitterLayer->GetOutputSlot(1).Connect(softmax2Layer->GetInputSlot(0));

    // Add a multiplication layer
    layer = net.AddMultiplicationLayer("multiplication layer");
    BOOST_TEST(layer);

    softmax1Layer->GetOutputSlot(0).Connect(layer->GetInputSlot(0));
    softmax2Layer->GetOutputSlot(0).Connect(layer->GetInputSlot(1));

    // Add an output layer
    armnn::IConnectableLayer* prevLayer = layer;
    layer = net.AddOutputLayer(0, "output layer");
    BOOST_TEST(layer);

    prevLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(0));
}

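// Builds the diamond-shaped graph sketched below, optimizes it for CpuRef and checks that
// every layer in the optimized graph can create its reference workload without throwing.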
BOOST_AUTO_TEST_CASE(ValidateWorkloads)
{
    const armnn::TensorInfo desc({3, 5}, armnn::DataType::Float32);

    armnn::Network net;

    armnn::NormalizationDescriptor nmDesc;
    armnn::ActivationDescriptor acDesc;

    //    in
    //     |
    //    nm
    //   /  |
    //  ac  |
    //   \  |
    //    ml
    //     |
    //    sm
    //     |
    //    ot
    armnn::IConnectableLayer* layer = net.AddInputLayer(0, "in");
    layer->GetOutputSlot(0).SetTensorInfo(desc);

    armnn::IConnectableLayer* const normLayer = net.AddNormalizationLayer(nmDesc, "nm");

    layer->GetOutputSlot(0).Connect(normLayer->GetInputSlot(0));
    normLayer->GetOutputSlot(0).SetTensorInfo(desc);

    layer = net.AddActivationLayer(acDesc, "ac");

    normLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(0));
    layer->GetOutputSlot(0).SetTensorInfo(desc);

    armnn::IConnectableLayer* prevLayer = layer;
    layer = net.AddMultiplicationLayer("ml");

    prevLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(0));
    normLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(1));
    layer->GetOutputSlot(0).SetTensorInfo(desc);

    prevLayer = layer;
    armnn::SoftmaxDescriptor softmaxDescriptor;
    layer = net.AddSoftmaxLayer(softmaxDescriptor, "sm");

    prevLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(0));
    layer->GetOutputSlot(0).SetTensorInfo(desc);

    prevLayer = layer;
    layer = net.AddOutputLayer(0, "ot");

    prevLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(0));

    armnn::DeviceSpec spec;
    spec.DefaultComputeDevice = armnn::Compute::CpuRef;

    armnn::IOptimizedNetworkPtr optNet = Optimize(net, spec);
    static_cast<armnn::OptimizedNetwork*>(optNet.get())->GetGraph().AllocateDynamicBuffers();

    // Validate workloads
    armnn::RefWorkloadFactory fact;
    for (auto&& layer : static_cast<armnn::OptimizedNetwork*>(optNet.get())->GetGraph())
    {
        BOOST_CHECK_NO_THROW(
            layer->CreateWorkload(static_cast<armnn::OptimizedNetwork*>(optNet.get())->GetGraph(), fact));
    }
}
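
// A minimal sketch of the build-connect-verify pattern exercised throughout this suite:
// construct a network, wire the layers together, and check connectivity with the helper
// defined at the top of the file. It relies only on calls already used by the tests above.
BOOST_AUTO_TEST_CASE(MinimalLinearNetwork)
{
    armnn::Network net;

    armnn::IConnectableLayer* const input = net.AddInputLayer(0, "in");
    armnn::IConnectableLayer* const activation = net.AddActivationLayer(armnn::ActivationDescriptor(), "act");
    armnn::IConnectableLayer* const output = net.AddOutputLayer(0, "out");

    input->GetOutputSlot(0).Connect(activation->GetInputSlot(0));
    activation->GetOutputSlot(0).Connect(output->GetInputSlot(0));

    BOOST_TEST(AreAllLayerInputSlotsConnected(*activation));
    BOOST_TEST(AreAllLayerInputSlotsConnected(*output));
    BOOST_TEST(GraphHasNamedLayer(net.GetGraph(), "act"));
}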

BOOST_AUTO_TEST_SUITE_END()