arm_compute v18.02
examples/graph_inception_v3.cpp
1 /*
2  * Copyright (c) 2017-2018 ARM Limited.
3  *
4  * SPDX-License-Identifier: MIT
5  *
6  * Permission is hereby granted, free of charge, to any person obtaining a copy
7  * of this software and associated documentation files (the "Software"), to
8  * deal in the Software without restriction, including without limitation the
9  * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
10  * sell copies of the Software, and to permit persons to whom the Software is
11  * furnished to do so, subject to the following conditions:
12  *
13  * The above copyright notice and this permission notice shall be included in all
14  * copies or substantial portions of the Software.
15  *
16  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
19  * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20  * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21  * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
22  * SOFTWARE.
23  */
24 #include "arm_compute/graph/Graph.h"
25 #include "arm_compute/graph/Nodes.h"
26 #include "arm_compute/graph/SubGraph.h"
27 #include "support/ToolchainSupport.h"
28 #include "utils/GraphUtils.h"
29 #include "utils/Utils.h"
30
31 #include <cstdlib>
32 #include <tuple>
33
34 using namespace arm_compute::utils;
35 using namespace arm_compute::graph;
36 using namespace arm_compute::graph_utils;
37
38 /** Example demonstrating how to implement InceptionV3's network using the Compute Library's graph API
39  *
40  * @param[in] argc Number of arguments
41  * @param[in] argv Arguments ( [optional] Target (0 = NEON, 1 = OpenCL, 2 = OpenCL with Tuner), [optional] Path to the weights folder, [optional] image, [optional] labels )
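 *
 * Example invocation (illustrative only; the data folder, image and labels paths are placeholders):
 *   ./graph_inception_v3 1 /path/to/cnn_data /path/to/image.ppm /path/to/labels.txt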
42  */
43 class InceptionV3Example : public Example
44 {
45 public:
46     void do_setup(int argc, char **argv) override
47     {
48         std::string data_path; /* Path to the folder containing the trained weights */
49         std::string image;     /* Path to the input image */
50         std::string label;     /* Path to the labels text file */
51
52         // Create a preprocessor object
53         std::unique_ptr<IPreprocessor> preprocessor = arm_compute::support::cpp14::make_unique<TFPreproccessor>();
54
55         // Set target. 0 (NEON), 1 (OpenCL), 2 (OpenCL with Tuner). By default it is NEON
56         const int  int_target_hint = argc > 1 ? std::strtol(argv[1], nullptr, 10) : 0;
57         TargetHint target_hint     = set_target_hint(int_target_hint);
58
59         // Parse arguments
60         if(argc < 2)
61         {
62             // Print help
63             std::cout << "Usage: " << argv[0] << " [target] [path_to_data] [image] [labels]\n\n";
64             std::cout << "No data folder provided: using random values\n\n";
65         }
66         else if(argc == 2)
67         {
68             std::cout << "Usage: " << argv[0] << " " << argv[1] << " [path_to_data] [image] [labels]\n\n";
69             std::cout << "No data folder provided: using random values\n\n";
70         }
71         else if(argc == 3)
72         {
73             data_path = argv[2];
74             std::cout << "Usage: " << argv[0] << " " << argv[1] << " " << argv[2] << " [image] [labels]\n\n";
75             std::cout << "No image provided: using random values\n\n";
76         }
77         else if(argc == 4)
78         {
79             data_path = argv[2];
80             image     = argv[3];
81             std::cout << "Usage: " << argv[0] << " " << argv[1] << " " << argv[2] << " " << argv[3] << " [labels]\n\n";
82             std::cout << "No text file with labels provided: skipping output accessor\n\n";
83         }
84         else
85         {
86             data_path = argv[2];
87             image     = argv[3];
88             label     = argv[4];
89         }
90
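        // Network stem, as built by the chain below: the 299x299x3 input goes through three 3x3
        // convolutions (stride 2, then two at stride 1), a 3x3 max pool, a 1x1 and a 3x3 convolution,
        // and a second 3x3 max pool. Every convolution is followed by batch normalization and a ReLU.
        // The BatchNorm gamma is fed by get_random_accessor(1.f, 1.f), i.e. a constant 1, presumably
        // because the TF checkpoint does not store a scale (gamma) parameter. If no data folder is
        // provided, the weight accessors fall back to dummy/random values, as the usage messages state.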
91         graph << target_hint << Tensor(TensorInfo(TensorShape(299U, 299U, 3U, 1U), 1, DataType::F32),
92                                        get_input_accessor(image, std::move(preprocessor), false))
93
94               << ConvolutionLayer(3U, 3U, 32U,
95                                   get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Conv2d_1a_3x3_weights.npy"),
96                                   std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(2, 2, 0, 0))
97               << BatchNormalizationLayer(get_weights_accessor(data_path,
98                                                               "/cnn_data/inceptionv3_model/Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
99                                          get_weights_accessor(data_path,
100                                                               "/cnn_data/inceptionv3_model/Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
101                                          get_random_accessor(1.f, 1.f), get_weights_accessor(data_path,
102                                                                                              "/cnn_data/inceptionv3_model/Conv2d_1a_3x3_BatchNorm_beta.npy"),
103                                          0.001f, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
104
105               << ConvolutionLayer(3U, 3U, 32U,
106                                   get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Conv2d_2a_3x3_weights.npy"),
107                                   std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
108               << BatchNormalizationLayer(get_weights_accessor(data_path,
109                                                               "/cnn_data/inceptionv3_model/Conv2d_2a_3x3_BatchNorm_moving_mean.npy"),
110                                          get_weights_accessor(data_path,
111                                                               "/cnn_data/inceptionv3_model/Conv2d_2a_3x3_BatchNorm_moving_variance.npy"),
112                                          get_random_accessor(1.f, 1.f), get_weights_accessor(data_path,
113                                                                                              "/cnn_data/inceptionv3_model/Conv2d_2a_3x3_BatchNorm_beta.npy"),
114                                          0.001f, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
115
116               << ConvolutionLayer(3U, 3U, 64U,
117                                   get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Conv2d_2b_3x3_weights.npy"),
118                                   std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 1, 1))
119               << BatchNormalizationLayer(get_weights_accessor(data_path,
120                                                               "/cnn_data/inceptionv3_model/Conv2d_2b_3x3_BatchNorm_moving_mean.npy"),
121                                          get_weights_accessor(data_path,
122                                                               "/cnn_data/inceptionv3_model/Conv2d_2b_3x3_BatchNorm_moving_variance.npy"),
123                                          get_random_accessor(1.f, 1.f), get_weights_accessor(data_path,
124                                                                                              "/cnn_data/inceptionv3_model/Conv2d_2b_3x3_BatchNorm_beta.npy"),
125                                          0.001f, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
126
127               << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL)))
128
129               << ConvolutionLayer(1U, 1U, 80U,
130                                   get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Conv2d_3b_1x1_weights.npy"),
131                                   std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
132               << BatchNormalizationLayer(get_weights_accessor(data_path,
133                                                               "/cnn_data/inceptionv3_model/Conv2d_3b_1x1_BatchNorm_moving_mean.npy"),
134                                          get_weights_accessor(data_path,
135                                                               "/cnn_data/inceptionv3_model/Conv2d_3b_1x1_BatchNorm_moving_variance.npy"),
136                                          get_random_accessor(1.f, 1.f), get_weights_accessor(data_path,
137                                                                                              "/cnn_data/inceptionv3_model/Conv2d_3b_1x1_BatchNorm_beta.npy"),
138                                          0.001f, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
139
140               << ConvolutionLayer(3U, 3U, 192U,
141                                   get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Conv2d_4a_3x3_weights.npy"),
142                                   std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
143               << BatchNormalizationLayer(get_weights_accessor(data_path,
144                                                               "/cnn_data/inceptionv3_model/Conv2d_4a_3x3_BatchNorm_moving_mean.npy"),
145                                          get_weights_accessor(data_path,
146                                                               "/cnn_data/inceptionv3_model/Conv2d_4a_3x3_BatchNorm_moving_variance.npy"),
147                                          get_random_accessor(1.f, 1.f), get_weights_accessor(data_path,
148                                                                                              "/cnn_data/inceptionv3_model/Conv2d_4a_3x3_BatchNorm_beta.npy"),
149                                          0.001f, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
150
151               << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL)))
152
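              // Inception modules: three "A" blocks (Mixed_5b..5d), a grid-size-reduction "B" block
              // (Mixed_6a), four "C" blocks (Mixed_6b..6e), a reduction "D" block (Mixed_7a) and two
              // "E" blocks (Mixed_7b, Mixed_7c), built by the helper functions defined below.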
153               << get_inception_node_A(data_path, "Mixed_5b", 64U, std::make_tuple(48U, 64U), std::make_tuple(64U, 96U, 96U),
154                                       32U)
155               << get_inception_node_A(data_path, "Mixed_5c", 64U, std::make_tuple(48U, 64U), std::make_tuple(64U, 96U, 96U),
156                                       64U, true)
157               << get_inception_node_A(data_path, "Mixed_5d", 64U, std::make_tuple(48U, 64U), std::make_tuple(64U, 96U, 96U),
158                                       64U)
159
160               << get_inception_node_B(data_path, "Mixed_6a", 384U, std::make_tuple(64U, 96U, 96U))
161
162               << get_inception_node_C(data_path, "Mixed_6b", 192U, std::make_tuple(128U, 128U, 192U),
163                                       std::make_tuple(128U, 128U, 128U, 128U, 192U), 192U)
164               << get_inception_node_C(data_path, "Mixed_6c", 192U, std::make_tuple(160U, 160U, 192U),
165                                       std::make_tuple(160U, 160U, 160U, 160U, 192U), 192U)
166               << get_inception_node_C(data_path, "Mixed_6d", 192U, std::make_tuple(160U, 160U, 192U),
167                                       std::make_tuple(160U, 160U, 160U, 160U, 192U), 192U)
168               << get_inception_node_C(data_path, "Mixed_6e", 192U, std::make_tuple(192U, 192U, 192U),
169                                       std::make_tuple(192U, 192U, 192U, 192U, 192U), 192U)
170
171               << get_inception_node_D(data_path, "Mixed_7a", std::make_tuple(192U, 320U),
172                                       std::make_tuple(192U, 192U, 192U, 192U))
173
174               << get_inception_node_E(data_path, "Mixed_7b", 320U, std::make_tuple(384U, 384U, 384U),
175                                       std::make_tuple(448U, 384U, 384U, 384U), 192U)
176               << get_inception_node_E(data_path, "Mixed_7c", 320U, std::make_tuple(384U, 384U, 384U),
177                                       std::make_tuple(448U, 384U, 384U, 384U), 192U, true)
178
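              // Classifier head: an 8x8 average pooling, a 1x1 convolution producing 1001 logits
              // (the TF checkpoint's 1000 ImageNet classes plus a background class), a reshape to a
              // flat tensor and a softmax. The output accessor prints the top-5 predictions when a
              // labels file has been provided.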
179               << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 8, PadStrideInfo(1, 1, 0, 0, DimensionRoundingType::CEIL)))
180               << ConvolutionLayer(1U, 1U, 1001U, get_weights_accessor(data_path,
181                                                                       "/cnn_data/inceptionv3_model/Logits_Conv2d_1c_1x1_weights.npy"),
182                                   get_weights_accessor(data_path,
183                                                        "/cnn_data/inceptionv3_model/Logits_Conv2d_1c_1x1_biases.npy"),
184                                   PadStrideInfo(1, 1, 0, 0))
185               << ReshapeLayer(TensorShape(1001U)) << SoftmaxLayer()
186               << Tensor(get_output_accessor(label, 5));
187
188         // The OpenCL tuner can only be enabled if graph_init() is called after all nodes have been instantiated
189         graph.graph_init(int_target_hint == 2);
190     }
191
192     void do_run() override
193     {
194         graph.run();
195     }
196
197 private:
198     Graph graph{};
199
200 private:
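    /** Builds the "A"-type inception block (Mixed_5b..5d): four branches that are depth-concatenated:
     *  a 1x1 convolution, a 1x1 -> 5x5 chain, a 1x1 -> 3x3 -> 3x3 chain, and a 3x3 average pool
     *  followed by a 1x1 convolution. Each convolution is followed by batch normalization and a ReLU.
     *  @p is_name_different selects the alternate weight-file naming the TF model uses for Mixed_5c.
     */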
201     BranchLayer get_inception_node_A(const std::string &data_path, std::string &&param_path,
202                                      unsigned int a_filt,
203                                      std::tuple<unsigned int, unsigned int> b_filters,
204                                      std::tuple<unsigned int, unsigned int, unsigned int> c_filters,
205                                      unsigned int d_filt,
206                                      bool         is_name_different = false)
207     {
208         std::string total_path = "/cnn_data/inceptionv3_model/" + param_path + "_";
209         std::cout << total_path << std::endl; // Print the weight-file prefix used for this block (trace output)
210
211         // Account for a naming inconsistency in the TF model's checkpoint files
212         std::string conv_id0 = "_0a_";
213         std::string conv_id1 = "2d_0b_";
214         if(is_name_different)
215         {
216             conv_id0 = "_0b_";
217             conv_id1 = "_1_0c_";
218         }
219
220         SubGraph i_a;
221         i_a << ConvolutionLayer(
222                 1U, 1U, a_filt,
223                 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_weights.npy"),
224                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
225                 PadStrideInfo(1, 1, 0, 0))
226             << BatchNormalizationLayer(
227                 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
228                 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
229                 get_random_accessor(1.f, 1.f),
230                 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"),
231                 0.001f, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
232
233         SubGraph i_b;
234         i_b << ConvolutionLayer(
235                 1U, 1U, std::get<0>(b_filters),
236                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id0 + "1x1_weights.npy"),
237                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
238                 PadStrideInfo(1, 1, 0, 0))
239             << BatchNormalizationLayer(
240                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id0 + "1x1_BatchNorm_moving_mean.npy"),
241                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id0 + "1x1_BatchNorm_moving_variance.npy"),
242                 get_random_accessor(1.f, 1.f),
243                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id0 + "1x1_BatchNorm_beta.npy"),
244                 0.001f, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
245             << ConvolutionLayer(
246                 5U, 5U, std::get<1>(b_filters),
247                 get_weights_accessor(data_path, total_path + "Branch_1_Conv" + conv_id1 + "5x5_weights.npy"),
248                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
249                 PadStrideInfo(1, 1, 2, 2))
250             << BatchNormalizationLayer(
251                 get_weights_accessor(data_path, total_path + "Branch_1_Conv" + conv_id1 + "5x5_BatchNorm_moving_mean.npy"),
252                 get_weights_accessor(data_path, total_path + "Branch_1_Conv" + conv_id1 + "5x5_BatchNorm_moving_variance.npy"),
253                 get_random_accessor(1.f, 1.f),
254                 get_weights_accessor(data_path, total_path + "Branch_1_Conv" + conv_id1 + "5x5_BatchNorm_beta.npy"),
255                 0.001f, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
256
257         SubGraph i_c;
258         i_c << ConvolutionLayer(
259                 1U, 1U, std::get<0>(c_filters),
260                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_weights.npy"),
261                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
262                 PadStrideInfo(1, 1, 0, 0))
263             << BatchNormalizationLayer(
264                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
265                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
266                 get_random_accessor(1.f, 1.f),
267                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"),
268                 0.001f, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
269             << ConvolutionLayer(
270                 3U, 3U, std::get<1>(c_filters),
271                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_weights.npy"),
272                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
273                 PadStrideInfo(1, 1, 1, 1))
274             << BatchNormalizationLayer(
275                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
276                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
277                 get_random_accessor(1.f, 1.f),
278                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_beta.npy"),
279                 0.001f, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
280             << ConvolutionLayer(
281                 3U, 3U, std::get<2>(c_filters),
282                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_3x3_weights.npy"),
283                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
284                 PadStrideInfo(1, 1, 1, 1))
285             << BatchNormalizationLayer(
286                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_moving_mean.npy"),
287                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_moving_variance.npy"),
288                 get_random_accessor(1.f, 1.f),
289                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_beta.npy"),
290                 0.001f, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
291
292         SubGraph i_d;
293         i_d << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 3, PadStrideInfo(1, 1, 1, 1, DimensionRoundingType::CEIL), true))
294             << ConvolutionLayer(
295                 1U, 1U, d_filt,
296                 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_weights.npy"),
297                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
298                 PadStrideInfo(1, 1, 0, 0))
299             << BatchNormalizationLayer(
300                 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_mean.npy"),
301                 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_variance.npy"),
302                 get_random_accessor(1.f, 1.f),
303                 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_beta.npy"),
304                 0.001f, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
305
306         return BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d));
307     }
308
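    /** Builds the "B"-type grid-size-reduction block (Mixed_6a): a stride-2 3x3 convolution, a
     *  1x1 -> 3x3 -> stride-2 3x3 chain, and a stride-2 3x3 max pool, depth-concatenated.
     *  Each convolution is followed by batch normalization and a ReLU.
     */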
309     BranchLayer get_inception_node_B(const std::string &data_path, std::string &&param_path,
310                                      unsigned int a_filt,
311                                      std::tuple<unsigned int, unsigned int, unsigned int> b_filters)
312     {
313         std::string total_path = "/cnn_data/inceptionv3_model/" + param_path + "_";
314         SubGraph    i_a;
315         i_a << ConvolutionLayer(
316                 3U, 3U, a_filt,
317                 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_1x1_weights.npy"),
318                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
319                 PadStrideInfo(2, 2, 0, 0))
320             << BatchNormalizationLayer(
321                 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_1x1_BatchNorm_moving_mean.npy"),
322                 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_1x1_BatchNorm_moving_variance.npy"),
323                 get_random_accessor(1.f, 1.f),
324                 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_1x1_BatchNorm_beta.npy"),
325                 0.001f, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
326
327         SubGraph i_b;
328         i_b << ConvolutionLayer(
329                 1U, 1U, std::get<0>(b_filters),
330                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy"),
331                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
332                 PadStrideInfo(1, 1, 0, 0))
333             << BatchNormalizationLayer(
334                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
335                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
336                 get_random_accessor(1.f, 1.f),
337                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
338                 0.001f, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
339             << ConvolutionLayer(
340                 3U, 3U, std::get<1>(b_filters),
341                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_weights.npy"),
342                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
343                 PadStrideInfo(1, 1, 1, 1))
344             << BatchNormalizationLayer(
345                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
346                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
347                 get_random_accessor(1.f, 1.f),
348                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_beta.npy"),
349                 0.001f, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
350             << ConvolutionLayer(
351                 3U, 3U, std::get<2>(b_filters),
352                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_1x1_weights.npy"),
353                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
354                 PadStrideInfo(2, 2, 0, 0))
355             << BatchNormalizationLayer(
356                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_1x1_BatchNorm_moving_mean.npy"),
357                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_1x1_BatchNorm_moving_variance.npy"),
358                 get_random_accessor(1.f, 1.f),
359                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_1x1_BatchNorm_beta.npy"),
360                 0.001f, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
361
362         SubGraph i_c;
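        // The pooling-only branch has no weights to load; the linear activation (f(x) = a*x + b with
        // a = 1, b = 0) appended after the max pool is effectively an identity pass-through.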
363         i_c << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL)))
364             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LINEAR, 1.f, 0.f));
365
366         return BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_a), std::move(i_b), std::move(i_c));
367     }
368
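    /** Builds the "C"-type inception block (Mixed_6b..6e), which factorizes 7x7 convolutions: a 1x1
     *  branch, a 1x1 -> 1x7 -> 7x1 branch, a 1x1 -> 7x1 -> 1x7 -> 7x1 -> 1x7 branch, and a 3x3 average
     *  pool followed by a 1x1 convolution, all depth-concatenated. Each convolution is followed by
     *  batch normalization and a ReLU.
     */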
369     BranchLayer get_inception_node_C(const std::string &data_path, std::string &&param_path,
370                                      unsigned int a_filt,
371                                      std::tuple<unsigned int, unsigned int, unsigned int> b_filters,
372                                      std::tuple<unsigned int, unsigned int, unsigned int, unsigned int, unsigned int> c_filters,
373                                      unsigned int d_filt)
374     {
375         std::string total_path = "/cnn_data/inceptionv3_model/" + param_path + "_";
376         SubGraph    i_a;
377         i_a << ConvolutionLayer(
378                 1U, 1U, a_filt,
379                 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_weights.npy"),
380                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
381                 PadStrideInfo(1, 1, 0, 0))
382             << BatchNormalizationLayer(
383                 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
384                 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
385                 get_random_accessor(1.f, 1.f),
386                 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"),
387                 0.001f, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
388
389         SubGraph i_b;
390         i_b << ConvolutionLayer(
391                 1U, 1U, std::get<0>(b_filters),
392                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy"),
393                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
394                 PadStrideInfo(1, 1, 0, 0))
395             << BatchNormalizationLayer(
396                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
397                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
398                 get_random_accessor(1.f, 1.f),
399                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
400                 0.001f, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
401             << ConvolutionLayer(
402                 7U, 1U, std::get<1>(b_filters),
403                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_weights.npy"),
404                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
405                 PadStrideInfo(1, 1, 3, 0))
406             << BatchNormalizationLayer(
407                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_mean.npy"),
408                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_variance.npy"),
409                 get_random_accessor(1.f, 1.f),
410                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_beta.npy"),
411                 0.001f, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
412             << ConvolutionLayer(
413                 1U, 7U, std::get<2>(b_filters),
414                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_weights.npy"),
415                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
416                 PadStrideInfo(1, 1, 0, 3))
417             << BatchNormalizationLayer(
418                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_mean.npy"),
419                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_variance.npy"),
420                 get_random_accessor(1.f, 1.f),
421                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_beta.npy"),
422                 0.001f, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
423
424         SubGraph i_c;
425         i_c << ConvolutionLayer(
426                 1U, 1U, std::get<0>(c_filters),
427                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_weights.npy"),
428                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
429                 PadStrideInfo(1, 1, 0, 0))
430             << BatchNormalizationLayer(
431                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
432                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
433                 get_random_accessor(1.f, 1.f),
434                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"),
435                 0.001f, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
436             << ConvolutionLayer(
437                 1U, 7U, std::get<1>(c_filters),
438                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_7x1_weights.npy"),
439                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
440                 PadStrideInfo(1, 1, 0, 3))
441             << BatchNormalizationLayer(
442                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_7x1_BatchNorm_moving_mean.npy"),
443                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_7x1_BatchNorm_moving_variance.npy"),
444                 get_random_accessor(1.f, 1.f),
445                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_7x1_BatchNorm_beta.npy"),
446                 0.001f, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
447             << ConvolutionLayer(
448                 7U, 1U, std::get<2>(c_filters),
449                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x7_weights.npy"),
450                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
451                 PadStrideInfo(1, 1, 3, 0))
452             << BatchNormalizationLayer(
453                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x7_BatchNorm_moving_mean.npy"),
454                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x7_BatchNorm_moving_variance.npy"),
455                 get_random_accessor(1.f, 1.f),
456                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x7_BatchNorm_beta.npy"),
457                 0.001f, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
458             << ConvolutionLayer(
459                 1U, 7U, std::get<3>(c_filters),
460                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_7x1_weights.npy"),
461                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
462                 PadStrideInfo(1, 1, 0, 3))
463             << BatchNormalizationLayer(
464                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_7x1_BatchNorm_moving_mean.npy"),
465                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_7x1_BatchNorm_moving_variance.npy"),
466                 get_random_accessor(1.f, 1.f),
467                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_7x1_BatchNorm_beta.npy"),
468                 0.001f, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
469             << ConvolutionLayer(
470                 7U, 1U, std::get<4>(c_filters),
471                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_1x7_weights.npy"),
472                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
473                 PadStrideInfo(1, 1, 3, 0))
474             << BatchNormalizationLayer(
475                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_1x7_BatchNorm_moving_mean.npy"),
476                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_1x7_BatchNorm_moving_variance.npy"),
477                 get_random_accessor(1.f, 1.f),
478                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_1x7_BatchNorm_beta.npy"),
479                 0.001f, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
480
481         SubGraph i_d;
482         i_d << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 3, PadStrideInfo(1, 1, 1, 1, DimensionRoundingType::CEIL), true))
483             << ConvolutionLayer(
484                 1U, 1U, d_filt,
485                 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_weights.npy"),
486                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
487                 PadStrideInfo(1, 1, 0, 0))
488             << BatchNormalizationLayer(
489                 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_mean.npy"),
490                 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_variance.npy"),
491                 get_random_accessor(1.f, 1.f),
492                 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_beta.npy"),
493                 0.001f, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
494
495         return BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d));
496     }
497
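    /** Builds the "D"-type grid-size-reduction block (Mixed_7a): a 1x1 -> stride-2 3x3 chain, a
     *  1x1 -> 1x7 -> 7x1 -> stride-2 3x3 chain, and a stride-2 3x3 max pool, depth-concatenated.
     *  Each convolution is followed by batch normalization and a ReLU.
     */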
498     BranchLayer get_inception_node_D(const std::string &data_path, std::string &&param_path,
499                                      std::tuple<unsigned int, unsigned int>      a_filters,
500                                      std::tuple<unsigned int, unsigned int, unsigned int, unsigned int> b_filters)
501     {
502         std::string total_path = "/cnn_data/inceptionv3_model/" + param_path + "_";
503         SubGraph    i_a;
504         i_a << ConvolutionLayer(
505                 1U, 1U, std::get<0>(a_filters),
506                 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_weights.npy"),
507                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
508                 PadStrideInfo(1, 1, 0, 0))
509             << BatchNormalizationLayer(
510                 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
511                 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
512                 get_random_accessor(1.f, 1.f),
513                 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"),
514                 0.001f, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
515             << ConvolutionLayer(
516                 3U, 3U, std::get<1>(a_filters),
517                 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_weights.npy"),
518                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
519                 PadStrideInfo(2, 2, 0, 0))
520             << BatchNormalizationLayer(
521                 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
522                 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
523                 get_random_accessor(1.f, 1.f),
524                 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_beta.npy"),
525                 0.001f, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
526
527         SubGraph i_b;
528         i_b << ConvolutionLayer(
529                 1U, 1U, std::get<0>(b_filters),
530                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy"),
531                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
532                 PadStrideInfo(1, 1, 0, 0))
533             << BatchNormalizationLayer(
534                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
535                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
536                 get_random_accessor(1.f, 1.f),
537                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
538                 0.001f, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
539             << ConvolutionLayer(
540                 7U, 1U, std::get<1>(b_filters),
541                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_weights.npy"),
542                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
543                 PadStrideInfo(1, 1, 3, 0))
544             << BatchNormalizationLayer(
545                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_mean.npy"),
546                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_variance.npy"),
547                 get_random_accessor(1.f, 1.f),
548                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_beta.npy"),
549                 0.001f, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
550             << ConvolutionLayer(
551                 1U, 7U, std::get<2>(b_filters),
552                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_weights.npy"),
553                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
554                 PadStrideInfo(1, 1, 0, 3))
555             << BatchNormalizationLayer(
556                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_mean.npy"),
557                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_variance.npy"),
558                 get_random_accessor(1.f, 1.f),
559                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_beta.npy"),
560                 0.001f, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
561             << ConvolutionLayer(
562                 3U, 3U, std::get<3>(b_filters),
563                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_weights.npy"),
564                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
565                 PadStrideInfo(2, 2, 0, 0))
566             << BatchNormalizationLayer(
567                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
568                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
569                 get_random_accessor(1.f, 1.f),
570                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_beta.npy"),
571                 0.001f, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
572
573         SubGraph i_c;
574         i_c << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL)))
575             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LINEAR, 1.f, 0.f));
576
577         return BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_a), std::move(i_b), std::move(i_c));
578     }
579
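    /** Builds the "E"-type inception block (Mixed_7b, Mixed_7c): a 1x1 branch; a 1x1 branch whose
     *  output splits into parallel 1x3 and 3x1 convolutions that are concatenated; a 1x1 -> 3x3 branch
     *  that likewise splits into 1x3 and 3x1 convolutions; and a 3x3 average pool followed by a 1x1
     *  convolution. All branches are depth-concatenated; each convolution is followed by batch
     *  normalization and a ReLU. @p is_name_different selects the alternate weight-file naming the
     *  TF model uses for Mixed_7c.
     */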
580     BranchLayer get_inception_node_E(const std::string &data_path, std::string &&param_path,
581                                      unsigned int a_filt,
582                                      std::tuple<unsigned int, unsigned int, unsigned int> b_filters,
583                                      std::tuple<unsigned int, unsigned int, unsigned int, unsigned int> c_filters,
584                                      unsigned int d_filt,
585                                      bool         is_name_different = false)
586     {
587         // Account for a naming inconsistency in the TF model's checkpoint files
588         std::string conv_id = "_0b_";
589         if(is_name_different)
590         {
591             conv_id = "_0c_";
592         }
593
594         std::string total_path = "/cnn_data/inceptionv3_model/" + param_path + "_";
595         SubGraph    i_a;
596         i_a << ConvolutionLayer(
597                 1U, 1U, a_filt,
598                 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_weights.npy"),
599                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
600                 PadStrideInfo(1, 1, 0, 0))
601             << BatchNormalizationLayer(
602                 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
603                 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
604                 get_random_accessor(1.f, 1.f),
605                 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"),
606                 0.001f, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
607
608         SubGraph i_b1;
609         i_b1 << ConvolutionLayer(
610                  3U, 1U, std::get<1>(b_filters),
611                  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x3_weights.npy"),
612                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
613                  PadStrideInfo(1, 1, 1, 0))
614              << BatchNormalizationLayer(
615                  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_moving_mean.npy"),
616                  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_moving_variance.npy"),
617                  get_random_accessor(1.f, 1.f),
618                  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_beta.npy"),
619                  0.001f, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
620
621         SubGraph i_b2;
622         i_b2 << ConvolutionLayer(
623                  1U, 3U, std::get<2>(b_filters),
624                  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id + "3x1_weights.npy"),
625                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
626                  PadStrideInfo(1, 1, 0, 1))
627              << BatchNormalizationLayer(
628                  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id + "3x1_BatchNorm_moving_mean.npy"),
629                  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id + "3x1_BatchNorm_moving_variance.npy"),
630                  get_random_accessor(1.f, 1.f),
631                  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id + "3x1_BatchNorm_beta.npy"),
632                  0.001f, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
633
634         SubGraph i_b;
635         i_b << ConvolutionLayer(
636                 1U, 1U, std::get<0>(b_filters),
637                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy"),
638                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
639                 PadStrideInfo(1, 1, 0, 0))
640             << BatchNormalizationLayer(
641                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
642                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
643                 get_random_accessor(1.f, 1.f),
644                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
645                 0.001f, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
646             << BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_b1), std::move(i_b2));
647
648         SubGraph i_c1;
649         i_c1 << ConvolutionLayer(
650                  3U, 1U, std::get<2>(c_filters),
651                  get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x3_weights.npy"),
652                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
653                  PadStrideInfo(1, 1, 1, 0))
654              << BatchNormalizationLayer(
655                  get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x3_BatchNorm_moving_mean.npy"),
656                  get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x3_BatchNorm_moving_variance.npy"),
657                  get_random_accessor(1.f, 1.f),
658                  get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x3_BatchNorm_beta.npy"),
659                  0.001f, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
660
661         SubGraph i_c2;
662         i_c2 << ConvolutionLayer(
663                  1U, 3U, std::get<3>(c_filters),
664                  get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_3x1_weights.npy"),
665                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
666                  PadStrideInfo(1, 1, 0, 1))
667              << BatchNormalizationLayer(
668                  get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_3x1_BatchNorm_moving_mean.npy"),
669                  get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_3x1_BatchNorm_moving_variance.npy"),
670                  get_random_accessor(1.f, 1.f),
671                  get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_3x1_BatchNorm_beta.npy"),
672                  0.001f, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
673
674         SubGraph i_c;
675         i_c << ConvolutionLayer(
676                 1U, 1U, std::get<0>(c_filters),
677                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_weights.npy"),
678                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
679                 PadStrideInfo(1, 1, 0, 0))
680             << BatchNormalizationLayer(
681                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
682                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
683                 get_random_accessor(1.f, 1.f),
684                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"),
685                 0.001f, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
686             << ConvolutionLayer(
687                 3U, 3U, std::get<1>(c_filters),
688                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_weights.npy"),
689                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
690                 PadStrideInfo(1, 1, 1, 1))
691             << BatchNormalizationLayer(
692                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
693                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
694                 get_random_accessor(1.f, 1.f),
695                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_beta.npy"),
696                 0.001f, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
697             << BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_c1), std::move(i_c2));
698
699         SubGraph i_d;
700         i_d << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 3, PadStrideInfo(1, 1, 1, 1, DimensionRoundingType::CEIL), true))
701             << ConvolutionLayer(
702                 1U, 1U, d_filt,
703                 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_weights.npy"),
704                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
705                 PadStrideInfo(1, 1, 0, 0))
706             << BatchNormalizationLayer(
707                 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_mean.npy"),
708                 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_variance.npy"),
709                 get_random_accessor(1.f, 1.f),
710                 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_beta.npy"),
711                 0.001f, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
712
713         return BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d));
714     }
715 };
716
717 /** Main program for Inception V3
718  *
719  * @param[in] argc Number of arguments
720  * @param[in] argv Arguments ( [optional] Target (0 = NEON, 1 = OpenCL, 2 = OpenCL with Tuner), [optional] Path to the weights folder, [optional] image, [optional] labels )
721  */
722 int main(int argc, char **argv)
723 {
724     return arm_compute::utils::run_example<InceptionV3Example>(argc, argv);
725 }
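
// Build note (illustrative; exact paths and flags depend on the installation): the graph examples are
// usually built as part of the library's SCons build, or manually along the lines of
//   g++ examples/graph_inception_v3.cpp utils/Utils.cpp utils/GraphUtils.cpp -I. -Iinclude \
//       -std=c++11 -larm_compute_graph -larm_compute -larm_compute_core -o graph_inception_v3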