/*
 * Copyright (c) 2017-2018 ARM Limited.
 *
 * SPDX-License-Identifier: MIT
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to
 * deal in the Software without restriction, including without limitation the
 * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
 * sell copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
#include "arm_compute/graph/Graph.h"
#include "arm_compute/graph/Nodes.h"
#include "arm_compute/graph/SubGraph.h"
#include "support/ToolchainSupport.h"
#include "utils/GraphUtils.h"
#include "utils/Utils.h"

#include <cstdlib>
#include <tuple>

using namespace arm_compute::utils;
using namespace arm_compute::graph;
using namespace arm_compute::graph_utils;

/** Example demonstrating how to implement InceptionV3's network using the Compute Library's graph API
 *
 * @param[in] argc Number of arguments
 * @param[in] argv Arguments ( [optional] Target (0 = NEON, 1 = OpenCL, 2 = OpenCL with Tuner), [optional] Path to the weights folder, [optional] image, [optional] labels )
 */
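// Typical invocations (the data paths below are illustrative, not shipped with the repository):
//   ./graph_inception_v3                                       -> NEON backend, random weights and random input
//   ./graph_inception_v3 1 /path/to/data                       -> OpenCL backend, weights read from /path/to/data/cnn_data/inceptionv3_model/
//   ./graph_inception_v3 2 /path/to/data image.ppm labels.txt  -> OpenCL with the tuner, classify image.ppm and print the top-5 labels
// The weights are expected as .npy files, typically exported from the TensorFlow-Slim InceptionV3 checkpoint.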
class InceptionV3Example final : public Example
{
public:
    void do_setup(int argc, char **argv) override
    {
        std::string data_path; /* Path to the trainable data */
        std::string image;     /* Image data */
        std::string label;     /* Label data */

        // Create a preprocessor object
        std::unique_ptr<IPreprocessor> preprocessor = arm_compute::support::cpp14::make_unique<TFPreproccessor>();

        // Set target. 0 (NEON), 1 (OpenCL), 2 (OpenCL with Tuner). By default it is NEON
        const int  int_target_hint = argc > 1 ? std::strtol(argv[1], nullptr, 10) : 0;
        TargetHint target_hint     = set_target_hint(int_target_hint);

        // Parse arguments
        if(argc < 2)
        {
            // Print help
            std::cout << "Usage: " << argv[0] << " [target] [path_to_data] [image] [labels]\n\n";
            std::cout << "No data folder provided: using random values\n\n";
        }
        else if(argc == 2)
        {
            std::cout << "Usage: " << argv[0] << " " << argv[1] << " [path_to_data] [image] [labels]\n\n";
            std::cout << "No data folder provided: using random values\n\n";
        }
        else if(argc == 3)
        {
            data_path = argv[2];
            std::cout << "Usage: " << argv[0] << " " << argv[1] << " " << argv[2] << " [image] [labels]\n\n";
            std::cout << "No image provided: using random values\n\n";
        }
        else if(argc == 4)
        {
            data_path = argv[2];
            image     = argv[3];
            std::cout << "Usage: " << argv[0] << " " << argv[1] << " " << argv[2] << " " << argv[3] << " [labels]\n\n";
            std::cout << "No text file with labels provided: skipping output accessor\n\n";
        }
        else
        {
            data_path = argv[2];
            image     = argv[3];
            label     = argv[4];
        }

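        // The network below is assembled with the stream (<<) operator: each layer consumes the output
        // of the one before it. Convolution weights and batch normalization statistics are loaded from
        // .npy files under <data_path>/cnn_data/inceptionv3_model/, while the batch normalization gamma
        // is fixed to 1 via get_random_accessor(1.f, 1.f), presumably because the TF checkpoint provides
        // no scale parameter. The convolutions are given a null bias accessor, as the TF model has no
        // convolution biases (batch normalization follows every convolution).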
        graph << target_hint << Tensor(TensorInfo(TensorShape(299U, 299U, 3U, 1U), 1, DataType::F32),
                                       get_input_accessor(image, std::move(preprocessor), false))

              << ConvolutionLayer(3U, 3U, 32U,
                                  get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Conv2d_1a_3x3_weights.npy"),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(2, 2, 0, 0))
              << BatchNormalizationLayer(get_weights_accessor(data_path,
                                                              "/cnn_data/inceptionv3_model/Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path,
                                                              "/cnn_data/inceptionv3_model/Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f), get_weights_accessor(data_path,
                                                                                             "/cnn_data/inceptionv3_model/Conv2d_1a_3x3_BatchNorm_beta.npy"),
                                         0.001f, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))

              << ConvolutionLayer(3U, 3U, 32U,
                                  get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Conv2d_2a_3x3_weights.npy"),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
              << BatchNormalizationLayer(get_weights_accessor(data_path,
                                                              "/cnn_data/inceptionv3_model/Conv2d_2a_3x3_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path,
                                                              "/cnn_data/inceptionv3_model/Conv2d_2a_3x3_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f), get_weights_accessor(data_path,
                                                                                             "/cnn_data/inceptionv3_model/Conv2d_2a_3x3_BatchNorm_beta.npy"),
                                         0.001f, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))

              << ConvolutionLayer(3U, 3U, 64U,
                                  get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Conv2d_2b_3x3_weights.npy"),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 1, 1))
              << BatchNormalizationLayer(get_weights_accessor(data_path,
                                                              "/cnn_data/inceptionv3_model/Conv2d_2b_3x3_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path,
                                                              "/cnn_data/inceptionv3_model/Conv2d_2b_3x3_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f), get_weights_accessor(data_path,
                                                                                             "/cnn_data/inceptionv3_model/Conv2d_2b_3x3_BatchNorm_beta.npy"),
                                         0.001f, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))

              << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL)))

              << ConvolutionLayer(1U, 1U, 80U,
                                  get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Conv2d_3b_1x1_weights.npy"),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
              << BatchNormalizationLayer(get_weights_accessor(data_path,
                                                              "/cnn_data/inceptionv3_model/Conv2d_3b_1x1_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path,
                                                              "/cnn_data/inceptionv3_model/Conv2d_3b_1x1_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f), get_weights_accessor(data_path,
                                                                                             "/cnn_data/inceptionv3_model/Conv2d_3b_1x1_BatchNorm_beta.npy"),
                                         0.001f, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))

              << ConvolutionLayer(3U, 3U, 192U,
                                  get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Conv2d_4a_3x3_weights.npy"),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
              << BatchNormalizationLayer(get_weights_accessor(data_path,
                                                              "/cnn_data/inceptionv3_model/Conv2d_4a_3x3_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path,
                                                              "/cnn_data/inceptionv3_model/Conv2d_4a_3x3_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f), get_weights_accessor(data_path,
                                                                                             "/cnn_data/inceptionv3_model/Conv2d_4a_3x3_BatchNorm_beta.npy"),
                                         0.001f, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))

              << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL)))

              << get_inception_node_A(data_path, "Mixed_5b", 64U, std::make_tuple(48U, 64U), std::make_tuple(64U, 96U, 96U),
                                      32U)
              << get_inception_node_A(data_path, "Mixed_5c", 64U, std::make_tuple(48U, 64U), std::make_tuple(64U, 96U, 96U),
                                      64U, true)
              << get_inception_node_A(data_path, "Mixed_5d", 64U, std::make_tuple(48U, 64U), std::make_tuple(64U, 96U, 96U),
                                      64U)

              << get_inception_node_B(data_path, "Mixed_6a", 384U, std::make_tuple(64U, 96U, 96U))

              << get_inception_node_C(data_path, "Mixed_6b", 192U, std::make_tuple(128U, 128U, 192U),
                                      std::make_tuple(128U, 128U, 128U, 128U, 192U), 192U)
              << get_inception_node_C(data_path, "Mixed_6c", 192U, std::make_tuple(160U, 160U, 192U),
                                      std::make_tuple(160U, 160U, 160U, 160U, 192U), 192U)
              << get_inception_node_C(data_path, "Mixed_6d", 192U, std::make_tuple(160U, 160U, 192U),
                                      std::make_tuple(160U, 160U, 160U, 160U, 192U), 192U)
              << get_inception_node_C(data_path, "Mixed_6e", 192U, std::make_tuple(192U, 192U, 192U),
                                      std::make_tuple(192U, 192U, 192U, 192U, 192U), 192U)

              << get_inception_node_D(data_path, "Mixed_7a", std::make_tuple(192U, 320U),
                                      std::make_tuple(192U, 192U, 192U, 192U))

              << get_inception_node_E(data_path, "Mixed_7b", 320U, std::make_tuple(384U, 384U, 384U),
                                      std::make_tuple(448U, 384U, 384U, 384U), 192U)
              << get_inception_node_E(data_path, "Mixed_7c", 320U, std::make_tuple(384U, 384U, 384U),
                                      std::make_tuple(448U, 384U, 384U, 384U), 192U, true)

              << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 8, PadStrideInfo(1, 1, 0, 0, DimensionRoundingType::CEIL)))
              << ConvolutionLayer(1U, 1U, 1001U, get_weights_accessor(data_path,
                                                                      "/cnn_data/inceptionv3_model/Logits_Conv2d_1c_1x1_weights.npy"),
                                  get_weights_accessor(data_path,
                                                       "/cnn_data/inceptionv3_model/Logits_Conv2d_1c_1x1_biases.npy"),
                                  PadStrideInfo(1, 1, 0, 0))
              << ReshapeLayer(TensorShape(1001U)) << SoftmaxLayer()
              << Tensor(get_output_accessor(label, 5));

        // In order to enable the OpenCL tuner, graph_init() has to be called after all the nodes have been instantiated
        graph.graph_init(int_target_hint == 2);
    }

    void do_run() override
    {
        graph.run();
    }

private:
    Graph graph{};

private:
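    // Helper that builds one Inception-A block (Mixed_5b/5c/5d): four parallel sub-graphs (a 1x1
    // convolution; 1x1 then 5x5; 1x1 then 3x3 then 3x3; average-pool then 1x1) whose outputs are
    // concatenated along the channel (depth) dimension.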
    BranchLayer get_inception_node_A(const std::string &data_path, std::string &&param_path,
                                     unsigned int a_filt,
                                     std::tuple<unsigned int, unsigned int> b_filters,
                                     std::tuple<unsigned int, unsigned int, unsigned int> c_filters,
                                     unsigned int d_filt,
                                     bool         is_name_different = false)
    {
        std::string total_path = "/cnn_data/inceptionv3_model/" + param_path + "_";

        // Handle a naming inconsistency in the TF model: some blocks use different layer ids
        std::string conv_id0 = "_0a_";
        std::string conv_id1 = "2d_0b_";
        if(is_name_different)
        {
            conv_id0 = "_0b_";
            conv_id1 = "_1_0c_";
        }

        SubGraph i_a;
        i_a << ConvolutionLayer(
                1U, 1U, a_filt,
                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_weights.npy"),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(1, 1, 0, 0))
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                get_random_accessor(1.f, 1.f),
                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                0.001f, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));

        SubGraph i_b;
        i_b << ConvolutionLayer(
                1U, 1U, std::get<0>(b_filters),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id0 + "1x1_weights.npy"),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(1, 1, 0, 0))
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id0 + "1x1_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id0 + "1x1_BatchNorm_moving_variance.npy"),
                get_random_accessor(1.f, 1.f),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id0 + "1x1_BatchNorm_beta.npy"),
                0.001f, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
            << ConvolutionLayer(
                5U, 5U, std::get<1>(b_filters),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv" + conv_id1 + "5x5_weights.npy"),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(1, 1, 2, 2))
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_1_Conv" + conv_id1 + "5x5_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv" + conv_id1 + "5x5_BatchNorm_moving_variance.npy"),
                get_random_accessor(1.f, 1.f),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv" + conv_id1 + "5x5_BatchNorm_beta.npy"),
                0.001f, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));

        SubGraph i_c;
        i_c << ConvolutionLayer(
                1U, 1U, std::get<0>(c_filters),
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_weights.npy"),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(1, 1, 0, 0))
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                get_random_accessor(1.f, 1.f),
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                0.001f, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
            << ConvolutionLayer(
                3U, 3U, std::get<1>(c_filters),
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_weights.npy"),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(1, 1, 1, 1))
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
                get_random_accessor(1.f, 1.f),
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_beta.npy"),
                0.001f, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
            << ConvolutionLayer(
                3U, 3U, std::get<2>(c_filters),
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_3x3_weights.npy"),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(1, 1, 1, 1))
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_moving_variance.npy"),
                get_random_accessor(1.f, 1.f),
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_beta.npy"),
                0.001f, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));

        SubGraph i_d;
        i_d << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 3, PadStrideInfo(1, 1, 1, 1, DimensionRoundingType::CEIL), true))
            << ConvolutionLayer(
                1U, 1U, d_filt,
                get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_weights.npy"),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(1, 1, 0, 0))
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_variance.npy"),
                get_random_accessor(1.f, 1.f),
                get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_beta.npy"),
                0.001f, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));

        return BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d));
    }

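    // Helper that builds the Inception-B grid-size-reduction block (Mixed_6a): a stride-2 3x3
    // convolution, a 1x1 then 3x3 then stride-2 3x3 chain, and a stride-2 max-pool branch, all
    // depth-concatenated so the spatial resolution halves while the depth grows.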
    BranchLayer get_inception_node_B(const std::string &data_path, std::string &&param_path,
                                     unsigned int a_filt,
                                     std::tuple<unsigned int, unsigned int, unsigned int> b_filters)
    {
        std::string total_path = "/cnn_data/inceptionv3_model/" + param_path + "_";
        SubGraph    i_a;
        i_a << ConvolutionLayer(
                3U, 3U, a_filt,
                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_1x1_weights.npy"),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(2, 2, 0, 0))
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_1x1_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_1x1_BatchNorm_moving_variance.npy"),
                get_random_accessor(1.f, 1.f),
                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_1x1_BatchNorm_beta.npy"),
                0.001f, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));

        SubGraph i_b;
        i_b << ConvolutionLayer(
                1U, 1U, std::get<0>(b_filters),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy"),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(1, 1, 0, 0))
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                get_random_accessor(1.f, 1.f),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                0.001f, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
            << ConvolutionLayer(
                3U, 3U, std::get<1>(b_filters),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_weights.npy"),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(1, 1, 1, 1))
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
                get_random_accessor(1.f, 1.f),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_beta.npy"),
                0.001f, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
            << ConvolutionLayer(
                3U, 3U, std::get<2>(b_filters),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_1x1_weights.npy"),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(2, 2, 0, 0))
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_1x1_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_1x1_BatchNorm_moving_variance.npy"),
                get_random_accessor(1.f, 1.f),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_1x1_BatchNorm_beta.npy"),
                0.001f, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));

        SubGraph i_c;
        i_c << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL)))
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LINEAR, 1.f, 0.f));

        return BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_a), std::move(i_b), std::move(i_c));
    }

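    // Helper that builds an Inception-C block (Mixed_6b..6e) with factorised 7x7 convolutions:
    // a 1x1 branch, a 1x1 then 1x7 then 7x1 branch, a 1x1 then 7x1 then 1x7 then 7x1 then 1x7
    // branch, and an average-pool then 1x1 branch, depth-concatenated.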
    BranchLayer get_inception_node_C(const std::string &data_path, std::string &&param_path,
                                     unsigned int a_filt,
                                     std::tuple<unsigned int, unsigned int, unsigned int> b_filters,
                                     std::tuple<unsigned int, unsigned int, unsigned int, unsigned int, unsigned int> c_filters,
                                     unsigned int d_filt)
    {
        std::string total_path = "/cnn_data/inceptionv3_model/" + param_path + "_";
        SubGraph    i_a;
        i_a << ConvolutionLayer(
                1U, 1U, a_filt,
                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_weights.npy"),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(1, 1, 0, 0))
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                get_random_accessor(1.f, 1.f),
                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                0.001f, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));

        SubGraph i_b;
        i_b << ConvolutionLayer(
                1U, 1U, std::get<0>(b_filters),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy"),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(1, 1, 0, 0))
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                get_random_accessor(1.f, 1.f),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                0.001f, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
            << ConvolutionLayer(
                7U, 1U, std::get<1>(b_filters),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_weights.npy"),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(1, 1, 3, 0))
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_variance.npy"),
                get_random_accessor(1.f, 1.f),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_beta.npy"),
                0.001f, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
            << ConvolutionLayer(
                1U, 7U, std::get<2>(b_filters),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_weights.npy"),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(1, 1, 0, 3))
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_variance.npy"),
                get_random_accessor(1.f, 1.f),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_beta.npy"),
                0.001f, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));

        SubGraph i_c;
        i_c << ConvolutionLayer(
                1U, 1U, std::get<0>(c_filters),
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_weights.npy"),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(1, 1, 0, 0))
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                get_random_accessor(1.f, 1.f),
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                0.001f, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
            << ConvolutionLayer(
                1U, 7U, std::get<1>(c_filters),
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_7x1_weights.npy"),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(1, 1, 0, 3))
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_7x1_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_7x1_BatchNorm_moving_variance.npy"),
                get_random_accessor(1.f, 1.f),
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_7x1_BatchNorm_beta.npy"),
                0.001f, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
            << ConvolutionLayer(
                7U, 1U, std::get<2>(c_filters),
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x7_weights.npy"),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(1, 1, 3, 0))
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x7_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x7_BatchNorm_moving_variance.npy"),
                get_random_accessor(1.f, 1.f),
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x7_BatchNorm_beta.npy"),
                0.001f, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
            << ConvolutionLayer(
                1U, 7U, std::get<3>(c_filters),
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_7x1_weights.npy"),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(1, 1, 0, 3))
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_7x1_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_7x1_BatchNorm_moving_variance.npy"),
                get_random_accessor(1.f, 1.f),
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_7x1_BatchNorm_beta.npy"),
                0.001f, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
            << ConvolutionLayer(
                7U, 1U, std::get<4>(c_filters),
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_1x7_weights.npy"),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(1, 1, 3, 0))
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_1x7_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_1x7_BatchNorm_moving_variance.npy"),
                get_random_accessor(1.f, 1.f),
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_1x7_BatchNorm_beta.npy"),
                0.001f, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));

        SubGraph i_d;
        i_d << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 3, PadStrideInfo(1, 1, 1, 1, DimensionRoundingType::CEIL), true))
            << ConvolutionLayer(
                1U, 1U, d_filt,
                get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_weights.npy"),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(1, 1, 0, 0))
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_variance.npy"),
                get_random_accessor(1.f, 1.f),
                get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_beta.npy"),
                0.001f, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));

        return BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d));
    }

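    // Helper that builds the Inception-D grid-size-reduction block (Mixed_7a): a 1x1 then stride-2 3x3
    // chain, a 1x1 then 1x7 then 7x1 then stride-2 3x3 chain, and a stride-2 max-pool branch,
    // depth-concatenated.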
    BranchLayer get_inception_node_D(const std::string &data_path, std::string &&param_path,
                                     std::tuple<unsigned int, unsigned int>      a_filters,
                                     std::tuple<unsigned int, unsigned int, unsigned int, unsigned int> b_filters)
    {
        std::string total_path = "/cnn_data/inceptionv3_model/" + param_path + "_";
        SubGraph    i_a;
        i_a << ConvolutionLayer(
                1U, 1U, std::get<0>(a_filters),
                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_weights.npy"),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(1, 1, 0, 0))
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                get_random_accessor(1.f, 1.f),
                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                0.001f, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
            << ConvolutionLayer(
                3U, 3U, std::get<1>(a_filters),
                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_weights.npy"),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(2, 2, 0, 0))
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                get_random_accessor(1.f, 1.f),
                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_beta.npy"),
                0.001f, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));

        SubGraph i_b;
        i_b << ConvolutionLayer(
                1U, 1U, std::get<0>(b_filters),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy"),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(1, 1, 0, 0))
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                get_random_accessor(1.f, 1.f),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                0.001f, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
            << ConvolutionLayer(
                7U, 1U, std::get<1>(b_filters),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_weights.npy"),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(1, 1, 3, 0))
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_variance.npy"),
                get_random_accessor(1.f, 1.f),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_beta.npy"),
                0.001f, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
            << ConvolutionLayer(
                1U, 7U, std::get<2>(b_filters),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_weights.npy"),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(1, 1, 0, 3))
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_variance.npy"),
                get_random_accessor(1.f, 1.f),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_beta.npy"),
                0.001f, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
            << ConvolutionLayer(
                3U, 3U, std::get<3>(b_filters),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_weights.npy"),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(2, 2, 0, 0))
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                get_random_accessor(1.f, 1.f),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_beta.npy"),
                0.001f, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));

        SubGraph i_c;
        i_c << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL)))
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LINEAR, 1.f, 0.f));

        return BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_a), std::move(i_b), std::move(i_c));
    }

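    // Helper that builds an Inception-E block (Mixed_7b/7c): branches 1 and 2 each end in a pair of
    // parallel 1x3 and 3x1 convolutions whose outputs are concatenated, and the four top-level branches
    // (a 1x1 conv; 1x1 then the split pair; 1x1 then 3x3 then the split pair; average-pool then 1x1)
    // are themselves depth-concatenated.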
    BranchLayer get_inception_node_E(const std::string &data_path, std::string &&param_path,
                                     unsigned int a_filt,
                                     std::tuple<unsigned int, unsigned int, unsigned int> b_filters,
                                     std::tuple<unsigned int, unsigned int, unsigned int, unsigned int> c_filters,
                                     unsigned int d_filt,
                                     bool         is_name_different = false)
    {
        // Handle a naming inconsistency in the TF model: the second split convolution uses a different layer id in Mixed_7c
        std::string conv_id = "_0b_";
        if(is_name_different)
        {
            conv_id = "_0c_";
        }

        std::string total_path = "/cnn_data/inceptionv3_model/" + param_path + "_";
        SubGraph    i_a;
        i_a << ConvolutionLayer(
                1U, 1U, a_filt,
                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_weights.npy"),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(1, 1, 0, 0))
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                get_random_accessor(1.f, 1.f),
                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                0.001f, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));

        SubGraph i_b1;
        i_b1 << ConvolutionLayer(
                 3U, 1U, std::get<1>(b_filters),
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x3_weights.npy"),
                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                 PadStrideInfo(1, 1, 1, 0))
             << BatchNormalizationLayer(
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_moving_mean.npy"),
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_moving_variance.npy"),
                 get_random_accessor(1.f, 1.f),
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_beta.npy"),
                 0.001f, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));

        SubGraph i_b2;
        i_b2 << ConvolutionLayer(
                 1U, 3U, std::get<2>(b_filters),
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id + "3x1_weights.npy"),
                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                 PadStrideInfo(1, 1, 0, 1))
             << BatchNormalizationLayer(
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id + "3x1_BatchNorm_moving_mean.npy"),
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id + "3x1_BatchNorm_moving_variance.npy"),
                 get_random_accessor(1.f, 1.f),
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id + "3x1_BatchNorm_beta.npy"),
                 0.001f, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));

        SubGraph i_b;
        i_b << ConvolutionLayer(
                1U, 1U, std::get<0>(b_filters),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy"),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(1, 1, 0, 0))
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                get_random_accessor(1.f, 1.f),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                0.001f, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
            << BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_b1), std::move(i_b2));

        SubGraph i_c1;
        i_c1 << ConvolutionLayer(
                 3U, 1U, std::get<2>(c_filters),
                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x3_weights.npy"),
                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                 PadStrideInfo(1, 1, 1, 0))
             << BatchNormalizationLayer(
                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x3_BatchNorm_moving_mean.npy"),
                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x3_BatchNorm_moving_variance.npy"),
                 get_random_accessor(1.f, 1.f),
                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x3_BatchNorm_beta.npy"),
                 0.001f, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));

        SubGraph i_c2;
        i_c2 << ConvolutionLayer(
                 1U, 3U, std::get<3>(c_filters),
                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_3x1_weights.npy"),
                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                 PadStrideInfo(1, 1, 0, 1))
             << BatchNormalizationLayer(
                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_3x1_BatchNorm_moving_mean.npy"),
                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_3x1_BatchNorm_moving_variance.npy"),
                 get_random_accessor(1.f, 1.f),
                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_3x1_BatchNorm_beta.npy"),
                 0.001f, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));

        SubGraph i_c;
        i_c << ConvolutionLayer(
                1U, 1U, std::get<0>(c_filters),
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_weights.npy"),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(1, 1, 0, 0))
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                get_random_accessor(1.f, 1.f),
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                0.001f, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
            << ConvolutionLayer(
                3U, 3U, std::get<1>(c_filters),
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_weights.npy"),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(1, 1, 1, 1))
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
                get_random_accessor(1.f, 1.f),
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_beta.npy"),
                0.001f, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
            << BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_c1), std::move(i_c2));

        SubGraph i_d;
        i_d << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 3, PadStrideInfo(1, 1, 1, 1, DimensionRoundingType::CEIL), true))
            << ConvolutionLayer(
                1U, 1U, d_filt,
                get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_weights.npy"),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(1, 1, 0, 0))
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_variance.npy"),
                get_random_accessor(1.f, 1.f),
                get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_beta.npy"),
                0.001f, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));

        return BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d));
    }
};

/** Main program for Inception V3
 *
 * @param[in] argc Number of arguments
 * @param[in] argv Arguments ( [optional] Target (0 = NEON, 1 = OpenCL, 2 = OpenCL with Tuner), [optional] Path to the weights folder, [optional] image, [optional] labels )
 */
int main(int argc, char **argv)
{
    return arm_compute::utils::run_example<InceptionV3Example>(argc, argv);
}