arm_compute v18.01
[platform/upstream/armcl.git] / examples / graph_inception_v3.cpp
1 /*
2  * Copyright (c) 2017-2018 ARM Limited.
3  *
4  * SPDX-License-Identifier: MIT
5  *
6  * Permission is hereby granted, free of charge, to any person obtaining a copy
7  * of this software and associated documentation files (the "Software"), to
8  * deal in the Software without restriction, including without limitation the
9  * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
10  * sell copies of the Software, and to permit persons to whom the Software is
11  * furnished to do so, subject to the following conditions:
12  *
13  * The above copyright notice and this permission notice shall be included in all
14  * copies or substantial portions of the Software.
15  *
16  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
19  * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20  * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21  * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
22  * SOFTWARE.
23  */
24 #include "arm_compute/graph/Graph.h"
25 #include "arm_compute/graph/Nodes.h"
26 #include "arm_compute/graph/SubGraph.h"
27 #include "support/ToolchainSupport.h"
28 #include "utils/GraphUtils.h"
29 #include "utils/Utils.h"
30
31 #include <cstdlib>
32 #include <tuple>
33
34 using namespace arm_compute::utils;
35 using namespace arm_compute::graph;
36 using namespace arm_compute::graph_utils;
37
38 /** Example demonstrating how to implement InceptionV3's network using the Compute Library's graph API
39  *
40  * @param[in] argc Number of arguments
 41  * @param[in] argv Arguments ( [optional] Target (0 = NEON, 1 = OpenCL), [optional] Path to the weights folder, [optional] image, [optional] labels )
42  */
43 class InceptionV3Example : public Example
44 {
45 public:
    /** Parse the command line and build the InceptionV3 graph.
     *
     * Positional arguments (all optional):
     *   argv[1] target (0 = NEON, 1 = OpenCL), argv[2] weights folder,
     *   argv[3] input image, argv[4] labels text file.
     * A missing data folder or image falls back to random values; a missing
     * labels file skips the output accessor.
     *
     * @param[in] argc Number of arguments
     * @param[in] argv Argument vector as described above
     */
    void do_setup(int argc, char **argv) override
    {
        std::string data_path; /* Path to the trainable data */
        std::string image;     /* Image data */
        std::string label;     /* Label data */

        constexpr float mean = 0.f;   /* Mean value to subtract from the channels */
        constexpr float std  = 255.f; /* Standard deviation value to divide from the channels */

        // Set target. 0 (NEON), 1 (OpenCL). By default it is NEON
        TargetHint            target_hint      = set_target_hint(argc > 1 ? std::strtol(argv[1], nullptr, 10) : 0);
        ConvolutionMethodHint convolution_hint = ConvolutionMethodHint::DIRECT;

        // Parse arguments
        if(argc < 2)
        {
            // Print help
            std::cout << "Usage: " << argv[0] << " [target] [path_to_data] [image] [labels]\n\n";
            std::cout << "No data folder provided: using random values\n\n";
        }
        else if(argc == 2)
        {
            std::cout << "Usage: " << argv[0] << " " << argv[1] << " [path_to_data] [image] [labels]\n\n";
            std::cout << "No data folder provided: using random values\n\n";
        }
        else if(argc == 3)
        {
            data_path = argv[2];
            std::cout << "Usage: " << argv[0] << " " << argv[1] << " " << argv[2] << " [image] [labels]\n\n";
            std::cout << "No image provided: using random values\n\n";
        }
        else if(argc == 4)
        {
            data_path = argv[2];
            image     = argv[3];
            std::cout << "Usage: " << argv[0] << " " << argv[1] << " " << argv[2] << " " << argv[3] << " [labels]\n\n";
            std::cout << "No text file with labels provided: skipping output accessor\n\n";
        }
        else
        {
            data_path = argv[2];
            image     = argv[3];
            label     = argv[4];
        }

        // Input: 299x299x3 F32 image, scaled by 1/255 (mean 0), kept in RGB order
        graph << target_hint << convolution_hint << Tensor(TensorInfo(TensorShape(299U, 299U, 3U, 1U), 1, DataType::F32),
                                                           get_input_accessor(image,
                                                                              mean, mean, mean,
                                                                              std, std, std, false /* Do not convert to BGR */))

              // Stem: Conv2d_1a_3x3, 32 filters, stride 2 (convolutions have no bias;
              // batch norm uses a fixed gamma of 1 via get_random_accessor(1.f, 1.f))
              << ConvolutionLayer(3U, 3U, 32U,
                                  get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Conv2d_1a_3x3_weights.npy"),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(2, 2, 0, 0))
              << BatchNormalizationLayer(get_weights_accessor(data_path,
                                                              "/cnn_data/inceptionv3_model/Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path,
                                                              "/cnn_data/inceptionv3_model/Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f), get_weights_accessor(data_path,
                                                                                             "/cnn_data/inceptionv3_model/Conv2d_1a_3x3_BatchNorm_beta.npy"),
                                         0.001f)
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))

              // Conv2d_2a_3x3, 32 filters, stride 1, no padding
              << ConvolutionLayer(3U, 3U, 32U,
                                  get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Conv2d_2a_3x3_weights.npy"),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
              << BatchNormalizationLayer(get_weights_accessor(data_path,
                                                              "/cnn_data/inceptionv3_model/Conv2d_2a_3x3_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path,
                                                              "/cnn_data/inceptionv3_model/Conv2d_2a_3x3_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f), get_weights_accessor(data_path,
                                                                                             "/cnn_data/inceptionv3_model/Conv2d_2a_3x3_BatchNorm_beta.npy"),
                                         0.001f)
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))

              // Conv2d_2b_3x3, 64 filters, stride 1, padding 1
              << ConvolutionLayer(3U, 3U, 64U,
                                  get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Conv2d_2b_3x3_weights.npy"),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 1, 1))
              << BatchNormalizationLayer(get_weights_accessor(data_path,
                                                              "/cnn_data/inceptionv3_model/Conv2d_2b_3x3_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path,
                                                              "/cnn_data/inceptionv3_model/Conv2d_2b_3x3_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f), get_weights_accessor(data_path,
                                                                                             "/cnn_data/inceptionv3_model/Conv2d_2b_3x3_BatchNorm_beta.npy"),
                                         0.001f)
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))

              // MaxPool_3a_3x3, stride 2
              << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL)))

              // Conv2d_3b_1x1, 80 filters
              << ConvolutionLayer(1U, 1U, 80U,
                                  get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Conv2d_3b_1x1_weights.npy"),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
              << BatchNormalizationLayer(get_weights_accessor(data_path,
                                                              "/cnn_data/inceptionv3_model/Conv2d_3b_1x1_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path,
                                                              "/cnn_data/inceptionv3_model/Conv2d_3b_1x1_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f), get_weights_accessor(data_path,
                                                                                             "/cnn_data/inceptionv3_model/Conv2d_3b_1x1_BatchNorm_beta.npy"),
                                         0.001f)
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))

              // Conv2d_4a_3x3, 192 filters
              << ConvolutionLayer(3U, 3U, 192U,
                                  get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Conv2d_4a_3x3_weights.npy"),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
              << BatchNormalizationLayer(get_weights_accessor(data_path,
                                                              "/cnn_data/inceptionv3_model/Conv2d_4a_3x3_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path,
                                                              "/cnn_data/inceptionv3_model/Conv2d_4a_3x3_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f), get_weights_accessor(data_path,
                                                                                             "/cnn_data/inceptionv3_model/Conv2d_4a_3x3_BatchNorm_beta.npy"),
                                         0.001f)
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))

              // MaxPool_5a_3x3, stride 2
              << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL)))

              // Inception-A blocks (Mixed_5b..5d); Mixed_5c uses the alternative tf layer naming
              << get_inception_node_A(data_path, "Mixed_5b", 64U, std::make_tuple(48U, 64U), std::make_tuple(64U, 96U, 96U),
                                      32U)
              << get_inception_node_A(data_path, "Mixed_5c", 64U, std::make_tuple(48U, 64U), std::make_tuple(64U, 96U, 96U),
                                      64U, true)
              << get_inception_node_A(data_path, "Mixed_5d", 64U, std::make_tuple(48U, 64U), std::make_tuple(64U, 96U, 96U),
                                      64U)

              // Reduction block (Mixed_6a)
              << get_inception_node_B(data_path, "Mixed_6a", 384U, std::make_tuple(64U, 96U, 96U))

              // Inception-C blocks with factorised 7x7 convolutions (Mixed_6b..6e)
              << get_inception_node_C(data_path, "Mixed_6b", 192U, std::make_tuple(128U, 128U, 192U),
                                      std::make_tuple(128U, 128U, 128U, 128U, 192U), 192U)
              << get_inception_node_C(data_path, "Mixed_6c", 192U, std::make_tuple(160U, 160U, 192U),
                                      std::make_tuple(160U, 160U, 160U, 160U, 192U), 192U)
              << get_inception_node_C(data_path, "Mixed_6d", 192U, std::make_tuple(160U, 160U, 192U),
                                      std::make_tuple(160U, 160U, 160U, 160U, 192U), 192U)
              << get_inception_node_C(data_path, "Mixed_6e", 192U, std::make_tuple(192U, 192U, 192U),
                                      std::make_tuple(192U, 192U, 192U, 192U, 192U), 192U)

              // Reduction block (Mixed_7a)
              << get_inception_node_D(data_path, "Mixed_7a", std::make_tuple(192U, 320U),
                                      std::make_tuple(192U, 192U, 192U, 192U))

              // Inception-E blocks (Mixed_7b..7c); Mixed_7c uses the alternative tf layer naming
              << get_inception_node_E(data_path, "Mixed_7b", 320U, std::make_tuple(384U, 384U, 384U),
                                      std::make_tuple(448U, 384U, 384U, 384U), 192U)
              << get_inception_node_E(data_path, "Mixed_7c", 320U, std::make_tuple(384U, 384U, 384U),
                                      std::make_tuple(448U, 384U, 384U, 384U), 192U, true)

              // Classifier: global 8x8 average pool, 1x1 logits conv (with biases), softmax over 1001 classes
              << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 8, PadStrideInfo(1, 1, 0, 0, DimensionRoundingType::CEIL)))
              << ConvolutionLayer(1U, 1U, 1001U, get_weights_accessor(data_path,
                                                                      "/cnn_data/inceptionv3_model/Logits_Conv2d_1c_1x1_weights.npy"),
                                  get_weights_accessor(data_path,
                                                       "/cnn_data/inceptionv3_model/Logits_Conv2d_1c_1x1_biases.npy"),
                                  PadStrideInfo(1, 1, 0, 0))
              << ReshapeLayer(TensorShape(1001U)) << SoftmaxLayer()
              << Tensor(get_output_accessor(label, 5)); // print top-5 predictions (if labels were given)
    }
195
    /** Execute one inference pass over the graph built in do_setup(). */
    void do_run() override
    {
        graph.run();
    }

private:
    Graph graph{}; // Network graph; populated by do_setup(), executed by do_run()

private:
205     BranchLayer get_inception_node_A(const std::string &data_path, std::string &&param_path,
206                                      unsigned int a_filt,
207                                      std::tuple<unsigned int, unsigned int> b_filters,
208                                      std::tuple<unsigned int, unsigned int, unsigned int> c_filters,
209                                      unsigned int d_filt,
210                                      bool         is_name_different = false)
211     {
212         std::string total_path = "/cnn_data/inceptionv3_model/" + param_path + "_";
213         std::cout << total_path << std::endl;
214
215         // This is due to a naming issue in the tf model
216         std::string conv_id0 = "_0a_";
217         std::string conv_id1 = "2d_0b_";
218         if(is_name_different)
219         {
220             conv_id0 = "_0b_";
221             conv_id1 = "_1_0c_";
222         }
223
224         SubGraph i_a;
225         i_a << ConvolutionLayer(
226                 1U, 1U, a_filt,
227                 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_weights.npy"),
228                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
229                 PadStrideInfo(1, 1, 0, 0))
230             << BatchNormalizationLayer(
231                 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
232                 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
233                 get_random_accessor(1.f, 1.f),
234                 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"),
235                 0.001f)
236             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
237
238         SubGraph i_b;
239         i_b << ConvolutionLayer(
240                 1U, 1U, std::get<0>(b_filters),
241                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id0 + "1x1_weights.npy"),
242                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
243                 PadStrideInfo(1, 1, 0, 0))
244             << BatchNormalizationLayer(
245                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id0 + "1x1_BatchNorm_moving_mean.npy"),
246                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id0 + "1x1_BatchNorm_moving_variance.npy"),
247                 get_random_accessor(1.f, 1.f),
248                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id0 + "1x1_BatchNorm_beta.npy"),
249                 0.001f)
250             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
251             << ConvolutionLayer(
252                 5U, 5U, std::get<1>(b_filters),
253                 get_weights_accessor(data_path, total_path + "Branch_1_Conv" + conv_id1 + "5x5_weights.npy"),
254                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
255                 PadStrideInfo(1, 1, 2, 2))
256             << BatchNormalizationLayer(
257                 get_weights_accessor(data_path, total_path + "Branch_1_Conv" + conv_id1 + "5x5_BatchNorm_moving_mean.npy"),
258                 get_weights_accessor(data_path, total_path + "Branch_1_Conv" + conv_id1 + "5x5_BatchNorm_moving_variance.npy"),
259                 get_random_accessor(1.f, 1.f),
260                 get_weights_accessor(data_path, total_path + "Branch_1_Conv" + conv_id1 + "5x5_BatchNorm_beta.npy"),
261                 0.001f)
262             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
263
264         SubGraph i_c;
265         i_c << ConvolutionLayer(
266                 1U, 1U, std::get<0>(c_filters),
267                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_weights.npy"),
268                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
269                 PadStrideInfo(1, 1, 0, 0))
270             << BatchNormalizationLayer(
271                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
272                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
273                 get_random_accessor(1.f, 1.f),
274                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"),
275                 0.001f)
276             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
277             << ConvolutionLayer(
278                 3U, 3U, std::get<1>(c_filters),
279                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_weights.npy"),
280                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
281                 PadStrideInfo(1, 1, 1, 1))
282             << BatchNormalizationLayer(
283                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
284                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
285                 get_random_accessor(1.f, 1.f),
286                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_beta.npy"),
287                 0.001f)
288             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
289             << ConvolutionLayer(
290                 3U, 3U, std::get<2>(c_filters),
291                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_3x3_weights.npy"),
292                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
293                 PadStrideInfo(1, 1, 1, 1))
294             << BatchNormalizationLayer(
295                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_moving_mean.npy"),
296                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_moving_variance.npy"),
297                 get_random_accessor(1.f, 1.f),
298                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_beta.npy"),
299                 0.001f)
300             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
301
302         SubGraph i_d;
303         i_d << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 3, PadStrideInfo(1, 1, 1, 1, DimensionRoundingType::CEIL), true))
304             << ConvolutionLayer(
305                 1U, 1U, d_filt,
306                 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_weights.npy"),
307                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
308                 PadStrideInfo(1, 1, 0, 0))
309             << BatchNormalizationLayer(
310                 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_mean.npy"),
311                 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_variance.npy"),
312                 get_random_accessor(1.f, 1.f),
313                 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_beta.npy"),
314                 0.001f)
315             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
316
317         return BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d));
318     }
319
320     BranchLayer get_inception_node_B(const std::string &data_path, std::string &&param_path,
321                                      unsigned int a_filt,
322                                      std::tuple<unsigned int, unsigned int, unsigned int> b_filters)
323     {
324         std::string total_path = "/cnn_data/inceptionv3_model/" + param_path + "_";
325         SubGraph    i_a;
326         i_a << ConvolutionLayer(
327                 3U, 3U, a_filt,
328                 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_1x1_weights.npy"),
329                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
330                 PadStrideInfo(2, 2, 0, 0))
331             << BatchNormalizationLayer(
332                 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_1x1_BatchNorm_moving_mean.npy"),
333                 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_1x1_BatchNorm_moving_variance.npy"),
334                 get_random_accessor(1.f, 1.f),
335                 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_1x1_BatchNorm_beta.npy"),
336                 0.001f)
337             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
338
339         SubGraph i_b;
340         i_b << ConvolutionLayer(
341                 1U, 1U, std::get<0>(b_filters),
342                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy"),
343                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
344                 PadStrideInfo(1, 1, 0, 0))
345             << BatchNormalizationLayer(
346                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
347                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
348                 get_random_accessor(1.f, 1.f),
349                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
350                 0.001f)
351             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
352             << ConvolutionLayer(
353                 3U, 3U, std::get<1>(b_filters),
354                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_weights.npy"),
355                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
356                 PadStrideInfo(1, 1, 1, 1))
357             << BatchNormalizationLayer(
358                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
359                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
360                 get_random_accessor(1.f, 1.f),
361                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_beta.npy"),
362                 0.001f)
363             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
364             << ConvolutionLayer(
365                 3U, 3U, std::get<2>(b_filters),
366                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_1x1_weights.npy"),
367                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
368                 PadStrideInfo(2, 2, 0, 0))
369             << BatchNormalizationLayer(
370                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_1x1_BatchNorm_moving_mean.npy"),
371                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_1x1_BatchNorm_moving_variance.npy"),
372                 get_random_accessor(1.f, 1.f),
373                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_1x1_BatchNorm_beta.npy"),
374                 0.001f)
375             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
376
377         SubGraph i_c;
378         i_c << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL)))
379             // TODO (geopin01) : Remove once we understand why a single node graph does not run in CL
380             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LINEAR, 1.f, 0.f));
381
382         return BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_a), std::move(i_b), std::move(i_c));
383     }
384
385     BranchLayer get_inception_node_C(const std::string &data_path, std::string &&param_path,
386                                      unsigned int a_filt,
387                                      std::tuple<unsigned int, unsigned int, unsigned int> b_filters,
388                                      std::tuple<unsigned int, unsigned int, unsigned int, unsigned int, unsigned int> c_filters,
389                                      unsigned int d_filt)
390     {
391         std::string total_path = "/cnn_data/inceptionv3_model/" + param_path + "_";
392         SubGraph    i_a;
393         i_a << ConvolutionLayer(
394                 1U, 1U, a_filt,
395                 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_weights.npy"),
396                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
397                 PadStrideInfo(1, 1, 0, 0))
398             << BatchNormalizationLayer(
399                 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
400                 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
401                 get_random_accessor(1.f, 1.f),
402                 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"),
403                 0.001f)
404             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
405
406         SubGraph i_b;
407         i_b << ConvolutionLayer(
408                 1U, 1U, std::get<0>(b_filters),
409                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy"),
410                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
411                 PadStrideInfo(1, 1, 0, 0))
412             << BatchNormalizationLayer(
413                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
414                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
415                 get_random_accessor(1.f, 1.f),
416                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
417                 0.001f)
418             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
419             << ConvolutionLayer(
420                 7U, 1U, std::get<1>(b_filters),
421                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_weights.npy"),
422                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
423                 PadStrideInfo(1, 1, 3, 0))
424             << BatchNormalizationLayer(
425                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_mean.npy"),
426                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_variance.npy"),
427                 get_random_accessor(1.f, 1.f),
428                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_beta.npy"),
429                 0.001f)
430             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
431             << ConvolutionLayer(
432                 1U, 7U, std::get<2>(b_filters),
433                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_weights.npy"),
434                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
435                 PadStrideInfo(1, 1, 0, 3))
436             << BatchNormalizationLayer(
437                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_mean.npy"),
438                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_variance.npy"),
439                 get_random_accessor(1.f, 1.f),
440                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_beta.npy"),
441                 0.001f)
442             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
443
444         SubGraph i_c;
445         i_c << ConvolutionLayer(
446                 1U, 1U, std::get<0>(c_filters),
447                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_weights.npy"),
448                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
449                 PadStrideInfo(1, 1, 0, 0))
450             << BatchNormalizationLayer(
451                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
452                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
453                 get_random_accessor(1.f, 1.f),
454                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"),
455                 0.001f)
456             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
457             << ConvolutionLayer(
458                 1U, 7U, std::get<1>(c_filters),
459                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_7x1_weights.npy"),
460                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
461                 PadStrideInfo(1, 1, 0, 3))
462             << BatchNormalizationLayer(
463                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_7x1_BatchNorm_moving_mean.npy"),
464                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_7x1_BatchNorm_moving_variance.npy"),
465                 get_random_accessor(1.f, 1.f),
466                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_7x1_BatchNorm_beta.npy"),
467                 0.001f)
468             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
469             << ConvolutionLayer(
470                 7U, 1U, std::get<2>(c_filters),
471                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x7_weights.npy"),
472                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
473                 PadStrideInfo(1, 1, 3, 0))
474             << BatchNormalizationLayer(
475                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x7_BatchNorm_moving_mean.npy"),
476                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x7_BatchNorm_moving_variance.npy"),
477                 get_random_accessor(1.f, 1.f),
478                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x7_BatchNorm_beta.npy"),
479                 0.001f)
480             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
481             << ConvolutionLayer(
482                 1U, 7U, std::get<3>(c_filters),
483                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_7x1_weights.npy"),
484                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
485                 PadStrideInfo(1, 1, 0, 3))
486             << BatchNormalizationLayer(
487                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_7x1_BatchNorm_moving_mean.npy"),
488                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_7x1_BatchNorm_moving_variance.npy"),
489                 get_random_accessor(1.f, 1.f),
490                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_7x1_BatchNorm_beta.npy"),
491                 0.001f)
492             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
493             << ConvolutionLayer(
494                 7U, 1U, std::get<4>(c_filters),
495                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_1x7_weights.npy"),
496                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
497                 PadStrideInfo(1, 1, 3, 0))
498             << BatchNormalizationLayer(
499                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_1x7_BatchNorm_moving_mean.npy"),
500                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_1x7_BatchNorm_moving_variance.npy"),
501                 get_random_accessor(1.f, 1.f),
502                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_1x7_BatchNorm_beta.npy"),
503                 0.001f)
504             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
505
506         SubGraph i_d;
507         i_d << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 3, PadStrideInfo(1, 1, 1, 1, DimensionRoundingType::CEIL), true))
508             << ConvolutionLayer(
509                 1U, 1U, d_filt,
510                 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_weights.npy"),
511                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
512                 PadStrideInfo(1, 1, 0, 0))
513             << BatchNormalizationLayer(
514                 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_mean.npy"),
515                 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_variance.npy"),
516                 get_random_accessor(1.f, 1.f),
517                 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_beta.npy"),
518                 0.001f)
519             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
520
521         return BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d));
522     }
523
524     BranchLayer get_inception_node_D(const std::string &data_path, std::string &&param_path,
525                                      std::tuple<unsigned int, unsigned int>      a_filters,
526                                      std::tuple<unsigned int, unsigned int, unsigned int, unsigned int> b_filters)
527     {
528         std::string total_path = "/cnn_data/inceptionv3_model/" + param_path + "_";
529         SubGraph    i_a;
530         i_a << ConvolutionLayer(
531                 1U, 1U, std::get<0>(a_filters),
532                 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_weights.npy"),
533                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
534                 PadStrideInfo(1, 1, 0, 0))
535             << BatchNormalizationLayer(
536                 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
537                 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
538                 get_random_accessor(1.f, 1.f),
539                 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"),
540                 0.001f)
541             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
542             << ConvolutionLayer(
543                 3U, 3U, std::get<1>(a_filters),
544                 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_weights.npy"),
545                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
546                 PadStrideInfo(2, 2, 0, 0))
547             << BatchNormalizationLayer(
548                 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
549                 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
550                 get_random_accessor(1.f, 1.f),
551                 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_beta.npy"),
552                 0.001f)
553             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
554
555         SubGraph i_b;
556         i_b << ConvolutionLayer(
557                 1U, 1U, std::get<0>(b_filters),
558                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy"),
559                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
560                 PadStrideInfo(1, 1, 0, 0))
561             << BatchNormalizationLayer(
562                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
563                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
564                 get_random_accessor(1.f, 1.f),
565                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
566                 0.001f)
567             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
568             << ConvolutionLayer(
569                 7U, 1U, std::get<1>(b_filters),
570                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_weights.npy"),
571                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
572                 PadStrideInfo(1, 1, 3, 0))
573             << BatchNormalizationLayer(
574                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_mean.npy"),
575                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_variance.npy"),
576                 get_random_accessor(1.f, 1.f),
577                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_beta.npy"),
578                 0.001f)
579             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
580             << ConvolutionLayer(
581                 1U, 7U, std::get<2>(b_filters),
582                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_weights.npy"),
583                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
584                 PadStrideInfo(1, 1, 0, 3))
585             << BatchNormalizationLayer(
586                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_mean.npy"),
587                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_variance.npy"),
588                 get_random_accessor(1.f, 1.f),
589                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_beta.npy"),
590                 0.001f)
591             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
592             << ConvolutionLayer(
593                 3U, 3U, std::get<3>(b_filters),
594                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_weights.npy"),
595                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
596                 PadStrideInfo(2, 2, 0, 0))
597             << BatchNormalizationLayer(
598                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
599                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
600                 get_random_accessor(1.f, 1.f),
601                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_beta.npy"),
602                 0.001f)
603             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
604
605         SubGraph i_c;
606         i_c << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL)))
607             // TODO (geopin01) : Remove once we understand why a single node graph does not run in CL
608             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LINEAR, 1.f, 0.f));
609
610         return BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_a), std::move(i_b), std::move(i_c));
611     }
612
613     BranchLayer get_inception_node_E(const std::string &data_path, std::string &&param_path,
614                                      unsigned int a_filt,
615                                      std::tuple<unsigned int, unsigned int, unsigned int> b_filters,
616                                      std::tuple<unsigned int, unsigned int, unsigned int, unsigned int> c_filters,
617                                      unsigned int d_filt,
618                                      bool         is_name_different = false)
619     {
620         // This is due to a naming issue in the tf model
621         std::string conv_id = "_0b_";
622         if(is_name_different)
623         {
624             conv_id = "_0c_";
625         }
626
627         std::string total_path = "/cnn_data/inceptionv3_model/" + param_path + "_";
628         SubGraph    i_a;
629         i_a << ConvolutionLayer(
630                 1U, 1U, a_filt,
631                 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_weights.npy"),
632                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
633                 PadStrideInfo(1, 1, 0, 0))
634             << BatchNormalizationLayer(
635                 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
636                 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
637                 get_random_accessor(1.f, 1.f),
638                 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"),
639                 0.001f)
640             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
641
642         SubGraph i_b1;
643         i_b1 << ConvolutionLayer(
644                  3U, 1U, std::get<1>(b_filters),
645                  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x3_weights.npy"),
646                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
647                  PadStrideInfo(1, 1, 1, 0))
648              << BatchNormalizationLayer(
649                  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_moving_mean.npy"),
650                  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_moving_variance.npy"),
651                  get_random_accessor(1.f, 1.f),
652                  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_beta.npy"),
653                  0.001f)
654              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
655
656         SubGraph i_b2;
657         i_b2 << ConvolutionLayer(
658                  1U, 3U, std::get<2>(b_filters),
659                  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id + "3x1_weights.npy"),
660                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
661                  PadStrideInfo(1, 1, 0, 1))
662              << BatchNormalizationLayer(
663                  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id + "3x1_BatchNorm_moving_mean.npy"),
664                  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id + "3x1_BatchNorm_moving_variance.npy"),
665                  get_random_accessor(1.f, 1.f),
666                  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id + "3x1_BatchNorm_beta.npy"),
667                  0.001f)
668              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
669
670         SubGraph i_b;
671         i_b << ConvolutionLayer(
672                 1U, 1U, std::get<0>(b_filters),
673                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy"),
674                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
675                 PadStrideInfo(1, 1, 0, 0))
676             << BatchNormalizationLayer(
677                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
678                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
679                 get_random_accessor(1.f, 1.f),
680                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
681                 0.001f)
682             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
683             << BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_b1), std::move(i_b2));
684
685         SubGraph i_c1;
686         i_c1 << ConvolutionLayer(
687                  3U, 1U, std::get<2>(c_filters),
688                  get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x3_weights.npy"),
689                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
690                  PadStrideInfo(1, 1, 1, 0))
691              << BatchNormalizationLayer(
692                  get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x3_BatchNorm_moving_mean.npy"),
693                  get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x3_BatchNorm_moving_variance.npy"),
694                  get_random_accessor(1.f, 1.f),
695                  get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x3_BatchNorm_beta.npy"),
696                  0.001f)
697              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
698
699         SubGraph i_c2;
700         i_c2 << ConvolutionLayer(
701                  1U, 3U, std::get<3>(c_filters),
702                  get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_3x1_weights.npy"),
703                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
704                  PadStrideInfo(1, 1, 0, 1))
705              << BatchNormalizationLayer(
706                  get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_3x1_BatchNorm_moving_mean.npy"),
707                  get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_3x1_BatchNorm_moving_variance.npy"),
708                  get_random_accessor(1.f, 1.f),
709                  get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_3x1_BatchNorm_beta.npy"),
710                  0.001f)
711              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
712
713         SubGraph i_c;
714         i_c << ConvolutionLayer(
715                 1U, 1U, std::get<0>(c_filters),
716                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_weights.npy"),
717                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
718                 PadStrideInfo(1, 1, 0, 0))
719             << BatchNormalizationLayer(
720                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
721                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
722                 get_random_accessor(1.f, 1.f),
723                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"),
724                 0.001f)
725             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
726             << ConvolutionLayer(
727                 3U, 3U, std::get<1>(c_filters),
728                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_weights.npy"),
729                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
730                 PadStrideInfo(1, 1, 1, 1))
731             << BatchNormalizationLayer(
732                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
733                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
734                 get_random_accessor(1.f, 1.f),
735                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_beta.npy"),
736                 0.001f)
737             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
738             << BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_c1), std::move(i_c2));
739
740         SubGraph i_d;
741         i_d << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 3, PadStrideInfo(1, 1, 1, 1, DimensionRoundingType::CEIL), true))
742             << ConvolutionLayer(
743                 1U, 1U, d_filt,
744                 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_weights.npy"),
745                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
746                 PadStrideInfo(1, 1, 0, 0))
747             << BatchNormalizationLayer(
748                 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_mean.npy"),
749                 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_variance.npy"),
750                 get_random_accessor(1.f, 1.f),
751                 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_beta.npy"),
752                 0.001f)
753             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
754
755         return BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d));
756     }
757 };
758
759 /** Main program for Inception V3
760  *
761  * @param[in] argc Number of arguments
762  * @param[in] argv Arguments ( [optional] Target (0 = NEON, 1 = OpenCL), [optional] Path to the weights folder, [optional] image, [optional] labels )
763  */
764 int main(int argc, char **argv)
765 {
766     return arm_compute::utils::run_example<InceptionV3Example>(argc, argv);
767 }