/*
 * Copyright (c) 2017-2018 ARM Limited.
 *
 * SPDX-License-Identifier: MIT
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to
 * deal in the Software without restriction, including without limitation the
 * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
 * sell copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
24 #include "arm_compute/graph/Graph.h"
25 #include "arm_compute/graph/Nodes.h"
26 #include "arm_compute/graph/SubGraph.h"
27 #include "support/ToolchainSupport.h"
28 #include "utils/GraphUtils.h"
29 #include "utils/Utils.h"
34 using namespace arm_compute::utils;
35 using namespace arm_compute::graph;
36 using namespace arm_compute::graph_utils;
/** Example demonstrating how to implement InceptionV3's network using the Compute Library's graph API
 *
 * @param[in] argc Number of arguments
 * @param[in] argv Arguments ( [optional] Target (0 = NEON, 1 = OpenCL), [optional] Path to the weights folder, [optional] image, [optional] labels )
 */
43 class InceptionV3Example : public Example
46 void do_setup(int argc, char **argv) override
48 std::string data_path; /* Path to the trainable data */
49 std::string image; /* Image data */
50 std::string label; /* Label data */
52 constexpr float mean = 0.f; /* Mean value to subtract from the channels */
53 constexpr float std = 255.f; /* Standard deviation value to divide from the channels */
55 // Set target. 0 (NEON), 1 (OpenCL). By default it is NEON
56 TargetHint target_hint = set_target_hint(argc > 1 ? std::strtol(argv[1], nullptr, 10) : 0);
57 ConvolutionMethodHint convolution_hint = ConvolutionMethodHint::DIRECT;
63 std::cout << "Usage: " << argv[0] << " [target] [path_to_data] [image] [labels]\n\n";
64 std::cout << "No data folder provided: using random values\n\n";
68 std::cout << "Usage: " << argv[0] << " " << argv[1] << " [path_to_data] [image] [labels]\n\n";
69 std::cout << "No data folder provided: using random values\n\n";
74 std::cout << "Usage: " << argv[0] << " " << argv[1] << " " << argv[2] << " [image] [labels]\n\n";
75 std::cout << "No image provided: using random values\n\n";
81 std::cout << "Usage: " << argv[0] << " " << argv[1] << " " << argv[2] << " " << argv[3] << " [labels]\n\n";
82 std::cout << "No text file with labels provided: skipping output accessor\n\n";
91 graph << target_hint << convolution_hint << Tensor(TensorInfo(TensorShape(299U, 299U, 3U, 1U), 1, DataType::F32),
92 get_input_accessor(image,
94 std, std, std, false /* Do not convert to BGR */))
96 << ConvolutionLayer(3U, 3U, 32U,
97 get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Conv2d_1a_3x3_weights.npy"),
98 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(2, 2, 0, 0))
99 << BatchNormalizationLayer(get_weights_accessor(data_path,
100 "/cnn_data/inceptionv3_model/Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
101 get_weights_accessor(data_path,
102 "/cnn_data/inceptionv3_model/Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
103 get_random_accessor(1.f, 1.f), get_weights_accessor(data_path,
104 "/cnn_data/inceptionv3_model/Conv2d_1a_3x3_BatchNorm_beta.npy"),
106 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
108 << ConvolutionLayer(3U, 3U, 32U,
109 get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Conv2d_2a_3x3_weights.npy"),
110 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
111 << BatchNormalizationLayer(get_weights_accessor(data_path,
112 "/cnn_data/inceptionv3_model/Conv2d_2a_3x3_BatchNorm_moving_mean.npy"),
113 get_weights_accessor(data_path,
114 "/cnn_data/inceptionv3_model/Conv2d_2a_3x3_BatchNorm_moving_variance.npy"),
115 get_random_accessor(1.f, 1.f), get_weights_accessor(data_path,
116 "/cnn_data/inceptionv3_model/Conv2d_2a_3x3_BatchNorm_beta.npy"),
118 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
120 << ConvolutionLayer(3U, 3U, 64U,
121 get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Conv2d_2b_3x3_weights.npy"),
122 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 1, 1))
123 << BatchNormalizationLayer(get_weights_accessor(data_path,
124 "/cnn_data/inceptionv3_model/Conv2d_2b_3x3_BatchNorm_moving_mean.npy"),
125 get_weights_accessor(data_path,
126 "/cnn_data/inceptionv3_model/Conv2d_2b_3x3_BatchNorm_moving_variance.npy"),
127 get_random_accessor(1.f, 1.f), get_weights_accessor(data_path,
128 "/cnn_data/inceptionv3_model/Conv2d_2b_3x3_BatchNorm_beta.npy"),
130 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
132 << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL)))
134 << ConvolutionLayer(1U, 1U, 80U,
135 get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Conv2d_3b_1x1_weights.npy"),
136 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
137 << BatchNormalizationLayer(get_weights_accessor(data_path,
138 "/cnn_data/inceptionv3_model/Conv2d_3b_1x1_BatchNorm_moving_mean.npy"),
139 get_weights_accessor(data_path,
140 "/cnn_data/inceptionv3_model/Conv2d_3b_1x1_BatchNorm_moving_variance.npy"),
141 get_random_accessor(1.f, 1.f), get_weights_accessor(data_path,
142 "/cnn_data/inceptionv3_model/Conv2d_3b_1x1_BatchNorm_beta.npy"),
144 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
146 << ConvolutionLayer(3U, 3U, 192U,
147 get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Conv2d_4a_3x3_weights.npy"),
148 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
149 << BatchNormalizationLayer(get_weights_accessor(data_path,
150 "/cnn_data/inceptionv3_model/Conv2d_4a_3x3_BatchNorm_moving_mean.npy"),
151 get_weights_accessor(data_path,
152 "/cnn_data/inceptionv3_model/Conv2d_4a_3x3_BatchNorm_moving_variance.npy"),
153 get_random_accessor(1.f, 1.f), get_weights_accessor(data_path,
154 "/cnn_data/inceptionv3_model/Conv2d_4a_3x3_BatchNorm_beta.npy"),
156 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
158 << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL)))
160 << get_inception_node_A(data_path, "Mixed_5b", 64U, std::make_tuple(48U, 64U), std::make_tuple(64U, 96U, 96U),
162 << get_inception_node_A(data_path, "Mixed_5c", 64U, std::make_tuple(48U, 64U), std::make_tuple(64U, 96U, 96U),
164 << get_inception_node_A(data_path, "Mixed_5d", 64U, std::make_tuple(48U, 64U), std::make_tuple(64U, 96U, 96U),
167 << get_inception_node_B(data_path, "Mixed_6a", 384U, std::make_tuple(64U, 96U, 96U))
169 << get_inception_node_C(data_path, "Mixed_6b", 192U, std::make_tuple(128U, 128U, 192U),
170 std::make_tuple(128U, 128U, 128U, 128U, 192U), 192U)
171 << get_inception_node_C(data_path, "Mixed_6c", 192U, std::make_tuple(160U, 160U, 192U),
172 std::make_tuple(160U, 160U, 160U, 160U, 192U), 192U)
173 << get_inception_node_C(data_path, "Mixed_6d", 192U, std::make_tuple(160U, 160U, 192U),
174 std::make_tuple(160U, 160U, 160U, 160U, 192U), 192U)
175 << get_inception_node_C(data_path, "Mixed_6e", 192U, std::make_tuple(192U, 192U, 192U),
176 std::make_tuple(192U, 192U, 192U, 192U, 192U), 192U)
178 << get_inception_node_D(data_path, "Mixed_7a", std::make_tuple(192U, 320U),
179 std::make_tuple(192U, 192U, 192U, 192U))
181 << get_inception_node_E(data_path, "Mixed_7b", 320U, std::make_tuple(384U, 384U, 384U),
182 std::make_tuple(448U, 384U, 384U, 384U), 192U)
183 << get_inception_node_E(data_path, "Mixed_7c", 320U, std::make_tuple(384U, 384U, 384U),
184 std::make_tuple(448U, 384U, 384U, 384U), 192U, true)
186 << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 8, PadStrideInfo(1, 1, 0, 0, DimensionRoundingType::CEIL)))
187 << ConvolutionLayer(1U, 1U, 1001U, get_weights_accessor(data_path,
188 "/cnn_data/inceptionv3_model/Logits_Conv2d_1c_1x1_weights.npy"),
189 get_weights_accessor(data_path,
190 "/cnn_data/inceptionv3_model/Logits_Conv2d_1c_1x1_biases.npy"),
191 PadStrideInfo(1, 1, 0, 0))
192 << ReshapeLayer(TensorShape(1001U)) << SoftmaxLayer()
193 << Tensor(get_output_accessor(label, 5));
196 void do_run() override
205 BranchLayer get_inception_node_A(const std::string &data_path, std::string &¶m_path,
207 std::tuple<unsigned int, unsigned int> b_filters,
208 std::tuple<unsigned int, unsigned int, unsigned int> c_filters,
210 bool is_name_different = false)
212 std::string total_path = "/cnn_data/inceptionv3_model/" + param_path + "_";
213 std::cout << total_path << std::endl;
215 // This is due to a naming issue in the tf model
216 std::string conv_id0 = "_0a_";
217 std::string conv_id1 = "2d_0b_";
218 if(is_name_different)
225 i_a << ConvolutionLayer(
227 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_weights.npy"),
228 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
229 PadStrideInfo(1, 1, 0, 0))
230 << BatchNormalizationLayer(
231 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
232 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
233 get_random_accessor(1.f, 1.f),
234 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"),
236 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
239 i_b << ConvolutionLayer(
240 1U, 1U, std::get<0>(b_filters),
241 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id0 + "1x1_weights.npy"),
242 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
243 PadStrideInfo(1, 1, 0, 0))
244 << BatchNormalizationLayer(
245 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id0 + "1x1_BatchNorm_moving_mean.npy"),
246 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id0 + "1x1_BatchNorm_moving_variance.npy"),
247 get_random_accessor(1.f, 1.f),
248 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id0 + "1x1_BatchNorm_beta.npy"),
250 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
252 5U, 5U, std::get<1>(b_filters),
253 get_weights_accessor(data_path, total_path + "Branch_1_Conv" + conv_id1 + "5x5_weights.npy"),
254 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
255 PadStrideInfo(1, 1, 2, 2))
256 << BatchNormalizationLayer(
257 get_weights_accessor(data_path, total_path + "Branch_1_Conv" + conv_id1 + "5x5_BatchNorm_moving_mean.npy"),
258 get_weights_accessor(data_path, total_path + "Branch_1_Conv" + conv_id1 + "5x5_BatchNorm_moving_variance.npy"),
259 get_random_accessor(1.f, 1.f),
260 get_weights_accessor(data_path, total_path + "Branch_1_Conv" + conv_id1 + "5x5_BatchNorm_beta.npy"),
262 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
265 i_c << ConvolutionLayer(
266 1U, 1U, std::get<0>(c_filters),
267 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_weights.npy"),
268 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
269 PadStrideInfo(1, 1, 0, 0))
270 << BatchNormalizationLayer(
271 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
272 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
273 get_random_accessor(1.f, 1.f),
274 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"),
276 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
278 3U, 3U, std::get<1>(c_filters),
279 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_weights.npy"),
280 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
281 PadStrideInfo(1, 1, 1, 1))
282 << BatchNormalizationLayer(
283 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
284 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
285 get_random_accessor(1.f, 1.f),
286 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_beta.npy"),
288 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
290 3U, 3U, std::get<2>(c_filters),
291 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_3x3_weights.npy"),
292 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
293 PadStrideInfo(1, 1, 1, 1))
294 << BatchNormalizationLayer(
295 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_moving_mean.npy"),
296 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_moving_variance.npy"),
297 get_random_accessor(1.f, 1.f),
298 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_beta.npy"),
300 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
303 i_d << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 3, PadStrideInfo(1, 1, 1, 1, DimensionRoundingType::CEIL), true))
306 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_weights.npy"),
307 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
308 PadStrideInfo(1, 1, 0, 0))
309 << BatchNormalizationLayer(
310 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_mean.npy"),
311 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_variance.npy"),
312 get_random_accessor(1.f, 1.f),
313 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_beta.npy"),
315 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
317 return BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d));
320 BranchLayer get_inception_node_B(const std::string &data_path, std::string &¶m_path,
322 std::tuple<unsigned int, unsigned int, unsigned int> b_filters)
324 std::string total_path = "/cnn_data/inceptionv3_model/" + param_path + "_";
326 i_a << ConvolutionLayer(
328 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_1x1_weights.npy"),
329 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
330 PadStrideInfo(2, 2, 0, 0))
331 << BatchNormalizationLayer(
332 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_1x1_BatchNorm_moving_mean.npy"),
333 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_1x1_BatchNorm_moving_variance.npy"),
334 get_random_accessor(1.f, 1.f),
335 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_1x1_BatchNorm_beta.npy"),
337 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
340 i_b << ConvolutionLayer(
341 1U, 1U, std::get<0>(b_filters),
342 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy"),
343 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
344 PadStrideInfo(1, 1, 0, 0))
345 << BatchNormalizationLayer(
346 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
347 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
348 get_random_accessor(1.f, 1.f),
349 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
351 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
353 3U, 3U, std::get<1>(b_filters),
354 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_weights.npy"),
355 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
356 PadStrideInfo(1, 1, 1, 1))
357 << BatchNormalizationLayer(
358 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
359 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
360 get_random_accessor(1.f, 1.f),
361 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_beta.npy"),
363 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
365 3U, 3U, std::get<2>(b_filters),
366 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_1x1_weights.npy"),
367 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
368 PadStrideInfo(2, 2, 0, 0))
369 << BatchNormalizationLayer(
370 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_1x1_BatchNorm_moving_mean.npy"),
371 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_1x1_BatchNorm_moving_variance.npy"),
372 get_random_accessor(1.f, 1.f),
373 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_1x1_BatchNorm_beta.npy"),
375 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
378 i_c << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL)))
379 // TODO (geopin01) : Remove once we understand why a single node graph does not run in CL
380 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LINEAR, 1.f, 0.f));
382 return BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_a), std::move(i_b), std::move(i_c));
385 BranchLayer get_inception_node_C(const std::string &data_path, std::string &¶m_path,
387 std::tuple<unsigned int, unsigned int, unsigned int> b_filters,
388 std::tuple<unsigned int, unsigned int, unsigned int, unsigned int, unsigned int> c_filters,
391 std::string total_path = "/cnn_data/inceptionv3_model/" + param_path + "_";
393 i_a << ConvolutionLayer(
395 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_weights.npy"),
396 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
397 PadStrideInfo(1, 1, 0, 0))
398 << BatchNormalizationLayer(
399 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
400 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
401 get_random_accessor(1.f, 1.f),
402 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"),
404 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
407 i_b << ConvolutionLayer(
408 1U, 1U, std::get<0>(b_filters),
409 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy"),
410 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
411 PadStrideInfo(1, 1, 0, 0))
412 << BatchNormalizationLayer(
413 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
414 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
415 get_random_accessor(1.f, 1.f),
416 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
418 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
420 7U, 1U, std::get<1>(b_filters),
421 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_weights.npy"),
422 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
423 PadStrideInfo(1, 1, 3, 0))
424 << BatchNormalizationLayer(
425 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_mean.npy"),
426 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_variance.npy"),
427 get_random_accessor(1.f, 1.f),
428 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_beta.npy"),
430 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
432 1U, 7U, std::get<2>(b_filters),
433 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_weights.npy"),
434 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
435 PadStrideInfo(1, 1, 0, 3))
436 << BatchNormalizationLayer(
437 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_mean.npy"),
438 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_variance.npy"),
439 get_random_accessor(1.f, 1.f),
440 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_beta.npy"),
442 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
445 i_c << ConvolutionLayer(
446 1U, 1U, std::get<0>(c_filters),
447 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_weights.npy"),
448 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
449 PadStrideInfo(1, 1, 0, 0))
450 << BatchNormalizationLayer(
451 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
452 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
453 get_random_accessor(1.f, 1.f),
454 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"),
456 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
458 1U, 7U, std::get<1>(c_filters),
459 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_7x1_weights.npy"),
460 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
461 PadStrideInfo(1, 1, 0, 3))
462 << BatchNormalizationLayer(
463 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_7x1_BatchNorm_moving_mean.npy"),
464 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_7x1_BatchNorm_moving_variance.npy"),
465 get_random_accessor(1.f, 1.f),
466 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_7x1_BatchNorm_beta.npy"),
468 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
470 7U, 1U, std::get<2>(c_filters),
471 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x7_weights.npy"),
472 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
473 PadStrideInfo(1, 1, 3, 0))
474 << BatchNormalizationLayer(
475 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x7_BatchNorm_moving_mean.npy"),
476 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x7_BatchNorm_moving_variance.npy"),
477 get_random_accessor(1.f, 1.f),
478 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x7_BatchNorm_beta.npy"),
480 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
482 1U, 7U, std::get<3>(c_filters),
483 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_7x1_weights.npy"),
484 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
485 PadStrideInfo(1, 1, 0, 3))
486 << BatchNormalizationLayer(
487 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_7x1_BatchNorm_moving_mean.npy"),
488 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_7x1_BatchNorm_moving_variance.npy"),
489 get_random_accessor(1.f, 1.f),
490 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_7x1_BatchNorm_beta.npy"),
492 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
494 7U, 1U, std::get<4>(c_filters),
495 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_1x7_weights.npy"),
496 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
497 PadStrideInfo(1, 1, 3, 0))
498 << BatchNormalizationLayer(
499 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_1x7_BatchNorm_moving_mean.npy"),
500 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_1x7_BatchNorm_moving_variance.npy"),
501 get_random_accessor(1.f, 1.f),
502 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_1x7_BatchNorm_beta.npy"),
504 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
507 i_d << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 3, PadStrideInfo(1, 1, 1, 1, DimensionRoundingType::CEIL), true))
510 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_weights.npy"),
511 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
512 PadStrideInfo(1, 1, 0, 0))
513 << BatchNormalizationLayer(
514 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_mean.npy"),
515 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_variance.npy"),
516 get_random_accessor(1.f, 1.f),
517 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_beta.npy"),
519 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
521 return BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d));
524 BranchLayer get_inception_node_D(const std::string &data_path, std::string &¶m_path,
525 std::tuple<unsigned int, unsigned int> a_filters,
526 std::tuple<unsigned int, unsigned int, unsigned int, unsigned int> b_filters)
528 std::string total_path = "/cnn_data/inceptionv3_model/" + param_path + "_";
530 i_a << ConvolutionLayer(
531 1U, 1U, std::get<0>(a_filters),
532 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_weights.npy"),
533 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
534 PadStrideInfo(1, 1, 0, 0))
535 << BatchNormalizationLayer(
536 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
537 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
538 get_random_accessor(1.f, 1.f),
539 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"),
541 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
543 3U, 3U, std::get<1>(a_filters),
544 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_weights.npy"),
545 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
546 PadStrideInfo(2, 2, 0, 0))
547 << BatchNormalizationLayer(
548 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
549 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
550 get_random_accessor(1.f, 1.f),
551 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_beta.npy"),
553 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
556 i_b << ConvolutionLayer(
557 1U, 1U, std::get<0>(b_filters),
558 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy"),
559 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
560 PadStrideInfo(1, 1, 0, 0))
561 << BatchNormalizationLayer(
562 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
563 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
564 get_random_accessor(1.f, 1.f),
565 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
567 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
569 7U, 1U, std::get<1>(b_filters),
570 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_weights.npy"),
571 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
572 PadStrideInfo(1, 1, 3, 0))
573 << BatchNormalizationLayer(
574 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_mean.npy"),
575 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_variance.npy"),
576 get_random_accessor(1.f, 1.f),
577 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_beta.npy"),
579 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
581 1U, 7U, std::get<2>(b_filters),
582 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_weights.npy"),
583 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
584 PadStrideInfo(1, 1, 0, 3))
585 << BatchNormalizationLayer(
586 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_mean.npy"),
587 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_variance.npy"),
588 get_random_accessor(1.f, 1.f),
589 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_beta.npy"),
591 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
593 3U, 3U, std::get<3>(b_filters),
594 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_weights.npy"),
595 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
596 PadStrideInfo(2, 2, 0, 0))
597 << BatchNormalizationLayer(
598 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
599 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
600 get_random_accessor(1.f, 1.f),
601 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_beta.npy"),
603 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
606 i_c << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL)))
607 // TODO (geopin01) : Remove once we understand why a single node graph does not run in CL
608 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LINEAR, 1.f, 0.f));
610 return BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_a), std::move(i_b), std::move(i_c));
613 BranchLayer get_inception_node_E(const std::string &data_path, std::string &¶m_path,
615 std::tuple<unsigned int, unsigned int, unsigned int> b_filters,
616 std::tuple<unsigned int, unsigned int, unsigned int, unsigned int> c_filters,
618 bool is_name_different = false)
620 // This is due to a naming issue in the tf model
621 std::string conv_id = "_0b_";
622 if(is_name_different)
627 std::string total_path = "/cnn_data/inceptionv3_model/" + param_path + "_";
629 i_a << ConvolutionLayer(
631 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_weights.npy"),
632 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
633 PadStrideInfo(1, 1, 0, 0))
634 << BatchNormalizationLayer(
635 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
636 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
637 get_random_accessor(1.f, 1.f),
638 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"),
640 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
643 i_b1 << ConvolutionLayer(
644 3U, 1U, std::get<1>(b_filters),
645 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x3_weights.npy"),
646 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
647 PadStrideInfo(1, 1, 1, 0))
648 << BatchNormalizationLayer(
649 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_moving_mean.npy"),
650 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_moving_variance.npy"),
651 get_random_accessor(1.f, 1.f),
652 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_beta.npy"),
654 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
657 i_b2 << ConvolutionLayer(
658 1U, 3U, std::get<2>(b_filters),
659 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id + "3x1_weights.npy"),
660 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
661 PadStrideInfo(1, 1, 0, 1))
662 << BatchNormalizationLayer(
663 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id + "3x1_BatchNorm_moving_mean.npy"),
664 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id + "3x1_BatchNorm_moving_variance.npy"),
665 get_random_accessor(1.f, 1.f),
666 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id + "3x1_BatchNorm_beta.npy"),
668 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
671 i_b << ConvolutionLayer(
672 1U, 1U, std::get<0>(b_filters),
673 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy"),
674 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
675 PadStrideInfo(1, 1, 0, 0))
676 << BatchNormalizationLayer(
677 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
678 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
679 get_random_accessor(1.f, 1.f),
680 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
682 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
683 << BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_b1), std::move(i_b2));
686 i_c1 << ConvolutionLayer(
687 3U, 1U, std::get<2>(c_filters),
688 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x3_weights.npy"),
689 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
690 PadStrideInfo(1, 1, 1, 0))
691 << BatchNormalizationLayer(
692 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x3_BatchNorm_moving_mean.npy"),
693 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x3_BatchNorm_moving_variance.npy"),
694 get_random_accessor(1.f, 1.f),
695 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x3_BatchNorm_beta.npy"),
697 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
700 i_c2 << ConvolutionLayer(
701 1U, 3U, std::get<3>(c_filters),
702 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_3x1_weights.npy"),
703 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
704 PadStrideInfo(1, 1, 0, 1))
705 << BatchNormalizationLayer(
706 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_3x1_BatchNorm_moving_mean.npy"),
707 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_3x1_BatchNorm_moving_variance.npy"),
708 get_random_accessor(1.f, 1.f),
709 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_3x1_BatchNorm_beta.npy"),
711 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
714 i_c << ConvolutionLayer(
715 1U, 1U, std::get<0>(c_filters),
716 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_weights.npy"),
717 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
718 PadStrideInfo(1, 1, 0, 0))
719 << BatchNormalizationLayer(
720 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
721 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
722 get_random_accessor(1.f, 1.f),
723 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"),
725 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
727 3U, 3U, std::get<1>(c_filters),
728 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_weights.npy"),
729 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
730 PadStrideInfo(1, 1, 1, 1))
731 << BatchNormalizationLayer(
732 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
733 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
734 get_random_accessor(1.f, 1.f),
735 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_beta.npy"),
737 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
738 << BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_c1), std::move(i_c2));
741 i_d << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 3, PadStrideInfo(1, 1, 1, 1, DimensionRoundingType::CEIL), true))
744 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_weights.npy"),
745 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
746 PadStrideInfo(1, 1, 0, 0))
747 << BatchNormalizationLayer(
748 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_mean.npy"),
749 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_variance.npy"),
750 get_random_accessor(1.f, 1.f),
751 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_beta.npy"),
753 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
755 return BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d));
759 /** Main program for Inception V3
761 * @param[in] argc Number of arguments
762 * @param[in] argv Arguments ( [optional] Target (0 = NEON, 1 = OpenCL), [optional] Path to the weights folder, [optional] image, [optional] labels )
764 int main(int argc, char **argv)
766 return arm_compute::utils::run_example<InceptionV3Example>(argc, argv);