From bb729d0ee99c2d21ea7e5a71064f700651559c12 Mon Sep 17 00:00:00 2001
From: Anastasia Kuporosova
Date: Thu, 27 Aug 2020 14:14:59 +0300
Subject: [PATCH] [Samples] Use get_ops for iteration over graph in samples
 (#1961)

---
 .../sample/classification_sample/classification_sample.py |  5 ++++-
 .../classification_sample_async.py                        |  5 ++++-
 .../object_detection_sample_ssd.py                        | 12 ++++++++----
 .../sample/style_transfer_sample/style_transfer_sample.py |  5 ++++-
 4 files changed, 20 insertions(+), 7 deletions(-)

diff --git a/inference-engine/ie_bridges/python/sample/classification_sample/classification_sample.py b/inference-engine/ie_bridges/python/sample/classification_sample/classification_sample.py
index 4fcc787..80fb4bd 100644
--- a/inference-engine/ie_bridges/python/sample/classification_sample/classification_sample.py
+++ b/inference-engine/ie_bridges/python/sample/classification_sample/classification_sample.py
@@ -23,6 +23,7 @@ import numpy as np
 import logging as log
 from openvino.inference_engine import IECore
+import ngraph as ng
 
 
 def build_argparser():
     parser = ArgumentParser(add_help=False)
@@ -62,10 +63,12 @@ def main():
     # Read IR
     log.info("Loading network files:\n\t{}\n\t{}".format(model_xml, model_bin))
     net = ie.read_network(model=model_xml, weights=model_bin)
+    func = ng.function_from_cnn(net)
 
     if "CPU" in args.device:
         supported_layers = ie.query_network(net, "CPU")
-        not_supported_layers = [l for l in net.layers.keys() if l not in supported_layers]
+        ops = func.get_ordered_ops()
+        not_supported_layers = [op.friendly_name for op in ops if op.friendly_name not in supported_layers]
         if len(not_supported_layers) != 0:
             log.error("Following layers are not supported by the plugin for specified device {}:\n {}".
                       format(args.device, ', '.join(not_supported_layers)))
diff --git a/inference-engine/ie_bridges/python/sample/classification_sample_async/classification_sample_async.py b/inference-engine/ie_bridges/python/sample/classification_sample_async/classification_sample_async.py
index 12acabc..9fbf95a 100644
--- a/inference-engine/ie_bridges/python/sample/classification_sample_async/classification_sample_async.py
+++ b/inference-engine/ie_bridges/python/sample/classification_sample_async/classification_sample_async.py
@@ -24,6 +24,7 @@ import logging as log
 from openvino.inference_engine import IECore
 import threading
+import ngraph as ng
 
 
 class InferReqWrap:
     def __init__(self, request, id, num_iter):
@@ -107,10 +108,12 @@ def main():
     # Read IR
     log.info("Loading network files:\n\t{}\n\t{}".format(model_xml, model_bin))
     net = ie.read_network(model=model_xml, weights=model_bin)
+    func = ng.function_from_cnn(net)
 
     if "CPU" in args.device:
         supported_layers = ie.query_network(net, "CPU")
-        not_supported_layers = [l for l in net.layers.keys() if l not in supported_layers]
+        ops = func.get_ordered_ops()
+        not_supported_layers = [op.friendly_name for op in ops if op.friendly_name not in supported_layers]
         if len(not_supported_layers) != 0:
             log.error("Following layers are not supported by the plugin for specified device {}:\n {}".
                       format(args.device, ', '.join(not_supported_layers)))
diff --git a/inference-engine/ie_bridges/python/sample/object_detection_sample_ssd/object_detection_sample_ssd.py b/inference-engine/ie_bridges/python/sample/object_detection_sample_ssd/object_detection_sample_ssd.py
index 42df4bf..862abe4 100644
--- a/inference-engine/ie_bridges/python/sample/object_detection_sample_ssd/object_detection_sample_ssd.py
+++ b/inference-engine/ie_bridges/python/sample/object_detection_sample_ssd/object_detection_sample_ssd.py
@@ -23,6 +23,7 @@ import numpy as np
 import logging as log
 from openvino.inference_engine import IECore
+import ngraph as ng
 
 
 def build_argparser():
     parser = ArgumentParser(add_help=False)
@@ -57,6 +58,8 @@ def main():
     model_bin = os.path.splitext(model_xml)[0] + ".bin"
     log.info("Loading network files:\n\t{}\n\t{}".format(model_xml, model_bin))
     net = ie.read_network(model=model_xml, weights=model_bin)
+    func = ng.function_from_cnn(net)
+    ops = func.get_ordered_ops()
     # -----------------------------------------------------------------------------------------------------
 
     # ------------- 2. Load Plugin for inference engine and extensions library if specified --------------
@@ -73,7 +76,7 @@ def main():
 
     if "CPU" in args.device:
         supported_layers = ie.query_network(net, "CPU")
-        not_supported_layers = [l for l in net.layers.keys() if l not in supported_layers]
+        not_supported_layers = [op.friendly_name for op in ops if op.friendly_name not in supported_layers]
         if len(not_supported_layers) != 0:
             log.error("Following layers are not supported by the plugin for specified device {}:\n {}".
                       format(args.device, ', '.join(not_supported_layers)))
@@ -143,9 +146,10 @@ def main():
     log.info('Preparing output blobs')
 
     output_name, output_info = "", net.outputs[next(iter(net.outputs.keys()))]
-    for output_key in net.outputs:
-        if net.layers[output_key].type == "DetectionOutput":
-            output_name, output_info = output_key, net.outputs[output_key]
+    output_ops = {op.friendly_name : op for op in ops \
+                  if op.friendly_name in net.outputs and op.get_type_name() == "DetectionOutput"}
+    if len(output_ops) != 0:
+        output_name, output_info = output_ops.popitem()
 
     if output_name == "":
         log.error("Can't find a DetectionOutput layer in the topology")
diff --git a/inference-engine/ie_bridges/python/sample/style_transfer_sample/style_transfer_sample.py b/inference-engine/ie_bridges/python/sample/style_transfer_sample/style_transfer_sample.py
index 7d157db..f6c8bd0 100644
--- a/inference-engine/ie_bridges/python/sample/style_transfer_sample/style_transfer_sample.py
+++ b/inference-engine/ie_bridges/python/sample/style_transfer_sample/style_transfer_sample.py
@@ -23,6 +23,7 @@ import numpy as np
 import logging as log
 from openvino.inference_engine import IECore
+import ngraph as ng
 
 
 def build_argparser():
     parser = ArgumentParser(add_help=False)
@@ -66,10 +67,12 @@ def main():
     # Read IR
     log.info("Loading network files:\n\t{}\n\t{}".format(model_xml, model_bin))
     net = ie.read_network(model=model_xml, weights=model_bin)
+    func = ng.function_from_cnn(net)
 
     if "CPU" in args.device:
         supported_layers = ie.query_network(net, "CPU")
-        not_supported_layers = [l for l in net.layers.keys() if l not in supported_layers]
+        ops = func.get_ordered_ops()
+        not_supported_layers = [op.friendly_name for op in ops if op.friendly_name not in supported_layers]
         if len(not_supported_layers) != 0:
             log.error("Following layers are not supported by the plugin for specified device {}:\n {}".
                       format(args.device, ', '.join(not_supported_layers)))
-- 
2.7.4
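
The patch replaces iteration over the old net.layers mapping with iteration over the operations of the nGraph function wrapping the network. As a standalone illustration of that pattern, the sketch below mirrors the unsupported-layer check now used in the samples; it is illustrative only, assumes an OpenVINO Python environment of this era where openvino.inference_engine and ngraph are importable, and uses placeholder IR paths ("model.xml"/"model.bin") and a hard-coded "CPU" device rather than the samples' command-line arguments.

# Illustrative sketch only (not part of the patch): the unsupported-layer
# check that the samples now perform through nGraph instead of net.layers.
# Assumes openvino.inference_engine and ngraph are installed; the IR paths
# below are placeholders.
import logging as log

from openvino.inference_engine import IECore
import ngraph as ng

ie = IECore()
net = ie.read_network(model="model.xml", weights="model.bin")  # placeholder IR paths
func = ng.function_from_cnn(net)   # nGraph function for the loaded network
ops = func.get_ordered_ops()       # topologically ordered operations of the graph

supported_layers = ie.query_network(net, "CPU")
not_supported_layers = [op.friendly_name for op in ops if op.friendly_name not in supported_layers]
if len(not_supported_layers) != 0:
    log.error("Following layers are not supported by the plugin for specified device {}:\n {}".
              format("CPU", ', '.join(not_supported_layers)))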
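In the SSD sample the same ops list also replaces the net.layers[...] type lookup when searching for the DetectionOutput layer. A minimal sketch of that lookup, reusing net, ops, and log from the previous snippet:

# Illustrative sketch only: locating the DetectionOutput layer through the
# graph ops, as object_detection_sample_ssd.py now does (net, ops, and log
# come from the previous sketch).
output_name, output_info = "", net.outputs[next(iter(net.outputs.keys()))]
output_ops = {op.friendly_name: op for op in ops
              if op.friendly_name in net.outputs and op.get_type_name() == "DetectionOutput"}
if len(output_ops) != 0:
    output_name, output_info = output_ops.popitem()

if output_name == "":
    log.error("Can't find a DetectionOutput layer in the topology")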