Publishing R3
[platform/upstream/dldt.git] / model-optimizer / extensions / ops / resample.py
1 """
2  Copyright (c) 2018 Intel Corporation
3
4  Licensed under the Apache License, Version 2.0 (the "License");
5  you may not use this file except in compliance with the License.
6  You may obtain a copy of the License at
7
8       http://www.apache.org/licenses/LICENSE-2.0
9
10  Unless required by applicable law or agreed to in writing, software
11  distributed under the License is distributed on an "AS IS" BASIS,
12  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  See the License for the specific language governing permissions and
14  limitations under the License.
15 """
16
17 import logging as log
18 import networkx as nx
19
20 from mo.front.common.layout import get_height_dim, get_width_dim
21 from mo.graph.graph import Node
22 from mo.ops.op import Op
23 from extensions.ops.resize_factor_utils import factor_update
24
25
class ResampleOp(Op):
    """Resample layer: recomputes the spatial (H/W) dims of its input.

    The target size comes from one of several places (see resample_infer):
    the value of a second input (TF path), explicit 'height'/'width'
    attributes, a 'factor' attribute, or the shape of a second input.
    """
    op = 'Resample'

    def __init__(self, graph: nx.MultiDiGraph, attrs: dict):
        super().__init__(graph, {
            'type': ResampleOp.op,
            'op': ResampleOp.op,
            'factor': None,
            'infer': ResampleOp.resample_infer,
        }, attrs)

    def supported_attrs(self):
        # Attributes recognized on this op during conversion.
        return ['antialias', 'height', 'width', 'resample_type', 'factor']

    def backend_attrs(self):
        # Attributes serialized to IR; 'resample_type' is emitted as 'type'.
        return ['antialias', 'height', 'width', ('type', 'resample_type'), 'factor']

    @staticmethod
    def resample_infer(node: Node):
        """Shape inference for a Resample node.

        Copies the input shape and overwrites its H/W entries from, in order
        of the branches below: the second input's value (TF), explicit
        height/width attributes, the scaling factor, or the second input's
        shape. Refreshes node.factor from the realized scale and writes the
        resulting shape to the output node. On a malformed TF node, logs an
        error and clears node.type so it is not emitted as an IE layer.
        """
        layout = node.graph.graph['layout']
        h_axis = get_height_dim(layout)
        w_axis = get_width_dim(layout)

        src_shape = node.in_node(0).shape
        if src_shape is None:
            # Nothing known about the input yet — cannot infer.
            return
        result_shape = src_shape.copy()

        if node.has('fw') and node.fw == 'tf':
            # TF path: second input carries the target [height, width] value.
            target = node.in_node(1).value
            if target is None or len(src_shape) != 4 or len(target) != 2:
                log.error(
                    'Node {} with op {} cannot be converted to Resample layer because there is no enough info about '
                    'src/dst shapes: src_shape = {}, dst_shape = {}'.format(node.name, node.op, src_shape, target))
                node.type = None  # prevent translation to a valid IE layer
                return
            result_shape[h_axis] = target[0]
            result_shape[w_axis] = target[1]
            # Size is now baked into attrs; drop the constant-size input edge.
            node.graph.remove_edge(node.in_node(1).id, node.id)
        elif len(node.in_nodes()) == 1:
            if node.has('width') and node.has('height'):
                result_shape[h_axis] = node.height
                result_shape[w_axis] = node.width
            else:
                # presumably 'factor' is always set on this path — TODO confirm
                result_shape[h_axis] = node.factor * src_shape[h_axis]
                result_shape[w_axis] = node.factor * src_shape[w_axis]
        else:
            # Second input acts as a shape reference.
            reference = node.in_node(1).shape
            result_shape[h_axis] = reference[h_axis]
            result_shape[w_axis] = reference[w_axis]

        realized = [float(result_shape[h_axis]) / src_shape[h_axis],
                    float(result_shape[w_axis]) / src_shape[w_axis]]
        node.factor = factor_update(
            node.factor,
            realized,
            [src_shape[h_axis], src_shape[w_axis]],
            [result_shape[h_axis], result_shape[w_axis]],
            node.soft_get('name'))

        node.out_node().shape = result_shape