2 Copyright (c) 2018-2019 Intel Corporation
4 Licensed under the Apache License, Version 2.0 (the "License");
5 you may not use this file except in compliance with the License.
6 You may obtain a copy of the License at
8 http://www.apache.org/licenses/LICENSE-2.0
10 Unless required by applicable law or agreed to in writing, software
11 distributed under the License is distributed on an "AS IS" BASIS,
12 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 See the License for the specific language governing permissions and
14 limitations under the License.
import logging as log

import numpy as np

from mo.graph.graph import Graph
from mo.middle.pattern_match import apply_pattern
from mo.ops.relu import ReLU
def _convert_to_leaky_relu_action(graph: Graph, matches: dict):
    """
    This function checks given pattern and if pattern satisfies all requirements, converts to ReLU with negative slope.

    :param graph: graph being transformed
    :param matches: matched pattern nodes: 'power_op', 'power_data', 'data' (shared input) and 'eltwise_op'
    """
    power_op = matches['power_op']
    power_data = matches['power_data']
    input_data = matches['data']
    eltwise_op = matches['eltwise_op']
    eltwise_data = eltwise_op.out_node()

    # Check that all nodes satisfies conversion requirements.
    # Every failed check must bail out with `return` — falling through would
    # rewrite a pattern that is not equivalent to a leaky ReLU.
    if len(eltwise_op.in_nodes()) > 2:
        log.debug('Eltwise layer ({}) can not participate in conversion to leaky ReLU due to it has more than two '
                  'inputs ({})'.format(eltwise_op.id, len(eltwise_op.in_nodes())))
        return

    if eltwise_op.soft_get('operation') != 'max':
        log.debug('Eltwise layer ({}) can not participate in conversion to leaky ReLU due to it has not satisfied '
                  'operation type ({}) should be max'.format(eltwise_op.id, eltwise_op.soft_get('operation')))
        return

    if not (power_op.has_valid('scale') and power_op.has_valid('power') and power_op.has_valid('shift')):
        log.debug('Power layer ({}) can not participate in conversion to leaky ReLU due to missing attribute (scale, '
                  'power or shift)'.format(power_op.id))
        return

    # The Power node must be a plain multiplication (power == 1, shift == 0)
    # with scale <= 1 so that max(x, scale * x) == LeakyReLU(x, scale).
    # NOTE(review): the check rejects only scale > 1 while the message says
    # "should be < 1" — scale == 1 is still accepted here; confirm intended.
    if power_op.scale > 1 or power_op.power != 1 or power_op.shift != 0:
        log.debug('Power layer ({}) can not participate in conversion to leaky ReLU due to wrong parameters(Scale = {} '
                  '(should be < 1), Power {} (should be = 1), Shift {} (should be = 0))'
                  ''.format(power_op.id, power_op.scale, power_op.power, power_op.shift))
        return

    # The scaled branch must feed only the Eltwise, otherwise removing it
    # would break other consumers.
    if len(power_data.out_nodes()) > 1:
        log.debug('Power layer({}) can not participate in conversion to leaky ReLU due to it has more than one consumer'
                  ''.format(power_op.id))
        return

    # Disconnect data nodes from ops
    graph.remove_edge(eltwise_op.id, eltwise_data.id)
    graph.remove_edge(input_data.id, power_op.id)
    graph.remove_edge(input_data.id, eltwise_op.id)

    # Create new ReLU operation re-using the original input and output data
    # nodes, so all external consumers stay connected.
    relu_op = ReLU(graph, dict(name="LeakyReLU_", negative_slope=np.array(power_op.scale)))
    relu_op.create_node_with_data(inputs=[input_data], data_nodes=eltwise_data)

    log.debug('Successful conversion from {} {} to ReLU with negative slope (leaky ReLU)'
              ''.format(eltwise_op.id, power_op.id))
76 def convert_mul_eltwise_to_leaky_relu(graph: Graph):
78 This function finds next subgraph:
79 -->Data-------->Eltwise(Max)-->Data
81 and replace with ReLU with negative slope
86 ('data', dict(kind='data')),
87 ('power_data', dict(kind='data')),
88 ('eltwise_op', dict(kind='op', type='Eltwise')),
89 ('power_op', dict(kind='op', type='Power')),
93 ('power_op', 'power_data'),
94 ('data', 'eltwise_op'),
95 ('power_data', 'eltwise_op'),
97 action=_convert_to_leaky_relu_action