/*******************************************************************************
* Copyright 2019 Intel Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
*     http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
20 #include "c_types_map.hpp"
21 #include "type_helpers.hpp"
24 using namespace mkldnn::impl;
25 using namespace mkldnn::impl::utils;
26 using namespace mkldnn::impl::status;
27 using namespace mkldnn::impl::prop_kind;
28 using namespace mkldnn::impl::alg_kind;
29 using namespace mkldnn::impl::types;
33 status_t bin_conv_desc_init(binary_convolution_desc_t *bin_conv_desc,
34 prop_kind_t prop_kind, alg_kind_t alg_kind,
35 const memory_desc_t *src_desc, const memory_desc_t *weights_desc,
36 const memory_desc_t *dst_desc,
37 const dims_t strides, const dims_t dilates,
38 const dims_t padding_l, const dims_t padding_r,
41 && !any_null(bin_conv_desc, src_desc, weights_desc, dst_desc, strides,
43 && one_of(alg_kind, binary_convolution_direct)
44 && one_of(pad_value, -1.f, 0.f, 1.f);
45 if (!args_ok) return invalid_arguments;
47 if (padding_r == nullptr) padding_r = padding_l;
49 auto bcd = binary_convolution_desc_t();
50 bcd.primitive_kind = primitive_kind::binary_convolution;
51 bcd.prop_kind = prop_kind;
52 bcd.alg_kind = alg_kind;
54 bcd.src_desc = zero_md();
55 bcd.dst_desc = zero_md();
56 bcd.weights_desc = zero_md();
58 const bool with_groups = weights_desc->ndims == src_desc->ndims + 1;
60 bcd.src_desc = *src_desc;
61 bcd.dst_desc = *dst_desc;
62 bcd.weights_desc = *weights_desc;
64 int sp_dims = src_desc->ndims - 2;
65 utils::array_copy(bcd.strides, strides, sp_dims);
66 utils::array_copy(bcd.padding[0], padding_l, sp_dims);
67 utils::array_copy(bcd.padding[1], padding_r, sp_dims);
69 utils::array_copy(bcd.dilates, dilates, sp_dims);
71 utils::array_set(bcd.dilates, 0, sp_dims);
73 bcd.pad_value = pad_value;
74 bcd.accum_data_type = types::default_accum_data_type(src_desc->data_type,
75 weights_desc->data_type, dst_desc->data_type, prop_kind);
77 bool consistency = true
78 && memory_desc_wrapper(weights_desc).nelems()
79 && src_desc->ndims == dst_desc->ndims
80 && utils::one_of(src_desc->ndims, 3, 4, 5)
81 && utils::one_of(weights_desc->ndims, src_desc->ndims, src_desc->ndims + 1)
82 && src_desc->dims[0] == dst_desc->dims[0];
83 for (int i = 2; i < src_desc->ndims; ++i)
85 int src = src_desc->dims[i];
86 int ker = weights_desc->dims[with_groups + i];
87 int dil = bcd.dilates[i - 2];
88 int pad_l = padding_l[i - 2];
89 int pad_r = padding_r[i - 2];
90 int str = strides[i - 2];
91 int dst = dst_desc->dims[i];
92 int ker_range = 1 + (ker - 1) * (dil + 1);
94 if (str < 1) return invalid_arguments;
95 consistency = consistency
98 // && pad_r + str > 0 // TODO: [dmitrygo] Commented as WA to support dw conv fusing
99 && (src - ker_range + pad_l + pad_r) / str + 1 == dst;
101 if (!consistency) return invalid_arguments;
103 *bin_conv_desc = bcd;
/* Public C API entry point for a forward (possibly dilated) binary
 * convolution descriptor.
 *
 * Accepts only forward_training / forward_inference propagation kinds;
 * everything else (null checks, alg_kind, pad_value, geometry) is
 * validated by mkldnn::impl::bin_conv_desc_init, to which this function
 * forwards all of its arguments unchanged.
 *
 * @returns success, or invalid_arguments on any validation failure */
status_t mkldnn_dilated_binary_convolution_forward_desc_init(
        binary_convolution_desc_t *bin_conv_desc, prop_kind_t prop_kind,
        alg_kind_t alg_kind, const memory_desc_t *src_desc,
        const memory_desc_t *weights_desc, const memory_desc_t *dst_desc, const dims_t strides,
        const dims_t dilates, const dims_t padding_l,
        const dims_t padding_r,
        const float pad_value) {
    if (!one_of(prop_kind, forward_training, forward_inference))
        return invalid_arguments;
    return mkldnn::impl::bin_conv_desc_init(bin_conv_desc, prop_kind, alg_kind, src_desc,
            weights_desc, dst_desc, strides, dilates, padding_l, padding_r, pad_value);