bool Padding1D::isValid(const std::string &v) const {
- /// case 1, 2: padding has string literal
- if (istrequal(v, "valid") || istrequal(v, "same")) {
+ /// case 1, 2, 3: padding has string literal
+ if (istrequal(v, "valid") || istrequal(v, "same") || istrequal(v, "causal")) {
return true;
}
std::vector<props::Padding_> paddings;
from_string(v, paddings);
- /// case 3, 4: padding has a sequence of unsigned integer
+ /// case 4, 5: padding has a sequence of unsigned integer
if (paddings.size() == 1 || paddings.size() == 2) {
/// check if every padding is non-negative integer
for (const auto &padding : paddings) {
return false;
}
-std::array<unsigned int, 2> Padding1D::compute(const TensorDim &input,
- const TensorDim &kernel,
- const unsigned int &strides) {
+std::array<unsigned int, 2> Padding1D::compute(const TensorDim &input_dim,
+ const unsigned int &kernel,
+ const unsigned int &stride,
+ const unsigned int &dilation) {
auto &padding_repr = get(); /// padding representation
+ auto calculate_padding = [](unsigned input, unsigned kernel, unsigned stride,
+ unsigned dilation) {
+ /// effective kernel span under dilation, then out = ceil(input / stride)
+ unsigned int eff_kernel = (kernel - 1) * dilation + 1;
+ auto out = (input + stride - 1) / stride;
+ auto req_input = (out - 1) * stride + eff_kernel;
+ return req_input >= input ? req_input - input : 0;
+ };
+
if (istrequal(padding_repr, "valid")) {
return {0, 0};
+ } else if (istrequal(padding_repr, "same")) {
+
+ auto pad_horizontal =
+ calculate_padding(input_dim.width(), kernel, stride, dilation);
+
+ auto pad_left = pad_horizontal / 2;
+
+ return {pad_left, pad_horizontal - pad_left};
+ } else if (istrequal(padding_repr, "causal")) {
+ auto pad_horizontal =
+ calculate_padding(input_dim.width(), kernel, stride, dilation);
+ return {pad_horizontal, 0};
}
- // NYI
- return {0, 0};
+ /// case 4, 5: padding has a sequence of unsigned integer
+ std::vector<props::Padding_> paddings_;
+ from_string(padding_repr, paddings_);
+ std::vector<unsigned int> paddings(paddings_.begin(), paddings_.end());
+
+ switch (paddings.size()) {
+ case 1:
+ return {paddings[0], paddings[0]};
+ case 2:
+ return {paddings[0], paddings[1]};
+ default:
+ throw std::logic_error("[Padding1D] should not reach here");
+ }
}
BasicRegularizerConstant::BasicRegularizerConstant(float value) { set(value); }
* formats are accepted valid
* 1. "same" (case insensitive literal string)
* 2. "valid" (case insensitive literal string)
+ * 3. "causal" (case insensitive literal string)
- * 3. "padding1d_all", eg) padding=1
+ * 4. "padding1d_all", eg) padding=1
- * 4. "padding1d_left, padding1d_right" eg) padding=1,1
+ * 5. "padding1d_left, padding1d_right" eg) padding=1,1
*
- * @return std::array<unsigned int, 4> list of unsigned padding
+ * @return std::array<unsigned int, 2> list of unsigned padding
*/
std::array<unsigned int, 2> compute(const TensorDim &input,
- const TensorDim &kernel,
- const unsigned int &strides);
+ const unsigned int &kernel,
+ const unsigned int &stride,
+ const unsigned int &dilation);
};
/**
static constexpr size_t SINGLE_INOUT_IDX = 0;
-Conv1DLayer::Conv1DLayer(const std::array<unsigned int, 2> &padding_) :
+Conv1DLayer::Conv1DLayer() :
LayerImpl(),
- padding(padding_),
conv_props(props::FilterSize(), props::KernelSize(), props::Stride(),
- props::Padding2D(), props::Dilation()) {
+ props::Padding1D(), props::Dilation()) {
wt_idx.fill(std::numeric_limits<unsigned>::max());
conv2d_layer = std::make_unique<Conv2DLayer>();
}
throw std::invalid_argument("Conv1D layer requires input with height 1");
}
+ const TensorDim &in_dim = context.getInputDimensions()[0];
+ const unsigned int kernel_size =
+ std::get<props::KernelSize>(conv_props).get();
+ const unsigned int stride = std::get<props::Stride>(conv_props).get();
+ const unsigned int dilation = std::get<props::Dilation>(conv_props).get();
+
+ const std::array<unsigned int, 2> padding =
+ std::get<props::Padding1D>(conv_props)
+ .compute(in_dim, kernel_size, stride, dilation);
+ const std::string padding_str =
+ "0,0," + std::to_string(padding[0]) + "," + std::to_string(padding[1]);
+
/** set the given properties as key value pair */
auto setPropertyKV = [this](const std::string &key,
const std::string &value) {
setPropertyKV(props::FilterSize::key,
std::to_string(std::get<props::FilterSize>(conv_props).get()));
- setPropertyKV(
- props::KernelSize::key,
- "1," + std::to_string(std::get<props::KernelSize>(conv_props).get()));
- setPropertyKV(props::Stride::key,
- "1," +
- std::to_string(std::get<props::Stride>(conv_props).get()));
- setPropertyKV(props::Padding2D::key,
- std::get<props::Padding2D>(conv_props).get());
- setPropertyKV(props::Dilation::key,
- "1," +
- std::to_string(std::get<props::Dilation>(conv_props).get()));
+ setPropertyKV(props::KernelSize::key, "1," + std::to_string(kernel_size));
+ setPropertyKV(props::Stride::key, "1," + std::to_string(stride));
+ setPropertyKV(props::Padding2D::key, padding_str);
+ setPropertyKV(props::Dilation::key, "1," + std::to_string(dilation));
conv2d_layer->finalize(context);
}
/**
* @brief Constructor of Conv 1D Layer
*/
- Conv1DLayer(const std::array<unsigned int, 2> &padding_ = {0, 0});
+ Conv1DLayer();
/**
* @brief Destructor of Conv 1D Layer
inline static const std::string type = "conv1d";
private:
- std::array<unsigned int, 2> padding;
std::tuple<props::FilterSize, props::KernelSize, props::Stride,
- props::Padding2D, props::Dilation>
+ props::Padding1D, props::Dilation>
conv_props;
std::array<unsigned int, 5> wt_idx; /**< indices of the weights and tensors */
record_single(conv, (1, 3, 1, 11), "conv1d_sb_same_dilation")
record_single(conv, (3, 3, 1, 11), "conv1d_mb_same_dilation")
+ conv = K.layers.Conv1D(3, 2, padding="causal")
+ record_single(conv, (1, 1, 1, 4), "conv1d_sb_causal")
+ record_single(conv, (3, 1, 1, 4), "conv1d_mb_causal")
+
+ conv = K.layers.Conv1D(3, 2, padding="causal", dilation_rate=2)
+ record_single(conv, (1, 1, 1, 4), "conv1d_sb_causal_dilation")
+ record_single(conv, (3, 1, 1, 4), "conv1d_mb_causal_dilation")
+
concat = K.layers.Concatenate(axis=3)
record_single(concat, [(2,3,3,2), (2, 3, 3, 3)], "concat_dim3")
auto semantic_conv1d = LayerSemanticsParamType(
nntrainer::createLayer<nntrainer::Conv1DLayer>, nntrainer::Conv1DLayer::type,
- {"filters=1", "kernel_size=1", "padding=1"}, 0, false, 1);
+ {"filters=1", "kernel_size=1", "padding1d=1"}, 0, false, 1);
GTEST_PARAMETER_TEST(Convolution1D, LayerSemantics,
::testing::Values(semantic_conv1d));
auto conv1d_sb_same_remain = LayerGoldenTestParamType(
nntrainer::createLayer<nntrainer::Conv1DLayer>,
- {"filters=2", "kernel_size=3", "padding=same"}, "1:1:1:4",
+ {"filters=2", "kernel_size=3", "padding1d=same"}, "1:1:1:4",
"conv1d_sb_same_remain.nnlayergolden", LayerGoldenTestParamOptions::DEFAULT);
auto conv1d_mb_same_remain = LayerGoldenTestParamType(
nntrainer::createLayer<nntrainer::Conv1DLayer>,
- {"filters=2", "kernel_size=3", "padding=same"}, "3:1:1:4",
+ {"filters=2", "kernel_size=3", "padding1d=same"}, "3:1:1:4",
"conv1d_mb_same_remain.nnlayergolden", LayerGoldenTestParamOptions::DEFAULT);
auto conv1d_sb_same_uneven_remain_1 = LayerGoldenTestParamType(
"filters=2",
"kernel_size=3",
"stride=2",
- "padding=same",
+ "padding1d=same",
},
"1:3:1:4", "conv1d_sb_same_uneven_remain.nnlayergolden",
LayerGoldenTestParamOptions::DEFAULT);
"filters=2",
"kernel_size=3",
"stride=2",
- "padding=0,0,0,1",
+ "padding1d=0,1",
},
"1:3:1:4", "conv1d_sb_same_uneven_remain.nnlayergolden",
LayerGoldenTestParamOptions::DEFAULT);
"filters=2",
"kernel_size=3",
"stride=2",
- "padding=same",
+ "padding1d=same",
},
"3:3:1:4", "conv1d_mb_same_uneven_remain.nnlayergolden",
LayerGoldenTestParamOptions::DEFAULT);
"filters=2",
"kernel_size=3",
"stride=2",
- "padding=0,0,0,1",
+ "padding1d=0,1",
},
"3:3:1:4", "conv1d_mb_same_uneven_remain.nnlayergolden",
LayerGoldenTestParamOptions::DEFAULT);
"filters=2",
"kernel_size=3",
"stride=2",
- "padding=valid",
+ "padding1d=valid",
},
"1:3:1:7", "conv1d_sb_valid_drop_last.nnlayergolden",
LayerGoldenTestParamOptions::DEFAULT);
"filters=2",
"kernel_size=3",
"stride=2",
- "padding=valid",
+ "padding1d=valid",
},
"3:3:1:7", "conv1d_mb_valid_drop_last.nnlayergolden",
LayerGoldenTestParamOptions::DEFAULT);
"3:2:1:5", "conv1d_mb_no_overlap.nnlayergolden",
LayerGoldenTestParamOptions::DEFAULT);
+auto conv1d_sb_causal = LayerGoldenTestParamType(
+ nntrainer::createLayer<nntrainer::Conv1DLayer>,
+ {"filters=3", "kernel_size=2", "padding1d=causal"}, "1:1:1:4",
+ "conv1d_sb_causal.nnlayergolden", LayerGoldenTestParamOptions::DEFAULT);
+
+auto conv1d_mb_causal = LayerGoldenTestParamType(
+ nntrainer::createLayer<nntrainer::Conv1DLayer>,
+ {"filters=3", "kernel_size=2", "padding1d=causal"}, "3:1:1:4",
+ "conv1d_mb_causal.nnlayergolden", LayerGoldenTestParamOptions::DEFAULT);
+
auto conv1d_sb_1x1_kernel = LayerGoldenTestParamType(
nntrainer::createLayer<nntrainer::Conv1DLayer>,
{"filters=3", "kernel_size=1", "stride=2"}, "1:2:1:5",
{
"filters=2",
"kernel_size=3",
- "padding=same",
+ "padding1d=same",
"dilation=2",
},
"1:3:1:11", "conv1d_sb_same_dilation.nnlayergolden",
{
"filters=2",
"kernel_size=3",
- "padding=same",
+ "padding1d=same",
"dilation=2",
},
"3:3:1:11", "conv1d_mb_same_dilation.nnlayergolden",
LayerGoldenTestParamOptions::DEFAULT);
+auto conv1d_sb_causal_dilation = LayerGoldenTestParamType(
+ nntrainer::createLayer<nntrainer::Conv1DLayer>,
+ {"filters=3", "kernel_size=2", "padding1d=causal", "dilation=2"}, "1:1:1:4",
+ "conv1d_sb_causal_dilation.nnlayergolden",
+ LayerGoldenTestParamOptions::DEFAULT);
+
+auto conv1d_mb_causal_dilation = LayerGoldenTestParamType(
+ nntrainer::createLayer<nntrainer::Conv1DLayer>,
+ {"filters=3", "kernel_size=2", "padding1d=causal", "dilation=2"}, "3:1:1:4",
+ "conv1d_mb_causal_dilation.nnlayergolden",
+ LayerGoldenTestParamOptions::DEFAULT);
+
GTEST_PARAMETER_TEST(
Convolution1D, LayerGoldenTest,
- ::testing::Values(
- conv1d_sb_minimum, conv1d_mb_minimum, conv1d_sb_same_remain,
- conv1d_mb_same_remain, conv1d_sb_same_uneven_remain_1,
- conv1d_sb_same_uneven_remain_2, conv1d_mb_same_uneven_remain_1,
- conv1d_mb_same_uneven_remain_2, conv1d_sb_valid_drop_last,
- conv1d_mb_valid_drop_last, conv1d_sb_no_overlap, conv1d_mb_no_overlap,
- conv1d_sb_1x1_kernel, conv1d_mb_1x1_kernel, conv1d_sb_dilation,
- conv1d_mb_dilation, conv1d_sb_same_dilation, conv1d_mb_same_dilation));
+ ::testing::Values(conv1d_sb_minimum, conv1d_mb_minimum, conv1d_sb_same_remain,
+ conv1d_mb_same_remain, conv1d_sb_same_uneven_remain_1,
+ conv1d_sb_same_uneven_remain_2,
+ conv1d_mb_same_uneven_remain_1,
+ conv1d_mb_same_uneven_remain_2, conv1d_sb_valid_drop_last,
+ conv1d_mb_valid_drop_last, conv1d_sb_no_overlap,
+ conv1d_mb_no_overlap, conv1d_sb_causal, conv1d_mb_causal,
+ conv1d_sb_1x1_kernel, conv1d_mb_1x1_kernel,
+ conv1d_sb_dilation, conv1d_mb_dilation,
+ conv1d_sb_same_dilation, conv1d_mb_same_dilation,
+ conv1d_sb_causal_dilation, conv1d_mb_causal_dilation));