LAYER_GRU, /** GRU Layer type */
LAYER_TIME_DIST, /** Time Distributed Layer type */
LAYER_PERMUTE, /** Permute layer */
+ LAYER_DROPOUT, /** DropOut Layer type */
LAYER_UNKNOWN = ML_TRAIN_LAYER_TYPE_UNKNOWN /** Unknown */
};
}
/**
+ * @brief Helper function to create DropOut layer
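+ *
+ * A minimal usage sketch; the 0.2 rate and the model object are
+ * illustrative, and "dropout" is the property key this layer accepts:
+ * @code
+ * auto drop = ml::train::layer::DropOut({"dropout=0.2"});
+ * model->addLayer(std::move(drop));
+ * @endcode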
+ */
+inline std::unique_ptr<Layer>
+DropOut(const std::vector<std::string> &properties = {}) {
+ return createLayer(LayerType::LAYER_DROPOUT, properties);
+}
+
+/**
* @brief Helper function to create Time Distributed layer
*/
inline std::unique_ptr<Layer>
$(NNTRAINER_ROOT)/nntrainer/layers/lstm.cpp \
$(NNTRAINER_ROOT)/nntrainer/layers/gru.cpp \
$(NNTRAINER_ROOT)/nntrainer/layers/time_dist.cpp \
+ $(NNTRAINER_ROOT)/nntrainer/layers/dropout.cpp \
$(NNTRAINER_ROOT)/nntrainer/layers/permute_layer.cpp \
$(NNTRAINER_ROOT)/nntrainer/layers/acti_func.cpp \
$(NNTRAINER_ROOT)/nntrainer/layers/split_layer.cpp \
#include <bn_layer.h>
#include <concat_layer.h>
#include <conv2d_layer.h>
+#include <dropout.h>
#include <embedding.h>
#include <fc_layer.h>
#include <flatten_layer.h>
LayerType::LAYER_LSTM);
ac.registerFactory(nntrainer::createLayer<GRULayer>, GRULayer::type,
LayerType::LAYER_GRU);
+ ac.registerFactory(nntrainer::createLayer<DropOutLayer>, DropOutLayer::type,
+ LayerType::LAYER_DROPOUT);
ac.registerFactory(nntrainer::createLayer<TimeDistLayer>, TimeDistLayer::type,
LayerType::LAYER_TIME_DIST);
ac.registerFactory(nntrainer::createLayer<SplitLayer>, SplitLayer::type,
return !v.empty() && std::regex_match(v, allowed);
}
+bool DropOutSpec::isValid(const float &v) const { return v >= 0.0; }
+
ConnectionSpec::ConnectionSpec(const std::vector<props::Name> &layer_ids_,
const std::string &op_type_) :
op_type(op_type_),
const TensorDim &kernel);
};
+/**
+ * @brief DropOutSpec property, defines the dropout rate of a layer
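+ *
+ * A usage sketch (the layer object and the 0.5 value are illustrative):
+ * @code
+ * layer->setProperty({"dropout=0.5"});
+ * @endcode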
+ *
+ */
+class DropOutSpec : public nntrainer::Property<float> {
+
+public:
+ /**
+   * @brief Construct a new DropOutSpec object with a default value of 0.0
+ *
+ */
+ DropOutSpec(float value = 0.0) : nntrainer::Property<float>(value) {}
+ static constexpr const char *key = "dropout"; /**< unique key to access */
+ using prop_tag = float_prop_tag; /**< property type */
+
+ /**
+ * @brief DropOutSpec validator
+ *
+ * @param v float to validate
+   * @retval true if it is greater than or equal to 0.0
+   * @retval false if it is smaller than 0.0
+ */
+ bool isValid(const float &v) const override;
+};
+
} // namespace props
} // namespace nntrainer
--- /dev/null
+// SPDX-License-Identifier: Apache-2.0
+/**
+ * Copyright (C) 2020 Jijoong Moon <jijoong.moon@samsung.com>
+ *
+ * @file dropout.cpp
+ * @date 16 June 2020
+ * @see https://github.com/nnstreamer/nntrainer
+ * @author Jijoong Moon <jijoong.moon@samsung.com>
+ * @bug No known bugs except for NYI items
+ * @brief This is Dropout Layer Class for Neural Network
+ *
+ */
+
+#include <dropout.h>
+#include <layer_internal.h>
+#include <nntrainer_error.h>
+#include <nntrainer_log.h>
+#include <parse_util.h>
+#include <util_func.h>
+
+namespace nntrainer {
+
+int DropOutLayer::initialize(Manager &manager) {
+ output_dim = input_dim;
+  // TODO: request these mask tensors from the Manager instead of
+  // allocating them here
+ for (auto &t : input_dim) {
+ mask.push_back(std::make_shared<Tensor>(t, true));
+ }
+
+ return ML_ERROR_NONE;
+}
+
+void DropOutLayer::forwarding(bool training) {
+ auto &rate_ = std::get<props::DropOutSpec>(dropout_rate).get();
+  // Assume in-place calculation: input and output share the same memory
+  // buffer, so when training is false the output is identical to the input.
+  // In other words, nothing happens during inference.
+
+ if (training && rate_ > 0.0) {
+ for (unsigned int i = 0; i < input_dim.size(); ++i) {
+ Tensor &input = net_input[i]->getVariableRef();
+ Tensor &mask_ = *mask[i].get();
+
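+      // dropout_mask() is assumed to produce an inverted-dropout mask: each
+      // element is 0 with probability rate_ and 1 / (1 - rate_) otherwise,
+      // so the expected activation is preserved and no rescaling is needed
+      // at inference time.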
+ mask_ = input.dropout_mask(rate_);
+ input.multiply_i(mask_);
+ }
+ }
+}
+
+void DropOutLayer::calcDerivative() {
+  // Assume in-place calculation: the incoming gradient is masked directly.
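+  // The mask saved in forwarding() is reused here, so gradients of dropped
+  // units are zeroed and (assuming an inverted-dropout mask) the surviving
+  // gradients are rescaled by the same factor as the activations.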
+ auto &rate_ = std::get<props::DropOutSpec>(dropout_rate).get();
+ if (rate_ > 0.0) {
+ for (unsigned int i = 0; i < input_dim.size(); ++i) {
+      Tensor &deriv = net_hidden[i]->getGradientRef();
+ deriv.multiply_i(*mask[i].get());
+ }
+ }
+}
+
+int DropOutLayer::setProperty(std::vector<std::string> values) {
+ try {
+ values = loadProperties(values, dropout_rate);
+ } catch (std::invalid_argument &e) {
+ ml_loge("parsing property failed, reason: %s", e.what());
+ return ML_ERROR_INVALID_PARAMETER;
+ }
+
+ return LayerV1::setProperty(values);
+}
+} /* namespace nntrainer */
--- /dev/null
+// SPDX-License-Identifier: Apache-2.0
+/**
+ * Copyright (C) 2020 Jijoong Moon <jijoong.moon@samsung.com>
+ *
+ * @file dropout.h
+ * @date 05 July 2021
+ * @see https://github.com/nnstreamer/nntrainer
+ * @author Jijoong Moon <jijoong.moon@samsung.com>
+ * @bug No known bugs except for NYI items
+ * @brief This is DropOut Layer Class for Neural Network
+ *
+ */
+
+#ifndef __DROPOUT_H__
+#define __DROPOUT_H__
+#ifdef __cplusplus
+
+#include <common_properties.h>
+#include <layer_internal.h>
+#include <node_exporter.h>
+#include <tensor.h>
+
+namespace nntrainer {
+
+/**
+ * @class DropOut Layer
+ * @brief DropOut Layer
+ */
+class DropOutLayer : public LayerV1 {
+public:
+ /**
+ * @brief Constructor of DropOut Layer
+ */
+ template <typename... Args>
+ DropOutLayer(float dropout = 0.0, Args... args) :
+ LayerV1(args...),
+ dropout_rate(props::DropOutSpec(dropout)) {
+ setTrainable(false);
+ }
+
+ /**
+ * @brief Destructor of DropOut Layer
+ */
+ ~DropOutLayer() = default;
+
+ /**
+ * @brief Move constructor of DropOutLayer.
+ * @param[in] DropOutLayer &&
+ */
+ DropOutLayer(DropOutLayer &&rhs) noexcept = default;
+
+ /**
+ * @brief Move assignment operator.
+   * @param[in] rhs DropOutLayer to be moved.
+ */
+ DropOutLayer &operator=(DropOutLayer &&rhs) = default;
+
+ /**
+ * @brief initialize layer
+ * @retval #ML_ERROR_NONE Successful.
+ * @retval #ML_ERROR_INVALID_PARAMETER invalid parameter.
+ */
+ int initialize(Manager &manager) override;
+
+ /**
+ * @brief Read Weight & Bias Data from file
+ * @param[in] file input stream file
+ */
+ void read(std::ifstream &file) override{};
+
+ /**
+ * @brief Save Weight & Bias Data to file
+ * @param[in] file output stream file
+ */
+ void save(std::ofstream &file) override{};
+
+ /**
+ * @copydoc Layer::forwarding(bool training)
+ */
+ void forwarding(bool training = true) override;
+
+ /**
+ * @copydoc Layer::calcDerivative()
+ */
+ void calcDerivative() override;
+
+ /**
+ * @copydoc Layer::supportInPlace()
+ */
+ bool supportInPlace() const override { return true; }
+
+ /**
+ * @copydoc Layer::setProperty(std::vector<std::string> values)
+ */
+ int setProperty(std::vector<std::string> values) override;
+
+ /**
+ * @copydoc Layer::export_to(Exporter &exporter, ExportMethods method)
+ */
+ void export_to(
+ Exporter &exporter,
+ ExportMethods method = ExportMethods::METHOD_STRINGVECTOR) const override{};
+
+ /**
+ * @copydoc Layer::getType()
+ */
+ const std::string getType() const override { return DropOutLayer::type; };
+
+ inline static const std::string type = "dropout";
+
+private:
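+  /** dropout rate property, set via the "dropout" key */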
+ std::tuple<props::DropOutSpec> dropout_rate;
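+  /** per-input dropout masks, filled in forwarding() and reused in
+   *  calcDerivative() */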
+ std::vector<std::shared_ptr<Tensor>> mask;
+};
+
+} // namespace nntrainer
+
+#endif /* __cplusplus */
+#endif /* __DROPOUT_H__ */
'split_layer.cpp',
'permute_layer.cpp',
'layer_impl.cpp',
- 'gru.cpp'
+ 'gru.cpp',
+ 'dropout.cpp'
]
layer_headers = [