std::string ConnectionSpec::NoneType = "";
+WeightRegularizerConstant::WeightRegularizerConstant(float value) {
+ set(value);
+}
+
+bool WeightRegularizerConstant::isValid(const float &value) const {
+ return value >= 0.0f;
+}
+
HiddenStateActivation::HiddenStateActivation(ActivationTypeInfo::Enum value) {
set(value);
};
set(value);
};
+WeightInitializer::WeightInitializer(Tensor::Initializer value) { set(value); }
+
+BiasInitializer::BiasInitializer(Tensor::Initializer value) { set(value); }
+
BNPARAMS_MU_INIT::BNPARAMS_MU_INIT(Tensor::Initializer value) { set(value); }
BNPARAMS_VAR_INIT::BNPARAMS_VAR_INIT(Tensor::Initializer value) { set(value); }
set(value);
}
+WeightRegularizer::WeightRegularizer(nntrainer::WeightRegularizer value) {
+ set(value);
+}
+
+bool WeightRegularizer::isValid(
+ const nntrainer::WeightRegularizer &value) const {
+ return value != nntrainer::WeightRegularizer::UNKNOWN;
+}
} // namespace props
static const std::vector<std::pair<char, std::string>>
#include <base_properties.h>
#include <tensor.h>
+#include <tensor_wrap_specs.h>
namespace nntrainer {
bool isValid(const unsigned int &v) const override;
};
+/**
+ * @brief WeightRegularizerConstant property, the scaling constant applied to
+ * the weight regularization term
+ *
+ */
+class WeightRegularizerConstant : public nntrainer::Property<float> {
+
+public:
+ /**
+ * @brief Construct a new WeightRegularizerConstant object
+ *
+ */
+ WeightRegularizerConstant(float value = 1.0f);
+ static constexpr const char *key =
+ "weight_regularizer_constant"; /**< unique key to access */
+ using prop_tag = float_prop_tag; /**< property type */
+
+ /**
+ * @brief check if given value is valid
+ *
+ * @param value value to check
+ * @return bool true if valid
+ */
+ bool isValid(const float &value) const override;
+};
+
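For reviewers less familiar with the property scheme this patch migrates to: each property wraps one value and funnels every mutation through isValid(). A minimal self-contained sketch of that pattern (FloatProperty is a hypothetical stand-in, not the real nntrainer::Property<float> from base_properties.h):

    #include <stdexcept>

    /// Simplified stand-in for nntrainer::Property<float>; illustration only.
    class FloatProperty {
    public:
      FloatProperty(float v = 1.0f) { set(v); }

      /// Mirrors WeightRegularizerConstant::isValid: negatives are rejected.
      bool isValid(const float &v) const { return v >= 0.0f; }

      void set(const float &v) {
        if (!isValid(v))
          throw std::invalid_argument("value is not a valid property value");
        value = v;
      }

      float get() const { return value; }

    private:
      float value;
    };

    int main() {
      FloatProperty c;   // defaults to 1.0f, like WeightRegularizerConstant
      c.set(0.001f);     // accepted: non-negative
      // c.set(-1.0f);   // would throw std::invalid_argument
      return c.get() == 0.001f ? 0 : 1;
    }
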
/******** below section is for enumerations ***************/
/**
* @brief Enumeration of activation function type
"he_normal", "he_uniform", "none"};
};
+/**
+ * @brief WeightInitializer Initialization Enumeration Information
+ *
+ */
+class WeightInitializer final : public EnumProperty<InitializerInfo> {
+public:
+ /**
+ * @brief Construct a WeightInitializer object
+ */
+ WeightInitializer(
+ Tensor::Initializer value = Tensor::Initializer::XAVIER_UNIFORM);
+ using prop_tag = enum_class_prop_tag;
+ static constexpr const char *key = "weight_initializer";
+};
+
+/**
+ * @brief BiasInitializer Initialization Enumeration Information
+ *
+ */
+class BiasInitializer final : public EnumProperty<InitializerInfo> {
+public:
+ /**
+ * @brief Construct a BiasInitializer object
+ */
+ BiasInitializer(Tensor::Initializer value = Tensor::Initializer::ZEROS);
+ using prop_tag = enum_class_prop_tag;
+ static constexpr const char *key = "bias_initializer";
+};
+
/**
* @brief BNPARAMS_MU_INIT Initialization Enumeration Information
*
static constexpr const char *key = "beta_initializer";
};
+/**
+ * @brief Enumeration of weight regularization type
+ */
+struct RegularizerInfo {
+ using Enum = nntrainer::WeightRegularizer;
+ static constexpr std::initializer_list<Enum> EnumList = {
+ Enum::L2NORM, Enum::NONE, Enum::UNKNOWN};
+
+ static constexpr const char *EnumStr[] = {"l2norm", "none", "unknown"};
+};
+
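The Info structs pair each enum value with the string users write in model files; string-to-enum conversion is just a positional scan over the two parallel arrays. A rough self-contained sketch of that mechanism, assuming a linear lookup (fromString here is hypothetical; the real conversion lives in the EnumProperty machinery, which this diff does not show):

    #include <cstring>
    #include <initializer_list>
    #include <stdexcept>

    enum class Regularizer { L2NORM, NONE, UNKNOWN };

    /// Same shape as RegularizerInfo above: parallel enum/string arrays.
    struct Info {
      using Enum = Regularizer;
      static constexpr std::initializer_list<Enum> EnumList = {
        Enum::L2NORM, Enum::NONE, Enum::UNKNOWN};
      static constexpr const char *EnumStr[] = {"l2norm", "none", "unknown"};
    };

    /// Linear scan mapping a string onto the enum at the same position.
    template <typename InfoT>
    typename InfoT::Enum fromString(const char *str) {
      unsigned int i = 0;
      for (auto e : InfoT::EnumList) {
        if (std::strcmp(InfoT::EnumStr[i++], str) == 0)
          return e;
      }
      throw std::invalid_argument("unrecognized enum string");
    }

    int main() {
      return fromString<Info>("l2norm") == Regularizer::L2NORM ? 0 : 1;
    }
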
+/**
+ * @brief WeightRegularizer Regularization Enumeration Information
+ *
+ */
+class WeightRegularizer final : public EnumProperty<RegularizerInfo> {
+public:
+ /**
+ * @brief Construct a WeightRegularizer object
+ */
+ WeightRegularizer(
+ nntrainer::WeightRegularizer value = nntrainer::WeightRegularizer::NONE);
+ using prop_tag = enum_class_prop_tag;
+ static constexpr const char *key = "weight_regularizer";
+
+ /**
+ * @brief WeightRegularizer validator
+ *
+ * @param value nntrainer::WeightRegularizer to validate
+ * @retval true if value is not nntrainer::WeightRegularizer::UNKNOWN
+ * @retval false if value is nntrainer::WeightRegularizer::UNKNOWN
+ */
+ bool isValid(const nntrainer::WeightRegularizer &value) const override;
+};
+
/**
* @brief Enumeration of pooling type
*/
const TensorDim &in_dim = context.getInputDimensions()[0];
+ auto &weight_regularizer =
+ std::get<props::WeightRegularizer>(*layer_impl_props);
+ auto &weight_regularizer_constant =
+ std::get<props::WeightRegularizerConstant>(*layer_impl_props);
+ auto &weight_initializer =
+ std::get<props::WeightInitializer>(*layer_impl_props);
+ auto &bias_initializer = std::get<props::BiasInitializer>(*layer_impl_props);
+
unsigned int filter_size = std::get<props::FilterSize>(conv_props);
auto &kernel_size =
std::get<std::array<props::KernelSize, CONV2D_DIM>>(conv_props);
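Each finalize() touched by this patch fetches its impl properties from the shared tuple by type rather than by index. A small sketch of why that works, assuming (as the props:: wrapper types guarantee) that every property is a distinct type, so std::get<T> is unambiguous:

    #include <tuple>

    // Hypothetical stand-ins for two of the props:: wrapper types.
    struct RegularizerConstant { float value = 1.0f; };
    struct Unit { unsigned int value = 0; };

    int main() {
      std::tuple<RegularizerConstant, Unit> layer_impl_props;
      // std::get by type returns the unique element of that type, so call
      // sites stay self-describing and reordering the tuple cannot break them.
      auto &c = std::get<RegularizerConstant>(layer_impl_props);
      c.value = 0.001f;
      return std::get<RegularizerConstant>(layer_impl_props).value == 0.001f
               ? 0 : 1;
    }
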
"Embedding layer takes only one for channel size");
}
+ auto &weight_regularizer =
+ std::get<props::WeightRegularizer>(*layer_impl_props);
+ auto &weight_regularizer_constant =
+ std::get<props::WeightRegularizerConstant>(*layer_impl_props);
+ auto &weight_initializer =
+ std::get<props::WeightInitializer>(*layer_impl_props);
+
unsigned int in_dim = std::get<props::InDim>(embedding_props);
unsigned int out_dim = std::get<props::OutDim>(embedding_props);
enum FCParams { weight, bias };
void FullyConnectedLayer::finalize(InitLayerContext &context) {
+ auto &weight_regularizer =
+ std::get<props::WeightRegularizer>(*layer_impl_props);
+ auto &weight_regularizer_constant =
+ std::get<props::WeightRegularizerConstant>(*layer_impl_props);
+ auto &weight_initializer =
+ std::get<props::WeightInitializer>(*layer_impl_props);
+ auto &bias_initializer = std::get<props::BiasInitializer>(*layer_impl_props);
+
auto unit = std::get<props::Unit>(fc_props).get();
if (context.getNumInputs() != 1) {
// - bias_h ( hidden bias )
//  : [1, 1, 1, unit (hidden_size) x NUM_GATE] -> z, r, g
void GRULayer::finalize(InitLayerContext &context) {
+ auto &weight_regularizer =
+ std::get<props::WeightRegularizer>(*layer_impl_props);
+ auto &weight_regularizer_constant =
+ std::get<props::WeightRegularizerConstant>(*layer_impl_props);
+ auto &weight_initializer =
+ std::get<props::WeightInitializer>(*layer_impl_props);
+ auto &bias_initializer = std::get<props::BiasInitializer>(*layer_impl_props);
+
auto unit = std::get<props::Unit>(gru_props).get();
auto &hidden_state_activation_type =
std::get<props::HiddenStateActivation>(gru_props);
namespace nntrainer {
LayerImpl::LayerImpl() :
- layer_impl_props(std::make_unique<std::tuple<>>()),
- weight_regularizer(WeightRegularizer::NONE),
- weight_regularizer_constant(1.0f),
- weight_initializer(Tensor::Initializer::XAVIER_UNIFORM),
- bias_initializer(Tensor::Initializer::ZEROS) {}
+ layer_impl_props(
+ std::make_unique<
+ std::tuple<props::WeightRegularizer, props::WeightRegularizerConstant,
+ props::WeightInitializer, props::BiasInitializer>>()) {}
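The old member initializer list is gone, but the defaults are not: they moved into the properties' constructor default arguments (NONE, 1.0f, XAVIER_UNIFORM, ZEROS above), so default-constructing the tuple reproduces the previous initial state. A tiny sketch of that idea with hypothetical property types:

    #include <tuple>

    // Each property carries its own default, so the owning class no longer
    // needs a member initializer list entry per property.
    struct RegConstant {
      float value;
      RegConstant(float v = 1.0f) : value(v) {}
    };
    struct Trainable {
      bool on;
      Trainable(bool b = true) : on(b) {}
    };

    int main() {
      std::tuple<RegConstant, Trainable> props; // fully initialized by defaults
      return std::get<RegConstant>(props).value == 1.0f ? 0 : 1;
    }
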
void LayerImpl::setProperty(const std::vector<std::string> &values) {
- loadProperties(values, *layer_impl_props);
-
- /// @todo: deprecate this in favor of loadProperties
- for (unsigned int i = 0; i < values.size(); ++i) {
- std::string key;
- std::string value;
- std::stringstream ss;
-
- if (getKeyValue(values[i], key, value) != ML_ERROR_NONE) {
- throw std::invalid_argument("Error parsing the property: " + values[i]);
- }
-
- if (value.empty()) {
- ss << "value is empty: key: " << key << ", value: " << value;
- throw std::invalid_argument(ss.str());
- }
-
- /// @note this calls derived setProperty if available
- setProperty(key, value);
- }
-}
-
-void LayerImpl::setProperty(const std::string &type_str,
- const std::string &value) {
- using PropertyType = nntrainer::Layer::PropertyType;
-
- int status = ML_ERROR_NONE;
- nntrainer::Layer::PropertyType type =
- static_cast<nntrainer::Layer::PropertyType>(parseLayerProperty(type_str));
-
- switch (type) {
- case PropertyType::weight_regularizer:
- if (!value.empty()) {
- weight_regularizer =
- (WeightRegularizer)parseType(value, TOKEN_WEIGHT_REGULARIZER);
- if (weight_regularizer == WeightRegularizer::UNKNOWN) {
- throw std::invalid_argument("[Layer] Unknown Weight decay");
- }
- }
- break;
- case PropertyType::weight_regularizer_constant:
- if (!value.empty()) {
- status = setFloat(weight_regularizer_constant, value);
- throw_status(status);
- }
- break;
- case PropertyType::weight_initializer:
- if (!value.empty()) {
- weight_initializer =
- (Tensor::Initializer)parseType(value, TOKEN_WEIGHT_INIT);
- }
- break;
- case PropertyType::bias_initializer:
- if (!value.empty()) {
- bias_initializer =
- (Tensor::Initializer)parseType(value, TOKEN_WEIGHT_INIT);
- }
- break;
- default:
- std::string msg =
- "[Layer] Unknown Layer Property Key for value, key: " + type_str +
- " value: " + value;
- throw exception::not_supported(msg);
- }
+ auto remain_props = loadProperties(values, *layer_impl_props);
+ NNTR_THROW_IF(!remain_props.empty(), std::invalid_argument)
+ << "[LayerImpl] Unknown Layer Properties count " +
+ std::to_string(remain_props.size());
}
void LayerImpl::exportTo(Exporter &exporter,
- const ExportMethods &method) const {}
+ const ExportMethods &method) const {
+ exporter.saveResult(*layer_impl_props, method, this);
+}
} // namespace nntrainer
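setProperty is now a thin wrapper over loadProperties, which tries every key against every property in the tuple and returns whatever nothing claimed. A simplified self-contained sketch of that contract (this loadProperties and its two toy properties are hypothetical; the real one in base_properties.h also performs type conversion and validation):

    #include <string>
    #include <tuple>
    #include <vector>

    struct UnitProp {
      static constexpr const char *key = "unit";
      std::string v;
    };
    struct NameProp {
      static constexpr const char *key = "name";
      std::string v;
    };

    /// Returns the "key=value" entries that no property in the tuple consumed.
    template <typename Tuple>
    std::vector<std::string>
    loadProperties(const std::vector<std::string> &values, Tuple &props) {
      std::vector<std::string> remainder;
      for (const auto &kv : values) {
        auto pos = kv.find('=');
        std::string key = kv.substr(0, pos);
        std::string val = pos == std::string::npos ? "" : kv.substr(pos + 1);
        bool consumed = false;
        std::apply(
          [&](auto &...prop) {
            auto try_one = [&](auto &p) {
              if (key == p.key) {
                p.v = val;
                consumed = true;
              }
            };
            (try_one(prop), ...);
          },
          props);
        if (!consumed)
          remainder.push_back(kv);
      }
      return remainder;
    }

    int main() {
      std::tuple<UnitProp, NameProp> props;
      auto left = loadProperties({"unit=5", "bogus=1"}, props);
      // The new LayerImpl::setProperty throws std::invalid_argument at this
      // point, since "bogus=1" was not consumed by any property.
      return left.size() == 1 && left.front() == "bogus=1" ? 0 : 1;
    }
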
const ExportMethods &method) const override;
protected:
- std::unique_ptr<std::tuple<>> layer_impl_props; /**< layer_impl_props */
-
- WeightRegularizer weight_regularizer; /**< weight regularizer */
- float weight_regularizer_constant; /**< weight regularizer constant */
- Tensor::Initializer weight_initializer; /**< initializer for the weights */
- Tensor::Initializer bias_initializer; /**< initializer for the bias */
-
- /**
- * @brief setProperty by type and value separated
- * @param[in] type property type to be passed
- * @param[in] value value to be passed
- * @exception exception::not_supported when property type is not valid for
- * the particular layer
- * @exception std::invalid_argument invalid argument
- */
- virtual void setProperty(const std::string &type, const std::string &value);
+ std::unique_ptr<
+ std::tuple<props::WeightRegularizer, props::WeightRegularizerConstant,
+ props::WeightInitializer, props::BiasInitializer>>
+ layer_impl_props; /**< layer_impl_props */
};
} // namespace nntrainer
// - bias_h ( hidden bias )
// : [1, 1, 1, unit (hidden_size) x NUM_GATE] -> f, g, i, o
void LSTMLayer::finalize(InitLayerContext &context) {
+ auto &weight_regularizer =
+ std::get<props::WeightRegularizer>(*layer_impl_props);
+ auto &weight_regularizer_constant =
+ std::get<props::WeightRegularizerConstant>(*layer_impl_props);
+ auto &weight_initializer =
+ std::get<props::WeightInitializer>(*layer_impl_props);
+ auto &bias_initializer = std::get<props::BiasInitializer>(*layer_impl_props);
+
auto unit = std::get<props::Unit>(lstm_props).get();
auto &hidden_state_activation_type =
std::get<props::HiddenStateActivation>(lstm_props);
epsilon(1e-3) {}
void RNNLayer::finalize(InitLayerContext &context) {
+ auto &weight_regularizer =
+ std::get<props::WeightRegularizer>(*layer_impl_props);
+ auto &weight_regularizer_constant =
+ std::get<props::WeightRegularizerConstant>(*layer_impl_props);
+ auto &weight_initializer =
+ std::get<props::WeightInitializer>(*layer_impl_props);
+ auto &bias_initializer = std::get<props::BiasInitializer>(*layer_impl_props);
+
auto unit = std::get<props::Unit>(rnn_props).get();
auto &hidden_state_activation_type =
std::get<props::HiddenStateActivation>(rnn_props);
auto result = e.getResult<nntrainer::ExportMethods::METHOD_STRINGVECTOR>();
auto pair1 = std::pair<std::string, std::string>("unit", "1");
- EXPECT_EQ(result->at(1), pair1);
+ for (unsigned int i = 0; i < result->size(); ++i) {
+ if (result->at(i).first == "unit") {
+ EXPECT_EQ(result->at(i), pair1);
+ }
+ }
}
{ /**< load from layer */