*/
std::unique_ptr<Layer> createLayer(const std::string &type,
const std::vector<std::string> &properties) {
- auto &ac = nntrainer::AppContext::Global();
- std::shared_ptr<nntrainer::Layer> nntr_layer =
- ac.createObject<nntrainer::Layer>(type, properties);
std::unique_ptr<nntrainer::LayerNode> layer =
- std::make_unique<nntrainer::LayerNode>(nntr_layer);
+ nntrainer::createLayerNode(type, properties);
return layer;
}
static std::unique_ptr<Layer>
createLoss(nntrainer::LossType type,
const std::vector<std::string> &properties) {
- std::shared_ptr<nntrainer::Layer> nntr_layer = nntrainer::createLoss(type);
std::unique_ptr<nntrainer::LayerNode> layer =
- std::make_unique<nntrainer::LayerNode>(nntr_layer);
-
- if (layer->setProperty(properties) != ML_ERROR_NONE)
- throw std::invalid_argument("Set properties failed for layer");
+ nntrainer::createLayerNode(nntrainer::createLoss(type), properties);
return layer;
}
#include <ini_wrapper.h>
#include <layer.h>
#include <layer_factory.h>
+#include <layer_node.h>
#include <nntrainer_error.h>
#include <nntrainer_log.h>
#include <node_exporter.h>
<< FUNC_TAG << "section type is invalid for section name: " << sec_name;
auto properties = section2properties(ini, sec_name);
- std::shared_ptr<Layer> nntr_layer =
- ac.createObject<Layer>(layer_type, properties);
-
- auto layer = std::make_unique<LayerNode>(nntr_layer);
+ std::shared_ptr<Layer> nntr_layer = ac.createObject<Layer>(layer_type);
if (nntr_layer->getDistribute()) {
ml_logd("This %s layer is going to distributed", sec_name.c_str());
std::dynamic_pointer_cast<TimeDistLayer>(dist_layer)
->setDistLayer(nntr_layer);
- layer = std::make_unique<LayerNode>(dist_layer);
+ nntr_layer = dist_layer;
}
+ auto layer = createLayerNode(nntr_layer, properties);
+
return layer;
}
auto properties = section2properties(ini, sec_name);
properties.push_back("modelfile=" + backbone_file);
- std::shared_ptr<Layer> nntr_layer = ac.createObject<Layer>(type, properties);
- auto layer = std::make_unique<LayerNode>(nntr_layer);
+ auto layer = createLayerNode(type, properties);
return layer;
}
std::vector<std::shared_ptr<GraphNode>> node_list = graph.getNodes();
for (unsigned int i = 0; i < num_nodes; ++i) {
- Layer &l = *LNODE(node_list[i])->getObject();
+ auto const &lnode = LNODE(node_list[i]);
+ Layer &l = *lnode->getObject();
ml_logd("layer name: %s", l.getName().c_str());
/** If a layer does not has input nodes, then it must have input dimension
}
// Flatten in TimeDistLayer is not supported.
- if (l.getFlatten() && l.getType() != TimeDistLayer::type) {
+ if (lnode->getFlatten() && l.getType() != TimeDistLayer::type) {
status = realizeFlattenType(l);
NN_RETURN_STATUS();
}
this->weight_regularizer = l->weight_regularizer;
this->weight_regularizer_constant = l->weight_regularizer_constant;
this->weight_initializer = l->weight_initializer;
- this->flatten = l->flatten;
this->trainable = l->trainable;
this->distribute = l->distribute;
}
setActivation((ActivationType)parseType(value, TOKEN_ACTI));
}
break;
- case PropertyType::flatten:
- if (!value.empty()) {
- status = setBoolean(flatten, value);
- throw_status(status);
- }
- break;
case PropertyType::weight_regularizer:
if (!value.empty()) {
weight_regularizer =
printIfValid(
out, PropertyType::activation,
static_cast<std::underlying_type<ActivationType>::type>(activation_type));
- printIfValid(out, PropertyType::flatten, flatten);
}
void Layer::printProperties(std::ostream &out) {
weight_regularizer_constant(weight_regularizer_constant_),
weight_initializer(weight_initializer_),
bias_initializer(bias_initializer_),
- flatten(flatten_),
trainable(trainable_),
distribute(distribute_) {
setNumInputs(1);
*/
virtual std::vector<Weight> getWeights() { return weights; }
- /**
- * @brief get if the output of this layer must be flatten
- * @retval flatten value
- */
- virtual bool getFlatten() { return flatten; }
-
/**
* @brief Set name of the layer
*/
*/
WeightInitializer bias_initializer;
- // TODO: remove this from here
- /**
- * @brief Output of this layer should be flattened
- */
- bool flatten;
-
/**
* @brief making this false will skip updating this layer variables
*/
bool trainable;
+ // TODO: remove this from here
/**
* @brief making this true will iterating along with time distribution
*/
*/
void setWeightInit(WeightInitializer wini) { weight_initializer = wini; }
- /**
- * @brief get if the output of this layer must be flatten
- * @retval flatten value
- */
- void setFlatten(bool flatten) { this->flatten = flatten; }
-
/**
* @brief Print layer related information. Do not override without clear
* reason. It is recommended to override printShapeInfo, printPropertiesMeta,
* @brief This is the layer node for network graph
*/
+#include <app_context.h>
#include <layer_factory.h>
#include <layer_node.h>
+#include <nntrainer_error.h>
+#include <nntrainer_log.h>
namespace nntrainer {
/**
* @brief Layer factory creator with constructor
*/
-std::unique_ptr<LayerNode> createLayerNode(const std::string &type) {
- return std::make_unique<LayerNode>(createLayer(type));
+std::unique_ptr<LayerNode>
+createLayerNode(const std::string &type,
+ const std::vector<std::string> &properties) {
+ auto &ac = nntrainer::AppContext::Global();
+ return createLayerNode(ac.createObject<nntrainer::Layer>(type), properties);
+}
+
+/**
+ * @brief LayerNode creator with an already constructed layer object
+ */
+std::unique_ptr<LayerNode>
+createLayerNode(std::shared_ptr<nntrainer::Layer> layer,
+ const std::vector<std::string> &properties) {
+ auto lnode = std::make_unique<LayerNode>(layer);
+ if (lnode->setProperty(properties) != ML_ERROR_NONE)
+ throw std::invalid_argument("Error setting layer properties.");
+
+ return lnode;
+}
+
+int LayerNode::setProperty(std::vector<std::string> properties) {
+ int status = ML_ERROR_NONE;
+
+ try {
+ properties = loadProperties(properties, props);
+ } catch (std::invalid_argument &e) {
+ ml_loge("parsing property failed, reason: %s", e.what());
+ return ML_ERROR_INVALID_PARAMETER;
+ }
+
+ /// @todo: deprecate this in favor of loadProperties
+ std::vector<std::string> remainder;
+ for (unsigned int i = 0; i < properties.size(); ++i) {
+ std::string key;
+ std::string value;
+
+ status = getKeyValue(properties[i], key, value);
+ NN_RETURN_STATUS();
+
+ unsigned int type = parseLayerProperty(key);
+
+ if (value.empty()) {
+ ml_logd("value is empty for layer: %s, key: %s, value: %s",
+ getName().c_str(), key.c_str(), value.c_str());
+ return ML_ERROR_INVALID_PARAMETER;
+ }
+
+ try {
+ /// @note this calls derived setProperty if available
+ setProperty(static_cast<nntrainer::Layer::PropertyType>(type), value);
+ } catch (...) {
+ remainder.push_back(properties[i]);
+ }
+ }
+
+ status = layer->setProperty(remainder);
+ return status;
+}
+
+void LayerNode::setProperty(const nntrainer::Layer::PropertyType type,
+ const std::string &value) {
+ int status = ML_ERROR_NONE;
+
+ switch (type) {
+ case nntrainer::Layer::PropertyType::flatten:
+ if (!value.empty()) {
+ status = setBoolean(flatten, value);
+ throw_status(status);
+ }
+ break;
+ default:
+ throw std::invalid_argument("Unknown property.");
+ }
}
std::ostream &operator<<(std::ostream &out, const LayerNode &l) {
* @author Parichay Kapoor <pk.kapoor@samsung.com>
* @bug No known bugs except for NYI items
* @brief This is the layer node for network graph
+ *
+ * @todo Add printPreset support
*/
#ifndef __LAYER_NODE_H__
* @details This function accepts vector of properties in the format -
* { std::string property_name=property_val, ...}
*/
- int setProperty(std::vector<std::string> properties) {
- return layer->setProperty(properties);
- }
+ int setProperty(std::vector<std::string> properties);
/**
* @brief set name of layer
*/
bool getTrainable() noexcept { return layer->getTrainable(); }
+ /**
+ * Support interfaces for the properties intercepted from the layer
+ */
+
+ /**
+ * @brief get if the output of this layer must be flattened
+ * @retval flatten value
+ */
+ bool getFlatten() { return flatten; }
+
#ifdef PROFILE
int event_key;
#endif
bool flatten; /**< flatten the output of this node */
ActivationType
activation_type; /**< activation applied to the output of this node */
+ bool distribute;
+
+ /**
+ * These properties are set for the layer by the user but are intercepted
+ * and used in the node which forms the basic element of the graph.
+ */
+ std::tuple<> props; /**< properties for the layer node */
+
+ /**
+ * @brief setProperty by PropertyType
+ * @note By passing empty string, this can validate if @a type is valid
+ * @param[in] type property type to be passed
+ * @param[in] value value to be passed, if empty string is passed, do nothing
+ * but throws error when @a type is invalid
+ * @exception exception::not_supported when property type is not valid for
+ * the particular layer
+ * @exception std::invalid_argument invalid argument
+ */
+ virtual void setProperty(const nntrainer::Layer::PropertyType type,
+ const std::string &value = "");
};
/**
* @brief LayerNode creator with constructor
+ *
+ * @param[in] type Type of the layer to be constructed
+ * @param[in] properties Properties of the layer
+ */
+std::unique_ptr<LayerNode>
+createLayerNode(const std::string &type,
+ const std::vector<std::string> &properties = {});
+
+/**
+ * @brief LayerNode creator with constructor
+ *
+ * @param[in] layer Already constructed layer object
+ * @param[in] properties Properties of the layer
*/
-std::unique_ptr<LayerNode> createLayerNode(const std::string &type);
+std::unique_ptr<LayerNode>
+createLayerNode(std::shared_ptr<nntrainer::Layer> layer,
+ const std::vector<std::string> &properties = {});
} // namespace nntrainer
#endif // __LAYER_NODE_H__
#include <app_context.h>
#include <nntrainer_error.h>
+/**
+ * @brief Test fixture for AppContext working-directory unittests
+ *
+ */
class nntrainerAppContextDirectory : public ::testing::Test {
protected:
std::invalid_argument);
}
+/**
+ * @brief Custom Optimizer for unittests
+ *
+ */
class CustomOptimizer : public nntrainer::Optimizer {
public:
/** Full custom optimizer example which overrides all functions */
int iteration) override {}
};
+/**
+ * @brief Minimal custom Optimizer for unittests
+ *
+ */
class CustomOptimizer2 : public nntrainer::Optimizer {
public:
/** Minimal custom optimizer example which define only necessary functions */
int iteration) override {}
};
-/// @todo solidify the api signature
+/**
+ * @brief Custom Layer for unittests
+ *
+ * @todo solidify the api signature
+ */
class CustomLayer : public nntrainer::Layer {
public:
static const std::string type;
void setTrainable(bool train) override {}
- bool getFlatten() override { return true; }
-
std::string getName() noexcept override { return ""; }
const std::string getType() const override { return CustomLayer::type; }