WEIGHT_UNKNOWN
} WeightIniType;
/**
 * @brief Print Options when printing layer info
 * @note  deliberately a plain (unscoped) enum rather than enum class:
 *        the values are OR-ed together into an `unsigned int` bit-mask
 *        that is passed to Layer::print(out, flags).
 */
typedef enum {
  // clang-format off
  PRINT_INST_INFO = (1 << 0), /**< Option to print type & instance address info */
  PRINT_SHAPE_INFO = (1 << 1), /**< Option to print shape information, invalid before initiation*/
  PRINT_PROP = (1 << 2), /**< Option to print properties */
  PRINT_PROP_META = (1 << 3), /**< Option to print properties that describe meta info
                                   e.g) layer activation type for non-activation layer. */
  PRINT_WEIGHTS = (1 << 4), /**< Option to print weights */
  PRINT_METRIC = (1 << 5) /**< Option to print metrics (currently loss only) */
  // clang-format on
} LayerPrintOption;
+
/**
* @class Layer Base class for layers
* @brief Base class for all layers
unknown = 18
};
+ virtual void print(std::ostream &out, unsigned int flags = 0);
+
protected:
/**
* @brief Name of the layer (works as the identifier)
* @brief Ensure that layer has a name
*/
void ensureName();
+
+ /**
+ * @brief check if @a type is valid and print if prop is valid to @a out
+ */
+ template <typename T>
+ void printIfValid(std::ostream &out, const PropertyType type, T target);
+
+ /**
+ * @brief anchor point to override if PRINT_SHAPE_INFO is enabled for
+ * Layer::print()
+ */
+ virtual void printShapeInfo(std::ostream &out);
+
+ /**
+ * @brief anchor point to override if PRINT_PROP_META is enabled for
+ * Layer::print()
+ */
+ virtual void printPropertiesMeta(std::ostream &out);
+
+ /**
+ * @brief anchor point to override if PRINT_PROP is enabled for Layer::print()
+ */
+ virtual void printProperties(std::ostream &out);
+
+ /**
+ * @brief anchor point to override if PRINT_METRIC is enabled for
+ * Layer::print()
+ */
+ virtual void printMetric(std::ostream &out);
};
+
+/**
+ * @brief Overriding output stream for layers and it's derived class
+ */
+template <typename T, typename std::enable_if_t<
+ std::is_base_of<Layer, T>::value, T> * = nullptr>
+std::ostream &operator<<(std::ostream &out, T const &l) {
+ unsigned int option = nntrainer::LayerPrintOption::PRINT_INST_INFO |
+ nntrainer::LayerPrintOption::PRINT_SHAPE_INFO |
+ nntrainer::LayerPrintOption::PRINT_PROP |
+ nntrainer::LayerPrintOption::PRINT_PROP_META;
+ l.print(out, option);
+ return out;
+}
+
} // namespace nntrainer
#endif /* __cplusplus */
*/
unsigned int parseLayerProperty(std::string property);
+/**
+ * @brief Unparse Layer property to string
+ * @param[in] type property type
+ * @retval string representation of the type
+ */
+std::string propToStr(const unsigned int type);
+
/**
* @brief Parsing Configuration Token
* @param[in] ll string to be parsed
int getValues(int n_str, std::string str, int *value);
/**
 * @brief print instance info as <Type at (address)>
 * @param[in] out output stream to write to
 * @param[in] t pointer to the instance to describe
 * @note enabled only for pointer types; the printed type name is the
 *       implementation-defined result of typeid(*t).name()
 */
template <typename T,
          typename std::enable_if_t<std::is_pointer<T>::value, T> * = nullptr>
void printInstance(std::ostream &out, const T &t) {
  out << '<' << typeid(*t).name();
  out << " at " << t << '>' << std::endl;
}
+
} /* namespace nntrainer */
#endif /* __cplusplus */
return ML_ERROR_NONE;
}
/**
 * @brief Print "<property name>: <target>" to @a out, but only when the
 *        property @a type is applicable to this layer.
 *
 * Applicability is probed by calling setProperty(type) with no value;
 * layers that reject the property throw std::invalid_argument, in which
 * case nothing is printed. NOTE(review): presumably the value-less
 * setProperty overload has no side effects — confirm against its definition.
 */
template <typename T>
void Layer::printIfValid(std::ostream &out, const PropertyType type,
                         const T target) {
  try {
    // probe only: throws std::invalid_argument for unsupported properties
    setProperty(type);
  } catch (std::invalid_argument &e) {
    return; // property not applicable to this layer type; print nothing
  }

  out << propToStr(static_cast<unsigned int>(type)) << ": " << target
      << std::endl;
}
+
/**
 * @brief Default PRINT_SHAPE_INFO hook: dump input / inner / output dims.
 * @note no explicit separator is emitted between the three entries;
 *       presumably TensorDim's operator<< ends each dump with a newline —
 *       TODO confirm, otherwise the labels run together.
 */
void Layer::printShapeInfo(std::ostream &out) {
  out << "input " << input_dim << "inner " << dim << "output " << output_dim;
}
+
/**
 * @brief Default PRINT_PROP_META hook: properties that describe meta info.
 *        Each entry is printed only if valid for the concrete layer type
 *        (see printIfValid).
 */
void Layer::printPropertiesMeta(std::ostream &out) {
  printIfValid(out, PropertyType::activation, activation_type);
  printIfValid(out, PropertyType::flatten, flatten);
}
+
/**
 * @brief Default PRINT_PROP hook: trainable flag plus layer-generic
 *        properties, each printed only when valid for this layer.
 */
void Layer::printProperties(std::ostream &out) {
  out << "Trainable: " << trainable << std::endl;
  printIfValid(out, PropertyType::bias_init_zero, bias_init_zero);
  // weight_decay.type is an enum; cast so it prints as its integer value
  printIfValid(out, PropertyType::weight_decay,
               static_cast<int>(weight_decay.type));
  printIfValid(out, PropertyType::weight_decay_lambda, weight_decay.lambda);
}
+
/**
 * @brief Default PRINT_METRIC hook: currently the regularization loss only.
 * @note loss is compared against 0, so an unset/zero loss prints nothing;
 *       NOTE(review): floating-point `> 0` — confirm loss is exactly 0.0f
 *       before any forward pass so this guard behaves as intended.
 */
void Layer::printMetric(std::ostream &out) {
  if (loss > 0) {
    out << "Weight regularization loss: " << loss;
  }
}
+
+void Layer::print(std::ostream &out, unsigned int flags) {
+ if (flags & PRINT_INST_INFO) {
+ std::cout << "======instance info: " << std::endl;
+ printInstance(out, this);
+
+ out << "Layer Type: " << type << std::endl;
+ }
+
+ if (flags & PRINT_SHAPE_INFO) {
+ std::cout << "======shape information: " << std::endl;
+ printShapeInfo(out);
+ }
+
+ if (flags & PRINT_PROP_META) {
+ std::cout << "======meta properties: " << std::endl;
+ printPropertiesMeta(out);
+ }
+
+ if (flags & PRINT_PROP) {
+ std::cout << "======properties: " << std::endl;
+ printProperties(out);
+ }
+
+ if (flags & PRINT_WEIGHTS) {
+ std::cout << "======weights: " << std::endl;
+ for (unsigned int i = 0; i < param_size; ++i) {
+ out << '[' << paramsAt(i).name << ']' << std::endl;
+ out << paramsAt(i).weight;
+ }
+ }
+
+ if (flags & PRINT_METRIC) {
+ std::cout << "======metrics: " << std::endl;
+ printMetric(out);
+ }
+};
+
} /* namespace nntrainer */
return ret;
}
/**
 * @brief Layer Properties
 * input_shape = 0,
 * bias_init_zero = 1,
 * normalization = 2,
 * standardization = 3,
 * activation = 4,
 * epsilon = 5
 * weight_decay = 6
 * weight_decay_lambda = 7
 * unit = 8
 * weight_ini = 9
 * filter = 10
 * kernel_size = 11
 * stride = 12
 * padding = 13
 * pooling_size = 14
 * pooling = 15
 * flatten = 16
 * name = 17
 *
 * InputLayer has 0, 1, 2, 3 properties.
 * FullyConnectedLayer has 1, 4, 6, 7, 8, 9 properties.
 * Conv2DLayer has 0, 1, 4, 6, 7, 9, 10, 11, 12, 13 properties.
 * Pooling2DLayer has 12, 13, 14, 15 properties.
 * BatchNormalizationLayer has 0, 1, 5, 6, 7 properties.
 */
// INVARIANT: index i must equal the Layer::PropertyType value it names;
// 19 entries = properties 0..17 above plus the trailing "unknown" (18).
// Shared by parseLayerProperty (string -> index) and propToStr (index -> string).
static std::array<std::string, 19> property_string = {
  "input_shape", "bias_init_zero", "normalization", "standardization",
  "activation", "epsilon", "weight_decay", "weight_decay_lambda",
  "unit", "weight_ini", "filter", "kernel_size",
  "stride", "padding", "pooling_size", "pooling",
  "flatten", "name", "unknown"};
+
unsigned int parseLayerProperty(std::string property) {
unsigned int i;
- /**
- * @brief Layer Properties
- * input_shape = 0,
- * bias_init_zero = 1,
- * normalization = 2,
- * standardization = 3,
- * activation = 4,
- * epsilon = 5
- * weight_decay = 6
- * weight_decay_lambda = 7
- * unit = 8
- * weight_ini = 9
- * filter = 10
- * kernel_size = 11
- * stride = 12
- * padding = 13
- * pooling_size = 14
- * pooling = 15
- * flatten = 16
- * name = 17
- *
- * InputLayer has 0, 1, 2, 3 properties.
- * FullyConnectedLayer has 1, 4, 6, 7, 8, 9 properties.
- * Conv2DLayer has 0, 1, 4, 6, 7, 9, 10, 11, 12, 13 properties.
- * Pooling2DLayer has 12, 13, 14, 15 properties.
- * BatchNormalizationLayer has 0, 1, 5, 6, 7 properties.
- */
- std::array<std::string, 19> property_string = {
- "input_shape", "bias_init_zero", "normalization", "standardization",
- "activation", "epsilon", "weight_decay", "weight_decay_lambda",
- "unit", "weight_ini", "filter", "kernel_size",
- "stride", "padding", "pooling_size", "pooling",
- "flatten", "name", "unknown"};
-
for (i = 0; i < property_string.size(); i++) {
unsigned int size = (property_string[i].size() > property.size())
? property_string[i].size()
return (unsigned int)Layer::PropertyType::unknown;
}
+std::string propToStr(unsigned int type) { return property_string[type]; }
+
unsigned int parseOptProperty(std::string property) {
unsigned int i;
#include <assert.h>
#include <cstring>
+#include <iomanip>
#include <nntrainer_error.h>
#include <nntrainer_log.h>
#include <parse_util.h>
Tensor Tensor::apply(std::function<Tensor(Tensor)> f) const { return f(*this); }
void Tensor::print(std::ostream &out) const {
- out << "<Tensor Object at " << this << ">" << std::endl;
+ printInstance(out, this);
const float *data = getData();
unsigned int len = length();
for (unsigned int l = 0; l < dim.channel(); l++) {
for (unsigned int i = 0; i < dim.height(); i++) {
for (unsigned int j = 0; j < dim.width(); j++) {
- out << this->getValue(k, l, i, j) << " ";
+ out << std::setw(10) << this->getValue(k, l, i, j) << " ";
}
out << std::endl;
}
}
};
/**
 * @brief Smoke test: Layer::print with every option enabled must run
 *        without crashing. Output goes to std::cerr for manual inspection;
 *        no assertions are made on the printed content.
 */
TEST_F(nntrainer_Conv2DLayer, print_01_p) {
  setProperty("filter=3");
  reinitialize();
  unsigned int option = nntrainer::LayerPrintOption::PRINT_INST_INFO |
                        nntrainer::LayerPrintOption::PRINT_SHAPE_INFO |
                        nntrainer::LayerPrintOption::PRINT_PROP |
                        nntrainer::LayerPrintOption::PRINT_PROP_META |
                        nntrainer::LayerPrintOption::PRINT_WEIGHTS |
                        nntrainer::LayerPrintOption::PRINT_METRIC;
  layer.print(std::cerr, option);
}
+
/**
* @brief Convolution 2D Layer
*/
TEST(nntrainer_Tensor, print_small_size) {
nntrainer::Tensor target = constant(1.0, 3, 1, 2, 3);
+ std::cerr << target;
std::stringstream ss, expected;
ss << target;
- expected << "<Tensor Object at " << &target << ">\n"
+
+ expected << '<' << typeid(target).name() << " at " << &target << ">\n"
<< "Shape: 3:1:2:3\n"
- "1 1 1 \n"
- "1 1 1 \n"
- "\n"
- "-------\n"
- "1 1 1 \n"
- "1 1 1 \n"
- "\n"
- "-------\n"
- "1 1 1 \n"
- "1 1 1 \n"
- "\n"
- "-------\n";
+ << " 1 1 1 \n"
+ << " 1 1 1 \n"
+ << "\n"
+ << "-------\n"
+ << " 1 1 1 \n"
+ << " 1 1 1 \n"
+ << "\n"
+ << "-------\n"
+ << " 1 1 1 \n"
+ << " 1 1 1 \n"
+ << "\n"
+ << "-------\n";
EXPECT_EQ(ss.str(), expected.str());
}
std::stringstream ss, expected;
- expected << "<Tensor Object at " << &target << ">\n"
+ expected << '<' << typeid(target).name() << " at " << &target << ">\n"
<< "Shape: 3:10:10:10\n"
<< "[1.2 1.2 1.2 ... 1.2 1.2 1.2]\n";
ss << target;