+++ /dev/null
-// SPDX-License-Identifier: Apache-2.0
-/**
- * Copyright (C) 2020 Jihoon Lee <jhoon.it.lee@samsung.com>
- *
- * @file centroid_knn.cpp
- * @date 09 Jan 2021
- * @brief This file contains the simple nearest neighbor layer
- * @see https://github.com/nnstreamer/nntrainer
- * @author Jihoon Lee <jhoon.it.lee@samsung.com>
- * @bug No known bugs except for NYI items
- *
- * @details This layer takes centroid and calculate l2 distance
- */
-
-#include <iostream>
-#include <limits>
-#include <regex>
-#include <sstream>
-
-#include <nntrainer_error.h>
-#include <nntrainer_log.h>
-#include <tensor.h>
-#include <weight.h>
-
-#include <centroid_knn.h>
-#include <simpleshot_utils.h>
-
-namespace simpleshot {
-namespace layers {
-
-static constexpr size_t SINGLE_INOUT_IDX = 0;
-
-enum KNNParams { map, num_samples };
-
-void CentroidKNN::setProperty(const std::vector<std::string> &values) {
- util::Entry e;
-
- for (auto &val : values) {
- e = util::getKeyValue(val);
-
- if (e.key == "num_class") {
- num_class = std::stoul(e.value);
- if (num_class == 0) {
- throw std::invalid_argument("[CentroidKNN] num_class cannot be zero");
- }
- } else {
- std::string msg = "[CentroidKNN] Unknown Layer Properties count " + val;
- throw nntrainer::exception::not_supported(msg);
- }
- }
-}
-
-void CentroidKNN::finalize(nntrainer::InitLayerContext &context) {
- auto const &input_dim = context.getInputDimensions()[0];
- if (input_dim.channel() != 1 || input_dim.height() != 1) {
- ml_logw("centroid nearest layer is designed for flattend feature for now, "
- "please check");
- }
-
- if (num_class == 0) {
- throw std::invalid_argument(
- "Error: num_class must be a positive non-zero integer");
- }
-
- auto output_dim = nntrainer::TensorDim({num_class});
- context.setOutputDimensions({output_dim});
-
- /// weight is a distance map that contains centroid of features of each class
- auto map_dim = nntrainer::TensorDim({num_class, input_dim.getFeatureLen()});
-
- /// samples seen for the current run to calculate the centroid
- auto samples_seen = nntrainer::TensorDim({num_class});
-
- weight_idx[KNNParams::map] =
- context.requestWeight(map_dim, nntrainer::Tensor::Initializer::ZEROS,
- nntrainer::WeightRegularizer::NONE, 1.0f,
- context.getName() + ":map", false);
-
- weight_idx[KNNParams::num_samples] =
- context.requestWeight(samples_seen, nntrainer::Tensor::Initializer::ZEROS,
- nntrainer::WeightRegularizer::NONE, 1.0f,
- context.getName() + ":num_samples", false);
-}
-
-void CentroidKNN::forwarding(nntrainer::RunLayerContext &context,
- bool training) {
- auto &hidden_ = context.getOutput(SINGLE_INOUT_IDX);
- auto &input_ = context.getInput(SINGLE_INOUT_IDX);
- auto &label = context.getLabel(SINGLE_INOUT_IDX);
- const auto &input_dim = input_.getDim();
-
- if (training && label.empty()) {
- throw std::invalid_argument(
- "[CentroidKNN] forwarding requires label feeded");
- }
-
- auto &map = context.getWeight(weight_idx[KNNParams::map]);
- auto &num_samples = context.getWeight(weight_idx[KNNParams::num_samples]);
- auto feature_len = input_dim.getFeatureLen();
-
- auto get_distance = [](const nntrainer::Tensor &a,
- const nntrainer::Tensor &b) {
- return -a.subtract(b).l2norm();
- };
-
- if (training) {
- auto ans = label.argmax();
-
- for (unsigned int b = 0; b < input_.batch(); ++b) {
- auto saved_feature =
- map.getSharedDataTensor({feature_len}, ans[b] * feature_len);
-
- // nntrainer::Tensor::Map(map.getData(), {feature_len},
- // ans[b] * feature_len);
- auto num_sample = num_samples.getValue(0, 0, 0, ans[b]);
- auto current_feature = input_.getBatchSlice(b, 1);
- saved_feature.multiply_i(num_sample);
- saved_feature.add_i(current_feature);
- saved_feature.divide_i(num_sample + 1);
- num_samples.setValue(0, 0, 0, ans[b], num_sample + 1);
- }
- }
-
- for (unsigned int i = 0; i < num_class; ++i) {
- auto saved_feature =
- map.getSharedDataTensor({feature_len}, i * feature_len);
- // nntrainer::Tensor::Map(map.getData(), {feature_len}, i * feature_len);
-
- auto num_sample = num_samples.getValue(0, 0, 0, i);
-
- for (unsigned int b = 0; b < input_.batch(); ++b) {
- auto current_feature = input_.getBatchSlice(b, 1);
-
- if (num_sample == 0) {
- hidden_.setValue(b, 0, 0, i, std::numeric_limits<float>::min());
- } else {
- hidden_.setValue(b, 0, 0, i,
- get_distance(current_feature, saved_feature));
- }
- }
- }
-}
-
-void CentroidKNN::calcDerivative(nntrainer::RunLayerContext &context) {
- throw std::invalid_argument("[CentroidKNN::calcDerivative] This Layer "
- "does not support backward propagation");
-}
-
-} // namespace layers
-} // namespace simpleshot
+++ /dev/null
-// SPDX-License-Identifier: Apache-2.0
-/**
- * Copyright (C) 2020 Jihoon Lee <jhoon.it.lee@samsung.com>
- *
- * @file centroid_knn.h
- * @date 09 Jan 2021
- * @details This file contains the simple nearest neighbor layer, this layer
- * takes centroid and calculate l2 distance
- * @see https://github.com/nnstreamer/nntrainer
- * @author Jihoon Lee <jhoon.it.lee@samsung.com>
- * @bug No known bugs except for NYI items
- *
- */
-
-#ifndef __NEAREST_NEIGHBORS_H__
-#define __NEAREST_NEIGHBORS_H__
-#include <string>
-
-#include <layer_context.h>
-#include <layer_devel.h>
-#include <node_exporter.h>
-
-namespace simpleshot {
-namespace layers {
-
-/**
- * @brief Centroid KNN layer which takes centroid and do k-nearest neighbor
- * classification
- */
-class CentroidKNN : public nntrainer::Layer {
-public:
- /**
- * @brief Construct a new NearestNeighbors Layer object that does elementwise
- * subtraction from mean feature vector
- */
- CentroidKNN() : Layer(), num_class(0), weight_idx({0}) {}
-
- /**
- * @brief Move constructor.
- * @param[in] CentroidKNN &&
- */
- CentroidKNN(CentroidKNN &&rhs) noexcept = default;
-
- /**
- * @brief Move assignment operator.
- * @parma[in] rhs CentroidKNN to be moved.
- */
- CentroidKNN &operator=(CentroidKNN &&rhs) = default;
-
- /**
- * @brief Destroy the NearestNeighbors Layer object
- *
- */
- ~CentroidKNN() = default;
-
- /**
- * @copydoc Layer::requireLabel()
- */
- bool requireLabel() const override { return true; }
-
- /**
- * @copydoc Layer::finalize(InitLayerContext &context)
- */
- void finalize(nntrainer::InitLayerContext &context) override;
-
- /**
- * @copydoc Layer::forwarding(RunLayerContext &context, bool training)
- */
- void forwarding(nntrainer::RunLayerContext &context, bool training) override;
-
- /**
- * @copydoc Layer::calcDerivative(RunLayerContext &context)
- */
- void calcDerivative(nntrainer::RunLayerContext &context) override;
-
- /**
- * @copydoc bool supportBackwarding() const
- */
- bool supportBackwarding() const override { return false; };
-
- /**
- * @copydoc Layer::exportTo(Exporter &exporter, ExportMethods method)
- */
- void exportTo(nntrainer::Exporter &exporter,
- const nntrainer::ExportMethods &method) const override {}
-
- /**
- * @copydoc Layer::getType()
- */
- const std::string getType() const override { return CentroidKNN::type; };
-
- /**
- * @copydoc Layer::setProperty(const std::vector<std::string> &values)
- */
- void setProperty(const std::vector<std::string> &values) override;
-
- inline static const std::string type = "centroid_knn";
-
-private:
- unsigned int num_class;
- std::array<unsigned int, 2> weight_idx; /**< indices of the weights */
-};
-} // namespace layers
-} // namespace simpleshot
-
-#endif /** __NEAREST_NEIGHBORS_H__ */
simpleshot_sources = [
'simpleshot_utils.cpp',
- 'layers/centroid_knn.cpp',
'layers/centering.cpp',
'layers/l2norm.cpp',
]
#include <nntrainer-api-common.h>
#include "layers/centering.h"
-#include "layers/centroid_knn.h"
#include "layers/l2norm.h"
namespace simpleshot {
nntrainer::createLayer<simpleshot::layers::CenteringLayer>);
app_context.registerFactory(
nntrainer::createLayer<simpleshot::layers::L2NormLayer>);
- app_context.registerFactory(
- nntrainer::createLayer<simpleshot::layers::CentroidKNN>);
} catch (std::exception &e) {
std::cerr << "registering factory failed: " << e.what();
return 1;
simpleshot::layers::L2NormLayer::type, {}, 0, false);
auto semantic_activation_centroid_knn = LayerSemanticsParamType(
- nntrainer::createLayer<simpleshot::layers::CentroidKNN>,
- simpleshot::layers::CentroidKNN::type, {"num_class=1"}, 0, false);
+ nntrainer::createLayer<nntrainer::CentroidKNN>, nntrainer::CentroidKNN::type,
+ {"num_class=1"}, 0, false);
auto semantic_activation_centering = LayerSemanticsParamType(
nntrainer::createLayer<simpleshot::layers::CenteringLayer>,
return createLayer(LayerType::LAYER_TIME_DIST, properties);
}
+/**
+ * @brief Helper function to create Centroid KNN Layer
+ *
+ * @param properties properties of the layer (e.g. "num_class=10")
+ * @return std::unique_ptr<Layer> created layer
+ */
+inline std::unique_ptr<Layer>
+CentroidKNN(const std::vector<std::string> &properties = {}) {
+  return createLayer(LayerType::LAYER_CENTROID_KNN, properties);
+}
+
/**
* @brief Helper function to create activation layer
*/
$(NNTRAINER_ROOT)/nntrainer/layers/time_dist.cpp \
$(NNTRAINER_ROOT)/nntrainer/layers/dropout.cpp \
$(NNTRAINER_ROOT)/nntrainer/layers/permute_layer.cpp \
+ $(NNTRAINER_ROOT)/nntrainer/layers/centroid_knn.cpp \
$(NNTRAINER_ROOT)/nntrainer/layers/acti_func.cpp \
$(NNTRAINER_ROOT)/nntrainer/layers/split_layer.cpp \
$(NNTRAINER_ROOT)/nntrainer/layers/common_properties.cpp \
#include <activation_layer.h>
#include <addition_layer.h>
#include <bn_layer.h>
+#include <centroid_knn.h>
#include <concat_layer.h>
#include <conv2d_layer.h>
#include <cross_entropy_sigmoid_loss_layer.h>
LayerType::LAYER_MULTIOUT);
ac.registerFactory(nntrainer::createLayer<ConcatLayer>, ConcatLayer::type,
LayerType::LAYER_CONCAT);
- ac.registerFactory(nntrainer::createLayer<PreprocessFlipLayer>,
- PreprocessFlipLayer::type,
- LayerType::LAYER_PREPROCESS_FLIP);
- ac.registerFactory(nntrainer::createLayer<PreprocessTranslateLayer>,
- PreprocessTranslateLayer::type,
- LayerType::LAYER_PREPROCESS_TRANSLATE);
+
#ifdef ENABLE_NNSTREAMER_BACKBONE
ac.registerFactory(nntrainer::createLayer<NNStreamerLayer>,
NNStreamerLayer::type,
LayerType::LAYER_SPLIT);
ac.registerFactory(nntrainer::createLayer<PermuteLayer>, PermuteLayer::type,
LayerType::LAYER_PERMUTE);
+ ac.registerFactory(nntrainer::createLayer<CentroidKNN>, CentroidKNN::type,
+ LayerType::LAYER_CENTROID_KNN);
+
+  /** preprocess layers */
+ ac.registerFactory(nntrainer::createLayer<PreprocessFlipLayer>,
+ PreprocessFlipLayer::type,
+ LayerType::LAYER_PREPROCESS_FLIP);
+ ac.registerFactory(nntrainer::createLayer<PreprocessTranslateLayer>,
+ PreprocessTranslateLayer::type,
+ LayerType::LAYER_PREPROCESS_TRANSLATE);
/** register losses */
ac.registerFactory(nntrainer::createLayer<MSELossLayer>, MSELossLayer::type,
--- /dev/null
+// SPDX-License-Identifier: Apache-2.0
+/**
+ * Copyright (C) 2020 Jihoon Lee <jhoon.it.lee@samsung.com>
+ *
+ * @file centroid_knn.cpp
+ * @date 09 Jan 2021
+ * @brief This file contains the simple nearest neighbor layer
+ * @see https://github.com/nnstreamer/nntrainer
+ * @author Jihoon Lee <jhoon.it.lee@samsung.com>
+ * @bug No known bugs except for NYI items
+ *
+ * @details This layer takes centroid and calculate l2 distance
+ */
+
+#include <iostream>
+#include <limits>
+#include <regex>
+#include <sstream>
+
+#include <centroid_knn.h>
+#include <nntrainer_error.h>
+#include <nntrainer_log.h>
+#include <node_exporter.h>
+#include <tensor.h>
+#include <weight.h>
+
+namespace nntrainer {
+
+static constexpr size_t SINGLE_INOUT_IDX = 0;
+
+enum KNNParams { map, num_samples };
+
+/// default-constructs the property tuple; actual property values are
+/// supplied later through setProperty()
+CentroidKNN::CentroidKNN() :
+  Layer(),
+  centroid_knn_props(props::NumClass()),
+  weight_idx({0}) {}
+
+/// out-of-line definition matching the out-of-line constructor
+CentroidKNN::~CentroidKNN() {}
+
+void CentroidKNN::setProperty(const std::vector<std::string> &values) {
+  /// delegate parsing to the property tuple; anything left unparsed is an
+  /// unknown key for this layer and is rejected
+  auto left = loadProperties(values, centroid_knn_props);
+  NNTR_THROW_IF(!left.empty(), std::invalid_argument)
+    << "[Centroid KNN] there are unparsed properties " << left.front();
+}
+
+void CentroidKNN::finalize(nntrainer::InitLayerContext &context) {
+  auto const &input_dim = context.getInputDimensions()[0];
+  if (input_dim.channel() != 1 || input_dim.height() != 1) {
+    ml_logw("centroid nearest layer is designed for flattend feature for now, "
+            "please check");
+  }
+
+  auto num_class = std::get<props::NumClass>(centroid_knn_props);
+
+  /// the layer outputs one score per class
+  auto output_dim = nntrainer::TensorDim({num_class});
+  context.setOutputDimensions({output_dim});
+
+  /// weight is a distance map that contains centroid of features of each class
+  auto map_dim = nntrainer::TensorDim({num_class, input_dim.getFeatureLen()});
+
+  /// samples seen for the current run to calculate the centroid
+  auto samples_seen = nntrainer::TensorDim({num_class});
+
+  /// NOTE(review): the trailing `false` presumably marks these weights as
+  /// non-trainable (they are updated manually in forwarding) — confirm
+  /// against the requestWeight signature
+  weight_idx[KNNParams::map] =
+    context.requestWeight(map_dim, nntrainer::Tensor::Initializer::ZEROS,
+                          nntrainer::WeightRegularizer::NONE, 1.0f,
+                          context.getName() + ":map", false);
+
+  weight_idx[KNNParams::num_samples] =
+    context.requestWeight(samples_seen, nntrainer::Tensor::Initializer::ZEROS,
+                          nntrainer::WeightRegularizer::NONE, 1.0f,
+                          context.getName() + ":num_samples", false);
+}
+
+void CentroidKNN::forwarding(nntrainer::RunLayerContext &context,
+                             bool training) {
+  auto &hidden_ = context.getOutput(SINGLE_INOUT_IDX);
+  auto &input_ = context.getInput(SINGLE_INOUT_IDX);
+  auto &label = context.getLabel(SINGLE_INOUT_IDX);
+  const auto &input_dim = input_.getDim();
+
+  if (training && label.empty()) {
+    throw std::invalid_argument(
+      "[CentroidKNN] forwarding requires label feeded");
+  }
+
+  auto &map = context.getWeight(weight_idx[KNNParams::map]);
+  auto &num_samples = context.getWeight(weight_idx[KNNParams::num_samples]);
+  auto feature_len = input_dim.getFeatureLen();
+
+  /// score is the negated l2 distance, so it is always <= 0 and a larger
+  /// value means closer to the centroid
+  auto get_distance = [](const nntrainer::Tensor &a,
+                         const nntrainer::Tensor &b) {
+    return -a.subtract(b).l2norm();
+  };
+
+  if (training) {
+    auto ans = label.argmax();
+
+    for (unsigned int b = 0; b < input_.batch(); ++b) {
+      auto saved_feature =
+        map.getSharedDataTensor({feature_len}, ans[b] * feature_len);
+
+      /// running-mean update of the class centroid:
+      /// centroid = (centroid * n + feature) / (n + 1)
+      auto num_sample = num_samples.getValue(0, 0, 0, ans[b]);
+      auto current_feature = input_.getBatchSlice(b, 1);
+      saved_feature.multiply_i(num_sample);
+      saved_feature.add_i(current_feature);
+      saved_feature.divide_i(num_sample + 1);
+      num_samples.setValue(0, 0, 0, ans[b], num_sample + 1);
+    }
+  }
+
+  /// hoisted out of the loop: the property lookup is loop-invariant
+  auto &num_class = std::get<props::NumClass>(centroid_knn_props);
+
+  for (unsigned int i = 0; i < num_class; ++i) {
+    auto saved_feature =
+      map.getSharedDataTensor({feature_len}, i * feature_len);
+
+    auto num_sample = num_samples.getValue(0, 0, 0, i);
+
+    for (unsigned int b = 0; b < input_.batch(); ++b) {
+      auto current_feature = input_.getBatchSlice(b, 1);
+
+      if (num_sample == 0) {
+        /// a class with no samples must never win the argmax: use lowest().
+        /// (std::numeric_limits<float>::min() is the smallest *positive*
+        /// float, which would outrank every real score since scores are <= 0)
+        hidden_.setValue(b, 0, 0, i, std::numeric_limits<float>::lowest());
+      } else {
+        hidden_.setValue(b, 0, 0, i,
+                         get_distance(current_feature, saved_feature));
+      }
+    }
+  }
+}
+
+/// inference-style layer: supportBackwarding() is false, so reaching this
+/// is a caller error
+void CentroidKNN::calcDerivative(nntrainer::RunLayerContext &context) {
+  throw std::invalid_argument("[CentroidKNN::calcDerivative] This Layer "
+                              "does not support backward propagation");
+}
+} // namespace nntrainer
--- /dev/null
+// SPDX-License-Identifier: Apache-2.0
+/**
+ * Copyright (C) 2020 Jihoon Lee <jhoon.it.lee@samsung.com>
+ *
+ * @file centroid_knn.h
+ * @date 09 Jan 2021
+ * @details This file contains the simple nearest neighbor layer, this layer
+ * takes centroid and calculate l2 distance
+ * @see https://github.com/nnstreamer/nntrainer
+ * @author Jihoon Lee <jhoon.it.lee@samsung.com>
+ * @bug No known bugs except for NYI items
+ *
+ */
+
+#ifndef __CENTROID_KNN_H__
+#define __CENTROID_KNN_H__
+#include <string>
+
+#include <common_properties.h>
+#include <layer_context.h>
+#include <layer_devel.h>
+
+namespace nntrainer {
+
+/**
+ * @brief Centroid KNN layer which takes centroid and do k-nearest neighbor
+ * classification
+ */
+class CentroidKNN : public Layer {
+public:
+  /**
+   * @brief Construct a new NearestNeighbors Layer object that does elementwise
+   * subtraction from mean feature vector
+   */
+  CentroidKNN();
+
+  /**
+   * @brief Move constructor.
+   * @param[in] CentroidKNN &&
+   */
+  CentroidKNN(CentroidKNN &&rhs) noexcept = default;
+
+  /**
+   * @brief Move assignment operator.
+   * @param[in] rhs CentroidKNN to be moved.
+   */
+  CentroidKNN &operator=(CentroidKNN &&rhs) noexcept = default;
+
+  /**
+   * @brief Destroy the NearestNeighbors Layer object
+   *
+   */
+  ~CentroidKNN();
+
+  /**
+   * @copydoc Layer::requireLabel()
+   */
+  bool requireLabel() const override { return true; }
+
+  /**
+   * @copydoc Layer::finalize(InitLayerContext &context)
+   */
+  void finalize(nntrainer::InitLayerContext &context) override;
+
+  /**
+   * @copydoc Layer::forwarding(RunLayerContext &context, bool training)
+   */
+  void forwarding(nntrainer::RunLayerContext &context, bool training) override;
+
+  /**
+   * @copydoc Layer::calcDerivative(RunLayerContext &context)
+   */
+  void calcDerivative(nntrainer::RunLayerContext &context) override;
+
+  /**
+   * @copydoc bool supportBackwarding() const
+   */
+  bool supportBackwarding() const override { return false; };
+
+  /**
+   * @copydoc Layer::exportTo(Exporter &exporter, ExportMethods method)
+   */
+  void exportTo(nntrainer::Exporter &exporter,
+                const nntrainer::ExportMethods &method) const override {}
+
+  /**
+   * @copydoc Layer::getType()
+   */
+  const std::string getType() const override { return CentroidKNN::type; };
+
+  /**
+   * @copydoc Layer::setProperty(const std::vector<std::string> &values)
+   */
+  void setProperty(const std::vector<std::string> &values) override;
+
+  inline static const std::string type = "centroid_knn";
+
+private:
+  std::tuple<props::NumClass> centroid_knn_props; /**< layer properties */
+  std::array<unsigned int, 2> weight_idx; /**< indices of the weights */
+};
+} // namespace nntrainer
+
+#endif /** __CENTROID_KNN_H__ */
std::ifstream::pos_type FilePath::file_size() { return cached_pos_size; }
+bool NumClass::isValid(const unsigned int &v) const { return v > 0; }
+
ConnectionSpec::ConnectionSpec(const std::vector<props::Name> &layer_ids_,
const std::string &op_type_) :
op_type(op_type_),
private:
std::ifstream::pos_type cached_pos_size;
};
+
+/**
+ * @brief Number of classes property; valid values are positive, non-zero
+ * integers (isValid rejects 0)
+ * @todo deprecate this
+ */
+class NumClass final : public nntrainer::Property<unsigned int> {
+public:
+  using prop_tag = uint_prop_tag; /**< property type */
+  static constexpr const char *key = "num_class"; /**< unique key to access */
+
+  /**
+   * @copydoc nntrainer::Property<unsigned int>::isValid(const unsigned int &v);
+   */
+  bool isValid(const unsigned int &v) const override;
+};
} // namespace props
} // namespace nntrainer
'layer_impl.cpp',
'gru.cpp',
'dropout.cpp',
+ 'centroid_knn.cpp',
'layer_context.cpp'
]