simpleshot_sources = [
'simpleshot_utils.cpp',
- 'layers/centroid_knn.cpp',
'layers/centering.cpp',
'layers/l2norm.cpp',
]
#include <nntrainer-api-common.h>
#include "layers/centering.h"
-#include "layers/centroid_knn.h"
#include "layers/l2norm.h"
namespace simpleshot {
nntrainer::createLayer<simpleshot::layers::CenteringLayer>);
app_context.registerFactory(
nntrainer::createLayer<simpleshot::layers::L2NormLayer>);
- app_context.registerFactory(
- nntrainer::createLayer<simpleshot::layers::CentroidKNN>);
} catch (std::exception &e) {
std::cerr << "registering factory failed: " << e.what();
return 1;
simpleshot::layers::L2NormLayer::type, {}, 0, false);
auto semantic_activation_centroid_knn = LayerSemanticsParamType(
- nntrainer::createLayer<simpleshot::layers::CentroidKNN>,
- simpleshot::layers::CentroidKNN::type, {"num_class=1"}, 0, false);
+ nntrainer::createLayer<nntrainer::CentroidKNN>, nntrainer::CentroidKNN::type,
+ {"num_class=1"}, 0, false);
auto semantic_activation_centering = LayerSemanticsParamType(
nntrainer::createLayer<simpleshot::layers::CenteringLayer>,
}
/**
+ * @brief Helper function to create Centroid KNN Layer
+ */
+inline std::unique_ptr<Layer>
+CentroidKNN(const std::vector<std::string> &properties = {}) {
+ return createLayer(LayerType::LAYER_CENTROID_KNN, properties);
+}
+
+/**
* @brief Helper function to create activation layer
*/
inline std::unique_ptr<Layer>
$(NNTRAINER_ROOT)/nntrainer/layers/time_dist.cpp \
$(NNTRAINER_ROOT)/nntrainer/layers/dropout.cpp \
$(NNTRAINER_ROOT)/nntrainer/layers/permute_layer.cpp \
+ $(NNTRAINER_ROOT)/nntrainer/layers/centroid_knn.cpp \
$(NNTRAINER_ROOT)/nntrainer/layers/acti_func.cpp \
$(NNTRAINER_ROOT)/nntrainer/layers/split_layer.cpp \
$(NNTRAINER_ROOT)/nntrainer/layers/common_properties.cpp \
#include <activation_layer.h>
#include <addition_layer.h>
#include <bn_layer.h>
+#include <centroid_knn.h>
#include <concat_layer.h>
#include <conv2d_layer.h>
#include <cross_entropy_sigmoid_loss_layer.h>
LayerType::LAYER_MULTIOUT);
ac.registerFactory(nntrainer::createLayer<ConcatLayer>, ConcatLayer::type,
LayerType::LAYER_CONCAT);
- ac.registerFactory(nntrainer::createLayer<PreprocessFlipLayer>,
- PreprocessFlipLayer::type,
- LayerType::LAYER_PREPROCESS_FLIP);
- ac.registerFactory(nntrainer::createLayer<PreprocessTranslateLayer>,
- PreprocessTranslateLayer::type,
- LayerType::LAYER_PREPROCESS_TRANSLATE);
+
#ifdef ENABLE_NNSTREAMER_BACKBONE
ac.registerFactory(nntrainer::createLayer<NNStreamerLayer>,
NNStreamerLayer::type,
LayerType::LAYER_SPLIT);
ac.registerFactory(nntrainer::createLayer<PermuteLayer>, PermuteLayer::type,
LayerType::LAYER_PERMUTE);
+ ac.registerFactory(nntrainer::createLayer<CentroidKNN>, CentroidKNN::type,
+ LayerType::LAYER_CENTROID_KNN);
+
+  /** preprocess layers */
+ ac.registerFactory(nntrainer::createLayer<PreprocessFlipLayer>,
+ PreprocessFlipLayer::type,
+ LayerType::LAYER_PREPROCESS_FLIP);
+ ac.registerFactory(nntrainer::createLayer<PreprocessTranslateLayer>,
+ PreprocessTranslateLayer::type,
+ LayerType::LAYER_PREPROCESS_TRANSLATE);
/** register losses */
ac.registerFactory(nntrainer::createLayer<MSELossLayer>, MSELossLayer::type,
#include <regex>
#include <sstream>
+#include <centroid_knn.h>
#include <nntrainer_error.h>
#include <nntrainer_log.h>
+#include <node_exporter.h>
#include <tensor.h>
#include <weight.h>
-#include <centroid_knn.h>
-#include <simpleshot_utils.h>
-
-namespace simpleshot {
-namespace layers {
+namespace nntrainer {
static constexpr size_t SINGLE_INOUT_IDX = 0;
enum KNNParams { map, num_samples };
-void CentroidKNN::setProperty(const std::vector<std::string> &values) {
- util::Entry e;
+CentroidKNN::CentroidKNN() :
+ Layer(),
+ centroid_knn_props(props::NumClass()),
+ weight_idx({0}) {}
- for (auto &val : values) {
- e = util::getKeyValue(val);
+CentroidKNN::~CentroidKNN() {}
- if (e.key == "num_class") {
- num_class = std::stoul(e.value);
- if (num_class == 0) {
- throw std::invalid_argument("[CentroidKNN] num_class cannot be zero");
- }
- } else {
- std::string msg = "[CentroidKNN] Unknown Layer Properties count " + val;
- throw nntrainer::exception::not_supported(msg);
- }
- }
+void CentroidKNN::setProperty(const std::vector<std::string> &values) {
+ auto left = loadProperties(values, centroid_knn_props);
+ NNTR_THROW_IF(!left.empty(), std::invalid_argument)
+ << "[Centroid KNN] there are unparsed properties " << left.front();
}
void CentroidKNN::finalize(nntrainer::InitLayerContext &context) {
"please check");
}
- if (num_class == 0) {
- throw std::invalid_argument(
- "Error: num_class must be a positive non-zero integer");
- }
+ auto num_class = std::get<props::NumClass>(centroid_knn_props);
auto output_dim = nntrainer::TensorDim({num_class});
context.setOutputDimensions({output_dim});
}
}
- for (unsigned int i = 0; i < num_class; ++i) {
+ for (unsigned int i = 0; i < std::get<props::NumClass>(centroid_knn_props);
+ ++i) {
auto saved_feature =
map.getSharedDataTensor({feature_len}, i * feature_len);
// nntrainer::Tensor::Map(map.getData(), {feature_len}, i * feature_len);
throw std::invalid_argument("[CentroidKNN::calcDerivative] This Layer "
"does not support backward propagation");
}
-
-} // namespace layers
-} // namespace simpleshot
+} // namespace nntrainer
*
*/
-#ifndef __NEAREST_NEIGHBORS_H__
-#define __NEAREST_NEIGHBORS_H__
+#ifndef __CENTROID_KNN_H__
+#define __CENTROID_KNN_H__
#include <string>
+#include <common_properties.h>
#include <layer_context.h>
#include <layer_devel.h>
-#include <node_exporter.h>
-namespace simpleshot {
-namespace layers {
+namespace nntrainer {
/**
 * @brief Centroid KNN layer which takes centroids and does k-nearest neighbor
 * classification
 */
-class CentroidKNN : public nntrainer::Layer {
+class CentroidKNN : public Layer {
public:
/**
 * @brief Construct a new CentroidKNN Layer object that classifies an input
 * feature by its nearest class centroid
 */
- CentroidKNN() : Layer(), num_class(0), weight_idx({0}) {}
+ CentroidKNN();
/**
* @brief Move constructor.
* @brief Move assignment operator.
* @param[in] rhs CentroidKNN to be moved.
*/
- CentroidKNN &operator=(CentroidKNN &&rhs) = default;
+ CentroidKNN &operator=(CentroidKNN &&rhs) noexcept = default;
/**
 * @brief Destroy the CentroidKNN Layer object
 *
 */
- ~CentroidKNN() = default;
+ ~CentroidKNN();
/**
* @copydoc Layer::requireLabel()
inline static const std::string type = "centroid_knn";
private:
- unsigned int num_class;
+ std::tuple<props::NumClass> centroid_knn_props;
std::array<unsigned int, 2> weight_idx; /**< indices of the weights */
};
-} // namespace layers
-} // namespace simpleshot
+} // namespace nntrainer
-#endif /** __NEAREST_NEIGHBORS_H__ */
+#endif /** __CENTROID_KNN_H__ */
std::ifstream::pos_type FilePath::file_size() { return cached_pos_size; }
+bool NumClass::isValid(const unsigned int &v) const { return v > 0; }
+
ConnectionSpec::ConnectionSpec(const std::vector<props::Name> &layer_ids_,
const std::string &op_type_) :
op_type(op_type_),
private:
std::ifstream::pos_type cached_pos_size;
};
+
+/**
+ * @brief Number of classes
+ * @todo deprecate this
+ */
+class NumClass final : public nntrainer::Property<unsigned int> {
+public:
+ using prop_tag = uint_prop_tag; /**< property type */
+ static constexpr const char *key = "num_class"; /**< unique key to access */
+
+ /**
+ * @copydoc nntrainer::Property<unsigned int>::isValid(const unsigned int &v);
+ */
+ bool isValid(const unsigned int &v) const override;
+};
} // namespace props
} // namespace nntrainer
'layer_impl.cpp',
'gru.cpp',
'dropout.cpp',
+ 'centroid_knn.cpp',
'layer_context.cpp'
]