namespace simpleshot {
namespace layers {
-/// @todo inherit this to API
-// class CenteringLayer : public ml::train::Layer {
+/**
+ * @brief centering layer that centers the feature
+ *
+ */
class CenteringLayer : public nntrainer::Layer {
public:
/**
/**
* @brief get boolean if the function is trainable
*
- * @return true trainable
- * @return false not trainable
+ * @retval true trainable
+ * @retval false not trainable
*/
bool getTrainable() noexcept override { return false; }
/**
* @brief get boolean if the function is trainable
*
- * @return true trainable
- * @return false not trainable
+ * @retval true trainable
+ * @retval false not trainable
*/
bool getTrainable() noexcept override { return false; }
namespace simpleshot {
namespace layers {
-/// @todo inherit this to API
-// class L2NormLayer : public ml::train::Layer {
+/**
+ * @brief This layer l2 normalize the feature
+ *
+ */
class L2NormLayer : public nntrainer::Layer {
public:
/**
/**
* @brief get boolean if the function is trainable
*
- * @return true trainable
- * @return false not trainable
+ * @retval true trainable
+ * @retval false not trainable
*/
bool getTrainable() noexcept override { return false; }
/**
* @brief query if the appcontext has working directory set
*
- * @return true working path base is set
- * @return false working path base is not set
+ * @retval true working path base is set
+ * @retval false working path base is not set
*/
bool hasWorkingDirectory() { return !working_path_base.empty(); }
* @brief check if graph is supported
*
* @param backbone_name name of the backbone
- * @return true if the file extension is supported to make a graph
- * @return false if the file extension is not supported
+ * @retval true if the file extension is supported to make a graph
+ * @retval false if the file extension is not supported
*/
static bool graphSupported(const std::string &backbone_name) {
return endswith(backbone_name, ".ini");
/**
* @brief check if this op node is model input
*
- * @return true if op node is model input
- * @return false if op node is not model input
+ * @retval true if op node is model input
+ * @retval false if op node is not model input
*/
bool isInputNode() const { return is_input; }
/**
* @brief check if this op node is model output
*
- * @return true if op node is model output
- * @return false if op node is not model output
+ * @retval true if op node is model output
+ * @retval false if op node is not model output
*/
bool isOutputNode() const { return is_output; }
*
* @param lhs iterator lhs
* @param rhs iterator rhs
- * @return true if match
- * @return false if mismatch
+ * @retval true if match
+ * @retval false if mismatch
*/
friend bool operator==(GraphNodeIterator const &lhs,
GraphNodeIterator const &rhs) {
*
* @param lhs iterator lhs
* @param rhs iterator rhs
- * @return true if mismatch
- * @return false if match
+ * @retval true if mismatch
+ * @retval false if match
*/
friend bool operator!=(GraphNodeIterator const &lhs,
GraphNodeIterator const &rhs) {
* @brief name validator
*
* @param v string to validate
- * @return true if it contains alphanumeric and/or '-', '_', '/'
- * @return false if it is empty or contains non-valid character
+ * @retval true if it contains alphanumeric and/or '-', '_', '/'
+ * @retval false if it is empty or contains non-valid character
*/
bool isValid(const std::string &v) const override;
};
* @todo If we get to have a use case for requireLabel(true) but in the
* middle of a graph, change the semantics
*
- * @return true requires a label when training
- * @return false does not require a label
+ * @retval true requires a label when training
+ * @retval false does not require a label
*/
virtual bool requireLabel() const { return false; }
* @brief check if tensor dims are equal
*
* @param rhs other side to compare
- * @return true equal
- * @return false not equal
+ * @retval true equal
+ * @retval false not equal
*/
bool operator==(const TensorDim &rhs) const;
* @brief check if tensor dims are not equal
*
* @param rhs other side to compare
- * @return true not equal
- * @return false equal
+ * @retval true not equal
+ * @retval false equal
*/
bool operator!=(const TensorDim &rhs) const { return !(*this == rhs); }
/**
* @brief check if given tensor dimension is empty
*
- * @return true empty
- * @return false not empty
+ * @retval true empty
+ * @retval false not empty
*/
bool isEmpty() const { return len == 0; }
/**
* @brief check if tensor is dynamic
*
- * @return true any of dyn_dim_flag is set
- * @return false none of dyn_dim_flag is set
+ * @retval true any of dyn_dim_flag is set
+ * @retval false none of dyn_dim_flag is set
*/
bool is_dynamic() const { return dyn_dim_flag.any(); }
/**
* @brief Get if the Var_Grad is trainable
*
- * @return true if trainable
- * @return false is not trainable
+ * @retval true if trainable
+ * @retval false if not trainable
*/
bool getTrainable() const { return trainable; }
* @brief check if given value is valid
*
* @param v value to check
- * @return true if valid
- * @return false if not valid
+ * @retval true if valid
+ * @retval false if not valid
*/
virtual bool isValid(const T &v) const { return true; }
* @brief operator==
*
* @param rhs right side to compare
- * @return true if equal
- * @return false if not equal
+ * @retval true if equal
+ * @retval false if not equal
*/
bool operator==(const Property<T> &rhs) const { return value == rhs.value; }
* @brief operator==
*
* @param rhs right side to compare
- * @return true if equal
- * @return false if not equal
+ * @retval true if equal
+ * @retval false if not equal
*/
bool operator==(const Property<std::string> &rhs) const {
return value == rhs.value;
* @brief check if given value is valid
*
* @param v value to check
- * @return true if valid
- * @return false if not valid
+ * @retval true if valid
+ * @retval false if not valid
*/
virtual bool isValid(const std::string &v) const { return true; }
* @brief equal operator between ini section
*
* @param rhs ini section to compare
- * @return true two inisections are equal
- * @return false two ini sections are not equal
+ * @retval true two inisections are equal
+ * @retval false two ini sections are not equal
*/
bool operator==(const IniSection &rhs) const {
return section_name == rhs.section_name && entry == rhs.entry;
* @brief not equal operator between ini section
*
* @param rhs ini section to compare
- * @return true two inisections are not equal
- * @return false two inisections are equal
+ * @retval true two inisections are not equal
+ * @retval false two inisections are equal
*/
bool operator!=(const IniSection &rhs) const { return !operator==(rhs); }
* @brief ini operator== to check if IniWrapper is equal
*
* @param rhs IniWrapper to compare
- * @return true true if ini is equal (deeply)
- * @return false false if ini is not equal
+ * @retval true if ini is equal (deeply)
+ * @retval false if ini is not equal
*/
bool operator==(const IniWrapper &rhs) const {
return name == rhs.name && sections == rhs.sections;
* @brief ini operator!= to check if IniWrapper is not equal
*
* @param rhs IniWrapper to compare
- * @return true if not equal
- * @return false if equal
+ * @retval true if not equal
+ * @retval false if equal
*/
bool operator!=(const IniWrapper &rhs) const { return !operator==(rhs); }
TOKEN_UNKNOWN
} InputType;
+/**
+ * @brief convert integer based status to throw
+ *
+ * @param status status to throw
+ */
inline void throw_status(int status) {
switch (status) {
case ML_ERROR_NONE:
*
* @param a first string to compare
* @param b second string to compare
- * @return true if string is case-insensitive equal
- * @return false if string is case-insensitive not equal
+ * @retval true if string is case-insensitive equal
+ * @retval false if string is case-insensitive not equal
*/
bool istrequal(const std::string &a, const std::string &b);
} /* namespace nntrainer */
/**
* @brief get the seed
- * @retVal seed
+ * @return seed
*/
unsigned int getSeed();
*/
float sqrtFloat(float x);
+/**
+ * @brief sqrt function for double type
+ *
+ * @param x value to take sqrt
+ * @return double return value
+ */
double sqrtDouble(double x);
/**
*
* @param target string to cehck
* @param suffix check if string ends with @a suffix
- * @return true @a target ends with @a suffix
- * @return false @a target does not ends with @a suffix
+ * @retval true @a target ends with @a suffix
+ * @retval false @a target does not end with @a suffix
*/
bool endswith(const std::string &target, const std::string &suffix);
*
* @param lhs compiled(later, finalized) graph to be compared
* @param rhs compiled(later, finalized) graph to be compared
- * @return true graph is equal
- * @return false graph is not equal
+ * @retval true graph is equal
+ * @retval false graph is not equal
*/
static void graphEqual(const nntrainer::GraphRepresentation &lhs,
const nntrainer::GraphRepresentation &rhs) {