From: Jihoon Lee Date: Thu, 10 Jun 2021 04:42:09 +0000 (+0900) Subject: [Trivial] Change return -> retval X-Git-Tag: accepted/tizen/unified/20210829.234903~294 X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=dac38ff46afa367cb8a6e19674b0d5031e36e158;p=platform%2Fcore%2Fml%2Fnntrainer.git [Trivial] Change return -> retval When stating a return value in doxygen, it is recommended to use @retval; this patch applies that recommendation. **Self evaluation:** 1. Build test: [ ]Passed [ ]Failed [X]Skipped 2. Run test: [ ]Passed [ ]Failed [X]Skipped Signed-off-by: Jihoon Lee --- diff --git a/Applications/SimpleShot/layers/centering.h b/Applications/SimpleShot/layers/centering.h index 404f272..d277bd6 100644 --- a/Applications/SimpleShot/layers/centering.h +++ b/Applications/SimpleShot/layers/centering.h @@ -25,8 +25,10 @@ namespace simpleshot { namespace layers { -/// @todo inherit this to API -// class CenteringLayer : public ml::train::Layer { +/** + * @brief centering layer that centers the feature + * + */ class CenteringLayer : public nntrainer::Layer { public: /** @@ -98,8 +100,8 @@ public: /** * @brief get boolean if the function is trainable * - * @return true trainable - * @return false not trainable + * @retval true trainable + * @retval false not trainable */ bool getTrainable() noexcept override { return false; } diff --git a/Applications/SimpleShot/layers/centroid_knn.h b/Applications/SimpleShot/layers/centroid_knn.h index 5080ec6..9ab5faf 100644 --- a/Applications/SimpleShot/layers/centroid_knn.h +++ b/Applications/SimpleShot/layers/centroid_knn.h @@ -99,8 +99,8 @@ public: /** * @brief get boolean if the function is trainable * - * @return true trainable - * @return false not trainable + * @retval true trainable + * @retval false not trainable */ bool getTrainable() noexcept override { return false; } diff --git a/Applications/SimpleShot/layers/l2norm.h b/Applications/SimpleShot/layers/l2norm.h index 1a19d2c..94ddc7b 100644 --- 
a/Applications/SimpleShot/layers/l2norm.h +++ b/Applications/SimpleShot/layers/l2norm.h @@ -25,8 +25,10 @@ namespace simpleshot { namespace layers { -/// @todo inherit this to API -// class L2NormLayer : public ml::train::Layer { +/** + * @brief This layer l2-normalizes the feature + * + */ class L2NormLayer : public nntrainer::Layer { public: /** @@ -82,8 +84,8 @@ public: /** * @brief get boolean if the function is trainable * - * @return true trainable - * @return false not trainable + * @retval true trainable + * @retval false not trainable */ bool getTrainable() noexcept override { return false; } diff --git a/nntrainer/app_context.h b/nntrainer/app_context.h index ff60f6b..d0aa098 100644 --- a/nntrainer/app_context.h +++ b/nntrainer/app_context.h @@ -95,8 +95,8 @@ public: /** * @brief query if the appcontext has working directory set * - * @return true working path base is set - * @return false working path base is not set + * @retval true working path base is set + * @retval false working path base is not set */ bool hasWorkingDirectory() { return !working_path_base.empty(); } diff --git a/nntrainer/compiler/ini_interpreter.cpp b/nntrainer/compiler/ini_interpreter.cpp index 0c37db0..1ad418b 100644 --- a/nntrainer/compiler/ini_interpreter.cpp +++ b/nntrainer/compiler/ini_interpreter.cpp @@ -173,8 +173,8 @@ section2layer(dictionary *ini, const std::string &sec_name, * @brief check if graph is supported * * @param backbone_name name of the backbone - * @return true if the file extension is supported to make a graph - * @return false if the file extension is not supported + * @retval true if the file extension is supported to make a graph + * @retval false if the file extension is not supported */ static bool graphSupported(const std::string &backbone_name) { return endswith(backbone_name, ".ini"); diff --git a/nntrainer/compiler/tflite_opnode.h b/nntrainer/compiler/tflite_opnode.h index 0939c9a..2f3eb0f 100644 --- a/nntrainer/compiler/tflite_opnode.h +++ 
b/nntrainer/compiler/tflite_opnode.h @@ -140,16 +140,16 @@ public: /** * @brief check if this op node is model input * - * @return true if op node is model input - * @return false if op node is not model input + * @retval true if op node is model input + * @retval false if op node is not model input */ bool isInputNode() const { return is_input; } /** * @brief check if this op node is model output * - * @return true if op node is model output - * @return false if op node is not model output + * @retval true if op node is model output + * @retval false if op node is not model output */ bool isOutputNode() const { return is_output; } diff --git a/nntrainer/graph/graph_node.h b/nntrainer/graph/graph_node.h index 2701be8..7813daf 100644 --- a/nntrainer/graph/graph_node.h +++ b/nntrainer/graph/graph_node.h @@ -136,8 +136,8 @@ public: * * @param lhs iterator lhs * @param rhs iterator rhs - * @return true if match - * @return false if mismatch + * @retval true if match + * @retval false if mismatch */ friend bool operator==(GraphNodeIterator const &lhs, GraphNodeIterator const &rhs) { @@ -149,8 +149,8 @@ public: * * @param lhs iterator lhs * @param rhs iterator rhs - * @return true if mismatch - * @return false if match + * @retval true if mismatch + * @retval false if match */ friend bool operator!=(GraphNodeIterator const &lhs, GraphNodeIterator const &rhs) { diff --git a/nntrainer/layers/common_properties.h b/nntrainer/layers/common_properties.h index 9146c52..50704c9 100644 --- a/nntrainer/layers/common_properties.h +++ b/nntrainer/layers/common_properties.h @@ -36,8 +36,8 @@ public: * @brief name validator * * @param v string to validate - * @return true if it contains alphanumeric and/or '-', '_', '/' - * @return false if it is empty or contains non-valid character + * @retval true if it contains alphanumeric and/or '-', '_', '/' + * @retval false if it is empty or contains non-valid character */ bool isValid(const std::string &v) const override; }; diff --git 
a/nntrainer/layers/layer_internal.h b/nntrainer/layers/layer_internal.h index 4a6929a..ff46391 100644 --- a/nntrainer/layers/layer_internal.h +++ b/nntrainer/layers/layer_internal.h @@ -132,8 +132,8 @@ public: * @todo If we get to have a use case for requireLabel(true) but in the * middle of a graph, change the semantics * - * @return true requires a label when training - * @return false does not require a label + * @retval true requires a label when training + * @retval false does not require a label */ virtual bool requireLabel() const { return false; } diff --git a/nntrainer/tensor/tensor_dim.h b/nntrainer/tensor/tensor_dim.h index f2f70fb..7dfd42b 100644 --- a/nntrainer/tensor/tensor_dim.h +++ b/nntrainer/tensor/tensor_dim.h @@ -303,8 +303,8 @@ public: * @brief check if tensor dims are equal * * @param rhs other side to compare - * @return true equal - * @return false not equal + * @retval true equal + * @retval false not equal */ bool operator==(const TensorDim &rhs) const; @@ -312,16 +312,16 @@ public: * @brief check if tensor dims are not equal * * @param rhs other side to compare - * @return true not equal - * @return false equal + * @retval true not equal + * @retval false equal */ bool operator!=(const TensorDim &rhs) const { return !(*this == rhs); } /** * @brief check if given tensor dimension is empty * - * @return true empty - * @return false not empty + * @retval true empty + * @retval false not empty */ bool isEmpty() const { return len == 0; } @@ -374,8 +374,8 @@ public: /** * @brief check if tensor is dynamic * - * @return true any of dyn_dim_flag is set - * @return false none of dyn_dim_flag is set + * @retval true any of dyn_dim_flag is set + * @retval false none of dyn_dim_flag is set */ bool is_dynamic() const { return dyn_dim_flag.any(); } diff --git a/nntrainer/tensor/var_grad.h b/nntrainer/tensor/var_grad.h index 08174f8..a5c515f 100644 --- a/nntrainer/tensor/var_grad.h +++ b/nntrainer/tensor/var_grad.h @@ -120,8 +120,8 @@ public: /** * 
@brief Get if the Var_Grad is trainable * - * @return true if trainable - * @return false is not trainable + * @retval true if trainable + * @retval false if not trainable */ bool getTrainable() const { return trainable; } diff --git a/nntrainer/utils/base_properties.h b/nntrainer/utils/base_properties.h index 69b191e..43cda6a 100644 --- a/nntrainer/utils/base_properties.h +++ b/nntrainer/utils/base_properties.h @@ -172,8 +172,8 @@ public: * @brief check if given value is valid * * @param v value to check - * @return true if valid - * @return false if not valid + * @retval true if valid + * @retval false if not valid */ virtual bool isValid(const T &v) const { return true; } @@ -181,8 +181,8 @@ public: * @brief operator== * * @param rhs right side to compare - * @return true if equal - * @return false if not equal + * @retval true if equal + * @retval false if not equal */ bool operator==(const Property &rhs) const { return value == rhs.value; } @@ -284,8 +284,8 @@ public: * @brief operator== * * @param rhs right side to compare - * @return true if equal - * @return false if not equal + * @retval true if equal + * @retval false if not equal */ bool operator==(const Property &rhs) const { return value == rhs.value; @@ -295,8 +295,8 @@ public: * @brief check if given value is valid * * @param v value to check - * @return true if valid - * @return false if not valid + * @retval true if valid + * @retval false if not valid */ virtual bool isValid(const std::string &v) const { return true; } diff --git a/nntrainer/utils/ini_wrapper.h b/nntrainer/utils/ini_wrapper.h index 43ca4c5..49017a8 100644 --- a/nntrainer/utils/ini_wrapper.h +++ b/nntrainer/utils/ini_wrapper.h @@ -125,8 +125,8 @@ public: * @brief equal operator between ini section * * @param rhs ini section to compare - * @return true two inisections are equal - * @return false two ini sections are not equal + * @retval true two ini sections are equal + * @retval false two ini sections are not equal */ bool 
operator==(const IniSection &rhs) const { return section_name == rhs.section_name && entry == rhs.entry; @@ -136,8 +136,8 @@ public: * @brief not equal operator between ini section * * @param rhs ini section to compare - * @return true two inisections are not equal - * @return false two inisections are equal + * @retval true two ini sections are not equal + * @retval false two ini sections are equal */ bool operator!=(const IniSection &rhs) const { return !operator==(rhs); } @@ -225,8 +225,8 @@ public: * @brief ini operator== to check if IniWrapper is equal * * @param rhs IniWrapper to compare - * @return true true if ini is equal (deeply) - * @return false false if ini is not equal + * @retval true if ini is equal (deeply) + * @retval false if ini is not equal */ bool operator==(const IniWrapper &rhs) const { return name == rhs.name && sections == rhs.sections; @@ -236,8 +236,8 @@ public: * @brief ini operator!= to check if IniWrapper is not equal * * @param rhs IniWrapper to compare - * @return true if not equal - * @return false if equal + * @retval true if not equal + * @retval false if equal */ bool operator!=(const IniWrapper &rhs) const { return !operator==(rhs); } diff --git a/nntrainer/utils/parse_util.h b/nntrainer/utils/parse_util.h index ecdccbb..cf66b3d 100644 --- a/nntrainer/utils/parse_util.h +++ b/nntrainer/utils/parse_util.h @@ -62,6 +62,11 @@ typedef enum { TOKEN_UNKNOWN } InputType; +/** + * @brief convert an integer-based status into a thrown exception + * + * @param status status to throw + */ inline void throw_status(int status) { switch (status) { case ML_ERROR_NONE: @@ -203,8 +208,8 @@ void printInstance(std::ostream &out, const T &t) { * * @param a first string to compare * @param b second string to compare - * @return true if string is case-insensitive equal - * @return false if string is case-insensitive not equal + * @retval true if string is case-insensitive equal + * @retval false if string is case-insensitive not equal */ bool istrequal(const 
std::string &a, const std::string &b); } /* namespace nntrainer */ diff --git a/nntrainer/utils/util_func.h b/nntrainer/utils/util_func.h index a8a6662..c5600e2 100644 --- a/nntrainer/utils/util_func.h +++ b/nntrainer/utils/util_func.h @@ -30,7 +30,7 @@ namespace nntrainer { /** * @brief get the seed - * @retVal seed + * @return seed */ unsigned int getSeed(); @@ -45,6 +45,12 @@ float random(); */ float sqrtFloat(float x); +/** + * @brief sqrt function for double type + * + * @param x value to take sqrt + * @return double return value + */ double sqrtDouble(double x); /** @@ -142,8 +148,8 @@ void writeString(std::ofstream &file, const std::string &str, * * @param target string to cehck * @param suffix check if string ends with @a suffix - * @return true @a target ends with @a suffix - * @return false @a target does not ends with @a suffix + * @retval true @a target ends with @a suffix + * @retval false @a target does not end with @a suffix */ bool endswith(const std::string &target, const std::string &suffix); diff --git a/test/unittest/compiler/unittest_interpreter.cpp b/test/unittest/compiler/unittest_interpreter.cpp index c87534e..2ae0587 100644 --- a/test/unittest/compiler/unittest_interpreter.cpp +++ b/test/unittest/compiler/unittest_interpreter.cpp @@ -58,8 +58,8 @@ auto ini_interpreter = * * @param lhs compiled(later, finalized) graph to be compared * @param rhs compiled(later, finalized) graph to be compared - * @return true graph is equal - * @return false graph is not equal + * @note returns void; equality of the two graphs is + * checked internally via test assertions */ static void graphEqual(const nntrainer::GraphRepresentation &lhs, const nntrainer::GraphRepresentation &rhs) {