From: jijoong.moon Date: Wed, 4 Dec 2019 11:23:25 +0000 (+0900) Subject: add doxygen doc for Logistic regression X-Git-Tag: accepted/tizen/unified/20200706.064221~240 X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=3640faba778fcb670a59ae50367b9c1aa03f507d;p=platform%2Fcore%2Fml%2Fnntrainer.git add doxygen doc for Logistic regression add doxygen doc for Logistic regression **Self evaluation:** 1. Build test: [X]Passed [ ]Failed [ ]Skipped 2. Run test: [X]Passed [ ]Failed [ ]Skipped Signed-off-by: jijoong.moon --- diff --git a/LogisticRegression/jni/main.cpp b/LogisticRegression/jni/main.cpp index fa0ba89..d1318b7 100644 --- a/LogisticRegression/jni/main.cpp +++ b/LogisticRegression/jni/main.cpp @@ -1,9 +1,22 @@ +/** + * @file main.cpp + * @date 04 December 2019 + * @see https://github.sec.samsung.net/jijoong-moon/Transfer-Learning.git + * @author Jijoong Moon + * @bug No known bugs except for NYI items + * @brief This is Binary Logistic Regression Example + * + * Training set (dataset1.txt) : two column data + result (1.0 or 0.0) + * Configuration file : ../../res/LogisticRegression.ini + * Test set (test.txt) + */ + +#include +#include #include #include #include #include -#include -#include #include "include/matrix.h" #include "include/neuralnet.h" @@ -11,6 +24,11 @@ std::string data_file; +/** + * @brief step function + * @param[in] x value to be distinguished + * @retval 0.0 or 1.0 + */ double stepFunction(double x) { if (x > 0.5) { return 1.0; } @@ -23,6 +41,12 @@ double stepFunction(double x) { return x; } +/** + * @brief create NN + * back propagation of NN + * @param[in] arg 1 : configuration file path + * @param[in] arg 2 : resource path (dataset.txt or testset.txt) + */ int main(int argc, char *argv[]) { if (argc < 3) { std::cout << "./LogisticRegression Config.ini data.txt\n"; @@ -35,13 +59,22 @@ int main(int argc, char *argv[]) { srand(time(NULL)); + /** + * @brief Create NN with configuration file path + */ std::vector> inputVector, 
outputVector; Network::NeuralNetwork NN(config); + /** + * @brief Initialize NN + */ NN.init(); if (!training) NN.readModel(); + /** + * @brief Generate Training Set + */ std::ifstream dataFile(data_file); if (dataFile.is_open()) { std::string temp; @@ -67,18 +100,30 @@ int main(int argc, char *argv[]) { index++; } } + + /** + * @brief training NN ( back propagation ) + */ if (training) { for (unsigned int i = 0; i < NN.getEpoch(); i++) { NN.backwarding(Matrix(inputVector), Matrix(outputVector), i); - std::cout << "#" << i + 1 << "/" << NN.getEpoch() - << " - Loss : " << NN.getLoss() << std::endl; + std::cout << "#" << i + 1 << "/" << NN.getEpoch() << " - Loss : " << NN.getLoss() << std::endl; NN.setLoss(0.0); } } else { - std::cout << NN.forwarding(Matrix(inputVector)).applyFunction(stepFunction) - << std::endl; + /** + * @brief forward propagation + */ + std::cout << NN.forwarding(Matrix(inputVector)).applyFunction(stepFunction) << std::endl; } + /** + * @brief save Weight & Bias + */ NN.saveModel(); + + /** + * @brief Finalize NN + */ NN.finalize(); }