*/
virtual int setOptimizer(std::shared_ptr<Optimizer> optimizer) = 0;
- /*
+ /**
* @brief get layer by name from neural network model
* @param[in] name name of the layer to get
* @param[out] layer shared_ptr to hold the layer to get
createOptimizer(const OptimizerType &type,
const std::vector<std::string> &properties = {});
-/*
+/**
* @brief General Optimizer Factory function to register optimizer
*
* @param props property representation
*
* @retval name of the layer
* @note This name is unique to this layer in a model
- * @Note This name might be changed once this layer is added to the model
+ * @note This name might be changed once this layer is added to the model
* to keep the name unique to the model
*/
std::string getName() noexcept { return layer->getName(); }
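
As a usage note for the API fragments above, a minimal sketch of wiring an optimizer created through the factory into a model via setOptimizer(). This is illustrative only and not part of the patch: the ml::train namespace, createModel(), ModelType::NEURAL_NET, OptimizerType::ADAM, and the "learning_rate" property string are assumptions drawn from the public C++ API, and the include paths may differ in a real build.

    #include <memory>
    #include <model.h>      // assumed ccapi headers; exact paths may differ
    #include <optimizer.h>

    int main() {
      // Illustrative only: build a model handle, create an optimizer through
      // the factory declared above, and attach it with setOptimizer().
      auto model = ml::train::createModel(ml::train::ModelType::NEURAL_NET);
      std::shared_ptr<ml::train::Optimizer> opt =
          ml::train::createOptimizer(ml::train::OptimizerType::ADAM,
                                     {"learning_rate=0.001"});
      return model->setOptimizer(opt); // int status; 0 assumed to mean success
    }
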
-/* SPDX-License-Identifier: Apache-2.0
- *
- * Copyright (C) 2020 Jihoon Lee <jihoon.it.lee@samsung.com>
+// SPDX-License-Identifier: Apache-2.0
+/**
+ * Copyright (C) 2020 Jihoon Lee <jhoon.it.lee@samsung.com>
*
* @file lazy_tensor.cpp
* @date 05 Jun 2020
* @brief A lazy evaluation calculator for tensors
* @see https://github.com/nnstreamer/nntrainer
- * @author Jihoon Lee <jihoon.it.lee@samsung.com>
+ * @author Jihoon Lee <jhoon.it.lee@samsung.com>
* @bug No known bugs except for NYI items
*
*/
// SPDX-License-Identifier: Apache-2.0
-/* Copyright (C) 2020 Jihoon Lee <jihoon.it.lee@samsung.com>
+/**
+ * Copyright (C) 2020 Jihoon Lee <jhoon.it.lee@samsung.com>
*
* @file lazy_tensor.h
* @date 05 Jun 2020
* @brief A lazy evaluation calculator for tensors
* @see https://github.com/nnstreamer/nntrainer
- * @author Jihoon Lee <jihoon.it.lee@samsung.com>
+ * @author Jihoon Lee <jhoon.it.lee@samsung.com>
* @bug No known bugs except for NYI items
*
*/
}
/**
- * @bried Clone the currnet object
+ * @brief Clone the current object
*
* @return Cloned copy
*/
resetGradient();
}
- /*
+ /**
* @brief Allocate memory for the variable and gradient
*/
void allocate() {
allocateGradient();
}
- /*
+ /**
* @brief Deallocate memory for the variable and gradient
*/
void deallocate() {
void deallocateGradient() { grad->deallocate(); }
/**
- * @bried Update the variable to use the variable from the given param
+ * @brief Update the variable to use the variable from the given param
* @param vg Var_Grad whose variable must be updated with
*/
void updateVariableByVariable(const Var_Grad &vg) { var = vg.var; }
/**
- * @bried Update the gradient to use the variable from the given param
+ * @brief Update the gradient to use the variable from the given param
* @param vg Var_Grad whose variable must be updated with
*/
void updateGradientByVariable(const Var_Grad &vg) { grad = vg.var; }
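
The updateVariableByVariable / updateGradientByVariable helpers above copy the underlying shared_ptr rather than the tensor contents, so the two objects end up viewing the same storage. Below is a minimal stand-in sketch of that pattern, with std::vector<float> standing in for Tensor and VarGradSketch a hypothetical simplification, not the real Var_Grad class:

    #include <memory>
    #include <vector>

    // Simplified stand-in for Var_Grad: the variable and its gradient live
    // behind shared_ptrs, mirroring the pattern documented above.
    struct VarGradSketch {
      std::shared_ptr<std::vector<float>> var;
      std::shared_ptr<std::vector<float>> grad;

      // Copying the shared_ptr makes both objects alias one buffer;
      // no tensor data is copied.
      void updateVariableByVariable(const VarGradSketch &vg) { var = vg.var; }
      void updateGradientByVariable(const VarGradSketch &vg) { grad = vg.var; }
    };

    int main() {
      VarGradSketch weight{std::make_shared<std::vector<float>>(4, 0.0f),
                           std::make_shared<std::vector<float>>(4, 0.0f)};
      VarGradSketch view;
      view.updateVariableByVariable(weight); // view.var now aliases weight.var
      (*weight.var)[0] = 1.0f;               // write is visible through view.var
      return (*view.var)[0] == 1.0f ? 0 : 1; // exits 0: storage is shared
    }

Since only pointers are exchanged, linking two objects this way duplicates no tensor memory.
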
// SPDX-License-Identifier: Apache-2.0
/**
- * Copyright (C) 2020 Jihoon Lee <jihoon.it.lee@samsung.com>
+ * Copyright (C) 2020 Jihoon Lee <jhoon.it.lee@samsung.com>
*
* @file unittest_nntrainer_lazy_tensor.cpp
* @date 05 Jun 2020
* @brief A unittest for nntrainer_lazy_tensor
* @see https://github.com/nnstreamer/nntrainer
- * @author Jihoon Lee <jihoon.it.lee@samsung.com>
+ * @author Jihoon Lee <jhoon.it.lee@samsung.com>
* @bug No known bugs except for NYI items
*/
#include <gtest/gtest.h>
// SPDX-License-Identifier: Apache-2.0
/**
- * Copyright (C) 2020 Jihoon Lee <jihoon.it.lee@samsung.com>
+ * Copyright (C) 2020 Jihoon Lee <jhoon.it.lee@samsung.com>
*
* @file unittest_nntrainer_models.cpp
* @date 19 Oct 2020
 * @brief Model multi iteration, integrated test
* @see https://github.com/nnstreamer/nntrainer
- * @author Jihoon Lee <jihoon.it.lee@samsung.com>
+ * @author Jihoon Lee <jhoon.it.lee@samsung.com>
* @bug No known bugs except for NYI items
*
*/