props::Epochs(), props::TrainingBatchSize(), props::SavePath(),
props::ContinueTrain(), props::SaveBestPath(), props::MemoryOptimization(),
props::MemorySwap(), props::MemorySwapPath(), props::MemorySwapLookahead(),
- props::TensorFormat(), props::ModelTensorDataType()),
+ props::TensorFormat(), props::ModelTensorDataType(), props::LossScale()),
load_path(std::string()),
epoch_idx(0),
iter(0),
props::Epochs(), props::TrainingBatchSize(), props::SavePath(),
props::ContinueTrain(), props::SaveBestPath(), props::MemoryOptimization(),
props::MemorySwap(), props::MemorySwapPath(), props::MemorySwapLookahead(),
- props::TensorFormat(), props::ModelTensorDataType()),
+ props::TensorFormat(), props::ModelTensorDataType(), props::LossScale()),
load_path(std::string()),
epoch_idx(0),
iter(0),
const std::string tensor_type =
to_string(std::get<props::ModelTensorDataType>(model_flex_props));
+ const float loss_scale = std::get<props::LossScale>(model_flex_props);
model_graph = NetworkGraph(memory_swap, memory_swap_path, lookahead,
- tensor_format, tensor_type);
+ tensor_format, tensor_type, loss_scale);
model_graph.setMemoryOptimizations(
std::get<props::MemoryOptimization>(model_flex_props));
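props::LossScale itself is not defined anywhere in this diff; below is a minimal sketch of how such a property is typically declared under nntrainer's Property<T> scheme. The default value and the "loss_scale" key string are assumptions, not taken from the patch.

// Sketch only (assumed default value and key string); follows the
// Property<float> pattern used by the other model properties.
#include <base_properties.h>

namespace nntrainer {
namespace props {

class LossScale : public Property<float> {
public:
  LossScale(float value = 1.0f) { set(value); }    /**< assumed default */
  static constexpr const char *key = "loss_scale"; /**< assumed key string */
  using prop_tag = float_prop_tag;                 /**< float property tag */
};

} // namespace props
} // namespace nntrainer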
auto train_for_iteration =
[this, stop_cb, stop_user_data](RunStats &stat, DataBuffer &buffer) {
+ ml_loge("train for iteration");
forwarding(true, stop_cb, stop_user_data);
backwarding(iter++, stop_cb, stop_user_data);
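The stop_cb/stop_user_data pair threaded through forwarding() and backwarding() lets the caller interrupt training between steps. A caller-side sketch, with illustrative names only:

#include <atomic>

// Illustrative only: a flag another thread can set; returning true from the
// callback asks the training loop to stop at its next callback check.
static std::atomic<bool> stop_requested{false};

bool stop_when_requested(void * /*user_data*/) {
  return stop_requested.load();
}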
/**
* @brief Forward Propagation of the neural network
*/
- sharedConstTensors forwarding(bool training = true,
- std::function<bool(void *userdata)> stop_cb =
- [](void *user_data) { return false; },
- void *user_data = nullptr);
+ sharedConstTensors forwarding(
+ bool training = true,
+ std::function<bool(void *userdata)> stop_cb =
+ [](void *user_data) { return false; },
+ void *user_data = nullptr);
/**
 * @brief Forward Propagation of the neural network
 */
/**
* @brief Incremental forward Propagation of the neural network
*/
- sharedConstTensors
- incremental_forwarding(unsigned int from, unsigned int to,
- bool training = true,
- std::function<bool(void *userdata)> stop_cb =
- [](void *user_data) { return false; },
- void *user_data = nullptr);
+ sharedConstTensors incremental_forwarding(
+ unsigned int from, unsigned int to, bool training = true,
+ std::function<bool(void *userdata)> stop_cb =
+ [](void *user_data) { return false; },
+ void *user_data = nullptr);
/**
 * @brief Incremental forward Propagation of the neural network
 */
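A hedged usage sketch of the reformatted incremental_forwarding() signature, stepping through a range one position at a time; net and seq_len are assumed to be provided by the caller:

// Illustrative only: run inference incrementally over [0, seq_len), one
// position per call, with training disabled.
void run_incremental(nntrainer::NeuralNetwork &net, unsigned int seq_len) {
  for (unsigned int step = 0; step < seq_len; ++step) {
    auto out = net.incremental_forwarding(step, step + 1, /*training=*/false);
    // consume `out` for this step ...
  }
}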
/**
 * @brief Backward Propagation of the neural network
 * @param[in] iteration Iteration Number for the optimizer
 */
- void backwarding(int iteration,
- std::function<bool(void *userdata)> stop_cb =
- [](void *user_data) { return false; },
- void *user_data = nullptr);
+ void backwarding(
+ int iteration,
+ std::function<bool(void *userdata)> stop_cb =
+ [](void *user_data) { return false; },
+ void *user_data = nullptr);
/**
* @copydoc Model::save(const std::string &file_path, ml::train::ModelFormat
* @retval #ML_ERROR_NONE Successful.
* @retval #ML_ERROR_INVALID_PARAMETER invalid parameter.
*/
- int train(const std::vector<std::string> &values = {},
- std::function<bool(void *)> stop_cb =
- [](void *stop_user_data) { return false; },
- void *stop_user_data = nullptr,
- std::function<void(void *)> epoch_complete_cb =
- [](void *epoch_user_data) { return false; },
- void *epoch_user_data = nullptr) override;
+ int train(
+ const std::vector<std::string> &values = {},
+ std::function<bool(void *)> stop_cb =
+ [](void *stop_user_data) { return false; },
+ void *stop_user_data = nullptr,
+ std::function<void(void *)> epoch_complete_cb =
+ [](void *epoch_user_data) { return false; },
+ void *epoch_user_data = nullptr) override;
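A hedged usage sketch of this train() overload, reusing the stop_when_requested callback sketched earlier; the callback bodies are illustrative:

// Illustrative only: `net` is an already compiled and initialized network.
int status = net.train(
  {},                           /* extra property strings */
  stop_when_requested, nullptr, /* stop_cb, stop_user_data */
  [](void *) { /* e.g. checkpoint after each epoch */ },
  nullptr);                     /* epoch_user_data */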
/**
* @brief Run NeuralNetwork inference
const std::string file_path) override;
private:
- using FlexiblePropTypes =
- std::tuple<props::Epochs, props::TrainingBatchSize, props::SavePath,
- props::ContinueTrain, props::SaveBestPath,
- props::MemoryOptimization, props::MemorySwap,
- props::MemorySwapPath, props::MemorySwapLookahead,
- props::TensorFormat, props::ModelTensorDataType>;
+ using FlexiblePropTypes = std::tuple<
+ props::Epochs, props::TrainingBatchSize, props::SavePath,
+ props::ContinueTrain, props::SaveBestPath, props::MemoryOptimization,
+ props::MemorySwap, props::MemorySwapPath, props::MemorySwapLookahead,
+ props::TensorFormat, props::ModelTensorDataType, props::LossScale>;
using RigidPropTypes =
std::tuple<props::LossType, std::vector<props::InputConnection>,
std::vector<props::LabelLayer>, props::ClipGradByGlobalNorm>;
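With props::LossScale appended to FlexiblePropTypes, the value should become settable like the other flexible model properties. A sketch through the ml::train C++ API, assuming the property key is "loss_scale" (the key string does not appear in this diff):

#include <model.h>

// Illustrative only: "loss_scale" is the assumed key exposed by
// props::LossScale; "epochs" and "batch_size" are existing keys.
auto model = ml::train::createModel(ml::train::ModelType::NEURAL_NET);
model->setProperty({"epochs=1", "batch_size=32", "loss_scale=128"});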
* @retval #ML_ERROR_NONE Successful.
* @retval #ML_ERROR_INVALID_PARAMETER invalid parameter.
*/
- int train_run(std::function<bool(void *)> stop_cb =
- [](void *) { return false; },
- void *user_data = nullptr,
- std::function<void(void *)> epoch_complete_cb =
- [](void *) { return false; },
- void *data = nullptr);
+ int train_run(
+ std::function<bool(void *)> stop_cb = [](void *) { return false; },
+ void *user_data = nullptr,
+ std::function<void(void *)> epoch_complete_cb =
+ [](void *) { return false; },
+ void *data = nullptr);
/**
* @brief Swap function for the class