/// Section-name keys recognized by the INI model-file parser.
static constexpr const char *NONE_STR = "NONE";
static constexpr const char *MODEL_STR = "model";
static constexpr const char *DATASET_STR = "dataset";
/// Per-split dataset sections, used as an alternative to the single
/// [dataset] section (one section per train/valid/test split).
static constexpr const char *TRAINSET_STR = "train_set";
static constexpr const char *VALIDSET_STR = "valid_set";
static constexpr const char *TESTSET_STR = "test_set";
static constexpr const char *OPTIMIZER_STR = "optimizer";
namespace nntrainer {
// NOTE(review): fragment of an INI section-scanning loop; the enclosing
// function and loop are not visible here, and stray '+' diff markers indicate
// this text is a mangled patch paste — confirm against the original source.
std::string sec_name(sec_name_);
// Sections with dedicated parsers (model, dataset, the three per-split
// dataset sections, and optimizer) are handled elsewhere, so skip them here.
if (istrequal(sec_name, MODEL_STR) || istrequal(sec_name, DATASET_STR) ||
+ istrequal(sec_name, TRAINSET_STR) ||
+ istrequal(sec_name, VALIDSET_STR) ||
+ istrequal(sec_name, TESTSET_STR) ||
istrequal(sec_name, OPTIMIZER_STR)) {
/// dedicated sections so skip
continue;
namespace nntrainer {
-TfOpNode::TfOpNode(){};
+TfOpNode::TfOpNode() :
+ inputs(),
+ outputs(),
+ weights(),
+ weight_transform(nullptr),
+ is_input(false),
+ is_output(false),
+ node_owned_variable(),
+ op_type(tflite::BuiltinOperator_ADD),
+ builtin_ops(),
+ builtin_option_type(tflite::BuiltinOptions_NONE){};
void TfOpNode::setLayerNode(const LayerNode &layer) {
// A node is a graph input when its layer has no incoming connections.
is_input = layer.getNumInputConnections() == 0;
// NOTE(review): the lines below belong to a different function (pooling
// export, by the use of in_dim/padding/pool_size) — this is a spliced diff
// fragment; the function boundaries are not visible here.
int height = in_dim.height();
int width = in_dim.width();
- auto [pt, pb, pl, pr] = padding;
+ auto pt = padding[0];
+ auto pl = padding[2];
// NOTE(review): after this change only top (padding[0]) and left (padding[2])
// are read; bottom/right (padding[1], padding[3]) are no longer used —
// confirm the caller guarantees symmetric padding.
unsigned int p_height = pool_size[0];
unsigned int p_width = pool_size[1];
NN_RETURN_STATUS();
status = parse_buffer_section("valid_set", DatasetModeType::MODE_VALID);
NN_RETURN_STATUS();
// Fix: the test split must be read from the "test_set" section, matching the
// "train_set"/"valid_set" naming convention used above (the old name "test"
// never matched a section).
- status = parse_buffer_section("test", DatasetModeType::MODE_TEST);
+ status = parse_buffer_section("test_set", DatasetModeType::MODE_TEST);
NN_RETURN_STATUS();
return status;
case ml::train::ModelFormat::MODEL_FORMAT_INI:
saveModelIni(file_path);
break;
+
+ // Combined format: write the INI description alongside a .bin weight file
+ // derived from the INI path (foo.ini -> foo.bin).
+ case ml::train::ModelFormat::MODEL_FORMAT_INI_WITH_BIN: {
+ auto old_save_path = std::get<props::SavePath>(model_flex_props);
+ auto bin_file_name =
+ file_path.substr(0, file_path.find_last_of('.')) + ".bin";
+
+ // Temporarily point SavePath at the .bin so the INI references it, then
+ // restore the original value afterwards.
+ // NOTE(review): if either save() below throws, old_save_path is not
+ // restored — consider a scope guard; confirm the exception policy here.
+ std::get<props::SavePath>(model_flex_props).set(bin_file_name);
+ save(file_path, ml::train::ModelFormat::MODEL_FORMAT_INI);
+ save(bin_file_name, ml::train::ModelFormat::MODEL_FORMAT_BIN);
+ std::get<props::SavePath>(model_flex_props) = old_save_path;
+ break;
+ }
default:
throw nntrainer::exception::not_supported(
"saving with given format is not supported yet");
template <>
std::unique_ptr<std::vector<std::pair<std::string, std::string>>>
-Exporter::getResult<ExportMethods::METHOD_STRINGVECTOR>() noexcept {
+Exporter::getResult<ExportMethods::METHOD_STRINGVECTOR>() {
return std::move(stored_result);
}
#ifdef ENABLE_TFLITE_INTERPRETER
/**
 * @brief getResult specialization for TFLite node export.
 * @return ownership of the finalized TfOpNode; the Exporter's tf_node member
 * is left in a moved-from state afterwards.
 * @note not noexcept: finalize() carries no noexcept guarantee, so this
 * definition matches the updated (non-noexcept) getResult declaration.
 */
template <>
std::unique_ptr<TfOpNode> Exporter::getResult<ExportMethods::METHOD_TFLITE>() {
  // Complete the node (resolve any pending state) before handing it out.
  tf_node->finalize();
  return std::move(tf_node);
}
*/
// Declaration matching the specializations above; noexcept is dropped because
// at least one specialization (METHOD_TFLITE) calls finalize(), which has no
// no-throw guarantee.
template <ExportMethods methods,
typename T = typename return_type<methods>::type>
- std::unique_ptr<T> getResult() noexcept;
+ std::unique_ptr<T> getResult();
private:
/**