* limitations under the License.
*/
-var MachineLearningTrainer = function() {};
+var MachineLearningTrainer = function () { };
var OptimizerType = {
OPTIMIZER_ADAM: 'OPTIMIZER_ADAM',
// Copies whitelisted keys from the user-supplied `options` dictionary into the
// argument map sent to the native MLTrainerModelCompile handler.
// NOTE(review): this hunk fixes copy/paste bugs — previously every option was
// written to args.loss_val. Both 'loss_val' and 'loss' now land in args.loss;
// presumably 'loss_val' is a legacy alias and 'loss' wins if both are set —
// TODO confirm against API design.
function ValidateCompileOptions(options) {
var args = {};
if (options.hasOwnProperty('loss_val')) {
- args.loss_val = options.loss_val;
+ args.loss = options.loss_val;
}
if (options.hasOwnProperty('loss')) {
- args.loss_val = options.loss;
+ args.loss = options.loss;
}
if (options.hasOwnProperty('batch_size')) {
- args.loss_val = options.batch_size;
+ args.batch_size = options.batch_size;
}
return args;
}
+// Error names Model.compile() may re-throw from the native result; anything
+// else is coerced to AbortError by native_.getErrorObjectAndValidate().
+var ValidModelCompileExceptions = [
+ 'InvalidValuesError',
+ 'TypeMismatchError',
+ 'AbortError'
+];
+
// Compiles the model synchronously via the native MLTrainerModelCompile call.
// NOTE(review): this hunk elides context between the validator descriptor and
// the failure check — the closing `]);` and the native_.callSync invocation
// that produces `result` are outside this excerpt.
Model.prototype.compile = function() {
- var args = validator.validateArgs(arguments, [
+ var args = validator_.validateArgs(arguments, [
{
name: 'options',
- type: validator.Types.DICTIONARY,
+ type: validator_.Types.DICTIONARY,
optional: true,
nullable: true
}
if (native_.isFailure(result)) {
throw native_.getErrorObjectAndValidate(
result,
- ValidSetPropertyExceptions,
+ ValidModelCompileExceptions,
AbortError
);
}
- // TODO:
};
// Copies whitelisted keys from the user-supplied `options` dictionary into the
// argument map sent to the native MLTrainerModelRun handler.
// NOTE(review): besides fixing the args.loss_val copy/paste bugs, this hunk
// silently removes the 'continue_train' branch — confirm whether the option
// was dropped from the API design or lost by mistake.
function ValidateRunOptions(options) {
var args = {};
if (options.hasOwnProperty('batch_size')) {
- args.loss_val = options.batch_size;
+ args.batch_size = options.batch_size;
}
if (options.hasOwnProperty('epochs')) {
- args.loss_val = options.epochs;
+ args.epochs = options.epochs;
}
if (options.hasOwnProperty('save_path')) {
- args.loss_val = options.save_path;
- }
- if (options.hasOwnProperty('continue_train')) {
- args.loss_val = options.continue_train;
+ args.save_path = options.save_path;
}
return args;
}
+// Error names Model.run() may deliver to the errorCallback / throw
+// synchronously; others are coerced to AbortError.
+var ValidModelRunExceptions = [
+ 'InvalidValuesError',
+ 'TypeMismatchError'
+];
+
// Trains the model asynchronously: validates (options?, successCallback,
// errorCallback?), then posts 'MLTrainerModelRun' via native_.call and routes
// the async result to the appropriate callback. Throws synchronously only if
// the initial native dispatch itself fails.
Model.prototype.run = function() {
- var args = validator.validateArgs(arguments, [
+ var args = validator_.validateArgs(arguments, [
{
name: 'options',
- type: validator.Types.DICTIONARY,
+ type: validator_.Types.DICTIONARY,
+ optional: true,
+ nullable: true
+ },
+ {
+ name: 'successCallback',
+ type: types_.FUNCTION
+ },
+ {
+ name: 'errorCallback',
+ type: types_.FUNCTION,
optional: true,
nullable: true
}
]);
+ var runOptions = {};
if (args.has.options) {
- ValidateRunOptions(args.options);
+ runOptions = ValidateRunOptions(args.options);
+ }
+
+ var callArgs = {
+ id: this._id,
+ options: runOptions
+ };
+
+ // Invoked once from the native side with the asynchronous training result.
+ var callback = function (result) {
+ if (native_.isFailure(result)) {
+ native_.callIfPossible(
+ args.errorCallback,
+ native_.getErrorObjectAndValidate(
+ result,
+ ValidModelRunExceptions,
+ AbortError
+ )
+ );
+ } else {
+ args.successCallback();
+ }
+ };
+
+ var result = native_.call('MLTrainerModelRun', callArgs, callback);
+ if (native_.isFailure(result)) {
+ throw native_.getErrorObjectAndValidate(
+ result,
+ ValidModelRunExceptions,
+ AbortError
+ );
}
- // TODO
};
// Returns the model summary string from the synchronous native call.
// `level` becomes optional; SUMMARY_MODEL is the default when omitted.
// NOTE(review): the opening of the validateArgs call is elided from this
// hunk. Also "SUMMARY_MODEL" uses double quotes and `return result.summary`
// lacks a semicolon — inconsistent with the file's style; confirm lint rules.
Model.prototype.summarize = function() {
name: 'level',
type: types_.ENUM,
values: Object.values(VerbosityLevel),
- optional: false
+ optional: true,
+ nullable: true
}
]);
- // TODO
+
+ var callArgs = {
+ id: this._id,
+ level: args.level ? args.level : "SUMMARY_MODEL"
+ }
+
+ var result = native_.callSync('MLTrainerModelSummarize', callArgs);
+
+ if (native_.isFailure(result)) {
+ throw native_.getErrorObjectAndValidate(
+ result,
+ ValidBasicExceptions,
+ AbortError
+ );
+ }
+
+ return result.summary
};
+// Default error whitelist shared by summarize/addLayer/setDataset/setOptimizer.
+var ValidBasicExceptions = [
+ 'TypeMismatchError',
+ 'AbortError'
+];
+
// Attaches an existing Layer (by its native _id) to this model via the
// synchronous MLTrainerModelAddLayer call.
// NOTE(review): the middle of the validator descriptor (name/type for the
// 'layer' argument) is elided from this hunk.
Model.prototype.addLayer = function() {
var args = validator_.validateArgs(arguments, [
{
values: Layer
}
]);
- // TODO
+
+ // validateArgs leaves optional-style args unset rather than throwing,
+ // so enforce presence explicitly.
+ if (!args.has.layer) {
+ throw new WebAPIException(
+ WebAPIException.TYPE_MISMATCH_ERR, 'Invalid parameter: layer is undefined'
+ );
+ }
+
+ var callArgs = {
+ id: this._id,
+ layerId: args.layer._id
+ };
+
+ var result = native_.callSync('MLTrainerModelAddLayer', callArgs);
+
+ if (native_.isFailure(result)) {
+ throw native_.getErrorObjectAndValidate(
+ result,
+ ValidBasicExceptions,
+ AbortError
+ );
+ }
};
// Binds an existing Dataset (by its native _id) to this model via the
// synchronous MLTrainerModelSetDataset call.
// NOTE(review): the validator descriptor's opening lines are elided here.
Model.prototype.setDataset = function() {
values: Dataset
}
]);
- // TODO
+
+ // Enforce the mandatory argument explicitly (see addLayer).
+ if (!args.has.dataset) {
+ throw new WebAPIException(
+ WebAPIException.TYPE_MISMATCH_ERR, 'Invalid parameter: dataset is undefined'
+ );
+ }
+
+ var callArgs = {
+ id: this._id,
+ datasetId: args.dataset._id
+ };
+
+ var result = native_.callSync('MLTrainerModelSetDataset', callArgs);
+
+ if (native_.isFailure(result)) {
+ throw native_.getErrorObjectAndValidate(
+ result,
+ ValidBasicExceptions,
+ AbortError
+ );
+ }
};
// Binds an existing Optimizer (by its native _id) to this model via the
// synchronous MLTrainerModelSetOptimizer call.
// NOTE(review): the validator descriptor's opening lines are elided here.
Model.prototype.setOptimizer = function() {
values: Optimizer
}
]);
- // TODO
+
+ // Enforce the mandatory argument explicitly (see addLayer).
+ if (!args.has.optimizer) {
+ throw new WebAPIException(
+ WebAPIException.TYPE_MISMATCH_ERR, 'Invalid parameter: optimizer is undefined'
+ );
+ }
+
+ var callArgs = {
+ id: this._id,
+ optimizerId: args.optimizer._id
+ };
+
+ var result = native_.callSync('MLTrainerModelSetOptimizer', callArgs);
+
+ if (native_.isFailure(result)) {
+ throw native_.getErrorObjectAndValidate(
+ result,
+ ValidBasicExceptions,
+ AbortError
+ );
+ }
};
var ValidCreateLayerExceptions = ['NotSupportedError', 'TypeMismatchError', 'AbortError'];
);
}
- return new Layer(result.id);
+ return new Layer(result.id, args.type);
};
function ValidateAndReturnDatasetPaths(train, valid, test) {
const std::string kValid = "valid";
const std::string kTest = "test";
const std::string kOptions = "options";
+const std::string kLayerId = "layerId";
+const std::string kDatasetId = "datasetId";
+const std::string kOptimizerId = "optimizerId";
+const std::string kLevel = "level";
+const std::string kSummary = "summary";
} // namespace
using namespace common;
void MlInstance::MLTrainerModelCompile(const picojson::value& args, picojson::object& out) {
ScopeLogger("args: %s", args.serialize().c_str());
CHECK_ARGS(args, kId, double, out);
+ CHECK_ARGS(args, kOptions, picojson::object, out);
auto id = static_cast<int>(args.get(kId).get<double>());
+ auto options = args.get(kOptions).get<picojson::object>();
- PlatformResult result = trainer_manager_.ModelCompile(id);
+ PlatformResult result = trainer_manager_.ModelCompile(id, options);
if (!result) {
ReportError(result, &out);
}
// Bridge handler: unpacks {id, layerId} from the JS call and delegates to
// TrainerManager::ModelAddLayer, reporting success/error into `out`.
void MlInstance::MLTrainerModelAddLayer(const picojson::value& args, picojson::object& out) {
- ScopeLogger();
+ ScopeLogger("args: %s", args.serialize().c_str());
+ CHECK_ARGS(args, kId, double, out);
+ CHECK_ARGS(args, kLayerId, double, out);
+
+ auto id = static_cast<int>(args.get(kId).get<double>());
+ auto layerId = static_cast<int>(args.get(kLayerId).get<double>());
+
+ PlatformResult result = trainer_manager_.ModelAddLayer(id, layerId);
+
+ if (!result) {
+ ReportError(result, &out);
+ return;
+ }
+ ReportSuccess(out);
}
// Bridge handler: starts asynchronous model training. Synchronously ACKs the
// request (final ReportSuccess), then runs ModelRun on the worker thread and
// posts the real outcome back via the callback id.
void MlInstance::MLTrainerModelRun(const picojson::value& args, picojson::object& out) {
- ScopeLogger();
+ ScopeLogger("args: %s", args.serialize().c_str());
+ CHECK_ARGS(args, kId, double, out);
+ CHECK_ARGS(args, kOptions, picojson::object, out);
+ CHECK_ARGS(args, kCallbackId, double, out);
+
+ auto id = static_cast<int>(args.get(kId).get<double>());
+ auto options = args.get(kOptions).get<picojson::object>();
+ auto cb_id = args.get(kCallbackId).get<double>();
+
+ auto async_logic = [this, id, options](decltype(out) out) {
+ PlatformResult result;
+
+ try {
+ result = trainer_manager_.ModelRun(id, options);
+ } catch (...) { // MK-TODO verify why this exception occurs
+ LoggerE("Unhandled and unexpected exception!!");
+ ReportError(result, &out);
+ // Must return here: `result` is still the default-constructed success
+ // value, so falling through would call ReportSuccess and also report
+ // twice into the same response object.
+ return;
+ }
+
+ if (!result) {
+ ReportError(result, &out);
+ return;
+ }
+
+ ReportSuccess(out);
+ };
+
+ this->worker_.add_job([this, cb_id, async_logic] {
+ picojson::value response = picojson::value(picojson::object());
+ picojson::object& async_out = response.get<picojson::object>();
+ async_out[kCallbackId] = picojson::value(cb_id);
+ async_logic(async_out);
+ this->PostMessage(response.serialize().c_str());
+ });
+
+ ReportSuccess(out);
}
// Bridge handler: maps the JS 'level' enum string to ml_train_summary_type_e
// via SummaryTypeEnum, fetches the summary text and returns it under kSummary.
void MlInstance::MLTrainerModelSummarize(const picojson::value& args, picojson::object& out) {
- ScopeLogger();
+ ScopeLogger("args: %s", args.serialize().c_str());
+ CHECK_ARGS(args, kId, double, out);
+ CHECK_ARGS(args, kLevel, std::string, out);
+
+ auto id = static_cast<int>(args.get(kId).get<double>());
+
+ // Default is overwritten by getValue() on success; on failure we bail out.
+ ml_train_summary_type_e summaryType = ML_TRAIN_SUMMARY_MODEL;
+ PlatformResult result = types::SummaryTypeEnum.getValue(
+ args.get(kLevel).get<std::string>(), &summaryType);
+ if (!result) {
+ LogAndReportError(result, &out);
+ return;
+ }
+
+ std::string summary;
+
+ result = trainer_manager_.ModelSummarize(id, summaryType, summary);
+
+ if (!result) {
+ ReportError(result, &out);
+ return;
+ }
+
+ out[kSummary] = picojson::value(summary);
+ ReportSuccess(out);
}
// Bridge handler: unpacks {id, datasetId} and delegates to
// TrainerManager::ModelSetDataset.
void MlInstance::MLTrainerModelSetDataset(const picojson::value& args, picojson::object& out) {
- ScopeLogger();
+ ScopeLogger("args: %s", args.serialize().c_str());
+ CHECK_ARGS(args, kId, double, out);
+ CHECK_ARGS(args, kDatasetId, double, out);
+
+ auto id = static_cast<int>(args.get(kId).get<double>());
+ auto datasetId = static_cast<int>(args.get(kDatasetId).get<double>());
+
+ PlatformResult result = trainer_manager_.ModelSetDataset(id, datasetId);
+
+ if (!result) {
+ ReportError(result, &out);
+ return;
+ }
+ ReportSuccess(out);
}
// Bridge handler: unpacks {id, optimizerId} and delegates to
// TrainerManager::ModelSetOptimizer.
void MlInstance::MLTrainerModelSetOptimizer(const picojson::value& args, picojson::object& out) {
- ScopeLogger();
+ ScopeLogger("args: %s", args.serialize().c_str());
+ CHECK_ARGS(args, kId, double, out);
+ CHECK_ARGS(args, kOptimizerId, double, out);
+
+ auto id = static_cast<int>(args.get(kId).get<double>());
+ auto optimizerId = static_cast<int>(args.get(kOptimizerId).get<double>());
+
+ PlatformResult result = trainer_manager_.ModelSetOptimizer(id, optimizerId);
+
+ if (!result) {
+ ReportError(result, &out);
+ return;
+ }
+ ReportSuccess(out);
}
void MlInstance::MLTrainerDatasetCreateGenerator(const picojson::value& args,
picojson::object& out) {
- ScopeLogger();
+ ScopeLogger("args: %s", args.serialize().c_str());
}
void MlInstance::MLTrainerDatasetCreateFromFile(const picojson::value& args,
namespace extension {
namespace ml {
+const std::string OPTION_SEPARATOR = " | ";
+const std::string FILE_PATH_PREFIX = "file://";
+
TrainerManager::TrainerManager() {
ScopeLogger();
}
int ret_val = ml_train_model_construct(&n_model);
if (ret_val != 0) {
- LoggerE("Could not create model: %s", ml_strerror(ret_val));
+ LoggerE("Could not create model: %d (%s)", ret_val, ml_strerror(ret_val));
return PlatformResult(ErrorCode::ABORT_ERR, ml_strerror(ret_val));
}
int ret_val = ml_train_model_construct_with_conf(config.c_str(), &n_model);
if (ret_val != 0) {
- LoggerE("Could not create model: %s", ml_strerror(ret_val));
+ LoggerE("Could not create model: %d (%s)", ret_val, ml_strerror(ret_val));
return PlatformResult(ErrorCode::ABORT_ERR, ml_strerror(ret_val));
}
return PlatformResult();
}
// Serializes `options` as "key=value | key=value" and passes them to
// ml_train_model_compile(); an empty option set falls back to NULL.
// NOTE(review): the model-not-found early return between the find() check and
// the models_[id] access is elided from this hunk.
-PlatformResult TrainerManager::ModelCompile(int id) {
+PlatformResult TrainerManager::ModelCompile(int id,
+ const picojson::object& options) {
ScopeLogger();
if (models_.find(id) == models_.end()) {
auto& model = models_[id];
- int ret_val = ml_train_model_compile(model, NULL);
+ std::stringstream ss;
+ for (const auto& opt : options) {
+ const auto& key = opt.first;
+ if (opt.second.is<std::string>()) {
+ const auto& value = opt.second.get<std::string>();
+ ss << key << "=" << value << OPTION_SEPARATOR;
+ } else if (opt.second.is<double>()) {
+ const auto& value = opt.second.get<double>();
+ ss << key << "=" << value << OPTION_SEPARATOR;
+ } else {
+ // NOTE(review): message is missing a space after the colon.
+ LoggerE("Unexpected param type for: %s", key.c_str());
+ return PlatformResult(ErrorCode::ABORT_ERR,
+ "Unexpected param type for:" + key);
+ }
+ }
+
+ int ret_val = 0;
+ auto compileOpts = ss.str;
+ if (compileOpts.length() < OPTION_SEPARATOR.length()) {
+ ret_val = ml_train_model_compile(model, NULL);
+ } else {
+ // remove trailing ' | ' from options string
+ compileOpts =
+ compileOpts.substr(0, compileOpts.length() - OPTION_SEPARATOR.length());
+ LoggerI("Compiling model with options: %s", compileOpts.c_str());
+ ret_val = ml_train_model_compile(model, compileOpts.c_str(), NULL);
+ }
+
+ // NOTE(review): stringstream::clear() only resets error flags, not the
+ // buffer; `ss` goes out of scope right after, so this call is dead code.
+ ss.clear();
+
if (ret_val != 0) {
- LoggerE("Could not compile model: %s", ml_strerror(ret_val));
+ LoggerE("Could not compile model: %d (%s)", ret_val, ml_strerror(ret_val));
return PlatformResult(ErrorCode::ABORT_ERR, ml_strerror(ret_val));
}
return PlatformResult();
}
// Serializes `options` as "key=value | key=value" and passes them to
// ml_train_model_run(); an empty option set falls back to NULL.
// NOTE(review): the option-serialization loop is duplicated verbatim from
// ModelCompile — consider extracting a shared helper. The model-not-found
// early return between find() and models_[id] is elided from this hunk.
-PlatformResult TrainerManager::ModelRun(int id) {
+PlatformResult TrainerManager::ModelRun(int id,
+ const picojson::object& options) {
ScopeLogger();
if (models_.find(id) == models_.end()) {
auto& model = models_[id];
- int ret_val = ml_train_model_run(model, NULL);
+ std::stringstream ss;
+ for (const auto& opt : options) {
+ const auto& key = opt.first;
+ if (opt.second.is<std::string>()) {
+ const auto& value = opt.second.get<std::string>();
+ ss << key << "=" << value << OPTION_SEPARATOR;
+ } else if (opt.second.is<double>()) {
+ const auto& value = opt.second.get<double>();
+ ss << key << "=" << value << OPTION_SEPARATOR;
+ } else {
+ LoggerE("Unexpected param type for: %s", key.c_str());
+ return PlatformResult(ErrorCode::ABORT_ERR,
+ "Unexpected param type for:" + key);
+ }
+ }
+
+ int ret_val = 0;
+ auto runOpts = ss.str();
+
+ if (runOpts.length() < OPTION_SEPARATOR.length()) {
+ ret_val = ml_train_model_run(model, NULL);
+ } else {
+ // remove trailing ' | ' from options string
+ runOpts = runOpts.substr(0, runOpts.length() - OPTION_SEPARATOR.length());
+ LoggerI("Running model with options: %s", runOpts.c_str());
+ ret_val = ml_train_model_run(model, runOpts.c_str(), NULL);
+ }
+
if (ret_val != 0) {
- LoggerE("Could not run model: %s", ml_strerror(ret_val));
+ LoggerE("Could not run (train) model: %d (%s)", ret_val,
+ ml_strerror(ret_val));
+ // NOTE(review): this path reports UNKNOWN_ERR while every sibling
+ // function uses ABORT_ERR — confirm this difference is intentional.
+ return PlatformResult(ErrorCode::UNKNOWN_ERR, ml_strerror(ret_val));
+ }
+
+ return PlatformResult();
+}
+
+// Attaches the layer handle `layerId` to model `id` via
+// ml_train_model_add_layer(). Both ids must already exist in the registries.
+PlatformResult TrainerManager::ModelAddLayer(int id, int layerId) {
+ ScopeLogger();
+
+ if (models_.find(id) == models_.end()) {
+ LoggerE("Could not find model with id: %d", id);
+ return PlatformResult(ErrorCode::ABORT_ERR, "Could not find model");
+ }
+
+ if (layers_.find(layerId) == layers_.end()) {
+ // Log the layer id that was actually looked up (was `id` — copy/paste bug).
+ LoggerE("Could not find layer with id: %d", layerId);
+ return PlatformResult(ErrorCode::ABORT_ERR, "Could not find layer");
+ }
+
+ auto& model = models_[id];
+ auto& layer = layers_[layerId];
+
+ int ret_val = ml_train_model_add_layer(model, layer);
+ if (ret_val != 0) {
+ LoggerE("Could not add layer to model: %d (%s)", ret_val,
+ ml_strerror(ret_val));
 return PlatformResult(ErrorCode::ABORT_ERR, ml_strerror(ret_val));
 }
 return PlatformResult();
 }
+// Binds the optimizer handle `optimizerId` to model `id` via
+// ml_train_model_set_optimizer(). Both ids must exist in the registries.
+PlatformResult TrainerManager::ModelSetOptimizer(int id, int optimizerId) {
+ ScopeLogger();
+
+ if (models_.find(id) == models_.end()) {
+ LoggerE("Could not find model with id: %d", id);
+ return PlatformResult(ErrorCode::ABORT_ERR, "Could not find model");
+ }
+
+ if (optimizers_.find(optimizerId) == optimizers_.end()) {
+ // Log the optimizer id that was looked up (was `id` — copy/paste bug).
+ LoggerE("Could not find optimizer with id: %d", optimizerId);
+ return PlatformResult(ErrorCode::ABORT_ERR, "Could not find optimizer");
+ }
+
+ auto& model = models_[id];
+ auto& optimizer = optimizers_[optimizerId];
+
+ int ret_val = ml_train_model_set_optimizer(model, optimizer);
+ if (ret_val != 0) {
+ LoggerE("Could not set optimizer for model: %d (%s)", ret_val,
+ ml_strerror(ret_val));
+ return PlatformResult(ErrorCode::ABORT_ERR, ml_strerror(ret_val));
+ }
+
+ return PlatformResult();
+}
+
+// Binds the dataset handle `datasetId` to model `id` via
+// ml_train_model_set_dataset(). Both ids must exist in the registries.
+PlatformResult TrainerManager::ModelSetDataset(int id, int datasetId) {
+ ScopeLogger();
+
+ if (models_.find(id) == models_.end()) {
+ LoggerE("Could not find model with id: %d", id);
+ return PlatformResult(ErrorCode::ABORT_ERR, "Could not find model");
+ }
+
+ if (datasets_.find(datasetId) == datasets_.end()) {
+ // Log the dataset id that was looked up (was `id` — copy/paste bug).
+ LoggerE("Could not find dataset with id: %d", datasetId);
+ return PlatformResult(ErrorCode::ABORT_ERR, "Could not find dataset");
+ }
+
+ auto& model = models_[id];
+ auto& dataset = datasets_[datasetId];
+
+ int ret_val = ml_train_model_set_dataset(model, dataset);
+ if (ret_val != 0) {
+ LoggerE("Could not set dataset for model: %d (%s)", ret_val,
+ ml_strerror(ret_val));
+ return PlatformResult(ErrorCode::ABORT_ERR, ml_strerror(ret_val));
+ }
+
+ return PlatformResult();
+}
+
+// Fetches the textual summary of model `id` at the requested verbosity.
+// The native API allocates the string; we copy it into `summary` and free it.
+PlatformResult TrainerManager::ModelSummarize(int id,
+ ml_train_summary_type_e level,
+ std::string& summary) {
+ ScopeLogger();
+
+ if (models_.find(id) == models_.end()) {
+ LoggerE("Could not find model with id: %d", id);
+ return PlatformResult(ErrorCode::ABORT_ERR, "Could not find model");
+ }
+
+ auto& model = models_[id];
+ char* tmpSummary = NULL;
+
+ int ret_val = ml_train_model_get_summary(model, level, &tmpSummary);
+
+ if (ret_val != 0) {
+ LoggerE("Could not get summary for model: %d (%s)", ret_val,
+ ml_strerror(ret_val));
+ return PlatformResult(ErrorCode::ABORT_ERR, ml_strerror(ret_val));
+ }
+
+ // Copy before freeing the API-owned buffer.
+ summary = tmpSummary;
+ free(tmpSummary);
+
+ return PlatformResult();
+}
+
PlatformResult TrainerManager::CreateLayer(int& id,
ml_train_layer_type_e type) {
ScopeLogger();
return PlatformResult();
}
-PlatformResult TrainerManager::LayerSetProperty(int& id, const std::string& name,
+PlatformResult TrainerManager::LayerSetProperty(int id, const std::string& name,
const std::string& value) {
ScopeLogger("id: %d, name: %s, value: %s", id, name.c_str(), value.c_str());
int ret_val = ml_train_layer_set_property(layer, opt.c_str(), NULL);
if (ret_val != 0) {
- LoggerE("Could not set layer property: %s", ml_strerror(ret_val));
+ LoggerE("Could not set layer property: %d (%s)", ret_val,
+ ml_strerror(ret_val));
return PlatformResult(ErrorCode::ABORT_ERR, ml_strerror(ret_val));
}
return PlatformResult();
int ret_val = ml_train_optimizer_create(&n_optimizer, type);
if (ret_val != 0) {
- LoggerE("Could not create optimizer: %s", ml_strerror(ret_val));
+ LoggerE("Could not create optimizer: %d (%s)", ret_val,
+ ml_strerror(ret_val));
return PlatformResult(ErrorCode::ABORT_ERR, ml_strerror(ret_val));
}
return PlatformResult();
}
-PlatformResult TrainerManager::OptimizerSetProperty(int& id, const std::string& name,
+PlatformResult TrainerManager::OptimizerSetProperty(int id,
+ const std::string& name,
const std::string& value) {
ScopeLogger("id: %d, name: %s, value: %s", id, name.c_str(), value.c_str());
std::string opt = name + "=" + value;
int ret_val = ml_train_optimizer_set_property(optimizer, opt.c_str(), NULL);
if (ret_val != 0) {
- LoggerE("Could not set optimizer property: %s", ml_strerror(ret_val));
+ LoggerE("Could not set optimizer property: %d (%s)", ret_val,
+ ml_strerror(ret_val));
return PlatformResult(ErrorCode::ABORT_ERR, ml_strerror(ret_val));
}
return PlatformResult();
}
if (!train_file.empty()) {
+ auto tmpString = train_file;
+ if (tmpString.substr(0, 7) == "file://") {
+ // remove 'file://' prefix from path before passing to native api
+ tmpString.erase(0, 7);
+ }
+
ret_val = ml_train_dataset_add_file(n_dataset, ML_TRAIN_DATASET_MODE_TRAIN,
- train_file.c_str());
+ tmpString.c_str());
if (ret_val != 0) {
- LoggerE("Could not add train file %s to dataset: %s", train_file.c_str(),
+ LoggerE("Could not add train file %s to dataset: %s", tmpString.c_str(),
ml_strerror(ret_val));
ml_train_dataset_destroy(n_dataset);
return PlatformResult(ErrorCode::ABORT_ERR, ml_strerror(ret_val));
}
if (!valid_file.empty()) {
+ auto tmpString = valid_file;
+ if (tmpString.substr(0, 7) == "file://") {
+ // remove 'file://' prefix from path before passing to native api
+ tmpString.erase(0, 7);
+ }
ret_val = ml_train_dataset_add_file(n_dataset, ML_TRAIN_DATASET_MODE_VALID,
- valid_file.c_str());
+ tmpString.c_str());
if (ret_val != 0) {
LoggerE("Could not add validation file %s to dataset: %s",
- valid_file.c_str(), ml_strerror(ret_val));
+ tmpString.c_str(), ml_strerror(ret_val));
ml_train_dataset_destroy(n_dataset);
return PlatformResult(ErrorCode::ABORT_ERR, ml_strerror(ret_val));
}
}
if (!test_file.empty()) {
+ auto tmpString = test_file;
+ if (tmpString.substr(0, 7) == "file://") {
+ // remove 'file://' prefix from path before passing to native api
+ tmpString.erase(0, 7);
+ }
ret_val = ml_train_dataset_add_file(n_dataset, ML_TRAIN_DATASET_MODE_TEST,
- test_file.c_str());
+ tmpString.c_str());
if (ret_val != 0) {
- LoggerE("Could not add test file %s to dataset: %s", test_file.c_str(),
+ LoggerE("Could not add test file %s to dataset: %s", tmpString.c_str(),
ml_strerror(ret_val));
ml_train_dataset_destroy(n_dataset);
return PlatformResult(ErrorCode::ABORT_ERR, ml_strerror(ret_val));
// MK-TODO Add creating Dataset with generator
-PlatformResult TrainerManager::DatasetSetProperty(int& id, const std::string& name,
+PlatformResult TrainerManager::DatasetSetProperty(int id,
+ const std::string& name,
const std::string& value) {
ScopeLogger("id: %d, name: %s, value: %s", id, name.c_str(), value.c_str());
std::string opt = name + "=" + value;
// ml_train_dataset_set_property() is marked as deprecated
- // temporary set same property for all modes (all data files)
+ // temporary set same property for all modes (all data files) if possible
int ret_val = ml_train_dataset_set_property_for_mode(
dataset, ML_TRAIN_DATASET_MODE_TRAIN, opt.c_str(), NULL);
if (ret_val != 0) {
- LoggerE("Could not set dataset property for train mode: %s",
+ LoggerE("Could not set dataset property for train mode: %d (%s)", ret_val,
ml_strerror(ret_val));
return PlatformResult(ErrorCode::ABORT_ERR, ml_strerror(ret_val));
}
ret_val = ml_train_dataset_set_property_for_mode(
dataset, ML_TRAIN_DATASET_MODE_VALID, opt.c_str(), NULL);
if (ret_val != 0) {
- LoggerE("Could not set dataset property for validation mode: %s",
- ml_strerror(ret_val));
- return PlatformResult(ErrorCode::ABORT_ERR, ml_strerror(ret_val));
+ LoggerE("Could not set dataset property for validation mode: %d (%s)",
+ ret_val, ml_strerror(ret_val));
+ // MK-TODO report error for each file when extracted to separate functions
+ // return PlatformResult(ErrorCode::ABORT_ERR, ml_strerror(ret_val));
}
ret_val = ml_train_dataset_set_property_for_mode(
dataset, ML_TRAIN_DATASET_MODE_TEST, opt.c_str(), NULL);
if (ret_val != 0) {
- LoggerE("Could not set dataset property for test mode: %s",
+ LoggerE("Could not set dataset property for test mode: %d (%s)", ret_val,
ml_strerror(ret_val));
- return PlatformResult(ErrorCode::ABORT_ERR, ml_strerror(ret_val));
+ // MK-TODO report error for each file when extracted to separate functions
+ // return PlatformResult(ErrorCode::ABORT_ERR, ml_strerror(ret_val));
}
return PlatformResult();
PlatformResult CreateModel(int& id);
PlatformResult CreateModel(int& id, const std::string config);
- PlatformResult ModelCompile(int id);
- PlatformResult ModelRun(int id);
+ PlatformResult ModelCompile(int id, const picojson::object& options);
+ PlatformResult ModelRun(int id, const picojson::object& options);
+ PlatformResult ModelAddLayer(int id, int layerId);
+ PlatformResult ModelSetOptimizer(int id, int optimizerId);
+ PlatformResult ModelSetDataset(int id, int datasetId);
+ PlatformResult ModelSummarize(int id, ml_train_summary_type_e level,
+ std::string& summary);
PlatformResult CreateLayer(int& id, ml_train_layer_type_e type);
- PlatformResult LayerSetProperty(int& id, const std::string& name,
+ PlatformResult LayerSetProperty(int id, const std::string& name,
const std::string& value);
PlatformResult CreateOptimizer(int& id, ml_train_optimizer_type_e type);
- PlatformResult OptimizerSetProperty(int& id, const std::string& name, const std::string& value);
+ PlatformResult OptimizerSetProperty(int id, const std::string& name,
+ const std::string& value);
PlatformResult CreateFileDataset(int& id, const std::string train_file,
const std::string valid_file, const std::string test_file);
- PlatformResult DatasetSetProperty(int& id, const std::string& name, const std::string& value);
+ PlatformResult DatasetSetProperty(int id, const std::string& name,
+ const std::string& value);
private:
int next_model_id_ = 0;
{"INT64", ML_TENSOR_TYPE_INT64}, {"UINT64", ML_TENSOR_TYPE_UINT64},
{"UNKNOWN", ML_TENSOR_TYPE_UNKNOWN}};
-// const PlatformEnum<TODO> DatasetTypeEnum{{"DATASET_GENERATOR", TODO},
-// {"DATASET_FILE", TODO},
-// {"DATASET_UNKNOWN",TODO}};
-
const PlatformEnum<ml_train_optimizer_type_e> OptimizerTypeEnum{
{"OPTIMIZER_ADAM", ML_TRAIN_OPTIMIZER_TYPE_ADAM},
{"OPTIMIZER_SGD", ML_TRAIN_OPTIMIZER_TYPE_SGD},
{"LAYER_BACKBONE_NNSTREAMER", ML_TRAIN_LAYER_TYPE_BACKBONE_NNSTREAMER},
{"LAYER_UNKNOWN", ML_TRAIN_LAYER_TYPE_UNKNOWN}};
+// Maps the JS VerbosityLevel enum strings to native summary levels.
+const PlatformEnum<ml_train_summary_type_e> SummaryTypeEnum{
+ {"SUMMARY_MODEL", ML_TRAIN_SUMMARY_MODEL},
+ {"SUMMARY_LAYER", ML_TRAIN_SUMMARY_LAYER},
+ {"SUMMARY_TENSOR", ML_TRAIN_SUMMARY_TENSOR}};
+
} // namespace types
namespace util {
extern const PlatformEnum<ml_nnfw_type_e> NNFWTypeEnum;
extern const PlatformEnum<ml_tensor_type_e> TensorTypeEnum;
-// MK-TODO implement internal enum or remove from API design if not needed
-// extern const PlatformEnum<TODO> DatasetTypeEnum;
extern const PlatformEnum<ml_train_optimizer_type_e> OptimizerTypeEnum;
extern const PlatformEnum<ml_train_layer_type_e> LayerTypeEnum;
+extern const PlatformEnum<ml_train_summary_type_e> SummaryTypeEnum;
} // namespace types