From ff78dd34d8185373a60eaabd13054ac8f0e47195 Mon Sep 17 00:00:00 2001
From: Parichay Kapoor
Date: Thu, 17 Feb 2022 10:57:26 +0900
Subject: [PATCH] [capi/test] Example of batch size set at train

Update the capi test to include an example where the batch size is set at
training time rather than at compile time.

Signed-off-by: Parichay Kapoor
---
 test/tizen_capi/unittest_tizen_capi.cpp | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/test/tizen_capi/unittest_tizen_capi.cpp b/test/tizen_capi/unittest_tizen_capi.cpp
index 4d37bb1..8b2260f 100644
--- a/test/tizen_capi/unittest_tizen_capi.cpp
+++ b/test/tizen_capi/unittest_tizen_capi.cpp
@@ -732,10 +732,10 @@ TEST(nntrainer_capi_nnmodel, train_with_file_01_p) {
   status = ml_train_model_set_dataset(model, dataset);
   EXPECT_EQ(status, ML_ERROR_NONE);
 
-  status = ml_train_model_compile(model, "loss=cross", "batch_size=16", NULL);
+  status = ml_train_model_compile(model, "loss=cross", NULL);
   EXPECT_EQ(status, ML_ERROR_NONE);
 
-  status = ml_train_model_run(model, "epochs=2",
+  status = ml_train_model_run(model, "epochs=2", "batch_size=16",
                               "save_path=capi_tizen_model.bin", NULL);
   EXPECT_EQ(status, ML_ERROR_NONE);
-- 
2.7.4
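
For context, a minimal sketch (not part of the patch) of the C API flow the test exercises once this change lands: batch_size is passed to ml_train_model_run() instead of ml_train_model_compile(). The config path "model.ini", the dataset file "train.dat", and the hyperparameter values are placeholders, and error handling is trimmed for brevity; the header name follows the include used by the capi test.

    /* Sketch of setting batch_size at train time via the ML-Train C API. */
    #include <nntrainer.h>

    int main(void) {
      ml_train_model_h model;
      ml_train_dataset_h dataset;
      int status;

      /* Placeholder ini and dataset paths. */
      status = ml_train_model_construct_with_conf("model.ini", &model);
      status = ml_train_dataset_create_with_file(&dataset, "train.dat", NULL, NULL);
      status = ml_train_model_set_dataset(model, dataset);

      /* batch_size is no longer given at compile time... */
      status = ml_train_model_compile(model, "loss=cross", NULL);

      /* ...it is supplied when training starts. */
      status = ml_train_model_run(model, "epochs=2", "batch_size=16", NULL);

      /* Destroying the model also releases the dataset once it has been set. */
      ml_train_model_destroy(model);
      return status;
    }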