return status;
}
-NNStreamerLayer::~NNStreamerLayer() {
- try {
- finalizeError(ML_ERROR_NONE);
- } catch (std::exception &e) {
- std::cerr << "failed in destructor, reason: " << e.what();
- }
-}
-
-void NNStreamerLayer::finalizeError(int status) {
- if (status == ML_ERROR_NONE)
- return;
+NNStreamerLayer::~NNStreamerLayer() { release(); }
+void NNStreamerLayer::release() noexcept {
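+  /* free the nnstreamer handles owned by this layer; pointers are reset so a repeated call is a no-op */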
  if (in_res) {
    ml_tensors_info_destroy(in_res);
    in_res = nullptr;
  }
  if (single) {
    ml_single_close(single);
    single = nullptr;
  }
+}
+
+void NNStreamerLayer::finalizeError(int status) {
+ if (status == ML_ERROR_NONE)
+ return;
+
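+  /* on failure, release any nnstreamer handles created so far before throwing */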
+ release();
if (status != ML_ERROR_NONE)
throw std::invalid_argument(
*/
void finalizeError(int status);
+ /**
+ * @brief release the nnstreamer resources held by the layer
+ */
+ void release() noexcept;
+
/**
* @brief convert nnstreamer's tensor_info to nntrainer's tensor_dim
* @param[in] out_res nnstreamer's tensor_info
ML_ERROR_INVALID_PARAMETER);
EXPECT_EQ(ml_train_model_get_output_tensors_info(handle, &output_info),
ML_ERROR_INVALID_PARAMETER);
+
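+  /* destroy the model handle so this negative test does not leak it */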
+ status = ml_train_model_destroy(handle);
+ EXPECT_EQ(status, ML_ERROR_NONE);
}
TEST(nntrainer_capi_nnmodel, get_input_output_dimension_04_n) {
status = ml_train_model_destroy(handle);
EXPECT_EQ(status, ML_ERROR_NONE);
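+  /* reset the stale handle; the getters below must reject a NULL handle */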
+ handle = NULL;
EXPECT_EQ(ml_train_model_get_input_tensors_info(handle, &input_info),
ML_ERROR_INVALID_PARAMETER);