return;
}
- auto ret = ml_pipeline_destroy(pipeline_);
- if (ML_ERROR_NONE != ret) {
- LoggerE("ml_pipeline_destroy() failed: [%d] (%s)", ret, get_error_message(ret));
- }
- LoggerD("ml_pipeline_destroy() succeeded");
+ Dispose();
}
void Pipeline::PipelineStateChangeListener(ml_pipeline_state_e state, void* user_data) {
PlatformResult Pipeline::Dispose() {
ScopeLogger("id_: [%d]", id_);
+ /*
+ * TODO in future commits:
+ *
+ * Release all nodes belonging to this pipeline that are
+ * cached in this object's containers (switches_, node_infos_, etc.).
+ *
+ * They have to be released HERE (i.e. BEFORE releasing pipeline_).
+ * If they are released after pipeline_, the app may crash.
+ */
+
auto ret = ml_pipeline_destroy(pipeline_);
if (ML_ERROR_NONE != ret) {
LoggerE("ml_pipeline_destroy() failed: [%d] (%s)", ret, get_error_message(ret));
private:
Pipeline(int id, const std::string& state_change_listener_name, common::Instance* instance_ptr);
+ /* ######### VERY IMPORTANT #########
+ * All nnstreamer handles to nodes belonging to this Pipeline
+ * object have to be released in Dispose(), before calling
+ * ml_pipeline_destroy(pipeline_) (otherwise, the app may crash).
+ */
const int id_;
ml_pipeline_h pipeline_;
const std::string state_change_listener_name_;
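The containers that the comment above refers to are not visible in this hunk; presumably they are (or will be) additional private members declared next to pipeline_, roughly along these lines. The member names, key type, and value types below are hypothetical and assume <string> and <unordered_map> are already included:

// Hypothetical cached-node containers; the real member types may differ.
std::unordered_map<std::string, ml_pipeline_switch_h> switches_;
std::unordered_map<std::string, ml_pipeline_element_h> node_infos_;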