virtual ~IInferenceEngineCommon() {};
/**
- * @brief Set a tensor filter plugin type.
- * @details See #inference_backend_type_e
- * This callback passes a tensor filter plugin type - NNFW or VIVANTE to a tensor filter plugin for NNStreamer.
+ * @brief Set backend private data if needed.
+ * @details This callback passes backend-private data to a given backend.
+ * E.g., the ML Single API backend needs to know which NNStreamer tensor filter plugin type should be used, such as NNFW or VIVANTE.
*
- * @since_tizen 6.0 (Optional)
- * @param[in] type This could be one among plugin types enumerated on inference_backend_type_e.
+ * @since_tizen 6.0
+ * @param[in] data This could be a backend-specific data object.
*/
- virtual int SetPluginType(const int type = 0) { return type; }
+ virtual int SetPrivateData(void *data) = 0;
/**
* @brief Set target devices.
// If a backend is ML Single API of NNStreamer or NNFW then set a tensor filter plugin type.
if (backend_type == INFERENCE_BACKEND_NNFW || backend_type == INFERENCE_BACKEND_MLAPI) {
- int ret = mBackendHandle->SetPluginType(backend_type);
+ int ret = mBackendHandle->SetPrivateData(&backend_type);
if (ret != INFERENCE_ENGINE_ERROR_NONE) {
LOGE("Failed to set a tensor filter plugin.");
return INFERENCE_ENGINE_ERROR_NOT_SUPPORTED;