Added a SetCLTuner interface for CLTuner feature support to the inference
engine interface framework. Because SetCLTuner is declared as a pure
virtual function, each backend implementation is required to implement it.
Change-Id: Ie20991e6562864bca285383443880367ea00b522
Signed-off-by: Inki Dae <inki.dae@samsung.com>
return INFERENCE_ENGINE_ERROR_NONE;
}
+ // Stub implementation of the SetCLTuner interface required by the
+ // inference engine framework. The given CLTuner configuration is
+ // currently ignored and success is always reported, because the NNFW
+ // (ONERT) tensor filter backend of MLAPI does not yet support the
+ // CLTuner feature — see the TODO below.
+ //
+ // @param cltuner CLTuner configuration requested by the caller
+ //                (unused until backend support lands).
+ // @return INFERENCE_ENGINE_ERROR_NONE unconditionally.
+ int InferenceMLAPI::SetCLTuner(const inference_engine_cltuner *cltuner)
+ {
+ LOGI("ENTER");
+
+ // TODO. let's wait until CLTuner feature is ready for NNFW tensor
+ // filter which is a ONERT runtime backend of MLAPI.
+
+ LOGI("LEAVE");
+
+ return INFERENCE_ENGINE_ERROR_NONE;
+ }
+
int InferenceMLAPI::Load(std::vector<std::string> model_paths,
inference_model_format_e model_format)
{
int SetTargetDevices(int types) override;
+ int SetCLTuner(const inference_engine_cltuner *cltuner) final;
+
int Load(std::vector<std::string> model_paths,
inference_model_format_e model_format) override;