[VF2] Add inference_engine_backend_path.ini 47/316947/1 accepted/tizen/unified/20241224.130059 accepted/tizen/unified/x/20241224.221627 accepted/tizen/unified/x/asan/20250113.001937
author    Tae-Young Chung <ty83.chung@samsung.com>
          Fri, 20 Dec 2024 05:36:04 +0000 (14:36 +0900)
committer Tae-Young Chung <ty83.chung@samsung.com>
          Fri, 20 Dec 2024 05:36:38 +0000 (14:36 +0900)
[Version] 0.4.4
[Issue Type] Update

Change-Id: Ia05eff7f8dcb42184ec87f2205f93b1bd0a206f2
Signed-off-by: Tae-Young Chung <ty83.chung@samsung.com>
media-config-target-vf2/hal/etc/inference/inference_engine_backend_path.ini [new file with mode: 0644]
packaging/media-config.spec

diff --git a/media-config-target-vf2/hal/etc/inference/inference_engine_backend_path.ini b/media-config-target-vf2/hal/etc/inference/inference_engine_backend_path.ini
new file mode 100644 (file)
index 0000000..97b4f0e
--- /dev/null
+++ b/media-config-target-vf2/hal/etc/inference/inference_engine_backend_path.ini
@@ -0,0 +1,24 @@
+; This ini file describes:
+; 1. which NPU backend should be used for the user-given INFERENCE_ENGINE_CUSTOM type. For NPU devices, the ML Single API is used.
+; 2. which API framework should be used for a given backend engine such as TFLITE, ARMNN, and OPENCV.
+;    By default, the Mediavision internal API is used. To use the ML Single API instead, set 'API framework' to 'MLAPI' in the corresponding backend section.
+
+; Available NPU backends : VIVANTE, TRIV2
+; Available backend engine sections : TFLITE, ARMNN, ONE, OPENCV
+; Available external inference engine API framework : MLAPI
+
+[NPU backend]
+; Writing rule = [VIVANTE|TRIV2]
+type = VIVANTE
+
+[TFLITE]
+; Writing rule = [MLAPI]
+API framework = MLAPI
+
+[ARMNN]
+; Writing rule = [MLAPI]
+API framework =
+
+[OPENCV]
+; Writing rule = [MLAPI]
+API framework =
diff --git a/packaging/media-config.spec b/packaging/media-config.spec
index cc6f6d2796bd53f3e491c99af275b610cd4ab522..7e45d296a2cacc5bbcc6260a2aee0bba0f494c98 100644 (file)
--- a/packaging/media-config.spec
+++ b/packaging/media-config.spec
@@ -2,7 +2,7 @@
 
 Name:       media-config
 Summary:    Multimedia Framework system configuration package
-Version:    0.4.3
+Version:    0.4.4
 Release:    0
 Group:      Multimedia/Configuration
 License:    LGPL-2.1 and Apache-2.0
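
For context only (not part of this commit): the ini file added above is a plain key/value file, so any ini parser can consume it. The sketch below is a minimal, hypothetical example using the iniparser C library; the install path under /hal/etc/inference/ and the printed labels are assumptions for illustration, not taken from the commit.

/* Hypothetical sketch: read the backend configuration added by this commit.
 * Assumes the iniparser library; the install path below is an assumption. */
#include <stdio.h>
#include <iniparser.h>

int main(void)
{
    const char *path = "/hal/etc/inference/inference_engine_backend_path.ini";
    dictionary *dict = iniparser_load(path);
    if (!dict) {
        fprintf(stderr, "failed to load %s\n", path);
        return 1;
    }

    /* iniparser addresses keys as "section:key" (case-insensitive);
     * the third argument is the default used when the key is empty or missing. */
    const char *npu = iniparser_getstring(dict, "NPU backend:type", "VIVANTE");
    const char *tflite_api = iniparser_getstring(dict, "TFLITE:API framework", "");

    printf("NPU backend type     : %s\n", npu);
    printf("TFLITE API framework : %s\n", tflite_api);

    iniparser_freedict(dict);
    return 0;
}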