if cxx.has_type('kTfLiteComplex64', prefix : '#include <tensorflow/contrib/lite/model.h>')
tflite_compile_args += '-DTFLITE_COMPLEX64=1'
endif
+ tflite_compile_args += '-DTFLITE_VERSION=1'
tflite_extra_dep = declare_dependency(
compile_args : tflite_compile_args,
)
endif
+if tflite2_support_is_available
+ filter_sub_tflite2_sources = ['tensor_filter_tensorflow_lite.cc']
+
+ nnstreamer_filter_tflite2_sources = []
+ foreach s : filter_sub_tflite2_sources
+ nnstreamer_filter_tflite2_sources += join_paths(meson.current_source_dir(), s)
+ endforeach
+
+ nnstreamer_filter_tflite2_deps = tflite_support_deps + [thread_dep, libdl_dep, glib_dep, gst_dep, nnstreamer_dep]
+
+ tflite2_compile_args = []
+
+ if cxx.has_type('kTfLiteInt8', prefix : '#include <tensorflow2/contrib/lite/model.h>')
+ tflite2_compile_args += '-DTFLITE_INT8=1'
+ endif
+ if cxx.has_type('kTfLiteInt16', prefix : '#include <tensorflow2/contrib/lite/model.h>')
+ tflite2_compile_args += '-DTFLITE_INT16=1'
+ endif
+ if cxx.has_type('kTfLiteFloat16', prefix : '#include <tensorflow2/contrib/lite/model.h>')
+ tflite2_compile_args += '-DTFLITE_FLOAT16=1'
+ endif
+ if cxx.has_type('kTfLiteComplex64', prefix : '#include <tensorflow2/contrib/lite/model.h>')
+ tflite2_compile_args += '-DTFLITE_COMPLEX64=1'
+ endif
+ tflite2_compile_args += '-DTFLITE_VERSION=2'
+
+ tflite2_extra_dep = declare_dependency(
+ compile_args : tflite2_compile_args,
+ )
+
+ nnstreamer_filter_tflite2_deps += tflite2_extra_dep
+
+ shared_library('nnstreamer_filter_tensorflow2-lite',
+ nnstreamer_filter_tflite2_sources,
+ dependencies: nnstreamer_filter_tflite2_deps,
+ install: true,
+ install_dir: filter_subplugin_install_dir
+ )
+
+ static_library('nnstreamer_filter_tensorflow2-lite',
+ nnstreamer_filter_tflite2_sources,
+ dependencies: nnstreamer_filter_tflite2_deps,
+ install: true,
+ install_dir: nnstreamer_libdir
+ )
+endif
+
if pytorch_support_is_available
filter_sub_torch_sources = ['tensor_filter_pytorch.cc']
* @author HyoungJoo Ahn <hello.ahn@samsung.com>
* @bug No known bugs except for NYI items
*
- * This is the per-NN-framework plugin (tensorflow-lite) for tensor_filter.
+ * This is the per-NN-framework plugin (tensorflow-lite, tensorflow2-lite)
+ * for tensor_filter. The meson build system generates two .so files
+ * (TF-Lite and TF2-Lite) from this single source file.
*/
#include <unistd.h>
return -ENOENT;
}
+#if TFLITE_VERSION == 1
static gchar filter_subplugin_tensorflow_lite[] = "tensorflow-lite";
+#else
+static gchar filter_subplugin_tensorflow_lite[] = "tensorflow2-lite";
+#endif
static GstTensorFilterFramework NNS_support_tensorflow_lite = {
.version = GST_TENSOR_FILTER_FRAMEWORK_V0,
'target': 'tensorflow-lite',
'project_args': { 'ENABLE_TENSORFLOW_LITE': 1 }
},
+ 'tflite2-support': {
+ 'target': 'tensorflow2-lite',
+ 'project_args': { 'ENABLE_TENSORFLOW2_LITE': 1 }
+ },
'pytorch-support': {
'target': 'pytorch',
'project_args': { 'ENABLE_PYTORCH': 1 }
option('audio-support', type: 'feature', value: 'enabled')
option('tf-support', type: 'feature', value: 'auto')
option('tflite-support', type: 'feature', value: 'auto')
+option('tflite2-support', type: 'feature', value: 'auto')
option('pytorch-support', type: 'feature', value: 'auto')
option('caffe2-support', type: 'feature', value: 'auto')
option('python2-support', type: 'feature', value: 'auto')
option('enable-vivante', type: 'boolean', value: false)
option('enable-tizen-feature-check', type: 'boolean', value: true)
option('enable-tizen-privilege-check', type: 'boolean', value: true)
-option('framework-priority-tflite', type: 'string', value: 'tensorflow-lite,nnfw,armnn,edgetpu', description: 'A comma separated prioritized list of neural network frameworks to open a .tflite file')
+option('framework-priority-tflite', type: 'string', value: 'tensorflow-lite,tensorflow2-lite,nnfw,armnn,edgetpu', description: 'A comma separated prioritized list of neural network frameworks to open a .tflite file')
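+# With the default above, tensorflow-lite is tried before tensorflow2-lite, nnfw, armnn,
+# and edgetpu when a .tflite model has to be matched to a framework (e.g., framework=auto).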
%define nnstexampledir /usr/lib/nnstreamer/bin
%define tensorflow_support 0
%define tensorflow_lite_support 1
+%define tensorflow2_lite_support 1
%define armnn_support 0
%define vivante_support 0
%define flatbuf_support 1
# for tensorflow-lite
BuildRequires: tensorflow-lite-devel
%endif
+%if 0%{?tensorflow2_lite_support}
+# for tensorflow2-lite
+BuildRequires: tensorflow2-lite-devel
+%endif
# custom_example_opencv filter requires opencv-devel
BuildRequires: opencv-devel
# For './testAll.sh' time limit.
NNStreamer's tensor_fliter subplugin of TensorFlow Lite.
%endif
+# for tensorflow2-lite
+%if 0%{?tensorflow2_lite_support}
+%package tensorflow2-lite
+Summary: NNStreamer TensorFlow2 Lite Support
+Requires: nnstreamer = %{version}-%{release}
+# tensorflow2-lite provides a static (.a) library that is embedded into the subplugin, so this package has no runtime dependency on tflite.
+%description tensorflow2-lite
+NNStreamer's tensor_filter subplugin of TensorFlow2 Lite.
+%endif
+
%if 0%{?python_support}
%package python2
Summary: NNStreamer Python Custom Filter Support
%else
%define enable_tf -Dtf-support=disabled
%endif
+
# Support tensorflow-lite
%if 0%{?tensorflow_lite_support}
%define enable_tf_lite -Dtflite-support=enabled
%define enable_tf_lite -Dtflite-support=disabled
%endif
+# Support tensorflow2-lite
+%if 0%{?tensorflow2_lite_support}
+%define enable_tf2_lite -Dtflite2-support=enabled
+%else
+%define enable_tf2_lite -Dtflite2-support=disabled
+%endif
+
# Support pytorch
%if 0%{?pytorch_support}
%define enable_pytorch -Dpytorch-support=enabled
meson --buildtype=plain --prefix=%{_prefix} --sysconfdir=%{_sysconfdir} --libdir=%{_libdir} \
--bindir=%{nnstexampledir} --includedir=%{_includedir} -Dinstall-example=true \
%{enable_api} %{enable_tizen} %{element_restriction} -Denable-env-var=false -Denable-symbolic-link=false \
- %{enable_tf_lite} %{enable_tf} %{enable_pytorch} %{enable_caffe2} %{enable_python} \
+ %{enable_tf_lite} %{enable_tf2_lite} %{enable_tf} %{enable_pytorch} %{enable_caffe2} %{enable_python} \
%{enable_nnfw_runtime} %{enable_mvncsdk2} %{enable_openvino} %{enable_armnn} %{enable_edgetpu} %{enable_vivante} %{enable_flatbuf} \
%{enable_tizen_privilege_check} %{enable_tizen_feature_check} %{enable_tizen_sensor} %{enable_test} %{enable_test_coverage} %{install_test} \
build
%{_prefix}/lib/nnstreamer/filters/libnnstreamer_filter_tensorflow-lite.so
%endif
+# for tensorflow2-lite
+%if 0%{?tensorflow2_lite_support}
+%files tensorflow2-lite
+%manifest nnstreamer.manifest
+%defattr(-,root,root,-)
+%{_prefix}/lib/nnstreamer/filters/libnnstreamer_filter_tensorflow2-lite.so
+%endif
+
%if 0%{?python_support}
%files python2
%manifest nnstreamer.manifest
install_subdir('nnstreamer_decoder_image_labeling', install_dir: unittest_tests_install_dir)
install_subdir('nnstreamer_filter_reload', install_dir: unittest_tests_install_dir)
endif
+ if tflite2_support_is_available
+ install_subdir('nnstreamer_filter_tensorflow2_lite', install_dir: unittest_tests_install_dir)
+ endif
if have_python2 or have_python3
install_subdir('nnstreamer_filter_python', install_dir: unittest_tests_install_dir)
endif
--- /dev/null
+#!/usr/bin/env python
+
+##
+# SPDX-License-Identifier: LGPL-2.1-only
+#
+# Copyright (C) 2018 Samsung Electronics
+#
+# @file checkLabel.py
+# @brief Check the result label of a tensorflow2-lite model
+# @author HyoungJoo Ahn <hello.ahn@samsung.com>
+
+import sys
+import os
+sys.path.append(os.path.dirname(os.path.abspath(os.path.dirname(__file__))))
+from gen24bBMP import convert_to_bytes
+
+
+def readbyte(filename):
+    with open(filename, 'rb') as f:
+        return f.read()
+
+
+def readlabel(filename):
+    with open(filename, 'r') as f:
+        return f.readlines()
+
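+# Main flow: read the raw score tensor dumped by the pipeline (argv[1]), take the
+# index of its maximum element, map that index into the label file (argv[2]), and
+# exit 0 only when the resulting label matches the expected answer (argv[3]).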
+onehot = readbyte(sys.argv[1])
+onehot = [convert_to_bytes(x) for x in onehot]
+idx = onehot.index(max(onehot))
+
+label_list = readlabel(sys.argv[2])
+label = label_list[idx].strip()
+
+answer = sys.argv[3].strip()
+exit(label != answer)
--- /dev/null
+#!/usr/bin/env bash
+##
+## SPDX-License-Identifier: LGPL-2.1-only
+##
+## @file runTest.sh
+## @author MyungJoo Ham <myungjoo.ham@gmail.com>
+## @date Nov 01 2018
+## @brief SSAT Test Cases for NNStreamer
+##
+if [[ "$SSATAPILOADED" != "1" ]]; then
+ SILENT=0
+ INDEPENDENT=1
+ search="ssat-api.sh"
+ source $search
+ printf "${Blue}Independent Mode${NC}
+"
+fi
+
+# This is compatible with SSAT (https://github.com/myungjoo/SSAT)
+testInit $1
+
+# NNStreamer and plugins path for test
+PATH_TO_PLUGIN="../../build"
+
+if [[ -d $PATH_TO_PLUGIN ]]; then
+ ini_path="${PATH_TO_PLUGIN}/ext/nnstreamer/tensor_filter"
+ if [[ -d ${ini_path} ]]; then
+ check=$(ls ${ini_path} | grep tensorflow2-lite.so)
+ if [[ ! $check ]]; then
+ echo "Cannot find tensorflow2-lite shared lib"
+ report
+ exit
+ fi
+ else
+ echo "Cannot find ${ini_path}"
+ fi
+else
+ ini_file="/etc/nnstreamer.ini"
+ if [[ -f ${ini_file} ]]; then
+ path=$(grep "^filters" ${ini_file})
+ key=${path%=*}
+ value=${path##*=}
+
+ if [[ $key != "filters" ]]; then
+ echo "String Error"
+ report
+ exit
+ fi
+
+ if [[ -d ${value} ]]; then
+ check=$(ls ${value} | grep tensorflow2-lite.so)
+ if [[ ! $check ]]; then
+ echo "Cannot find tensorflow2-lite shared lib"
+ report
+ exit
+ fi
+ else
+ echo "Cannot file ${value}"
+ report
+ exit
+ fi
+ else
+ echo "Cannot identify nnstreamer.ini"
+ report
+ exit
+ fi
+fi
+
+PATH_TO_MODEL="../test_models/models/mobilenet_v1_1.0_224_quant.tflite"
+PATH_TO_LABEL="../test_models/labels/labels.txt"
+PATH_TO_IMAGE="../test_models/data/orange.png"
+
+gstTest "--gst-plugin-path=${PATH_TO_PLUGIN} filesrc location=${PATH_TO_IMAGE} ! pngdec ! videoscale ! imagefreeze ! videoconvert ! video/x-raw,format=RGB,framerate=0/1 ! tensor_converter ! tensor_filter framework=tensorflow2-lite model=${PATH_TO_MODEL} ! filesink location=tensorfilter.out.log" 1 0 0 $PERFORMANCE
+python checkLabel.py tensorfilter.out.log ${PATH_TO_LABEL} orange
+testResult $? 1 "Golden test comparison" 0 1
+
+# Fail test for invalid input properties
+gstTest "--gst-plugin-path=${PATH_TO_PLUGIN} filesrc location=${PATH_TO_IMAGE} ! pngdec ! videoscale ! imagefreeze ! videoconvert ! video/x-raw,format=RGB,framerate=0/1 ! tensor_converter ! tensor_filter framework=tensorflow2-lite model=${PATH_TO_MODEL} input=7:1 inputtype=float32 ! filesink location=tensorfilter.out.log" 2F_n 0 1 $PERFORMANCE
+
+# Fail test for invalid output properties
+gstTest "--gst-plugin-path=${PATH_TO_PLUGIN} filesrc location=${PATH_TO_IMAGE} ! pngdec ! videoscale ! imagefreeze ! videoconvert ! video/x-raw,format=RGB,framerate=0/1 ! tensor_converter ! tensor_filter framework=tensorflow2-lite model=${PATH_TO_MODEL} output=1:7 outputtype=int8 ! filesink location=tensorfilter.out.log" 3F_n 0 1 $PERFORMANCE
+
+PATH_TO_MULTI_TENSOR_OUTPUT_MODEL="../test_models/models/multi_person_mobilenet_v1_075_float.tflite"
+
+# Simple tests for multi-tensor output model
+# This should emit an error because of the invalid width and height
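+# (the passing case below uses width=257,height=353, which appears to match the
+#  model's expected input size, so the swapped dimensions here are rejected)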
+gstTest "--gst-plugin-path=${PATH_TO_PLUGIN} videotestsrc num_buffers=4 ! videoconvert ! videoscale ! video/x-raw,format=RGB,width=353,height=257 ! tensor_converter ! tensor_transform mode=arithmetic option=typecast:float32,add:-127.5,div:127.5 ! tensor_filter framework=tensorflow2-lite model=${PATH_TO_MULTI_TENSOR_OUTPUT_MODEL} ! fakesink" 4_n 0 1 $PERFORMANCE
+
+# This won't fail, but the result is not particularly meaningful
+gstTest "--gst-plugin-path=${PATH_TO_PLUGIN} videotestsrc num_buffers=4 ! videoconvert ! videoscale ! video/x-raw,format=RGB,width=257,height=353 ! tensor_converter ! tensor_transform mode=arithmetic option=typecast:float32,add:-127.5,div:127.5 ! tensor_filter framework=tensorflow2-lite model=${PATH_TO_MULTI_TENSOR_OUTPUT_MODEL} ! fakesink" 5 0 0 $PERFORMANCE
+
+# Test the backend setting done with tensorflow2-lite
+# This also performs tests for generic backend configuration parsing
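+# Accelerator strings below follow "<true|false>[:<comma separated accelerators>]", as
+# exercised by the cases further down: a "!" prefix excludes an entry, unknown names are
+# ignored, "default" resolves to cpu, and "auto" or an empty list falls back to the
+# auto-detected CPU variant (cpu.neon / cpu.simd / cpu, see auto_accl below).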
+function run_pipeline() {
+ gst-launch-1.0 --gst-plugin-path=${PATH_TO_PLUGIN} filesrc location=${PATH_TO_IMAGE} ! pngdec ! videoscale ! imagefreeze ! videoconvert ! video/x-raw,format=RGB,framerate=0/1 ! tensor_converter ! tensor_filter framework=tensorflow2-lite model=${PATH_TO_MODEL} accelerator=$1 ! filesink location=tensorfilter.out.log 2>info
+}
+
+arch=$(uname -m)
+if [ "$arch" = "aarch64" ] || [ "$arch" = "armv7l" ]; then
+ auto_accl="cpu.neon"
+elif [ "$arch" = "x86_64" ]; then
+ auto_accl="cpu.simd"
+else
+ auto_accl="cpu"
+fi
+
+# Property reading test for nnapi
+run_pipeline true:cpu,npu,gpu
+cat info | grep "nnapi = 1, accl = cpu$"
+testResult $? 2-1 "NNAPI activation test" 0 1
+
+# Property reading test for nnapi
+run_pipeline true:!cpu
+cat info | grep "nnapi = 1, accl = ${auto_accl}$"
+testResult $? 2-2 "NNAPI activation test" 0 1
+
+# Property reading test for nnapi
+run_pipeline true:!npu,gpu
+cat info | grep "nnapi = 1, accl = gpu$"
+testResult $? 2-3 "NNAPI activation test" 0 1
+
+# Property reading test for nnapi
+run_pipeline true:!npu,gpu,abcd
+cat info | grep "nnapi = 1, accl = gpu$"
+testResult $? 2-4 "NNAPI activation test" 0 1
+
+# Property reading test for nnapi
+run_pipeline true:!npu,!abcd,gpu
+cat info | grep "nnapi = 1, accl = gpu$"
+testResult $? 2-5 "NNAPI activation test" 0 1
+
+# Property reading test for nnapi
+run_pipeline true:auto
+cat info | grep "nnapi = 1, accl = ${auto_accl}$"
+testResult $? 2-6 "NNAPI activation test" 0 1
+
+# Property reading test for nnapi
+run_pipeline true:default,gpu
+cat info | grep "nnapi = 1, accl = cpu$"
+testResult $? 2-7 "NNAPI activation test" 0 1
+
+# Property reading test for nnapi
+run_pipeline true:!cpu,default
+cat info | grep "nnapi = 1, accl = cpu$"
+testResult $? 2-8 "NNAPI activation test" 0 1
+
+# Property reading test for nnapi
+run_pipeline true:!default
+cat info | grep "nnapi = 1, accl = ${auto_accl}$"
+testResult $? 2-9 "NNAPI activation test" 0 1
+
+# Property reading test for nnapi
+run_pipeline true:npu.srcn
+cat info | grep "nnapi = 1, accl = npu$"
+testResult $? 2-10 "NNAPI activation test" 0 1
+
+# Property reading test for nnapi
+run_pipeline false:abcd
+cat info | grep "nnapi = 0, accl = none$"
+testResult $? 2-11 "NNAPI activation test" 0 1
+
+# Property reading test for nnapi
+run_pipeline false
+cat info | grep "nnapi = 0, accl = none$"
+testResult $? 2-12 "NNAPI activation test" 0 1
+
+# Property reading test for nnapi
+run_pipeline true:
+cat info | grep "nnapi = 1, accl = ${auto_accl}$"
+testResult $? 2-13 "NNAPI activation test" 0 1
+
+# Property reading test for nnapi
+run_pipeline true
+cat info | grep "nnapi = 1, accl = ${auto_accl}$"
+testResult $? 2-14 "NNAPI activation test" 0 1
+
+# Property reading test for nnapi
+run_pipeline auto
+cat info | grep "nnapi = 0, accl = none$"
+testResult $? 2-15 "NNAPI activation test" 0 1
+
+# Property reading test for nnapi
+run_pipeline true:!npu,abcd,gpu
+cat info | grep "nnapi = 1, accl = gpu$"
+testResult $? 2-16 "NNAPI activation test" 0 1
+
+# Property reading test for nnapi
+run_pipeline true:${auto_accl},cpu
+cat info | grep "nnapi = 1, accl = ${auto_accl}$"
+testResult $? 2-17 "NNAPI activation test" 0 1
+
+# Cleanup
+rm info
+
+report