export NNSTREAMER_FILTERS=${NNSTREAMER_BUILD_ROOT_PATH}/ext/nnstreamer/tensor_filter
export NNSTREAMER_DECODERS=${NNSTREAMER_BUILD_ROOT_PATH}/ext/nnstreamer/tensor_decoder
export NNSTREAMER_CONVERTERS=${NNSTREAMER_BUILD_ROOT_PATH}/ext/nnstreamer/tensor_converter
+export NNSTREAMER_TRAINERS=${NNSTREAMER_BUILD_ROOT_PATH}/ext/nnstreamer/tensor_trainer
export PYTHONIOENCODING=utf-8
ifeq ($(DEB_BUILD_ARCH_CPU), arm)
#define NNSTREAMER_PREFIX_FILTER "libnnstreamer_filter_"
#define NNSTREAMER_PREFIX_CUSTOMFILTERS ""
#define NNSTREAMER_PREFIX_CONVERTER "libnnstreamer_converter_"
+#define NNSTREAMER_PREFIX_TRAINER "libnnstreamer_trainer_"
/* Custom filter does not have prefix */
/* Env-var names */
[NNSCONF_PATH_FILTERS] = "NNSTREAMER_FILTERS",
[NNSCONF_PATH_DECODERS] = "NNSTREAMER_DECODERS",
[NNSCONF_PATH_CUSTOM_FILTERS] = "NNSTREAMER_CUSTOMFILTERS",
- [NNSCONF_PATH_CONVERTERS] = "NNSTREAMER_CONVERTERS"
+ [NNSCONF_PATH_CONVERTERS] = "NNSTREAMER_CONVERTERS",
+ [NNSCONF_PATH_TRAINERS] = "NNSTREAMER_TRAINERS"
};
static const gchar *NNSTREAMER_PATH[NNSCONF_PATH_END] = {
[NNSCONF_PATH_DECODERS] = "/usr/lib/nnstreamer/decoders/",
[NNSCONF_PATH_CUSTOM_FILTERS] = "/usr/lib/nnstreamer/customfilters/",
[NNSCONF_PATH_CONVERTERS] = "/usr/lib/nnstreamer/converters/",
+ [NNSCONF_PATH_TRAINERS] = "/usr/lib/nnstreamer/trainers/"
};
static const gchar *subplugin_prefixes[] = {
[NNSCONF_PATH_CUSTOM_FILTERS] = NNSTREAMER_PREFIX_CUSTOMFILTERS,
[NNSCONF_PATH_EASY_CUSTOM_FILTERS] = NNSTREAMER_PREFIX_CUSTOMFILTERS, /**< Same as Custom Filters */
[NNSCONF_PATH_CONVERTERS] = NNSTREAMER_PREFIX_CONVERTER,
+ [NNSCONF_PATH_TRAINERS] = NNSTREAMER_PREFIX_TRAINER,
[NNSCONF_PATH_END] = NULL
};
g_key_file_get_string (key_file, "filter", "customfilters", NULL);
cdata->conf[NNSCONF_PATH_CONVERTERS].path[src] =
g_key_file_get_string (key_file, "converter", "converters", NULL);
+ cdata->conf[NNSCONF_PATH_TRAINERS].path[src] =
+ g_key_file_get_string (key_file, "trainer", "trainers", NULL);
}
/** @brief Public function defined in the header */
nnsconf_subplugin_dump (gchar * str, gulong size)
{
static const nnsconf_type_path dump_list_type[] = {
- NNSCONF_PATH_FILTERS, NNSCONF_PATH_DECODERS, NNSCONF_PATH_CONVERTERS
+ NNSCONF_PATH_FILTERS, NNSCONF_PATH_DECODERS, NNSCONF_PATH_CONVERTERS,
+ NNSCONF_PATH_TRAINERS
};
static const char *dump_list_str[] = {
- "Filter", "Decoder", "Conterver"
+ "Filter", "Decoder", "Converter", "Trainer"
NNSCONF_PATH_CUSTOM_FILTERS,
NNSCONF_PATH_EASY_CUSTOM_FILTERS,
NNSCONF_PATH_CONVERTERS,
+ NNSCONF_PATH_TRAINERS,
NNSCONF_PATH_END,
} nnsconf_type_path;
[NNS_SUBPLUGIN_DECODER] = NNS_SEARCH_FILENAME,
[NNS_EASY_CUSTOM_FILTER] = NNS_SEARCH_FILENAME,
[NNS_SUBPLUGIN_CONVERTER] = NNS_SEARCH_GETALL,
+ [NNS_SUBPLUGIN_TRAINER] = NNS_SEARCH_FILENAME,
[NNS_CUSTOM_CONVERTER] = NNS_SEARCH_NO_OP,
[NNS_CUSTOM_DECODER] = NNS_SEARCH_NO_OP,
[NNS_IF_CUSTOM] = NNS_SEARCH_NO_OP,
case NNS_SUBPLUGIN_DECODER:
case NNS_EASY_CUSTOM_FILTER:
case NNS_SUBPLUGIN_CONVERTER:
+ case NNS_SUBPLUGIN_TRAINER:
case NNS_CUSTOM_DECODER:
case NNS_IF_CUSTOM:
case NNS_CUSTOM_CONVERTER:
NNS_SUBPLUGIN_DECODER = NNSCONF_PATH_DECODERS,
NNS_EASY_CUSTOM_FILTER = NNSCONF_PATH_EASY_CUSTOM_FILTERS,
NNS_SUBPLUGIN_CONVERTER = NNSCONF_PATH_CONVERTERS,
+ NNS_SUBPLUGIN_TRAINER = NNSCONF_PATH_TRAINERS,
NNS_CUSTOM_CONVERTER,
NNS_CUSTOM_DECODER,
NNS_IF_CUSTOM,
path_nns_plugin_filters = join_paths(path_nns_plugin_prefix, 'tensor_filter')
path_nns_plugin_decoders = join_paths(path_nns_plugin_prefix, 'tensor_decoder')
path_nns_plugin_converters = join_paths(path_nns_plugin_prefix, 'tensor_converter')
+ path_nns_plugin_trainers = join_paths(path_nns_plugin_prefix, 'tensor_trainer')
testenv = environment()
testenv.set('GST_PLUGIN_PATH', path_gst_plugin + ':' + path_ext_plugin)
testenv.set('NNSTREAMER_FILTERS', path_nns_plugin_filters)
testenv.set('NNSTREAMER_DECODERS', path_nns_plugin_decoders)
testenv.set('NNSTREAMER_CONVERTERS', path_nns_plugin_converters)
+ testenv.set('NNSTREAMER_TRAINERS', path_nns_plugin_trainers)
testenv.set('NNSTREAMER_SOURCE_ROOT_PATH', meson.source_root())
testenv.set('NNSTREAMER_BUILD_ROOT_PATH', meson.build_root())
[converter]
converters=@SUBPLUGIN_INSTALL_PREFIX@/converters/
+[trainer]
+trainers=@SUBPLUGIN_INSTALL_PREFIX@/trainers/
+
# Set 1 or True if you want to use GPU with pytorch for computation.
[pytorch]
enable_use_gpu=@TORCH_USE_GPU@
export NNSTREAMER_FILTERS=${NNSTREAMER_BUILD_ROOT_PATH}/ext/nnstreamer/tensor_filter
export NNSTREAMER_DECODERS=${NNSTREAMER_BUILD_ROOT_PATH}/ext/nnstreamer/tensor_decoder
export NNSTREAMER_CONVERTERS=${NNSTREAMER_BUILD_ROOT_PATH}/ext/nnstreamer/tensor_converter
+export NNSTREAMER_TRAINERS=${NNSTREAMER_BUILD_ROOT_PATH}/ext/nnstreamer/tensor_trainer
%define test_script $(pwd)/packaging/run_unittests_binaries.sh
export NNSTREAMER_FILTERS=${NNSTREAMER_BUILD_ROOT_PATH}/ext/nnstreamer/tensor_filter
export NNSTREAMER_DECODERS=${NNSTREAMER_BUILD_ROOT_PATH}/ext/nnstreamer/tensor_decoder
export NNSTREAMER_CONVERTERS=${NNSTREAMER_BUILD_ROOT_PATH}/ext/nnstreamer/tensor_converter
+export NNSTREAMER_TRAINERS=${NNSTREAMER_BUILD_ROOT_PATH}/ext/nnstreamer/tensor_trainer
export _PYTHONPATH=${PYTHONPATH}
run_entry() {
NNSTREAMER_FILTERS=/home/${USERNAME}/nnstreamer/${BUILDDIR}/ext/nnstreamer/tensor_filter \
NNSTREAMER_DECODERS=/home/${USERNAME}/nnstreamer/${BUILDDIR}/ext/nnstreamer/tensor_decoder \
NNSTREAMER_CONVERTERS=/home/${USERNAME}/nnstreamer/${BUILDDIR}/ext/nnstreamer/tensor_converter \
+ NNSTREAMER_TRAINERS=/home/${USERNAME}/nnstreamer/${BUILDDIR}/ext/nnstreamer/tensor_trainer \
NNS_USERNAME=${USERNAME} \
NNS_BUILDDIR=${BUILDDIR}