In the Java APIs, handle multiple model files and a custom option string to support SNAP.
Define an enum for the supported NNFW types and implement the corresponding native functions.
Signed-off-by: Jaeyun Jung <jy1210.jung@samsung.com>
<uses-feature android:glEsVersion="0x00020000"/>
<uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE" />
+ <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
<application
android:largeHeap="true" >
public static final int TENSOR_SIZE_LIMIT = 16;
/**
+ * The enumeration for supported frameworks in NNStreamer.
+ */
+ public enum NNFWType {
+ /**
+ * TensorFlow Lite<br>
+ * <br>
+ * <a href="https://www.tensorflow.org/lite">TensorFlow Lite</a> is an open source
+ * deep learning framework for on-device inference.<br>
+ */
+ TENSORFLOW_LITE,
+ /**
+ * SNAP (Samsung Neural Acceleration Platform)<br>
+ * <br>
+ * Supports <a href="https://developer.samsung.com/neural">Samsung Neural SDK</a>
+ * (Version 1.0, run only on Samsung devices)<br>
+ * To construct a pipeline with SNAP, the developer should set the custom option string
+ * to specify the neural network and data format (see the usage sketch after this enum).<br>
+ * <br>
+ * Custom options<br>
+ * - ModelFWType: the type of model (TensorFlow/Caffe)<br>
+ * - ExecutionDataType: the execution data type for SNAP (default float32)<br>
+ * - ComputingUnit: the computing unit to execute the model (default CPU)<br>
+ * - CpuThreadCount: the number of CPU threads to run the model (optional, default 4 if ComputingUnit is CPU)<br>
+ * - GpuCacheSource: the absolute path used for GPU kernel caching (mandatory if ComputingUnit is GPU)<br>
+ */
+ SNAP,
+ /**
+ * Unknown framework (usually an error)
+ */
+ UNKNOWN
+ }
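A minimal sketch of how a SNAP custom option string could be assembled from the keys listed above; the comma-separated "key:value" syntax and the concrete values (CAFFE, FLOAT32, CPU) are illustrative assumptions and not part of this patch.

// Illustrative only: option keys from the SNAP enum entry above,
// joined as comma-separated "key:value" pairs (assumed format).
String snapOption = "ModelFWType:CAFFE"
        + ",ExecutionDataType:FLOAT32"
        + ",ComputingUnit:CPU";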
+
+ /**
* The enumeration for possible data type of tensor in NNStreamer.
*/
public enum TensorType {
}
private static native boolean nativeInitialize(Context context);
+ private static native boolean nativeCheckAvailability(int fw);
private static native String nativeGetVersion();
/**
}
/**
+ * Checks if the given neural network framework is available.
+ *
+ * @param fw The neural network framework
+ *
+ * @return true if the neural network framework is available
+ */
+ public static boolean isAvailable(NNFWType fw) {
+ return nativeCheckAvailability(fw.ordinal());
+ }
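A brief usage sketch for the availability check (caller code, not part of this patch): fall back to TensorFlow Lite when SNAP is not available on the device.

// Hypothetical caller: prefer SNAP, otherwise use TensorFlow Lite.
NNStreamer.NNFWType fw = NNStreamer.NNFWType.SNAP;
if (!NNStreamer.isAvailable(fw)) {
    fw = NNStreamer.NNFWType.TENSORFLOW_LITE;
}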
+
+ /**
* Gets the version string of GStreamer and NNStreamer.
*
* @return The version string
public final class SingleShot implements AutoCloseable {
private long mHandle = 0;
- private native long nativeOpen(String model, TensorsInfo in, TensorsInfo out);
+ private native long nativeOpen(String[] models, TensorsInfo in, TensorsInfo out, int fw, String option);
private native void nativeClose(long handle);
private native TensorsData nativeInvoke(long handle, TensorsData in);
private native TensorsInfo nativeGetInputInfo(long handle);
private native boolean nativeSetTimeout(long handle, int timeout);
/**
- * Creates a new {@link SingleShot} instance with the given model.
+ * Creates a new {@link SingleShot} instance with the given model for TensorFlow Lite.
* If the model has flexible data dimensions, the pipeline will not be constructed and this constructor will throw an exception.
*
* @param model The path to the neural network model file
}
/**
- * Creates a new {@link SingleShot} instance with the given model.
+ * Creates a new {@link SingleShot} instance with the given model for TensorFlow Lite.
* The input and output tensors information are required if the given model has flexible data dimensions,
* where the information MUST be given before executing the model.
* However, once it's given, the dimension cannot be changed for the given model handle.
* @throws IllegalStateException if failed to construct the pipeline
*/
public SingleShot(@NonNull File model, @Nullable TensorsInfo in, @Nullable TensorsInfo out) {
- if (model == null || !model.exists()) {
+ this(new File[]{model}, in, out, NNStreamer.NNFWType.TENSORFLOW_LITE, null);
+ }
+
+ /**
+ * Creates a new {@link SingleShot} instance with the given model files and custom option.
+ *
+ * Unlike the other constructors, this handles multiple model files and a custom option string
+ * for neural networks that require various options and model files.
+ *
+ * @param models The array of {@link File} objects to the neural network model files
+ * @param in The input tensors information
+ * @param out The output tensors information
+ * @param fw The neural network framework
+ * @param option The custom option string to open the neural network
+ *
+ * @throws IllegalArgumentException if a given parameter is invalid
+ * @throws IllegalStateException if failed to construct the pipeline
+ *
+ * @see NNStreamer#isAvailable(NNStreamer.NNFWType)
+ */
+ public SingleShot(@NonNull File[] models, @Nullable TensorsInfo in, @Nullable TensorsInfo out,
+ NNStreamer.NNFWType fw, @Nullable String option) {
+ if (models == null || models.length == 0) {
throw new IllegalArgumentException("Given model is invalid");
}
- String path = model.getAbsolutePath();
+ if (!NNStreamer.isAvailable(fw)) {
+ throw new IllegalStateException("Given framework is not available");
+ }
+
+ String[] path = new String[models.length];
+ int index = 0;
+
+ for (File model : models) {
+ if (model == null || !model.exists()) {
+ throw new IllegalArgumentException("Given model is invalid");
+ }
+
+ path[index++] = model.getAbsolutePath();
+ }
- mHandle = nativeOpen(path, in, out);
+ mHandle = nativeOpen(path, in, out, fw.ordinal(), option);
if (mHandle == 0) {
throw new IllegalStateException("Failed to construct the pipeline");
}
}
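A usage sketch for the multi-file constructor (not part of this patch). The class name, file paths, and SNAP option values are hypothetical, passing null in/out information assumes the model has fixed dimensions, and invoke() is assumed from the existing SingleShot API.

// Hypothetical example: open a Caffe model with SNAP via the new constructor.
import java.io.File;

import org.nnsuite.nnstreamer.NNStreamer;
import org.nnsuite.nnstreamer.SingleShot;
import org.nnsuite.nnstreamer.TensorsData;

public class SnapSingleShotExample {
    public static TensorsData runOnce(TensorsData input) {
        File[] models = new File[] {
            new File("/sdcard/nnstreamer/model.caffemodel"),
            new File("/sdcard/nnstreamer/deploy.prototxt")
        };
        String option = "ModelFWType:CAFFE,ExecutionDataType:FLOAT32,ComputingUnit:CPU";

        try (SingleShot single =
                new SingleShot(models, null, null, NNStreamer.NNFWType.SNAP, option)) {
            return single.invoke(input);
        }
    }
}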
/**
+ * @brief Get the NNFW type from the given integer value.
+ */
+gboolean
+nns_get_nnfw_type (jint fw_type, ml_nnfw_type_e * nnfw)
+{
+ gboolean is_supported = TRUE;
+
+ if (!nnfw)
+ return FALSE;
+
+ *nnfw = ML_NNFW_TYPE_ANY;
+
+ /* enumeration defined in NNStreamer.java */
+ if (fw_type == 0) {
+ *nnfw = ML_NNFW_TYPE_TENSORFLOW_LITE;
+ } else if (fw_type == 1) {
+ *nnfw = ML_NNFW_TYPE_SNAP;
+
+#if !defined (ENABLE_SNAP)
+ nns_logw ("SNAP is not supported.");
+ is_supported = FALSE;
+#endif
+ } else {
+ nns_logw ("Unknown NNFW type (%d).", fw_type);
+ is_supported = FALSE;
+ }
+
+ return is_supported && ml_nnfw_is_available (*nnfw, ML_NNFW_HW_ANY);
+}
+
+/**
* @brief Initialize NNStreamer, register required plugins.
*/
jboolean
}
/**
+ * @brief Native method to check the availability of NNFW.
+ */
+jboolean
+Java_org_nnsuite_nnstreamer_NNStreamer_nativeCheckAvailability (JNIEnv * env, jclass clazz,
+ jint fw_type)
+{
+ ml_nnfw_type_e nnfw;
+
+ if (!nns_get_nnfw_type (fw_type, &nnfw)) {
+ return JNI_FALSE;
+ }
+
+ return JNI_TRUE;
+}
+
+/**
* @brief Native method to get the version string of NNStreamer and GStreamer.
*/
jstring
*/
jlong
Java_org_nnsuite_nnstreamer_SingleShot_nativeOpen (JNIEnv * env, jobject thiz,
- jstring model, jobject in, jobject out)
+ jobjectArray models, jobject in, jobject out, jint fw_type, jstring option)
{
pipeline_info_s *pipe_info = NULL;
- ml_single_h single;
- ml_tensors_info_h in_info, out_info;
+ ml_single_h single = NULL;
+ ml_single_preset info = { 0, };
gboolean opened = FALSE;
- const char *model_info = (*env)->GetStringUTFChars (env, model, NULL);
-
- single = NULL;
- in_info = out_info = NULL;
pipe_info = nns_construct_pipe_info (env, thiz, NULL, NNS_PIPE_TYPE_SINGLE);
if (pipe_info == NULL) {
goto done;
}
+ /* parse in/out tensors information */
if (in) {
- if (!nns_parse_tensors_info (pipe_info, env, in, &in_info)) {
+ if (!nns_parse_tensors_info (pipe_info, env, in, &info.input_info)) {
nns_loge ("Failed to parse input tensor.");
goto done;
}
}
if (out) {
- if (!nns_parse_tensors_info (pipe_info, env, out, &out_info)) {
+ if (!nns_parse_tensors_info (pipe_info, env, out, &info.output_info)) {
nns_loge ("Failed to parse output tensor.");
goto done;
}
}
- /* supposed tensorflow-lite only for android */
- if (ml_single_open (&single, model_info, in_info, out_info,
- ML_NNFW_TYPE_TENSORFLOW_LITE, ML_NNFW_HW_AUTO) != ML_ERROR_NONE) {
+ /* nnfw type and hw resource */
+ if (!nns_get_nnfw_type (fw_type, &info.nnfw)) {
+ nns_loge ("Failed, unsupported framework (%d).", fw_type);
+ goto done;
+ }
+
+ info.hw = ML_NNFW_HW_ANY;
+
+ /* parse models */
+ if (models) {
+ GString *model_str;
+ jsize i, models_count;
+
+ model_str = g_string_new (NULL);
+ models_count = (*env)->GetArrayLength (env, models);
+
+ for (i = 0; i < models_count; i++) {
+ jstring model_obj = (jstring) (*env)->GetObjectArrayElement (env, models, i);
+ const char *model_path = (*env)->GetStringUTFChars (env, model_obj, NULL);
+
+ g_string_append (model_str, model_path);
+ if (i < models_count - 1) {
+ g_string_append (model_str, ",");
+ }
+
+ (*env)->ReleaseStringUTFChars (env, model_obj, model_path);
+ }
+
+ info.models = g_string_free (model_str, FALSE);
+ } else {
+ nns_loge ("Failed to get model file.");
+ goto done;
+ }
+
+ /* parse option string */
+ if (option) {
+ const char *option_str = (*env)->GetStringUTFChars (env, option, NULL);
+
+ info.custom_option = g_strdup (option_str);
+ (*env)->ReleaseStringUTFChars (env, option, option_str);
+ }
+
+ if (ml_single_open_custom (&single, &info) != ML_ERROR_NONE) {
nns_loge ("Failed to create the pipeline.");
goto done;
}
pipe_info->pipeline_handle = single;
opened = TRUE;
done:
- ml_tensors_info_destroy (in_info);
- ml_tensors_info_destroy (out_info);
+ ml_tensors_info_destroy (info.input_info);
+ ml_tensors_info_destroy (info.output_info);
+ g_free (info.models);
+ g_free (info.custom_option);
if (!opened) {
nns_destroy_pipe_info (pipe_info, env);
pipe_info = NULL;
}
- (*env)->ReleaseStringUTFChars (env, model, model_info);
return CAST_TO_LONG (pipe_info);
}
extern gboolean
nns_parse_tensors_info (pipeline_info_s * pipe_info, JNIEnv * env, jobject obj_info, ml_tensors_info_h * info_h);
+/**
+ * @brief Get the NNFW type from the given integer value.
+ */
+extern gboolean
+nns_get_nnfw_type (jint fw_type, ml_nnfw_type_e * nnfw);
+
#endif /* __NNSTREAMER_ANDROID_NATIVE_H__ */