[Android/SNPE] Support SNPE in Android
author Yongjoo Ahn <yongjoo1.ahn@samsung.com>
Fri, 8 May 2020 04:11:53 +0000 (13:11 +0900)
committer MyungJoo Ham <myungjoo.ham@samsung.com>
Thu, 14 May 2020 01:28:17 +0000 (10:28 +0900)
- Support SNPE for the Android API (see the usage sketch below)
- Add simple tests (SingleShot, Pipeline) using a pre-loaded SNPE model
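
Usage sketch (illustrative, not part of this patch): the snippet below
shows how an application could run a .dlc model through the Java
SingleShot API enabled here. Only calls exercised by the new tests are
used; the class name, the return value, and the zero-filled input are
assumptions made for the example.

    import java.io.File;
    import java.nio.ByteBuffer;
    import org.nnsuite.nnstreamer.NNStreamer;
    import org.nnsuite.nnstreamer.SingleShot;
    import org.nnsuite.nnstreamer.TensorsData;
    import org.nnsuite.nnstreamer.TensorsInfo;

    public class SnpeExample {
        /* Runs one inference with a pre-converted SNPE model (.dlc).
           Assumes the nnstreamer Android API has already been initialized
           for the application context. */
        public static ByteBuffer runOnce(File model) throws Exception {
            if (!NNStreamer.isAvailable(NNStreamer.NNFWType.SNPE)) {
                return null; /* SNPE runtime is not available on this device */
            }

            SingleShot single = new SingleShot(model, NNStreamer.NNFWType.SNPE);
            try {
                /* input info (e.g. float32 3:299:299:1) is read from the model */
                TensorsInfo in = single.getInputInfo();
                TensorsData input = in.allocate(); /* zero-filled buffer, for demonstration */

                TensorsData output = single.invoke(input);
                return output.getTensorData(0); /* e.g. 1001 float32 scores for inception_v3 */
            } finally {
                single.close();
            }
        }
    }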

Signed-off-by: Yongjoo Ahn <yongjoo1.ahn@samsung.com>
api/android/api/src/androidTest/java/org/nnsuite/nnstreamer/APITestCommon.java
api/android/api/src/androidTest/java/org/nnsuite/nnstreamer/APITestPipeline.java
api/android/api/src/androidTest/java/org/nnsuite/nnstreamer/APITestSingleShot.java
api/android/api/src/main/jni/Android-snpe.mk [new file with mode: 0644]
api/android/api/src/main/jni/nnstreamer-native-api.c
api/android/build-android-lib.sh
api/capi/include/nnstreamer.h
api/capi/src/nnstreamer-capi-util.c
jni/nnstreamer.mk

index fa9e524..59fa480 100644 (file)
@@ -230,6 +230,18 @@ public class APITestCommon {
         return new File[]{model};
     }
 
+    public static File getSNPEModel() {
+        String root = Environment.getExternalStorageDirectory().getAbsolutePath();
+
+        File model = new File(root + "/nnstreamer/snpe_data/inception_v3_quantized.dlc");
+
+        if (!model.exists()) {
+            fail();
+        }
+
+        return model;
+    }
+
     /**
      * Verifies the byte buffer is direct buffer with native order.
      *
index 31445be..426d829 100644 (file)
@@ -1390,4 +1390,72 @@ public class APITestPipeline {
             fail();
         }
     }
+
+    @Test
+    public void testSNPE() {
+        if (!NNStreamer.isAvailable(NNStreamer.NNFWType.SNPE)) {
+            /* cannot run the test */
+            return;
+        }
+
+        File model = APITestCommon.getSNPEModel();
+        String desc = "appsrc name=srcx ! " +
+                "other/tensor,dimension=(string)3:299:299:1,type=(string)float32,framerate=(fraction)0/1 ! " +
+                "tensor_filter framework=snpe " + "model=" + model.getAbsolutePath() + " ! " +
+                "tensor_sink name=sinkx";
+
+        try (
+            Pipeline pipe = new Pipeline(desc);
+            TensorsInfo info = new TensorsInfo()
+        ) {
+            info.addTensorInfo(NNStreamer.TensorType.FLOAT32, new int[]{3,299,299,1});
+
+            /* register sink callback */
+            pipe.registerSinkCallback("sinkx", new Pipeline.NewDataCallback() {
+                @Override
+                public void onNewDataReceived(TensorsData data) {
+                    if (data == null || data.getTensorsCount() != 1) {
+                        mInvalidState = true;
+                        return;
+                    }
+
+                    TensorsInfo info = data.getTensorsInfo();
+
+                    if (info == null || info.getTensorsCount() != 1) {
+                        mInvalidState = true;
+                    } else {
+                        ByteBuffer output = data.getTensorData(0);
+
+                        if (!APITestCommon.isValidBuffer(output, 4004)) {
+                            mInvalidState = true;
+                        }
+                    }
+
+                    mReceived++;
+                }
+            });
+
+            /* start pipeline */
+            pipe.start();
+
+            /* push input buffer */
+            for (int i = 0; i < 10; i++) {
+                /* dummy input */
+                pipe.inputData("srcx", TensorsData.allocate(info));
+                Thread.sleep(100);
+            }
+
+            /* sleep 500 ms to let the pending buffers be invoked */
+            Thread.sleep(500);
+
+            /* stop pipeline */
+            pipe.stop();
+
+            /* check received data from sink */
+            assertFalse(mInvalidState);
+            assertTrue(mReceived > 0);
+        } catch (Exception e) {
+            fail();
+        }
+    }
 }
index 5098ee7..e08915b 100644 (file)
@@ -775,4 +775,42 @@ public class APITestSingleShot {
             fail();
         }
     }
+
+    @Test
+    public void testSNPE() {
+        if (!NNStreamer.isAvailable(NNStreamer.NNFWType.SNPE)) {
+            /* cannot run the test */
+            return;
+        }
+
+        try {
+            File model = APITestCommon.getSNPEModel();
+
+            SingleShot single = new SingleShot(model, NNStreamer.NNFWType.SNPE);
+            TensorsInfo in = single.getInputInfo();
+
+            /* use a long timeout (60 sec) so invoke does not time out */
+            single.setTimeout(60000);
+
+            /* single-shot invoke */
+            for (int i = 0; i < 5; i++) {
+                /* input data */
+                TensorsData input = in.allocate();
+
+                /* invoke */
+                TensorsData output = single.invoke(input);
+
+                /* check output: 1 tensor (float32 1:1001) */
+                assertEquals(1, output.getTensorsCount());
+                assertEquals(4004, output.getTensorData(0).capacity());
+
+                Thread.sleep(30);
+            }
+
+            single.close();
+        } catch (Exception e) {
+            fail();
+        }
+    }
+
 }
diff --git a/api/android/api/src/main/jni/Android-snpe.mk b/api/android/api/src/main/jni/Android-snpe.mk
new file mode 100644 (file)
index 0000000..5cf5d61
--- /dev/null
@@ -0,0 +1,48 @@
+#------------------------------------------------------
+# SNPE (The Snapdragon Neural Processing Engine)
+#
+# This mk file defines the snpe module with a prebuilt shared library.
+# (snpe-sdk, arm64-v8a only)
+# See Qualcomm Neural Processing SDK for AI (https://developer.qualcomm.com/software/qualcomm-neural-processing-sdk) for details.
+#------------------------------------------------------
+LOCAL_PATH := $(call my-dir)
+
+ifndef NNSTREAMER_ROOT
+$(error NNSTREAMER_ROOT is not defined!)
+endif
+
+include $(NNSTREAMER_ROOT)/jni/nnstreamer.mk
+
+SNPE_DIR := $(LOCAL_PATH)/snpe
+SNPE_INCLUDES := $(SNPE_DIR)/include/zdl/
+
+ifeq ($(TARGET_ARCH_ABI),arm64-v8a)
+SNPE_LIB_PATH := $(SNPE_DIR)/lib/aarch64-android-clang6.0
+SNPE_DSP_LIB_PATH := $(SNPE_DIR)/lib/dsp
+else
+$(error Target arch ABI not supported: $(TARGET_ARCH_ABI))
+endif
+
+#------------------------------------------------------
+# snpe-sdk (prebuilt shared library)
+#------------------------------------------------------
+include $(CLEAR_VARS)
+
+LOCAL_MODULE := libSNPE
+LOCAL_SRC_FILES := $(SNPE_LIB_PATH)/libSNPE.so
+
+include $(PREBUILT_SHARED_LIBRARY)
+
+#------------------------------------------------------
+# tensor-filter sub-plugin for snpe
+#------------------------------------------------------
+include $(CLEAR_VARS)
+
+LOCAL_MODULE := snpe
+LOCAL_SRC_FILES := $(NNSTREAMER_FILTER_SNPE_SRCS)
+LOCAL_CXXFLAGS += -std=c++11 -frtti -fexceptions -Wno-exceptions -O2 -DNDEBUG $(NNS_API_FLAGS)
+LOCAL_C_INCLUDES := $(NNSTREAMER_INCLUDES) $(SNPE_INCLUDES) $(GST_HEADERS_COMMON)
+LOCAL_SHARED_LIBRARIES := libSNPE
+LOCAL_STATIC_LIBRARIES := nnstreamer
+
+include $(BUILD_STATIC_LIBRARY)
index 2b7f2f7..3e38089 100644 (file)
@@ -46,6 +46,9 @@ extern void init_filter_snap (void);
 #if defined (ENABLE_NNFW)
 extern void init_filter_nnfw (void);
 #endif
+#if defined (ENABLE_SNPE)
+extern void _init_filter_snpe (void);
+#endif
 
 /**
  * @brief External function from GStreamer Android.
@@ -617,7 +620,7 @@ nns_get_nnfw_type (jint fw_type, ml_nnfw_type_e * nnfw)
 #endif
       break;
     case 3: /* NNFWType.SNPE */
-      /** @todo add ML_NNFW_TYPE_SNPE (for android only?) */
+      *nnfw = ML_NNFW_TYPE_SNPE;
 #if !defined (ENABLE_SNPE)
       nns_logw ("SNPE is not supported.");
       is_supported = FALSE;
@@ -684,7 +687,7 @@ nnstreamer_native_initialize (JNIEnv * env, jobject context)
     init_filter_nnfw ();
 #endif
 #if defined (ENABLE_SNPE)
-    /** @todo register snpe sub-plugin */
+    _init_filter_snpe ();
 #endif
 
     nns_is_initilaized = TRUE;
@@ -725,7 +728,6 @@ Java_org_nnsuite_nnstreamer_NNStreamer_nativeCheckAvailability (JNIEnv * env,
     jclass clazz, jint fw_type)
 {
   ml_nnfw_type_e nnfw;
-
   if (!nns_get_nnfw_type (fw_type, &nnfw)) {
     return JNI_FALSE;
   }
index 12f662b..06a8865 100644 (file)
@@ -266,7 +266,9 @@ fi
 if [[ $enable_snpe == "yes" ]]; then
     sed -i "s|ENABLE_SNPE := false|ENABLE_SNPE := true|" external/Android-nnstreamer-prebuilt.mk
     sed -i "s|ENABLE_SNPE := false|ENABLE_SNPE := true|" api/src/main/jni/Android.mk
-    cp -r $SNPE_DIRECTORY/* api/src/main/jni
+    mkdir -p api/src/main/jni/snpe
+    cp -r $SNPE_DIRECTORY/include api/src/main/jni/snpe
+    cp -r $SNPE_DIRECTORY/lib api/src/main/jni/snpe
 fi
 
 # Update tf-lite option
index 16e32cd..e9ef87b 100644 (file)
@@ -128,6 +128,7 @@ typedef enum {
   ML_NNFW_TYPE_VIVANTE = 7,           /**< VeriSilicon's Vivante. (Since 6.0) */
   ML_NNFW_TYPE_EDGE_TPU = 8,          /**< Google Coral Edge TPU (USB). (Since 6.0) */
   ML_NNFW_TYPE_ARMNN = 9,             /**< Arm Neural Network framework (support for caffe and tensorflow-lite). (Since 6.0) */
+  ML_NNFW_TYPE_SNPE = 10,             /**< Qualcomm SNPE (Snapdragon Neural Processing Engine) with .dlc model file. (Since 6.0) */
   ML_NNFW_TYPE_SNAP = 0x2001,         /**< SNAP (Samsung Neural Acceleration Platform), only for Android. (Since 6.0) */
 } ml_nnfw_type_e;
 
index 7b5b57f..d3deef8 100644 (file)
@@ -1081,6 +1081,16 @@ ml_validate_model_file (char **model, unsigned int num_models,
 #endif
       /* SNAP requires multiple files, set supported if model file exists. */
       break;
+    case ML_NNFW_TYPE_SNPE:
+#if !defined(__ANDROID__)
+      ml_loge ("Given framework, SNPE is not supported yet for non Android (arm64-v8a).");
+      status = ML_ERROR_NOT_SUPPORTED;
+      break;
+#endif
+      if (g_ascii_strcasecmp (file_ext[0], ".dlc") != 0) {
+        status = ML_ERROR_INVALID_PARAMETER;
+      }
+      break;
     case ML_NNFW_TYPE_ARMNN:
       if (g_ascii_strcasecmp (file_ext[0], ".caffemodel") != 0 &&
           g_ascii_strcasecmp (file_ext[0], ".tflite") != 0 &&
@@ -1171,6 +1181,7 @@ ml_get_nnfw_subplugin_name (ml_nnfw_type_e nnfw)
     [ML_NNFW_TYPE_EDGE_TPU] = "edgetpu",
     [ML_NNFW_TYPE_ARMNN] = "armnn",
     [ML_NNFW_TYPE_SNAP] = "snap",
+    [ML_NNFW_TYPE_SNPE] = "snpe",
     NULL
   };
 
index 09637a8..0281c4f 100644 (file)
@@ -97,6 +97,10 @@ NNSTREAMER_FILTER_TORCH_SRCS := \
 NNSTREAMER_FILTER_CAFFE2_SRCS := \
     $(NNSTREAMER_EXT_HOME)/tensor_filter/tensor_filter_caffe2.cc
 
+# filter snpe
+NNSTREAMER_FILTER_SNPE_SRCS := \
+    $(NNSTREAMER_EXT_HOME)/tensor_filter/tensor_filter_snpe.cc
+
 # decoder boundingbox
 NNSTREAMER_DECODER_BB_SRCS := \
     $(NNSTREAMER_EXT_HOME)/tensor_decoder/tensordec-boundingbox.c \