--- /dev/null
+# How to run deep networks on Android device {#tutorial_dnn_android}
+
+## Introduction
+In this tutorial you'll learn how to run deep learning networks on an Android device
+using the OpenCV deep learning module.
+
+Tutorial was written for the following versions of corresponding software:
+- Android Studio 2.3.3
+- OpenCV 3.3.0
+
+## Requirements
+
+- Download and install Android Studio from https://developer.android.com/studio.
+
+- Get the latest pre-built OpenCV for Android release from https://github.com/opencv/opencv/releases and unpack it. At this moment it's an `opencv-3.3.0-android-sdk.zip`.
+
+- Download MobileNet object detection model from https://github.com/chuanqi305/MobileNet-SSD. We need a configuration file `MobileNetSSD_deploy.prototxt` and weights `MobileNetSSD_deploy.caffemodel`.
+
+## Create an empty Android Studio project
+- Open Android Studio. Start a new project. Let's call it `opencv_mobilenet`.
+![](1_start_new_project.png)
+
+- Keep default target settings.
+![](2_start_new_project.png)
+
+- Use "Empty Activity" template. Name activity as `MainActivity` with a
+corresponding layout `activity_main`.
+![](3_start_new_project.png)
+
+ ![](4_start_new_project.png)
+
+- Wait until the project is created. Go to `Run->Edit Configurations`.
+Choose `USB Device` as the target device for runs.
+![](5_setup.png)
+Plug in your device and run the project. It should be installed and launched
+successfully before we proceed.
+@note Read @ref tutorial_android_dev_intro in case of problems.
+
+![](6_run_empty_project.png)
+
+## Add OpenCV dependency
+
+- Go to `File->New->Import module` and provide a path to `unpacked_OpenCV_package/sdk/java`. The module name is detected automatically.
+Disable all features that Android Studio suggests on the next window.
+![](7_import_module.png)
+
+ ![](8_import_module.png)
+
+- Open two files:
+
+ 1. `AndroidStudioProjects/opencv_mobilenet/app/build.gradle`
+
+ 2. `AndroidStudioProjects/opencv_mobilenet/openCVLibrary330/build.gradle`
+
+ Copy both `compileSdkVersion` and `buildToolsVersion` from the first file to
+ the second one.
+
+ `compileSdkVersion 14` -> `compileSdkVersion 26`
+
+ `buildToolsVersion "25.0.0"` -> `buildToolsVersion "26.0.1"`
+
+- Make the project. There should be no errors at this point.
+
+- Go to `File->Project Structure`. Add OpenCV module dependency.
+![](9_opencv_dependency.png)
+
+ ![](10_opencv_dependency.png)
+
+- Install an appropriate OpenCV Manager from `unpacked_OpenCV_package/apk`
+to the target device (this needs to be done only once).
+@code
+adb install OpenCV_3.3.0_Manager_3.30_armeabi-v7a.apk
+@endcode
+
+- Congratulations! We're ready now to make a sample using OpenCV.
+
+## Make a sample
+Our sample takes pictures from a camera, forwards them into a deep network and
+receives a set of rectangles, class identifiers and confidence values in the `[0, 1]`
+range.
+
+- First of all, we need to add a necessary widget which displays processed
+frames. Modify `app/src/main/res/layout/activity_main.xml`:
+@include android/mobilenet-objdetect/res/layout/activity_main.xml
+
+- Put downloaded `MobileNetSSD_deploy.prototxt` and `MobileNetSSD_deploy.caffemodel`
+into `app/build/intermediates/assets/debug` folder.
+
+- Modify `/app/src/main/AndroidManifest.xml` to enable full-screen mode, set up
+a correct screen orientation and allow to use a camera.
+@include android/mobilenet-objdetect/AndroidManifest.xml
+
+- Replace content of `app/src/main/java/org/opencv/samples/opencv_mobilenet/MainActivity.java`:
+@include android/mobilenet-objdetect/src/org/opencv/samples/opencv_mobilenet/MainActivity.java
+
+- Launch the application and have fun!
+![](11_demo.jpg)
--- /dev/null
+package org.opencv.samples.opencv_mobilenet;
+
+import android.content.Context;
+import android.content.res.AssetManager;
+import android.os.Bundle;
+import android.support.v7.app.AppCompatActivity;
+import android.util.Log;
+
+import org.opencv.android.BaseLoaderCallback;
+import org.opencv.android.CameraBridgeViewBase;
+import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame;
+import org.opencv.android.CameraBridgeViewBase.CvCameraViewListener2;
+import org.opencv.android.LoaderCallbackInterface;
+import org.opencv.android.OpenCVLoader;
+import org.opencv.core.Core;
+import org.opencv.core.Mat;
+import org.opencv.core.Point;
+import org.opencv.core.Scalar;
+import org.opencv.core.Size;
+import org.opencv.dnn.Net;
+import org.opencv.dnn.Dnn;
+import org.opencv.imgproc.Imgproc;
+
+import java.io.BufferedInputStream;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+
+public class MainActivity extends AppCompatActivity implements CvCameraViewListener2 {
+
+    // Initialize OpenCV manager.
+    // Callback invoked when the asynchronous OpenCV Manager connection completes;
+    // on SUCCESS it starts the camera preview, otherwise it defers to the base handler.
+    private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
+        @Override
+        public void onManagerConnected(int status) {
+            switch (status) {
+                case LoaderCallbackInterface.SUCCESS: {
+                    Log.i(TAG, "OpenCV loaded successfully");
+                    mOpenCvCameraView.enableView();
+                    break;
+                }
+                default: {
+                    super.onManagerConnected(status);
+                    break;
+                }
+            }
+        }
+    };
+
+    @Override
+    public void onResume() {
+        super.onResume();
+        // Connect to the OpenCV Manager service; mLoaderCallback fires when it is ready.
+        OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION, this, mLoaderCallback);
+    }
+
+    @Override
+    protected void onCreate(Bundle savedInstanceState) {
+        super.onCreate(savedInstanceState);
+        setContentView(R.layout.activity_main);
+
+        // Set up camera listener.
+        mOpenCvCameraView = (CameraBridgeViewBase)findViewById(R.id.CameraView);
+        mOpenCvCameraView.setVisibility(CameraBridgeViewBase.VISIBLE);
+        mOpenCvCameraView.setCvCameraViewListener(this);
+    }
+
+    // Load a network.
+    // Invoked when the camera preview starts. Reads the Caffe model definition and
+    // weights via getPath(), which copies them from app assets to internal storage
+    // so the native OpenCV loader can open them by filesystem path.
+    public void onCameraViewStarted(int width, int height) {
+        String proto = getPath("MobileNetSSD_deploy.prototxt", this);
+        String weights = getPath("MobileNetSSD_deploy.caffemodel", this);
+        net = Dnn.readNetFromCaffe(proto, weights);
+        Log.i(TAG, "Network loaded successfully");
+    }
+
+    // Process one camera frame: forward it through the network, then draw a labeled
+    // rectangle on a centered crop of the frame for every sufficiently confident
+    // detection. Returns the (annotated) full frame for display.
+    public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
+        final int IN_WIDTH = 300;
+        final int IN_HEIGHT = 300;
+        final float WH_RATIO = (float)IN_WIDTH / IN_HEIGHT;
+        final double IN_SCALE_FACTOR = 0.007843;  // 1/127.5; with MEAN_VAL this maps pixels to roughly [-1, 1]
+        final double MEAN_VAL = 127.5;
+        final double THRESHOLD = 0.2;             // minimum confidence for a detection to be drawn
+
+        // Get a new frame
+        Mat frame = inputFrame.rgba();
+        Imgproc.cvtColor(frame, frame, Imgproc.COLOR_RGBA2RGB);
+
+        // Forward image through network.
+        Mat blob = Dnn.blobFromImage(frame, IN_SCALE_FACTOR,
+                new Size(IN_WIDTH, IN_HEIGHT),
+                new Scalar(MEAN_VAL, MEAN_VAL, MEAN_VAL), true);
+        net.setInput(blob);
+        Mat detections = net.forward();
+
+        int cols = frame.cols();
+        int rows = frame.rows();
+
+        // Pick the largest centered sub-rectangle of the frame with the network's
+        // input aspect ratio, so detection boxes map onto an undistorted region.
+        Size cropSize;
+        if ((float)cols / rows > WH_RATIO) {
+            cropSize = new Size(rows * WH_RATIO, rows);
+        } else {
+            cropSize = new Size(cols, cols / WH_RATIO);
+        }
+
+        int y1 = (int)(rows - cropSize.height) / 2;
+        int y2 = (int)(y1 + cropSize.height);
+        int x1 = (int)(cols - cropSize.width) / 2;
+        int x2 = (int)(x1 + cropSize.width);
+        // submat is a view into frame, so drawing on subFrame annotates frame itself.
+        Mat subFrame = frame.submat(y1, y2, x1, x2);
+
+        cols = subFrame.cols();
+        rows = subFrame.rows();
+
+        // Flatten the 4D output so each row holds one detection of 7 values:
+        // [imageId(?), classId, confidence, left, top, right, bottom] — the last
+        // four are relative coordinates, scaled below by the crop dimensions.
+        detections = detections.reshape(1, (int)detections.total() / 7);
+
+        for (int i = 0; i < detections.rows(); ++i) {
+            double confidence = detections.get(i, 2)[0];
+            if (confidence > THRESHOLD) {
+                int classId = (int)detections.get(i, 1)[0];
+
+                // NOTE(review): despite the names, these are used as left-top and
+                // right-bottom corners of the box.
+                int xLeftBottom = (int)(detections.get(i, 3)[0] * cols);
+                int yLeftBottom = (int)(detections.get(i, 4)[0] * rows);
+                int xRightTop = (int)(detections.get(i, 5)[0] * cols);
+                int yRightTop = (int)(detections.get(i, 6)[0] * rows);
+
+                // Draw rectangle around detected object.
+                Imgproc.rectangle(subFrame, new Point(xLeftBottom, yLeftBottom),
+                        new Point(xRightTop, yRightTop),
+                        new Scalar(0, 255, 0));
+                String label = classNames[classId] + ": " + confidence;
+                int[] baseLine = new int[1];
+                Size labelSize = Imgproc.getTextSize(label, Core.FONT_HERSHEY_SIMPLEX, 0.5, 1, baseLine);
+
+                // Draw background for label.
+                Imgproc.rectangle(subFrame, new Point(xLeftBottom, yLeftBottom - labelSize.height),
+                        new Point(xLeftBottom + labelSize.width, yLeftBottom + baseLine[0]),
+                        new Scalar(255, 255, 255), Core.FILLED);
+
+                // Write class name and confidence.
+                Imgproc.putText(subFrame, label, new Point(xLeftBottom, yLeftBottom),
+                        Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar(0, 0, 0));
+            }
+        }
+        return frame;
+    }
+
+    public void onCameraViewStopped() {}
+
+    // Upload file to storage and return a path.
+    // Copies a bundled asset into the app's internal files directory so native code
+    // can read it by plain path. Returns "" on failure.
+    // NOTE(review): a single read() call is not guaranteed to fill the buffer
+    // (available() is only an estimate) — acceptable for a tutorial, not robust.
+    private static String getPath(String file, Context context) {
+        AssetManager assetManager = context.getAssets();
+
+        BufferedInputStream inputStream = null;
+        try {
+            // Read data from assets.
+            inputStream = new BufferedInputStream(assetManager.open(file));
+            byte[] data = new byte[inputStream.available()];
+            inputStream.read(data);
+            inputStream.close();
+
+            // Create copy file in storage.
+            File outFile = new File(context.getFilesDir(), file);
+            FileOutputStream os = new FileOutputStream(outFile);
+            os.write(data);
+            os.close();
+            // Return a path to file which may be read in common way.
+            return outFile.getAbsolutePath();
+        } catch (IOException ex) {
+            Log.i(TAG, "Failed to upload a file");
+        }
+        return "";
+    }
+
+    private static final String TAG = "OpenCV/Sample/MobileNet";
+    // Class labels indexed by the classId values produced by the network.
+    private static final String[] classNames = {"background",
+            "aeroplane", "bicycle", "bird", "boat",
+            "bottle", "bus", "car", "cat", "chair",
+            "cow", "diningtable", "dog", "horse",
+            "motorbike", "person", "pottedplant",
+            "sheep", "sofa", "train", "tvmonitor"};
+
+    private Net net;                                 // loaded in onCameraViewStarted()
+    private CameraBridgeViewBase mOpenCvCameraView;  // camera preview widget from activity_main
+}