.. |Author_AnaH| unicode:: Ana U+0020 Huam U+00E1 n\r
.. |Author_BernatG| unicode:: Bern U+00E1 t U+0020 G U+00E1 bor\r
+.. |Author_AndreyK| unicode:: Andrey U+0020 Kamaev\r
\r
\r
\r
--- /dev/null
+.. _Android_Binary_Package:\r
+\r
+Using Android binary package with Eclipse\r
+*****************************************\r
+\r
+.. include:: <isonum.txt>\r
+\r
+This tutorial was tested using Ubuntu 10.04 and Windows 7 SP1 operating systems. Nevertheless, it should also work on any other **OS**\ es supported by Android SDK (including Mac OS X). If you encounter errors after following the steps described here, feel free to contact us via the *android-opencv* discussion group https://groups.google.com/group/android-opencv/ and we will try to fix your problem.\r
+\r
+.. _Android_Environment_Setup_Lite: \r
+\r
+Setup environment to start Android Development\r
+==============================================\r
+\r
+You need the following tools to be installed:\r
+\r
+1. **Sun JDK 6**\r
+\r
+ Visit http://www.oracle.com/technetwork/java/javase/downloads/index.html and download installer for your OS.\r
+\r
+   Here is a detailed JDK installation guide for Ubuntu and Mac OS: http://source.android.com/source/initializing.html (only the JDK sections are applicable for OpenCV)\r
+\r
+ .. note:: OpenJDK is not usable for Android development because Android SDK supports only Sun JDK.\r
+\r
+#. **Android SDK**\r
+\r
+ Get the latest Android SDK from http://developer.android.com/sdk/index.html\r
+\r
+ Here is Google's install guide for SDK http://developer.android.com/sdk/installing.html\r
+\r
+   .. note:: If you choose the SDK packed into a Windows installer, then you should have a 32-bit JRE installed. It is not needed for Android development, but the installer is an x86 application and requires a 32-bit Java runtime.\r
+\r
+   .. note:: If you are running an x64 version of Ubuntu Linux, then you need the ia32 shared libraries (for use on amd64 and ia64 systems) installed. You can install them with the following command:\r
+\r
+ .. code-block:: bash\r
+\r
+ sudo apt-get install ia32-libs\r
+\r
\r
=========== ======================================================\r
\r
- .. |WinVSHowT| image:: images/visual-studio-2010-logo.jpg\r
- :height: 90pt\r
- :width: 90pt\r
+ .. |WinVSHowT| image:: images/visual-studio-2010-logo.jpg\r
+ :height: 90pt\r
+ :width: 90pt\r
+\r
+* **Android**\r
+\r
+ .. tabularcolumns:: m{100pt} m{300pt}\r
+ .. cssclass:: toctableopencv\r
+ \r
+ ================ ======================================================\r
+ |AndroidBinPack| **Title:** :ref:`Android_Binary_Package`\r
+\r
+ *Compatibility:* > OpenCV 2.3.1\r
+\r
+ *Author:* |Author_AndreyK|\r
+\r
+ You will learn how to setup OpenCV for Android platform!\r
+\r
+ ================ ======================================================\r
+\r
+ .. |AndroidBinPack| image:: images/android_logo.png\r
+ :height: 90pt\r
+ :width: 90pt\r
\r
* **From where to start?**\r
\r
../linux_eclipse/linux_eclipse\r
../windows_install/windows_install\r
../windows_visual_studio_Opencv/windows_visual_studio_Opencv\r
+ ../android_binary_package/android_binary_package\r
../display_image/display_image\r
../load_save_image/load_save_image\r
)
LIST(APPEND additional_clean_files "${JAVA_OUTPUT_DIR}/${java_file_name}")
if(ANDROID)
- install(FILES "${JAVA_OUTPUT_DIR}/${java_file_name}" DESTINATION src/org/opencv COMPONENT main)
+ get_filename_component(install_dir "${java_file_name}" PATH)
+message("!!!${java_file_name}!!!!!src/org/opencv/${install_dir}!!!!!!!!!!!!")
+ install(FILES "${JAVA_OUTPUT_DIR}/${java_file_name}" DESTINATION src/org/opencv/${install_dir} COMPONENT main)
endif()
endforeach()
import java.util.List;
-import org.opencv.*;
+import org.opencv.core.Size;
+import org.opencv.highgui.VideoCapture;
+import org.opencv.highgui.Highgui;
import android.content.Context;
import android.graphics.Bitmap;
}
}
- mCamera.set(highgui.CV_CAP_PROP_FRAME_WIDTH, mFrameWidth);
- mCamera.set(highgui.CV_CAP_PROP_FRAME_HEIGHT, mFrameHeight);
+ mCamera.set(Highgui.CV_CAP_PROP_FRAME_WIDTH, mFrameWidth);
+ mCamera.set(Highgui.CV_CAP_PROP_FRAME_HEIGHT, mFrameHeight);
}
}
}
public void surfaceCreated(SurfaceHolder holder) {
Log.i(TAG, "surfaceCreated");
- mCamera = new VideoCapture(highgui.CV_CAP_ANDROID);
+ mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
if (mCamera.isOpened()) {
(new Thread(this)).start();
} else {
package org.opencv.samples.puzzle15;\r
\r
-import org.opencv.*;\r
+import org.opencv.android;\r
+import org.opencv.core.Core;\r
+import org.opencv.core.Mat;\r
+import org.opencv.core.Size;\r
+import org.opencv.core.Point;\r
+import org.opencv.core.Scalar;\r
+import org.opencv.highgui.Highgui;\r
+import org.opencv.highgui.VideoCapture;\r
\r
import android.content.Context;\r
import android.graphics.Bitmap;\r
mTextWidths = new int[gridArea];\r
mTextHeights = new int[gridArea];\r
for (int i = 0; i < gridArea; i++) {\r
- Size s = core.getTextSize(Integer.toString(i + 1), 3/* CV_FONT_HERSHEY_COMPLEX */, 1, 2, null);\r
+ Size s = Core.getTextSize(Integer.toString(i + 1), 3/* CV_FONT_HERSHEY_COMPLEX */, 1, 2, null);\r
mTextHeights[i] = (int) s.height;\r
mTextWidths[i] = (int) s.width;\r
}\r
\r
@Override\r
protected Bitmap processFrame(VideoCapture capture) {\r
- capture.retrieve(mRgba, highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);\r
+ capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);\r
int cols = mRgba.cols();\r
int rows = mRgba.rows();\r
\r
else {\r
mCells[idx].copyTo(mCells15[i]);\r
if (mShowTileNumbers) {\r
- core.putText(mCells15[i], Integer.toString(1 + idx), new Point((cols / gridSize - mTextWidths[idx]) / 2,\r
+ Core.putText(mCells15[i], Integer.toString(1 + idx), new Point((cols / gridSize - mTextWidths[idx]) / 2,\r
(rows / gridSize + mTextHeights[idx]) / 2), 3/* CV_FONT_HERSHEY_COMPLEX */, 1, new Scalar(255, 0, 0, 255), 2);\r
}\r
}\r
\r
private void drawGrid(int cols, int rows) {\r
for (int i = 1; i < gridSize; i++) {\r
- core.line(mRgba15, new Point(0, i * rows / gridSize), new Point(cols, i * rows / gridSize), new Scalar(0, 255, 0, 255), 3);\r
- core.line(mRgba15, new Point(i * cols / gridSize, 0), new Point(i * cols / gridSize, rows), new Scalar(0, 255, 0, 255), 3);\r
+ Core.line(mRgba15, new Point(0, i * rows / gridSize), new Point(cols, i * rows / gridSize), new Scalar(0, 255, 0, 255), 3);\r
+ Core.line(mRgba15, new Point(i * cols / gridSize, 0), new Point(i * cols / gridSize, rows), new Scalar(0, 255, 0, 255), 3);\r
}\r
}\r
\r
import java.util.LinkedList;
import java.util.List;
-import org.opencv.*;
+import org.opencv.android;
+import org.opencv.core.Core;
+import org.opencv.core.Mat;
+import org.opencv.core.Rect;
+import org.opencv.core.Scalar;
+import org.opencv.core.Size;
+import org.opencv.highgui.Highgui;
+import org.opencv.highgui.VideoCapture;
import org.opencv.objdetect.CascadeClassifier;
import android.content.Context;
@Override
protected Bitmap processFrame(VideoCapture capture) {
- capture.retrieve(mRgba, highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
- capture.retrieve(mGray, highgui.CV_CAP_ANDROID_GREY_FRAME);
+ capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
+ capture.retrieve(mGray, Highgui.CV_CAP_ANDROID_GREY_FRAME);
if (mCascade != null) {
int height = mGray.rows();
, new Size(faceSize, faceSize));
for (Rect r : faces)
- core.rectangle(mRgba, r.tl(), r.br(), new Scalar(0, 255, 0, 255), 3);
+ Core.rectangle(mRgba, r.tl(), r.br(), new Scalar(0, 255, 0, 255), 3);
}
Bitmap bmp = Bitmap.createBitmap(mRgba.cols(), mRgba.rows(), Bitmap.Config.ARGB_8888);
import java.text.DecimalFormat;
-import org.opencv.core;
+import org.opencv.core.Core;
import android.graphics.Canvas;
import android.graphics.Color;
public void init() {
step = 20;
framesCouner = 0;
- freq = core.getTickFrequency();
- prevFrameTime = core.getTickCount();
+ freq = Core.getTickFrequency();
+ prevFrameTime = Core.getTickCount();
strfps = "";
paint = new Paint();
public void measure() {
framesCouner++;
if (framesCouner % step == 0) {
- long time = core.getTickCount();
+ long time = Core.getTickCount();
double fps = step * freq / (time - prevFrameTime);
prevFrameTime = time;
DecimalFormat twoPlaces = new DecimalFormat("0.00");
import java.util.List;
-import org.opencv.*;
+import org.opencv.core.Size;
+import org.opencv.highgui.VideoCapture;
+import org.opencv.highgui.Highgui;
import android.content.Context;
import android.graphics.Bitmap;
}
}
- mCamera.set(highgui.CV_CAP_PROP_FRAME_WIDTH, mFrameWidth);
- mCamera.set(highgui.CV_CAP_PROP_FRAME_HEIGHT, mFrameHeight);
+ mCamera.set(Highgui.CV_CAP_PROP_FRAME_WIDTH, mFrameWidth);
+ mCamera.set(Highgui.CV_CAP_PROP_FRAME_HEIGHT, mFrameHeight);
}
}
}
public void surfaceCreated(SurfaceHolder holder) {
Log.i(TAG, "surfaceCreated");
- mCamera = new VideoCapture(highgui.CV_CAP_ANDROID);
+ mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
if (mCamera.isOpened()) {
(new Thread(this)).start();
} else {
import java.text.DecimalFormat;
-import org.opencv.core;
+import org.opencv.core.Core;
import android.graphics.Canvas;
import android.graphics.Color;
public void init() {
step = 20;
framesCouner = 0;
- freq = core.getTickFrequency();
- prevFrameTime = core.getTickCount();
+ freq = Core.getTickFrequency();
+ prevFrameTime = Core.getTickCount();
strfps = "";
paint = new Paint();
public void measure() {
framesCouner++;
if (framesCouner % step == 0) {
- long time = core.getTickCount();
+ long time = Core.getTickCount();
double fps = step * freq / (time - prevFrameTime);
prevFrameTime = time;
DecimalFormat twoPlaces = new DecimalFormat("0.00");
package org.opencv.samples.imagemanipulations;
-import org.opencv.*;
+import org.opencv.android;
+import org.opencv.core.Core;
+import org.opencv.core.Mat;
+import org.opencv.core.Size;
+import org.opencv.core.Point;
+import org.opencv.core.Scalar;
+import org.opencv.core.CvType;
+import org.opencv.imgproc.Imgproc;
+import org.opencv.highgui.Highgui;
+import org.opencv.highgui.VideoCapture;
import android.content.Context;
import android.graphics.Bitmap;
switch (ImageManipulationsActivity.viewMode) {
case ImageManipulationsActivity.VIEW_MODE_RGBA:
- capture.retrieve(mRgba, highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
+ capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
break;
case ImageManipulationsActivity.VIEW_MODE_CANNY:
- capture.retrieve(mRgba, highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
- capture.retrieve(mGray, highgui.CV_CAP_ANDROID_GREY_FRAME);
+ capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
+ capture.retrieve(mGray, Highgui.CV_CAP_ANDROID_GREY_FRAME);
if (mRgbaInnerWindow == null || mGrayInnerWindow == null)
CreateAuxiliaryMats();
- imgproc.Canny(mGrayInnerWindow, mGrayInnerWindow, 80, 90);
- imgproc.cvtColor(mGrayInnerWindow, mRgbaInnerWindow, imgproc.COLOR_GRAY2BGRA, 4);
+ Imgproc.Canny(mGrayInnerWindow, mGrayInnerWindow, 80, 90);
+ Imgproc.cvtColor(mGrayInnerWindow, mRgbaInnerWindow, Imgproc.COLOR_GRAY2BGRA, 4);
break;
case ImageManipulationsActivity.VIEW_MODE_SOBEL:
- capture.retrieve(mRgba, highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
- capture.retrieve(mGray, highgui.CV_CAP_ANDROID_GREY_FRAME);
+ capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
+ capture.retrieve(mGray, Highgui.CV_CAP_ANDROID_GREY_FRAME);
if (mRgbaInnerWindow == null || mGrayInnerWindow == null)
CreateAuxiliaryMats();
- imgproc.Sobel(mGrayInnerWindow, mIntermediateMat, CvType.CV_8U, 1, 1);
- core.convertScaleAbs(mIntermediateMat, mIntermediateMat, 10);
- imgproc.cvtColor(mIntermediateMat, mRgbaInnerWindow, imgproc.COLOR_GRAY2BGRA, 4);
+ Imgproc.Sobel(mGrayInnerWindow, mIntermediateMat, CvType.CV_8U, 1, 1);
+ Core.convertScaleAbs(mIntermediateMat, mIntermediateMat, 10);
+ Imgproc.cvtColor(mIntermediateMat, mRgbaInnerWindow, Imgproc.COLOR_GRAY2BGRA, 4);
break;
case ImageManipulationsActivity.VIEW_MODE_SEPIA:
- capture.retrieve(mRgba, highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
- core.transform(mRgba, mRgba, mSepiaKernel);
+ capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
+ Core.transform(mRgba, mRgba, mSepiaKernel);
break;
case ImageManipulationsActivity.VIEW_MODE_BLUR:
- capture.retrieve(mRgba, highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
+ capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
if (mBlurWindow == null)
CreateAuxiliaryMats();
- imgproc.blur(mBlurWindow, mBlurWindow, new Size(15, 15));
+ Imgproc.blur(mBlurWindow, mBlurWindow, new Size(15, 15));
break;
case ImageManipulationsActivity.VIEW_MODE_ZOOM:
- capture.retrieve(mRgba, highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
+ capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
if (mZoomCorner == null || mZoomWindow == null)
CreateAuxiliaryMats();
- imgproc.resize(mZoomWindow, mZoomCorner, mZoomCorner.size());
+ Imgproc.resize(mZoomWindow, mZoomCorner, mZoomCorner.size());
Size wsize = mZoomWindow.size();
- core.rectangle(mZoomWindow, new Point(1, 1), new Point(wsize.width - 2, wsize.height - 2), new Scalar(255, 0, 0, 255), 2);
+ Core.rectangle(mZoomWindow, new Point(1, 1), new Point(wsize.width - 2, wsize.height - 2), new Scalar(255, 0, 0, 255), 2);
break;
}
synchronized (this) {
// Explicitly deallocate Mats
- if (mZoomWindow != null)
- mZoomWindow.dispose();
- if (mZoomCorner != null)
- mZoomCorner.dispose();
- if (mBlurWindow != null)
- mBlurWindow.dispose();
- if (mGrayInnerWindow != null)
- mGrayInnerWindow.dispose();
- if (mRgbaInnerWindow != null)
- mRgbaInnerWindow.dispose();
+ if (mZoomWindow != null)
+ mZoomWindow.dispose();
+ if (mZoomCorner != null)
+ mZoomCorner.dispose();
+ if (mBlurWindow != null)
+ mBlurWindow.dispose();
+ if (mGrayInnerWindow != null)
+ mGrayInnerWindow.dispose();
+ if (mRgbaInnerWindow != null)
+ mRgbaInnerWindow.dispose();
if (mRgba != null)
mRgba.dispose();
if (mGray != null)
import java.util.List;
-import org.opencv.*;
+import org.opencv.core.Size;
+import org.opencv.highgui.VideoCapture;
+import org.opencv.highgui.Highgui;
import android.content.Context;
import android.graphics.Bitmap;
}
}
- mCamera.set(highgui.CV_CAP_PROP_FRAME_WIDTH, mFrameWidth);
- mCamera.set(highgui.CV_CAP_PROP_FRAME_HEIGHT, mFrameHeight);
+ mCamera.set(Highgui.CV_CAP_PROP_FRAME_WIDTH, mFrameWidth);
+ mCamera.set(Highgui.CV_CAP_PROP_FRAME_HEIGHT, mFrameHeight);
}
}
}
public void surfaceCreated(SurfaceHolder holder) {
Log.i(TAG, "surfaceCreated");
- mCamera = new VideoCapture(highgui.CV_CAP_ANDROID);
+ mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
if (mCamera.isOpened()) {
(new Thread(this)).start();
} else {
import org.opencv.android;
import org.opencv.core.Core;
import org.opencv.core.Mat;
-import org.opencv.core.Size;
import org.opencv.core.Point;
import org.opencv.core.Scalar;
import org.opencv.core.CvType;
package org.opencv.samples.tutorial2;
-import org.opencv.*;
+import org.opencv.android;
+import org.opencv.core.Core;
+import org.opencv.core.Mat;
+import org.opencv.core.Point;
+import org.opencv.core.Scalar;
+import org.opencv.imgproc.Imgproc;
+import org.opencv.highgui.Highgui;
+import org.opencv.highgui.VideoCapture;
import android.content.Context;
import android.graphics.Bitmap;
protected Bitmap processFrame(VideoCapture capture) {
switch (Sample2NativeCamera.viewMode) {
case Sample2NativeCamera.VIEW_MODE_GRAY:
- capture.retrieve(mGray, highgui.CV_CAP_ANDROID_GREY_FRAME);
- imgproc.cvtColor(mGray, mRgba, imgproc.COLOR_GRAY2RGBA, 4);
+ capture.retrieve(mGray, Highgui.CV_CAP_ANDROID_GREY_FRAME);
+ Imgproc.cvtColor(mGray, mRgba, Imgproc.COLOR_GRAY2RGBA, 4);
break;
case Sample2NativeCamera.VIEW_MODE_RGBA:
- capture.retrieve(mRgba, highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
- core.putText(mRgba, "OpenCV + Android", new Point(10, 100), 3/* CV_FONT_HERSHEY_COMPLEX */, 2, new Scalar(255, 0, 0, 255), 3);
+ capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
+ Core.putText(mRgba, "OpenCV + Android", new Point(10, 100), 3/* CV_FONT_HERSHEY_COMPLEX */, 2, new Scalar(255, 0, 0, 255), 3);
break;
case Sample2NativeCamera.VIEW_MODE_CANNY:
- capture.retrieve(mGray, highgui.CV_CAP_ANDROID_GREY_FRAME);
- imgproc.Canny(mGray, mIntermediateMat, 80, 100);
- imgproc.cvtColor(mIntermediateMat, mRgba, imgproc.COLOR_GRAY2BGRA, 4);
+ capture.retrieve(mGray, Highgui.CV_CAP_ANDROID_GREY_FRAME);
+ Imgproc.Canny(mGray, mIntermediateMat, 80, 100);
+ Imgproc.cvtColor(mIntermediateMat, mRgba, Imgproc.COLOR_GRAY2BGRA, 4);
break;
}
import java.util.List;
-import org.opencv.*;
+import org.opencv.core.Size;
+import org.opencv.highgui.VideoCapture;
+import org.opencv.highgui.Highgui;
import android.content.Context;
import android.graphics.Bitmap;
}
}
- mCamera.set(highgui.CV_CAP_PROP_FRAME_WIDTH, mFrameWidth);
- mCamera.set(highgui.CV_CAP_PROP_FRAME_HEIGHT, mFrameHeight);
+ mCamera.set(Highgui.CV_CAP_PROP_FRAME_WIDTH, mFrameWidth);
+ mCamera.set(Highgui.CV_CAP_PROP_FRAME_HEIGHT, mFrameHeight);
}
}
}
public void surfaceCreated(SurfaceHolder holder) {
Log.i(TAG, "surfaceCreated");
- mCamera = new VideoCapture(highgui.CV_CAP_ANDROID);
+ mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
if (mCamera.isOpened()) {
(new Thread(this)).start();
} else {
package org.opencv.samples.tutorial4;
-import org.opencv.*;
+import org.opencv.android;
+import org.opencv.core.Mat;
+import org.opencv.core.CvType;
+import org.opencv.imgproc.Imgproc;
import android.content.Context;
import android.graphics.Bitmap;
switch (Sample4Mixed.viewMode) {
case Sample4Mixed.VIEW_MODE_GRAY:
- imgproc.cvtColor(mGraySubmat, mRgba, imgproc.COLOR_GRAY2RGBA, 4);
+ Imgproc.cvtColor(mGraySubmat, mRgba, Imgproc.COLOR_GRAY2RGBA, 4);
break;
case Sample4Mixed.VIEW_MODE_RGBA:
- imgproc.cvtColor(mYuv, mRgba, imgproc.COLOR_YUV420i2RGB, 4);
+ Imgproc.cvtColor(mYuv, mRgba, Imgproc.COLOR_YUV420i2RGB, 4);
break;
case Sample4Mixed.VIEW_MODE_CANNY:
- imgproc.Canny(mGraySubmat, mIntermediateMat, 80, 100);
- imgproc.cvtColor(mIntermediateMat, mRgba, imgproc.COLOR_GRAY2BGRA, 4);
+ Imgproc.Canny(mGraySubmat, mIntermediateMat, 80, 100);
+ Imgproc.cvtColor(mIntermediateMat, mRgba, Imgproc.COLOR_GRAY2BGRA, 4);
break;
case Sample4Mixed.VIEW_MODE_FEATURES:
- imgproc.cvtColor(mYuv, mRgba, imgproc.COLOR_YUV420i2RGB, 4);
+ Imgproc.cvtColor(mYuv, mRgba, Imgproc.COLOR_YUV420i2RGB, 4);
FindFeatures(mGraySubmat.getNativeObjAddr(), mRgba.getNativeObjAddr());
break;
}