Merged android-experimental branch back to trunk.
author Andrey Kamaev <no@email>
Wed, 6 Apr 2011 14:31:03 +0000 (14:31 +0000)
committer Andrey Kamaev <no@email>
Wed, 6 Apr 2011 14:31:03 +0000 (14:31 +0000)
155 files changed:
3rdparty/CMakeLists.txt
3rdparty/lapack/CMakeLists.txt
3rdparty/libjasper/CMakeLists.txt
3rdparty/libjpeg/CMakeLists.txt
3rdparty/libpng/CMakeLists.txt
3rdparty/libtiff/tif_config.h
3rdparty/zlib/CMakeLists.txt
CMakeLists.txt
OpenCVConfig.cmake.in
OpenCVModule.cmake
OpenCVPCHSupport.cmake
android/Android.mk.in [deleted file]
android/Android.mk.master.in [deleted file]
android/Android.mk.modules.in [deleted file]
android/AndroidCVModule.cmake [deleted file]
android/AndroidManifest.xml.in [deleted file]
android/Application.mk.in [deleted file]
android/CMakeCache.android.initial.cmake [new file with mode: 0644]
android/CMakeLists.txt [deleted file]
android/README.android.txt
android/android-cmake/CMakeLists.txt [deleted file]
android/android-jni/AndroidManifest.xml [deleted file]
android/android-jni/Makefile [deleted file]
android/android-jni/README.txt [deleted file]
android/android-jni/default.properties [deleted file]
android/android-jni/jni/Android.mk [deleted file]
android/android-jni/jni/Application.mk [deleted file]
android/android-jni/jni/Calibration.cpp [deleted file]
android/android-jni/jni/Calibration.h [deleted file]
android/android-jni/jni/Calibration.i [deleted file]
android/android-jni/jni/android-cv-typemaps.i [deleted file]
android/android-jni/jni/android-cv.i [deleted file]
android/android-jni/jni/buffers.i [deleted file]
android/android-jni/jni/cv.i [deleted file]
android/android-jni/jni/gl_code.cpp [deleted file]
android/android-jni/jni/glcamera.h [deleted file]
android/android-jni/jni/glcamera.i [deleted file]
android/android-jni/jni/image_pool.cpp [deleted file]
android/android-jni/jni/image_pool.h [deleted file]
android/android-jni/jni/image_pool.i [deleted file]
android/android-jni/jni/nocopy.i [deleted file]
android/android-jni/jni/yuv2rgb_neon.c [deleted file]
android/android-jni/jni/yuv420sp2rgb.c [deleted file]
android/android-jni/jni/yuv420sp2rgb.h [deleted file]
android/android-jni/project_create.sh [deleted file]
android/android-jni/res/layout/calibrationviewer.xml [deleted file]
android/android-jni/res/layout/camerasettings.xml [deleted file]
android/android-jni/res/layout/chesssizer.xml [deleted file]
android/android-jni/res/values/attrs.xml [deleted file]
android/android-jni/res/values/chessnumbers.xml [deleted file]
android/android-jni/res/values/settingnumbers.xml [deleted file]
android/android-jni/res/values/strings.xml [deleted file]
android/android-jni/sample.local.env.mk [deleted file]
android/android-jni/src/com/opencv/OpenCV.java [deleted file]
android/android-jni/src/com/opencv/calibration/CalibrationViewer.java [deleted file]
android/android-jni/src/com/opencv/calibration/Calibrator.java [deleted file]
android/android-jni/src/com/opencv/calibration/ChessBoardChooser.java [deleted file]
android/android-jni/src/com/opencv/calibration/services/CalibrationService.java [deleted file]
android/android-jni/src/com/opencv/camera/CameraConfig.java [deleted file]
android/android-jni/src/com/opencv/camera/NativePreviewer.java [deleted file]
android/android-jni/src/com/opencv/camera/NativeProcessor.java [deleted file]
android/android-jni/src/com/opencv/opengl/GL2CameraViewer.java [deleted file]
android/android-opencv.mk.in [deleted file]
android/android-opencv/AndroidManifest.xml [new file with mode: 0644]
android/android-opencv/AndroidOpenCVConfig.cmake.in [new file with mode: 0644]
android/android-opencv/CMakeLists.txt [new file with mode: 0644]
android/android-opencv/README.txt [new file with mode: 0644]
android/android-opencv/default.properties [new file with mode: 0644]
android/android-opencv/jni/CMakeLists.txt [new file with mode: 0644]
android/android-opencv/jni/Calibration.cpp [new file with mode: 0644]
android/android-opencv/jni/Calibration.i [new file with mode: 0644]
android/android-opencv/jni/android-cv-typemaps.i [new file with mode: 0644]
android/android-opencv/jni/android-cv.i [new file with mode: 0644]
android/android-opencv/jni/buffers.i [new file with mode: 0644]
android/android-opencv/jni/cv.i [new file with mode: 0644]
android/android-opencv/jni/gl_code.cpp [new file with mode: 0644]
android/android-opencv/jni/glcamera.i [new file with mode: 0644]
android/android-opencv/jni/image_pool.cpp [new file with mode: 0644]
android/android-opencv/jni/image_pool.i [new file with mode: 0644]
android/android-opencv/jni/include/Calibration.h [new file with mode: 0644]
android/android-opencv/jni/include/android_logger.h [new file with mode: 0644]
android/android-opencv/jni/include/glcamera.h [new file with mode: 0644]
android/android-opencv/jni/include/image_pool.h [new file with mode: 0644]
android/android-opencv/jni/include/yuv2rgb.h [new file with mode: 0644]
android/android-opencv/jni/include/yuv420sp2rgb.h [new file with mode: 0644]
android/android-opencv/jni/nocopy.i [new file with mode: 0644]
android/android-opencv/jni/yuv2rgb16tab.c [new file with mode: 0644]
android/android-opencv/jni/yuv2rgb_neon.c [new file with mode: 0644]
android/android-opencv/jni/yuv420rgb888.s [new file with mode: 0644]
android/android-opencv/jni/yuv420rgb888c.c [new file with mode: 0644]
android/android-opencv/jni/yuv420sp2rgb.c [new file with mode: 0644]
android/android-opencv/project_create.sh [new file with mode: 0755]
android/android-opencv/res/drawable-mdpi/cameraback.jpg [new file with mode: 0644]
android/android-opencv/res/layout/calibrationviewer.xml [new file with mode: 0644]
android/android-opencv/res/layout/camera.xml [new file with mode: 0644]
android/android-opencv/res/layout/camerasettings.xml [new file with mode: 0644]
android/android-opencv/res/layout/chesssizer.xml [new file with mode: 0644]
android/android-opencv/res/values/attrs.xml [new file with mode: 0644]
android/android-opencv/res/values/chessnumbers.xml [new file with mode: 0644]
android/android-opencv/res/values/settingnumbers.xml [new file with mode: 0644]
android/android-opencv/res/values/strings.xml [new file with mode: 0644]
android/android-opencv/src/com/opencv/OpenCV.java [new file with mode: 0644]
android/android-opencv/src/com/opencv/calibration/CalibrationViewer.java [new file with mode: 0644]
android/android-opencv/src/com/opencv/calibration/Calibrator.java [new file with mode: 0644]
android/android-opencv/src/com/opencv/calibration/ChessBoardChooser.java [new file with mode: 0644]
android/android-opencv/src/com/opencv/calibration/services/CalibrationService.java [new file with mode: 0644]
android/android-opencv/src/com/opencv/camera/CameraActivity.java [new file with mode: 0644]
android/android-opencv/src/com/opencv/camera/CameraButtonsHandler.java [new file with mode: 0644]
android/android-opencv/src/com/opencv/camera/CameraConfig.java [new file with mode: 0644]
android/android-opencv/src/com/opencv/camera/NativePreviewer.java [new file with mode: 0644]
android/android-opencv/src/com/opencv/camera/NativeProcessor.java [new file with mode: 0644]
android/android-opencv/src/com/opencv/opengl/GL2CameraViewer.java [new file with mode: 0644]
android/android-opencv/src/com/opencv/utils/BitmapBridge.java [new file with mode: 0644]
android/apps/CVCamera/CMakeLists.txt [new file with mode: 0644]
android/apps/CVCamera/Makefile [deleted file]
android/apps/CVCamera/build.sh [deleted file]
android/apps/CVCamera/clean.sh [deleted file]
android/apps/CVCamera/default.properties
android/apps/CVCamera/jni/Android.mk [deleted file]
android/apps/CVCamera/jni/Application.mk [deleted file]
android/apps/CVCamera/jni/CMakeLists.txt [new file with mode: 0644]
android/apps/CVCamera/sample.local.env.mk [deleted file]
android/apps/Calibration/default.properties
android/apps/Calibration/res/layout/calib_camera.xml [new file with mode: 0644]
android/apps/Calibration/res/layout/camera.xml [deleted file]
android/apps/Calibration/src/com/opencv/calibration/Calibration.java
android/apps/OpenCV_SAMPLE/AndroidManifest.xml [new file with mode: 0644]
android/apps/OpenCV_SAMPLE/CMakeLists.txt [new file with mode: 0644]
android/apps/OpenCV_SAMPLE/default.properties [new file with mode: 0644]
android/apps/OpenCV_SAMPLE/jni/CMakeLists.txt [new file with mode: 0644]
android/apps/OpenCV_SAMPLE/jni/OpenCV_SAMPLE.i [new file with mode: 0644]
android/apps/OpenCV_SAMPLE/jni/cvsample.cpp [new file with mode: 0644]
android/apps/OpenCV_SAMPLE/jni/cvsample.h [new file with mode: 0644]
android/apps/OpenCV_SAMPLE/project_create.sh [new file with mode: 0755]
android/apps/OpenCV_SAMPLE/res/drawable-hdpi/icon.png [new file with mode: 0644]
android/apps/OpenCV_SAMPLE/res/drawable-ldpi/icon.png [new file with mode: 0644]
android/apps/OpenCV_SAMPLE/res/drawable-mdpi/icon.png [new file with mode: 0644]
android/apps/OpenCV_SAMPLE/res/layout/main.xml [new file with mode: 0644]
android/apps/OpenCV_SAMPLE/res/menu/sample_menu.xml [new file with mode: 0644]
android/apps/OpenCV_SAMPLE/res/values/strings.xml [new file with mode: 0644]
android/apps/OpenCV_SAMPLE/src/com/OpenCV_SAMPLE/OpenCV_SAMPLE.java [new file with mode: 0644]
android/changes.Android.txt [deleted file]
android/changes.diff [deleted file]
android/cmake_android.sh [new file with mode: 0644]
android/cmake_android_armeabi.sh [new file with mode: 0644]
android/cvconfig.h.in [deleted file]
android/default.properties.in [deleted file]
android/diff.txt [deleted file]
modules/CMakeLists.txt
modules/haartraining/CMakeLists.txt
modules/highgui/CMakeLists.txt
modules/highgui/include/opencv2/highgui/highgui_c.h
modules/highgui/src/cap.cpp
modules/highgui/src/cap_android.cpp [new file with mode: 0644]
modules/highgui/src/precomp.hpp

index babacb3037cc51f9ae3e8faf0a35b75e90c7150c..f536e215b63d676399b9fd2548790a90c8e8e0e4 100644 (file)
@@ -1,7 +1,3 @@
-if(ANDROID)
- configure_file("${CMAKE_SOURCE_DIR}/Android.mk.modules.in" "${CMAKE_CURRENT_BINARY_DIR}/Android.mk")
-endif()
-
 add_subdirectory(lapack)
 add_subdirectory(zlib)
 if(WITH_JASPER AND NOT JASPER_FOUND)
index b2185ee9fc237bfbe7e499fc1f23de929457fb61..9ccf07cf297e546a2deaba86e5826e4ef55cb12c 100644 (file)
@@ -2,9 +2,6 @@
 #  CMake file for opencv_lapack. See root CMakeLists.txt
 #
 # ----------------------------------------------------------------------------
-if(ANDROID)
-define_3rdparty_module(opencv_lapack)
-else()
 
 project(opencv_lapack)
 
@@ -65,5 +62,3 @@ if(NOT BUILD_SHARED_LIBS)
     install(TARGETS ${the_target}
            ARCHIVE DESTINATION share/opencv/3rdparty/lib COMPONENT main)
 endif()
-    
-endif() #android
index 7f1effc1a5cdffcf8d6030573f0a36e6cbfcae45..d5627b38db8c47c1326e92b63e932ff75da7fac4 100644 (file)
@@ -1,10 +1,3 @@
-if(ANDROID)
-set(android_defs "-DEXCLUDE_MIF_SUPPORT -DEXCLUDE_PNM_SUPPORT -DEXCLUDE_BMP_SUPPORT -DEXCLUDE_RAS_SUPPORT  -DEXCLUDE_JPG_SUPPORT -DEXCLUDE_PGX_SUPPORT")
-define_3rdparty_module(jasper)
-set(android_defs "")
-else(ANDROID)
-
-
 # ----------------------------------------------------------------------------
 #  CMake file for libjasper. See root CMakeLists.txt
 #
@@ -62,5 +55,3 @@ if(NOT BUILD_SHARED_LIBS)
     install(TARGETS ${the_target}
            ARCHIVE DESTINATION share/opencv/3rdparty/lib COMPONENT main)
 endif()
-    
-endif(ANDROID)#!android
index af8e6f2131a5821f38ff025b0f6c50fa2f61a210..7b2b1fa73c18c08feb818792eb7c2a7b64887326 100644 (file)
@@ -1,7 +1,3 @@
-if(ANDROID)
-define_3rdparty_module(jpeg)
-else()
-#endif()#android
 # ----------------------------------------------------------------------------
 #  CMake file for libjpeg. See root CMakeLists.txt
 #
@@ -52,5 +48,3 @@ if(NOT BUILD_SHARED_LIBS)
     install(TARGETS ${the_target}
            ARCHIVE DESTINATION share/opencv/3rdparty/lib COMPONENT main)
 endif()
-    
-endif()#android
index 0eb998bd3b26182310dddf209474660e3f31ae4b..b5d441ca8a6cb0269197eb5c5f22b04bcfd1b153 100644 (file)
@@ -1,6 +1,3 @@
-if(ANDROID)
-define_3rdparty_module(png)
-else()
 # ----------------------------------------------------------------------------
 #  CMake file for libpng. See root CMakeLists.txt
 #
@@ -46,5 +43,3 @@ if(NOT BUILD_SHARED_LIBS)
     install(TARGETS ${the_target}
            ARCHIVE DESTINATION share/opencv/3rdparty/lib COMPONENT main)
 endif()
-    
-endif()#android
index 08d1f0d071fc1f0c8519df933f14c28168d8fa6f..8c8d145978c480a2e5370a665606024dfd8df4f0 100644 (file)
 /* Define to 1 if you have the <io.h> header file. */
 #define HAVE_IO_H 1
 
+#if !__ANDROID__
 /* Define to 1 if you have the <search.h> header file. */
 #define HAVE_SEARCH_H 1
+#endif
 
 /* Define to 1 if you have the `setmode' function. */
 #define HAVE_SETMODE 1
index 340a8b58db4aa14f62e192e73b4a4c66c297b33e..9c20f9cc9293b31e12392d3dceefb9a0e6684c01 100644 (file)
@@ -1,6 +1,3 @@
-if(ANDROID)
-define_3rdparty_module(zlib)
-else()
 # ----------------------------------------------------------------------------
 #  CMake file for zlib. See root CMakeLists.txt
 #
@@ -43,5 +40,3 @@ if(NOT BUILD_SHARED_LIBS)
     install(TARGETS ${the_target}
            ARCHIVE DESTINATION share/opencv/3rdparty/lib COMPONENT main)
 endif()
-
-endif(ANDROID)
index ae1b2208393f41192f63b2d2a74af96d4f6262de..9cf7c2501399d9d4c28803e2205c7beaed1ba9b5 100644 (file)
@@ -482,8 +482,10 @@ if(UNIX)
             endif()\r
         endif()\r
 \r
-        if(${CMAKE_SYSTEM_NAME} MATCHES "FreeBSD" OR ${CMAKE_SYSTEM_NAME} MATCHES "NetBSD")\r
+        if(${CMAKE_SYSTEM_NAME} MATCHES "FreeBSD" OR ${CMAKE_SYSTEM_NAME} MATCHES "NetBSD" AND NOT ANDROID)\r
             set(OPENCV_LINKER_LIBS m pthread)\r
+        elseif(ANDROID)\r
+            set(OPENCV_LINKER_LIBS dl m)\r
         else()\r
             set(OPENCV_LINKER_LIBS dl m pthread rt)\r
         endif()\r
@@ -508,6 +510,18 @@ if(WITH_JASPER AND NOT JASPER_FOUND)
     set(JASPER_LIBRARIES libjasper)\r
 endif()\r
 \r
+if (ANDROID)\r
+  set(NativeCamera_DIR "${CMAKE_SOURCE_DIR}/android/native-camera/build")\r
+  FIND_PACKAGE(NativeCamera QUIET)\r
+  if(NativeCamera_FOUND)\r
+    set(HAVE_ANDROID_NATIVE_CAMERA TRUE)\r
+    set(OPENCV_LINKER_LIBS ${OPENCV_LINKER_LIBS} ${NativeCamera_LIBS} log)\r
+  else()\r
+    set(HAVE_ANDROID_NATIVE_CAMERA FALSE)\r
+    message("-- Could NOT find NativeCamera for Android")\r
+  endif()\r
+endif()\r
+\r
 #message(STATUS "Graphic libraries: ${PNG_LIBRARIES} ${JPEG_LIBRARIES} ${TIFF_LIBRARIES} ${JASPER_LIBRARIES}")\r
 \r
 if(WITH_OPENEXR)\r
@@ -895,7 +909,9 @@ if(CMAKE_COMPILER_IS_GNUCXX)
 \r
     # We need pthread's\r
     if(UNIX)\r
+      if(NOT ANDROID)\r
         set(EXTRA_C_FLAGS "${EXTRA_C_FLAGS} -pthread")\r
+      endif()\r
     endif()\r
 \r
     if(OPENCV_WARNINGS_ARE_ERRORS)\r
@@ -1083,7 +1099,17 @@ endif()
 set(CMAKE_INCLUDE_DIRS_CONFIGCMAKE "\"${OPENCV_CONFIG_FILE_INCLUDE_DIR}\" \"${CMAKE_CURRENT_SOURCE_DIR}/include\" \"${CMAKE_CURRENT_SOURCE_DIR}/include/opencv\"")\r
 set(CMAKE_BASE_INCLUDE_DIRS_CONFIGCMAKE "\"${CMAKE_CURRENT_SOURCE_DIR}\"")\r
 \r
-set(CMAKE_LIB_DIRS_CONFIGCMAKE "${LIBRARY_OUTPUT_PATH}")\r
+set(CMAKE_LIB_DIRS_CONFIGCMAKE "${LIBRARY_OUTPUT_PATH}")
+
+if (ANDROID)
+       if (NOT BUILD_SHARED_LIBS)\r
+         set(CMAKE_LIB_DIRS_CONFIGCMAKE ${CMAKE_LIB_DIRS_CONFIGCMAKE} "${CMAKE_BINARY_DIR}/lib")\r
+       endif()
+
+       if( HAVE_ANDROID_NATIVE_CAMERA )\r
+       set(CMAKE_LIB_DIRS_CONFIGCMAKE ${CMAKE_LIB_DIRS_CONFIGCMAKE} ${NativeCamera_LIB_DIR})\r
+       endif()\r
+endif()\r
 \r
 configure_file("${CMAKE_CURRENT_SOURCE_DIR}/OpenCVConfig.cmake.in" "${CMAKE_BINARY_DIR}/OpenCVConfig.cmake" IMMEDIATE @ONLY)\r
 \r
@@ -1367,6 +1393,9 @@ else()
 message(STATUS "    V4L/V4L2:                   ${HAVE_CAMV4L}/${HAVE_CAMV4L2}")\r
 endif()\r
 message(STATUS "    Xine:                       ${HAVE_XINE}")\r
+if(ANDROID)\r
+message(STATUS "    AndroidNativeCamera:        ${HAVE_ANDROID_NATIVE_CAMERA}")\r
+endif()\r
 endif() #if(UNIX AND NOT APPLE)\r
 \r
 if(APPLE)\r
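The hunks above add an optional probe for the NativeCamera package and expose the result as HAVE_ANDROID_NATIVE_CAMERA. The modules/highgui change that consumes this flag is listed in the file summary, but its hunk is not shown in this excerpt, so the following is only a sketch of how such a guard typically looks; the highgui_srcs variable and the use of add_definitions are assumptions for illustration, not taken from this commit.

  # Sketch: compile the Android capture backend only when NativeCamera was found.
  # "highgui_srcs" and the cap_android.cpp path are assumed for illustration.
  if(ANDROID AND HAVE_ANDROID_NATIVE_CAMERA)
      add_definitions(-DHAVE_ANDROID_NATIVE_CAMERA)
      list(APPEND highgui_srcs src/cap_android.cpp)
  endif()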
index 8efd6028f303021b050b92330be70e568db539f4..efe3593e3184a8fd684ef65c4eee72f73b103aff 100644 (file)
@@ -52,8 +52,14 @@ LINK_DIRECTORIES(${OpenCV_LIB_DIR})
 \r
 # ====================================================================\r
 # Link libraries: e.g.   opencv_core220.so, opencv_imgproc220d.lib, etc...\r
-# ====================================================================\r
-set(OPENCV_LIB_COMPONENTS opencv_core opencv_imgproc opencv_features2d opencv_gpu opencv_calib3d opencv_objdetect opencv_video opencv_highgui opencv_ml opencv_legacy opencv_contrib opencv_flann)\r
+# ====================================================================
+if(NOT ANDROID)\r
+       set(OPENCV_LIB_COMPONENTS opencv_core opencv_imgproc opencv_features2d opencv_gpu opencv_calib3d opencv_objdetect opencv_video opencv_highgui opencv_ml opencv_legacy opencv_contrib opencv_flann)
+else()
+       #libraries order is very important because linker from Android NDK is one-pass linker\r
+       set(OPENCV_LIB_COMPONENTS opencv_calib3d opencv_objdetect opencv_features2d opencv_imgproc opencv_video  opencv_highgui opencv_ml opencv_legacy  opencv_flann opencv_core )\r
+endif()
+\r
 SET(OpenCV_LIBS "")\r
 foreach(__CVLIB ${OPENCV_LIB_COMPONENTS})\r
        # CMake>=2.6 supports the notation "debug XXd optimized XX"\r
@@ -85,6 +91,9 @@ IF (NOT @OPENCV_BUILD_SHARED_LIB@)
     LINK_DIRECTORIES(@CMAKE_LIB_DIRS_CONFIGCMAKE@/../3rdparty/lib)\r
     if(WIN32)\r
         LINK_DIRECTORIES(@CMAKE_BASE_INCLUDE_DIRS_CONFIGCMAKE@/3rdparty/lib)\r
+    elseif(ANDROID)\r
+        LINK_DIRECTORIES(@CMAKE_BINARY_DIR@/3rdparty/lib)\r
+        LINK_DIRECTORIES(@CMAKE_LIB_DIRS_CONFIGCMAKE@/3rdparty/lib)\r
     else()\r
         LINK_DIRECTORIES(@CMAKE_LIB_DIRS_CONFIGCMAKE@/../share/opencv/3rdparty/lib)\r
     endif()    \r
index be84e19639c58bd9771ed7d57abfc4fb5d580320..5939ebbc6420d6d4415695b8e4b508418063ae17 100644 (file)
@@ -29,18 +29,6 @@ macro(define_opencv_module name)
 
     add_library(${the_target} ${lib_srcs} ${lib_hdrs} ${lib_int_hdrs})
 
-    if(PCHSupport_FOUND)
-        set(pch_header ${CMAKE_CURRENT_SOURCE_DIR}/src/precomp.hpp)
-        if(${CMAKE_GENERATOR} MATCHES "Visual*" OR ${CMAKE_GENERATOR} MATCHES "Xcode*")
-            if(${CMAKE_GENERATOR} MATCHES "Visual*")
-                set(${the_target}_pch "src/precomp.cpp")
-            endif()            
-            add_native_precompiled_header(${the_target} ${pch_header})
-        elseif(CMAKE_COMPILER_IS_GNUCXX AND ${CMAKE_GENERATOR} MATCHES ".*Makefiles")
-            add_precompiled_header(${the_target} ${pch_header})
-        endif()
-    endif()
-
     # For dynamic link numbering convenions
     set_target_properties(${the_target} PROPERTIES
         VERSION ${OPENCV_VERSION}
@@ -56,6 +44,18 @@ macro(define_opencv_module name)
         INSTALL_NAME_DIR "${CMAKE_INSTALL_PREFIX}/lib"
         )
 
+    if(PCHSupport_FOUND)
+        set(pch_header ${CMAKE_CURRENT_SOURCE_DIR}/src/precomp.hpp)
+        if(${CMAKE_GENERATOR} MATCHES "Visual*" OR ${CMAKE_GENERATOR} MATCHES "Xcode*")
+            if(${CMAKE_GENERATOR} MATCHES "Visual*")
+                set(${the_target}_pch "src/precomp.cpp")
+            endif()            
+            add_native_precompiled_header(${the_target} ${pch_header})
+        elseif(CMAKE_COMPILER_IS_GNUCXX AND ${CMAKE_GENERATOR} MATCHES ".*Makefiles")
+            add_precompiled_header(${the_target} ${pch_header})
+        endif()
+    endif()
+
     # Add the required libraries for linking:
     target_link_libraries(${the_target} ${OPENCV_LINKER_LIBS} ${IPP_LIBS} ${ARGN})
 
index 3c3820125624a3a07fe923b58d8b3b86ab6822ce..2fdfba55710eaeb6052b4359fec3d360b258b1c7 100644 (file)
@@ -34,6 +34,10 @@ ELSE()
        ENDIF()
 ENDIF(CMAKE_COMPILER_IS_GNUCXX)
 
+#if (ANDROID)
+  #SET(PCHSupport_FOUND FALSE)
+#endif()
+
 
 MACRO(_PCH_GET_COMPILE_FLAGS _out_compile_flags)
 
diff --git a/android/Android.mk.in b/android/Android.mk.in
deleted file mode 100644 (file)
index 65d0826..0000000
+++ /dev/null
@@ -1,17 +0,0 @@
-LOCAL_PATH := ${CMAKE_CURRENT_SOURCE_DIR}
-
-include $(CLEAR_VARS)
-
-LOCAL_MODULE := ${android_module_name}
-
-ifeq ($(TARGET_ARCH_ABI),armeabi-v7a)
-LOCAL_ARM_NEON := true
-endif
-
-LOCAL_SRC_FILES :=  ${android_srcs}
-
-LOCAL_CFLAGS := ${android_defs}
-
-LOCAL_C_INCLUDES :=  ${include_dirs} $(LOCAL_PATH)
-
-include $(BUILD_STATIC_LIBRARY)
diff --git a/android/Android.mk.master.in b/android/Android.mk.master.in
deleted file mode 100644 (file)
index d2a0add..0000000
+++ /dev/null
@@ -1,5 +0,0 @@
-include android-opencv.mk
-include modules/Android.mk
-include 3rdparty/Android.mk
-#include zlib-android/Android.mk
-
diff --git a/android/Android.mk.modules.in b/android/Android.mk.modules.in
deleted file mode 100644 (file)
index 8338432..0000000
+++ /dev/null
@@ -1,2 +0,0 @@
-include $(call all-subdir-makefiles)
-
diff --git a/android/AndroidCVModule.cmake b/android/AndroidCVModule.cmake
deleted file mode 100644 (file)
index c7a5de0..0000000
+++ /dev/null
@@ -1,28 +0,0 @@
-macro(define_android_manual name lib_srcs includes)
-set(android_module_name ${name})
-set(android_srcs "")
-set(include_dirs "${includes}")
-foreach(f ${lib_srcs})
-               string(REPLACE "${CMAKE_CURRENT_SOURCE_DIR}/" "" n_f ${f})      
-               set(android_srcs "${android_srcs} ${n_f}")
-endforeach()
-configure_file("${CMAKE_SOURCE_DIR}/Android.mk.in" "${CMAKE_CURRENT_BINARY_DIR}/Android.mk")
-endmacro()
-
-
-macro(define_3rdparty_module name)
-       file(GLOB lib_srcs "*.c" "*.cpp")
-       file(GLOB lib_int_hdrs "*.h*")
-       define_android_manual(${name} "${lib_srcs}" "$(LOCAL_PATH)/../include")         
-endmacro()
-
-macro(define_opencv_module name)
-       file(GLOB lib_srcs "src/*.cpp")
-       file(GLOB lib_int_hdrs "src/*.h*")
-       define_android_manual(opencv_${name} "${lib_srcs}" "$(LOCAL_PATH)/src  $(OPENCV_INCLUDES)")
-endmacro()
-
-
-
-
-
diff --git a/android/AndroidManifest.xml.in b/android/AndroidManifest.xml.in
deleted file mode 100644 (file)
index 3ce558f..0000000
+++ /dev/null
@@ -1,7 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<manifest xmlns:android="http://schemas.android.com/apk/res/android"
-      package="com.opencv"
-      android:versionCode="1"
-      android:versionName="1.0">
-   
-</manifest> 
\ No newline at end of file
diff --git a/android/Application.mk.in b/android/Application.mk.in
deleted file mode 100644 (file)
index c504912..0000000
+++ /dev/null
@@ -1,6 +0,0 @@
-APP_BUILD_SCRIPT := $(call my-dir)/Android.mk
-APP_PROJECT_PATH := $(call my-dir)
-# The ARMv7 is significanly faster due to the use of the hardware FPU
-APP_ABI := ${ARM_TARGETS}
-APP_MODULES      := png jpeg jasper zlib opencv_lapack opencv_core opencv_imgproc opencv_ml opencv_highgui opencv_features2d \
-    opencv_legacy opencv_objdetect opencv_calib3d opencv_video opencv_contrib opencv_flann 
diff --git a/android/CMakeCache.android.initial.cmake b/android/CMakeCache.android.initial.cmake
new file mode 100644 (file)
index 0000000..be8c492
--- /dev/null
@@ -0,0 +1,110 @@
+########################
+# Initial cache settings for opencv on android
+# run cmake with:
+# cmake -C 
+########################
+#Build all examples
+set(BUILD_EXAMPLES OFF CACHE BOOL "" )
+
+#Build Reference Manual
+set(BUILD_REFMAN OFF CACHE BOOL "" )
+
+#Build LaTeX OpenCV Documentation
+#set(BUILD_LATEX_DOCS OFF CACHE BOOL "" )
+
+#Build with Python support
+set(BUILD_NEW_PYTHON_SUPPORT OFF CACHE BOOL "" )
+
+#Build a installer with the SDK
+set(BUILD_PACKAGE OFF CACHE BOOL "" )
+
+#Build shared libraries (.dll/.so CACHE BOOL "" ) instead of static ones (.lib/.a CACHE BOOL "" )
+set(BUILD_SHARED_LIBS OFF CACHE BOOL "" )
+
+#Build 3rd party libraries
+set(OPENCV_BUILD_3RDPARTY_LIBS ON CACHE BOOL "" )
+
+#Build tests
+set(BUILD_TESTS OFF CACHE BOOL "" )
+
+#Choose the type of build, options are: None Debug Release RelWithDebInfo
+# MinSizeRel.
+set(CMAKE_BUILD_TYPE "Release" CACHE STRING "" )
+
+#Include IEEE1394 support
+set(WITH_1394 OFF CACHE BOOL "" )
+
+#Include NVidia Cuda Runtime support
+set(WITH_CUDA OFF CACHE BOOL "" )
+
+#Include Eigen2/Eigen3 support
+set(WITH_EIGEN2 OFF CACHE BOOL "" )
+
+#Include FFMPEG support
+set(WITH_FFMPEG OFF CACHE BOOL "" )
+
+#Include Gstreamer support
+set(WITH_GSTREAMER OFF CACHE BOOL "" )
+
+#Include GTK support
+set(WITH_GTK OFF CACHE BOOL "" )
+
+#Include Intel IPP support
+set(WITH_IPP OFF CACHE BOOL "" )
+
+#Include JPEG2K support
+set(WITH_JASPER ON CACHE BOOL "" )
+
+#Include JPEG support
+set(WITH_JPEG ON CACHE BOOL "" )
+
+#Include ILM support via OpenEXR
+set(WITH_OPENEXR OFF CACHE BOOL "" )
+
+#Include OpenNI support
+set(WITH_OPENNI OFF CACHE BOOL "" )
+
+#Include PNG support
+set(WITH_PNG ON CACHE BOOL "" )
+
+#Include Prosilica GigE support
+set(WITH_PVAPI OFF CACHE BOOL "" )
+
+#Build with Qt Backend support
+set(WITH_QT OFF CACHE BOOL "" )
+
+#Add OpenGL extension to Qt
+set(WITH_QT_OPENGL OFF CACHE BOOL "" )
+
+#Include Intel TBB support
+set(WITH_TBB OFF CACHE BOOL "" )
+
+#Include TIFF support
+set(WITH_TIFF ON CACHE BOOL "" )
+
+#Include Unicap support (GPL CACHE BOOL "" )
+set(WITH_UNICAP OFF CACHE BOOL "" )
+
+#Include Video 4 Linux support
+set(WITH_V4L OFF CACHE BOOL "" )
+
+#Include Xine support (GPL CACHE BOOL "" )
+set(WITH_XINE OFF CACHE BOOL "" )
+
+#Enable SSE instructions
+SET( ENABLE_SSE OFF CACHE BOOL "" )
+
+#Enable SSE2 instructions
+SET( ENABLE_SSE2 OFF CACHE BOOL "" )
+
+#Enable SSE3 instructions
+SET( ENABLE_SSE3 OFF CACHE BOOL "" )
+
+#Enable SSE4.1 instructions
+SET( ENABLE_SSE41 OFF CACHE BOOL "" )
+
+#Enable SSE4.2 instructions
+SET( ENABLE_SSE42 OFF CACHE BOOL "" )
+
+#Enable SSSE3 instructions
+SET( ENABLE_SSSE3 OFF CACHE BOOL "" )
diff --git a/android/CMakeLists.txt b/android/CMakeLists.txt
deleted file mode 100644 (file)
index 302cce3..0000000
+++ /dev/null
@@ -1,93 +0,0 @@
-# ----------------------------------------------------------------------------\r
-#  Root CMake file for Android Opencv Build\r
-#\r
-#    To build with cmake\r
-#        $ mkdir build\r
-#        $ cd build\r
-#        $ cmake ..\r
-#        $ make\r
-#    Make sure to set the path in the cache for the crystax ndk available \r
-#    here:\r
-#          http://www.crystax.net/android/ndk-r4.php\r
-#\r
-#   - initial version August 2010 Ethan Rublee ethan.ruble@gmail.com\r
-#\r
-# ----------------------------------------------------------------------------\r
-\r
-project(android-opencv)\r
-\r
-cmake_minimum_required(VERSION 2.8)\r
-\r
-include(AndroidCVModule.cmake REQUIRED)\r
-\r
-set(opencv_root "${CMAKE_SOURCE_DIR}/.." CACHE STRING "opencv source root directory")\r
-if(NOT EXISTS ${opencv_root})\r
-message(FATAL_ERROR "Cannot find your opencv root directory!" )\r
-endif()\r
-set(ANDROID true)\r
-set(WITH_JASPER true)\r
-set(JASPER_FOUND false)\r
-set(WITH_PNG true)\r
-set(WITH_JPEG true)\r
-\r
-file(GLOB module_includes "${opencv_root}/modules/[a-zA-Z]*")\r
-\r
-list(REMOVE_ITEM module_includes ${opencv_root}/modules/CMakeLists.txt)\r
-\r
-set(module_includes ${module_includes} ${CMAKE_SOURCE_DIR}/../3rdparty $(OPENCV_BUILD_ROOT) ${opencv_root} )\r
-foreach(mdir ${module_includes})\r
-    string(REPLACE "${opencv_root}" "$(OPENCV_ROOT)" n_f ${mdir})      \r
-    set(android_module_include_dirs "${android_module_include_dirs} ${n_f}/include")\r
-endforeach()\r
-\r
-set(PossibleArmTargets\r
-  "armeabi armeabi-v7a;armeabi;armeabi-v7a")\r
-set(ARM_TARGETS "armeabi armeabi-v7a" CACHE STRING "the arm targets for android, recommend armeabi-v7a for floating point support and neon")\r
-set_property(CACHE ARM_TARGETS PROPERTY STRINGS ${PossibleArmTargets} )\r
-\r
-\r
-\r
-configure_file("${CMAKE_SOURCE_DIR}/Android.mk.master.in" "${CMAKE_BINARY_DIR}/Android.mk")\r
-configure_file("${CMAKE_SOURCE_DIR}/Application.mk.in" "${CMAKE_BINARY_DIR}/Application.mk")\r
-configure_file("${CMAKE_SOURCE_DIR}/AndroidManifest.xml.in" "${CMAKE_BINARY_DIR}/AndroidManifest.xml")\r
-configure_file("${CMAKE_SOURCE_DIR}/default.properties.in" "${CMAKE_BINARY_DIR}/default.properties")\r
-configure_file("${CMAKE_SOURCE_DIR}/cvconfig.h.in" "${CMAKE_BINARY_DIR}/include/cvconfig.h")\r
-\r
-add_subdirectory(${opencv_root}/modules "${CMAKE_BINARY_DIR}/modules")\r
-\r
-add_subdirectory(${opencv_root}/3rdparty "${CMAKE_BINARY_DIR}/3rdparty")\r
-\r
-set(NDK_ROOT  "$ENV{HOME}/android-ndk-r4-crystax" CACHE STRING "the crystax ndk directory")\r
-if(NOT EXISTS ${NDK_ROOT})\r
-message(FATAL_ERROR "Cannot find your ndk root directory! please download and \r
-unzip the android ndk from crystax to the directory specified by NDK_ROOT\r
-You may download the crystax ndk from: \r
-        http://www.crystax.net/android/ndk-r4.php" )\r
-endif()\r
-set(J  "2" CACHE STRING "how many processes for make -j <J>")\r
-\r
-ADD_CUSTOM_COMMAND(\r
-OUTPUT    android-opencv\r
-DEPENDS   ${CMAKE_BINARY_DIR}/Android.mk\r
-COMMAND   "${NDK_ROOT}/ndk-build" \r
-ARGS      --directory=${CMAKE_BINARY_DIR} NDK_APPLICATION_MK=Application.mk -j${J}\r
-)\r
-\r
-configure_file("${CMAKE_SOURCE_DIR}/android-opencv.mk.in" "${CMAKE_BINARY_DIR}/android-opencv.mk")\r
-\r
-ADD_CUSTOM_TARGET(ndk ALL echo\r
-  DEPENDS android-opencv\r
-)\r
-\r
-message(STATUS "Make will use make -j${J} - for speeding up build - you may change this in the cache")\r
-message(STATUS "The NDK directory is ${NDK_ROOT}")\r
-message(STATUS "OpenCV source root is ${opencv_root}")\r
-message(STATUS "just run make - and grab some coffee or tea ;)")\r
-message(STATUS "The android opencv libs will be located in ${CMAKE_BINARY_DIR}/obj/local/armeabi*/")\r
-\r
-\r
-\r
-  \r
-\r
-\r
-\r
index dec230c94df2d11f7eee34a26e78e4baadc91ebd..99fa368e8ba9256d754752bec2e5f1ab0dff05ca 100644 (file)
@@ -1,39 +1,77 @@
 Author: Ethan Rublee
 email: ethan.rublee@gmail.com
+########################################################
+Prerequisites:
+########################################################
+   android-ndk-r5b http://developer.android.com/sdk/ndk/index.html
+      the official ndk with standalone toolchain
+   android-cmake http://code.google.com/p/android-cmake/
+      this is for the cmake toolchain for android
+   mercurial
+    sudo apt-get install mercurial
+   cmake
+   opencv (you should have this if you're reading this file :)
 
-  
-To build with cmake:
+########################################################   
+Quick NDK Setup (ubuntu and bash):
+########################################################
+create some working directory:
+  WORK=$HOME/android_dev
+  cd $WORK
+
+now get the android-cmake project with mercurial
+  hg clone https://android-cmake.googlecode.com/hg/ android-cmake
 
-mkdir build
-cd build
-cmake ..
-make
+there is a convenience script in there for pulling down and setting up the
+android ndk as a standalone toolchain
+  cd android-cmake/scripts
+  ./get_ndk_toolchain_linux.sh $WORK
 
+add the cmake toolchain location to your bashrc or otherwise export it to your env
+  echo export ANDTOOLCHAIN=$WORK/android-cmake/toolchain/android.toolchain.cmake >> $HOME/.bashrc
 
+########################################################
+Quick opencv build (ubuntu and bash):
+########################################################
+Make sure you either source your bashrc or otherwise export the ANDTOOLCHAIN variable.
 
-Make sure to set the path in the cache for the crystax ndk available 
-here:
-   http://www.crystax.net/android/ndk-r4.php
-   
-   
-to include in an android project -
-just include the generated android-opencv.mk in you android ndk project 
-(in an Android.mk file)
-with:
+There is a script in the android folder for running cmake with the proper cache
+variables set.  It is recommended that you use this to set up a cmake build directory.
+  cd opencv/android
+  sh ./cmake_android.sh
 
-include android-opencv.mk
+You should now see a build directory that is ready to be built.
+  cd build
+  make -j8
 
-this defines OPENCV_INCLUDES and OPENCV_LIBS - which you should add to your
-makefiles like:
+That will build most of the opencv modules, except for those that don't make sense
+on android - gpu, etc..
+
+To install to the toolchain:
+  make install
+########################################################
+Using opencv in your own cmake projects.
+########################################################
+Use the cmake find script for opencv:
+  find_package(OpenCV REQUIRED)
+  
+Then when you run cmake, use:
+  cmake -DCMAKE_TOOLCHAIN_FILE=$ANDTOOLCHAIN ..
 
-#define OPENCV_INCLUDES and OPENCV_LIBS
-include $(PATH_TO_OPENCV_ANDROID_BUILD)/android-opencv.mk
+And point OpenCV_DIR in your cmake cache to the path where you built opencv for android.
+  something like: opencv/android/build
 
-LOCAL_LDLIBS += $(OPENCV_LIBS)
-    
-LOCAL_C_INCLUDES +=  $(OPENCV_INCLUDES)
+To avoid setting OpenCV_DIR in the cmake cache, you can just "install" opencv to your
+android toolchain. Run the following from the opencv/android/build path:
+  make install
 
-for now, you also need to cd to android-jni and run make
-this will create the android shared library with some useful functionality
-that may be reused in android projects.
+########################################################
+android targets
+########################################################
+You may wish to build opencv for multiple android hardware targets.
 
+Just change the cmake cache ARM_TARGETS to either:
+ "armeabi" "armeab-v7a" "armeab-v7a with NEON"
+You may install each of these to the toolchain, and they should be linked against
+properly by way of the android-cmake toolchain.
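To make the README's find_package(OpenCV REQUIRED) step above concrete, here is a minimal sketch of a consumer project's CMakeLists.txt. The project name, the source file and the OpenCV_INCLUDE_DIRS variable are assumptions for illustration only; OpenCV_LIBS is the variable actually populated by the OpenCVConfig.cmake.in changes in this commit.

  # Minimal consumer project (sketch). Configure it with the android-cmake toolchain, e.g.
  #   cmake -DCMAKE_TOOLCHAIN_FILE=$ANDTOOLCHAIN -DOpenCV_DIR=<path to opencv/android/build> ..
  cmake_minimum_required(VERSION 2.8)
  project(my_jni_lib)
  find_package(OpenCV REQUIRED)
  include_directories(${OpenCV_INCLUDE_DIRS})      # variable name assumed, not shown in this diff
  add_library(my_jni_lib SHARED jni/native.cpp)    # hypothetical source file
  target_link_libraries(my_jni_lib ${OpenCV_LIBS})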
diff --git a/android/android-cmake/CMakeLists.txt b/android/android-cmake/CMakeLists.txt
deleted file mode 100644 (file)
index 5f404fc..0000000
+++ /dev/null
@@ -1,90 +0,0 @@
-# ----------------------------------------------------------------------------\r
-#  Root CMake file for Android Opencv Build\r
-#\r
-#    To build with cmake\r
-#        $ mkdir build\r
-#        $ cd build\r
-#        $ cmake ..\r
-#        $ make\r
-#    Make sure to set the path in the cache for the crystax ndk available \r
-#    here:\r
-#          http://www.crystax.net/android/ndk-r4.php\r
-#\r
-#   - initial version August 2010 Ethan Rublee ethan.rublee@gmail.com\r
-#\r
-# ----------------------------------------------------------------------------\r
-\r
-project(android-opencv-shared)\r
-\r
-cmake_minimum_required(VERSION 2.8)\r
-\r
-set(opencv_root "$ENV{HOME}/opencv" CACHE PATH "opencv source root directory")\r
-if(NOT EXISTS ${opencv_root})\r
-message(FATAL_ERROR "Cannot find your opencv root directory!" )\r
-endif()\r
-\r
-set(android_opencv_mk "${opencv_root}/android/build/android-opencv.mk" cache FILE "the generated android-opencv.mk file")\r
-if(NOT EXISTS ${android_opencv_mk})\r
-message(FATAL_ERROR "please locate the cmake generated android-opencv.mk file, usually in the android/build directory...")\r
-endif()\r
-\r
-set(ANDROID true)\r
-\r
-file(GLOB module_includes "${opencv_root}/modules/[a-zA-Z]*")\r
-\r
-list(REMOVE_ITEM module_includes ${opencv_root}/modules/CMakeLists.txt)\r
-\r
-set(module_includes ${module_includes} ${CMAKE_SOURCE_DIR}/../3rdparty $(OPENCV_BUILD_ROOT) )\r
-foreach(mdir ${module_includes})\r
-    string(REPLACE "${opencv_root}" "$(OPENCV_ROOT)" n_f ${mdir})      \r
-    set(android_module_include_dirs "${android_module_include_dirs} ${n_f}/include")\r
-endforeach()\r
-\r
-configure_file("${CMAKE_SOURCE_DIR}/Android.mk.master.in" "${CMAKE_BINARY_DIR}/Android.mk")\r
-configure_file("${CMAKE_SOURCE_DIR}/Application.mk.in" "${CMAKE_BINARY_DIR}/Application.mk")\r
-configure_file("${CMAKE_SOURCE_DIR}/AndroidManifest.xml.in" "${CMAKE_BINARY_DIR}/AndroidManifest.xml")\r
-configure_file("${CMAKE_SOURCE_DIR}/default.properties.in" "${CMAKE_BINARY_DIR}/default.properties")\r
-\r
-set(NDK_ROOT  "$ENV{HOME}/android-ndk-r4-crystax" CACHE STRING "the crystax ndk directory")\r
-if(NOT EXISTS ${NDK_ROOT})\r
-message(FATAL_ERROR "Cannot find your ndk root directory! please download and \r
-unzip the android ndk from crystax to the directory specified by NDK_ROOT\r
-You may download the crystax ndk from: \r
-        http://www.crystax.net/android/ndk-r4.php" )\r
-endif()\r
-\r
-set(J  "2" CACHE STRING "how many processes for make -j <J>")\r
-\r
-ADD_CUSTOM_COMMAND(\r
-OUTPUT    android-ndk\r
-DEPENDS   ${CMAKE_BINARY_DIR}/Android.mk\r
-COMMAND   "${NDK_ROOT}/ndk-build" \r
-ARGS      --directory=${CMAKE_BINARY_DIR} NDK_APPLICATION_MK=Application.mk -j${J}\r
-)\r
-\r
-ADD_CUSTOM_COMMAND(\r
-OUTPUT    android-swig\r
-DEPENDS   ${SWIG_MAIN}\r
-COMMAND   "swig" \r
-ARGS      --directory=${CMAKE_BINARY_DIR} NDK_APPLICATION_MK=Application.mk -j${J}\r
-)\r
-\r
-\r
-configure_file("${CMAKE_SOURCE_DIR}/android-opencv.mk.in" "${CMAKE_BINARY_DIR}/android-opencv.mk")\r
-\r
-ADD_CUSTOM_TARGET(ndk ALL echo\r
-  DEPENDS android-ndk\r
-)\r
-\r
-message(STATUS "Make will use make -j${J} - for speeding up build - you may change this in the cache")\r
-message(STATUS "The NDK directory is ${NDK_ROOT}")\r
-message(STATUS "OpenCV source root is ${opencv_root}")\r
-message(STATUS "just run make - and grab some coffee or tea ;)")\r
-message(STATUS "The android opencv libs will be located in ${CMAKE_BINARY_DIR}/obj/local/armeabi*/")\r
-\r
-\r
-\r
-  \r
-\r
-\r
-\r
diff --git a/android/android-jni/AndroidManifest.xml b/android/android-jni/AndroidManifest.xml
deleted file mode 100644 (file)
index f0a9639..0000000
+++ /dev/null
@@ -1,24 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<manifest xmlns:android="http://schemas.android.com/apk/res/android"
-       package="com.opencv" android:versionCode="1"
-       android:versionName="0.1">
-       
-        <application android:debuggable="true">
-        <!-- The activity tag here is currently not used. The main project TicTacToeMain
-             must currently redefine the activities to be used from the libraries.
-             However later the tools will pick up the activities from here and merge them
-             automatically, so it's best to define your activities here like for any
-             regular Android project.
-        -->
-        <activity android:name="com.opencv.OpenCV" />
-        <activity android:name="com.opencv.calibration.ChessBoardChooser"/>
-               <activity android:name="com.opencv.calibration.CameraConfig"/>
-               <activity android:name="com.opencv.calibration.CalibrationViewer"/>
-        <service android:name="com.opencv.calibration.services.CalibrationService"/>
-    </application>
-    <!-- set the opengl version 
-    <uses-feature android:glEsVersion="0x00020000" />-->
-       <uses-sdk android:minSdkVersion="7" />
-       <uses-permission android:name="android.permission.CAMERA"></uses-permission>
-       
-</manifest> 
diff --git a/android/android-jni/Makefile b/android/android-jni/Makefile
deleted file mode 100644 (file)
index 9aa6da4..0000000
+++ /dev/null
@@ -1,78 +0,0 @@
-# The path to the NDK, requires crystax version r-4 for now, due to support
-# for the standard library
-
-# load environment from local make file
-LOCAL_ENV_MK=local.env.mk
-ifneq "$(wildcard $(LOCAL_ENV_MK))" ""
-include $(LOCAL_ENV_MK)
-else
-$(shell cp sample.$(LOCAL_ENV_MK) $(LOCAL_ENV_MK))
-$(info ERROR local environement not setup! try:)
-$(info gedit $(LOCAL_ENV_MK))
-$(error Please setup the $(LOCAL_ENV_MK) - the default was just created')
-endif
-
-ifndef ARM_TARGETS
-ARM_TARGETS="armeabi armeabi-v7a"
-endif
-
-ANDROID_NDK_BASE = $(ANDROID_NDK_ROOT)
-
-$(info OPENCV_CONFIG = $(OPENCV_CONFIG))
-
-# The name of the native library
-LIBNAME = libandroid-opencv.so
-
-# Find all the C++ sources in the native folder
-SOURCES = $(wildcard jni/*.cpp)
-HEADERS = $(wildcard jni/*.h)
-SWIG_IS = $(wildcard jni/*.i)
-
-ANDROID_MKS = $(wildcard jni/*.mk)
-
-SWIG_MAIN = jni/android-cv.i
-
-SWIG_JAVA_DIR = src/com/opencv/jni
-SWIG_JAVA_OUT = $(wildcard $(SWIG_JAVA_DIR)/*.java)
-
-SWIG_C_DIR = jni/gen
-SWIG_C_OUT = $(SWIG_C_DIR)/android_cv_wrap.cpp
-
-# The real native library stripped of symbols
-LIB            = libs/armeabi-v7a/$(LIBNAME) libs/armeabi/$(LIBNAME)
-
-
-all:   $(LIB) nogdb
-
-
-#calls the ndk-build script, passing it OPENCV_ROOT and OPENCV_LIBS_DIR
-$(LIB): $(SWIG_C_OUT) $(SOURCES) $(HEADERS) $(ANDROID_MKS)
-       $(ANDROID_NDK_BASE)/ndk-build OPENCV_CONFIG=$(OPENCV_CONFIG) \
-       PROJECT_PATH=$(PROJECT_PATH) ARM_TARGETS=$(ARM_TARGETS) V=$(V) $(NDK_FLAGS)
-
-
-#this creates the swig wrappers
-$(SWIG_C_OUT): $(SWIG_IS)
-       make clean-swig &&\
-       mkdir -p $(SWIG_C_DIR) &&\
-       mkdir -p $(SWIG_JAVA_DIR) &&\
-       swig -java -c++ -package "com.opencv.jni" \
-       -outdir $(SWIG_JAVA_DIR) \
-       -o $(SWIG_C_OUT) $(SWIG_MAIN)
-       
-       
-#clean targets
-.PHONY: clean  clean-swig cleanall nogdb
-
-nogdb: $(LIB)
-       rm -f libs/armeabi*/gdb*
-
-#this deletes the generated swig java and the generated c wrapper
-clean-swig:
-       rm -f $(SWIG_JAVA_OUT) $(SWIG_C_OUT)
-       
-#does clean-swig and then uses the ndk-build clean
-clean: clean-swig
-       $(ANDROID_NDK_BASE)/ndk-build OPENCV_CONFIG=$(OPENCV_CONFIG) \
-       PROJECT_PATH=$(PROJECT_PATH) clean  ARM_TARGETS=$(ARM_TARGETS) V=$(V) $(NDK_FLAGS)
-       
diff --git a/android/android-jni/README.txt b/android/android-jni/README.txt
deleted file mode 100644 (file)
index 36eb32a..0000000
+++ /dev/null
@@ -1,41 +0,0 @@
-android-jni
-
-this is an example of an android library project that has some reusable
-code that exposes part of OpenCV to android. In particular this provides a
-native camera interface for loading live video frames from the android camera
-into native opencv functions(as cv::Mat's)
-
-pre-reqs:
-* build the opencv/android libraries - up one directory
-* you need swig in you path for android-jni
-    on ubuntu - sudo apt-get install swig
-    others: http://www.swig.org/
-   
-to build:
-
-make
-
-that should work...  If it doesn't make sure to edit the generated local.env.mk
-to reflect your machine's setup
-
-see the sample for how to use this in your own projects
-
-If you only support armeabi-v7a or armeabi your final apks will be much smaller.
-
-To build the class files, either start a new Android project from existing sources
-in eclipse
-or from the commmand line:
-sh project_create.sh
-ant debug
-
-This should be linked to in your android projects, if you would like to reuse the
-code. See Calibration or CVCamera in the opencv/android/apps directory
-
-With cdt installed in eclipse, you may also "convert to C++ project" once you have
-opened this as an android project. Select makefile project->toolchain other to do this.
-
-Eclipse tip of the day:
-You may get build warnings when linking to the project, complainging about duplicate something
-or other in you .svn directories.  Right click project->settings->java build path->source->excude paths->add
-.svn/ and **/.svn/ should do it ;)
-    
diff --git a/android/android-jni/default.properties b/android/android-jni/default.properties
deleted file mode 100644 (file)
index b586c76..0000000
+++ /dev/null
@@ -1,12 +0,0 @@
-# This file is automatically generated by Android Tools.
-# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
-# 
-# This file must be checked in Version Control Systems.
-# 
-# To customize properties used by the Ant build system use,
-# "build.properties", and override values to adapt the script to your
-# project structure.
-
-android.library=true
-# Project target.
-target=android-7
diff --git a/android/android-jni/jni/Android.mk b/android/android-jni/jni/Android.mk
deleted file mode 100644 (file)
index 8f38719..0000000
+++ /dev/null
@@ -1,31 +0,0 @@
-# date: Summer, 2010 
-# author: Ethan Rublee
-# contact: ethan.rublee@gmail.com
-#
-LOCAL_PATH := $(call my-dir)
-
-include $(CLEAR_VARS)
-
-#define OPENCV_INCLUDES and OPENCV_LIBS
-include $(OPENCV_CONFIG)
-
-LOCAL_LDLIBS += $(OPENCV_LIBS) -llog -lGLESv2
-    
-LOCAL_C_INCLUDES +=  $(OPENCV_INCLUDES) 
-
-LOCAL_MODULE    := android-opencv
-
-LOCAL_SRC_FILES := gen/android_cv_wrap.cpp image_pool.cpp \
-    gl_code.cpp Calibration.cpp
-    
-
-#ifeq ($(TARGET_ARCH_ABI),armeabi-v7a)
-#    LOCAL_CFLAGS := -DHAVE_NEON=1
-#    LOCAL_SRC_FILES += yuv2rgb_neon.c.neon
-#else
-       LOCAL_SRC_FILES +=  yuv420sp2rgb.c
-#endif
-    
-
-include $(BUILD_SHARED_LIBRARY)
-
diff --git a/android/android-jni/jni/Application.mk b/android/android-jni/jni/Application.mk
deleted file mode 100644 (file)
index f23b245..0000000
+++ /dev/null
@@ -1,2 +0,0 @@
-APP_ABI := $(ARM_TARGETS)
-APP_MODULES      := android-opencv
diff --git a/android/android-jni/jni/Calibration.cpp b/android/android-jni/jni/Calibration.cpp
deleted file mode 100644 (file)
index 9ba8fa8..0000000
+++ /dev/null
@@ -1,245 +0,0 @@
-/*
- * Processor.cpp
- *
- *  Created on: Jun 13, 2010
- *      Author: ethan
- */
-
-#include "Calibration.h"
-
-#include <sys/stat.h>
-
-using namespace cv;
-
-Calibration::Calibration() :
-  patternsize(6, 8)
-{
-
-}
-
-Calibration::~Calibration()
-{
-
-}
-
-namespace
-{
-double computeReprojectionErrors(const vector<vector<Point3f> >& objectPoints,
-                                 const vector<vector<Point2f> >& imagePoints, const vector<Mat>& rvecs, const vector<
-                                     Mat>& tvecs, const Mat& cameraMatrix, const Mat& distCoeffs,
-                                 vector<float>& perViewErrors)
-{
-  vector<Point2f> imagePoints2;
-  int i, totalPoints = 0;
-  double totalErr = 0, err;
-  perViewErrors.resize(objectPoints.size());
-
-  for (i = 0; i < (int)objectPoints.size(); i++)
-  {
-    projectPoints(Mat(objectPoints[i]), rvecs[i], tvecs[i], cameraMatrix, distCoeffs, imagePoints2);
-    err = norm(Mat(imagePoints[i]), Mat(imagePoints2), CV_L1);
-    int n = (int)objectPoints[i].size();
-    perViewErrors[i] = err / n;
-    totalErr += err;
-    totalPoints += n;
-  }
-
-  return totalErr / totalPoints;
-}
-
-void calcChessboardCorners(Size boardSize, float squareSize, vector<Point3f>& corners)
-{
-  corners.resize(0);
-
-  for (int i = 0; i < boardSize.height; i++)
-    for (int j = 0; j < boardSize.width; j++)
-      corners.push_back(Point3f(float(j * squareSize), float(i * squareSize), 0));
-}
-
-/**from opencv/samples/cpp/calibration.cpp
- *
- */
-bool runCalibration(vector<vector<Point2f> > imagePoints, Size imageSize, Size boardSize, float squareSize,
-                    float aspectRatio, int flags, Mat& cameraMatrix, Mat& distCoeffs, vector<Mat>& rvecs,
-                    vector<Mat>& tvecs, vector<float>& reprojErrs, double& totalAvgErr)
-{
-  cameraMatrix = Mat::eye(3, 3, CV_64F);
-  if (flags & CV_CALIB_FIX_ASPECT_RATIO)
-    cameraMatrix.at<double> (0, 0) = aspectRatio;
-
-  distCoeffs = Mat::zeros(4, 1, CV_64F);
-
-  vector<vector<Point3f> > objectPoints(1);
-  calcChessboardCorners(boardSize, squareSize, objectPoints[0]);
-  for (size_t i = 1; i < imagePoints.size(); i++)
-    objectPoints.push_back(objectPoints[0]);
-
-  calibrateCamera(objectPoints, imagePoints, imageSize, cameraMatrix, distCoeffs, rvecs, tvecs, flags);
-
-  bool ok = checkRange(cameraMatrix, CV_CHECK_QUIET) && checkRange(distCoeffs, CV_CHECK_QUIET);
-
-  totalAvgErr
-      = computeReprojectionErrors(objectPoints, imagePoints, rvecs, tvecs, cameraMatrix, distCoeffs, reprojErrs);
-
-  return ok;
-}
-void saveCameraParams(const string& filename, Size imageSize, Size boardSize, float squareSize, float aspectRatio,
-                      int flags, const Mat& cameraMatrix, const Mat& distCoeffs, const vector<Mat>& rvecs,
-                      const vector<Mat>& tvecs, const vector<float>& reprojErrs,
-                      const vector<vector<Point2f> >& imagePoints, double totalAvgErr)
-{
-  FileStorage fs(filename, FileStorage::WRITE);
-
-  time_t t;
-  time(&t);
-  struct tm *t2 = localtime(&t);
-  char buf[1024];
-  strftime(buf, sizeof(buf) - 1, "%c", t2);
-
-  fs << "calibration_time" << buf;
-
-  if (!rvecs.empty() || !reprojErrs.empty())
-    fs << "nframes" << (int)std::max(rvecs.size(), reprojErrs.size());
-  fs << "image_width" << imageSize.width;
-  fs << "image_height" << imageSize.height;
-  fs << "board_width" << boardSize.width;
-  fs << "board_height" << boardSize.height;
-  fs << "squareSize" << squareSize;
-
-  if (flags & CV_CALIB_FIX_ASPECT_RATIO)
-    fs << "aspectRatio" << aspectRatio;
-
-  if (flags != 0)
-  {
-    sprintf(buf, "flags: %s%s%s%s", flags & CV_CALIB_USE_INTRINSIC_GUESS ? "+use_intrinsic_guess" : "", flags
-        & CV_CALIB_FIX_ASPECT_RATIO ? "+fix_aspectRatio" : "", flags & CV_CALIB_FIX_PRINCIPAL_POINT
-        ? "+fix_principal_point" : "", flags & CV_CALIB_ZERO_TANGENT_DIST ? "+zero_tangent_dist" : "");
-    cvWriteComment(*fs, buf, 0);
-  }
-
-  fs << "flags" << flags;
-
-  fs << "camera_matrix" << cameraMatrix;
-  fs << "distortion_coefficients" << distCoeffs;
-
-  fs << "avg_reprojection_error" << totalAvgErr;
-  if (!reprojErrs.empty())
-    fs << "per_view_reprojection_errors" << Mat(reprojErrs);
-
-  if (!rvecs.empty() && !tvecs.empty())
-  {
-    Mat bigmat(rvecs.size(), 6, CV_32F);
-    for (size_t i = 0; i < rvecs.size(); i++)
-    {
-      Mat r = bigmat(Range(i, i + 1), Range(0, 3));
-      Mat t = bigmat(Range(i, i + 1), Range(3, 6));
-      rvecs[i].copyTo(r);
-      tvecs[i].copyTo(t);
-    }
-    cvWriteComment(*fs, "a set of 6-tuples (rotation vector + translation vector) for each view", 0);
-    fs << "extrinsic_parameters" << bigmat;
-  }
-
-  if (!imagePoints.empty())
-  {
-    Mat imagePtMat(imagePoints.size(), imagePoints[0].size(), CV_32FC2);
-    for (size_t i = 0; i < imagePoints.size(); i++)
-    {
-      Mat r = imagePtMat.row(i).reshape(2, imagePtMat.cols);
-      Mat(imagePoints[i]).copyTo(r);
-    }
-    fs << "image_points" << imagePtMat;
-  }
-}
-}//anon namespace
-bool Calibration::detectAndDrawChessboard(int idx, image_pool* pool)
-{
-
-  bool patternfound = false;
-  Mat grey = pool->getGrey(idx);
-  if (grey.empty())
-    return false;
-  vector<Point2f> corners;
-
-  patternfound = findChessboardCorners(grey, patternsize, corners,CALIB_CB_FILTER_QUADS + CALIB_CB_ADAPTIVE_THRESH + CALIB_CB_NORMALIZE_IMAGE
-                                                + CALIB_CB_FAST_CHECK);
-  Mat img = pool->getImage(idx);
-
-  if (corners.size() < 1)
-    return false;
-
-  if (patternfound)
-  {
-    cornerSubPix(grey, corners, Size(11, 11), Size(-1, -1), TermCriteria(CV_TERMCRIT_EPS + CV_TERMCRIT_ITER, 30, 0.1));
-    imagepoints.push_back(corners);
-  }
-
-  drawChessboardCorners(img, patternsize, Mat(corners), patternfound);
-
-  imgsize = grey.size();
-
-  return patternfound;
-
-}
-
-void Calibration::drawText(int i, image_pool* pool, const char* ctext)
-{
-  // Use "y" to show that the baseLine is about
-  string text = ctext;
-  int fontFace = FONT_HERSHEY_COMPLEX_SMALL;
-  double fontScale = .8;
-  int thickness = .5;
-
-  Mat img = pool->getImage(i);
-
-  int baseline = 0;
-  Size textSize = getTextSize(text, fontFace, fontScale, thickness, &baseline);
-  baseline += thickness;
-
-  // center the text
-  Point textOrg((img.cols - textSize.width) / 2, (img.rows - textSize.height * 2));
-
-  // draw the box
-  rectangle(img, textOrg + Point(0, baseline), textOrg + Point(textSize.width, -textSize.height), Scalar(0, 0, 255),
-            CV_FILLED);
-  // ... and the baseline first
-  line(img, textOrg + Point(0, thickness), textOrg + Point(textSize.width, thickness), Scalar(0, 0, 255));
-
-  // then put the text itself
-  putText(img, text, textOrg, fontFace, fontScale, Scalar::all(255), thickness, 8);
-}
-
-void Calibration::resetChess()
-{
-
-  imagepoints.clear();
-}
-
-void Calibration::calibrate(const char* filename)
-{
-
-  vector<Mat> rvecs, tvecs;
-  vector<float> reprojErrs;
-  double totalAvgErr = 0;
-  int flags = 0;
-  flags |= CV_CALIB_FIX_PRINCIPAL_POINT | CV_CALIB_FIX_ASPECT_RATIO;
-  bool writeExtrinsics = true;
-  bool writePoints = true;
-
-  bool ok = runCalibration(imagepoints, imgsize, patternsize, 1.f, 1.f, flags, K, distortion, rvecs, tvecs, reprojErrs,
-                           totalAvgErr);
-
-  if (ok)
-  {
-
-    saveCameraParams(filename, imgsize, patternsize, 1.f, 1.f, flags, K, distortion, writeExtrinsics ? rvecs : vector<
-        Mat> (), writeExtrinsics ? tvecs : vector<Mat> (), writeExtrinsics ? reprojErrs : vector<float> (), writePoints
-        ? imagepoints : vector<vector<Point2f> > (), totalAvgErr);
-  }
-
-}
-
-int Calibration::getNumberDetectedChessboards()
-{
-  return imagepoints.size();
-}
diff --git a/android/android-jni/jni/Calibration.h b/android/android-jni/jni/Calibration.h
deleted file mode 100644 (file)
index 6e0eef3..0000000
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * Processor.h
- *
- *  Created on: Jun 13, 2010
- *      Author: ethan
- */
-
-#ifndef PROCESSOR_H_
-#define PROCESSOR_H_
-
-#include <opencv2/core/core.hpp>
-#include <opencv2/features2d/features2d.hpp>
-#include <opencv2/highgui/highgui.hpp>
-#include <opencv2/imgproc/imgproc.hpp>
-#include <opencv2/calib3d/calib3d.hpp>
-
-#include <vector>
-
-#include "image_pool.h"
-
-#define DETECT_FAST 0
-#define DETECT_STAR 1
-#define DETECT_SURF 2
-
-class Calibration
-{
-public:
-
-  Calibration();
-  virtual ~Calibration();
-
-  bool detectAndDrawChessboard(int idx, image_pool* pool);
-
-  void resetChess();
-
-  int getNumberDetectedChessboards();
-
-  void calibrate(const char* filename);
-
-  void drawText(int idx, image_pool* pool, const char* text);
-
-  cv::Size patternsize;
-private:
-  std::vector<cv::KeyPoint> keypoints;
-
-  std::vector<std::vector<cv::Point2f> > imagepoints;
-
-  cv::Mat K;
-  cv::Mat distortion;
-  cv::Size imgsize;
-
-};
-
-#endif /* PROCESSOR_H_ */
diff --git a/android/android-jni/jni/Calibration.i b/android/android-jni/jni/Calibration.i
deleted file mode 100644 (file)
index ba6154b..0000000
+++ /dev/null
@@ -1,28 +0,0 @@
-/*
- * include the headers required by the generated cpp code
- */
-%{
-#include "Calibration.h"
-#include "image_pool.h"
-using namespace cv;
-%}
-
-
-class Calibration {
-public:
-
-       Size patternsize;
-       
-       Calibration();
-       virtual ~Calibration();
-
-       bool detectAndDrawChessboard(int idx, image_pool* pool);
-       
-       void resetChess();
-       
-       int getNumberDetectedChessboards();
-       
-       void calibrate(const char* filename);
-       
-       void drawText(int idx, image_pool* pool, const char* text);
-};
diff --git a/android/android-jni/jni/android-cv-typemaps.i b/android/android-jni/jni/android-cv-typemaps.i
deleted file mode 100644 (file)
index cae2d6b..0000000
+++ /dev/null
@@ -1,6 +0,0 @@
-%feature("director") Mat;
-%feature("director") glcamera;
-%feature("director") image_pool;
-%typemap("javapackage") Mat, Mat *, Mat & "com.opencv.jni";
-%typemap("javapackage") glcamera, glcamera *, glcamera & "com.opencv.jni";
-%typemap("javapackage") image_pool, image_pool *, image_pool & "com.opencv.jni";
\ No newline at end of file
diff --git a/android/android-jni/jni/android-cv.i b/android/android-jni/jni/android-cv.i
deleted file mode 100644 (file)
index 54eba06..0000000
+++ /dev/null
@@ -1,59 +0,0 @@
-/* File : android-cv.i
-
-import this file, and make sure to add the System.loadlibrary("android-opencv")
-before loading any lib that depends on this.
- */
-
-%module opencv
-%{
-#include "image_pool.h"
-#include "glcamera.h"
-
-using namespace cv;
-%}
-#ifndef SWIGIMPORTED
-%include "various.i"
-%include "typemaps.i"
-%include "arrays_java.i"
-#endif
-
-/**
- * Make all the swig pointers public, so that
- * external libraries can refer to these, otherwise they default to 
- * protected...
- */
-%typemap(javabody) SWIGTYPE %{
-  private long swigCPtr;
-  protected boolean swigCMemOwn;
-  public $javaclassname(long cPtr, boolean cMemoryOwn) {
-       swigCMemOwn = cMemoryOwn;
-       swigCPtr = cPtr;
-  }
-  public static long getCPtr($javaclassname obj) {
-       return (obj == null) ? 0 : obj.swigCPtr;
-  }
-%}
-
-
-%pragma(java) jniclasscode=%{
-  static {
-    try {
-       //load the library, make sure that libandroid-opencv.so is in your <project>/libs/armeabi directory
-       //so that android sdk automatically installs it along with the app.
-        System.loadLibrary("android-opencv");
-    } catch (UnsatisfiedLinkError e) {
-       //badness
-       throw e;
-     
-    }
-  }
-%}
-
-
-%include "cv.i"
-
-%include "glcamera.i"
-
-%include "image_pool.i"
-
-%include "Calibration.i"
diff --git a/android/android-jni/jni/buffers.i b/android/android-jni/jni/buffers.i
deleted file mode 100644 (file)
index d601120..0000000
+++ /dev/null
@@ -1,165 +0,0 @@
-/*
- * These typemaps provide support for sharing data between JNI and JVM code
- * using NIO direct buffers. It is the responsibility of the JVM code to
- * allocate a direct buffer of the appropriate size.
- *
- * Example use:
-
- * Wrapping:
- * %include "buffers.i" 
- * %apply int* BUFF {int* buffer}
- * int read_foo_int(int* buffer);
- *
- * Java:
- * IntBuffer buffer = IntBuffer.allocateDirect(nInts*4).order(ByteOrder.nativeOrder()).asIntBuffer();
- * Example.read_foo_int(buffer);
- *
-
- * The following typemaps are defined:
- * void* BUFF           <--> java.nio.Buffer
- * char* BUFF           <--> java.nio.ByteBuffer
- * char* CBUFF          <--> java.nio.CharBuffer
- * unsigned char* INBUFF/OUTBUFF  <--> java.nio.ShortBuffer
- * short* BUFF          <--> java.nio.ShortBuffer 
- * unsigned short* INBUFF/OUTBUFF <--> java.nio.IntBuffer
- * int* BUFF            <--> java.nio.IntBuffer
- * unsigned int* INBUFF/OUTBUFF   <--> java.nio.LongBuffer
- * long* BUFF           <--> java.nio.IntBuffer
- * unsigned long* INBUFF/OUTBUFF  <--> java.nio.LongBuffer
- * long long* BUFF      <--> java.nio.LongBuffer
- * float* BUFF          <--> java.nio.FloatBuffer
- * double* BUFF         <--> java.nio.DoubleBuffer
- *
- * Note the potential for data loss in the conversion from 
- * the C type 'unsigned long' to the signed Java long type.
- * Hopefully, I can implement a workaround with BigNumber in the future.
- *
- * The use of ByteBuffer vs CharBuffer for the char* type should
- * depend on the type of data. In general you'll probably
- * want to use CharBuffer for actual text data.
- */
-/*
- * This macro is used to define the nio buffers for primitive types.
- */
-%define NIO_BUFFER_TYPEMAP(CTYPE, LABEL, BUFFERTYPE)
-%typemap(jni) CTYPE* LABEL "jobject"
-%typemap(jtype) CTYPE* LABEL "BUFFERTYPE"
-%typemap(jstype) CTYPE* LABEL "BUFFERTYPE"
-%typemap(javain, 
-       pre="    assert $javainput.isDirect() : \"Buffer must be allocated direct.\";") CTYPE* LABEL "$javainput"
-%typemap(javaout) CTYPE* LABEL {
-    return $jnicall;
-}
-%typemap(in) CTYPE* LABEL {
-  $1 = (*jenv)->GetDirectBufferAddress(jenv, $input);
-  if ($1 == NULL) {
-    SWIG_JavaThrowException(jenv, SWIG_JavaRuntimeException, "Unable to get address of direct buffer. Buffer must be allocated direct.");
-  }
-}
-%typemap(memberin) CTYPE* LABEL {
-  if ($input) {
-    $1 = $input;
-  } else {
-    $1 = 0;
-  }
-}
-%typemap(freearg) CTYPE* LABEL ""
-%enddef
-
-NIO_BUFFER_TYPEMAP(void, BUFF, java.nio.Buffer);
-NIO_BUFFER_TYPEMAP(char, BUFF, java.nio.ByteBuffer);
-NIO_BUFFER_TYPEMAP(char, CBUFF, java.nio.CharBuffer);
-/*NIO_BUFFER_TYPEMAP(unsigned char, BUFF, java.nio.ShortBuffer);*/
-NIO_BUFFER_TYPEMAP(short, BUFF, java.nio.ShortBuffer);
-NIO_BUFFER_TYPEMAP(unsigned short, BUFF, java.nio.IntBuffer);
-NIO_BUFFER_TYPEMAP(int, BUFF, java.nio.IntBuffer);
-NIO_BUFFER_TYPEMAP(unsigned int, BUFF, java.nio.LongBuffer);
-NIO_BUFFER_TYPEMAP(long, BUFF, java.nio.IntBuffer);
-NIO_BUFFER_TYPEMAP(unsigned long, BUFF, java.nio.LongBuffer);
-NIO_BUFFER_TYPEMAP(long long, BUFF, java.nio.LongBuffer);
-NIO_BUFFER_TYPEMAP(float, BUFF, java.nio.FloatBuffer);
-NIO_BUFFER_TYPEMAP(double, BUFF, java.nio.DoubleBuffer);
-#undef NIO_BUFFER_TYPEMAP
-
-
-%define UNSIGNED_NIO_BUFFER_TYPEMAP(CTYPE, BSIZE, BUFFERTYPE, PACKFCN, UNPACKFCN)
-%typemap(jni) CTYPE* INBUFF "jobject"
-%typemap(jtype) CTYPE* INBUFF "java.nio.ByteBuffer"
-%typemap(jstype) CTYPE* INBUFF "BUFFERTYPE"
-%typemap(javain, 
-       pre="    java.nio.ByteBuffer tmp$javainput = PACKFCN($javainput, true);") CTYPE* INBUFF "tmp$javainput"
-%typemap(javaout) CTYPE* INBUFF {
-    return $jnicall;
-}
-%typemap(in) CTYPE* INBUFF {
-  $1 = (*jenv)->GetDirectBufferAddress(jenv, $input);
-  if ($1 == NULL) {
-    SWIG_JavaThrowException(jenv, SWIG_JavaRuntimeException, "Unable to get address of direct buffer. Buffer must be allocated direct.");
-  }
-}
-%typemap(memberin) CTYPE* INBUFF {
-  if ($input) {
-    $1 = $input;
-  } else {
-    $1 = 0;
-  }
-}
-%typemap(freearg) CTYPE* INBUFF ""
-
-%typemap(jni) CTYPE* OUTBUFF "jobject"
-%typemap(jtype) CTYPE* OUTBUFF "java.nio.ByteBuffer"
-%typemap(jstype) CTYPE* OUTBUFF "BUFFERTYPE"
-%typemap(javain, 
-       pre="    java.nio.ByteBuffer tmp$javainput = java.nio.ByteBuffer.allocateDirect($javainput.capacity()*BSIZE).order($javainput.order());",
-        post="       UNPACKFCN(tmp$javainput, $javainput);") CTYPE* OUTBUFF "tmp$javainput"
-%typemap(javaout) CTYPE* OUTBUFF {
-    return $jnicall;
-}
-%typemap(in) CTYPE* OUTBUFF {
-  $1 = (*jenv)->GetDirectBufferAddress(jenv, $input);
-  if ($1 == NULL) {
-    SWIG_JavaThrowException(jenv, SWIG_JavaRuntimeException, "Unable to get address of direct buffer. Buffer must be allocated direct.");
-  }
-}
-%typemap(memberin) CTYPE* OUTBUFF {
-  if ($input) {
-    $1 = $input;
-  } else {
-    $1 = 0;
-  }
-}
-%typemap(freearg) CTYPE* OUTBUFF ""
-%enddef
-
-UNSIGNED_NIO_BUFFER_TYPEMAP(unsigned char, 1, java.nio.ShortBuffer, permafrost.hdf.libhdf.BufferUtils.packUChar, permafrost.hdf.libhdf.BufferUtils.unpackUChar);
-UNSIGNED_NIO_BUFFER_TYPEMAP(unsigned short, 2, java.nio.IntBuffer, permafrost.hdf.libhdf.BufferUtils.packUShort, permafrost.hdf.libhdf.BufferUtils.unpackUShort);
-UNSIGNED_NIO_BUFFER_TYPEMAP(unsigned int, 4, java.nio.LongBuffer, permafrost.hdf.libhdf.BufferUtils.packUInt, permafrost.hdf.libhdf.BufferUtils.unpackUInt);
-UNSIGNED_NIO_BUFFER_TYPEMAP(unsigned long, 4, java.nio.LongBuffer, permafrost.hdf.libhdf.BufferUtils.packUInt, permafrost.hdf.libhdf.BufferUtils.unpackUInt);
-
-/*
-%typemap(jni) unsigned char* BUFF "jobject"
-%typemap(jtype) unsigned char* BUFF "java.nio.ByteBuffer"
-%typemap(jstype) unsigned char* BUFF "java.nio.ShortBuffer"
-%typemap(javain, 
-       pre="    java.nio.ByteBuffer tmp$javainput = permafrost.hdf.libhdf.BufferUtils.packUChar($javainput, true);",
-        post="      permafrost.hdf.libhdf.BufferUtils.unpackUChar(tmp$javainput, $javainput);") unsigned char* BUFF "tmp$javainput"
-%typemap(javaout) unsigned char* BUFF {
-    return $jnicall;
-}
-%typemap(in) unsigned char* BUFF {
-  $1 = (*jenv)->GetDirectBufferAddress(jenv, $input);
-  if ($1 == NULL) {
-    SWIG_JavaThrowException(jenv, SWIG_JavaRuntimeException, "Unable to get address of direct buffer. Buffer must be allocated direct.");
-  }
-}
-%typemap(memberin) unsigned char* BUFF {
-  if ($input) {
-    $1 = $input;
-  } else {
-    $1 = 0;
-  }
-}
-%typemap(freearg) unsigned char* BUFF ""
-*/
-
-#undef UNSIGNED_NIO_BUFFER_TYPEMAP
\ No newline at end of file
diff --git a/android/android-jni/jni/cv.i b/android/android-jni/jni/cv.i
deleted file mode 100644 (file)
index 3c47d0a..0000000
+++ /dev/null
@@ -1,59 +0,0 @@
-%typemap(javaimports) Mat "
-/** Wrapper for the OpenCV Mat object. Good for passing around as a pointer to a Mat.
-*/"
-
-%typemap(javaimports) Size "
-/** Wrapper for the OpenCV Size object. Good for setting dimensions of cv::Mat...
-*/"
-
-class Mat {
-public:
- %immutable;
-       int rows;
-       int cols;
-};
-
-class Size{
-public:
-       Size(int width,int height);
-       int width;
-       int height;
-       
-};
-
-template<class _Tp> class Ptr
-{
-public:
-    //! empty constructor
-    Ptr();
-    //! take ownership of the pointer. The associated reference counter is allocated and set to 1
-    Ptr(_Tp* _obj);
-    //! calls release()
-    ~Ptr();
-    //! copy constructor. Copies the members and calls addref()
-    Ptr(const Ptr& ptr);
-    //! copy operator. Calls ptr.addref() and release() before copying the members
-   // Ptr& operator = (const Ptr& ptr);
-    //! increments the reference counter
-    void addref();
-    //! decrements the reference counter. If it reaches 0, delete_obj() is called
-    void release();
-    //! deletes the object. Override if needed
-    void delete_obj();
-    //! returns true iff obj==NULL
-    bool empty() const;
-
-    
-    //! helper operators making "Ptr<T> ptr" use very similar to "T* ptr".
-    _Tp* operator -> ();
-   // const _Tp* operator -> () const;
-
-   // operator _Tp* ();
-  //  operator const _Tp*() const;
-    
-protected:
-    _Tp* obj; //< the object pointer.
-    int* refcount; //< the associated reference counter
-};
-
-%template(PtrMat) Ptr<Mat>;
\ No newline at end of file
diff --git a/android/android-jni/jni/gl_code.cpp b/android/android-jni/jni/gl_code.cpp
deleted file mode 100644 (file)
index 4512b9d..0000000
+++ /dev/null
@@ -1,322 +0,0 @@
-/*
- * Copyright (C) 2009 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-// OpenGL ES 2.0 code
-
-#include <jni.h>
-
-#include <android/log.h>
-#include <opencv2/core/core.hpp>
-#include <opencv2/imgproc/imgproc.hpp>
-
-#include <GLES2/gl2.h>
-#include <GLES2/gl2ext.h>
-
-#include <stdio.h>
-#include <stdlib.h>
-#include <math.h>
-#include <stdint.h>
-
-#include "glcamera.h"
-#include "image_pool.h"
-using namespace cv;
-#define  LOG_TAG    "libandroid-opencv"
-#define  LOGI(...)  __android_log_print(ANDROID_LOG_INFO,LOG_TAG,__VA_ARGS__)
-#define  LOGE(...)  __android_log_print(ANDROID_LOG_ERROR,LOG_TAG,__VA_ARGS__)
-
-static void printGLString(const char *name, GLenum s)
-{
-  const char *v = (const char *)glGetString(s);
-  LOGI("GL %s = %s\n", name, v);
-}
-
-static void checkGlError(const char* op)
-{
-  for (GLint error = glGetError(); error; error = glGetError())
-  {
-    LOGI("after %s() glError (0x%x)\n", op, error);
-  }
-}
-
-static const char gVertexShader[] = "attribute vec4 a_position;   \n"
-  "attribute vec2 a_texCoord;   \n"
-  "varying vec2 v_texCoord;     \n"
-  "void main()                  \n"
-  "{                            \n"
-  "   gl_Position = a_position; \n"
-  "   v_texCoord = a_texCoord;  \n"
-  "}                            \n";
-
-static const char gFragmentShader[] = "precision mediump float;                            \n"
-  "varying vec2 v_texCoord;                            \n"
-  "uniform sampler2D s_texture;                        \n"
-  "void main()                                         \n"
-  "{                                                   \n"
-  "  gl_FragColor = texture2D( s_texture, v_texCoord );\n"
-  "}                                                   \n";
-
-const GLfloat gTriangleVertices[] = {0.0f, 0.5f, -0.5f, -0.5f, 0.5f, -0.5f};
-GLubyte testpixels[4 * 3] = {255, 0, 0, // Red
-                             0, 255, 0, // Green
-                             0, 0, 255, // Blue
-                             255, 255, 0 // Yellow
-    };
-
-GLuint glcamera::createSimpleTexture2D(GLuint _textureid, GLubyte* pixels, int width, int height, int channels)
-{
-
-  // Bind the texture
-  glActiveTexture( GL_TEXTURE0);
-  checkGlError("glActiveTexture");
-  // Bind the texture object
-  glBindTexture(GL_TEXTURE_2D, _textureid);
-  checkGlError("glBindTexture");
-
-  GLenum format;
-  switch (channels)
-  {
-    case 3:
-      format = GL_RGB;
-      break;
-    case 1:
-      format = GL_LUMINANCE;
-      break;
-    case 4:
-      format = GL_RGBA;
-      break;
-  }
-  // Load the texture
-  glTexImage2D(GL_TEXTURE_2D, 0, format, width, height, 0, format, GL_UNSIGNED_BYTE, pixels);
-
-  checkGlError("glTexImage2D");
-  // Set the filtering mode
-  glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
-  glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
-
-  return _textureid;
-
-}
-
-GLuint glcamera::loadShader(GLenum shaderType, const char* pSource)
-{
-  GLuint shader = glCreateShader(shaderType);
-  if (shader)
-  {
-    glShaderSource(shader, 1, &pSource, NULL);
-    glCompileShader(shader);
-    GLint compiled = 0;
-    glGetShaderiv(shader, GL_COMPILE_STATUS, &compiled);
-    if (!compiled)
-    {
-      GLint infoLen = 0;
-      glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &infoLen);
-      if (infoLen)
-      {
-        char* buf = (char*)malloc(infoLen);
-        if (buf)
-        {
-          glGetShaderInfoLog(shader, infoLen, NULL, buf);
-          LOGE("Could not compile shader %d:\n%s\n",
-              shaderType, buf);
-          free(buf);
-        }
-        glDeleteShader(shader);
-        shader = 0;
-      }
-    }
-  }
-  return shader;
-}
-
-GLuint glcamera::createProgram(const char* pVertexSource, const char* pFragmentSource)
-{
-  GLuint vertexShader = loadShader(GL_VERTEX_SHADER, pVertexSource);
-  if (!vertexShader)
-  {
-    return 0;
-  }
-
-  GLuint pixelShader = loadShader(GL_FRAGMENT_SHADER, pFragmentSource);
-  if (!pixelShader)
-  {
-    return 0;
-  }
-
-  GLuint program = glCreateProgram();
-  if (program)
-  {
-    glAttachShader(program, vertexShader);
-    checkGlError("glAttachShader");
-    glAttachShader(program, pixelShader);
-    checkGlError("glAttachShader");
-    glLinkProgram(program);
-    GLint linkStatus = GL_FALSE;
-    glGetProgramiv(program, GL_LINK_STATUS, &linkStatus);
-    if (linkStatus != GL_TRUE)
-    {
-      GLint bufLength = 0;
-      glGetProgramiv(program, GL_INFO_LOG_LENGTH, &bufLength);
-      if (bufLength)
-      {
-        char* buf = (char*)malloc(bufLength);
-        if (buf)
-        {
-          glGetProgramInfoLog(program, bufLength, NULL, buf);
-          LOGE("Could not link program:\n%s\n", buf);
-          free(buf);
-        }
-      }
-      glDeleteProgram(program);
-      program = 0;
-    }
-  }
-  return program;
-}
-
-//GLuint textureID;
-
-bool glcamera::setupGraphics(int w, int h)
-{
-  printGLString("Version", GL_VERSION);
-  printGLString("Vendor", GL_VENDOR);
-  printGLString("Renderer", GL_RENDERER);
-  printGLString("Extensions", GL_EXTENSIONS);
-
-  LOGI("setupGraphics(%d, %d)", w, h);
-  gProgram = createProgram(gVertexShader, gFragmentShader);
-  if (!gProgram)
-  {
-    LOGE("Could not create program.");
-    return false;
-  }
-  gvPositionHandle = glGetAttribLocation(gProgram, "a_position");
-  gvTexCoordHandle = glGetAttribLocation(gProgram, "a_texCoord");
-
-  gvSamplerHandle = glGetAttribLocation(gProgram, "s_texture");
-
-  // Use tightly packed data
-  glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
-
-  // Generate a texture object
-  glGenTextures(1, &textureID);
-  textureID = createSimpleTexture2D(textureID, testpixels, 2, 2, 3);
-
-  checkGlError("glGetAttribLocation");
-  LOGI("glGetAttribLocation(\"vPosition\") = %d\n",
-      gvPositionHandle);
-
-  glViewport(0, 0, w, h);
-  checkGlError("glViewport");
-  return true;
-}
-
-void glcamera::renderFrame()
-{
-
-  GLfloat vVertices[] = {-1.0f, 1.0f, 0.0f, // Position 0
-                         0.0f, 0.0f, // TexCoord 0
-                         -1.0f, -1.0f, 0.0f, // Position 1
-                         0.0f, 1.0f, // TexCoord 1
-                         1.0f, -1.0f, 0.0f, // Position 2
-                         1.0f, 1.0f, // TexCoord 2
-                         1.0f, 1.0f, 0.0f, // Position 3
-                         1.0f, 0.0f // TexCoord 3
-      };
-  GLushort indices[] = {0, 1, 2, 0, 2, 3};
-  GLsizei stride = 5 * sizeof(GLfloat); // 3 for position, 2 for texture
-
-  glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
-  checkGlError("glClearColor");
-
-  glClear(GL_DEPTH_BUFFER_BIT | GL_COLOR_BUFFER_BIT);
-  checkGlError("glClear");
-
-  glUseProgram(gProgram);
-  checkGlError("glUseProgram");
-
-  // Load the vertex position
-  glVertexAttribPointer(gvPositionHandle, 3, GL_FLOAT, GL_FALSE, stride, vVertices);
-  // Load the texture coordinate
-  glVertexAttribPointer(gvTexCoordHandle, 2, GL_FLOAT, GL_FALSE, stride, &vVertices[3]);
-
-  glEnableVertexAttribArray(gvPositionHandle);
-  glEnableVertexAttribArray(gvTexCoordHandle);
-
-  // Bind the texture
-  glActiveTexture( GL_TEXTURE0);
-  glBindTexture(GL_TEXTURE_2D, textureID);
-
-  // Set the sampler texture unit to 0
-  glUniform1i(gvSamplerHandle, 0);
-
-  glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_SHORT, indices);
-
-  //checkGlError("glVertexAttribPointer");
-  //glEnableVertexAttribArray(gvPositionHandle);
-  //checkGlError("glEnableVertexAttribArray");
-  //glDrawArrays(GL_TRIANGLES, 0, 3);
-  //checkGlError("glDrawArrays");
-}
-
-void glcamera::init(int width, int height)
-{
-  newimage = false;
-  nimg = Mat();
-  setupGraphics(width, height);
-
-}
-
-void glcamera::step()
-{
-  if (newimage && !nimg.empty())
-  {
-
-    textureID = createSimpleTexture2D(textureID, nimg.ptr<unsigned char> (0), nimg.rows, nimg.cols, nimg.channels());
-    newimage = false;
-  }
-  renderFrame();
-
-}
-#define NEAREST_POW2(x)((int)(0.5 + std::log(x)/0.69315) )
-void glcamera::setTextureImage(const Mat& img)
-{
-  Size size(256, 256);
-  resize(img, nimg, size, 0, 0, cv::INTER_NEAREST);
-  newimage = true;
-}
-
-void glcamera::drawMatToGL(int idx, image_pool* pool)
-{
-
-  Mat img = pool->getImage(idx);
-
-  if (img.empty())
-    return; //no image at input_idx!
-
-  setTextureImage(img);
-
-}
-
-glcamera::glcamera() :
-  newimage(false)
-{
-  LOGI("glcamera constructor");
-}
-glcamera::~glcamera()
-{
-  LOGI("glcamera destructor");
-}
-
diff --git a/android/android-jni/jni/glcamera.h b/android/android-jni/jni/glcamera.h
deleted file mode 100644 (file)
index 923fc53..0000000
+++ /dev/null
@@ -1,39 +0,0 @@
-#ifndef GLCAMERA_H_
-#define GLCAMERA_H_
-#include <opencv2/core/core.hpp>
-
-#include <GLES2/gl2.h>
-#include <GLES2/gl2ext.h>
-
-#include "image_pool.h"
-
-class glcamera
-{
-public:
-
-  glcamera();
-  ~glcamera();
-  void init(int width, int height);
-  void step();
-
-  void drawMatToGL(int idx, image_pool* pool);
-  void setTextureImage(const cv::Mat& img);
-
-private:
-  GLuint createSimpleTexture2D(GLuint _textureid, GLubyte* pixels, int width, int height, int channels);
-  GLuint loadShader(GLenum shaderType, const char* pSource);
-  GLuint
-  createProgram(const char* pVertexSource, const char* pFragmentSource);
-  bool setupGraphics(int w, int h);
-  void renderFrame();
-  cv::Mat nimg;
-  bool newimage;
-  GLuint textureID;
-
-  GLuint gProgram;
-  GLuint gvPositionHandle;
-
-  GLuint gvTexCoordHandle;
-  GLuint gvSamplerHandle;
-};
-#endif
diff --git a/android/android-jni/jni/glcamera.i b/android/android-jni/jni/glcamera.i
deleted file mode 100644 (file)
index 0a4a059..0000000
+++ /dev/null
@@ -1,43 +0,0 @@
-
-%typemap(javaimports) glcamera "
-/** A class for doing the native rendering of images.
-It renders using OpenGL ES 2.0 through the native NDK.
-This class is used by the GL2CameraViewer to do the rendering,
-and is inspired by the GLES2 example in the NDK samples.
-*/"
-
-
-
-%javamethodmodifiers glcamera::init"
-  /**  should be called from onSurfaceChanged by the GLSurfaceView that is using this
-       *  as the drawing engine
-       * @param width the width of the surface view that this will be drawing to
-    * @param height the height of the surface view that this will be drawing to
-       *
-    */
-  public";
-  
-%javamethodmodifiers glcamera::step"
-  /**  should be called by GLSurfaceView.Renderer in the onDrawFrame method, as it
-  handles the rendering of the opengl scene, and requires that the opengl context be
-  valid.
-       *
-    */
-  public";
-%javamethodmodifiers glcamera::drawMatToGL"
-  /** copies an image from a pool and queues it for drawing in opengl.
-       *  this handles resizing the image to a power-of-two texture size
-       * @param idx the image index to copy
-    * @param pool the image_pool to look up the image from
-       *
-    */
-  public";
-  
-class glcamera {
-public:
-     void init(int width, int height);
-     void step();
-     void drawMatToGL(int idx, image_pool* pool);
-};
-
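Per the javadoc above, init belongs in onSurfaceChanged and step in onDrawFrame; GL2CameraViewer does this wiring in the deleted Java sources. A standalone sketch of the same pattern (the renderer class itself is hypothetical; only glcamera comes from these sources):

    public class CameraRenderer implements android.opengl.GLSurfaceView.Renderer {
        private final com.opencv.jni.glcamera mGlCamera = new com.opencv.jni.glcamera();

        public void onSurfaceCreated(javax.microedition.khronos.opengles.GL10 gl,
                                     javax.microedition.khronos.egl.EGLConfig config) {
            // nothing to do until the surface dimensions are known
        }

        public void onSurfaceChanged(javax.microedition.khronos.opengles.GL10 gl, int width, int height) {
            mGlCamera.init(width, height); // compiles the GLES2 program and creates the texture
        }

        public void onDrawFrame(javax.microedition.khronos.opengles.GL10 gl) {
            mGlCamera.step(); // uploads any queued image and draws the textured quad
        }
    }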
diff --git a/android/android-jni/jni/image_pool.cpp b/android/android-jni/jni/image_pool.cpp
deleted file mode 100644 (file)
index 4e70a28..0000000
+++ /dev/null
@@ -1,116 +0,0 @@
-#include "image_pool.h"
-
-#include "yuv420sp2rgb.h"
-
-#include <android/log.h>
-#include <opencv2/imgproc/imgproc.hpp>
-#include <jni.h>
-using namespace cv;
-
-#define  LOG_TAG    "libandroid-opencv"
-#define  LOGI(...)  __android_log_print(ANDROID_LOG_INFO,LOG_TAG,__VA_ARGS__)
-#define  LOGE(...)  __android_log_print(ANDROID_LOG_ERROR,LOG_TAG,__VA_ARGS__)
-
-JNIEXPORT jint JNI_OnLoad(JavaVM* vm, void* reserved)
-{
-  JNIEnv *env;
-  LOGI("JNI_OnLoad called for opencv");
-  return JNI_VERSION_1_4;
-}
-
-JNIEXPORT void JNICALL Java_com_opencv_jni_opencvJNI_addYUVtoPool(JNIEnv * env, jclass thiz, jlong ppool,
-                                                                  jobject _jpool, jbyteArray jbuffer, jint jidx,
-                                                                  jint jwidth, jint jheight, jboolean jgrey)
-{
-  int buff_height = jheight + (jheight / 2);
-  Size buff_size(jwidth, buff_height);
-  image_pool *pool = (image_pool *)ppool;
-
-  Mat mat = pool->getYUV(jidx);
-
-  if (mat.empty() || mat.size() != buff_size)
-  {
-    mat.create(buff_size, CV_8UC1);
-  }
-
-  jsize sz = env->GetArrayLength(jbuffer);
-  uchar* buff = mat.ptr<uchar> (0);
-
-  env->GetByteArrayRegion(jbuffer, 0, sz, (jbyte*)buff);
-
-  pool->addYUVMat(jidx, mat);
-
-  Mat color = pool->getImage(jidx);
-
-  if (!jgrey)
-  {
-
-    if (color.cols != jwidth || color.rows != jheight || color.channels() != 3)
-    {
-      color.create(jheight, jwidth, CV_8UC3);
-    }
-    //doesn't work unfortunately..
-    //TODO cvtColor(mat,color, CV_YCrCb2RGB);
-    color_convert_common(buff, buff + jwidth * jheight, jwidth, jheight, color.ptr<uchar> (0), false);
-  }
-
-  if (jgrey)
-  {
-    Mat grey = pool->getGrey(jidx);
-    color = grey;
-  }
-
-  pool->addImage(jidx, color);
-
-}
-
-image_pool::image_pool()
-{
-
-}
-
-image_pool::~image_pool()
-{
-  __android_log_print(ANDROID_LOG_INFO, "image_pool", "destructor called");
-}
-
-Mat image_pool::getImage(int i)
-{
-  return imagesmap[i];
-}
-Mat image_pool::getGrey(int i)
-{
-  Mat tm = yuvImagesMap[i];
-  if (tm.empty())
-    return tm;
-  return tm(Range(0, tm.rows * (2.0f / 3)), Range::all());
-}
-Mat image_pool::getYUV(int i)
-{
-  return yuvImagesMap[i];
-}
-void image_pool::addYUVMat(int i, Mat mat)
-{
-  yuvImagesMap[i] = mat;
-}
-void image_pool::addImage(int i, Mat mat)
-{
-  imagesmap[i] = mat;
-}
-
-void image_pool::convertYUVtoColor(int i, cv::Mat& out)
-{
-
-  Mat yuv = getYUV(i);
-
-  if (yuv.empty())
-    return;
-  int width = yuv.cols;
-  int height = yuv.rows * (2.0f / 3);
-  out.create(height, width, CV_8UC3);
-  const unsigned char* buff = yuv.ptr<unsigned char> (0);
-  unsigned char* out_buff = out.ptr<unsigned char> (0);
-  //doesn't work unfortunately..
-  //TODO cvtColor(mat,color, CV_YCrCb2RGB);
-  color_convert_common(buff, buff + width * height, width, height, out_buff, false);
-}
diff --git a/android/android-jni/jni/image_pool.h b/android/android-jni/jni/image_pool.h
deleted file mode 100644 (file)
index 8ce13cb..0000000
+++ /dev/null
@@ -1,66 +0,0 @@
-#ifndef IMAGE_POOL_H_ANDROID_KDJFKJ
-#define IMAGE_POOL_H_ANDROID_KDJFKJ
-#include <opencv2/core/core.hpp>
-#include <map>
-
-#if ANDROID
-#include <jni.h>
-
-#ifdef __cplusplus
-extern "C"
-{
-#endif
-
-JNIEXPORT jint JNI_OnLoad(JavaVM* vm, void* reserved);
-//
-//JNIEXPORT jobject JNICALL Java_com_opencv_jni_opencvJNI_getBitmapBuffer(
-//             JNIEnv *jenv, jclass jcls, jlong jarg1, jobject jarg1_);
-
-
-JNIEXPORT void JNICALL Java_com_opencv_jni_opencvJNI_addYUVtoPool(JNIEnv *, jclass, jlong, jobject, jbyteArray, jint,
-                                                                  jint, jint, jboolean);
-
-#ifdef __cplusplus
-}
-#endif
-#endif
-class image_pool
-{
-
-public:
-  image_pool();
-  ~image_pool();
-  cv::Mat getImage(int i);
-  cv::Mat getGrey(int i);
-  cv::Mat getYUV(int i);
-
-  int getCount()
-  {
-    return imagesmap.size();
-  }
-
-  /** Adds a mat at the given index - will not do a deep copy, just images[i] = mat
-   *
-   */
-  void addImage(int i, cv::Mat mat);
-
-  /** this function stores the given matrix in the yuvImagesMap. Also,
-   * after this call getGrey will work, as the grey image is just the top
-   * two thirds of the YUV mat (the Y plane).
-   *
-   * \param i index to store yuv image at
-   * \param mat the yuv matrix to store
-   */
-  void addYUVMat(int i, cv::Mat mat);
-
-  void convertYUVtoColor(int i, cv::Mat& out);
-
-  //   int addYUV(uchar* buffer, int size, int width, int height, bool grey,int idx);
-  //
-  //   void getBitmap(int * outintarray, int size, int idx);
-private:
-  std::map<int, cv::Mat> imagesmap;
-  std::map<int, cv::Mat> yuvImagesMap;
-
-};
-#endif
diff --git a/android/android-jni/jni/image_pool.i b/android/android-jni/jni/image_pool.i
deleted file mode 100644 (file)
index c1b3c6d..0000000
+++ /dev/null
@@ -1,56 +0,0 @@
-
-
-%typemap(javaimports) image_pool "
-/** image_pool is used for keeping track of a pool of native images.  It stores images as cv::Mat's and
-references them by an index.  It allows one to get a pointer to an underlying mat, and handles memory deletion.*/"
-
-
-%javamethodmodifiers image_pool::getImage"
-  /** gets a pointer to a stored image, by an index.  If the index is new, an empty Mat is returned
-       * @param idx the index in the pool that is associated with a cv::Mat
-       * @return the cv::Mat stored at idx, or an empty Mat if the given idx is novel
-    */
-  public";
-  
-  
-%javamethodmodifiers image_pool::deleteImage"
-  /** deletes the image from the pool
-       * @param idx the index in the pool that is associated with a cv::Mat
-    */
-  public";
-  
-  
-  
-%javamethodmodifiers addYUVtoPool"
-  /** adds a YUV frame to the pool at the given index and updates the corresponding color/grey image
-       * @param idx the index in the pool that is associated with a cv::Mat
-    */
-  public";
-  
-%include "various.i"
-
-
-%apply (char* BYTE) { (char *data)}; //byte[] to char*
-
-
-%native (addYUVtoPool) void addYUVtoPool(image_pool* pool, char* data,int idx, int width, int height, bool grey);
-
-
-
-
-%feature("director") image_pool;
-class image_pool {
-       
-public:
-       image_pool();
-       ~image_pool();
-       
-       
-       Mat getImage(int i);
-       void addImage(int i, Mat mat);
-
-       
-
-
-};
-
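As a usage note, the processor callbacks elsewhere in this tree (Calibrator, below) receive an image_pool plus an index, and fetching the current frame is a single lookup. A minimal sketch (the logging callback class is hypothetical):

    public class FrameLogger implements com.opencv.camera.NativeProcessor.PoolCallback {
        public void process(int idx, com.opencv.jni.image_pool pool, long timestamp,
                            com.opencv.camera.NativeProcessor nativeProcessor) {
            com.opencv.jni.Mat frame = pool.getImage(idx); // wraps the pooled cv::Mat; empty if idx is new
            android.util.Log.i("opencv", "frame " + idx + ": " + frame.getRows() + "x" + frame.getCols());
        }
    }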
diff --git a/android/android-jni/jni/nocopy.i b/android/android-jni/jni/nocopy.i
deleted file mode 100644 (file)
index 23e685f..0000000
+++ /dev/null
@@ -1,36 +0,0 @@
-/* 
- * int *INTARRAY  typemaps. 
- * These are input typemaps for mapping a Java int[] array to a C int array.
- * Note that as a Java array is used and thus passed by reference, the C routine 
- * can return data to Java via the parameter.
- *
- * Example usage wrapping:
- *   void foo(int *INTARRAY, int INTARRAYSIZE);
- *  
- * Java usage:
- *   int b[] = new int[20];
- *   modulename.foo(b);
- */
-
-%typemap(in) (int *INTARRAY, int INTARRAYSIZE) {
-    $1 = (int *) JCALL2(GetIntArrayElements, jenv, $input, 0); 
-    jsize sz = JCALL1(GetArrayLength, jenv, $input);
-    $2 = (int)sz;
-}
-
-%typemap(argout) (int *INTARRAY, int INTARRAYSIZE) {
-    JCALL3(ReleaseIntArrayElements, jenv, $input, (jint *) $1, 0); 
-}
-
-
-/* Prevent default freearg typemap from being used */
-%typemap(freearg) (int *INTARRAY, int INTARRAYSIZE) ""
-
-%typemap(jni) (int *INTARRAY, int INTARRAYSIZE) "jintArray"
-%typemap(jtype) (int *INTARRAY, int INTARRAYSIZE) "int[]"
-%typemap(jstype) (int *INTARRAY, int INTARRAYSIZE) "int[]"
-%typemap(javain) (int *INTARRAY, int INTARRAYSIZE) "$javainput"
-
-
-
-
diff --git a/android/android-jni/jni/yuv2rgb_neon.c b/android/android-jni/jni/yuv2rgb_neon.c
deleted file mode 100644 (file)
index a845858..0000000
+++ /dev/null
@@ -1,201 +0,0 @@
-/*
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- */
-#include "yuv420sp2rgb.h"
-#include <arm_neon.h>
-#include <stdlib.h>
-
-/* this source file should only be compiled by Android.mk when targeting
- * the armeabi-v7a ABI, and should be built in NEON mode
- */
-void fir_filter_neon_intrinsics(short *output, const short* input, const short* kernel, int width, int kernelSize)
-{
-#if 1
-  int nn, offset = -kernelSize / 2;
-
-  for (nn = 0; nn < width; nn++)
-  {
-    int mm, sum = 0;
-    int32x4_t sum_vec = vdupq_n_s32(0);
-    for (mm = 0; mm < kernelSize / 4; mm++)
-    {
-      int16x4_t kernel_vec = vld1_s16(kernel + mm * 4);
-      int16x4_t input_vec = vld1_s16(input + (nn + offset + mm * 4));
-      sum_vec = vmlal_s16(sum_vec, kernel_vec, input_vec);
-    }
-
-    sum += vgetq_lane_s32(sum_vec, 0);
-    sum += vgetq_lane_s32(sum_vec, 1);
-    sum += vgetq_lane_s32(sum_vec, 2);
-    sum += vgetq_lane_s32(sum_vec, 3);
-
-    if (kernelSize & 3)
-    {
-      for (mm = kernelSize - (kernelSize & 3); mm < kernelSize; mm++)
-        sum += kernel[mm] * input[nn + offset + mm];
-    }
-
-    output[nn] = (short)((sum + 0x8000) >> 16);
-  }
-#else /* for comparison purposes only */
-  int nn, offset = -kernelSize/2;
-  for (nn = 0; nn < width; nn++)
-  {
-    int sum = 0;
-    int mm;
-    for (mm = 0; mm < kernelSize; mm++)
-    {
-      sum += kernel[mm]*input[nn+offset+mm];
-    }
-    output[nn] = (short)((sum + 0x8000) >> 16);
-  }
-#endif
-}
-
-/*
- YUV 4:2:0 image with a plane of 8 bit Y samples followed by an interleaved
- U/V plane containing 8 bit 2x2 subsampled chroma samples.
- except the interleave order of U and V is reversed.
-
- H V
- Y Sample Period      1 1
- U (Cb) Sample Period 2 2
- V (Cr) Sample Period 2 2
- */
-
-/*
- size of a char:
- find . -name limits.h -exec grep CHAR_BIT {} \;
- */
-
-#ifndef max
-#define max(a,b) ({typeof(a) _a = (a); typeof(b) _b = (b); _a > _b ? _a : _b; })
-#define min(a,b) ({typeof(a) _a = (a); typeof(b) _b = (b); _a < _b ? _a : _b; })
-#endif
-
-#define bytes_per_pixel 2
-#define LOAD_Y(i,j) (pY + i * width + j)
-#define LOAD_V(i,j) (pUV + (i / 2) * width + bytes_per_pixel * (j / 2))
-#define LOAD_U(i,j) (pUV + (i / 2) * width + bytes_per_pixel * (j / 2)+1)
-
-const uint8_t ZEROS[8] = {220,220, 220, 220, 220, 220, 220, 220};
-const uint8_t Y_SUBS[8] = {16, 16, 16, 16, 16, 16, 16, 16};
-const uint8_t UV_SUBS[8] = {128, 128, 128, 128, 128, 128, 128, 128};
-
-const uint32_t UV_MULS[] = {833, 400, 833, 400};
-
-void color_convert_common(const unsigned char *pY, const unsigned char *pUV, int width, int height, unsigned char *buffer, int grey)
-{
-
-  int i, j;
-  int nR, nG, nB;
-  int nY, nU, nV;
-  unsigned char *out = buffer;
-  int offset = 0;
-
-  uint8x8_t Y_SUBvec = vld1_u8(Y_SUBS);
-  uint8x8_t UV_SUBvec = vld1_u8(UV_SUBS); // v,u,v,u v,u,v,u
-  uint32x4_t UV_MULSvec = vld1q_u32(UV_MULS);
-  uint8x8_t ZEROSvec =vld1_u8(ZEROS);
-
-  uint32_t UVvec_int[8];
-  if (grey)
-  {
-    memcpy(out, pY, width * height * sizeof(unsigned char));
-  }
-  else
-    // YUV 4:2:0
-    for (i = 0; i < height; i++)
-    {
-      for (j = 0; j < width; j += 8)
-      {
-        //        nY = *(pY + i * width + j);
-        //        nV = *(pUV + (i / 2) * width + bytes_per_pixel * (j / 2));
-        //        nU = *(pUV + (i / 2) * width + bytes_per_pixel * (j / 2) + 1);
-
-        uint8x8_t nYvec = vld1_u8(LOAD_Y(i,j));
-        uint8x8_t nUVvec = vld1_u8(LOAD_V(i,j)); // v,u,v,u v,u,v,u
-
-        nYvec = vmul_u8(nYvec, vcle_u8(nYvec,ZEROSvec));
-
-        // Yuv Convert
-        //        nY -= 16;
-        //        nU -= 128;
-        //        nV -= 128;
-
-        //        nYvec = vsub_u8(nYvec, Y_SUBvec);
-        //        nUVvec = vsub_u8(nYvec, UV_SUBvec);
-
-        uint16x8_t nYvec16 = vmovl_u8(vsub_u8(nYvec, Y_SUBvec));
-        uint16x8_t nUVvec16 = vmovl_u8(vsub_u8(nUVvec, UV_SUBvec));
-
-        uint16x4_t Y_low4 = vget_low_u16(nYvec16);
-        uint16x4_t Y_high4 = vget_high_u16(nYvec16);
-        uint16x4_t UV_low4 = vget_low_u16(nUVvec16);
-        uint16x4_t UV_high4 = vget_high_u16(nUVvec16);
-
-        uint32x4_t UV_low4_int = vmovl_u16(UV_low4);
-        uint32x4_t UV_high4_int = vmovl_u16(UV_high4);
-
-        uint32x4_t Y_low4_int = vmull_n_u16(Y_low4, 1192);
-        uint32x4_t Y_high4_int = vmull_n_u16(Y_high4, 1192);
-
-        uint32x4x2_t UV_uzp = vuzpq_u32(UV_low4_int, UV_high4_int);
-
-        uint32x2_t Vl = vget_low_u32(UV_uzp.val[0]);// vld1_u32(UVvec_int);
-        uint32x2_t Vh = vget_high_u32(UV_uzp.val[0]);//vld1_u32(UVvec_int + 2);
-
-        uint32x2x2_t Vll_ = vzip_u32(Vl, Vl);
-        uint32x4_t* Vll = (uint32x4_t*)(&Vll_);
-
-        uint32x2x2_t Vhh_ = vzip_u32(Vh, Vh);
-        uint32x4_t* Vhh = (uint32x4_t*)(&Vhh_);
-
-        uint32x2_t Ul =  vget_low_u32(UV_uzp.val[1]);
-        uint32x2_t Uh =  vget_high_u32(UV_uzp.val[1]);
-
-        uint32x2x2_t Ull_ = vzip_u32(Ul, Ul);
-        uint32x4_t* Ull = (uint32x4_t*)(&Ull_);
-
-        uint32x2x2_t Uhh_ = vzip_u32(Uh, Uh);
-        uint32x4_t* Uhh = (uint32x4_t*)(&Uhh_);
-
-        uint32x4_t B_int_low = vmlaq_n_u32(Y_low4_int, *Ull, 2066); //multiply by scalar accum
-        uint32x4_t B_int_high = vmlaq_n_u32(Y_high4_int, *Uhh, 2066); //multiply by scalar accum
-        uint32x4_t G_int_low = vsubq_u32(Y_low4_int, vmlaq_n_u32(vmulq_n_u32(*Vll, 833), *Ull, 400));
-        uint32x4_t G_int_high = vsubq_u32(Y_high4_int, vmlaq_n_u32(vmulq_n_u32(*Vhh, 833), *Uhh, 400));
-        uint32x4_t R_int_low = vmlaq_n_u32(Y_low4_int, *Vll, 1634); //multiply by scalar accum
-        uint32x4_t R_int_high = vmlaq_n_u32(Y_high4_int, *Vhh, 1634); //multiply by scalar accum
-
-        B_int_low = vshrq_n_u32 (B_int_low, 10);
-        B_int_high = vshrq_n_u32 (B_int_high, 10);
-        G_int_low = vshrq_n_u32 (G_int_low, 10);
-        G_int_high = vshrq_n_u32 (G_int_high, 10);
-        R_int_low = vshrq_n_u32 (R_int_low, 10);
-        R_int_high = vshrq_n_u32 (R_int_high, 10);
-
-
-        uint8x8x3_t RGB;
-        RGB.val[0] = vmovn_u16(vcombine_u16(vqmovn_u32 (R_int_low),vqmovn_u32 (R_int_high)));
-        RGB.val[1] = vmovn_u16(vcombine_u16(vqmovn_u32 (G_int_low),vqmovn_u32 (G_int_high)));
-        RGB.val[2] = vmovn_u16(vcombine_u16(vqmovn_u32 (B_int_low),vqmovn_u32 (B_int_high)));
-
-        vst3_u8 (out+i*width*3 + j*3, RGB);
-      }
-    }
-
-}
-
diff --git a/android/android-jni/jni/yuv420sp2rgb.c b/android/android-jni/jni/yuv420sp2rgb.c
deleted file mode 100644 (file)
index 0511df3..0000000
+++ /dev/null
@@ -1,80 +0,0 @@
-#include <string.h>
-#include <jni.h>
-
-#include <yuv420sp2rgb.h>
-
-/*
- YUV 4:2:0 image with a plane of 8 bit Y samples followed by an interleaved
- U/V plane containing 8 bit 2x2 subsampled chroma samples.
- except the interleave order of U and V is reversed.
-
- H V
- Y Sample Period      1 1
- U (Cb) Sample Period 2 2
- V (Cr) Sample Period 2 2
- */
-
-/*
- size of a char:
- find . -name limits.h -exec grep CHAR_BIT {} \;
- */
-
-#ifndef max
-#define max(a,b) ({typeof(a) _a = (a); typeof(b) _b = (b); _a > _b ? _a : _b; })
-#define min(a,b) ({typeof(a) _a = (a); typeof(b) _b = (b); _a < _b ? _a : _b; })
-#endif
-
-const int bytes_per_pixel = 2;
-void color_convert_common(const unsigned char *pY, const unsigned char *pUV, int width, int height, unsigned char *buffer, int grey)
-{
-
-  int i, j;
-  int nR, nG, nB;
-  int nY, nU, nV;
-  unsigned char *out = buffer;
-  int offset = 0;
-
-  if (grey)
-  {
-    memcpy(out,pY,width*height*sizeof(unsigned char));
-  }
-  else
-    // YUV 4:2:0
-    for (i = 0; i < height; i++)
-    {
-      for (j = 0; j < width; j++)
-      {
-        nY = *(pY + i * width + j);
-        nV = *(pUV + (i / 2) * width + bytes_per_pixel * (j / 2));
-        nU = *(pUV + (i / 2) * width + bytes_per_pixel * (j / 2) + 1);
-
-        // Yuv Convert
-        nY -= 16;
-        nU -= 128;
-        nV -= 128;
-
-        if (nY < 0)
-          nY = 0;
-
-        nB = (int)(1192 * nY + 2066 * nU);
-        nG = (int)(1192 * nY - 833 * nV - 400 * nU);
-        nR = (int)(1192 * nY + 1634 * nV);
-
-        nR = min(262143, max(0, nR));
-        nG = min(262143, max(0, nG));
-        nB = min(262143, max(0, nB));
-
-        nR >>= 10;
-        nR &= 0xff;
-        nG >>= 10;
-        nG &= 0xff;
-        nB >>= 10;
-        nB &= 0xff;
-
-        out[offset++] = (unsigned char)nR;
-        out[offset++] = (unsigned char)nG;
-        out[offset++] = (unsigned char)nB;
-      }
-    }
-
-}
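For reference, the integer coefficients used above and in the NEON path are the usual BT.601 YUV-to-RGB factors scaled by 1024: 1192 ~ 1.164*1024, 1634 ~ 1.596*1024, 2066 ~ 2.018*1024, 833 ~ 0.813*1024 and 400 ~ 0.391*1024. So, for example, R = (1192*(Y-16) + 1634*(V-128)) >> 10, with the 0..262143 clamp before the shift saturating the result to the 8-bit range.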
diff --git a/android/android-jni/jni/yuv420sp2rgb.h b/android/android-jni/jni/yuv420sp2rgb.h
deleted file mode 100644 (file)
index dfe9b5f..0000000
+++ /dev/null
@@ -1,18 +0,0 @@
-//yuv420sp2rgb.h
-#ifndef YUV420SP2RGB_H
-#define YUV420SP2RGB_H
-
-#ifdef __cplusplus
-extern "C" {
-#endif
-
-void color_convert_common(
-    const unsigned char *pY, const unsigned char *pUV,
-    int width, int height, unsigned char *buffer,
-    int grey);
-
-#ifdef __cplusplus
-}
-#endif
-
-#endif
diff --git a/android/android-jni/project_create.sh b/android/android-jni/project_create.sh
deleted file mode 100755 (executable)
index 0a1b6bd..0000000
+++ /dev/null
@@ -1,4 +0,0 @@
-#!/bin/sh
-#this generates an ant based cli build of the android-jni project
-android update project --name android-opencv \
---path .
diff --git a/android/android-jni/res/layout/calibrationviewer.xml b/android/android-jni/res/layout/calibrationviewer.xml
deleted file mode 100644 (file)
index 00dea19..0000000
+++ /dev/null
@@ -1,11 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<LinearLayout
-  xmlns:android="http://schemas.android.com/apk/res/android"
-  android:layout_width="fill_parent"
-  android:layout_height="fill_parent"
-  android:orientation="vertical"
-  android:gravity="center_vertical|center_horizontal">
-   <TextView android:scrollbars="vertical" android:id="@+id/calibtext" android:text="" android:layout_width="wrap_content" 
- android:layout_height="wrap_content" android:padding="20dip"/>
-
-</LinearLayout>
diff --git a/android/android-jni/res/layout/camerasettings.xml b/android/android-jni/res/layout/camerasettings.xml
deleted file mode 100644 (file)
index 5583014..0000000
+++ /dev/null
@@ -1,43 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
-       android:layout_width="fill_parent" android:layout_height="fill_parent"
-       android:orientation="vertical" android:gravity="center_vertical|center_horizontal">
-       <TextView android:text="@string/settings_text"
-               android:autoLink="web" android:layout_width="wrap_content"
-               android:layout_height="wrap_content" android:padding="20dip" />
-               
-       <LinearLayout android:id="@+id/LinearLayout01"
-               android:layout_width="wrap_content" android:layout_height="wrap_content"
-               android:gravity="center_vertical">
-               <TextView android:layout_width="wrap_content"
-                       android:layout_height="wrap_content" android:text="@string/image_size_prompt" />
-               <Spinner android:id="@+id/image_size" android:layout_width="fill_parent"
-                       android:layout_height="wrap_content" android:saveEnabled="true"
-                       android:prompt="@string/image_size_prompt" android:entries="@array/image_sizes">
-               </Spinner>
-       </LinearLayout>
-
-       <LinearLayout android:id="@+id/LinearLayout01"
-               android:layout_width="wrap_content" android:layout_height="wrap_content"
-               android:gravity="center_vertical">
-               <TextView android:layout_width="wrap_content"
-                       android:layout_height="wrap_content" android:text="@string/camera_mode_prompt" />
-               <Spinner android:id="@+id/camera_mode" android:layout_width="fill_parent"
-                       android:layout_height="wrap_content" android:saveEnabled="true"
-                       android:prompt="@string/camera_mode_prompt" android:entries="@array/camera_mode">
-               </Spinner>
-       </LinearLayout>
-       
-               <LinearLayout android:id="@+id/LinearLayout01"
-               android:layout_width="wrap_content" android:layout_height="wrap_content"
-               android:gravity="center_vertical">
-               <TextView android:layout_width="wrap_content"
-                       android:layout_height="wrap_content" android:text="@string/whitebalance_prompt" />
-               <Spinner android:id="@+id/whitebalance" android:layout_width="fill_parent"
-                       android:layout_height="wrap_content" android:saveEnabled="true"
-                       android:prompt="@string/whitebalance_prompt" android:entries="@array/whitebalance">
-               </Spinner>
-       </LinearLayout>
-       
-
-</LinearLayout>
diff --git a/android/android-jni/res/layout/chesssizer.xml b/android/android-jni/res/layout/chesssizer.xml
deleted file mode 100644 (file)
index b93bc0b..0000000
+++ /dev/null
@@ -1,40 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<LinearLayout
-  xmlns:android="http://schemas.android.com/apk/res/android"
-  android:layout_width="fill_parent"
-  android:layout_height="fill_parent"
-  android:orientation="vertical"
-  android:gravity="center_vertical|center_horizontal">
-   <TextView android:text="@string/patterntext"  android:autoLink="web" android:layout_width="wrap_content" 
- android:layout_height="wrap_content" android:padding="20dip"/>
-  <LinearLayout android:id="@+id/LinearLayout01"
- android:layout_width="wrap_content" 
- android:layout_height="wrap_content"
- android:gravity="center_vertical">
- <TextView android:layout_width="wrap_content"
-  android:layout_height="wrap_content"
-   android:text="Corners in width direction:"/>
- <Spinner android:id="@+id/rows" 
-  android:layout_width="fill_parent"
-        android:layout_height="wrap_content"
-         android:saveEnabled="true"
-           android:prompt="@string/chesspromptx"
-android:entries="@array/chesssizes">
-</Spinner>
- </LinearLayout>
- <LinearLayout android:id="@+id/LinearLayout01"
- android:layout_width="wrap_content" android:layout_height="wrap_content"
- android:gravity="center_vertical">
- <TextView android:layout_width="wrap_content"
- android:layout_height="wrap_content" android:text="Corners in height direction:"/>
- <Spinner android:id="@+id/cols" 
-  android:layout_width="fill_parent"
-        android:layout_height="wrap_content"
-        android:saveEnabled="true"
-        android:prompt="@string/chessprompty"
-android:entries="@array/chesssizes">
-</Spinner>
-</LinearLayout>
-
-</LinearLayout>
diff --git a/android/android-jni/res/values/attrs.xml b/android/android-jni/res/values/attrs.xml
deleted file mode 100644 (file)
index 89727ff..0000000
+++ /dev/null
@@ -1,11 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<resources>
-
-<declare-styleable name="CameraParams">
-
-<attr name="preview_width" format="integer"/>
-<attr name="preview_height" format="integer"/>
-
-</declare-styleable>
-
-</resources>
\ No newline at end of file
diff --git a/android/android-jni/res/values/chessnumbers.xml b/android/android-jni/res/values/chessnumbers.xml
deleted file mode 100644 (file)
index c0b37fa..0000000
+++ /dev/null
@@ -1,21 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<resources>
-<string-array name="chesssizes">
-<item>2</item>
-<item>3</item>
-<item>4</item>
-<item>5</item>
-<item>6</item>
-<item>7</item>
-<item>8</item>
-<item>9</item>
-<item>10</item>
-<item>11</item>
-<item>12</item>
-<item>13</item>
-</string-array>
-<string name="chesspromptx">
-Choose the width:</string>
-<string name="chessprompty">
-Choose the height:</string>
-</resources>
diff --git a/android/android-jni/res/values/settingnumbers.xml b/android/android-jni/res/values/settingnumbers.xml
deleted file mode 100644 (file)
index 54771c1..0000000
+++ /dev/null
@@ -1,31 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<resources>
-<string-array name="image_sizes">
-<item>320x240</item>
-<item>400x300</item>
-<item>640x480</item>
-<item>800x600</item>
-<item>1000x800</item>
-</string-array>
-<string-array name="camera_mode">
-<item>color</item>
-<item>BW</item>
-</string-array>
-<string name="image_size_prompt">
-Image Size:\n(may not be exact)
-</string>
-<string name="camera_mode_prompt">
-Camera Mode:
-</string>
-
-<string-array name="whitebalance">
-<item>auto</item>
-<item>incandescent</item>
-<item>fluorescent</item>
-<item>daylight</item>
-<item>cloudy-daylight</item>
-</string-array>
-<string name="whitebalance_prompt">
-Whitebalance:
-</string>
-</resources>
\ No newline at end of file
diff --git a/android/android-jni/res/values/strings.xml b/android/android-jni/res/values/strings.xml
deleted file mode 100644 (file)
index 541de36..0000000
+++ /dev/null
@@ -1,20 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<resources>
-    <string name="app_name">Calibration</string>
-       <string name="patternsize">Pattern Size</string>
-       <string name="patterntext">Please choose the width and height (number of inside corners) of the
-       checkerboard pattern you will be using for calibration. The default is 6 by 8 corners. You may find a checkerboard pattern at
-       http://opencv.willowgarage.com/pattern</string>
-       
-       <string name="patternlink">http://opencv.willowgarage.com/pattern</string>
-       <string name="camera_settings_label">Camera Settings</string>
-       <string name="settings_text">Change the camera settings. Be aware that BW is much faster for previewing than color. Also, if you change the image size, you should
-       rerun calibration.  Default values: BW and 640x480 are a good start.</string>
-       
-       <string name="calibration_service_started">Calibration calculations have started...</string>
-       <string name="calibration_service_stopped">Calibration calculations have stopped.</string>
-       <string name="calibration_service_finished">Calibration finished, your camera is calibrated.</string>
-       <string name="calibration_service_label">Calibration</string>
-       <string name="calibration_not_enough">Please capture at least 10 images of the pattern!</string> 
-       
-</resources>
diff --git a/android/android-jni/sample.local.env.mk b/android/android-jni/sample.local.env.mk
deleted file mode 100644 (file)
index 6bd9240..0000000
+++ /dev/null
@@ -1,10 +0,0 @@
-#location of android-opencv.mk file that was generated by the cmake build
-#of opencv for android
-OPENCV_CONFIG=../build/android-opencv.mk
-
-#the root directory of the crystax r4 ndk - ndk-build should be in this dir
-#you can download the ndk from http://www.crystax.net/android/ndk-r4.php
-ANDROID_NDK_ROOT=$(HOME)/android-ndk-r4-crystax
-
-#define only armeabi-v7a to make the final apk smaller or armeabi
-ARM_TARGETS="armeabi armeabi-v7a"
\ No newline at end of file
diff --git a/android/android-jni/src/com/opencv/OpenCV.java b/android/android-jni/src/com/opencv/OpenCV.java
deleted file mode 100644 (file)
index d3e9248..0000000
+++ /dev/null
@@ -1,157 +0,0 @@
-package com.opencv;
-
-import java.util.LinkedList;
-
-import android.app.Activity;
-import android.content.pm.ActivityInfo;
-import android.os.Bundle;
-import android.view.Gravity;
-import android.view.KeyEvent;
-import android.view.Menu;
-import android.view.MenuItem;
-import android.view.Window;
-import android.view.WindowManager;
-import android.view.ViewGroup.LayoutParams;
-import android.widget.FrameLayout;
-import android.widget.LinearLayout;
-
-import com.opencv.camera.NativePreviewer;
-import com.opencv.camera.NativeProcessor;
-import com.opencv.camera.NativeProcessor.PoolCallback;
-import com.opencv.opengl.GL2CameraViewer;
-
-public class OpenCV extends Activity {
-       private NativePreviewer mPreview;
-
-       private GL2CameraViewer glview;
-
-       /*
-        * (non-Javadoc)
-        * 
-        * @see android.app.Activity#onKeyUp(int, android.view.KeyEvent)
-        */
-       @Override
-       public boolean onKeyUp(int keyCode, KeyEvent event) {
-
-               return super.onKeyUp(keyCode, event);
-       }
-
-       /*
-        * (non-Javadoc)
-        * 
-        * @see android.app.Activity#onKeyLongPress(int, android.view.KeyEvent)
-        */
-       @Override
-       public boolean onKeyLongPress(int keyCode, KeyEvent event) {
-
-               return super.onKeyLongPress(keyCode, event);
-       }
-
-       /**
-        * Prevent the screen from being turned off by the system.
-        */
-       public void disableScreenTurnOff() {
-               getWindow().setFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON,
-                               WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
-       }
-
-       /**
-        * Sets the orientation to landscape, as this is needed by AndAR.
-        */
-       public void setOrientation() {
-               setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
-       }
-
-       /**
-        * Maximize the application.
-        */
-       public void setFullscreen() {
-               requestWindowFeature(Window.FEATURE_NO_TITLE);
-               getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
-                               WindowManager.LayoutParams.FLAG_FULLSCREEN);
-       }
-
-       public void setNoTitle() {
-               requestWindowFeature(Window.FEATURE_NO_TITLE);
-       }
-
-       @Override
-       public boolean onCreateOptionsMenu(Menu menu) {
-               // menu.add("Sample");
-               return true;
-       }
-
-       @Override
-       public boolean onOptionsItemSelected(MenuItem item) {
-               // if(item.getTitle().equals("Sample")){
-               // //do stuff...
-               // }
-
-               return true;
-
-       }
-
-       @Override
-       public void onOptionsMenuClosed(Menu menu) {
-               // TODO Auto-generated method stub
-               super.onOptionsMenuClosed(menu);
-       }
-
-       @Override
-       protected void onCreate(Bundle savedInstanceState) {
-               super.onCreate(savedInstanceState);
-
-               setFullscreen();
-               disableScreenTurnOff();
-
-               FrameLayout frame = new FrameLayout(getApplication());
-
-               // Create our Preview view and set it as the content of our activity.
-               mPreview = new NativePreviewer(getApplication(), 400, 300);
-
-               LayoutParams params = new LayoutParams(LayoutParams.WRAP_CONTENT,
-                               LayoutParams.WRAP_CONTENT);
-               params.height = getWindowManager().getDefaultDisplay().getHeight();
-               params.width = (int) (params.height * 4.0 / 2.88);
-
-               LinearLayout vidlay = new LinearLayout(getApplication());
-
-               vidlay.setGravity(Gravity.CENTER);
-               vidlay.addView(mPreview, params);
-               frame.addView(vidlay);
-               
-               // make the glview overlay on top of the video preview
-               mPreview.setZOrderMediaOverlay(false);
-               
-               glview = new GL2CameraViewer(getApplication(), false, 0, 0);
-               glview.setZOrderMediaOverlay(true);
-               glview.setLayoutParams(new LayoutParams(LayoutParams.FILL_PARENT,
-                               LayoutParams.FILL_PARENT));
-               frame.addView(glview);
-
-               setContentView(frame);
-       }
-
-       @Override
-       protected void onPause() {
-               super.onPause();
-               
-               mPreview.onPause();
-               
-               glview.onPause();
-               
-
-       }
-
-       @Override
-       protected void onResume() {
-               super.onResume();
-               glview.onResume();
-               LinkedList<NativeProcessor.PoolCallback> callbackstack = new LinkedList<PoolCallback>();
-               callbackstack.add(glview.getDrawCallback());
-               mPreview.addCallbackStack(callbackstack);
-               mPreview.onResume();
-
-       }
-
-}
\ No newline at end of file
diff --git a/android/android-jni/src/com/opencv/calibration/CalibrationViewer.java b/android/android-jni/src/com/opencv/calibration/CalibrationViewer.java
deleted file mode 100644 (file)
index 2ae6b78..0000000
+++ /dev/null
@@ -1,47 +0,0 @@
-package com.opencv.calibration;
-
-import java.io.BufferedReader;
-import java.io.FileNotFoundException;
-import java.io.FileReader;
-import java.io.IOException;
-
-import android.app.Activity;
-import android.os.Bundle;
-import android.text.method.ScrollingMovementMethod;
-import android.util.Log;
-import android.widget.TextView;
-
-import com.opencv.R;
-
-public class CalibrationViewer extends Activity {
-
-       @Override
-       protected void onCreate(Bundle savedInstanceState) {
-               // TODO Auto-generated method stub
-               super.onCreate(savedInstanceState);
-               setContentView(R.layout.calibrationviewer);
-
-               Bundle extras = getIntent().getExtras();
-               String filename = extras.getString("calibfile");
-               if (filename != null) {
-                       TextView text = (TextView) findViewById(R.id.calibtext);
-                       text.setMovementMethod(new ScrollingMovementMethod());
-                       try {
-                               BufferedReader reader = new BufferedReader(new FileReader(
-                                               filename));
-                               while (reader.ready()) {
-                                       text.append(reader.readLine() +"\n");
-                               }
-
-                       } catch (FileNotFoundException e) {
-                               Log.e("opencv", "could not open calibration file at:"
-                                               + filename);
-                       } catch (IOException e) {
-                               Log.e("opencv", "error reading file: "
-                                               + filename);
-                       }
-               }
-
-       }
-
-}
diff --git a/android/android-jni/src/com/opencv/calibration/Calibrator.java b/android/android-jni/src/com/opencv/calibration/Calibrator.java
deleted file mode 100644 (file)
index 699a196..0000000
+++ /dev/null
@@ -1,124 +0,0 @@
-package com.opencv.calibration;
-
-
-import java.io.File;
-import java.io.IOException;
-import java.util.concurrent.locks.ReentrantLock;
-
-import android.os.AsyncTask;
-
-import com.opencv.camera.NativeProcessor;
-import com.opencv.camera.NativeProcessor.PoolCallback;
-import com.opencv.jni.Calibration;
-import com.opencv.jni.Size;
-import com.opencv.jni.image_pool;
-
-
-
-public class Calibrator implements PoolCallback {
-       private Calibration calibration;
-
-       static public interface CalibrationCallback{
-               public void onFoundChessboard(Calibrator calibrator);
-               public void onDoneCalibration(Calibrator calibration, File calibfile);
-               public void onFailedChessboard(Calibrator calibrator);
-       }
-       private CalibrationCallback callback;
-       public Calibrator(CalibrationCallback callback) {
-               calibration = new Calibration();
-               this.callback = callback;
-       }
-
-       public void resetCalibration(){
-               calibration.resetChess();
-       }
-
-       public void setPatternSize(Size size){
-               Size csize = calibration.getPatternsize();
-               if(size.getWidth() == csize.getWidth()&&
-                                  size.getHeight() == csize.getHeight())
-                                       return;
-               calibration.setPatternsize(size);       
-               resetCalibration();
-       }
-       public void setPatternSize(int width, int height){
-               Size patternsize = new Size(width,height);
-               setPatternSize(patternsize);
-       }
-       
-       private boolean capture_chess;
-
-       ReentrantLock lock = new ReentrantLock();
-       public void calibrate(File calibration_file) throws IOException{
-               if(getNumberPatternsDetected() < 3){
-                       return;
-               }
-               CalibrationTask calibtask = new CalibrationTask(calibration_file);
-               calibtask.execute((Object[])null);
-       }
-
-       public void queueChessCapture(){
-               capture_chess = true;
-       }
-       
-private class CalibrationTask extends AsyncTask<Object, Object, Object> {
-               File calibfile;
-       
-               public CalibrationTask(File calib) throws IOException{
-                       super();
-                       calibfile = calib;
-                       calibfile.createNewFile();
-               }
-       
-               @Override
-               protected Object doInBackground(Object... params) {
-                       lock.lock();
-                       try{
-                               calibration.calibrate(calibfile.getAbsolutePath());
-                       }
-                       finally{
-                               lock.unlock();
-                       }
-                       return null;
-               
-               }
-
-               @Override
-               protected void onPostExecute(Object result) {                   
-                       callback.onDoneCalibration(Calibrator.this, calibfile);
-               }
-
-       }
-       
-
-       //@Override
-       public void process(int idx, image_pool pool, long timestamp,
-                       NativeProcessor nativeProcessor) {
-               if(lock.tryLock()){
-                       try{
-                               if(capture_chess){
-                                       if(calibration.detectAndDrawChessboard(idx, pool)){
-                                               callback.onFoundChessboard(this);
-                                               
-                                       }else
-                                               callback.onFailedChessboard(this);
-                                       capture_chess = false;
-                               }
-                       }finally{
-                               lock.unlock();
-                       }
-               }
-       }
-
-
-       public int getNumberPatternsDetected(){
-               return calibration.getNumberDetectedChessboards();
-       }
-
-       public void setCallback(CalibrationCallback callback) {
-               this.callback = callback;
-               
-       }
-
-
-}
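
Note (editor's sketch, not part of this commit): the Calibrator above is itself a NativeProcessor.PoolCallback, so it only receives frames once it has been pushed onto a NativePreviewer callback stack; results then come back through its CalibrationCallback interface. As a hedged illustration only, with MyCalibrationActivity and the "camera.yml" file name being hypothetical, a client could have looked roughly like this:

import java.io.File;
import java.io.IOException;

import android.app.Activity;
import android.os.Bundle;
import android.util.Log;

import com.opencv.calibration.Calibrator;
import com.opencv.calibration.Calibrator.CalibrationCallback;

public class MyCalibrationActivity extends Activity implements CalibrationCallback {
	private Calibrator calibrator;

	@Override
	protected void onCreate(Bundle savedInstanceState) {
		super.onCreate(savedInstanceState);
		calibrator = new Calibrator(this);
		calibrator.setPatternSize(6, 8); // inner-corner count of the chessboard
		// The calibrator must also be added to a NativePreviewer callback
		// stack so that its process() method receives camera frames.
	}

	// Typically wired to a button or tap handler.
	public void captureNextFrame() {
		calibrator.queueChessCapture();
	}

	@Override
	public void onFoundChessboard(Calibrator calibrator) {
		if (calibrator.getNumberPatternsDetected() >= 3) {
			try {
				// calibrate() runs asynchronously and reports back through
				// onDoneCalibration when it has finished.
				calibrator.calibrate(new File(getFilesDir(), "camera.yml"));
			} catch (IOException e) {
				Log.e("calibration", "could not create calibration file", e);
			}
		}
	}

	@Override
	public void onFailedChessboard(Calibrator calibrator) {
		Log.i("calibration", "no chessboard found in this frame");
	}

	@Override
	public void onDoneCalibration(Calibrator calibrator, File calibfile) {
		Log.i("calibration", "calibration written to " + calibfile.getAbsolutePath());
	}
}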
diff --git a/android/android-jni/src/com/opencv/calibration/ChessBoardChooser.java b/android/android-jni/src/com/opencv/calibration/ChessBoardChooser.java
deleted file mode 100644 (file)
index 461a37d..0000000
+++ /dev/null
@@ -1,75 +0,0 @@
-package com.opencv.calibration;
-
-import com.opencv.R;
-import com.opencv.jni.Size;
-
-import android.app.Activity;
-import android.content.Context;
-import android.content.SharedPreferences;
-import android.content.SharedPreferences.Editor;
-import android.os.Bundle;
-import android.view.View;
-import android.widget.AdapterView;
-import android.widget.AdapterView.OnItemSelectedListener;
-import android.widget.Spinner;
-
-public class ChessBoardChooser extends Activity {
-       public static final String CHESS_SIZE = "chess_size";
-       public static final int DEFAULT_WIDTH = 6;
-       public static final int DEFAULT_HEIGHT = 8;
-       public static final int LOWEST = 2;
-
-       class DimChooser implements OnItemSelectedListener {
-               private String dim;
-
-               public DimChooser(String dim) {
-                       this.dim = dim;
-               }
-
-               @Override
-               public void onItemSelected(AdapterView<?> arg0, View arg1, int pos,
-                               long arg3) {
-                       SharedPreferences settings = getSharedPreferences(CHESS_SIZE, 0);
-                       Editor editor = settings.edit();
-                       editor.putInt(dim, pos + LOWEST);
-                       editor.commit();
-               }
-
-               @Override
-               public void onNothingSelected(AdapterView<?> arg0) {
-               }
-       }
-
-       @Override
-       protected void onCreate(Bundle savedInstanceState) {
-               // TODO Auto-generated method stub
-               super.onCreate(savedInstanceState);
-               setContentView(R.layout.chesssizer);
-               // Restore preferences
-               SharedPreferences settings = getSharedPreferences(CHESS_SIZE, 0);
-               int width = settings.getInt("width", 6);
-
-               int height = settings.getInt("height", 8);
-
-               Spinner wspin, hspin;
-               wspin = (Spinner) findViewById(R.id.rows);
-               hspin = (Spinner) findViewById(R.id.cols);
-
-               wspin.setSelection(width - LOWEST);
-               hspin.setSelection(height - LOWEST);
-
-               wspin.setOnItemSelectedListener(new DimChooser("width"));
-               hspin.setOnItemSelectedListener(new DimChooser("height"));
-
-       }
-
-       public static Size getPatternSize(Context ctx) {
-               SharedPreferences settings = ctx.getSharedPreferences(CHESS_SIZE, 0);
-               int width = settings.getInt("width", 6);
-
-               int height = settings.getInt("height", 8);
-
-               return new Size(width, height);
-       }
-
-}
diff --git a/android/android-jni/src/com/opencv/calibration/services/CalibrationService.java b/android/android-jni/src/com/opencv/calibration/services/CalibrationService.java
deleted file mode 100644 (file)
index 754e2f1..0000000
+++ /dev/null
@@ -1,166 +0,0 @@
-package com.opencv.calibration.services;
-
-import java.io.File;
-import java.io.IOException;
-
-import android.app.Notification;
-import android.app.NotificationManager;
-import android.app.PendingIntent;
-import android.app.Service;
-import android.content.Intent;
-import android.os.Binder;
-import android.os.IBinder;
-import android.util.Log;
-import android.widget.Toast;
-
-
-import com.opencv.R;
-import com.opencv.calibration.CalibrationViewer;
-import com.opencv.calibration.Calibrator;
-import com.opencv.calibration.Calibrator.CalibrationCallback;
-
-
-public class CalibrationService extends Service implements CalibrationCallback {
-
-       Class<?> activity;
-       int icon;
-       File calibration_file;
-       public void startCalibrating(Class<?> activitycaller,int icon_id, Calibrator calibrator, File calibration_file)
-                       throws IOException {
-               activity = activitycaller;
-               icon = icon_id;
-               // Display a notification about us starting. We put an icon in the
-               // status bar.
-               showNotification();
-               this.calibration_file = calibration_file;
-               calibrator.setCallback(this);
-               calibrator.calibrate(calibration_file);
-               
-               
-       }
-
-       private NotificationManager mNM;
-
-       /**
-        * Class for clients to access. Because we know this service always runs in
-        * the same process as its clients, we don't need to deal with IPC.
-        */
-       public class CalibrationServiceBinder extends Binder {
-               public CalibrationService getService() {
-                       return CalibrationService.this;
-               }
-       }
-
-       @Override
-       public int onStartCommand(Intent intent, int flags, int startId) {
-		Log.i("CalibrationService", "Received start id " + startId + ": " + intent);
-		// This service only needs to run while a calibration is in progress, so
-		// there is no need to restart it if it gets killed: return not-sticky.
-               return START_NOT_STICKY;
-       }
-
-       @Override
-       public void onCreate() {
-               mNM = (NotificationManager) getSystemService(NOTIFICATION_SERVICE);
-
-               
-       }
-
-       @Override
-       public void onDestroy() {
-               // Cancel the persistent notification.
-               // mNM.cancel(R.string.calibration_service_started);
-
-               // Tell the user we stopped.
-               Toast.makeText(this, R.string.calibration_service_finished,
-                               Toast.LENGTH_SHORT).show();
-       }
-
-       private final IBinder mBinder = new CalibrationServiceBinder();
-
-       @Override
-       public IBinder onBind(Intent intent) {
-               return mBinder;
-       }
-
-       /**
-        * Show a notification while this service is running.
-        */
-       private void showNotification() {
-               // In this sample, we'll use the same text for the ticker and the
-               // expanded notification
-               CharSequence text = getText(R.string.calibration_service_started);
-
-               // Set the icon, scrolling text and timestamp
-               Notification notification = new Notification(icon, text,
-                               System.currentTimeMillis());
-
-               // The PendingIntent to launch our activity if the user selects this
-               // notification
-               PendingIntent contentIntent = PendingIntent.getActivity(this, 0,
-                               new Intent(this, activity), 0);
-
-               // Set the info for the views that show in the notification panel.
-               notification.setLatestEventInfo(this,
-                               getText(R.string.calibration_service_label), text,
-                               contentIntent);
-
-               notification.defaults |= Notification.DEFAULT_SOUND;
-		// Send the notification.
-		// We use a string resource id because it is a unique number. We use it
-		// later to cancel.
-               mNM.notify(R.string.calibration_service_started, notification);
-       }
-
-       /**
-        * Show a notification while this service is running.
-        */
-       private void doneNotification() {
-               // In this sample, we'll use the same text for the ticker and the
-               // expanded notification
-               CharSequence text = getText(R.string.calibration_service_finished);
-
-               // Set the icon, scrolling text and timestamp
-               Notification notification = new Notification(icon, text,
-                               System.currentTimeMillis());
-
-               Intent intent = new Intent(this,CalibrationViewer.class);
-               intent.putExtra("calibfile", calibration_file.getAbsolutePath());
-               // The PendingIntent to launch our activity if the user selects this
-               // notification
-               PendingIntent contentIntent = PendingIntent.getActivity(this, 0,
-                               intent, 0);
-               
-
-               // Set the info for the views that show in the notification panel.
-               notification.setLatestEventInfo(this,
-                               getText(R.string.calibration_service_label), text,
-                               contentIntent);
-               
-
-               notification.defaults |= Notification.DEFAULT_SOUND;
-		// Send the notification.
-		// We use a string resource id because it is a unique number. We use it
-		// later to cancel.
-               mNM.notify(R.string.calibration_service_started, notification);
-       }
-
-       @Override
-       public void onFoundChessboard(Calibrator calibrator) {
-               // TODO Auto-generated method stub
-
-       }
-
-       @Override
-       public void onDoneCalibration(Calibrator calibration, File calibfile) {
-               doneNotification();
-               stopSelf();
-       }
-
-       @Override
-       public void onFailedChessboard(Calibrator calibrator) {
-               // TODO Auto-generated method stub
-
-       }
-
-}
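
Editor's sketch (not part of this commit): because CalibrationService is a purely local service, as its CalibrationServiceBinder javadoc notes, a caller binds to it and casts the returned binder directly. The class and method names outside of CalibrationService below are hypothetical; this is a minimal sketch of that binding pattern, after which startCalibrating(...) can be invoked on the returned service instance:

import android.app.Activity;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.ServiceConnection;
import android.os.IBinder;

import com.opencv.calibration.services.CalibrationService;
import com.opencv.calibration.services.CalibrationService.CalibrationServiceBinder;

public class CalibrationClientActivity extends Activity {
	private CalibrationService service;

	private final ServiceConnection connection = new ServiceConnection() {
		@Override
		public void onServiceConnected(ComponentName name, IBinder binder) {
			// Safe cast: the service always runs in this process.
			service = ((CalibrationServiceBinder) binder).getService();
		}

		@Override
		public void onServiceDisconnected(ComponentName name) {
			service = null;
		}
	};

	@Override
	protected void onStart() {
		super.onStart();
		bindService(new Intent(this, CalibrationService.class), connection,
				Context.BIND_AUTO_CREATE);
	}

	@Override
	protected void onStop() {
		super.onStop();
		unbindService(connection);
	}
}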
diff --git a/android/android-jni/src/com/opencv/camera/CameraConfig.java b/android/android-jni/src/com/opencv/camera/CameraConfig.java
deleted file mode 100644 (file)
index 6f522ed..0000000
+++ /dev/null
@@ -1,214 +0,0 @@
-package com.opencv.camera;
-
-import com.opencv.R;
-
-import android.app.Activity;
-import android.content.Context;
-import android.content.SharedPreferences;
-import android.content.SharedPreferences.Editor;
-import android.os.Bundle;
-import android.view.View;
-import android.widget.AdapterView;
-import android.widget.AdapterView.OnItemSelectedListener;
-import android.widget.Spinner;
-
-public class CameraConfig extends Activity {
-       public static final String CAMERA_SETTINGS = "CAMERA_SETTINGS";
-       public static final String CAMERA_MODE = "camera_mode";
-       public static final String IMAGE_WIDTH = "IMAGE_WIDTH";
-       public static final String IMAGE_HEIGHT = "IMAGE_HEIGHT";
-       public static final int CAMERA_MODE_BW = 0;
-       public static final int CAMERA_MODE_COLOR = 1;
-       private static final String WHITEBALANCE = "WHITEBALANCE";
-
-       public static int readCameraMode(Context ctx) {
-               // Restore preferences
-               SharedPreferences settings = ctx.getSharedPreferences(CAMERA_SETTINGS,
-                               0);
-               int mode = settings.getInt(CAMERA_MODE, CAMERA_MODE_BW);
-               return mode;
-       }
-       
-       public static String readWhitebalace(Context ctx) {
-               // Restore preferences
-               SharedPreferences settings = ctx.getSharedPreferences(CAMERA_SETTINGS,
-                               0);
-               return settings.getString(WHITEBALANCE, "auto");
-       }
-
-       static public void setCameraMode(Context context, String mode) {
-               int m = 0;
-               if (mode.equals("BW")) {
-                       m = CAMERA_MODE_BW;
-               } else if (mode.equals("color"))
-                       m = CAMERA_MODE_COLOR;
-               setCameraMode(context, m);
-       }
-
-       private static String sizeToString(int[] size) {
-               return size[0] + "x" + size[1];
-       }
-
-       private static void parseStrToSize(String ssize, int[] size) {
-               String sz[] = ssize.split("x");
-               size[0] = Integer.valueOf(sz[0]);
-               size[1] = Integer.valueOf(sz[1]);
-       }
-
-       public static void readImageSize(Context ctx, int[] size) {
-               // Restore preferences
-               SharedPreferences settings = ctx.getSharedPreferences(CAMERA_SETTINGS,
-                               0);
-               size[0] = settings.getInt(IMAGE_WIDTH, 640);
-               size[1] = settings.getInt(IMAGE_HEIGHT, 480);
-
-       }
-
-       public static void setCameraMode(Context ctx, int mode) {
-               // Restore preferences
-               SharedPreferences settings = ctx.getSharedPreferences(CAMERA_SETTINGS,
-                               0);
-               Editor editor = settings.edit();
-               editor.putInt(CAMERA_MODE, mode);
-               editor.commit();
-       }
-
-       public static void setImageSize(Context ctx, String strsize) {
-               int size[] = { 0, 0 };
-               parseStrToSize(strsize, size);
-               setImageSize(ctx, size[0], size[1]);
-       }
-
-       public static void setImageSize(Context ctx, int width, int height) {
-               // Restore preferences
-               SharedPreferences settings = ctx.getSharedPreferences(CAMERA_SETTINGS,
-                               0);
-               Editor editor = settings.edit();
-               editor.putInt(IMAGE_WIDTH, width);
-               editor.putInt(IMAGE_HEIGHT, height);
-               editor.commit();
-       }
-
-       @Override
-       protected void onCreate(Bundle savedInstanceState) {
-               // TODO Auto-generated method stub
-               super.onCreate(savedInstanceState);
-               setContentView(R.layout.camerasettings);
-               int mode = readCameraMode(this);
-               int size[] = { 0, 0 };
-               readImageSize(this, size);
-
-               final Spinner size_spinner;
-               final Spinner mode_spinner;
-               final Spinner whitebalance_spinner;
-               size_spinner = (Spinner) findViewById(R.id.image_size);
-               mode_spinner = (Spinner) findViewById(R.id.camera_mode);
-               whitebalance_spinner = (Spinner) findViewById(R.id.whitebalance);
-
-               String strsize = sizeToString(size);
-               String strmode = modeToString(mode);
-               String wbmode = readWhitebalace(getApplicationContext());
-
-               String sizes[] = getResources().getStringArray(R.array.image_sizes);
-
-               int i = 1;
-               for (String x : sizes) {
-                       if (x.equals(strsize))
-                               break;
-                       i++;
-               }
-               if(i <= sizes.length)
-                       size_spinner.setSelection(i-1);
-
-               i = 1;
-               String modes[] =  getResources().getStringArray(R.array.camera_mode);
-               for (String x :modes) {
-                       if (x.equals(strmode))
-                               break;
-                       i++;
-               }
-               if(i <= modes.length)
-                       mode_spinner.setSelection(i-1);
-               
-               i = 1;
-               String wbmodes[] =  getResources().getStringArray(R.array.whitebalance);
-               for (String x :wbmodes) {
-                       if (x.equals(wbmode))
-                               break;
-                       i++;
-               }
-               if(i <= wbmodes.length)
-                       whitebalance_spinner.setSelection(i-1);
-
-               size_spinner.setOnItemSelectedListener(new OnItemSelectedListener() {
-
-                       @Override
-                       public void onItemSelected(AdapterView<?> arg0, View spinner,
-                                       int position, long arg3) {
-                               Object o = size_spinner.getItemAtPosition(position);
-                               if (o != null)
-                                       setImageSize(spinner.getContext(), (String) o);
-                       }
-
-                       @Override
-                       public void onNothingSelected(AdapterView<?> arg0) {
-
-                       }
-               });
-               mode_spinner.setOnItemSelectedListener(new OnItemSelectedListener() {
-
-                       @Override
-                       public void onItemSelected(AdapterView<?> arg0, View spinner,
-                                       int position, long arg3) {
-                               Object o = mode_spinner.getItemAtPosition(position);
-                               if (o != null)
-                                       setCameraMode(spinner.getContext(), (String) o);
-
-                       }
-
-                       @Override
-                       public void onNothingSelected(AdapterView<?> arg0) {
-
-                       }
-               });
-               
-               whitebalance_spinner.setOnItemSelectedListener(new OnItemSelectedListener() {
-
-                       @Override
-                       public void onItemSelected(AdapterView<?> arg0, View spinner,
-                                       int position, long arg3) {
-                               Object o = whitebalance_spinner.getItemAtPosition(position);
-                               if (o != null)
-                                       setWhitebalance(spinner.getContext(), (String) o);
-
-                       }
-
-
-                       @Override
-                       public void onNothingSelected(AdapterView<?> arg0) {
-
-                       }
-               });
-
-       }
-
-       public static void setWhitebalance(Context ctx, String o) {
-               SharedPreferences settings = ctx.getSharedPreferences(CAMERA_SETTINGS,
-                               0);
-               Editor editor = settings.edit();
-               editor.putString(WHITEBALANCE, o);
-               editor.commit();
-               
-       }
-
-       private String modeToString(int mode) {
-               switch (mode) {
-               case CAMERA_MODE_BW:
-                       return "BW";
-               case CAMERA_MODE_COLOR:
-                       return "color";
-               default:
-                       return "";
-               }
-       }
-}
diff --git a/android/android-jni/src/com/opencv/camera/NativePreviewer.java b/android/android-jni/src/com/opencv/camera/NativePreviewer.java
deleted file mode 100644 (file)
index fc2ad3c..0000000
+++ /dev/null
@@ -1,482 +0,0 @@
-package com.opencv.camera;
-
-import java.io.IOException;
-import java.lang.reflect.Method;
-import java.util.Date;
-import java.util.LinkedList;
-import java.util.List;
-
-import android.content.Context;
-import android.graphics.PixelFormat;
-import android.hardware.Camera;
-import android.hardware.Camera.PreviewCallback;
-import android.hardware.Camera.Size;
-import android.os.Handler;
-import android.util.AttributeSet;
-import android.util.Log;
-import android.view.SurfaceHolder;
-import android.view.SurfaceView;
-
-import com.opencv.camera.NativeProcessor.NativeProcessorCallback;
-import com.opencv.camera.NativeProcessor.PoolCallback;
-
-public class NativePreviewer extends SurfaceView implements
-               SurfaceHolder.Callback, Camera.PreviewCallback, NativeProcessorCallback {
-
-       private String whitebalance_mode = "auto";
-
-       /**
-        * Constructor useful for defining a NativePreviewer in android layout xml
-        * 
-        * @param context
-        * @param attributes
-        */
-       public NativePreviewer(Context context, AttributeSet attributes) {
-               super(context, attributes);
-               listAllCameraMethods();
-               // Install a SurfaceHolder.Callback so we get notified when the
-               // underlying surface is created and destroyed.
-               mHolder = getHolder();
-               mHolder.addCallback(this);
-               mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
-
-               /*
-                * TODO get this working! Can't figure out how to define these in xml
-                */
-               preview_width = attributes.getAttributeIntValue("opencv",
-                               "preview_width", 600);
-               preview_height = attributes.getAttributeIntValue("opencv",
-                               "preview_height", 600);
-
-               Log.d("NativePreviewer", "Trying to use preview size of "
-                               + preview_width + " " + preview_height);
-
-               processor = new NativeProcessor();
-
-               setZOrderMediaOverlay(false);
-       }
-
-       /**
-        * 
-        * @param context
-        * @param preview_width
-        *            the desired camera preview width - will attempt to get as
-        *            close to this as possible
-        * @param preview_height
-        *            the desired camera preview height
-        */
-       public NativePreviewer(Context context, int preview_width,
-                       int preview_height) {
-               super(context);
-
-               listAllCameraMethods();
-               // Install a SurfaceHolder.Callback so we get notified when the
-               // underlying surface is created and destroyed.
-               mHolder = getHolder();
-               mHolder.addCallback(this);
-               mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
-
-               this.preview_width = preview_width;
-               this.preview_height = preview_height;
-
-               processor = new NativeProcessor();
-               setZOrderMediaOverlay(false);
-
-       }
-
-       /**
-        * Only call in the onCreate function of the instantiating activity.
-        * 
-        * @param width
-        *            desired width
-        * @param height
-        *            desired height
-        */
-       public void setPreviewSize(int width, int height) {
-               preview_width = width;
-               preview_height = height;
-
-               Log.d("NativePreviewer", "Trying to use preview size of "
-                               + preview_width + " " + preview_height);
-
-       }
-
-       public void setParamsFromPrefs(Context ctx) {
-               int size[] = { 0, 0 };
-               CameraConfig.readImageSize(ctx, size);
-               int mode = CameraConfig.readCameraMode(ctx);
-               setPreviewSize(size[0], size[1]);
-		setGrayscale(mode == CameraConfig.CAMERA_MODE_BW);
-               whitebalance_mode = CameraConfig.readWhitebalace(ctx);
-       }
-
-       public void surfaceCreated(SurfaceHolder holder) {
-
-       }
-
-       public void surfaceDestroyed(SurfaceHolder holder) {
-               releaseCamera();
-       }
-
-       public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
-
-               try {
-                       initCamera(mHolder);
-               } catch (InterruptedException e) {
-                       // TODO Auto-generated catch block
-                       e.printStackTrace();
-                       return;
-               }
-
-               // Now that the size is known, set up the camera parameters and begin
-               // the preview.
-
-               Camera.Parameters parameters = mCamera.getParameters();
-               List<Camera.Size> pvsizes = mCamera.getParameters()
-                               .getSupportedPreviewSizes();
-               int best_width = 1000000;
-               int best_height = 1000000;
-               int bdist = 100000;
-               for (Size x : pvsizes) {
-                       if (Math.abs(x.width - preview_width) < bdist) {
-                               bdist = Math.abs(x.width - preview_width);
-                               best_width = x.width;
-                               best_height = x.height;
-                       }
-               }
-               preview_width = best_width;
-               preview_height = best_height;
-
-               Log.d("NativePreviewer", "Determined compatible preview size is: ("
-                               + preview_width + "," + preview_height + ")");
-
-               Log.d("NativePreviewer", "Supported params: "
-                               + mCamera.getParameters().flatten());
-
-               
-               // this is available in 8+
-               // parameters.setExposureCompensation(0);
-               if (parameters.getSupportedWhiteBalance().contains(whitebalance_mode)) {
-                       parameters.setWhiteBalance(whitebalance_mode);
-               }
-               if (parameters.getSupportedAntibanding().contains(
-                               Camera.Parameters.ANTIBANDING_OFF)) {
-                       parameters.setAntibanding(Camera.Parameters.ANTIBANDING_OFF);
-               }
-
-               List<String> fmodes = mCamera.getParameters().getSupportedFocusModes();
-               // for(String x: fmodes){
-
-               // }
-               
-       
-
-               if (parameters.get("meter-mode") != null)
-                       parameters.set("meter-mode", "meter-average");
-               int idx = fmodes.indexOf(Camera.Parameters.FOCUS_MODE_INFINITY);
-               if (idx != -1) {
-                       parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_INFINITY);
-               } else if (fmodes.indexOf(Camera.Parameters.FOCUS_MODE_FIXED) != -1) {
-                       parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_FIXED);
-               }
-
-               if (fmodes.indexOf(Camera.Parameters.FOCUS_MODE_AUTO) != -1) {
-                       hasAutoFocus = true;
-               }
-
-               List<String> scenemodes = mCamera.getParameters()
-                               .getSupportedSceneModes();
-               if (scenemodes != null)
-                       if (scenemodes.indexOf(Camera.Parameters.SCENE_MODE_ACTION) != -1) {
-                               parameters.setSceneMode(Camera.Parameters.SCENE_MODE_ACTION);
-                               Log.d("NativePreviewer", "set scenemode to action");
-                       }
-
-               parameters.setPreviewSize(preview_width, preview_height);
-
-               mCamera.setParameters(parameters);
-
-               pixelinfo = new PixelFormat();
-               pixelformat = mCamera.getParameters().getPreviewFormat();
-               PixelFormat.getPixelFormatInfo(pixelformat, pixelinfo);
-
-               Size preview_size = mCamera.getParameters().getPreviewSize();
-               preview_width = preview_size.width;
-               preview_height = preview_size.height;
-               int bufSize = preview_width * preview_height * pixelinfo.bitsPerPixel
-                               / 8;
-
-               // Must call this before calling addCallbackBuffer to get all the
-               // reflection variables setup
-               initForACB();
-               initForPCWB();
-
-		// Use only one buffer, so that we don't preview too many frames and bog
-		// down the system
-               byte[] buffer = new byte[bufSize];
-               addCallbackBuffer(buffer);
-               setPreviewCallbackWithBuffer();
-
-               mCamera.startPreview();
-
-       }
-
-       public void postautofocus(int delay) {
-               if (hasAutoFocus)
-                       handler.postDelayed(autofocusrunner, delay);
-
-       }
-
-       /**
-        * Demonstration of how to use onPreviewFrame. The frame is handed off to the
-        * NativeProcessor, and the buffer is returned to the camera's buffer queue
-        * via onDoneNativeProcessing once native processing has finished.
-        */
-       public void onPreviewFrame(byte[] data, Camera camera) {
-
-               if (start == null) {
-                       start = new Date();
-               }
-
-               processor.post(data, preview_width, preview_height, pixelformat,
-                               System.nanoTime(), this);
-
-               fcount++;
-               if (fcount % 100 == 0) {
-                       double ms = (new Date()).getTime() - start.getTime();
-                       Log.i("NativePreviewer", "fps:" + fcount / (ms / 1000.0));
-                       start = new Date();
-                       fcount = 0;
-               }
-
-       }
-
-       @Override
-       public void onDoneNativeProcessing(byte[] buffer) {
-               addCallbackBuffer(buffer);
-       }
-
-       public void addCallbackStack(LinkedList<PoolCallback> callbackstack) {
-               processor.addCallbackStack(callbackstack);
-       }
-
-       /**
-        * This must be called when the activity pauses, in Activity.onPause. This
-        * has the side effect of clearing the callback stack.
-        * 
-        */
-       public void onPause() {
-
-               releaseCamera();
-
-               addCallbackStack(null);
-
-               processor.stop();
-
-       }
-
-       public void onResume() {
-
-               processor.start();
-
-       }
-
-       private Method mPCWB;
-
-       private void initForPCWB() {
-
-               try {
-
-                       mPCWB = Class.forName("android.hardware.Camera").getMethod(
-                                       "setPreviewCallbackWithBuffer", PreviewCallback.class);
-
-               } catch (Exception e) {
-                       Log.e("NativePreviewer",
-                                       "Problem setting up for setPreviewCallbackWithBuffer: "
-                                                       + e.toString());
-               }
-
-       }
-
-       /**
-        * This method allows you to add a byte buffer to the queue of buffers to be
-        * used by preview. See:
-        * http://android.git.kernel.org/?p=platform/frameworks
-        * /base.git;a=blob;f=core/java/android/hardware/Camera.java;hb=9d
-        * b3d07b9620b4269ab33f78604a36327e536ce1
-        * 
-        * @param b
-        *            The buffer to register. Size should be width * height *
-        *            bitsPerPixel / 8.
-        */
-       private void addCallbackBuffer(byte[] b) {
-
-               try {
-
-                       mAcb.invoke(mCamera, b);
-               } catch (Exception e) {
-                       Log.e("NativePreviewer",
-                                       "invoking addCallbackBuffer failed: " + e.toString());
-               }
-       }
-
-       /**
-        * Use this method instead of setPreviewCallback if you want to use manually
-        * allocated buffers. Assumes that "this" implements Camera.PreviewCallback
-        */
-       private void setPreviewCallbackWithBuffer() {
-               // mCamera.setPreviewCallback(this);
-               // return;
-               try {
-
-			// If we were able to find the setPreviewCallbackWithBuffer method
-			// of Camera, we can now invoke it on our Camera instance, setting
-			// 'this' to be the callback handler.
-                       mPCWB.invoke(mCamera, this);
-
-                       // Log.d("NativePrevier","setPreviewCallbackWithBuffer: Called method");
-
-               } catch (Exception e) {
-
-                       Log.e("NativePreviewer", e.toString());
-               }
-       }
-
-       @SuppressWarnings("unused")
-       private void clearPreviewCallbackWithBuffer() {
-               // mCamera.setPreviewCallback(this);
-               // return;
-               try {
-
-			// If we were able to find the setPreviewCallbackWithBuffer method
-			// of Camera, we can now invoke it on our Camera instance, passing
-			// null to clear the callback handler.
-                       mPCWB.invoke(mCamera, (PreviewCallback) null);
-
-                       // Log.d("NativePrevier","setPreviewCallbackWithBuffer: cleared");
-
-               } catch (Exception e) {
-
-                       Log.e("NativePreviewer", e.toString());
-               }
-       }
-
-       /**
-        * These variables are re-used over and over by addCallbackBuffer
-        */
-       private Method mAcb;
-
-       private void initForACB() {
-               try {
-
-                       mAcb = Class.forName("android.hardware.Camera").getMethod(
-                                       "addCallbackBuffer", byte[].class);
-
-               } catch (Exception e) {
-                       Log.e("NativePreviewer",
-                                       "Problem setting up for addCallbackBuffer: " + e.toString());
-               }
-       }
-
-       private Runnable autofocusrunner = new Runnable() {
-
-               @Override
-               public void run() {
-                       mCamera.autoFocus(autocallback);
-               }
-       };
-
-       private Camera.AutoFocusCallback autocallback = new Camera.AutoFocusCallback() {
-
-               @Override
-               public void onAutoFocus(boolean success, Camera camera) {
-                       if (!success)
-                               postautofocus(1000);
-               }
-       };
-
-       /**
-        * This method will list all methods of the android.hardware.Camera class,
-        * even the hidden ones. With the information it provides, you can use the
-        * same approach I took below to expose methods that were written but hidden
-        * in Eclair.
-        */
-       private void listAllCameraMethods() {
-               try {
-                       Class<?> c = Class.forName("android.hardware.Camera");
-                       Method[] m = c.getMethods();
-                       for (int i = 0; i < m.length; i++) {
-                               Log.d("NativePreviewer", "  method:" + m[i].toString());
-                       }
-               } catch (Exception e) {
-                       // TODO Auto-generated catch block
-                       Log.e("NativePreviewer", e.toString());
-               }
-       }
-
-       private void initCamera(SurfaceHolder holder) throws InterruptedException {
-               if (mCamera == null) {
-			// The Surface has been created, acquire the camera and tell it
-			// where to draw.
-                       int i = 0;
-                       while (i++ < 5) {
-                               try {
-                                       mCamera = Camera.open();
-                                       break;
-                               } catch (RuntimeException e) {
-                                       Thread.sleep(200);
-                               }
-                       }
-                       try {
-                               mCamera.setPreviewDisplay(holder);
-                       } catch (IOException exception) {
-                               mCamera.release();
-                               mCamera = null;
-
-                       } catch (RuntimeException e) {
-                               Log.e("camera", "stacktrace", e);
-                       }
-               }
-       }
-
-       private void releaseCamera() {
-               if (mCamera != null) {
-                       // Surface will be destroyed when we return, so stop the preview.
-			// Because the CameraDevice object is not a shared resource, it's
-			// very important to release it when the activity is paused.
-                       mCamera.stopPreview();
-                       mCamera.release();
-               }
-
-               // processor = null;
-               mCamera = null;
-               mAcb = null;
-               mPCWB = null;
-       }
-
-       private Handler handler = new Handler();
-
-       private Date start;
-       private int fcount = 0;
-       private boolean hasAutoFocus = false;
-       private SurfaceHolder mHolder;
-       private Camera mCamera;
-
-       private NativeProcessor processor;
-
-       private int preview_width, preview_height;
-       private int pixelformat;
-       private PixelFormat pixelinfo;
-
-       public void setGrayscale(boolean b) {
-               processor.setGrayscale(b);
-
-       }
-
-}
\ No newline at end of file
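
Editor's sketch (not from this commit): pulling the comments above together — construct the previewer in onCreate, forward onPause/onResume, and install a callback stack — a hypothetical host activity might look roughly like the code below. MyPreviewActivity and the 640x480 request are assumptions, and a real layout would usually also add a GL2CameraViewer on top of the preview surface.

import java.util.LinkedList;

import android.app.Activity;
import android.os.Bundle;
import android.util.Log;

import com.opencv.camera.NativePreviewer;
import com.opencv.camera.NativeProcessor;
import com.opencv.camera.NativeProcessor.PoolCallback;
import com.opencv.jni.image_pool;

public class MyPreviewActivity extends Activity {
	private NativePreviewer previewer;

	// Any PoolCallback works here; index 0 of the pool holds the live frame.
	private final PoolCallback frameLogger = new PoolCallback() {
		@Override
		public void process(int idx, image_pool pool, long timestamp,
				NativeProcessor nativeProcessor) {
			Log.d("preview", "frame " + idx + " at t=" + timestamp + " ns");
		}
	};

	@Override
	protected void onCreate(Bundle savedInstanceState) {
		super.onCreate(savedInstanceState);
		previewer = new NativePreviewer(getApplicationContext(), 640, 480);
		setContentView(previewer);
	}

	@Override
	protected void onResume() {
		super.onResume();
		previewer.setParamsFromPrefs(getApplicationContext());
		previewer.onResume();
		LinkedList<PoolCallback> stack = new LinkedList<PoolCallback>();
		stack.add(frameLogger);
		previewer.addCallbackStack(stack);
	}

	@Override
	protected void onPause() {
		super.onPause();
		// Releases the camera, clears the callback stack and stops the processor.
		previewer.onPause();
	}
}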
diff --git a/android/android-jni/src/com/opencv/camera/NativeProcessor.java b/android/android-jni/src/com/opencv/camera/NativeProcessor.java
deleted file mode 100644 (file)
index 4dce3bb..0000000
+++ /dev/null
@@ -1,285 +0,0 @@
-package com.opencv.camera;
-
-import java.util.LinkedList;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.locks.Lock;
-import java.util.concurrent.locks.ReentrantLock;
-
-import android.graphics.PixelFormat;
-import android.util.Log;
-
-import com.opencv.jni.image_pool;
-import com.opencv.jni.opencv;
-
-/** The NativeProcessor is a native processing stack engine.
- * 
- * What this means is that the NativeProcessor handles loading
- * live camera frames into native memory space, i.e. the image_pool
- * and then calling a stack of PoolCallback's and passing them the
- * image_pool.
- * 
- * The image_pool index 0 is populated with the live video image
- * 
- * Any modifications to the pool are made in place, so you may
- * pass on changes to the pool to the next PoolCallback in the stack.
- *
- */
-public class NativeProcessor {
-	/** Users that would like to have access to live video frames
-	 * should implement a PoolCallback.
-	 * The idx and pool contain the images; specifically, idx == 0 is the
-	 * live video frame.
-        */
-       static public interface PoolCallback {
-               void process(int idx, image_pool pool, long timestamp,
-                               NativeProcessor nativeProcessor);
-       }
-
-       
-
-	/** At every frame, each PoolCallback is called in order and is passed
-	 * the same pool and index.
-        * 
-        * @param stack  A list of PoolCallback objects, that will be called in order
-        */
-       public void addCallbackStack(LinkedList<PoolCallback> stack) {
-
-               try {
-                       while (!stacklock.tryLock(10, TimeUnit.MILLISECONDS)) {
-
-                       }
-                       try {
-                               nextStack = stack;
-                       } finally {
-                               stacklock.unlock();
-                       }
-               } catch (InterruptedException e) {
-                       // TODO Auto-generated catch block
-                       e.printStackTrace();
-
-               }
-
-       }
-
-       /**
-        * Create a NativeProcessor. The processor will not start running until
-        * start is called, at which point it will operate in its own thread and
-        * sleep until a post is called. The processor should not be started until
-        * an onSurfaceChange event, and should be shut down when the surface is
-        * destroyed by calling stop(), which interrupts the processing thread.
-        * 
-        */
-       public NativeProcessor() {
-               gray_scale_only = false;
-       }
-
-       
-       
-       /** Grayscale only is much faster because the yuv does not get decoded, and grayscale is only one
-        * byte per pixel - giving fast opengl texture loading.
-        * 
-        * You still have access to the whole yuv image, but grayscale is only immediately available to
-        * use without further effort.
-        * 
-        * Suggestion - use grayscale only and save your yuv images to disk if you would like color images
-        * 
-        * Also, in grayscale mode, the images in the pool are only single channel, so please keep this in mind
-        * when accessing the color images - check cv::Mat::channels() or cv::Mat::type() if you're working
-        * with color channels.
-        * 
-        * @param grayscale true if you want to only process grayscale images
-        */
-       public void setGrayscale(boolean grayscale){
-               gray_scale_only = grayscale;
-       }
-       
-
-       /**
-        * A callback that allows the NativeProcessor to pass back the buffer when
-        * it has completed processing a frame.
-        */
-       static protected interface NativeProcessorCallback {
-               /**
-		 * Called after processing, meant to be received by the NativePreviewer,
-		 * which reuses the byte buffer for the camera preview.
-                * 
-                * @param buffer
-                *            the buffer passed to the NativeProcessor with post.
-                */
-               void onDoneNativeProcessing(byte[] buffer);
-       }
-
-       
-       protected void stop() {
-               mthread.interrupt();
-               try {
-                       mthread.join();
-               } catch (InterruptedException e) {
-                       Log.w("NativeProcessor",
-					"interrupted while stopping: " + e.getMessage());
-               }
-               mthread = null;
-       }
-
-       protected void start() {
-               mthread = new ProcessorThread();
-               mthread.start();
-       }
-       /**
-        * post is used to notify the processor that a preview frame is ready; it
-        * returns almost immediately. If the processor is busy, it returns false
-        * and the call is essentially a no-op.
-        * 
-        * @param buffer
-        *            a preview frame from the Android Camera onPreviewFrame
-        *            callback
-        * @param width
-        *            of preview frame
-        * @param height
-        *            of preview frame
-        * @param format
-        *            of preview frame
-        * @return true if the processor wasn't busy and accepted the post, false if
-        *         the processor is still processing.
-        */
-
-       protected boolean post(byte[] buffer, int width, int height, int format,
-                       long timestamp, NativeProcessorCallback callback) {
-
-               lock.lock();
-               try {
-                       NPPostObject pobj = new NPPostObject(buffer, width, height, format,
-                                       timestamp, callback);
-                       postobjects.addFirst(pobj);
-               } finally {
-                       lock.unlock();
-               }
-               return true;
-
-       }
-       
-       private class ProcessorThread extends Thread {
-
-               private void process(NPPostObject pobj) throws Exception {
-
-                       if (pobj.format == PixelFormat.YCbCr_420_SP) {
-                               // add as color image, because we know how to decode this
-                               opencv.addYUVtoPool(pool, pobj.buffer, 0, pobj.width,
-                                               pobj.height, gray_scale_only);
-
-                       } else if (pobj.format == PixelFormat.YCbCr_422_SP) {
-                               // add as gray image, because this format is not coded
-                               // for...//TODO figure out how to decode this
-                               // format
-                               opencv.addYUVtoPool(pool, pobj.buffer, 0, pobj.width,
-                                               pobj.height, true);
-                       } else
-                               throw new Exception("bad pixel format!");
-
-                       for (PoolCallback x : stack) {
-                               if (interrupted()) {
-                                       throw new InterruptedException(
-							"Native Processor interrupted while processing");
-                               }
-                               x.process(0, pool, pobj.timestamp, NativeProcessor.this);
-                       }
-
-                       pobj.done(); // tell the postobject that we're done doing
-                                                       // all the processing.
-
-               }
-
-               @Override
-               public void run() {
-
-                       try {
-                               while (true) {
-                                       yield();
-
-                                       while (!stacklock.tryLock(5, TimeUnit.MILLISECONDS)) {
-                                       }
-                                       try {
-                                               if (nextStack != null) {
-                                                       stack = nextStack;
-                                                       nextStack = null;
-                                               }
-                                       } finally {
-                                               stacklock.unlock();
-                                       }
-
-                                       NPPostObject pobj = null;
-
-                                       while (!lock.tryLock(5, TimeUnit.MILLISECONDS)) {
-                                       }
-                                       try {
-                                               if (postobjects.isEmpty())
-                                                       continue;
-                                               pobj = postobjects.removeLast();
-
-                                       } finally {
-                                               lock.unlock();
-
-                                       }
-
-                                       if (interrupted())
-                                               throw new InterruptedException();
-
-                                       if (stack != null && pobj != null)
-                                               process(pobj);
-
-                               }
-                       } catch (InterruptedException e) {
-
-                               Log.i("NativeProcessor",
-						"native processor interrupted, ending now");
-
-                       } catch (Exception e) {
-
-                               e.printStackTrace();
-                       } finally {
-
-                       }
-               }
-
-       }
-       
-       static private class NPPostObject {
-               public NPPostObject(byte[] buffer, int width, int height, int format,
-                               long timestamp, NativeProcessorCallback callback) {
-                       this.buffer = buffer;
-                       this.width = width;
-                       this.height = height;
-                       this.format = format;
-                       this.timestamp = timestamp;
-                       this.callback = callback;
-               }
-
-               public void done() {
-                       callback.onDoneNativeProcessing(buffer);
-
-               }
-
-               int width, height;
-               byte[] buffer;
-               int format;
-               long timestamp;
-               NativeProcessorCallback callback;
-       }
-
-
-       private LinkedList<NPPostObject> postobjects = new LinkedList<NPPostObject>();
-
-       private image_pool pool = new image_pool();
-
-       private final Lock lock = new ReentrantLock();
-
-       private LinkedList<PoolCallback> stack = new LinkedList<PoolCallback>();
-       private boolean gray_scale_only;
-       
-       private Lock stacklock = new ReentrantLock();
-
-       private LinkedList<PoolCallback> nextStack;
-       
-       private ProcessorThread mthread;
-
-}
\ No newline at end of file
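
Editor's sketch (not from this commit): the javadoc above says each PoolCallback in the stack runs in order on every frame, all sharing one image_pool with the live frame at index 0, and timestamps are the nanosecond values posted by NativePreviewer. The helper below illustrates that ordering; ProcessorStackSketch, TimingCallback and buildStack are hypothetical names, and the resulting list would be handed to NativePreviewer.addCallbackStack(). Because pool modifications are in place, anything an earlier callback writes into the pool is visible to the later ones.

import java.util.LinkedList;

import android.util.Log;

import com.opencv.camera.NativeProcessor;
import com.opencv.camera.NativeProcessor.PoolCallback;
import com.opencv.jni.image_pool;

public class ProcessorStackSketch {

	/** Logs the interval between consecutive frames; runs first in the stack. */
	static class TimingCallback implements PoolCallback {
		private long last;

		@Override
		public void process(int idx, image_pool pool, long timestamp,
				NativeProcessor nativeProcessor) {
			if (last != 0) {
				Log.d("timing", "frame interval: " + (timestamp - last) / 1e6 + " ms");
			}
			last = timestamp;
		}
	}

	/** Builds an ordered stack: timing first, then e.g. a GL drawing callback last. */
	public static LinkedList<PoolCallback> buildStack(PoolCallback drawer) {
		LinkedList<PoolCallback> stack = new LinkedList<PoolCallback>();
		stack.add(new TimingCallback());
		stack.add(drawer);
		return stack;
	}
}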
diff --git a/android/android-jni/src/com/opencv/opengl/GL2CameraViewer.java b/android/android-jni/src/com/opencv/opengl/GL2CameraViewer.java
deleted file mode 100644 (file)
index 2498ad7..0000000
+++ /dev/null
@@ -1,405 +0,0 @@
-/*
- * Copyright (C) 2009 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.opencv.opengl;
-/*
- * Copyright (C) 2008 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-
-import javax.microedition.khronos.egl.EGL10;
-import javax.microedition.khronos.egl.EGLConfig;
-import javax.microedition.khronos.egl.EGLContext;
-import javax.microedition.khronos.egl.EGLDisplay;
-import javax.microedition.khronos.opengles.GL10;
-
-import com.opencv.camera.NativeProcessor;
-import com.opencv.camera.NativeProcessor.PoolCallback;
-import com.opencv.jni.glcamera;
-import com.opencv.jni.image_pool;
-
-import android.content.Context;
-import android.graphics.PixelFormat;
-import android.opengl.GLSurfaceView;
-import android.util.AttributeSet;
-import android.util.Log;
-
-
-
-/**
- * A simple GLSurfaceView sub-class that demonstrate how to perform
- * OpenGL ES 2.0 rendering into a GL Surface. Note the following important
- * details:
- *
- * - The class must use a custom context factory to enable 2.0 rendering.
- *   See ContextFactory class definition below.
- *
- * - The class must use a custom EGLConfigChooser to be able to select
- *   an EGLConfig that supports 2.0. This is done by providing a config
- *   specification to eglChooseConfig() that has the attribute
- *   EGL10.EGL_RENDERABLE_TYPE containing the EGL_OPENGL_ES2_BIT flag
- *   set. See ConfigChooser class definition below.
- *
- * - The class must select the surface's format, then choose an EGLConfig
- *   that matches it exactly (with regards to red/green/blue/alpha channels
- *   bit depths). Failure to do so would result in an EGL_BAD_MATCH error.
- */
-public class GL2CameraViewer extends GLSurfaceView{
-    private static String TAG = "GL2JNIView";
-    private static final boolean DEBUG = false;
-       private PoolCallback poolcallback = new PoolCallback() {
-               
-               @Override
-               public void process(int idx, image_pool pool, long timestamp,
-                               NativeProcessor nativeProcessor){
-               
-                       
-                               drawMatToGL(idx, pool);
-                               
-                               requestRender();
-                       
-                       
-               }
-       };
-
-        public GL2CameraViewer(Context context,AttributeSet attributeSet) {
-               super(context,attributeSet);
-               
-               init(false, 0, 0);
-               setZOrderMediaOverlay(true);
-        }
-    public GL2CameraViewer(Context context) {
-        super(context);
-        init(false, 0, 0);
-        setZOrderMediaOverlay(true);
-    }
-
-    public GL2CameraViewer(Context context, boolean translucent, int depth, int stencil) {
-        super(context);
-        init(translucent, depth, stencil);
-        setZOrderMediaOverlay(true);
-    }
-
-    private void init(boolean translucent, int depth, int stencil) {
-
-       
-        /* By default, GLSurfaceView() creates a RGB_565 opaque surface.
-         * If we want a translucent one, we should change the surface's
-         * format here, using PixelFormat.TRANSLUCENT, which for GL Surfaces
-         * is interpreted as any 32-bit surface with alpha by SurfaceFlinger.
-         */
-        if (translucent) {
-            this.getHolder().setFormat(PixelFormat.TRANSLUCENT);
-        }
-
-        /* Setup the context factory for 2.0 rendering.
-         * See ContextFactory class definition below
-         */
-        setEGLContextFactory(new ContextFactory());
-
-        /* We need to choose an EGLConfig that matches the format of
-         * our surface exactly. This is going to be done in our
-         * custom config chooser. See ConfigChooser class definition
-         * below.
-         */
-        setEGLConfigChooser( translucent ?
-                             new ConfigChooser(8, 8, 8, 8, depth, stencil) :
-                             new ConfigChooser(5, 6, 5, 0, depth, stencil) );
-
-        /* Set the renderer responsible for frame rendering */
-        setRenderer(new Renderer());
-        setRenderMode(RENDERMODE_WHEN_DIRTY);
-        
-    }
-
-    private static class ContextFactory implements GLSurfaceView.EGLContextFactory {
-        private static int EGL_CONTEXT_CLIENT_VERSION = 0x3098;
-        public EGLContext createContext(EGL10 egl, EGLDisplay display, EGLConfig eglConfig) {
-            Log.w(TAG, "creating OpenGL ES 2.0 context");
-            checkEglError("Before eglCreateContext", egl);
-            int[] attrib_list = {EGL_CONTEXT_CLIENT_VERSION, 2, EGL10.EGL_NONE };
-            EGLContext context = egl.eglCreateContext(display, eglConfig, EGL10.EGL_NO_CONTEXT, attrib_list);
-            checkEglError("After eglCreateContext", egl);
-            return context;
-        }
-
-        public void destroyContext(EGL10 egl, EGLDisplay display, EGLContext context) {
-            egl.eglDestroyContext(display, context);
-        }
-    }
-
-    private static void checkEglError(String prompt, EGL10 egl) {
-        int error;
-        while ((error = egl.eglGetError()) != EGL10.EGL_SUCCESS) {
-            Log.e(TAG, String.format("%s: EGL error: 0x%x", prompt, error));
-        }
-    }
-
-    private static class ConfigChooser implements GLSurfaceView.EGLConfigChooser {
-
-        public ConfigChooser(int r, int g, int b, int a, int depth, int stencil) {
-            mRedSize = r;
-            mGreenSize = g;
-            mBlueSize = b;
-            mAlphaSize = a;
-            mDepthSize = depth;
-            mStencilSize = stencil;
-        }
-
-        /* This EGL config specification is used to specify 2.0 rendering.
-         * We use a minimum size of 4 bits for red/green/blue, but will
-         * perform actual matching in chooseConfig() below.
-         */
-        private static int EGL_OPENGL_ES2_BIT = 4;
-        private static int[] s_configAttribs2 =
-        {
-            EGL10.EGL_RED_SIZE, 4,
-            EGL10.EGL_GREEN_SIZE, 4,
-            EGL10.EGL_BLUE_SIZE, 4,
-            EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
-            EGL10.EGL_NONE
-        };
-
-        public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display) {
-
-            /* Get the number of minimally matching EGL configurations
-             */
-            int[] num_config = new int[1];
-            egl.eglChooseConfig(display, s_configAttribs2, null, 0, num_config);
-
-            int numConfigs = num_config[0];
-
-            if (numConfigs <= 0) {
-                throw new IllegalArgumentException("No configs match configSpec");
-            }
-
-            /* Allocate then read the array of minimally matching EGL configs
-             */
-            EGLConfig[] configs = new EGLConfig[numConfigs];
-            egl.eglChooseConfig(display, s_configAttribs2, configs, numConfigs, num_config);
-
-            if (DEBUG) {
-                 printConfigs(egl, display, configs);
-            }
-            /* Now return the "best" one
-             */
-            return chooseConfig(egl, display, configs);
-        }
-
-        public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display,
-                EGLConfig[] configs) {
-            for(EGLConfig config : configs) {
-                int d = findConfigAttrib(egl, display, config,
-                        EGL10.EGL_DEPTH_SIZE, 0);
-                int s = findConfigAttrib(egl, display, config,
-                        EGL10.EGL_STENCIL_SIZE, 0);
-
-                // We need at least mDepthSize and mStencilSize bits
-                if (d < mDepthSize || s < mStencilSize)
-                    continue;
-
-                // We want an *exact* match for red/green/blue/alpha
-                int r = findConfigAttrib(egl, display, config,
-                        EGL10.EGL_RED_SIZE, 0);
-                int g = findConfigAttrib(egl, display, config,
-                            EGL10.EGL_GREEN_SIZE, 0);
-                int b = findConfigAttrib(egl, display, config,
-                            EGL10.EGL_BLUE_SIZE, 0);
-                int a = findConfigAttrib(egl, display, config,
-                        EGL10.EGL_ALPHA_SIZE, 0);
-
-                if (r == mRedSize && g == mGreenSize && b == mBlueSize && a == mAlphaSize)
-                    return config;
-            }
-            return null;
-        }
-
-        private int findConfigAttrib(EGL10 egl, EGLDisplay display,
-                EGLConfig config, int attribute, int defaultValue) {
-
-            if (egl.eglGetConfigAttrib(display, config, attribute, mValue)) {
-                return mValue[0];
-            }
-            return defaultValue;
-        }
-
-        private void printConfigs(EGL10 egl, EGLDisplay display,
-            EGLConfig[] configs) {
-            int numConfigs = configs.length;
-            Log.w(TAG, String.format("%d configurations", numConfigs));
-            for (int i = 0; i < numConfigs; i++) {
-                Log.w(TAG, String.format("Configuration %d:\n", i));
-                printConfig(egl, display, configs[i]);
-            }
-        }
-
-        private void printConfig(EGL10 egl, EGLDisplay display,
-                EGLConfig config) {
-            int[] attributes = {
-                    EGL10.EGL_BUFFER_SIZE,
-                    EGL10.EGL_ALPHA_SIZE,
-                    EGL10.EGL_BLUE_SIZE,
-                    EGL10.EGL_GREEN_SIZE,
-                    EGL10.EGL_RED_SIZE,
-                    EGL10.EGL_DEPTH_SIZE,
-                    EGL10.EGL_STENCIL_SIZE,
-                    EGL10.EGL_CONFIG_CAVEAT,
-                    EGL10.EGL_CONFIG_ID,
-                    EGL10.EGL_LEVEL,
-                    EGL10.EGL_MAX_PBUFFER_HEIGHT,
-                    EGL10.EGL_MAX_PBUFFER_PIXELS,
-                    EGL10.EGL_MAX_PBUFFER_WIDTH,
-                    EGL10.EGL_NATIVE_RENDERABLE,
-                    EGL10.EGL_NATIVE_VISUAL_ID,
-                    EGL10.EGL_NATIVE_VISUAL_TYPE,
-                    0x3030, // EGL10.EGL_PRESERVED_RESOURCES,
-                    EGL10.EGL_SAMPLES,
-                    EGL10.EGL_SAMPLE_BUFFERS,
-                    EGL10.EGL_SURFACE_TYPE,
-                    EGL10.EGL_TRANSPARENT_TYPE,
-                    EGL10.EGL_TRANSPARENT_RED_VALUE,
-                    EGL10.EGL_TRANSPARENT_GREEN_VALUE,
-                    EGL10.EGL_TRANSPARENT_BLUE_VALUE,
-                    0x3039, // EGL10.EGL_BIND_TO_TEXTURE_RGB,
-                    0x303A, // EGL10.EGL_BIND_TO_TEXTURE_RGBA,
-                    0x303B, // EGL10.EGL_MIN_SWAP_INTERVAL,
-                    0x303C, // EGL10.EGL_MAX_SWAP_INTERVAL,
-                    EGL10.EGL_LUMINANCE_SIZE,
-                    EGL10.EGL_ALPHA_MASK_SIZE,
-                    EGL10.EGL_COLOR_BUFFER_TYPE,
-                    EGL10.EGL_RENDERABLE_TYPE,
-                    0x3042 // EGL10.EGL_CONFORMANT
-            };
-            String[] names = {
-                    "EGL_BUFFER_SIZE",
-                    "EGL_ALPHA_SIZE",
-                    "EGL_BLUE_SIZE",
-                    "EGL_GREEN_SIZE",
-                    "EGL_RED_SIZE",
-                    "EGL_DEPTH_SIZE",
-                    "EGL_STENCIL_SIZE",
-                    "EGL_CONFIG_CAVEAT",
-                    "EGL_CONFIG_ID",
-                    "EGL_LEVEL",
-                    "EGL_MAX_PBUFFER_HEIGHT",
-                    "EGL_MAX_PBUFFER_PIXELS",
-                    "EGL_MAX_PBUFFER_WIDTH",
-                    "EGL_NATIVE_RENDERABLE",
-                    "EGL_NATIVE_VISUAL_ID",
-                    "EGL_NATIVE_VISUAL_TYPE",
-                    "EGL_PRESERVED_RESOURCES",
-                    "EGL_SAMPLES",
-                    "EGL_SAMPLE_BUFFERS",
-                    "EGL_SURFACE_TYPE",
-                    "EGL_TRANSPARENT_TYPE",
-                    "EGL_TRANSPARENT_RED_VALUE",
-                    "EGL_TRANSPARENT_GREEN_VALUE",
-                    "EGL_TRANSPARENT_BLUE_VALUE",
-                    "EGL_BIND_TO_TEXTURE_RGB",
-                    "EGL_BIND_TO_TEXTURE_RGBA",
-                    "EGL_MIN_SWAP_INTERVAL",
-                    "EGL_MAX_SWAP_INTERVAL",
-                    "EGL_LUMINANCE_SIZE",
-                    "EGL_ALPHA_MASK_SIZE",
-                    "EGL_COLOR_BUFFER_TYPE",
-                    "EGL_RENDERABLE_TYPE",
-                    "EGL_CONFORMANT"
-            };
-            int[] value = new int[1];
-            for (int i = 0; i < attributes.length; i++) {
-                int attribute = attributes[i];
-                String name = names[i];
-                if ( egl.eglGetConfigAttrib(display, config, attribute, value)) {
-                    Log.w(TAG, String.format("  %s: %d\n", name, value[0]));
-                } else {
-                    // Log.w(TAG, String.format("  %s: failed\n", name));
-                    while (egl.eglGetError() != EGL10.EGL_SUCCESS);
-                }
-            }
-        }
-
-        // Subclasses can adjust these values:
-        protected int mRedSize;
-        protected int mGreenSize;
-        protected int mBlueSize;
-        protected int mAlphaSize;
-        protected int mDepthSize;
-        protected int mStencilSize;
-        private int[] mValue = new int[1];
-    }
-
-    glcamera mglcamera;
-    public void drawMatToGL(int idx, image_pool pool){
-       if(mglcamera != null)
-               mglcamera.drawMatToGL(idx, pool);
-       else
-               Log.e("android-opencv", "null glcamera!!!!");
-    }
-    
-    private class Renderer implements GLSurfaceView.Renderer {
-       
-        public void onDrawFrame(GL10 gl) {
-               
-            mglcamera.step();
-        }
-
-        public void onSurfaceChanged(GL10 gl, int width, int height) {
-               
-            mglcamera.init(width, height);
-        }
-
-        public void onSurfaceCreated(GL10 gl, EGLConfig config) {
-           
-        }
-    }
-
-
-       @Override
-       public void onPause() {
-               mglcamera = null;
-               // TODO Auto-generated method stub
-               super.onPause();
-               
-       }
-
-       @Override
-       public void onResume() {
-               mglcamera = new glcamera();
-               // TODO Auto-generated method stub
-               super.onResume();
-               
-       }
-
-       public PoolCallback getDrawCallback() {
-               // TODO Auto-generated method stub
-               return poolcallback;
-       }
-
-
-}
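
Note on the ConfigChooser removed above: it calls eglChooseConfig twice - once with a null array to get the number of minimally matching configurations, then again to fetch them - and returns the first entry with at least the requested depth/stencil bits and an exact red/green/blue/alpha match. A minimal sketch of the same selection against the native EGL C API (illustrative only; the helper name and the fixed-size array are assumptions, not code from this tree):

    #include <EGL/egl.h>

    // Pick the first config with >= depth/stencil bits and an exact RGBA match,
    // mirroring the Java ConfigChooser logic deleted in this commit.
    static EGLConfig chooseConfigNative(EGLDisplay dpy, int r, int g, int b, int a,
                                        int depth, int stencil)
    {
        const EGLint spec[] = {
            EGL_RED_SIZE, 4, EGL_GREEN_SIZE, 4, EGL_BLUE_SIZE, 4,
            EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,   // GLES2-capable configs only
            EGL_NONE
        };
        EGLint n = 0;
        eglChooseConfig(dpy, spec, NULL, 0, &n);       // 1st pass: count matches
        if (n <= 0) return NULL;
        if (n > 64) n = 64;
        EGLConfig configs[64];
        eglChooseConfig(dpy, spec, configs, n, &n);    // 2nd pass: fetch them
        for (EGLint i = 0; i < n; ++i) {
            EGLint d, s, cr, cg, cb, ca;
            eglGetConfigAttrib(dpy, configs[i], EGL_DEPTH_SIZE, &d);
            eglGetConfigAttrib(dpy, configs[i], EGL_STENCIL_SIZE, &s);
            eglGetConfigAttrib(dpy, configs[i], EGL_RED_SIZE, &cr);
            eglGetConfigAttrib(dpy, configs[i], EGL_GREEN_SIZE, &cg);
            eglGetConfigAttrib(dpy, configs[i], EGL_BLUE_SIZE, &cb);
            eglGetConfigAttrib(dpy, configs[i], EGL_ALPHA_SIZE, &ca);
            if (d >= depth && s >= stencil &&
                cr == r && cg == g && cb == b && ca == a)
                return configs[i];                     // first acceptable config wins
        }
        return NULL;
    }
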
diff --git a/android/android-opencv.mk.in b/android/android-opencv.mk.in
deleted file mode 100644 (file)
index 5d5d446..0000000
+++ /dev/null
@@ -1,38 +0,0 @@
-#you may override this if you move the build
-#just define it before including this or on the command line - or with
-#an environment variable
-#this points to the root of the opencv trunk - where the original opencv 
-#sources are - with modules 3rdparty ...
-ifndef OPENCV_ROOT
-OPENCV_ROOT := ${opencv_root}
-endif
-
-#you may override this same as above
-#this points to the actual directory that you built opencv for android from,
-#maybe under opencv/android/build
-ifndef OPENCV_BUILD_ROOT
-OPENCV_BUILD_ROOT := ${CMAKE_BINARY_DIR}
-endif
-
-OPENCV_INCLUDES := ${android_module_include_dirs}
-
-ANDROID_OPENCV_INCLUDES := $(OPENCV_ROOT)/android/android-jni/jni
-
-ARMOBJS := local/armeabi
-ARMOBJS_V7A := local/armeabi-v7a
-
-OPENCV_LIB_DIRS := -L$(OPENCV_BUILD_ROOT)/obj/$(ARMOBJS_V7A) \
-    -L$(OPENCV_BUILD_ROOT)/obj/$(ARMOBJS) -L$(OPENCV_BUILD_ROOT)/bin/ndk/$(ARMOBJS) \
-    -L$(OPENCV_BUILD_ROOT)/bin/ndk/$(ARMOBJS_V7A)
-
-ANDROID_OPENCV_LIB_DIRS := -L$(OPENCV_ROOT)/android/android-jni/libs/armeabi-v7a \
-    -L$(OPENCV_ROOT)/android/android-jni/libs/armeabi
-
-#the order of linking is very important - some entries here may be out of order,
-#but modules that depend on others must come before their dependencies...
-
-OPENCV_LIBS := $(OPENCV_LIB_DIRS) -lopencv_calib3d -lopencv_features2d -lopencv_objdetect -lopencv_imgproc \
-     -lopencv_video  -lopencv_highgui -lopencv_ml -lopencv_legacy -lopencv_core -lopencv_lapack -lopencv_flann \
-    -lzlib -lpng -ljpeg -ljasper
-ANDROID_OPENCV_LIBS := -landroid-opencv $(ANDROID_OPENCV_LIB_DIRS)
-    
diff --git a/android/android-opencv/AndroidManifest.xml b/android/android-opencv/AndroidManifest.xml
new file mode 100644 (file)
index 0000000..8d3efb0
--- /dev/null
@@ -0,0 +1,29 @@
+<?xml version="1.0" encoding="utf-8"?>
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+       package="com.opencv" android:versionCode="1" android:versionName="0.1">
+
+       <application android:debuggable="true">
+               <!-- The activity tag here is currently not used. The main project TicTacToeMain 
+                       must currently redefine the activities to be used from the libraries. However 
+                       later the tools will pick up the activities from here and merge them automatically, 
+                       so it's best to define your activities here like for any regular Android 
+                       project. -->
+               <activity android:name="com.opencv.OpenCV">
+                       <intent-filter>
+                               <action android:name="android.intent.action.MAIN" />
+                               <category android:name="android.intent.category.LAUNCHER" />
+                       </intent-filter>
+               </activity>
+               <activity android:name="com.opencv.calibration.ChessBoardChooser" />
+               <activity android:name="com.opencv.calibration.CameraConfig" />
+               <activity android:name="com.opencv.calibration.CalibrationViewer" />
+               <service android:name="com.opencv.calibration.services.CalibrationService" />
+       </application>
+       <uses-sdk android:minSdkVersion="7" />
+
+       <!-- set the opengl version -->
+       <uses-feature android:glEsVersion="0x00020000" />
+       <!-- set the opengl version -->
+       <uses-permission android:name="android.permission.CAMERA"></uses-permission>
+
+</manifest> 
diff --git a/android/android-opencv/AndroidOpenCVConfig.cmake.in b/android/android-opencv/AndroidOpenCVConfig.cmake.in
new file mode 100644 (file)
index 0000000..53cdfc2
--- /dev/null
@@ -0,0 +1,44 @@
+# ============================================================================
+#  The AndroidOpenCV CMake configuration file
+#
+#             ** File generated automatically, do not modify **
+#
+#  Usage from an external project:
+#    In your CMakeLists.txt, add these lines:
+#
+#    FIND_PACKAGE(AndroidOpenCV REQUIRED )
+#    TARGET_LINK_LIBRARIES(MY_TARGET_NAME ${AndroidOpenCV_LIBS})
+#
+#    This file will define the following variables:
+#      - AndroidOpenCV_LIBS      : The list of libraries to link against.
+#      - AndroidOpenCV_LIB_DIR   : The directory where lib files are. 
+#                                  Calling LINK_DIRECTORIES with this path
+#                                  is NOT needed.
+#      - AndroidOpenCV_INCLUDE_DIRS   : The AndroidOpenCV include directories.
+#      - AndroidOpenCV_SWIG_DIR : The swig path
+#
+# ===========================================================================
+
+
+# ======================================================
+# Include directories to add to the user project:
+# ======================================================
+
+# Provide the include directories to the caller
+SET(AndroidOpenCV_INCLUDE_DIRS @CMAKE_INCLUDE_DIRS_CONFIGCMAKE@)
+INCLUDE_DIRECTORIES(${AndroidOpenCV_INCLUDE_DIRS})
+
+# ======================================================
+# Link directories to add to the user project:
+# ======================================================
+
+# Provide the libs directory anyway, it may be needed in some cases.
+SET(AndroidOpenCV_LIB_DIR @CMAKE_LIB_DIRS_CONFIGCMAKE@)
+LINK_DIRECTORIES(${AndroidOpenCV_LIB_DIR})
+
+# ======================================================
+# Libraries to add to the user project:
+# ======================================================
+SET(AndroidOpenCV_LIBS @CMAKE_LIBS_CONFIGCMAKE@)
+
+SET(AndroidOpenCV_SWIG_DIR @CMAKE_SWIG_DIR_CONFIGCMAKE@)
diff --git a/android/android-opencv/CMakeLists.txt b/android/android-opencv/CMakeLists.txt
new file mode 100644 (file)
index 0000000..2737766
--- /dev/null
@@ -0,0 +1,5 @@
+cmake_minimum_required(VERSION 2.8)
+
+project(android-jni)
+
+add_subdirectory(jni)
diff --git a/android/android-opencv/README.txt b/android/android-opencv/README.txt
new file mode 100644 (file)
index 0000000..c7f2195
--- /dev/null
@@ -0,0 +1,13 @@
+=========================================
+CMake Build
+=========================================
+mkdir build
+cd build
+cmake -DCMAKE_TOOLCHAIN_FILE=$ANDTOOLCHAIN ..
+
+=========================================
+Android Build
+=========================================
+sh project_create.sh
+ant compile
+ant install
diff --git a/android/android-opencv/default.properties b/android/android-opencv/default.properties
new file mode 100644 (file)
index 0000000..b308918
--- /dev/null
@@ -0,0 +1,13 @@
+# This file is automatically generated by Android Tools.
+# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
+# 
+# This file must be checked in Version Control Systems.
+# 
+# To customize properties used by the Ant build system use,
+# "build.properties", and override values to adapt the script to your
+# project structure.
+
+#android.library=true
+# Project target.
+target=android-7
+android.library=true
diff --git a/android/android-opencv/jni/CMakeLists.txt b/android/android-opencv/jni/CMakeLists.txt
new file mode 100644 (file)
index 0000000..1f26f0d
--- /dev/null
@@ -0,0 +1,70 @@
+#########################################################
+# Find opencv and android-opencv
+#########################################################
+
+set(OpenCV_DIR ${CMAKE_SOURCE_DIR}/../build 
+    CACHE PATH "The path where you built opencv for android")
+find_package(OpenCV REQUIRED)
+
+#########################################################
+#c flags, includes, and lib dependencies
+#########################################################
+#notice the "recycling" of CMAKE_C_FLAGS
+#this is necessary to pick up android flags
+set( CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Wall -pedantic -fPIC" )
+
+INCLUDE_DIRECTORIES(${CMAKE_CURRENT_SOURCE_DIR} 
+                    ${CMAKE_CURRENT_SOURCE_DIR}/include)
+
+set( LIBRARY_DEPS ${OpenCV_LIBS} )
+if(ANDROID)
+  set( LIBRARY_DEPS ${LIBRARY_DEPS} log dl GLESv2)
+endif(ANDROID)
+
+#########################################################
+#SWIG STUFF
+#########################################################
+#the java package to place swig generated java files in
+set(MY_PACKAGE com.opencv.jni)
+
+if(NOT ANDROID)
+  #non android swig and jni
+  #jni is available by default on android
+  find_package(JNI REQUIRED)
+  include_directories(${JNI_INCLUDE_DIRS})
+  FIND_PACKAGE(SWIG)
+endif()
+
+INCLUDE(${SWIG_USE_FILE}) #on android this is found by the cmake toolchain
+
+if(ANDROID)
+  #this will set the output path for the java package
+  #and properly create the package declarations in generated java sources
+  SET_SWIG_JAVA_PACKAGE( ${MY_PACKAGE} ) #defined in the android toolchain
+endif(ANDROID)
+
+SET_SOURCE_FILES_PROPERTIES(android-cv.i PROPERTIES CPLUSPLUS ON)
+
+SWIG_ADD_MODULE(android-opencv java 
+  android-cv.i
+  Calibration.cpp
+  gl_code.cpp
+  image_pool.cpp
+  yuv420sp2rgb.c
+  #yuv420rgb888c.c
+  #yuv420rgb888.s
+  yuv2rgb16tab.c
+  )
+  
+target_link_libraries(android-opencv ${LIBRARY_DEPS} )
+
+###################################################################
+# Set up the config file for others to link against.
+###################################################################
+set(CMAKE_INCLUDE_DIRS_CONFIGCMAKE ${CMAKE_CURRENT_SOURCE_DIR}/include)
+set(CMAKE_LIB_DIRS_CONFIGCMAKE ${LIBRARY_OUTPUT_PATH})
+set(CMAKE_LIBS_CONFIGCMAKE android-opencv)
+set(CMAKE_SWIG_DIR_CONFIGCMAKE ${CMAKE_CURRENT_SOURCE_DIR})
+configure_file( "${CMAKE_SOURCE_DIR}/AndroidOpenCVConfig.cmake.in"
+                "${CMAKE_BINARY_DIR}/AndroidOpenCVConfig.cmake"
+                IMMEDIATE @ONLY)
diff --git a/android/android-opencv/jni/Calibration.cpp b/android/android-opencv/jni/Calibration.cpp
new file mode 100644 (file)
index 0000000..9ba8fa8
--- /dev/null
@@ -0,0 +1,245 @@
+/*
+ * Calibration.cpp
+ *
+ *  Created on: Jun 13, 2010
+ *      Author: ethan
+ */
+
+#include "Calibration.h"
+
+#include <sys/stat.h>
+
+using namespace cv;
+
+Calibration::Calibration() :
+  patternsize(6, 8)
+{
+
+}
+
+Calibration::~Calibration()
+{
+
+}
+
+namespace
+{
+double computeReprojectionErrors(const vector<vector<Point3f> >& objectPoints,
+                                 const vector<vector<Point2f> >& imagePoints, const vector<Mat>& rvecs, const vector<
+                                     Mat>& tvecs, const Mat& cameraMatrix, const Mat& distCoeffs,
+                                 vector<float>& perViewErrors)
+{
+  vector<Point2f> imagePoints2;
+  int i, totalPoints = 0;
+  double totalErr = 0, err;
+  perViewErrors.resize(objectPoints.size());
+
+  for (i = 0; i < (int)objectPoints.size(); i++)
+  {
+    projectPoints(Mat(objectPoints[i]), rvecs[i], tvecs[i], cameraMatrix, distCoeffs, imagePoints2);
+    err = norm(Mat(imagePoints[i]), Mat(imagePoints2), CV_L1);
+    int n = (int)objectPoints[i].size();
+    perViewErrors[i] = err / n;
+    totalErr += err;
+    totalPoints += n;
+  }
+
+  return totalErr / totalPoints;
+}
+
+void calcChessboardCorners(Size boardSize, float squareSize, vector<Point3f>& corners)
+{
+  corners.resize(0);
+
+  for (int i = 0; i < boardSize.height; i++)
+    for (int j = 0; j < boardSize.width; j++)
+      corners.push_back(Point3f(float(j * squareSize), float(i * squareSize), 0));
+}
+
+/**from opencv/samples/cpp/calibration.cpp
+ *
+ */
+bool runCalibration(vector<vector<Point2f> > imagePoints, Size imageSize, Size boardSize, float squareSize,
+                    float aspectRatio, int flags, Mat& cameraMatrix, Mat& distCoeffs, vector<Mat>& rvecs,
+                    vector<Mat>& tvecs, vector<float>& reprojErrs, double& totalAvgErr)
+{
+  cameraMatrix = Mat::eye(3, 3, CV_64F);
+  if (flags & CV_CALIB_FIX_ASPECT_RATIO)
+    cameraMatrix.at<double> (0, 0) = aspectRatio;
+
+  distCoeffs = Mat::zeros(4, 1, CV_64F);
+
+  vector<vector<Point3f> > objectPoints(1);
+  calcChessboardCorners(boardSize, squareSize, objectPoints[0]);
+  for (size_t i = 1; i < imagePoints.size(); i++)
+    objectPoints.push_back(objectPoints[0]);
+
+  calibrateCamera(objectPoints, imagePoints, imageSize, cameraMatrix, distCoeffs, rvecs, tvecs, flags);
+
+  bool ok = checkRange(cameraMatrix, CV_CHECK_QUIET) && checkRange(distCoeffs, CV_CHECK_QUIET);
+
+  totalAvgErr
+      = computeReprojectionErrors(objectPoints, imagePoints, rvecs, tvecs, cameraMatrix, distCoeffs, reprojErrs);
+
+  return ok;
+}
+void saveCameraParams(const string& filename, Size imageSize, Size boardSize, float squareSize, float aspectRatio,
+                      int flags, const Mat& cameraMatrix, const Mat& distCoeffs, const vector<Mat>& rvecs,
+                      const vector<Mat>& tvecs, const vector<float>& reprojErrs,
+                      const vector<vector<Point2f> >& imagePoints, double totalAvgErr)
+{
+  FileStorage fs(filename, FileStorage::WRITE);
+
+  time_t t;
+  time(&t);
+  struct tm *t2 = localtime(&t);
+  char buf[1024];
+  strftime(buf, sizeof(buf) - 1, "%c", t2);
+
+  fs << "calibration_time" << buf;
+
+  if (!rvecs.empty() || !reprojErrs.empty())
+    fs << "nframes" << (int)std::max(rvecs.size(), reprojErrs.size());
+  fs << "image_width" << imageSize.width;
+  fs << "image_height" << imageSize.height;
+  fs << "board_width" << boardSize.width;
+  fs << "board_height" << boardSize.height;
+  fs << "squareSize" << squareSize;
+
+  if (flags & CV_CALIB_FIX_ASPECT_RATIO)
+    fs << "aspectRatio" << aspectRatio;
+
+  if (flags != 0)
+  {
+    sprintf(buf, "flags: %s%s%s%s", flags & CV_CALIB_USE_INTRINSIC_GUESS ? "+use_intrinsic_guess" : "", flags
+        & CV_CALIB_FIX_ASPECT_RATIO ? "+fix_aspectRatio" : "", flags & CV_CALIB_FIX_PRINCIPAL_POINT
+        ? "+fix_principal_point" : "", flags & CV_CALIB_ZERO_TANGENT_DIST ? "+zero_tangent_dist" : "");
+    cvWriteComment(*fs, buf, 0);
+  }
+
+  fs << "flags" << flags;
+
+  fs << "camera_matrix" << cameraMatrix;
+  fs << "distortion_coefficients" << distCoeffs;
+
+  fs << "avg_reprojection_error" << totalAvgErr;
+  if (!reprojErrs.empty())
+    fs << "per_view_reprojection_errors" << Mat(reprojErrs);
+
+  if (!rvecs.empty() && !tvecs.empty())
+  {
+    Mat bigmat(rvecs.size(), 6, CV_32F);
+    for (size_t i = 0; i < rvecs.size(); i++)
+    {
+      Mat r = bigmat(Range(i, i + 1), Range(0, 3));
+      Mat t = bigmat(Range(i, i + 1), Range(3, 6));
+      rvecs[i].copyTo(r);
+      tvecs[i].copyTo(t);
+    }
+    cvWriteComment(*fs, "a set of 6-tuples (rotation vector + translation vector) for each view", 0);
+    fs << "extrinsic_parameters" << bigmat;
+  }
+
+  if (!imagePoints.empty())
+  {
+    Mat imagePtMat(imagePoints.size(), imagePoints[0].size(), CV_32FC2);
+    for (size_t i = 0; i < imagePoints.size(); i++)
+    {
+      Mat r = imagePtMat.row(i).reshape(2, imagePtMat.cols);
+      Mat(imagePoints[i]).copyTo(r);
+    }
+    fs << "image_points" << imagePtMat;
+  }
+}
+}//anon namespace
+bool Calibration::detectAndDrawChessboard(int idx, image_pool* pool)
+{
+
+  bool patternfound = false;
+  Mat grey = pool->getGrey(idx);
+  if (grey.empty())
+    return false;
+  vector<Point2f> corners;
+
+  patternfound = findChessboardCorners(grey, patternsize, corners, CALIB_CB_FILTER_QUADS + CALIB_CB_ADAPTIVE_THRESH
+                                       + CALIB_CB_NORMALIZE_IMAGE + CALIB_CB_FAST_CHECK);
+  Mat img = pool->getImage(idx);
+
+  if (corners.size() < 1)
+    return false;
+
+  if (patternfound)
+  {
+    cornerSubPix(grey, corners, Size(11, 11), Size(-1, -1), TermCriteria(CV_TERMCRIT_EPS + CV_TERMCRIT_ITER, 30, 0.1));
+    imagepoints.push_back(corners);
+  }
+
+  drawChessboardCorners(img, patternsize, Mat(corners), patternfound);
+
+  imgsize = grey.size();
+
+  return patternfound;
+
+}
+
+void Calibration::drawText(int i, image_pool* pool, const char* ctext)
+{
+  // Use "y" to show that the baseLine is about
+  string text = ctext;
+  int fontFace = FONT_HERSHEY_COMPLEX_SMALL;
+  double fontScale = .8;
+  int thickness = 1;
+
+  Mat img = pool->getImage(i);
+
+  int baseline = 0;
+  Size textSize = getTextSize(text, fontFace, fontScale, thickness, &baseline);
+  baseline += thickness;
+
+  // center the text
+  Point textOrg((img.cols - textSize.width) / 2, (img.rows - textSize.height * 2));
+
+  // draw the box
+  rectangle(img, textOrg + Point(0, baseline), textOrg + Point(textSize.width, -textSize.height), Scalar(0, 0, 255),
+            CV_FILLED);
+  // ... and the baseline first
+  line(img, textOrg + Point(0, thickness), textOrg + Point(textSize.width, thickness), Scalar(0, 0, 255));
+
+  // then put the text itself
+  putText(img, text, textOrg, fontFace, fontScale, Scalar::all(255), thickness, 8);
+}
+
+void Calibration::resetChess()
+{
+
+  imagepoints.clear();
+}
+
+void Calibration::calibrate(const char* filename)
+{
+
+  vector<Mat> rvecs, tvecs;
+  vector<float> reprojErrs;
+  double totalAvgErr = 0;
+  int flags = 0;
+  flags |= CV_CALIB_FIX_PRINCIPAL_POINT | CV_CALIB_FIX_ASPECT_RATIO;
+  bool writeExtrinsics = true;
+  bool writePoints = true;
+
+  bool ok = runCalibration(imagepoints, imgsize, patternsize, 1.f, 1.f, flags, K, distortion, rvecs, tvecs, reprojErrs,
+                           totalAvgErr);
+
+  if (ok)
+  {
+
+    saveCameraParams(filename, imgsize, patternsize, 1.f, 1.f, flags, K, distortion, writeExtrinsics ? rvecs : vector<
+        Mat> (), writeExtrinsics ? tvecs : vector<Mat> (), writeExtrinsics ? reprojErrs : vector<float> (), writePoints
+        ? imagepoints : vector<vector<Point2f> > (), totalAvgErr);
+  }
+
+}
+
+int Calibration::getNumberDetectedChessboards()
+{
+  return imagepoints.size();
+}
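
For reference, computeReprojectionErrors above measures the distance between detected and reprojected corners with cv::norm(..., CV_L1), so with n_i corners x_{ij} in view i and reprojections \hat{x}_{ij} from projectPoints, the values it reports are:

    e_i = \frac{1}{n_i} \sum_{j=1}^{n_i} \lVert x_{ij} - \hat{x}_{ij} \rVert_1 ,
    \qquad
    e_{avg} = \frac{\sum_i n_i\, e_i}{\sum_i n_i}

i.e. each per-view error is the L1 reprojection distance averaged over that view's corners, and the returned total is the same average taken over all corners of all views.
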
diff --git a/android/android-opencv/jni/Calibration.i b/android/android-opencv/jni/Calibration.i
new file mode 100644 (file)
index 0000000..ba6154b
--- /dev/null
@@ -0,0 +1,28 @@
+/*
+ * include the headers required by the generated cpp code
+ */
+%{
+#include "Calibration.h"
+#include "image_pool.h"
+using namespace cv;
+%}
+
+
+class Calibration {
+public:
+
+       Size patternsize;
+       
+       Calibration();
+       virtual ~Calibration();
+
+       bool detectAndDrawChessboard(int idx, image_pool* pool);
+       
+       void resetChess();
+       
+       int getNumberDetectedChessboards();
+       
+       void calibrate(const char* filename);
+       
+       void drawText(int idx, image_pool* pool, const char* text);
+};
diff --git a/android/android-opencv/jni/android-cv-typemaps.i b/android/android-opencv/jni/android-cv-typemaps.i
new file mode 100644 (file)
index 0000000..cae2d6b
--- /dev/null
@@ -0,0 +1,6 @@
+%feature("director") Mat;
+%feature("director") glcamera;
+%feature("director") image_pool;
+%typemap("javapackage") Mat, Mat *, Mat & "com.opencv.jni";
+%typemap("javapackage") glcamera, glcamera *, glcamera & "com.opencv.jni";
+%typemap("javapackage") image_pool, image_pool *, image_pool & "com.opencv.jni";
\ No newline at end of file
diff --git a/android/android-opencv/jni/android-cv.i b/android/android-opencv/jni/android-cv.i
new file mode 100644 (file)
index 0000000..f6661aa
--- /dev/null
@@ -0,0 +1,58 @@
+/* File : android-cv.i
+
+import this file, and make sure to add the System.loadLibrary("android-opencv")
+before loading any lib that depends on this.
+ */
+
+%module opencv
+%{
+#include "image_pool.h"
+#include "glcamera.h"
+using namespace cv;
+%}
+#ifndef SWIGIMPORTED
+%include "various.i"
+%include "typemaps.i"
+%include "arrays_java.i"
+#endif
+
+/**
+ * Make all the swig pointers public, so that
+ * external libraries can refer to these, otherwise they default to 
+ * protected...
+ */
+%typemap(javabody) SWIGTYPE %{
+  private long swigCPtr;
+  protected boolean swigCMemOwn;
+  public $javaclassname(long cPtr, boolean cMemoryOwn) {
+       swigCMemOwn = cMemoryOwn;
+       swigCPtr = cPtr;
+  }
+  public static long getCPtr($javaclassname obj) {
+       return (obj == null) ? 0 : obj.swigCPtr;
+  }
+%}
+
+
+%pragma(java) jniclasscode=%{
+  static {
+    try {
+       //load the library, make sure that libandroid-opencv.so is in your <project>/libs/armeabi directory
+       //so that android sdk automatically installs it along with the app.
+        System.loadLibrary("android-opencv");
+    } catch (UnsatisfiedLinkError e) {
+       //badness
+       throw e;
+     
+    }
+  }
+%}
+
+
+%include "cv.i"
+
+%include "glcamera.i"
+
+%include "image_pool.i"
+
+%include "Calibration.i"
diff --git a/android/android-opencv/jni/buffers.i b/android/android-opencv/jni/buffers.i
new file mode 100644 (file)
index 0000000..42cca9c
--- /dev/null
@@ -0,0 +1,165 @@
+/*
+ * These typemaps provide support for sharing data between JNI and JVM code
+ * using NIO direct buffers. It is the responsibility of the JVM code to
+ * allocate a direct buffer of the appropriate size.
+ *
+ * Example use:
+
+ * Wrapping:
+ * %include "buffers.i" 
+ * %apply int* BUFF {int* buffer}
+ * int read_foo_int(int* buffer);
+ *
+ * Java:
+ * IntBuffer buffer = IntBuffer.allocateDirect(nInts*4).order(ByteOrder.nativeOrder()).asIntBuffer();
+ * Example.read_foo_int(buffer);
+ *
+
+ * The following typemaps are defined:
+ * void* BUFF           <--> java.nio.Buffer
+ * char* BUFF           <--> java.nio.ByteBuffer
+ * char* CBUFF          <--> java.nio.CharBuffer
+ * unsigned char* INBUFF/OUTBUFF  <--> java.nio.ShortBuffer
+ * short* BUFF          <--> java.nio.ShortBuffer
+ * unsigned short* INBUFF/OUTBUFF <--> java.nio.IntBuffer
+ * int* BUFF            <--> java.nio.IntBuffer
+ * unsigned int* INBUFF/OUTBUFF   <--> java.nio.LongBuffer
+ * long* BUFF           <--> java.nio.IntBuffer
+ * unsigned long* INBUFF/OUTBUFF  <--> java.nio.LongBuffer
+ * long long* BUFF      <--> java.nio.LongBuffer
+ * float* BUFF          <--> java.nio.FloatBuffer
+ * double* BUFF         <--> java.nio.DoubleBuffer
+ *
+ * Note the potential for data loss in the conversion from 
+ * the C type 'unsigned long' to the signed Java long type.
+ * Hopefully, I can implement a workaround with BigNumber in the future.
+ *
+ * The use of ByteBuffer vs CharBuffer for the char* type should
+ * depend on the type of data. In general you'll probably
+ * want to use CharBuffer for actual text data.
+ */
+/*
+ * This macro is used to define the nio buffers for primitive types.
+ */
+%define NIO_BUFFER_TYPEMAP(CTYPE, LABEL, BUFFERTYPE)
+%typemap(jni) CTYPE* LABEL "jobject"
+%typemap(jtype) CTYPE* LABEL "BUFFERTYPE"
+%typemap(jstype) CTYPE* LABEL "BUFFERTYPE"
+%typemap(javain, 
+       pre="    assert $javainput.isDirect() : \"Buffer must be allocated direct.\";") CTYPE* LABEL "$javainput"
+%typemap(javaout) CTYPE* LABEL {
+    return $jnicall;
+}
+%typemap(in) CTYPE* LABEL {
+  $1 = (CTYPE*)(jenv)->GetDirectBufferAddress( $input);
+  if ($1 == NULL) {
+    SWIG_JavaThrowException(jenv, SWIG_JavaRuntimeException, "Unable to get address of direct buffer. Buffer must be allocated direct.");
+  }
+}
+%typemap(memberin) CTYPE* LABEL {
+  if ($input) {
+    $1 = $input;
+  } else {
+    $1 = 0;
+  }
+}
+%typemap(freearg) CTYPE* LABEL ""
+%enddef
+
+NIO_BUFFER_TYPEMAP(void, BUFF, java.nio.Buffer);
+NIO_BUFFER_TYPEMAP(char, BUFF, java.nio.ByteBuffer);
+NIO_BUFFER_TYPEMAP(char, CBUFF, java.nio.CharBuffer);
+/*NIO_BUFFER_TYPEMAP(unsigned char, BUFF, java.nio.ShortBuffer);*/
+NIO_BUFFER_TYPEMAP(short, BUFF, java.nio.ShortBuffer);
+NIO_BUFFER_TYPEMAP(unsigned short, BUFF, java.nio.IntBuffer);
+NIO_BUFFER_TYPEMAP(int, BUFF, java.nio.IntBuffer);
+NIO_BUFFER_TYPEMAP(unsigned int, BUFF, java.nio.LongBuffer);
+NIO_BUFFER_TYPEMAP(long, BUFF, java.nio.IntBuffer);
+NIO_BUFFER_TYPEMAP(unsigned long, BUFF, java.nio.LongBuffer);
+NIO_BUFFER_TYPEMAP(long long, BUFF, java.nio.LongBuffer);
+NIO_BUFFER_TYPEMAP(float, BUFF, java.nio.FloatBuffer);
+NIO_BUFFER_TYPEMAP(double, BUFF, java.nio.DoubleBuffer);
+#undef NIO_BUFFER_TYPEMAP
+
+
+%define UNSIGNED_NIO_BUFFER_TYPEMAP(CTYPE, BSIZE, BUFFERTYPE, PACKFCN, UNPACKFCN)
+%typemap(jni) CTYPE* INBUFF "jobject"
+%typemap(jtype) CTYPE* INBUFF "java.nio.ByteBuffer"
+%typemap(jstype) CTYPE* INBUFF "BUFFERTYPE"
+%typemap(javain, 
+       pre="    java.nio.ByteBuffer tmp$javainput = PACKFCN($javainput, true);") CTYPE* INBUFF "tmp$javainput"
+%typemap(javaout) CTYPE* INBUFF {
+    return $jnicall;
+}
+%typemap(in) CTYPE* INBUFF {
+  $1 = (jenv)->GetDirectBufferAddress($input);
+  if ($1 == NULL) {
+    SWIG_JavaThrowException(jenv, SWIG_JavaRuntimeException, "Unable to get address of direct buffer. Buffer must be allocated direct.");
+  }
+}
+%typemap(memberin) CTYPE* INBUFF {
+  if ($input) {
+    $1 = $input;
+  } else {
+    $1 = 0;
+  }
+}
+%typemap(freearg) CTYPE* INBUFF ""
+
+%typemap(jni) CTYPE* OUTBUFF "jobject"
+%typemap(jtype) CTYPE* OUTBUFF "java.nio.ByteBuffer"
+%typemap(jstype) CTYPE* OUTBUFF "BUFFERTYPE"
+%typemap(javain, 
+       pre="    java.nio.ByteBuffer tmp$javainput = java.nio.ByteBuffer.allocateDirect($javainput.capacity()*BSIZE).order($javainput.order());",
+        post="       UNPACKFCN(tmp$javainput, $javainput);") CTYPE* OUTBUFF "tmp$javainput"
+%typemap(javaout) CTYPE* OUTBUFF {
+    return $jnicall;
+}
+%typemap(in) CTYPE* OUTBUFF {
+  $1 = (jenv)->GetDirectBufferAddress( $input);
+  if ($1 == NULL) {
+    SWIG_JavaThrowException(jenv, SWIG_JavaRuntimeException, "Unable to get address of direct buffer. Buffer must be allocated direct.");
+  }
+}
+%typemap(memberin) CTYPE* OUTBUFF {
+  if ($input) {
+    $1 = $input;
+  } else {
+    $1 = 0;
+  }
+}
+%typemap(freearg) CTYPE* OUTBUFF ""
+%enddef
+
+UNSIGNED_NIO_BUFFER_TYPEMAP(unsigned char, 1, java.nio.ShortBuffer, permafrost.hdf.libhdf.BufferUtils.packUChar, permafrost.hdf.libhdf.BufferUtils.unpackUChar);
+UNSIGNED_NIO_BUFFER_TYPEMAP(unsigned short, 2, java.nio.IntBuffer, permafrost.hdf.libhdf.BufferUtils.packUShort, permafrost.hdf.libhdf.BufferUtils.unpackUShort);
+UNSIGNED_NIO_BUFFER_TYPEMAP(unsigned int, 4, java.nio.LongBuffer, permafrost.hdf.libhdf.BufferUtils.packUInt, permafrost.hdf.libhdf.BufferUtils.unpackUInt);
+UNSIGNED_NIO_BUFFER_TYPEMAP(unsigned long, 4, java.nio.LongBuffer, permafrost.hdf.libhdf.BufferUtils.packUInt, permafrost.hdf.libhdf.BufferUtils.unpackUInt);
+
+/*
+%typemap(jni) unsigned char* BUFF "jobject"
+%typemap(jtype) unsigned char* BUFF "java.nio.ByteBuffer"
+%typemap(jstype) unsigned char* BUFF "java.nio.ShortBuffer"
+%typemap(javain, 
+       pre="    java.nio.ByteBuffer tmp$javainput = permafrost.hdf.libhdf.BufferUtils.packUChar($javainput, true);",
+        post="      permafrost.hdf.libhdf.BufferUtils.unpackUChar(tmp$javainput, $javainput);") unsigned char* BUFF "tmp$javainput"
+%typemap(javaout) unsigned char* BUFF {
+    return $jnicall;
+}
+%typemap(in) unsigned char* BUFF {
+  $1 = (const char*)(jenv)->GetDirectBufferAddress( $input);
+  if ($1 == NULL) {
+    SWIG_JavaThrowException(jenv, SWIG_JavaRuntimeException, "Unable to get address of direct buffer. Buffer must be allocated direct.");
+  }
+}
+%typemap(memberin) unsigned char* BUFF {
+  if ($input) {
+    $1 = $input;
+  } else {
+    $1 = 0;
+  }
+}
+%typemap(freearg) unsigned char* BUFF ""
+*/
+
+#undef UNSIGNED_NIO_BUFFER_TYPEMAP
\ No newline at end of file
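
These typemaps all reduce to a GetDirectBufferAddress lookup on the native side. A sketch of what the generated JNI wrapper for the documented int* BUFF example effectively does (the class and wrapper names are hypothetical - SWIG emits the real ones - and error handling is trimmed):

    #include <jni.h>

    // Roughly what %typemap(in) int* BUFF expands to inside the generated wrapper
    // for "int read_foo_int(int* buffer)" from the usage example above.
    extern "C" JNIEXPORT jint JNICALL
    Java_com_opencv_jni_exampleJNI_read_1foo_1int(JNIEnv* jenv, jclass, jobject jbuffer)
    {
        // Only *direct* buffers have a stable native address; for a heap buffer
        // GetDirectBufferAddress returns NULL, which is why the javain typemap
        // asserts isDirect() on the Java side.
        int* buffer = (int*)jenv->GetDirectBufferAddress(jbuffer);
        if (buffer == NULL) {
            jclass ex = jenv->FindClass("java/lang/RuntimeException");
            jenv->ThrowNew(ex, "Unable to get address of direct buffer. Buffer must be allocated direct.");
            return 0;
        }
        return buffer[0];   // native code now reads the same memory the JVM sees
    }
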
diff --git a/android/android-opencv/jni/cv.i b/android/android-opencv/jni/cv.i
new file mode 100644 (file)
index 0000000..167d55d
--- /dev/null
@@ -0,0 +1,156 @@
+%typemap(javaimports) Mat "
+/** Wrapper for the OpenCV Mat object. Good for passing around as a pointer to a Mat.
+*/"
+
+%typemap(javaimports) Size "
+/** Wrapper for the OpenCV Size object. Good for setting dimensions of cv::Mat...
+*/"
+
+
+class Size{
+public:
+       Size();
+       Size(int width,int height);
+       ~Size();
+       int width;
+       int height;     
+};
+
+#define CV_CN_MAX     512
+#define CV_CN_SHIFT   3
+#define CV_DEPTH_MAX  (1 << CV_CN_SHIFT)
+
+#define CV_8U   0
+#define CV_8S   1
+#define CV_16U  2
+#define CV_16S  3
+#define CV_32S  4
+#define CV_32F  5
+#define CV_64F  6
+#define CV_USRTYPE1 7
+
+#define CV_MAT_DEPTH_MASK       (CV_DEPTH_MAX - 1)
+#define CV_MAT_DEPTH(flags)     ((flags) & CV_MAT_DEPTH_MASK)
+
+#define CV_MAKETYPE(depth,cn) (CV_MAT_DEPTH(depth) + (((cn)-1) << CV_CN_SHIFT))
+#define CV_MAKE_TYPE CV_MAKETYPE
+
+#define CV_8UC1 CV_MAKETYPE(CV_8U,1)
+#define CV_8UC2 CV_MAKETYPE(CV_8U,2)
+#define CV_8UC3 CV_MAKETYPE(CV_8U,3)
+#define CV_8UC4 CV_MAKETYPE(CV_8U,4)
+#define CV_8UC(n) CV_MAKETYPE(CV_8U,(n))
+
+#define CV_8SC1 CV_MAKETYPE(CV_8S,1)
+#define CV_8SC2 CV_MAKETYPE(CV_8S,2)
+#define CV_8SC3 CV_MAKETYPE(CV_8S,3)
+#define CV_8SC4 CV_MAKETYPE(CV_8S,4)
+#define CV_8SC(n) CV_MAKETYPE(CV_8S,(n))
+
+#define CV_16UC1 CV_MAKETYPE(CV_16U,1)
+#define CV_16UC2 CV_MAKETYPE(CV_16U,2)
+#define CV_16UC3 CV_MAKETYPE(CV_16U,3)
+#define CV_16UC4 CV_MAKETYPE(CV_16U,4)
+#define CV_16UC(n) CV_MAKETYPE(CV_16U,(n))
+
+#define CV_16SC1 CV_MAKETYPE(CV_16S,1)
+#define CV_16SC2 CV_MAKETYPE(CV_16S,2)
+#define CV_16SC3 CV_MAKETYPE(CV_16S,3)
+#define CV_16SC4 CV_MAKETYPE(CV_16S,4)
+#define CV_16SC(n) CV_MAKETYPE(CV_16S,(n))
+
+#define CV_32SC1 CV_MAKETYPE(CV_32S,1)
+#define CV_32SC2 CV_MAKETYPE(CV_32S,2)
+#define CV_32SC3 CV_MAKETYPE(CV_32S,3)
+#define CV_32SC4 CV_MAKETYPE(CV_32S,4)
+#define CV_32SC(n) CV_MAKETYPE(CV_32S,(n))
+
+#define CV_32FC1 CV_MAKETYPE(CV_32F,1)
+#define CV_32FC2 CV_MAKETYPE(CV_32F,2)
+#define CV_32FC3 CV_MAKETYPE(CV_32F,3)
+#define CV_32FC4 CV_MAKETYPE(CV_32F,4)
+#define CV_32FC(n) CV_MAKETYPE(CV_32F,(n))
+
+#define CV_64FC1 CV_MAKETYPE(CV_64F,1)
+#define CV_64FC2 CV_MAKETYPE(CV_64F,2)
+#define CV_64FC3 CV_MAKETYPE(CV_64F,3)
+#define CV_64FC4 CV_MAKETYPE(CV_64F,4)
+#define CV_64FC(n) CV_MAKETYPE(CV_64F,(n))
+
+#define CV_AUTO_STEP  0x7fffffff
+#define CV_WHOLE_ARR  cvSlice( 0, 0x3fffffff )
+
+#define CV_MAT_CN_MASK          ((CV_CN_MAX - 1) << CV_CN_SHIFT)
+#define CV_MAT_CN(flags)        ((((flags) & CV_MAT_CN_MASK) >> CV_CN_SHIFT) + 1)
+#define CV_MAT_TYPE_MASK        (CV_DEPTH_MAX*CV_CN_MAX - 1)
+#define CV_MAT_TYPE(flags)      ((flags) & CV_MAT_TYPE_MASK)
+#define CV_MAT_CONT_FLAG_SHIFT  14
+#define CV_MAT_CONT_FLAG        (1 << CV_MAT_CONT_FLAG_SHIFT)
+#define CV_IS_MAT_CONT(flags)   ((flags) & CV_MAT_CONT_FLAG)
+#define CV_IS_CONT_MAT          CV_IS_MAT_CONT
+#define CV_SUBMAT_FLAG_SHIFT    15
+#define CV_SUBMAT_FLAG          (1 << CV_SUBMAT_FLAG_SHIFT)
+#define CV_IS_SUBMAT(flags)     ((flags) & CV_MAT_SUBMAT_FLAG)
+
+#define CV_MAGIC_MASK       0xFFFF0000
+#define CV_MAT_MAGIC_VAL    0x42420000
+#define CV_TYPE_NAME_MAT    "opencv-matrix"
+
+class Mat {
+public:
+       Mat();
+       ~Mat();
+       void create(Size size, int type);
+       int channels() const;
+ %immutable;
+       int rows;
+       int cols;
+};
+
+template<class _Tp> class Ptr
+{
+public:
+    //! empty constructor
+    Ptr();
+    //! take ownership of the pointer. The associated reference counter is allocated and set to 1
+    Ptr(_Tp* _obj);
+    //! calls release()
+    ~Ptr();
+    //! copy constructor. Copies the members and calls addref()
+    Ptr(const Ptr& ptr);
+    //! copy operator. Calls ptr.addref() and release() before copying the members
+   // Ptr& operator = (const Ptr& ptr);
+    //! increments the reference counter
+    void addref();
+    //! decrements the reference counter. If it reaches 0, delete_obj() is called
+    void release();
+    //! deletes the object. Override if needed
+    void delete_obj();
+    //! returns true iff obj==NULL
+    bool empty() const;
+
+    
+    //! helper operators making "Ptr<T> ptr" use very similar to "T* ptr".
+    _Tp* operator -> ();
+   // const _Tp* operator -> () const;
+
+   // operator _Tp* ();
+  //  operator const _Tp*() const;
+    
+protected:
+    _Tp* obj; //< the object pointer.
+    int* refcount; //< the associated reference counter
+};
+
+
+
+%template(PtrMat) Ptr<Mat>;
+
+void imwrite(const char* image_name, const Mat& image);
+Mat imread(const char* image_name);
+
+ %include "buffers.i" 
+ %apply char* BUFF {const char* buffer}
+ %apply char* BUFF {char* buffer}
+void copyMatToBuffer(char* buffer, const Mat& mat);
+void copyBufferToMat(Mat& mat, const char* buffer);
\ No newline at end of file
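
The CV_MAKETYPE family mirrored above packs the depth code into the low CV_CN_SHIFT (= 3) bits and (channels - 1) into the bits just above them. A small standalone check of that arithmetic, using only the macros defined in this interface file (a sketch, not part of the wrapper):

    #include <cstdio>

    #define CV_CN_SHIFT 3
    #define CV_DEPTH_MAX (1 << CV_CN_SHIFT)
    #define CV_MAT_DEPTH_MASK (CV_DEPTH_MAX - 1)
    #define CV_MAT_DEPTH(flags) ((flags) & CV_MAT_DEPTH_MASK)
    #define CV_MAKETYPE(depth, cn) (CV_MAT_DEPTH(depth) + (((cn) - 1) << CV_CN_SHIFT))

    int main()
    {
        // depth codes from above: CV_8U == 0, CV_32F == 5
        std::printf("CV_8UC1  = %d\n", CV_MAKETYPE(0, 1)); // 0 + (0 << 3) = 0
        std::printf("CV_8UC3  = %d\n", CV_MAKETYPE(0, 3)); // 0 + (2 << 3) = 16
        std::printf("CV_32FC1 = %d\n", CV_MAKETYPE(5, 1)); // 5 + (0 << 3) = 5
        std::printf("CV_32FC2 = %d\n", CV_MAKETYPE(5, 2)); // 5 + (1 << 3) = 13
        return 0;
    }
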
diff --git a/android/android-opencv/jni/gl_code.cpp b/android/android-opencv/jni/gl_code.cpp
new file mode 100644 (file)
index 0000000..358b448
--- /dev/null
@@ -0,0 +1,352 @@
+/*
+ * Copyright (C) 2009 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// OpenGL ES 2.0 code
+
+#include <jni.h>
+#if __ANDROID__
+#include <GLES2/gl2.h>
+#include <GLES2/gl2ext.h>
+#else
+#include <GL/gl.h>
+#endif
+
+#include "android_logger.h"
+
+#include <opencv2/core/core.hpp>
+#include <opencv2/imgproc/imgproc.hpp>
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <math.h>
+#include <stdint.h>
+
+#include "glcamera.h"
+#include "image_pool.h"
+using namespace cv;
+
+static void printGLString(const char *name, GLenum s)
+{
+  const char *v = (const char *)glGetString(s);
+  LOGI("GL %s = %s\n", name, v);
+}
+
+static void checkGlError(const char* op)
+{
+  for (GLint error = glGetError(); error; error = glGetError())
+  {
+    LOGI("after %s() glError (0x%x)\n", op, error);
+  }
+}
+
+static const char gVertexShader[] = "attribute vec4 a_position;   \n"
+  "attribute vec2 a_texCoord;   \n"
+  "varying vec2 v_texCoord;     \n"
+  "void main()                  \n"
+  "{                            \n"
+  "   gl_Position = a_position; \n"
+  "   v_texCoord = a_texCoord;  \n"
+  "}                            \n";
+
+static const char gFragmentShader[] = "precision mediump float;                            \n"
+  "varying vec2 v_texCoord;                            \n"
+  "uniform sampler2D s_texture;                        \n"
+  "void main()                                         \n"
+  "{                                                   \n"
+  "  gl_FragColor = texture2D( s_texture, v_texCoord );\n"
+  "}                                                   \n";
+
+
+GLuint glcamera::createSimpleTexture2D(GLuint _textureid, GLubyte* pixels, int width, int height, int channels)
+{
+
+  // Bind the texture
+  glActiveTexture(GL_TEXTURE0);
+  checkGlError("glActiveTexture");
+  // Bind the texture object
+  glBindTexture(GL_TEXTURE_2D, _textureid);
+  checkGlError("glBindTexture");
+
+  GLenum format;
+  switch (channels)
+  {
+    case 3:
+#if ANDROID
+      format = GL_RGB;
+#else
+      format = GL_BGR;
+#endif
+      break;
+    case 1:
+      format = GL_LUMINANCE;
+      break;
+    case 4:
+      format = GL_RGBA;
+      break;
+  }
+  // Load the texture
+  glTexImage2D(GL_TEXTURE_2D, 0, format, width, height, 0, format, GL_UNSIGNED_BYTE, pixels);
+
+  checkGlError("glTexImage2D");
+#if ANDROID
+  // Set the filtering mode
+  glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
+  glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
+#else
+  /* Linear Filtering */
+  glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
+  glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
+#endif
+
+  return _textureid;
+
+}
+
+GLuint glcamera::loadShader(GLenum shaderType, const char* pSource)
+{
+  GLuint shader = 0;
+#if __ANDROID__
+  shader = glCreateShader(shaderType);
+  if (shader)
+  {
+    glShaderSource(shader, 1, &pSource, NULL);
+    glCompileShader(shader);
+    GLint compiled = 0;
+    glGetShaderiv(shader, GL_COMPILE_STATUS, &compiled);
+    if (!compiled)
+    {
+      GLint infoLen = 0;
+      glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &infoLen);
+      if (infoLen)
+      {
+        char* buf = (char*)malloc(infoLen);
+        if (buf)
+        {
+          glGetShaderInfoLog(shader, infoLen, NULL, buf);
+          LOGE("Could not compile shader %d:\n%s\n",
+              shaderType, buf);
+          free(buf);
+        }
+        glDeleteShader(shader);
+        shader = 0;
+      }
+    }
+  }
+#endif
+  return shader;
+}
+
+GLuint glcamera::createProgram(const char* pVertexSource, const char* pFragmentSource)
+{
+#if __ANDROID__
+  GLuint vertexShader = loadShader(GL_VERTEX_SHADER, pVertexSource);
+  if (!vertexShader)
+  {
+    return 0;
+  }
+
+  GLuint pixelShader = loadShader(GL_FRAGMENT_SHADER, pFragmentSource);
+  if (!pixelShader)
+  {
+    return 0;
+  }
+
+  GLuint program = glCreateProgram();
+  if (program)
+  {
+    glAttachShader(program, vertexShader);
+    checkGlError("glAttachShader");
+    glAttachShader(program, pixelShader);
+    checkGlError("glAttachShader");
+    glLinkProgram(program);
+    GLint linkStatus = GL_FALSE;
+    glGetProgramiv(program, GL_LINK_STATUS, &linkStatus);
+    if (linkStatus != GL_TRUE)
+    {
+      GLint bufLength = 0;
+      glGetProgramiv(program, GL_INFO_LOG_LENGTH, &bufLength);
+      if (bufLength)
+      {
+        char* buf = (char*)malloc(bufLength);
+        if (buf)
+        {
+          glGetProgramInfoLog(program, bufLength, NULL, buf);
+          LOGE("Could not link program:\n%s\n", buf);
+          free(buf);
+        }
+      }
+      glDeleteProgram(program);
+      program = 0;
+    }
+  }
+  return program;
+#else
+  return 0;
+#endif
+}
+void  glcamera::clear(){
+  nimg = Mat();
+}
+//GLuint textureID;
+
+bool glcamera::setupGraphics(int w, int h)
+{
+//  printGLString("Version", GL_VERSION);
+//  printGLString("Vendor", GL_VENDOR);
+//  printGLString("Renderer", GL_RENDERER);
+//  printGLString("Extensions", GL_EXTENSIONS);
+
+#if __ANDROID__
+  gProgram = createProgram(gVertexShader, gFragmentShader);
+  if (!gProgram)
+  {
+    LOGE("Could not create program.");
+    return false;
+  }
+
+  gvPositionHandle = glGetAttribLocation(gProgram, "a_position");
+  gvTexCoordHandle = glGetAttribLocation(gProgram, "a_texCoord");
+  gvSamplerHandle = glGetAttribLocation(gProgram, "s_texture");
+
+  // Use tightly packed data
+  glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
+
+  // Generate a texture object
+  glGenTextures(1, &textureID);
+
+  glViewport(0, 0, w, h);
+#endif
+  return true;
+}
+
+void glcamera::renderFrame()
+{
+
+#if __ANDROID__
+  GLfloat vVertices[] = {-1.0f, 1.0f, 0.0f, // Position 0
+                         0.0f, 0.0f, // TexCoord 0
+                         -1.0f, -1.0f, 0.0f, // Position 1
+                         0.0f, img_h, // TexCoord 1
+                         1.0f, -1.0f, 0.0f, // Position 2
+                         img_w, img_h, // TexCoord 2
+                         1.0f, 1.0f, 0.0f, // Position 3
+                         img_w, 0.0f // TexCoord 3
+      };
+  GLushort indices[] = {0, 1, 2, 0, 2, 3};
+  GLsizei stride = 5 * sizeof(GLfloat); // 3 for position, 2 for texture
+
+  glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
+  checkGlError("glClearColor");
+
+  glClear(GL_DEPTH_BUFFER_BIT | GL_COLOR_BUFFER_BIT);
+  checkGlError("glClear");
+
+  if(nimg.empty())return;
+
+  glUseProgram(gProgram);
+  checkGlError("glUseProgram");
+
+  // Load the vertex position
+  glVertexAttribPointer(gvPositionHandle, 3, GL_FLOAT, GL_FALSE, stride, vVertices);
+  // Load the texture coordinate
+  glVertexAttribPointer(gvTexCoordHandle, 2, GL_FLOAT, GL_FALSE, stride, &vVertices[3]);
+
+  glEnableVertexAttribArray(gvPositionHandle);
+  glEnableVertexAttribArray(gvTexCoordHandle);
+
+  // Bind the texture
+  glActiveTexture(GL_TEXTURE0);
+  glBindTexture(GL_TEXTURE_2D, textureID);
+
+  // Set the sampler texture unit to 0
+  glUniform1i(gvSamplerHandle, 0);
+
+  glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_SHORT, indices);
+#endif
+  //checkGlError("glVertexAttribPointer");
+  //glEnableVertexAttribArray(gvPositionHandle);
+  //checkGlError("glEnableVertexAttribArray");
+  //glDrawArrays(GL_TRIANGLES, 0, 3);
+  //checkGlError("glDrawArrays");
+}
+
+void glcamera::init(int width, int height)
+{
+  newimage = false;
+  nimg = Mat();
+  setupGraphics(width, height);
+
+}
+
+void glcamera::step()
+{
+  if (newimage && !nimg.empty())
+  {
+
+    textureID = createSimpleTexture2D(textureID, nimg.ptr<unsigned char> (0), nimg.cols, nimg.rows, nimg.channels());
+    newimage = false;
+  }
+  renderFrame();
+
+}
+#define NEAREST_POW2(x)( std::ceil(std::log(x)/0.69315) )
+void glcamera::setTextureImage(const Mat& img)
+{
+  int p = NEAREST_POW2(img.cols/2); //subsample by 2
+  //int sz = std::pow(2, p);
+
+  // Size size(sz, sz);
+  Size size(256, 256);
+  img_w = 1;
+  img_h = 1;
+  if (nimg.cols != size.width)
+    LOGI_STREAM( "using texture of size: (" << size.width << " , " << size.height << ") image size is: (" << img.cols << " , " << img.rows << ")");
+  nimg.create(size, img.type());
+#if SUBREGION_NPO2
+  cv::Rect roi(0, 0, img.cols/2, img.rows/2);
+  cv::Mat nimg_sub = nimg(roi);
+  //img.copyTo(nimg_sub);
+  img_w = (img.cols/2)/float(sz);
+  img_h = (img.rows/2)/float(sz);
+  cv::resize(img,nimg_sub,nimg_sub.size(),0,0,CV_INTER_NN);
+#else
+  cv::resize(img, nimg, nimg.size(), 0, 0, CV_INTER_NN);
+#endif
+  newimage = true;
+}
+
+void glcamera::drawMatToGL(int idx, image_pool* pool)
+{
+
+  Mat img = pool->getImage(idx);
+
+  if (img.empty())
+    return; //no image at input_idx!
+
+  setTextureImage(img);
+
+}
+
+glcamera::glcamera() :
+  newimage(false)
+{
+  LOGI("glcamera constructor");
+}
+glcamera::~glcamera()
+{
+  LOGI("glcamera destructor");
+}
+
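
A note on NEAREST_POW2 above: 0.69315 is approximately ln 2, so the macro returns ceil(log2(x)), i.e. the exponent of the next power-of-two texture edge for the half-resolution image; setTextureImage currently ignores the computed value and always allocates a fixed 256x256 texture. A quick check of the arithmetic (illustrative only):

    #include <cmath>
    #include <cstdio>

    int main()
    {
        const int widths[] = {640, 800, 1024};
        for (int cols : widths) {
            int x = cols / 2;                              // the code subsamples by 2
            int p = (int)std::ceil(std::log(x) / 0.69315); // NEAREST_POW2(x)
            std::printf("cols=%d -> x=%d -> 2^%d = %d\n", cols, x, p, 1 << p);
        }
        return 0;
    }
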
diff --git a/android/android-opencv/jni/glcamera.i b/android/android-opencv/jni/glcamera.i
new file mode 100644 (file)
index 0000000..2fd2a53
--- /dev/null
@@ -0,0 +1,44 @@
+
+%typemap(javaimports) glcamera "
+/** a class for doing the native rendering of images
+this class renders using OpenGL ES 2.0, via the native NDK bindings.
+It is used by the GL2CameraViewer to do the rendering,
+and is inspired by the gl2 example in the NDK samples
+*/"
+
+
+
+%javamethodmodifiers glcamera::init"
+  /**  should be called onSurfaceChanged by the GLSurfaceView that is using this
+       *  as the drawing engine
+       * @param width the width of the surface view that this will be drawing to
+    * @param height the height of the surface view that this will be drawing to
+       *
+    */
+  public";
+  
+%javamethodmodifiers glcamera::step"
+  /**  should be called by GLSurfaceView.Renderer in the onDrawFrame method, as it
+  handles the rendering of the opengl scene, and requires that the opengl context be
+  valid.
+       *
+    */
+  public";
+%javamethodmodifiers glcamera::drawMatToGL"
+  /** copies an image from a pool and queues it for drawing in opengl.
+       *  this does transformation into power of two texture sizes
+       * @param idx the image index to copy
+    * @param pool the image_pool to look up the image from
+       *
+    */
+  public";
+  
+class glcamera {
+public:
+     void init(int width, int height);
+     void step();
+     void drawMatToGL(int idx, image_pool* pool);
+     void clear();
+};
+
diff --git a/android/android-opencv/jni/image_pool.cpp b/android/android-opencv/jni/image_pool.cpp
new file mode 100644 (file)
index 0000000..c7a6720
--- /dev/null
@@ -0,0 +1,130 @@
+#include "image_pool.h"
+
+#include "yuv420sp2rgb.h"
+
+#include "android_logger.h"
+
+#include <opencv2/imgproc/imgproc.hpp>
+
+#include <cstdlib>
+#include <jni.h>
+#ifdef __cplusplus
+extern "C"
+{
+#endif
+
+JNIEXPORT jint JNI_OnLoad(JavaVM* vm, void* reserved);
+//
+//JNIEXPORT jobject JNICALL Java_com_opencv_jni_opencvJNI_getBitmapBuffer(
+//             JNIEnv *jenv, jclass jcls, jlong jarg1, jobject jarg1_);
+
+
+JNIEXPORT void JNICALL Java_com_opencv_jni_opencvJNI_addYUVtoPool(JNIEnv *, jclass, jlong, jobject, jbyteArray, jint,
+    jint, jint, jboolean);
+
+#ifdef __cplusplus
+}
+#endif
+
+using namespace cv;
+
+JNIEXPORT jint JNI_OnLoad(JavaVM* vm, void* reserved)
+{
+  JNIEnv *env;
+  LOGI("JNI_OnLoad called for opencv");
+  return JNI_VERSION_1_4;
+}
+
+JNIEXPORT void JNICALL Java_com_opencv_jni_opencvJNI_addYUVtoPool(JNIEnv * env, jclass thiz, jlong ppool,
+    jobject _jpool, jbyteArray jbuffer, jint jidx,
+    jint jwidth, jint jheight, jboolean jgrey)
+{
+  int buff_height = jheight + (jheight / 2);
+  Size buff_size(jwidth, buff_height);
+  image_pool *pool = (image_pool *)ppool;
+
+  Mat mat = pool->getYUV(jidx);
+  //create is smart and only reallocates if the buffer size is different
+  mat.create(buff_size, CV_8UC1);
+  {
+    uchar* buff = mat.ptr<uchar> (0);
+    jsize sz = env->GetArrayLength(jbuffer);
+    //http://elliotth.blogspot.com/2007/03/optimizing-jni-array-access.html
+    env->GetByteArrayRegion(jbuffer, 0, sz, (jbyte*)buff);
+  }
+  pool->addYUVMat(jidx, mat);
+
+  Mat color;
+  if (jgrey)
+  {
+    Mat grey = pool->getGrey(jidx);
+    color = grey;
+  }
+  else
+  {
+    color = pool->getImage(jidx);
+    pool->convertYUVtoColor(jidx, color);
+  }
+  pool->addImage(jidx, color);
+}
+
+image_pool::image_pool()
+{
+
+}
+
+image_pool::~image_pool()
+{
+
+}
+
+Mat image_pool::getImage(int i)
+{
+  return imagesmap[i];
+}
+Mat image_pool::getGrey(int i)
+{
+  Mat tm = yuvImagesMap[i];
+  if (tm.empty())
+    return tm;
+  return tm(Range(0, tm.rows * (2.0f / 3)), Range::all());
+}
+Mat image_pool::getYUV(int i)
+{
+  return yuvImagesMap[i];
+}
+void image_pool::addYUVMat(int i, Mat mat)
+{
+  yuvImagesMap[i] = mat;
+}
+void image_pool::addImage(int i, Mat mat)
+{
+  imagesmap[i] = mat;
+}
+
+void image_pool::convertYUVtoColor(int i, cv::Mat& out)
+{
+  Mat yuv = getYUV(i);
+  if (yuv.empty())
+    return;
+  int width = yuv.cols;
+  int height = yuv.rows * (2.0f / 3);
+  out.create(height, width, CV_8UC3);
+  const unsigned char* buff = yuv.ptr<unsigned char> (0);
+  unsigned char* out_buff = out.ptr<unsigned char> (0);
+  color_convert_common(buff, buff + width * height, width, height, out_buff, false);
+}
+
+void copyMatToBuffer(char* buffer, const cv::Mat& mat)
+{
+  memcpy(buffer, mat.data, mat.rows * mat.cols * mat.step1());
+}
+void copyBufferToMat(cv::Mat& mat, const char* buffer)
+{
+  memcpy(mat.data, buffer, mat.rows * mat.cols * mat.step1());
+}
+
+void RGB2BGR(const Mat& in, Mat& out)
+{
+  cvtColor(in, out, CV_RGB2BGR);
+}
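
addYUVtoPool and getGrey above assume the YUV420sp layout delivered by the Android camera preview: a full-resolution luma (Y) plane followed by an interleaved chroma plane of half the height, so a w x h frame occupies w * (h + h/2) bytes and the grey image is simply the first h rows. A minimal sketch of that slicing (the helper names are illustrative, not part of this library):

    #include <opencv2/core/core.hpp>

    // Wrap a raw preview frame exactly as the pool stores it: one continuous
    // CV_8UC1 Mat covering the Y plane plus the half-height chroma plane.
    cv::Mat wrapPreviewFrame(unsigned char* data, int width, int height)
    {
        return cv::Mat(height + height / 2, width, CV_8UC1, data);
    }

    // Same slice image_pool::getGrey() takes: the top 2/3 of the rows are luma.
    cv::Mat greyFromYUV(const cv::Mat& yuv)
    {
        return yuv(cv::Range(0, yuv.rows * 2 / 3), cv::Range::all());
    }
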
diff --git a/android/android-opencv/jni/image_pool.i b/android/android-opencv/jni/image_pool.i
new file mode 100644 (file)
index 0000000..b81e64a
--- /dev/null
@@ -0,0 +1,51 @@
+
+
+%typemap(javaimports) image_pool "
+/** image_pool is used for keeping track of a pool of native images.  It stores images as cv::Mat's and
+references them by an index.  It allows one to get a pointer to an underlying mat, and handles memory deletion.*/"
+
+
+%javamethodmodifiers image_pool::getImage"
+  /** gets a pointer to a stored image, by an index.  If the index is new, returns a null pointer
+       * @param idx the index in the pool that is associated with a cv::Mat
+       * @return the pointer to a cv::Mat, null pointer if the given idx is novel
+    */
+  public";
+  
+  
+%javamethodmodifiers image_pool::deleteImage"
+  /** deletes the image from the pool
+       * @param idx the index in the pool that is associated with a cv::Mat
+    */
+  public";
+  
+  
+  
+%javamethodmodifiers addYUVtoPool"
+  /** adds a YUV frame to the pool at the given index
+       * @param idx the index in the pool that is associated with a cv::Mat
+    */
+  public";
+  
+%include "various.i"
+
+
+%apply (char* BYTE) { (char *data)}; //byte[] to char*
+
+
+%native (addYUVtoPool) void addYUVtoPool(image_pool* pool, char* data,int idx, int width, int height, bool grey);
+
+
+
+
+%feature("director") image_pool;
+class image_pool {
+public:
+       Mat getGrey(int i);
+       Mat getImage(int i);
+       void addImage(int i, Mat mat);
+       void convertYUVtoColor(int i, Mat& out);
+};
+
+void RGB2BGR(const Mat& in, Mat& out);
+
diff --git a/android/android-opencv/jni/include/Calibration.h b/android/android-opencv/jni/include/Calibration.h
new file mode 100644 (file)
index 0000000..6e0eef3
--- /dev/null
@@ -0,0 +1,54 @@
+/*
+ * Calibration.h
+ *
+ *  Created on: Jun 13, 2010
+ *      Author: ethan
+ */
+
+#ifndef PROCESSOR_H_
+#define PROCESSOR_H_
+
+#include <opencv2/core/core.hpp>
+#include <opencv2/features2d/features2d.hpp>
+#include <opencv2/highgui/highgui.hpp>
+#include <opencv2/imgproc/imgproc.hpp>
+#include <opencv2/calib3d/calib3d.hpp>
+
+#include <vector>
+
+#include "image_pool.h"
+
+#define DETECT_FAST 0
+#define DETECT_STAR 1
+#define DETECT_SURF 2
+
+class Calibration
+{
+public:
+
+  Calibration();
+  virtual ~Calibration();
+
+  bool detectAndDrawChessboard(int idx, image_pool* pool);
+
+  void resetChess();
+
+  int getNumberDetectedChessboards();
+
+  void calibrate(const char* filename);
+
+  void drawText(int idx, image_pool* pool, const char* text);
+
+  cv::Size patternsize;
+private:
+  std::vector<cv::KeyPoint> keypoints;
+
+  std::vector<std::vector<cv::Point2f> > imagepoints;
+
+  cv::Mat K;
+  cv::Mat distortion;
+  cv::Size imgsize;
+
+};
+
+#endif /* PROCESSOR_H_ */
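
A minimal sketch of how the Calibration class above might be driven from native code, assuming frames have already been pushed into an image_pool; the pattern size, frame loop and output path are illustrative assumptions, not part of this change:

    #include "Calibration.h"

    void runCalibration(image_pool* pool, int num_frames)
    {
      Calibration calib;
      calib.patternsize = cv::Size(6, 8);            // inner-corner count of the board (assumed)
      for (int idx = 0; idx < num_frames; idx++)
      {
        // draws detected corners back into the pooled image when a board is found
        calib.detectAndDrawChessboard(idx, pool);
      }
      if (calib.getNumberDetectedChessboards() > 3)  // threshold is an assumption
        calib.calibrate("/sdcard/camera.yml");       // output path is an assumption
    }
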
diff --git a/android/android-opencv/jni/include/android_logger.h b/android/android-opencv/jni/include/android_logger.h
new file mode 100644 (file)
index 0000000..09f8dc3
--- /dev/null
@@ -0,0 +1,19 @@
+#pragma once
+#include <iostream>
+#include <sstream>
+
+#define  LOG_TAG    "libopencv"
+#if ANDROID
+#include <android/log.h>
+#define  LOGI(...)  __android_log_print(ANDROID_LOG_INFO,LOG_TAG,__VA_ARGS__)
+#define  LOGE(...)  __android_log_print(ANDROID_LOG_ERROR,LOG_TAG,__VA_ARGS__)
+#else
+#include <cstdio>
+#define  LOGI(...)  do { fprintf(stdout, "info:%s:", LOG_TAG); fprintf(stdout, __VA_ARGS__); fprintf(stdout, "\n"); } while(0)
+#define  LOGE(...)  do { fprintf(stderr, "error:%s:", LOG_TAG); fprintf(stderr, __VA_ARGS__); fprintf(stderr, "\n"); } while(0)
+#endif
+
+#ifndef LOGI_STREAM
+#define  LOGI_STREAM(x)  {std::stringstream ss; ss << x; LOGI("%s",ss.str().c_str());}
+#endif
+#define  LOGE_STREAM(x)  {std::stringstream ss; ss << x; LOGE("%s",ss.str().c_str());}
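
A short usage sketch for the logging macros above (the messages are illustrative):

    #include "android_logger.h"

    void logExample(int width, int height)
    {
      LOGI("preview size %dx%d", width, height);     // printf-style arguments
      LOGE("failed to open camera");                 // routed to the Android error log, or stderr off-device
      LOGI_STREAM("pool holds " << 3 << " images");  // iostream-style formatting via a stringstream
    }
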
diff --git a/android/android-opencv/jni/include/glcamera.h b/android/android-opencv/jni/include/glcamera.h
new file mode 100644 (file)
index 0000000..e7c021a
--- /dev/null
@@ -0,0 +1,48 @@
+#ifndef GLCAMERA_H_
+#define GLCAMERA_H_
+#include <opencv2/core/core.hpp>
+
+#ifdef __ANDROID__
+#include <GLES2/gl2.h>
+#include <GLES2/gl2ext.h>
+#else
+#include <GL/gl.h>
+#include <GL/glu.h>
+#endif
+
+#include "image_pool.h"
+
+class glcamera
+{
+public:
+
+  glcamera();
+  ~glcamera();
+  void init(int width, int height);
+  void step();
+
+  void drawMatToGL(int idx, image_pool* pool);
+  void drawMatToGL(const cv::Mat& img);
+  void setTextureImage(const cv::Mat& img);
+
+  void clear();
+
+private:
+  GLuint createSimpleTexture2D(GLuint _textureid, GLubyte* pixels, int width, int height, int channels);
+  GLuint loadShader(GLenum shaderType, const char* pSource);
+  GLuint
+  createProgram(const char* pVertexSource, const char* pFragmentSource);
+  bool setupGraphics(int w, int h);
+  void renderFrame();
+  cv::Mat nimg;
+  bool newimage;
+  GLuint textureID;
+
+  GLuint gProgram;
+  GLuint gvPositionHandle;
+
+  GLuint gvTexCoordHandle;
+  GLuint gvSamplerHandle;
+  float img_w, img_h;
+};
+#endif
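
The header does not document the expected call order; a plausible sketch, assuming init() is called once when the GL surface is (re)created and step() performs the actual draw of the last uploaded image:

    #include "glcamera.h"

    void onSurfaceChanged(glcamera& cam, int w, int h)
    {
      cam.init(w, h);                // set up shaders and the texture for the new surface size
    }

    void onDrawFrame(glcamera& cam, image_pool* pool, int idx)
    {
      cam.drawMatToGL(idx, pool);    // upload the pooled image as the current texture
      cam.step();                    // render the textured quad
    }
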
diff --git a/android/android-opencv/jni/include/image_pool.h b/android/android-opencv/jni/include/image_pool.h
new file mode 100644 (file)
index 0000000..d32eb47
--- /dev/null
@@ -0,0 +1,51 @@
+#ifndef IMAGE_POOL_H_ANDROID_KDJFKJ
+#define IMAGE_POOL_H_ANDROID_KDJFKJ
+#include <opencv2/core/core.hpp>
+#include <map>
+
+
+
+class image_pool
+{
+
+public:
+  image_pool();
+  ~image_pool();
+  cv::Mat getImage(int i);
+  cv::Mat getGrey(int i);
+  cv::Mat getYUV(int i);
+
+  int getCount()
+  {
+    return imagesmap.size();
+  }
+
+  /** Adds a mat at the given index - will not do a deep copy, just images[i] = mat
+   *
+   */
+  void addImage(int i, cv::Mat mat);
+
+  /** this function stores the given matrix in the yuvImagesMap. Also,
+   * after this call getGrey will work, as the grey image is just the top
+   * two thirds of the YUV mat (the Y plane).
+   *
+   * \param i index to store yuv image at
+   * \param mat the yuv matrix to store
+   */
+  void addYUVMat(int i, cv::Mat mat);
+
+  void convertYUVtoColor(int i, cv::Mat& out);
+
+  //   int addYUV(uchar* buffer, int size, int width, int height, bool grey,int idx);
+  //
+  //   void getBitmap(int * outintarray, int size, int idx);
+private:
+  std::map<int, cv::Mat> imagesmap;
+  std::map<int, cv::Mat> yuvImagesMap;
+
+};
+
+void copyMatToBuffer(char* buffer, const cv::Mat& mat);
+void copyBufferToMat(cv::Mat& mat, const char* buffer);
+void RGB2BGR(const cv::Mat& in, cv::Mat& out);
+#endif
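
A minimal native-side sketch of the intended flow, mirroring what the SWIG-exposed addYUVtoPool does earlier in this change (the helper name is illustrative; the calls are the documented image_pool API):

    #include "image_pool.h"

    void processFrame(image_pool& pool, int idx, cv::Mat& yuv420sp, bool grey)
    {
      pool.addYUVMat(idx, yuv420sp);          // shallow copy; the Y plane is the top 2/3 of the rows
      cv::Mat color;
      if (grey)
        color = pool.getGrey(idx);            // single-channel view of the Y plane
      else
        pool.convertYUVtoColor(idx, color);   // full YUV -> RGB conversion
      pool.addImage(idx, color);
    }
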
diff --git a/android/android-opencv/jni/include/yuv2rgb.h b/android/android-opencv/jni/include/yuv2rgb.h
new file mode 100644 (file)
index 0000000..9b7535c
--- /dev/null
@@ -0,0 +1,147 @@
+/* YUV-> RGB conversion code.
+ *
+ * Copyright (C) 2008-9 Robin Watts (robin@wss.co.uk) for Pinknoise
+ * Productions Ltd.
+ *
+ * Licensed under the GNU GPL. If you need it under another license, contact
+ * me and ask.
+ *
+ *  This program is free software ; you can redistribute it and/or modify
+ *  it under the terms of the GNU General Public License as published by
+ *  the Free Software Foundation ; either version 2 of the License, or
+ *  (at your option) any later version.
+ *
+ *  This program is distributed in the hope that it will be useful,
+ *  but WITHOUT ANY WARRANTY ; without even the implied warranty of
+ *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ *  GNU General Public License for more details.
+ *
+ *  You should have received a copy of the GNU General Public License
+ *  along with this program ; if not, write to the Free Software
+ *  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+ *
+ */
+
+#ifndef YUV2RGB_H
+
+#define YUV2RGB_H
+
+/* Define these to something appropriate in your build */
+typedef unsigned int   uint32_t;
+typedef signed   int   int32_t;
+typedef unsigned short uint16_t;
+typedef unsigned char  uint8_t;
+
+extern const uint32_t yuv2rgb565_table[];
+extern const uint32_t yuv2bgr565_table[];
+
+void yuv420_2_rgb565(uint8_t  *dst_ptr,
+               const uint8_t  *y_ptr,
+               const uint8_t  *u_ptr,
+               const uint8_t  *v_ptr,
+                     int32_t   width,
+                     int32_t   height,
+                     int32_t   y_span,
+                     int32_t   uv_span,
+                     int32_t   dst_span,
+               const uint32_t *tables,
+                     int32_t   dither);
+
+void yuv422_2_rgb565(uint8_t  *dst_ptr,
+               const uint8_t  *y_ptr,
+               const uint8_t  *u_ptr,
+               const uint8_t  *v_ptr,
+                     int32_t   width,
+                     int32_t   height,
+                     int32_t   y_span,
+                     int32_t   uv_span,
+                     int32_t   dst_span,
+               const uint32_t *tables,
+                     int32_t   dither);
+
+void yuv444_2_rgb565(uint8_t  *dst_ptr,
+               const uint8_t  *y_ptr,
+               const uint8_t  *u_ptr,
+               const uint8_t  *v_ptr,
+                     int32_t   width,
+                     int32_t   height,
+                     int32_t   y_span,
+                     int32_t   uv_span,
+                     int32_t   dst_span,
+               const uint32_t *tables,
+                     int32_t   dither);
+
+void yuv420_2_rgb888(uint8_t  *dst_ptr,
+               const uint8_t  *y_ptr,
+               const uint8_t  *u_ptr,
+               const uint8_t  *v_ptr,
+                     int32_t   width,
+                     int32_t   height,
+                     int32_t   y_span,
+                     int32_t   uv_span,
+                     int32_t   dst_span,
+               const uint32_t *tables,
+                     int32_t   dither);
+
+void yuv422_2_rgb888(uint8_t  *dst_ptr,
+               const uint8_t  *y_ptr,
+               const uint8_t  *u_ptr,
+               const uint8_t  *v_ptr,
+                     int32_t   width,
+                     int32_t   height,
+                     int32_t   y_span,
+                     int32_t   uv_span,
+                     int32_t   dst_span,
+               const uint32_t *tables,
+                     int32_t   dither);
+
+void yuv444_2_rgb888(uint8_t  *dst_ptr,
+               const uint8_t  *y_ptr,
+               const uint8_t  *u_ptr,
+               const uint8_t  *v_ptr,
+                     int32_t   width,
+                     int32_t   height,
+                     int32_t   y_span,
+                     int32_t   uv_span,
+                     int32_t   dst_span,
+               const uint32_t *tables,
+                     int32_t   dither);
+
+void yuv420_2_rgb8888(uint8_t  *dst_ptr,
+                const uint8_t  *y_ptr,
+                const uint8_t  *u_ptr,
+                const uint8_t  *v_ptr,
+                      int32_t   width,
+                      int32_t   height,
+                      int32_t   y_span,
+                      int32_t   uv_span,
+                      int32_t   dst_span,
+                const uint32_t *tables,
+                      int32_t   dither);
+
+void yuv422_2_rgb8888(uint8_t  *dst_ptr,
+                const uint8_t  *y_ptr,
+                const uint8_t  *u_ptr,
+                const uint8_t  *v_ptr,
+                      int32_t   width,
+                      int32_t   height,
+                      int32_t   y_span,
+                      int32_t   uv_span,
+                      int32_t   dst_span,
+                const uint32_t *tables,
+                      int32_t   dither);
+
+void yuv444_2_rgb8888(uint8_t  *dst_ptr,
+                const uint8_t  *y_ptr,
+                const uint8_t  *u_ptr,
+                const uint8_t  *v_ptr,
+                      int32_t   width,
+                      int32_t   height,
+                      int32_t   y_span,
+                      int32_t   uv_span,
+                      int32_t   dst_span,
+                const uint32_t *tables,
+                      int32_t   dither);
+
+
+#endif /* YUV2RGB_H */
diff --git a/android/android-opencv/jni/include/yuv420sp2rgb.h b/android/android-opencv/jni/include/yuv420sp2rgb.h
new file mode 100644 (file)
index 0000000..fcfc49c
--- /dev/null
@@ -0,0 +1,17 @@
+#ifndef YUV420SP2RGB_H
+#define YUV420SP2RGB_H
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+void color_convert_common(
+    const unsigned char *pY, const unsigned char *pUV,
+    int width, int height, unsigned char *buffer,
+    int grey);
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
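
image_pool::convertYUVtoColor earlier in this change shows the calling convention: pY points at the Y plane, the interleaved chroma plane starts width*height bytes later, and buffer receives width*height*3 bytes of RGB. A thin wrapper along those lines (the wrapper name is illustrative):

    #include "yuv420sp2rgb.h"

    // yuv: full YUV420sp frame (Y plane followed by the interleaved chroma plane)
    // rgb: caller-allocated buffer of width * height * 3 bytes
    void yuv420sp_to_rgb(const unsigned char* yuv, int width, int height, unsigned char* rgb)
    {
      const unsigned char* pY  = yuv;
      const unsigned char* pUV = yuv + width * height;   // chroma plane follows the luma plane
      color_convert_common(pY, pUV, width, height, rgb, 0 /* grey = false */);
    }
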
diff --git a/android/android-opencv/jni/nocopy.i b/android/android-opencv/jni/nocopy.i
new file mode 100644 (file)
index 0000000..23e685f
--- /dev/null
@@ -0,0 +1,36 @@
+/* 
+ * int *INTARRAY  typemaps. 
+ * These are input typemaps for mapping a Java int[] array to a C int array.
+ * Note that as a Java array is used and thus passed by reference, the C routine 
+ * can return data to Java via the parameter.
+ *
+ * Example usage wrapping:
+ *   void foo(int *INTARRAY, int INTARRAYSIZE);
+ *  
+ * Java usage:
+ *   int b[] = new int[20];
+ *   modulename.foo(b);
+ */
+
+%typemap(in) (int *INTARRAY, int INTARRAYSIZE) {
+    $1 = (int *) JCALL2(GetIntArrayElements, jenv, $input, 0); 
+    jsize sz = JCALL1(GetArrayLength, jenv, $input);
+    $2 = (int)sz;
+}
+
+%typemap(argout) (int *INTARRAY, int INTARRAYSIZE) {
+    JCALL3(ReleaseIntArrayElements, jenv, $input, (jint *) $1, 0); 
+}
+
+
+/* Prevent default freearg typemap from being used */
+%typemap(freearg) (int *INTARRAY, int INTARRAYSIZE) ""
+
+%typemap(jni) (int *INTARRAY, int INTARRAYSIZE) "jintArray"
+%typemap(jtype) (int *INTARRAY, int INTARRAYSIZE) "int[]"
+%typemap(jstype) (int *INTARRAY, int INTARRAYSIZE) "int[]"
+%typemap(javain) (int *INTARRAY, int INTARRAYSIZE) "$javainput"
+
+
+
+
diff --git a/android/android-opencv/jni/yuv2rgb16tab.c b/android/android-opencv/jni/yuv2rgb16tab.c
new file mode 100644 (file)
index 0000000..f9f6a14
--- /dev/null
@@ -0,0 +1,802 @@
+/* YUV-> RGB conversion code.
+ *
+ * Copyright (C) 2008-9 Robin Watts (robin@wss.co.uk) for Pinknoise
+ * Productions Ltd.
+ *
+ * Licensed under the GNU GPL. If you need it under another license, contact
+ * me and ask.
+ *
+ *  This program is free software ; you can redistribute it and/or modify
+ *  it under the terms of the GNU General Public License as published by
+ *  the Free Software Foundation ; either version 2 of the License, or
+ *  (at your option) any later version.
+ *
+ *  This program is distributed in the hope that it will be useful,
+ *  but WITHOUT ANY WARRANTY ; without even the implied warranty of
+ *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ *  GNU General Public License for more details.
+ *
+ *  You should have received a copy of the GNU General Public License
+ *  along with this program ; if not, write to the Free Software
+ *  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+ */
+
+/* For BREW or Symbian you might need to make this static const rather than
+ * just const, and introduce a function to get the address. */
+
+#include "yuv2rgb.h"
+
+const uint32_t yuv2rgb565_table[256*3] =
+{
+       /* y_table */
+        0x7FFFFFEDU,
+        0x7FFFFFEFU,
+        0x7FFFFFF0U,
+        0x7FFFFFF1U,
+        0x7FFFFFF2U,
+        0x7FFFFFF3U,
+        0x7FFFFFF4U,
+        0x7FFFFFF6U,
+        0x7FFFFFF7U,
+        0x7FFFFFF8U,
+        0x7FFFFFF9U,
+        0x7FFFFFFAU,
+        0x7FFFFFFBU,
+        0x7FFFFFFDU,
+        0x7FFFFFFEU,
+        0x7FFFFFFFU,
+        0x80000000U,
+        0x80400801U,
+        0x80A01002U,
+        0x80E01803U,
+        0x81202805U,
+        0x81803006U,
+        0x81C03807U,
+        0x82004008U,
+        0x82604809U,
+        0x82A0500AU,
+        0x82E0600CU,
+        0x8340680DU,
+        0x8380700EU,
+        0x83C0780FU,
+        0x84208010U,
+        0x84608811U,
+        0x84A09813U,
+        0x8500A014U,
+        0x8540A815U,
+        0x8580B016U,
+        0x85E0B817U,
+        0x8620C018U,
+        0x8660D01AU,
+        0x86C0D81BU,
+        0x8700E01CU,
+        0x8740E81DU,
+        0x87A0F01EU,
+        0x87E0F81FU,
+        0x88210821U,
+        0x88811022U,
+        0x88C11823U,
+        0x89012024U,
+        0x89412825U,
+        0x89A13026U,
+        0x89E14028U,
+        0x8A214829U,
+        0x8A81502AU,
+        0x8AC1582BU,
+        0x8B01602CU,
+        0x8B61682DU,
+        0x8BA1782FU,
+        0x8BE18030U,
+        0x8C418831U,
+        0x8C819032U,
+        0x8CC19833U,
+        0x8D21A034U,
+        0x8D61B036U,
+        0x8DA1B837U,
+        0x8E01C038U,
+        0x8E41C839U,
+        0x8E81D03AU,
+        0x8EE1D83BU,
+        0x8F21E83DU,
+        0x8F61F03EU,
+        0x8FC1F83FU,
+        0x90020040U,
+        0x90420841U,
+        0x90A21042U,
+        0x90E22044U,
+        0x91222845U,
+        0x91823046U,
+        0x91C23847U,
+        0x92024048U,
+        0x92624849U,
+        0x92A2504AU,
+        0x92E2604CU,
+        0x9342684DU,
+        0x9382704EU,
+        0x93C2784FU,
+        0x94228050U,
+        0x94628851U,
+        0x94A29853U,
+        0x9502A054U,
+        0x9542A855U,
+        0x9582B056U,
+        0x95E2B857U,
+        0x9622C058U,
+        0x9662D05AU,
+        0x96C2D85BU,
+        0x9702E05CU,
+        0x9742E85DU,
+        0x97A2F05EU,
+        0x97E2F85FU,
+        0x98230861U,
+        0x98831062U,
+        0x98C31863U,
+        0x99032064U,
+        0x99632865U,
+        0x99A33066U,
+        0x99E34068U,
+        0x9A434869U,
+        0x9A83506AU,
+        0x9AC3586BU,
+        0x9B23606CU,
+        0x9B63686DU,
+        0x9BA3786FU,
+        0x9BE38070U,
+        0x9C438871U,
+        0x9C839072U,
+        0x9CC39873U,
+        0x9D23A074U,
+        0x9D63B076U,
+        0x9DA3B877U,
+        0x9E03C078U,
+        0x9E43C879U,
+        0x9E83D07AU,
+        0x9EE3D87BU,
+        0x9F23E87DU,
+        0x9F63F07EU,
+        0x9FC3F87FU,
+        0xA0040080U,
+        0xA0440881U,
+        0xA0A41082U,
+        0xA0E42084U,
+        0xA1242885U,
+        0xA1843086U,
+        0xA1C43887U,
+        0xA2044088U,
+        0xA2644889U,
+        0xA2A4588BU,
+        0xA2E4608CU,
+        0xA344688DU,
+        0xA384708EU,
+        0xA3C4788FU,
+        0xA4248090U,
+        0xA4649092U,
+        0xA4A49893U,
+        0xA504A094U,
+        0xA544A895U,
+        0xA584B096U,
+        0xA5E4B897U,
+        0xA624C098U,
+        0xA664D09AU,
+        0xA6C4D89BU,
+        0xA704E09CU,
+        0xA744E89DU,
+        0xA7A4F09EU,
+        0xA7E4F89FU,
+        0xA82508A1U,
+        0xA88510A2U,
+        0xA8C518A3U,
+        0xA90520A4U,
+        0xA96528A5U,
+        0xA9A530A6U,
+        0xA9E540A8U,
+        0xAA4548A9U,
+        0xAA8550AAU,
+        0xAAC558ABU,
+        0xAB2560ACU,
+        0xAB6568ADU,
+        0xABA578AFU,
+        0xAC0580B0U,
+        0xAC4588B1U,
+        0xAC8590B2U,
+        0xACE598B3U,
+        0xAD25A0B4U,
+        0xAD65B0B6U,
+        0xADA5B8B7U,
+        0xAE05C0B8U,
+        0xAE45C8B9U,
+        0xAE85D0BAU,
+        0xAEE5D8BBU,
+        0xAF25E8BDU,
+        0xAF65F0BEU,
+        0xAFC5F8BFU,
+        0xB00600C0U,
+        0xB04608C1U,
+        0xB0A610C2U,
+        0xB0E620C4U,
+        0xB12628C5U,
+        0xB18630C6U,
+        0xB1C638C7U,
+        0xB20640C8U,
+        0xB26648C9U,
+        0xB2A658CBU,
+        0xB2E660CCU,
+        0xB34668CDU,
+        0xB38670CEU,
+        0xB3C678CFU,
+        0xB42680D0U,
+        0xB46690D2U,
+        0xB4A698D3U,
+        0xB506A0D4U,
+        0xB546A8D5U,
+        0xB586B0D6U,
+        0xB5E6B8D7U,
+        0xB626C8D9U,
+        0xB666D0DAU,
+        0xB6C6D8DBU,
+        0xB706E0DCU,
+        0xB746E8DDU,
+        0xB7A6F0DEU,
+        0xB7E6F8DFU,
+        0xB82708E1U,
+        0xB88710E2U,
+        0xB8C718E3U,
+        0xB90720E4U,
+        0xB96728E5U,
+        0xB9A730E6U,
+        0xB9E740E8U,
+        0xBA4748E9U,
+        0xBA8750EAU,
+        0xBAC758EBU,
+        0xBB2760ECU,
+        0xBB6768EDU,
+        0xBBA778EFU,
+        0xBC0780F0U,
+        0xBC4788F1U,
+        0xBC8790F2U,
+        0xBCE798F3U,
+        0xBD27A0F4U,
+        0xBD67B0F6U,
+        0xBDC7B8F7U,
+        0xBE07C0F8U,
+        0xBE47C8F9U,
+        0xBEA7D0FAU,
+        0xBEE7D8FBU,
+        0xBF27E8FDU,
+        0xBF87F0FEU,
+        0xBFC7F8FFU,
+        0xC0080100U,
+        0xC0480901U,
+        0xC0A81102U,
+        0xC0E82104U,
+        0xC0E82104U,
+        0xC0E82104U,
+        0xC0E82104U,
+        0xC0E82104U,
+        0xC0E82104U,
+        0xC0E82104U,
+        0xC0E82104U,
+        0xC0E82104U,
+        0xC0E82104U,
+        0xC0E82104U,
+        0xC0E82104U,
+        0xC0E82104U,
+        0xC0E82104U,
+        0xC0E82104U,
+        0xC0E82104U,
+        0xC0E82104U,
+       /* u_table */
+        0x0C400103U,
+        0x0C200105U,
+        0x0C200107U,
+        0x0C000109U,
+        0x0BE0010BU,
+        0x0BC0010DU,
+        0x0BA0010FU,
+        0x0BA00111U,
+        0x0B800113U,
+        0x0B600115U,
+        0x0B400117U,
+        0x0B400119U,
+        0x0B20011BU,
+        0x0B00011DU,
+        0x0AE0011FU,
+        0x0AE00121U,
+        0x0AC00123U,
+        0x0AA00125U,
+        0x0A800127U,
+        0x0A600129U,
+        0x0A60012BU,
+        0x0A40012DU,
+        0x0A20012FU,
+        0x0A000131U,
+        0x0A000132U,
+        0x09E00134U,
+        0x09C00136U,
+        0x09A00138U,
+        0x09A0013AU,
+        0x0980013CU,
+        0x0960013EU,
+        0x09400140U,
+        0x09400142U,
+        0x09200144U,
+        0x09000146U,
+        0x08E00148U,
+        0x08C0014AU,
+        0x08C0014CU,
+        0x08A0014EU,
+        0x08800150U,
+        0x08600152U,
+        0x08600154U,
+        0x08400156U,
+        0x08200158U,
+        0x0800015AU,
+        0x0800015CU,
+        0x07E0015EU,
+        0x07C00160U,
+        0x07A00162U,
+        0x07A00164U,
+        0x07800166U,
+        0x07600168U,
+        0x0740016AU,
+        0x0720016CU,
+        0x0720016EU,
+        0x07000170U,
+        0x06E00172U,
+        0x06C00174U,
+        0x06C00176U,
+        0x06A00178U,
+        0x0680017AU,
+        0x0660017CU,
+        0x0660017EU,
+        0x06400180U,
+        0x06200182U,
+        0x06000184U,
+        0x05E00185U,
+        0x05E00187U,
+        0x05C00189U,
+        0x05A0018BU,
+        0x0580018DU,
+        0x0580018FU,
+        0x05600191U,
+        0x05400193U,
+        0x05200195U,
+        0x05200197U,
+        0x05000199U,
+        0x04E0019BU,
+        0x04C0019DU,
+        0x04C0019FU,
+        0x04A001A1U,
+        0x048001A3U,
+        0x046001A5U,
+        0x044001A7U,
+        0x044001A9U,
+        0x042001ABU,
+        0x040001ADU,
+        0x03E001AFU,
+        0x03E001B1U,
+        0x03C001B3U,
+        0x03A001B5U,
+        0x038001B7U,
+        0x038001B9U,
+        0x036001BBU,
+        0x034001BDU,
+        0x032001BFU,
+        0x032001C1U,
+        0x030001C3U,
+        0x02E001C5U,
+        0x02C001C7U,
+        0x02A001C9U,
+        0x02A001CBU,
+        0x028001CDU,
+        0x026001CFU,
+        0x024001D1U,
+        0x024001D3U,
+        0x022001D5U,
+        0x020001D7U,
+        0x01E001D8U,
+        0x01E001DAU,
+        0x01C001DCU,
+        0x01A001DEU,
+        0x018001E0U,
+        0x016001E2U,
+        0x016001E4U,
+        0x014001E6U,
+        0x012001E8U,
+        0x010001EAU,
+        0x010001ECU,
+        0x00E001EEU,
+        0x00C001F0U,
+        0x00A001F2U,
+        0x00A001F4U,
+        0x008001F6U,
+        0x006001F8U,
+        0x004001FAU,
+        0x004001FCU,
+        0x002001FEU,
+        0x00000200U,
+        0xFFE00202U,
+        0xFFC00204U,
+        0xFFC00206U,
+        0xFFA00208U,
+        0xFF80020AU,
+        0xFF60020CU,
+        0xFF60020EU,
+        0xFF400210U,
+        0xFF200212U,
+        0xFF000214U,
+        0xFF000216U,
+        0xFEE00218U,
+        0xFEC0021AU,
+        0xFEA0021CU,
+        0xFEA0021EU,
+        0xFE800220U,
+        0xFE600222U,
+        0xFE400224U,
+        0xFE200226U,
+        0xFE200228U,
+        0xFE000229U,
+        0xFDE0022BU,
+        0xFDC0022DU,
+        0xFDC0022FU,
+        0xFDA00231U,
+        0xFD800233U,
+        0xFD600235U,
+        0xFD600237U,
+        0xFD400239U,
+        0xFD20023BU,
+        0xFD00023DU,
+        0xFCE0023FU,
+        0xFCE00241U,
+        0xFCC00243U,
+        0xFCA00245U,
+        0xFC800247U,
+        0xFC800249U,
+        0xFC60024BU,
+        0xFC40024DU,
+        0xFC20024FU,
+        0xFC200251U,
+        0xFC000253U,
+        0xFBE00255U,
+        0xFBC00257U,
+        0xFBC00259U,
+        0xFBA0025BU,
+        0xFB80025DU,
+        0xFB60025FU,
+        0xFB400261U,
+        0xFB400263U,
+        0xFB200265U,
+        0xFB000267U,
+        0xFAE00269U,
+        0xFAE0026BU,
+        0xFAC0026DU,
+        0xFAA0026FU,
+        0xFA800271U,
+        0xFA800273U,
+        0xFA600275U,
+        0xFA400277U,
+        0xFA200279U,
+        0xFA20027BU,
+        0xFA00027CU,
+        0xF9E0027EU,
+        0xF9C00280U,
+        0xF9A00282U,
+        0xF9A00284U,
+        0xF9800286U,
+        0xF9600288U,
+        0xF940028AU,
+        0xF940028CU,
+        0xF920028EU,
+        0xF9000290U,
+        0xF8E00292U,
+        0xF8E00294U,
+        0xF8C00296U,
+        0xF8A00298U,
+        0xF880029AU,
+        0xF860029CU,
+        0xF860029EU,
+        0xF84002A0U,
+        0xF82002A2U,
+        0xF80002A4U,
+        0xF80002A6U,
+        0xF7E002A8U,
+        0xF7C002AAU,
+        0xF7A002ACU,
+        0xF7A002AEU,
+        0xF78002B0U,
+        0xF76002B2U,
+        0xF74002B4U,
+        0xF74002B6U,
+        0xF72002B8U,
+        0xF70002BAU,
+        0xF6E002BCU,
+        0xF6C002BEU,
+        0xF6C002C0U,
+        0xF6A002C2U,
+        0xF68002C4U,
+        0xF66002C6U,
+        0xF66002C8U,
+        0xF64002CAU,
+        0xF62002CCU,
+        0xF60002CEU,
+        0xF60002CFU,
+        0xF5E002D1U,
+        0xF5C002D3U,
+        0xF5A002D5U,
+        0xF5A002D7U,
+        0xF58002D9U,
+        0xF56002DBU,
+        0xF54002DDU,
+        0xF52002DFU,
+        0xF52002E1U,
+        0xF50002E3U,
+        0xF4E002E5U,
+        0xF4C002E7U,
+        0xF4C002E9U,
+        0xF4A002EBU,
+        0xF48002EDU,
+        0xF46002EFU,
+        0xF46002F1U,
+        0xF44002F3U,
+        0xF42002F5U,
+        0xF40002F7U,
+        0xF3E002F9U,
+        0xF3E002FBU,
+       /* v_table */
+        0x1A09A000U,
+        0x19E9A800U,
+        0x19A9B800U,
+        0x1969C800U,
+        0x1949D000U,
+        0x1909E000U,
+        0x18C9E800U,
+        0x18A9F800U,
+        0x186A0000U,
+        0x182A1000U,
+        0x180A2000U,
+        0x17CA2800U,
+        0x17AA3800U,
+        0x176A4000U,
+        0x172A5000U,
+        0x170A6000U,
+        0x16CA6800U,
+        0x168A7800U,
+        0x166A8000U,
+        0x162A9000U,
+        0x160AA000U,
+        0x15CAA800U,
+        0x158AB800U,
+        0x156AC000U,
+        0x152AD000U,
+        0x14EAE000U,
+        0x14CAE800U,
+        0x148AF800U,
+        0x146B0000U,
+        0x142B1000U,
+        0x13EB2000U,
+        0x13CB2800U,
+        0x138B3800U,
+        0x134B4000U,
+        0x132B5000U,
+        0x12EB6000U,
+        0x12CB6800U,
+        0x128B7800U,
+        0x124B8000U,
+        0x122B9000U,
+        0x11EBA000U,
+        0x11ABA800U,
+        0x118BB800U,
+        0x114BC000U,
+        0x112BD000U,
+        0x10EBE000U,
+        0x10ABE800U,
+        0x108BF800U,
+        0x104C0000U,
+        0x100C1000U,
+        0x0FEC2000U,
+        0x0FAC2800U,
+        0x0F8C3800U,
+        0x0F4C4000U,
+        0x0F0C5000U,
+        0x0EEC5800U,
+        0x0EAC6800U,
+        0x0E6C7800U,
+        0x0E4C8000U,
+        0x0E0C9000U,
+        0x0DEC9800U,
+        0x0DACA800U,
+        0x0D6CB800U,
+        0x0D4CC000U,
+        0x0D0CD000U,
+        0x0CCCD800U,
+        0x0CACE800U,
+        0x0C6CF800U,
+        0x0C4D0000U,
+        0x0C0D1000U,
+        0x0BCD1800U,
+        0x0BAD2800U,
+        0x0B6D3800U,
+        0x0B2D4000U,
+        0x0B0D5000U,
+        0x0ACD5800U,
+        0x0AAD6800U,
+        0x0A6D7800U,
+        0x0A2D8000U,
+        0x0A0D9000U,
+        0x09CD9800U,
+        0x098DA800U,
+        0x096DB800U,
+        0x092DC000U,
+        0x090DD000U,
+        0x08CDD800U,
+        0x088DE800U,
+        0x086DF800U,
+        0x082E0000U,
+        0x07EE1000U,
+        0x07CE1800U,
+        0x078E2800U,
+        0x076E3800U,
+        0x072E4000U,
+        0x06EE5000U,
+        0x06CE5800U,
+        0x068E6800U,
+        0x064E7800U,
+        0x062E8000U,
+        0x05EE9000U,
+        0x05CE9800U,
+        0x058EA800U,
+        0x054EB800U,
+        0x052EC000U,
+        0x04EED000U,
+        0x04AED800U,
+        0x048EE800U,
+        0x044EF000U,
+        0x042F0000U,
+        0x03EF1000U,
+        0x03AF1800U,
+        0x038F2800U,
+        0x034F3000U,
+        0x030F4000U,
+        0x02EF5000U,
+        0x02AF5800U,
+        0x028F6800U,
+        0x024F7000U,
+        0x020F8000U,
+        0x01EF9000U,
+        0x01AF9800U,
+        0x016FA800U,
+        0x014FB000U,
+        0x010FC000U,
+        0x00EFD000U,
+        0x00AFD800U,
+        0x006FE800U,
+        0x004FF000U,
+        0x00100000U,
+        0xFFD01000U,
+        0xFFB01800U,
+        0xFF702800U,
+        0xFF303000U,
+        0xFF104000U,
+        0xFED05000U,
+        0xFEB05800U,
+        0xFE706800U,
+        0xFE307000U,
+        0xFE108000U,
+        0xFDD09000U,
+        0xFD909800U,
+        0xFD70A800U,
+        0xFD30B000U,
+        0xFD10C000U,
+        0xFCD0D000U,
+        0xFC90D800U,
+        0xFC70E800U,
+        0xFC30F000U,
+        0xFBF10000U,
+        0xFBD11000U,
+        0xFB911800U,
+        0xFB712800U,
+        0xFB313000U,
+        0xFAF14000U,
+        0xFAD14800U,
+        0xFA915800U,
+        0xFA516800U,
+        0xFA317000U,
+        0xF9F18000U,
+        0xF9D18800U,
+        0xF9919800U,
+        0xF951A800U,
+        0xF931B000U,
+        0xF8F1C000U,
+        0xF8B1C800U,
+        0xF891D800U,
+        0xF851E800U,
+        0xF831F000U,
+        0xF7F20000U,
+        0xF7B20800U,
+        0xF7921800U,
+        0xF7522800U,
+        0xF7123000U,
+        0xF6F24000U,
+        0xF6B24800U,
+        0xF6925800U,
+        0xF6526800U,
+        0xF6127000U,
+        0xF5F28000U,
+        0xF5B28800U,
+        0xF5729800U,
+        0xF552A800U,
+        0xF512B000U,
+        0xF4F2C000U,
+        0xF4B2C800U,
+        0xF472D800U,
+        0xF452E800U,
+        0xF412F000U,
+        0xF3D30000U,
+        0xF3B30800U,
+        0xF3731800U,
+        0xF3532800U,
+        0xF3133000U,
+        0xF2D34000U,
+        0xF2B34800U,
+        0xF2735800U,
+        0xF2336800U,
+        0xF2137000U,
+        0xF1D38000U,
+        0xF1B38800U,
+        0xF1739800U,
+        0xF133A800U,
+        0xF113B000U,
+        0xF0D3C000U,
+        0xF093C800U,
+        0xF073D800U,
+        0xF033E000U,
+        0xF013F000U,
+        0xEFD40000U,
+        0xEF940800U,
+        0xEF741800U,
+        0xEF342000U,
+        0xEEF43000U,
+        0xEED44000U,
+        0xEE944800U,
+        0xEE745800U,
+        0xEE346000U,
+        0xEDF47000U,
+        0xEDD48000U,
+        0xED948800U,
+        0xED549800U,
+        0xED34A000U,
+        0xECF4B000U,
+        0xECD4C000U,
+        0xEC94C800U,
+        0xEC54D800U,
+        0xEC34E000U,
+        0xEBF4F000U,
+        0xEBB50000U,
+        0xEB950800U,
+        0xEB551800U,
+        0xEB352000U,
+        0xEAF53000U,
+        0xEAB54000U,
+        0xEA954800U,
+        0xEA555800U,
+        0xEA156000U,
+        0xE9F57000U,
+        0xE9B58000U,
+        0xE9958800U,
+        0xE9559800U,
+        0xE915A000U,
+        0xE8F5B000U,
+        0xE8B5C000U,
+        0xE875C800U,
+        0xE855D800U,
+        0xE815E000U,
+        0xE7F5F000U,
+        0xE7B60000U,
+        0xE7760800U,
+        0xE7561800U,
+        0xE7162000U,
+        0xE6D63000U,
+        0xE6B64000U,
+        0xE6764800U,
+        0xE6365800U
+};
diff --git a/android/android-opencv/jni/yuv2rgb_neon.c b/android/android-opencv/jni/yuv2rgb_neon.c
new file mode 100644 (file)
index 0000000..a845858
--- /dev/null
@@ -0,0 +1,201 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+#include "yuv420sp2rgb.h"
+#include <arm_neon.h>
+#include <stdlib.h>
+#include <string.h> /* for memcpy */
+
+/* this source file should only be compiled by Android.mk when targeting
+ * the armeabi-v7a ABI, and should be built in NEON mode
+ */
+void fir_filter_neon_intrinsics(short *output, const short* input, const short* kernel, int width, int kernelSize)
+{
+#if 1
+  int nn, offset = -kernelSize / 2;
+
+  for (nn = 0; nn < width; nn++)
+  {
+    int mm, sum = 0;
+    int32x4_t sum_vec = vdupq_n_s32(0);
+    for (mm = 0; mm < kernelSize / 4; mm++)
+    {
+      int16x4_t kernel_vec = vld1_s16(kernel + mm * 4);
+      int16x4_t input_vec = vld1_s16(input + (nn + offset + mm * 4));
+      sum_vec = vmlal_s16(sum_vec, kernel_vec, input_vec);
+    }
+
+    sum += vgetq_lane_s32(sum_vec, 0);
+    sum += vgetq_lane_s32(sum_vec, 1);
+    sum += vgetq_lane_s32(sum_vec, 2);
+    sum += vgetq_lane_s32(sum_vec, 3);
+
+    if (kernelSize & 3)
+    {
+      for (mm = kernelSize - (kernelSize & 3); mm < kernelSize; mm++)
+        sum += kernel[mm] * input[nn + offset + mm];
+    }
+
+    output[nn] = (short)((sum + 0x8000) >> 16);
+  }
+#else /* for comparison purposes only */
+  int nn, offset = -kernelSize/2;
+  for (nn = 0; nn < width; nn++)
+  {
+    int sum = 0;
+    int mm;
+    for (mm = 0; mm < kernelSize; mm++)
+    {
+      sum += kernel[mm]*input[nn+offset+mm];
+    }
+    output[nn] = (short)((sum + 0x8000) >> 16);
+  }
+#endif
+}
+
+/*
+ YUV 4:2:0 image with a plane of 8 bit Y samples followed by an interleaved
+ U/V plane containing 8 bit 2x2 subsampled chroma samples.
+ except the interleave order of U and V is reversed.
+
+ H V
+ Y Sample Period      1 1
+ U (Cb) Sample Period 2 2
+ V (Cr) Sample Period 2 2
+ */
+
+/*
+ size of a char:
+ find . -name limits.h -exec grep CHAR_BIT {} \;
+ */
+
+#ifndef max
+#define max(a,b) ({typeof(a) _a = (a); typeof(b) _b = (b); _a > _b ? _a : _b; })
+#define min(a,b) ({typeof(a) _a = (a); typeof(b) _b = (b); _a < _b ? _a : _b; })
+#endif
+
+#define bytes_per_pixel 2
+#define LOAD_Y(i,j) (pY + i * width + j)
+#define LOAD_V(i,j) (pUV + (i / 2) * width + bytes_per_pixel * (j / 2))
+#define LOAD_U(i,j) (pUV + (i / 2) * width + bytes_per_pixel * (j / 2)+1)
+
+const uint8_t ZEROS[8] = {220,220, 220, 220, 220, 220, 220, 220};
+const uint8_t Y_SUBS[8] = {16, 16, 16, 16, 16, 16, 16, 16};
+const uint8_t UV_SUBS[8] = {128, 128, 128, 128, 128, 128, 128, 128};
+
+const uint32_t UV_MULS[] = {833, 400, 833, 400};
+
+void color_convert_common(const unsigned char *pY, const unsigned char *pUV, int width, int height, unsigned char *buffer, int grey)
+{
+
+  int i, j;
+  int nR, nG, nB;
+  int nY, nU, nV;
+  unsigned char *out = buffer;
+  int offset = 0;
+
+  uint8x8_t Y_SUBvec = vld1_u8(Y_SUBS);
+  uint8x8_t UV_SUBvec = vld1_u8(UV_SUBS); // v,u,v,u v,u,v,u
+  uint32x4_t UV_MULSvec = vld1q_u32(UV_MULS);
+  uint8x8_t ZEROSvec =vld1_u8(ZEROS);
+
+  uint32_t UVvec_int[8];
+  if (grey)
+  {
+    memcpy(out, pY, width * height * sizeof(unsigned char));
+  }
+  else
+    // YUV 4:2:0
+    for (i = 0; i < height; i++)
+    {
+      for (j = 0; j < width; j += 8)
+      {
+        //        nY = *(pY + i * width + j);
+        //        nV = *(pUV + (i / 2) * width + bytes_per_pixel * (j / 2));
+        //        nU = *(pUV + (i / 2) * width + bytes_per_pixel * (j / 2) + 1);
+
+        uint8x8_t nYvec = vld1_u8(LOAD_Y(i,j));
+        uint8x8_t nUVvec = vld1_u8(LOAD_V(i,j)); // v,u,v,u v,u,v,u
+
+        nYvec = vmul_u8(nYvec, vcle_u8(nYvec,ZEROSvec));
+
+        // Yuv Convert
+        //        nY -= 16;
+        //        nU -= 128;
+        //        nV -= 128;
+
+        //        nYvec = vsub_u8(nYvec, Y_SUBvec);
+        //        nUVvec = vsub_u8(nYvec, UV_SUBvec);
+
+        uint16x8_t nYvec16 = vmovl_u8(vsub_u8(nYvec, Y_SUBvec));
+        uint16x8_t nUVvec16 = vmovl_u8(vsub_u8(nUVvec, UV_SUBvec));
+
+        uint16x4_t Y_low4 = vget_low_u16(nYvec16);
+        uint16x4_t Y_high4 = vget_high_u16(nYvec16);
+        uint16x4_t UV_low4 = vget_low_u16(nUVvec16);
+        uint16x4_t UV_high4 = vget_high_u16(nUVvec16);
+
+        uint32x4_t UV_low4_int = vmovl_u16(UV_low4);
+        uint32x4_t UV_high4_int = vmovl_u16(UV_high4);
+
+        uint32x4_t Y_low4_int = vmull_n_u16(Y_low4, 1192);
+        uint32x4_t Y_high4_int = vmull_n_u16(Y_high4, 1192);
+
+        uint32x4x2_t UV_uzp = vuzpq_u32(UV_low4_int, UV_high4_int);
+
+        uint32x2_t Vl = vget_low_u32(UV_uzp.val[0]);// vld1_u32(UVvec_int);
+        uint32x2_t Vh = vget_high_u32(UV_uzp.val[0]);//vld1_u32(UVvec_int + 2);
+
+        uint32x2x2_t Vll_ = vzip_u32(Vl, Vl);
+        uint32x4_t* Vll = (uint32x4_t*)(&Vll_);
+
+        uint32x2x2_t Vhh_ = vzip_u32(Vh, Vh);
+        uint32x4_t* Vhh = (uint32x4_t*)(&Vhh_);
+
+        uint32x2_t Ul =  vget_low_u32(UV_uzp.val[1]);
+        uint32x2_t Uh =  vget_high_u32(UV_uzp.val[1]);
+
+        uint32x2x2_t Ull_ = vzip_u32(Ul, Ul);
+        uint32x4_t* Ull = (uint32x4_t*)(&Ull_);
+
+        uint32x2x2_t Uhh_ = vzip_u32(Uh, Uh);
+        uint32x4_t* Uhh = (uint32x4_t*)(&Uhh_);
+
+        uint32x4_t B_int_low = vmlaq_n_u32(Y_low4_int, *Ull, 2066); //multiply by scalar accum
+        uint32x4_t B_int_high = vmlaq_n_u32(Y_high4_int, *Uhh, 2066); //multiply by scalar accum
+        uint32x4_t G_int_low = vsubq_u32(Y_low4_int, vmlaq_n_u32(vmulq_n_u32(*Vll, 833), *Ull, 400));
+        uint32x4_t G_int_high = vsubq_u32(Y_high4_int, vmlaq_n_u32(vmulq_n_u32(*Vhh, 833), *Uhh, 400));
+        uint32x4_t R_int_low = vmlaq_n_u32(Y_low4_int, *Vll, 1634); //multiply by scalar accum
+        uint32x4_t R_int_high = vmlaq_n_u32(Y_high4_int, *Vhh, 1634); //multiply by scalar accum
+
+        B_int_low = vshrq_n_u32 (B_int_low, 10);
+        B_int_high = vshrq_n_u32 (B_int_high, 10);
+        G_int_low = vshrq_n_u32 (G_int_low, 10);
+        G_int_high = vshrq_n_u32 (G_int_high, 10);
+        R_int_low = vshrq_n_u32 (R_int_low, 10);
+        R_int_high = vshrq_n_u32 (R_int_high, 10);
+
+
+        uint8x8x3_t RGB;
+        RGB.val[0] = vmovn_u16(vcombine_u16(vqmovn_u32 (R_int_low),vqmovn_u32 (R_int_high)));
+        RGB.val[1] = vmovn_u16(vcombine_u16(vqmovn_u32 (G_int_low),vqmovn_u32 (G_int_high)));
+        RGB.val[2] = vmovn_u16(vcombine_u16(vqmovn_u32 (B_int_low),vqmovn_u32 (B_int_high)));
+
+        vst3_u8 (out+i*width*3 + j*3, RGB);
+      }
+    }
+
+}
+
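
For reference, the per-pixel fixed-point math that the vector code above implements, written in scalar form; the constants 1192, 1634, 833, 400 and 2066 and the final >>10 are taken from the NEON path, with the clamping made explicit:

    static inline unsigned char clamp8(int v)
    {
      return (unsigned char)(v < 0 ? 0 : (v > 255 ? 255 : v));
    }

    // One pixel of the same YUV -> RGB conversion, scalar form.
    static void yuv_to_rgb_pixel(int y, int u, int v, unsigned char rgb[3])
    {
      int ny = (y - 16) * 1192;
      int nu = u - 128;
      int nv = v - 128;
      rgb[0] = clamp8((ny + 1634 * nv) >> 10);             // R
      rgb[1] = clamp8((ny - 833 * nv - 400 * nu) >> 10);   // G
      rgb[2] = clamp8((ny + 2066 * nu) >> 10);             // B
    }
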
diff --git a/android/android-opencv/jni/yuv420rgb888.s b/android/android-opencv/jni/yuv420rgb888.s
new file mode 100644 (file)
index 0000000..570ccc7
--- /dev/null
@@ -0,0 +1,379 @@
+; YUV-> RGB conversion code Copyright (C) 2008 Robin Watts (robin;wss.co.uk).
+;
+; Licensed under the GPL. If you need it under another license, contact me
+; and ask.
+;
+;  This program is free software ; you can redistribute it and/or modify
+;  it under the terms of the GNU General Public License as published by
+;  the Free Software Foundation ; either version 2 of the License, or
+;  (at your option) any later version.
+;
+;  This program is distributed in the hope that it will be useful,
+;  but WITHOUT ANY WARRANTY ; without even the implied warranty of
+;  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+;  GNU General Public License for more details.
+;
+;  You should have received a copy of the GNU General Public License
+;  along with this program ; if not, write to the Free Software
+;  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+;
+;
+; The algorithm used here is based heavily on one created by Sophie Wilson
+; of Acorn/e-14/Broadcomm. Many thanks.
+;
+; Additional tweaks (in the fast fixup code) are from Paul Gardiner.
+;
+; The old implementation of YUV -> RGB did:
+;
+; R = CLAMP((Y-16)*1.164 +           1.596*V)
+; G = CLAMP((Y-16)*1.164 - 0.391*U - 0.813*V)
+; B = CLAMP((Y-16)*1.164 + 2.018*U          )
+;
+; We're going to bend that here as follows:
+;
+; R = CLAMP(y +           1.596*V)
+; G = CLAMP(y - 0.383*U - 0.813*V)
+; B = CLAMP(y + 1.976*U          )
+;
+; where y = 0               for       Y <=  16,
+;       y = (  Y-16)*1.164, for  16 < Y <= 239,
+;       y = (239-16)*1.164, for 239 < Y
+;
+; i.e. We clamp Y to the 16 to 239 range (which it is supposed to be in
+; anyway). We then pick the B_U factor so that B never exceeds 511. We then
+; shrink the G_U factor in line with that to avoid a colour shift as much as
+; possible.
+;
+; We're going to use tables to do it faster, but rather than doing it using
+; 5 tables as the above suggests, we're going to do it using just 3.
+;
+; We do this by working in parallel within a 32 bit word, and using one
+; table each for Y U and V.
+;
+; Source Y values are    0 to 255, so    0.. 260 after scaling
+; Source U values are -128 to 127, so  -49.. 49(G), -253..251(B) after
+; Source V values are -128 to 127, so -204..203(R), -104..103(G) after
+;
+; So total summed values:
+; -223 <= R <= 481, -173 <= G <= 431, -253 <= B < 511
+;
+; We need to pack R G and B into a 32 bit word, and because of Bs range we
+; need 2 bits above the valid range of B to detect overflow, and another one
+; to detect the sense of the overflow. We therefore adopt the following
+; representation:
+;
+; osGGGGGgggggosBBBBBbbbosRRRRRrrr
+;
+; Each such word breaks down into 3 ranges.
+;
+; osGGGGGggggg   osBBBBBbbb   osRRRRRrrr
+;
+; Thus we have 8 bits for each B and R table entry, and 10 bits for G (good
+; as G is the most noticeable one). The s bit for each represents the sign,
+; and o represents the overflow.
+;
+; For R and B we pack the table by taking the 11 bit representation of their
+; values, and toggling bit 10 in the U and V tables.
+;
+; For the green case we calculate 4*G (thus effectively using 10 bits for the
+; valid range) truncate to 12 bits. We toggle bit 11 in the Y table.
+
+; Theorarm library
+; Copyright (C) 2009 Robin Watts for Pinknoise Productions Ltd
+
+       AREA    |.text|, CODE, READONLY
+
+       EXPORT  yuv420_2_rgb888
+       EXPORT  yuv420_2_rgb888_PROFILE
+
+; void yuv420_2_rgb888
+;  uint8_t *dst_ptr
+;  uint8_t *y_ptr
+;  uint8_t *u_ptr
+;  uint8_t *v_ptr
+;  int      width
+;  int      height
+;  int      y_span
+;  int      uv_span
+;  int      dst_span
+;  int     *tables
+;  int      dither
+
+CONST_flags
+       DCD     0x40080100
+yuv420_2_rgb888
+       ; r0 = dst_ptr
+       ; r1 = y_ptr
+       ; r2 = u_ptr
+       ; r3 = v_ptr
+       ; <> = width
+       ; <> = height
+       ; <> = y_span
+       ; <> = uv_span
+       ; <> = dst_span
+       ; <> = y_table
+       ; <> = dither
+       STMFD   r13!,{r4-r11,r14}
+
+       LDR     r8, [r13,#10*4]         ; r8 = height
+       LDR     r10,[r13,#11*4]         ; r10= y_span
+       LDR     r9, [r13,#13*4]         ; r9 = dst_span
+       LDR     r14,[r13,#14*4]         ; r14= y_table
+       LDR     r5, CONST_flags
+       LDR     r11,[r13,#9*4]          ; r11= width
+       ADD     r4, r14, #256*4
+       SUBS    r8, r8, #1
+       BLT     end
+       BEQ     trail_row1
+yloop1
+       SUB     r8, r8, r11,LSL #16     ; r8 = height-(width<<16)
+       ADDS    r8, r8, #1<<16          ; if (width == 1)
+       BGE     trail_pair1             ;    just do 1 column
+xloop1
+       LDRB    r11,[r2], #1            ; r11 = u  = *u_ptr++
+       LDRB    r12,[r3], #1            ; r12 = v  = *v_ptr++
+       LDRB    r7, [r1, r10]           ; r7  = y2 = y_ptr[stride]
+       LDRB    r6, [r1], #1            ; r6  = y0 = *y_ptr++
+       ADD     r12,r12,#512
+       LDR     r11,[r4, r11,LSL #2]    ; r11 = u  = u_table[u]
+       LDR     r12,[r14,r12,LSL #2]    ; r12 = v  = v_table[v]
+       LDR     r7, [r14,r7, LSL #2]    ; r7  = y2 = y_table[y2]
+       LDR     r6, [r14,r6, LSL #2]    ; r6  = y0 = y_table[y0]
+       ADD     r11,r11,r12             ; r11 = uv = u+v
+
+       ADD     r7, r7, r11             ; r7  = y2 + uv
+       ADD     r6, r6, r11             ; r6  = y0 + uv
+       ANDS    r12,r7, r5
+       TSTEQ   r6, r5
+       BNE     fix101
+return101
+       ; Store the bottom one first
+       ADD     r12,r0, r9
+       STRB    r7,[r12],#1             ; Store R
+       MOV     r7, r7, ROR #22
+       STRB    r7,[r12],#1             ; Store G
+       MOV     r7, r7, ROR #21
+       STRB    r7,[r12],#1             ; Store B
+
+       ; Then store the top one
+       STRB    r6,[r0], #1             ; Store R
+       MOV     r6, r6, ROR #22
+       STRB    r6,[r0], #1             ; Store G
+
+       LDRB    r7, [r1, r10]           ; r7 = y3 = y_ptr[stride]
+       LDRB    r12,[r1], #1            ; r12= y1 = *y_ptr++
+       MOV     r6, r6, ROR #21
+       LDR     r7, [r14, r7, LSL #2]   ; r7 = y3 = y_table[y2]
+       LDR     r12,[r14, r12,LSL #2]   ; r12= y1 = y_table[y0]
+       STRB    r6,[r0], #1             ; Store B
+
+       ADD     r7, r7, r11             ; r7  = y3 + uv
+       ADD     r6, r12,r11             ; r6  = y1 + uv
+       ANDS    r12,r7, r5
+       TSTEQ   r6, r5
+       BNE     fix102
+return102
+       ; Store the bottom one first
+       ADD     r12,r0, r9
+       STRB    r7,[r12],#1             ; Store R
+       MOV     r7, r7, ROR #22
+       STRB    r7,[r12],#1             ; Store G
+       MOV     r7, r7, ROR #21
+       STRB    r7,[r12],#1             ; Store B
+
+       ; Then store the top one
+       STRB    r6,[r0], #1             ; Store R
+       MOV     r6, r6, ROR #22
+       STRB    r6,[r0], #1             ; Store G
+       MOV     r6, r6, ROR #21
+       STRB    r6,[r0], #1             ; Store B
+
+       ADDS    r8, r8, #2<<16
+       BLT     xloop1
+       MOVS    r8, r8, LSL #16         ; Clear the top 16 bits of r8
+       MOV     r8, r8, LSR #16         ; If the C bit is clear we still have
+       BCC     trail_pair1             ; 1 more pixel pair to do
+end_xloop1
+       LDR     r11,[r13,#9*4]          ; r11= width
+       LDR     r12,[r13,#12*4]         ; r12= uv_stride
+       ADD     r0, r0, r9, LSL #1
+       SUB     r0, r0, r11,LSL #1
+       SUB     r0, r0, r11
+       ADD     r1, r1, r10,LSL #1
+       SUB     r1, r1, r11
+       SUB     r2, r2, r11,LSR #1
+       SUB     r3, r3, r11,LSR #1
+       ADD     r2, r2, r12
+       ADD     r3, r3, r12
+
+       SUBS    r8, r8, #2
+       BGT     yloop1
+
+       LDMLTFD r13!,{r4-r11,pc}
+trail_row1
+       ; We have a row of pixels left to do
+       SUB     r8, r8, r11,LSL #16     ; r8 = height-(width<<16)
+       ADDS    r8, r8, #1<<16          ; if (width == 1)
+       BGE     trail_pix1              ;    just do 1 pixel
+xloop12
+       LDRB    r11,[r2], #1            ; r11 = u  = *u_ptr++
+       LDRB    r12,[r3], #1            ; r12 = v  = *v_ptr++
+       LDRB    r6, [r1], #1            ; r6  = y0 = *y_ptr++
+       LDRB    r7, [r1], #1            ; r7  = y1 = *y_ptr++
+       ADD     r12,r12,#512
+       LDR     r11,[r4, r11,LSL #2]    ; r11 = u  = u_table[u]
+       LDR     r12,[r14,r12,LSL #2]    ; r12 = v  = v_table[v]
+       LDR     r7, [r14,r7, LSL #2]    ; r7  = y1 = y_table[y1]
+       LDR     r6, [r14,r6, LSL #2]    ; r6  = y0 = y_table[y0]
+       ADD     r11,r11,r12             ; r11 = uv = u+v
+
+       ADD     r6, r6, r11             ; r6  = y0 + uv
+       ADD     r7, r7, r11             ; r7  = y1 + uv
+       ANDS    r12,r7, r5
+       TSTEQ   r6, r5
+       BNE     fix104
+return104
+       ; Store the bottom one first
+       STRB    r6,[r0], #1             ; Store R
+       MOV     r6, r6, ROR #22
+       STRB    r6,[r0], #1             ; Store G
+       MOV     r6, r6, ROR #21
+       STRB    r6,[r0], #1             ; Store B
+
+       ; Then store the top one
+       STRB    r7,[r0], #1             ; Store R
+       MOV     r7, r7, ROR #22
+       STRB    r7,[r0], #1             ; Store G
+       MOV     r7, r7, ROR #21
+       STRB    r7,[r0], #1             ; Store B
+
+       ADDS    r8, r8, #2<<16
+       BLT     xloop12
+       MOVS    r8, r8, LSL #16         ; Clear the top 16 bits of r8
+       MOV     r8, r8, LSR #16         ; If the C bit is clear we still have
+       BCC     trail_pix1              ; 1 more pixel pair to do
+end
+       LDMFD   r13!,{r4-r11,pc}
+trail_pix1
+       ; We have a single extra pixel to do
+       LDRB    r11,[r2], #1            ; r11 = u  = *u_ptr++
+       LDRB    r12,[r3], #1            ; r12 = v  = *v_ptr++
+       LDRB    r6, [r1], #1            ; r6  = y0 = *y_ptr++
+       ADD     r12,r12,#512
+       LDR     r11,[r4, r11,LSL #2]    ; r11 = u  = u_table[u]
+       LDR     r12,[r14,r12,LSL #2]    ; r12 = v  = v_table[v]
+       LDR     r6, [r14,r6, LSL #2]    ; r6  = y0 = y_table[y0]
+       ADD     r11,r11,r12             ; r11 = uv = u+v
+
+       ADD     r6, r6, r11             ; r6  = y0 + uv
+       ANDS    r12,r6, r5
+       BNE     fix105
+return105
+       STRB    r6,[r0], #1             ; Store R
+       MOV     r6, r6, ROR #22
+       STRB    r6,[r0], #1             ; Store G
+       MOV     r6, r6, ROR #21
+       STRB    r6,[r0], #1             ; Store B
+
+       LDMFD   r13!,{r4-r11,pc}
+
+trail_pair1
+       ; We have a pair of pixels left to do
+       LDRB    r11,[r2]                ; r11 = u  = *u_ptr++
+       LDRB    r12,[r3]                ; r12 = v  = *v_ptr++
+       LDRB    r7, [r1, r10]           ; r7  = y2 = y_ptr[stride]
+       LDRB    r6, [r1], #1            ; r6  = y0 = *y_ptr++
+       ADD     r12,r12,#512
+       LDR     r11,[r4, r11,LSL #2]    ; r11 = u  = u_table[u]
+       LDR     r12,[r14,r12,LSL #2]    ; r12 = v  = v_table[v]
+       LDR     r7, [r14,r7, LSL #2]    ; r7  = y2 = y_table[y2]
+       LDR     r6, [r14,r6, LSL #2]    ; r6  = y0 = y_table[y0]
+       ADD     r11,r11,r12             ; r11 = uv = u+v
+
+       ADD     r7, r7, r11             ; r7  = y2 + uv
+       ADD     r6, r6, r11             ; r6  = y0 + uv
+       ANDS    r12,r7, r5
+       TSTEQ   r6, r5
+       BNE     fix103
+return103
+       ; Store the bottom one first
+       ADD     r12,r0, r9
+       STRB    r7,[r12],#1             ; Store R
+       MOV     r7, r7, ROR #22
+       STRB    r7,[r12],#1             ; Store G
+       MOV     r7, r7, ROR #21
+       STRB    r7,[r12],#1             ; Store B
+
+       ; Then store the top one
+       STRB    r6,[r0], #1             ; Store R
+       MOV     r6, r6, ROR #22
+       STRB    r6,[r0], #1             ; Store G
+       MOV     r6, r6, ROR #21
+       STRB    r6,[r0], #1             ; Store B
+       B       end_xloop1
+fix101
+       ; r7 and r6 are the values, at least one of which has overflowed
+       ; r12 = r7 & mask = .s......s......s......
+       SUB     r12,r12,r12,LSR #8      ; r12 = ..SSSSSS.SSSSSS.SSSSSS
+       ORR     r7, r7, r12             ; r7 |= ..SSSSSS.SSSSSS.SSSSSS
+       BIC     r12,r5, r7, LSR #1      ; r12 = .o......o......o......
+       ADD     r7, r7, r12,LSR #8      ; r7  = fixed value
+
+       AND     r12, r6, r5             ; r12 = .S......S......S......
+       SUB     r12,r12,r12,LSR #8      ; r12 = ..SSSSSS.SSSSSS.SSSSSS
+       ORR     r6, r6, r12             ; r6 |= ..SSSSSS.SSSSSS.SSSSSS
+       BIC     r12,r5, r6, LSR #1      ; r12 = .o......o......o......
+       ADD     r6, r6, r12,LSR #8      ; r6  = fixed value
+       B       return101
+fix102
+       ; r7 and r6 are the values, at least one of which has overflowed
+       ; r12 = r7 & mask = .s......s......s......
+       SUB     r12,r12,r12,LSR #8      ; r12 = ..SSSSSS.SSSSSS.SSSSSS
+       ORR     r7, r7, r12             ; r7 |= ..SSSSSS.SSSSSS.SSSSSS
+       BIC     r12,r5, r7, LSR #1      ; r12 = .o......o......o......
+       ADD     r7, r7, r12,LSR #8      ; r7  = fixed value
+
+       AND     r12, r6, r5             ; r12 = .S......S......S......
+       SUB     r12,r12,r12,LSR #8      ; r12 = ..SSSSSS.SSSSSS.SSSSSS
+       ORR     r6, r6, r12             ; r6 |= ..SSSSSS.SSSSSS.SSSSSS
+       BIC     r12,r5, r6, LSR #1      ; r12 = .o......o......o......
+       ADD     r6, r6, r12,LSR #8      ; r6  = fixed value
+       B       return102
+fix103
+       ; r7 and r6 are the values, at least one of which has overflowed
+       ; r12 = r7 & mask = .s......s......s......
+       SUB     r12,r12,r12,LSR #8      ; r12 = ..SSSSSS.SSSSSS.SSSSSS
+       ORR     r7, r7, r12             ; r7 |= ..SSSSSS.SSSSSS.SSSSSS
+       BIC     r12,r5, r7, LSR #1      ; r12 = .o......o......o......
+       ADD     r7, r7, r12,LSR #8      ; r7  = fixed value
+
+       AND     r12, r6, r5             ; r12 = .S......S......S......
+       SUB     r12,r12,r12,LSR #8      ; r12 = ..SSSSSS.SSSSSS.SSSSSS
+       ORR     r6, r6, r12             ; r6 |= ..SSSSSS.SSSSSS.SSSSSS
+       BIC     r12,r5, r6, LSR #1      ; r12 = .o......o......o......
+       ADD     r6, r6, r12,LSR #8      ; r6  = fixed value
+       B       return103
+fix104
+       ; r7 and r6 are the values, at least one of which has overflowed
+       ; r12 = r7 & mask = .s......s......s......
+       SUB     r12,r12,r12,LSR #8      ; r12 = ..SSSSSS.SSSSSS.SSSSSS
+       ORR     r7, r7, r12             ; r7 |= ..SSSSSS.SSSSSS.SSSSSS
+       BIC     r12,r5, r7, LSR #1      ; r12 = .o......o......o......
+       ADD     r7, r7, r12,LSR #8      ; r7  = fixed value
+
+       AND     r12, r6, r5             ; r12 = .S......S......S......
+       SUB     r12,r12,r12,LSR #8      ; r12 = ..SSSSSS.SSSSSS.SSSSSS
+       ORR     r6, r6, r12             ; r6 |= ..SSSSSS.SSSSSS.SSSSSS
+       BIC     r12,r5, r6, LSR #1      ; r12 = .o......o......o......
+       ADD     r6, r6, r12,LSR #8      ; r6  = fixed value
+       B       return104
+fix105
+       ; r6 is the value, which has overflowed
+       ; r12 = r6 & mask = .s......s......s......
+       SUB     r12,r12,r12,LSR #8      ; r12 = ..SSSSSS.SSSSSS.SSSSSS
+       ORR     r6, r6, r12             ; r6 |= ..SSSSSS.SSSSSS.SSSSSS
+       BIC     r12,r5, r6, LSR #1      ; r12 = .o......o......o......
+       ADD     r6, r6, r12,LSR #8      ; r6  = fixed value
+       B       return105
+
+       END
diff --git a/android/android-opencv/jni/yuv420rgb888c.c b/android/android-opencv/jni/yuv420rgb888c.c
new file mode 100644 (file)
index 0000000..c7ec52e
--- /dev/null
@@ -0,0 +1,208 @@
+/* YUV-> RGB conversion code. (YUV420 to RGB565)
+ *
+ * Copyright (C) 2008-9 Robin Watts (robin@wss.co.uk) for Pinknoise
+ * Productions Ltd.
+ *
+ * Licensed under the GNU GPL. If you need it under another license, contact
+ * me and ask.
+ *
+ *  This program is free software ; you can redistribute it and/or modify
+ *  it under the terms of the GNU General Public License as published by
+ *  the Free Software Foundation ; either version 2 of the License, or
+ *  (at your option) any later version.
+ *
+ *  This program is distributed in the hope that it will be useful,
+ *  but WITHOUT ANY WARRANTY ; without even the implied warranty of
+ *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ *  GNU General Public License for more details.
+ *
+ *  You should have received a copy of the GNU General Public License
+ *  along with this program ; if not, write to the Free Software
+ *  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+ *
+ *
+ * The algorithm used here is based heavily on one created by Sophie Wilson
+ * of Acorn/e-14/Broadcomm. Many thanks.
+ *
+ * Additional tweaks (in the fast fixup code) are from Paul Gardiner.
+ *
+ * The old implementation of YUV -> RGB did:
+ *
+ * R = CLAMP((Y-16)*1.164 +           1.596*V)
+ * G = CLAMP((Y-16)*1.164 - 0.391*U - 0.813*V)
+ * B = CLAMP((Y-16)*1.164 + 2.018*U          )
+ *
+ * We're going to bend that here as follows:
+ *
+ * R = CLAMP(y +           1.596*V)
+ * G = CLAMP(y - 0.383*U - 0.813*V)
+ * B = CLAMP(y + 1.976*U          )
+ *
+ * where y = 0               for       Y <=  16,
+ *       y = (  Y-16)*1.164, for  16 < Y <= 239,
+ *       y = (239-16)*1.164, for 239 < Y
+ *
+ * i.e. We clamp Y to the 16 to 239 range (which it is supposed to be in
+ * anyway). We then pick the B_U factor so that B never exceeds 511. We then
+ * shrink the G_U factor in line with that to avoid a colour shift as much as
+ * possible.
+ *
+ * We're going to use tables to do it faster, but rather than doing it using
+ * 5 tables as the above suggests, we're going to do it using just 3.
+ *
+ * We do this by working in parallel within a 32 bit word, and using one
+ * table each for Y U and V.
+ *
+ * Source Y values are    0 to 255, so    0.. 260 after scaling
+ * Source U values are -128 to 127, so  -49.. 49(G), -253..251(B) after
+ * Source V values are -128 to 127, so -204..203(R), -104..103(G) after
+ *
+ * So total summed values:
+ * -223 <= R <= 481, -173 <= G <= 431, -253 <= B < 511
+ *
+ * We need to pack R G and B into a 32 bit word, and because of Bs range we
+ * need 2 bits above the valid range of B to detect overflow, and another one
+ * to detect the sense of the overflow. We therefore adopt the following
+ * representation:
+ *
+ * osGGGGGgggggosBBBBBbbbosRRRRRrrr
+ *
+ * Each such word breaks down into 3 ranges.
+ *
+ * osGGGGGggggg   osBBBBBbbb   osRRRRRrrr
+ *
+ * Thus we have 8 bits for each B and R table entry, and 10 bits for G (good
+ * as G is the most noticeable one). The s bit for each represents the sign,
+ * and o represents the overflow.
+ *
+ * For R and B we pack the table by taking the 11 bit representation of their
+ * values, and toggling bit 10 in the U and V tables.
+ *
+ * For the green case we calculate 4*G (thus effectively using 10 bits for the
+ * valid range) truncate to 12 bits. We toggle bit 11 in the Y table.
+ */
+
+#include "yuv2rgb.h"
+
+enum
+{
+    FLAGS         = 0x40080100
+};
+
+#define READUV(U,V) (tables[256 + (U)] + tables[512 + (V)])
+#define READY(Y)    tables[Y]
+#define FIXUP(Y)                 \
+do {                             \
+    int tmp = (Y) & FLAGS;       \
+    if (tmp != 0)                \
+    {                            \
+        tmp  -= tmp>>8;          \
+        (Y)  |= tmp;             \
+        tmp   = FLAGS & ~(Y>>1); \
+        (Y)  += tmp>>8;          \
+    }                            \
+} while (0 == 1)
+
+#define STORE(Y,DSTPTR)           \
+do {                              \
+    uint32_t Y2       = (Y);      \
+    uint8_t  *DSTPTR2 = (DSTPTR); \
+    (DSTPTR2)[0] = (Y2);          \
+    (DSTPTR2)[1] = (Y2)>>22;      \
+    (DSTPTR2)[2] = (Y2)>>11;      \
+} while (0 == 1)
+
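+/*
+ * The loops below fold two counters into the single 'height' variable: the
+ * remaining row count lives in the low 16 bits, and while a row pair is
+ * being processed the (negated) column count is carried in the top 16 bits,
+ * so the inner loop can run off a plain sign test.
+ *
+ * Typical call (sketch only; yuv2rgb565_table is assumed here to be the
+ * table set declared in yuv2rgb.h):
+ *
+ *   yuv420_2_rgb888(rgb, y, u, v, w, h, y_stride, uv_stride, 3*w,
+ *                   yuv2rgb565_table, 0);
+ */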
+void yuv420_2_rgb888(uint8_t  *dst_ptr,
+               const uint8_t  *y_ptr,
+               const uint8_t  *u_ptr,
+               const uint8_t  *v_ptr,
+                     int32_t   width,
+                     int32_t   height,
+                     int32_t   y_span,
+                     int32_t   uv_span,
+                     int32_t   dst_span,
+               const uint32_t *tables,
+                     int32_t   dither)
+{
+    height -= 1;
+    while (height > 0)
+    {
+        height -= width<<16;
+        height += 1<<16;
+        while (height < 0)
+        {
+            /* Do 2 column pairs */
+            uint32_t uv, y0, y1;
+
+            uv  = READUV(*u_ptr++,*v_ptr++);
+            y1  = uv + READY(y_ptr[y_span]);
+            y0  = uv + READY(*y_ptr++);
+            FIXUP(y1);
+            FIXUP(y0);
+            STORE(y1, &dst_ptr[dst_span]);
+            STORE(y0, dst_ptr);
+            dst_ptr += 3;
+            y1  = uv + READY(y_ptr[y_span]);
+            y0  = uv + READY(*y_ptr++);
+            FIXUP(y1);
+            FIXUP(y0);
+            STORE(y1, &dst_ptr[dst_span]);
+            STORE(y0, dst_ptr);
+            dst_ptr += 3;
+            height += (2<<16);
+        }
+        if ((height>>16) == 0)
+        {
+            /* Trailing column pair */
+            uint32_t uv, y0, y1;
+
+            uv = READUV(*u_ptr,*v_ptr);
+            y1 = uv + READY(y_ptr[y_span]);
+            y0 = uv + READY(*y_ptr++);
+            FIXUP(y1);
+            FIXUP(y0);
+            STORE(y0, &dst_ptr[dst_span]);
+            STORE(y1, dst_ptr);
+            dst_ptr += 3;
+        }
+        dst_ptr += dst_span*2-width*3;
+        y_ptr   += y_span*2-width;
+        u_ptr   += uv_span-(width>>1);
+        v_ptr   += uv_span-(width>>1);
+        height = (height<<16)>>16;
+        height -= 2;
+    }
+    if (height == 0)
+    {
+        /* Trail row */
+        height -= width<<16;
+        height += 1<<16;
+        while (height < 0)
+        {
+            /* Do a row pair */
+            uint32_t uv, y0, y1;
+
+            uv  = READUV(*u_ptr++,*v_ptr++);
+            y1  = uv + READY(*y_ptr++);
+            y0  = uv + READY(*y_ptr++);
+            FIXUP(y1);
+            FIXUP(y0);
+            STORE(y1, dst_ptr);
+            dst_ptr += 3;
+            STORE(y0, dst_ptr);
+            dst_ptr += 3;
+            height += (2<<16);
+        }
+        if ((height>>16) == 0)
+        {
+            /* Trailing pix */
+            uint32_t uv, y0;
+
+            uv = READUV(*u_ptr++,*v_ptr++);
+            y0 = uv + READY(*y_ptr++);
+            FIXUP(y0);
+            STORE(y0, dst_ptr);
+            dst_ptr += 3;
+        }
+    }
+}
diff --git a/android/android-opencv/jni/yuv420sp2rgb.c b/android/android-opencv/jni/yuv420sp2rgb.c
new file mode 100644 (file)
index 0000000..ef2eea3
--- /dev/null
@@ -0,0 +1,156 @@
+#include <string.h>
+#include <jni.h>
+
+#include <yuv420sp2rgb.h>
+#include <yuv2rgb.h>
+
+/*
+ YUV 4:2:0 image with a plane of 8 bit Y samples followed by an interleaved
+ U/V plane containing 8 bit 2x2 subsampled chroma samples.
+ except the interleave order of U and V is reversed.
+
+ H V
+ Y Sample Period      1 1
+ U (Cb) Sample Period 2 2
+ V (Cr) Sample Period 2 2
+ */
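+
+/*
+ For a width x height frame the buffers are laid out roughly as:
+
+   pY  : Y00 Y01 Y02 ... Y0(w-1) Y10 ...   (width*height bytes)
+   pUV : V00 U00 V01 U01 ...               (width*height/2 bytes,
+                                             one V/U pair per 2x2 Y block)
+ */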
+
+/*
+ size of a char:
+ find . -name limits.h -exec grep CHAR_BIT {} \;
+ */
+
+#ifndef max
+#define max(a,b) ( (a) > (b) ? (a) : (b) )
+#define min(a,b) ( (a) < (b) ? (a) : (b) )
+#endif
+enum
+{
+    FLAGS         = 0x40080100
+};
+
+#define READUV(U,V) (tables[256 + (U)] + tables[512 + (V)])
+#define READY(Y)    tables[Y]
+#define FIXUP(Y)                 \
+do {                             \
+    int tmp = (Y) & FLAGS;       \
+    if (tmp != 0)                \
+    {                            \
+        tmp  -= tmp>>8;          \
+        (Y)  |= tmp;             \
+        tmp   = FLAGS & ~(Y>>1); \
+        (Y)  += tmp>>8;          \
+    }                            \
+} while (0 == 1)
+
+#define STORE(Y,DSTPTR)           \
+do {                              \
+    uint32_t Y2       = (Y);      \
+    uint8_t  *DSTPTR2 = (DSTPTR); \
+    (DSTPTR2)[2] = (Y2);          \
+    (DSTPTR2)[1] = (Y2)>>22;      \
+    (DSTPTR2)[0] = (Y2)>>11;      \
+} while (0 == 1)
+
+typedef unsigned char byte;
+const int bytes_per_pixel = 2;
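+/*
+ * Converts one camera preview frame to 24 bit output:
+ *   pY     - width*height bytes of luma
+ *   pUV    - the interleaved V/U plane that follows the luma plane
+ *   buffer - receives width*height*3 bytes (3 bytes per pixel)
+ *   grey   - when non-zero, only the luma plane is copied into buffer
+ */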
+void color_convert_common(const unsigned char *pY, const unsigned char *pUV, int width, int height,
+                          unsigned char *buffer, int grey)
+{
+#define LOOKUP 1
+#if ! LOOKUP
+  int nR, nG, nB;
+#endif
+  int dest_span = 3 * width;
+  unsigned char *out = buffer;
+  if (grey)
+  {
+    memcpy(out, pY, width * height * sizeof(unsigned char));
+  }
+  else
+  {
+
+#if LOOKUP
+    const uint32_t* tables = yuv2rgb565_table;
+    const byte* nY = pY;
+    const byte* nUV = pUV;
+    int idx = 0;
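+    /* The loop below walks the luma plane two rows at a time (Y and
+     * nY[width]) and stops before the chroma plane begins. The V/U pointer
+     * is not advanced incrementally; it is recomputed from the running
+     * pixel index (idx) at the end of every iteration. */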
+    while (nY+width < pUV)
+    {
+      int y = (idx / width);
+      int x = (idx % width);
+      byte Y = *nY;
+      byte Y2 = nY[width];
+      byte V = *nUV;
+      byte U = *(nUV + 1);
+      /* Do 2 row pairs */
+      uint32_t uv, y0, y1;
+
+      uv = READUV(U,V);
+      y1 = uv + READY(Y);
+      y0 = uv + READY(Y2);
+      FIXUP(y1);
+      FIXUP(y0);
+      STORE(y1, &out[dest_span]);
+      STORE(y0, out);
+      out += 3;
+      Y = *(++nY);
+      Y2 = nY[width];
+      y1 = uv + READY(Y);
+      y0 = uv + READY(Y2);
+      FIXUP(y1);
+      FIXUP(y0);
+      STORE(y1, &out[dest_span]);
+      STORE(y0, out);
+      out += 3;
+      height += (2 << 16);
+      ++nY;
+      nUV = pUV + (y / 2) * width + 2 * (x / 2);
+      idx+=2;
+    }
+#else
+    const byte* nY = pY;
+    const byte* nUV = pUV;
+    int idx = 0;
+    while (nY < pUV)
+    {
+
+      int y = (idx / width);
+      int x = (idx % width);
+      int Y = *nY;
+      int V = *nUV;
+      int U = *(nUV + 1);
+
+      Y -= 16;
+      V -= 128;
+      U -= 128;
+      if (Y < 0)
+        Y = 0; /* clamp luma */
+
+      nB = (int)(1192 * Y + 2066 * U);
+      nG = (int)(1192 * Y - 833 * V - 400 * U);
+      nR = (int)(1192 * Y + 1634 * V);
+
+      nR = min(262143, max(0, nR));
+      nG = min(262143, max(0, nG));
+      nB = min(262143, max(0, nB));
+
+      nR >>= 10;
+      nR &= 0xff;
+      nG >>= 10;
+      nG &= 0xff;
+      nB >>= 10;
+      nB &= 0xff;
+
+      *(out++) = (unsigned char)nR;
+      *(out++) = (unsigned char)nG;
+      *(out++) = (unsigned char)nB;
+      nY += 1;
+      nUV = pUV + (y / 2) * width + 2 * (x / 2);
+      ++idx;
+    }
+#endif
+  }
+
+
+}
diff --git a/android/android-opencv/project_create.sh b/android/android-opencv/project_create.sh
new file mode 100755 (executable)
index 0000000..0a1b6bd
--- /dev/null
@@ -0,0 +1,4 @@
+#!/bin/sh
+# this generates an ant-based CLI build of the android-opencv project
+android update project --name android-opencv \
+--path .
diff --git a/android/android-opencv/res/drawable-mdpi/cameraback.jpg b/android/android-opencv/res/drawable-mdpi/cameraback.jpg
new file mode 100644 (file)
index 0000000..b53ffd4
Binary files /dev/null and b/android/android-opencv/res/drawable-mdpi/cameraback.jpg differ
diff --git a/android/android-opencv/res/layout/calibrationviewer.xml b/android/android-opencv/res/layout/calibrationviewer.xml
new file mode 100644 (file)
index 0000000..00dea19
--- /dev/null
@@ -0,0 +1,11 @@
+<?xml version="1.0" encoding="utf-8"?>
+<LinearLayout
+  xmlns:android="http://schemas.android.com/apk/res/android"
+  android:layout_width="fill_parent"
+  android:layout_height="fill_parent"
+  android:orientation="vertical"
+  android:gravity="center_vertical|center_horizontal">
+   <TextView android:scrollbars="vertical" android:id="@+id/calibtext" android:text="" android:layout_width="wrap_content" 
+ android:layout_height="wrap_content" android:padding="20dip"/>
+
+</LinearLayout>
diff --git a/android/android-opencv/res/layout/camera.xml b/android/android-opencv/res/layout/camera.xml
new file mode 100644 (file)
index 0000000..560e3d4
--- /dev/null
@@ -0,0 +1,28 @@
+<?xml version="1.0" encoding="utf-8"?>
+<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
+       android:layout_width="fill_parent" android:layout_height="fill_parent"
+       android:background="@drawable/cameraback">
+       <!--<SurfaceView-->
+       <com.opencv.camera.NativePreviewer
+               android:id="@+id/nativepreviewer" android:layout_width="400dip"
+               android:layout_height="300dip" android:layout_alignParentLeft="true"
+               android:layout_margin="20dip" android:gravity="center_horizontal|center_vertical"
+               android:layout_marginRight="20dip" />
+       <LinearLayout android:id="@+id/glview_layout"
+               android:layout_width="400dip" android:layout_height="300dip"
+               android:layout_alignParentLeft="true" android:layout_margin="20dip"
+               android:gravity="center_horizontal|center_vertical"
+               android:layout_marginRight="20dip">
+       </LinearLayout>
+       <LinearLayout android:layout_width="wrap_content"
+               android:layout_height="fill_parent" android:orientation="vertical"
+               android:layout_margin="20dip" android:gravity="center_horizontal|center_vertical"
+               android:layout_alignParentRight="true">
+               <ImageButton android:src="@android:drawable/ic_menu_camera"
+                       android:id="@+id/button_capture" android:layout_width="60dip"
+                       android:layout_height="60dip" android:layout_marginBottom="10dip"></ImageButton>
+                       <ImageButton android:src="@android:drawable/ic_menu_preferences"
+                       android:id="@+id/button_camera_settings" android:layout_width="60dip"
+                       android:layout_height="60dip" android:layout_marginBottom="10dip"></ImageButton>
+       </LinearLayout>
+</RelativeLayout>
diff --git a/android/android-opencv/res/layout/camerasettings.xml b/android/android-opencv/res/layout/camerasettings.xml
new file mode 100644 (file)
index 0000000..4366ad3
--- /dev/null
@@ -0,0 +1,43 @@
+<?xml version="1.0" encoding="utf-8"?>
+<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
+       android:layout_width="fill_parent" android:layout_height="fill_parent"
+       android:orientation="vertical" android:gravity="center_vertical|center_horizontal">
+       <TextView android:text="@string/settings_text"
+               android:autoLink="web" android:layout_width="wrap_content"
+               android:layout_height="wrap_content" android:padding="20dip" />
+               
+       <LinearLayout android:id="@+id/LinearLayout01"
+               android:layout_width="wrap_content" android:layout_height="wrap_content"
+               android:gravity="center_vertical">
+               <TextView android:layout_width="wrap_content"
+                       android:layout_height="wrap_content" android:text="@string/image_size_prompt" />
+               <Spinner android:id="@+id/image_size" android:layout_width="fill_parent"
+                       android:layout_height="wrap_content" android:saveEnabled="true"
+                       android:prompt="@string/image_size_prompt" android:entries="@array/image_sizes">
+               </Spinner>
+       </LinearLayout>
+
+       <LinearLayout android:id="@+id/LinearLayout01"
+               android:layout_width="wrap_content" android:layout_height="wrap_content"
+               android:gravity="center_vertical">
+               <TextView android:layout_width="wrap_content"
+                       android:layout_height="wrap_content" android:text="@string/camera_mode_prompt" />
+               <Spinner android:id="@+id/camera_mode" android:layout_width="fill_parent"
+                       android:layout_height="wrap_content" android:saveEnabled="true"
+                       android:prompt="@string/camera_mode_prompt" android:entries="@array/camera_mode">
+               </Spinner>
+       </LinearLayout>
+
+               <LinearLayout android:id="@+id/LinearLayout01"
+               android:layout_width="wrap_content" android:layout_height="wrap_content"
+               android:gravity="center_vertical">
+               <TextView android:layout_width="wrap_content"
+                       android:layout_height="wrap_content" android:text="@string/whitebalance_prompt" />
+               <Spinner android:id="@+id/whitebalance" android:layout_width="fill_parent"
+                       android:layout_height="wrap_content" android:saveEnabled="true"
+                       android:prompt="@string/whitebalance_prompt" android:entries="@array/whitebalance">
+               </Spinner>
+</LinearLayout>
+
+
+</LinearLayout>
diff --git a/android/android-opencv/res/layout/chesssizer.xml b/android/android-opencv/res/layout/chesssizer.xml
new file mode 100644 (file)
index 0000000..b93bc0b
--- /dev/null
@@ -0,0 +1,40 @@
+<?xml version="1.0" encoding="utf-8"?>
+<LinearLayout
+  xmlns:android="http://schemas.android.com/apk/res/android"
+  android:layout_width="fill_parent"
+  android:layout_height="fill_parent"
+  android:orientation="vertical"
+  android:gravity="center_vertical|center_horizontal">
+   <TextView android:text="@string/patterntext"  android:autoLink="web" android:layout_width="wrap_content" 
+ android:layout_height="wrap_content" android:padding="20dip"/>
+  <LinearLayout android:id="@+id/LinearLayout01"
+ android:layout_width="wrap_content" 
+ android:layout_height="wrap_content"
+ android:gravity="center_vertical">
+ <TextView android:layout_width="wrap_content"
+  android:layout_height="wrap_content"
+   android:text="Corners in width direction:"/>
+ <Spinner android:id="@+id/rows" 
+  android:layout_width="fill_parent"
+        android:layout_height="wrap_content"
+         android:saveEnabled="true"
+           android:prompt="@string/chesspromptx"
+android:entries="@array/chesssizes">
+</Spinner>
+ </LinearLayout>
+ <LinearLayout android:id="@+id/LinearLayout01"
+ android:layout_width="wrap_content" android:layout_height="wrap_content"
+ android:gravity="center_vertical">
+ <TextView android:layout_width="wrap_content"
+ android:layout_height="wrap_content" android:text="Corners in height direction:"/>
+ <Spinner android:id="@+id/cols" 
+  android:layout_width="fill_parent"
+        android:layout_height="wrap_content"
+        android:saveEnabled="true"
+        android:prompt="@string/chessprompty"
+android:entries="@array/chesssizes">
+</Spinner>
+</LinearLayout>
+
+</LinearLayout>
diff --git a/android/android-opencv/res/values/attrs.xml b/android/android-opencv/res/values/attrs.xml
new file mode 100644 (file)
index 0000000..89727ff
--- /dev/null
@@ -0,0 +1,11 @@
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+
+<declare-styleable name="CameraParams">
+
+<attr name="preview_width" format="integer"/>
+<attr name="preview_height" format="integer"/>
+
+</declare-styleable>
+
+</resources>
\ No newline at end of file
diff --git a/android/android-opencv/res/values/chessnumbers.xml b/android/android-opencv/res/values/chessnumbers.xml
new file mode 100644 (file)
index 0000000..c0b37fa
--- /dev/null
@@ -0,0 +1,21 @@
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+<string-array name="chesssizes">
+<item>2</item>
+<item>3</item>
+<item>4</item>
+<item>5</item>
+<item>6</item>
+<item>7</item>
+<item>8</item>
+<item>9</item>
+<item>10</item>
+<item>11</item>
+<item>12</item>
+<item>13</item>
+</string-array>
+<string name="chesspromptx">
+Choose the width:</string>
+<string name="chessprompty">
+Choose the height:</string>
+</resources>
diff --git a/android/android-opencv/res/values/settingnumbers.xml b/android/android-opencv/res/values/settingnumbers.xml
new file mode 100644 (file)
index 0000000..54771c1
--- /dev/null
@@ -0,0 +1,31 @@
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+<string-array name="image_sizes">
+<item>320x240</item>
+<item>400x300</item>
+<item>640x480</item>
+<item>800x600</item>
+<item>1000x800</item>
+</string-array>
+<string-array name="camera_mode">
+<item>color</item>
+<item>BW</item>
+</string-array>
+<string name="image_size_prompt">
+Image Size:\n(may not be exact)
+</string>
+<string name="camera_mode_prompt">
+Camera Mode:
+</string>
+
+<string-array name="whitebalance">
+<item>auto</item>
+<item>incandescent</item>
+<item>fluorescent</item>
+<item>daylight</item>
+<item>cloudy-daylight</item>
+</string-array>
+<string name="whitebalance_prompt">
+Whitebalance:
+</string>
+</resources>
\ No newline at end of file
diff --git a/android/android-opencv/res/values/strings.xml b/android/android-opencv/res/values/strings.xml
new file mode 100644 (file)
index 0000000..541de36
--- /dev/null
@@ -0,0 +1,20 @@
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+    <string name="app_name">Calibration</string>
+       <string name="patternsize">Pattern Size</string>
+       <string name="patterntext">Please choose the width and height (number of inside corners) of the checker
+       board pattern you will be using for calibration. Default is 6 by 8 corners. You may find a checkerboard pattern at
+       http://opencv.willowgarage.com/pattern</string>
+       
+       <string name="patternlink">http://opencv.willowgarage.com/pattern</string>
+       <string name="camera_settings_label">Camera Settings</string>
+       <string name="settings_text">Change the camera settings. Be aware that BW is much faster for previewing, than color. Also, if you change the image size, you should
+       rerun calibration.  Default values: BW and 640x480 are a good start.</string>
+       
+       <string name="calibration_service_started">Calibration calculations have started...</string>
+       <string name="calibration_service_stopped">Calibration calculations has stopped.</string>
+       <string name="calibration_service_finished">Calibration finished, you camera is calibrated.</string>
+       <string name="calibration_service_label">Calibration</string>
+       <string name="calibration_not_enough">Please capture atleast 10 images of the pattern!</string> 
+       
+</resources>
diff --git a/android/android-opencv/src/com/opencv/OpenCV.java b/android/android-opencv/src/com/opencv/OpenCV.java
new file mode 100644 (file)
index 0000000..0fced80
--- /dev/null
@@ -0,0 +1,157 @@
+package com.opencv;
+
+import java.util.LinkedList;
+
+import android.app.Activity;
+import android.content.pm.ActivityInfo;
+import android.os.Bundle;
+import android.view.Gravity;
+import android.view.KeyEvent;
+import android.view.Menu;
+import android.view.MenuItem;
+import android.view.Window;
+import android.view.WindowManager;
+import android.view.ViewGroup.LayoutParams;
+import android.widget.FrameLayout;
+import android.widget.LinearLayout;
+
+import com.opencv.camera.NativePreviewer;
+import com.opencv.camera.NativeProcessor;
+import com.opencv.camera.NativeProcessor.PoolCallback;
+import com.opencv.opengl.GL2CameraViewer;
+
+public class OpenCV extends Activity {
+       private NativePreviewer mPreview;
+
+       private GL2CameraViewer glview;
+
+       /*
+        * (non-Javadoc)
+        * 
+        * @see android.app.Activity#onKeyUp(int, android.view.KeyEvent)
+        */
+       @Override
+       public boolean onKeyUp(int keyCode, KeyEvent event) {
+
+               return super.onKeyUp(keyCode, event);
+       }
+
+       /*
+        * (non-Javadoc)
+        * 
+        * @see android.app.Activity#onKeyLongPress(int, android.view.KeyEvent)
+        */
+       @Override
+       public boolean onKeyLongPress(int keyCode, KeyEvent event) {
+
+               return super.onKeyLongPress(keyCode, event);
+       }
+
+       /**
+        * Prevent the screen from being turned off by the system.
+        */
+       public void disableScreenTurnOff() {
+               getWindow().setFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON,
+                               WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
+       }
+
+       /**
+        * Sets the orientation to landscape, as this is needed by AndAR.
+        */
+       public void setOrientation() {
+               setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
+       }
+
+       /**
+        * Maximize the application.
+        */
+       public void setFullscreen() {
+               requestWindowFeature(Window.FEATURE_NO_TITLE);
+               getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
+                               WindowManager.LayoutParams.FLAG_FULLSCREEN);
+       }
+
+       public void setNoTitle() {
+               requestWindowFeature(Window.FEATURE_NO_TITLE);
+       }
+
+       @Override
+       public boolean onCreateOptionsMenu(Menu menu) {
+               // menu.add("Sample");
+               return true;
+       }
+
+       @Override
+       public boolean onOptionsItemSelected(MenuItem item) {
+               // if(item.getTitle().equals("Sample")){
+               // //do stuff...
+               // }
+
+               return true;
+
+       }
+
+       @Override
+       public void onOptionsMenuClosed(Menu menu) {
+               // TODO Auto-generated method stub
+               super.onOptionsMenuClosed(menu);
+       }
+
+       @Override
+       protected void onCreate(Bundle savedInstanceState) {
+               super.onCreate(savedInstanceState);
+
+               setFullscreen();
+               disableScreenTurnOff();
+
+               FrameLayout frame = new FrameLayout(getApplication());
+
+               // Create our Preview view and set it as the content of our activity.
+               mPreview = new NativePreviewer(getApplication(), 640, 480);
+
+               LayoutParams params = new LayoutParams(LayoutParams.WRAP_CONTENT,
+                               LayoutParams.WRAP_CONTENT);
+               params.height = getWindowManager().getDefaultDisplay().getHeight();
+               params.width = (int) (params.height * 4.0 / 2.88);
+
+               LinearLayout vidlay = new LinearLayout(getApplication());
+
+               vidlay.setGravity(Gravity.CENTER);
+               vidlay.addView(mPreview, params);
+               frame.addView(vidlay);
+               
+               // make the glview overlay on top of the video preview
+               mPreview.setZOrderMediaOverlay(false);
+               
+               glview = new GL2CameraViewer(getApplication(), false, 0, 0);
+               glview.setZOrderMediaOverlay(true);
+               glview.setLayoutParams(new LayoutParams(LayoutParams.FILL_PARENT,
+                               LayoutParams.FILL_PARENT));
+               frame.addView(glview);
+
+               setContentView(frame);
+       }
+
+       @Override
+       protected void onPause() {
+               super.onPause();
+               
+               mPreview.onPause();
+               
+               glview.onPause();
+               
+
+       }
+
+       @Override
+       protected void onResume() {
+               super.onResume();
+               glview.onResume();
+               LinkedList<NativeProcessor.PoolCallback> callbackstack = new LinkedList<PoolCallback>();
+               callbackstack.add(glview.getDrawCallback());
+               mPreview.addCallbackStack(callbackstack);
+               mPreview.onResume();
+
+       }
+
+}
diff --git a/android/android-opencv/src/com/opencv/calibration/CalibrationViewer.java b/android/android-opencv/src/com/opencv/calibration/CalibrationViewer.java
new file mode 100644 (file)
index 0000000..2ae6b78
--- /dev/null
@@ -0,0 +1,47 @@
+package com.opencv.calibration;
+
+import java.io.BufferedReader;
+import java.io.FileNotFoundException;
+import java.io.FileReader;
+import java.io.IOException;
+
+import android.app.Activity;
+import android.os.Bundle;
+import android.text.method.ScrollingMovementMethod;
+import android.util.Log;
+import android.widget.TextView;
+
+import com.opencv.R;
+
+public class CalibrationViewer extends Activity {
+
+       @Override
+       protected void onCreate(Bundle savedInstanceState) {
+               // TODO Auto-generated method stub
+               super.onCreate(savedInstanceState);
+               setContentView(R.layout.calibrationviewer);
+
+               Bundle extras = getIntent().getExtras();
+               String filename = extras.getString("calibfile");
+               if (filename != null) {
+                       TextView text = (TextView) findViewById(R.id.calibtext);
+                       text.setMovementMethod(new ScrollingMovementMethod());
+                       try {
+                               BufferedReader reader = new BufferedReader(new FileReader(
+                                               filename));
+                               while (reader.ready()) {
+                                       text.append(reader.readLine() +"\n");
+                               }
+
+                       } catch (FileNotFoundException e) {
+                               Log.e("opencv", "could not open calibration file at:"
+                                               + filename);
+                       } catch (IOException e) {
+                               Log.e("opencv", "error reading file: "
+                                               + filename);
+                       }
+               }
+
+       }
+
+}
diff --git a/android/android-opencv/src/com/opencv/calibration/Calibrator.java b/android/android-opencv/src/com/opencv/calibration/Calibrator.java
new file mode 100644 (file)
index 0000000..699a196
--- /dev/null
@@ -0,0 +1,124 @@
+package com.opencv.calibration;
+
+
+import java.io.File;
+import java.io.IOException;
+import java.util.concurrent.locks.ReentrantLock;
+
+import android.os.AsyncTask;
+
+import com.opencv.camera.NativeProcessor;
+import com.opencv.camera.NativeProcessor.PoolCallback;
+import com.opencv.jni.Calibration;
+import com.opencv.jni.Size;
+import com.opencv.jni.image_pool;
+
+
+
+public class Calibrator implements PoolCallback {
+       private Calibration calibration;
+
+       static public interface CalibrationCallback{
+               public void onFoundChessboard(Calibrator calibrator);
+               public void onDoneCalibration(Calibrator calibration, File calibfile);
+               public void onFailedChessboard(Calibrator calibrator);
+       }
+       private CalibrationCallback callback;
+       public Calibrator(CalibrationCallback callback) {
+               calibration = new Calibration();
+               this.callback = callback;
+       }
+
+       public void resetCalibration(){
+               calibration.resetChess();
+       }
+
+       public void setPatternSize(Size size){
+               Size csize = calibration.getPatternsize();
+               if(size.getWidth() == csize.getWidth()&&
+                                  size.getHeight() == csize.getHeight())
+                                       return;
+               calibration.setPatternsize(size);       
+               resetCalibration();
+       }
+       public void setPatternSize(int width, int height){
+               Size patternsize = new Size(width,height);
+               setPatternSize(patternsize);
+       }
+       
+       private boolean capture_chess;
+
+       ReentrantLock lock = new ReentrantLock();
+       public void calibrate(File calibration_file) throws IOException{
+               if(getNumberPatternsDetected() < 3){
+                       return;
+               }
+               CalibrationTask calibtask = new CalibrationTask(calibration_file);
+               calibtask.execute((Object[])null);
+       }
+
+       public void queueChessCapture(){
+               capture_chess = true;
+       }
+       
+private class CalibrationTask extends AsyncTask<Object, Object, Object> {
+               File calibfile;
+       
+               public CalibrationTask(File calib) throws IOException{
+                       super();
+                       calibfile = calib;
+                       calibfile.createNewFile();
+               }
+       
+               @Override
+               protected Object doInBackground(Object... params) {
+                       lock.lock();
+                       try{
+                               calibration.calibrate(calibfile.getAbsolutePath());
+                       }
+                       finally{
+                               lock.unlock();
+                       }
+                       return null;
+               
+               }
+
+               @Override
+               protected void onPostExecute(Object result) {                   
+                       callback.onDoneCalibration(Calibrator.this, calibfile);
+               }
+
+       }
+       
+
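+       /**
+        * Frame callback from the NativeProcessor pool. tryLock() is used so
+        * that preview frames are simply skipped while a calibration run
+        * (which holds the lock in its background task) is in progress,
+        * instead of blocking the camera thread.
+        */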
+       //@Override
+       public void process(int idx, image_pool pool, long timestamp,
+                       NativeProcessor nativeProcessor) {
+               if(lock.tryLock()){
+                       try{
+                               if(capture_chess){
+                                       if(calibration.detectAndDrawChessboard(idx, pool)){
+                                               callback.onFoundChessboard(this);
+                                               
+                                       }else
+                                               callback.onFailedChessboard(this);
+                                       capture_chess = false;
+                               }
+                       }finally{
+                               lock.unlock();
+                       }
+               }
+       }
+
+
+       public int getNumberPatternsDetected(){
+               return calibration.getNumberDetectedChessboards();
+       }
+
+       public void setCallback(CalibrationCallback callback) {
+               this.callback = callback;
+               
+       }
+
+
+}
diff --git a/android/android-opencv/src/com/opencv/calibration/ChessBoardChooser.java b/android/android-opencv/src/com/opencv/calibration/ChessBoardChooser.java
new file mode 100644 (file)
index 0000000..461a37d
--- /dev/null
@@ -0,0 +1,75 @@
+package com.opencv.calibration;
+
+import com.opencv.R;
+import com.opencv.jni.Size;
+
+import android.app.Activity;
+import android.content.Context;
+import android.content.SharedPreferences;
+import android.content.SharedPreferences.Editor;
+import android.os.Bundle;
+import android.view.View;
+import android.widget.AdapterView;
+import android.widget.AdapterView.OnItemSelectedListener;
+import android.widget.Spinner;
+
+public class ChessBoardChooser extends Activity {
+       public static final String CHESS_SIZE = "chess_size";
+       public static final int DEFAULT_WIDTH = 6;
+       public static final int DEFAULT_HEIGHT = 8;
+       public static final int LOWEST = 2;
+
+       class DimChooser implements OnItemSelectedListener {
+               private String dim;
+
+               public DimChooser(String dim) {
+                       this.dim = dim;
+               }
+
+               @Override
+               public void onItemSelected(AdapterView<?> arg0, View arg1, int pos,
+                               long arg3) {
+                       SharedPreferences settings = getSharedPreferences(CHESS_SIZE, 0);
+                       Editor editor = settings.edit();
+                       editor.putInt(dim, pos + LOWEST);
+                       editor.commit();
+               }
+
+               @Override
+               public void onNothingSelected(AdapterView<?> arg0) {
+               }
+       }
+
+       @Override
+       protected void onCreate(Bundle savedInstanceState) {
+               // TODO Auto-generated method stub
+               super.onCreate(savedInstanceState);
+               setContentView(R.layout.chesssizer);
+               // Restore preferences
+               SharedPreferences settings = getSharedPreferences(CHESS_SIZE, 0);
+               int width = settings.getInt("width", 6);
+
+               int height = settings.getInt("height", 8);
+
+               Spinner wspin, hspin;
+               wspin = (Spinner) findViewById(R.id.rows);
+               hspin = (Spinner) findViewById(R.id.cols);
+
+               wspin.setSelection(width - LOWEST);
+               hspin.setSelection(height - LOWEST);
+
+               wspin.setOnItemSelectedListener(new DimChooser("width"));
+               hspin.setOnItemSelectedListener(new DimChooser("height"));
+
+       }
+
+       public static Size getPatternSize(Context ctx) {
+               SharedPreferences settings = ctx.getSharedPreferences(CHESS_SIZE, 0);
+               int width = settings.getInt("width", 6);
+
+               int height = settings.getInt("height", 8);
+
+               return new Size(width, height);
+       }
+
+}
diff --git a/android/android-opencv/src/com/opencv/calibration/services/CalibrationService.java b/android/android-opencv/src/com/opencv/calibration/services/CalibrationService.java
new file mode 100644 (file)
index 0000000..754e2f1
--- /dev/null
@@ -0,0 +1,166 @@
+package com.opencv.calibration.services;
+
+import java.io.File;
+import java.io.IOException;
+
+import android.app.Notification;
+import android.app.NotificationManager;
+import android.app.PendingIntent;
+import android.app.Service;
+import android.content.Intent;
+import android.os.Binder;
+import android.os.IBinder;
+import android.util.Log;
+import android.widget.Toast;
+
+
+import com.opencv.R;
+import com.opencv.calibration.CalibrationViewer;
+import com.opencv.calibration.Calibrator;
+import com.opencv.calibration.Calibrator.CalibrationCallback;
+
+
+public class CalibrationService extends Service implements CalibrationCallback {
+
+       Class<?> activity;
+       int icon;
+       File calibration_file;
+       public void startCalibrating(Class<?> activitycaller,int icon_id, Calibrator calibrator, File calibration_file)
+                       throws IOException {
+               activity = activitycaller;
+               icon = icon_id;
+               // Display a notification about us starting. We put an icon in the
+               // status bar.
+               showNotification();
+               this.calibration_file = calibration_file;
+               calibrator.setCallback(this);
+               calibrator.calibrate(calibration_file);
+               
+               
+       }
+
+       private NotificationManager mNM;
+
+       /**
+        * Class for clients to access. Because we know this service always runs in
+        * the same process as its clients, we don't need to deal with IPC.
+        */
+       public class CalibrationServiceBinder extends Binder {
+               public CalibrationService getService() {
+                       return CalibrationService.this;
+               }
+       }
+
+       @Override
+       public int onStartCommand(Intent intent, int flags, int startId) {
+               Log.i("LocalService", "Received start id " + startId + ": " + intent);
+               // This service should not be restarted automatically if its
+               // process is killed, so return not sticky.
+               return START_NOT_STICKY;
+       }
+
+       @Override
+       public void onCreate() {
+               mNM = (NotificationManager) getSystemService(NOTIFICATION_SERVICE);
+
+               
+       }
+
+       @Override
+       public void onDestroy() {
+               // Cancel the persistent notification.
+               // mNM.cancel(R.string.calibration_service_started);
+
+               // Tell the user we stopped.
+               Toast.makeText(this, R.string.calibration_service_finished,
+                               Toast.LENGTH_SHORT).show();
+       }
+
+       private final IBinder mBinder = new CalibrationServiceBinder();
+
+       @Override
+       public IBinder onBind(Intent intent) {
+               return mBinder;
+       }
+
+       /**
+        * Show a notification while this service is running.
+        */
+       private void showNotification() {
+               // In this sample, we'll use the same text for the ticker and the
+               // expanded notification
+               CharSequence text = getText(R.string.calibration_service_started);
+
+               // Set the icon, scrolling text and timestamp
+               Notification notification = new Notification(icon, text,
+                               System.currentTimeMillis());
+
+               // The PendingIntent to launch our activity if the user selects this
+               // notification
+               PendingIntent contentIntent = PendingIntent.getActivity(this, 0,
+                               new Intent(this, activity), 0);
+
+               // Set the info for the views that show in the notification panel.
+               notification.setLatestEventInfo(this,
+                               getText(R.string.calibration_service_label), text,
+                               contentIntent);
+
+               notification.defaults |= Notification.DEFAULT_SOUND;
+               // Send the notification.
+               // We use a layout id because it is a unique number. We use it later to
+               // cancel.
+               mNM.notify(R.string.calibration_service_started, notification);
+       }
+
+       /**
+        * Show a notification while this service is running.
+        */
+       private void doneNotification() {
+               // In this sample, we'll use the same text for the ticker and the
+               // expanded notification
+               CharSequence text = getText(R.string.calibration_service_finished);
+
+               // Set the icon, scrolling text and timestamp
+               Notification notification = new Notification(icon, text,
+                               System.currentTimeMillis());
+
+               Intent intent = new Intent(this,CalibrationViewer.class);
+               intent.putExtra("calibfile", calibration_file.getAbsolutePath());
+               // The PendingIntent to launch our activity if the user selects this
+               // notification
+               PendingIntent contentIntent = PendingIntent.getActivity(this, 0,
+                               intent, 0);
+               
+
+               // Set the info for the views that show in the notification panel.
+               notification.setLatestEventInfo(this,
+                               getText(R.string.calibration_service_label), text,
+                               contentIntent);
+               
+
+               notification.defaults |= Notification.DEFAULT_SOUND;
+               // Send the notification.
+               // We use a layout id because it is a unique number. We use it later to
+               // cancel.
+               mNM.notify(R.string.calibration_service_started, notification);
+       }
+
+       @Override
+       public void onFoundChessboard(Calibrator calibrator) {
+               // TODO Auto-generated method stub
+
+       }
+
+       @Override
+       public void onDoneCalibration(Calibrator calibration, File calibfile) {
+               doneNotification();
+               stopSelf();
+       }
+
+       @Override
+       public void onFailedChessboard(Calibrator calibrator) {
+               // TODO Auto-generated method stub
+
+       }
+
+}
diff --git a/android/android-opencv/src/com/opencv/camera/CameraActivity.java b/android/android-opencv/src/com/opencv/camera/CameraActivity.java
new file mode 100644 (file)
index 0000000..ddb35a5
--- /dev/null
@@ -0,0 +1,128 @@
+package com.opencv.camera;
+
+import java.util.LinkedList;
+
+import android.app.Activity;
+import android.content.pm.ActivityInfo;
+import android.os.Bundle;
+import android.view.KeyEvent;
+import android.view.MotionEvent;
+import android.view.Window;
+import android.view.WindowManager;
+import android.widget.LinearLayout;
+
+import com.opencv.camera.CameraButtonsHandler.CaptureListener;
+import com.opencv.opengl.GL2CameraViewer;
+
+public abstract class CameraActivity extends Activity implements CaptureListener {
+
+       /** Called when the activity is first created. */
+       @Override
+       public void onCreate(Bundle savedInstanceState) {
+               super.onCreate(savedInstanceState);
+               setFullscreen();
+               setOrientation();
+               disableScreenTurnOff();
+               setContentView(com.opencv.R.layout.camera);
+               cameraButtonHandler = new CameraButtonsHandler(this,this);
+               mPreview = (NativePreviewer) findViewById(com.opencv.R.id.nativepreviewer);
+               LinearLayout glview_layout = (LinearLayout) findViewById(com.opencv.R.id.glview_layout);
+               glview = new GL2CameraViewer(getApplication(), true, 0, 0);
+               glview_layout.addView(glview);
+       }
+
+       /**
+        * Handle the capture button as follows...
+        */
+       @Override
+       public boolean onKeyUp(int keyCode, KeyEvent event) {
+
+               switch (keyCode) {
+               case KeyEvent.KEYCODE_CAMERA:
+               case KeyEvent.KEYCODE_SPACE:
+               case KeyEvent.KEYCODE_DPAD_CENTER:
+                       cameraButtonHandler.setIsCapture(true);
+                       return true;
+
+               default:
+                       return super.onKeyUp(keyCode, event);
+               }
+
+       }
+
+       /**
+        * Handle the capture button as follows... On some phones there is no
+        * capture button, only trackball
+        */
+       @Override
+       public boolean onTrackballEvent(MotionEvent event) {
+               if (event.getAction() == MotionEvent.ACTION_UP) {
+                       cameraButtonHandler.setIsCapture(true);
+                       return true;
+               }
+               return super.onTrackballEvent(event);
+       }
+
+       /**
+        * Prevent the screen from being turned off by the system.
+        */
+       public void disableScreenTurnOff() {
+               getWindow().setFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON,
+                               WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
+       }
+
+       /**
+        * Sets the orientation to landscape, as this is needed by AndAR.
+        */
+       public void setOrientation() {
+               setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
+       }
+
+       /**
+        * Maximize the application.
+        */
+       public void setFullscreen() {
+               requestWindowFeature(Window.FEATURE_NO_TITLE);
+               getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
+                               WindowManager.LayoutParams.FLAG_FULLSCREEN);
+       }
+
+       @Override
+       protected void onPause() {
+               super.onPause();
+               mPreview.onPause();
+               glview.onPause();
+       }
+
+       @Override
+       protected void onResume() {
+               super.onResume();
+               mPreview.setParamsFromPrefs(getApplicationContext());
+               glview.onResume();
+               mPreview.onResume();
+               setCallbackStack();
+       }
+
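+       /**
+        * Installs the per-frame processing callbacks on the previewer. If the
+        * subclass returns null from getCallBackStack(), a default stack
+        * containing only the GL drawing callback is used.
+        */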
+       protected void setCallbackStack() {
+               LinkedList<NativeProcessor.PoolCallback> callbackstack = getCallBackStack();
+               if (callbackstack == null){
+                       callbackstack = new LinkedList<NativeProcessor.PoolCallback>();
+                       callbackstack.add(glview.getDrawCallback());
+               }
+               mPreview.addCallbackStack(callbackstack);
+       }
+
+       /**
+        * Override this to provide your processors to the camera.
+        * 
+        * @return null for default drawing
+        */
+       protected abstract LinkedList<NativeProcessor.PoolCallback> getCallBackStack();
+       public void onCapture(){
+               
+       }
+
+       protected NativePreviewer mPreview;
+       protected GL2CameraViewer glview;
+       protected CameraButtonsHandler cameraButtonHandler;
+}
diff --git a/android/android-opencv/src/com/opencv/camera/CameraButtonsHandler.java b/android/android-opencv/src/com/opencv/camera/CameraButtonsHandler.java
new file mode 100644 (file)
index 0000000..bbf5c2d
--- /dev/null
@@ -0,0 +1,83 @@
+package com.opencv.camera;
+
+import android.app.Activity;
+import android.content.Context;
+import android.content.Intent;
+import android.view.View;
+import android.view.View.OnClickListener;
+import android.widget.ImageButton;
+
+public class CameraButtonsHandler {
+       /** Constructs a buttons handler; it registers with the capture button
+        * and the camera settings button.
+        * @param a The activity that has inflated the com.opencv.R.layout.camera
+        * as its layout.
+        */
+       public CameraButtonsHandler(Activity a, CaptureListener l) {
+               ImageButton capture = (ImageButton) a
+                               .findViewById(com.opencv.R.id.button_capture);
+               ImageButton settings = (ImageButton) a
+                               .findViewById(com.opencv.R.id.button_camera_settings);
+               capture.setOnClickListener(capture_listener);
+               settings.setOnClickListener(settings_listener);
+               captureListener = l;
+               ctx = a;
+       }
+       
+       public CameraButtonsHandler(Activity a) {
+               ImageButton capture = (ImageButton) a
+                               .findViewById(com.opencv.R.id.button_capture);
+               ImageButton settings = (ImageButton) a
+                               .findViewById(com.opencv.R.id.button_camera_settings);
+               capture.setOnClickListener(capture_listener);
+               settings.setOnClickListener(settings_listener);
+               ctx = a;
+       }
+       
+       
+       /** Check if the capture button has been pressed
+        * @return true if the capture button has been pressed
+        */
+       synchronized public boolean isCapture(){
+               return capture_flag;
+       }
+       
+       /** Reset the capture flag 
+        */
+       synchronized public void resetIsCapture(){
+               capture_flag = false;
+       }
+       
+       /** Manually set the flag - call this on any event that should trigger
+        * a capture
+        * @param isCapture true if a capture should take place
+        */
+       synchronized public void setIsCapture(boolean isCapture){
+               capture_flag = isCapture;
+               if(capture_flag && captureListener != null){
+                       captureListener.onCapture();
+               }
+       }
+       
+       private OnClickListener capture_listener = new View.OnClickListener() {
+               @Override
+               public void onClick(View v) {
+                       setIsCapture(true);
+               }
+       };
+       private OnClickListener settings_listener = new View.OnClickListener() {
+               @Override
+               public void onClick(View v) {
+                       Intent configurer = new Intent(ctx,
+                                       CameraConfig.class);
+                       ctx.startActivity(configurer);
+               }
+       };
+
+       interface CaptureListener{
+               public void onCapture();
+       }
+       private CaptureListener captureListener;
+       private Context ctx;            
+       private boolean capture_flag = false;
+}
diff --git a/android/android-opencv/src/com/opencv/camera/CameraConfig.java b/android/android-opencv/src/com/opencv/camera/CameraConfig.java
new file mode 100644 (file)
index 0000000..fcedd1b
--- /dev/null
@@ -0,0 +1,214 @@
+package com.opencv.camera;
+
+import com.opencv.R;
+
+import android.app.Activity;
+import android.content.Context;
+import android.content.SharedPreferences;
+import android.content.SharedPreferences.Editor;
+import android.os.Bundle;
+import android.view.View;
+import android.widget.AdapterView;
+import android.widget.AdapterView.OnItemSelectedListener;
+import android.widget.Spinner;
+
+public class CameraConfig extends Activity {
+       public static final String CAMERA_SETTINGS = "CAMERA_SETTINGS";
+       public static final String CAMERA_MODE = "camera_mode";
+       public static final String IMAGE_WIDTH = "IMAGE_WIDTH";
+       public static final String IMAGE_HEIGHT = "IMAGE_HEIGHT";
+       public static final int CAMERA_MODE_BW = 0;
+       public static final int CAMERA_MODE_COLOR = 1;
+       private static final String WHITEBALANCE = "WHITEBALANCE";
+
+       public static int readCameraMode(Context ctx) {
+               // Restore preferences
+               SharedPreferences settings = ctx.getSharedPreferences(CAMERA_SETTINGS,
+                               0);
+               int mode = settings.getInt(CAMERA_MODE, CAMERA_MODE_BW);
+               return mode;
+       }
+       
+       public static String readWhitebalace(Context ctx) {
+               // Restore preferences
+               SharedPreferences settings = ctx.getSharedPreferences(CAMERA_SETTINGS,
+                               0);
+               return settings.getString(WHITEBALANCE, "auto");
+       }
+
+       static public void setCameraMode(Context context, String mode) {
+               int m = 0;
+               if (mode.equals("BW")) {
+                       m = CAMERA_MODE_BW;
+               } else if (mode.equals("color"))
+                       m = CAMERA_MODE_COLOR;
+               setCameraMode(context, m);
+       }
+
+       private static String sizeToString(int[] size) {
+               return size[0] + "x" + size[1];
+       }
+
+       private static void parseStrToSize(String ssize, int[] size) {
+               String sz[] = ssize.split("x");
+               size[0] = Integer.valueOf(sz[0]);
+               size[1] = Integer.valueOf(sz[1]);
+       }
+
+       public static void readImageSize(Context ctx, int[] size) {
+               // Restore preferences
+               SharedPreferences settings = ctx.getSharedPreferences(CAMERA_SETTINGS,
+                               0);
+               size[0] = settings.getInt(IMAGE_WIDTH, 640);
+               size[1] = settings.getInt(IMAGE_HEIGHT, 480);
+
+       }
+
+       public static void setCameraMode(Context ctx, int mode) {
+               // Restore preferences
+               SharedPreferences settings = ctx.getSharedPreferences(CAMERA_SETTINGS,
+                               0);
+               Editor editor = settings.edit();
+               editor.putInt(CAMERA_MODE, mode);
+               editor.commit();
+       }
+
+       public static void setImageSize(Context ctx, String strsize) {
+               int size[] = { 0, 0 };
+               parseStrToSize(strsize, size);
+               setImageSize(ctx, size[0], size[1]);
+       }
+
+       public static void setImageSize(Context ctx, int width, int height) {
+               // Restore preferences
+               SharedPreferences settings = ctx.getSharedPreferences(CAMERA_SETTINGS,
+                               0);
+               Editor editor = settings.edit();
+               editor.putInt(IMAGE_WIDTH, width);
+               editor.putInt(IMAGE_HEIGHT, height);
+               editor.commit();
+       }
+
+       @Override
+       protected void onCreate(Bundle savedInstanceState) {
+               // TODO Auto-generated method stub
+               super.onCreate(savedInstanceState);
+               setContentView(R.layout.camerasettings);
+               int mode = readCameraMode(this);
+               int size[] = { 0, 0 };
+               readImageSize(this, size);
+
+               final Spinner size_spinner;
+               final Spinner mode_spinner;
+               final Spinner whitebalance_spinner;
+               size_spinner = (Spinner) findViewById(R.id.image_size);
+               mode_spinner = (Spinner) findViewById(R.id.camera_mode);
+               whitebalance_spinner = (Spinner) findViewById(R.id.whitebalance);
+
+               String strsize = sizeToString(size);
+               String strmode = modeToString(mode);
+               String wbmode = readWhitebalace(getApplicationContext());
+
+               String sizes[] = getResources().getStringArray(R.array.image_sizes);
+
+               int i = 1;
+               for (String x : sizes) {
+                       if (x.equals(strsize))
+                               break;
+                       i++;
+               }
+               if(i <= sizes.length)
+                       size_spinner.setSelection(i-1);
+
+               i = 1;
+               String modes[] =  getResources().getStringArray(R.array.camera_mode);
+               for (String x :modes) {
+                       if (x.equals(strmode))
+                               break;
+                       i++;
+               }
+               if(i <= modes.length)
+                       mode_spinner.setSelection(i-1);
+               
+               i = 1;
+               String wbmodes[] =  getResources().getStringArray(R.array.whitebalance);
+               for (String x :wbmodes) {
+                       if (x.equals(wbmode))
+                               break;
+                       i++;
+               }
+               if(i <= wbmodes.length)
+                       whitebalance_spinner.setSelection(i-1);
+
+               size_spinner.setOnItemSelectedListener(new OnItemSelectedListener() {
+
+                       @Override
+                       public void onItemSelected(AdapterView<?> arg0, View spinner,
+                                       int position, long arg3) {
+                               Object o = size_spinner.getItemAtPosition(position);
+                               if (o != null)
+                                       setImageSize(spinner.getContext(), (String) o);
+                       }
+
+                       @Override
+                       public void onNothingSelected(AdapterView<?> arg0) {
+
+                       }
+               });
+               mode_spinner.setOnItemSelectedListener(new OnItemSelectedListener() {
+
+                       @Override
+                       public void onItemSelected(AdapterView<?> arg0, View spinner,
+                                       int position, long arg3) {
+                               Object o = mode_spinner.getItemAtPosition(position);
+                               if (o != null)
+                                       setCameraMode(spinner.getContext(), (String) o);
+
+                       }
+
+                       @Override
+                       public void onNothingSelected(AdapterView<?> arg0) {
+
+                       }
+               });
+
+               whitebalance_spinner.setOnItemSelectedListener(new OnItemSelectedListener() {
+
+                       @Override
+                       public void onItemSelected(AdapterView<?> arg0, View spinner,
+                                       int position, long arg3) {
+                               Object o = whitebalance_spinner.getItemAtPosition(position);
+                               if (o != null)
+                                       setWhitebalance(spinner.getContext(), (String) o);
+
+                       }
+
+
+                       @Override
+                       public void onNothingSelected(AdapterView<?> arg0) {
+
+                       }
+               });
+
+       }
+
+       public static void setWhitebalance(Context ctx, String o) {
+               SharedPreferences settings = ctx.getSharedPreferences(CAMERA_SETTINGS,
+                               0);
+               Editor editor = settings.edit();
+               editor.putString(WHITEBALANCE, o);
+               editor.commit();
+               
+       }
+
+       private String modeToString(int mode) {
+               switch (mode) {
+               case CAMERA_MODE_BW:
+                       return "BW";
+               case CAMERA_MODE_COLOR:
+                       return "color";
+               default:
+                       return "";
+               }
+       }
+}
diff --git a/android/android-opencv/src/com/opencv/camera/NativePreviewer.java b/android/android-opencv/src/com/opencv/camera/NativePreviewer.java
new file mode 100644 (file)
index 0000000..4554d49
--- /dev/null
@@ -0,0 +1,482 @@
+package com.opencv.camera;
+
+import java.io.IOException;
+import java.lang.reflect.Method;
+import java.util.Date;
+import java.util.LinkedList;
+import java.util.List;
+
+import android.content.Context;
+import android.graphics.PixelFormat;
+import android.hardware.Camera;
+import android.hardware.Camera.PreviewCallback;
+import android.hardware.Camera.Size;
+import android.os.Handler;
+import android.util.AttributeSet;
+import android.util.Log;
+import android.view.SurfaceHolder;
+import android.view.SurfaceView;
+
+import com.opencv.camera.NativeProcessor.NativeProcessorCallback;
+import com.opencv.camera.NativeProcessor.PoolCallback;
+
+public class NativePreviewer extends SurfaceView implements
+               SurfaceHolder.Callback, Camera.PreviewCallback, NativeProcessorCallback {
+
+       private String whitebalance_mode = "auto";
+
+       /**
+        * Constructor useful for defining a NativePreviewer in android layout xml
+        * 
+        * @param context the Context the view runs in
+        * @param attributes the attribute set from the layout xml
+        */
+       public NativePreviewer(Context context, AttributeSet attributes) {
+               super(context, attributes);
+               listAllCameraMethods();
+               // Install a SurfaceHolder.Callback so we get notified when the
+               // underlying surface is created and destroyed.
+               mHolder = getHolder();
+               mHolder.addCallback(this);
+               mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
+
+               /*
+                * TODO get this working! Can't figure out how to define these in xml
+                */
+               preview_width = attributes.getAttributeIntValue("opencv",
+                               "preview_width", 600);
+               preview_height = attributes.getAttributeIntValue("opencv",
+                               "preview_height", 600);
+               
+               Log.d("NativePreviewer", "Trying to use preview size of "
+                               + preview_width + " " + preview_height);
+
+               processor = new NativeProcessor();
+
+               setZOrderMediaOverlay(false);
+       }
+
+       /**
+        * 
+        * @param context
+        * @param preview_width
+        *            the desired camera preview width - will attempt to get as
+        *            close to this as possible
+        * @param preview_height
+        *            the desired camera preview height
+        */
+       public NativePreviewer(Context context, int preview_width,
+                       int preview_height) {
+               super(context);
+
+               listAllCameraMethods();
+               // Install a SurfaceHolder.Callback so we get notified when the
+               // underlying surface is created and destroyed.
+               mHolder = getHolder();
+               mHolder.addCallback(this);
+               mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
+
+               this.preview_width = preview_width;
+               this.preview_height = preview_height;
+
+               processor = new NativeProcessor();
+               setZOrderMediaOverlay(false);
+
+       }
+
+       /**
+        * Only call in the onCreate function of the instantiating activity.
+        * 
+        * @param width
+        *            desired width
+        * @param height
+        *            desired height
+        */
+       public void setPreviewSize(int width, int height){
+               preview_width = width;
+               preview_height = height;
+               
+               Log.d("NativePreviewer", "Trying to use preview size of "
+                               + preview_width + " " + preview_height);
+
+       }
+       
+       public void setParamsFromPrefs(Context ctx){
+               int size[] ={0,0};
+               CameraConfig.readImageSize(ctx, size);
+               int mode = CameraConfig.readCameraMode(ctx);
+               setPreviewSize(size[0], size[1]);
+               setGrayscale(mode == CameraConfig.CAMERA_MODE_BW);
+               whitebalance_mode = CameraConfig.readWhitebalace(ctx);
+       }
+
+       public void surfaceCreated(SurfaceHolder holder) {
+
+       }
+
+       public void surfaceDestroyed(SurfaceHolder holder) {
+               releaseCamera();
+       }
+
+       public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
+
+               try {
+                       initCamera(mHolder);
+               } catch (InterruptedException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+                       return;
+               }
+
+               // Now that the size is known, set up the camera parameters and begin
+               // the preview.
+
+               Camera.Parameters parameters = mCamera.getParameters();
+               List<Camera.Size> pvsizes = mCamera.getParameters()
+                               .getSupportedPreviewSizes();
+               int best_width = 1000000;
+               int best_height = 1000000;
+               int bdist = 100000;
+               for (Size x : pvsizes) {
+                       if (Math.abs(x.width - preview_width) < bdist) {
+                               bdist = Math.abs(x.width - preview_width);
+                               best_width = x.width;
+                               best_height = x.height;
+                       }
+               }
+               preview_width = best_width;
+               preview_height = best_height;
+               
+               Log.d("NativePreviewer", "Determined compatible preview size is: ("
+                               + preview_width + "," + preview_height + ")");
+
+               Log.d("NativePreviewer", "Supported params: "
+                               + mCamera.getParameters().flatten());
+
+               
+               // this is available in 8+
+               // parameters.setExposureCompensation(0);
+               if (parameters.getSupportedWhiteBalance().contains(whitebalance_mode)) {
+                       parameters.setWhiteBalance(whitebalance_mode);
+               }
+//             if (parameters.getSupportedAntibanding().contains(
+//                             Camera.Parameters.ANTIBANDING_OFF)) {
+//                     parameters.setAntibanding(Camera.Parameters.ANTIBANDING_OFF);
+//             }
+
+               List<String> fmodes = mCamera.getParameters().getSupportedFocusModes();
+               // for(String x: fmodes){
+
+               // }
+               
+       
+
+               if (parameters.get("meter-mode") != null)
+                       parameters.set("meter-mode", "meter-average");
+               int idx = fmodes.indexOf(Camera.Parameters.FOCUS_MODE_INFINITY);
+               if (idx != -1) {
+                       parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_INFINITY);
+               } else if (fmodes.indexOf(Camera.Parameters.FOCUS_MODE_FIXED) != -1) {
+                       parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_FIXED);
+               }
+
+               if (fmodes.indexOf(Camera.Parameters.FOCUS_MODE_AUTO) != -1) {
+                       hasAutoFocus = true;
+               }
+
+               List<String> scenemodes = mCamera.getParameters()
+                               .getSupportedSceneModes();
+               if (scenemodes != null)
+                       if (scenemodes.indexOf(Camera.Parameters.SCENE_MODE_ACTION) != -1) {
+                               parameters.setSceneMode(Camera.Parameters.SCENE_MODE_ACTION);
+                               Log.d("NativePreviewer", "set scenemode to action");
+                       }
+
+               parameters.setPreviewSize(preview_width, preview_height);
+
+               mCamera.setParameters(parameters);
+
+               pixelinfo = new PixelFormat();
+               pixelformat = mCamera.getParameters().getPreviewFormat();
+               PixelFormat.getPixelFormatInfo(pixelformat, pixelinfo);
+
+               Size preview_size = mCamera.getParameters().getPreviewSize();
+               preview_width = preview_size.width;
+               preview_height = preview_size.height;
+               int bufSize = preview_width * preview_height * pixelinfo.bitsPerPixel
+                               / 8;
+
+               // Must call this before calling addCallbackBuffer to get all the
+               // reflection variables setup
+               initForACB();
+               initForPCWB();
+
+               // Use only one buffer, so that we don't preview too many frames and bog
+               // down the system
+               byte[] buffer = new byte[bufSize];
+               addCallbackBuffer(buffer);
+               setPreviewCallbackWithBuffer();
+
+               mCamera.startPreview();
+
+       }
+
+       public void postautofocus(int delay) {
+               if (hasAutoFocus)
+                       handler.postDelayed(autofocusrunner, delay);
+
+       }
+
+       /**
+        * Demonstration of how to use onPreviewFrame. In this case I'm not
+        * processing the data, I'm just adding the buffer back to the buffer queue
+        * for re-use
+        */
+       public void onPreviewFrame(byte[] data, Camera camera) {
+
+               if (start == null) {
+                       start = new Date();
+               }
+
+               processor.post(data, preview_width, preview_height, pixelformat,
+                               System.nanoTime(), this);
+
+               fcount++;
+               if (fcount % 100 == 0) {
+                       double ms = (new Date()).getTime() - start.getTime();
+                       Log.i("NativePreviewer", "fps:" + fcount / (ms / 1000.0));
+                       start = new Date();
+                       fcount = 0;
+               }
+
+       }
+
+       @Override
+       public void onDoneNativeProcessing(byte[] buffer) {
+               addCallbackBuffer(buffer);
+       }
+
+       public void addCallbackStack(LinkedList<PoolCallback> callbackstack) {
+               processor.addCallbackStack(callbackstack);
+       }
+
+       /**
+        * This must be called when the activity pauses, in Activity.onPause. This
+        * has the side effect of clearing the callback stack.
+        * 
+        */
+       public void onPause() {
+
+               releaseCamera();
+
+               addCallbackStack(null);
+
+               processor.stop();
+
+       }
+
+       public void onResume() {
+
+               processor.start();
+
+       }
+
+       private Method mPCWB;
+
+       private void initForPCWB() {
+
+               try {
+
+                       mPCWB = Class.forName("android.hardware.Camera").getMethod(
+                                       "setPreviewCallbackWithBuffer", PreviewCallback.class);
+
+               } catch (Exception e) {
+                       Log.e("NativePreviewer",
+                                       "Problem setting up for setPreviewCallbackWithBuffer: "
+                                                       + e.toString());
+               }
+
+       }
+
+       /**
+        * This method allows you to add a byte buffer to the queue of buffers to be
+        * used by preview. See:
+        * http://android.git.kernel.org/?p=platform/frameworks
+        * /base.git;a=blob;f=core/java/android/hardware/Camera.java;hb=9d
+        * b3d07b9620b4269ab33f78604a36327e536ce1
+        * 
+        * @param b
+        *            The buffer to register. Size should be width * height *
+        *            bitsPerPixel / 8.
+        */
+       private void addCallbackBuffer(byte[] b) {
+
+               try {
+
+                       mAcb.invoke(mCamera, b);
+               } catch (Exception e) {
+                       Log.e("NativePreviewer",
+                                       "invoking addCallbackBuffer failed: " + e.toString());
+               }
+       }
+
+       /**
+        * Use this method instead of setPreviewCallback if you want to use manually
+        * allocated buffers. Assumes that "this" implements Camera.PreviewCallback
+        */
+       private void setPreviewCallbackWithBuffer() {
+               // mCamera.setPreviewCallback(this);
+               // return;
+               try {
+
+                       // If we were able to find the setPreviewCallbackWithBuffer method
+                       // of Camera, we can now invoke it on our Camera instance,
+                       // setting 'this' to be the callback handler
+                       mPCWB.invoke(mCamera, this);
+
+                       // Log.d("NativePrevier","setPreviewCallbackWithBuffer: Called method");
+
+               } catch (Exception e) {
+
+                       Log.e("NativePreviewer", e.toString());
+               }
+       }
+
+       @SuppressWarnings("unused")
+       private void clearPreviewCallbackWithBuffer() {
+               // mCamera.setPreviewCallback(this);
+               // return;
+               try {
+
+                       // If we were able to find the setPreviewCallbackWithBuffer method
+                       // of Camera, we can now invoke it on our Camera instance,
+                       // setting 'this' to be the callback handler
+                       mPCWB.invoke(mCamera, (PreviewCallback) null);
+
+                       // Log.d("NativePrevier","setPreviewCallbackWithBuffer: cleared");
+
+               } catch (Exception e) {
+
+                       Log.e("NativePreviewer", e.toString());
+               }
+       }
+
+       /**
+        * These variables are re-used over and over by addCallbackBuffer
+        */
+       private Method mAcb;
+
+       private void initForACB() {
+               try {
+
+                       mAcb = Class.forName("android.hardware.Camera").getMethod(
+                                       "addCallbackBuffer", byte[].class);
+
+               } catch (Exception e) {
+                       Log.e("NativePreviewer",
+                                       "Problem setting up for addCallbackBuffer: " + e.toString());
+               }
+       }
+
+       private Runnable autofocusrunner = new Runnable() {
+
+               @Override
+               public void run() {
+                       mCamera.autoFocus(autocallback);
+               }
+       };
+
+       private Camera.AutoFocusCallback autocallback = new Camera.AutoFocusCallback() {
+
+               @Override
+               public void onAutoFocus(boolean success, Camera camera) {
+                       if (!success)
+                               postautofocus(1000);
+               }
+       };
+
+       /**
+        * This method will list all methods of the android.hardware.Camera class,
+        * even the hidden ones. With the information it provides, you can use the
+        * same approach I took below to expose methods that were written but hidden
+        * in Eclair.
+        */
+       private void listAllCameraMethods() {
+               try {
+                       Class<?> c = Class.forName("android.hardware.Camera");
+                       Method[] m = c.getMethods();
+                       for (int i = 0; i < m.length; i++) {
+                               Log.d("NativePreviewer", "  method:" + m[i].toString());
+                       }
+               } catch (Exception e) {
+                       // TODO Auto-generated catch block
+                       Log.e("NativePreviewer", e.toString());
+               }
+       }
+
+       private void initCamera(SurfaceHolder holder) throws InterruptedException {
+               if (mCamera == null) {
+                       // The Surface has been created, acquire the camera and tell it
+                       // where to draw.
+                       int i = 0;
+                       while (i++ < 5) {
+                               try {
+                                       mCamera = Camera.open();
+                                       break;
+                               } catch (RuntimeException e) {
+                                       Thread.sleep(200);
+                               }
+                       }
+                       try {
+                               mCamera.setPreviewDisplay(holder);
+                       } catch (IOException exception) {
+                               mCamera.release();
+                               mCamera = null;
+
+                       } catch (RuntimeException e) {
+                               Log.e("camera", "stacktrace", e);
+                       }
+               }
+       }
+
+       private void releaseCamera() {
+               if (mCamera != null) {
+                       // Surface will be destroyed when we return, so stop the preview.
+                       // Because the CameraDevice object is not a shared resource, it's very
+                       // important to release it when the activity is paused.
+                       mCamera.stopPreview();
+                       mCamera.release();
+               }
+
+               // processor = null;
+               mCamera = null;
+               mAcb = null;
+               mPCWB = null;
+       }
+
+       private Handler handler = new Handler();
+
+       private Date start;
+       private int fcount = 0;
+       private boolean hasAutoFocus = false;
+       private SurfaceHolder mHolder;
+       private Camera mCamera;
+
+       private NativeProcessor processor;
+
+       private int preview_width, preview_height;
+       private int pixelformat;
+       private PixelFormat pixelinfo;
+
+       public void setGrayscale(boolean b) {
+               processor.setGrayscale(b);
+               
+       }
+
+}
\ No newline at end of file
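
Editorial note (not part of the commit): a minimal sketch of how an Activity is expected to drive NativePreviewer, following the onCreate/onPause/onResume contract documented in the javadoc above. All names other than the android-opencv classes (PreviewActivity, the chosen 640x480 size) are placeholders.

    import java.util.LinkedList;

    import android.app.Activity;
    import android.os.Bundle;

    import com.opencv.camera.NativePreviewer;
    import com.opencv.camera.NativeProcessor.PoolCallback;

    public class PreviewActivity extends Activity {
        private NativePreviewer mPreview;

        @Override
        protected void onCreate(Bundle savedInstanceState) {
            super.onCreate(savedInstanceState);
            // desired preview size; the previewer picks the closest supported one
            mPreview = new NativePreviewer(getApplication(), 640, 480);
            setContentView(mPreview);
        }

        @Override
        protected void onResume() {
            super.onResume();
            mPreview.setParamsFromPrefs(this); // pick up CameraConfig settings
            LinkedList<PoolCallback> stack = new LinkedList<PoolCallback>();
            // add PoolCallback implementations here, e.g. a GL2CameraViewer draw callback
            mPreview.addCallbackStack(stack);
            mPreview.onResume();
        }

        @Override
        protected void onPause() {
            super.onPause();
            mPreview.onPause(); // releases the camera and clears the callback stack
        }
    }
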
diff --git a/android/android-opencv/src/com/opencv/camera/NativeProcessor.java b/android/android-opencv/src/com/opencv/camera/NativeProcessor.java
new file mode 100644 (file)
index 0000000..4dce3bb
--- /dev/null
@@ -0,0 +1,285 @@
+package com.opencv.camera;
+
+import java.util.LinkedList;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.locks.Lock;
+import java.util.concurrent.locks.ReentrantLock;
+
+import android.graphics.PixelFormat;
+import android.util.Log;
+
+import com.opencv.jni.image_pool;
+import com.opencv.jni.opencv;
+
+/** The NativeProcessor is a native processing stack engine.
+ * 
+ * What this means is that the NativeProcessor handles loading
+ * live camera frames into native memory space, i.e. the image_pool,
+ * and then calls a stack of PoolCallbacks, passing each of them the
+ * image_pool.
+ * 
+ * Index 0 of the image_pool is populated with the live video image.
+ * 
+ * Any modifications to the pool are made in place, so you may
+ * pass on changes to the pool to the next PoolCallback in the stack.
+ *
+ */
+public class NativeProcessor {
+       /** Users that would like access to live video frames
+        * should implement a PoolCallback.
+        * The idx and pool contain the images; specifically, idx == 0 is the
+        * live video frame.
+        */
+       static public interface PoolCallback {
+               void process(int idx, image_pool pool, long timestamp,
+                               NativeProcessor nativeProcessor);
+       }
+
+       
+
+       /** At every frame, each PoolCallback is called in order and is passed
+        * the same pool and index.
+        * 
+        * @param stack  A list of PoolCallback objects that will be called in order
+        */
+       public void addCallbackStack(LinkedList<PoolCallback> stack) {
+
+               try {
+                       while (!stacklock.tryLock(10, TimeUnit.MILLISECONDS)) {
+
+                       }
+                       try {
+                               nextStack = stack;
+                       } finally {
+                               stacklock.unlock();
+                       }
+               } catch (InterruptedException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+
+               }
+
+       }
+
+       /**
+        * Create a NativeProcessor. The processor will not start running until
+        * start is called, at which point it will operate in its own thread and
+        * sleep until a post is called. The processor should not be started until
+        * an onSurfaceChange event, and should be shut down when the surface is
+        * destroyed by calling stop, which interrupts the processing thread.
+        * 
+        */
+       public NativeProcessor() {
+               gray_scale_only = false;
+       }
+
+       
+       
+       /** Grayscale only is much faster because the YUV does not get decoded, and grayscale is only one
+        * byte per pixel - giving fast OpenGL texture loading.
+        * 
+        * You still have access to the whole YUV image, but only grayscale is immediately available
+        * to use without further effort.
+        * 
+        * Suggestion - use grayscale only and save your YUV images to disk if you would like color images.
+        * 
+        * Also, in grayscale mode, the images in the pool are only single channel, so please keep this in mind
+        * when accessing the color images - check cv::Mat::channels() or cv::Mat::type() if you're messing
+        * with color channels.
+        * 
+        * @param grayscale true if you want to only process grayscale images
+        */
+       public void setGrayscale(boolean grayscale){
+               gray_scale_only = grayscale;
+       }
+       
+
+       /**
+        * A callback that allows the NativeProcessor to pass back the buffer when
+        * it has completed processing a frame.
+        */
+       static protected interface NativeProcessorCallback {
+               /**
+                * Called after processing, meant to be received by the NativePreviewer,
+                * which reuses the byte buffer for the camera preview.
+                * 
+                * @param buffer
+                *            the buffer passed to the NativeProcessor with post.
+                */
+               void onDoneNativeProcessing(byte[] buffer);
+       }
+
+       
+       protected void stop() {
+               mthread.interrupt();
+               try {
+                       mthread.join();
+               } catch (InterruptedException e) {
+                       Log.w("NativeProcessor",
+                                       "interrupted while stopping " + e.getMessage());
+               }
+               mthread = null;
+       }
+
+       protected void start() {
+               mthread = new ProcessorThread();
+               mthread.start();
+       }
+       /**
+        * post is used to notify the processor that a preview frame is ready; this
+        * will return almost immediately. If the processor is busy, it returns false
+        * and the call is essentially a nop.
+        * 
+        * @param buffer
+        *            a preview frame from the Android Camera onPreviewFrame
+        *            callback
+        * @param width
+        *            of preview frame
+        * @param height
+        *            of preview frame
+        * @param format
+        *            of preview frame
+        * @return true if the processor wasn't busy and accepted the post, false if
+        *         the processor is still processing.
+        */
+
+       protected boolean post(byte[] buffer, int width, int height, int format,
+                       long timestamp, NativeProcessorCallback callback) {
+
+               lock.lock();
+               try {
+                       NPPostObject pobj = new NPPostObject(buffer, width, height, format,
+                                       timestamp, callback);
+                       postobjects.addFirst(pobj);
+               } finally {
+                       lock.unlock();
+               }
+               return true;
+
+       }
+       
+       private class ProcessorThread extends Thread {
+
+               private void process(NPPostObject pobj) throws Exception {
+
+                       if (pobj.format == PixelFormat.YCbCr_420_SP) {
+                               // add as color image, because we know how to decode this
+                               opencv.addYUVtoPool(pool, pobj.buffer, 0, pobj.width,
+                                               pobj.height, gray_scale_only);
+
+                       } else if (pobj.format == PixelFormat.YCbCr_422_SP) {
+                               // add as gray image, because this format is not coded for...
+                               // TODO figure out how to decode this format
+                               opencv.addYUVtoPool(pool, pobj.buffer, 0, pobj.width,
+                                               pobj.height, true);
+                       } else
+                               throw new Exception("bad pixel format!");
+
+                       for (PoolCallback x : stack) {
+                               if (interrupted()) {
+                                       throw new InterruptedException(
+                                                       "Native Processor interrupted while processing");
+                               }
+                               x.process(0, pool, pobj.timestamp, NativeProcessor.this);
+                       }
+
+                       pobj.done(); // tell the postobject that we're done doing
+                                                       // all the processing.
+
+               }
+
+               @Override
+               public void run() {
+
+                       try {
+                               while (true) {
+                                       yield();
+
+                                       while (!stacklock.tryLock(5, TimeUnit.MILLISECONDS)) {
+                                       }
+                                       try {
+                                               if (nextStack != null) {
+                                                       stack = nextStack;
+                                                       nextStack = null;
+                                               }
+                                       } finally {
+                                               stacklock.unlock();
+                                       }
+
+                                       NPPostObject pobj = null;
+
+                                       while (!lock.tryLock(5, TimeUnit.MILLISECONDS)) {
+                                       }
+                                       try {
+                                               if (postobjects.isEmpty())
+                                                       continue;
+                                               pobj = postobjects.removeLast();
+
+                                       } finally {
+                                               lock.unlock();
+
+                                       }
+
+                                       if (interrupted())
+                                               throw new InterruptedException();
+
+                                       if (stack != null && pobj != null)
+                                               process(pobj);
+
+                               }
+                       } catch (InterruptedException e) {
+
+                               Log.i("NativeProcessor",
+                                               "native processor interrupted, ending now");
+
+                       } catch (Exception e) {
+
+                               e.printStackTrace();
+                       } finally {
+
+                       }
+               }
+
+       }
+       
+       static private class NPPostObject {
+               public NPPostObject(byte[] buffer, int width, int height, int format,
+                               long timestamp, NativeProcessorCallback callback) {
+                       this.buffer = buffer;
+                       this.width = width;
+                       this.height = height;
+                       this.format = format;
+                       this.timestamp = timestamp;
+                       this.callback = callback;
+               }
+
+               public void done() {
+                       callback.onDoneNativeProcessing(buffer);
+
+               }
+
+               int width, height;
+               byte[] buffer;
+               int format;
+               long timestamp;
+               NativeProcessorCallback callback;
+       }
+
+
+       private LinkedList<NPPostObject> postobjects = new LinkedList<NPPostObject>();
+
+       private image_pool pool = new image_pool();
+
+       private final Lock lock = new ReentrantLock();
+
+       private LinkedList<PoolCallback> stack = new LinkedList<PoolCallback>();
+       private boolean gray_scale_only;
+       
+       private Lock stacklock = new ReentrantLock();
+
+       private LinkedList<PoolCallback> nextStack;
+       
+       private ProcessorThread mthread;
+
+}
\ No newline at end of file
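
Editorial note (not part of the commit): a sketch of a user-side PoolCallback, following the contract described in the class javadoc above - index 0 holds the live frame, and in-place work on the pool is visible to later callbacks in the stack.

    import android.util.Log;

    import com.opencv.camera.NativeProcessor;
    import com.opencv.jni.image_pool;

    public class FrameLogger implements NativeProcessor.PoolCallback {
        @Override
        public void process(int idx, image_pool pool, long timestamp,
                        NativeProcessor nativeProcessor) {
            // idx == 0 is the live video frame; any work done on the pool here is
            // in place, so the next PoolCallback in the stack sees the result.
            Log.i("FrameLogger", "got frame " + idx + " at t=" + timestamp);
        }
    }
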
diff --git a/android/android-opencv/src/com/opencv/opengl/GL2CameraViewer.java b/android/android-opencv/src/com/opencv/opengl/GL2CameraViewer.java
new file mode 100644 (file)
index 0000000..237ae82
--- /dev/null
@@ -0,0 +1,411 @@
+/*
+ * Copyright (C) 2009 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.opencv.opengl;
+/*
+ * Copyright (C) 2008 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+import javax.microedition.khronos.egl.EGL10;
+import javax.microedition.khronos.egl.EGLConfig;
+import javax.microedition.khronos.egl.EGLContext;
+import javax.microedition.khronos.egl.EGLDisplay;
+import javax.microedition.khronos.opengles.GL10;
+
+import com.opencv.camera.NativeProcessor;
+import com.opencv.camera.NativeProcessor.PoolCallback;
+import com.opencv.jni.glcamera;
+import com.opencv.jni.image_pool;
+
+import android.content.Context;
+import android.graphics.PixelFormat;
+import android.opengl.GLSurfaceView;
+import android.util.AttributeSet;
+import android.util.Log;
+
+
+
+/**
+ * A simple GLSurfaceView sub-class that demonstrates how to perform
+ * OpenGL ES 2.0 rendering into a GL Surface. Note the following important
+ * details:
+ *
+ * - The class must use a custom context factory to enable 2.0 rendering.
+ *   See ContextFactory class definition below.
+ *
+ * - The class must use a custom EGLConfigChooser to be able to select
+ *   an EGLConfig that supports 2.0. This is done by providing a config
+ *   specification to eglChooseConfig() that has the attribute
+ *   EGL10.EGL_RENDERABLE_TYPE containing the EGL_OPENGL_ES2_BIT flag
+ *   set. See ConfigChooser class definition below.
+ *
+ * - The class must select the surface's format, then choose an EGLConfig
+ *   that matches it exactly (with regards to red/green/blue/alpha channels
+ *   bit depths). Failure to do so would result in an EGL_BAD_MATCH error.
+ */
+public class GL2CameraViewer extends GLSurfaceView{
+    private static String TAG = "GL2JNIView";
+    private static final boolean DEBUG = false;
+       private PoolCallback poolcallback = new PoolCallback() {
+
+               @Override
+               public void process(int idx, image_pool pool, long timestamp,
+                               NativeProcessor nativeProcessor) {
+                       drawMatToGL(idx, pool);
+                       requestRender();
+               }
+       };
+
+    public GL2CameraViewer(Context context, AttributeSet attributeSet) {
+        super(context, attributeSet);
+
+        init(false, 0, 0);
+        setZOrderMediaOverlay(true);
+    }
+    public GL2CameraViewer(Context context) {
+        super(context);
+        init(false, 0, 0);
+        setZOrderMediaOverlay(true);
+    }
+
+    public GL2CameraViewer(Context context, boolean translucent, int depth, int stencil) {
+        super(context);
+        init(translucent, depth, stencil);
+        setZOrderMediaOverlay(true);
+    }
+
+    private void init(boolean translucent, int depth, int stencil) {
+
+       
+        /* By default, GLSurfaceView() creates a RGB_565 opaque surface.
+         * If we want a translucent one, we should change the surface's
+         * format here, using PixelFormat.TRANSLUCENT for GL Surfaces
+         * is interpreted as any 32-bit surface with alpha by SurfaceFlinger.
+         */
+        if (translucent) {
+            this.getHolder().setFormat(PixelFormat.TRANSLUCENT);
+        }
+
+        /* Setup the context factory for 2.0 rendering.
+         * See ContextFactory class definition below
+         */
+        setEGLContextFactory(new ContextFactory());
+
+        /* We need to choose an EGLConfig that matches the format of
+         * our surface exactly. This is going to be done in our
+         * custom config chooser. See ConfigChooser class definition
+         * below.
+         */
+        setEGLConfigChooser( translucent ?
+                             new ConfigChooser(8, 8, 8, 8, depth, stencil) :
+                             new ConfigChooser(5, 6, 5, 0, depth, stencil) );
+
+        /* Set the renderer responsible for frame rendering */
+        setRenderer(new Renderer());
+        setRenderMode(RENDERMODE_WHEN_DIRTY);
+        
+    }
+
+    private static class ContextFactory implements GLSurfaceView.EGLContextFactory {
+        private static int EGL_CONTEXT_CLIENT_VERSION = 0x3098;
+        public EGLContext createContext(EGL10 egl, EGLDisplay display, EGLConfig eglConfig) {
+            Log.w(TAG, "creating OpenGL ES 2.0 context");
+            checkEglError("Before eglCreateContext", egl);
+            int[] attrib_list = {EGL_CONTEXT_CLIENT_VERSION, 2, EGL10.EGL_NONE };
+            EGLContext context = egl.eglCreateContext(display, eglConfig, EGL10.EGL_NO_CONTEXT, attrib_list);
+            checkEglError("After eglCreateContext", egl);
+            return context;
+        }
+
+        public void destroyContext(EGL10 egl, EGLDisplay display, EGLContext context) {
+            egl.eglDestroyContext(display, context);
+        }
+    }
+
+    private static void checkEglError(String prompt, EGL10 egl) {
+        int error;
+        while ((error = egl.eglGetError()) != EGL10.EGL_SUCCESS) {
+            Log.e(TAG, String.format("%s: EGL error: 0x%x", prompt, error));
+        }
+    }
+
+    private static class ConfigChooser implements GLSurfaceView.EGLConfigChooser {
+
+        public ConfigChooser(int r, int g, int b, int a, int depth, int stencil) {
+            mRedSize = r;
+            mGreenSize = g;
+            mBlueSize = b;
+            mAlphaSize = a;
+            mDepthSize = depth;
+            mStencilSize = stencil;
+        }
+
+        /* This EGL config specification is used to specify 2.0 rendering.
+         * We use a minimum size of 4 bits for red/green/blue, but will
+         * perform actual matching in chooseConfig() below.
+         */
+        private static int EGL_OPENGL_ES2_BIT = 4;
+        private static int[] s_configAttribs2 =
+        {
+            EGL10.EGL_RED_SIZE, 4,
+            EGL10.EGL_GREEN_SIZE, 4,
+            EGL10.EGL_BLUE_SIZE, 4,
+            EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
+            EGL10.EGL_NONE
+        };
+
+        public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display) {
+
+            /* Get the number of minimally matching EGL configurations
+             */
+            int[] num_config = new int[1];
+            egl.eglChooseConfig(display, s_configAttribs2, null, 0, num_config);
+
+            int numConfigs = num_config[0];
+
+            if (numConfigs <= 0) {
+                throw new IllegalArgumentException("No configs match configSpec");
+            }
+
+            /* Allocate then read the array of minimally matching EGL configs
+             */
+            EGLConfig[] configs = new EGLConfig[numConfigs];
+            egl.eglChooseConfig(display, s_configAttribs2, configs, numConfigs, num_config);
+
+            if (DEBUG) {
+                 printConfigs(egl, display, configs);
+            }
+            /* Now return the "best" one
+             */
+            return chooseConfig(egl, display, configs);
+        }
+
+        public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display,
+                EGLConfig[] configs) {
+            for(EGLConfig config : configs) {
+                int d = findConfigAttrib(egl, display, config,
+                        EGL10.EGL_DEPTH_SIZE, 0);
+                int s = findConfigAttrib(egl, display, config,
+                        EGL10.EGL_STENCIL_SIZE, 0);
+
+                // We need at least mDepthSize and mStencilSize bits
+                if (d < mDepthSize || s < mStencilSize)
+                    continue;
+
+                // We want an *exact* match for red/green/blue/alpha
+                int r = findConfigAttrib(egl, display, config,
+                        EGL10.EGL_RED_SIZE, 0);
+                int g = findConfigAttrib(egl, display, config,
+                            EGL10.EGL_GREEN_SIZE, 0);
+                int b = findConfigAttrib(egl, display, config,
+                            EGL10.EGL_BLUE_SIZE, 0);
+                int a = findConfigAttrib(egl, display, config,
+                        EGL10.EGL_ALPHA_SIZE, 0);
+
+                if (r == mRedSize && g == mGreenSize && b == mBlueSize && a == mAlphaSize)
+                    return config;
+            }
+            return null;
+        }
+
+        private int findConfigAttrib(EGL10 egl, EGLDisplay display,
+                EGLConfig config, int attribute, int defaultValue) {
+
+            if (egl.eglGetConfigAttrib(display, config, attribute, mValue)) {
+                return mValue[0];
+            }
+            return defaultValue;
+        }
+
+        private void printConfigs(EGL10 egl, EGLDisplay display,
+            EGLConfig[] configs) {
+            int numConfigs = configs.length;
+            Log.w(TAG, String.format("%d configurations", numConfigs));
+            for (int i = 0; i < numConfigs; i++) {
+                Log.w(TAG, String.format("Configuration %d:\n", i));
+                printConfig(egl, display, configs[i]);
+            }
+        }
+
+        private void printConfig(EGL10 egl, EGLDisplay display,
+                EGLConfig config) {
+            int[] attributes = {
+                    EGL10.EGL_BUFFER_SIZE,
+                    EGL10.EGL_ALPHA_SIZE,
+                    EGL10.EGL_BLUE_SIZE,
+                    EGL10.EGL_GREEN_SIZE,
+                    EGL10.EGL_RED_SIZE,
+                    EGL10.EGL_DEPTH_SIZE,
+                    EGL10.EGL_STENCIL_SIZE,
+                    EGL10.EGL_CONFIG_CAVEAT,
+                    EGL10.EGL_CONFIG_ID,
+                    EGL10.EGL_LEVEL,
+                    EGL10.EGL_MAX_PBUFFER_HEIGHT,
+                    EGL10.EGL_MAX_PBUFFER_PIXELS,
+                    EGL10.EGL_MAX_PBUFFER_WIDTH,
+                    EGL10.EGL_NATIVE_RENDERABLE,
+                    EGL10.EGL_NATIVE_VISUAL_ID,
+                    EGL10.EGL_NATIVE_VISUAL_TYPE,
+                    0x3030, // EGL10.EGL_PRESERVED_RESOURCES,
+                    EGL10.EGL_SAMPLES,
+                    EGL10.EGL_SAMPLE_BUFFERS,
+                    EGL10.EGL_SURFACE_TYPE,
+                    EGL10.EGL_TRANSPARENT_TYPE,
+                    EGL10.EGL_TRANSPARENT_RED_VALUE,
+                    EGL10.EGL_TRANSPARENT_GREEN_VALUE,
+                    EGL10.EGL_TRANSPARENT_BLUE_VALUE,
+                    0x3039, // EGL10.EGL_BIND_TO_TEXTURE_RGB,
+                    0x303A, // EGL10.EGL_BIND_TO_TEXTURE_RGBA,
+                    0x303B, // EGL10.EGL_MIN_SWAP_INTERVAL,
+                    0x303C, // EGL10.EGL_MAX_SWAP_INTERVAL,
+                    EGL10.EGL_LUMINANCE_SIZE,
+                    EGL10.EGL_ALPHA_MASK_SIZE,
+                    EGL10.EGL_COLOR_BUFFER_TYPE,
+                    EGL10.EGL_RENDERABLE_TYPE,
+                    0x3042 // EGL10.EGL_CONFORMANT
+            };
+            String[] names = {
+                    "EGL_BUFFER_SIZE",
+                    "EGL_ALPHA_SIZE",
+                    "EGL_BLUE_SIZE",
+                    "EGL_GREEN_SIZE",
+                    "EGL_RED_SIZE",
+                    "EGL_DEPTH_SIZE",
+                    "EGL_STENCIL_SIZE",
+                    "EGL_CONFIG_CAVEAT",
+                    "EGL_CONFIG_ID",
+                    "EGL_LEVEL",
+                    "EGL_MAX_PBUFFER_HEIGHT",
+                    "EGL_MAX_PBUFFER_PIXELS",
+                    "EGL_MAX_PBUFFER_WIDTH",
+                    "EGL_NATIVE_RENDERABLE",
+                    "EGL_NATIVE_VISUAL_ID",
+                    "EGL_NATIVE_VISUAL_TYPE",
+                    "EGL_PRESERVED_RESOURCES",
+                    "EGL_SAMPLES",
+                    "EGL_SAMPLE_BUFFERS",
+                    "EGL_SURFACE_TYPE",
+                    "EGL_TRANSPARENT_TYPE",
+                    "EGL_TRANSPARENT_RED_VALUE",
+                    "EGL_TRANSPARENT_GREEN_VALUE",
+                    "EGL_TRANSPARENT_BLUE_VALUE",
+                    "EGL_BIND_TO_TEXTURE_RGB",
+                    "EGL_BIND_TO_TEXTURE_RGBA",
+                    "EGL_MIN_SWAP_INTERVAL",
+                    "EGL_MAX_SWAP_INTERVAL",
+                    "EGL_LUMINANCE_SIZE",
+                    "EGL_ALPHA_MASK_SIZE",
+                    "EGL_COLOR_BUFFER_TYPE",
+                    "EGL_RENDERABLE_TYPE",
+                    "EGL_CONFORMANT"
+            };
+            int[] value = new int[1];
+            for (int i = 0; i < attributes.length; i++) {
+                int attribute = attributes[i];
+                String name = names[i];
+                if ( egl.eglGetConfigAttrib(display, config, attribute, value)) {
+                    Log.w(TAG, String.format("  %s: %d\n", name, value[0]));
+                } else {
+                    // Log.w(TAG, String.format("  %s: failed\n", name));
+                    while (egl.eglGetError() != EGL10.EGL_SUCCESS);
+                }
+            }
+        }
+
+        // Subclasses can adjust these values:
+        protected int mRedSize;
+        protected int mGreenSize;
+        protected int mBlueSize;
+        protected int mAlphaSize;
+        protected int mDepthSize;
+        protected int mStencilSize;
+        private int[] mValue = new int[1];
+    }
+
+    glcamera mglcamera;
+    public void drawMatToGL(int idx, image_pool pool){
+       if(mglcamera != null)
+               mglcamera.drawMatToGL(idx, pool);
+       else
+               Log.e("android-opencv", "null glcamera!!!!");
+    }
+    public void clear(){
+       if(mglcamera != null)
+               mglcamera.clear();
+       else
+               Log.e("android-opencv", "null glcamera!!!!");
+    }
+    
+    private class Renderer implements GLSurfaceView.Renderer {
+       
+        public void onDrawFrame(GL10 gl) {
+               
+            mglcamera.step();
+        }
+
+        public void onSurfaceChanged(GL10 gl, int width, int height) {
+               
+            mglcamera.init(width, height);
+        }
+
+        public void onSurfaceCreated(GL10 gl, EGLConfig config) {
+           
+        }
+    }
+
+
+       @Override
+       public void onPause() {
+               mglcamera = null;
+               // TODO Auto-generated method stub
+               super.onPause();
+               
+       }
+
+       @Override
+       public void onResume() {
+               mglcamera = new glcamera();
+               // TODO Auto-generated method stub
+               super.onResume();
+               
+       }
+
+       public PoolCallback getDrawCallback() {
+               // TODO Auto-generated method stub
+               return poolcallback;
+       }
+
+
+}
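
Editorial note (not part of the commit): extending the Activity sketch given after NativePreviewer above, the intended wiring is to put GL2CameraViewer's getDrawCallback() at the end of the NativePreviewer callback stack so whatever earlier callbacks leave in the pool gets rendered. glview, mPreview, and myProcessingCallback are placeholders.

    LinkedList<PoolCallback> stack = new LinkedList<PoolCallback>();
    stack.add(myProcessingCallback);      // placeholder for an application callback
    stack.add(glview.getDrawCallback());  // GL2CameraViewer draws the processed frame
    mPreview.addCallbackStack(stack);
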
diff --git a/android/android-opencv/src/com/opencv/utils/BitmapBridge.java b/android/android-opencv/src/com/opencv/utils/BitmapBridge.java
new file mode 100644 (file)
index 0000000..573e7ae
--- /dev/null
@@ -0,0 +1,34 @@
+package com.opencv.utils;
+
+import java.nio.ByteBuffer;
+
+import com.opencv.jni.Mat;
+import com.opencv.jni.Size;
+import com.opencv.jni.opencv;
+
+import android.graphics.Bitmap;
+import android.graphics.Bitmap.Config;
+
+public class BitmapBridge {
+       static void copyBitmap(Bitmap bmap, Mat mat) throws Exception {
+               // reject anything that is not an ARGB_8888 bitmap - the copy below
+               // assumes 4 bytes per pixel
+               if ((bmap.getConfig() == null) || bmap.getConfig() != Config.ARGB_8888)
+                       throw new Exception("bad config");
+               Size sz = new Size(bmap.getWidth(), bmap.getHeight());
+               mat.create(sz, opencv.CV_8UC4);
+               ByteBuffer buffer = ByteBuffer.allocate(4 * bmap.getWidth()
+                               * bmap.getHeight());
+               bmap.copyPixelsToBuffer(buffer);
+               opencv.copyBufferToMat(mat, buffer);
+
+       }
+
+       static Bitmap matToBitmap(Mat mat) {
+               Bitmap bmap = Bitmap.createBitmap(mat.getCols(), mat.getRows(),
+                               Config.ARGB_8888);
+               ByteBuffer buffer = ByteBuffer.allocate(4 * bmap.getWidth()
+                               * bmap.getHeight());
+               opencv.copyMatToBuffer(buffer, mat);
+               bmap.copyPixelsFromBuffer(buffer);
+               return bmap;
+       }
+}
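
Editorial note (not part of the commit): a usage sketch for BitmapBridge. Both helpers are package-private as written, so the caller would live in com.opencv.utils; the no-argument Mat constructor is assumed from the SWIG wrappers.

    try {
        Mat mat = new Mat();
        BitmapBridge.copyBitmap(srcBitmap, mat);    // Bitmap -> Mat (ARGB_8888 expected)
        Bitmap out = BitmapBridge.matToBitmap(mat); // Mat -> Bitmap
    } catch (Exception e) {
        // copyBitmap throws on a missing or non-ARGB_8888 config
    }
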
diff --git a/android/apps/CVCamera/CMakeLists.txt b/android/apps/CVCamera/CMakeLists.txt
new file mode 100644 (file)
index 0000000..e82fbdf
--- /dev/null
@@ -0,0 +1,5 @@
+cmake_minimum_required(VERSION 2.8)
+
+project(CVCamera)
+
+add_subdirectory(jni)
diff --git a/android/apps/CVCamera/Makefile b/android/apps/CVCamera/Makefile
deleted file mode 100644 (file)
index 7beb94c..0000000
+++ /dev/null
@@ -1,86 +0,0 @@
-# The path to the NDK, requires crystax version r-4 for now, due to support
-# for the standard library
-
-# load environment from local make file
-LOCAL_ENV_MK=local.env.mk
-ifneq "$(wildcard $(LOCAL_ENV_MK))" ""
-include $(LOCAL_ENV_MK)
-else
-$(shell cp sample.$(LOCAL_ENV_MK) $(LOCAL_ENV_MK))
-$(info ERROR local environement not setup! try:)
-$(info gedit $(LOCAL_ENV_MK))
-$(error Please setup the $(LOCAL_ENV_MK) - the default was just created')
-endif
-ifndef ARM_TARGETS
-ARM_TARGETS="armeabi armeabi-v7a"
-endif
-ANDROID_NDK_BASE = $(ANDROID_NDK_ROOT)
-
-$(info OPENCV_CONFIG = $(OPENCV_CONFIG))
-
-ifndef PROJECT_PATH
-$(info PROJECT_PATH defaulting to this directory)
-PROJECT_PATH=.
-endif
-
-
-# The name of the native library
-LIBNAME = libcvcamera.so
-
-
-# Find all the C++ sources in the native folder
-SOURCES = $(wildcard jni/*.cpp)
-HEADERS = $(wildcard jni/*.h)
-
-ANDROID_MKS = $(wildcard jni/*.mk)
-
-SWIG_IS = $(wildcard jni/*.i)
-
-SWIG_MAIN = jni/cvcamera.i
-
-SWIG_JAVA_DIR = src/com/theveganrobot/cvcamera/jni
-SWIG_JAVA_OUT = $(wildcard $(SWIG_JAVA_DIR)/*.java)
-
-
-SWIG_C_DIR = jni/gen
-SWIG_C_OUT = $(SWIG_C_DIR)/cvcamera_swig.cpp
-
-BUILD_DEFS=OPENCV_CONFIG=$(OPENCV_CONFIG) \
-       PROJECT_PATH=$(PROJECT_PATH) \
-       V=$(V) \
-       $(NDK_FLAGS) \
-       ARM_TARGETS=$(ARM_TARGETS)
-
-# The real native library stripped of symbols
-LIB            = libs/armeabi-v7a/$(LIBNAME) libs/armeabi/$(LIBNAME)
-
-
-all:   $(LIB)
-
-
-#calls the ndk-build script, passing it OPENCV_ROOT and OPENCV_LIBS_DIR
-$(LIB): $(SWIG_C_OUT) $(SOURCES) $(HEADERS) $(ANDROID_MKS)
-       $(ANDROID_NDK_BASE)/ndk-build $(BUILD_DEFS)
-
-
-#this creates the swig wrappers
-$(SWIG_C_OUT): $(SWIG_IS)
-       make clean-swig &&\
-       mkdir -p $(SWIG_C_DIR) &&\
-       mkdir -p $(SWIG_JAVA_DIR) &&\
-       swig -java -c++ -I../../android-jni/jni -package  "com.theveganrobot.cvcamera.jni" \
-       -outdir $(SWIG_JAVA_DIR) \
-       -o $(SWIG_C_OUT) $(SWIG_MAIN)
-       
-       
-#clean targets
-.PHONY: clean  clean-swig cleanall
-
-#this deletes the generated swig java and the generated c wrapper
-clean-swig:
-       rm -f $(SWIG_JAVA_OUT) $(SWIG_C_OUT)
-       
-#does clean-swig and then uses the ndk-build clean
-clean: clean-swig
-       $(ANDROID_NDK_BASE)/ndk-build clean $(BUILD_DEFS)
-
diff --git a/android/apps/CVCamera/build.sh b/android/apps/CVCamera/build.sh
deleted file mode 100644 (file)
index 1497a39..0000000
+++ /dev/null
@@ -1 +0,0 @@
-make V=0
diff --git a/android/apps/CVCamera/clean.sh b/android/apps/CVCamera/clean.sh
deleted file mode 100644 (file)
index 121e391..0000000
+++ /dev/null
@@ -1 +0,0 @@
-make OPENCV_ROOT=../../opencv V=0 clean
index 66148fe91042fddfe65af4b0a081cd344ac3a60e..248b9a0428e1af9557ec2bc9ea610627369d8e40 100644 (file)
@@ -7,6 +7,6 @@
 # "build.properties", and override values to adapt the script to your
 # project structure.
 
-android.library.reference.1=../../android-jni
+android.library.reference.1=../../android-opencv
 # Project target.
 target=android-7
diff --git a/android/apps/CVCamera/jni/Android.mk b/android/apps/CVCamera/jni/Android.mk
deleted file mode 100644 (file)
index f5aa1b1..0000000
+++ /dev/null
@@ -1,21 +0,0 @@
-# date: Summer, 2010 
-# author: Ethan Rublee
-# contact: ethan.rublee@gmail.com
-#
-LOCAL_PATH := $(call my-dir)
-
-include $(CLEAR_VARS)
-
-#define OPENCV_INCLUDES and OPENCV_LIBS
-include $(OPENCV_CONFIG)
-
-LOCAL_LDLIBS += $(OPENCV_LIBS) $(ANDROID_OPENCV_LIBS) -llog -lGLESv2
-    
-LOCAL_C_INCLUDES +=  $(OPENCV_INCLUDES) $(ANDROID_OPENCV_INCLUDES)
-
-LOCAL_MODULE    := cvcamera
-
-LOCAL_SRC_FILES := Processor.cpp gen/cvcamera_swig.cpp
-
-include $(BUILD_SHARED_LIBRARY)
-
diff --git a/android/apps/CVCamera/jni/Application.mk b/android/apps/CVCamera/jni/Application.mk
deleted file mode 100644 (file)
index 0bbce43..0000000
+++ /dev/null
@@ -1,2 +0,0 @@
-# The ARMv7 is significanly faster due to the use of the hardware FPU
-APP_ABI := $(ARM_TARGETS)
\ No newline at end of file
diff --git a/android/apps/CVCamera/jni/CMakeLists.txt b/android/apps/CVCamera/jni/CMakeLists.txt
new file mode 100644 (file)
index 0000000..3ea00cb
--- /dev/null
@@ -0,0 +1,62 @@
+#########################################################
+# Find opencv and android-opencv
+#########################################################
+
+set(OpenCV_DIR ${CMAKE_SOURCE_DIR}/../../build 
+    CACHE PATH "The path where you built opencv for android")
+set(AndroidOpenCV_DIR ${CMAKE_SOURCE_DIR}/../../android-opencv/build 
+    CACHE PATH "The path where you built android-opencv")
+
+find_package(OpenCV REQUIRED)
+FIND_PACKAGE(AndroidOpenCV REQUIRED )
+
+#########################################################
+#c flags, included, and lib dependencies
+#########################################################
+
+#notice the "recycling" of CMAKE_C_FLAGS
+#this is necessary to pick up android flags
+set( CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Wall -pedantic -fPIC" )
+
+INCLUDE_DIRECTORIES(${CMAKE_CURRENT_SOURCE_DIR})
+
+set( LIBRARY_DEPS ${AndroidOpenCV_LIBS} ${OpenCV_LIBS} )
+if(ANDROID)
+  set( LIBRARY_DEPS ${LIBRARY_DEPS} log dl)
+endif(ANDROID)
+
+#########################################################
+#SWIG STUFF
+#########################################################
+#the java package to place swig generated java files in
+set(MY_PACKAGE com.theveganrobot.cvcamera.jni)
+
+if(NOT ANDROID)
+  #non android swig and jni
+  #jni is available by default on android
+  find_package(JNI REQUIRED)
+  include_directories(${JNI_INCLUDE_DIRS})
+  FIND_PACKAGE(SWIG)
+endif()
+
+INCLUDE(${SWIG_USE_FILE}) #on android this is found by the cmake toolchain
+
+if(ANDROID)
+  #this will set the output path for the java package
+  #and properly create the package declarations in generated java sources
+  SET_SWIG_JAVA_PACKAGE( ${MY_PACKAGE} ) #defined in the android toolchain
+endif(ANDROID)
+
+#this adds the swig path for the opencv wrappers
+SET(CMAKE_SWIG_FLAGS ${CMAKE_SWIG_FLAGS} "-I${AndroidOpenCV_SWIG_DIR}" )
+
+SET_SOURCE_FILES_PROPERTIES(cvcamera.i PROPERTIES CPLUSPLUS ON)
+
+#add the swig module, giving it the name, java, and then all of the source files
+SWIG_ADD_MODULE(cvcamera java 
+                cvcamera.i #swig file
+                Processor.cpp #cpp files can be compiled too
+                )
+             
+#link the module like any other   
+target_link_libraries(cvcamera ${LIBRARY_DEPS} )
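
Editorial note (not part of the commit): on the Java side, the SWIG module built above produces libcvcamera.so plus generated wrappers in com.theveganrobot.cvcamera.jni. An app would load the native library once before touching any wrapped class, typically in a static initializer; the surrounding class is a placeholder.

    static {
        // "cvcamera" matches the SWIG_ADD_MODULE name above
        System.loadLibrary("cvcamera");
    }
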
diff --git a/android/apps/CVCamera/sample.local.env.mk b/android/apps/CVCamera/sample.local.env.mk
deleted file mode 100644 (file)
index b0cf78f..0000000
+++ /dev/null
@@ -1,4 +0,0 @@
-#location of android-opencv port of OpenCV to android
-OPENCV_CONFIG=../../build/android-opencv.mk
-ANDROID_NDK_ROOT=$(HOME)/android-ndk-r4-crystax
-ARM_TARGETS="armeabi armeabi-v7a"
index 66148fe91042fddfe65af4b0a081cd344ac3a60e..7f801f9c358567a4b0257d6cb9f7a25d37ca80a8 100644 (file)
@@ -7,6 +7,6 @@
 # "build.properties", and override values to adapt the script to your
 # project structure.
 
-android.library.reference.1=../../android-jni
+android.library.reference.1=../../android-opencv/
 # Project target.
 target=android-7
diff --git a/android/apps/Calibration/res/layout/calib_camera.xml b/android/apps/Calibration/res/layout/calib_camera.xml
new file mode 100644 (file)
index 0000000..35dc96f
--- /dev/null
@@ -0,0 +1,45 @@
+<?xml version="1.0" encoding="utf-8"?>
+<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
+       xmlns:opencv="http://schemas.android.com/apk/res/com.opencv"
+       android:layout_width="fill_parent" android:layout_height="fill_parent"
+       android:background="@drawable/cameraback">
+       <!--<com.opencv.camera.NativePreviewer -->
+       <!-- <SurfaceView -->
+
+       <com.opencv.camera.NativePreviewer              
+               android:id="@+id/nativepreviewer" android:layout_width="400dip"
+               android:layout_height="300dip" android:layout_alignParentLeft="true"
+               android:layout_margin="20dip" android:gravity="center_horizontal|center_vertical"
+               android:layout_marginRight="20dip"
+               />
+       <LinearLayout android:id="@+id/glview_layout"
+
+               android:layout_width="400dip" android:layout_height="300dip"
+               android:layout_alignParentLeft="true" android:layout_margin="20dip"
+               android:gravity="center_horizontal|center_vertical"
+               android:layout_marginRight="20dip">
+       </LinearLayout>
+       <LinearLayout android:layout_width="wrap_content"
+               android:layout_height="fill_parent" android:orientation="vertical"
+               android:layout_margin="20dip" android:gravity="center_horizontal|center_vertical"
+               android:layout_alignParentRight="true">
+
+
+               <ImageButton android:src="@android:drawable/ic_menu_camera"
+                       android:id="@+id/capture" android:layout_width="60dip"
+                       android:layout_height="60dip"></ImageButton>
+               <ImageButton android:src="@android:drawable/ic_menu_save"
+                       android:id="@+id/calibrate" android:layout_width="60dip"
+                       android:layout_height="60dip"></ImageButton>
+               <TextView android:id="@+id/numberpatterns"
+                       android:layout_width="wrap_content" android:layout_height="wrap_content"
+                       android:padding="10dip" android:background="@android:color/white"
+                       android:text="0" />
+
+
+       </LinearLayout>
+
+
+
+
+</RelativeLayout>
diff --git a/android/apps/Calibration/res/layout/camera.xml b/android/apps/Calibration/res/layout/camera.xml
deleted file mode 100644 (file)
index 35dc96f..0000000
+++ /dev/null
@@ -1,45 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
-       xmlns:opencv="http://schemas.android.com/apk/res/com.opencv"
-       android:layout_width="fill_parent" android:layout_height="fill_parent"
-       android:background="@drawable/cameraback">
-       <!--<com.opencv.camera.NativePreviewer -->
-       <!-- <SurfaceView -->
-
-       <com.opencv.camera.NativePreviewer              
-               android:id="@+id/nativepreviewer" android:layout_width="400dip"
-               android:layout_height="300dip" android:layout_alignParentLeft="true"
-               android:layout_margin="20dip" android:gravity="center_horizontal|center_vertical"
-               android:layout_marginRight="20dip"
-               />
-       <LinearLayout android:id="@+id/glview_layout"
-
-               android:layout_width="400dip" android:layout_height="300dip"
-               android:layout_alignParentLeft="true" android:layout_margin="20dip"
-               android:gravity="center_horizontal|center_vertical"
-               android:layout_marginRight="20dip">
-       </LinearLayout>
-       <LinearLayout android:layout_width="wrap_content"
-               android:layout_height="fill_parent" android:orientation="vertical"
-               android:layout_margin="20dip" android:gravity="center_horizontal|center_vertical"
-               android:layout_alignParentRight="true">
-
-
-               <ImageButton android:src="@android:drawable/ic_menu_camera"
-                       android:id="@+id/capture" android:layout_width="60dip"
-                       android:layout_height="60dip"></ImageButton>
-               <ImageButton android:src="@android:drawable/ic_menu_save"
-                       android:id="@+id/calibrate" android:layout_width="60dip"
-                       android:layout_height="60dip"></ImageButton>
-               <TextView android:id="@+id/numberpatterns"
-                       android:layout_width="wrap_content" android:layout_height="wrap_content"
-                       android:padding="10dip" android:background="@android:color/white"
-                       android:text="0" />
-
-
-       </LinearLayout>
-
-
-
-
-</RelativeLayout>
index ce9cd4c8858181c7d558a4e5413b54b65c896bc5..64217fb610ff625ba5605b04bce86b34200a1f75 100644 (file)
@@ -204,9 +204,9 @@ public class Calibration extends Activity implements CalibrationCallback {
 
                setFullscreen();
                disableScreenTurnOff();
-               setContentView(R.layout.camera);
+               setContentView(R.layout.calib_camera);
                mPreview = (NativePreviewer) findViewById(R.id.nativepreviewer);
-               mPreview.setPreviewSize(1000, 500);
+               mPreview.setPreviewSize(800, 400);
                mPreview.setGrayscale(true);
                LinearLayout glview_layout = (LinearLayout) findViewById(R.id.glview_layout);
                glview = new GL2CameraViewer(getApplication(), false, 0, 0);
diff --git a/android/apps/OpenCV_SAMPLE/AndroidManifest.xml b/android/apps/OpenCV_SAMPLE/AndroidManifest.xml
new file mode 100644 (file)
index 0000000..c129d81
--- /dev/null
@@ -0,0 +1,29 @@
+<?xml version="1.0" encoding="utf-8"?>
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+       package="com.OpenCV_SAMPLE" android:versionCode="1"
+       android:versionName="1.0">
+       <application android:label="@string/app_name"
+               android:debuggable="true" android:icon="@drawable/icon">
+               <activity android:name=".OpenCV_SAMPLE" android:label="@string/app_name"
+                       android:screenOrientation="landscape" android:configChanges="orientation|keyboardHidden|keyboard">
+                       <intent-filter>
+                               <action android:name="android.intent.action.MAIN" />
+                               <category android:name="android.intent.category.LAUNCHER" />
+                       </intent-filter>
+               </activity>
+               <!-- These activities are defined in the android-opencv library, and just 
+                       reused here -->
+               <activity android:name="com.opencv.camera.CameraConfig"
+                       android:label="@string/app_name" android:screenOrientation="landscape"
+                       android:configChanges="orientation|keyboardHidden|keyboard">
+               </activity>
+       </application>
+
+       <uses-sdk android:minSdkVersion="7" />
+
+       <!-- set the opengl version -->
+       <uses-feature android:glEsVersion="0x00020000" />
+       <!-- use the camera -->
+       <uses-permission android:name="android.permission.CAMERA"></uses-permission>
+
+</manifest> 
\ No newline at end of file
diff --git a/android/apps/OpenCV_SAMPLE/CMakeLists.txt b/android/apps/OpenCV_SAMPLE/CMakeLists.txt
new file mode 100644 (file)
index 0000000..954ba5b
--- /dev/null
@@ -0,0 +1,5 @@
+cmake_minimum_required(VERSION 2.8)
+
+project(OpenCV_SAMPLE)
+
+add_subdirectory(jni)
diff --git a/android/apps/OpenCV_SAMPLE/default.properties b/android/apps/OpenCV_SAMPLE/default.properties
new file mode 100644 (file)
index 0000000..5d6911a
--- /dev/null
@@ -0,0 +1,12 @@
+# This file is automatically generated by Android Tools.
+# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
+#
+# This file must be checked in Version Control Systems.
+#
+# To customize properties used by the Ant build system use,
+# "build.properties", and override values to adapt the script to your
+# project structure.
+
+# Project target.
+target=android-7
+android.library.reference.1=../../android-opencv/
diff --git a/android/apps/OpenCV_SAMPLE/jni/CMakeLists.txt b/android/apps/OpenCV_SAMPLE/jni/CMakeLists.txt
new file mode 100644 (file)
index 0000000..823101c
--- /dev/null
@@ -0,0 +1,68 @@
+#########################################################
+# Find opencv and android-opencv
+#########################################################
+
+set(OpenCV_DIR ${CMAKE_SOURCE_DIR}/../../build 
+    CACHE PATH "The path where you built opencv for android")
+set(AndroidOpenCV_DIR ${CMAKE_SOURCE_DIR}/../../android-opencv/build 
+    CACHE PATH "The path where you built android-opencv")
+
+find_package(OpenCV REQUIRED)
+FIND_PACKAGE(AndroidOpenCV REQUIRED )
+
+#########################################################
+#c flags, includes, and lib dependencies
+#########################################################
+
+#notice the "recycling" of CMAKE_C_FLAGS
+#this is necessary to pick up android flags
+set( CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Wall -pedantic -fPIC" )
+
+INCLUDE_DIRECTORIES(${CMAKE_CURRENT_SOURCE_DIR})
+
+set( LIBRARY_DEPS ${AndroidOpenCV_LIBS} ${OpenCV_LIBS} )
+if(ANDROID)
+  set( LIBRARY_DEPS ${LIBRARY_DEPS} log dl)
+endif(ANDROID)
+
+#########################################################
+#SWIG STUFF
+#########################################################
+#the java package to place swig generated java files in
+set(MY_PACKAGE com.OpenCV_SAMPLE.jni )
+set(MY_MODULE OpenCV_SAMPLE )
+set(MY_SWIG 
+    OpenCV_SAMPLE.i #swig file
+    )
+set(MY_SRCS
+    cvsample.cpp #cpp files can be compiled too
+    )
+
+if(NOT ANDROID)
+  #non android swig and jni
+  #jni is available by default on android
+  find_package(JNI REQUIRED)
+  include_directories(${JNI_INCLUDE_DIRS})
+  FIND_PACKAGE(SWIG)
+endif()
+
+INCLUDE(${SWIG_USE_FILE}) #on android this is found by the cmake toolchain
+
+if(ANDROID)
+  #this will set the output path for the java package
+  #and properly create the package declarations in generated java sources
+  SET_SWIG_JAVA_PACKAGE( ${MY_PACKAGE} ) #defined in the android toolchain
+endif(ANDROID)
+
+#this adds the swig path for the opencv wrappers
+SET(CMAKE_SWIG_FLAGS ${CMAKE_SWIG_FLAGS} "-I${AndroidOpenCV_SWIG_DIR}" )
+
+SET_SOURCE_FILES_PROPERTIES(${MY_SWIG} PROPERTIES CPLUSPLUS ON)
+
+#add the swig module, giving it the name, java, and then all of the source files
+SWIG_ADD_MODULE(${MY_MODULE} java 
+                ${MY_SWIG}
+                ${MY_SRCS}
+                )
+#link the module like any other   
+target_link_libraries(${MY_MODULE} ${LIBRARY_DEPS} )
diff --git a/android/apps/OpenCV_SAMPLE/jni/OpenCV_SAMPLE.i b/android/apps/OpenCV_SAMPLE/jni/OpenCV_SAMPLE.i
new file mode 100644 (file)
index 0000000..93b6c89
--- /dev/null
@@ -0,0 +1,48 @@
+/* File : foobar.i */
+%module OpenCV_SAMPLE
+
+/*
+ * the java import code must be included for the opencv jni wrappers
+ * this means that the android project must reference opencv/android as a project
+ * see the default.properties for how this is done
+ */
+%pragma(java) jniclassimports=%{
+import com.opencv.jni.*; //import the android-opencv jni wrappers
+%}
+
+%pragma(java) jniclasscode=%{
+  static {
+    try {
+        //load up our shared libraries
+        System.loadLibrary("android-opencv");
+        System.loadLibrary("OpenCV_SAMPLE");
+      } catch (UnsatisfiedLinkError e) {
+        //badness
+        throw e;
+    }
+  }
+
+%}
+
+//import the android-cv.i file so that swig is aware of all that has been previously defined
+//notice that it is not an include....
+%import "android-cv.i"
+
+%{
+#include "cvsample.h"
+using cv::Mat;
+%}
+
+//make sure to import the image_pool as it is 
+//referenced by the Processor java generated
+//class
+%typemap(javaimports) CVSample "
+import com.opencv.jni.*;// import the opencv java bindings
+"
+class CVSample
+{
+public:
+  void canny(const Mat& input, Mat& output, int edgeThresh);
+  void invert(Mat& inout);
+  void blur(Mat& inout, int half_kernel_size);
+};
diff --git a/android/apps/OpenCV_SAMPLE/jni/cvsample.cpp b/android/apps/OpenCV_SAMPLE/jni/cvsample.cpp
new file mode 100644 (file)
index 0000000..0f9eea9
--- /dev/null
@@ -0,0 +1,27 @@
+#include "cvsample.h"
+#include <opencv2/imgproc/imgproc.hpp>
+
+void CVSample::canny(const cv::Mat& input, cv::Mat& output, int edgeThresh)
+{
+  if (input.empty())
+    return;
+  cv::Mat gray;
+  if (input.channels() == 3)
+  {
+    cv::cvtColor(input, gray, CV_RGB2GRAY);
+  }
+  else
+    gray = input;
+  cv::Canny(gray, output, edgeThresh, edgeThresh * 3, 3);
+}
+
+void CVSample::invert(cv::Mat& inout)
+{
+  cv::bitwise_not(inout, inout);
+}
+void CVSample::blur(cv::Mat& inout, int half_kernel_size)
+{
+  int ksz = half_kernel_size*2 + 1;
+  cv::Size kernel(ksz,ksz);
+  cv::blur(inout,inout,kernel);
+}
diff --git a/android/apps/OpenCV_SAMPLE/jni/cvsample.h b/android/apps/OpenCV_SAMPLE/jni/cvsample.h
new file mode 100644 (file)
index 0000000..345a35c
--- /dev/null
@@ -0,0 +1,11 @@
+#pragma once
+
+#include <opencv2/core/core.hpp>
+
+class CVSample
+{
+public:
+  void canny(const cv::Mat& input, cv::Mat& output, int edgeThresh);
+  void invert(cv::Mat& inout);
+  void blur(cv::Mat& inout, int half_kernel_size);
+};
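
The CVSample class wrapped above is plain C++, so it can also be exercised natively; a minimal sketch (the image size and values are illustrative, only the CVSample calls come from the patch):

#include "cvsample.h"
#include <opencv2/core/core.hpp>

int main()
{
  cv::Mat img(480, 640, CV_8UC3, cv::Scalar(128, 64, 32)); // synthetic test image
  CVSample sample;
  cv::Mat edges;
  sample.canny(img, edges, 15); // same threshold the Java sample passes
  sample.blur(img, 5);          // 11x11 box blur: ksz = half_kernel_size*2 + 1
  sample.invert(img);
  return 0;
}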
diff --git a/android/apps/OpenCV_SAMPLE/project_create.sh b/android/apps/OpenCV_SAMPLE/project_create.sh
new file mode 100755 (executable)
index 0000000..c058d19
--- /dev/null
@@ -0,0 +1,3 @@
+#this generates an ant based cli build of the OpenCV_SAMPLE project
+android update project --name OpenCV_SAMPLE \
+--path .
diff --git a/android/apps/OpenCV_SAMPLE/res/drawable-hdpi/icon.png b/android/apps/OpenCV_SAMPLE/res/drawable-hdpi/icon.png
new file mode 100644 (file)
index 0000000..4e828ba
Binary files /dev/null and b/android/apps/OpenCV_SAMPLE/res/drawable-hdpi/icon.png differ
diff --git a/android/apps/OpenCV_SAMPLE/res/drawable-ldpi/icon.png b/android/apps/OpenCV_SAMPLE/res/drawable-ldpi/icon.png
new file mode 100644 (file)
index 0000000..5e11406
Binary files /dev/null and b/android/apps/OpenCV_SAMPLE/res/drawable-ldpi/icon.png differ
diff --git a/android/apps/OpenCV_SAMPLE/res/drawable-mdpi/icon.png b/android/apps/OpenCV_SAMPLE/res/drawable-mdpi/icon.png
new file mode 100644 (file)
index 0000000..a591eb8
Binary files /dev/null and b/android/apps/OpenCV_SAMPLE/res/drawable-mdpi/icon.png differ
diff --git a/android/apps/OpenCV_SAMPLE/res/layout/main.xml b/android/apps/OpenCV_SAMPLE/res/layout/main.xml
new file mode 100644 (file)
index 0000000..3a5f117
--- /dev/null
@@ -0,0 +1,12 @@
+<?xml version="1.0" encoding="utf-8"?>
+<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
+    android:orientation="vertical"
+    android:layout_width="fill_parent"
+    android:layout_height="fill_parent"
+    >
+<TextView  
+    android:layout_width="fill_parent" 
+    android:layout_height="wrap_content" 
+    android:text="@string/hello"
+    />
+</LinearLayout>
diff --git a/android/apps/OpenCV_SAMPLE/res/menu/sample_menu.xml b/android/apps/OpenCV_SAMPLE/res/menu/sample_menu.xml
new file mode 100644 (file)
index 0000000..8cf02d7
--- /dev/null
@@ -0,0 +1,8 @@
+<?xml version="1.0" encoding="utf-8"?>
+<menu
+  xmlns:android="http://schemas.android.com/apk/res/android">
+<item android:id="@+id/cv_menu_invert" android:title="Invert"></item>
+<item android:id="@+id/cv_menu_canny" android:title="Canny"></item>
+<item android:id="@+id/cv_menu_blur" android:title="Blur"></item>
+<item android:id="@+id/cv_menu_nothing" android:title="Nothing"></item>
+</menu>
diff --git a/android/apps/OpenCV_SAMPLE/res/values/strings.xml b/android/apps/OpenCV_SAMPLE/res/values/strings.xml
new file mode 100644 (file)
index 0000000..1b7d2e7
--- /dev/null
@@ -0,0 +1,5 @@
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+    <string name="hello">Hello World, OpenCV_SAMPLE!</string>
+    <string name="app_name">OpenCV SAMPLE</string>
+</resources>
diff --git a/android/apps/OpenCV_SAMPLE/src/com/OpenCV_SAMPLE/OpenCV_SAMPLE.java b/android/apps/OpenCV_SAMPLE/src/com/OpenCV_SAMPLE/OpenCV_SAMPLE.java
new file mode 100644 (file)
index 0000000..7002cee
--- /dev/null
@@ -0,0 +1,87 @@
+package com.OpenCV_SAMPLE;
+
+import java.util.LinkedList;
+
+import android.os.Bundle;
+import android.view.Menu;
+import android.view.MenuInflater;
+import android.view.MenuItem;
+
+import com.OpenCV_SAMPLE.jni.CVSample;
+import com.opencv.camera.CameraActivity;
+import com.opencv.camera.NativeProcessor;
+import com.opencv.camera.NativeProcessor.PoolCallback;
+import com.opencv.jni.Mat;
+import com.opencv.jni.image_pool;
+
+public class OpenCV_SAMPLE extends CameraActivity {
+
+       private int do_what = R.id.cv_menu_nothing;
+
+       @Override
+       public boolean onCreateOptionsMenu(Menu menu) {
+               MenuInflater menu_flater = new MenuInflater(this);
+               menu_flater.inflate(R.menu.sample_menu, menu);
+               return true;
+       }
+
+       @Override
+       public boolean onMenuItemSelected(int featureId, MenuItem item) {
+               switch (item.getItemId()) {
+               case R.id.cv_menu_blur:
+               case R.id.cv_menu_canny:
+               case R.id.cv_menu_invert:
+               case R.id.cv_menu_nothing:
+                       do_what = item.getItemId();
+                       break;
+               default:
+                       return false;
+
+               }
+               return true;
+       }
+
+       /** Called when the activity is first created. */
+       @Override
+       public void onCreate(Bundle savedInstanceState) {
+               super.onCreate(savedInstanceState);
+       }
+
+       @Override
+       protected LinkedList<PoolCallback> getCallBackStack() {
+               LinkedList<PoolCallback> list = new LinkedList<NativeProcessor.PoolCallback>();
+               list.add(samplePoolCallback);
+               return list;
+       }
+
+       CVSample cvsample = new CVSample();
+       Mat canny = new Mat();
+       PoolCallback samplePoolCallback = new PoolCallback() {
+
+               @Override
+               public void process(int idx, image_pool pool, long timestamp,
+                               NativeProcessor nativeProcessor) {
+                       Mat grey = pool.getGrey(idx);
+                       Mat color = pool.getImage(idx);
+                       Mat draw_img = color;
+                       switch (do_what) {
+                       case R.id.cv_menu_blur:
+                               cvsample.blur(draw_img, 5);
+                               break;
+                       case R.id.cv_menu_canny:
+                               cvsample.canny(grey, canny, 15);
+                               draw_img = canny;
+                               break;
+                       case R.id.cv_menu_invert:
+                               cvsample.invert(draw_img);
+                               break;
+                       case R.id.cv_menu_nothing:
+                               break;
+                       }
+                       pool.addImage(idx + 1, draw_img);
+                       glview.getDrawCallback().process(idx + 1, pool, timestamp,
+                                       nativeProcessor);
+               }
+       };
+
+}
\ No newline at end of file
diff --git a/android/changes.Android.txt b/android/changes.Android.txt
deleted file mode 100644 (file)
index 8741a44..0000000
+++ /dev/null
@@ -1,17 +0,0 @@
-changes
-added some specific CMakeLists.txt changes see the changes.diff for these
-
-basically see
-mbstowcs is not supported - so see had to 
-#ifndef ANDROID
-string fromUtf16(const WString& str)
-WString toUtf16(const string& str)
-#endif
-
-ANDROID is always defined when building with the ndk
-
-_S is a bad variable name for android...
-
-added the zlib-android - because i couldn't figure out how to configure the
-existing zlib in 3rdparty
-
diff --git a/android/changes.diff b/android/changes.diff
deleted file mode 100644 (file)
index 801c472..0000000
+++ /dev/null
@@ -1,255 +0,0 @@
-Index: modules/highgui/CMakeLists.txt
-===================================================================
---- modules/highgui/CMakeLists.txt     (revision 3454)
-+++ modules/highgui/CMakeLists.txt     (working copy)
-@@ -1,3 +1,21 @@
-+if(ANDROID)
-+
-+set(high_gui_android_srcs src/bitstrm.cpp
-+    src/cap.cpp 
-+    src/grfmt_base.cpp 
-+    src/grfmt_bmp.cpp 
-+    src/grfmt_jpeg.cpp 
-+    src/grfmt_png.cpp 
-+    src/grfmt_tiff.cpp 
-+    src/loadsave.cpp 
-+    src/precomp.cpp 
-+    src/utils.cpp 
-+    src/grfmt_sunras.cpp 
-+    src/grfmt_pxm.cpp 
-+    src/window.cpp )
-+define_android_manual(highgui "${high_gui_android_srcs}" "$(LOCAL_PATH)/src  $(OPENCV_INCLUDES)")     
-+
-+else()
- # ----------------------------------------------------------------------------
- #  CMake file for highgui. See root CMakeLists.txt
- #   Some parts taken from version of Hartmut Seichter, HIT Lab NZ.
-@@ -332,3 +350,5 @@
- install(FILES ${highgui_ext_hdrs}
-         DESTINATION include/opencv2/highgui
-         COMPONENT main)
-+        
-+endif()#android
-Index: modules/core/src/persistence.cpp
-===================================================================
---- modules/core/src/persistence.cpp   (revision 3454)
-+++ modules/core/src/persistence.cpp   (working copy)
-@@ -114,12 +114,12 @@
- namespace cv
- {
--
-+#ifndef ANDROID
- string fromUtf16(const WString& str)
- {
-     cv::AutoBuffer<char> _buf(str.size()*4 + 1);
-     char* buf = _buf;
--        
-+
-     size_t sz = wcstombs(buf, str.c_str(), str.size());
-     if( sz == (size_t)-1 )
-         return string();
-@@ -131,14 +131,14 @@
- {
-     cv::AutoBuffer<wchar_t> _buf(str.size() + 1);
-     wchar_t* buf = _buf;
--        
-+
-     size_t sz = mbstowcs(buf, str.c_str(), str.size());
-     if( sz == (size_t)-1 )
-         return WString();
-     buf[sz] = '\0';
-     return WString(buf);
- }
--
-+#endif
- }
-Index: modules/features2d/src/sift.cpp
-===================================================================
---- modules/features2d/src/sift.cpp    (revision 3454)
-+++ modules/features2d/src/sift.cpp    (working copy)
-@@ -172,6 +172,7 @@
-   typedef Keypoints::iterator       KeypointsIter ;      ///< Keypoint list iter datatype
-   typedef Keypoints::const_iterator KeypointsConstIter ; ///< Keypoint list const iter datatype
-+#undef _S
-   /** @brief Constructors and destructors */
-   /*@{*/
-   Sift(const pixel_t* _im_pt, int _width, int _height,
-Index: modules/features2d/CMakeLists.txt
-===================================================================
---- modules/features2d/CMakeLists.txt  (revision 3454)
-+++ modules/features2d/CMakeLists.txt  (working copy)
-@@ -1 +1,2 @@
--define_opencv_module(features2d opencv_core opencv_imgproc opencv_calib3d opencv_highgui)
-\ No newline at end of file
-+define_opencv_module(features2d opencv_core opencv_imgproc opencv_calib3d opencv_highgui)
-+
-Index: modules/CMakeLists.txt
-===================================================================
---- modules/CMakeLists.txt     (revision 3454)
-+++ modules/CMakeLists.txt     (working copy)
-@@ -1,3 +1,7 @@
-+if(ANDROID)
-+ configure_file("${CMAKE_SOURCE_DIR}/Android.mk.modules.in" "${CMAKE_CURRENT_BINARY_DIR}/Android.mk")
-+endif()
-+
- add_subdirectory(calib3d)
- add_subdirectory(core)
- add_subdirectory(features2d)
-@@ -20,8 +24,11 @@
- endif()
- add_subdirectory(video)
-+
-+if(!ANDROID)
- add_subdirectory(haartraining)
- add_subdirectory(traincascade)
-+endif()
- #add_subdirectory(gpu)
-Index: 3rdparty/zlib/CMakeLists.txt
-===================================================================
---- 3rdparty/zlib/CMakeLists.txt       (revision 3503)
-+++ 3rdparty/zlib/CMakeLists.txt       (working copy)
-@@ -35,3 +35,5 @@
-         DEBUG_POSTFIX "${OPENCV_DEBUG_POSTFIX}"
-         ARCHIVE_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/3rdparty/lib"
-     )
-+    
-+endif()
-Index: 3rdparty/lapack/CMakeLists.txt
-===================================================================
---- 3rdparty/lapack/CMakeLists.txt     (revision 3503)
-+++ 3rdparty/lapack/CMakeLists.txt     (working copy)
-@@ -2,6 +2,10 @@
- #  CMake file for opencv_lapack. See root CMakeLists.txt
- #
- # ----------------------------------------------------------------------------
-+if(ANDROID)
-+define_3rdparty_module(opencv_lapack)
-+else()
-+
- project(opencv_lapack)
- # List of C++ files:
-@@ -57,3 +61,4 @@
-       DEBUG_POSTFIX "${OPENCV_DEBUG_POSTFIX}"
-       ARCHIVE_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/3rdparty/lib
-       )
-+endif() #android
-Index: 3rdparty/libjasper/CMakeLists.txt
-===================================================================
---- 3rdparty/libjasper/CMakeLists.txt  (revision 3503)
-+++ 3rdparty/libjasper/CMakeLists.txt  (working copy)
-@@ -1,3 +1,8 @@
-+if(ANDROID)
-+define_3rdparty_module(jasper)
-+else()
-+
-+
- # ----------------------------------------------------------------------------
- #  CMake file for libjasper. See root CMakeLists.txt
- #
-@@ -4,6 +9,7 @@
- # ----------------------------------------------------------------------------
- project(libjasper)
-+
- add_definitions(-DEXCLUDE_MIF_SUPPORT -DEXCLUDE_PNM_SUPPORT -DEXCLUDE_BMP_SUPPORT -DEXCLUDE_RAS_SUPPORT  -DEXCLUDE_JPG_SUPPORT -DEXCLUDE_PGX_SUPPORT)
- # List of C++ files:
-@@ -41,6 +47,8 @@
-     set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Wno-implicit-function-declaration -Wno-unused")
- endif()
-+endif()#!android
-+
- set_target_properties(${the_target}
-       PROPERTIES
-       OUTPUT_NAME "${the_target}"
-Index: 3rdparty/libpng/CMakeLists.txt
-===================================================================
---- 3rdparty/libpng/CMakeLists.txt     (revision 3503)
-+++ 3rdparty/libpng/CMakeLists.txt     (working copy)
-@@ -1,3 +1,7 @@
-+if(ANDROID)
-+define_3rdparty_module(png)
-+else()
-+#endif()#android
- # ----------------------------------------------------------------------------
- #  CMake file for libpng. See root CMakeLists.txt
- #
-@@ -38,3 +42,4 @@
-       DEBUG_POSTFIX "${OPENCV_DEBUG_POSTFIX}"
-       ARCHIVE_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/3rdparty/lib"
-       )
-+endif()#android
-Index: 3rdparty/libjpeg/CMakeLists.txt
-===================================================================
---- 3rdparty/libjpeg/CMakeLists.txt    (revision 3503)
-+++ 3rdparty/libjpeg/CMakeLists.txt    (working copy)
-@@ -1,3 +1,7 @@
-+if(ANDROID)
-+define_3rdparty_module(jpeg)
-+else()
-+#endif()#android
- # ----------------------------------------------------------------------------
- #  CMake file for libjpeg. See root CMakeLists.txt
- #
-@@ -39,3 +43,4 @@
-       DEBUG_POSTFIX "${OPENCV_DEBUG_POSTFIX}"
-       ARCHIVE_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/3rdparty/lib
-       )
-+endif()#android
-Index: 3rdparty/CMakeLists.txt
-===================================================================
---- 3rdparty/CMakeLists.txt    (revision 3503)
-+++ 3rdparty/CMakeLists.txt    (working copy)
-@@ -1,6 +1,22 @@
-+if(ANDROID)
-+    configure_file("${CMAKE_SOURCE_DIR}/Android.mk.modules.in" "${CMAKE_CURRENT_BINARY_DIR}/Android.mk")
-+      add_subdirectory(libpng)
-+      add_subdirectory(libjpeg)
-+      add_subdirectory(libjasper)
-+      add_subdirectory(flann)
-+    add_subdirectory(lapack)
-+    
-+    #zlib is special? look in zlib-android
-+    #couldn't get the other one to compile for some reason...
-+    #config issue
-+    #add_subdirectory(zlib-android)
-+   
-+else()
-+
- add_subdirectory(flann)
- add_subdirectory(lapack)
- add_subdirectory(zlib)
-+
- if(WITH_JASPER AND NOT JASPER_FOUND)
-       add_subdirectory(libjasper)
- endif()
-@@ -13,3 +29,5 @@
- if(WITH_TIFF AND NOT TIFF_FOUND)
-       add_subdirectory(libtiff)
- endif()
-+
-+endif()#android
-Index: 3rdparty/flann/CMakeLists.txt
-===================================================================
---- 3rdparty/flann/CMakeLists.txt      (revision 3503)
-+++ 3rdparty/flann/CMakeLists.txt      (working copy)
-@@ -1,3 +1,7 @@
-+if(ANDROID)
-+file(GLOB_RECURSE flann_sources_cpp *.cpp)
-+define_android_manual(flann "${flann_sources_cpp}" "$(LOCAL_PATH)/../include $(LOCAL_PATH)/../include/flann $(LOCAL_PATH)/nn $(LOCAL_PATH)/algorithms $(LOCAL_PATH)/util")            
-+else()
- if (DEFINED OPENCV_VERSION)
- # ----------------------------------------------------------------------------
-@@ -105,3 +109,4 @@
- )
- ENDIF()
-+endif()#android
diff --git a/android/cmake_android.sh b/android/cmake_android.sh
new file mode 100644 (file)
index 0000000..b94b9d8
--- /dev/null
@@ -0,0 +1,4 @@
+mkdir build
+cd build
+cmake -C ../CMakeCache.android.initial.cmake -DCMAKE_TOOLCHAIN_FILE=$ANDTOOLCHAIN ../..
+
diff --git a/android/cmake_android_armeabi.sh b/android/cmake_android_armeabi.sh
new file mode 100644 (file)
index 0000000..cce096d
--- /dev/null
@@ -0,0 +1,4 @@
+mkdir build_armeabi
+cd build_armeabi
+cmake -C ../CMakeCache.android.initial.cmake -DARM_TARGETS=armeabi -DCMAKE_TOOLCHAIN_FILE=$ANDTOOLCHAIN ../..
+
diff --git a/android/cvconfig.h.in b/android/cvconfig.h.in
deleted file mode 100644 (file)
index 2499bd2..0000000
+++ /dev/null
@@ -1,161 +0,0 @@
-/* Define to one of `_getb67', `GETB67', `getb67' for Cray-2 and Cray-YMP
-   systems. This function is required for `alloca.c' support on those systems.
-   */
-/* #undef CRAY_STACKSEG_END */
-
-/* Define to 1 if using `alloca.c'. */
-/* #undef C_ALLOCA */
-
-/* Define to 1 if you have `alloca', as a function or macro. */
-/* #undef HAVE_ALLOCA */
-
-/* Define to 1 if you have <alloca.h> and it should be used (not on Ultrix).
-   */
-#define HAVE_ALLOCA_H 1
-
-/* V4L capturing support */
-//#define HAVE_CAMV4L
-
-/* V4L2 capturing support */
-//#define HAVE_CAMV4L2
-
-/* Carbon windowing environment */
-/* #undef HAVE_CARBON */
-
-/* IEEE1394 capturing support */
-/* #undef HAVE_DC1394 */
-
-/* libdc1394 0.9.4 or 0.9.5 */
-/* #undef HAVE_DC1394_095 */
-
-/* IEEE1394 capturing support - libdc1394 v2.x */
-//#define HAVE_DC1394_2
-
-/* ffmpeg in Gentoo */
-/* #undef HAVE_GENTOO_FFMPEG */
-
-/* FFMpeg video library */
-/* #undef HAVE_FFMPEG */
-
-/* ffmpeg's libswscale */
-/* #undef HAVE_FFMPEG_SWSCALE */
-
-/* GStreamer multimedia framework */
-/* #undef HAVE_GSTREAMER */
-
-/* GStreamer with gstappsink & gstappsrc */
-/* #undef HAVE_GSTREAMER_APP */
-
-/* GTK+ 2.0 Thread support */
-//#define  HAVE_GTHREAD
-
-/* GTK+ 2.x toolkit */
-//#define  HAVE_GTK
-
-/* OpenEXR codec */
-/* #undef HAVE_ILMIMF */
-
-/* Apple ImageIO Framework */
-/* #undef HAVE_IMAGEIO */
-
-/* Define to 1 if you have the <inttypes.h> header file. */
-/* #undef HAVE_INTTYPES_H */
-
-/* JPEG-2000 codec */
-#define HAVE_JASPER
-
-/* IJG JPEG codec */
-#define HAVE_JPEG
-
-/* Define to 1 if you have the `dl' library (-ldl). */
-/* #undef HAVE_LIBDL */
-
-/* Define to 1 if you have the `gomp' library (-lgomp). */
-/* #undef HAVE_LIBGOMP */
-
-/* Define to 1 if you have the `m' library (-lm). */
-/* #undef HAVE_LIBM */
-
-/* libpng/png.h needs to be included */
-#undef  HAVE_LIBPNG_PNG_H
-
-/* Define to 1 if you have the `pthread' library (-lpthread). */
-//#define  HAVE_LIBPTHREAD 1
-
-/* Define to 1 if you have the `lrint' function. */
-/* #undef HAVE_LRINT */
-
-/* PNG codec */
-#define HAVE_PNG
-
-/* Define to 1 if you have the `png_get_valid' function. */
-/* #undef HAVE_PNG_GET_VALID */
-
-/* png.h needs to be included */
-#define  HAVE_PNG_H
-
-/* Define to 1 if you have the `png_set_tRNS_to_alpha' function. */
-/* #undef HAVE_PNG_SET_TRNS_TO_ALPHA */
-
-/* QuickTime video libraries */
-/* #undef HAVE_QUICKTIME */
-
-/* TIFF codec */
-/* #undef HAVE_TIFF */
-
-/* Unicap video capture library */
-/* #undef HAVE_UNICAP */
-
-/* Define to 1 if you have the <unistd.h> header file. */
-#define  HAVE_UNISTD_H 1
-
-/* Xine video library */
-/* #undef HAVE_XINE */
-
-/* LZ77 compression/decompression library (used for PNG) */
-/* #undef HAVE_ZLIB */
-
-/* Intel Integrated Performance Primitives */
-/* #undef HAVE_IPP */
-
-/* OpenCV compiled as static or dynamic libs */
-//#define  OPENCV_BUILD_SHARED_LIB
-
-/* Name of package */
-#define  PACKAGE "opencv"
-
-/* Define to the address where bug reports for this package should be sent. */
-//#define  PACKAGE_BUGREPORT "opencvlibrary-devel@lists.sourceforge.net"
-
-/* Define to the full name of this package. */
-#define  PACKAGE_NAME "opencv"
-
-/* Define to the full name and version of this package. */
-#define  PACKAGE_STRING "opencv 2.2.0"
-
-/* Define to the one symbol short name of this package. */
-#define  PACKAGE_TARNAME "opencv"
-
-/* Define to the version of this package. */
-#define  PACKAGE_VERSION "2.2.0"
-
-/* If using the C implementation of alloca, define if you know the
-   direction of stack growth for your system; otherwise it will be
-   automatically deduced at runtime.
-       STACK_DIRECTION > 0 => grows toward higher addresses
-       STACK_DIRECTION < 0 => grows toward lower addresses
-       STACK_DIRECTION = 0 => direction of growth unknown */
-/* #undef STACK_DIRECTION */
-
-/* Version number of package */
-#define  VERSION "2.2.0"
-
-/* Define to 1 if your processor stores words with the most significant byte
-   first (like Motorola and SPARC, unlike Intel and VAX). */
-/* #undef WORDS_BIGENDIAN */
-
-/* Intel Threading Building Blocks */
-/* #undef HAVE_TBB */
-
-/*the android ndk defines this somewhere and it messes with some variables*/
-#undef _S
diff --git a/android/default.properties.in b/android/default.properties.in
deleted file mode 100644 (file)
index 9d135cb..0000000
+++ /dev/null
@@ -1,11 +0,0 @@
-# This file is automatically generated by Android Tools.
-# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
-# 
-# This file must be checked in Version Control Systems.
-# 
-# To customize properties used by the Ant build system use,
-# "build.properties", and override values to adapt the script to your
-# project structure.
-
-# Project target.
-target=android-7
diff --git a/android/diff.txt b/android/diff.txt
deleted file mode 100644 (file)
index cf12e57..0000000
+++ /dev/null
@@ -1,211 +0,0 @@
-Index: modules/highgui/CMakeLists.txt
-===================================================================
---- modules/highgui/CMakeLists.txt     (revision 3454)
-+++ modules/highgui/CMakeLists.txt     (working copy)
-@@ -1,3 +1,21 @@
-+if(ANDROID)
-+
-+set(high_gui_android_srcs src/bitstrm.cpp
-+    src/cap.cpp 
-+    src/grfmt_base.cpp 
-+    src/grfmt_bmp.cpp 
-+    src/grfmt_jpeg.cpp 
-+    src/grfmt_png.cpp 
-+    src/grfmt_tiff.cpp 
-+    src/loadsave.cpp 
-+    src/precomp.cpp 
-+    src/utils.cpp 
-+    src/grfmt_sunras.cpp 
-+    src/grfmt_pxm.cpp 
-+    src/window.cpp )
-+define_android_manual(highgui "${high_gui_android_srcs}" "$(OpenCVInclude) $(LOCAL_PATH)/include")    
-+
-+else()
- # ----------------------------------------------------------------------------
- #  CMake file for highgui. See root CMakeLists.txt
- #   Some parts taken from version of Hartmut Seichter, HIT Lab NZ.
-@@ -332,3 +350,5 @@
- install(FILES ${highgui_ext_hdrs}
-         DESTINATION include/opencv2/highgui
-         COMPONENT main)
-+        
-+endif()#android
-Index: modules/features2d/src/sift.cpp
-===================================================================
---- modules/features2d/src/sift.cpp    (revision 3454)
-+++ modules/features2d/src/sift.cpp    (working copy)
-@@ -172,6 +172,7 @@
-   typedef Keypoints::iterator       KeypointsIter ;      ///< Keypoint list iter datatype
-   typedef Keypoints::const_iterator KeypointsConstIter ; ///< Keypoint list const iter datatype
-+#undef _S
-   /** @brief Constructors and destructors */
-   /*@{*/
-   Sift(const pixel_t* _im_pt, int _width, int _height,
-Index: modules/features2d/CMakeLists.txt
-===================================================================
---- modules/features2d/CMakeLists.txt  (revision 3454)
-+++ modules/features2d/CMakeLists.txt  (working copy)
-@@ -1 +1,2 @@
--define_opencv_module(features2d opencv_core opencv_imgproc opencv_calib3d opencv_highgui)
-\ No newline at end of file
-+define_opencv_module(features2d opencv_core opencv_imgproc opencv_calib3d opencv_highgui)
-+
-Index: modules/CMakeLists.txt
-===================================================================
---- modules/CMakeLists.txt     (revision 3454)
-+++ modules/CMakeLists.txt     (working copy)
-@@ -1,3 +1,7 @@
-+if(ANDROID)
-+ configure_file("${CMAKE_SOURCE_DIR}/Android.mk.modules.in" "${CMAKE_CURRENT_BINARY_DIR}/Android.mk")
-+endif()
-+
- add_subdirectory(calib3d)
- add_subdirectory(core)
- add_subdirectory(features2d)
-@@ -20,8 +24,11 @@
- endif()
- add_subdirectory(video)
-+
-+if(!ANDROID)
- add_subdirectory(haartraining)
- add_subdirectory(traincascade)
-+endif()
- #add_subdirectory(gpu)
-Index: 3rdparty/zlib/CMakeLists.txt
-===================================================================
---- 3rdparty/zlib/CMakeLists.txt       (revision 3454)
-+++ 3rdparty/zlib/CMakeLists.txt       (working copy)
-@@ -1,3 +1,7 @@
-+if(ANDROID)
-+define_3rdparty_module(zlib)
-+else()
-+
- # ----------------------------------------------------------------------------
- #  CMake file for zlib. See root CMakeLists.txt
- #
-@@ -35,3 +39,5 @@
-         DEBUG_POSTFIX "${OPENCV_DEBUG_POSTFIX}"
-         ARCHIVE_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/3rdparty/lib"
-     )
-+    
-+endif()
-Index: 3rdparty/lapack/CMakeLists.txt
-===================================================================
---- 3rdparty/lapack/CMakeLists.txt     (revision 3454)
-+++ 3rdparty/lapack/CMakeLists.txt     (working copy)
-@@ -2,6 +2,10 @@
- #  CMake file for opencv_lapack. See root CMakeLists.txt
- #
- # ----------------------------------------------------------------------------
-+if(ANDROID)
-+define_3rdparty_module(opencv_lapack)
-+else()
-+
- project(opencv_lapack)
- # List of C++ files:
-@@ -57,3 +61,4 @@
-       DEBUG_POSTFIX "${OPENCV_DEBUG_POSTFIX}"
-       ARCHIVE_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/3rdparty/lib
-       )
-+endif() #android
-Index: 3rdparty/libjasper/CMakeLists.txt
-===================================================================
---- 3rdparty/libjasper/CMakeLists.txt  (revision 3454)
-+++ 3rdparty/libjasper/CMakeLists.txt  (working copy)
-@@ -1,3 +1,8 @@
-+if(ANDROID)
-+define_3rdparty_module(jasper)
-+else()
-+
-+
- # ----------------------------------------------------------------------------
- #  CMake file for libjasper. See root CMakeLists.txt
- #
-@@ -4,6 +9,7 @@
- # ----------------------------------------------------------------------------
- project(libjasper)
-+
- add_definitions(-DEXCLUDE_MIF_SUPPORT -DEXCLUDE_PNM_SUPPORT -DEXCLUDE_BMP_SUPPORT -DEXCLUDE_RAS_SUPPORT  -DEXCLUDE_JPG_SUPPORT -DEXCLUDE_PGX_SUPPORT)
- # List of C++ files:
-@@ -41,6 +47,8 @@
-     set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Wno-implicit-function-declaration -Wno-unused")
- endif()
-+endif()#!android
-+
- set_target_properties(${the_target}
-       PROPERTIES
-       OUTPUT_NAME "${the_target}"
-Index: 3rdparty/libpng/CMakeLists.txt
-===================================================================
---- 3rdparty/libpng/CMakeLists.txt     (revision 3454)
-+++ 3rdparty/libpng/CMakeLists.txt     (working copy)
-@@ -1,3 +1,7 @@
-+if(ANDROID)
-+define_3rdparty_module(png)
-+else()
-+#endif()#android
- # ----------------------------------------------------------------------------
- #  CMake file for libpng. See root CMakeLists.txt
- #
-@@ -38,3 +42,4 @@
-       DEBUG_POSTFIX "${OPENCV_DEBUG_POSTFIX}"
-       ARCHIVE_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/3rdparty/lib"
-       )
-+endif()#android
-Index: 3rdparty/libjpeg/CMakeLists.txt
-===================================================================
---- 3rdparty/libjpeg/CMakeLists.txt    (revision 3454)
-+++ 3rdparty/libjpeg/CMakeLists.txt    (working copy)
-@@ -1,3 +1,7 @@
-+if(ANDROID)
-+define_3rdparty_module(jpeg)
-+else()
-+#endif()#android
- # ----------------------------------------------------------------------------
- #  CMake file for libjpeg. See root CMakeLists.txt
- #
-@@ -39,3 +43,4 @@
-       DEBUG_POSTFIX "${OPENCV_DEBUG_POSTFIX}"
-       ARCHIVE_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/3rdparty/lib
-       )
-+endif()#android
-Index: 3rdparty/CMakeLists.txt
-===================================================================
---- 3rdparty/CMakeLists.txt    (revision 3454)
-+++ 3rdparty/CMakeLists.txt    (working copy)
-@@ -1,3 +1,10 @@
-+if(ANDROID)
-+ configure_file("${CMAKE_SOURCE_DIR}/Android.mk.modules.in" "${CMAKE_CURRENT_BINARY_DIR}/Android.mk")
-+      add_subdirectory(libpng)
-+      add_subdirectory(libjpeg)
-+      add_subdirectory(libjasper)
-+endif()
-+
- add_subdirectory(flann)
- add_subdirectory(lapack)
- add_subdirectory(zlib)
-Index: 3rdparty/flann/CMakeLists.txt
-===================================================================
---- 3rdparty/flann/CMakeLists.txt      (revision 3454)
-+++ 3rdparty/flann/CMakeLists.txt      (working copy)
-@@ -1,3 +1,7 @@
-+if(ANDROID)
-+file(GLOB_RECURSE flann_sources_cpp *.cpp)
-+define_android_manual(flann "${flann_sources_cpp}" "$(LOCAL_PATH)/../include $(LOCAL_PATH)/../include/flann $(LOCAL_PATH)/nn $(LOCAL_PATH)/algorithms $(LOCAL_PATH)/util")            
-+else()
- if (DEFINED OPENCV_VERSION)
- # ----------------------------------------------------------------------------
-@@ -105,3 +109,4 @@
- )
- ENDIF()
-+endif()#android
index ce50df37a984741e596f1b4b4d81b6354640e45a..c5bcc50422b18ed49429e8aa99c4cb965d951758 100644 (file)
@@ -1,7 +1,3 @@
-if(ANDROID)
- configure_file("${CMAKE_SOURCE_DIR}/Android.mk.modules.in" "${CMAKE_CURRENT_BINARY_DIR}/Android.mk")
-endif()
-
 add_subdirectory(calib3d)
 add_subdirectory(core)
 add_subdirectory(features2d)
@@ -13,7 +9,9 @@ if(MSVC OR MINGW)
    endif()
 endif()
 
+if(NOT ANDROID)
 add_subdirectory(ts)
+endif()
 add_subdirectory(highgui)
 add_subdirectory(imgproc)
 add_subdirectory(legacy)
@@ -26,10 +24,10 @@ if(PYTHONLIBS_FOUND AND BUILD_NEW_PYTHON_SUPPORT)
 endif()
 
 add_subdirectory(video)
+add_subdirectory(traincascade)
+add_subdirectory(haartraining)
 
 if(NOT ANDROID)
-add_subdirectory(haartraining)
-add_subdirectory(traincascade)
 add_subdirectory(gpu)
 endif()
 
index d092808110d53d1d5a696f39533efe7445bf2880..74d021d16bb45a6a7595e6d1f20e715a05e4136a 100644 (file)
@@ -39,6 +39,7 @@ set(cvhaartraining_lib_src
 
 add_library(opencv_haartraining_engine STATIC ${cvhaartraining_lib_src})
 
+if(NOT ANDROID)
 # -----------------------------------------------------------
 #  haartraining
 # -----------------------------------------------------------
@@ -75,3 +76,4 @@ set_target_properties(opencv_performance PROPERTIES
 install(TARGETS opencv_haartraining RUNTIME DESTINATION bin COMPONENT main)
 install(TARGETS opencv_createsamples RUNTIME DESTINATION bin COMPONENT main)
 install(TARGETS opencv_performance RUNTIME DESTINATION bin COMPONENT main)
+endif()
index 00171cf384e9d945b77b45245b5018ae34794b4b..f4d94584767c94ffdc62c234dfe1dd409643ab71 100644 (file)
@@ -3,7 +3,6 @@
 #   Some parts taken from version of Hartmut Seichter, HIT Lab NZ.
 #   Jose Luis Blanco, 2008
 # ----------------------------------------------------------------------------
-if(NOT ANDROID)
 project(opencv_highgui)
 
 if(WITH_JPEG)
@@ -204,6 +203,11 @@ if(APPLE)
        endif()
 endif(APPLE)
 
+if(HAVE_ANDROID_NATIVE_CAMERA)
+  set(highgui_srcs ${highgui_srcs} src/cap_android.cpp)
+  add_definitions(-DHAVE_ANDROID_NATIVE_CAMERA)
+endif()
+
 source_group("Src" FILES ${highgui_srcs} ${highgui_hdrs})
 source_group("Include" FILES ${highgui_ext_hdrs})
 
@@ -296,7 +300,6 @@ if( OPENNI_LIBRARY )
     target_link_libraries(${the_target} ${OPENNI_LIBRARY})
 endif()
 
-
 if(APPLE)
        target_link_libraries(${the_target} "-lbz2 -framework Cocoa -framework QuartzCore")
        if(WITH_CARBON)
@@ -317,25 +320,4 @@ install(TARGETS ${the_target}
 
 install(FILES ${highgui_ext_hdrs}
         DESTINATION include/opencv2/highgui
-        COMPONENT main)
-        
-endif()
-
-if(ANDROID)
-set(high_gui_android_srcs src/bitstrm.cpp
-    src/cap.cpp 
-    src/grfmt_base.cpp 
-    src/grfmt_bmp.cpp 
-    src/grfmt_jpeg.cpp
-    src/grfmt_jpeg2000.cpp
-    src/grfmt_png.cpp 
-    src/grfmt_tiff.cpp 
-    src/loadsave.cpp 
-    src/precomp.cpp 
-    src/utils.cpp 
-    src/grfmt_sunras.cpp 
-    src/grfmt_pxm.cpp 
-    src/window.cpp
-    src/cap_images.cpp )
-define_android_manual(opencv_highgui "${high_gui_android_srcs}" "$(LOCAL_PATH)/src  $(OPENCV_INCLUDES)")       
-endif()
+        COMPONENT main)
\ No newline at end of file
index ecf8d27a975cadb41e3da8b463e0c90936474abd..3c861f56082066ab8785558216d3a482944b0719 100644 (file)
@@ -290,7 +290,9 @@ enum
 
        CV_CAP_PVAPI    =800,   // PvAPI, Prosilica GigE SDK
 
-       CV_CAP_OPENNI   =900    // OpenNI (for Kinect)
+       CV_CAP_OPENNI   =900,   // OpenNI (for Kinect)
+
+       CV_CAP_ANDROID  =1000   // Android
 };
 
 /* start capturing frames from camera: index = camera_index + domain_offset (CV_CAP_*) */
@@ -387,6 +389,14 @@ enum
     CV_CAP_OPENNI_SXGA_15HZ    = 1
 };
 
+//output formats supported by the Android camera
+enum
+{
+  CV_CAP_ANDROID_COLOR_FRAME = 1, //TODO: check RGB or BGR?
+  CV_CAP_ANDROID_GREY_FRAME  = 0,
+  CV_CAP_ANDROID_YUV_FRAME   = 2
+};
+
 /* retrieve or set capture properties */
 CVAPI(double) cvGetCaptureProperty( CvCapture* capture, int property_id );
 CVAPI(int)    cvSetCaptureProperty( CvCapture* capture, int property_id, double value );
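
The new CV_CAP_ANDROID constant plugs into the existing index = camera_index + domain_offset convention noted above; a minimal sketch of selecting the first Android camera and querying its size (error handling trimmed, not part of the patch):

#include <opencv2/highgui/highgui_c.h>

static void query_android_camera_size(void)
{
    /* camera 0 on the Android domain; equivalent to cvCreateCameraCapture(CV_CAP_ANDROID) */
    CvCapture* capture = cvCreateCameraCapture(0 + CV_CAP_ANDROID);
    if (!capture)
        return;
    double width  = cvGetCaptureProperty(capture, CV_CAP_PROP_FRAME_WIDTH);
    double height = cvGetCaptureProperty(capture, CV_CAP_PROP_FRAME_HEIGHT);
    (void)width; (void)height;
    cvReleaseCapture(&capture);
}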
index f7cedc1899f2f1a86245f8329fd71dfbf7bf40f0..807502ed0342667d7c5034345e5c5ec2dd2fcf36 100644 (file)
@@ -124,6 +124,7 @@ CV_IMPL CvCapture * cvCreateCameraCapture (int index)
         CV_CAP_QT,
         CV_CAP_UNICAP,
         CV_CAP_OPENNI,
+        CV_CAP_ANDROID,
         -1
     };
 
@@ -143,7 +144,7 @@ CV_IMPL CvCapture * cvCreateCameraCapture (int index)
         defined(HAVE_CAMV4L) || defined (HAVE_CAMV4L2) || defined(HAVE_GSTREAMER) || \
         defined(HAVE_DC1394_2) || defined(HAVE_DC1394) || defined(HAVE_CMU1394) || \
         defined(HAVE_GSTREAMER) || defined(HAVE_MIL) || defined(HAVE_QUICKTIME) || \
-        defined(HAVE_UNICAP) || defined(HAVE_PVAPI) || defined(HAVE_OPENNI)
+        defined(HAVE_UNICAP) || defined(HAVE_PVAPI) || defined(HAVE_OPENNI) || defined(HAVE_ANDROID_NATIVE_CAMERA)
         // local variable to memorize the captured device
         CvCapture *capture;
         #endif
@@ -250,7 +251,15 @@ CV_IMPL CvCapture * cvCreateCameraCapture (int index)
             return capture;
         break;
         #endif
-        
+
+               #ifdef HAVE_ANDROID_NATIVE_CAMERA
+        case CV_CAP_ANDROID:
+          capture = cvCreateCameraCapture_Android (index);
+        if (capture)
+            return capture;
+        break;
+        #endif
+
         }
     }
 
diff --git a/modules/highgui/src/cap_android.cpp b/modules/highgui/src/cap_android.cpp
new file mode 100644 (file)
index 0000000..8559652
--- /dev/null
@@ -0,0 +1,354 @@
+ /*M///////////////////////////////////////////////////////////////////////////////////////
+//
+//  IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
+//
+//  By downloading, copying, installing or using the software you agree to this license.
+//  If you do not agree to this license, do not download, install,
+//  copy or use the software.
+//
+//
+//                        Intel License Agreement
+//                For Open Source Computer Vision Library
+//
+// Copyright (C) 2000, Intel Corporation, all rights reserved.
+// Third party copyrights are property of their respective owners.
+//
+// Redistribution and use in source and binary forms, with or without modification,
+// are permitted provided that the following conditions are met:
+//
+//   * Redistribution's of source code must retain the above copyright notice,
+//     this list of conditions and the following disclaimer.
+//
+//   * Redistribution's in binary form must reproduce the above copyright notice,
+//     this list of conditions and the following disclaimer in the documentation
+//     and/or other materials provided with the distribution.
+//
+//   * The name of Intel Corporation may not be used to endorse or promote products
+//     derived from this software without specific prior written permission.
+//
+// This software is provided by the copyright holders and contributors "as is" and
+// any express or implied warranties, including, but not limited to, the implied
+// warranties of merchantability and fitness for a particular purpose are disclaimed.
+// In no event shall the Intel Corporation or contributors be liable for any direct,
+// indirect, incidental, special, exemplary, or consequential damages
+// (including, but not limited to, procurement of substitute goods or services;
+// loss of use, data, or profits; or business interruption) however caused
+// and on any theory of liability, whether in contract, strict liability,
+// or tort (including negligence or otherwise) arising in any way out of
+// the use of this software, even if advised of the possibility of such damage.
+//
+//M*/
+
+#include "precomp.hpp"
+
+#ifdef HAVE_ANDROID_NATIVE_CAMERA
+
+#include <opencv2/imgproc/imgproc.hpp>
+#include <pthread.h>
+#include <android/log.h>
+#include "camera_activity.h"
+
+#define LOG_TAG "CV_CAP"
+#define LOGD(...) ((void)__android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, __VA_ARGS__))
+#define LOGI(...) ((void)__android_log_print(ANDROID_LOG_INFO, LOG_TAG, __VA_ARGS__))
+#define LOGE(...) ((void)__android_log_print(ANDROID_LOG_ERROR, LOG_TAG, __VA_ARGS__))
+
+class HighguiAndroidCameraActivity;
+
+class CvCapture_Android : public CvCapture
+{
+public:
+    CvCapture_Android();
+    virtual ~CvCapture_Android();
+
+    virtual double getProperty(int propIdx);
+    virtual bool setProperty(int probIdx, double propVal);
+    virtual bool grabFrame();
+    virtual IplImage* retrieveFrame(int outputType);
+    virtual int getCaptureDomain() { return CV_CAP_ANDROID; }
+
+    bool isOpened() const;
+
+protected:
+    struct OutputMap
+    {
+    public:
+        cv::Mat mat;
+        IplImage* getIplImagePtr();
+    private:
+        IplImage iplHeader;
+    };
+
+    CameraActivity* m_activity;
+
+private:
+    bool m_isOpened;
+
+    OutputMap *m_frameYUV;
+    OutputMap *m_frameYUVnext;
+    OutputMap m_frameGray;
+    OutputMap m_frameColor;
+    bool m_hasGray;
+    bool m_hasColor;
+
+    pthread_mutex_t m_nextFrameMutex;
+    pthread_cond_t m_nextFrameCond;
+    volatile bool m_waitingNextFrame;
+
+    int m_framesGrabbed;
+
+    friend class HighguiAndroidCameraActivity;
+
+    void onFrame(const void* buffer, int bufferSize);
+
+    void convertBufferToYUV(const void* buffer, int size, int width, int height);
+    static bool convertYUVToGrey(const cv::Mat& yuv, cv::Mat& resmat);
+    static bool convertYUVToColor(const cv::Mat& yuv, cv::Mat& resmat);
+};
+
+
+class HighguiAndroidCameraActivity : public CameraActivity
+{
+  public:
+    HighguiAndroidCameraActivity(CvCapture_Android* capture)
+    {
+      m_capture = capture;
+      m_framesReceived = 0;
+    }
+
+    virtual bool onFrameBuffer(void* buffer, int bufferSize)
+    {
+      LOGD("buffer addr:%p size:%d",buffer, bufferSize);
+      if(isConnected() && buffer != 0 && bufferSize > 0)
+      {
+       m_framesReceived++;
+       if (m_capture->m_waitingNextFrame)
+       {
+         m_capture->onFrame(buffer, bufferSize);
+         pthread_mutex_lock(&m_capture->m_nextFrameMutex);
+         m_capture->m_waitingNextFrame = false;//set flag that no more frames required at this moment
+         pthread_cond_broadcast(&m_capture->m_nextFrameCond);
+         pthread_mutex_unlock(&m_capture->m_nextFrameMutex);
+       }
+       return true;
+      }
+      return false;
+    }
+
+    void LogFramesRate()
+    {
+      LOGI("FRAMES received: %d  grabbed: %d", m_framesReceived, m_capture->m_framesGrabbed);
+    }
+
+  private:
+    CvCapture_Android* m_capture;
+    int m_framesReceived;
+};
+
+IplImage* CvCapture_Android::OutputMap::getIplImagePtr()
+{
+    if( mat.empty() )
+        return 0;
+
+    iplHeader = IplImage(mat);
+    return &iplHeader;
+}
+
+bool CvCapture_Android::isOpened() const
+{
+    return m_isOpened;
+}
+
+CvCapture_Android::CvCapture_Android()
+{
+  //defaults
+  m_activity = 0;
+  m_isOpened = false;
+  m_frameYUV = 0;
+  m_frameYUVnext = 0;
+  m_hasGray = false;
+  m_hasColor = false;
+  m_waitingNextFrame = false;
+  m_framesGrabbed = 0;
+
+  //try connect to camera
+  m_activity = new HighguiAndroidCameraActivity(this);
+
+  if (m_activity == 0) return;
+  pthread_mutex_init(&m_nextFrameMutex, NULL);
+  pthread_cond_init (&m_nextFrameCond, NULL);
+
+  CameraActivity::ErrorCode errcode = m_activity->connect();
+  if(errcode == CameraActivity::NO_ERROR)
+  {
+    m_isOpened = true;
+    m_frameYUV = new OutputMap();
+    m_frameYUVnext = new OutputMap();
+  }
+  else
+  {
+    LOGE("Native_camera returned opening error: %d", errcode);
+    delete m_activity;
+    m_activity = 0;
+  }
+}
+
+CvCapture_Android::~CvCapture_Android()
+{
+  if (m_activity)
+  {
+    ((HighguiAndroidCameraActivity*)m_activity)->LogFramesRate();
+
+    //m_activity->disconnect() will be automatically called inside destructor;
+    delete m_activity;
+    delete m_frameYUV;
+    delete m_frameYUVnext;
+    m_activity = 0;
+    m_frameYUV = 0;
+    m_frameYUVnext = 0;
+    
+    pthread_mutex_destroy(&m_nextFrameMutex);
+    pthread_cond_destroy(&m_nextFrameCond);
+  }
+}
+
+double CvCapture_Android::getProperty( int propIdx )
+{
+  switch ( propIdx )
+  {
+    case CV_CAP_PROP_FRAME_WIDTH:
+      return (double)CameraActivity::getFrameWidth();
+    case CV_CAP_PROP_FRAME_HEIGHT:
+      return (double)CameraActivity::getFrameHeight();
+    default:
+      CV_Error( CV_StsError, "Failed attempt to GET unsupported camera property." );
+      break;
+  }
+  return -1.0;
+}
+
+bool CvCapture_Android::setProperty( int propIdx, double propValue )
+{
+  bool res = false;
+  if( isOpened() )
+  {
+    switch ( propIdx )
+    {
+      default:
+       CV_Error( CV_StsError, "Failed attempt to SET unsupported camera property." );
+       break;
+    }
+  }
+
+  return res;
+}
+
+bool CvCapture_Android::grabFrame()
+{
+  if( !isOpened() )
+    return false;
+
+  pthread_mutex_lock(&m_nextFrameMutex);
+  m_waitingNextFrame = true;
+  pthread_cond_wait(&m_nextFrameCond, &m_nextFrameMutex);
+  pthread_mutex_unlock(&m_nextFrameMutex);
+  m_framesGrabbed++;
+  return true;
+}
+
+void CvCapture_Android::onFrame(const void* buffer, int bufferSize)
+{
+   LOGD("Buffer available: %p + %d", buffer, bufferSize);
+
+   convertBufferToYUV(buffer, bufferSize, CameraActivity::getFrameWidth(), CameraActivity::getFrameHeight());
+
+   //swap current and new frames
+   OutputMap* tmp = m_frameYUV;
+   m_frameYUV = m_frameYUVnext;
+   m_frameYUVnext = tmp;
+
+   //discard cached frames
+   m_hasGray = false;
+   m_hasColor = false;
+}
+
+IplImage* CvCapture_Android::retrieveFrame( int outputType )
+{
+  IplImage* image = 0;
+  if (0 != m_frameYUV && !m_frameYUV->mat.empty())
+  {
+    switch(outputType)
+    {
+      case CV_CAP_ANDROID_YUV_FRAME:
+       image = m_frameYUV->getIplImagePtr();
+       break;
+      case CV_CAP_ANDROID_GREY_FRAME:
+       if (!m_hasGray)
+         if (!(m_hasGray = convertYUVToGrey(m_frameYUV->mat, m_frameGray.mat)))
+           image = 0;
+       image = m_frameGray.getIplImagePtr();
+       break;
+      case CV_CAP_ANDROID_COLOR_FRAME:
+       if (!m_hasColor)
+         if (!(m_hasColor = convertYUVToColor(m_frameYUV->mat, m_frameColor.mat)))
+           image = 0;
+       image = m_frameColor.getIplImagePtr();
+       break;
+      default:
+       LOGE("Unsupported frame output format: %d", outputType);
+       image = 0;
+       break;
+    }
+  }
+  return image;
+}
+
+
+void CvCapture_Android::convertBufferToYUV(const void* buffer, int size, int width, int height)
+{
+  cv::Size buffSize(width, height + (height / 2));
+  if (buffSize.area() != size)
+  {
+    LOGE("ERROR convertBufferToYuv_Mat: width=%d, height=%d, buffSize=%d x %d, buffSize.area()=%d, size=%d",
+        width, height, buffSize.width, buffSize.height, buffSize.area(), size);
+        
+    return;
+  }
+
+  m_frameYUVnext->mat.create(buffSize, CV_8UC1);
+  uchar* matBuff = m_frameYUVnext->mat.ptr<uchar> (0);
+  memcpy(matBuff, buffer, size);
+}
+
+bool CvCapture_Android::convertYUVToGrey(const cv::Mat& yuv, cv::Mat& resmat)
+{
+  if (yuv.empty())
+    return false;
+
+  resmat = yuv(cv::Range(0, yuv.rows * (2.0f / 3)), cv::Range::all());
+
+  return !resmat.empty();
+}
+
+bool CvCapture_Android::convertYUVToColor(const cv::Mat& yuv, cv::Mat& resmat)
+{
+  if (yuv.empty())
+    return false;
+
+  cv::cvtColor(yuv, resmat, CV_YUV2RGB);
+  return !resmat.empty();
+}
+
+
+CvCapture* cvCreateCameraCapture_Android( int /*index*/ )
+{
+    CvCapture_Android* capture = new CvCapture_Android();
+
+    if( capture->isOpened() )
+        return capture;
+
+    delete capture;
+    return 0;
+}
+
+#endif
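
Putting the highgui_c.h constants and this backend together, a capture loop against the Android native camera looks roughly like the sketch below (frame count and cleanup are illustrative; only the highgui calls and CV_CAP_ANDROID_* constants come from the patch). cvGrabFrame() blocks on the internal condition variable until onFrameBuffer() hands over a preview buffer; the grey output is simply the Y plane (top two thirds of the YUV mat) while the color output goes through cvtColor:

#include <opencv2/highgui/highgui_c.h>

static void run_android_capture(int nframes)
{
    CvCapture* capture = cvCreateCameraCapture(CV_CAP_ANDROID);
    if (!capture)
        return; /* backend disabled (no HAVE_ANDROID_NATIVE_CAMERA) or the camera refused to connect */

    for (int i = 0; i < nframes; ++i)
    {
        if (!cvGrabFrame(capture))
            break;
        IplImage* grey  = cvRetrieveFrame(capture, CV_CAP_ANDROID_GREY_FRAME);  /* Y plane only */
        IplImage* color = cvRetrieveFrame(capture, CV_CAP_ANDROID_COLOR_FRAME); /* converted with cvtColor */
        /* both headers wrap the capture's internal buffers; clone them if they must outlive the next grab */
        (void)grey; (void)color;
    }
    cvReleaseCapture(&capture);
}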
index 931a168a6277d9a4eb63913e6630ff4bf6d1c410..50b41fd53497570c6aaee2bd4a08117cd13d5984 100644 (file)
@@ -122,6 +122,7 @@ CvVideoWriter* cvCreateVideoWriter_VFW( const char* filename, int fourcc,
                                         double fps, CvSize frameSize, int is_color );
 CvCapture* cvCreateCameraCapture_DShow( int index );
 CvCapture* cvCreateCameraCapture_OpenNI( int index );
+CvCapture* cvCreateCameraCapture_Android( int index );
 
 CVAPI(int) cvHaveImageReader(const char* filename);
 CVAPI(int) cvHaveImageWriter(const char* filename);