From 3a932b0f6c01872db603d4fe45260deb4cc8bc0e Mon Sep 17 00:00:00 2001 From: Ethan Rublee Date: Sat, 27 Nov 2010 07:59:22 +0000 Subject: [PATCH] Refactoring the image_pool for android, and adding some common utils for camera configuration. Also experimenting with optimization - grayscale preview is way faster than color right now. --- android/android-jni/AndroidManifest.xml | 4 + android/android-jni/Makefile | 8 +- android/android-jni/jni/Application.mk | 2 +- android/android-jni/jni/Calibration.cpp | 377 ++++++++-------- android/android-jni/jni/Calibration.h | 39 +- android/android-jni/jni/gl_code.cpp | 477 +++++++++++---------- android/android-jni/jni/glcamera.h | 47 +- android/android-jni/jni/image_pool.cpp | 117 ++--- android/android-jni/jni/image_pool.h | 84 ++-- android/android-jni/jni/image_pool.i | 6 +- android/android-jni/jni/yuv420sp2rgb.c | 136 +++--- .../android-jni/res/layout/calibrationviewer.xml | 11 + android/android-jni/res/layout/camerasettings.xml | 40 ++ android/android-jni/res/layout/chesssizer.xml | 40 ++ android/android-jni/res/values/attrs.xml | 11 + android/android-jni/res/values/chessnumbers.xml | 20 + android/android-jni/res/values/settingnumbers.xml | 20 + android/android-jni/res/values/strings.xml | 19 + android/android-jni/sample.local.env.mk | 1 + .../com/opencv/calibration/CalibrationViewer.java | 47 ++ .../com/opencv/calibration/ChessBoardChooser.java | 75 ++++ .../calibration/services/CalibrationService.java | 166 +++++++ .../src/com/opencv/camera/CameraConfig.java | 166 +++++++ .../src/com/opencv/camera/NativePreviewer.java | 414 ++++++++++-------- .../src/com/opencv/camera/NativeProcessor.java | 300 +++++++------ 25 files changed, 1655 insertions(+), 972 deletions(-) create mode 100644 android/android-jni/res/layout/calibrationviewer.xml create mode 100644 android/android-jni/res/layout/camerasettings.xml create mode 100644 android/android-jni/res/layout/chesssizer.xml create mode 100644 android/android-jni/res/values/attrs.xml create mode 100644 android/android-jni/res/values/chessnumbers.xml create mode 100644 android/android-jni/res/values/settingnumbers.xml create mode 100644 android/android-jni/res/values/strings.xml create mode 100644 android/android-jni/src/com/opencv/calibration/CalibrationViewer.java create mode 100644 android/android-jni/src/com/opencv/calibration/ChessBoardChooser.java create mode 100644 android/android-jni/src/com/opencv/calibration/services/CalibrationService.java create mode 100644 android/android-jni/src/com/opencv/camera/CameraConfig.java diff --git a/android/android-jni/AndroidManifest.xml b/android/android-jni/AndroidManifest.xml index 64b428a..f0a9639 100644 --- a/android/android-jni/AndroidManifest.xml +++ b/android/android-jni/AndroidManifest.xml @@ -11,6 +11,10 @@ regular Android project. 
--> + + + + diff --git a/android/android-jni/Makefile b/android/android-jni/Makefile index db1c2a2..e208cca 100644 --- a/android/android-jni/Makefile +++ b/android/android-jni/Makefile @@ -12,6 +12,10 @@ $(info gedit $(LOCAL_ENV_MK)) $(error Please setup the $(LOCAL_ENV_MK) - the default was just created') endif +ifndef ARM_TARGETS +ARM_TARGETS=armeabi armeabi-v7a +endif + ANDROID_NDK_BASE = $(ANDROID_NDK_ROOT) $(info OPENCV_CONFIG = $(OPENCV_CONFIG)) @@ -44,7 +48,7 @@ all: $(LIB) nogdb #calls the ndk-build script, passing it OPENCV_ROOT and OPENCV_LIBS_DIR $(LIB): $(SWIG_C_OUT) $(SOURCES) $(HEADERS) $(ANDROID_MKS) $(ANDROID_NDK_BASE)/ndk-build OPENCV_CONFIG=$(OPENCV_CONFIG) \ - PROJECT_PATH=$(PROJECT_PATH) V=$(V) $(NDK_FLAGS) + PROJECT_PATH=$(PROJECT_PATH) ARM_TARGETS=$(ARM_TARGETS) V=$(V) $(NDK_FLAGS) #this creates the swig wrappers @@ -70,5 +74,5 @@ clean-swig: #does clean-swig and then uses the ndk-build clean clean: clean-swig $(ANDROID_NDK_BASE)/ndk-build OPENCV_CONFIG=$(OPENCV_CONFIG) \ - PROJECT_PATH=$(PROJECT_PATH) clean V=$(V) $(NDK_FLAGS) + PROJECT_PATH=$(PROJECT_PATH) clean ARM_TARGETS=$(ARM_TARGETS) V=$(V) $(NDK_FLAGS) diff --git a/android/android-jni/jni/Application.mk b/android/android-jni/jni/Application.mk index 5d44df5..f23b245 100644 --- a/android/android-jni/jni/Application.mk +++ b/android/android-jni/jni/Application.mk @@ -1,2 +1,2 @@ -APP_ABI := armeabi armeabi-v7a +APP_ABI := $(ARM_TARGETS) APP_MODULES := android-opencv diff --git a/android/android-jni/jni/Calibration.cpp b/android/android-jni/jni/Calibration.cpp index c3e95fe..2f62acc 100644 --- a/android/android-jni/jni/Calibration.cpp +++ b/android/android-jni/jni/Calibration.cpp @@ -7,255 +7,240 @@ #include "Calibration.h" - #include using namespace cv; -Calibration::Calibration():patternsize(6,8) +Calibration::Calibration() : + patternsize(6, 8) { } -Calibration::~Calibration() { +Calibration::~Calibration() +{ } - namespace { -double computeReprojectionErrors( - const vector >& objectPoints, const vector >& imagePoints, const vector& rvecs, - const vector& tvecs, const Mat& cameraMatrix, - const Mat& distCoeffs, vector& perViewErrors) { - vector imagePoints2; - int i, totalPoints = 0; - double totalErr = 0, err; - perViewErrors.resize(objectPoints.size()); - - for (i = 0; i < (int) objectPoints.size(); i++) { - projectPoints(Mat(objectPoints[i]), rvecs[i], tvecs[i], cameraMatrix, - distCoeffs, imagePoints2); - err = norm(Mat(imagePoints[i]), Mat(imagePoints2), CV_L1 ); - int n = (int) objectPoints[i].size(); - perViewErrors[i] = err / n; - totalErr += err; - totalPoints += n; - } - - return totalErr / totalPoints; +double computeReprojectionErrors(const vector >& objectPoints, + const vector >& imagePoints, const vector& rvecs, const vector< + Mat>& tvecs, const Mat& cameraMatrix, const Mat& distCoeffs, + vector& perViewErrors) +{ + vector imagePoints2; + int i, totalPoints = 0; + double totalErr = 0, err; + perViewErrors.resize(objectPoints.size()); + + for (i = 0; i < (int)objectPoints.size(); i++) + { + projectPoints(Mat(objectPoints[i]), rvecs[i], tvecs[i], cameraMatrix, distCoeffs, imagePoints2); + err = norm(Mat(imagePoints[i]), Mat(imagePoints2), CV_L1); + int n = (int)objectPoints[i].size(); + perViewErrors[i] = err / n; + totalErr += err; + totalPoints += n; + } + + return totalErr / totalPoints; } +void calcChessboardCorners(Size boardSize, float squareSize, vector& corners) +{ + corners.resize(0); -void calcChessboardCorners(Size boardSize, float squareSize, vector< - Point3f>& corners) { - 
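  // [editor's note, not part of the patch] For the default patternsize of
  // 6x8 inner corners and square size s, the loop below emits the 48 planar
  // model points (0,0,0), (s,0,0), ..., (5s,7s,0) in row-major order; this
  // z=0 grid is what calibrateCamera() later pairs with each view's
  // detected image corners.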
corners.resize(0); - - for (int i = 0; i < boardSize.height; i++) - for (int j = 0; j < boardSize.width; j++) - corners.push_back(Point3f(float(j * squareSize), float(i - * squareSize), 0)); + for (int i = 0; i < boardSize.height; i++) + for (int j = 0; j < boardSize.width; j++) + corners.push_back(Point3f(float(j * squareSize), float(i * squareSize), 0)); } /**from opencv/samples/cpp/calibration.cpp * */ -bool runCalibration(vector > imagePoints, - Size imageSize, Size boardSize, float squareSize, float aspectRatio, - int flags, Mat& cameraMatrix, Mat& distCoeffs, vector& rvecs, - vector& tvecs, vector& reprojErrs, double& totalAvgErr) { - cameraMatrix = Mat::eye(3, 3, CV_64F); - if (flags & CV_CALIB_FIX_ASPECT_RATIO) - cameraMatrix.at (0, 0) = aspectRatio; +bool runCalibration(vector > imagePoints, Size imageSize, Size boardSize, float squareSize, + float aspectRatio, int flags, Mat& cameraMatrix, Mat& distCoeffs, vector& rvecs, + vector& tvecs, vector& reprojErrs, double& totalAvgErr) +{ + cameraMatrix = Mat::eye(3, 3, CV_64F); + if (flags & CV_CALIB_FIX_ASPECT_RATIO) + cameraMatrix.at (0, 0) = aspectRatio; - distCoeffs = Mat::zeros(4, 1, CV_64F); + distCoeffs = Mat::zeros(4, 1, CV_64F); - vector > objectPoints(1); - calcChessboardCorners(boardSize, squareSize, objectPoints[0]); - for (size_t i = 1; i < imagePoints.size(); i++) - objectPoints.push_back(objectPoints[0]); + vector > objectPoints(1); + calcChessboardCorners(boardSize, squareSize, objectPoints[0]); + for (size_t i = 1; i < imagePoints.size(); i++) + objectPoints.push_back(objectPoints[0]); - calibrateCamera(objectPoints, imagePoints, imageSize, cameraMatrix, - distCoeffs, rvecs, tvecs, flags); + calibrateCamera(objectPoints, imagePoints, imageSize, cameraMatrix, distCoeffs, rvecs, tvecs, flags); - bool ok = checkRange(cameraMatrix, CV_CHECK_QUIET ) && checkRange( - distCoeffs, CV_CHECK_QUIET ); + bool ok = checkRange(cameraMatrix, CV_CHECK_QUIET) && checkRange(distCoeffs, CV_CHECK_QUIET); - totalAvgErr = computeReprojectionErrors(objectPoints, imagePoints, rvecs, - tvecs, cameraMatrix, distCoeffs, reprojErrs); + totalAvgErr + = computeReprojectionErrors(objectPoints, imagePoints, rvecs, tvecs, cameraMatrix, distCoeffs, reprojErrs); - return ok; + return ok; } -void saveCameraParams(const string& filename, Size imageSize, Size boardSize, - float squareSize, float aspectRatio, int flags, - const Mat& cameraMatrix, const Mat& distCoeffs, - const vector& rvecs, const vector& tvecs, - const vector& reprojErrs, - const vector >& imagePoints, double totalAvgErr) { - FileStorage fs(filename, FileStorage::WRITE); - - time_t t; - time(&t); - struct tm *t2 = localtime(&t); - char buf[1024]; - strftime(buf, sizeof(buf) - 1, "%c", t2); - - fs << "calibration_time" << buf; - - if (!rvecs.empty() || !reprojErrs.empty()) - fs << "nframes" << (int) std::max(rvecs.size(), reprojErrs.size()); - fs << "image_width" << imageSize.width; - fs << "image_height" << imageSize.height; - fs << "board_width" << boardSize.width; - fs << "board_height" << boardSize.height; - fs << "squareSize" << squareSize; - - if (flags & CV_CALIB_FIX_ASPECT_RATIO) - fs << "aspectRatio" << aspectRatio; - - if (flags != 0) { - sprintf(buf, "flags: %s%s%s%s", - flags & CV_CALIB_USE_INTRINSIC_GUESS ? "+use_intrinsic_guess" - : "", - flags & CV_CALIB_FIX_ASPECT_RATIO ? "+fix_aspectRatio" : "", - flags & CV_CALIB_FIX_PRINCIPAL_POINT ? "+fix_principal_point" - : "", - flags & CV_CALIB_ZERO_TANGENT_DIST ? 
"+zero_tangent_dist" : ""); - cvWriteComment(*fs, buf, 0); - } - - fs << "flags" << flags; - - fs << "camera_matrix" << cameraMatrix; - fs << "distortion_coefficients" << distCoeffs; - - fs << "avg_reprojection_error" << totalAvgErr; - if (!reprojErrs.empty()) - fs << "per_view_reprojection_errors" << Mat(reprojErrs); - - if (!rvecs.empty() && !tvecs.empty()) { - Mat bigmat(rvecs.size(), 6, CV_32F); - for (size_t i = 0; i < rvecs.size(); i++) { - Mat r = bigmat(Range(i, i + 1), Range(0, 3)); - Mat t = bigmat(Range(i, i + 1), Range(3, 6)); - rvecs[i].copyTo(r); - tvecs[i].copyTo(t); - } - cvWriteComment( - *fs, - "a set of 6-tuples (rotation vector + translation vector) for each view", - 0); - fs << "extrinsic_parameters" << bigmat; - } - - if (!imagePoints.empty()) { - Mat imagePtMat(imagePoints.size(), imagePoints[0].size(), CV_32FC2); - for (size_t i = 0; i < imagePoints.size(); i++) { - Mat r = imagePtMat.row(i).reshape(2, imagePtMat.cols); - Mat(imagePoints[i]).copyTo(r); - } - fs << "image_points" << imagePtMat; - } +void saveCameraParams(const string& filename, Size imageSize, Size boardSize, float squareSize, float aspectRatio, + int flags, const Mat& cameraMatrix, const Mat& distCoeffs, const vector& rvecs, + const vector& tvecs, const vector& reprojErrs, + const vector >& imagePoints, double totalAvgErr) +{ + FileStorage fs(filename, FileStorage::WRITE); + + time_t t; + time(&t); + struct tm *t2 = localtime(&t); + char buf[1024]; + strftime(buf, sizeof(buf) - 1, "%c", t2); + + fs << "calibration_time" << buf; + + if (!rvecs.empty() || !reprojErrs.empty()) + fs << "nframes" << (int)std::max(rvecs.size(), reprojErrs.size()); + fs << "image_width" << imageSize.width; + fs << "image_height" << imageSize.height; + fs << "board_width" << boardSize.width; + fs << "board_height" << boardSize.height; + fs << "squareSize" << squareSize; + + if (flags & CV_CALIB_FIX_ASPECT_RATIO) + fs << "aspectRatio" << aspectRatio; + + if (flags != 0) + { + sprintf(buf, "flags: %s%s%s%s", flags & CV_CALIB_USE_INTRINSIC_GUESS ? "+use_intrinsic_guess" : "", flags + & CV_CALIB_FIX_ASPECT_RATIO ? "+fix_aspectRatio" : "", flags & CV_CALIB_FIX_PRINCIPAL_POINT + ? "+fix_principal_point" : "", flags & CV_CALIB_ZERO_TANGENT_DIST ? 
"+zero_tangent_dist" : ""); + cvWriteComment(*fs, buf, 0); + } + + fs << "flags" << flags; + + fs << "camera_matrix" << cameraMatrix; + fs << "distortion_coefficients" << distCoeffs; + + fs << "avg_reprojection_error" << totalAvgErr; + if (!reprojErrs.empty()) + fs << "per_view_reprojection_errors" << Mat(reprojErrs); + + if (!rvecs.empty() && !tvecs.empty()) + { + Mat bigmat(rvecs.size(), 6, CV_32F); + for (size_t i = 0; i < rvecs.size(); i++) + { + Mat r = bigmat(Range(i, i + 1), Range(0, 3)); + Mat t = bigmat(Range(i, i + 1), Range(3, 6)); + rvecs[i].copyTo(r); + tvecs[i].copyTo(t); + } + cvWriteComment(*fs, "a set of 6-tuples (rotation vector + translation vector) for each view", 0); + fs << "extrinsic_parameters" << bigmat; + } + + if (!imagePoints.empty()) + { + Mat imagePtMat(imagePoints.size(), imagePoints[0].size(), CV_32FC2); + for (size_t i = 0; i < imagePoints.size(); i++) + { + Mat r = imagePtMat.row(i).reshape(2, imagePtMat.cols); + Mat(imagePoints[i]).copyTo(r); + } + fs << "image_points" << imagePtMat; + } } }//anon namespace -bool Calibration::detectAndDrawChessboard(int idx,image_pool* pool) { - - Mat grey; - pool->getGrey(idx, grey); - if (grey.empty()) - return false; - vector corners; +bool Calibration::detectAndDrawChessboard(int idx, image_pool* pool) +{ + Mat grey = pool->getGrey(idx); + if (grey.empty()) + return false; + vector corners; - IplImage iplgrey = grey; - if (!cvCheckChessboard(&iplgrey, patternsize)) - return false; - bool patternfound = findChessboardCorners(grey, patternsize, corners); + IplImage iplgrey = grey; + if (!cvCheckChessboard(&iplgrey, patternsize)) + return false; + bool patternfound = findChessboardCorners(grey, patternsize, corners); - Mat * img = pool->getImage(idx); + Mat img = pool->getImage(idx); - if (corners.size() < 1) - return false; + if (corners.size() < 1) + return false; - cornerSubPix(grey, corners, Size(11, 11), Size(-1, -1), TermCriteria( - CV_TERMCRIT_EPS + CV_TERMCRIT_ITER, 30, 0.1)); + cornerSubPix(grey, corners, Size(11, 11), Size(-1, -1), TermCriteria(CV_TERMCRIT_EPS + CV_TERMCRIT_ITER, 30, 0.1)); - if(patternfound) - imagepoints.push_back(corners); + if (patternfound) + imagepoints.push_back(corners); - drawChessboardCorners(*img, patternsize, Mat(corners), patternfound); + drawChessboardCorners(img, patternsize, Mat(corners), patternfound); - imgsize = grey.size(); + imgsize = grey.size(); - return patternfound; + return patternfound; } -void Calibration::drawText(int i, image_pool* pool, const char* ctext){ - // Use "y" to show that the baseLine is about - string text = ctext; - int fontFace = FONT_HERSHEY_COMPLEX_SMALL; - double fontScale = .8; - int thickness = .5; - - Mat img = *pool->getImage(i); - - int baseline=0; - Size textSize = getTextSize(text, fontFace, - fontScale, thickness, &baseline); - baseline += thickness; - - // center the text - Point textOrg((img.cols - textSize.width)/2, - (img.rows - textSize.height *2)); - - // draw the box - rectangle(img, textOrg + Point(0, baseline), - textOrg + Point(textSize.width, -textSize.height), - Scalar(0,0,255),CV_FILLED); - // ... 
and the baseline first - line(img, textOrg + Point(0, thickness), - textOrg + Point(textSize.width, thickness), - Scalar(0, 0, 255)); - - // then put the text itself - putText(img, text, textOrg, fontFace, fontScale, - Scalar::all(255), thickness, 8); -} +void Calibration::drawText(int i, image_pool* pool, const char* ctext) +{ + // Use "y" to show that the baseLine is about + string text = ctext; + int fontFace = FONT_HERSHEY_COMPLEX_SMALL; + double fontScale = .8; + int thickness = .5; + + Mat img = pool->getImage(i); + + int baseline = 0; + Size textSize = getTextSize(text, fontFace, fontScale, thickness, &baseline); + baseline += thickness; + + // center the text + Point textOrg((img.cols - textSize.width) / 2, (img.rows - textSize.height * 2)); -void Calibration::resetChess() { + // draw the box + rectangle(img, textOrg + Point(0, baseline), textOrg + Point(textSize.width, -textSize.height), Scalar(0, 0, 255), + CV_FILLED); + // ... and the baseline first + line(img, textOrg + Point(0, thickness), textOrg + Point(textSize.width, thickness), Scalar(0, 0, 255)); - imagepoints.clear(); + // then put the text itself + putText(img, text, textOrg, fontFace, fontScale, Scalar::all(255), thickness, 8); } -void Calibration::calibrate(const char* filename) { +void Calibration::resetChess() +{ - vector rvecs, tvecs; - vector reprojErrs; - double totalAvgErr = 0; - int flags = 0; - flags |= CV_CALIB_FIX_PRINCIPAL_POINT | CV_CALIB_FIX_ASPECT_RATIO; - bool writeExtrinsics = true; - bool writePoints = true; + imagepoints.clear(); +} - bool ok = runCalibration(imagepoints, imgsize, patternsize, 1.f, 1.f, - flags, K, distortion, rvecs, tvecs, reprojErrs, totalAvgErr); +void Calibration::calibrate(const char* filename) +{ + vector rvecs, tvecs; + vector reprojErrs; + double totalAvgErr = 0; + int flags = 0; + flags |= CV_CALIB_FIX_PRINCIPAL_POINT | CV_CALIB_FIX_ASPECT_RATIO; + bool writeExtrinsics = true; + bool writePoints = true; + bool ok = runCalibration(imagepoints, imgsize, patternsize, 1.f, 1.f, flags, K, distortion, rvecs, tvecs, reprojErrs, + totalAvgErr); - if (ok){ + if (ok) + { - saveCameraParams(filename, imgsize, patternsize, 1.f, - 1.f, flags, K, distortion, writeExtrinsics ? rvecs - : vector (), writeExtrinsics ? tvecs - : vector (), writeExtrinsics ? reprojErrs - : vector (), writePoints ? imagepoints : vector< - vector > (), totalAvgErr); - } + saveCameraParams(filename, imgsize, patternsize, 1.f, 1.f, flags, K, distortion, writeExtrinsics ? rvecs : vector< + Mat> (), writeExtrinsics ? tvecs : vector (), writeExtrinsics ? reprojErrs : vector (), writePoints + ? 
imagepoints : vector > (), totalAvgErr); + } } -int Calibration::getNumberDetectedChessboards() { - return imagepoints.size(); +int Calibration::getNumberDetectedChessboards() +{ + return imagepoints.size(); } diff --git a/android/android-jni/jni/Calibration.h b/android/android-jni/jni/Calibration.h index f3a9453..6e0eef3 100644 --- a/android/android-jni/jni/Calibration.h +++ b/android/android-jni/jni/Calibration.h @@ -14,8 +14,6 @@ #include #include - - #include #include "image_pool.h" @@ -24,36 +22,33 @@ #define DETECT_STAR 1 #define DETECT_SURF 2 +class Calibration +{ +public: -class Calibration { - std::vector keypoints; - - vector > imagepoints; - - cv::Mat K; - cv::Mat distortion; - cv::Size imgsize; + Calibration(); + virtual ~Calibration(); + bool detectAndDrawChessboard(int idx, image_pool* pool); + void resetChess(); -public: + int getNumberDetectedChessboards(); - cv::Size patternsize; - - Calibration(); - virtual ~Calibration(); + void calibrate(const char* filename); - bool detectAndDrawChessboard(int idx, image_pool* pool); + void drawText(int idx, image_pool* pool, const char* text); - void resetChess(); + cv::Size patternsize; +private: + std::vector keypoints; - int getNumberDetectedChessboards(); + std::vector > imagepoints; - void calibrate(const char* filename); + cv::Mat K; + cv::Mat distortion; + cv::Size imgsize; - void drawText(int idx, image_pool* pool, const char* text); }; - - #endif /* PROCESSOR_H_ */ diff --git a/android/android-jni/jni/gl_code.cpp b/android/android-jni/jni/gl_code.cpp index c13374f..4512b9d 100644 --- a/android/android-jni/jni/gl_code.cpp +++ b/android/android-jni/jni/gl_code.cpp @@ -37,273 +37,286 @@ using namespace cv; #define LOGI(...) __android_log_print(ANDROID_LOG_INFO,LOG_TAG,__VA_ARGS__) #define LOGE(...) 
__android_log_print(ANDROID_LOG_ERROR,LOG_TAG,__VA_ARGS__) -static void printGLString(const char *name, GLenum s) { - const char *v = (const char *) glGetString(s); - LOGI("GL %s = %s\n", name, v); +static void printGLString(const char *name, GLenum s) +{ + const char *v = (const char *)glGetString(s); + LOGI("GL %s = %s\n", name, v); } -static void checkGlError(const char* op) { - for (GLint error = glGetError(); error; error = glGetError()) { - LOGI("after %s() glError (0x%x)\n", op, error); - } +static void checkGlError(const char* op) +{ + for (GLint error = glGetError(); error; error = glGetError()) + { + LOGI("after %s() glError (0x%x)\n", op, error); + } } static const char gVertexShader[] = "attribute vec4 a_position; \n" - "attribute vec2 a_texCoord; \n" - "varying vec2 v_texCoord; \n" - "void main() \n" - "{ \n" - " gl_Position = a_position; \n" - " v_texCoord = a_texCoord; \n" - "} \n"; - -static const char gFragmentShader[] = - "precision mediump float; \n" - "varying vec2 v_texCoord; \n" - "uniform sampler2D s_texture; \n" - "void main() \n" - "{ \n" - " gl_FragColor = texture2D( s_texture, v_texCoord );\n" - "} \n"; - -const GLfloat gTriangleVertices[] = { 0.0f, 0.5f, -0.5f, -0.5f, 0.5f, -0.5f }; -GLubyte testpixels[4 * 3] = { 255, 0, 0, // Red - 0, 255, 0, // Green - 0, 0, 255, // Blue - 255, 255, 0 // Yellow - }; - -GLuint glcamera::createSimpleTexture2D(GLuint _textureid, GLubyte* pixels, - int width, int height, int channels) { - - // Bind the texture - glActiveTexture(GL_TEXTURE0); - checkGlError("glActiveTexture"); - // Bind the texture object - glBindTexture(GL_TEXTURE_2D, _textureid); - checkGlError("glBindTexture"); - - GLenum format; - switch (channels) { - case 3: - format = GL_RGB; - break; - case 1: - format = GL_LUMINANCE; - break; - case 4: - format = GL_RGBA; - break; - } - // Load the texture - glTexImage2D(GL_TEXTURE_2D, 0, format, width, height, 0, format, - GL_UNSIGNED_BYTE, pixels); - - checkGlError("glTexImage2D"); - // Set the filtering mode - glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST ); - glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST ); - - return _textureid; + "attribute vec2 a_texCoord; \n" + "varying vec2 v_texCoord; \n" + "void main() \n" + "{ \n" + " gl_Position = a_position; \n" + " v_texCoord = a_texCoord; \n" + "} \n"; + +static const char gFragmentShader[] = "precision mediump float; \n" + "varying vec2 v_texCoord; \n" + "uniform sampler2D s_texture; \n" + "void main() \n" + "{ \n" + " gl_FragColor = texture2D( s_texture, v_texCoord );\n" + "} \n"; + +const GLfloat gTriangleVertices[] = {0.0f, 0.5f, -0.5f, -0.5f, 0.5f, -0.5f}; +GLubyte testpixels[4 * 3] = {255, 0, 0, // Red + 0, 255, 0, // Green + 0, 0, 255, // Blue + 255, 255, 0 // Yellow + }; + +GLuint glcamera::createSimpleTexture2D(GLuint _textureid, GLubyte* pixels, int width, int height, int channels) +{ + + // Bind the texture + glActiveTexture( GL_TEXTURE0); + checkGlError("glActiveTexture"); + // Bind the texture object + glBindTexture(GL_TEXTURE_2D, _textureid); + checkGlError("glBindTexture"); + + GLenum format; + switch (channels) + { + case 3: + format = GL_RGB; + break; + case 1: + format = GL_LUMINANCE; + break; + case 4: + format = GL_RGBA; + break; + } + // Load the texture + glTexImage2D(GL_TEXTURE_2D, 0, format, width, height, 0, format, GL_UNSIGNED_BYTE, pixels); + + checkGlError("glTexImage2D"); + // Set the filtering mode + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST); + glTexParameteri(GL_TEXTURE_2D, 
GL_TEXTURE_MAG_FILTER, GL_NEAREST); + + return _textureid; } -GLuint glcamera::loadShader(GLenum shaderType, const char* pSource) { - GLuint shader = glCreateShader(shaderType); - if (shader) { - glShaderSource(shader, 1, &pSource, NULL); - glCompileShader(shader); - GLint compiled = 0; - glGetShaderiv(shader, GL_COMPILE_STATUS, &compiled); - if (!compiled) { - GLint infoLen = 0; - glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &infoLen); - if (infoLen) { - char* buf = (char*) malloc(infoLen); - if (buf) { - glGetShaderInfoLog(shader, infoLen, NULL, buf); - LOGE("Could not compile shader %d:\n%s\n", - shaderType, buf); - free(buf); - } - glDeleteShader(shader); - shader = 0; - } - } - } - return shader; +GLuint glcamera::loadShader(GLenum shaderType, const char* pSource) +{ + GLuint shader = glCreateShader(shaderType); + if (shader) + { + glShaderSource(shader, 1, &pSource, NULL); + glCompileShader(shader); + GLint compiled = 0; + glGetShaderiv(shader, GL_COMPILE_STATUS, &compiled); + if (!compiled) + { + GLint infoLen = 0; + glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &infoLen); + if (infoLen) + { + char* buf = (char*)malloc(infoLen); + if (buf) + { + glGetShaderInfoLog(shader, infoLen, NULL, buf); + LOGE("Could not compile shader %d:\n%s\n", + shaderType, buf); + free(buf); + } + glDeleteShader(shader); + shader = 0; + } + } + } + return shader; } -GLuint glcamera::createProgram(const char* pVertexSource, - const char* pFragmentSource) { - GLuint vertexShader = loadShader(GL_VERTEX_SHADER, pVertexSource); - if (!vertexShader) { - return 0; - } - - GLuint pixelShader = loadShader(GL_FRAGMENT_SHADER, pFragmentSource); - if (!pixelShader) { - return 0; - } - - GLuint program = glCreateProgram(); - if (program) { - glAttachShader(program, vertexShader); - checkGlError("glAttachShader"); - glAttachShader(program, pixelShader); - checkGlError("glAttachShader"); - glLinkProgram(program); - GLint linkStatus = GL_FALSE; - glGetProgramiv(program, GL_LINK_STATUS, &linkStatus); - if (linkStatus != GL_TRUE) { - GLint bufLength = 0; - glGetProgramiv(program, GL_INFO_LOG_LENGTH, &bufLength); - if (bufLength) { - char* buf = (char*) malloc(bufLength); - if (buf) { - glGetProgramInfoLog(program, bufLength, NULL, buf); - LOGE("Could not link program:\n%s\n", buf); - free(buf); - } - } - glDeleteProgram(program); - program = 0; - } - } - return program; +GLuint glcamera::createProgram(const char* pVertexSource, const char* pFragmentSource) +{ + GLuint vertexShader = loadShader(GL_VERTEX_SHADER, pVertexSource); + if (!vertexShader) + { + return 0; + } + + GLuint pixelShader = loadShader(GL_FRAGMENT_SHADER, pFragmentSource); + if (!pixelShader) + { + return 0; + } + + GLuint program = glCreateProgram(); + if (program) + { + glAttachShader(program, vertexShader); + checkGlError("glAttachShader"); + glAttachShader(program, pixelShader); + checkGlError("glAttachShader"); + glLinkProgram(program); + GLint linkStatus = GL_FALSE; + glGetProgramiv(program, GL_LINK_STATUS, &linkStatus); + if (linkStatus != GL_TRUE) + { + GLint bufLength = 0; + glGetProgramiv(program, GL_INFO_LOG_LENGTH, &bufLength); + if (bufLength) + { + char* buf = (char*)malloc(bufLength); + if (buf) + { + glGetProgramInfoLog(program, bufLength, NULL, buf); + LOGE("Could not link program:\n%s\n", buf); + free(buf); + } + } + glDeleteProgram(program); + program = 0; + } + } + return program; } //GLuint textureID; -bool glcamera::setupGraphics(int w, int h) { - printGLString("Version", GL_VERSION); - printGLString("Vendor", GL_VENDOR); - 
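// [editor's note, not part of the patch] setupGraphics() below seeds
// textureID with the 2x2 RGB `testpixels` pattern; each camera frame then
// replaces it in glcamera::step() by re-uploading over the same texture
// object, as the patch's step() does:
//
//   textureID = createSimpleTexture2D(textureID, nimg.ptr(0),
//                                     nimg.rows, nimg.cols, nimg.channels());
//
// (passing rows/cols in the width/height slots is harmless here only
// because nimg is always resized to a square 256x256 texture)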
printGLString("Renderer", GL_RENDERER); - printGLString("Extensions", GL_EXTENSIONS); - - LOGI("setupGraphics(%d, %d)", w, h); - gProgram = createProgram(gVertexShader, gFragmentShader); - if (!gProgram) { - LOGE("Could not create program."); - return false; - } - gvPositionHandle = glGetAttribLocation(gProgram, "a_position"); - gvTexCoordHandle = glGetAttribLocation(gProgram, "a_texCoord"); - - gvSamplerHandle = glGetAttribLocation(gProgram, "s_texture"); - - // Use tightly packed data - glPixelStorei(GL_UNPACK_ALIGNMENT, 1); - - // Generate a texture object - glGenTextures(1, &textureID); - textureID = createSimpleTexture2D(textureID, testpixels, 2, 2, 3); - - checkGlError("glGetAttribLocation"); - LOGI("glGetAttribLocation(\"vPosition\") = %d\n", - gvPositionHandle); - - glViewport(0, 0, w, h); - checkGlError("glViewport"); - return true; +bool glcamera::setupGraphics(int w, int h) +{ + printGLString("Version", GL_VERSION); + printGLString("Vendor", GL_VENDOR); + printGLString("Renderer", GL_RENDERER); + printGLString("Extensions", GL_EXTENSIONS); + + LOGI("setupGraphics(%d, %d)", w, h); + gProgram = createProgram(gVertexShader, gFragmentShader); + if (!gProgram) + { + LOGE("Could not create program."); + return false; + } + gvPositionHandle = glGetAttribLocation(gProgram, "a_position"); + gvTexCoordHandle = glGetAttribLocation(gProgram, "a_texCoord"); + + gvSamplerHandle = glGetAttribLocation(gProgram, "s_texture"); + + // Use tightly packed data + glPixelStorei(GL_UNPACK_ALIGNMENT, 1); + + // Generate a texture object + glGenTextures(1, &textureID); + textureID = createSimpleTexture2D(textureID, testpixels, 2, 2, 3); + + checkGlError("glGetAttribLocation"); + LOGI("glGetAttribLocation(\"vPosition\") = %d\n", + gvPositionHandle); + + glViewport(0, 0, w, h); + checkGlError("glViewport"); + return true; } -void glcamera::renderFrame() { - - GLfloat vVertices[] = { -1.0f, 1.0f, 0.0f, // Position 0 - 0.0f, 0.0f, // TexCoord 0 - -1.0f, -1.0f, 0.0f, // Position 1 - 0.0f, 1.0f, // TexCoord 1 - 1.0f, -1.0f, 0.0f, // Position 2 - 1.0f, 1.0f, // TexCoord 2 - 1.0f, 1.0f, 0.0f, // Position 3 - 1.0f, 0.0f // TexCoord 3 - }; - GLushort indices[] = { 0, 1, 2, 0, 2, 3 }; - GLsizei stride = 5 * sizeof(GLfloat); // 3 for position, 2 for texture - - glClearColor(0.0f, 0.0f, 0.0f, 0.0f); - checkGlError("glClearColor"); - - glClear(GL_DEPTH_BUFFER_BIT | GL_COLOR_BUFFER_BIT); - checkGlError("glClear"); - - glUseProgram(gProgram); - checkGlError("glUseProgram"); - - // Load the vertex position - glVertexAttribPointer(gvPositionHandle, 3, GL_FLOAT, GL_FALSE, stride, - vVertices); - // Load the texture coordinate - glVertexAttribPointer(gvTexCoordHandle, 2, GL_FLOAT, GL_FALSE, stride, - &vVertices[3]); - - glEnableVertexAttribArray(gvPositionHandle); - glEnableVertexAttribArray(gvTexCoordHandle); - - // Bind the texture - glActiveTexture(GL_TEXTURE0); - glBindTexture(GL_TEXTURE_2D, textureID); - - // Set the sampler texture unit to 0 - glUniform1i(gvSamplerHandle, 0); - - glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_SHORT, indices); - - //checkGlError("glVertexAttribPointer"); - //glEnableVertexAttribArray(gvPositionHandle); - //checkGlError("glEnableVertexAttribArray"); - //glDrawArrays(GL_TRIANGLES, 0, 3); - //checkGlError("glDrawArrays"); -} +void glcamera::renderFrame() +{ -void glcamera::init(int width, int height) { - newimage = false; - nimg = Mat(); - setupGraphics(width, height); + GLfloat vVertices[] = {-1.0f, 1.0f, 0.0f, // Position 0 + 0.0f, 0.0f, // TexCoord 0 + -1.0f, -1.0f, 0.0f, // Position 1 + 
0.0f, 1.0f, // TexCoord 1 + 1.0f, -1.0f, 0.0f, // Position 2 + 1.0f, 1.0f, // TexCoord 2 + 1.0f, 1.0f, 0.0f, // Position 3 + 1.0f, 0.0f // TexCoord 3 + }; + GLushort indices[] = {0, 1, 2, 0, 2, 3}; + GLsizei stride = 5 * sizeof(GLfloat); // 3 for position, 2 for texture -} + glClearColor(0.0f, 0.0f, 0.0f, 0.0f); + checkGlError("glClearColor"); + + glClear(GL_DEPTH_BUFFER_BIT | GL_COLOR_BUFFER_BIT); + checkGlError("glClear"); + + glUseProgram(gProgram); + checkGlError("glUseProgram"); + + // Load the vertex position + glVertexAttribPointer(gvPositionHandle, 3, GL_FLOAT, GL_FALSE, stride, vVertices); + // Load the texture coordinate + glVertexAttribPointer(gvTexCoordHandle, 2, GL_FLOAT, GL_FALSE, stride, &vVertices[3]); -void glcamera::step() { - if (newimage && !nimg.empty()) { + glEnableVertexAttribArray(gvPositionHandle); + glEnableVertexAttribArray(gvTexCoordHandle); - textureID = createSimpleTexture2D(textureID, - nimg.ptr (0), nimg.rows, nimg.cols, - nimg.channels()); - newimage = false; - } - renderFrame(); + // Bind the texture + glActiveTexture( GL_TEXTURE0); + glBindTexture(GL_TEXTURE_2D, textureID); + // Set the sampler texture unit to 0 + glUniform1i(gvSamplerHandle, 0); + + glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_SHORT, indices); + + //checkGlError("glVertexAttribPointer"); + //glEnableVertexAttribArray(gvPositionHandle); + //checkGlError("glEnableVertexAttribArray"); + //glDrawArrays(GL_TRIANGLES, 0, 3); + //checkGlError("glDrawArrays"); } -void glcamera::setTextureImage(Ptr img) { +void glcamera::init(int width, int height) +{ + newimage = false; + nimg = Mat(); + setupGraphics(width, height); - //int p2 = (int)(std::log(img->size().width)/0.69315); - int sz = 256;//std::pow(2,p2); - Size size(sz, sz); +} - resize(*img, nimg, size,cv::INTER_NEAREST); +void glcamera::step() +{ + if (newimage && !nimg.empty()) + { - newimage = true; + textureID = createSimpleTexture2D(textureID, nimg.ptr (0), nimg.rows, nimg.cols, nimg.channels()); + newimage = false; + } + renderFrame(); } +#define NEAREST_POW2(x)((int)(0.5 + std::log(x)/0.69315) ) +void glcamera::setTextureImage(const Mat& img) +{ + Size size(256, 256); + resize(img, nimg, size, cv::INTER_NEAREST); + newimage = true; +} -void glcamera::drawMatToGL(int idx, image_pool* pool) { +void glcamera::drawMatToGL(int idx, image_pool* pool) +{ - Ptr img = pool->getImage(idx); + Mat img = pool->getImage(idx); - if (img.empty()) - return; //no image at input_idx! + if (img.empty()) + return; //no image at input_idx! 
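// [editor's note, not part of the patch] setTextureImage() shrinks every
// frame to a fixed 256x256 cv::Mat before upload, since baseline OpenGL ES
// only guarantees power-of-two texture sizes. One subtlety: in the OpenCV
// 2.x signature resize(src, dst, dsize, fx = 0, fy = 0,
// interpolation = INTER_LINEAR), the four-argument call
//
//   resize(img, nimg, size, cv::INTER_NEAREST);
//
// passes INTER_NEAREST (== 0) in the fx slot, so interpolation silently
// stays at the default INTER_LINEAR; spelling it
// resize(img, nimg, size, 0, 0, cv::INTER_NEAREST) would make the
// nearest-neighbour intent actually take effect.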
- setTextureImage(img); + setTextureImage(img); } -glcamera::glcamera():newimage(false) { - LOGI("glcamera constructor"); +glcamera::glcamera() : + newimage(false) +{ + LOGI("glcamera constructor"); } -glcamera::~glcamera() { - LOGI("glcamera destructor"); +glcamera::~glcamera() +{ + LOGI("glcamera destructor"); } - diff --git a/android/android-jni/jni/glcamera.h b/android/android-jni/jni/glcamera.h index e960410..923fc53 100644 --- a/android/android-jni/jni/glcamera.h +++ b/android/android-jni/jni/glcamera.h @@ -6,35 +6,34 @@ #include #include "image_pool.h" -class glcamera { - Mat nimg; - bool newimage; - GLuint textureID; - - GLuint gProgram; - GLuint gvPositionHandle; - - GLuint gvTexCoordHandle; - GLuint gvSamplerHandle; +class glcamera +{ public: - glcamera(); - ~glcamera(); - void init(int width, int height); - void step(); + glcamera(); + ~glcamera(); + void init(int width, int height); + void step(); - void drawMatToGL(int idx, image_pool* pool); - void setTextureImage(Ptr img); + void drawMatToGL(int idx, image_pool* pool); + void setTextureImage(const cv::Mat& img); private: - GLuint createSimpleTexture2D(GLuint _textureid, GLubyte* pixels, int width, - int height, int channels); - GLuint loadShader(GLenum shaderType, const char* pSource); - GLuint - createProgram(const char* pVertexSource, - const char* pFragmentSource); - bool setupGraphics(int w, int h); - void renderFrame(); + GLuint createSimpleTexture2D(GLuint _textureid, GLubyte* pixels, int width, int height, int channels); + GLuint loadShader(GLenum shaderType, const char* pSource); + GLuint + createProgram(const char* pVertexSource, const char* pFragmentSource); + bool setupGraphics(int w, int h); + void renderFrame(); + cv::Mat nimg; + bool newimage; + GLuint textureID; + + GLuint gProgram; + GLuint gvPositionHandle; + + GLuint gvTexCoordHandle; + GLuint gvSamplerHandle; }; #endif diff --git a/android/android-jni/jni/image_pool.cpp b/android/android-jni/jni/image_pool.cpp index 42e76b4..6b0ccf4 100644 --- a/android/android-jni/jni/image_pool.cpp +++ b/android/android-jni/jni/image_pool.cpp @@ -5,92 +5,97 @@ #include #include +using namespace cv; + #define LOG_TAG "libandroid-opencv" #define LOGI(...) __android_log_print(ANDROID_LOG_INFO,LOG_TAG,__VA_ARGS__) #define LOGE(...) 
__android_log_print(ANDROID_LOG_ERROR,LOG_TAG,__VA_ARGS__) JNIEXPORT jint JNI_OnLoad(JavaVM* vm, void* reserved) { -JNIEnv *env; -LOGI("JNI_OnLoad called for opencv"); -return JNI_VERSION_1_4; + JNIEnv *env; + LOGI("JNI_OnLoad called for opencv"); + return JNI_VERSION_1_4; } JNIEXPORT void JNICALL Java_com_opencv_jni_opencvJNI_addYUVtoPool(JNIEnv * env, - jclass thiz, jlong ppool, jobject _jpool, jbyteArray jbuffer, - jint jidx, jint jwidth, jint jheight, jboolean jgrey) { - image_pool *pool = (image_pool *) ppool; - - Ptr mat = pool->getYUV(jidx); + jclass thiz, jlong ppool, jobject _jpool, jbyteArray jbuffer, + jint jidx, jint jwidth, jint jheight, jboolean jgrey) +{ + int buff_height = jheight + (jheight/2); + Size buff_size(jwidth,buff_height); + image_pool *pool = (image_pool *) ppool; - if (mat.empty() || mat->cols != jwidth || mat->rows != jheight * 2) { - //pool->deleteGrey(jidx); - mat = new Mat(jheight * 2, jwidth, CV_8UC1); - } + Mat mat = pool->getYUV(jidx); - jsize sz = env->GetArrayLength(jbuffer); - uchar* buff = mat->ptr (0); + if (mat.empty() || mat.size() != buff_size ) + { + mat.create(buff_size, CV_8UC1); + } - env->GetByteArrayRegion(jbuffer, 0, sz, (jbyte*) buff); + jsize sz = env->GetArrayLength(jbuffer); + uchar* buff = mat.ptr (0); - pool->addYUVMat(jidx, mat); - Ptr color = pool->getImage(jidx); - if (color.empty() || color->cols != jwidth || color->rows != jheight) { - //pool->deleteImage(jidx); - color = new Mat(jheight, jwidth, CV_8UC3); - } - if (!jgrey) { + env->GetByteArrayRegion(jbuffer, 0, sz, (jbyte*) buff); - //doesn't work unfortunately.. - //cvtColor(*mat,*color, CV_YCrCb2RGB); - color_convert_common(buff, buff + jwidth * jheight, jwidth, jheight, - color->ptr (0), false); + pool->addYUVMat(jidx, mat); - } + Mat color = pool->getImage(jidx); - if (jgrey) { - Mat grey; - pool->getGrey(jidx, grey); + if (!jgrey) + { - cvtColor(grey, *color, CV_GRAY2RGB); + if (color.cols != jwidth || color.rows != jheight || color.channels() != 3) + { + color.create(jheight, jwidth, CV_8UC3); + } + //doesn't work unfortunately.. 
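// [editor's note, not part of the patch] The preview buffer here is
// Android's NV21 (YUV 4:2:0 semi-planar): a full-resolution 8-bit Y plane
// followed by a half-resolution interleaved V/U plane, hence the
// width x (height + height/2) allocation above. cvtColor with
// CV_YCrCb2RGB expects packed per-pixel 3-channel data, which is why it
// cannot be applied to this buffer and the hand-rolled
// color_convert_common() is used instead; its integer coefficients are the
// BT.601 ones scaled by 1024 (1192 ~ 1.164*1024, 1634 ~ 1.596*1024,
// 833 ~ 0.813*1024, 400 ~ 0.391*1024, 2066 ~ 2.018*1024), matching the
// float constants visible in the pre-refactor yuv420sp2rgb.c.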
+ //TODO cvtColor(mat,color, CV_YCrCb2RGB); + color_convert_common(buff, buff + jwidth * jheight, jwidth, jheight, + color.ptr (0), false); + } - } + if (jgrey) + { + Mat grey = pool->getGrey(jidx); + color = grey; + } - pool->addImage(jidx, color); + pool->addImage(jidx, color); } -image_pool::image_pool() { +image_pool::image_pool() +{ } -image_pool::~image_pool() { - __android_log_print(ANDROID_LOG_INFO, "image_pool", "destructor called"); +image_pool::~image_pool() +{ + __android_log_print(ANDROID_LOG_INFO, "image_pool", "destructor called"); } -cv::Ptr image_pool::getImage(int i) { - return imagesmap[i]; +Mat image_pool::getImage(int i) +{ + return imagesmap[i]; } -void image_pool::getGrey(int i, Mat & grey) { - - cv::Ptr tm = yuvImagesMap[i]; - if (tm.empty()) - return; - grey = (*tm)(Range(0, tm->rows / 2), Range::all()); +Mat image_pool::getGrey(int i) +{ + Mat tm = yuvImagesMap[i]; + if (tm.empty()) + return tm; + return tm(Range(0, tm.rows * (2.0f/3)), Range::all()); } -cv::Ptr image_pool::getYUV(int i) { - - return yuvImagesMap[i]; - +Mat image_pool::getYUV(int i) +{ + return yuvImagesMap[i]; } -void image_pool::addYUVMat(int i, cv::Ptr mat) { - - yuvImagesMap[i] = mat; - +void image_pool::addYUVMat(int i, Mat mat) +{ + yuvImagesMap[i] = mat; } -void image_pool::addImage(int i, cv::Ptr mat) { - - imagesmap[i] = mat; - +void image_pool::addImage(int i, Mat mat) +{ + imagesmap[i] = mat; } diff --git a/android/android-jni/jni/image_pool.h b/android/android-jni/jni/image_pool.h index bb0ea94..6882cb6 100644 --- a/android/android-jni/jni/image_pool.h +++ b/android/android-jni/jni/image_pool.h @@ -1,12 +1,14 @@ -#ifndef IMAGE_POOL_H -#define IMAGE_POOL_H +#ifndef IMAGE_POOL_H_ANDROID_KDJFKJ +#define IMAGE_POOL_H_ANDROID_KDJFKJ #include -#include #include -using namespace cv; + +#if ANDROID +#include #ifdef __cplusplus -extern "C" { +extern "C" +{ #endif JNIEXPORT jint JNI_OnLoad(JavaVM* vm, void* reserved); @@ -15,48 +17,48 @@ JNIEXPORT jint JNI_OnLoad(JavaVM* vm, void* reserved); // JNIEnv *jenv, jclass jcls, jlong jarg1, jobject jarg1_); -JNIEXPORT void JNICALL Java_com_opencv_jni_opencvJNI_addYUVtoPool - (JNIEnv *, jclass, jlong, jobject, jbyteArray, jint, jint, jint, jboolean); +JNIEXPORT void JNICALL Java_com_opencv_jni_opencvJNI_addYUVtoPool(JNIEnv *, jclass, jlong, jobject, jbyteArray, jint, + jint, jint, jboolean); #ifdef __cplusplus } #endif +#endif +class image_pool +{ -//bool yuv2mat2(char *data, int size, int width, int height, bool grey, Mat& mat); - - -class image_pool { - std::map > imagesmap; - std::map > yuvImagesMap; - //uchar * mbuffer; - //int length; public: - image_pool(); - ~image_pool(); - cv::Ptr getImage(int i); - - void getGrey(int i, Mat & grey); - cv::Ptr getYUV(int i); - - int getCount(){ - return imagesmap.size(); - } - - void addImage(int i, Ptr< Mat> mat); - /** this function stores the given matrix in the the yuvImagesMap. Also, - * after this call getGrey will work, as the grey image is just the top - * half of the YUV mat. 
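 * [editor's note, not part of the patch] With the (height + height/2)-row
 * NV21 buffer now allocated in addYUVtoPool, the luma plane is the top
 * two-thirds of the YUV mat rather than the top half, which is exactly
 * what the new getGrey() returns via Range(0, tm.rows * (2.0f/3)).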
- * - * \param i index to store yuv image at - * \param mat the yuv matrix to store - */ - void addYUVMat(int i, Ptr< Mat> mat); - - - int addYUV(uchar* buffer, int size, int width, int height, bool grey,int idx); - - void getBitmap(int * outintarray, int size, int idx); - + image_pool(); + ~image_pool(); + cv::Mat getImage(int i); + cv::Mat getGrey(int i); + cv::Mat getYUV(int i); + + int getCount() + { + return imagesmap.size(); + } + + /** Adds a mat at the given index - will not do a deep copy, just images[i] = mat + * + */ + void addImage(int i, cv::Mat mat); + + /** this function stores the given matrix in the the yuvImagesMap. Also, + * after this call getGrey will work, as the grey image is just the top + * half of the YUV mat. + * + * \param i index to store yuv image at + * \param mat the yuv matrix to store + */ + void addYUVMat(int i, cv::Mat mat); + + // int addYUV(uchar* buffer, int size, int width, int height, bool grey,int idx); + // + // void getBitmap(int * outintarray, int size, int idx); +private: + std::map imagesmap; + std::map yuvImagesMap; }; #endif diff --git a/android/android-jni/jni/image_pool.i b/android/android-jni/jni/image_pool.i index aed623b..c1b3c6d 100644 --- a/android/android-jni/jni/image_pool.i +++ b/android/android-jni/jni/image_pool.i @@ -46,10 +46,8 @@ public: ~image_pool(); - Ptr getImage(int i); - - - void addImage(int i, Ptr< Mat> mat); + Mat getImage(int i); + void addImage(int i, Mat mat); diff --git a/android/android-jni/jni/yuv420sp2rgb.c b/android/android-jni/jni/yuv420sp2rgb.c index e1d6006..eec0e53 100644 --- a/android/android-jni/jni/yuv420sp2rgb.c +++ b/android/android-jni/jni/yuv420sp2rgb.c @@ -1,98 +1,80 @@ #include #include -#include - - -#ifndef max -#define max(a,b) ({typeof(a) _a = (a); typeof(b) _b = (b); _a > _b ? _a : _b; }) -#define min(a,b) ({typeof(a) _a = (a); typeof(b) _b = (b); _a < _b ? _a : _b; }) -#endif - +#include /* - YUV 4:2:0 image with a plane of 8 bit Y samples followed by an interleaved - U/V plane containing 8 bit 2x2 subsampled chroma samples. - except the interleave order of U and V is reversed. + YUV 4:2:0 image with a plane of 8 bit Y samples followed by an interleaved + U/V plane containing 8 bit 2x2 subsampled chroma samples. + except the interleave order of U and V is reversed. - H V - Y Sample Period 1 1 - U (Cb) Sample Period 2 2 - V (Cr) Sample Period 2 2 + H V + Y Sample Period 1 1 + U (Cb) Sample Period 2 2 + V (Cr) Sample Period 2 2 */ - /* size of a char: find . -name limits.h -exec grep CHAR_BIT {} \; */ +#ifndef max +#define max(a,b) ({typeof(a) _a = (a); typeof(b) _b = (b); _a > _b ? _a : _b; }) +#define min(a,b) ({typeof(a) _a = (a); typeof(b) _b = (b); _a < _b ? 
_a : _b; }) +#endif + const int bytes_per_pixel = 2; -void color_convert_common( - unsigned char *pY, unsigned char *pUV, - int width, int height, unsigned char *buffer, - int grey) +void color_convert_common(unsigned char *pY, unsigned char *pUV, int width, int height, unsigned char *buffer, int grey) { - int i, j; - int nR, nG, nB; - int nY, nU, nV; - unsigned char *out = buffer; - int offset = 0; - if(grey){ - for (i = 0; i < height; i++) { - for (j = 0; j < width; j++) { - unsigned char nB = *(pY + i * width + j); + int i, j; + int nR, nG, nB; + int nY, nU, nV; + unsigned char *out = buffer; + int offset = 0; + + if (grey) + { + memcpy(out,pY,width*height*sizeof(unsigned char)); + } + else + // YUV 4:2:0 + for (i = 0; i < height; i++) + { + for (j = 0; j < width; j++) + { + nY = *(pY + i * width + j); + nV = *(pUV + (i / 2) * width + bytes_per_pixel * (j / 2)); + nU = *(pUV + (i / 2) * width + bytes_per_pixel * (j / 2) + 1); + + // Yuv Convert + nY -= 16; + nU -= 128; + nV -= 128; + + if (nY < 0) + nY = 0; - out[offset++] = (unsigned char)nB; - // out[offset++] = (unsigned char)nB; - // out[offset++] = (unsigned char)nB; - } - } - }else - // YUV 4:2:0 - for (i = 0; i < height; i++) { - for (j = 0; j < width; j++) { - nY = *(pY + i * width + j); - nV = *(pUV + (i/2) * width + bytes_per_pixel * (j/2)); - nU = *(pUV + (i/2) * width + bytes_per_pixel * (j/2) + 1); - - // Yuv Convert - nY -= 16; - nU -= 128; - nV -= 128; - - if (nY < 0) - nY = 0; - - // nR = (int)(1.164 * nY + 2.018 * nU); - // nG = (int)(1.164 * nY - 0.813 * nV - 0.391 * nU); - // nB = (int)(1.164 * nY + 1.596 * nV); - - nB = (int)(1192 * nY + 2066 * nU); - nG = (int)(1192 * nY - 833 * nV - 400 * nU); - nR = (int)(1192 * nY + 1634 * nV); - - nR = min(262143, max(0, nR)); - nG = min(262143, max(0, nG)); - nB = min(262143, max(0, nB)); - - - nR >>= 10; nR &= 0xff; - nG >>= 10; nG &= 0xff; - nB >>= 10; nB &= 0xff; + nB = (int)(1192 * nY + 2066 * nU); + nG = (int)(1192 * nY - 833 * nV - 400 * nU); + nR = (int)(1192 * nY + 1634 * nV); - out[offset++] = (unsigned char)nR; - out[offset++] = (unsigned char)nG; - out[offset++] = (unsigned char)nB; + nR = min(262143, max(0, nR)); + nG = min(262143, max(0, nG)); + nB = min(262143, max(0, nB)); - //out[offset++] = 0xff; //set alpha for ARGB 8888 format + nR >>= 10; + nR &= 0xff; + nG >>= 10; + nG &= 0xff; + nB >>= 10; + nB &= 0xff; + out[offset++] = (unsigned char)nR; + out[offset++] = (unsigned char)nG; + out[offset++] = (unsigned char)nB; + } + } - } - //offset = i * width * 3; //non power of two - //offset = i * texture_size + j;//power of two - //offset *= 3; //3 byte per pixel - //out = buffer + offset; - } -} +} diff --git a/android/android-jni/res/layout/calibrationviewer.xml b/android/android-jni/res/layout/calibrationviewer.xml new file mode 100644 index 0000000..00dea19 --- /dev/null +++ b/android/android-jni/res/layout/calibrationviewer.xml @@ -0,0 +1,11 @@ + + + + + diff --git a/android/android-jni/res/layout/camerasettings.xml b/android/android-jni/res/layout/camerasettings.xml new file mode 100644 index 0000000..42bbff0 --- /dev/null +++ b/android/android-jni/res/layout/camerasettings.xml @@ -0,0 +1,40 @@ + + + + + + + + + + + + + + + + diff --git a/android/android-jni/res/layout/chesssizer.xml b/android/android-jni/res/layout/chesssizer.xml new file mode 100644 index 0000000..b93bc0b --- /dev/null +++ b/android/android-jni/res/layout/chesssizer.xml @@ -0,0 +1,40 @@ + + + + + + + + + + + + + + + + diff --git a/android/android-jni/res/values/attrs.xml 
b/android/android-jni/res/values/attrs.xml new file mode 100644 index 0000000..89727ff --- /dev/null +++ b/android/android-jni/res/values/attrs.xml @@ -0,0 +1,11 @@ + + + + + + + + + + + \ No newline at end of file diff --git a/android/android-jni/res/values/chessnumbers.xml b/android/android-jni/res/values/chessnumbers.xml new file mode 100644 index 0000000..f0db569 --- /dev/null +++ b/android/android-jni/res/values/chessnumbers.xml @@ -0,0 +1,20 @@ + + + +3 +4 +5 +6 +7 +8 +9 +10 +11 +12 +13 + + +Choose the width: + +Choose the height: + diff --git a/android/android-jni/res/values/settingnumbers.xml b/android/android-jni/res/values/settingnumbers.xml new file mode 100644 index 0000000..4fe6de7 --- /dev/null +++ b/android/android-jni/res/values/settingnumbers.xml @@ -0,0 +1,20 @@ + + + +320x240 +400x300 +600x400 +800x600 +1000x800 + + +color +BW + + +Image Size:\n(may not be exact) + + +Camera Mode: + + \ No newline at end of file diff --git a/android/android-jni/res/values/strings.xml b/android/android-jni/res/values/strings.xml new file mode 100644 index 0000000..fa623e2 --- /dev/null +++ b/android/android-jni/res/values/strings.xml @@ -0,0 +1,19 @@ + + + Calibration + Pattern Size + Please choose the width and height (number of inside corners) of the checker + board pattern you will be using for calibration. Default is 6 by 8 corners. You may find a checkerboard pattern at + http://opencv.willowgarage.com/pattern + + http://opencv.willowgarage.com/pattern + Camera Settings + Change the camera settings + + Calibration calculations have started... + Calibration calculations has stopped. + Calibration finished, you camera is calibrated. + Calibration + Please capture atleast 10 images of the pattern! + + diff --git a/android/android-jni/sample.local.env.mk b/android/android-jni/sample.local.env.mk index cea5d71..74fbcdb 100644 --- a/android/android-jni/sample.local.env.mk +++ b/android/android-jni/sample.local.env.mk @@ -6,3 +6,4 @@ OPENCV_CONFIG=../build/android-opencv.mk #you can download the ndk from http://www.crystax.net/android/ndk-r4.php ANDROID_NDK_ROOT=$(HOME)/android-ndk-r4-crystax +ARM_TARGETS=armeabi armeabi-v7a \ No newline at end of file diff --git a/android/android-jni/src/com/opencv/calibration/CalibrationViewer.java b/android/android-jni/src/com/opencv/calibration/CalibrationViewer.java new file mode 100644 index 0000000..2ae6b78 --- /dev/null +++ b/android/android-jni/src/com/opencv/calibration/CalibrationViewer.java @@ -0,0 +1,47 @@ +package com.opencv.calibration; + +import java.io.BufferedReader; +import java.io.FileNotFoundException; +import java.io.FileReader; +import java.io.IOException; + +import android.app.Activity; +import android.os.Bundle; +import android.text.method.ScrollingMovementMethod; +import android.util.Log; +import android.widget.TextView; + +import com.opencv.R; + +public class CalibrationViewer extends Activity { + + @Override + protected void onCreate(Bundle savedInstanceState) { + // TODO Auto-generated method stub + super.onCreate(savedInstanceState); + setContentView(R.layout.calibrationviewer); + + Bundle extras = getIntent().getExtras(); + String filename = extras.getString("calibfile"); + if (filename != null) { + TextView text = (TextView) findViewById(R.id.calibtext); + text.setMovementMethod(new ScrollingMovementMethod()); + try { + BufferedReader reader = new BufferedReader(new FileReader( + filename)); + while (reader.ready()) { + text.append(reader.readLine() +"\n"); + } + + } catch (FileNotFoundException e) { + Log.e("opencv", "could 
not open calibration file at:" + + filename); + } catch (IOException e) { + Log.e("opencv", "error reading file: " + + filename); + } + } + + } + +} diff --git a/android/android-jni/src/com/opencv/calibration/ChessBoardChooser.java b/android/android-jni/src/com/opencv/calibration/ChessBoardChooser.java new file mode 100644 index 0000000..b13a8e8 --- /dev/null +++ b/android/android-jni/src/com/opencv/calibration/ChessBoardChooser.java @@ -0,0 +1,75 @@ +package com.opencv.calibration; + +import com.opencv.R; +import com.opencv.jni.Size; + +import android.app.Activity; +import android.content.Context; +import android.content.SharedPreferences; +import android.content.SharedPreferences.Editor; +import android.os.Bundle; +import android.view.View; +import android.widget.AdapterView; +import android.widget.AdapterView.OnItemSelectedListener; +import android.widget.Spinner; + +public class ChessBoardChooser extends Activity { + public static final String CHESS_SIZE = "chess_size"; + public static final int DEFAULT_WIDTH = 6; + public static final int DEFAULT_HEIGHT = 8; + public static final int LOWEST = 3; + + class DimChooser implements OnItemSelectedListener { + private String dim; + + public DimChooser(String dim) { + this.dim = dim; + } + + @Override + public void onItemSelected(AdapterView arg0, View arg1, int pos, + long arg3) { + SharedPreferences settings = getSharedPreferences(CHESS_SIZE, 0); + Editor editor = settings.edit(); + editor.putInt(dim, pos + LOWEST); + editor.commit(); + } + + @Override + public void onNothingSelected(AdapterView arg0) { + } + } + + @Override + protected void onCreate(Bundle savedInstanceState) { + // TODO Auto-generated method stub + super.onCreate(savedInstanceState); + setContentView(R.layout.chesssizer); + // Restore preferences + SharedPreferences settings = getSharedPreferences(CHESS_SIZE, 0); + int width = settings.getInt("width", 6); + + int height = settings.getInt("height", 8); + + Spinner wspin, hspin; + wspin = (Spinner) findViewById(R.id.rows); + hspin = (Spinner) findViewById(R.id.cols); + + wspin.setSelection(width - LOWEST); + hspin.setSelection(height - LOWEST); + + wspin.setOnItemSelectedListener(new DimChooser("width")); + hspin.setOnItemSelectedListener(new DimChooser("height")); + + } + + public static Size getPatternSize(Context ctx) { + SharedPreferences settings = ctx.getSharedPreferences(CHESS_SIZE, 0); + int width = settings.getInt("width", 6); + + int height = settings.getInt("height", 8); + + return new Size(width, height); + } + +} diff --git a/android/android-jni/src/com/opencv/calibration/services/CalibrationService.java b/android/android-jni/src/com/opencv/calibration/services/CalibrationService.java new file mode 100644 index 0000000..754e2f1 --- /dev/null +++ b/android/android-jni/src/com/opencv/calibration/services/CalibrationService.java @@ -0,0 +1,166 @@ +package com.opencv.calibration.services; + +import java.io.File; +import java.io.IOException; + +import android.app.Notification; +import android.app.NotificationManager; +import android.app.PendingIntent; +import android.app.Service; +import android.content.Intent; +import android.os.Binder; +import android.os.IBinder; +import android.util.Log; +import android.widget.Toast; + + +import com.opencv.R; +import com.opencv.calibration.CalibrationViewer; +import com.opencv.calibration.Calibrator; +import com.opencv.calibration.Calibrator.CalibrationCallback; + + +public class CalibrationService extends Service implements CalibrationCallback { + + Class activity; + int icon; + 
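	// [editor's note, hypothetical usage sketch, not part of the patch]
	// A client activity would typically bind to this service and then hand
	// it a configured Calibrator; every name below except the service API
	// itself is illustrative:
	//
	//   ServiceConnection conn = new ServiceConnection() {
	//     public void onServiceConnected(ComponentName n, IBinder binder) {
	//       CalibrationService s = ((CalibrationService.CalibrationServiceBinder)
	//           binder).getService();
	//       try {
	//         s.startCalibrating(MyActivity.class, R.drawable.icon,
	//             calibrator, new File("/sdcard/calibration.yml"));
	//       } catch (IOException e) { /* report the failure */ }
	//     }
	//     public void onServiceDisconnected(ComponentName n) {}
	//   };
	//   bindService(new Intent(this, CalibrationService.class), conn,
	//       Context.BIND_AUTO_CREATE);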
File calibration_file; + public void startCalibrating(Class activitycaller,int icon_id, Calibrator calibrator, File calibration_file) + throws IOException { + activity = activitycaller; + icon = icon_id; + // Display a notification about us starting. We put an icon in the + // status bar. + showNotification(); + this.calibration_file = calibration_file; + calibrator.setCallback(this); + calibrator.calibrate(calibration_file); + + + } + + private NotificationManager mNM; + + /** + * Class for clients to access. Because we know this service always runs in + * the same process as its clients, we don't need to deal with IPC. + */ + public class CalibrationServiceBinder extends Binder { + public CalibrationService getService() { + return CalibrationService.this; + } + } + + @Override + public int onStartCommand(Intent intent, int flags, int startId) { + Log.i("LocalService", "Received start id " + startId + ": " + intent); + // We want this service to continue running until it is explicitly + // stopped, so return sticky. + return START_NOT_STICKY; + } + + @Override + public void onCreate() { + mNM = (NotificationManager) getSystemService(NOTIFICATION_SERVICE); + + + } + + @Override + public void onDestroy() { + // Cancel the persistent notification. + // mNM.cancel(R.string.calibration_service_started); + + // Tell the user we stopped. + Toast.makeText(this, R.string.calibration_service_finished, + Toast.LENGTH_SHORT).show(); + } + + private final IBinder mBinder = new CalibrationServiceBinder(); + + @Override + public IBinder onBind(Intent intent) { + return mBinder; + } + + /** + * Show a notification while this service is running. + */ + private void showNotification() { + // In this sample, we'll use the same text for the ticker and the + // expanded notification + CharSequence text = getText(R.string.calibration_service_started); + + // Set the icon, scrolling text and timestamp + Notification notification = new Notification(icon, text, + System.currentTimeMillis()); + + // The PendingIntent to launch our activity if the user selects this + // notification + PendingIntent contentIntent = PendingIntent.getActivity(this, 0, + new Intent(this, activity), 0); + + // Set the info for the views that show in the notification panel. + notification.setLatestEventInfo(this, + getText(R.string.calibration_service_label), text, + contentIntent); + + notification.defaults |= Notification.DEFAULT_SOUND; + // Send the notification. + // We use a layout id because it is a unique number. We use it later to + // cancel. + mNM.notify(R.string.calibration_service_started, notification); + } + + /** + * Show a notification while this service is running. + */ + private void doneNotification() { + // In this sample, we'll use the same text for the ticker and the + // expanded notification + CharSequence text = getText(R.string.calibration_service_finished); + + // Set the icon, scrolling text and timestamp + Notification notification = new Notification(icon, text, + System.currentTimeMillis()); + + Intent intent = new Intent(this,CalibrationViewer.class); + intent.putExtra("calibfile", calibration_file.getAbsolutePath()); + // The PendingIntent to launch our activity if the user selects this + // notification + PendingIntent contentIntent = PendingIntent.getActivity(this, 0, + intent, 0); + + + // Set the info for the views that show in the notification panel. 
+ notification.setLatestEventInfo(this, + getText(R.string.calibration_service_label), text, + contentIntent); + + + notification.defaults |= Notification.DEFAULT_SOUND; + // Send the notification. + // We use a layout id because it is a unique number. We use it later to + // cancel. + mNM.notify(R.string.calibration_service_started, notification); + } + + @Override + public void onFoundChessboard(Calibrator calibrator) { + // TODO Auto-generated method stub + + } + + @Override + public void onDoneCalibration(Calibrator calibration, File calibfile) { + doneNotification(); + stopSelf(); + } + + @Override + public void onFailedChessboard(Calibrator calibrator) { + // TODO Auto-generated method stub + + } + +} diff --git a/android/android-jni/src/com/opencv/camera/CameraConfig.java b/android/android-jni/src/com/opencv/camera/CameraConfig.java new file mode 100644 index 0000000..77f4e60 --- /dev/null +++ b/android/android-jni/src/com/opencv/camera/CameraConfig.java @@ -0,0 +1,166 @@ +package com.opencv.camera; + +import com.opencv.R; + +import android.app.Activity; +import android.content.Context; +import android.content.SharedPreferences; +import android.content.SharedPreferences.Editor; +import android.os.Bundle; +import android.view.View; +import android.widget.AdapterView; +import android.widget.AdapterView.OnItemSelectedListener; +import android.widget.Spinner; + +public class CameraConfig extends Activity { + public static final String CAMERA_SETTINGS = "CAMERA_SETTINGS"; + public static final String CAMERA_MODE = "camera_mode"; + public static final String IMAGE_WIDTH = "IMAGE_WIDTH"; + public static final String IMAGE_HEIGHT = "IMAGE_HEIGHT"; + public static final int CAMERA_MODE_BW = 0; + public static final int CAMERA_MODE_COLOR = 1; + + public static int readCameraMode(Context ctx) { + // Restore preferences + SharedPreferences settings = ctx.getSharedPreferences(CAMERA_SETTINGS, + 0); + int mode = settings.getInt(CAMERA_MODE, CAMERA_MODE_BW); + return mode; + } + + static public void setCameraMode(Context context, String mode) { + int m = 0; + if (mode.equals("BW")) { + m = CAMERA_MODE_BW; + } else if (mode.equals("color")) + m = CAMERA_MODE_COLOR; + setCameraMode(context, m); + } + + private static String sizeToString(int[] size) { + return size[0] + "x" + size[1]; + } + + private static void parseStrToSize(String ssize, int[] size) { + String sz[] = ssize.split("x"); + size[0] = Integer.valueOf(sz[0]); + size[1] = Integer.valueOf(sz[1]); + } + + public static void readImageSize(Context ctx, int[] size) { + // Restore preferences + SharedPreferences settings = ctx.getSharedPreferences(CAMERA_SETTINGS, + 0); + size[0] = settings.getInt(IMAGE_WIDTH, 600); + size[1] = settings.getInt(IMAGE_HEIGHT, 600); + + } + + public static void setCameraMode(Context ctx, int mode) { + // Restore preferences + SharedPreferences settings = ctx.getSharedPreferences(CAMERA_SETTINGS, + 0); + Editor editor = settings.edit(); + editor.putInt(CAMERA_MODE, mode); + editor.commit(); + } + + public static void setImageSize(Context ctx, String strsize) { + int size[] = { 0, 0 }; + parseStrToSize(strsize, size); + setImageSize(ctx, size[0], size[1]); + } + + public static void setImageSize(Context ctx, int width, int height) { + // Restore preferences + SharedPreferences settings = ctx.getSharedPreferences(CAMERA_SETTINGS, + 0); + Editor editor = settings.edit(); + editor.putInt(IMAGE_WIDTH, width); + editor.putInt(IMAGE_HEIGHT, height); + editor.commit(); + } + + @Override + protected void onCreate(Bundle 
savedInstanceState) { + // TODO Auto-generated method stub + super.onCreate(savedInstanceState); + setContentView(R.layout.camerasettings); + int mode = readCameraMode(this); + int size[] = { 0, 0 }; + readImageSize(this, size); + + final Spinner size_spinner; + final Spinner mode_spinner; + size_spinner = (Spinner) findViewById(R.id.image_size); + mode_spinner = (Spinner) findViewById(R.id.camera_mode); + + String strsize = sizeToString(size); + String strmode = modeToString(mode); + + String sizes[] = getResources().getStringArray(R.array.image_sizes); + + int i = 1; + for (String x : sizes) { + if (x.equals(strsize)) + break; + i++; + } + if(i <= sizes.length) + size_spinner.setSelection(i-1); + + i = 1; + String modes[] = getResources().getStringArray(R.array.camera_mode); + for (String x :modes) { + if (x.equals(strmode)) + break; + i++; + } + if(i <= modes.length) + mode_spinner.setSelection(i-1); + + size_spinner.setOnItemSelectedListener(new OnItemSelectedListener() { + + @Override + public void onItemSelected(AdapterView arg0, View spinner, + int position, long arg3) { + Object o = size_spinner.getItemAtPosition(position); + if (o != null) + setImageSize(spinner.getContext(), (String) o); + } + + @Override + public void onNothingSelected(AdapterView arg0) { + + } + }); + mode_spinner.setOnItemSelectedListener(new OnItemSelectedListener() { + + @Override + public void onItemSelected(AdapterView arg0, View spinner, + int position, long arg3) { + Object o = mode_spinner.getItemAtPosition(position); + if (o != null) + setCameraMode(spinner.getContext(), (String) o); + + } + + @Override + public void onNothingSelected(AdapterView arg0) { + + } + }); + + } + + private String modeToString(int mode) { + switch (mode) { + case CAMERA_MODE_BW: + return "BW"; + case CAMERA_MODE_COLOR: + return "color"; + default: + return ""; + } + } +} diff --git a/android/android-jni/src/com/opencv/camera/NativePreviewer.java b/android/android-jni/src/com/opencv/camera/NativePreviewer.java index 13af609..6707f8f 100644 --- a/android/android-jni/src/com/opencv/camera/NativePreviewer.java +++ b/android/android-jni/src/com/opencv/camera/NativePreviewer.java @@ -22,35 +22,44 @@ import com.opencv.camera.NativeProcessor.PoolCallback; public class NativePreviewer extends SurfaceView implements SurfaceHolder.Callback, Camera.PreviewCallback, NativeProcessorCallback { - SurfaceHolder mHolder; - Camera mCamera; - private NativeProcessor processor; - - private int preview_width, preview_height; - private int pixelformat; - private PixelFormat pixelinfo; - - public NativePreviewer(Context context,AttributeSet attributes){ - super(context,attributes); + /** Constructor useful for defining a NativePreviewer in android layout xml + * + * @param context + * @param attributes + */ + public NativePreviewer(Context context, AttributeSet attributes) { + super(context, attributes); listAllCameraMethods(); // Install a SurfaceHolder.Callback so we get notified when the // underlying surface is created and destroyed. mHolder = getHolder(); mHolder.addCallback(this); mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS); - - this.preview_width = attributes.getAttributeIntValue("opencv", "preview_width", 600); - this.preview_height= attributes.getAttributeIntValue("opencv", "preview_height", 600); - processor = new NativeProcessor(); + /* TODO get this working! 
Can't figure out how to define these in xml + */ + preview_width = attributes.getAttributeIntValue("opencv", + "preview_width", 600); + preview_height = attributes.getAttributeIntValue("opencv", + "preview_height", 600); - setZOrderMediaOverlay(false); + Log.d("NativePreviewer", "Trying to use preview size of " + preview_width + " " + preview_height); + + processor = new NativeProcessor(); + + setZOrderMediaOverlay(false); } + + /** + * + * @param context + * @param preview_width the desired camera preview width - will attempt to get as close to this as possible + * @param preview_height the desired camera preview height + */ public NativePreviewer(Context context, int preview_width, int preview_height) { super(context); - listAllCameraMethods(); // Install a SurfaceHolder.Callback so we get notified when the @@ -63,62 +72,38 @@ public class NativePreviewer extends SurfaceView implements this.preview_height = preview_height; processor = new NativeProcessor(); - setZOrderMediaOverlay(false); + setZOrderMediaOverlay(false); } - Handler camerainiter = new Handler(); - void initCamera(SurfaceHolder holder) throws InterruptedException{ - if(mCamera == null){ - // The Surface has been created, acquire the camera and tell it where - // to draw. - int i = 0; - while(i++ < 5){ - try{ - mCamera = Camera.open(); - break; - }catch(RuntimeException e){ - Thread.sleep(200); - } - } - try { - mCamera.setPreviewDisplay(holder); - } catch (IOException exception) { - mCamera.release(); - mCamera = null; - - }catch(RuntimeException e){ - Log.e("camera", "stacktrace", e); - } - } - } - void releaseCamera(){ - if(mCamera !=null){ - // Surface will be destroyed when we return, so stop the preview. - // Because the CameraDevice object is not a shared resource, it's very - // important to release it when the activity is paused. - mCamera.stopPreview(); - mCamera.release(); - } + /** Only call in the oncreate function of the instantiating activity + * + * @param width desired width + * @param height desired height + */ + public void setPreviewSize(int width, int height){ + preview_width = width; + preview_height = height; + + Log.d("NativePreviewer", "Trying to use preview size of " + preview_width + " " + preview_height); - // processor = null; - mCamera = null; - mAcb = null; - mPCWB = null; + } + + public void setParamsFromPrefs(Context ctx){ + int size[] ={0,0}; + CameraConfig.readImageSize(ctx, size); + int mode = CameraConfig.readCameraMode(ctx); + setPreviewSize(size[0], size[1]); + setGrayscale(mode == CameraConfig.CAMERA_MODE_BW ? true : false); } public void surfaceCreated(SurfaceHolder holder) { - - } public void surfaceDestroyed(SurfaceHolder holder) { - releaseCamera(); - } - private boolean hasAutoFocus = false; public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) { try { @@ -128,44 +113,48 @@ public class NativePreviewer extends SurfaceView implements e.printStackTrace(); return; } - - + // Now that the size is known, set up the camera parameters and begin // the preview. 
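+		// The loop below scans the supported preview sizes and keeps the one
+		// whose width is closest to the requested preview_width, so callers
+		// need not request an exactly supported size.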
Camera.Parameters parameters = mCamera.getParameters(); - List pvsizes = mCamera.getParameters().getSupportedPreviewSizes(); + List pvsizes = mCamera.getParameters() + .getSupportedPreviewSizes(); int best_width = 1000000; int best_height = 1000000; - for(Size x: pvsizes){ - if(x.width - preview_width >= 0 && x.width <= best_width){ + int bdist = 100000; + for (Size x : pvsizes) { + if (Math.abs(x.width - preview_width) < bdist) { + bdist = Math.abs(x.width - preview_width); best_width = x.width; best_height = x.height; } } preview_width = best_width; preview_height = best_height; - List fmodes = mCamera.getParameters().getSupportedFocusModes(); - + Log.d("NativePreviewer", "Determined compatible preview size is: (" + preview_width + "," + preview_height+")"); + + List fmodes = mCamera.getParameters().getSupportedFocusModes(); + int idx = fmodes.indexOf(Camera.Parameters.FOCUS_MODE_INFINITY); - if(idx != -1){ + if (idx != -1) { parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_INFINITY); - }else if(fmodes.indexOf(Camera.Parameters.FOCUS_MODE_FIXED) != -1){ + } else if (fmodes.indexOf(Camera.Parameters.FOCUS_MODE_FIXED) != -1) { parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_FIXED); } - if(fmodes.indexOf(Camera.Parameters.FOCUS_MODE_AUTO) != -1){ - hasAutoFocus = true; + if (fmodes.indexOf(Camera.Parameters.FOCUS_MODE_AUTO) != -1) { + hasAutoFocus = true; } - - List scenemodes = mCamera.getParameters().getSupportedSceneModes(); - if(scenemodes != null) - if(scenemodes.indexOf(Camera.Parameters.SCENE_MODE_STEADYPHOTO) != -1){ - parameters.setSceneMode(Camera.Parameters.SCENE_MODE_STEADYPHOTO); - } - - + + List scenemodes = mCamera.getParameters() + .getSupportedSceneModes(); + if (scenemodes != null) + if (scenemodes.indexOf(Camera.Parameters.SCENE_MODE_STEADYPHOTO) != -1) { + parameters + .setSceneMode(Camera.Parameters.SCENE_MODE_STEADYPHOTO); + } parameters.setPreviewSize(preview_width, preview_height); @@ -194,68 +183,83 @@ public class NativePreviewer extends SurfaceView implements mCamera.startPreview(); - //postautofocus(0); } + public void postautofocus(int delay) { - if(hasAutoFocus) + if (hasAutoFocus) handler.postDelayed(autofocusrunner, delay); - + } - private Runnable autofocusrunner = new Runnable() { - - @Override - public void run() { - mCamera.autoFocus(autocallback); - - } - }; - - Camera.AutoFocusCallback autocallback = new Camera.AutoFocusCallback() { - - @Override - public void onAutoFocus(boolean success, Camera camera) { - if(!success) - postautofocus(1000); - } - }; - Handler handler = new Handler(); /** - * This method will list all methods of the android.hardware.Camera class, - * even the hidden ones. With the information it provides, you can use the - * same approach I took below to expose methods that were written but hidden - * in eclair + * Demonstration of how to use onPreviewFrame. 
In this case I'm not + * processing the data, I'm just adding the buffer back to the buffer queue + * for re-use */ - private void listAllCameraMethods() { - try { - Class c = Class.forName("android.hardware.Camera"); - Method[] m = c.getMethods(); - for (int i = 0; i < m.length; i++) { - Log.d("NativePreviewer", " method:" + m[i].toString()); - } - } catch (Exception e) { - // TODO Auto-generated catch block - Log.e("NativePreviewer", e.toString()); + public void onPreviewFrame(byte[] data, Camera camera) { + + if (start == null) { + start = new Date(); } + + processor.post(data, preview_width, preview_height, pixelformat, + System.nanoTime(), this); + + fcount++; + if (fcount % 100 == 0) { + double ms = (new Date()).getTime() - start.getTime(); + Log.i("NativePreviewer", "fps:" + fcount / (ms / 1000.0)); + start = new Date(); + fcount = 0; + } + + } + + @Override + public void onDoneNativeProcessing(byte[] buffer) { + addCallbackBuffer(buffer); + } + + public void addCallbackStack(LinkedList callbackstack) { + processor.addCallbackStack(callbackstack); } /** - * These variables are re-used over and over by addCallbackBuffer + * This must be called when the activity pauses, in Activity.onPause This + * has the side effect of clearing the callback stack. + * */ - Method mAcb; + public void onPause() { + + releaseCamera(); + + addCallbackStack(null); + + processor.stop(); + + } + + public void onResume() { + + processor.start(); + + } + + private Method mPCWB; + + private void initForPCWB() { - private void initForACB() { try { - mAcb = Class.forName("android.hardware.Camera").getMethod( - "addCallbackBuffer", byte[].class); + mPCWB = Class.forName("android.hardware.Camera").getMethod( + "setPreviewCallbackWithBuffer", PreviewCallback.class); } catch (Exception e) { - Log - .e("NativePreviewer", - "Problem setting up for addCallbackBuffer: " - + e.toString()); + Log.e("NativePreviewer", + "Problem setting up for setPreviewCallbackWithBuffer: " + + e.toString()); } + } /** @@ -275,26 +279,9 @@ public class NativePreviewer extends SurfaceView implements mAcb.invoke(mCamera, b); } catch (Exception e) { - Log.e("NativePreviewer", "invoking addCallbackBuffer failed: " - + e.toString()); - } - } - - Method mPCWB; - - private void initForPCWB() { - - try { - - mPCWB = Class.forName("android.hardware.Camera").getMethod( - "setPreviewCallbackWithBuffer", PreviewCallback.class); - - } catch (Exception e) { Log.e("NativePreviewer", - "Problem setting up for setPreviewCallbackWithBuffer: " - + e.toString()); + "invoking addCallbackBuffer failed: " + e.toString()); } - } /** @@ -321,7 +308,8 @@ public class NativePreviewer extends SurfaceView implements } } - protected void clearPreviewCallbackWithBuffer() { + @SuppressWarnings("unused") + private void clearPreviewCallbackWithBuffer() { // mCamera.setPreviewCallback(this); // return; try { @@ -341,69 +329,117 @@ public class NativePreviewer extends SurfaceView implements } } - Date start; - int fcount = 0; - boolean processing = false; - /** - * Demonstration of how to use onPreviewFrame. 
In this case I'm not - * processing the data, I'm just adding the buffer back to the buffer queue - * for re-use + * These variables are re-used over and over by addCallbackBuffer */ - public void onPreviewFrame(byte[] data, Camera camera) { + private Method mAcb; - if (start == null) { - start = new Date(); + private void initForACB() { + try { + + mAcb = Class.forName("android.hardware.Camera").getMethod( + "addCallbackBuffer", byte[].class); + + } catch (Exception e) { + Log.e("NativePreviewer", + "Problem setting up for addCallbackBuffer: " + e.toString()); } + } - - processor.post(data, preview_width, preview_height, pixelformat, System.nanoTime(), - this); - - fcount++; - if (fcount % 100 == 0) { - double ms = (new Date()).getTime() - start.getTime(); - Log.i("NativePreviewer", "fps:" + fcount / (ms / 1000.0)); - start = new Date(); - fcount = 0; + private Runnable autofocusrunner = new Runnable() { + + @Override + public void run() { + mCamera.autoFocus(autocallback); } + }; - + private Camera.AutoFocusCallback autocallback = new Camera.AutoFocusCallback() { - } + @Override + public void onAutoFocus(boolean success, Camera camera) { + if (!success) + postautofocus(1000); + } + }; - @Override - public void onDoneNativeProcessing(byte[] buffer) { - addCallbackBuffer(buffer); + /** + * This method will list all methods of the android.hardware.Camera class, + * even the hidden ones. With the information it provides, you can use the + * same approach I took below to expose methods that were written but hidden + * in eclair + */ + private void listAllCameraMethods() { + try { + Class c = Class.forName("android.hardware.Camera"); + Method[] m = c.getMethods(); + for (int i = 0; i < m.length; i++) { + Log.d("NativePreviewer", " method:" + m[i].toString()); + } + } catch (Exception e) { + // TODO Auto-generated catch block + Log.e("NativePreviewer", e.toString()); + } } - public void addCallbackStack(LinkedList callbackstack) { - processor.addCallbackStack(callbackstack); + private void initCamera(SurfaceHolder holder) throws InterruptedException { + if (mCamera == null) { + // The Surface has been created, acquire the camera and tell it + // where + // to draw. + int i = 0; + while (i++ < 5) { + try { + mCamera = Camera.open(); + break; + } catch (RuntimeException e) { + Thread.sleep(200); + } + } + try { + mCamera.setPreviewDisplay(holder); + } catch (IOException exception) { + mCamera.release(); + mCamera = null; + + } catch (RuntimeException e) { + Log.e("camera", "stacktrace", e); + } + } } - /**This must be called when the activity pauses, in Activity.onPause - * This has the side effect of clearing the callback stack. - * - */ - public void onPause() { - - releaseCamera(); - - addCallbackStack(null); - - processor.stop(); - - - - - + private void releaseCamera() { + if (mCamera != null) { + // Surface will be destroyed when we return, so stop the preview. + // Because the CameraDevice object is not a shared resource, it's + // very + // important to release it when the activity is paused. 
+			mCamera.stopPreview();
+			mCamera.release();
+		}
+
+		// processor = null;
+		mCamera = null;
+		mAcb = null;
+		mPCWB = null;
 	}
 
-	public void onResume() {
-
-
-		processor.start();
-
+	private Handler handler = new Handler();
+
+	private Date start;
+	private int fcount = 0;
+	private boolean hasAutoFocus = false;
+	private SurfaceHolder mHolder;
+	private Camera mCamera;
+
+	private NativeProcessor processor;
+
+	private int preview_width, preview_height;
+	private int pixelformat;
+	private PixelFormat pixelinfo;
+
+	public void setGrayscale(boolean b) {
+		processor.setGrayscale(b);
 	}
 
diff --git a/android/android-jni/src/com/opencv/camera/NativeProcessor.java b/android/android-jni/src/com/opencv/camera/NativeProcessor.java
index caddcfb..4dce3bb 100644
--- a/android/android-jni/src/com/opencv/camera/NativeProcessor.java
+++ b/android/android-jni/src/com/opencv/camera/NativeProcessor.java
@@ -11,8 +11,153 @@ import android.util.Log;
 import com.opencv.jni.image_pool;
 import com.opencv.jni.opencv;
 
+/** The NativeProcessor is a native processing stack engine.
+ *
+ * What this means is that the NativeProcessor handles loading
+ * live camera frames into native memory space, i.e. the image_pool,
+ * and then calling a stack of PoolCallback's, passing them the
+ * image_pool.
+ *
+ * The image_pool index 0 is populated with the live video image.
+ *
+ * Any modifications to the pool are made in place, so you may pass
+ * on changes to the pool to the next PoolCallback in the stack.
+ */
 public class NativeProcessor {
+	/** Users that would like access to live video frames should implement
+	 * a PoolCallback. The idx and pool contain the images; specifically,
+	 * idx == 0 is the live video frame.
+	 */
+	static public interface PoolCallback {
+		void process(int idx, image_pool pool, long timestamp,
+				NativeProcessor nativeProcessor);
+	}
+
+	/** At every frame, each PoolCallback is called in order and is passed
+	 * the same pool and index.
+	 *
+	 * @param stack A list of PoolCallback objects that will be called in order
+	 */
+	public void addCallbackStack(LinkedList<PoolCallback> stack) {
+		try {
+			while (!stacklock.tryLock(10, TimeUnit.MILLISECONDS)) {
+			}
+			try {
+				nextStack = stack;
+			} finally {
+				stacklock.unlock();
+			}
+		} catch (InterruptedException e) {
+			// TODO Auto-generated catch block
+			e.printStackTrace();
+		}
+	}
+
+	/**
+	 * Create a NativeProcessor. The processor will not start running until
+	 * start is called, at which point it will operate in its own thread and
+	 * sleep until a post is called. The processor should not be started until
+	 * an onSurfaceChange event, and should be shut down when the surface is
+	 * destroyed by calling interrupt.
+	 */
+	public NativeProcessor() {
+		gray_scale_only = false;
+	}
+
+	/** Grayscale only is much faster because the yuv does not get decoded,
+	 * and grayscale is only one byte per pixel - giving fast opengl texture
+	 * loading.
+	 *
+	 * You still have access to the whole yuv image, but only grayscale is
+	 * immediately usable without further effort.
+	 *
+	 * Suggestion - use grayscale only, and save your yuv images to disk if
+	 * you would like color images.
+	 *
+	 * Also, in grayscale mode the images in the pool are only single
+	 * channel, so please keep this in mind when accessing the images -
+	 * check cv::Mat::channels() or cv::Mat::type() if you're messing with
+	 * color channels.
+	 *
+	 * @param grayscale true if you want to only process grayscale images
+	 */
+	public void setGrayscale(boolean grayscale) {
+		gray_scale_only = grayscale;
+	}
+
+	/**
+	 * A callback that allows the NativeProcessor to pass back the buffer when
+	 * it has completed processing a frame.
+	 */
+	static protected interface NativeProcessorCallback {
+		/**
+		 * Called after processing; meant to be received by the
+		 * NativePreviewer, which reuses the byte buffer for the camera
+		 * preview...
+		 *
+		 * @param buffer
+		 *            the buffer passed to the NativeProcessor with post.
+		 */
+		void onDoneNativeProcessing(byte[] buffer);
+	}
+
+	protected void stop() {
+		mthread.interrupt();
+		try {
+			mthread.join();
+		} catch (InterruptedException e) {
+			Log.w("NativeProcessor",
+					"interrupted while stopping " + e.getMessage());
+		}
+		mthread = null;
+	}
+
+	protected void start() {
+		mthread = new ProcessorThread();
+		mthread.start();
+	}
+
+	/**
+	 * post is used to notify the processor that a preview frame is ready;
+	 * this will return almost immediately. If the processor is busy, it
+	 * returns false and is essentially a nop.
+	 *
+	 * @param buffer
+	 *            a preview frame from the Android Camera onPreviewFrame
+	 *            callback
+	 * @param width
+	 *            of preview frame
+	 * @param height
+	 *            of preview frame
+	 * @param format
+	 *            of preview frame
+	 * @param timestamp
+	 *            capture time as supplied by the caller (NativePreviewer
+	 *            passes System.nanoTime())
+	 * @param callback
+	 *            receiver of the buffer once processing is done
+	 * @return true if the processor wasn't busy and accepted the post, false
+	 *         if the processor is still processing.
+	 */
+	protected boolean post(byte[] buffer, int width, int height, int format,
+			long timestamp, NativeProcessorCallback callback) {
+
+		lock.lock();
+		try {
+			NPPostObject pobj = new NPPostObject(buffer, width, height,
+					format, timestamp, callback);
+			postobjects.addFirst(pobj);
+		} finally {
+			lock.unlock();
+		}
+		return true;
+	}
+
 	private class ProcessorThread extends Thread {
 
 		private void process(NPPostObject pobj) throws Exception {
@@ -20,7 +165,7 @@ public class NativeProcessor {
 			if (pobj.format == PixelFormat.YCbCr_420_SP) {
 				// add as color image, because we know how to decode this
 				opencv.addYUVtoPool(pool, pobj.buffer, 0, pobj.width,
-						pobj.height, false);
+						pobj.height, gray_scale_only);
 
 			} else if (pobj.format == PixelFormat.YCbCr_422_SP) {
 				// add as gray image, because this format is not coded
@@ -31,7 +176,6 @@ public class NativeProcessor {
 			} else
 				throw new Exception("bad pixel format!");
 
-
 			for (PoolCallback x : stack) {
 				if (interrupted()) {
 					throw new InterruptedException(
 				}
 				x.process(0, pool, pobj.timestamp, NativeProcessor.this);
 			}
-
-
+
 			pobj.done(); // tell the postobject that we're done doing
-			// all the processing.
-
+			// all the processing.
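+			// done() hands the buffer back to the poster's callback (the
+			// NativePreviewer), which re-queues it with addCallbackBuffer -
+			// see onDoneNativeProcessing.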
} @@ -53,8 +195,8 @@ public class NativeProcessor { try { while (true) { yield(); - - while(!stacklock.tryLock(5, TimeUnit.MILLISECONDS)){ + + while (!stacklock.tryLock(5, TimeUnit.MILLISECONDS)) { } try { if (nextStack != null) { @@ -64,25 +206,26 @@ public class NativeProcessor { } finally { stacklock.unlock(); } - + NPPostObject pobj = null; - - while(!lock.tryLock(5, TimeUnit.MILLISECONDS)){ + + while (!lock.tryLock(5, TimeUnit.MILLISECONDS)) { } try { - if(postobjects.isEmpty()) continue; + if (postobjects.isEmpty()) + continue; pobj = postobjects.removeLast(); - + } finally { lock.unlock(); - + } - - if(interrupted())throw new InterruptedException(); - - if(stack != null && pobj != null) + + if (interrupted()) + throw new InterruptedException(); + + if (stack != null && pobj != null) process(pobj); - } } catch (InterruptedException e) { @@ -99,102 +242,10 @@ public class NativeProcessor { } } - - ProcessorThread mthread; - - static public interface PoolCallback { - void process(int idx, image_pool pool,long timestamp, NativeProcessor nativeProcessor); - } - - Lock stacklock = new ReentrantLock(); - - LinkedList nextStack; - - void addCallbackStack(LinkedList stack) { - - try { - while (!stacklock.tryLock(10, TimeUnit.MILLISECONDS)) { - - } - try { - nextStack = stack; - } finally { - stacklock.unlock(); - } - } catch (InterruptedException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - - } - - } - - /** - * A callback that allows the NativeProcessor to pass back the buffer when - * it has completed processing a frame. - * - * @author ethan - * - */ - static public interface NativeProcessorCallback { - /** - * Called after processing, meant to be recieved by the NativePreviewer - * wich reuses the byte buffer for the camera preview... - * - * @param buffer - * the buffer passed to the NativeProcessor with post. - */ - void onDoneNativeProcessing(byte[] buffer); - } - - /** - * Create a NativeProcessor. The processor will not start running until - * start is called, at which point it will operate in its own thread and - * sleep until a post is called. The processor should not be started until - * an onSurfaceChange event, and should be shut down when the surface is - * destroyed by calling interupt. - * - */ - public NativeProcessor() { - - } - - /** - * post is used to notify the processor that a preview frame is ready, this - * will return almost immediately. if the processor is busy, returns false - * and is essentially a nop. - * - * @param buffer - * a preview frame from the Android Camera onPreviewFrame - * callback - * @param width - * of preview frame - * @param height - * of preview frame - * @param format - * of preview frame - * @return true if the processor wasn't busy and accepted the post, false if - * the processor is still processing. 
-	 */
-
-	public boolean post(byte[] buffer, int width, int height, int format,long timestamp,
-			NativeProcessorCallback callback) {
-
-		lock.lock();
-		try {
-			NPPostObject pobj = new NPPostObject(buffer, width, height,
-					format,timestamp, callback);
-			postobjects.addFirst(pobj);
-		} finally {
-			lock.unlock();
-		}
-		return true;
-
-	}
-
+
 	static private class NPPostObject {
-		public NPPostObject(byte[] buffer, int width, int height, int format, long timestamp,
-				NativeProcessorCallback callback) {
+		public NPPostObject(byte[] buffer, int width, int height, int format,
+				long timestamp, NativeProcessorCallback callback) {
 			this.buffer = buffer;
 			this.width = width;
 			this.height = height;
@@ -215,6 +266,7 @@ public class NativeProcessor {
 		NativeProcessorCallback callback;
 	}
 
+	private LinkedList<NPPostObject> postobjects = new LinkedList<NPPostObject>();
 
 	private image_pool pool = new image_pool();
 
@@ -222,20 +274,12 @@ public class NativeProcessor {
 	private final Lock lock = new ReentrantLock();
 
 	private LinkedList<PoolCallback> stack = new LinkedList<PoolCallback>();
+	private boolean gray_scale_only;
+
+	private Lock stacklock = new ReentrantLock();
 
-	void stop() {
-		mthread.interrupt();
-		try {
-			mthread.join();
-		} catch (InterruptedException e) {
-			Log.w("NativeProcessor","interupted while stoping " + e.getMessage());
-		}
-		mthread = null;
-	}
-
-	void start() {
-		mthread = new ProcessorThread();
-		mthread.start();
-	}
+	private LinkedList<PoolCallback> nextStack;
+
+	private ProcessorThread mthread;
 }
\ No newline at end of file
-- 
2.7.4
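
Usage sketch (not part of the patch): the fragment below shows one way the classes above are meant to be wired together - an activity hosting a NativePreviewer, reading the CameraConfig preferences, and pushing a single PoolCallback onto the processor stack. PreviewActivity, the 640x480 request, and the empty callback body are illustrative assumptions, not code from this change.

import java.util.LinkedList;

import android.app.Activity;
import android.os.Bundle;

import com.opencv.camera.NativePreviewer;
import com.opencv.camera.NativeProcessor;
import com.opencv.camera.NativeProcessor.PoolCallback;
import com.opencv.jni.image_pool;

public class PreviewActivity extends Activity {
	private NativePreviewer preview;

	@Override
	protected void onCreate(Bundle savedInstanceState) {
		super.onCreate(savedInstanceState);
		// Request a 640x480 preview; surfaceChanged() will settle on the
		// closest size the camera actually supports.
		preview = new NativePreviewer(this, 640, 480);
		// Pick up the grayscale/size choices persisted by CameraConfig.
		// setPreviewSize (called inside) is documented as onCreate-only.
		preview.setParamsFromPrefs(this);
		setContentView(preview);
	}

	@Override
	protected void onResume() {
		super.onResume();
		preview.onResume(); // starts the NativeProcessor thread

		LinkedList<PoolCallback> stack = new LinkedList<PoolCallback>();
		stack.add(new PoolCallback() {
			@Override
			public void process(int idx, image_pool pool, long timestamp,
					NativeProcessor nativeProcessor) {
				// idx == 0 holds the live frame; edits to the pool happen
				// in place and are visible to later callbacks in the stack.
			}
		});
		preview.addCallbackStack(stack);
	}

	@Override
	protected void onPause() {
		super.onPause();
		// Releases the camera, clears the callback stack, and stops the
		// processor thread, as NativePreviewer.onPause requires.
		preview.onPause();
	}
}

The onPause/onResume forwarding mirrors what NativePreviewer's own javadoc asks of the hosting activity.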