Point truthPosition = new Point(img.cols() / 2, img.rows() / 2);
Rect r = new Rect(new Point(0, 0), truthPosition);
- Core.rectangle(img, r.tl(), r.br(), new Scalar(0), -1/* TODO: CV_FILLED */);
+ Core.rectangle(img, r.tl(), r.br(), new Scalar(0), -1 /*TODO: CV_FILLED*/);
List<Point> corners = new ArrayList<Point>();
corners.add(new Point(truthPosition.x + 1, truthPosition.y + 1));
Size winSize = new Size(2, 2);
Size zeroZone = new Size(-1, -1);
- TermCriteria criteria = new TermCriteria(2/* TODO: CV_TERMCRIT_EPS */, 0, 0.01);
+ TermCriteria criteria = new TermCriteria(2 /*TODO: CV_TERMCRIT_EPS*/, 0, 0.01);
Imgproc.cornerSubPix(img, corners, winSize, zeroZone, criteria);
assertPointEquals(truthPosition, corners.get(0), weakEPS);
}
public void testFitEllipse() {
- Mat points = new Mat(1, 5, CvType.CV_32FC2); // TODO: use the list of
- // Points
+ Mat points = new Mat(1, 5, CvType.CV_32FC2); // TODO: use the list of Points
points.put(0, 0, 0.0, 0.0, -1.0, 1.0, 1.0, 1.0, 1.0, -1.0, -1.0, -1.0);
RotatedRect rrect = new RotatedRect();
import org.opencv.core.Size;
import org.opencv.imgproc.Imgproc;
import org.opencv.objdetect.CascadeClassifier;
+import org.opencv.objdetect.Objdetect;
import org.opencv.test.OpenCVTestCase;
import org.opencv.test.OpenCVTestRunner;
Imgproc.cvtColor(rgbLena, greyLena, Imgproc.COLOR_RGB2GRAY);
// TODO: doesn't detect with 1.1 scale
- cc.detectMultiScale(greyLena, faces, 1.09, 2, 2 /*
- * TODO:
- * CV_HAAR_SCALE_IMAGE
- */, new Size(30, 30));
+ cc.detectMultiScale(greyLena, faces, 1.09, 3, Objdetect.CASCADE_SCALE_IMAGE, new Size(30, 30));
assertEquals(1, faces.size());
}
package org.opencv.test.video;
+import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Size;
-import org.opencv.core.Core;
-import org.opencv.video.Video;
import org.opencv.test.OpenCVTestCase;
+import org.opencv.video.Video;
public class videoTest extends OpenCVTestCase {
Mat prevPts = new Mat(1, 3, CvType.CV_32FC2);
prevPts.put(0, 0, 1.0, 1.0, 5.0, 5.0, 10.0, 10.0);
- Size sz = new Size(5, 5);
+ Size sz = new Size(3, 3);
Video.calcOpticalFlowPyrLK(subLena1, subLena2, prevPts, nextPts, status, err, sz);
assertEquals(0, Core.countNonZero(status));
}
'OpenCV(Test)?Case',\r
'OpenCV(Test)?Runner',\r
'CvException',\r
+ 'CvType'\r
)\r
\r
funcs_ignore_list = (\r
#!/bin/sh
# Build helper: always operate relative to this script's own directory.
cd `dirname $0`
-BUILD_DIR=build_armeabi
-opencv_android=`pwd`/../../../android
+BUILD_DIR=build
+# NOTE(review): absolute user-specific path -- this only works on one
+# machine; the relative `pwd`/../../../android form it replaces was portable.
+# Prefer an environment variable or keep the relative path.
+opencv_android=/home/kir/work/ros_opencv_trunk/opencv/android
opencv_build_dir=$opencv_android/$BUILD_DIR
mkdir -p $BUILD_DIR
#include <opencv2/core/core.hpp>\r
+#include <opencv2/imgproc/imgproc.hpp>\r
+#include <opencv2/contrib/contrib.hpp>\r
#include <opencv2/highgui/highgui.hpp>\r
\r
using namespace cv;\r
+\r
+// Benchmark scratch code (named main11, so not the entry point): per\r
+// iteration, times allocating a 4096x1024 CV_32F matrix, filling it with\r
+// randu() and summing it; then prints every per-iteration time (ms).\r
+int main11(int argc, char* argv[])\r
+{\r
+ TickMeter timer;\r
+ vector<double> times;\r
+\r
+ Scalar x;\r
+ double s = 0.0;\r
+ \r
+ int nIters = 100;\r
+ for (int i = 0; i < nIters; i++)\r
+ {\r
+ timer.start();\r
+ \r
+ // Allocation, random fill, and sum are all inside the timed section.\r
+ Mat m(4096, 1024, CV_32F);\r
+ //m.setTo(Scalar(33.0));\r
+ randu(m, 0, 256);\r
+ x = sum(m);\r
+ \r
+ timer.stop();\r
+ times.push_back(timer.getTimeMilli());\r
+ timer.reset();\r
+ \r
+ // Untimed second sum feeds the checksum so the work is observable.\r
+ x = sum(m);\r
+ s += x[0];\r
+ }\r
+\r
+ for (int i = 0; i < nIters; i++)\r
+ printf("time[%d] = %.2f ms\n", i, times[i]);\r
+ \r
+ // Printing the checksum keeps the compiler from eliding the benchmark.\r
+ printf("s = %f\n", s);\r
+}\r
+\r
+// Benchmark scratch code (not the entry point): compares three ways of\r
+// summing two equalized random 8-bit images -- add(m1, m2, dst),\r
+// m3 = m1 + m2, and dst = m3 + dst -- printing the average time of each\r
+// over 100 iterations plus a checksum so the work cannot be elided.\r
+// NOTE(review): Mat's ctor is Mat(rows, cols, type) -- passing w first\r
+// makes these 1280x720 (rows x cols); if w/h mean width/height the\r
+// arguments look swapped. Confirm intent.\r
+int main3(int argc, char* argv[])\r
+{\r
+ int w = 1280;\r
+ int h = 720;\r
+ \r
+ Mat m1(w, h, CV_8U);\r
+ Mat m2(w, h, CV_8U);\r
+ Mat m3(w, h, CV_8U);\r
+ Mat dst(w, h, CV_8U);\r
+ \r
+ Scalar x;\r
+ double s = 0.0;\r
+ \r
+ TickMeter timer1;\r
+ TickMeter timer2;\r
+ TickMeter timer3;\r
+ \r
+ int nIters = 100;\r
+ for (int i = 0; i < nIters; i++)\r
+ {\r
+ // Fresh random, histogram-equalized inputs; setup is outside the timers.\r
+ randu(m1, 0, 256);\r
+ randu(m2, 0, 256);\r
+ \r
+ equalizeHist(m1, m1);\r
+ equalizeHist(m2, m2);\r
+ \r
+ timer1.start();\r
+ add(m1, m2, dst);\r
+ timer1.stop();\r
+ \r
+ // Untimed; runs between the add() and operator+ measurements.\r
+ normalize(dst, dst, dst.total());\r
+ \r
+ timer2.start();\r
+ m3 = m1 + m2;\r
+ timer2.stop();\r
+ \r
+ timer3.start(); \r
+ dst = m3 + dst;\r
+ timer3.stop();\r
+ \r
+ x = sum(dst);\r
+ s += x[0];\r
+ }\r
+ \r
+ printf("s = %f\n", s);\r
+ printf("timer1 = %.2f ms\n", timer1.getTimeMilli()/nIters);\r
+ printf("timer2 = %.2f ms\n", timer2.getTimeMilli()/nIters);\r
+ printf("timer3 = %.2f ms\n", timer3.getTimeMilli()/nIters);\r
+}\r
+\r
const char* message = "Hello Android!";\r
\r
-int main(int argc, char* argv[])\r
+// Renamed from main() to main2(): this hello-world demo (print message,\r
+// render it into an image, save on Android / show on desktop) is demoted\r
+// so another function can be the entry point.\r
+int main2(int argc, char* argv[])\r
{\r
// print message to console\r
printf("%s\n", message);\r
Size textsize = getTextSize(message, CV_FONT_HERSHEY_COMPLEX, 3, 5, 0);\r
Mat img(textsize.height + 20, textsize.width + 20, CV_32FC1, Scalar(230,230,230));\r
putText(img, message, Point(10, img.rows - 10), CV_FONT_HERSHEY_COMPLEX, 3, Scalar(0, 0, 0), 5);\r
-\r
+ \r
// save\show resulting image\r
#if ANDROID\r
imwrite("/mnt/sdcard/HelloAndroid.png", img);\r
imshow("test", img);\r
waitKey();\r
#endif\r
- return 0;\r
+\r
+return 0;\r
}\r
\r
using namespace std;
using namespace cv;
+// New entry point: minimal minAreaRect() demo -- computes the minimum-area
+// rotated rectangle enclosing four 2-D points and prints its center.
+int main(int argc, char** argv)
+{
+ vector<Point2f> points;
+ points.push_back(Point2f(1.0, 1.0));
+ points.push_back(Point2f(5.0, 1.0));
+ points.push_back(Point2f(4.0, 3.0));
+ points.push_back(Point2f(6.0, 2.0));
+
+ RotatedRect rrect = minAreaRect(points);
+
+ cout << rrect.center << endl;
+
+ return 0;
+}
+
void help()
{
cout << "\nThis program demonstrates the cascade recognizer. Now you can use Haar or LBP features.\n"
String cascadeName = "../../data/haarcascades/haarcascade_frontalface_alt.xml";
String nestedCascadeName = "../../data/haarcascades/haarcascade_eye_tree_eyeglasses.xml";
-int main( int argc, const char** argv )
+int main1( int argc, const char** argv )
{
CvCapture* capture = 0;
Mat frame, frameCopy, image;