Merge pull request #30 from tripzero/master
[profile/ivi/automotive-message-broker.git] / plugins / opencvlux / opencvluxplugin.cpp
/*
Copyright (C) 2012 Intel Corporation

This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.

This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
Lesser General Public License for more details.

You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301  USA
*/

#include "opencvluxplugin.h"
#include "timestamp.h"
#include <listplusplus.h>
#include <superptr.hpp>

#include <iostream>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/objdetect/objdetect.hpp>

#include <QFuture>
#include <QFutureWatcher>
#include <QtConcurrent/QtConcurrent>


#ifdef OPENCL
#include <opencv2/ocl/ocl.hpp>
#endif

#ifdef CUDA
#include <opencv2/gpu/gpu.hpp>
#endif

using namespace std;

const std::string VideoLogging = "VideoLogging";
const std::string DriverDrowsiness = "DriverDrowsiness";
const std::string OpenCL = "OpenCL";

#include "debugout.h"

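// Plugin entry point. AMB loads this shared object and calls create() to
// construct the source; the lambdas below register the plugin's custom
// properties (VideoLogging, DriverDrowsiness, OpenCL) with the property
// system so they can be used like the built-in VehicleProperty types.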
extern "C" AbstractSource * create(AbstractRoutingEngine* routingengine, map<string, string> config)
{
        VehicleProperty::registerProperty(VideoLogging, [](){
                return new BasicPropertyType<bool>(VideoLogging, false);
        });

        VehicleProperty::registerProperty(DriverDrowsiness, [](){
                return new OpenCvLuxPlugin::DriverDrowsinessType(DriverDrowsiness, false);
        });

        VehicleProperty::registerProperty(OpenCL, [](){
                return new BasicPropertyType<bool>(OpenCL, false);
        });

        return new OpenCvLuxPlugin(routingengine, config);
}

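// The constructor reads its tuning from the plugin config map: "threaded",
// "kinect", "fps", "device", "pixelLowerBound", "pixelUpperBound", "opencl",
// "cuda", "ddd" (driver drowsiness detection) and "logging". Missing keys
// fall back to the defaults assigned below.
// An illustrative (hypothetical) configuration for this source might set, e.g.:
//   "device" : "0", "fps" : "30", "threaded" : "true", "pixelUpperBound" : "255",
//   "logging" : "true", "logfile" : "/tmp/video.avi", "codec" : "MJPG"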
OpenCvLuxPlugin::OpenCvLuxPlugin(AbstractRoutingEngine* re, map<string, string> config)
        :AbstractSource(re, config), lastLux(0), speed(0), latitude(0), longitude(0), extBrightness(new VehicleProperty::ExteriorBrightnessType(0))
{
        driverDrowsiness = amb::make_unique(new DriverDrowsinessType(DriverDrowsiness, false));
        openCl = amb::make_unique(new BasicPropertyType<bool>(OpenCL, false));

        shared = amb::make_unique(new Shared);
        shared->parent = this;

        shared->m_capture = NULL;
        shared->mWriter = nullptr;

        shared->threaded = false;
        shared->kinect = false;
        shared->useOpenCl = false;
        shared->useCuda = false;
        shared->loggingOn = false;
        shared->ddd = false;

        shared->fps = 30;
        device = "0";
        shared->pixelLowerBound = 0;
        shared->pixelUpperBound = 255;


        if(config.find("threaded") != config.end())
        {
                shared->threaded = config["threaded"] == "true";
        }

        if(config.find("kinect") != config.end())
        {
                shared->kinect = config["kinect"] == "true";
        }

        if(config.find("fps") != config.end())
        {
                shared->fps = boost::lexical_cast<int>(config["fps"]);
        }

        if(config.find("device") != config.end())
        {
                device = config["device"].c_str();
        }

        if(config.find("pixelLowerBound") != config.end())
        {
                shared->pixelLowerBound = boost::lexical_cast<int>(config["pixelLowerBound"]);

                if(shared->pixelLowerBound < 0)
                {
                        shared->pixelLowerBound = 0;
                }
        }

        if(config.find("pixelUpperBound") != config.end())
        {
                shared->pixelUpperBound = boost::lexical_cast<int>(config["pixelUpperBound"]);

                if(shared->pixelUpperBound > 255)
                        shared->pixelUpperBound = 255;
        }

        if(config.find("opencl") != config.end())
        {
#ifdef OPENCL
                shared->useOpenCl = config["opencl"] == "true";
                openCl->setValue(shared->useOpenCl);
#else
                DebugOut(DebugOut::Warning) << "OpenCL support was not built into this plugin.  Disabling." << endl;
                shared->useOpenCl = false;
#endif
        }

        if(config.find("cuda") != config.end())
        {
#ifdef CUDA
                shared->useCuda = config["cuda"] == "true";
#endif
        }

        if(config.find("ddd") != config.end())
        {
                shared->ddd = config["ddd"] == "true";
        }

        if(config.find("logging") != config.end())
        {
                shared->loggingOn = config["logging"] == "true";
        }


#ifdef OPENCL

        cv::ocl::PlatformsInfo platforms;
        cv::ocl::getOpenCLPlatforms(platforms);

        for(auto p : platforms)
        {
                DebugOut(1)<<"platform: "<<p->platformName<<" vendor: "<<p->platformVendor<<endl;
        }

        cv::ocl::DevicesInfo info;
        cv::ocl::getOpenCLDevices(info, cv::ocl::CVCL_DEVICE_TYPE_ALL);

        DebugOut(1)<<"There are "<<info.size()<<" OpenCL devices on this system"<<endl;

        if(!info.size())
        {
                DebugOut(1)<<"No CL devices.  Disabling OpenCL"<<endl;
                shared->useOpenCl = false;
        }
        else
        {
                cv::ocl::setDevice(info[0]);
        }

        for(auto i : info)
        {
                /// deviceName is a std::string; log it whole rather than character by character
                DebugOut(1)<<"OpenCLDeviceName: "<<i->deviceName<<endl;
        }

#endif

#ifdef CUDA
        if(shared->useCuda)
        {
                int devices = cv::gpu::getCudaEnabledDeviceCount();
                DebugOut()<<"There are "<<devices<<" CUDA devices on this system"<<endl;
                if(devices)
                {
                        DebugOut()<<"We will use 0 as the default device"<<endl;
                        cv::gpu::DeviceInfo info(0);
                        DebugOut()<<"Cuda Device Name: "<<info.name()<<endl;
                        DebugOut()<<"Version: "<<info.majorVersion()<<"."<<info.minorVersion()<<endl;
                        DebugOut()<<"Streaming processor count: "<<info.multiProcessorCount()<<endl;
                        cv::gpu::setDevice(0);
                }
                else
                {
                        DebugOut(DebugOut::Warning)<<"No CUDA device found.  Disabling CUDA."<<endl;
                        shared->useCuda = false;
                }
        }
#endif

        if(shared->ddd)
        {
#ifdef OPENCL
                faceCascade = amb::make_unique(new cv::ocl::OclCascadeClassifier());
                eyeCascade = amb::make_unique(new cv::ocl::OclCascadeClassifier());
#else
                faceCascade = amb::make_unique(new cv::CascadeClassifier());
                eyeCascade = amb::make_unique(new cv::CascadeClassifier());
#endif

                std::string faceFile = CV_DATA "/haarcascades/haarcascade_frontalface_alt.xml";
                if(!faceCascade->load(faceFile))
                {
                        DebugOut(DebugOut::Warning) << "Could not load face cascade: " << faceFile <<". Disabling ddd" << endl;
                        shared->ddd = false;
                }

                std::string eyeFile = CV_DATA "/haarcascades/haarcascade_eye_tree_eyeglasses.xml";

                if(!eyeCascade->load(eyeFile))
                {
                        DebugOut(DebugOut::Warning) << "Could not load eye cascade: " << eyeFile <<". Disabling ddd" << endl;
                        shared->ddd = false;
                }
        }

        routingEngine->subscribeToProperty(VehicleProperty::VehicleSpeed, this);
        routingEngine->subscribeToProperty(VehicleProperty::Latitude, this);
        routingEngine->subscribeToProperty(VehicleProperty::Longitude, this);
}

OpenCvLuxPlugin::~OpenCvLuxPlugin()
{

}

const string OpenCvLuxPlugin::uuid()
{
        return "3c7a1ea0-7d2e-11e2-9e96-0800200c9a66";
}

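// One-shot gets: if the camera is not already streaming, open it just long
// enough to grab a single frame. ExteriorBrightness replies are queued and
// answered from updateProperty() once the frame has been evaluated; the
// VideoLogging property is answered synchronously from the cached flag.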
void OpenCvLuxPlugin::getPropertyAsync(AsyncPropertyReply *reply)
{
        reply->timedout = [this](AsyncPropertyReply* reply) {
                removeOne(&replyQueue, reply);
        };

        if(!shared->m_capture || !shared->m_capture->isOpened())
        {
                /// we want to turn on the camera for one shot to get an image and determine the intensity

                if(init())
                        grabImage(shared.get());
        }

        if(reply->property == VehicleProperty::ExteriorBrightness)
        {
                replyQueue.push_back(reply);
        }
        else if(reply->property == VideoLogging)
        {
                BasicPropertyType<bool> tmp(VideoLogging, shared->loggingOn);
                reply->value = &tmp;
                reply->success = true;
                reply->completed(reply);
        }
        else  ///We don't support what you are asking for.  Reply false
        {
                reply->value = nullptr;
                reply->success = false;
                reply->error = AsyncPropertyReply::InvalidOperation;
                reply->completed(reply);
        }
}

void OpenCvLuxPlugin::getRangePropertyAsync(AsyncRangePropertyReply *reply)
{
        throw std::runtime_error("OpenCVLuxPlugin does not support this operation.  We should never hit this method.");
}

AsyncPropertyReply *OpenCvLuxPlugin::setProperty(AsyncSetPropertyRequest request)
{
        AsyncPropertyReply *reply = new AsyncPropertyReply(request);

        reply->success = false;
        reply->error = AsyncPropertyReply::InvalidOperation;

        if(request.property == VideoLogging)
        {
                shared->loggingOn = request.value->value<bool>();
                reply->success = true;
                reply->error = AsyncPropertyReply::NoError;
        }

        reply->completed(reply);

        return reply;
}

void OpenCvLuxPlugin::subscribeToPropertyChanges(VehicleProperty::Property property)
{
        if(!shared->mRequests.size())
        {
                if(init())
                        g_timeout_add(1000 / shared->fps, grabImage, shared.get());
        }

        shared->mRequests.push_back(property);
}

void OpenCvLuxPlugin::unsubscribeToPropertyChanges(VehicleProperty::Property property)
{
        removeOne(&shared->mRequests, property);
}

PropertyList OpenCvLuxPlugin::supported()
{
        PropertyList props;
        props.push_back(VehicleProperty::ExteriorBrightness);
        props.push_back(DriverDrowsiness);
        props.push_back(VideoLogging);

        return props;
}

int OpenCvLuxPlugin::supportedOperations()
{
        return Get | Set;
}

void OpenCvLuxPlugin::propertyChanged(AbstractPropertyType *value)
{
        QMutexLocker lock(&mutex);
        if(value->name == VehicleProperty::VehicleSpeed)
        {
                speed = value->value<uint16_t>();
        }
        else if(value->name == VehicleProperty::Latitude)
        {
                latitude = value->value<double>();
        }
        else if(value->name == VehicleProperty::Longitude)
        {
                longitude = value->value<double>();
        }
}

void OpenCvLuxPlugin::supportedChanged(const PropertyList &)
{
        DebugOut()<<"OpenCvLuxPlugin::supported changed."<<endl;
}

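// GLib timeout callback driving the capture loop. It pulls one frame from the
// camera, optionally runs drowsiness detection, and either evaluates brightness
// inline or hands the frame to a QtConcurrent worker when "threaded" is set.
// Returning true keeps the timer alive; returning false (no subscribers left)
// removes it.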
static int grabImage(void *data)
{
        OpenCvLuxPlugin::Shared* shared = static_cast<OpenCvLuxPlugin::Shared*>(data);

        cv::Mat m_image;

        if(shared->kinect)
        {
                shared->m_capture->grab();
                shared->m_capture->retrieve( m_image, CV_CAP_OPENNI_GRAY_IMAGE );
        }
        else
        {
                *(shared->m_capture.get()) >> m_image;
        }

        if(shared->ddd)
        {
                try
                {
                        shared->parent->detectEyes(m_image);
                }
                catch(...)
                {
                        DebugOut(DebugOut::Warning) << "DDD detection exception" << endl;
                }
        }

        if(shared->threaded)
        {
                QFutureWatcher<uint> *watcher = new QFutureWatcher<uint>();
                QObject::connect(watcher, &QFutureWatcher<uint>::finished, shared->parent, &OpenCvLuxPlugin::imgProcResult);

                QFuture<uint> future = QtConcurrent::run(evalImage, m_image, shared);
                watcher->setFuture(future);

                QtConcurrent::run(shared->parent, &OpenCvLuxPlugin::writeVideoFrame, m_image);
        }
        else
        {
                shared->parent->writeVideoFrame(m_image);
                try
                {
                        uint lux = evalImage(m_image, shared);
                        shared->parent->updateProperty(lux);
                }
                catch(...)
                {
                        DebugOut(DebugOut::Warning) << "Exception caught "<< __FUNCTION__ << endl;
                }

                //detectLight(m_image,shared);
        }

        if(shared->mRequests.size())
        {
                return true;
        }

        return false;
}

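// Estimates illuminance from the average pixel value of the frame. The mean
// (GPU-accelerated via OpenCL or CUDA when enabled) is offset by the configured
// pixelLowerBound and scaled against pixelUpperBound so that a saturated frame
// maps to roughly 130000 lux, approximately the illuminance of direct sunlight.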
static uint evalImage(cv::Mat qImg, OpenCvLuxPlugin::Shared *shared)
{
        if(qImg.empty())
        {
                DebugOut(DebugOut::Warning)<<"Empty image frame."<<endl;
                return 0;
        }

        cv::Scalar avgPixelIntensity;

        if(shared->useOpenCl)
        {
#ifdef OPENCL
                cv::Scalar stdDev;
                cv::ocl::oclMat src(qImg), gray;
                cv::ocl::cvtColor(src, gray, CV_BGR2GRAY);
                /// measure the grayscale image, mirroring the CUDA path below
                cv::ocl::meanStdDev(gray, avgPixelIntensity, stdDev);
#endif
        }
        else if(shared->useCuda)
        {
#ifdef CUDA
                cv::gpu::GpuMat src(qImg), dest;
                cv::gpu::cvtColor(src, dest, CV_BGR2GRAY);
                cv::Scalar stdDev;
                try
                {
                        cv::gpu::meanStdDev(dest, avgPixelIntensity, stdDev);
                }
                catch(...)
                {
                        DebugOut(DebugOut::Error)<<"CUDA pixel intensity calculation failed."<<endl;
                }
#endif
        }
        else
        {
                avgPixelIntensity = cv::mean(qImg);
        }

        double val = avgPixelIntensity.val[0];

        double qualifiedPixel = (val - shared->pixelLowerBound);

        if(qualifiedPixel < 0) qualifiedPixel = 0;

        uint lux = qualifiedPixel * (130000.0 / shared->pixelUpperBound);

        DebugOut(7)<<"average pixel value: "<<qualifiedPixel<<" lux: "<<lux<<endl;

        return lux;
}


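// Lazily opens the capture device: the OpenNI (Kinect) backend when "kinect"
// is set, otherwise the V4L device index given by "device". When "logging" is
// enabled it also opens a cv::VideoWriter using the configured four-character
// "codec" (default MJPG) and "logfile" path (default /tmp/video.avi); note the
// writer is opened at a fixed 30 fps regardless of the "fps" setting.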
bool OpenCvLuxPlugin::init()
{
        if(!shared->m_capture && shared->kinect)
        {
                shared->m_capture = amb::make_unique(new cv::VideoCapture(CV_CAP_OPENNI));
        }
        else if(!shared->m_capture)
        {
                if(device == "")
                        shared->m_capture = amb::make_unique(new cv::VideoCapture(0));
                else
                        shared->m_capture = amb::make_unique(new cv::VideoCapture(atoi(device.c_str())));
        }

        if(!shared->m_capture->isOpened())
        {
                DebugOut()<<"we failed to open camera device ("<<device<<") or no camera found"<<endl;
                return false;
        }

        if(configuration.find("logging") != configuration.end() &&
                        configuration["logging"] == "true" &&
                        (!shared->mWriter || !shared->mWriter->isOpened()))
        {
                cv::Size s = cv::Size((int) shared->m_capture->get(CV_CAP_PROP_FRAME_WIDTH),
                                                          (int) shared->m_capture->get(CV_CAP_PROP_FRAME_HEIGHT));

                std::string codec = configuration["codec"];

                if(codec.empty() || codec.size() != 4)
                {
                        DebugOut(DebugOut::Warning)<<"Invalid codec.  Using default: MJPG"<<endl;
                        codec = "MJPG";
                }

                std::string filename = configuration["logfile"];

                if(filename.empty()) filename = "/tmp/video.avi";

                boost::algorithm::to_upper(codec);

                shared->mWriter = amb::make_unique(new cv::VideoWriter(filename, CV_FOURCC(codec.at(0), codec.at(1), codec.at(2), codec.at(3)), 30, s));
        }

        DebugOut()<<"camera frame width: "<<shared->m_capture->get(CV_CAP_PROP_FRAME_WIDTH)<<endl;
        DebugOut()<<"camera frame height: "<<shared->m_capture->get(CV_CAP_PROP_FRAME_HEIGHT)<<endl;
        DebugOut()<<"camera frame fps: "<<shared->m_capture->get(CV_CAP_PROP_FPS)<<endl;

        return true;
}


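// Overlays speed, position and a timestamp on the frame and appends it to the
// log file. Frames are only recorded while logging is enabled and the vehicle
// is moving (speed > 0); each grabbed frame is written to the output twice.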
void OpenCvLuxPlugin::writeVideoFrame(cv::Mat f)
{
        QMutexLocker locker(&mutex);

        if(shared->loggingOn && speed > 0)
        {
                cv::Mat frame;
                f.copyTo(frame);

                std::stringstream str;
                str<<"Speed: "<<speed<<" kph, Lat/Lon: "<<latitude<<"/"<<longitude;

                std::stringstream datestr;
                datestr<<QDateTime::currentDateTime().toString().toStdString();

                locker.unlock();

                std::string text = str.str();

                int fontFace = cv::FONT_HERSHEY_SIMPLEX;
                double fontScale = 1;
                int thickness = 3;

                int baseline = 0;
                cv::Size textSize = cv::getTextSize(text, fontFace,
                                                                        fontScale, thickness, &baseline);

                cv::Size dateTextSize = cv::getTextSize(datestr.str(), fontFace,
                                                                        fontScale, thickness, &baseline);

                baseline += thickness;

                // center the text
                cv::Point textOrg((frame.cols - textSize.width)/2,
                                          (frame.rows - textSize.height));

                cv::Point dateOrg((frame.cols - dateTextSize.width)/2, dateTextSize.height);

                // then put the text itself
                cv::putText(frame, text, textOrg, fontFace, fontScale,
                                cv::Scalar::all(255), thickness, 8);

                cv::putText(frame, datestr.str(), dateOrg, fontFace, fontScale,
                                cv::Scalar::all(255), thickness, 8);

                (*shared->mWriter) << frame;
                (*shared->mWriter) << frame;
        }
}

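// Publishes a new lux reading: pending one-shot get replies are answered first,
// then subscribers are notified through the routing engine, but only when the
// value actually changed since the last update.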
void OpenCvLuxPlugin::updateProperty(uint lux)
{
        extBrightness->setValue(lux);

        for(auto reply : replyQueue)
        {
                reply->value = extBrightness.get();
                reply->success = true;
                try{
                        if(reply->completed)
                                reply->completed(reply);
                }
                catch(...)
                {
                        DebugOut(DebugOut::Warning)<<"reply failed"<<endl;
                }
        }

        replyQueue.clear();

        if(lux != lastLux && shared->mRequests.size())
        {
                lastLux = lux;
                routingEngine->updateProperty(extBrightness.get(), uuid());
        }
}

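// Slot invoked on the main thread when the QtConcurrent brightness evaluation
// finishes; it fetches the result from the QFutureWatcher that signaled it and
// forwards the value to updateProperty().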
void OpenCvLuxPlugin::imgProcResult()
{
        try
        {
                QFutureWatcher<uint> *watcher = dynamic_cast<QFutureWatcher<uint>*>(sender());

                if(!watcher)
                {
                        DebugOut(DebugOut::Warning) << "imgProcResult called without a QFutureWatcher sender" << endl;
                        return;
                }

                uint lux = watcher->result();
                shared->parent->updateProperty(lux);

                watcher->deleteLater();
        }
        catch(...)
        {
                DebugOut(DebugOut::Warning) << "exception caught getting img processing result" << endl;
        }
}

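// Experimental traffic-light detection: blurs a grayscale copy of the frame,
// runs a Hough circle transform, and classifies each detected circle as red,
// yellow or green from the dominant BGR channel of the patch around it. It is
// currently only referenced by the commented-out call in grabImage().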
TrafficLight::Color detectLight(cv::Mat img, OpenCvLuxPlugin::Shared *shared)
{
        cv::Mat gray;

        if(shared->useOpenCl)
        {
#ifdef OPENCL
                cv::ocl::oclMat src(img), gray2;
                cv::ocl::cvtColor(src, gray2, CV_BGR2GRAY);
                cv::ocl::GaussianBlur(gray2, gray2, cv::Size(9,9), 2, 2);

                gray = gray2;
#endif
        }
        else
        {
                cv::cvtColor(img, gray, CV_BGR2GRAY);
                cv::GaussianBlur(gray, gray, cv::Size(9,9), 2, 2);
        }

        std::vector<cv::Vec3f> circles;

        //cv::HoughCircles(gray, circles, CV_HOUGH_GRADIENT, 1, gray.rows/8, 200, 100, 0, 0);

        cv::HoughCircles( gray, circles, CV_HOUGH_GRADIENT, 2, 30, 231, 50, 0, 0 );

        for(size_t i = 0; i < circles.size(); i++)
        {
                cv::Point center(cvRound(circles[i][0]), cvRound(circles[i][1]));
                int radius = cvRound(circles[i][2]);

                cv::Rect rect(center.x - radius / 2, center.y - radius / 2, radius, radius);

                try {
                        cv::Mat light(img, rect);

                        cv::circle( img, center, radius, cv::Scalar(0,0,255), 3, 8, 0 );

                        cv::rectangle(img, rect, cv::Scalar(255,0,0));

                        cv::Scalar avgPixel = cv::mean(light);

                        if(avgPixel[2] > 128 && avgPixel[0] < 128 && avgPixel[1] < 128)
                        {
                                DebugOut(1)<<"Red Light!!!"<<endl;
                                return TrafficLight::Red;
                        }
                        else if(avgPixel[1] > 128 && avgPixel[0] < 128 && avgPixel[2] < 128)
                        {
                                DebugOut(1)<<"Green Light!!!"<<endl;
                                return TrafficLight::Green;
                        }
                        else if(avgPixel[0] > 128 && avgPixel[1] < 128 && avgPixel[2] < 128)
                        {
                                /// note: this branch actually matches a blue-dominant patch;
                                /// a yellow light would have high red and green channels instead
                                DebugOut(1)<<"Yellow Light!!!"<<endl;
                                return TrafficLight::Yellow;
                        }
                }
                catch(...)
                {
                        /// the ROI can fall partly outside the frame; ignore it and try the next circle
                }
        }

        cv::namedWindow( "Hough Circle Transform Demo", CV_WINDOW_AUTOSIZE );
        cv::imshow( "Hough Circle Transform Demo", img );

        /// NOTE: no value is returned when no light is classified; callers must not rely on the result
}


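// Driver-drowsiness helper: runs the Haar face cascade on an equalized
// grayscale copy of the frame (on the GPU when OpenCL is enabled), then looks
// for eyes inside each detected face and draws the detections onto the frame.
// The hasEyes flag computed below is not yet written into the DriverDrowsiness
// property before it is re-published at the end of the function.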
void OpenCvLuxPlugin::detectEyes(cv::Mat frame)
{
        if(frame.empty())
                return;

        bool hasEyes = false;
        std::vector<cv::Rect> faces;
        cv::Mat frameGray;
#ifdef OPENCL
        cv::ocl::oclMat gray2;
#endif

        if(shared->useOpenCl)
        {
#ifdef OPENCL
                cv::ocl::oclMat src(frame);
                cv::ocl::cvtColor(src, gray2, CV_BGR2GRAY);
                cv::ocl::equalizeHist(gray2, gray2);
                cv::ocl::OclCascadeClassifier* fc = static_cast<cv::ocl::OclCascadeClassifier*>(faceCascade.get());
                fc->detectMultiScale(gray2, faces, 1.1, 2, 0 | CV_HAAR_SCALE_IMAGE, cv::Size(30, 30));
#endif
        }
        else
        {
                cv::cvtColor(frame, frameGray, CV_BGR2GRAY);
                cv::equalizeHist(frameGray, frameGray);
                faceCascade->detectMultiScale(frameGray, faces, 1.1, 2, 0 | CV_HAAR_SCALE_IMAGE, cv::Size(30, 30));
        }

        for( size_t i = 0; i < faces.size(); i++ )
        {
                cv::Point center( faces[i].x + faces[i].width*0.5, faces[i].y + faces[i].height*0.5 );
                cv::ellipse( frame, center, cv::Size( faces[i].width*0.5, faces[i].height*0.5), 0, 0, 360, cv::Scalar( 255, 0, 255 ), 4, 8, 0 );

                std::vector<cv::Rect> eyes;

                if(shared->useOpenCl)
                {
#ifdef OPENCL
                        cv::ocl::oclMat faceROI = gray2(faces[i]);
                        cv::ocl::OclCascadeClassifier* ec = static_cast<cv::ocl::OclCascadeClassifier*>(eyeCascade.get());
                        ec->detectMultiScale( faceROI, eyes, 1.1, 2, 0 | CV_HAAR_SCALE_IMAGE, cv::Size(30, 30) );
#endif
                }
                else
                {
                        cv::Mat faceROI = frameGray(faces[i]);
                        eyeCascade->detectMultiScale( faceROI, eyes, 1.1, 2, 0 | CV_HAAR_SCALE_IMAGE, cv::Size(30, 30) );
                }

                if(eyes.size())
                {
                        hasEyes = true;
                        DebugOut() << "Number of eyes: " << eyes.size() << endl;
                }

                for( size_t j = 0; j < eyes.size(); j++ )
                {
                        cv::Point center( faces[i].x + eyes[j].x + eyes[j].width*0.5, faces[i].y + eyes[j].y + eyes[j].height*0.5 );
                        int radius = cvRound( (eyes[j].width + eyes[j].height)*0.25 );
                        cv::circle( frame, center, radius, cv::Scalar( 255, 0, 0 ), 4, 8, 0 );
                }
        }

        routingEngine->updateProperty(driverDrowsiness.get(), this->uuid());
}