Merge pull request #36 from tripzero/master
[profile/ivi/automotive-message-broker.git] / plugins / opencvlux / opencvluxplugin.cpp
/*
Copyright (C) 2012 Intel Corporation

This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.

This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
Lesser General Public License for more details.

You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301  USA
*/

#include "opencvluxplugin.h"
#include "timestamp.h"
#include <listplusplus.h>
#include <superptr.hpp>
#include <ambplugin.h>

#include <iostream>
#include <thread>
#include <opencv2/opencv.hpp>
#include <opencv2/core/ocl.hpp>

#include <QFuture>
#include <QFutureWatcher>
#include <QtConcurrent/QtConcurrent>

using namespace std;

const std::string VideoLogging = "VideoLogging";
const std::string DriverDrowsiness = "DriverDrowsiness";
const std::string OpenCL = "OpenCL";

#include "debugout.h"

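// Plugin factory entry point: the AMB routing engine loads this shared object
// and calls create() to construct and initialize the plugin instance.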
extern "C" AbstractSource * create(AbstractRoutingEngine* routingengine, map<string, string> config)
{
	auto plugin = new AmbPlugin<OpenCvLuxPlugin>(routingengine, config);
	plugin->init();

	return plugin;
}

OpenCvLuxPlugin::OpenCvLuxPlugin(AbstractRoutingEngine* re, map<string, string> config, AbstractSource &parent)
	:AmbPluginImpl(re, config, parent), lastLux(0), speed(0), latitude(0), longitude(0)
{
	videoLogging = addPropertySupport(Zone::None, [](){
		return new BasicPropertyType<bool>(VideoLogging, false);
	});

	driverDrowsiness = addPropertySupport(Zone::None, [](){
		return new OpenCvLuxPlugin::DriverDrowsinessType(DriverDrowsiness, false);
	});

	openCl = addPropertySupport(Zone::None, [](){
		return new BasicPropertyType<bool>(OpenCL, false);
	});

	extBrightness = addPropertySupport<VehicleProperty::ExteriorBrightnessType>(Zone::None);

	shared = amb::make_unique(new Shared);
	shared->parent = this;

	shared->m_capture = nullptr;
	shared->mWriter = nullptr;

	shared->threaded = false;
	shared->kinect = false;
	shared->useCuda = false;
	shared->loggingOn = false;
	shared->ddd = false;

	shared->fps = 30;
	device = "0";
	shared->pixelLowerBound = 0;
	shared->pixelUpperBound = 255;

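	// Supported configuration keys (all optional): threaded, kinect, fps, device,
	// pixelLowerBound, pixelUpperBound, opencl, ddd, logging, faceCascade and
	// eyeCascade; codec and logfile are read later in init().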
	if(config.find("threaded") != config.end())
	{
		shared->threaded = config["threaded"] == "true";
	}

	if(config.find("kinect") != config.end())
	{
		shared->kinect = config["kinect"] == "true";
	}

	if(config.find("fps") != config.end())
	{
		shared->fps = boost::lexical_cast<int>(config["fps"]);
	}

	if(config.find("device") != config.end())
	{
		device = config["device"];
	}

	if(config.find("pixelLowerBound") != config.end())
	{
		shared->pixelLowerBound = boost::lexical_cast<int>(config["pixelLowerBound"]);

		if(shared->pixelLowerBound < 0)
		{
			shared->pixelLowerBound = 0;
		}
	}

	if(config.find("pixelUpperBound") != config.end())
	{
		shared->pixelUpperBound = boost::lexical_cast<int>(config["pixelUpperBound"]);

		if(shared->pixelUpperBound > 255)
			shared->pixelUpperBound = 255;
	}

	if(config.find("opencl") != config.end())
	{
		if(cv::ocl::haveOpenCL())
		{
			bool useOcl = config["opencl"] == "true";
			cv::ocl::setUseOpenCL(useOcl);
			openCl->setValue(cv::ocl::useOpenCL());
		}
		else
		{
			DebugOut(DebugOut::Warning) << "OpenCL support is not available on this system." << endl;
		}
	}

	if(config.find("ddd") != config.end())
	{
		shared->ddd = config["ddd"] == "true";
	}

	if(config.find("logging") != config.end())
	{
		shared->loggingOn = config["logging"] == "true";
	}

	if(shared->ddd)
	{
		faceCascade = amb::make_unique(new cv::CascadeClassifier());
		eyeCascade = amb::make_unique(new cv::CascadeClassifier());

		std::string faceFile = config["faceCascade"];

		if(!faceCascade->load(faceFile))
		{
			DebugOut(DebugOut::Warning) << "Could not load face cascade: " << faceFile << ". Disabling ddd" << endl;
			shared->ddd = false;
		}

		std::string eyeFile = config["eyeCascade"];

		if(!eyeCascade->load(eyeFile))
		{
			DebugOut(DebugOut::Warning) << "Could not load eye cascade: " << eyeFile << ". Disabling ddd" << endl;
			shared->ddd = false;
		}
	}

	routingEngine->subscribeToProperty(VehicleProperty::VehicleSpeed, &parent);
	routingEngine->subscribeToProperty(VehicleProperty::Latitude, &parent);
	routingEngine->subscribeToProperty(VehicleProperty::Longitude, &parent);
}

OpenCvLuxPlugin::~OpenCvLuxPlugin()
{

}

const string OpenCvLuxPlugin::uuid() const
{
	return "3c7a1ea0-7d2e-11e2-9e96-0800200c9a66";
}

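// One-shot get: if the camera is not already open, open it and grab a single
// frame so the brightness estimate is fresh, then let the base class answer.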
void OpenCvLuxPlugin::getPropertyAsync(AsyncPropertyReply *reply)
{
	reply->timedout = [this](AsyncPropertyReply* reply) {
		removeOne(&replyQueue, reply);
	};

	if(!shared->m_capture || !shared->m_capture->isOpened())
	{
		/// we want to turn on the camera for one shot to get an image and determine the intensity

		init();
		grabImage(shared.get());
	}

	AmbPluginImpl::getPropertyAsync(reply);
}

void OpenCvLuxPlugin::getRangePropertyAsync(AsyncRangePropertyReply *reply)
{
	throw std::runtime_error("OpenCvLuxPlugin does not support this operation. This method should never be reached.");
}

AsyncPropertyReply *OpenCvLuxPlugin::setProperty(AsyncSetPropertyRequest request)
{
	if(request.property == VideoLogging)
	{
		shared->loggingOn = request.value->value<bool>();
	}
	if(request.property == OpenCL)
	{
		QMutexLocker lock(&mutex);
		cv::ocl::setUseOpenCL(request.value->value<bool>());
	}

	return AmbPluginImpl::setProperty(request);
}

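// Subscription bookkeeping: the camera and the GLib capture timer are started
// lazily on the first subscriber; grabImage() stops the timer by returning
// false once no requests remain.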
void OpenCvLuxPlugin::subscribeToPropertyChanges(VehicleProperty::Property property)
{
	if(!shared->mRequests.size())
	{
		init();
		g_timeout_add(1000 / shared->fps, grabImage, shared.get());
	}

	shared->mRequests.push_back(property);
}

void OpenCvLuxPlugin::unsubscribeToPropertyChanges(VehicleProperty::Property property)
{
	removeOne(&shared->mRequests, property);
}

void OpenCvLuxPlugin::propertyChanged(AbstractPropertyType *value)
{
	QMutexLocker lock(&mutex);
	if(value->name == VehicleProperty::VehicleSpeed)
	{
		speed = value->value<uint16_t>();
	}
	else if(value->name == VehicleProperty::Latitude)
	{
		latitude = value->value<double>();
	}
	else if(value->name == VehicleProperty::Longitude)
	{
		longitude = value->value<double>();
	}
}

void OpenCvLuxPlugin::supportedChanged(const PropertyList &)
{
	DebugOut() << "OpenCvLuxPlugin::supported changed." << endl;
}

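// GLib timeout callback (GSourceFunc): grabs a frame from the capture device,
// processes roughly every fourth frame, and returns true to stay scheduled or
// false to stop the timer once no subscriptions remain.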
static int grabImage(void *data)
{
	OpenCvLuxPlugin::Shared* shared = static_cast<OpenCvLuxPlugin::Shared*>(data);

	shared->frameCount++;

	if(shared->frameCount >= shared->fps)
		shared->frameCount = 0;

	if(shared->frameCount % 4 != 0)
	{
		return true;
	}

	if(!shared->m_capture->isOpened())
		return false;

	cv::UMat m_image;

	*(shared->m_capture.get()) >> m_image;

	if(shared->ddd)
	{
		try
		{
			shared->parent->detectEyes(m_image);
		}
		catch(...)
		{
			DebugOut(DebugOut::Warning) << "Exception caught during DDD detection" << endl;
		}
	}

	if(shared->threaded)
	{
		QFutureWatcher<uint> *watcher = new QFutureWatcher<uint>();
		QObject::connect(watcher, &QFutureWatcher<uint>::finished, shared->parent, &OpenCvLuxPlugin::imgProcResult);

		QFuture<uint> future = QtConcurrent::run(evalImage, m_image, shared);
		watcher->setFuture(future);

		QtConcurrent::run(shared->parent, &OpenCvLuxPlugin::writeVideoFrame, m_image);
	}
	else
	{
		if(shared->parent->videoLogging->value<bool>())
			shared->parent->writeVideoFrame(m_image);
		try
		{
			uint16_t lux = evalImage(m_image, shared);
			shared->parent->updateProperty(lux);
		}
		catch(...)
		{
			DebugOut(DebugOut::Warning) << "Exception caught in " << __FUNCTION__ << endl;
		}

		//detectLight(m_image,shared);
	}

	if(shared->mRequests.size())
	{
		return true;
	}

	return false;
}

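// Lux estimation: the mean pixel intensity of the frame is shifted by
// pixelLowerBound and scaled linearly onto a 0-130000 range (roughly the
// illuminance of direct sunlight), so the result is a coarse estimate rather
// than a calibrated measurement.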
static uint evalImage(cv::UMat qImg, OpenCvLuxPlugin::Shared *shared)
{
	if(qImg.empty())
	{
		DebugOut(DebugOut::Warning) << "Empty image frame." << endl;
		return 0;
	}

	cv::Scalar avgPixelIntensity;

	avgPixelIntensity = cv::mean(qImg);

	double val = avgPixelIntensity.val[0];

	double qualifiedPixel = (val - shared->pixelLowerBound);

	if(qualifiedPixel < 0) qualifiedPixel = 0;

	uint lux = qualifiedPixel * (130000.0 / shared->pixelUpperBound);

	DebugOut(7) << "average pixel value: " << qualifiedPixel << " lux: " << lux << endl;

	return lux;
}

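// Opens the capture device (configured index, falling back to camera 0) and,
// if needed, a cv::VideoWriter for logging using the configured codec and
// logfile path.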
void OpenCvLuxPlugin::init()
{
	if(!shared->m_capture)
	{
		if(device == "")
			shared->m_capture = amb::make_unique(new cv::VideoCapture(0));
		else
			shared->m_capture = amb::make_unique(new cv::VideoCapture(atoi(device.c_str())));
	}

	if(!shared->m_capture->isOpened())
	{
		DebugOut() << "Failed to open camera device (" << device << ") or no camera found" << endl;
		return;
	}

	if(!shared->mWriter || !shared->mWriter->isOpened())
	{
		cv::Size s = cv::Size((int) shared->m_capture->get(cv::CAP_PROP_FRAME_WIDTH),
							  (int) shared->m_capture->get(cv::CAP_PROP_FRAME_HEIGHT));

		std::string codec = configuration["codec"];

		if(codec.empty() || codec.size() != 4)
		{
			DebugOut(DebugOut::Warning) << "Invalid codec: " << codec << ". Using default: MJPG" << endl;
			codec = "MJPG";
		}

		std::string filename = configuration["logfile"];

		if(filename.empty()) filename = "/tmp/video.avi";

		boost::algorithm::to_upper(codec);

		shared->mWriter = amb::make_unique(new cv::VideoWriter(filename, cv::VideoWriter::fourcc(codec.at(0), codec.at(1), codec.at(2), codec.at(3)), 30, s));
	}

	DebugOut() << "camera frame width: " << shared->m_capture->get(cv::CAP_PROP_FRAME_WIDTH) << endl;
	DebugOut() << "camera frame height: " << shared->m_capture->get(cv::CAP_PROP_FRAME_HEIGHT) << endl;
	DebugOut() << "camera frame fps: " << shared->m_capture->get(cv::CAP_PROP_FPS) << endl;

	return;
}

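// Writes a frame to the log video when logging is enabled and the vehicle is
// moving (speed > 0), overlaying speed, latitude/longitude and the current
// timestamp on the frame.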
void OpenCvLuxPlugin::writeVideoFrame(cv::UMat f)
{
	QMutexLocker locker(&mutex);

	if(shared->loggingOn && speed > 0)
	{
		cv::Mat frame;
		f.copyTo(frame);

		std::stringstream str;
		str << "Speed: " << speed << " kph, Lat/Lon: " << latitude << "/" << longitude;

		std::stringstream datestr;
		datestr << QDateTime::currentDateTime().toString().toStdString();

		locker.unlock();

		std::string text = str.str();

		int fontFace = cv::FONT_HERSHEY_SIMPLEX;
		double fontScale = 1;
		int thickness = 3;

		int baseline = 0;
		cv::Size textSize = cv::getTextSize(text, fontFace,
											fontScale, thickness, &baseline);

		cv::Size dateTextSize = cv::getTextSize(datestr.str(), fontFace,
											fontScale, thickness, &baseline);

		baseline += thickness;

		// center the text
		cv::Point textOrg((frame.cols - textSize.width)/2,
						  (frame.rows - textSize.height));

		cv::Point dateOrg((frame.cols - dateTextSize.width)/2, dateTextSize.height);

		// then put the text itself
		cv::putText(frame, text, textOrg, fontFace, fontScale,
				cv::Scalar::all(255), thickness, 8);

		cv::putText(frame, datestr.str(), dateOrg, fontFace, fontScale,
				cv::Scalar::all(255), thickness, 8);

		(*shared->mWriter) << frame;
		(*shared->mWriter) << frame;
	}
}

void OpenCvLuxPlugin::updateProperty(uint16_t lux)
{
	extBrightness->setValue(lux);

	if(lux != lastLux && shared->mRequests.size())
	{
		lastLux = lux;
		routingEngine->updateProperty(extBrightness.get(), uuid());
	}
}

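// Slot invoked when the QFutureWatcher created in grabImage() finishes;
// forwards the lux value computed by the worker thread to updateProperty().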
void OpenCvLuxPlugin::imgProcResult()
{
	try
	{
		QFutureWatcher<uint> *watcher = dynamic_cast<QFutureWatcher<uint>*>(sender());

		if(!watcher)
			return;

		uint lux = watcher->result();
		shared->parent->updateProperty(lux);

		watcher->deleteLater();
	}
	catch(...)
	{
		DebugOut(DebugOut::Warning) << "exception caught getting img processing result" << endl;
	}
}

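// Experimental traffic light detector (the only call site in this file is
// commented out in grabImage()): Hough circles are located in a blurred
// grayscale copy, then the mean BGR value inside each candidate circle is
// classified as red, green or yellow by its dominant channel.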
TrafficLight::Color detectLight(cv::UMat img, OpenCvLuxPlugin::Shared *shared)
{

	cv::UMat gray;

	cv::cvtColor(img, gray, cv::COLOR_BGR2GRAY);
	cv::GaussianBlur(gray, gray, cv::Size(9,9), 2, 2);

	std::vector<cv::Vec3f> circles;

	//cv::HoughCircles(gray, circles, CV_HOUGH_GRADIENT, 1, gray.rows/8, 200, 100, 0, 0);

	cv::HoughCircles( gray, circles, cv::HOUGH_GRADIENT, 2, 30, 231, 50, 0, 0 );

	for(size_t i = 0; i < circles.size(); i++)
	{
		cv::Point center(cvRound(circles[i][0]), cvRound(circles[i][1]));
		int radius = cvRound(circles[i][2]);

		cv::Rect rect(center.x - radius / 2, center.y - radius / 2, radius, radius);

		try {

			cv::UMat light(img, rect);

			cv::circle(img, center, radius, cv::Scalar(0,0,255), 3, 8, 0 );

			cv::rectangle(img, rect.tl(), rect.br(), cv::Scalar(255,0,0));

			cv::Scalar avgPixel = cv::mean(light);

			if(avgPixel[2] > 128 && avgPixel[0] < 128 && avgPixel[1] < 128)
			{
				DebugOut(1) << "Red Light!!!" << endl;
				return TrafficLight::Red;
			}
			else if(avgPixel[1] > 128 && avgPixel[0] < 128 && avgPixel[2] < 128)
			{
				DebugOut(1) << "Green Light!!!" << endl;
				return TrafficLight::Green;
			}
			else if(avgPixel[0] > 128 && avgPixel[1] < 128 && avgPixel[2] < 128)
			{
				DebugOut(1) << "Yellow Light!!!" << endl;
				return TrafficLight::Yellow;
			}
		}
		catch(...)
		{
			// ignore circles whose bounding rect falls outside the frame
		}

	}

	// No candidate matched; this assumes TrafficLight::Unknown is defined as
	// the "nothing detected" value of the enum.
	return TrafficLight::Unknown;
}

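// Driver drowsiness detection ("ddd"): Haar cascades find faces and then eyes
// within each face; if a face is found but no eyes are, DriverDrowsiness is
// set to true and published, and it is cleared again once eyes reappear.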
void OpenCvLuxPlugin::detectEyes(cv::UMat frame)
{
	if(frame.empty())
		return;

	bool hasEyes = false;
	std::vector<cv::Rect> faces;
	cv::UMat frameGray;

	cv::cvtColor(frame, frameGray, cv::COLOR_BGR2GRAY);
	cv::equalizeHist(frameGray, frameGray);
	faceCascade->detectMultiScale(frameGray, faces, 1.1, 2, 0 | cv::CASCADE_SCALE_IMAGE, cv::Size(30, 30));

	for( size_t i = 0; i < faces.size(); i++ )
	{
		cv::Point center( faces[i].x + faces[i].width*0.5, faces[i].y + faces[i].height*0.5 );
		cv::ellipse( frame, center, cv::Size( faces[i].width*0.5, faces[i].height*0.5), 0, 0, 360, cv::Scalar( 255, 0, 255 ), 4, 8, 0 );

		std::vector<cv::Rect> eyes;

		cv::UMat faceROI(frameGray(faces[i]));
		eyeCascade->detectMultiScale( faceROI, eyes, 1.1, 2, 0 | cv::CASCADE_SCALE_IMAGE, cv::Size(30, 30) );

		for( size_t j = 0; j < eyes.size(); j++ )
		{
			cv::Point center( faces[i].x + eyes[j].x + eyes[j].width*0.5, faces[i].y + eyes[j].y + eyes[j].height*0.5 );
			int radius = cvRound( (eyes[j].width + eyes[j].height)*0.25 );
			cv::circle( frame, center, radius, cv::Scalar( 255, 0, 0 ), 4, 8, 0 );
		}

		if(eyes.size())
		{
			hasEyes = true;
			DebugOut() << "Number of eyes: " << eyes.size() << endl;

			if(driverDrowsiness->value<bool>())
			{
				driverDrowsiness->setValue(false);
				routingEngine->updateProperty(driverDrowsiness.get(), this->uuid());
			}
		}
		else
		{
			if(!driverDrowsiness->value<bool>())
			{
				driverDrowsiness->setValue(true);
				routingEngine->updateProperty(driverDrowsiness.get(), this->uuid());
			}
		}
	}
}