/*
Copyright (C) 2012 Intel Corporation

This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.

This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
Lesser General Public License for more details.

You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301  USA
*/
18
19 #include "opencvluxplugin.h"
20 #include "timestamp.h"
21 #include <listplusplus.h>
22 #include <superptr.hpp>
23 #include <ambplugin.h>
24
25 #include <iostream>
26 #include <thread>
27 #include <opencv2/opencv.hpp>
28 #include <opencv2/core/ocl.hpp>
29
30 #include <QFuture>
31 #include <QFutureWatcher>
32 #include <QtConcurrent/QtConcurrent>
33
34
35 #include <opencv2/core/ocl.hpp>
36
37 using namespace std;
38
39 const std::string VideoLogging = "VideoLogging";
40 const std::string DriverDrowsiness = "DriverDrowsiness";
41 const std::string OpenCL = "OpenCL";
42
43 #include "debugout.h"
44
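// Plugin factory entry point. AMB is expected to resolve this symbol when it
// loads the shared object and to call it with the routing engine and the
// key/value configuration parsed from its config file.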
extern "C" AbstractSource * create(AbstractRoutingEngine* routingengine, map<string, string> config)
{
    auto plugin = new AmbPlugin<OpenCvLuxPlugin>(routingengine, config);
    plugin->init();

    return plugin;
}

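// Registers the properties this plugin provides (VideoLogging, DriverDrowsiness,
// OpenCL and ExteriorBrightness) and applies the configuration options read from
// the config map: "threaded", "kinect", "fps", "device", "pixelLowerBound",
// "pixelUpperBound", "opencl", "ddd", "logging", plus "faceCascade" and
// "eyeCascade" when driver-drowsiness detection (ddd) is enabled.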
OpenCvLuxPlugin::OpenCvLuxPlugin(AbstractRoutingEngine* re, map<string, string> config, AbstractSource &parent)
    :AmbPluginImpl(re, config, parent), lastLux(0), speed(0), latitude(0), longitude(0)
{
    videoLogging = addPropertySupport(Zone::None, [](){
        return new BasicPropertyType<bool>(VideoLogging, false);
    });

    driverDrowsiness = addPropertySupport(Zone::None, [](){
        return new OpenCvLuxPlugin::DriverDrowsinessType(DriverDrowsiness, false);
    });

    openCl = addPropertySupport(Zone::None, [](){
        return new BasicPropertyType<bool>(OpenCL, false);
    });

    extBrightness = addPropertySupport<VehicleProperty::ExteriorBrightnessType>(Zone::None);

    shared = amb::make_unique(new Shared);
    shared->parent = this;

    shared->m_capture = nullptr;
    shared->mWriter = nullptr;

    shared->threaded = false;
    shared->kinect = false;
    shared->useCuda = false;
    shared->loggingOn = false;
    shared->ddd = false;

    shared->fps = 30;
    device = "0";
    shared->pixelLowerBound = 0;
    shared->pixelUpperBound = 255;

    if(config.find("threaded") != config.end())
    {
        shared->threaded = config["threaded"] == "true";
    }

    if(config.find("kinect") != config.end())
    {
        shared->kinect = config["kinect"] == "true";
    }

    if(config.find("fps") != config.end())
    {
        shared->fps = boost::lexical_cast<int>(config["fps"]);
    }

    if(config.find("device") != config.end())
    {
        device = config["device"].c_str();
    }

    if(config.find("pixelLowerBound") != config.end())
    {
        shared->pixelLowerBound = boost::lexical_cast<int>(config["pixelLowerBound"]);

        if(shared->pixelLowerBound < 0)
        {
            shared->pixelLowerBound = 0;
        }
    }

    if(config.find("pixelUpperBound") != config.end())
    {
        shared->pixelUpperBound = boost::lexical_cast<int>(config["pixelUpperBound"]);

        if(shared->pixelUpperBound > 255)
            shared->pixelUpperBound = 255;
    }

    if(config.find("opencl") != config.end())
    {
        if(cv::ocl::haveOpenCL())
        {
            bool useOcl = config["opencl"] == "true";
            cv::ocl::setUseOpenCL(useOcl);
            openCl->setValue(cv::ocl::useOpenCL());
        }
        else
        {
            DebugOut(DebugOut::Warning) << "You really don't have openCL support." << endl;
        }
    }

    if(config.find("ddd") != config.end())
    {
        shared->ddd = config["ddd"] == "true";
    }

    if(config.find("logging") != config.end())
    {
        shared->loggingOn = config["logging"] == "true";
    }

    if(shared->ddd)
    {
        faceCascade = amb::make_unique(new cv::CascadeClassifier());
        eyeCascade = amb::make_unique(new cv::CascadeClassifier());

        std::string faceFile = config["faceCascade"];

        if(!faceCascade->load(faceFile))
        {
            DebugOut(DebugOut::Warning) << "Could not load face cascade: " << faceFile << ". Disabling ddd" << endl;
            shared->ddd = false;
        }

        std::string eyeFile = config["eyeCascade"];

        if(!eyeCascade->load(eyeFile))
        {
            DebugOut(DebugOut::Warning) << "Could not load eye cascade: " << eyeFile << ". Disabling ddd" << endl;
            shared->ddd = false;
        }
    }

    routingEngine->subscribeToProperty(VehicleProperty::VehicleSpeed, &parent);
    routingEngine->subscribeToProperty(VehicleProperty::Latitude, &parent);
    routingEngine->subscribeToProperty(VehicleProperty::Longitude, &parent);
}

OpenCvLuxPlugin::~OpenCvLuxPlugin()
{

}

const string OpenCvLuxPlugin::uuid() const
{
    return "3c7a1ea0-7d2e-11e2-9e96-0800200c9a66";
}

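// For a one-shot property request the camera may not be running yet, so the
// capture device is opened and a single frame is grabbed before the request is
// handed back to the base implementation.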
void OpenCvLuxPlugin::getPropertyAsync(AsyncPropertyReply *reply)
{
    reply->timedout = [this](AsyncPropertyReply* reply) {
        removeOne(&replyQueue, reply);
    };

    if(!shared->m_capture || !shared->m_capture->isOpened())
    {
        /// we want to turn on the camera for one shot to get an image and determine the intensity
        init();
        grabImage(shared.get());
    }

    AmbPluginImpl::getPropertyAsync(reply);
}

void OpenCvLuxPlugin::getRangePropertyAsync(AsyncRangePropertyReply *reply)
{
    throw std::runtime_error("OpenCVLuxPlugin does not support this operation. We should never hit this method.");
}

AsyncPropertyReply *OpenCvLuxPlugin::setProperty(AsyncSetPropertyRequest request)
{
    if(request.property == VideoLogging)
    {
        shared->loggingOn = request.value->value<bool>();
    }
    if(request.property == OpenCL)
    {
        QMutexLocker lock(&mutex);
        cv::ocl::setUseOpenCL(request.value->value<bool>());
    }

    return AmbPluginImpl::setProperty(request);
}

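// The first subscriber starts the capture pipeline: init() opens the camera and
// a glib timeout then invokes grabImage() at the configured frame rate
// (1000 / fps milliseconds per tick).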
void OpenCvLuxPlugin::subscribeToPropertyChanges(VehicleProperty::Property property)
{
    if(!shared->mRequests.size())
    {
        init();
        g_timeout_add(1000 / shared->fps, grabImage, shared.get());
    }

    shared->mRequests.push_back(property);
}

void OpenCvLuxPlugin::unsubscribeToPropertyChanges(VehicleProperty::Property property)
{
    removeOne(&shared->mRequests, property);
}

void OpenCvLuxPlugin::propertyChanged(AbstractPropertyType *value)
{
    QMutexLocker lock(&mutex);
    if(value->name == VehicleProperty::VehicleSpeed)
    {
        speed = value->value<uint16_t>();
    }
    else if(value->name == VehicleProperty::Latitude)
    {
        latitude = value->value<double>();
    }
    else if(value->name == VehicleProperty::Longitude)
    {
        longitude = value->value<double>();
    }
}

void OpenCvLuxPlugin::supportedChanged(const PropertyList &)
{
    DebugOut() << "OpenCvLuxPlugin::supported changed." << endl;
}

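// GSourceFunc-style callback driven by the g_timeout_add() timer above. It only
// processes every fourth tick, dispatches the lux evaluation (and optional video
// logging) either inline or via QtConcurrent when "threaded" is enabled, and
// keeps the timer alive by returning true while there are active requests.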
static int grabImage(void *data)
{
    OpenCvLuxPlugin::Shared* shared = static_cast<OpenCvLuxPlugin::Shared*>(data);

    shared->frameCount++;

    if(shared->frameCount >= shared->fps)
        shared->frameCount = 0;

    if(shared->frameCount % 4 != 0)
    {
        return true;
    }

    if(!shared->m_capture->isOpened())
        return false;

    cv::UMat m_image;

    *(shared->m_capture.get()) >> m_image;

    if(shared->ddd)
    {
        try
        {
            shared->parent->detectEyes(m_image);
        }
        catch(...)
        {
            DebugOut(DebugOut::Warning) << "DDD detection exception" << endl;
        }
    }

    if(shared->threaded)
    {
        QFutureWatcher<uint> *watcher = new QFutureWatcher<uint>();
        QObject::connect(watcher, &QFutureWatcher<uint>::finished, shared->parent, &OpenCvLuxPlugin::imgProcResult);

        QFuture<uint> future = QtConcurrent::run(evalImage, m_image, shared);
        watcher->setFuture(future);

        QtConcurrent::run(shared->parent, &OpenCvLuxPlugin::writeVideoFrame, m_image);
    }
    else
    {
        if(shared->parent->videoLogging->value<bool>())
            shared->parent->writeVideoFrame(m_image);

        try
        {
            uint16_t lux = evalImage(m_image, shared);
            shared->parent->updateProperty(lux);
        }
        catch(...)
        {
            DebugOut(DebugOut::Warning) << "Exception caught " << __FUNCTION__ << endl;
        }

        //detectLight(m_image, shared);
    }

    if(shared->mRequests.size())
    {
        return true;
    }

    return false;
}

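// Estimates illuminance from the mean pixel intensity of the frame:
//   lux = max(mean - pixelLowerBound, 0) * (130000 / pixelUpperBound)
// With the default bounds [0, 255], a mean intensity of 128 maps to roughly
// 128 * (130000 / 255) ~= 65250 lux, and a fully saturated frame maps to
// 130000 lux, which is in the range commonly cited for direct sunlight.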
static uint evalImage(cv::UMat qImg, OpenCvLuxPlugin::Shared *shared)
{
    if(qImg.empty())
    {
        DebugOut(DebugOut::Warning) << "Empty image frame." << endl;
        return 0;
    }

    cv::Scalar avgPixelIntensity = cv::mean(qImg);

    double val = avgPixelIntensity.val[0];

    double qualifiedPixel = (val - shared->pixelLowerBound);

    if(qualifiedPixel < 0) qualifiedPixel = 0;

    uint lux = qualifiedPixel * (130000.0 / shared->pixelUpperBound);

    DebugOut(7) << "average pixel value: " << qualifiedPixel << " lux: " << lux << endl;

    return lux;
}

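// Lazily opens the capture device (index parsed from the "device" option, or 0
// when unset) and, for video logging, a cv::VideoWriter using the configured
// "codec" fourcc and "logfile" path (defaulting to MJPG and /tmp/video.avi).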
void OpenCvLuxPlugin::init()
{
    if(!shared->m_capture)
    {
        if(device == "")
            shared->m_capture = amb::make_unique(new cv::VideoCapture(0));
        else
            shared->m_capture = amb::make_unique(new cv::VideoCapture(atoi(device.c_str())));
    }

    if(!shared->m_capture->isOpened())
    {
        DebugOut() << "we failed to open camera device (" << device << ") or no camera found" << endl;
        return;
    }

    if(!shared->mWriter || !shared->mWriter->isOpened())
    {
        cv::Size s = cv::Size((int) shared->m_capture->get(cv::CAP_PROP_FRAME_WIDTH),
                              (int) shared->m_capture->get(cv::CAP_PROP_FRAME_HEIGHT));

        std::string codec = configuration["codec"];

        if(codec.empty() || codec.size() != 4)
        {
            DebugOut(DebugOut::Warning) << "Invalid codec: " << codec << ". Using default: MJPG" << endl;
            codec = "MJPG";
        }

        std::string filename = configuration["logfile"];

        if(filename.empty()) filename = "/tmp/video.avi";

        boost::algorithm::to_upper(codec);

        shared->mWriter = amb::make_unique(new cv::VideoWriter(filename,
            cv::VideoWriter::fourcc(codec.at(0), codec.at(1), codec.at(2), codec.at(3)), 30, s));
    }

    DebugOut() << "camera frame width: " << shared->m_capture->get(cv::CAP_PROP_FRAME_WIDTH) << endl;
    DebugOut() << "camera frame height: " << shared->m_capture->get(cv::CAP_PROP_FRAME_HEIGHT) << endl;
    DebugOut() << "camera frame fps: " << shared->m_capture->get(cv::CAP_PROP_FPS) << endl;
}

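// Writes one annotated frame to the video log. Frames are only recorded while
// logging is enabled and the vehicle is moving; the current speed, position and
// timestamp are rendered onto the frame before it is handed to the writer.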
void OpenCvLuxPlugin::writeVideoFrame(cv::UMat f)
{
    QMutexLocker locker(&mutex);

    if(shared->loggingOn && speed > 0)
    {
        cv::Mat frame;
        f.copyTo(frame);

        std::stringstream str;
        str << "Speed: " << speed << " kph, Lat/Lon: " << latitude << "/" << longitude;

        std::stringstream datestr;
        datestr << QDateTime::currentDateTime().toString().toStdString();

        locker.unlock();

        std::string text = str.str();

        int fontFace = cv::FONT_HERSHEY_SIMPLEX;
        double fontScale = 1;
        int thickness = 3;

        int baseline = 0;
        cv::Size textSize = cv::getTextSize(text, fontFace, fontScale, thickness, &baseline);
        cv::Size dateTextSize = cv::getTextSize(datestr.str(), fontFace, fontScale, thickness, &baseline);

        baseline += thickness;

        // center the text
        cv::Point textOrg((frame.cols - textSize.width) / 2, (frame.rows - textSize.height));
        cv::Point dateOrg((frame.cols - dateTextSize.width) / 2, dateTextSize.height);

        // then put the text itself
        cv::putText(frame, text, textOrg, fontFace, fontScale, cv::Scalar::all(255), thickness, 8);
        cv::putText(frame, datestr.str(), dateOrg, fontFace, fontScale, cv::Scalar::all(255), thickness, 8);

        (*shared->mWriter) << frame;
        (*shared->mWriter) << frame;
    }
}

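// Pushes a new ExteriorBrightness value to the routing engine, but only when the
// lux reading actually changed and at least one subscriber is listening.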
void OpenCvLuxPlugin::updateProperty(uint16_t lux)
{
    extBrightness->setValue(lux);

    if(lux != lastLux && shared->mRequests.size())
    {
        lastLux = lux;
        routingEngine->updateProperty(extBrightness.get(), uuid());
    }
}

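// Qt slot connected to QFutureWatcher::finished in the threaded path of
// grabImage(); it collects the lux value computed by evalImage() and publishes it.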
void OpenCvLuxPlugin::imgProcResult()
{
    try
    {
        QFutureWatcher<uint> *watcher = dynamic_cast<QFutureWatcher<uint>*>(sender());

        uint lux = watcher->result();
        shared->parent->updateProperty(lux);

        watcher->deleteLater();
    }
    catch(...)
    {
        DebugOut(DebugOut::Warning) << "exception caught getting img processing result" << endl;
    }
}

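// Traffic-light color heuristic: blurs the frame, runs a Hough circle transform
// and classifies each detected circle as red, green or yellow from its mean BGR
// channel values. The only call to it in this file is commented out in grabImage().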
TrafficLight::Color detectLight(cv::UMat img, OpenCvLuxPlugin::Shared *shared)
{
    cv::UMat gray;

    cv::cvtColor(img, gray, cv::COLOR_BGR2GRAY);
    cv::GaussianBlur(gray, gray, cv::Size(9, 9), 2, 2);

    std::vector<cv::Vec3f> circles;

    //cv::HoughCircles(gray, circles, CV_HOUGH_GRADIENT, 1, gray.rows/8, 200, 100, 0, 0);

    cv::HoughCircles(gray, circles, cv::HOUGH_GRADIENT, 2, 30, 231, 50, 0, 0);

    for(size_t i = 0; i < circles.size(); i++)
    {
        cv::Point center(cvRound(circles[i][0]), cvRound(circles[i][1]));
        int radius = cvRound(circles[i][2]);

        cv::Rect rect(center.x - radius / 2, center.y - radius / 2, radius, radius);

        try
        {
            cv::UMat light(img, rect);

            cv::circle(img, center, radius, cv::Scalar(0, 0, 255), 3, 8, 0);

            cv::rectangle(img, rect.tl(), rect.br(), cv::Scalar(255, 0, 0));

            cv::Scalar avgPixel = cv::mean(light);

            if(avgPixel[2] > 128 && avgPixel[0] < 128 && avgPixel[1] < 128)
            {
                DebugOut(1) << "Red Light!!!" << endl;
                return TrafficLight::Red;
            }
            else if(avgPixel[1] > 128 && avgPixel[0] < 128 && avgPixel[2] < 128)
            {
                DebugOut(1) << "Green Light!!!" << endl;
                return TrafficLight::Green;
            }
            else if(avgPixel[0] > 128 && avgPixel[1] < 128 && avgPixel[2] < 128)
            {
                DebugOut(1) << "Yellow Light!!!" << endl;
                return TrafficLight::Yellow;
            }
        }
        catch(...)
        {
            // the ROI may fall outside the image bounds; ignore and try the next circle
        }
    }

    // no light detected (assumes TrafficLight::Unknown is defined alongside Red/Yellow/Green)
    return TrafficLight::Unknown;
}

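// Driver-drowsiness detection: runs the Haar face cascade on the grayscale frame,
// then the eye cascade inside each detected face. When no eyes are found within a
// face the DriverDrowsiness property is raised, and it is cleared again as soon as
// eyes are detected; detections are also drawn onto the frame for debugging.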
void OpenCvLuxPlugin::detectEyes(cv::UMat frame)
{
    if(frame.empty())
        return;

    bool hasEyes = false;
    std::vector<cv::Rect> faces;
    cv::UMat frameGray;

    cv::cvtColor(frame, frameGray, cv::COLOR_BGR2GRAY);
    cv::equalizeHist(frameGray, frameGray);
    faceCascade->detectMultiScale(frameGray, faces, 1.1, 2, 0 | cv::CASCADE_SCALE_IMAGE, cv::Size(30, 30));

    for(size_t i = 0; i < faces.size(); i++)
    {
        cv::Point center(faces[i].x + faces[i].width*0.5, faces[i].y + faces[i].height*0.5);
        cv::ellipse(frame, center, cv::Size(faces[i].width*0.5, faces[i].height*0.5), 0, 0, 360, cv::Scalar(255, 0, 255), 4, 8, 0);

        std::vector<cv::Rect> eyes;

        cv::UMat faceROI(frameGray(faces[i]));
        eyeCascade->detectMultiScale(faceROI, eyes, 1.1, 2, 0 | cv::CASCADE_SCALE_IMAGE, cv::Size(30, 30));

        for(size_t j = 0; j < eyes.size(); j++)
        {
            cv::Point center(faces[i].x + eyes[j].x + eyes[j].width*0.5, faces[i].y + eyes[j].y + eyes[j].height*0.5);
            int radius = cvRound((eyes[j].width + eyes[j].height)*0.25);
            cv::circle(frame, center, radius, cv::Scalar(255, 0, 0), 4, 8, 0);
        }

        if(eyes.size())
        {
            hasEyes = true;
            DebugOut() << "Number of eyes: " << eyes.size() << endl;

            if(driverDrowsiness->value<bool>())
            {
                driverDrowsiness->setValue(false);
                routingEngine->updateProperty(driverDrowsiness.get(), this->uuid());
            }
        }
        else
        {
            if(!driverDrowsiness->value<bool>())
            {
                driverDrowsiness->setValue(true);
                routingEngine->updateProperty(driverDrowsiness.get(), this->uuid());
            }
        }
    }
}