plugins/opencvlux/opencvluxplugin.cpp
/*
Copyright (C) 2012 Intel Corporation

This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.

This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
Lesser General Public License for more details.

You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301  USA
*/

#include "opencvluxplugin.h"
#include "timestamp.h"
#include <listplusplus.h>
#include <superptr.hpp>
#include <ambplugin.h>

#include <iostream>
#include <thread>
#include <opencv2/opencv.hpp>
#include <opencv2/core/ocl.hpp>

#include <QFuture>
#include <QFutureWatcher>
#include <QtConcurrent/QtConcurrent>

using namespace std;

const std::string VideoLogging = "VideoLogging";
const std::string DriverDrowsiness = "DriverDrowsiness";
const std::string OpenCL = "OpenCL";

#include "debugout.h"

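/* Plugin factory entry point: the routing engine loads this shared library and
 * calls create() to construct and initialize an AmbPlugin-wrapped instance. */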
extern "C" void create(AbstractRoutingEngine* routingengine, map<string, string> config)
{
	auto plugin = new AmbPlugin<OpenCvLuxPlugin>(routingengine, config);
	plugin->init();
}

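/* Constructor: registers the supported properties (VideoLogging, DriverDrowsiness,
 * OpenCL, ExteriorBrightness) and reads the plugin configuration. Recognized config
 * keys: threaded, kinect, fps, device, pixelLowerBound, pixelUpperBound, opencl,
 * ddd, logging, faceCascade, eyeCascade (plus codec and logfile, used in init()). */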
OpenCvLuxPlugin::OpenCvLuxPlugin(AbstractRoutingEngine* re, map<string, string> config, AbstractSource &parent)
	:AmbPluginImpl(re, config, parent), lastLux(0), speed(0), latitude(0), longitude(0)
{
	videoLogging = addPropertySupport(Zone::None, [](){
		return new BasicPropertyType<bool>(VideoLogging, false);
	});

	driverDrowsiness = addPropertySupport(Zone::None, [](){
		return new OpenCvLuxPlugin::DriverDrowsinessType(DriverDrowsiness, false);
	});

	openCl = addPropertySupport(Zone::None, [](){
		return new BasicPropertyType<bool>(OpenCL, false);
	});

	extBrightness = addPropertySupport<VehicleProperty::ExteriorBrightnessType>(Zone::None);

	shared = amb::make_unique(new Shared);
	shared->parent = this;

	shared->m_capture = nullptr;
	shared->mWriter = nullptr;

	shared->threaded = false;
	shared->kinect = false;
	shared->useCuda = false;
	shared->loggingOn = false;
	shared->ddd = false;

	shared->fps = 30;
	device = "0";
	shared->pixelLowerBound = 0;
	shared->pixelUpperBound = 255;

	if(config.find("threaded") != config.end())
	{
		shared->threaded = config["threaded"] == "true";
	}

	if(config.find("kinect") != config.end())
	{
		shared->kinect = config["kinect"] == "true";
	}

	if(config.find("fps") != config.end())
	{
		shared->fps = boost::lexical_cast<int>(config["fps"]);
	}

	if(config.find("device") != config.end())
	{
		device = config["device"].c_str();
	}

	if(config.find("pixelLowerBound") != config.end())
	{
		shared->pixelLowerBound = boost::lexical_cast<int>(config["pixelLowerBound"]);

		if(shared->pixelLowerBound < 0)
		{
			shared->pixelLowerBound = 0;
		}
	}

	if(config.find("pixelUpperBound") != config.end())
	{
		shared->pixelUpperBound = boost::lexical_cast<int>(config["pixelUpperBound"]);

		if(shared->pixelUpperBound > 255)
			shared->pixelUpperBound = 255;
	}

	if(config.find("opencl") != config.end())
	{
		if(cv::ocl::haveOpenCL())
		{
			bool useOcl = config["opencl"] == "true";
			cv::ocl::setUseOpenCL(useOcl);
			openCl->setValue(cv::ocl::useOpenCL());
		}
		else
		{
			DebugOut(DebugOut::Warning) << "OpenCL support is not available on this system." << endl;
		}
	}

	if(config.find("ddd") != config.end())
	{
		shared->ddd = config["ddd"] == "true";
	}

	if(config.find("logging") != config.end())
	{
		shared->loggingOn = config["logging"] == "true";
	}

	if(shared->ddd)
	{
		faceCascade = amb::make_unique(new cv::CascadeClassifier());
		eyeCascade = amb::make_unique(new cv::CascadeClassifier());

		std::string faceFile = config["faceCascade"];

		if(!faceCascade->load(faceFile))
		{
			DebugOut(DebugOut::Warning) << "Could not load face cascade: " << faceFile << ". Disabling ddd" << endl;
			shared->ddd = false;
		}

		std::string eyeFile = config["eyeCascade"];

		if(!eyeCascade->load(eyeFile))
		{
			DebugOut(DebugOut::Warning) << "Could not load eye cascade: " << eyeFile << ". Disabling ddd" << endl;
			shared->ddd = false;
		}
	}

	routingEngine->subscribeToProperty(VehicleProperty::VehicleSpeed, &parent);
	routingEngine->subscribeToProperty(VehicleProperty::Latitude, &parent);
	routingEngine->subscribeToProperty(VehicleProperty::Longitude, &parent);
}

OpenCvLuxPlugin::~OpenCvLuxPlugin()
{

}

const string OpenCvLuxPlugin::uuid() const
{
	return "3c7a1ea0-7d2e-11e2-9e96-0800200c9a66";
}

void OpenCvLuxPlugin::getPropertyAsync(AsyncPropertyReply *reply)
{
	reply->timedout = [this](AsyncPropertyReply* reply) {
		removeOne(&replyQueue, reply);
	};

	if(!shared->m_capture || !shared->m_capture->isOpened())
	{
		/// we want to turn on the camera for one shot to get an image and determine the intensity
		init();
		grabImage(shared.get());
	}

	AmbPluginImpl::getPropertyAsync(reply);
}

void OpenCvLuxPlugin::getRangePropertyAsync(AsyncRangePropertyReply *reply)
{
	throw std::runtime_error("OpenCVLuxPlugin does not support this operation.  We should never hit this method.");
}

AsyncPropertyReply *OpenCvLuxPlugin::setProperty(AsyncSetPropertyRequest request)
{
	if(request.property == VideoLogging)
	{
		shared->loggingOn = request.value->value<bool>();
	}
	if(request.property == OpenCL)
	{
		QMutexLocker lock(&mutex);
		cv::ocl::setUseOpenCL(request.value->value<bool>());
	}

	return AmbPluginImpl::setProperty(request);
}

void OpenCvLuxPlugin::subscribeToPropertyChanges(VehicleProperty::Property property)
{
	if(!shared->mRequests.size())
	{
		init();
		g_timeout_add(1000 / shared->fps, grabImage, shared.get());
	}

	shared->mRequests.push_back(property);
}

void OpenCvLuxPlugin::unsubscribeToPropertyChanges(VehicleProperty::Property property)
{
	removeOne(&shared->mRequests, property);
}

void OpenCvLuxPlugin::propertyChanged(AbstractPropertyType *value)
{
	QMutexLocker lock(&mutex);
	if(value->name == VehicleProperty::VehicleSpeed)
	{
		speed = value->value<uint16_t>();
	}
	else if(value->name == VehicleProperty::Latitude)
	{
		latitude = value->value<double>();
	}
	else if(value->name == VehicleProperty::Longitude)
	{
		longitude = value->value<double>();
	}
}

void OpenCvLuxPlugin::supportedChanged(const PropertyList &)
{
	DebugOut() << "OpenCvLuxPlugin::supported changed." << endl;
}

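/* glib timeout callback (see g_timeout_add in subscribeToPropertyChanges): grabs a
 * frame from the capture device, runs drowsiness detection and lux evaluation
 * (threaded via QtConcurrent or inline), and returns true to keep the timer alive
 * while there are active subscriptions. */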
static int grabImage(void *data)
{
	OpenCvLuxPlugin::Shared* shared = static_cast<OpenCvLuxPlugin::Shared*>(data);

	shared->frameCount++;

	if(shared->frameCount >= shared->fps)
		shared->frameCount = 0;

	// only process every 4th frame; skipped frames still return true to keep the timer alive
	if(shared->frameCount % 4 != 0)
	{
		return true;
	}

	if(!shared->m_capture->isOpened())
		return false;

	cv::UMat m_image;

	*(shared->m_capture.get()) >> m_image;

	if(shared->ddd)
	{
		try
		{
			shared->parent->detectEyes(m_image);
		}
		catch(...)
		{
			DebugOut(DebugOut::Warning) << "DDD detection exception" << endl;
		}
	}

	if(shared->threaded)
	{
		QFutureWatcher<uint> *watcher = new QFutureWatcher<uint>();
		QObject::connect(watcher, &QFutureWatcher<uint>::finished, shared->parent, &OpenCvLuxPlugin::imgProcResult);

		QFuture<uint> future = QtConcurrent::run(evalImage, m_image, shared);
		watcher->setFuture(future);

		QtConcurrent::run(shared->parent, &OpenCvLuxPlugin::writeVideoFrame, m_image);
	}
	else
	{
		if(shared->parent->videoLogging->value<bool>())
			shared->parent->writeVideoFrame(m_image);

		try
		{
			uint16_t lux = evalImage(m_image, shared);
			shared->parent->updateProperty(lux);
		}
		catch(...)
		{
			DebugOut(DebugOut::Warning) << "Exception caught " << __FUNCTION__ << endl;
		}

		//detectLight(m_image, shared);
	}

	if(shared->mRequests.size())
	{
		return true;
	}

	return false;
}

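/* Estimates exterior brightness from a frame: takes the mean pixel intensity,
 * clamps it to the configured pixel bounds and scales the result to a lux value
 * (pixelUpperBound maps to roughly 130000 lux, i.e. direct sunlight). */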
static uint evalImage(cv::UMat qImg, OpenCvLuxPlugin::Shared *shared)
{
	if(qImg.empty())
	{
		DebugOut(DebugOut::Warning) << "Empty image frame." << endl;
		return 0;
	}

	cv::Scalar avgPixelIntensity = cv::mean(qImg);

	double val = avgPixelIntensity.val[0];

	double qualifiedPixel = (val - shared->pixelLowerBound);

	if(qualifiedPixel < 0) qualifiedPixel = 0;

	uint lux = qualifiedPixel * (130000.0 / shared->pixelUpperBound);

	DebugOut(7) << "average pixel value: " << qualifiedPixel << " lux: " << lux << endl;

	return lux;
}

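/* Opens the capture device (configured "device" index, defaulting to camera 0) and,
 * if needed, a cv::VideoWriter for video logging using the configured codec and
 * logfile path. */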
void OpenCvLuxPlugin::init()
{
	if(!shared->m_capture)
	{
		if(device == "")
			shared->m_capture = amb::make_unique(new cv::VideoCapture(0));
		else
			shared->m_capture = amb::make_unique(new cv::VideoCapture(atoi(device.c_str())));
	}

	if(!shared->m_capture->isOpened())
	{
		DebugOut() << "Failed to open camera device (" << device << ") or no camera found" << endl;
		return;
	}

	if(!shared->mWriter || !shared->mWriter->isOpened())
	{
		cv::Size s = cv::Size((int) shared->m_capture->get(cv::CAP_PROP_FRAME_WIDTH),
							  (int) shared->m_capture->get(cv::CAP_PROP_FRAME_HEIGHT));

		std::string codec = configuration["codec"];

		if(codec.empty() || codec.size() != 4)
		{
			DebugOut(DebugOut::Warning) << "Invalid codec: " << codec << ". Using default: MJPG" << endl;
			codec = "MJPG";
		}

		std::string filename = configuration["logfile"];

		if(filename.empty()) filename = "/tmp/video.avi";

		boost::algorithm::to_upper(codec);

		shared->mWriter = amb::make_unique(new cv::VideoWriter(filename, cv::VideoWriter::fourcc(codec.at(0), codec.at(1), codec.at(2), codec.at(3)), 30, s));
	}

	DebugOut() << "camera frame width: " << shared->m_capture->get(cv::CAP_PROP_FRAME_WIDTH) << endl;
	DebugOut() << "camera frame height: " << shared->m_capture->get(cv::CAP_PROP_FRAME_HEIGHT) << endl;
	DebugOut() << "camera frame fps: " << shared->m_capture->get(cv::CAP_PROP_FPS) << endl;

	return;
}

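/* Writes a frame to the video log when logging is enabled and the vehicle is moving,
 * overlaying the current speed, latitude/longitude and a timestamp on the image. */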
void OpenCvLuxPlugin::writeVideoFrame(cv::UMat f)
{
	QMutexLocker locker(&mutex);

	if(shared->loggingOn && speed > 0)
	{
		cv::Mat frame;
		f.copyTo(frame);

		std::stringstream str;
		str << "Speed: " << speed << " kph, Lat/Lon: " << latitude << "/" << longitude;

		std::stringstream datestr;
		datestr << QDateTime::currentDateTime().toString().toStdString();

		locker.unlock();

		std::string text = str.str();

		int fontFace = cv::FONT_HERSHEY_SIMPLEX;
		double fontScale = 1;
		int thickness = 3;

		int baseline = 0;
		cv::Size textSize = cv::getTextSize(text, fontFace, fontScale, thickness, &baseline);

		cv::Size dateTextSize = cv::getTextSize(datestr.str(), fontFace, fontScale, thickness, &baseline);

		baseline += thickness;

		// center the text
		cv::Point textOrg((frame.cols - textSize.width) / 2,
						  (frame.rows - textSize.height));

		cv::Point dateOrg((frame.cols - dateTextSize.width) / 2, dateTextSize.height);

		// then put the text itself
		cv::putText(frame, text, textOrg, fontFace, fontScale,
					cv::Scalar::all(255), thickness, 8);

		cv::putText(frame, datestr.str(), dateOrg, fontFace, fontScale,
					cv::Scalar::all(255), thickness, 8);

		(*shared->mWriter) << frame;
		(*shared->mWriter) << frame;
	}
}

void OpenCvLuxPlugin::updateProperty(uint16_t lux)
{
	extBrightness->setValue(lux);

	if(lux != lastLux && shared->mRequests.size())
	{
		lastLux = lux;
		routingEngine->updateProperty(extBrightness.get(), uuid());
	}
}

void OpenCvLuxPlugin::imgProcResult()
{
	try
	{
		QFutureWatcher<uint> *watcher = dynamic_cast<QFutureWatcher<uint>*>(sender());

		uint lux = watcher->result();
		shared->parent->updateProperty(lux);

		watcher->deleteLater();
	}
	catch(...)
	{
		DebugOut(DebugOut::Warning) << "exception caught getting img processing result" << endl;
	}
}

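/* Experimental traffic-light detection (currently unused; see the commented-out call
 * in grabImage): finds circles with a Hough transform and classifies each one as
 * red, yellow or green from the mean BGR values inside the circle. */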
TrafficLight::Color detectLight(cv::UMat img, OpenCvLuxPlugin::Shared *shared)
{
	cv::UMat gray;

	cv::cvtColor(img, gray, cv::COLOR_BGR2GRAY);
	cv::GaussianBlur(gray, gray, cv::Size(9, 9), 2, 2);

	std::vector<cv::Vec3f> circles;

	//cv::HoughCircles(gray, circles, CV_HOUGH_GRADIENT, 1, gray.rows/8, 200, 100, 0, 0);

	cv::HoughCircles(gray, circles, cv::HOUGH_GRADIENT, 2, 30, 231, 50, 0, 0);

	for(size_t i = 0; i < circles.size(); i++)
	{
		cv::Point center(cvRound(circles[i][0]), cvRound(circles[i][1]));
		int radius = cvRound(circles[i][2]);

		cv::Rect rect(center.x - radius / 2, center.y - radius / 2, radius, radius);

		try {
			cv::UMat light(img, rect);

			cv::circle(img, center, radius, cv::Scalar(0, 0, 255), 3, 8, 0);

			cv::rectangle(img, rect.tl(), rect.br(), cv::Scalar(255, 0, 0));

			cv::Scalar avgPixel = cv::mean(light);

			if(avgPixel[2] > 128 && avgPixel[0] < 128 && avgPixel[1] < 128)
			{
				DebugOut(1) << "Red Light!!!" << endl;
				return TrafficLight::Red;
			}
			else if(avgPixel[1] > 128 && avgPixel[0] < 128 && avgPixel[2] < 128)
			{
				DebugOut(1) << "Green Light!!!" << endl;
				return TrafficLight::Green;
			}
			else if(avgPixel[0] > 128 && avgPixel[1] < 128 && avgPixel[2] < 128)
			{
				DebugOut(1) << "Yellow Light!!!" << endl;
				return TrafficLight::Yellow;
			}
		}
		catch(...)
		{
			// the candidate rect can fall partially outside the frame; ignore it
		}
	}

	// nothing detected; assumes TrafficLight::Color provides an Unknown sentinel value
	return TrafficLight::Unknown;
}

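/* Driver drowsiness detection: runs the Haar cascades on the frame to find faces and
 * then eyes within each face; DriverDrowsiness is set to true when a face is found
 * but no eyes are detected, and back to false once eyes are visible again. */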
void OpenCvLuxPlugin::detectEyes(cv::UMat frame)
{
	if(frame.empty())
		return;

	bool hasEyes = false;
	std::vector<cv::Rect> faces;
	cv::UMat frameGray;

	cv::cvtColor(frame, frameGray, cv::COLOR_BGR2GRAY);
	cv::equalizeHist(frameGray, frameGray);
	faceCascade->detectMultiScale(frameGray, faces, 1.1, 2, 0 | cv::CASCADE_SCALE_IMAGE, cv::Size(30, 30));

	for(size_t i = 0; i < faces.size(); i++)
	{
		cv::Point center(faces[i].x + faces[i].width * 0.5, faces[i].y + faces[i].height * 0.5);
		cv::ellipse(frame, center, cv::Size(faces[i].width * 0.5, faces[i].height * 0.5), 0, 0, 360, cv::Scalar(255, 0, 255), 4, 8, 0);

		std::vector<cv::Rect> eyes;

		cv::UMat faceROI(frameGray(faces[i]));
		eyeCascade->detectMultiScale(faceROI, eyes, 1.1, 2, 0 | cv::CASCADE_SCALE_IMAGE, cv::Size(30, 30));

		for(size_t j = 0; j < eyes.size(); j++)
		{
			cv::Point center(faces[i].x + eyes[j].x + eyes[j].width * 0.5, faces[i].y + eyes[j].y + eyes[j].height * 0.5);
			int radius = cvRound((eyes[j].width + eyes[j].height) * 0.25);
			cv::circle(frame, center, radius, cv::Scalar(255, 0, 0), 4, 8, 0);
		}

		if(eyes.size())
		{
			hasEyes = true;
			DebugOut() << "Number of eyes: " << eyes.size() << endl;

			if(driverDrowsiness->value<bool>())
			{
				driverDrowsiness->setValue(false);
				routingEngine->updateProperty(driverDrowsiness.get(), this->uuid());
			}
		}
		else
		{
			if(!driverDrowsiness->value<bool>())
			{
				driverDrowsiness->setValue(true);
				routingEngine->updateProperty(driverDrowsiness.get(), this->uuid());
			}
		}
	}
}