/*
 *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include "webrtc/video_engine/vie_capturer.h"

#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/modules/interface/module_common_types.h"
#include "webrtc/modules/utility/interface/process_thread.h"
#include "webrtc/modules/video_capture/include/video_capture_factory.h"
#include "webrtc/modules/video_processing/main/interface/video_processing.h"
#include "webrtc/modules/video_render/include/video_render_defines.h"
#include "webrtc/system_wrappers/interface/clock.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/event_wrapper.h"
#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/system_wrappers/interface/thread_wrapper.h"
#include "webrtc/system_wrappers/interface/trace_event.h"
#include "webrtc/video_engine/include/vie_image_process.h"
#include "webrtc/video_engine/overuse_frame_detector.h"
#include "webrtc/video_engine/vie_defines.h"
#include "webrtc/video_engine/vie_encoder.h"

namespace webrtc {

const int kThreadWaitTimeMs = 100;

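// Spins up the dedicated capture thread that delivers frames downstream and
// registers the CPU overuse detector with the shared module process thread.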
ViECapturer::ViECapturer(int capture_id,
                         int engine_id,
                         const Config& config,
                         ProcessThread& module_process_thread)
    : ViEFrameProviderBase(capture_id, engine_id),
      capture_cs_(CriticalSectionWrapper::CreateCriticalSection()),
      deliver_cs_(CriticalSectionWrapper::CreateCriticalSection()),
      capture_module_(NULL),
      external_capture_module_(NULL),
      module_process_thread_(module_process_thread),
      capture_id_(capture_id),
      incoming_frame_cs_(CriticalSectionWrapper::CreateCriticalSection()),
      capture_thread_(*ThreadWrapper::CreateThread(ViECaptureThreadFunction,
                                                   this, kHighPriority,
                                                   "ViECaptureThread")),
      capture_event_(*EventWrapper::Create()),
      deliver_event_(*EventWrapper::Create()),
      effect_filter_(NULL),
      image_proc_module_(NULL),
      image_proc_module_ref_counter_(0),
      deflicker_frame_stats_(NULL),
      brightness_frame_stats_(NULL),
      current_brightness_level_(Normal),
      reported_brightness_level_(Normal),
      denoising_enabled_(false),
      observer_cs_(CriticalSectionWrapper::CreateCriticalSection()),
      observer_(NULL),
      overuse_detector_(new OveruseFrameDetector(Clock::GetRealTimeClock())) {
  unsigned int t_id = 0;
  if (!capture_thread_.Start(t_id)) {
    assert(false);
  }
  module_process_thread_.RegisterModule(overuse_detector_.get());
}

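// Stops the capture thread and the capture module, then releases the image
// processing module and frame statistics owned by this capturer.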
ViECapturer::~ViECapturer() {
  module_process_thread_.DeRegisterModule(overuse_detector_.get());

  // Stop the thread.
  deliver_cs_->Enter();
  capture_cs_->Enter();
  capture_thread_.SetNotAlive();
  capture_event_.Set();
  capture_cs_->Leave();
  deliver_cs_->Leave();

  // Stop the camera input.
  if (capture_module_) {
    module_process_thread_.DeRegisterModule(capture_module_);
    capture_module_->DeRegisterCaptureDataCallback();
    capture_module_->Release();
    capture_module_ = NULL;
  }
  if (capture_thread_.Stop()) {
    // Thread stopped.
    delete &capture_thread_;
    delete &capture_event_;
    delete &deliver_event_;
  } else {
    assert(false);
  }

  if (image_proc_module_) {
    VideoProcessingModule::Destroy(image_proc_module_);
  }
  if (deflicker_frame_stats_) {
    delete deflicker_frame_stats_;
    deflicker_frame_stats_ = NULL;
  }
  delete brightness_frame_stats_;
}

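// Factory method wrapping an externally created VideoCaptureModule. Returns
// NULL if initialization fails.
//
// Usage sketch (illustrative only; assumes |module| was obtained from
// VideoCaptureFactory::Create() and |process_thread| is the engine's running
// ProcessThread):
//   ViECapturer* capturer = ViECapturer::CreateViECapture(
//       capture_id, engine_id, config, module, process_thread);
//   if (capturer != NULL)
//     capturer->Start(CaptureCapability());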
ViECapturer* ViECapturer::CreateViECapture(
    int capture_id,
    int engine_id,
    const Config& config,
    VideoCaptureModule* capture_module,
    ProcessThread& module_process_thread) {
  ViECapturer* capture = new ViECapturer(capture_id, engine_id, config,
                                         module_process_thread);
  if (!capture || capture->Init(capture_module) != 0) {
    delete capture;
    capture = NULL;
  }
  return capture;
}

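// Takes a reference to |capture_module| (AddRef), registers this capturer as
// its data callback and hooks the module into the process thread.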
int32_t ViECapturer::Init(VideoCaptureModule* capture_module) {
  assert(capture_module_ == NULL);
  capture_module_ = capture_module;
  capture_module_->RegisterCaptureDataCallback(*this);
  capture_module_->AddRef();
  if (module_process_thread_.RegisterModule(capture_module_) != 0) {
    return -1;
  }

  return 0;
}

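// Factory method that creates the capture module internally: either for the
// device identified by |device_unique_idUTF8|, or, when that id is NULL, as
// an external capture module fed through IncomingFrame()/IncomingFrameI420().
// Returns NULL if initialization fails.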
ViECapturer* ViECapturer::CreateViECapture(
    int capture_id,
    int engine_id,
    const Config& config,
    const char* device_unique_idUTF8,
    const uint32_t device_unique_idUTF8Length,
    ProcessThread& module_process_thread) {
  ViECapturer* capture = new ViECapturer(capture_id, engine_id, config,
                                         module_process_thread);
  if (!capture ||
      capture->Init(device_unique_idUTF8, device_unique_idUTF8Length) != 0) {
    delete capture;
    capture = NULL;
  }
  return capture;
}

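// Creates and configures the underlying VideoCaptureModule. A NULL device id
// selects an external, application-driven capture module.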
int32_t ViECapturer::Init(const char* device_unique_idUTF8,
                          uint32_t device_unique_idUTF8Length) {
  assert(capture_module_ == NULL);
  if (device_unique_idUTF8 == NULL) {
    capture_module_ = VideoCaptureFactory::Create(
        ViEModuleId(engine_id_, capture_id_), external_capture_module_);
  } else {
    capture_module_ = VideoCaptureFactory::Create(
        ViEModuleId(engine_id_, capture_id_), device_unique_idUTF8);
  }
  if (!capture_module_) {
    return -1;
  }
  capture_module_->AddRef();
  capture_module_->RegisterCaptureDataCallback(*this);
  if (module_process_thread_.RegisterModule(capture_module_) != 0) {
    return -1;
  }

  return 0;
}

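// Invoked when the set of registered frame callbacks changes. If capture is
// running without a fixed capability, the device is restarted with the best
// format requested by the observers.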
int ViECapturer::FrameCallbackChanged() {
  if (Started() && !CaptureCapabilityFixed()) {
    // Reconfigure the camera if a new size is required and the capture device
    // does not provide encoded frames.
    int best_width;
    int best_height;
    int best_frame_rate;
    VideoCaptureCapability capture_settings;
    capture_module_->CaptureSettings(capture_settings);
    GetBestFormat(&best_width, &best_height, &best_frame_rate);
    if (best_width != 0 && best_height != 0 && best_frame_rate != 0) {
      if (best_width != capture_settings.width ||
          best_height != capture_settings.height ||
          best_frame_rate != capture_settings.maxFPS ||
          capture_settings.codecType != kVideoCodecUnknown) {
        Stop();
        Start(requested_capability_);
      }
    }
  }
  return 0;
}

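// Starts capturing. If |capture_capability| is not fully specified, the
// resolution and frame rate are taken from the registered observers, falling
// back to the ViE defaults.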
int32_t ViECapturer::Start(const CaptureCapability& capture_capability) {
  int width;
  int height;
  int frame_rate;
  VideoCaptureCapability capability;
  requested_capability_ = capture_capability;

  if (!CaptureCapabilityFixed()) {
    // Ask the observers for the best size.
    GetBestFormat(&width, &height, &frame_rate);
    if (width == 0) {
      width = kViECaptureDefaultWidth;
    }
    if (height == 0) {
      height = kViECaptureDefaultHeight;
    }
    if (frame_rate == 0) {
      frame_rate = kViECaptureDefaultFramerate;
    }
    capability.height = height;
    capability.width = width;
    capability.maxFPS = frame_rate;
    capability.rawType = kVideoI420;
    capability.codecType = kVideoCodecUnknown;
  } else {
    // Width, height and type specified with the call to Start, not set by
    // observers.
    capability.width = requested_capability_.width;
    capability.height = requested_capability_.height;
    capability.maxFPS = requested_capability_.maxFPS;
    capability.rawType = requested_capability_.rawType;
    capability.interlaced = requested_capability_.interlaced;
  }
  return capture_module_->StartCapture(capability);
}

int32_t ViECapturer::Stop() {
  requested_capability_ = CaptureCapability();
  return capture_module_->StopCapture();
}

bool ViECapturer::Started() {
  return capture_module_->CaptureStarted();
}

const char* ViECapturer::CurrentDeviceName() const {
  return capture_module_->CurrentDeviceName();
}

void ViECapturer::RegisterCpuOveruseObserver(CpuOveruseObserver* observer) {
  overuse_detector_->SetObserver(observer);
}

void ViECapturer::SetCpuOveruseOptions(const CpuOveruseOptions& options) {
  overuse_detector_->SetOptions(options);
}

void ViECapturer::CpuOveruseMeasures(int* capture_jitter_ms,
                                     int* avg_encode_time_ms,
                                     int* encode_usage_percent,
                                     int* capture_queue_delay_ms_per_s) const {
  *capture_jitter_ms = overuse_detector_->CaptureJitterMs();
  *avg_encode_time_ms = overuse_detector_->AvgEncodeTimeMs();
  *encode_usage_percent = overuse_detector_->EncodeUsagePercent();
  *capture_queue_delay_ms_per_s =
      overuse_detector_->AvgCaptureQueueDelayMsPerS();
}

int32_t ViECapturer::SetCaptureDelay(int32_t delay_ms) {
  capture_module_->SetCaptureDelay(delay_ms);
  return 0;
}

int32_t ViECapturer::SetRotateCapturedFrames(
    const RotateCapturedFrame rotation) {
  VideoCaptureRotation converted_rotation = kCameraRotate0;
  switch (rotation) {
    case RotateCapturedFrame_0:
      converted_rotation = kCameraRotate0;
      break;
    case RotateCapturedFrame_90:
      converted_rotation = kCameraRotate90;
      break;
    case RotateCapturedFrame_180:
      converted_rotation = kCameraRotate180;
      break;
    case RotateCapturedFrame_270:
      converted_rotation = kCameraRotate270;
      break;
  }
  return capture_module_->SetCaptureRotation(converted_rotation);
}

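// External capture input: forwards a raw frame buffer to the external capture
// module. Only valid when this capturer was created without a capture device.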
int ViECapturer::IncomingFrame(unsigned char* video_frame,
                               unsigned int video_frame_length,
                               uint16_t width,
                               uint16_t height,
                               RawVideoType video_type,
                               unsigned long long capture_time) {  // NOLINT
  if (!external_capture_module_) {
    return -1;
  }
  VideoCaptureCapability capability;
  capability.width = width;
  capability.height = height;
  capability.rawType = video_type;
  return external_capture_module_->IncomingFrame(video_frame,
                                                 video_frame_length,
                                                 capability, capture_time);
}

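// External capture input for I420 planes: copies the planes into
// |incoming_frame_| and forwards it to the external capture module.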
int ViECapturer::IncomingFrameI420(const ViEVideoFrameI420& video_frame,
                                   unsigned long long capture_time) {  // NOLINT
  if (!external_capture_module_) {
    return -1;
  }

  int size_y = video_frame.height * video_frame.y_pitch;
  int size_u = video_frame.u_pitch * ((video_frame.height + 1) / 2);
  int size_v = video_frame.v_pitch * ((video_frame.height + 1) / 2);
  CriticalSectionScoped cs(incoming_frame_cs_.get());
  int ret = incoming_frame_.CreateFrame(size_y,
                                        video_frame.y_plane,
                                        size_u,
                                        video_frame.u_plane,
                                        size_v,
                                        video_frame.v_plane,
                                        video_frame.width,
                                        video_frame.height,
                                        video_frame.y_pitch,
                                        video_frame.u_pitch,
                                        video_frame.v_pitch);

  if (ret < 0) {
    LOG_F(LS_ERROR) << "Could not create I420Frame.";
    return -1;
  }

  return external_capture_module_->IncomingI420VideoFrame(&incoming_frame_,
                                                          capture_time);
}

void ViECapturer::SwapFrame(I420VideoFrame* frame) {
  external_capture_module_->IncomingI420VideoFrame(frame,
                                                   frame->render_time_ms());
}

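// VideoCaptureDataCallback implementation, called by the capture module for
// each captured frame. The frame is swapped into |captured_frame_| and the
// capture thread is signaled to deliver it.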
void ViECapturer::OnIncomingCapturedFrame(const int32_t capture_id,
                                          I420VideoFrame& video_frame) {
  CriticalSectionScoped cs(capture_cs_.get());
  // Compensate for the capture delay: the render time was set when the frame
  // was received from the camera, not when the camera actually captured it,
  // so move it earlier by the known frame delay.
  video_frame.set_render_time_ms(video_frame.render_time_ms() - FrameDelay());

  TRACE_EVENT_ASYNC_BEGIN1("webrtc", "Video", video_frame.render_time_ms(),
                           "render_time", video_frame.render_time_ms());

  captured_frame_.SwapFrame(&video_frame);
  capture_event_.Set();
  overuse_detector_->FrameCaptured(captured_frame_.width(),
                                   captured_frame_.height());
  return;
}

void ViECapturer::OnCaptureDelayChanged(const int32_t id,
                                        const int32_t delay) {
  LOG(LS_INFO) << "Capture delay changed to " << delay
               << " for device " << id;

  // Deliver the capture delay to all registered callbacks.
  ViEFrameProviderBase::SetFrameDelay(delay);
}

int32_t ViECapturer::RegisterEffectFilter(
    ViEEffectFilter* effect_filter) {
  CriticalSectionScoped cs(deliver_cs_.get());

  if (effect_filter != NULL && effect_filter_ != NULL) {
    LOG_F(LS_ERROR) << "Effect filter already registered.";
    return -1;
  }
  effect_filter_ = effect_filter;
  return 0;
}

int32_t ViECapturer::IncImageProcRefCount() {
  if (!image_proc_module_) {
    assert(image_proc_module_ref_counter_ == 0);
    image_proc_module_ = VideoProcessingModule::Create(
        ViEModuleId(engine_id_, capture_id_));
    if (!image_proc_module_) {
      LOG_F(LS_ERROR) << "Could not create video processing module.";
      return -1;
    }
  }
  image_proc_module_ref_counter_++;
  return 0;
}

int32_t ViECapturer::DecImageProcRefCount() {
  image_proc_module_ref_counter_--;
  if (image_proc_module_ref_counter_ == 0) {
    // Destroy module.
    VideoProcessingModule::Destroy(image_proc_module_);
    image_proc_module_ = NULL;
  }
  return 0;
}

int32_t ViECapturer::EnableDenoising(bool enable) {
  CriticalSectionScoped cs(deliver_cs_.get());
  if (enable) {
    if (denoising_enabled_) {
      // Already enabled, nothing needs to be done.
      return 0;
    }
    denoising_enabled_ = true;
    if (IncImageProcRefCount() != 0) {
      return -1;
    }
  } else {
    if (!denoising_enabled_) {
      // Already disabled, nothing needs to be done.
      return 0;
    }
    denoising_enabled_ = false;
    DecImageProcRefCount();
  }
  return 0;
}

int32_t ViECapturer::EnableDeflickering(bool enable) {
  CriticalSectionScoped cs(deliver_cs_.get());
  if (enable) {
    if (deflicker_frame_stats_) {
      return -1;
    }
    if (IncImageProcRefCount() != 0) {
      return -1;
    }
    deflicker_frame_stats_ = new VideoProcessingModule::FrameStats();
  } else {
    if (deflicker_frame_stats_ == NULL) {
      return -1;
    }
    DecImageProcRefCount();
    delete deflicker_frame_stats_;
    deflicker_frame_stats_ = NULL;
  }
  return 0;
}

int32_t ViECapturer::EnableBrightnessAlarm(bool enable) {
  CriticalSectionScoped cs(deliver_cs_.get());
  if (enable) {
    if (brightness_frame_stats_) {
      return -1;
    }
    if (IncImageProcRefCount() != 0) {
      return -1;
    }
    brightness_frame_stats_ = new VideoProcessingModule::FrameStats();
  } else {
    // Verify the alarm was enabled before releasing the shared image
    // processing module; otherwise the reference count would be decremented
    // without a matching increment.
    if (brightness_frame_stats_ == NULL) {
      return -1;
    }
    DecImageProcRefCount();
    delete brightness_frame_stats_;
    brightness_frame_stats_ = NULL;
  }
  return 0;
}

bool ViECapturer::ViECaptureThreadFunction(void* obj) {
  return static_cast<ViECapturer*>(obj)->ViECaptureProcess();
}

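// Capture thread body: waits for a newly captured frame, runs it through the
// processing and effect pipeline via DeliverI420Frame(), reports brightness
// changes to the observer, and feeds timing data to the overuse detector.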
bool ViECapturer::ViECaptureProcess() {
  if (capture_event_.Wait(kThreadWaitTimeMs) == kEventSignaled) {
    overuse_detector_->FrameProcessingStarted();
    int64_t encode_start_time = -1;
    deliver_cs_->Enter();
    if (SwapCapturedAndDeliverFrameIfAvailable()) {
      encode_start_time = Clock::GetRealTimeClock()->TimeInMilliseconds();
      DeliverI420Frame(&deliver_frame_);
    }
    deliver_cs_->Leave();
    if (current_brightness_level_ != reported_brightness_level_) {
      CriticalSectionScoped cs(observer_cs_.get());
      if (observer_) {
        observer_->BrightnessAlarm(id_, current_brightness_level_);
        reported_brightness_level_ = current_brightness_level_;
      }
    }
    // Update the overuse detector with the duration.
    if (encode_start_time != -1) {
      overuse_detector_->FrameEncoded(
          Clock::GetRealTimeClock()->TimeInMilliseconds() - encode_start_time);
    }
  }
  // We're done!
  return true;
}

void ViECapturer::DeliverI420Frame(I420VideoFrame* video_frame) {
  // Apply image enhancement and effect filter.
  if (deflicker_frame_stats_) {
    if (image_proc_module_->GetFrameStats(deflicker_frame_stats_,
                                          *video_frame) == 0) {
      image_proc_module_->Deflickering(video_frame, deflicker_frame_stats_);
    } else {
      LOG_F(LS_ERROR) << "Could not get frame stats.";
    }
  }
  if (denoising_enabled_) {
    image_proc_module_->Denoising(video_frame);
  }
  if (brightness_frame_stats_) {
    if (image_proc_module_->GetFrameStats(brightness_frame_stats_,
                                          *video_frame) == 0) {
      int32_t brightness = image_proc_module_->BrightnessDetection(
          *video_frame, *brightness_frame_stats_);

      switch (brightness) {
        case VideoProcessingModule::kNoWarning:
          current_brightness_level_ = Normal;
          break;
        case VideoProcessingModule::kDarkWarning:
          current_brightness_level_ = Dark;
          break;
        case VideoProcessingModule::kBrightWarning:
          current_brightness_level_ = Bright;
          break;
        default:
          break;
      }
    }
  }
  if (effect_filter_) {
    unsigned int length = CalcBufferSize(kI420,
                                         video_frame->width(),
                                         video_frame->height());
    scoped_ptr<uint8_t[]> video_buffer(new uint8_t[length]);
    ExtractBuffer(*video_frame, length, video_buffer.get());
    effect_filter_->Transform(length,
                              video_buffer.get(),
                              video_frame->ntp_time_ms(),
                              video_frame->timestamp(),
                              video_frame->width(),
                              video_frame->height());
  }
  // Deliver the captured frame to all observers (channels, renderer or file).
  ViEFrameProviderBase::DeliverFrame(video_frame);
}

int ViECapturer::DeregisterFrameCallback(
    const ViEFrameCallback* callbackObject) {
  return ViEFrameProviderBase::DeregisterFrameCallback(callbackObject);
}

bool ViECapturer::IsFrameCallbackRegistered(
    const ViEFrameCallback* callbackObject) {
  CriticalSectionScoped cs(provider_cs_.get());
  return ViEFrameProviderBase::IsFrameCallbackRegistered(callbackObject);
}

bool ViECapturer::CaptureCapabilityFixed() {
  return requested_capability_.width != 0 &&
      requested_capability_.height != 0 &&
      requested_capability_.maxFPS != 0;
}

int32_t ViECapturer::RegisterObserver(ViECaptureObserver* observer) {
  {
    CriticalSectionScoped cs(observer_cs_.get());
    if (observer_) {
      LOG_F(LS_ERROR) << "Observer already registered.";
      return -1;
    }
    observer_ = observer;
  }
  capture_module_->RegisterCaptureCallback(*this);
  capture_module_->EnableFrameRateCallback(true);
  capture_module_->EnableNoPictureAlarm(true);
  return 0;
}

int32_t ViECapturer::DeRegisterObserver() {
  capture_module_->EnableFrameRateCallback(false);
  capture_module_->EnableNoPictureAlarm(false);
  capture_module_->DeRegisterCaptureCallback();

  CriticalSectionScoped cs(observer_cs_.get());
  observer_ = NULL;
  return 0;
}

bool ViECapturer::IsObserverRegistered() {
  CriticalSectionScoped cs(observer_cs_.get());
  return observer_ != NULL;
}

void ViECapturer::OnCaptureFrameRate(const int32_t id,
                                     const uint32_t frame_rate) {
  CriticalSectionScoped cs(observer_cs_.get());
  observer_->CapturedFrameRate(id_, static_cast<uint8_t>(frame_rate));
}

void ViECapturer::OnNoPictureAlarm(const int32_t id,
                                   const VideoCaptureAlarm alarm) {
  LOG(LS_WARNING) << "OnNoPictureAlarm " << id;

  CriticalSectionScoped cs(observer_cs_.get());
  CaptureAlarm vie_alarm = (alarm == Raised) ? AlarmRaised : AlarmCleared;
  observer_->NoPictureAlarm(id, vie_alarm);
}

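// Moves the most recently captured frame into |deliver_frame_| under the
// capture lock. Returns false if no new frame has arrived since the last swap.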
bool ViECapturer::SwapCapturedAndDeliverFrameIfAvailable() {
  CriticalSectionScoped cs(capture_cs_.get());
  if (captured_frame_.IsZeroSize())
    return false;

  deliver_frame_.SwapFrame(&captured_frame_);
  captured_frame_.ResetSize();
  return true;
}

}  // namespace webrtc