Upstream version 5.34.104.0
[platform/framework/web/crosswalk.git] / src / third_party / webrtc / video_engine / vie_capturer.cc
1 /*
2  *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
3  *
4  *  Use of this source code is governed by a BSD-style license
5  *  that can be found in the LICENSE file in the root of the source
6  *  tree. An additional intellectual property rights grant can be found
7  *  in the file PATENTS.  All contributing project authors may
8  *  be found in the AUTHORS file in the root of the source tree.
9  */
10
11 #include "webrtc/video_engine/vie_capturer.h"
12
13 #include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
14 #include "webrtc/modules/interface/module_common_types.h"
15 #include "webrtc/modules/utility/interface/process_thread.h"
16 #include "webrtc/modules/video_capture/include/video_capture_factory.h"
17 #include "webrtc/modules/video_processing/main/interface/video_processing.h"
18 #include "webrtc/modules/video_render/include/video_render_defines.h"
19 #include "webrtc/system_wrappers/interface/clock.h"
20 #include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
21 #include "webrtc/system_wrappers/interface/event_wrapper.h"
22 #include "webrtc/system_wrappers/interface/thread_wrapper.h"
23 #include "webrtc/system_wrappers/interface/trace.h"
24 #include "webrtc/system_wrappers/interface/trace_event.h"
25 #include "webrtc/video_engine/include/vie_base.h"
26 #include "webrtc/video_engine/include/vie_image_process.h"
27 #include "webrtc/video_engine/overuse_frame_detector.h"
28 #include "webrtc/video_engine/vie_defines.h"
29 #include "webrtc/video_engine/vie_encoder.h"
30
namespace webrtc {

// Maximum time (ms) the capture thread waits for the new-frame event per
// loop iteration in ViECaptureProcess().
const int kThreadWaitTimeMs = 100;

// Constructs a capturer bound to |capture_id| within |engine_id|. Starts the
// dedicated high-priority capture thread immediately and registers the CPU
// overuse detector with |module_process_thread|, which must outlive this
// object.
ViECapturer::ViECapturer(int capture_id,
                         int engine_id,
                         const Config& config,
                         ProcessThread& module_process_thread)
    : ViEFrameProviderBase(capture_id, engine_id),
      capture_cs_(CriticalSectionWrapper::CreateCriticalSection()),
      deliver_cs_(CriticalSectionWrapper::CreateCriticalSection()),
      capture_module_(NULL),
      external_capture_module_(NULL),
      module_process_thread_(module_process_thread),
      capture_id_(capture_id),
      incoming_frame_cs_(CriticalSectionWrapper::CreateCriticalSection()),
      // The thread and events are held as references to heap objects; they
      // are deleted explicitly in the destructor once the thread has stopped.
      capture_thread_(*ThreadWrapper::CreateThread(ViECaptureThreadFunction,
                                                   this, kHighPriority,
                                                   "ViECaptureThread")),
      capture_event_(*EventWrapper::Create()),
      deliver_event_(*EventWrapper::Create()),
      effect_filter_(NULL),
      image_proc_module_(NULL),
      image_proc_module_ref_counter_(0),
      deflicker_frame_stats_(NULL),
      brightness_frame_stats_(NULL),
      current_brightness_level_(Normal),
      reported_brightness_level_(Normal),
      denoising_enabled_(false),
      observer_cs_(CriticalSectionWrapper::CreateCriticalSection()),
      observer_(NULL),
      overuse_detector_(new OveruseFrameDetector(Clock::GetRealTimeClock(),
                                                 kNormalUseStdDevMs,
                                                 kOveruseStdDevMs)) {
  WEBRTC_TRACE(kTraceMemory, kTraceVideo, ViEId(engine_id, capture_id),
               "ViECapturer::ViECapturer(capture_id: %d, engine_id: %d)",
               capture_id, engine_id);
  unsigned int t_id = 0;
  // A failure to start the capture thread indicates a serious platform
  // problem; there is no recovery path, so only assert in debug builds.
  if (capture_thread_.Start(t_id)) {
    WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id, capture_id),
                 "%s: thread started: %u", __FUNCTION__, t_id);
  } else {
    assert(false);
  }
  module_process_thread_.RegisterModule(overuse_detector_.get());
}
77
// Tears down the capturer: flags the capture thread to stop, detaches and
// releases the capture module, then frees the image processing helpers.
ViECapturer::~ViECapturer() {
  WEBRTC_TRACE(kTraceMemory, kTraceVideo, ViEId(engine_id_, capture_id_),
               "ViECapturer::~ViECapturer() - capture_id: %d, engine_id: %d",
               capture_id_, engine_id_);
  module_process_thread_.DeRegisterModule(overuse_detector_.get());

  // Stop the thread.
  // Hold both locks while flagging the thread so it cannot be mid-capture or
  // mid-delivery; setting the event wakes it from its wait so it can exit.
  deliver_cs_->Enter();
  capture_cs_->Enter();
  capture_thread_.SetNotAlive();
  capture_event_.Set();
  capture_cs_->Leave();
  deliver_cs_->Leave();

  // Stop the camera input.
  if (capture_module_) {
    module_process_thread_.DeRegisterModule(capture_module_);
    capture_module_->DeRegisterCaptureDataCallback();
    capture_module_->Release();
    capture_module_ = NULL;
  }
  if (capture_thread_.Stop()) {
    // Thread stopped.
    // The thread and event objects were allocated in the constructor and are
    // held by reference, so delete them through their addresses here.
    delete &capture_thread_;
    delete &capture_event_;
    delete &deliver_event_;
  } else {
    // The thread could not be stopped; deliberately leak the thread/event
    // objects rather than free memory a running thread may still touch.
    assert(false);
    WEBRTC_TRACE(kTraceMemory, kTraceVideoRenderer,
                 ViEId(engine_id_, capture_id_),
                 "%s: Not able to stop capture thread for device %d, leaking",
                 __FUNCTION__, capture_id_);
  }

  if (image_proc_module_) {
    VideoProcessingModule::Destroy(image_proc_module_);
  }
  if (deflicker_frame_stats_) {
    delete deflicker_frame_stats_;
    deflicker_frame_stats_ = NULL;
  }
  // delete on NULL is a no-op, so no guard is needed here.
  delete brightness_frame_stats_;
}
121
122 ViECapturer* ViECapturer::CreateViECapture(
123     int capture_id,
124     int engine_id,
125     const Config& config,
126     VideoCaptureModule* capture_module,
127     ProcessThread& module_process_thread) {
128   ViECapturer* capture = new ViECapturer(capture_id, engine_id, config,
129                                          module_process_thread);
130   if (!capture || capture->Init(capture_module) != 0) {
131     delete capture;
132     capture = NULL;
133   }
134   return capture;
135 }
136
// Adopts |capture_module| as the capture source: registers this object as the
// frame-data callback, takes a reference, and hooks the module into the
// process thread. Returns 0 on success, -1 if process-thread registration
// fails. NOTE(review): on that failure path the callback registration and the
// reference are not rolled back -- presumably the caller destroys this
// capturer on failure (see CreateViECapture); confirm.
int32_t ViECapturer::Init(VideoCaptureModule* capture_module) {
  assert(capture_module_ == NULL);
  capture_module_ = capture_module;
  capture_module_->RegisterCaptureDataCallback(*this);
  capture_module_->AddRef();
  if (module_process_thread_.RegisterModule(capture_module_) != 0) {
    return -1;
  }

  return 0;
}
148
149 ViECapturer* ViECapturer::CreateViECapture(
150     int capture_id,
151     int engine_id,
152     const Config& config,
153     const char* device_unique_idUTF8,
154     const uint32_t device_unique_idUTF8Length,
155     ProcessThread& module_process_thread) {
156   ViECapturer* capture = new ViECapturer(capture_id, engine_id, config,
157                                          module_process_thread);
158   if (!capture ||
159       capture->Init(device_unique_idUTF8, device_unique_idUTF8Length) != 0) {
160     delete capture;
161     capture = NULL;
162   }
163   return capture;
164 }
165
// Creates the capture module for |device_unique_idUTF8|. A NULL id creates an
// external capture module instead (frames are then injected through
// IncomingFrame()/IncomingFrameI420()). Returns 0 on success, -1 on failure.
int32_t ViECapturer::Init(const char* device_unique_idUTF8,
                          uint32_t device_unique_idUTF8Length) {
  assert(capture_module_ == NULL);
  if (device_unique_idUTF8 == NULL) {
    // No device id: the factory also hands back the external-capture
    // interface through |external_capture_module_|.
    capture_module_  = VideoCaptureFactory::Create(
        ViEModuleId(engine_id_, capture_id_), external_capture_module_);
  } else {
    capture_module_ = VideoCaptureFactory::Create(
        ViEModuleId(engine_id_, capture_id_), device_unique_idUTF8);
  }
  if (!capture_module_) {
    return -1;
  }
  capture_module_->AddRef();
  capture_module_->RegisterCaptureDataCallback(*this);
  if (module_process_thread_.RegisterModule(capture_module_) != 0) {
    return -1;
  }

  return 0;
}
187
188 int ViECapturer::FrameCallbackChanged() {
189   if (Started() && !CaptureCapabilityFixed()) {
190     // Reconfigure the camera if a new size is required and the capture device
191     // does not provide encoded frames.
192     int best_width;
193     int best_height;
194     int best_frame_rate;
195     VideoCaptureCapability capture_settings;
196     capture_module_->CaptureSettings(capture_settings);
197     GetBestFormat(&best_width, &best_height, &best_frame_rate);
198     if (best_width != 0 && best_height != 0 && best_frame_rate != 0) {
199       if (best_width != capture_settings.width ||
200           best_height != capture_settings.height ||
201           best_frame_rate != capture_settings.maxFPS ||
202           capture_settings.codecType != kVideoCodecUnknown) {
203         Stop();
204         Start(requested_capability_);
205       }
206     }
207   }
208   return 0;
209 }
210
211 int32_t ViECapturer::Start(const CaptureCapability& capture_capability) {
212   WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_), "%s",
213                __FUNCTION__);
214   int width;
215   int height;
216   int frame_rate;
217   VideoCaptureCapability capability;
218   requested_capability_ = capture_capability;
219
220   if (!CaptureCapabilityFixed()) {
221     // Ask the observers for best size.
222     GetBestFormat(&width, &height, &frame_rate);
223     if (width == 0) {
224       width = kViECaptureDefaultWidth;
225     }
226     if (height == 0) {
227       height = kViECaptureDefaultHeight;
228     }
229     if (frame_rate == 0) {
230       frame_rate = kViECaptureDefaultFramerate;
231     }
232     capability.height = height;
233     capability.width = width;
234     capability.maxFPS = frame_rate;
235     capability.rawType = kVideoI420;
236     capability.codecType = kVideoCodecUnknown;
237   } else {
238     // Width, height and type specified with call to Start, not set by
239     // observers.
240     capability.width = requested_capability_.width;
241     capability.height = requested_capability_.height;
242     capability.maxFPS = requested_capability_.maxFPS;
243     capability.rawType = requested_capability_.rawType;
244     capability.interlaced = requested_capability_.interlaced;
245   }
246   return capture_module_->StartCapture(capability);
247 }
248
249 int32_t ViECapturer::Stop() {
250   WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_), "%s",
251                __FUNCTION__);
252   requested_capability_ = CaptureCapability();
253   return capture_module_->StopCapture();
254 }
255
256 bool ViECapturer::Started() {
257   WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_), "%s",
258                __FUNCTION__);
259   return capture_module_->CaptureStarted();
260 }
261
262 const char* ViECapturer::CurrentDeviceName() const {
263   return capture_module_->CurrentDeviceName();
264 }
265
266 void ViECapturer::RegisterCpuOveruseObserver(CpuOveruseObserver* observer) {
267   overuse_detector_->SetObserver(observer);
268 }
269
270 void ViECapturer::CpuOveruseMeasures(int* capture_jitter_ms,
271                                      int* avg_encode_time_ms,
272                                      int* encode_usage_percent,
273                                      int* capture_queue_delay_ms_per_s) const {
274   *capture_jitter_ms = overuse_detector_->last_capture_jitter_ms();
275   *avg_encode_time_ms = overuse_detector_->AvgEncodeTimeMs();
276   *encode_usage_percent = overuse_detector_->EncodeUsagePercent();
277   *capture_queue_delay_ms_per_s =
278       overuse_detector_->AvgCaptureQueueDelayMsPerS();
279 }
280
281 int32_t ViECapturer::SetCaptureDelay(int32_t delay_ms) {
282   capture_module_->SetCaptureDelay(delay_ms);
283   return 0;
284 }
285
286 int32_t ViECapturer::SetRotateCapturedFrames(
287   const RotateCapturedFrame rotation) {
288   VideoCaptureRotation converted_rotation = kCameraRotate0;
289   switch (rotation) {
290     case RotateCapturedFrame_0:
291       converted_rotation = kCameraRotate0;
292       break;
293     case RotateCapturedFrame_90:
294       converted_rotation = kCameraRotate90;
295       break;
296     case RotateCapturedFrame_180:
297       converted_rotation = kCameraRotate180;
298       break;
299     case RotateCapturedFrame_270:
300       converted_rotation = kCameraRotate270;
301       break;
302   }
303   return capture_module_->SetCaptureRotation(converted_rotation);
304 }
305
306 int ViECapturer::IncomingFrame(unsigned char* video_frame,
307                                unsigned int video_frame_length,
308                                uint16_t width,
309                                uint16_t height,
310                                RawVideoType video_type,
311                                unsigned long long capture_time) {  // NOLINT
312   WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
313                "ExternalCapture::IncomingFrame width %d, height %d, "
314                "capture_time %u", width, height, capture_time);
315
316   if (!external_capture_module_) {
317     return -1;
318   }
319   VideoCaptureCapability capability;
320   capability.width = width;
321   capability.height = height;
322   capability.rawType = video_type;
323   return external_capture_module_->IncomingFrame(video_frame,
324                                                  video_frame_length,
325                                                  capability, capture_time);
326 }
327
328 int ViECapturer::IncomingFrameI420(const ViEVideoFrameI420& video_frame,
329                                    unsigned long long capture_time) {  // NOLINT
330   WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
331                "ExternalCapture::IncomingFrame width %d, height %d, "
332                " capture_time %u", video_frame.width, video_frame.height,
333                capture_time);
334
335   if (!external_capture_module_) {
336     return -1;
337   }
338
339   int size_y = video_frame.height * video_frame.y_pitch;
340   int size_u = video_frame.u_pitch * ((video_frame.height + 1) / 2);
341   int size_v = video_frame.v_pitch * ((video_frame.height + 1) / 2);
342   CriticalSectionScoped cs(incoming_frame_cs_.get());
343   int ret = incoming_frame_.CreateFrame(size_y,
344                                        video_frame.y_plane,
345                                        size_u,
346                                        video_frame.u_plane,
347                                        size_v,
348                                        video_frame.v_plane,
349                                        video_frame.width,
350                                        video_frame.height,
351                                        video_frame.y_pitch,
352                                        video_frame.u_pitch,
353                                        video_frame.v_pitch);
354
355   if (ret < 0) {
356     WEBRTC_TRACE(kTraceError,
357                  kTraceVideo,
358                  ViEId(engine_id_, capture_id_),
359                  "Failed to create I420VideoFrame");
360     return -1;
361   }
362
363   return external_capture_module_->IncomingI420VideoFrame(&incoming_frame_,
364                                                           capture_time);
365 }
366
367 void ViECapturer::SwapFrame(I420VideoFrame* frame) {
368   external_capture_module_->IncomingI420VideoFrame(frame,
369                                                    frame->render_time_ms());
370 }
371
372 void ViECapturer::OnIncomingCapturedFrame(const int32_t capture_id,
373                                           I420VideoFrame& video_frame) {
374   WEBRTC_TRACE(kTraceStream, kTraceVideo, ViEId(engine_id_, capture_id_),
375                "%s(capture_id: %d)", __FUNCTION__, capture_id);
376   CriticalSectionScoped cs(capture_cs_.get());
377   // Make sure we render this frame earlier since we know the render time set
378   // is slightly off since it's being set when the frame has been received from
379   // the camera, and not when the camera actually captured the frame.
380   video_frame.set_render_time_ms(video_frame.render_time_ms() - FrameDelay());
381
382   TRACE_EVENT_ASYNC_BEGIN1("webrtc", "Video", video_frame.render_time_ms(),
383                            "render_time", video_frame.render_time_ms());
384
385   captured_frame_.SwapFrame(&video_frame);
386   capture_event_.Set();
387   overuse_detector_->FrameCaptured(captured_frame_.width(),
388                                    captured_frame_.height());
389   return;
390 }
391
// VideoCaptureModule callback: the capture pipeline delay changed.
// The new delay is propagated through the frame-provider base class.
void ViECapturer::OnCaptureDelayChanged(const int32_t id,
                                        const int32_t delay) {
  WEBRTC_TRACE(kTraceStream, kTraceVideo, ViEId(engine_id_, capture_id_),
               "%s(capture_id: %d) delay %d", __FUNCTION__, capture_id_,
               delay);

  // Deliver the network delay to all registered callbacks.
  ViEFrameProviderBase::SetFrameDelay(delay);
}
401
402 int32_t ViECapturer::RegisterEffectFilter(
403     ViEEffectFilter* effect_filter) {
404   CriticalSectionScoped cs(deliver_cs_.get());
405
406   if (!effect_filter) {
407     if (!effect_filter_) {
408       WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, capture_id_),
409                    "%s: no effect filter added for capture device %d",
410                    __FUNCTION__, capture_id_);
411       return -1;
412     }
413     WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
414                  "%s: deregister effect filter for device %d", __FUNCTION__,
415                  capture_id_);
416   } else {
417     if (effect_filter_) {
418       WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, capture_id_),
419                    "%s: effect filter already added for capture device %d",
420                    __FUNCTION__, capture_id_);
421       return -1;
422     }
423     WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
424                  "%s: register effect filter for device %d", __FUNCTION__,
425                  capture_id_);
426   }
427   effect_filter_ = effect_filter;
428   return 0;
429 }
430
431 int32_t ViECapturer::IncImageProcRefCount() {
432   if (!image_proc_module_) {
433     assert(image_proc_module_ref_counter_ == 0);
434     image_proc_module_ = VideoProcessingModule::Create(
435         ViEModuleId(engine_id_, capture_id_));
436     if (!image_proc_module_) {
437       WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, capture_id_),
438                    "%s: could not create video processing module",
439                    __FUNCTION__);
440       return -1;
441     }
442   }
443   image_proc_module_ref_counter_++;
444   return 0;
445 }
446
447 int32_t ViECapturer::DecImageProcRefCount() {
448   image_proc_module_ref_counter_--;
449   if (image_proc_module_ref_counter_ == 0) {
450     // Destroy module.
451     VideoProcessingModule::Destroy(image_proc_module_);
452     image_proc_module_ = NULL;
453   }
454   return 0;
455 }
456
457 int32_t ViECapturer::EnableDenoising(bool enable) {
458   WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
459                "%s(capture_device_id: %d, enable: %d)", __FUNCTION__,
460                capture_id_, enable);
461
462   CriticalSectionScoped cs(deliver_cs_.get());
463   if (enable) {
464     if (denoising_enabled_) {
465       // Already enabled, nothing need to be done.
466       return 0;
467     }
468     denoising_enabled_ = true;
469     if (IncImageProcRefCount() != 0) {
470       return -1;
471     }
472   } else {
473     if (denoising_enabled_ == false) {
474       // Already disabled, nothing need to be done.
475       return 0;
476     }
477     denoising_enabled_ = false;
478     DecImageProcRefCount();
479   }
480
481   return 0;
482 }
483
484 int32_t ViECapturer::EnableDeflickering(bool enable) {
485   WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
486                "%s(capture_device_id: %d, enable: %d)", __FUNCTION__,
487                capture_id_, enable);
488
489   CriticalSectionScoped cs(deliver_cs_.get());
490   if (enable) {
491     if (deflicker_frame_stats_) {
492       WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, capture_id_),
493                    "%s: deflickering already enabled", __FUNCTION__);
494       return -1;
495     }
496     if (IncImageProcRefCount() != 0) {
497       return -1;
498     }
499     deflicker_frame_stats_ = new VideoProcessingModule::FrameStats();
500   } else {
501     if (deflicker_frame_stats_ == NULL) {
502       WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, capture_id_),
503                    "%s: deflickering not enabled", __FUNCTION__);
504       return -1;
505     }
506     DecImageProcRefCount();
507     delete deflicker_frame_stats_;
508     deflicker_frame_stats_ = NULL;
509   }
510   return 0;
511 }
512
513 int32_t ViECapturer::EnableBrightnessAlarm(bool enable) {
514   WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
515                "%s(capture_device_id: %d, enable: %d)", __FUNCTION__,
516                capture_id_, enable);
517
518   CriticalSectionScoped cs(deliver_cs_.get());
519   if (enable) {
520     if (brightness_frame_stats_) {
521       WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, capture_id_),
522                    "%s: BrightnessAlarm already enabled", __FUNCTION__);
523       return -1;
524     }
525     if (IncImageProcRefCount() != 0) {
526       return -1;
527     }
528     brightness_frame_stats_ = new VideoProcessingModule::FrameStats();
529   } else {
530     DecImageProcRefCount();
531     if (brightness_frame_stats_ == NULL) {
532       WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, capture_id_),
533                    "%s: deflickering not enabled", __FUNCTION__);
534       return -1;
535     }
536     delete brightness_frame_stats_;
537     brightness_frame_stats_ = NULL;
538   }
539   return 0;
540 }
541
542 bool ViECapturer::ViECaptureThreadFunction(void* obj) {
543   return static_cast<ViECapturer*>(obj)->ViECaptureProcess();
544 }
545
// One iteration of the capture thread: waits up to kThreadWaitTimeMs for a
// new-frame signal, delivers the frame to all registered callbacks under the
// deliver lock, reports brightness-level changes to the observer, and feeds
// the delivery duration to the overuse detector. Always returns true so the
// thread wrapper keeps invoking it.
bool ViECapturer::ViECaptureProcess() {
  if (capture_event_.Wait(kThreadWaitTimeMs) == kEventSignaled) {
    overuse_detector_->FrameProcessingStarted();
    int64_t encode_start_time = -1;
    deliver_cs_->Enter();
    if (SwapCapturedAndDeliverFrameIfAvailable()) {
      encode_start_time = Clock::GetRealTimeClock()->TimeInMilliseconds();
      DeliverI420Frame(&deliver_frame_);
    }
    deliver_cs_->Leave();
    // Report a brightness change (set by DeliverI420Frame()) to the observer,
    // outside the deliver lock.
    if (current_brightness_level_ != reported_brightness_level_) {
      CriticalSectionScoped cs(observer_cs_.get());
      if (observer_) {
        observer_->BrightnessAlarm(id_, current_brightness_level_);
        reported_brightness_level_ = current_brightness_level_;
      }
    }
    // Update the overuse detector with the duration.
    if (encode_start_time != -1) {
      overuse_detector_->FrameEncoded(
          Clock::GetRealTimeClock()->TimeInMilliseconds() - encode_start_time);
    }
  }
  // We're done!
  return true;
}
572
// Runs the enabled image-processing steps (deflickering, denoising,
// brightness detection) and the registered effect filter on |video_frame|,
// then hands the frame to all registered frame callbacks.
// Called on the capture thread with deliver_cs_ held (see ViECaptureProcess).
// NOTE(review): each branch below assumes image_proc_module_ is non-NULL
// whenever the corresponding flag/stats object is set; the Enable* methods
// are expected to maintain that invariant via IncImageProcRefCount().
void ViECapturer::DeliverI420Frame(I420VideoFrame* video_frame) {
  // Apply image enhancement and effect filter.
  if (deflicker_frame_stats_) {
    if (image_proc_module_->GetFrameStats(deflicker_frame_stats_,
                                          *video_frame) == 0) {
      image_proc_module_->Deflickering(video_frame, deflicker_frame_stats_);
    } else {
      WEBRTC_TRACE(kTraceStream, kTraceVideo, ViEId(engine_id_, capture_id_),
                   "%s: could not get frame stats for captured frame",
                   __FUNCTION__);
    }
  }
  if (denoising_enabled_) {
    image_proc_module_->Denoising(video_frame);
  }
  if (brightness_frame_stats_) {
    if (image_proc_module_->GetFrameStats(brightness_frame_stats_,
                                          *video_frame) == 0) {
      int32_t brightness = image_proc_module_->BrightnessDetection(
          *video_frame, *brightness_frame_stats_);

      // Record the detected level; ViECaptureProcess() reports changes to the
      // observer after delivery.
      switch (brightness) {
      case VideoProcessingModule::kNoWarning:
        current_brightness_level_ = Normal;
        break;
      case VideoProcessingModule::kDarkWarning:
        current_brightness_level_ = Dark;
        break;
      case VideoProcessingModule::kBrightWarning:
        current_brightness_level_ = Bright;
        break;
      default:
        WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
                     "%s: Brightness detection failed", __FUNCTION__);
      }
    }
  }
  if (effect_filter_) {
    // The filter operates on a flat I420 buffer, so copy the frame out,
    // transform it, and let the filter see the contiguous data.
    unsigned int length = CalcBufferSize(kI420,
                                         video_frame->width(),
                                         video_frame->height());
    scoped_array<uint8_t> video_buffer(new uint8_t[length]);
    ExtractBuffer(*video_frame, length, video_buffer.get());
    effect_filter_->Transform(length, video_buffer.get(),
                              video_frame->timestamp(), video_frame->width(),
                              video_frame->height());
  }
  // Deliver the captured frame to all observers (channels, renderer or file).
  ViEFrameProviderBase::DeliverFrame(video_frame);
}
623
624 int ViECapturer::DeregisterFrameCallback(
625     const ViEFrameCallback* callbackObject) {
626   return ViEFrameProviderBase::DeregisterFrameCallback(callbackObject);
627 }
628
629 bool ViECapturer::IsFrameCallbackRegistered(
630     const ViEFrameCallback* callbackObject) {
631   CriticalSectionScoped cs(provider_cs_.get());
632   return ViEFrameProviderBase::IsFrameCallbackRegistered(callbackObject);
633 }
634
635 bool ViECapturer::CaptureCapabilityFixed() {
636   return requested_capability_.width != 0 &&
637       requested_capability_.height != 0 &&
638       requested_capability_.maxFPS != 0;
639 }
640
641 int32_t ViECapturer::RegisterObserver(ViECaptureObserver* observer) {
642   {
643     CriticalSectionScoped cs(observer_cs_.get());
644     if (observer_) {
645       WEBRTC_TRACE(kTraceError,
646                    kTraceVideo,
647                    ViEId(engine_id_, capture_id_),
648                    "%s Observer already registered",
649                    __FUNCTION__,
650                    capture_id_);
651       return -1;
652     }
653     observer_ = observer;
654   }
655   capture_module_->RegisterCaptureCallback(*this);
656   capture_module_->EnableFrameRateCallback(true);
657   capture_module_->EnableNoPictureAlarm(true);
658   return 0;
659 }
660
661 int32_t ViECapturer::DeRegisterObserver() {
662   capture_module_->EnableFrameRateCallback(false);
663   capture_module_->EnableNoPictureAlarm(false);
664   capture_module_->DeRegisterCaptureCallback();
665
666   CriticalSectionScoped cs(observer_cs_.get());
667   observer_ = NULL;
668   return 0;
669 }
670
671 bool ViECapturer::IsObserverRegistered() {
672   CriticalSectionScoped cs(observer_cs_.get());
673   return observer_ != NULL;
674 }
675
// VideoCaptureModule callback: reports the measured capture frame rate.
// NOTE(review): |observer_| is dereferenced without a NULL check; this relies
// on the callback being deregistered (see DeRegisterObserver()) before the
// observer pointer is cleared -- confirm no callback can arrive in between.
void ViECapturer::OnCaptureFrameRate(const int32_t id,
                                     const uint32_t frame_rate) {
  WEBRTC_TRACE(kTraceStream, kTraceVideo, ViEId(engine_id_, capture_id_),
               "OnCaptureFrameRate %d", frame_rate);

  CriticalSectionScoped cs(observer_cs_.get());
  observer_->CapturedFrameRate(id_, static_cast<uint8_t>(frame_rate));
}

// VideoCaptureModule callback: a no-picture alarm was raised or cleared.
// Translates the capture-module alarm value into the ViE CaptureAlarm type.
// NOTE(review): same unchecked |observer_| dereference as above; also note
// this passes the callback's |id| while OnCaptureFrameRate passes |id_|.
void ViECapturer::OnNoPictureAlarm(const int32_t id,
                                   const VideoCaptureAlarm alarm) {
  WEBRTC_TRACE(kTraceStream, kTraceVideo, ViEId(engine_id_, capture_id_),
               "OnNoPictureAlarm %d", alarm);

  CriticalSectionScoped cs(observer_cs_.get());
  CaptureAlarm vie_alarm = (alarm == Raised) ? AlarmRaised : AlarmCleared;
  observer_->NoPictureAlarm(id, vie_alarm);
}
694
695 bool ViECapturer::SwapCapturedAndDeliverFrameIfAvailable() {
696   CriticalSectionScoped cs(capture_cs_.get());
697   if (captured_frame_.IsZeroSize())
698     return false;
699
700   deliver_frame_.SwapFrame(&captured_frame_);
701   captured_frame_.ResetSize();
702   return true;
703 }
704
705 }  // namespace webrtc