2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree.
11 #include "webrtc/video_engine/vie_capturer.h"
13 #include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
14 #include "webrtc/modules/interface/module_common_types.h"
15 #include "webrtc/modules/utility/interface/process_thread.h"
16 #include "webrtc/modules/video_capture/include/video_capture_factory.h"
17 #include "webrtc/modules/video_processing/main/interface/video_processing.h"
18 #include "webrtc/modules/video_render/include/video_render_defines.h"
19 #include "webrtc/system_wrappers/interface/clock.h"
20 #include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
21 #include "webrtc/system_wrappers/interface/event_wrapper.h"
22 #include "webrtc/system_wrappers/interface/thread_wrapper.h"
23 #include "webrtc/system_wrappers/interface/trace.h"
24 #include "webrtc/system_wrappers/interface/trace_event.h"
25 #include "webrtc/video_engine/include/vie_base.h"
26 #include "webrtc/video_engine/include/vie_image_process.h"
27 #include "webrtc/video_engine/overuse_frame_detector.h"
28 #include "webrtc/video_engine/vie_defines.h"
29 #include "webrtc/video_engine/vie_encoder.h"
// Maximum time (in milliseconds) the capture thread blocks on its events
// before waking up to re-check its run state.
const int kThreadWaitTimeMs = 100;
// Constructor: creates the locks, the dedicated capture thread and its
// events, initializes image-processing/brightness state and the CPU overuse
// detector, then starts the capture thread and registers the detector with
// the module process thread.
// NOTE(review): this listing is truncated (several original lines missing,
// e.g. remaining constructor parameters and thread-creation arguments) and
// every line carries a stray line-number prefix; code tokens are preserved
// exactly as found.
35 ViECapturer::ViECapturer(int capture_id,
38 ProcessThread& module_process_thread)
39 : ViEFrameProviderBase(capture_id, engine_id),
// Separate locks: capture_cs_ guards captured frames, deliver_cs_ guards
// frame delivery / filter state.
40 capture_cs_(CriticalSectionWrapper::CreateCriticalSection()),
41 deliver_cs_(CriticalSectionWrapper::CreateCriticalSection()),
42 capture_module_(NULL),
43 external_capture_module_(NULL),
44 module_process_thread_(module_process_thread),
45 capture_id_(capture_id),
46 incoming_frame_cs_(CriticalSectionWrapper::CreateCriticalSection()),
// Thread that waits on capture_event_ and delivers captured frames.
47 capture_thread_(*ThreadWrapper::CreateThread(ViECaptureThreadFunction,
50 capture_event_(*EventWrapper::Create()),
51 deliver_event_(*EventWrapper::Create()),
53 image_proc_module_(NULL),
54 image_proc_module_ref_counter_(0),
55 deflicker_frame_stats_(NULL),
56 brightness_frame_stats_(NULL),
57 current_brightness_level_(Normal),
58 reported_brightness_level_(Normal),
59 denoising_enabled_(false),
60 observer_cs_(CriticalSectionWrapper::CreateCriticalSection()),
// Tracks capture jitter and encode time to detect CPU overuse.
62 overuse_detector_(new OveruseFrameDetector(Clock::GetRealTimeClock(),
65 WEBRTC_TRACE(kTraceMemory, kTraceVideo, ViEId(engine_id, capture_id),
66 "ViECapturer::ViECapturer(capture_id: %d, engine_id: %d)",
67 capture_id, engine_id);
68 unsigned int t_id = 0;
69 if (capture_thread_.Start(t_id)) {
70 WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id, capture_id),
71 "%s: thread started: %u", __FUNCTION__, t_id);
// The overuse detector is a Module; the process thread drives its Process().
75 module_process_thread_.RegisterModule(overuse_detector_.get());
// Destructor: deregisters the overuse detector, shuts down the capture
// thread, releases the capture module, and frees image-processing state.
// NOTE(review): listing is truncated (lock Enter/Leave pairs, event Set and
// closing braces are missing) and lines carry stray number prefixes; tokens
// preserved exactly as found.
78 ViECapturer::~ViECapturer() {
79 WEBRTC_TRACE(kTraceMemory, kTraceVideo, ViEId(engine_id_, capture_id_),
80 "ViECapturer::~ViECapturer() - capture_id: %d, engine_id: %d",
81 capture_id_, engine_id_);
82 module_process_thread_.DeRegisterModule(overuse_detector_.get());
// Flag the capture thread to exit its loop.
87 capture_thread_.SetNotAlive();
92 // Stop the camera input.
93 if (capture_module_) {
94 module_process_thread_.DeRegisterModule(capture_module_);
95 capture_module_->DeRegisterCaptureDataCallback();
// Drops the reference taken in Init(); the factory owns final deletion.
96 capture_module_->Release();
97 capture_module_ = NULL;
// Thread/event objects were created via Create*() and are deleted by
// address here; if the thread can't be stopped they are deliberately leaked.
99 if (capture_thread_.Stop()) {
101 delete &capture_thread_;
102 delete &capture_event_;
103 delete &deliver_event_;
106 WEBRTC_TRACE(kTraceMemory, kTraceVideoRenderer,
107 ViEId(engine_id_, capture_id_),
108 "%s: Not able to stop capture thread for device %d, leaking",
109 __FUNCTION__, capture_id_);
112 if (image_proc_module_) {
113 VideoProcessingModule::Destroy(image_proc_module_);
115 if (deflicker_frame_stats_) {
116 delete deflicker_frame_stats_;
117 deflicker_frame_stats_ = NULL;
// brightness_frame_stats_ is deleted unconditionally (delete NULL is a no-op).
119 delete brightness_frame_stats_;
122 ViECapturer* ViECapturer::CreateViECapture(
125 const Config& config,
126 VideoCaptureModule* capture_module,
127 ProcessThread& module_process_thread) {
128 ViECapturer* capture = new ViECapturer(capture_id, engine_id, config,
129 module_process_thread);
130 if (!capture || capture->Init(capture_module) != 0) {
137 int32_t ViECapturer::Init(VideoCaptureModule* capture_module) {
138 assert(capture_module_ == NULL);
139 capture_module_ = capture_module;
140 capture_module_->RegisterCaptureDataCallback(*this);
141 capture_module_->AddRef();
142 if (module_process_thread_.RegisterModule(capture_module_) != 0) {
149 ViECapturer* ViECapturer::CreateViECapture(
152 const Config& config,
153 const char* device_unique_idUTF8,
154 const uint32_t device_unique_idUTF8Length,
155 ProcessThread& module_process_thread) {
156 ViECapturer* capture = new ViECapturer(capture_id, engine_id, config,
157 module_process_thread);
159 capture->Init(device_unique_idUTF8, device_unique_idUTF8Length) != 0) {
166 int32_t ViECapturer::Init(const char* device_unique_idUTF8,
167 uint32_t device_unique_idUTF8Length) {
168 assert(capture_module_ == NULL);
169 if (device_unique_idUTF8 == NULL) {
170 capture_module_ = VideoCaptureFactory::Create(
171 ViEModuleId(engine_id_, capture_id_), external_capture_module_);
173 capture_module_ = VideoCaptureFactory::Create(
174 ViEModuleId(engine_id_, capture_id_), device_unique_idUTF8);
176 if (!capture_module_) {
179 capture_module_->AddRef();
180 capture_module_->RegisterCaptureDataCallback(*this);
181 if (module_process_thread_.RegisterModule(capture_module_) != 0) {
188 int ViECapturer::FrameCallbackChanged() {
189 if (Started() && !CaptureCapabilityFixed()) {
190 // Reconfigure the camera if a new size is required and the capture device
191 // does not provide encoded frames.
195 VideoCaptureCapability capture_settings;
196 capture_module_->CaptureSettings(capture_settings);
197 GetBestFormat(&best_width, &best_height, &best_frame_rate);
198 if (best_width != 0 && best_height != 0 && best_frame_rate != 0) {
199 if (best_width != capture_settings.width ||
200 best_height != capture_settings.height ||
201 best_frame_rate != capture_settings.maxFPS ||
202 capture_settings.codecType != kVideoCodecUnknown) {
204 Start(requested_capability_);
211 int32_t ViECapturer::Start(const CaptureCapability& capture_capability) {
212 WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_), "%s",
217 VideoCaptureCapability capability;
218 requested_capability_ = capture_capability;
220 if (!CaptureCapabilityFixed()) {
221 // Ask the observers for best size.
222 GetBestFormat(&width, &height, &frame_rate);
224 width = kViECaptureDefaultWidth;
227 height = kViECaptureDefaultHeight;
229 if (frame_rate == 0) {
230 frame_rate = kViECaptureDefaultFramerate;
232 capability.height = height;
233 capability.width = width;
234 capability.maxFPS = frame_rate;
235 capability.rawType = kVideoI420;
236 capability.codecType = kVideoCodecUnknown;
238 // Width, height and type specified with call to Start, not set by
240 capability.width = requested_capability_.width;
241 capability.height = requested_capability_.height;
242 capability.maxFPS = requested_capability_.maxFPS;
243 capability.rawType = requested_capability_.rawType;
244 capability.interlaced = requested_capability_.interlaced;
246 return capture_module_->StartCapture(capability);
249 int32_t ViECapturer::Stop() {
250 WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_), "%s",
252 requested_capability_ = CaptureCapability();
253 return capture_module_->StopCapture();
256 bool ViECapturer::Started() {
257 WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_), "%s",
259 return capture_module_->CaptureStarted();
262 const char* ViECapturer::CurrentDeviceName() const {
263 return capture_module_->CurrentDeviceName();
266 void ViECapturer::RegisterCpuOveruseObserver(CpuOveruseObserver* observer) {
267 overuse_detector_->SetObserver(observer);
270 void ViECapturer::CpuOveruseMeasures(int* capture_jitter_ms,
271 int* avg_encode_time_ms,
272 int* encode_usage_percent,
273 int* capture_queue_delay_ms_per_s) const {
274 *capture_jitter_ms = overuse_detector_->last_capture_jitter_ms();
275 *avg_encode_time_ms = overuse_detector_->AvgEncodeTimeMs();
276 *encode_usage_percent = overuse_detector_->EncodeUsagePercent();
277 *capture_queue_delay_ms_per_s =
278 overuse_detector_->AvgCaptureQueueDelayMsPerS();
281 int32_t ViECapturer::SetCaptureDelay(int32_t delay_ms) {
282 capture_module_->SetCaptureDelay(delay_ms);
286 int32_t ViECapturer::SetRotateCapturedFrames(
287 const RotateCapturedFrame rotation) {
288 VideoCaptureRotation converted_rotation = kCameraRotate0;
290 case RotateCapturedFrame_0:
291 converted_rotation = kCameraRotate0;
293 case RotateCapturedFrame_90:
294 converted_rotation = kCameraRotate90;
296 case RotateCapturedFrame_180:
297 converted_rotation = kCameraRotate180;
299 case RotateCapturedFrame_270:
300 converted_rotation = kCameraRotate270;
303 return capture_module_->SetCaptureRotation(converted_rotation);
// External-capture entry point: forwards a raw frame buffer to the external
// capture module with a capability describing its dimensions and pixel type.
// NOTE(review): listing is truncated — the width/height parameter lines and
// the early-return for a missing external module are absent; lines carry
// stray number prefixes. Tokens preserved exactly as found.
306 int ViECapturer::IncomingFrame(unsigned char* video_frame,
307 unsigned int video_frame_length,
310 RawVideoType video_type,
311 unsigned long long capture_time) { // NOLINT
312 WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
313 "ExternalCapture::IncomingFrame width %d, height %d, "
314 "capture_time %u", width, height, capture_time);
// Only valid for capturers created with a NULL device id (external capture).
316 if (!external_capture_module_) {
319 VideoCaptureCapability capability;
320 capability.width = width;
321 capability.height = height;
322 capability.rawType = video_type;
323 return external_capture_module_->IncomingFrame(video_frame,
325 capability, capture_time);
// External-capture entry point for pre-split I420 planes: computes per-plane
// sizes from the pitches (chroma planes are half height, rounded up), copies
// the planes into incoming_frame_ under incoming_frame_cs_, and forwards the
// frame to the external capture module.
// NOTE(review): listing is truncated — the CreateFrame plane/stride argument
// lines, the error early-return and the final render-time argument are
// absent; lines carry stray number prefixes. Tokens preserved as found.
328 int ViECapturer::IncomingFrameI420(const ViEVideoFrameI420& video_frame,
329 unsigned long long capture_time) { // NOLINT
330 WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
331 "ExternalCapture::IncomingFrame width %d, height %d, "
332 " capture_time %u", video_frame.width, video_frame.height,
335 if (!external_capture_module_) {
// Plane byte sizes derived from pitches; (h + 1) / 2 rounds odd heights up.
339 int size_y = video_frame.height * video_frame.y_pitch;
340 int size_u = video_frame.u_pitch * ((video_frame.height + 1) / 2);
341 int size_v = video_frame.v_pitch * ((video_frame.height + 1) / 2);
342 CriticalSectionScoped cs(incoming_frame_cs_.get());
343 int ret = incoming_frame_.CreateFrame(size_y,
353 video_frame.v_pitch);
356 WEBRTC_TRACE(kTraceError,
358 ViEId(engine_id_, capture_id_),
359 "Failed to create I420VideoFrame");
363 return external_capture_module_->IncomingI420VideoFrame(&incoming_frame_,
367 void ViECapturer::SwapFrame(I420VideoFrame* frame) {
368 external_capture_module_->IncomingI420VideoFrame(frame,
369 frame->render_time_ms());
372 void ViECapturer::OnIncomingCapturedFrame(const int32_t capture_id,
373 I420VideoFrame& video_frame) {
374 WEBRTC_TRACE(kTraceStream, kTraceVideo, ViEId(engine_id_, capture_id_),
375 "%s(capture_id: %d)", __FUNCTION__, capture_id);
376 CriticalSectionScoped cs(capture_cs_.get());
377 // Make sure we render this frame earlier since we know the render time set
378 // is slightly off since it's being set when the frame has been received from
379 // the camera, and not when the camera actually captured the frame.
380 video_frame.set_render_time_ms(video_frame.render_time_ms() - FrameDelay());
382 TRACE_EVENT_ASYNC_BEGIN1("webrtc", "Video", video_frame.render_time_ms(),
383 "render_time", video_frame.render_time_ms());
385 captured_frame_.SwapFrame(&video_frame);
386 capture_event_.Set();
387 overuse_detector_->FrameCaptured(captured_frame_.width(),
388 captured_frame_.height());
392 void ViECapturer::OnCaptureDelayChanged(const int32_t id,
393 const int32_t delay) {
394 WEBRTC_TRACE(kTraceStream, kTraceVideo, ViEId(engine_id_, capture_id_),
395 "%s(capture_id: %d) delay %d", __FUNCTION__, capture_id_,
398 // Deliver the network delay to all registered callbacks.
399 ViEFrameProviderBase::SetFrameDelay(delay);
// Registers (non-NULL argument) or deregisters (NULL argument) the effect
// filter applied in DeliverI420Frame, under deliver_cs_. Registering when a
// filter is already set, or deregistering when none is set, is an error.
// NOTE(review): listing is truncated — the error returns, else-branch braces
// and final return are absent; lines carry stray number prefixes. Tokens
// preserved exactly as found.
402 int32_t ViECapturer::RegisterEffectFilter(
403 ViEEffectFilter* effect_filter)	{
404 CriticalSectionScoped cs(deliver_cs_.get());
// NULL argument => deregister path.
406 if (!effect_filter) {
407 if (!effect_filter_) {
408 WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, capture_id_),
409 "%s: no effect filter added for capture device %d",
410 __FUNCTION__, capture_id_);
413 WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
414 "%s: deregister effect filter for device %d", __FUNCTION__,
// Non-NULL argument => register path; only one filter may be active.
417 if (effect_filter_) {
418 WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, capture_id_),
419 "%s: effect filter already added for capture device %d",
420 __FUNCTION__, capture_id_);
423 WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
424 "%s: register effect filter for device %d", __FUNCTION__,
427 effect_filter_ = effect_filter;
431 int32_t ViECapturer::IncImageProcRefCount() {
432 if (!image_proc_module_) {
433 assert(image_proc_module_ref_counter_ == 0);
434 image_proc_module_ = VideoProcessingModule::Create(
435 ViEModuleId(engine_id_, capture_id_));
436 if (!image_proc_module_) {
437 WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, capture_id_),
438 "%s: could not create video processing module",
443 image_proc_module_ref_counter_++;
447 int32_t ViECapturer::DecImageProcRefCount() {
448 image_proc_module_ref_counter_--;
449 if (image_proc_module_ref_counter_ == 0) {
451 VideoProcessingModule::Destroy(image_proc_module_);
452 image_proc_module_ = NULL;
457 int32_t ViECapturer::EnableDenoising(bool enable) {
458 WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
459 "%s(capture_device_id: %d, enable: %d)", __FUNCTION__,
460 capture_id_, enable);
462 CriticalSectionScoped cs(deliver_cs_.get());
464 if (denoising_enabled_) {
465 // Already enabled, nothing need to be done.
468 denoising_enabled_ = true;
469 if (IncImageProcRefCount() != 0) {
473 if (denoising_enabled_ == false) {
474 // Already disabled, nothing need to be done.
477 denoising_enabled_ = false;
478 DecImageProcRefCount();
484 int32_t ViECapturer::EnableDeflickering(bool enable) {
485 WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
486 "%s(capture_device_id: %d, enable: %d)", __FUNCTION__,
487 capture_id_, enable);
489 CriticalSectionScoped cs(deliver_cs_.get());
491 if (deflicker_frame_stats_) {
492 WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, capture_id_),
493 "%s: deflickering already enabled", __FUNCTION__);
496 if (IncImageProcRefCount() != 0) {
499 deflicker_frame_stats_ = new VideoProcessingModule::FrameStats();
501 if (deflicker_frame_stats_ == NULL) {
502 WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, capture_id_),
503 "%s: deflickering not enabled", __FUNCTION__);
506 DecImageProcRefCount();
507 delete deflicker_frame_stats_;
508 deflicker_frame_stats_ = NULL;
513 int32_t ViECapturer::EnableBrightnessAlarm(bool enable) {
514 WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
515 "%s(capture_device_id: %d, enable: %d)", __FUNCTION__,
516 capture_id_, enable);
518 CriticalSectionScoped cs(deliver_cs_.get());
520 if (brightness_frame_stats_) {
521 WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, capture_id_),
522 "%s: BrightnessAlarm already enabled", __FUNCTION__);
525 if (IncImageProcRefCount() != 0) {
528 brightness_frame_stats_ = new VideoProcessingModule::FrameStats();
530 DecImageProcRefCount();
531 if (brightness_frame_stats_ == NULL) {
532 WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, capture_id_),
533 "%s: deflickering not enabled", __FUNCTION__);
536 delete brightness_frame_stats_;
537 brightness_frame_stats_ = NULL;
542 bool ViECapturer::ViECaptureThreadFunction(void* obj) {
543 return static_cast<ViECapturer*>(obj)->ViECaptureProcess();
546 bool ViECapturer::ViECaptureProcess() {
547 if (capture_event_.Wait(kThreadWaitTimeMs) == kEventSignaled) {
548 overuse_detector_->FrameProcessingStarted();
549 int64_t encode_start_time = -1;
550 deliver_cs_->Enter();
551 if (SwapCapturedAndDeliverFrameIfAvailable()) {
552 encode_start_time = Clock::GetRealTimeClock()->TimeInMilliseconds();
553 DeliverI420Frame(&deliver_frame_);
555 deliver_cs_->Leave();
556 if (current_brightness_level_ != reported_brightness_level_) {
557 CriticalSectionScoped cs(observer_cs_.get());
559 observer_->BrightnessAlarm(id_, current_brightness_level_);
560 reported_brightness_level_ = current_brightness_level_;
563 // Update the overuse detector with the duration.
564 if (encode_start_time != -1) {
565 overuse_detector_->FrameEncoded(
566 Clock::GetRealTimeClock()->TimeInMilliseconds() - encode_start_time);
// Runs the enabled image-processing stages (deflickering, denoising,
// brightness detection), applies the registered effect filter on an
// extracted copy of the buffer, then hands the frame to all registered
// consumers via the frame provider base.
// NOTE(review): listing is truncated — else-branch braces, switch break
// statements and several closing braces are absent; lines carry stray number
// prefixes. Tokens preserved exactly as found.
573 void ViECapturer::DeliverI420Frame(I420VideoFrame* video_frame) {
574 // Apply image enhancement and effect filter.
575 if (deflicker_frame_stats_) {
576 if (image_proc_module_->GetFrameStats(deflicker_frame_stats_,
577 *video_frame) == 0) {
578 image_proc_module_->Deflickering(video_frame, deflicker_frame_stats_);
580 WEBRTC_TRACE(kTraceStream, kTraceVideo, ViEId(engine_id_, capture_id_),
581 "%s: could not get frame stats for captured frame",
585 if (denoising_enabled_) {
586 image_proc_module_->Denoising(video_frame);
588 if (brightness_frame_stats_) {
589 if (image_proc_module_->GetFrameStats(brightness_frame_stats_,
590 *video_frame) == 0) {
591 int32_t brightness = image_proc_module_->BrightnessDetection(
592 *video_frame, *brightness_frame_stats_);
// Map detection result to the level reported by ViECaptureProcess().
594 switch (brightness) {
595 case VideoProcessingModule::kNoWarning:
596 current_brightness_level_ = Normal;
598 case VideoProcessingModule::kDarkWarning:
599 current_brightness_level_ = Dark;
601 case VideoProcessingModule::kBrightWarning:
602 current_brightness_level_ = Bright;
605 WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
606 "%s: Brightness detection failed", __FUNCTION__);
// The effect filter operates on a raw copy of the I420 buffer.
610 if (effect_filter_) {
611 unsigned int length = CalcBufferSize(kI420,
612 video_frame->width(),
613 video_frame->height());
614 scoped_array<uint8_t> video_buffer(new uint8_t[length]);
615 ExtractBuffer(*video_frame, length, video_buffer.get());
616 effect_filter_->Transform(length, video_buffer.get(),
617 video_frame->timestamp(), video_frame->width(),
618 video_frame->height());
620 // Deliver the captured frame to all observers (channels, renderer or file).
621 ViEFrameProviderBase::DeliverFrame(video_frame);
624 int ViECapturer::DeregisterFrameCallback(
625 const ViEFrameCallback* callbackObject) {
626 return ViEFrameProviderBase::DeregisterFrameCallback(callbackObject);
629 bool ViECapturer::IsFrameCallbackRegistered(
630 const ViEFrameCallback* callbackObject) {
631 CriticalSectionScoped cs(provider_cs_.get());
632 return ViEFrameProviderBase::IsFrameCallbackRegistered(callbackObject);
635 bool ViECapturer::CaptureCapabilityFixed() {
636 return requested_capability_.width != 0 &&
637 requested_capability_.height != 0 &&
638 requested_capability_.maxFPS != 0;
// Registers the single capture observer (frame-rate, brightness and
// no-picture callbacks) and enables the corresponding capture-module
// callbacks. Fails if an observer is already registered.
// NOTE(review): listing is truncated — the already-registered check, trace
// severity argument, error return and final return are absent; lines carry
// stray number prefixes. Tokens preserved exactly as found.
641 int32_t ViECapturer::RegisterObserver(ViECaptureObserver* observer) {
// observer_ is written under observer_cs_; read by the capture thread.
643 CriticalSectionScoped cs(observer_cs_.get());
645 WEBRTC_TRACE(kTraceError,
647 ViEId(engine_id_, capture_id_),
648 "%s Observer already registered",
653 observer_ = observer;
// Turn on the module callbacks that feed the observer.
655 capture_module_->RegisterCaptureCallback(*this);
656 capture_module_->EnableFrameRateCallback(true);
657 capture_module_->EnableNoPictureAlarm(true);
661 int32_t ViECapturer::DeRegisterObserver() {
662 capture_module_->EnableFrameRateCallback(false);
663 capture_module_->EnableNoPictureAlarm(false);
664 capture_module_->DeRegisterCaptureCallback();
666 CriticalSectionScoped cs(observer_cs_.get());
671 bool ViECapturer::IsObserverRegistered() {
672 CriticalSectionScoped cs(observer_cs_.get());
673 return observer_ != NULL;
676 void ViECapturer::OnCaptureFrameRate(const int32_t id,
677 const uint32_t frame_rate) {
678 WEBRTC_TRACE(kTraceStream, kTraceVideo, ViEId(engine_id_, capture_id_),
679 "OnCaptureFrameRate %d", frame_rate);
681 CriticalSectionScoped cs(observer_cs_.get());
682 observer_->CapturedFrameRate(id_, static_cast<uint8_t>(frame_rate));
685 void ViECapturer::OnNoPictureAlarm(const int32_t id,
686 const VideoCaptureAlarm alarm) {
687 WEBRTC_TRACE(kTraceStream, kTraceVideo, ViEId(engine_id_, capture_id_),
688 "OnNoPictureAlarm %d", alarm);
690 CriticalSectionScoped cs(observer_cs_.get());
691 CaptureAlarm vie_alarm = (alarm == Raised) ? AlarmRaised : AlarmCleared;
692 observer_->NoPictureAlarm(id, vie_alarm);
695 bool ViECapturer::SwapCapturedAndDeliverFrameIfAvailable() {
696 CriticalSectionScoped cs(capture_cs_.get());
697 if (captured_frame_.IsZeroSize())
700 deliver_frame_.SwapFrame(&captured_frame_);
701 captured_frame_.ResetSize();
705 } // namespace webrtc