1 // Copyright (c) 2012 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
5 #include "media/video/capture/win/video_capture_device_win.h"
13 #include "base/strings/sys_string_conversions.h"
14 #include "base/win/scoped_co_mem.h"
15 #include "base/win/scoped_variant.h"
16 #include "media/video/capture/win/video_capture_device_mf_win.h"
18 using base::win::ScopedCoMem;
19 using base::win::ScopedComPtr;
20 using base::win::ScopedVariant;
24 // Finds and creates a DirectShow Video Capture filter matching the device_name.
// Enumerates the CLSID_VideoInputDeviceCategory device monikers, reads each
// device's "DevicePath", "Description" or "FriendlyName" property and compares
// it against device_name.id(). On a match, the moniker is bound to
// IID_IBaseFilter and ownership of the filter is handed to the caller through
// |filter|; ERROR_NOT_FOUND (as an HRESULT) is reported when nothing matched.
// NOTE(review): this view of the file is elided — several lines of the original
// body (error early-returns, per-iteration resets) are not visible here.
26 HRESULT VideoCaptureDeviceWin::GetDeviceFilter(
27 const VideoCaptureDevice::Name& device_name,
28 IBaseFilter** filter) {
31 ScopedComPtr<ICreateDevEnum> dev_enum;
32 HRESULT hr = dev_enum.CreateInstance(CLSID_SystemDeviceEnum, NULL,
37 ScopedComPtr<IEnumMoniker> enum_moniker;
38 hr = dev_enum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory,
39 enum_moniker.Receive(), 0);
40 // CreateClassEnumerator returns S_FALSE on some versions of Windows when no
41 // camera exists, so the FAILED macro alone can't be used to detect errors.
45 ScopedComPtr<IMoniker> moniker;
46 ScopedComPtr<IBaseFilter> capture_filter;
48 while (enum_moniker->Next(1, moniker.Receive(), &fetched) == S_OK) {
49 ScopedComPtr<IPropertyBag> prop_bag;
50 hr = moniker->BindToStorage(0, 0, IID_IPropertyBag, prop_bag.ReceiveVoid());
56 // Find the device path, description or friendly name, in that order.
57 static const wchar_t* kPropertyNames[] = {
58 L"DevicePath", L"Description", L"FriendlyName"
// Stop probing further properties as soon as one read yields a BSTR.
62 i < arraysize(kPropertyNames) && name.type() != VT_BSTR; ++i) {
63 prop_bag->Read(kPropertyNames[i], name.Receive(), 0);
65 if (name.type() == VT_BSTR) {
66 std::string device_path(base::SysWideToUTF8(V_BSTR(&name)));
67 if (device_path.compare(device_name.id()) == 0) {
68 // We have found the requested device.
69 hr = moniker->BindToObject(0, 0, IID_IBaseFilter,
70 capture_filter.ReceiveVoid());
71 DVPLOG_IF(2, FAILED(hr)) << "Failed to bind camera filter.";
// Transfer ownership to the caller; report ERROR_NOT_FOUND when the loop
// finished without binding a filter and no other error occurred.
78 *filter = capture_filter.Detach();
79 if (!*filter && SUCCEEDED(hr))
80 hr = HRESULT_FROM_WIN32(ERROR_NOT_FOUND);
85 // Check if a Pin matches a category.
// Queries the pin's IKsPropertySet for AMPROPERTY_PIN_CATEGORY and compares
// the returned GUID against |category|. Returns false when the interface or
// the property is unavailable.
87 bool VideoCaptureDeviceWin::PinMatchesCategory(IPin* pin, REFGUID category) {
90 ScopedComPtr<IKsPropertySet> ks_property;
91 HRESULT hr = ks_property.QueryFrom(pin);
95 hr = ks_property->Get(AMPROPSETID_Pin, AMPROPERTY_PIN_CATEGORY, NULL, 0,
96 &pin_category, sizeof(pin_category), &return_value);
// Only trust the category if the driver wrote exactly sizeof(GUID) bytes.
97 if (SUCCEEDED(hr) && (return_value == sizeof(pin_category))) {
98 found = (pin_category == category);
104 // Finds an IPin on an IBaseFilter given the direction and category.
// Returns the first pin whose direction equals |pin_dir| and whose category
// matches |category| (GUID_NULL accepts any category); a NULL ScopedComPtr is
// returned when enumeration fails or no pin matches.
106 ScopedComPtr<IPin> VideoCaptureDeviceWin::GetPin(IBaseFilter* filter,
107 PIN_DIRECTION pin_dir,
109 ScopedComPtr<IPin> pin;
110 ScopedComPtr<IEnumPins> pin_emum;
111 HRESULT hr = filter->EnumPins(pin_emum.Receive());
112 if (pin_emum == NULL)
115 // Get first unconnected pin.
116 hr = pin_emum->Reset(); // set to first pin
117 while ((hr = pin_emum->Next(1, pin.Receive(), NULL)) == S_OK) {
// Initialize to an invalid direction so a failed QueryDirection() can't
// accidentally compare equal to |pin_dir|.
118 PIN_DIRECTION this_pin_dir = static_cast<PIN_DIRECTION>(-1);
119 hr = pin->QueryDirection(&this_pin_dir);
120 if (pin_dir == this_pin_dir) {
121 if (category == GUID_NULL || PinMatchesCategory(pin, category))
// Maps a DirectShow media subtype GUID to the corresponding VideoPixelFormat.
// Subtypes not present in the table are logged at verbosity 2 and reported as
// PIXEL_FORMAT_UNKNOWN.
132 VideoPixelFormat VideoCaptureDeviceWin::TranslateMediaSubtypeToPixelFormat(
133 const GUID& sub_type) {
// Lookup table: DirectShow subtype GUID -> pixel format. Note that both IYUV
// and the custom I420 GUID map to I420, and HDYC maps to UYVY.
135 const GUID& sub_type;
136 VideoPixelFormat format;
137 } pixel_formats[] = {
138 { kMediaSubTypeI420, PIXEL_FORMAT_I420 },
139 { MEDIASUBTYPE_IYUV, PIXEL_FORMAT_I420 },
140 { MEDIASUBTYPE_RGB24, PIXEL_FORMAT_RGB24 },
141 { MEDIASUBTYPE_YUY2, PIXEL_FORMAT_YUY2 },
142 { MEDIASUBTYPE_MJPG, PIXEL_FORMAT_MJPEG },
143 { MEDIASUBTYPE_UYVY, PIXEL_FORMAT_UYVY },
144 { MEDIASUBTYPE_ARGB32, PIXEL_FORMAT_ARGB },
145 { kMediaSubTypeHDYC, PIXEL_FORMAT_UYVY },
147 for (size_t i = 0; i < ARRAYSIZE_UNSAFE(pixel_formats); ++i) {
148 if (sub_type == pixel_formats[i].sub_type)
149 return pixel_formats[i].format;
// Unrecognized subtype: log its textual GUID to aid debugging new cameras.
153 StringFromGUID2(sub_type, guid_str, arraysize(guid_str));
154 DVLOG(2) << "Device (also) supports an unknown media type " << guid_str;
156 return PIXEL_FORMAT_UNKNOWN;
159 void VideoCaptureDeviceWin::ScopedMediaType::Free() {
// Releases the owned AM_MEDIA_TYPE (format block, pUnk and the struct itself).
163 DeleteMediaType(media_type_);
167 AM_MEDIA_TYPE** VideoCaptureDeviceWin::ScopedMediaType::Receive() {
// Hands out the address of the owned pointer for COM out-param APIs; the
// holder must be empty beforehand so an existing media type isn't leaked.
168 DCHECK(!media_type_);
172 // Release the format block for a media type.
173 // http://msdn.microsoft.com/en-us/library/dd375432(VS.85).aspx
// This mirrors the DirectShow base-class helper FreeMediaType(): the format
// block is CoTaskMem-allocated and pUnk (if any) must be released.
174 void VideoCaptureDeviceWin::ScopedMediaType::FreeMediaType(AM_MEDIA_TYPE* mt) {
175 if (mt->cbFormat != 0) {
176 CoTaskMemFree(mt->pbFormat);
180 if (mt->pUnk != NULL) {
182 // pUnk should not be used.
188 // Delete a media type structure that was allocated on the heap.
189 // http://msdn.microsoft.com/en-us/library/dd375432(VS.85).aspx
// NOTE(review): body elided in this view — presumably it frees the embedded
// format block via FreeMediaType() before releasing the struct; confirm
// against the full file.
190 void VideoCaptureDeviceWin::ScopedMediaType::DeleteMediaType(
// Binds this capture-device instance to the DirectShow device identified by
// |device_name|; graph construction is deferred to Init().
198 VideoCaptureDeviceWin::VideoCaptureDeviceWin(const Name& device_name)
199 : device_name_(device_name),
204 VideoCaptureDeviceWin::~VideoCaptureDeviceWin() {
205 DCHECK(CalledOnValidThread());
// Stop the running graph before tearing down its filters.
207 media_control_->Stop();
209 if (graph_builder_) {
// Detach every filter Init()/AllocateAndStart() may have added: sink,
// capture source, and the optional MJPEG decompressor.
211 graph_builder_->RemoveFilter(sink_filter_);
216 graph_builder_->RemoveFilter(capture_filter_);
219 graph_builder_->RemoveFilter(mjpg_filter_);
// Creates the capture filter, the sink filter and the filter graph, obtains
// the IMediaControl interface, and builds the capability map. Returns false
// on any failure (DVLOG(2) carries the specific step that failed).
223 bool VideoCaptureDeviceWin::Init() {
224 DCHECK(CalledOnValidThread());
225 HRESULT hr = GetDeviceFilter(device_name_, capture_filter_.Receive());
226 if (!capture_filter_) {
227 DVLOG(2) << "Failed to create capture filter.";
// The frames flow out of the capture filter's PIN_CATEGORY_CAPTURE pin.
231 output_capture_pin_ =
232 GetPin(capture_filter_, PINDIR_OUTPUT, PIN_CATEGORY_CAPTURE);
233 if (!output_capture_pin_) {
234 DVLOG(2) << "Failed to get capture output pin";
238 // Create the sink filter used for receiving Captured frames.
239 sink_filter_ = new SinkFilter(this);
240 if (sink_filter_ == NULL) {
241 DVLOG(2) << "Failed to create send filter";
245 input_sink_pin_ = sink_filter_->GetPin(0);
247 hr = graph_builder_.CreateInstance(CLSID_FilterGraph, NULL,
248 CLSCTX_INPROC_SERVER);
250 DVLOG(2) << "Failed to create graph builder.";
// IMediaControl drives Run/Pause/Stop of the whole graph.
254 hr = graph_builder_.QueryInterface(media_control_.Receive());
256 DVLOG(2) << "Failed to create media control builder.";
260 hr = graph_builder_->AddFilter(capture_filter_, NULL);
262 DVLOG(2) << "Failed to add the capture device to the graph.";
266 hr = graph_builder_->AddFilter(sink_filter_, NULL);
268 DVLOG(2)<< "Failed to add the send filter to the graph.";
272 return CreateCapabilityMap();
// Picks the device capability best matching |params|, configures the capture
// pin and sink filter for it, connects the graph (optionally routing through
// an MJPEG decompressor or an AVI decompressor for HDYC), and starts the
// stream. Captured frames and errors are delivered to |client|.
275 void VideoCaptureDeviceWin::AllocateAndStart(
276 const VideoCaptureParams& params,
277 scoped_ptr<VideoCaptureDevice::Client> client) {
278 DCHECK(CalledOnValidThread());
// Take ownership of the client that will receive frames and errors.
282 client_ = client.Pass();
284 // Get the camera capability that best matches the requested resolution.
285 const VideoCaptureCapabilityWin& found_capability =
286 capabilities_.GetBestMatchedFormat(
287 params.requested_format.frame_size.width(),
288 params.requested_format.frame_size.height(),
289 params.requested_format.frame_rate);
290 VideoCaptureFormat format = found_capability.supported_format;
292 // Reduce the frame rate if the requested frame rate is lower
293 // than the capability.
294 if (format.frame_rate > params.requested_format.frame_rate)
295 format.frame_rate = params.requested_format.frame_rate;
297 ScopedComPtr<IAMStreamConfig> stream_config;
298 HRESULT hr = output_capture_pin_.QueryInterface(stream_config.Receive());
300 SetErrorState("Can't get the Capture format settings");
304 int count = 0, size = 0;
305 hr = stream_config->GetNumberOfCapabilities(&count, &size);
307 SetErrorState("Failed to GetNumberOfCapabilities");
// |caps| receives the driver's stream-caps structure (|size| bytes).
311 scoped_ptr<BYTE[]> caps(new BYTE[size]);
312 ScopedMediaType media_type;
314 // Get the windows capability from the capture device.
315 hr = stream_config->GetStreamCaps(
316 found_capability.stream_index, media_type.Receive(), caps.get());
318 if (media_type->formattype == FORMAT_VideoInfo) {
320 reinterpret_cast<VIDEOINFOHEADER*>(media_type->pbFormat);
// AvgTimePerFrame is expressed in 100ns "reference time" units.
321 if (format.frame_rate > 0)
322 h->AvgTimePerFrame = kSecondsToReferenceTime / format.frame_rate;
324 // Set the sink filter to request this format.
325 sink_filter_->SetRequestedMediaFormat(format);
326 // Order the capture device to use this format.
327 hr = stream_config->SetFormat(media_type.get());
331 SetErrorState("Failed to set capture device output format");
333 if (format.pixel_format == PIXEL_FORMAT_MJPEG && !mjpg_filter_.get()) {
334 // Create the MJPG decompressor filter lazily, only when needed.
335 hr = mjpg_filter_.CreateInstance(CLSID_MjpegDec, NULL, CLSCTX_INPROC);
338 input_mjpg_pin_ = GetPin(mjpg_filter_, PINDIR_INPUT, GUID_NULL);
339 output_mjpg_pin_ = GetPin(mjpg_filter_, PINDIR_OUTPUT, GUID_NULL);
340 hr = graph_builder_->AddFilter(mjpg_filter_, NULL);
// On failure, release the decoder and its pins so the graph is built
// without MJPEG decompression.
344 mjpg_filter_.Release();
345 input_mjpg_pin_.Release();
346 output_mjpg_pin_.Release();
350 SetAntiFlickerInCaptureFilter();
352 if (format.pixel_format == PIXEL_FORMAT_MJPEG && mjpg_filter_.get()) {
353 // Connect the camera to the MJPEG decoder.
354 hr = graph_builder_->ConnectDirect(output_capture_pin_, input_mjpg_pin_,
356 // Connect the MJPEG decoder to the sink filter.
357 hr += graph_builder_->ConnectDirect(output_mjpg_pin_, input_sink_pin_,
359 } else if (media_type->subtype == kMediaSubTypeHDYC) {
360 // HDYC pixel format, used by the DeckLink capture card, needs an AVI
361 // decompressor filter after source, let |graph_builder_| add it.
362 hr = graph_builder_->Connect(output_capture_pin_, input_sink_pin_);
364 hr = graph_builder_->ConnectDirect(output_capture_pin_, input_sink_pin_,
369 SetErrorState("Failed to connect the Capture graph.");
373 hr = media_control_->Pause();
375 SetErrorState("Failed to Pause the Capture device. "
376 "Is it already occupied?");
380 // Get the format back from the sink filter after the filters have been
382 capture_format_ = sink_filter_->ResultingFormat();
385 hr = media_control_->Run();
387 SetErrorState("Failed to start the Capture device.");
394 void VideoCaptureDeviceWin::StopAndDeAllocate() {
395 DCHECK(CalledOnValidThread());
// No-op unless a capture is actually running.
396 if (state_ != kCapturing)
399 HRESULT hr = media_control_->Stop();
401 SetErrorState("Failed to stop the capture graph.");
// Disconnect the pins so AllocateAndStart() can rebuild the graph later.
405 graph_builder_->Disconnect(output_capture_pin_);
406 graph_builder_->Disconnect(input_sink_pin_);
408 // If the MJPEG filter exists, disconnect it even if it has not been used.
410 graph_builder_->Disconnect(input_mjpg_pin_);
411 graph_builder_->Disconnect(output_mjpg_pin_);
415 SetErrorState("Failed to Stop the Capture device");
422 // Implements SinkFilterObserver::SinkFilterObserver.
// Called by the sink filter for each captured frame; forwards the raw buffer
// to the client together with the negotiated capture format and a timestamp.
// (The |length| parameter of the signature is elided from this view.)
423 void VideoCaptureDeviceWin::FrameReceived(const uint8* buffer,
425 client_->OnIncomingCapturedData(
426 buffer, length, capture_format_, 0, base::TimeTicks::Now());
// Enumerates every stream capability exposed by the capture pin and records
// the usable video formats in |capabilities_|. Returns true if at least one
// capability was found.
429 bool VideoCaptureDeviceWin::CreateCapabilityMap() {
430 DCHECK(CalledOnValidThread());
431 ScopedComPtr<IAMStreamConfig> stream_config;
432 HRESULT hr = output_capture_pin_.QueryInterface(stream_config.Receive());
434 DVLOG(2) << "Failed to get IAMStreamConfig interface from "
439 // Get interface used for getting the frame rate.
440 ScopedComPtr<IAMVideoControl> video_control;
441 hr = capture_filter_.QueryInterface(video_control.Receive());
442 DVLOG_IF(2, FAILED(hr)) << "IAMVideoControl Interface NOT SUPPORTED";
444 int count = 0, size = 0;
445 hr = stream_config->GetNumberOfCapabilities(&count, &size);
447 DVLOG(2) << "Failed to GetNumberOfCapabilities";
451 scoped_ptr<BYTE[]> caps(new BYTE[size]);
452 for (int i = 0; i < count; ++i) {
453 ScopedMediaType media_type;
454 hr = stream_config->GetStreamCaps(i, media_type.Receive(), caps.get());
455 // GetStreamCaps() may return S_FALSE, so don't use FAILED() or SUCCEED()
456 // macros here since they'll trigger incorrectly.
458 DVLOG(2) << "Failed to GetStreamCaps";
462 if (media_type->majortype == MEDIATYPE_Video &&
463 media_type->formattype == FORMAT_VideoInfo) {
464 VideoCaptureCapabilityWin capability(i);
465 capability.supported_format.pixel_format =
466 TranslateMediaSubtypeToPixelFormat(media_type->subtype);
// Skip media subtypes this pipeline can't consume.
467 if (capability.supported_format.pixel_format == PIXEL_FORMAT_UNKNOWN)
471 reinterpret_cast<VIDEOINFOHEADER*>(media_type->pbFormat);
472 capability.supported_format.frame_size.SetSize(h->bmiHeader.biWidth,
473 h->bmiHeader.biHeight);
475 // Try to get a better |time_per_frame| from IAMVideoControl. If not, use
476 // the value from VIDEOINFOHEADER.
477 REFERENCE_TIME time_per_frame = h->AvgTimePerFrame;
479 ScopedCoMem<LONGLONG> max_fps;
481 SIZE size = {capability.supported_format.frame_size.width(),
482 capability.supported_format.frame_size.height()};
484 // GetFrameRateList doesn't always return the max frame rate first (e.g.
485 // on a Logitech Notebook camera the returned array is reversed). This
486 // may be a bug in that API, so don't assume any particular ordering;
487 // scan the whole list for the best entry instead.
489 hr = video_control->GetFrameRateList(output_capture_pin_, i, size,
490 &list_size, &max_fps);
491 // Sometimes |list_size| will be > 0, but max_fps will be NULL. Some
492 // drivers may return an HRESULT of S_FALSE which SUCCEEDED() translates
493 // into success, so explicitly check S_OK. See http://crbug.com/306237.
494 if (hr == S_OK && list_size > 0 && max_fps) {
// The smallest time-per-frame corresponds to the highest frame rate.
495 time_per_frame = *std::min_element(max_fps.get(),
496 max_fps.get() + list_size);
500 capability.supported_format.frame_rate =
502 ? (kSecondsToReferenceTime / static_cast<float>(time_per_frame))
505 // DirectShow works at the moment only on integer frame rates, but the
506 // best-capability-matching class works on rational frame rates.
507 capability.frame_rate_numerator = capability.supported_format.frame_rate;
508 capability.frame_rate_denominator = 1;
510 capabilities_.Add(capability);
514 return !capabilities_.empty();
517 // Set the power line frequency removal in |capture_filter_| if available.
// Uses the kernel-streaming video-proc-amp property set to ask the driver to
// compensate for 50Hz/60Hz mains flicker, based on the current location.
518 void VideoCaptureDeviceWin::SetAntiFlickerInCaptureFilter() {
519 const int power_line_frequency = GetPowerLineFrequencyForLocation();
// Only 50Hz and 60Hz are meaningful values for the driver property.
520 if (power_line_frequency != kPowerLine50Hz &&
521 power_line_frequency != kPowerLine60Hz) {
524 ScopedComPtr<IKsPropertySet> ks_propset;
525 DWORD type_support = 0;
527 if (SUCCEEDED(hr = ks_propset.QueryFrom(capture_filter_)) &&
528 SUCCEEDED(hr = ks_propset->QuerySupported(PROPSETID_VIDCAP_VIDEOPROCAMP,
529 KSPROPERTY_VIDEOPROCAMP_POWERLINE_FREQUENCY, &type_support)) &&
530 (type_support & KSPROPERTY_SUPPORT_SET)) {
531 KSPROPERTY_VIDEOPROCAMP_S data = {};
532 data.Property.Set = PROPSETID_VIDCAP_VIDEOPROCAMP;
533 data.Property.Id = KSPROPERTY_VIDEOPROCAMP_POWERLINE_FREQUENCY;
534 data.Property.Flags = KSPROPERTY_TYPE_SET;
// KS encodes the frequency as an enum: 1 == 50Hz, 2 == 60Hz.
535 data.Value = (power_line_frequency == kPowerLine50Hz) ? 1 : 2;
536 data.Flags = KSPROPERTY_VIDEOPROCAMP_FLAGS_MANUAL;
537 hr = ks_propset->Set(PROPSETID_VIDCAP_VIDEOPROCAMP,
538 KSPROPERTY_VIDEOPROCAMP_POWERLINE_FREQUENCY,
539 &data, sizeof(data), &data, sizeof(data));
// DVLOG_IF expects a verbosity *level*, not a severity; on Windows the
// ERROR macro expands to 0, so DVLOG_IF(ERROR, ...) silently logged at
// verbosity 0. Use DLOG_IF, which takes a real severity.
540 DLOG_IF(ERROR, FAILED(hr)) << "Anti-flicker setting failed.";
541 DVLOG_IF(2, SUCCEEDED(hr)) << "Anti-flicker set correctly.";
543 DVLOG(2) << "Anti-flicker setting not supported.";
// Records a fatal capture failure and forwards |reason| to the client so the
// capture session can be torn down.
547 void VideoCaptureDeviceWin::SetErrorState(const std::string& reason) {
548 DCHECK(CalledOnValidThread());
551 client_->OnError(reason);