Upstream version 9.38.198.0
[platform/framework/web/crosswalk.git] / src/media/video/capture/win/video_capture_device_win.cc
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "media/video/capture/win/video_capture_device_win.h"

#include <ks.h>
#include <ksmedia.h>

#include <algorithm>
#include <list>

#include "base/strings/sys_string_conversions.h"
#include "base/win/scoped_co_mem.h"
#include "base/win/scoped_variant.h"
#include "media/video/capture/win/video_capture_device_mf_win.h"

using base::win::ScopedCoMem;
using base::win::ScopedComPtr;
using base::win::ScopedVariant;

namespace media {

// Finds and creates a DirectShow Video Capture filter matching the
// device_name.
// static
HRESULT VideoCaptureDeviceWin::GetDeviceFilter(
    const VideoCaptureDevice::Name& device_name,
    IBaseFilter** filter) {
  DCHECK(filter);

  ScopedComPtr<ICreateDevEnum> dev_enum;
  HRESULT hr = dev_enum.CreateInstance(CLSID_SystemDeviceEnum, NULL,
                                       CLSCTX_INPROC);
  if (FAILED(hr))
    return hr;

  ScopedComPtr<IEnumMoniker> enum_moniker;
  hr = dev_enum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory,
                                       enum_moniker.Receive(), 0);
  // CreateClassEnumerator returns S_FALSE on some Windows OS versions when no
  // camera exists. Therefore the FAILED macro can't be used here.
  if (hr != S_OK)
    return HRESULT_FROM_WIN32(ERROR_NOT_FOUND);

  ScopedComPtr<IMoniker> moniker;
  ScopedComPtr<IBaseFilter> capture_filter;
  DWORD fetched = 0;
  while (enum_moniker->Next(1, moniker.Receive(), &fetched) == S_OK) {
    ScopedComPtr<IPropertyBag> prop_bag;
    hr = moniker->BindToStorage(0, 0, IID_IPropertyBag, prop_bag.ReceiveVoid());
    if (FAILED(hr)) {
      moniker.Release();
      continue;
    }

    // Find the device path, description or friendly name.
    static const wchar_t* kPropertyNames[] = {
      L"DevicePath", L"Description", L"FriendlyName"
    };
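    // DevicePath is listed first since it is expected to uniquely identify
    // the device, while Description and FriendlyName are fallbacks that may
    // collide when two identical cameras are present; device_name.id() is
    // assumed to hold the matching string for this device.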
    ScopedVariant name;
    for (size_t i = 0;
         i < arraysize(kPropertyNames) && name.type() != VT_BSTR; ++i) {
      prop_bag->Read(kPropertyNames[i], name.Receive(), 0);
    }
    if (name.type() == VT_BSTR) {
      std::string device_path(base::SysWideToUTF8(V_BSTR(&name)));
      if (device_path.compare(device_name.id()) == 0) {
        // We have found the requested device
        hr = moniker->BindToObject(0, 0, IID_IBaseFilter,
                                   capture_filter.ReceiveVoid());
        DVPLOG_IF(2, FAILED(hr)) << "Failed to bind camera filter.";
        break;
      }
    }
    moniker.Release();
  }

  *filter = capture_filter.Detach();
  if (!*filter && SUCCEEDED(hr))
    hr = HRESULT_FROM_WIN32(ERROR_NOT_FOUND);

  return hr;
}

// Check if a Pin matches a category.
// static
bool VideoCaptureDeviceWin::PinMatchesCategory(IPin* pin, REFGUID category) {
  DCHECK(pin);
  bool found = false;
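  // The pin category is exposed through the pin's IKsPropertySet interface
  // (AMPROPSETID_Pin / AMPROPERTY_PIN_CATEGORY); pins that do not implement
  // IKsPropertySet are treated as not matching.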
  ScopedComPtr<IKsPropertySet> ks_property;
  HRESULT hr = ks_property.QueryFrom(pin);
  if (SUCCEEDED(hr)) {
    GUID pin_category;
    DWORD return_value;
    hr = ks_property->Get(AMPROPSETID_Pin, AMPROPERTY_PIN_CATEGORY, NULL, 0,
                          &pin_category, sizeof(pin_category), &return_value);
    if (SUCCEEDED(hr) && (return_value == sizeof(pin_category))) {
      found = (pin_category == category);
    }
  }
  return found;
}

// Finds an IPin on an IBaseFilter given the direction and category.
// static
ScopedComPtr<IPin> VideoCaptureDeviceWin::GetPin(IBaseFilter* filter,
                                                 PIN_DIRECTION pin_dir,
                                                 REFGUID category) {
  ScopedComPtr<IPin> pin;
  ScopedComPtr<IEnumPins> pin_enum;
  HRESULT hr = filter->EnumPins(pin_enum.Receive());
  if (pin_enum == NULL)
    return pin;

  // Return the first pin that matches the requested direction and category.
  hr = pin_enum->Reset();  // Set to the first pin.
  while ((hr = pin_enum->Next(1, pin.Receive(), NULL)) == S_OK) {
    PIN_DIRECTION this_pin_dir = static_cast<PIN_DIRECTION>(-1);
    hr = pin->QueryDirection(&this_pin_dir);
    if (pin_dir == this_pin_dir) {
      if (category == GUID_NULL || PinMatchesCategory(pin, category))
        return pin;
    }
    pin.Release();
  }

  DCHECK(!pin);
  return pin;
}

// static
VideoPixelFormat VideoCaptureDeviceWin::TranslateMediaSubtypeToPixelFormat(
    const GUID& sub_type) {
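  // Mapping from DirectShow media subtype GUIDs to the pixel formats used by
  // the capture pipeline. kMediaSubTypeI420 and kMediaSubTypeHDYC appear to
  // be Chromium-defined subtypes (note the k prefix) rather than constants
  // from the stock DirectShow headers.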
  static struct {
    const GUID& sub_type;
    VideoPixelFormat format;
  } pixel_formats[] = {
    { kMediaSubTypeI420, PIXEL_FORMAT_I420 },
    { MEDIASUBTYPE_IYUV, PIXEL_FORMAT_I420 },
    { MEDIASUBTYPE_RGB24, PIXEL_FORMAT_RGB24 },
    { MEDIASUBTYPE_YUY2, PIXEL_FORMAT_YUY2 },
    { MEDIASUBTYPE_MJPG, PIXEL_FORMAT_MJPEG },
    { MEDIASUBTYPE_UYVY, PIXEL_FORMAT_UYVY },
    { MEDIASUBTYPE_ARGB32, PIXEL_FORMAT_ARGB },
    { kMediaSubTypeHDYC, PIXEL_FORMAT_UYVY },
  };
  for (size_t i = 0; i < ARRAYSIZE_UNSAFE(pixel_formats); ++i) {
    if (sub_type == pixel_formats[i].sub_type)
      return pixel_formats[i].format;
  }
#ifndef NDEBUG
  WCHAR guid_str[128];
  StringFromGUID2(sub_type, guid_str, arraysize(guid_str));
  DVLOG(2) << "Device (also) supports an unknown media type " << guid_str;
#endif
  return PIXEL_FORMAT_UNKNOWN;
}

void VideoCaptureDeviceWin::ScopedMediaType::Free() {
  if (!media_type_)
    return;

  DeleteMediaType(media_type_);
  media_type_ = NULL;
}

AM_MEDIA_TYPE** VideoCaptureDeviceWin::ScopedMediaType::Receive() {
  DCHECK(!media_type_);
  return &media_type_;
}

// Release the format block for a media type.
// http://msdn.microsoft.com/en-us/library/dd375432(VS.85).aspx
void VideoCaptureDeviceWin::ScopedMediaType::FreeMediaType(AM_MEDIA_TYPE* mt) {
  if (mt->cbFormat != 0) {
    CoTaskMemFree(mt->pbFormat);
    mt->cbFormat = 0;
    mt->pbFormat = NULL;
  }
  if (mt->pUnk != NULL) {
    NOTREACHED();
    // pUnk should not be used.
    mt->pUnk->Release();
    mt->pUnk = NULL;
  }
}

// Delete a media type structure that was allocated on the heap.
// http://msdn.microsoft.com/en-us/library/dd375432(VS.85).aspx
void VideoCaptureDeviceWin::ScopedMediaType::DeleteMediaType(
    AM_MEDIA_TYPE* mt) {
  if (mt != NULL) {
    FreeMediaType(mt);
    CoTaskMemFree(mt);
  }
}

VideoCaptureDeviceWin::VideoCaptureDeviceWin(const Name& device_name)
    : device_name_(device_name),
      state_(kIdle) {
  DetachFromThread();
}

VideoCaptureDeviceWin::~VideoCaptureDeviceWin() {
  DCHECK(CalledOnValidThread());
  if (media_control_)
    media_control_->Stop();

  if (graph_builder_) {
    if (sink_filter_) {
      graph_builder_->RemoveFilter(sink_filter_);
      sink_filter_ = NULL;
    }

    if (capture_filter_)
      graph_builder_->RemoveFilter(capture_filter_);

    if (mjpg_filter_)
      graph_builder_->RemoveFilter(mjpg_filter_);
  }
}

bool VideoCaptureDeviceWin::Init() {
  DCHECK(CalledOnValidThread());
  HRESULT hr = GetDeviceFilter(device_name_, capture_filter_.Receive());
  if (!capture_filter_) {
    DVLOG(2) << "Failed to create capture filter.";
    return false;
  }

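  // Only the output pin in the CAPTURE category is used; any preview or still
  // image pins the filter may expose are ignored.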
  output_capture_pin_ =
      GetPin(capture_filter_, PINDIR_OUTPUT, PIN_CATEGORY_CAPTURE);
  if (!output_capture_pin_) {
    DVLOG(2) << "Failed to get capture output pin";
    return false;
  }

  // Create the sink filter used for receiving captured frames.
  sink_filter_ = new SinkFilter(this);
  if (sink_filter_ == NULL) {
    DVLOG(2) << "Failed to create sink filter";
    return false;
  }

  input_sink_pin_ = sink_filter_->GetPin(0);

  hr = graph_builder_.CreateInstance(CLSID_FilterGraph, NULL,
                                     CLSCTX_INPROC_SERVER);
  if (FAILED(hr)) {
    DVLOG(2) << "Failed to create graph builder.";
    return false;
  }

  hr = graph_builder_.QueryInterface(media_control_.Receive());
  if (FAILED(hr)) {
    DVLOG(2) << "Failed to get the media control interface.";
    return false;
  }

  hr = graph_builder_->AddFilter(capture_filter_, NULL);
  if (FAILED(hr)) {
    DVLOG(2) << "Failed to add the capture device to the graph.";
    return false;
  }

  hr = graph_builder_->AddFilter(sink_filter_, NULL);
  if (FAILED(hr)) {
    DVLOG(2) << "Failed to add the sink filter to the graph.";
    return false;
  }

  return CreateCapabilityMap();
}

void VideoCaptureDeviceWin::AllocateAndStart(
    const VideoCaptureParams& params,
    scoped_ptr<VideoCaptureDevice::Client> client) {
  DCHECK(CalledOnValidThread());
  if (state_ != kIdle)
    return;

  client_ = client.Pass();

  // Get the camera capability that best matches the requested resolution.
  const VideoCaptureCapabilityWin& found_capability =
      capabilities_.GetBestMatchedFormat(
          params.requested_format.frame_size.width(),
          params.requested_format.frame_size.height(),
          params.requested_format.frame_rate);
  VideoCaptureFormat format = found_capability.supported_format;

  // Reduce the frame rate if the requested frame rate is lower
  // than the capability.
  if (format.frame_rate > params.requested_format.frame_rate)
    format.frame_rate = params.requested_format.frame_rate;

  ScopedComPtr<IAMStreamConfig> stream_config;
  HRESULT hr = output_capture_pin_.QueryInterface(stream_config.Receive());
  if (FAILED(hr)) {
    SetErrorState("Can't get the Capture format settings");
    return;
  }

  int count = 0, size = 0;
  hr = stream_config->GetNumberOfCapabilities(&count, &size);
  if (FAILED(hr)) {
    SetErrorState("Failed to GetNumberOfCapabilities");
    return;
  }

  scoped_ptr<BYTE[]> caps(new BYTE[size]);
  ScopedMediaType media_type;
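  // |size| is the size of the stream configuration structure reported by
  // GetNumberOfCapabilities(); for a video pin this is normally a
  // VIDEO_STREAM_CONFIG_CAPS, which GetStreamCaps() writes into |caps| below.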

  // Get the Windows capability from the capture device.
  hr = stream_config->GetStreamCaps(
      found_capability.stream_index, media_type.Receive(), caps.get());
  if (SUCCEEDED(hr)) {
    if (media_type->formattype == FORMAT_VideoInfo) {
      VIDEOINFOHEADER* h =
          reinterpret_cast<VIDEOINFOHEADER*>(media_type->pbFormat);
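      // AvgTimePerFrame is expressed in 100 ns REFERENCE_TIME units, so
      // kSecondsToReferenceTime divided by the frame rate gives the requested
      // per-frame duration.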
      if (format.frame_rate > 0)
        h->AvgTimePerFrame = kSecondsToReferenceTime / format.frame_rate;
    }
    // Set the sink filter to request this format.
    sink_filter_->SetRequestedMediaFormat(format);
    // Order the capture device to use this format.
    hr = stream_config->SetFormat(media_type.get());
  }

  if (FAILED(hr))
    SetErrorState("Failed to set capture device output format");

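  // For MJPEG the graph routes the camera through the CLSID_MjpegDec
  // decompressor before the sink filter; if that filter cannot be created or
  // added, the connection code below falls back to connecting the capture pin
  // directly to the sink.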
  if (format.pixel_format == PIXEL_FORMAT_MJPEG && !mjpg_filter_.get()) {
    // Create MJPG filter if we need it.
    hr = mjpg_filter_.CreateInstance(CLSID_MjpegDec, NULL, CLSCTX_INPROC);

    if (SUCCEEDED(hr)) {
      input_mjpg_pin_ = GetPin(mjpg_filter_, PINDIR_INPUT, GUID_NULL);
      output_mjpg_pin_ = GetPin(mjpg_filter_, PINDIR_OUTPUT, GUID_NULL);
      hr = graph_builder_->AddFilter(mjpg_filter_, NULL);
    }

    if (FAILED(hr)) {
      mjpg_filter_.Release();
      input_mjpg_pin_.Release();
      output_mjpg_pin_.Release();
    }
  }

  SetAntiFlickerInCaptureFilter();

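  // ConnectDirect() joins two pins without intermediate filters, whereas
  // Connect() lets the graph builder insert whatever transform filters are
  // needed (used below for HDYC, which requires an AVI decompressor).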
  if (format.pixel_format == PIXEL_FORMAT_MJPEG && mjpg_filter_.get()) {
    // Connect the camera to the MJPEG decoder.
    hr = graph_builder_->ConnectDirect(output_capture_pin_, input_mjpg_pin_,
                                       NULL);
    // Connect the MJPEG decoder to the sink filter.
    hr += graph_builder_->ConnectDirect(output_mjpg_pin_, input_sink_pin_,
                                        NULL);
  } else if (media_type->subtype == kMediaSubTypeHDYC) {
    // The HDYC pixel format, used by the DeckLink capture card, needs an AVI
    // decompressor filter after the source, so let |graph_builder_| add it.
    hr = graph_builder_->Connect(output_capture_pin_, input_sink_pin_);
  } else {
    hr = graph_builder_->ConnectDirect(output_capture_pin_, input_sink_pin_,
                                       NULL);
  }

  if (FAILED(hr)) {
    SetErrorState("Failed to connect the Capture graph.");
    return;
  }

  hr = media_control_->Pause();
  if (FAILED(hr)) {
    SetErrorState("Failed to Pause the Capture device. "
                  "Is it already occupied?");
    return;
  }

  // Get the format back from the sink filter after the filters have been
  // connected.
  capture_format_ = sink_filter_->ResultingFormat();

  // Start capturing.
  hr = media_control_->Run();
  if (FAILED(hr)) {
    SetErrorState("Failed to start the Capture device.");
    return;
  }

  state_ = kCapturing;
}

void VideoCaptureDeviceWin::StopAndDeAllocate() {
  DCHECK(CalledOnValidThread());
  if (state_ != kCapturing)
    return;

  HRESULT hr = media_control_->Stop();
  if (FAILED(hr)) {
    SetErrorState("Failed to stop the capture graph.");
    return;
  }

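  // Only the pin connections are broken here; the filters themselves stay in
  // the graph so that a later AllocateAndStart() can reconnect them.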
  graph_builder_->Disconnect(output_capture_pin_);
  graph_builder_->Disconnect(input_sink_pin_);

  // If the MJPG filter exists, disconnect it even if it has not been used.
  if (mjpg_filter_) {
    graph_builder_->Disconnect(input_mjpg_pin_);
    graph_builder_->Disconnect(output_mjpg_pin_);
  }

  if (FAILED(hr)) {
    SetErrorState("Failed to Stop the Capture device");
    return;
  }
  client_.reset();
  state_ = kIdle;
}

// Implements SinkFilterObserver::FrameReceived.
void VideoCaptureDeviceWin::FrameReceived(const uint8* buffer,
                                          int length) {
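  // Frames are forwarded with zero rotation and a timestamp taken at delivery
  // time; |capture_format_| was obtained from the sink filter when the graph
  // was connected in AllocateAndStart().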
  client_->OnIncomingCapturedData(
      buffer, length, capture_format_, 0, base::TimeTicks::Now());
}

bool VideoCaptureDeviceWin::CreateCapabilityMap() {
  DCHECK(CalledOnValidThread());
  ScopedComPtr<IAMStreamConfig> stream_config;
  HRESULT hr = output_capture_pin_.QueryInterface(stream_config.Receive());
  if (FAILED(hr)) {
    DVLOG(2) << "Failed to get IAMStreamConfig interface from "
                "capture device";
    return false;
  }

  // Get interface used for getting the frame rate.
  ScopedComPtr<IAMVideoControl> video_control;
  hr = capture_filter_.QueryInterface(video_control.Receive());
  DVLOG_IF(2, FAILED(hr)) << "IAMVideoControl Interface NOT SUPPORTED";

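  // IAMVideoControl is optional: it is only used below to refine the frame
  // rate reported for each capability, and enumeration proceeds without it.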
  int count = 0, size = 0;
  hr = stream_config->GetNumberOfCapabilities(&count, &size);
  if (FAILED(hr)) {
    DVLOG(2) << "Failed to GetNumberOfCapabilities";
    return false;
  }

  scoped_ptr<BYTE[]> caps(new BYTE[size]);
  for (int i = 0; i < count; ++i) {
    ScopedMediaType media_type;
    hr = stream_config->GetStreamCaps(i, media_type.Receive(), caps.get());
    // GetStreamCaps() may return S_FALSE, so don't use the FAILED() or
    // SUCCEEDED() macros here since they would trigger incorrectly.
    if (hr != S_OK) {
      DVLOG(2) << "Failed to GetStreamCaps";
      return false;
    }

    if (media_type->majortype == MEDIATYPE_Video &&
        media_type->formattype == FORMAT_VideoInfo) {
      VideoCaptureCapabilityWin capability(i);
      capability.supported_format.pixel_format =
          TranslateMediaSubtypeToPixelFormat(media_type->subtype);
      if (capability.supported_format.pixel_format == PIXEL_FORMAT_UNKNOWN)
        continue;

      VIDEOINFOHEADER* h =
          reinterpret_cast<VIDEOINFOHEADER*>(media_type->pbFormat);
      capability.supported_format.frame_size.SetSize(h->bmiHeader.biWidth,
                                                     h->bmiHeader.biHeight);

      // Try to get a better |time_per_frame| from IAMVideoControl.  If not,
      // use the value from VIDEOINFOHEADER.
      REFERENCE_TIME time_per_frame = h->AvgTimePerFrame;
      if (video_control) {
        ScopedCoMem<LONGLONG> max_fps;
        LONG list_size = 0;
        SIZE size = {capability.supported_format.frame_size.width(),
                     capability.supported_format.frame_size.height()};

        // GetFrameRateList() does not always return the maximum frame rate
        // first, e.g. on the Logitech Notebook camera the list is reversed,
        // possibly due to a bug in that API. So the first value cannot be
        // assumed to be the maximum fps; the whole list is searched instead.
        hr = video_control->GetFrameRateList(output_capture_pin_, i, size,
                                             &list_size, &max_fps);
        // Sometimes |list_size| will be > 0, but |max_fps| will be NULL.  Some
        // drivers may return an HRESULT of S_FALSE which SUCCEEDED()
        // translates into success, so explicitly check S_OK.  See
        // http://crbug.com/306237.
        if (hr == S_OK && list_size > 0 && max_fps) {
          time_per_frame = *std::min_element(max_fps.get(),
                                             max_fps.get() + list_size);
        }
      }

      capability.supported_format.frame_rate =
          (time_per_frame > 0)
              ? (kSecondsToReferenceTime / static_cast<float>(time_per_frame))
              : 0.0;

      // DirectShow currently works only with integer frame rates, but the
      // best-capability-matching class works with rational frame rates.
      capability.frame_rate_numerator = capability.supported_format.frame_rate;
      capability.frame_rate_denominator = 1;

      capabilities_.Add(capability);
    }
  }

  return !capabilities_.empty();
}

// Set the power line frequency removal in |capture_filter_| if available.
void VideoCaptureDeviceWin::SetAntiFlickerInCaptureFilter() {
  const int power_line_frequency = GetPowerLineFrequencyForLocation();
  if (power_line_frequency != kPowerLine50Hz &&
      power_line_frequency != kPowerLine60Hz) {
    return;
  }
  ScopedComPtr<IKsPropertySet> ks_propset;
  DWORD type_support = 0;
  HRESULT hr;
  if (SUCCEEDED(hr = ks_propset.QueryFrom(capture_filter_)) &&
      SUCCEEDED(hr = ks_propset->QuerySupported(PROPSETID_VIDCAP_VIDEOPROCAMP,
          KSPROPERTY_VIDEOPROCAMP_POWERLINE_FREQUENCY, &type_support)) &&
      (type_support & KSPROPERTY_SUPPORT_SET)) {
    KSPROPERTY_VIDEOPROCAMP_S data = {};
    data.Property.Set = PROPSETID_VIDCAP_VIDEOPROCAMP;
    data.Property.Id = KSPROPERTY_VIDEOPROCAMP_POWERLINE_FREQUENCY;
    data.Property.Flags = KSPROPERTY_TYPE_SET;
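    // For this property a Value of 1 selects 50 Hz and 2 selects 60 Hz
    // power line frequency filtering.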
    data.Value = (power_line_frequency == kPowerLine50Hz) ? 1 : 2;
    data.Flags = KSPROPERTY_VIDEOPROCAMP_FLAGS_MANUAL;
    hr = ks_propset->Set(PROPSETID_VIDCAP_VIDEOPROCAMP,
                         KSPROPERTY_VIDEOPROCAMP_POWERLINE_FREQUENCY,
                         &data, sizeof(data), &data, sizeof(data));
    DLOG_IF(ERROR, FAILED(hr)) << "Anti-flicker setting failed.";
    DVLOG_IF(2, SUCCEEDED(hr)) << "Anti-flicker set correctly.";
  } else {
    DVLOG(2) << "Anti-flicker setting not supported.";
  }
}

void VideoCaptureDeviceWin::SetErrorState(const std::string& reason) {
  DCHECK(CalledOnValidThread());
  DVLOG(1) << reason;
  state_ = kError;
  client_->OnError(reason);
}

}  // namespace media