Upstream version 7.36.149.0
[platform/framework/web/crosswalk.git] / src / media / video / capture / win / video_capture_device_win.cc
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "media/video/capture/win/video_capture_device_win.h"

#include <ks.h>
#include <ksmedia.h>

#include <algorithm>
#include <list>

#include "base/command_line.h"
#include "base/strings/string_util.h"
#include "base/strings/sys_string_conversions.h"
#include "base/win/metro.h"
#include "base/win/scoped_co_mem.h"
#include "base/win/scoped_variant.h"
#include "base/win/windows_version.h"
#include "media/base/media_switches.h"
#include "media/video/capture/win/video_capture_device_mf_win.h"

using base::win::ScopedCoMem;
using base::win::ScopedComPtr;
using base::win::ScopedVariant;

namespace media {
namespace {

// Finds and creates a DirectShow Video Capture filter matching the device_name.
HRESULT GetDeviceFilter(const VideoCaptureDevice::Name& device_name,
                        IBaseFilter** filter) {
  DCHECK(filter);

  ScopedComPtr<ICreateDevEnum> dev_enum;
  HRESULT hr = dev_enum.CreateInstance(CLSID_SystemDeviceEnum, NULL,
                                       CLSCTX_INPROC);
  if (FAILED(hr))
    return hr;

  ScopedComPtr<IEnumMoniker> enum_moniker;
  hr = dev_enum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory,
                                       enum_moniker.Receive(), 0);
  // CreateClassEnumerator returns S_FALSE on some Windows OS versions when no
  // cameras exist. Therefore the FAILED macro can't be used.
  if (hr != S_OK)
    return hr;

  ScopedComPtr<IMoniker> moniker;
  ScopedComPtr<IBaseFilter> capture_filter;
  DWORD fetched = 0;
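  // Walk the enumerated monikers and compare each device's identifying
  // property (DevicePath, Description or FriendlyName, whichever is present)
  // against |device_name|.id().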
  while (enum_moniker->Next(1, moniker.Receive(), &fetched) == S_OK) {
    ScopedComPtr<IPropertyBag> prop_bag;
    hr = moniker->BindToStorage(0, 0, IID_IPropertyBag, prop_bag.ReceiveVoid());
    if (FAILED(hr)) {
      moniker.Release();
      continue;
    }

    // Find the description or friendly name.
    static const wchar_t* kPropertyNames[] = {
      L"DevicePath", L"Description", L"FriendlyName"
    };
    ScopedVariant name;
    for (size_t i = 0;
         i < arraysize(kPropertyNames) && name.type() != VT_BSTR; ++i) {
      prop_bag->Read(kPropertyNames[i], name.Receive(), 0);
    }
    if (name.type() == VT_BSTR) {
      std::string device_path(base::SysWideToUTF8(V_BSTR(&name)));
      if (device_path.compare(device_name.id()) == 0) {
        // We have found the requested device
        hr = moniker->BindToObject(0, 0, IID_IBaseFilter,
                                   capture_filter.ReceiveVoid());
        DVPLOG_IF(2, FAILED(hr)) << "Failed to bind camera filter.";
        break;
      }
    }
    moniker.Release();
  }

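  // Transfer ownership of the filter (if any) to the caller. If enumeration
  // succeeded but no matching device was found, report ERROR_NOT_FOUND.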
  *filter = capture_filter.Detach();
  if (!*filter && SUCCEEDED(hr))
    hr = HRESULT_FROM_WIN32(ERROR_NOT_FOUND);

  return hr;
}

// Check if a Pin matches a category.
bool PinMatchesCategory(IPin* pin, REFGUID category) {
  DCHECK(pin);
  bool found = false;
  ScopedComPtr<IKsPropertySet> ks_property;
  HRESULT hr = ks_property.QueryFrom(pin);
  if (SUCCEEDED(hr)) {
    GUID pin_category;
    DWORD return_value;
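    // Ask the pin's kernel-streaming property set for its category GUID.
    // |return_value| receives the number of bytes copied into |pin_category|.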
    hr = ks_property->Get(AMPROPSETID_Pin, AMPROPERTY_PIN_CATEGORY, NULL, 0,
                          &pin_category, sizeof(pin_category), &return_value);
    if (SUCCEEDED(hr) && (return_value == sizeof(pin_category))) {
      found = (pin_category == category);
    }
  }
  return found;
}

// Finds an IPin on an IBaseFilter given the direction and category.
ScopedComPtr<IPin> GetPin(IBaseFilter* filter, PIN_DIRECTION pin_dir,
                          REFGUID category) {
  ScopedComPtr<IPin> pin;
  ScopedComPtr<IEnumPins> pin_enum;
  HRESULT hr = filter->EnumPins(pin_enum.Receive());
  if (pin_enum == NULL)
    return pin;

  // Walk the pins and return the first one matching the requested direction
  // and category (GUID_NULL matches any category).
  hr = pin_enum->Reset();  // Set to the first pin.
  while ((hr = pin_enum->Next(1, pin.Receive(), NULL)) == S_OK) {
    PIN_DIRECTION this_pin_dir = static_cast<PIN_DIRECTION>(-1);
    hr = pin->QueryDirection(&this_pin_dir);
    if (pin_dir == this_pin_dir) {
      if (category == GUID_NULL || PinMatchesCategory(pin, category))
        return pin;
    }
    pin.Release();
  }

  DCHECK(!pin);
  return pin;
}

// Release the format block for a media type.
// http://msdn.microsoft.com/en-us/library/dd375432(VS.85).aspx
void FreeMediaType(AM_MEDIA_TYPE* mt) {
  if (mt->cbFormat != 0) {
    CoTaskMemFree(mt->pbFormat);
    mt->cbFormat = 0;
    mt->pbFormat = NULL;
  }
  if (mt->pUnk != NULL) {
    NOTREACHED();
    // pUnk should not be used.
    mt->pUnk->Release();
    mt->pUnk = NULL;
  }
}

// Delete a media type structure that was allocated on the heap.
// http://msdn.microsoft.com/en-us/library/dd375432(VS.85).aspx
void DeleteMediaType(AM_MEDIA_TYPE* mt) {
  if (mt != NULL) {
    FreeMediaType(mt);
    CoTaskMemFree(mt);
  }
}

// A utility class that wraps the AM_MEDIA_TYPE type and guarantees that the
// structure is freed when the wrapper goes out of scope. Receive() DCHECKs
// that no media type is already held, to avoid leaking it.
class ScopedMediaType {
 public:
  ScopedMediaType() : media_type_(NULL) {}
  ~ScopedMediaType() { Free(); }

  AM_MEDIA_TYPE* operator->() { return media_type_; }
  AM_MEDIA_TYPE* get() { return media_type_; }

  void Free() {
    if (!media_type_)
      return;

    DeleteMediaType(media_type_);
    media_type_ = NULL;
  }

  AM_MEDIA_TYPE** Receive() {
    DCHECK(!media_type_);
    return &media_type_;
  }

 private:
  AM_MEDIA_TYPE* media_type_;
};

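// Maps a DirectShow media subtype GUID to the corresponding VideoPixelFormat;
// returns PIXEL_FORMAT_UNKNOWN for subtypes this code does not handle.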
VideoPixelFormat TranslateMediaSubtypeToPixelFormat(const GUID& sub_type) {
  static struct {
    const GUID& sub_type;
    VideoPixelFormat format;
  } pixel_formats[] = {
    { kMediaSubTypeI420, PIXEL_FORMAT_I420 },
    { MEDIASUBTYPE_IYUV, PIXEL_FORMAT_I420 },
    { MEDIASUBTYPE_RGB24, PIXEL_FORMAT_RGB24 },
    { MEDIASUBTYPE_YUY2, PIXEL_FORMAT_YUY2 },
    { MEDIASUBTYPE_MJPG, PIXEL_FORMAT_MJPEG },
    { MEDIASUBTYPE_UYVY, PIXEL_FORMAT_UYVY },
    { MEDIASUBTYPE_ARGB32, PIXEL_FORMAT_ARGB },
  };
  for (size_t i = 0; i < ARRAYSIZE_UNSAFE(pixel_formats); ++i) {
    if (sub_type == pixel_formats[i].sub_type)
      return pixel_formats[i].format;
  }
#ifndef NDEBUG
  WCHAR guid_str[128];
  StringFromGUID2(sub_type, guid_str, arraysize(guid_str));
  DVLOG(2) << "Device (also) supports an unknown media type " << guid_str;
#endif
  return PIXEL_FORMAT_UNKNOWN;
}

}  // namespace

// static
void VideoCaptureDevice::GetDeviceNames(Names* device_names) {
  const CommandLine* cmd_line = CommandLine::ForCurrentProcess();
  // Use Media Foundation for Metro processes (Windows 8 and later) and
  // DirectShow for everything else, unless forced via flag. Media Foundation
  // can also be forced if the appropriate flag is set and we are on Windows 7
  // or later in non-Metro mode.
  if ((base::win::IsMetroProcess() &&
      !cmd_line->HasSwitch(switches::kForceDirectShowVideoCapture)) ||
      (base::win::GetVersion() >= base::win::VERSION_WIN7 &&
      cmd_line->HasSwitch(switches::kForceMediaFoundationVideoCapture))) {
    VideoCaptureDeviceMFWin::GetDeviceNames(device_names);
  } else {
    VideoCaptureDeviceWin::GetDeviceNames(device_names);
  }
}

// static
void VideoCaptureDevice::GetDeviceSupportedFormats(const Name& device,
    VideoCaptureFormats* formats) {
  const CommandLine* cmd_line = CommandLine::ForCurrentProcess();
  // Use Media Foundation for Metro processes (Windows 8 and later) and
  // DirectShow for everything else, unless forced via flag. Media Foundation
  // can also be forced if the appropriate flag is set and we are on Windows 7
  // or later in non-Metro mode.
  if ((base::win::IsMetroProcess() &&
      !cmd_line->HasSwitch(switches::kForceDirectShowVideoCapture)) ||
      (base::win::GetVersion() >= base::win::VERSION_WIN7 &&
      cmd_line->HasSwitch(switches::kForceMediaFoundationVideoCapture))) {
    VideoCaptureDeviceMFWin::GetDeviceSupportedFormats(device, formats);
  } else {
    VideoCaptureDeviceWin::GetDeviceSupportedFormats(device, formats);
  }
}

// static
VideoCaptureDevice* VideoCaptureDevice::Create(const Name& device_name) {
  VideoCaptureDevice* ret = NULL;
  if (device_name.capture_api_type() == Name::MEDIA_FOUNDATION) {
    DCHECK(VideoCaptureDeviceMFWin::PlatformSupported());
    scoped_ptr<VideoCaptureDeviceMFWin> device(
        new VideoCaptureDeviceMFWin(device_name));
    DVLOG(1) << " MediaFoundation Device: " << device_name.name();
    if (device->Init())
      ret = device.release();
  } else if (device_name.capture_api_type() == Name::DIRECT_SHOW) {
    scoped_ptr<VideoCaptureDeviceWin> device(
        new VideoCaptureDeviceWin(device_name));
    DVLOG(1) << " DirectShow Device: " << device_name.name();
    if (device->Init())
      ret = device.release();
  } else {
    NOTREACHED() << " Couldn't recognize VideoCaptureDevice type";
  }

  return ret;
}

// static
void VideoCaptureDeviceWin::GetDeviceNames(Names* device_names) {
  DCHECK(device_names);

  ScopedComPtr<ICreateDevEnum> dev_enum;
  HRESULT hr = dev_enum.CreateInstance(CLSID_SystemDeviceEnum, NULL,
                                       CLSCTX_INPROC);
  if (FAILED(hr))
    return;

  ScopedComPtr<IEnumMoniker> enum_moniker;
  hr = dev_enum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory,
                                       enum_moniker.Receive(), 0);
  // CreateClassEnumerator returns S_FALSE on some Windows OS versions when no
  // cameras exist. Therefore the FAILED macro can't be used.
  if (hr != S_OK)
    return;

  device_names->clear();

  // Name of a fake DirectShow filter that exists on computers with
  // GTalk installed.
  static const char kGoogleCameraAdapter[] = "google camera adapter";

  // Enumerate all video capture devices.
  ScopedComPtr<IMoniker> moniker;
  int index = 0;
  while (enum_moniker->Next(1, moniker.Receive(), NULL) == S_OK) {
    ScopedComPtr<IPropertyBag> prop_bag;
    hr = moniker->BindToStorage(0, 0, IID_IPropertyBag, prop_bag.ReceiveVoid());
    if (FAILED(hr)) {
      moniker.Release();
      continue;
    }

    // Find the description or friendly name.
    ScopedVariant name;
    hr = prop_bag->Read(L"Description", name.Receive(), 0);
    if (FAILED(hr))
      hr = prop_bag->Read(L"FriendlyName", name.Receive(), 0);

    if (SUCCEEDED(hr) && name.type() == VT_BSTR) {
      // Ignore all VFW drivers and the special Google Camera Adapter.
      // Google Camera Adapter is not a real DirectShow camera device.
      // VFW drivers are very old Video for Windows drivers that cannot be
      // used.
      const wchar_t* str_ptr = V_BSTR(&name);
      const int name_length = arraysize(kGoogleCameraAdapter) - 1;

      if (wcsstr(str_ptr, L"(VFW)") == NULL &&
          (lstrlenW(str_ptr) < name_length ||
           !LowerCaseEqualsASCII(str_ptr, str_ptr + name_length,
                                 kGoogleCameraAdapter))) {
        std::string id;
        std::string device_name(base::SysWideToUTF8(str_ptr));
        name.Reset();
        hr = prop_bag->Read(L"DevicePath", name.Receive(), 0);
        if (FAILED(hr) || name.type() != VT_BSTR) {
          id = device_name;
        } else {
          DCHECK_EQ(name.type(), VT_BSTR);
          id = base::SysWideToUTF8(V_BSTR(&name));
        }

        device_names->push_back(Name(device_name, id, Name::DIRECT_SHOW));
      }
    }
    moniker.Release();
  }
}

// static
void VideoCaptureDeviceWin::GetDeviceSupportedFormats(const Name& device,
    VideoCaptureFormats* formats) {
  DVLOG(1) << "GetDeviceSupportedFormats for " << device.name();
  ScopedComPtr<ICreateDevEnum> dev_enum;
  HRESULT hr = dev_enum.CreateInstance(CLSID_SystemDeviceEnum, NULL,
                                       CLSCTX_INPROC);
  if (FAILED(hr))
    return;

  ScopedComPtr<IEnumMoniker> enum_moniker;
  hr = dev_enum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory,
                                       enum_moniker.Receive(), 0);
  // CreateClassEnumerator returns S_FALSE on some Windows OS when no camera
  // exists. Therefore the FAILED macro can't be used.
  if (hr != S_OK)
    return;

  // Walk the capture devices. No need to check for "google camera adapter",
  // since this is already skipped in the enumeration of GetDeviceNames().
  ScopedComPtr<IMoniker> moniker;
  int index = 0;
  ScopedVariant device_id;
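  // Locate the moniker whose DevicePath matches |device|.id(); the loop below
  // leaves |moniker| non-NULL only when a match is found.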
  while (enum_moniker->Next(1, moniker.Receive(), NULL) == S_OK) {
    ScopedComPtr<IPropertyBag> prop_bag;
    hr = moniker->BindToStorage(0, 0, IID_IPropertyBag, prop_bag.ReceiveVoid());
    if (FAILED(hr)) {
      moniker.Release();
      continue;
    }

    device_id.Reset();
    hr = prop_bag->Read(L"DevicePath", device_id.Receive(), 0);
    if (FAILED(hr)) {
      DVLOG(1) << "Couldn't read a device's DevicePath.";
      return;
    }
    if (device.id() == base::SysWideToUTF8(V_BSTR(&device_id)))
      break;
    moniker.Release();
  }

  if (moniker.get()) {
    base::win::ScopedComPtr<IBaseFilter> capture_filter;
    hr = GetDeviceFilter(device, capture_filter.Receive());
    if (!capture_filter) {
      DVLOG(2) << "Failed to create capture filter.";
      return;
    }

    base::win::ScopedComPtr<IPin> output_capture_pin(
        GetPin(capture_filter, PINDIR_OUTPUT, PIN_CATEGORY_CAPTURE));
    if (!output_capture_pin) {
      DVLOG(2) << "Failed to get capture output pin";
      return;
    }

    ScopedComPtr<IAMStreamConfig> stream_config;
    hr = output_capture_pin.QueryInterface(stream_config.Receive());
    if (FAILED(hr)) {
      DVLOG(2) << "Failed to get IAMStreamConfig interface from "
                  "capture device";
      return;
    }

    int count = 0, size = 0;
    hr = stream_config->GetNumberOfCapabilities(&count, &size);
    if (FAILED(hr)) {
      DVLOG(2) << "Failed to GetNumberOfCapabilities";
      return;
    }

    scoped_ptr<BYTE[]> caps(new BYTE[size]);
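    // GetStreamCaps() also fills |caps| with a VIDEO_STREAM_CONFIG_CAPS block
    // of |size| bytes; only the returned media type is inspected below.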
    for (int i = 0; i < count; ++i) {
      ScopedMediaType media_type;
      hr = stream_config->GetStreamCaps(i, media_type.Receive(), caps.get());
      // GetStreamCaps() may return S_FALSE, so don't use the FAILED() or
      // SUCCEEDED() macros here since they'll trigger incorrectly.
      if (hr != S_OK) {
        DVLOG(2) << "Failed to GetStreamCaps";
        return;
      }

      if (media_type->majortype == MEDIATYPE_Video &&
          media_type->formattype == FORMAT_VideoInfo) {
        VideoCaptureFormat format;
        format.pixel_format =
            TranslateMediaSubtypeToPixelFormat(media_type->subtype);
        if (format.pixel_format == PIXEL_FORMAT_UNKNOWN)
          continue;
        VIDEOINFOHEADER* h =
            reinterpret_cast<VIDEOINFOHEADER*>(media_type->pbFormat);
        format.frame_size.SetSize(h->bmiHeader.biWidth,
                                  h->bmiHeader.biHeight);
        // Trust the frame rate from the VIDEOINFOHEADER.
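        // AvgTimePerFrame is in 100 ns REFERENCE_TIME units, so dividing the
        // number of such units per second by it yields frames per second.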
        format.frame_rate = (h->AvgTimePerFrame > 0) ?
            static_cast<int>(kSecondsToReferenceTime / h->AvgTimePerFrame) :
            0;
        formats->push_back(format);
        DVLOG(1) << device.name() << " resolution: "
             << format.frame_size.ToString() << ", fps: " << format.frame_rate
             << ", pixel format: " << format.pixel_format;
      }
    }
  }
}

VideoCaptureDeviceWin::VideoCaptureDeviceWin(const Name& device_name)
    : device_name_(device_name),
      state_(kIdle) {
  DetachFromThread();
}

VideoCaptureDeviceWin::~VideoCaptureDeviceWin() {
  DCHECK(CalledOnValidThread());
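  // Stop the graph (if it was ever started) and detach our filters so the
  // graph releases its references before our ScopedComPtrs go away.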
  if (media_control_)
    media_control_->Stop();

  if (graph_builder_) {
    if (sink_filter_) {
      graph_builder_->RemoveFilter(sink_filter_);
      sink_filter_ = NULL;
    }

    if (capture_filter_)
      graph_builder_->RemoveFilter(capture_filter_);

    if (mjpg_filter_)
      graph_builder_->RemoveFilter(mjpg_filter_);
  }
}

bool VideoCaptureDeviceWin::Init() {
  DCHECK(CalledOnValidThread());
  HRESULT hr = GetDeviceFilter(device_name_, capture_filter_.Receive());
  if (!capture_filter_) {
    DVLOG(2) << "Failed to create capture filter.";
    return false;
  }

  output_capture_pin_ =
      GetPin(capture_filter_, PINDIR_OUTPUT, PIN_CATEGORY_CAPTURE);
  if (!output_capture_pin_) {
    DVLOG(2) << "Failed to get capture output pin";
    return false;
  }

  // Create the sink filter used for receiving captured frames.
  sink_filter_ = new SinkFilter(this);
  if (sink_filter_ == NULL) {
    DVLOG(2) << "Failed to create sink filter";
    return false;
  }

  input_sink_pin_ = sink_filter_->GetPin(0);

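  // Build the DirectShow filter graph: the graph builder owns the filters and
  // IMediaControl is used later to pause, run and stop the stream.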
  hr = graph_builder_.CreateInstance(CLSID_FilterGraph, NULL,
                                     CLSCTX_INPROC_SERVER);
  if (FAILED(hr)) {
    DVLOG(2) << "Failed to create graph builder.";
    return false;
  }

  hr = graph_builder_.QueryInterface(media_control_.Receive());
  if (FAILED(hr)) {
    DVLOG(2) << "Failed to get the media control interface.";
    return false;
  }

  hr = graph_builder_->AddFilter(capture_filter_, NULL);
  if (FAILED(hr)) {
    DVLOG(2) << "Failed to add the capture device to the graph.";
    return false;
  }

  hr = graph_builder_->AddFilter(sink_filter_, NULL);
  if (FAILED(hr)) {
    DVLOG(2) << "Failed to add the sink filter to the graph.";
    return false;
  }

  return CreateCapabilityMap();
}

void VideoCaptureDeviceWin::AllocateAndStart(
    const VideoCaptureParams& params,
    scoped_ptr<VideoCaptureDevice::Client> client) {
  DCHECK(CalledOnValidThread());
  if (state_ != kIdle)
    return;

  client_ = client.Pass();

  // Get the camera capability that best matches the requested resolution.
  const VideoCaptureCapabilityWin& found_capability =
      capabilities_.GetBestMatchedFormat(
          params.requested_format.frame_size.width(),
          params.requested_format.frame_size.height(),
          params.requested_format.frame_rate);
  VideoCaptureFormat format = found_capability.supported_format;

  // Reduce the frame rate if the requested frame rate is lower
  // than the capability.
  if (format.frame_rate > params.requested_format.frame_rate)
    format.frame_rate = params.requested_format.frame_rate;

  ScopedComPtr<IAMStreamConfig> stream_config;
  HRESULT hr = output_capture_pin_.QueryInterface(stream_config.Receive());
  if (FAILED(hr)) {
    SetErrorState("Can't get the Capture format settings");
    return;
  }

  int count = 0, size = 0;
  hr = stream_config->GetNumberOfCapabilities(&count, &size);
  if (FAILED(hr)) {
    DVLOG(2) << "Failed to GetNumberOfCapabilities";
    return;
  }

  scoped_ptr<BYTE[]> caps(new BYTE[size]);
  ScopedMediaType media_type;

  // Get the windows capability from the capture device.
  hr = stream_config->GetStreamCaps(
      found_capability.stream_index, media_type.Receive(), caps.get());
  if (SUCCEEDED(hr)) {
    if (media_type->formattype == FORMAT_VideoInfo) {
      VIDEOINFOHEADER* h =
          reinterpret_cast<VIDEOINFOHEADER*>(media_type->pbFormat);
      if (format.frame_rate > 0)
        h->AvgTimePerFrame = kSecondsToReferenceTime / format.frame_rate;
    }
    // Set the sink filter to request this format.
    sink_filter_->SetRequestedMediaFormat(format);
    // Order the capture device to use this format.
    hr = stream_config->SetFormat(media_type.get());
  }

  if (FAILED(hr))
    SetErrorState("Failed to set capture device output format");

  if (format.pixel_format == PIXEL_FORMAT_MJPEG && !mjpg_filter_.get()) {
    // Create MJPG filter if we need it.
    hr = mjpg_filter_.CreateInstance(CLSID_MjpegDec, NULL, CLSCTX_INPROC);

    if (SUCCEEDED(hr)) {
      input_mjpg_pin_ = GetPin(mjpg_filter_, PINDIR_INPUT, GUID_NULL);
      output_mjpg_pin_ = GetPin(mjpg_filter_, PINDIR_OUTPUT, GUID_NULL);
      hr = graph_builder_->AddFilter(mjpg_filter_, NULL);
    }

    if (FAILED(hr)) {
      mjpg_filter_.Release();
      input_mjpg_pin_.Release();
      output_mjpg_pin_.Release();
    }
  }

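  // If the driver supports it, configure 50/60 Hz anti-flicker filtering
  // before the graph is connected.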
  SetAntiFlickerInCaptureFilter();

  if (format.pixel_format == PIXEL_FORMAT_MJPEG && mjpg_filter_.get()) {
    // Connect the camera to the MJPEG decoder.
    hr = graph_builder_->ConnectDirect(output_capture_pin_, input_mjpg_pin_,
                                       NULL);
    // Connect the MJPEG decoder to the sink filter.
    hr += graph_builder_->ConnectDirect(output_mjpg_pin_, input_sink_pin_,
                                        NULL);
  } else {
    hr = graph_builder_->ConnectDirect(output_capture_pin_, input_sink_pin_,
                                       NULL);
  }

  if (FAILED(hr)) {
    SetErrorState("Failed to connect the Capture graph.");
    return;
  }

  hr = media_control_->Pause();
  if (FAILED(hr)) {
    SetErrorState("Failed to pause the capture device. "
                  "Is it already in use?");
    return;
  }

  // Get the format back from the sink filter after the filters have been
  // connected.
  capture_format_ = sink_filter_->ResultingFormat();

  // Start capturing.
  hr = media_control_->Run();
  if (FAILED(hr)) {
    SetErrorState("Failed to start the Capture device.");
    return;
  }

  state_ = kCapturing;
}

void VideoCaptureDeviceWin::StopAndDeAllocate() {
  DCHECK(CalledOnValidThread());
  if (state_ != kCapturing)
    return;

  HRESULT hr = media_control_->Stop();
  if (FAILED(hr)) {
    SetErrorState("Failed to stop the capture graph.");
    return;
  }

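  // Tear down the pin connections so the graph can be rebuilt by a later
  // AllocateAndStart().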
  graph_builder_->Disconnect(output_capture_pin_);
  graph_builder_->Disconnect(input_sink_pin_);

  // If the MJPEG filter exists, disconnect it even if it has not been used.
  if (mjpg_filter_) {
    graph_builder_->Disconnect(input_mjpg_pin_);
    graph_builder_->Disconnect(output_mjpg_pin_);
  }

  if (FAILED(hr)) {
    SetErrorState("Failed to Stop the Capture device");
    return;
  }
  client_.reset();
  state_ = kIdle;
}

// Implements SinkFilterObserver::FrameReceived().
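// Called by the sink filter for every delivered video sample; forwards the
// raw buffer together with the negotiated |capture_format_| to the client.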
void VideoCaptureDeviceWin::FrameReceived(const uint8* buffer,
                                          int length) {
  client_->OnIncomingCapturedData(
      buffer, length, capture_format_, 0, base::TimeTicks::Now());
}

bool VideoCaptureDeviceWin::CreateCapabilityMap() {
  DCHECK(CalledOnValidThread());
  ScopedComPtr<IAMStreamConfig> stream_config;
  HRESULT hr = output_capture_pin_.QueryInterface(stream_config.Receive());
  if (FAILED(hr)) {
    DVLOG(2) << "Failed to get IAMStreamConfig interface from "
                "capture device";
    return false;
  }

  // Get interface used for getting the frame rate.
  ScopedComPtr<IAMVideoControl> video_control;
  hr = capture_filter_.QueryInterface(video_control.Receive());
  DVLOG_IF(2, FAILED(hr)) << "IAMVideoControl Interface NOT SUPPORTED";

  int count = 0, size = 0;
  hr = stream_config->GetNumberOfCapabilities(&count, &size);
  if (FAILED(hr)) {
    DVLOG(2) << "Failed to GetNumberOfCapabilities";
    return false;
  }

  scoped_ptr<BYTE[]> caps(new BYTE[size]);
  for (int i = 0; i < count; ++i) {
    ScopedMediaType media_type;
    hr = stream_config->GetStreamCaps(i, media_type.Receive(), caps.get());
    // GetStreamCaps() may return S_FALSE, so don't use the FAILED() or
    // SUCCEEDED() macros here since they'll trigger incorrectly.
    if (hr != S_OK) {
      DVLOG(2) << "Failed to GetStreamCaps";
      return false;
    }

    if (media_type->majortype == MEDIATYPE_Video &&
        media_type->formattype == FORMAT_VideoInfo) {
      VideoCaptureCapabilityWin capability(i);
      capability.supported_format.pixel_format =
          TranslateMediaSubtypeToPixelFormat(media_type->subtype);
      if (capability.supported_format.pixel_format == PIXEL_FORMAT_UNKNOWN)
        continue;

      VIDEOINFOHEADER* h =
          reinterpret_cast<VIDEOINFOHEADER*>(media_type->pbFormat);
      capability.supported_format.frame_size.SetSize(h->bmiHeader.biWidth,
                                                     h->bmiHeader.biHeight);

      // Try to get a better |time_per_frame| from IAMVideoControl.  If not, use
      // the value from VIDEOINFOHEADER.
      REFERENCE_TIME time_per_frame = h->AvgTimePerFrame;
      if (video_control) {
        ScopedCoMem<LONGLONG> max_fps;
        LONG list_size = 0;
        SIZE size = {capability.supported_format.frame_size.width(),
                     capability.supported_format.frame_size.height()};

        // GetFrameRateList() does not always return the maximum frame rate
        // first (e.g. the Logitech Notebook camera returns the list reversed,
        // possibly due to a bug in that API), so we can't assume the first
        // entry is the maximum fps. Instead, pick the smallest time-per-frame
        // in the returned list.
        hr = video_control->GetFrameRateList(output_capture_pin_, i, size,
                                             &list_size, &max_fps);
        // Sometimes |list_size| will be > 0, but max_fps will be NULL.  Some
        // drivers may return an HRESULT of S_FALSE which SUCCEEDED() translates
        // into success, so explicitly check S_OK.  See http://crbug.com/306237.
        if (hr == S_OK && list_size > 0 && max_fps) {
          time_per_frame = *std::min_element(max_fps.get(),
                                             max_fps.get() + list_size);
        }
      }

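      // Convert the 100 ns per-frame period into an integer frames-per-second
      // value (0 when the period is unknown).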
      capability.supported_format.frame_rate =
          (time_per_frame > 0)
              ? static_cast<int>(kSecondsToReferenceTime / time_per_frame)
              : 0;

      // DirectShow currently only exposes integer frame rates, but the
      // best-capability-matching class works on rational frame rates.
      capability.frame_rate_numerator = capability.supported_format.frame_rate;
      capability.frame_rate_denominator = 1;

      capabilities_.Add(capability);
    }
  }

  return !capabilities_.empty();
}

// Set the power line frequency removal in |capture_filter_| if available.
void VideoCaptureDeviceWin::SetAntiFlickerInCaptureFilter() {
  const int power_line_frequency = GetPowerLineFrequencyForLocation();
  if (power_line_frequency != kPowerLine50Hz &&
      power_line_frequency != kPowerLine60Hz) {
    return;
  }
  ScopedComPtr<IKsPropertySet> ks_propset;
  DWORD type_support = 0;
  HRESULT hr;
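  // Anti-flicker is exposed through the VIDEOPROCAMP kernel-streaming property
  // set; only proceed if the driver reports that the property can be set.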
  if (SUCCEEDED(hr = ks_propset.QueryFrom(capture_filter_)) &&
      SUCCEEDED(hr = ks_propset->QuerySupported(PROPSETID_VIDCAP_VIDEOPROCAMP,
          KSPROPERTY_VIDEOPROCAMP_POWERLINE_FREQUENCY, &type_support)) &&
      (type_support & KSPROPERTY_SUPPORT_SET)) {
    KSPROPERTY_VIDEOPROCAMP_S data = {};
    data.Property.Set = PROPSETID_VIDCAP_VIDEOPROCAMP;
    data.Property.Id = KSPROPERTY_VIDEOPROCAMP_POWERLINE_FREQUENCY;
    data.Property.Flags = KSPROPERTY_TYPE_SET;
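    // For KSPROPERTY_VIDEOPROCAMP_POWERLINE_FREQUENCY a value of 1 selects
    // 50 Hz filtering and 2 selects 60 Hz filtering.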
    data.Value = (power_line_frequency == kPowerLine50Hz) ? 1 : 2;
    data.Flags = KSPROPERTY_VIDEOPROCAMP_FLAGS_MANUAL;
    hr = ks_propset->Set(PROPSETID_VIDCAP_VIDEOPROCAMP,
                         KSPROPERTY_VIDEOPROCAMP_POWERLINE_FREQUENCY,
                         &data, sizeof(data), &data, sizeof(data));
    DVLOG_IF(ERROR, FAILED(hr)) << "Anti-flicker setting failed.";
    DVLOG_IF(2, SUCCEEDED(hr)) << "Anti-flicker set correctly.";
  } else {
    DVLOG(2) << "Anti-flicker setting not supported.";
  }
}

void VideoCaptureDeviceWin::SetErrorState(const std::string& reason) {
  DCHECK(CalledOnValidThread());
  DVLOG(1) << reason;
  state_ = kError;
  client_->OnError(reason);
}
}  // namespace media