Upstream version 5.34.104.0
platform/framework/web/crosswalk.git: src/media/video/capture/win/video_capture_device_win.cc
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "media/video/capture/win/video_capture_device_win.h"

#include <algorithm>
#include <list>

#include "base/command_line.h"
#include "base/strings/string_util.h"
#include "base/strings/sys_string_conversions.h"
#include "base/win/metro.h"
#include "base/win/scoped_co_mem.h"
#include "base/win/scoped_variant.h"
#include "base/win/windows_version.h"
#include "media/base/media_switches.h"
#include "media/video/capture/win/video_capture_device_mf_win.h"

using base::win::ScopedCoMem;
using base::win::ScopedComPtr;
using base::win::ScopedVariant;

namespace media {
namespace {

// Finds and creates a DirectShow Video Capture filter matching the
// device_name.
HRESULT GetDeviceFilter(const VideoCaptureDevice::Name& device_name,
                        IBaseFilter** filter) {
  DCHECK(filter);

  ScopedComPtr<ICreateDevEnum> dev_enum;
  HRESULT hr = dev_enum.CreateInstance(CLSID_SystemDeviceEnum, NULL,
                                       CLSCTX_INPROC);
  if (FAILED(hr))
    return hr;

  ScopedComPtr<IEnumMoniker> enum_moniker;
  hr = dev_enum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory,
                                       enum_moniker.Receive(), 0);
  // CreateClassEnumerator returns S_FALSE on some versions of Windows when no
  // camera exists, so the FAILED macro can't be used here.
  if (hr != S_OK)
    return hr;

  ScopedComPtr<IMoniker> moniker;
  ScopedComPtr<IBaseFilter> capture_filter;
  DWORD fetched = 0;
  while (enum_moniker->Next(1, moniker.Receive(), &fetched) == S_OK) {
    ScopedComPtr<IPropertyBag> prop_bag;
    hr = moniker->BindToStorage(0, 0, IID_IPropertyBag, prop_bag.ReceiveVoid());
    if (FAILED(hr)) {
      moniker.Release();
      continue;
    }

    // Find the description or friendly name.
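    // The id stored in |device_name| is the DevicePath when it is available
    // and the Description/FriendlyName otherwise (see GetDeviceNames()), so
    // try all three properties when matching.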
    static const wchar_t* kPropertyNames[] = {
      L"DevicePath", L"Description", L"FriendlyName"
    };
    ScopedVariant name;
    for (size_t i = 0;
         i < arraysize(kPropertyNames) && name.type() != VT_BSTR; ++i) {
      prop_bag->Read(kPropertyNames[i], name.Receive(), 0);
    }
    if (name.type() == VT_BSTR) {
      std::string device_path(base::SysWideToUTF8(V_BSTR(&name)));
      if (device_path.compare(device_name.id()) == 0) {
        // We have found the requested device
        hr = moniker->BindToObject(0, 0, IID_IBaseFilter,
                                   capture_filter.ReceiveVoid());
        DVPLOG_IF(2, FAILED(hr)) << "Failed to bind camera filter.";
        break;
      }
    }
    moniker.Release();
  }

  *filter = capture_filter.Detach();
  if (!*filter && SUCCEEDED(hr))
    hr = HRESULT_FROM_WIN32(ERROR_NOT_FOUND);

  return hr;
}

// Check if a Pin matches a category.
bool PinMatchesCategory(IPin* pin, REFGUID category) {
  DCHECK(pin);
  bool found = false;
  ScopedComPtr<IKsPropertySet> ks_property;
  HRESULT hr = ks_property.QueryFrom(pin);
  if (SUCCEEDED(hr)) {
    GUID pin_category;
    DWORD return_value;
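    // The pin category (e.g. PIN_CATEGORY_CAPTURE or PIN_CATEGORY_PREVIEW) is
    // exposed through the AMPROPSETID_Pin property set.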
    hr = ks_property->Get(AMPROPSETID_Pin, AMPROPERTY_PIN_CATEGORY, NULL, 0,
                          &pin_category, sizeof(pin_category), &return_value);
    if (SUCCEEDED(hr) && (return_value == sizeof(pin_category))) {
      found = (pin_category == category);
    }
  }
  return found;
}

// Finds an IPin on an IBaseFilter given the direction and category.
HRESULT GetPin(IBaseFilter* filter, PIN_DIRECTION pin_dir, REFGUID category,
               IPin** pin) {
  DCHECK(pin);
  ScopedComPtr<IEnumPins> pin_enum;
  HRESULT hr = filter->EnumPins(pin_enum.Receive());
  if (pin_enum == NULL)
    return hr;

  // Return the first pin that matches the requested direction and category.
  hr = pin_enum->Reset();  // Set to the first pin.
  while ((hr = pin_enum->Next(1, pin, NULL)) == S_OK) {
    PIN_DIRECTION this_pin_dir = static_cast<PIN_DIRECTION>(-1);
    hr = (*pin)->QueryDirection(&this_pin_dir);
    if (pin_dir == this_pin_dir) {
      if (category == GUID_NULL || PinMatchesCategory(*pin, category))
        return S_OK;
    }
    (*pin)->Release();
  }

  return E_FAIL;
}

// Release the format block for a media type.
// http://msdn.microsoft.com/en-us/library/dd375432(VS.85).aspx
void FreeMediaType(AM_MEDIA_TYPE* mt) {
  if (mt->cbFormat != 0) {
    CoTaskMemFree(mt->pbFormat);
    mt->cbFormat = 0;
    mt->pbFormat = NULL;
  }
  if (mt->pUnk != NULL) {
    NOTREACHED();
    // pUnk should not be used.
    mt->pUnk->Release();
    mt->pUnk = NULL;
  }
}

// Delete a media type structure that was allocated on the heap.
// http://msdn.microsoft.com/en-us/library/dd375432(VS.85).aspx
void DeleteMediaType(AM_MEDIA_TYPE* mt) {
  if (mt != NULL) {
    FreeMediaType(mt);
    CoTaskMemFree(mt);
  }
}

}  // namespace

// static
void VideoCaptureDevice::GetDeviceNames(Names* device_names) {
  const CommandLine* cmd_line = CommandLine::ForCurrentProcess();
  // Use Media Foundation for Metro processes (Windows 8 and later) and
  // DirectShow for everything else, unless overridden by a flag. Media
  // Foundation can also be forced if the appropriate flag is set and we are
  // on Windows 7 or later in non-Metro mode.
  if ((base::win::IsMetroProcess() &&
      !cmd_line->HasSwitch(switches::kForceDirectShowVideoCapture)) ||
      (base::win::GetVersion() >= base::win::VERSION_WIN7 &&
      cmd_line->HasSwitch(switches::kForceMediaFoundationVideoCapture))) {
    VideoCaptureDeviceMFWin::GetDeviceNames(device_names);
  } else {
    VideoCaptureDeviceWin::GetDeviceNames(device_names);
  }
}

// static
void VideoCaptureDevice::GetDeviceSupportedFormats(const Name& device,
    VideoCaptureFormats* formats) {
  const CommandLine* cmd_line = CommandLine::ForCurrentProcess();
  // Use Media Foundation for Metro processes (Windows 8 and later) and
  // DirectShow for everything else, unless overridden by a flag. Media
  // Foundation can also be forced if the appropriate flag is set and we are
  // on Windows 7 or later in non-Metro mode.
  if ((base::win::IsMetroProcess() &&
      !cmd_line->HasSwitch(switches::kForceDirectShowVideoCapture)) ||
      (base::win::GetVersion() >= base::win::VERSION_WIN7 &&
      cmd_line->HasSwitch(switches::kForceMediaFoundationVideoCapture))) {
    VideoCaptureDeviceMFWin::GetDeviceSupportedFormats(device, formats);
  } else {
    VideoCaptureDeviceWin::GetDeviceSupportedFormats(device, formats);
  }
}

// static
VideoCaptureDevice* VideoCaptureDevice::Create(const Name& device_name) {
  VideoCaptureDevice* ret = NULL;
  if (device_name.capture_api_type() == Name::MEDIA_FOUNDATION) {
    DCHECK(VideoCaptureDeviceMFWin::PlatformSupported());
    scoped_ptr<VideoCaptureDeviceMFWin> device(
        new VideoCaptureDeviceMFWin(device_name));
    DVLOG(1) << " MediaFoundation Device: " << device_name.name();
    if (device->Init())
      ret = device.release();
  } else if (device_name.capture_api_type() == Name::DIRECT_SHOW) {
    scoped_ptr<VideoCaptureDeviceWin> device(
        new VideoCaptureDeviceWin(device_name));
    DVLOG(1) << " DirectShow Device: " << device_name.name();
    if (device->Init())
      ret = device.release();
  } else {
    NOTREACHED() << " Couldn't recognize VideoCaptureDevice type";
  }

  return ret;
}

// static
void VideoCaptureDeviceWin::GetDeviceNames(Names* device_names) {
  DCHECK(device_names);

  ScopedComPtr<ICreateDevEnum> dev_enum;
  HRESULT hr = dev_enum.CreateInstance(CLSID_SystemDeviceEnum, NULL,
                                       CLSCTX_INPROC);
  if (FAILED(hr))
    return;

  ScopedComPtr<IEnumMoniker> enum_moniker;
  hr = dev_enum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory,
                                       enum_moniker.Receive(), 0);
  // CreateClassEnumerator returns S_FALSE on some versions of Windows when no
  // camera exists, so the FAILED macro can't be used here.
  if (hr != S_OK)
    return;

  device_names->clear();

  // Name of a fake DirectShow filter that exists on computers with
  // GTalk installed.
  static const char kGoogleCameraAdapter[] = "google camera adapter";

  // Enumerate all video capture devices.
  ScopedComPtr<IMoniker> moniker;
  int index = 0;
  while (enum_moniker->Next(1, moniker.Receive(), NULL) == S_OK) {
    ScopedComPtr<IPropertyBag> prop_bag;
    hr = moniker->BindToStorage(0, 0, IID_IPropertyBag, prop_bag.ReceiveVoid());
    if (FAILED(hr)) {
      moniker.Release();
      continue;
    }

    // Find the description or friendly name.
    ScopedVariant name;
    hr = prop_bag->Read(L"Description", name.Receive(), 0);
    if (FAILED(hr))
      hr = prop_bag->Read(L"FriendlyName", name.Receive(), 0);

    if (SUCCEEDED(hr) && name.type() == VT_BSTR) {
      // Ignore all VFW drivers and the special Google Camera Adapter.
      // Google Camera Adapter is not a real DirectShow camera device, and
      // VFW drivers are legacy Video for Windows drivers that cannot be used.
      const wchar_t* str_ptr = V_BSTR(&name);
      const int name_length = arraysize(kGoogleCameraAdapter) - 1;

      if (wcsstr(str_ptr, L"(VFW)") == NULL &&
          (lstrlenW(str_ptr) < name_length ||
           !LowerCaseEqualsASCII(str_ptr, str_ptr + name_length,
                                 kGoogleCameraAdapter))) {
        std::string id;
        std::string device_name(base::SysWideToUTF8(str_ptr));
        name.Reset();
        hr = prop_bag->Read(L"DevicePath", name.Receive(), 0);
        if (FAILED(hr) || name.type() != VT_BSTR) {
          id = device_name;
        } else {
          DCHECK_EQ(name.type(), VT_BSTR);
          id = base::SysWideToUTF8(V_BSTR(&name));
        }

        device_names->push_back(Name(device_name, id, Name::DIRECT_SHOW));
      }
    }
    moniker.Release();
  }
}

// static
void VideoCaptureDeviceWin::GetDeviceSupportedFormats(const Name& device,
    VideoCaptureFormats* formats) {
  NOTIMPLEMENTED();
}

VideoCaptureDeviceWin::VideoCaptureDeviceWin(const Name& device_name)
    : device_name_(device_name),
      state_(kIdle) {
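  // The device may be constructed on one thread and then used exclusively on
  // another; detach here and re-check with CalledOnValidThread() in every
  // subsequent call.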
  DetachFromThread();
}

VideoCaptureDeviceWin::~VideoCaptureDeviceWin() {
  DCHECK(CalledOnValidThread());
  if (media_control_)
    media_control_->Stop();

  if (graph_builder_) {
    if (sink_filter_) {
      graph_builder_->RemoveFilter(sink_filter_);
      sink_filter_ = NULL;
    }

    if (capture_filter_)
      graph_builder_->RemoveFilter(capture_filter_);

    if (mjpg_filter_)
      graph_builder_->RemoveFilter(mjpg_filter_);
  }
}

bool VideoCaptureDeviceWin::Init() {
  DCHECK(CalledOnValidThread());
  HRESULT hr = GetDeviceFilter(device_name_, capture_filter_.Receive());
  if (!capture_filter_) {
    DVLOG(2) << "Failed to create capture filter.";
    return false;
  }

  hr = GetPin(capture_filter_, PINDIR_OUTPUT, PIN_CATEGORY_CAPTURE,
              output_capture_pin_.Receive());
  if (!output_capture_pin_) {
    DVLOG(2) << "Failed to get capture output pin";
    return false;
  }

  // Create the sink filter used for receiving captured frames.
  sink_filter_ = new SinkFilter(this);
  if (sink_filter_ == NULL) {
    DVLOG(2) << "Failed to create sink filter";
    return false;
  }

  input_sink_pin_ = sink_filter_->GetPin(0);

  hr = graph_builder_.CreateInstance(CLSID_FilterGraph, NULL,
                                     CLSCTX_INPROC_SERVER);
  if (FAILED(hr)) {
    DVLOG(2) << "Failed to create graph builder.";
    return false;
  }

  hr = graph_builder_.QueryInterface(media_control_.Receive());
  if (FAILED(hr)) {
    DVLOG(2) << "Failed to get media control interface.";
    return false;
  }

  hr = graph_builder_->AddFilter(capture_filter_, NULL);
  if (FAILED(hr)) {
    DVLOG(2) << "Failed to add the capture device to the graph.";
    return false;
  }

  hr = graph_builder_->AddFilter(sink_filter_, NULL);
  if (FAILED(hr)) {
    DVLOG(2) << "Failed to add the sink filter to the graph.";
    return false;
  }

  return CreateCapabilityMap();
}

void VideoCaptureDeviceWin::AllocateAndStart(
    const VideoCaptureParams& params,
    scoped_ptr<VideoCaptureDevice::Client> client) {
  DCHECK(CalledOnValidThread());
  if (state_ != kIdle)
    return;

  client_ = client.Pass();

  // Get the camera capability that best matches the requested resolution.
  const VideoCaptureCapabilityWin& found_capability =
      capabilities_.GetBestMatchedFormat(
          params.requested_format.frame_size.width(),
          params.requested_format.frame_size.height(),
          params.requested_format.frame_rate);
  VideoCaptureFormat format = found_capability.supported_format;

  // Reduce the frame rate if the requested frame rate is lower
  // than the capability.
  if (format.frame_rate > params.requested_format.frame_rate)
    format.frame_rate = params.requested_format.frame_rate;

  AM_MEDIA_TYPE* pmt = NULL;
  VIDEO_STREAM_CONFIG_CAPS caps;

  ScopedComPtr<IAMStreamConfig> stream_config;
  HRESULT hr = output_capture_pin_.QueryInterface(stream_config.Receive());
  if (FAILED(hr)) {
    SetErrorState("Can't get the Capture format settings");
    return;
  }

  // Get the Windows capability from the capture device.
  hr = stream_config->GetStreamCaps(found_capability.stream_index, &pmt,
                                    reinterpret_cast<BYTE*>(&caps));
  if (SUCCEEDED(hr)) {
    if (pmt->formattype == FORMAT_VideoInfo) {
      VIDEOINFOHEADER* h = reinterpret_cast<VIDEOINFOHEADER*>(pmt->pbFormat);
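      // AvgTimePerFrame is a REFERENCE_TIME, i.e. the frame duration in
      // 100-nanosecond units, hence the conversion via kSecondsToReferenceTime.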
      if (format.frame_rate > 0)
        h->AvgTimePerFrame = kSecondsToReferenceTime / format.frame_rate;
    }
    // Set the sink filter to request this format.
    sink_filter_->SetRequestedMediaFormat(format);
    // Order the capture device to use this format.
    hr = stream_config->SetFormat(pmt);
  }

  if (FAILED(hr))
    SetErrorState("Failed to set capture device output format");

  if (format.pixel_format == PIXEL_FORMAT_MJPEG && !mjpg_filter_.get()) {
    // Create MJPG filter if we need it.
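    // CLSID_MjpegDec identifies the stock DirectShow MJPEG decompressor.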
    hr = mjpg_filter_.CreateInstance(CLSID_MjpegDec, NULL, CLSCTX_INPROC);

    if (SUCCEEDED(hr)) {
      GetPin(mjpg_filter_, PINDIR_INPUT, GUID_NULL, input_mjpg_pin_.Receive());
      GetPin(mjpg_filter_, PINDIR_OUTPUT, GUID_NULL,
             output_mjpg_pin_.Receive());
      hr = graph_builder_->AddFilter(mjpg_filter_, NULL);
    }

    if (FAILED(hr)) {
      mjpg_filter_.Release();
      input_mjpg_pin_.Release();
      output_mjpg_pin_.Release();
    }
  }

  if (format.pixel_format == PIXEL_FORMAT_MJPEG && mjpg_filter_.get()) {
    // Connect the camera to the MJPEG decoder.
    hr = graph_builder_->ConnectDirect(output_capture_pin_, input_mjpg_pin_,
                                       NULL);
    // Connect the MJPEG decoder to the sink filter.
    if (SUCCEEDED(hr)) {
      hr = graph_builder_->ConnectDirect(output_mjpg_pin_, input_sink_pin_,
                                         NULL);
    }
  } else {
    hr = graph_builder_->ConnectDirect(output_capture_pin_, input_sink_pin_,
                                       NULL);
  }

  if (FAILED(hr)) {
    SetErrorState("Failed to connect the Capture graph.");
    return;
  }

  hr = media_control_->Pause();
  if (FAILED(hr)) {
    SetErrorState("Failed to Pause the Capture device. "
                  "Is it already occupied?");
    return;
  }

  // Get the format back from the sink filter after the filters have been
  // connected.
  capture_format_ = sink_filter_->ResultingFormat();

  // Start capturing.
  hr = media_control_->Run();
  if (FAILED(hr)) {
    SetErrorState("Failed to start the Capture device.");
    return;
  }

  state_ = kCapturing;
}

void VideoCaptureDeviceWin::StopAndDeAllocate() {
  DCHECK(CalledOnValidThread());
  if (state_ != kCapturing)
    return;

  HRESULT hr = media_control_->Stop();
  if (FAILED(hr)) {
    SetErrorState("Failed to stop the capture graph.");
    return;
  }

  graph_builder_->Disconnect(output_capture_pin_);
  graph_builder_->Disconnect(input_sink_pin_);

  // If the MJPEG filter exists, disconnect it even if it has not been used.
  if (mjpg_filter_) {
    graph_builder_->Disconnect(input_mjpg_pin_);
    graph_builder_->Disconnect(output_mjpg_pin_);
  }

  if (FAILED(hr)) {
    SetErrorState("Failed to Stop the Capture device");
    return;
  }
  client_.reset();
  state_ = kIdle;
}

// Implements SinkFilterObserver::FrameReceived.
void VideoCaptureDeviceWin::FrameReceived(const uint8* buffer,
                                          int length) {
  client_->OnIncomingCapturedFrame(
      buffer, length, base::TimeTicks::Now(), 0, capture_format_);
}

bool VideoCaptureDeviceWin::CreateCapabilityMap() {
  DCHECK(CalledOnValidThread());
  ScopedComPtr<IAMStreamConfig> stream_config;
  HRESULT hr = output_capture_pin_.QueryInterface(stream_config.Receive());
  if (FAILED(hr)) {
    DVLOG(2) << "Failed to get IAMStreamConfig interface from "
                "capture device";
    return false;
  }

  // Get interface used for getting the frame rate.
  ScopedComPtr<IAMVideoControl> video_control;
  hr = capture_filter_.QueryInterface(video_control.Receive());
  DVLOG_IF(2, FAILED(hr)) << "IAMVideoControl Interface NOT SUPPORTED";

  AM_MEDIA_TYPE* media_type = NULL;
  VIDEO_STREAM_CONFIG_CAPS caps;
  int count, size;

  hr = stream_config->GetNumberOfCapabilities(&count, &size);
  if (FAILED(hr)) {
    DVLOG(2) << "Failed to GetNumberOfCapabilities";
    return false;
  }

  for (int i = 0; i < count; ++i) {
    hr = stream_config->GetStreamCaps(i, &media_type,
                                      reinterpret_cast<BYTE*>(&caps));
    // GetStreamCaps() may return S_FALSE, so don't use the FAILED() or
    // SUCCEEDED() macros here since they'll trigger incorrectly.
    if (hr != S_OK) {
      DVLOG(2) << "Failed to GetStreamCaps";
      return false;
    }

    if (media_type->majortype == MEDIATYPE_Video &&
        media_type->formattype == FORMAT_VideoInfo) {
      VideoCaptureCapabilityWin capability(i);
      VIDEOINFOHEADER* h =
          reinterpret_cast<VIDEOINFOHEADER*>(media_type->pbFormat);
      capability.supported_format.frame_size.SetSize(h->bmiHeader.biWidth,
                                                     h->bmiHeader.biHeight);

      // Try to get a better |time_per_frame| from IAMVideoControl.  If not, use
      // the value from VIDEOINFOHEADER.
      REFERENCE_TIME time_per_frame = h->AvgTimePerFrame;
      if (video_control) {
        ScopedCoMem<LONGLONG> max_fps;
        LONG list_size = 0;
        SIZE size = {capability.supported_format.frame_size.width(),
                     capability.supported_format.frame_size.height()};

        // GetFrameRateList() does not always return the maximum frame rate
        // first (e.g. on the Logitech Notebook camera the returned list is
        // reversed), possibly due to a bug in that API. We therefore cannot
        // assume the first value is the max fps and instead search the whole
        // list.
        hr = video_control->GetFrameRateList(output_capture_pin_, i, size,
                                             &list_size, &max_fps);
        // Sometimes |list_size| will be > 0, but max_fps will be NULL.  Some
        // drivers may return an HRESULT of S_FALSE which SUCCEEDED() translates
        // into success, so explicitly check S_OK.  See http://crbug.com/306237.
        if (hr == S_OK && list_size > 0 && max_fps) {
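          // The returned values are frame durations in 100-nanosecond units,
          // so the smallest entry corresponds to the highest frame rate.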
          time_per_frame = *std::min_element(max_fps.get(),
                                             max_fps.get() + list_size);
        }
      }

      capability.supported_format.frame_rate =
          (time_per_frame > 0)
              ? static_cast<int>(kSecondsToReferenceTime / time_per_frame)
              : 0;

      // DirectShow currently reports only integer frame rates, but the best
      // capability matching class works with rational frame rates.
      capability.frame_rate_numerator = capability.supported_format.frame_rate;
      capability.frame_rate_denominator = 1;

      // We can't switch MEDIATYPE :~(.
      if (media_type->subtype == kMediaSubTypeI420) {
        capability.supported_format.pixel_format = PIXEL_FORMAT_I420;
      } else if (media_type->subtype == MEDIASUBTYPE_IYUV) {
        // This is identical to PIXEL_FORMAT_I420.
        capability.supported_format.pixel_format = PIXEL_FORMAT_I420;
      } else if (media_type->subtype == MEDIASUBTYPE_RGB24) {
        capability.supported_format.pixel_format = PIXEL_FORMAT_RGB24;
      } else if (media_type->subtype == MEDIASUBTYPE_YUY2) {
        capability.supported_format.pixel_format = PIXEL_FORMAT_YUY2;
      } else if (media_type->subtype == MEDIASUBTYPE_MJPG) {
        capability.supported_format.pixel_format = PIXEL_FORMAT_MJPEG;
      } else if (media_type->subtype == MEDIASUBTYPE_UYVY) {
        capability.supported_format.pixel_format = PIXEL_FORMAT_UYVY;
      } else if (media_type->subtype == MEDIASUBTYPE_ARGB32) {
        capability.supported_format.pixel_format = PIXEL_FORMAT_ARGB;
      } else {
        WCHAR guid_str[128];
        StringFromGUID2(media_type->subtype, guid_str, arraysize(guid_str));
        DVLOG(2) << "Device supports (also) an unknown media type " << guid_str;
        // Free the media type before skipping to the next capability so it
        // does not leak.
        DeleteMediaType(media_type);
        media_type = NULL;
        continue;
      }
      capabilities_.Add(capability);
    }
    DeleteMediaType(media_type);
    media_type = NULL;
  }

  return !capabilities_.empty();
}

void VideoCaptureDeviceWin::SetErrorState(const std::string& reason) {
  DCHECK(CalledOnValidThread());
  DVLOG(1) << reason;
  state_ = kError;
  client_->OnError(reason);
}
}  // namespace media