// src/media/video/capture/mac/video_capture_device_qtkit_mac.mm
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#import "media/video/capture/mac/video_capture_device_qtkit_mac.h"

#import <QTKit/QTKit.h>

#include "base/debug/crash_logging.h"
#include "base/logging.h"
#include "base/mac/scoped_nsexception_enabler.h"
#include "media/video/capture/mac/video_capture_device_mac.h"
#include "media/video/capture/video_capture_device.h"
#include "media/video/capture/video_capture_types.h"
#include "ui/gfx/size.h"

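// VideoCaptureDeviceQTKit is the Objective-C half of the QTKit capture
// implementation.  It owns the QTCaptureSession and its input/output objects
// and forwards decompressed frames to the C++ media::VideoCaptureDeviceMac
// frame receiver.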
@implementation VideoCaptureDeviceQTKit

#pragma mark Class methods

+ (void)getDeviceNames:(NSMutableDictionary*)deviceNames {
  // Third-party drivers often throw exceptions, which are fatal in
  // Chromium (see comments in scoped_nsexception_enabler.h).  The
  // following catches any exceptions and continues in an orderly
  // fashion with no devices detected.
  NSArray* captureDevices =
      base::mac::RunBlockIgnoringExceptions(^{
          return [QTCaptureDevice inputDevicesWithMediaType:QTMediaTypeVideo];
      });

  for (QTCaptureDevice* device in captureDevices) {
    if (![[device attributeForKey:QTCaptureDeviceSuspendedAttribute] boolValue])
      [deviceNames setObject:[device localizedDisplayName]
                      forKey:[device uniqueID]];
  }
}

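// Returns an autoreleased dictionary mapping device unique IDs to localized
// display names.  Enumeration is performed on the main thread via
// +getDeviceNames: (see http://crbug.com/139164).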
+ (NSDictionary*)deviceNames {
  NSMutableDictionary* deviceNames =
      [[[NSMutableDictionary alloc] init] autorelease];

  // TODO(shess): Post to the main thread to see if that helps
  // http://crbug.com/139164
  [self performSelectorOnMainThread:@selector(getDeviceNames:)
                         withObject:deviceNames
                      waitUntilDone:YES];
  return deviceNames;
}

#pragma mark Public methods

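// Designated initializer.  |frameReceiver| is a non-owning pointer; access to
// it from the capture delegate callback is guarded by |lock_|.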
- (id)initWithFrameReceiver:(media::VideoCaptureDeviceMac*)frameReceiver {
  self = [super init];
  if (self) {
    frameReceiver_ = frameReceiver;
    lock_ = [[NSLock alloc] init];
  }
  return self;
}

- (void)dealloc {
  [captureSession_ release];
  [captureDeviceInput_ release];
  [lock_ release];
  [super dealloc];
}

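// Swaps in a new frame receiver (which may be NULL to stop frame delivery)
// under |lock_| so the receiver cannot change in the middle of a frame
// callback.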
- (void)setFrameReceiver:(media::VideoCaptureDeviceMac*)frameReceiver {
  [lock_ lock];
  frameReceiver_ = frameReceiver;
  [lock_ unlock];
}

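// Opens the QTCaptureDevice whose uniqueID matches |deviceId| and attaches a
// decompressed-video output to a new capture session.  Passing nil tears the
// existing session down instead.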
- (BOOL)setCaptureDevice:(NSString*)deviceId {
  if (deviceId) {
    // Set the capture device.
    if (captureDeviceInput_) {
      DLOG(ERROR) << "Video capture device already set.";
      return NO;
    }

    NSArray* captureDevices =
        [QTCaptureDevice inputDevicesWithMediaType:QTMediaTypeVideo];
    NSArray* captureDeviceUniqueIDs =
        [captureDevices valueForKey:@"uniqueID"];
    NSUInteger index = [captureDeviceUniqueIDs indexOfObject:deviceId];
    if (index == NSNotFound) {
      DLOG(ERROR) << "Video capture device not found.";
      return NO;
    }
    QTCaptureDevice* device = [captureDevices objectAtIndex:index];
    if ([[device attributeForKey:QTCaptureDeviceSuspendedAttribute]
            boolValue]) {
      DLOG(ERROR) << "Cannot open suspended video capture device.";
      return NO;
    }
    NSError* error = nil;
    if (![device open:&error]) {
      DLOG(ERROR) << "Could not open video capture device: "
                  << [[error localizedDescription] UTF8String];
      return NO;
    }
    captureDeviceInput_ = [[QTCaptureDeviceInput alloc] initWithDevice:device];
    captureSession_ = [[QTCaptureSession alloc] init];

    QTCaptureDecompressedVideoOutput* captureDecompressedOutput =
        [[[QTCaptureDecompressedVideoOutput alloc] init] autorelease];
    [captureDecompressedOutput setDelegate:self];
    if (![captureSession_ addOutput:captureDecompressedOutput error:&error]) {
      DLOG(ERROR) << "Could not connect video capture output: "
                  << [[error localizedDescription] UTF8String];
      return NO;
    }

    // This crash key can be used to check whether video capture code was
    // involved in a particular crash.
    base::debug::SetCrashKeyValue("VideoCaptureDeviceQTKit", "OpenedDevice");

    // Set the video pixel format to 2VUY (a.k.a. UYVY, packed 4:2:2).
    NSDictionary* captureDictionary = [NSDictionary
        dictionaryWithObject:
            [NSNumber numberWithUnsignedInt:kCVPixelFormatType_422YpCbCr8]
                      forKey:(id)kCVPixelBufferPixelFormatTypeKey];
    [captureDecompressedOutput setPixelBufferAttributes:captureDictionary];

    return YES;
  } else {
    // Remove the previously set capture device.
    if (!captureDeviceInput_) {
      DLOG(ERROR) << "No video capture device set.";
      return YES;
    }
    if ([[captureSession_ inputs] count] > 0) {
      // The device is still running.
      [self stopCapture];
    }
    if ([[captureSession_ outputs] count] > 0) {
      // Only one output is set for |captureSession_|.
      DCHECK_EQ([[captureSession_ outputs] count], 1u);
      id output = [[captureSession_ outputs] objectAtIndex:0];
      [output setDelegate:nil];

      // TODO(shess): QTKit achieves thread safety by posting messages
      // to the main thread.  As part of -addOutput:, it posts a
      // message to the main thread which in turn posts a notification
      // which will run in a future spin after the original method
      // returns.  -removeOutput: can post a main-thread message in
      // between while holding a lock which the notification handler
      // will need.  Posting either -addOutput: or -removeOutput: to
      // the main thread should fix it; removing is likely safer.
      // http://crbug.com/152757
      [captureSession_ performSelectorOnMainThread:@selector(removeOutput:)
                                        withObject:output
                                     waitUntilDone:YES];
    }
    [captureSession_ release];
    captureSession_ = nil;
    [captureDeviceInput_ release];
    captureDeviceInput_ = nil;
    return YES;
  }
}

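// Configures the output resolution and caps the frame rate.  Must be called
// after -setCaptureDevice: has attached the single decompressed-video output
// and before -startCapture.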
- (BOOL)setCaptureHeight:(int)height width:(int)width frameRate:(int)frameRate {
  if (!captureDeviceInput_) {
    DLOG(ERROR) << "No video capture device set.";
    return NO;
  }
  if ([[captureSession_ outputs] count] != 1) {
    DLOG(ERROR) << "Expected exactly one video capture output.";
    return NO;
  }
  if (frameRate <= 0) {
    DLOG(ERROR) << "Invalid frame rate.";
    return NO;
  }

  frameRate_ = frameRate;

  QTCaptureDecompressedVideoOutput* output =
      [[captureSession_ outputs] objectAtIndex:0];

  // Set up the desired output properties. The old capture dictionary is used
  // to retrieve the initial pixel format, which must be maintained.
  NSDictionary* videoSettingsDictionary = @{
    (id)kCVPixelBufferWidthKey : @(width),
    (id)kCVPixelBufferHeightKey : @(height),
    (id)kCVPixelBufferPixelFormatTypeKey : [[output pixelBufferAttributes]
        valueForKey:(id)kCVPixelBufferPixelFormatTypeKey]
  };
  [output setPixelBufferAttributes:videoSettingsDictionary];

  [output setMinimumVideoFrameInterval:1.0 / frameRate];
  return YES;
}

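// Adds the device input to the session (if not already present), registers
// for runtime-error notifications, and starts the session running.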
- (BOOL)startCapture {
  if ([[captureSession_ outputs] count] == 0) {
    // Capture properties not set.
    DLOG(ERROR) << "Video capture device not initialized.";
    return NO;
  }
  if ([[captureSession_ inputs] count] == 0) {
    NSError* error = nil;
    if (![captureSession_ addInput:captureDeviceInput_ error:&error]) {
      DLOG(ERROR) << "Could not connect video capture device: "
                  << [[error localizedDescription] UTF8String];
      return NO;
    }
    NSNotificationCenter* notificationCenter =
        [NSNotificationCenter defaultCenter];
    [notificationCenter addObserver:self
                           selector:@selector(handleNotification:)
                               name:QTCaptureSessionRuntimeErrorNotification
                             object:captureSession_];
    [captureSession_ startRunning];
  }
  return YES;
}

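// Removes the device input and stops the session, then unregisters from all
// notifications.  Safe to call when capture is not running.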
- (void)stopCapture {
  if ([[captureSession_ inputs] count] == 1) {
    [captureSession_ removeInput:captureDeviceInput_];
    [captureSession_ stopRunning];
  }

  [[NSNotificationCenter defaultCenter] removeObserver:self];
}

// Delegate method called by |captureOutput| to deliver a new video frame.
- (void)captureOutput:(QTCaptureOutput*)captureOutput
  didOutputVideoFrame:(CVImageBufferRef)videoFrame
     withSampleBuffer:(QTSampleBuffer*)sampleBuffer
       fromConnection:(QTCaptureConnection*)connection {
  [lock_ lock];
  if (!frameReceiver_) {
    [lock_ unlock];
    return;
  }

  // Lock the frame and calculate frame size.
  const int kLockFlags = 0;
  if (CVPixelBufferLockBaseAddress(videoFrame, kLockFlags)
      == kCVReturnSuccess) {
    void* baseAddress = CVPixelBufferGetBaseAddress(videoFrame);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(videoFrame);
    size_t frameWidth = CVPixelBufferGetWidth(videoFrame);
    size_t frameHeight = CVPixelBufferGetHeight(videoFrame);
    size_t frameSize = bytesPerRow * frameHeight;

    // TODO(shess): bytesPerRow may not correspond to frameWidth_*2,
    // but VideoCaptureController::OnIncomingCapturedData() requires
    // it to do so.  Plumbing things through is intrusive, for now
    // just deliver an adjusted buffer.
    // TODO(nick): This workaround could probably be eliminated by using
    // VideoCaptureController::OnIncomingCapturedVideoFrame, which supports
    // pitches.
    UInt8* addressToPass = static_cast<UInt8*>(baseAddress);
    // UYVY is 2 bytes per pixel.
    size_t expectedBytesPerRow = frameWidth * 2;
    if (bytesPerRow > expectedBytesPerRow) {
      // TODO(shess): frameHeight and frameHeight_ are not the same,
      // try to do what the surrounding code seems to assume.
      // Ironically, captureCapability and frameSize are ignored
      // anyhow.
      adjustedFrame_.resize(expectedBytesPerRow * frameHeight);
      // std::vector is guaranteed to be contiguous by the standard.
      UInt8* adjustedAddress = &adjustedFrame_[0];

      for (size_t y = 0; y < frameHeight; ++y) {
        memcpy(adjustedAddress + y * expectedBytesPerRow,
               addressToPass + y * bytesPerRow,
               expectedBytesPerRow);
      }

      addressToPass = adjustedAddress;
      frameSize = frameHeight * expectedBytesPerRow;
    }

    media::VideoCaptureFormat captureFormat(gfx::Size(frameWidth, frameHeight),
                                            frameRate_,
                                            media::PIXEL_FORMAT_UYVY);

    // The aspect ratio dictionary is often missing, in which case we report
    // a pixel aspect ratio of 0:0.
    int aspectNumerator = 0, aspectDenominator = 0;
    CFDictionaryRef aspectRatioDict = (CFDictionaryRef)CVBufferGetAttachment(
        videoFrame, kCVImageBufferPixelAspectRatioKey, NULL);
    if (aspectRatioDict) {
      CFNumberRef aspectNumeratorRef = (CFNumberRef)CFDictionaryGetValue(
          aspectRatioDict, kCVImageBufferPixelAspectRatioHorizontalSpacingKey);
      CFNumberRef aspectDenominatorRef = (CFNumberRef)CFDictionaryGetValue(
          aspectRatioDict, kCVImageBufferPixelAspectRatioVerticalSpacingKey);
      DCHECK(aspectNumeratorRef && aspectDenominatorRef) <<
          "Aspect Ratio dictionary missing its entries.";
      CFNumberGetValue(aspectNumeratorRef, kCFNumberIntType, &aspectNumerator);
      CFNumberGetValue(
          aspectDenominatorRef, kCFNumberIntType, &aspectDenominator);
    }

    // Deliver the captured video frame.
    frameReceiver_->ReceiveFrame(addressToPass, frameSize, captureFormat,
        aspectNumerator, aspectDenominator);

    CVPixelBufferUnlockBaseAddress(videoFrame, kLockFlags);
  }
  [lock_ unlock];
}

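// Handles QTCaptureSessionRuntimeErrorNotification by forwarding a description
// of the error to the frame receiver.  Note that, unlike the frame callback
// above, |frameReceiver_| is accessed here without taking |lock_|.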
- (void)handleNotification:(NSNotification*)errorNotification {
  NSError* error = (NSError*)[[errorNotification userInfo]
      objectForKey:QTCaptureSessionErrorKey];
  NSString* errorString =
      [NSString stringWithFormat:@"%@: %@",
                                 [error localizedDescription],
                                 [error localizedFailureReason]];

  frameReceiver_->ReceiveError([errorString UTF8String]);
}

@end
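
// A minimal usage sketch of this interface, assuming a caller similar to the
// C++ VideoCaptureDeviceMac.  The |receiver|, |deviceId|, and the chosen
// resolution/frame rate below are illustrative, not taken from this file:
//
//   VideoCaptureDeviceQTKit* capture =
//       [[VideoCaptureDeviceQTKit alloc] initWithFrameReceiver:receiver];
//   if ([capture setCaptureDevice:deviceId] &&
//       [capture setCaptureHeight:480 width:640 frameRate:30] &&
//       [capture startCapture]) {
//     // Frames are now delivered to |receiver| via
//     // -captureOutput:didOutputVideoFrame:withSampleBuffer:fromConnection:.
//   }
//   ...
//   [capture stopCapture];
//   [capture setCaptureDevice:nil];  // Tear down the session.
//   [capture release];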