// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#import "media/video/capture/mac/video_capture_device_avfoundation_mac.h"

#import <CoreVideo/CoreVideo.h>

#include "base/logging.h"
#include "base/mac/foundation_util.h"
#include "media/video/capture/mac/video_capture_device_mac.h"
#include "ui/gfx/size.h"

@implementation VideoCaptureDeviceAVFoundation

#pragma mark Class methods

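// Fills |deviceNames| with the localized name of every video or muxed capture
// device that is not suspended, keyed by the device's unique ID.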
+ (void)getDeviceNames:(NSMutableDictionary*)deviceNames {
  // At this stage we already know that AVFoundation is supported and that the
  // whole library has been loaded and initialised by the device monitoring.
  NSArray* devices = [AVCaptureDeviceGlue devices];
  for (CrAVCaptureDevice* device in devices) {
    if (([device hasMediaType:AVFoundationGlue::AVMediaTypeVideo()] ||
         [device hasMediaType:AVFoundationGlue::AVMediaTypeMuxed()]) &&
        ![device isSuspended]) {
      [deviceNames setObject:[device localizedName]
                      forKey:[device uniqueID]];
    }
  }
}

+ (NSDictionary*)deviceNames {
  NSMutableDictionary* deviceNames =
      [[[NSMutableDictionary alloc] init] autorelease];
  // Device name retrieval does not happen on the main thread, and this might
  // cause instabilities (it did in QTKit), so keep an eye on it here.
  [self getDeviceNames:deviceNames];
  return deviceNames;
}

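// Appends to |formats| every resolution, maximum frame rate and pixel format
// combination reported by the device whose unique ID matches |name|. Does
// nothing if no matching device is found.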
+ (void)getDevice:(const media::VideoCaptureDevice::Name&)name
 supportedFormats:(media::VideoCaptureFormats*)formats {
  NSArray* devices = [AVCaptureDeviceGlue devices];
  CrAVCaptureDevice* device = nil;
  for (device in devices) {
    // |name.id()| is a std::string, so this compares the string contents.
    if ([[device uniqueID] UTF8String] == name.id())
      break;
  }
  if (device == nil)
    return;
  for (CrAVCaptureDeviceFormat* format in device.formats) {
    // MediaSubType is a CMPixelFormatType but can be used as a
    // CVPixelFormatType as well, according to CMFormatDescription.h.
    media::VideoPixelFormat pixelFormat = media::PIXEL_FORMAT_UNKNOWN;
    switch (CoreMediaGlue::CMFormatDescriptionGetMediaSubType(
                [format formatDescription])) {
      case kCVPixelFormatType_422YpCbCr8:  // Typical.
        pixelFormat = media::PIXEL_FORMAT_UYVY;
        break;
      case CoreMediaGlue::kCMPixelFormat_422YpCbCr8_yuvs:
        pixelFormat = media::PIXEL_FORMAT_YUY2;
        break;
      case CoreMediaGlue::kCMVideoCodecType_JPEG_OpenDML:
        pixelFormat = media::PIXEL_FORMAT_MJPEG;
        break;
      default:
        break;
    }

    CoreMediaGlue::CMVideoDimensions dimensions =
        CoreMediaGlue::CMVideoFormatDescriptionGetDimensions(
            [format formatDescription]);

    for (CrAVFrameRateRange* frameRate in
             [format videoSupportedFrameRateRanges]) {
      media::VideoCaptureFormat format(
          gfx::Size(dimensions.width, dimensions.height),
          static_cast<int>(frameRate.maxFrameRate),
          pixelFormat);
      formats->push_back(format);
      DVLOG(2) << name.name() << " resolution: "
               << format.frame_size.ToString() << ", fps: "
               << format.frame_rate << ", pixel format: "
               << format.pixel_format;
    }
  }
}

#pragma mark Public methods

- (id)initWithFrameReceiver:(media::VideoCaptureDeviceMac*)frameReceiver {
  if ((self = [super init])) {
    DCHECK(main_thread_checker_.CalledOnValidThread());
    DCHECK(frameReceiver);
    [self setFrameReceiver:frameReceiver];
    captureSession_.reset(
        [[AVFoundationGlue::AVCaptureSessionClass() alloc] init]);
  }
  return self;
}

- (void)dealloc {
  [self stopCapture];
  [super dealloc];
}

- (void)setFrameReceiver:(media::VideoCaptureDeviceMac*)frameReceiver {
  base::AutoLock lock(lock_);
  frameReceiver_ = frameReceiver;
}

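// Plugs the device identified by |deviceId| into |captureSession_|, creating
// its capture input and the session's video data output. A nil |deviceId|
// stops capture and removes the current input and output instead. Returns YES
// on success.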
- (BOOL)setCaptureDevice:(NSString*)deviceId {
  DCHECK(captureSession_);
  DCHECK(main_thread_checker_.CalledOnValidThread());

  if (!deviceId) {
    // First stop the capture session, if it's running.
    [self stopCapture];
    // Now remove the input and output from the capture session.
    [captureSession_ removeOutput:captureVideoDataOutput_];
    if (captureDeviceInput_) {
      [captureSession_ removeInput:captureDeviceInput_];
      // No need to release |captureDeviceInput_|; it is owned by the session.
      captureDeviceInput_ = nil;
    }
    return YES;
  }

  // Look for the input device with the requested unique ID.
  captureDevice_ = [AVCaptureDeviceGlue deviceWithUniqueID:deviceId];
  if (!captureDevice_) {
    DLOG(ERROR) << "Could not open video capture device.";
    return NO;
  }

  // Create the capture input associated with the device. Easy peasy.
  NSError* error = nil;
  captureDeviceInput_ = [AVCaptureDeviceInputGlue
      deviceInputWithDevice:captureDevice_
                      error:&error];
  if (!captureDeviceInput_) {
    captureDevice_ = nil;
    DLOG(ERROR) << "Could not create video capture input: "
                << [[error localizedDescription] UTF8String];
    return NO;
  }
  [captureSession_ addInput:captureDeviceInput_];

  // Create a new data output for video. The data output is configured to
  // discard late frames by default.
  captureVideoDataOutput_.reset(
      [[AVFoundationGlue::AVCaptureVideoDataOutputClass() alloc] init]);
  if (!captureVideoDataOutput_) {
    [captureSession_ removeInput:captureDeviceInput_];
    DLOG(ERROR) << "Could not create video data output.";
    return NO;
  }
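  // Sample buffers are delivered on the default-priority global dispatch
  // queue, so the delegate method may be invoked on varying threads; see
  // -captureOutput:didOutputSampleBuffer:fromConnection: below.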
  [captureVideoDataOutput_
      setSampleBufferDelegate:self
                        queue:dispatch_get_global_queue(
                            DISPATCH_QUEUE_PRIORITY_DEFAULT, 0)];
  [captureSession_ addOutput:captureVideoDataOutput_];
  return YES;
}

- (BOOL)setCaptureHeight:(int)height width:(int)width frameRate:(int)frameRate {
  // Either VideoCaptureDeviceMac::AllocateAndStart() or
  // VideoCaptureDeviceMac::ReceiveFrame() can call this method, depending on
  // the running state; VCDM::ReceiveFrame() calls it to change the aspect
  // ratio.
  DCHECK((![captureSession_ isRunning] &&
      main_thread_checker_.CalledOnValidThread()) ||
      callback_thread_checker_.CalledOnValidThread());

  frameWidth_ = width;
  frameHeight_ = height;
  frameRate_ = frameRate;

  // The capture output has to be configured, despite Mac documentation
  // suggesting that setting the sessionPreset would be enough. The mismatch is
  // probably because most of the AVFoundation docs are written for iOS rather
  // than Mac OS X. AVVideoScalingModeKey() controls letterboxing and aspect
  // ratio preservation when scaling; the mode set here crops and preserves the
  // aspect ratio.
  NSDictionary* videoSettingsDictionary = @{
    (id)kCVPixelBufferWidthKey : @(width),
    (id)kCVPixelBufferHeightKey : @(height),
    (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_422YpCbCr8),
    AVFoundationGlue::AVVideoScalingModeKey() :
        AVFoundationGlue::AVVideoScalingModeResizeAspectFill()
  };
  [captureVideoDataOutput_ setVideoSettings:videoSettingsDictionary];

  CrAVCaptureConnection* captureConnection = [captureVideoDataOutput_
      connectionWithMediaType:AVFoundationGlue::AVMediaTypeVideo()];
  // Check selector existence, related to bugs http://crbug.com/327532 and
  // http://crbug.com/328096.
  if ([captureConnection
           respondsToSelector:@selector(isVideoMinFrameDurationSupported)] &&
      [captureConnection isVideoMinFrameDurationSupported]) {
    [captureConnection setVideoMinFrameDuration:
        CoreMediaGlue::CMTimeMake(1, frameRate)];
  }
  if ([captureConnection
           respondsToSelector:@selector(isVideoMaxFrameDurationSupported)] &&
      [captureConnection isVideoMaxFrameDurationSupported]) {
    [captureConnection setVideoMaxFrameDuration:
        CoreMediaGlue::CMTimeMake(1, frameRate)];
  }
  return YES;
}

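// Registers for AVCaptureSessionRuntimeErrorNotification and starts the
// capture session created in -initWithFrameReceiver:. Returns NO if no
// session exists.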
- (BOOL)startCapture {
  DCHECK(main_thread_checker_.CalledOnValidThread());
  if (!captureSession_) {
    DLOG(ERROR) << "Video capture session not initialized.";
    return NO;
  }
  // Connect the notifications.
  NSNotificationCenter* nc = [NSNotificationCenter defaultCenter];
  [nc addObserver:self
         selector:@selector(onVideoError:)
             name:AVFoundationGlue::AVCaptureSessionRuntimeErrorNotification()
           object:captureSession_];
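  // Note: -startRunning is a blocking call; errors that occur while the
  // session is running are reported via the notification registered above.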
  [captureSession_ startRunning];
  return YES;
}

- (void)stopCapture {
  DCHECK(main_thread_checker_.CalledOnValidThread());
  if ([captureSession_ isRunning])
    [captureSession_ stopRunning];  // Synchronous.
  [[NSNotificationCenter defaultCenter] removeObserver:self];
}

#pragma mark Private methods

// |captureOutput| is called by the capture device to deliver a new frame.
- (void)captureOutput:(CrAVCaptureOutput*)captureOutput
    didOutputSampleBuffer:(CoreMediaGlue::CMSampleBufferRef)sampleBuffer
           fromConnection:(CrAVCaptureConnection*)connection {
  // AVFoundation calls this method from a number of threads, depending, at
  // least, on whether Chrome is in the foreground or the background. Rebind
  // the thread checker to whichever thread is delivering frames right now.
  callback_thread_checker_.DetachFromThread();
  callback_thread_checker_.CalledOnValidThread();
  CVImageBufferRef videoFrame =
      CoreMediaGlue::CMSampleBufferGetImageBuffer(sampleBuffer);
  // Lock the frame and calculate the frame size.
  const int kLockFlags = 0;
  if (CVPixelBufferLockBaseAddress(videoFrame, kLockFlags) ==
          kCVReturnSuccess) {
    void* baseAddress = CVPixelBufferGetBaseAddress(videoFrame);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(videoFrame);
    size_t frameWidth = CVPixelBufferGetWidth(videoFrame);
    size_t frameHeight = CVPixelBufferGetHeight(videoFrame);
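    // |bytesPerRow| * |frameHeight| covers the whole buffer because the
    // requested pixel format (kCVPixelFormatType_422YpCbCr8) is packed and
    // single-plane.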
    size_t frameSize = bytesPerRow * frameHeight;
    UInt8* addressToPass = reinterpret_cast<UInt8*>(baseAddress);

    media::VideoCaptureFormat captureFormat(
        gfx::Size(frameWidth, frameHeight),
        frameRate_,
        media::PIXEL_FORMAT_UYVY);
    base::AutoLock lock(lock_);
    if (frameReceiver_) {
      frameReceiver_->ReceiveFrame(addressToPass, frameSize, captureFormat,
                                   0, 0);
    }
    // Unlock even if no |frameReceiver_| is attached, so the pixel buffer is
    // never left locked.
    CVPixelBufferUnlockBaseAddress(videoFrame, kLockFlags);
  }
}

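// Handles AVCaptureSessionRuntimeErrorNotification: extracts the NSError from
// the notification's userInfo and forwards its description to
// |frameReceiver_|, if one is currently set.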
- (void)onVideoError:(NSNotification*)errorNotification {
  NSError* error = base::mac::ObjCCast<NSError>([[errorNotification userInfo]
      objectForKey:AVFoundationGlue::AVCaptureSessionErrorKey()]);
  NSString* stringError =
      [NSString stringWithFormat:@"%@: %@",
                                 [error localizedDescription],
                                 [error localizedFailureReason]];

  base::AutoLock lock(lock_);
  if (frameReceiver_)
    frameReceiver_->ReceiveError([stringError UTF8String]);
}

@end