2 * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree.
11 #if !defined(__has_feature) || !__has_feature(objc_arc)
12 #error "This file requires ARC support."
15 #import <UIKit/UIKit.h>
17 #import "webrtc/modules/video_capture/ios/device_info_ios_objc.h"
18 #import "webrtc/modules/video_capture/ios/rtc_video_capture_ios_objc.h"
20 #include "webrtc/system_wrappers/interface/trace.h"
22 using namespace webrtc;
23 using namespace webrtc::videocapturemodule;
// Private ("hidden") category: declares the input-switching helper used only
// inside this file. (The category's @end is not visible in this chunk.)
25 @interface RTCVideoCaptureIosObjC (hidden)
26 - (int)changeCaptureInputWithName:(NSString*)captureDeviceName;
// Capture controller state. _captureChanging is guarded by
// _captureChangingCondition (see -signalCaptureChangeEnd and
// -waitForCaptureChangeToFinish).
29 @implementation RTCVideoCaptureIosObjC {
// Raw pointer to the C++ capture module that receives frames; not owned.
// NOTE(review): assumed to outlive this object -- confirm against teardown.
30 webrtc::videocapturemodule::VideoCaptureIos* _owner;
// Capability requested via -startCaptureWithCapability:; its maxFPS is echoed
// back with each delivered frame.
31 webrtc::VideoCaptureCapability _capability;
32 AVCaptureSession* _captureSession;
// Video connection of the current output; used to apply rotation.
34 AVCaptureConnection* _connection;
35 BOOL _captureChanging; // Guarded by _captureChangingCondition.
36 NSCondition* _captureChangingCondition;
// NOTE(review): "_framRotation" looks like a typo for "_frameRotation";
// left as-is because unseen parts of this file may reference the ivar.
39 @synthesize frameRotation = _framRotation;
// Initializer: creates the AVCaptureSession, configures an NV12 video-data
// output, and registers for session runtime errors and status-bar
// orientation changes. Failure branches and the return are elided in this
// chunk, so the exact nil-return paths cannot be confirmed here.
41 - (id)initWithOwner:(VideoCaptureIos*)owner captureId:(int)captureId {
// NOTE(review): '==' compares rather than assigns the result of
// [super init]; this only "works" because NSObject's -init returns self
// unchanged. Conventional form is 'if ((self = [super init]))'.
42 if (self == [super init]) {
44 _captureId = captureId;
45 _captureSession = [[AVCaptureSession alloc] init];
// Keep the capture session from commandeering the app's audio session
// (API exists from the iOS 7 SDK; guarded at compile time and run time).
46 #if defined(__IPHONE_7_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_7_0
47 NSString* version = [[UIDevice currentDevice] systemVersion];
48 if ([version integerValue] >= 7) {
49 _captureSession.usesApplicationAudioSession = NO;
52 _captureChanging = NO;
53 _captureChangingCondition = [[NSCondition alloc] init];
// Allocation-failure guard (branch body elided in this chunk).
55 if (!_captureSession || !_captureChangingCondition) {
59 // create and configure a new output (using callbacks)
60 AVCaptureVideoDataOutput* captureOutput =
61 [[AVCaptureVideoDataOutput alloc] init];
// Request NV12 (420f bi-planar full-range) pixel buffers -- matches the
// kVideoNV12 raw type reported in the capture callback below.
62 NSString* key = (NSString*)kCVPixelBufferPixelFormatTypeKey;
64 NSNumber* val = [NSNumber
65 numberWithUnsignedInt:kCVPixelFormatType_420YpCbCr8BiPlanarFullRange];
66 NSDictionary* videoSettings =
67 [NSDictionary dictionaryWithObject:val forKey:key];
68 captureOutput.videoSettings = videoSettings;
// Attach the output; on failure, report through the WebRTC trace macro
// (remaining WEBRTC_TRACE arguments are elided in this chunk).
71 if ([_captureSession canAddOutput:captureOutput]) {
72 [_captureSession addOutput:captureOutput];
74 WEBRTC_TRACE(kTraceError,
77 "%s:%s:%d Could not add output to AVCaptureSession ",
// Observe AVFoundation runtime errors from this session and the custom
// "StatusBarOrientationDidChange" notification used to re-orient video.
83 NSNotificationCenter* notify = [NSNotificationCenter defaultCenter];
84 [notify addObserver:self
85 selector:@selector(onVideoError:)
86 name:AVCaptureSessionRuntimeErrorNotification
87 object:_captureSession];
88 [notify addObserver:self
89 selector:@selector(statusBarOrientationDidChange:)
90 name:@"StatusBarOrientationDidChange"
// Routes sample-buffer callbacks from the current output to self on a
// default-priority global queue. (The receiver line of the message send --
// presumably [[self currentOutput] ...] -- is elided in this chunk.)
97 - (void)directOutputToSelf {
99 setSampleBufferDelegate:self
100 queue:dispatch_get_global_queue(
101 DISPATCH_QUEUE_PRIORITY_DEFAULT, 0)];
// Detaches the sample-buffer delegate so no further frames are delivered.
104 - (void)directOutputToNil {
105 [[self currentOutput] setSampleBufferDelegate:nil queue:NULL];
// Re-applies the capture connection's video orientation whenever the app
// posts the orientation notification registered in the initializer.
108 - (void)statusBarOrientationDidChange:(NSNotification*)notification {
109 [self setRelativeVideoOrientation];
// Unregisters all notification observers added in the initializer.
// NOTE(review): this line appears to be the -dealloc body; the enclosing
// method header is elided in this chunk -- confirm against the full file.
113 [[NSNotificationCenter defaultCenter] removeObserver:self];
// Selects the capture device with the given unique ID, after waiting for any
// in-flight capture change. Short-circuits (branch body elided) when the
// requested camera is already the session's input.
116 - (BOOL)setCaptureDeviceByUniqueId:(NSString*)uniqueId {
117 [self waitForCaptureChangeToFinish];
118 // check to see if the camera is already set
119 if (_captureSession) {
120 NSArray* currentInputs = [NSArray arrayWithArray:[_captureSession inputs]];
121 if ([currentInputs count] > 0) {
122 AVCaptureDeviceInput* currentInput = [currentInputs objectAtIndex:0];
// NOTE(review): compares the unique ID against the device's localizedName
// rather than its uniqueID property -- verify this is intentional.
123 if ([uniqueId isEqualToString:[currentInput.device localizedName]]) {
129 return [self changeCaptureInputByUniqueId:uniqueId];
// Validates the requested capability against session/preset limits, stores
// it, redirects frame callbacks to self, and starts capture asynchronously
// on a global queue. The early-return failure bodies are elided in this
// chunk, so the exact return values on rejection cannot be confirmed here.
132 - (BOOL)startCaptureWithCapability:(const VideoCaptureCapability&)capability {
133 [self waitForCaptureChangeToFinish];
134 if (!_captureSession) {
138 // check limits of the resolution
// Reject out-of-range frame rates (hard cap of 60 fps).
139 if (capability.maxFPS < 0 || capability.maxFPS > 60) {
// Reject resolutions larger than the best preset this session supports,
// probing from 1080p downwards.
143 if ([_captureSession canSetSessionPreset:AVCaptureSessionPreset1920x1080]) {
144 if (capability.width > 1920 || capability.height > 1080) {
147 } else if ([_captureSession
148 canSetSessionPreset:AVCaptureSessionPreset1280x720]) {
149 if (capability.width > 1280 || capability.height > 720) {
152 } else if ([_captureSession
153 canSetSessionPreset:AVCaptureSessionPreset640x480]) {
154 if (capability.width > 640 || capability.height > 480) {
157 } else if ([_captureSession
158 canSetSessionPreset:AVCaptureSessionPreset352x288]) {
159 if (capability.width > 352 || capability.height > 288) {
162 } else if (capability.width < 0 || capability.height < 0) {
166 _capability = capability;
168 AVCaptureVideoDataOutput* currentOutput = [self currentOutput];
172 [self directOutputToSelf];
// Mark the change in progress (cleared via -signalCaptureChangeEnd when the
// background start finishes) and kick off the session asynchronously.
174 _captureChanging = YES;
176 dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0),
177 ^(void) { [self startCaptureInBackgroundWithOutput:currentOutput]; });
// Returns the session's first (and, as configured here, only) output, or
// nil when the session has none (-firstObject is nil-safe).
181 - (AVCaptureVideoDataOutput*)currentOutput {
182 return [[_captureSession outputs] firstObject];
// Background half of -startCaptureWithCapability:: picks the smallest preset
// that covers the stored capability, pins the device's frame rate, wires up
// the video connection/orientation, starts the session, then signals the
// capture-change condition.
185 - (void)startCaptureInBackgroundWithOutput:
186 (AVCaptureVideoDataOutput*)currentOutput {
// Default to the low preset; upgrade to the smallest preset that fits the
// requested dimensions. (The 1080p assignment's first line is elided.)
187 NSString* captureQuality =
188 [NSString stringWithString:AVCaptureSessionPresetLow];
189 if (_capability.width >= 1920 || _capability.height >= 1080) {
191 [NSString stringWithString:AVCaptureSessionPreset1920x1080];
192 } else if (_capability.width >= 1280 || _capability.height >= 720) {
193 captureQuality = [NSString stringWithString:AVCaptureSessionPreset1280x720];
194 } else if (_capability.width >= 640 || _capability.height >= 480) {
195 captureQuality = [NSString stringWithString:AVCaptureSessionPreset640x480];
196 } else if (_capability.width >= 352 || _capability.height >= 288) {
197 captureQuality = [NSString stringWithString:AVCaptureSessionPreset352x288];
200 // begin configuration for the AVCaptureSession
201 [_captureSession beginConfiguration];
203 // picture resolution
204 [_captureSession setSessionPreset:captureQuality];
206 // take care of capture framerate now
207 NSArray* sessionInputs = _captureSession.inputs;
208 AVCaptureDeviceInput* deviceInput = [sessionInputs count] > 0 ?
209 sessionInputs[0] : nil;
// Messaging nil is a no-op, so a missing input safely yields nil here.
210 AVCaptureDevice* inputDevice = deviceInput.device;
212 AVCaptureDeviceFormat* activeFormat = inputDevice.activeFormat;
213 NSArray* supportedRanges = activeFormat.videoSupportedFrameRateRanges;
// Seed with the first supported range; the loop body that reassigns
// targetRange is elided in this chunk.
214 AVFrameRateRange* targetRange = [supportedRanges count] > 0 ?
215 supportedRanges[0] : nil;
216 // Find the largest supported framerate less than capability maxFPS.
217 for (AVFrameRateRange* range in supportedRanges) {
218 if (range.maxFrameRate <= _capability.maxFPS &&
219 targetRange.maxFrameRate <= range.maxFrameRate) {
// Pin min and max frame duration to the same value, i.e. request a fixed
// frame rate (the chosen range's fastest rate). Requires the device
// configuration lock; failure to lock silently skips rate pinning.
223 if (targetRange && [inputDevice lockForConfiguration:NULL]) {
224 inputDevice.activeVideoMinFrameDuration = targetRange.minFrameDuration;
225 inputDevice.activeVideoMaxFrameDuration = targetRange.minFrameDuration;
226 [inputDevice unlockForConfiguration];
// Cache the video connection so orientation changes can be applied later.
230 _connection = [currentOutput connectionWithMediaType:AVMediaTypeVideo];
231 [self setRelativeVideoOrientation];
233 // finished configuring, commit settings to AVCaptureSession.
234 [_captureSession commitConfiguration];
236 [_captureSession startRunning];
// Unblock any thread waiting in -waitForCaptureChangeToFinish.
237 [self signalCaptureChangeEnd];
// Maps the app's status-bar orientation onto the capture connection's video
// orientation, when the connection supports it. (The early return after the
// guard and the per-case break statements are elided in this chunk.)
240 - (void)setRelativeVideoOrientation {
241 if (!_connection.supportsVideoOrientation)
243 switch ([UIApplication sharedApplication].statusBarOrientation) {
244 case UIInterfaceOrientationPortrait:
// UIInterfaceOrientationUnknown exists only from the iOS 8 SDK on; when
// available it falls through to the portrait handling.
245 #if defined(__IPHONE_8_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_8_0
246 case UIInterfaceOrientationUnknown:
248 _connection.videoOrientation = AVCaptureVideoOrientationPortrait;
250 case UIInterfaceOrientationPortraitUpsideDown:
251 _connection.videoOrientation =
252 AVCaptureVideoOrientationPortraitUpsideDown;
254 case UIInterfaceOrientationLandscapeLeft:
255 _connection.videoOrientation = AVCaptureVideoOrientationLandscapeLeft;
257 case UIInterfaceOrientationLandscapeRight:
258 _connection.videoOrientation = AVCaptureVideoOrientationLandscapeRight;
// Handler for AVCaptureSessionRuntimeErrorNotification: currently only logs
// the notification and emits a WebRTC error trace (trailing WEBRTC_TRACE
// arguments are elided in this chunk); no recovery is attempted.
263 - (void)onVideoError:(NSNotification*)notification {
264 NSLog(@"onVideoError: %@", notification);
265 // TODO(sjlee): make the specific error handling with this notification.
266 WEBRTC_TRACE(kTraceError,
269 "%s:%s:%d [AVCaptureSession startRunning] error.",
// Stops frame delivery immediately (delegate detached synchronously), then
// stops the session asynchronously on a global queue. The no-session branch
// body is elided in this chunk.
275 - (BOOL)stopCapture {
276 [self waitForCaptureChangeToFinish];
277 [self directOutputToNil];
279 if (!_captureSession) {
// Mark the change in progress; cleared by -stopCaptureInBackground.
283 _captureChanging = YES;
284 dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0),
285 ^(void) { [self stopCaptureInBackground]; });
// Background half of -stopCapture: halts the session, then unblocks any
// thread waiting in -waitForCaptureChangeToFinish.
289 - (void)stopCaptureInBackground {
290 [_captureSession stopRunning];
291 [self signalCaptureChangeEnd];
// Swaps the session's (single) input for the device matching uniqueId.
// Returns YES only when the new input was successfully added. Early-return
// failure bodies and the deviceInputWithDevice: error: argument line are
// elided in this chunk.
294 - (BOOL)changeCaptureInputByUniqueId:(NSString*)uniqueId {
295 [self waitForCaptureChangeToFinish];
296 NSArray* currentInputs = [_captureSession inputs];
297 // remove current input
298 if ([currentInputs count] > 0) {
299 AVCaptureInput* currentInput =
300 (AVCaptureInput*)[currentInputs objectAtIndex:0];
302 [_captureSession removeInput:currentInput];
305 // Look for input device with the name requested (as our input param)
306 // get list of available capture devices
307 int captureDeviceCount = [DeviceInfoIosObjC captureDeviceCount];
308 if (captureDeviceCount <= 0) {
312 AVCaptureDevice* captureDevice =
313 [DeviceInfoIosObjC captureDeviceForUniqueId:uniqueId];
315 if (!captureDevice) {
319 // now create capture session input out of AVCaptureDevice
320 NSError* deviceError = nil;
321 AVCaptureDeviceInput* newCaptureInput =
322 [AVCaptureDeviceInput deviceInputWithDevice:captureDevice
// Correctly checks the nil return (not the error pointer) for failure, and
// surfaces the NSError's description through the trace macro.
325 if (!newCaptureInput) {
326 const char* errorMessage = [[deviceError localizedDescription] UTF8String];
328 WEBRTC_TRACE(kTraceError,
331 "%s:%s:%d deviceInputWithDevice error:%s",
340 // try to add our new capture device to the capture session
341 [_captureSession beginConfiguration];
343 BOOL addedCaptureInput = NO;
344 if ([_captureSession canAddInput:newCaptureInput]) {
345 [_captureSession addInput:newCaptureInput];
346 addedCaptureInput = YES;
348 addedCaptureInput = NO;
351 [_captureSession commitConfiguration];
353 return addedCaptureInput;
// AVCaptureVideoDataOutputSampleBufferDelegate callback: locks the NV12
// pixel buffer and forwards it, with its actual dimensions, to the C++
// owner. The early-return body on lock failure is elided in this chunk.
356 - (void)captureOutput:(AVCaptureOutput*)captureOutput
357 didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
358 fromConnection:(AVCaptureConnection*)connection {
359 const int kFlags = 0;
360 CVImageBufferRef videoFrame = CMSampleBufferGetImageBuffer(sampleBuffer);
// The base address must be locked before the CPU may read plane data.
362 if (CVPixelBufferLockBaseAddress(videoFrame, kFlags) != kCVReturnSuccess) {
366 const int kYPlaneIndex = 0;
367 const int kUVPlaneIndex = 1;
// Passing the Y-plane base with a size covering both planes assumes the
// Y and UV planes are contiguous in memory -- TODO confirm for this pixel
// format. (The 'frameSize' declaration line itself is elided here.)
369 uint8_t* baseAddress =
370 (uint8_t*)CVPixelBufferGetBaseAddressOfPlane(videoFrame, kYPlaneIndex);
371 int yPlaneBytesPerRow =
372 CVPixelBufferGetBytesPerRowOfPlane(videoFrame, kYPlaneIndex);
373 int yPlaneHeight = CVPixelBufferGetHeightOfPlane(videoFrame, kYPlaneIndex);
374 int uvPlaneBytesPerRow =
375 CVPixelBufferGetBytesPerRowOfPlane(videoFrame, kUVPlaneIndex);
376 int uvPlaneHeight = CVPixelBufferGetHeightOfPlane(videoFrame, kUVPlaneIndex);
378 yPlaneBytesPerRow * yPlaneHeight + uvPlaneBytesPerRow * uvPlaneHeight;
// Report the buffer's real dimensions (they may differ from the requested
// capability after preset selection); only maxFPS is echoed from the request.
380 VideoCaptureCapability tempCaptureCapability;
381 tempCaptureCapability.width = CVPixelBufferGetWidth(videoFrame);
382 tempCaptureCapability.height = CVPixelBufferGetHeight(videoFrame);
383 tempCaptureCapability.maxFPS = _capability.maxFPS;
384 tempCaptureCapability.rawType = kVideoNV12;
386 _owner->IncomingFrame(baseAddress, frameSize, tempCaptureCapability, 0);
// Balance the lock taken above.
388 CVPixelBufferUnlockBaseAddress(videoFrame, kFlags);
// Clears the capture-changing flag under the condition lock and wakes one
// thread blocked in -waitForCaptureChangeToFinish.
391 - (void)signalCaptureChangeEnd {
392 [_captureChangingCondition lock];
393 _captureChanging = NO;
394 [_captureChangingCondition signal];
395 [_captureChangingCondition unlock];
// Blocks the calling thread until any in-flight start/stop/switch completes
// (i.e. until _captureChanging is NO). The while loop re-checks the
// predicate, guarding against spurious wakeups.
398 - (void)waitForCaptureChangeToFinish {
399 [_captureChangingCondition lock];
400 while (_captureChanging) {
401 [_captureChangingCondition wait];
403 [_captureChangingCondition unlock];