Upstream version 10.39.225.0
[platform/framework/web/crosswalk.git] / src / third_party / webrtc / modules / video_capture / ios / rtc_video_capture_ios_objc.mm
1 /*
2  *  Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
3  *
4  *  Use of this source code is governed by a BSD-style license
5  *  that can be found in the LICENSE file in the root of the source
6  *  tree. An additional intellectual property rights grant can be found
7  *  in the file PATENTS.  All contributing project authors may
8  *  be found in the AUTHORS file in the root of the source tree.
9  */
10
11 #if !defined(__has_feature) || !__has_feature(objc_arc)
12 #error "This file requires ARC support."
13 #endif
14
15 #import <UIKit/UIKit.h>
16
17 #import "webrtc/modules/video_capture/ios/device_info_ios_objc.h"
18 #import "webrtc/modules/video_capture/ios/rtc_video_capture_ios_objc.h"
19
20 #include "webrtc/system_wrappers/interface/trace.h"
21
22 using namespace webrtc;
23 using namespace webrtc::videocapturemodule;
24
// Private category for internal helpers.
// Fix: the previous declaration (-changeCaptureInputWithName:, returning int)
// was stale — no such method is implemented. Declare the method the
// implementation actually defines.
@interface RTCVideoCaptureIosObjC (hidden)
- (BOOL)changeCaptureInputByUniqueId:(NSString*)uniqueId;
@end
28
@implementation RTCVideoCaptureIosObjC {
  // Receives captured frames; not owned, must outlive this object.
  webrtc::videocapturemodule::VideoCaptureIos* _owner;
  // Capability requested by the most recent -startCaptureWithCapability:.
  webrtc::VideoCaptureCapability _capability;
  AVCaptureSession* _captureSession;
  int _captureId;  // Used only for WEBRTC_TRACE logging.
  AVCaptureConnection* _connection;
  BOOL _captureChanging;  // Guarded by _captureChangingCondition.
  NSCondition* _captureChangingCondition;
}

// Fix: the backing ivar was misspelled "_framRotation". It is referenced only
// through the synthesized accessors, so renaming it is safe.
@synthesize frameRotation = _frameRotation;
40
// Designated initializer. |owner| receives the captured frames and must
// outlive this object; |captureId| is used only for trace logging.
// Returns nil if the AVCaptureSession or the condition variable cannot be
// created.
- (id)initWithOwner:(VideoCaptureIos*)owner captureId:(int)captureId {
  // Fix: the original used "self == [super init]", which *compared* the
  // result of -init instead of assigning it. It only worked because -init
  // normally returns the same pointer. Use the standard assign-and-test
  // init idiom.
  if ((self = [super init])) {
    _owner = owner;
    _captureId = captureId;
    _captureSession = [[AVCaptureSession alloc] init];
#if defined(__IPHONE_7_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_7_0
    // On iOS 7+, keep the capture session off the shared application audio
    // session so video capture does not disturb audio routing.
    NSString* version = [[UIDevice currentDevice] systemVersion];
    if ([version integerValue] >= 7) {
      _captureSession.usesApplicationAudioSession = NO;
    }
#endif
    _captureChanging = NO;
    _captureChangingCondition = [[NSCondition alloc] init];

    if (!_captureSession || !_captureChangingCondition) {
      return nil;
    }

    // Create and configure a new output (frames are delivered through the
    // sample-buffer delegate callback), requesting NV12 pixel buffers.
    AVCaptureVideoDataOutput* captureOutput =
        [[AVCaptureVideoDataOutput alloc] init];
    NSString* key = (NSString*)kCVPixelBufferPixelFormatTypeKey;

    NSNumber* val = [NSNumber
        numberWithUnsignedInt:kCVPixelFormatType_420YpCbCr8BiPlanarFullRange];
    NSDictionary* videoSettings =
        [NSDictionary dictionaryWithObject:val forKey:key];
    captureOutput.videoSettings = videoSettings;

    // add new output
    if ([_captureSession canAddOutput:captureOutput]) {
      [_captureSession addOutput:captureOutput];
    } else {
      WEBRTC_TRACE(kTraceError,
                   kTraceVideoCapture,
                   _captureId,
                   "%s:%s:%d Could not add output to AVCaptureSession ",
                   __FILE__,
                   __FUNCTION__,
                   __LINE__);
    }

    // Observe runtime capture errors and status-bar rotation so the video
    // orientation can track the UI. Observers are removed in -dealloc.
    NSNotificationCenter* notify = [NSNotificationCenter defaultCenter];
    [notify addObserver:self
               selector:@selector(onVideoError:)
                   name:AVCaptureSessionRuntimeErrorNotification
                 object:_captureSession];
    [notify addObserver:self
               selector:@selector(statusBarOrientationDidChange:)
                   name:@"StatusBarOrientationDidChange"
                 object:nil];
  }

  return self;
}
96
// Route sample-buffer callbacks from the session's current output to this
// object, delivered on the default-priority global queue.
- (void)directOutputToSelf {
  dispatch_queue_t callbackQueue =
      dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0);
  AVCaptureVideoDataOutput* output = [self currentOutput];
  [output setSampleBufferDelegate:self queue:callbackQueue];
}
103
// Stop frame delivery by clearing the sample-buffer delegate on the
// session's current output. Safe when there is no output (nil no-op).
- (void)directOutputToNil {
  [[self currentOutput] setSampleBufferDelegate:nil queue:NULL];
}
107
// Notification handler for "StatusBarOrientationDidChange": re-derives the
// capture connection's video orientation from the UI orientation.
- (void)statusBarOrientationDidChange:(NSNotification*)notification {
  [self setRelativeVideoOrientation];
}
111
// ARC handles memory; only the notification observers registered in init
// need explicit removal here.
- (void)dealloc {
  [[NSNotificationCenter defaultCenter] removeObserver:self];
}
115
// Selects the capture device identified by |uniqueId|, returning YES on
// success. If that device is already the session's active input, this is a
// fast no-op.
- (BOOL)setCaptureDeviceByUniqueId:(NSString*)uniqueId {
  [self waitForCaptureChangeToFinish];
  // Check whether the requested camera is already the active input.
  // Fix: the previous code compared |uniqueId| against the device's
  // localizedName, so this early return effectively never matched and the
  // input was always torn down and rebuilt. Compare against uniqueID, which
  // is what -changeCaptureInputByUniqueId: resolves devices by.
  if (_captureSession) {
    NSArray* currentInputs = [NSArray arrayWithArray:[_captureSession inputs]];
    if ([currentInputs count] > 0) {
      AVCaptureDeviceInput* currentInput = [currentInputs objectAtIndex:0];
      if ([uniqueId isEqualToString:[currentInput.device uniqueID]]) {
        return YES;
      }
    }
  }

  return [self changeCaptureInputByUniqueId:uniqueId];
}
131
// Validates |capability| against this device's best supported preset, then
// starts the session asynchronously on a global queue. Returns NO when the
// session is missing or the request exceeds the device's limits; the actual
// preset is chosen later in -startCaptureInBackgroundWithOutput:.
- (BOOL)startCaptureWithCapability:(const VideoCaptureCapability&)capability {
  [self waitForCaptureChangeToFinish];
  if (!_captureSession) {
    return NO;
  }

  // check limits of the resolution
  if (capability.maxFPS < 0 || capability.maxFPS > 60) {
    return NO;
  }

  // Each branch tests against the *largest* preset the device supports and
  // rejects requests above that ceiling; smaller requests fall through.
  if ([_captureSession canSetSessionPreset:AVCaptureSessionPreset1920x1080]) {
    if (capability.width > 1920 || capability.height > 1080) {
      return NO;
    }
  } else if ([_captureSession
                 canSetSessionPreset:AVCaptureSessionPreset1280x720]) {
    if (capability.width > 1280 || capability.height > 720) {
      return NO;
    }
  } else if ([_captureSession
                 canSetSessionPreset:AVCaptureSessionPreset640x480]) {
    if (capability.width > 640 || capability.height > 480) {
      return NO;
    }
  } else if ([_captureSession
                 canSetSessionPreset:AVCaptureSessionPreset352x288]) {
    if (capability.width > 352 || capability.height > 288) {
      return NO;
    }
  } else if (capability.width < 0 || capability.height < 0) {
    return NO;
  }

  _capability = capability;

  AVCaptureVideoDataOutput* currentOutput = [self currentOutput];
  if (!currentOutput)
    return NO;

  [self directOutputToSelf];

  // Mark the transition in progress; -signalCaptureChangeEnd clears it when
  // the background start completes. Callers block on
  // -waitForCaptureChangeToFinish in the meantime.
  _captureChanging = YES;
  dispatch_async(
      dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0),
      ^(void) { [self startCaptureInBackgroundWithOutput:currentOutput]; });
  return YES;
}
180
// The first (and, per init, only) data output attached to the session, or
// nil if none has been added.
- (AVCaptureVideoDataOutput*)currentOutput {
  NSArray* sessionOutputs = _captureSession.outputs;
  return sessionOutputs.firstObject;
}
184
// Runs on a global queue: applies the session preset and frame rate derived
// from _capability, wires up the video connection, and starts the session.
// Signals -signalCaptureChangeEnd when done.
- (void)startCaptureInBackgroundWithOutput:
            (AVCaptureVideoDataOutput*)currentOutput {
  // Pick the smallest preset that covers the requested resolution.
  // Fix (idiom): the preset identifiers are constants; the previous code
  // wrapped each in a redundant [NSString stringWithString:] copy.
  NSString* captureQuality = AVCaptureSessionPresetLow;
  if (_capability.width >= 1920 || _capability.height >= 1080) {
    captureQuality = AVCaptureSessionPreset1920x1080;
  } else if (_capability.width >= 1280 || _capability.height >= 720) {
    captureQuality = AVCaptureSessionPreset1280x720;
  } else if (_capability.width >= 640 || _capability.height >= 480) {
    captureQuality = AVCaptureSessionPreset640x480;
  } else if (_capability.width >= 352 || _capability.height >= 288) {
    captureQuality = AVCaptureSessionPreset352x288;
  }

  // begin configuration for the AVCaptureSession
  [_captureSession beginConfiguration];

  // picture resolution
  [_captureSession setSessionPreset:captureQuality];

  // take care of capture framerate now
  NSArray* sessionInputs = _captureSession.inputs;
  AVCaptureDeviceInput* deviceInput = [sessionInputs count] > 0 ?
      sessionInputs[0] : nil;
  AVCaptureDevice* inputDevice = deviceInput.device;
  if (inputDevice) {
    AVCaptureDeviceFormat* activeFormat = inputDevice.activeFormat;
    NSArray* supportedRanges = activeFormat.videoSupportedFrameRateRanges;
    AVFrameRateRange* targetRange = [supportedRanges count] > 0 ?
        supportedRanges[0] : nil;
    // Find the largest supported framerate less than capability maxFPS.
    for (AVFrameRateRange* range in supportedRanges) {
      if (range.maxFrameRate <= _capability.maxFPS &&
          targetRange.maxFrameRate <= range.maxFrameRate) {
        targetRange = range;
      }
    }
    // Setting both min and max duration to the range's minFrameDuration pins
    // the device to that range's highest frame rate (a fixed-rate lock, not
    // a typo).
    if (targetRange && [inputDevice lockForConfiguration:NULL]) {
      inputDevice.activeVideoMinFrameDuration = targetRange.minFrameDuration;
      inputDevice.activeVideoMaxFrameDuration = targetRange.minFrameDuration;
      [inputDevice unlockForConfiguration];
    }
  }

  _connection = [currentOutput connectionWithMediaType:AVMediaTypeVideo];
  [self setRelativeVideoOrientation];

  // finished configuring, commit settings to AVCaptureSession.
  [_captureSession commitConfiguration];

  [_captureSession startRunning];
  [self signalCaptureChangeEnd];
}
239
// Mirrors the current status-bar orientation onto the capture connection so
// delivered frames are upright. No-op if the connection does not support
// orientation changes. No default case: all orientations visible to the
// compiled SDK are handled explicitly.
- (void)setRelativeVideoOrientation {
  if (!_connection.supportsVideoOrientation)
    return;
  switch ([UIApplication sharedApplication].statusBarOrientation) {
    case UIInterfaceOrientationPortrait:
#if defined(__IPHONE_8_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_8_0
    // iOS 8 SDK adds "Unknown"; treat it like portrait.
    case UIInterfaceOrientationUnknown:
#endif
      _connection.videoOrientation = AVCaptureVideoOrientationPortrait;
      break;
    case UIInterfaceOrientationPortraitUpsideDown:
      _connection.videoOrientation =
          AVCaptureVideoOrientationPortraitUpsideDown;
      break;
    case UIInterfaceOrientationLandscapeLeft:
      _connection.videoOrientation = AVCaptureVideoOrientationLandscapeLeft;
      break;
    case UIInterfaceOrientationLandscapeRight:
      _connection.videoOrientation = AVCaptureVideoOrientationLandscapeRight;
      break;
  }
}
262
// Handler for AVCaptureSessionRuntimeErrorNotification: currently only logs
// and traces the error.
- (void)onVideoError:(NSNotification*)notification {
  NSLog(@"onVideoError: %@", notification);
  // TODO(sjlee): make the specific error handling with this notification.
  WEBRTC_TRACE(kTraceError,
               kTraceVideoCapture,
               _captureId,
               "%s:%s:%d [AVCaptureSession startRunning] error.",
               __FILE__,
               __FUNCTION__,
               __LINE__);
}
274
// Detaches frame delivery and stops the session asynchronously on a global
// queue. Returns NO only when there is no capture session.
- (BOOL)stopCapture {
  [self waitForCaptureChangeToFinish];
  // Clearing the delegate before the nil-session check is safe: with no
  // session, -currentOutput is nil and the message is a no-op.
  [self directOutputToNil];

  if (!_captureSession) {
    return NO;
  }

  // Mark the transition in progress; cleared by -signalCaptureChangeEnd
  // when the background stop completes.
  _captureChanging = YES;
  dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0),
                 ^(void) { [self stopCaptureInBackground]; });
  return YES;
}
288
// Runs on a global queue: stops the session and wakes any thread blocked in
// -waitForCaptureChangeToFinish.
- (void)stopCaptureInBackground {
  [_captureSession stopRunning];
  [self signalCaptureChangeEnd];
}
293
// Replaces the session's current input with the device matching |uniqueId|.
// Returns YES if the new input was installed.
// Fix (robustness): the previous version removed the existing input *before*
// validating the replacement, so a failed device lookup or input creation
// left the session with no input at all. The old input is now removed only
// once the new one exists, and restored if the session refuses the new one.
- (BOOL)changeCaptureInputByUniqueId:(NSString*)uniqueId {
  [self waitForCaptureChangeToFinish];

  // Look for input device with the name requested (as our input param)
  // get list of available capture devices
  int captureDeviceCount = [DeviceInfoIosObjC captureDeviceCount];
  if (captureDeviceCount <= 0) {
    return NO;
  }

  AVCaptureDevice* captureDevice =
      [DeviceInfoIosObjC captureDeviceForUniqueId:uniqueId];

  if (!captureDevice) {
    return NO;
  }

  // now create capture session input out of AVCaptureDevice
  NSError* deviceError = nil;
  AVCaptureDeviceInput* newCaptureInput =
      [AVCaptureDeviceInput deviceInputWithDevice:captureDevice
                                            error:&deviceError];

  if (!newCaptureInput) {
    const char* errorMessage = [[deviceError localizedDescription] UTF8String];

    WEBRTC_TRACE(kTraceError,
                 kTraceVideoCapture,
                 _captureId,
                 "%s:%s:%d deviceInputWithDevice error:%s",
                 __FILE__,
                 __FUNCTION__,
                 __LINE__,
                 errorMessage);

    return NO;
  }

  // Swap inputs inside one configuration transaction.
  [_captureSession beginConfiguration];

  AVCaptureInput* currentInput =
      (AVCaptureInput*)[[_captureSession inputs] firstObject];
  if (currentInput) {
    [_captureSession removeInput:currentInput];
  }

  BOOL addedCaptureInput = NO;
  if ([_captureSession canAddInput:newCaptureInput]) {
    [_captureSession addInput:newCaptureInput];
    addedCaptureInput = YES;
  } else if (currentInput && [_captureSession canAddInput:currentInput]) {
    // New input rejected: put the previous one back so the session keeps
    // producing frames.
    [_captureSession addInput:currentInput];
  }

  [_captureSession commitConfiguration];

  return addedCaptureInput;
}
355
// AVCaptureVideoDataOutputSampleBufferDelegate callback (runs on the global
// queue set in -directOutputToSelf): forwards each captured NV12 frame to
// the owning VideoCaptureIos module.
- (void)captureOutput:(AVCaptureOutput*)captureOutput
    didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
           fromConnection:(AVCaptureConnection*)connection {
  const int kFlags = 0;
  CVImageBufferRef videoFrame = CMSampleBufferGetImageBuffer(sampleBuffer);

  // The base address must be locked before reading pixel data; drop the
  // frame if locking fails.
  if (CVPixelBufferLockBaseAddress(videoFrame, kFlags) != kCVReturnSuccess) {
    return;
  }

  const int kYPlaneIndex = 0;
  const int kUVPlaneIndex = 1;

  // NOTE(review): the frame is handed to IncomingFrame as one contiguous
  // buffer starting at the Y plane, sized to cover both planes (including
  // row padding). This assumes the UV plane immediately follows the Y plane
  // in memory for the NV12 buffers CoreVideo produces here — confirm before
  // changing pixel format or output settings.
  uint8_t* baseAddress =
      (uint8_t*)CVPixelBufferGetBaseAddressOfPlane(videoFrame, kYPlaneIndex);
  int yPlaneBytesPerRow =
      CVPixelBufferGetBytesPerRowOfPlane(videoFrame, kYPlaneIndex);
  int yPlaneHeight = CVPixelBufferGetHeightOfPlane(videoFrame, kYPlaneIndex);
  int uvPlaneBytesPerRow =
      CVPixelBufferGetBytesPerRowOfPlane(videoFrame, kUVPlaneIndex);
  int uvPlaneHeight = CVPixelBufferGetHeightOfPlane(videoFrame, kUVPlaneIndex);
  int frameSize =
      yPlaneBytesPerRow * yPlaneHeight + uvPlaneBytesPerRow * uvPlaneHeight;

  // Report the actual delivered size; maxFPS carries over from the
  // requested capability.
  VideoCaptureCapability tempCaptureCapability;
  tempCaptureCapability.width = CVPixelBufferGetWidth(videoFrame);
  tempCaptureCapability.height = CVPixelBufferGetHeight(videoFrame);
  tempCaptureCapability.maxFPS = _capability.maxFPS;
  tempCaptureCapability.rawType = kVideoNV12;

  _owner->IncomingFrame(baseAddress, frameSize, tempCaptureCapability, 0);

  CVPixelBufferUnlockBaseAddress(videoFrame, kFlags);
}
390
// Marks the in-flight start/stop transition as finished and wakes one thread
// blocked in -waitForCaptureChangeToFinish. _captureChanging is written only
// while holding the condition lock.
- (void)signalCaptureChangeEnd {
  [_captureChangingCondition lock];
  _captureChanging = NO;
  [_captureChangingCondition signal];
  [_captureChangingCondition unlock];
}
397
// Blocks the caller until any in-flight start/stop transition completes.
// The while-loop (rather than a single if) guards against spurious wakeups,
// as required by the NSCondition contract.
- (void)waitForCaptureChangeToFinish {
  [_captureChangingCondition lock];
  while (_captureChanging) {
    [_captureChangingCondition wait];
  }
  [_captureChangingCondition unlock];
}
@end