{
AVCaptureSession* captureSession;
AVCaptureConnection* videoCaptureConnection;
- AVCaptureConnection* audioCaptureConnection;
AVCaptureVideoPreviewLayer *captureVideoPreviewLayer;
UIDeviceOrientation currentDeviceOrientation;
@property (nonatomic, retain) AVCaptureSession* captureSession;
@property (nonatomic, retain) AVCaptureConnection* videoCaptureConnection;
-@property (nonatomic, retain) AVCaptureConnection* audioCaptureConnection;
@property (nonatomic, readonly) BOOL running;
@property (nonatomic, readonly) BOOL captureSessionLoaded;
#ifdef __cplusplus
// delegate method for processing image frames
- (void)processImage:(cv::Mat&)image;
-
#endif
@end
-@interface CvVideoCamera : CvAbstractCamera<AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureFileOutputRecordingDelegate>
+@interface CvVideoCamera : CvAbstractCamera<AVCaptureVideoDataOutputSampleBufferDelegate>
{
AVCaptureVideoDataOutput *videoDataOutput;
AVAssetWriterInputPixelBufferAdaptor* recordPixelBufferAdaptor;
AVAssetWriter* recordAssetWriter;
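+ // presentation timestamp of the most recently captured frame; anchors the asset writer session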
+ CMTime lastSampleTime;
+
}
@property (nonatomic, assign) id<CvVideoCameraDelegate> delegate;
@property (nonatomic, retain) AVAssetWriterInput* recordAssetWriterInput;
@property (nonatomic, retain) AVAssetWriterInputPixelBufferAdaptor* recordPixelBufferAdaptor;
@property (nonatomic, retain) AVAssetWriter* recordAssetWriter;
-@property (nonatomic, readonly) int64_t timestampMs;
- (void)adjustLayoutToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation;
- (void)layoutPreviewLayer;
#pragma mark - Private Interface
-@interface CvAbstractCamera () {
- AVCaptureAudioDataOutput *audioOut;
-}
+@interface CvAbstractCamera ()
@property (nonatomic, retain) AVCaptureVideoPreviewLayer* captureVideoPreviewLayer;
@synthesize captureSession;
@synthesize captureVideoPreviewLayer;
@synthesize videoCaptureConnection;
-@synthesize audioCaptureConnection;
@synthesize running;
@synthesize captureSessionLoaded;
@synthesize useAVCaptureVideoPreviewLayer;
self.captureSession = nil;
self.captureVideoPreviewLayer = nil;
self.videoCaptureConnection = nil;
- self.audioCaptureConnection = nil;
captureSessionLoaded = NO;
}
} else {
NSLog(@"[Camera] Error: could not set session preset");
}
-
}
-#if 0
-- (void)sampleCaptureSessionSetup {
-
- if ( _captureSession ) {
- return;
- }
-
- _captureSession = [[AVCaptureSession alloc] init];
-
- [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(captureSessionNotification:) name:nil object:_captureSession];
- _applicationWillEnterForegroundNotificationObserver = [[NSNotificationCenter defaultCenter] addObserverForName:UIApplicationWillEnterForegroundNotification object:[UIApplication sharedApplication] queue:nil usingBlock:^(NSNotification *note) {
- // Retain self while the capture session is alive by referencing it in this observer block which is tied to the session lifetime
- // Client must stop us running before we can be deallocated
- [self applicationWillEnterForeground];
- }];
-
-#if RECORD_AUDIO
- /* Audio */
- AVCaptureDevice *audioDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
- AVCaptureDeviceInput *audioIn = [[AVCaptureDeviceInput alloc] initWithDevice:audioDevice error:nil];
- if ( [_captureSession canAddInput:audioIn] ) {
- [_captureSession addInput:audioIn];
- }
- [audioIn release];
-
- AVCaptureAudioDataOutput *audioOut = [[AVCaptureAudioDataOutput alloc] init];
- // Put audio on its own queue to ensure that our video processing doesn't cause us to drop audio
- dispatch_queue_t audioCaptureQueue = dispatch_queue_create( "com.apple.sample.capturepipeline.audio", DISPATCH_QUEUE_SERIAL );
- [audioOut setSampleBufferDelegate:self queue:audioCaptureQueue];
- [audioCaptureQueue release];
-
- if ( [_captureSession canAddOutput:audioOut] ) {
- [_captureSession addOutput:audioOut];
- }
- self.audioConnection = [audioOut connectionWithMediaType:AVMediaTypeAudio];
- [audioOut release];
-#endif // RECORD_AUDIO
-
- /* Video */
- AVCaptureDevice *videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
- _videoDevice = videoDevice;
- AVCaptureDeviceInput *videoIn = [[AVCaptureDeviceInput alloc] initWithDevice:videoDevice error:nil];
- if ( [_captureSession canAddInput:videoIn] ) {
- [_captureSession addInput:videoIn];
- }
- [videoIn release];
-
- AVCaptureVideoDataOutput *videoOut = [[AVCaptureVideoDataOutput alloc] init];
- videoOut.videoSettings = @{ (id)kCVPixelBufferPixelFormatTypeKey : @(_renderer.inputPixelFormat) };
- [videoOut setSampleBufferDelegate:self queue:_videoDataOutputQueue];
-
- // RosyWriter records videos and we prefer not to have any dropped frames in the video recording.
- // By setting alwaysDiscardsLateVideoFrames to NO we ensure that minor fluctuations in system load or in our processing time for a given frame won't cause framedrops.
- // We do however need to ensure that on average we can process frames in realtime.
- // If we were doing preview only we would probably want to set alwaysDiscardsLateVideoFrames to YES.
- videoOut.alwaysDiscardsLateVideoFrames = NO;
-
- if ( [_captureSession canAddOutput:videoOut] ) {
- [_captureSession addOutput:videoOut];
- }
-
- _videoConnection = [videoOut connectionWithMediaType:AVMediaTypeVideo];
-
- int frameRate;
- NSString *sessionPreset = AVCaptureSessionPresetHigh;
- CMTime frameDuration = kCMTimeInvalid;
- // For single core systems like iPhone 4 and iPod Touch 4th Generation we use a lower resolution and framerate to maintain real-time performance.
- if ( [NSProcessInfo processInfo].processorCount == 1 )
- {
- if ( [_captureSession canSetSessionPreset:AVCaptureSessionPreset640x480] ) {
- sessionPreset = AVCaptureSessionPreset640x480;
- }
- frameRate = 15;
- }
- else
- {
-#if ! USE_OPENGL_RENDERER
- // When using the CPU renderers or the CoreImage renderer we lower the resolution to 720p so that all devices can maintain real-time performance (this is primarily for A5 based devices like iPhone 4s and iPod Touch 5th Generation).
- if ( [_captureSession canSetSessionPreset:AVCaptureSessionPreset1280x720] ) {
- sessionPreset = AVCaptureSessionPreset1280x720;
- }
-#endif // ! USE_OPENGL_RENDERER
-
- frameRate = 30;
- }
-
- _captureSession.sessionPreset = sessionPreset;
-
- frameDuration = CMTimeMake( 1, frameRate );
-
- NSError *error = nil;
- if ( [videoDevice lockForConfiguration:&error] ) {
- videoDevice.activeVideoMaxFrameDuration = frameDuration;
- videoDevice.activeVideoMinFrameDuration = frameDuration;
- [videoDevice unlockForConfiguration];
- }
- else {
- NSLog( @"videoDevice lockForConfiguration returned error %@", error );
- }
-
- // Get the recommended compression settings after configuring the session/device.
-#if RECORD_AUDIO
- _audioCompressionSettings = [[audioOut recommendedAudioSettingsForAssetWriterWithOutputFileType:AVFileTypeQuickTimeMovie] copy];
-#endif
- _videoCompressionSettings = [[videoOut recommendedVideoSettingsForAssetWriterWithOutputFileType:AVFileTypeQuickTimeMovie] copy];
-
- self.videoOrientation = _videoConnection.videoOrientation;
-
- [videoOut release];
-
- return;
-}
-#endif
-
- (void)createCaptureDevice;
{
// setup the device
[self setDesiredCameraPosition:self.defaultAVCaptureDevicePosition];
NSLog(@"[Camera] device connected? %@", device.connected ? @"YES" : @"NO");
NSLog(@"[Camera] device position %@", (device.position == AVCaptureDevicePositionBack) ? @"back" : @"front");
-
-#if 0
- AVCaptureDevice *audioCaptureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
- NSError *error = nil;
- //AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:audioCaptureDevice error:&error];
- AVCaptureDeviceInput *audioInput = [[AVCaptureDeviceInput alloc] initWithDevice:audioCaptureDevice error:nil];
- if (audioInput) {
- NSLog(@"Adding audio capture devices ");
- [self.captureSession addInput:audioInput];
- [audioInput release];
- }
-
-
- // Put audio on its own queue to ensure that our video processing doesn't cause us to drop audio
- audioOut = [[AVCaptureAudioDataOutput alloc] init];
- dispatch_queue_t audioCaptureQueue = dispatch_queue_create("opencv.ios.audio", DISPATCH_QUEUE_SERIAL );
- [audioOut setSampleBufferDelegate:self queue:audioCaptureQueue];
-
- if ( [self.captureSession canAddOutput:audioOut] ) {
- [self.captureSession addOutput:audioOut];
- NSLog(@"audioOut added ");
- }
-
- [audioCaptureQueue release];
-
- self.audioCaptureConnection = [audioOut connectionWithMediaType:AVMediaTypeAudio];
-
- NSLog(@"Audio has been setup with callback ");
-#endif
-
}
@interface CvVideoCamera () {
- NSString* mediaPath;
- int recordCountDown;
- CMTime _lastSampleTime;
- int64_t _timestampMs;
- dispatch_queue_t movieWriterQueue;
-
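+ // frames still to be delivered before recording may start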
+ int recordingCountDown;
}
-
- (void)createVideoDataOutput;
- (void)createVideoFileOutput;
-- (void)createMovieFileOutput;
-- (NSString*) mediaFileString;
+
@property (nonatomic, retain) CALayer *customPreviewLayer;
@property (nonatomic, retain) AVCaptureVideoDataOutput *videoDataOutput;
-@property (nonatomic, retain) AVCaptureMovieFileOutput *movieFileOutput;
-@property (nonatomic, retain) dispatch_queue_t movieWriterQueue;
@end
@synthesize customPreviewLayer;
@synthesize videoDataOutput;
-@synthesize movieFileOutput;
@synthesize recordVideo;
@synthesize rotateVideo;
@synthesize recordPixelBufferAdaptor;
@synthesize recordAssetWriter;
-@synthesize timestampMs = _timestampMs;
-
-
-
#pragma mark - Constructors
- (id)initWithParentView:(UIView*)parent;
{
- recordCountDown = 1000000000;
self = [super initWithParentView:parent];
if (self) {
self.useAVCaptureVideoPreviewLayer = NO;
self.recordVideo = NO;
self.rotateVideo = NO;
}
- movieWriterQueue = nil;
return self;
}
- (void)start;
{
- recordCountDown = 5;
- movieWriterQueue = nil;
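+ // let a few frames pass before any are written; the countdown is decremented once per frame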
+ recordingCountDown = 10;
[super start];
if (self.recordVideo == YES) {
if ([[NSFileManager defaultManager] fileExistsAtPath:[self videoFileString]]) {
[[NSFileManager defaultManager] removeItemAtPath:[self videoFileString] error:&error];
}
-
if (error == nil) {
NSLog(@"[Camera] Delete file %@", [self videoFileString]);
}
}
-
}
self.recordAssetWriter = nil;
self.recordAssetWriterInput = nil;
self.recordPixelBufferAdaptor = nil;
- if (movieWriterQueue)
- dispatch_release(movieWriterQueue);
- self.movieWriterQueue = nil;
}
[self.customPreviewLayer removeFromSuperlayer];
}
[[self.videoDataOutput connectionWithMediaType:AVMediaTypeVideo] setEnabled:YES];
- //self.videoCaptureConnection = [self.videoDataOutput connectionWithMediaType:AVMediaTypeVideo];
- //[self.videoCaptureConnection setEnabled:YES];
-
// set default FPS
if ([self.videoDataOutput connectionWithMediaType:AVMediaTypeVideo].supportsVideoMinFrameDuration) {
[self.videoDataOutput setSampleBufferDelegate:self queue:videoDataOutputQueue];
- if (self.recordVideo == YES && movieWriterQueue == nil) {
- movieWriterQueue = dispatch_queue_create("opencv_movieWriter", DISPATCH_QUEUE_SERIAL);
- }
NSLog(@"[Camera] created AVCaptureVideoDataOutput at %d FPS", self.defaultFPS);
}
-- (void)createMovieFileOutput;
-{
- NSLog(@"createVideoFileOutput...");
- self.movieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
- CMTime maxDuration = CMTimeMake(30*60, 1);
- movieFileOutput.maxRecordedDuration = maxDuration;
- movieFileOutput.minFreeDiskSpaceLimit = (1024L)*(1024L*1024L);
- movieFileOutput.maxRecordedFileSize = (400L)*(1024L*1024L);
-
-
- if ([self.captureSession canAddOutput:movieFileOutput]) {
- [captureSession addOutput:movieFileOutput];
- NSLog(@"Successfully added movie output ");
- }
- else {
- NSLog(@"Couldn't add movie output ");
- }
-
- if (self.recordVideo == YES)
- [self.movieFileOutput startRecordingToOutputFileURL:[self mediaFileURL] recordingDelegate:self];
-}
-
- (void)createCaptureOutput;
{
[self createVideoDataOutput];
if (self.recordVideo == YES) {
[self createVideoFileOutput];
- //[self createMovieFileOutput];
}
}
return pxbuffer;
}
-
-- (void)captureOutput:(AVCaptureFileOutput *)captureOutput
- didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL
- fromConnections:(NSArray *)connections
- error:(NSError *)error {
-
-#if 0
- BOOL recordedSuccessfully = YES;
- if ([error code] != noErr) {
- // A problem occurred: Find out if the recording was successful.
- id value = [[error userInfo] objectForKey:AVErrorRecordingSuccessfullyFinishedKey];
- if (value) {
- recordedSuccessfully = [value boolValue];
- }
- }
-#endif
- NSLog(@"Capture File output done ");
-}
#pragma mark - Protocol AVCaptureVideoDataOutputSampleBufferDelegate
-- (void)captureOutput:(AVCaptureOutput *)captureOutput didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
-{
- if (connection == self.audioCaptureConnection) {
- NSLog(@"Audio sample did drop ");
- return;
- }
- NSLog(@"Video Frame did drop ");
-}
-
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
(void)captureOutput;
(void)connection;
-
- if (connection == self.audioCaptureConnection) {
- //NSLog(@"Audio Sample came in ");
- return;
- }
-
- //NSLog(@"Video sample came in ");
if (self.delegate) {
// convert from Core Media to Core Video
}
-
- CMTime lastSampleTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
- int64_t msec = lastSampleTime.value / (lastSampleTime.timescale / 1000);
- _timestampMs = msec;
- //NSLog(@"Timestamp %u / %u, msec = %lu ", lastSampleTime.value, lastSampleTime.timescale, msec);
-
-
// hand the frame to the delegate for processing
cv::Mat image((int)height, (int)width, format_opencv, bufferAddress, bytesPerRow);
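+ // CGImage that will back the preview layer for this frame (released after display)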
+ CGImage* dstImage;
+
if ([self.delegate respondsToSelector:@selector(processImage:)]) {
[self.delegate processImage:image];
}
- CGImage* dstImage;
-
// check if matrix data pointer or dimensions were changed by the delegate
bool iOSimage = false;
if (height == (size_t)image.rows && width == (size_t)image.cols && format_opencv == image.type() && bufferAddress == image.data && bytesPerRow == image.step) {
// render buffer
- //dispatch_sync(dispatch_get_main_queue(), ^{
dispatch_sync(dispatch_get_main_queue(), ^{
self.customPreviewLayer.contents = (__bridge id)dstImage;
});
- if (recordCountDown > 0)
- recordCountDown--;
-
- if (self.recordVideo == YES && recordCountDown <= 0) {
- //CMTimeShow(lastSampleTime);
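+ // once the countdown has elapsed, timestamp the frame and append it to the recording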
+ recordingCountDown--;
+ if (self.recordVideo == YES && recordingCountDown < 0) {
+ lastSampleTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
+// CMTimeShow(lastSampleTime);
if (self.recordAssetWriter.status != AVAssetWriterStatusWriting) {
[self.recordAssetWriter startWriting];
- [self.recordAssetWriter startSessionAtSourceTime:_lastSampleTime];
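+ // begin the writer session at this frame's presentation timestamp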
+ [self.recordAssetWriter startSessionAtSourceTime:lastSampleTime];
if (self.recordAssetWriter.status != AVAssetWriterStatusWriting) {
NSLog(@"[Camera] Recording Error: asset writer status is not writing: %@", self.recordAssetWriter.error);
return;
if (pixelBuffer != nullptr)
CVPixelBufferRelease(pixelBuffer);
}
+
}
+
// cleanup
CGImageRelease(dstImage);
- (NSURL *)videoFileURL;
{
- //NSString *outputPath = [[NSString alloc] initWithFormat:@"%@%@", NSTemporaryDirectory(), @"output.mov"];
- NSString *outputPath = self.videoFileString;
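+ // recordings are written to output.mov in the app's temporary directory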
+ NSString *outputPath = [[NSString alloc] initWithFormat:@"%@%@", NSTemporaryDirectory(), @"output.mov"];
NSURL *outputURL = [NSURL fileURLWithPath:outputPath];
NSFileManager *fileManager = [NSFileManager defaultManager];
if ([fileManager fileExistsAtPath:outputPath]) {
}
-- (NSURL *)mediaFileURL;
-{
- NSString *outputPath = self.mediaFileString;
- NSURL *outputURL = [NSURL fileURLWithPath:outputPath];
- NSFileManager *fileManager = [NSFileManager defaultManager];
- if ([fileManager fileExistsAtPath:outputPath]) {
- NSLog(@"file exists");
- }
- NSLog(@"media URL %@", outputURL);
- return outputURL;
-}
- (NSString *)videoFileString;
{
return outputPath;
}
-
-- (NSString*) mediaFileString {
- NSString *outputPath = [[NSString alloc] initWithFormat:@"%@%@", NSTemporaryDirectory(), @"media.mov"];
- return outputPath;
-}
@end
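
For reference, a minimal client of the API this patch touches. This is a sketch, not part of the change: MyViewController is a hypothetical host, and the cap_ios.h include path assumes an OpenCV 2.4 layout.

#import <opencv2/highgui/cap_ios.h>

@interface MyViewController : UIViewController<CvVideoCameraDelegate>
@property (nonatomic, retain) CvVideoCamera* videoCamera;
@end

@implementation MyViewController

- (void)viewDidLoad
{
    [super viewDidLoad];
    self.videoCamera = [[CvVideoCamera alloc] initWithParentView:self.view];
    self.videoCamera.delegate = self;
    self.videoCamera.recordVideo = YES; // written to output.mov in the temporary directory
    [self.videoCamera start];
}

#ifdef __cplusplus
// called once per frame; edits made to `image` are rendered and, when recording, written out
- (void)processImage:(cv::Mat&)image
{
    cv::bitwise_not(image, image); // e.g. invert every frame in place
}
#endif

@end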