2 * Copyright (C) 2010 Ole André Vadla Ravnås <oleavr@soundrop.com>
4 * This library is free software; you can redistribute it and/or
5 * modify it under the terms of the GNU Library General Public
6 * License as published by the Free Software Foundation; either
7 * version 2 of the License, or (at your option) any later version.
9 * This library is distributed in the hope that it will be useful,
10 * but WITHOUT ANY WARRANTY; without even the implied warranty of
11 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12 * Library General Public License for more details.
14 * You should have received a copy of the GNU Library General Public
15 * License along with this library; if not, write to the
16 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
17 * Boston, MA 02110-1301, USA.
24 #include "avfvideosrc.h"
26 #import <AVFoundation/AVFoundation.h>
28 #import <AppKit/AppKit.h>
30 #include <gst/video/video.h>
31 #include <gst/gl/gstglcontext.h>
32 #include "coremediabuffer.h"
33 #include "videotexturecache.h"
35 #define DEFAULT_DEVICE_INDEX -1
36 #define DEFAULT_DO_STATS FALSE
38 #define DEVICE_FPS_N 25
39 #define DEVICE_FPS_D 1
41 #define BUFFER_QUEUE_SIZE 2
43 GST_DEBUG_CATEGORY (gst_avf_video_src_debug);
44 #define GST_CAT_DEFAULT gst_avf_video_src_debug
46 static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
51 GST_VIDEO_CAPS_MAKE_WITH_FEATURES
52 (GST_CAPS_FEATURE_MEMORY_GL_MEMORY,
54 "texture-target = " GST_GL_TEXTURE_TARGET_RECTANGLE_STR ";"
56 GST_VIDEO_CAPS_MAKE_WITH_FEATURES
57 (GST_CAPS_FEATURE_MEMORY_GL_MEMORY,
59 "texture-target = " GST_GL_TEXTURE_TARGET_2D_STR "; "
62 "format = (string) { NV12, UYVY, YUY2 }, "
63 "framerate = " GST_VIDEO_FPS_RANGE ", "
64 "width = " GST_VIDEO_SIZE_RANGE ", "
65 "height = " GST_VIDEO_SIZE_RANGE "; "
68 "format = (string) BGRA, "
69 "framerate = " GST_VIDEO_FPS_RANGE ", "
70 "width = " GST_VIDEO_SIZE_RANGE ", "
71 "height = " GST_VIDEO_SIZE_RANGE "; "
74 typedef enum _QueueState {
76 HAS_BUFFER_OR_STOP_REQUEST,
79 #define gst_avf_video_src_parent_class parent_class
80 G_DEFINE_TYPE (GstAVFVideoSrc, gst_avf_video_src, GST_TYPE_PUSH_SRC);
82 @interface GstAVFVideoSrcImpl : NSObject <AVCaptureVideoDataOutputSampleBufferDelegate> {
90 AVCaptureSession *session;
91 AVCaptureInput *input;
92 AVCaptureVideoDataOutput *output;
93 AVCaptureDevice *device;
94 AVCaptureConnection *connection;
95 CMClockRef inputClock;
97 dispatch_queue_t mainQueue;
98 dispatch_queue_t workerQueue;
99 NSConditionLock *bufQueueLock;
100 NSMutableArray *bufQueue;
104 GstVideoFormat format;
106 GstClockTime latency;
109 GstClockTime lastSampling;
113 BOOL captureScreenCursor;
114 BOOL captureScreenMouseClicks;
117 GstVideoTextureCache *textureCache;
121 - (id)initWithSrc:(GstPushSrc *)src;
124 @property int deviceIndex;
125 @property BOOL doStats;
127 @property BOOL captureScreen;
128 @property BOOL captureScreenCursor;
129 @property BOOL captureScreenMouseClicks;
131 - (BOOL)openScreenInput;
132 - (BOOL)openDeviceInput;
135 - (GstVideoFormat)getGstVideoFormat:(NSNumber *)pixel_format;
137 - (CGDirectDisplayID)getDisplayIdFromDeviceIndex;
139 - (GstCaps *)getDeviceCaps;
140 - (BOOL)setDeviceCaps:(GstVideoInfo *)info;
141 - (BOOL)getSessionPresetCaps:(GstCaps *)result;
142 - (BOOL)setSessionPresetCaps:(GstVideoInfo *)info;
143 - (GstCaps *)getCaps;
144 - (BOOL)setCaps:(GstCaps *)new_caps;
149 - (BOOL)query:(GstQuery *)query;
150 - (GstStateChangeReturn)changeState:(GstStateChange)transition;
151 - (GstFlowReturn)create:(GstBuffer **)buf;
152 - (GstCaps *)fixate:(GstCaps *)caps;
153 - (void)updateStatistics;
154 - (void)captureOutput:(AVCaptureOutput *)captureOutput
155 didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
156 fromConnection:(AVCaptureConnection *)connection;
160 @implementation GstAVFVideoSrcImpl
162 @synthesize deviceIndex, doStats, fps, captureScreen,
163 captureScreenCursor, captureScreenMouseClicks;
167 return [self initWithSrc:NULL];
170 - (id)initWithSrc:(GstPushSrc *)src
172 if ((self = [super init])) {
173 element = GST_ELEMENT_CAST (src);
174 baseSrc = GST_BASE_SRC_CAST (src);
177 deviceIndex = DEFAULT_DEVICE_INDEX;
179 captureScreenCursor = NO;
180 captureScreenMouseClicks = NO;
185 dispatch_queue_create ("org.freedesktop.gstreamer.avfvideosrc.main", NULL);
187 dispatch_queue_create ("org.freedesktop.gstreamer.avfvideosrc.output", NULL);
189 gst_base_src_set_live (baseSrc, TRUE);
190 gst_base_src_set_format (baseSrc, GST_FORMAT_TIME);
198 dispatch_release (mainQueue);
200 dispatch_release (workerQueue);
/* Opens the capture device selected by deviceIndex (system default camera
 * when deviceIndex == DEFAULT_DEVICE_INDEX) and creates `input' for it.
 * NOTE(review): the `err' declaration, the failure `return NO;` lines and
 * the success return are missing from this extraction -- failure paths
 * presumably return NO after GST_ELEMENT_ERROR; confirm upstream. */
206 - (BOOL)openDeviceInput
208 NSString *mediaType = AVMediaTypeVideo;
211 if (deviceIndex == DEFAULT_DEVICE_INDEX) {
212 device = [AVCaptureDevice defaultDeviceWithMediaType:mediaType];
/* No default device available at all. */
214 GST_ELEMENT_ERROR (element, RESOURCE, NOT_FOUND,
215 ("No video capture devices found"), (NULL));
/* Explicit index: validate it against the enumerated device list. */
219 NSArray *devices = [AVCaptureDevice devicesWithMediaType:mediaType];
220 if (deviceIndex >= [devices count]) {
221 GST_ELEMENT_ERROR (element, RESOURCE, NOT_FOUND,
222 ("Invalid video capture device index"), (NULL));
225 device = [devices objectAtIndex:deviceIndex];
227 g_assert (device != nil);
230 GST_INFO ("Opening '%s'", [[device localizedName] UTF8String]);
/* Device may be busy (in use by another process); report RESOURCE/BUSY. */
232 input = [AVCaptureDeviceInput deviceInputWithDevice:device
235 GST_ELEMENT_ERROR (element, RESOURCE, BUSY,
236 ("Failed to open device: %s",
237 [[err localizedDescription] UTF8String]),
/* Creates an AVCaptureScreenInput for the display mapped from deviceIndex.
 * capturesCursor is set via KVC inside a @try block so that on OS X < 10.8,
 * where the key does not exist, only a warning is logged instead of
 * crashing on NSUndefinedKeyException. */
247 - (BOOL)openScreenInput
252 CGDirectDisplayID displayId;
254 GST_DEBUG_OBJECT (element, "Opening screen input");
256 displayId = [self getDisplayIdFromDeviceIndex];
260 AVCaptureScreenInput *screenInput =
261 [[AVCaptureScreenInput alloc] initWithDisplayID:displayId];
/* KVC rather than the property so older SDKs/OS versions still link/run. */
265 [screenInput setValue:[NSNumber numberWithBool:captureScreenCursor]
266 forKey:@"capturesCursor"];
268 } @catch (NSException *exception) {
269 if (![[exception name] isEqualToString:NSUndefinedKeyException]) {
270 GST_WARNING ("An unexpected error occured: %s",
271 [[exception reason] UTF8String]);
273 GST_WARNING ("Capturing cursor is only supported in OS X >= 10.8");
275 screenInput.capturesMouseClicks = captureScreenMouseClicks;
284 BOOL success = NO, *successPtr = &success;
286 GST_DEBUG_OBJECT (element, "Opening device");
288 dispatch_sync (mainQueue, ^{
292 ret = [self openScreenInput];
294 ret = [self openDeviceInput];
299 output = [[AVCaptureVideoDataOutput alloc] init];
300 [output setSampleBufferDelegate:self
302 output.alwaysDiscardsLateVideoFrames = YES;
303 output.videoSettings = nil; /* device native format */
305 session = [[AVCaptureSession alloc] init];
306 [session addInput:input];
307 [session addOutput:output];
309 /* retained by session */
310 connection = [[output connections] firstObject];
311 inputClock = ((AVCaptureInputPort *)connection.inputPorts[0]).clock;
316 GST_DEBUG_OBJECT (element, "Opening device %s", success ? "succeed" : "failed");
323 GST_DEBUG_OBJECT (element, "Closing device");
325 dispatch_sync (mainQueue, ^{
326 g_assert (![session isRunning]);
331 [session removeInput:input];
332 [session removeOutput:output];
343 if (!captureScreen) {
349 gst_caps_unref (caps);
/* Build a video/x-raw caps structure for `format' at w x h with a fixed
 * fps_n/fps_d framerate. */
354 #define GST_AVF_CAPS_NEW(format, w, h, fps_n, fps_d) \
355 (gst_caps_new_simple ("video/x-raw", \
356 "width", G_TYPE_INT, w, \
357 "height", G_TYPE_INT, h, \
358 "format", G_TYPE_STRING, gst_video_format_to_string (format), \
359 "framerate", GST_TYPE_FRACTION, (fps_n), (fps_d), \
/* Same as GST_AVF_CAPS_NEW but with a [min,max] framerate range instead of
 * a single fixed rate. */
362 #define GST_AVF_FPS_RANGE_CAPS_NEW(format, w, h, min_fps_n, min_fps_d, max_fps_n, max_fps_d) \
363 (gst_caps_new_simple ("video/x-raw", \
364 "width", G_TYPE_INT, w, \
365 "height", G_TYPE_INT, h, \
366 "format", G_TYPE_STRING, gst_video_format_to_string (format), \
367 "framerate", GST_TYPE_FRACTION_RANGE, (min_fps_n), (min_fps_d), (max_fps_n), (max_fps_d), \
/* Maps a CoreVideo pixel format (as an NSNumber of the FourCC constant) to
 * the corresponding GstVideoFormat.  Returns GST_VIDEO_FORMAT_UNKNOWN for
 * any pixel format avfvideosrc does not handle.
 * NOTE(review): the `break;' lines between cases are missing from this
 * extraction. */
370 - (GstVideoFormat)getGstVideoFormat:(NSNumber *)pixel_format
372 GstVideoFormat gst_format = GST_VIDEO_FORMAT_UNKNOWN;
374 switch ([pixel_format integerValue]) {
375 case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange: /* 420v */
376 gst_format = GST_VIDEO_FORMAT_NV12;
378 case kCVPixelFormatType_422YpCbCr8: /* 2vuy */
379 gst_format = GST_VIDEO_FORMAT_UYVY;
381 case kCVPixelFormatType_32BGRA: /* BGRA */
382 gst_format = GST_VIDEO_FORMAT_BGRA;
384 case kCVPixelFormatType_422YpCbCr8_yuvs: /* yuvs */
385 gst_format = GST_VIDEO_FORMAT_YUY2;
388 GST_LOG_OBJECT (element, "Pixel format %s is not handled by avfvideosrc",
389 [[pixel_format stringValue] UTF8String]);
/* Maps deviceIndex to a CGDirectDisplayID for screen capture:
 * DEFAULT_DEVICE_INDEX selects the main display, otherwise the index into
 * [NSScreen screens], reading the display id from the screen's
 * deviceDescription under the "NSScreenNumber" key.
 * NOTE(review): the error-path return after GST_ELEMENT_ERROR is missing
 * from this extraction. */
397 - (CGDirectDisplayID)getDisplayIdFromDeviceIndex
399 NSDictionary *description;
401 NSArray *screens = [NSScreen screens];
403 if (deviceIndex == DEFAULT_DEVICE_INDEX)
404 return kCGDirectMainDisplay;
405 if (deviceIndex >= [screens count]) {
406 GST_ELEMENT_ERROR (element, RESOURCE, NOT_FOUND,
407 ("Invalid screen capture device index"), (NULL));
410 description = [[screens objectAtIndex:deviceIndex] deviceDescription];
411 displayId = [description objectForKey:@"NSScreenNumber"];
412 return [displayId unsignedIntegerValue];
/* Enumerates the device's supported formats and frame-rate ranges and
 * builds the full caps set the device can produce: plain system-memory
 * caps plus GLMemory-annotated caps for the preferred GL upload format
 * (UYVY or BGRA depending on a conditional missing from this extraction).
 * Returns a merged, simplified caps set (GL caps first so they are
 * preferred during negotiation). */
416 - (GstCaps *)getDeviceCaps
418 NSArray *formats = [device valueForKey:@"formats"];
419 NSArray *pixel_formats = output.availableVideoCVPixelFormatTypes;
420 GstCaps *result_caps, *result_gl_caps;
/* Platform-dependent preferred GL format; the #if/#else lines are missing
 * from this extraction -- confirm which branch applies per platform. */
422 GstVideoFormat gl_format = GST_VIDEO_FORMAT_UYVY;
424 GstVideoFormat gl_format = GST_VIDEO_FORMAT_BGRA;
427 GST_DEBUG_OBJECT (element, "Getting device caps");
429 result_caps = gst_caps_new_empty ();
430 result_gl_caps = gst_caps_new_empty ();
432 /* Do not use AVCaptureDeviceFormat or AVFrameRateRange only
433 * available in iOS >= 7.0. We use a dynamic approach with key-value
434 * coding or performSelector */
435 for (NSObject *f in [formats reverseObjectEnumerator]) {
436 CMFormatDescriptionRef formatDescription;
437 CMVideoDimensions dimensions;
439 /* formatDescription can't be retrieved with valueForKey so use a selector here */
440 formatDescription = (CMFormatDescriptionRef) [f performSelector:@selector(formatDescription)];
441 dimensions = CMVideoFormatDescriptionGetDimensions(formatDescription);
442 for (NSObject *rate in [f valueForKey:@"videoSupportedFrameRateRanges"]) {
443 int min_fps_n, min_fps_d, max_fps_n, max_fps_d;
444 gdouble min_fps, max_fps;
/* Frame rates come back as doubles; convert to exact fractions for caps. */
446 [[rate valueForKey:@"minFrameRate"] getValue:&min_fps];
447 gst_util_double_to_fraction (min_fps, &min_fps_n, &min_fps_d);
449 [[rate valueForKey:@"maxFrameRate"] getValue:&max_fps];
450 gst_util_double_to_fraction (max_fps, &max_fps_n, &max_fps_d);
/* One caps entry per supported pixel format: a framerate range when the
 * device reports distinct min/max, otherwise a fixed framerate. */
452 for (NSNumber *pixel_format in pixel_formats) {
453 GstVideoFormat gst_format = [self getGstVideoFormat:pixel_format];
455 if (gst_format != GST_VIDEO_FORMAT_UNKNOWN) {
456 if (min_fps != max_fps)
457 gst_caps_append (result_caps, GST_AVF_FPS_RANGE_CAPS_NEW (gst_format, dimensions.width, dimensions.height, min_fps_n, min_fps_d, max_fps_n, max_fps_d));
459 gst_caps_append (result_caps, GST_AVF_CAPS_NEW (gst_format, dimensions.width, dimensions.height, max_fps_n, max_fps_d));
/* Duplicate the preferred GL format's entry with GLMemory caps features
 * and the platform texture target. */
462 if (gst_format == gl_format) {
464 if (min_fps != max_fps) {
465 gl_caps = GST_AVF_FPS_RANGE_CAPS_NEW (gl_format,
466 dimensions.width, dimensions.height,
467 min_fps_n, min_fps_d,
468 max_fps_n, max_fps_d);
470 gl_caps = GST_AVF_CAPS_NEW (gl_format,
471 dimensions.width, dimensions.height,
472 max_fps_n, max_fps_d);
474 gst_caps_set_features (gl_caps, 0,
475 gst_caps_features_new (GST_CAPS_FEATURE_MEMORY_GL_MEMORY,
477 gst_caps_set_simple (gl_caps,
478 "texture-target", G_TYPE_STRING,
/* Rectangle textures on macOS, 2D textures elsewhere (per the
 * preprocessor conditional missing from this extraction). */
480 GST_GL_TEXTURE_TARGET_RECTANGLE_STR,
482 GST_GL_TEXTURE_TARGET_2D_STR,
485 gst_caps_append (result_gl_caps, gl_caps);
/* GL caps merged in front so they win during caps negotiation. */
491 result_gl_caps = gst_caps_simplify (gst_caps_merge (result_gl_caps, result_caps));
493 GST_INFO_OBJECT (element, "Device returned the following caps %" GST_PTR_FORMAT, result_gl_caps);
495 return result_gl_caps;
/* Configures the capture device to match the negotiated GstVideoInfo:
 * picks the device format whose dimensions match, then a frame-rate range
 * containing the requested framerate, and pins both min and max frame
 * duration to the minimum for a steadier capture rate.  Uses KVC so it
 * still builds against SDKs that lack the typed AVFoundation API.
 * NOTE(review): `framerate' declaration, loop-exit bookkeeping
 * (found_format assignment, breaks) and the return statements are missing
 * from this extraction; returns YES only if format and framerate were
 * found and the device could be locked -- confirm upstream. */
498 - (BOOL)setDeviceCaps:(GstVideoInfo *)info
501 gboolean found_format = FALSE, found_framerate = FALSE;
502 NSArray *formats = [device valueForKey:@"formats"];
503 gst_util_fraction_to_double (info->fps_n, info->fps_d, &framerate);
505 GST_DEBUG_OBJECT (element, "Setting device caps");
/* Exclusive configuration access; fails if another client holds the lock. */
507 if ([device lockForConfiguration:NULL] == YES) {
508 for (NSObject *f in formats) {
509 CMFormatDescriptionRef formatDescription;
510 CMVideoDimensions dimensions;
512 formatDescription = (CMFormatDescriptionRef) [f performSelector:@selector(formatDescription)];
513 dimensions = CMVideoFormatDescriptionGetDimensions(formatDescription);
514 if (dimensions.width == info->width && dimensions.height == info->height) {
516 [device setValue:f forKey:@"activeFormat"];
517 for (NSObject *rate in [f valueForKey:@"videoSupportedFrameRateRanges"]) {
518 gdouble min_frame_rate, max_frame_rate;
520 [[rate valueForKey:@"minFrameRate"] getValue:&min_frame_rate];
521 [[rate valueForKey:@"maxFrameRate"] getValue:&max_frame_rate];
/* Small epsilon tolerates double<->fraction rounding of the framerate. */
522 if ((framerate >= min_frame_rate - 0.00001) &&
523 (framerate <= max_frame_rate + 0.00001)) {
524 NSValue *min_frame_duration, *max_frame_duration;
526 found_framerate = TRUE;
527 min_frame_duration = [rate valueForKey:@"minFrameDuration"];
528 max_frame_duration = [rate valueForKey:@"maxFrameDuration"];
529 [device setValue:min_frame_duration forKey:@"activeVideoMinFrameDuration"];
531 /* Only available on OSX >= 10.8 and iOS >= 7.0 */
532 // Restrict activeVideoMaxFrameDuration to the minimum value so we get a better capture frame rate
533 [device setValue:min_frame_duration forKey:@"activeVideoMaxFrameDuration"];
/* Older OS: the key does not exist; swallow only NSUndefinedKeyException. */
534 } @catch (NSException *exception) {
535 if (![[exception name] isEqualToString:NSUndefinedKeyException]) {
536 GST_WARNING ("An unexcepted error occured: %s",
537 [exception.reason UTF8String]);
546 GST_WARNING ("Unsupported capture dimensions %dx%d", info->width, info->height);
549 if (!found_framerate) {
550 GST_WARNING ("Unsupported capture framerate %d/%d", info->fps_n, info->fps_d);
554 GST_WARNING ("Couldn't lock device for configuration");
/* Fallback caps probing for platforms without the formats/activeFormat API
 * (iOS < 7.0): appends one fixed-framerate (DEVICE_FPS_N/DEVICE_FPS_D)
 * caps entry per supported pixel format for each session preset the
 * session accepts.  Appends into the caller-owned `result' caps. */
560 - (BOOL)getSessionPresetCaps:(GstCaps *)result
562 NSArray *pixel_formats = output.availableVideoCVPixelFormatTypes;
563 for (NSNumber *pixel_format in pixel_formats) {
564 GstVideoFormat gst_format = [self getGstVideoFormat:pixel_format];
/* Skip pixel formats avfvideosrc cannot express as GStreamer caps. */
565 if (gst_format == GST_VIDEO_FORMAT_UNKNOWN)
/* NOTE(review): a platform #if guard around the 1080p preset appears to be
 * missing from this extraction. */
569 if ([session canSetSessionPreset:AVCaptureSessionPreset1920x1080])
570 gst_caps_append (result, GST_AVF_CAPS_NEW (gst_format, 1920, 1080, DEVICE_FPS_N, DEVICE_FPS_D));
572 if ([session canSetSessionPreset:AVCaptureSessionPreset1280x720])
573 gst_caps_append (result, GST_AVF_CAPS_NEW (gst_format, 1280, 720, DEVICE_FPS_N, DEVICE_FPS_D));
574 if ([session canSetSessionPreset:AVCaptureSessionPreset640x480])
575 gst_caps_append (result, GST_AVF_CAPS_NEW (gst_format, 640, 480, DEVICE_FPS_N, DEVICE_FPS_D));
576 if ([session canSetSessionPreset:AVCaptureSessionPresetMedium])
577 gst_caps_append (result, GST_AVF_CAPS_NEW (gst_format, 480, 360, DEVICE_FPS_N, DEVICE_FPS_D));
578 if ([session canSetSessionPreset:AVCaptureSessionPreset352x288])
579 gst_caps_append (result, GST_AVF_CAPS_NEW (gst_format, 352, 288, DEVICE_FPS_N, DEVICE_FPS_D));
580 if ([session canSetSessionPreset:AVCaptureSessionPresetLow])
581 gst_caps_append (result, GST_AVF_CAPS_NEW (gst_format, 192, 144, DEVICE_FPS_N, DEVICE_FPS_D));
584 GST_LOG_OBJECT (element, "Session presets returned the following caps %" GST_PTR_FORMAT, result);
/* Fallback configuration path (iOS < 7.0): selects an AVCaptureSession
 * preset matching the negotiated width.  The device is locked here and
 * unlocked later, after the session starts (see setCaps:), so the session
 * cannot reset the format in between.
 * NOTE(review): the `case' labels, breaks and the final return are missing
 * from this extraction -- the switch presumably dispatches on 192/352/480/
 * 640/1280/1920 widths; confirm upstream. */
589 - (BOOL)setSessionPresetCaps:(GstVideoInfo *)info;
591 GST_DEBUG_OBJECT (element, "Setting session presset caps");
593 if ([device lockForConfiguration:NULL] != YES) {
594 GST_WARNING ("Couldn't lock device for configuration");
598 switch (info->width) {
600 session.sessionPreset = AVCaptureSessionPresetLow;
603 session.sessionPreset = AVCaptureSessionPreset352x288;
606 session.sessionPreset = AVCaptureSessionPresetMedium;
609 session.sessionPreset = AVCaptureSessionPreset640x480;
612 session.sessionPreset = AVCaptureSessionPreset1280x720;
616 session.sessionPreset = AVCaptureSessionPreset1920x1080;
620 GST_WARNING ("Unsupported capture dimensions %dx%d", info->width, info->height);
629 NSArray *pixel_formats;
632 return NULL; /* BaseSrc will return template caps */
634 result = gst_caps_new_empty ();
635 pixel_formats = output.availableVideoCVPixelFormatTypes;
639 CGRect rect = CGDisplayBounds ([self getDisplayIdFromDeviceIndex]);
640 for (NSNumber *pixel_format in pixel_formats) {
641 GstVideoFormat gst_format = [self getGstVideoFormat:pixel_format];
642 if (gst_format != GST_VIDEO_FORMAT_UNKNOWN)
643 gst_caps_append (result, gst_caps_new_simple ("video/x-raw",
644 "width", G_TYPE_INT, (int)rect.size.width,
645 "height", G_TYPE_INT, (int)rect.size.height,
646 "format", G_TYPE_STRING, gst_video_format_to_string (gst_format),
650 GST_WARNING ("Screen capture is not supported by iOS");
656 result = gst_caps_merge (result, [self getDeviceCaps]);
657 } @catch (NSException *exception) {
658 if (![[exception name] isEqualToString:NSUndefinedKeyException]) {
659 GST_WARNING ("An unexcepted error occured: %s", [exception.reason UTF8String]);
663 /* Fallback on session presets API for iOS < 7.0 */
664 [self getSessionPresetCaps:result];
670 - (BOOL)setCaps:(GstCaps *)new_caps
673 BOOL success = YES, *successPtr = &success;
675 gst_video_info_init (&info);
676 gst_video_info_from_caps (&info, new_caps);
679 height = info.height;
680 format = info.finfo->format;
681 latency = gst_util_uint64_scale (GST_SECOND, info.fps_d, info.fps_n);
683 dispatch_sync (mainQueue, ^{
686 g_assert (![session isRunning]);
690 AVCaptureScreenInput *screenInput = (AVCaptureScreenInput *)input;
691 screenInput.minFrameDuration = CMTimeMake(info.fps_d, info.fps_n);
693 GST_WARNING ("Screen capture is not supported by iOS");
700 /* formats and activeFormat keys are only available on OSX >= 10.7 and iOS >= 7.0 */
701 *successPtr = [self setDeviceCaps:(GstVideoInfo *)&info];
702 if (*successPtr != YES)
705 } @catch (NSException *exception) {
707 if (![[exception name] isEqualToString:NSUndefinedKeyException]) {
708 GST_WARNING ("An unexcepted error occured: %s", [exception.reason UTF8String]);
713 /* Fallback on session presets API for iOS < 7.0 */
714 *successPtr = [self setSessionPresetCaps:(GstVideoInfo *)&info];
715 if (*successPtr != YES)
721 case GST_VIDEO_FORMAT_NV12:
722 newformat = kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange;
724 case GST_VIDEO_FORMAT_UYVY:
725 newformat = kCVPixelFormatType_422YpCbCr8;
727 case GST_VIDEO_FORMAT_YUY2:
728 newformat = kCVPixelFormatType_422YpCbCr8_yuvs;
730 case GST_VIDEO_FORMAT_BGRA:
731 newformat = kCVPixelFormatType_32BGRA;
735 GST_WARNING ("Unsupported output format %s",
736 gst_video_format_to_string (format));
740 GST_INFO_OBJECT (element,
741 "width: %d height: %d format: %s", width, height,
742 gst_video_format_to_string (format));
744 output.videoSettings = [NSDictionary
745 dictionaryWithObject:[NSNumber numberWithInt:newformat]
746 forKey:(NSString*)kCVPixelBufferPixelFormatTypeKey];
749 gst_caps_unref (caps);
750 caps = gst_caps_copy (new_caps);
753 gst_video_texture_cache_free (textureCache);
756 GstCapsFeatures *features = gst_caps_get_features (caps, 0);
757 if (gst_caps_features_contains (features, GST_CAPS_FEATURE_MEMORY_GL_MEMORY)) {
758 GstGLContext *context = query_gl_context (GST_BASE_SRC_PAD (baseSrc));
759 textureCache = gst_video_texture_cache_new (context);
760 gst_video_texture_cache_set_format (textureCache, format, caps);
761 gst_object_unref (context);
764 GST_INFO_OBJECT (element, "configured caps %"GST_PTR_FORMAT
765 ", pushing textures %d", caps, textureCache != NULL);
767 [session startRunning];
769 /* Unlock device configuration only after session is started so the session
770 * won't reset the capture formats */
771 [device unlockForConfiguration];
779 bufQueueLock = [[NSConditionLock alloc] initWithCondition:NO_BUFFERS];
780 bufQueue = [[NSMutableArray alloc] initWithCapacity:BUFFER_QUEUE_SIZE];
784 latency = GST_CLOCK_TIME_NONE;
786 lastSampling = GST_CLOCK_TIME_NONE;
795 dispatch_sync (mainQueue, ^{ [session stopRunning]; });
796 dispatch_sync (workerQueue, ^{});
798 [bufQueueLock release];
804 gst_video_texture_cache_free (textureCache);
/* GstBaseSrc query handler: answers LATENCY queries with the per-frame
 * latency computed in setCaps: (min == max == one frame duration) once a
 * device is open and caps are set; everything else chains up to the
 * GstBaseSrc default implementation. */
810 - (BOOL)query:(GstQuery *)query
814 if (GST_QUERY_TYPE (query) == GST_QUERY_LATENCY) {
/* Latency is only meaningful after open + caps negotiation. */
815 if (device != nil && caps != NULL) {
816 GstClockTime min_latency, max_latency;
818 min_latency = max_latency = latency;
821 GST_DEBUG_OBJECT (element, "reporting latency of min %" GST_TIME_FORMAT
822 " max %" GST_TIME_FORMAT,
823 GST_TIME_ARGS (min_latency), GST_TIME_ARGS (max_latency));
824 gst_query_set_latency (query, TRUE, min_latency, max_latency);
827 result = GST_BASE_SRC_CLASS (parent_class)->query (baseSrc, query);
837 [bufQueueLock unlockWithCondition:HAS_BUFFER_OR_STOP_REQUEST];
846 [bufQueueLock unlockWithCondition:([bufQueue count] == 0) ? NO_BUFFERS : HAS_BUFFER_OR_STOP_REQUEST];
/* State-change hook: the device must be opened before chaining up on
 * NULL->READY (so READY implies an open device) and closed after chaining
 * up on READY->NULL.
 * NOTE(review): the closeDevice call and final return are missing from
 * this extraction. */
851 - (GstStateChangeReturn)changeState:(GstStateChange)transition
853 GstStateChangeReturn ret;
855 if (transition == GST_STATE_CHANGE_NULL_TO_READY) {
856 if (![self openDevice])
857 return GST_STATE_CHANGE_FAILURE;
860 ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
862 if (transition == GST_STATE_CHANGE_READY_TO_NULL)
/* AVFoundation delegate callback, invoked on workerQueue for every
 * captured frame.  Converts the AVF timestamp to running time, drops the
 * frame when no pipeline clock is available, and pushes it into the
 * fixed-size bufQueue (evicting the oldest entry when full) under
 * bufQueueLock, signalling HAS_BUFFER_OR_STOP_REQUEST to wake create:.
 * NOTE(review): an early-exit on stopRequest and the insertObject index
 * argument (presumably atIndex:0, making lastObject the oldest frame)
 * are missing from this extraction. */
868 - (void)captureOutput:(AVCaptureOutput *)captureOutput
869 didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
870 fromConnection:(AVCaptureConnection *)aConnection
872 GstClockTime timestamp, duration;
877 [bufQueueLock unlock];
881 [self getSampleBuffer:sampleBuffer timestamp:&timestamp duration:&duration];
/* No usable timestamp (e.g. no pipeline clock yet): drop the frame. */
883 if (timestamp == GST_CLOCK_TIME_NONE) {
884 [bufQueueLock unlockWithCondition:([bufQueue count] == 0) ? NO_BUFFERS : HAS_BUFFER_OR_STOP_REQUEST];
/* Queue full: evict before inserting so the queue never exceeds
 * BUFFER_QUEUE_SIZE. */
888 if ([bufQueue count] == BUFFER_QUEUE_SIZE)
889 [bufQueue removeLastObject];
891 [bufQueue insertObject:@{@"sbuf": (id)sampleBuffer,
892 @"timestamp": @(timestamp),
893 @"duration": @(duration)}
896 [bufQueueLock unlockWithCondition:HAS_BUFFER_OR_STOP_REQUEST];
/* GstPushSrc create vmethod: blocks until a captured frame (or a stop
 * request) is available, pops the oldest queued sample buffer, wraps it in
 * a GstBuffer (optionally uploading to a GL texture via textureCache) and
 * stamps offset/timestamp/duration.
 * NOTE(review): several guard lines (stopRequest check, NULL checks after
 * buffer creation, caps renegotiation plumbing) are missing from this
 * extraction. */
899 - (GstFlowReturn)create:(GstBuffer **)buf
901 CMSampleBufferRef sbuf;
902 CVImageBufferRef image_buf;
903 CVPixelBufferRef pixel_buf;
904 size_t cur_width, cur_height;
905 GstClockTime timestamp, duration;
/* Wait until captureOutput queued a frame or unlock requested a stop. */
907 [bufQueueLock lockWhenCondition:HAS_BUFFER_OR_STOP_REQUEST];
/* Presumably guarded by a stopRequest check (line missing): a stop request
 * makes create return FLUSHING instead of a buffer. */
909 [bufQueueLock unlock];
910 return GST_FLOW_FLUSHING;
/* lastObject is the oldest entry (frames are inserted at the front). */
913 NSDictionary *dic = (NSDictionary *) [bufQueue lastObject];
914 sbuf = (CMSampleBufferRef) dic[@"sbuf"];
915 timestamp = (GstClockTime) [dic[@"timestamp"] longLongValue];
916 duration = (GstClockTime) [dic[@"duration"] longLongValue];
918 [bufQueue removeLastObject];
919 [bufQueueLock unlockWithCondition:
920 ([bufQueue count] == 0) ? NO_BUFFERS : HAS_BUFFER_OR_STOP_REQUEST];
922 /* Check output frame size dimensions */
923 image_buf = CMSampleBufferGetImageBuffer (sbuf);
925 pixel_buf = (CVPixelBufferRef) image_buf;
926 cur_width = CVPixelBufferGetWidth (pixel_buf);
927 cur_height = CVPixelBufferGetHeight (pixel_buf);
/* Device can silently change frame size; renegotiate caps downstream. */
929 if (width != cur_width || height != cur_height) {
930 /* Set new caps according to current frame dimensions */
931 GST_WARNING ("Output frame size has changed %dx%d -> %dx%d, updating caps",
932 width, height, (int)cur_width, (int)cur_height);
935 gst_caps_set_simple (caps,
936 "width", G_TYPE_INT, width,
937 "height", G_TYPE_INT, height,
/* NOTE(review): GST_BASE_SINK_PAD on a base *source* looks wrong -- this
 * should almost certainly be GST_BASE_SRC_PAD; confirm against upstream
 * before relying on the renegotiation path. */
939 gst_pad_push_event (GST_BASE_SINK_PAD (baseSrc), gst_event_new_caps (caps));
/* Zero-copy wrap of the CoreMedia buffer; maps the pixel data only when no
 * texture cache is in use. */
943 *buf = gst_core_media_buffer_new (sbuf, useVideoMeta, textureCache == NULL);
946 return GST_FLOW_ERROR;
/* GL negotiation: convert the CoreVideo-backed buffer to a GL texture. */
950 if (textureCache != NULL) {
951 *buf = gst_video_texture_cache_get_gl_buffer (textureCache, *buf);
953 return GST_FLOW_ERROR;
956 GST_BUFFER_OFFSET (*buf) = offset++;
957 GST_BUFFER_OFFSET_END (*buf) = GST_BUFFER_OFFSET (*buf) + 1;
958 GST_BUFFER_TIMESTAMP (*buf) = timestamp;
959 GST_BUFFER_DURATION (*buf) = duration;
/* Optional fps statistics, controlled by the do-stats property. */
962 [self updateStatistics];
/* Queries the downstream peer for its local GL context via the
 * "gst.gl.local_context" context query.  Returns a new reference to the
 * GstGLContext, or NULL when the peer does not provide one.
 * NOTE(review): the NULL-context guard and the final return are missing
 * from this extraction. */
967 static GstGLContext *
968 query_gl_context (GstPad *srcpad)
970 GstGLContext *gl_context = NULL;
971 GstContext *context = NULL;
974 query = gst_query_new_context ("gst.gl.local_context");
975 if (gst_pad_peer_query (srcpad, query)) {
976 gst_query_parse_context (query, &context);
/* The GL context is stored in the context structure's "context" field. */
978 const GstStructure *s = gst_context_get_structure (context);
979 gst_structure_get (s, "context", GST_GL_TYPE_CONTEXT, &gl_context, NULL);
982 gst_query_unref (query);
/* gst_caps_filter_and_map_in_place callback: keep only caps structures
 * that do NOT carry the GLMemory caps feature (used by fixate: when no GL
 * context is available downstream). */
988 caps_filter_out_gl_memory (GstCapsFeatures * features, GstStructure * structure,
991 return !gst_caps_features_contains (features,
992 GST_CAPS_FEATURE_MEMORY_GL_MEMORY);
/* GstBaseSrc fixate vmethod: if downstream has no GL context, strip all
 * GLMemory caps entries (so system-memory caps are chosen), then fixate.
 * NOTE(review): the `if (!context)` / `else` guard lines around the filter
 * and unref are missing from this extraction. */
996 - (GstCaps *)fixate:(GstCaps *)new_caps
998 GstGLContext *context;
1000 new_caps = gst_caps_make_writable (new_caps);
1002 context = query_gl_context (GST_BASE_SRC_PAD (baseSrc));
1004 gst_caps_filter_and_map_in_place (new_caps, caps_filter_out_gl_memory, NULL);
1006 gst_object_unref (context);
1008 return gst_caps_fixate (new_caps);
/* Derives a GStreamer PTS/duration for a captured CMSampleBuffer.
 * Strategy: measure how long the frame sat in AVFoundation's queues
 * (input clock "now" minus the buffer's presentation timestamp, both on
 * inputClock) and subtract that delay from the element's running time.
 * Outputs GST_CLOCK_TIME_NONE when no pipeline clock is set or no timing
 * info is available. */
1011 - (void)getSampleBuffer:(CMSampleBufferRef)sbuf
1012 timestamp:(GstClockTime *)outTimestamp
1013 duration:(GstClockTime *)outDuration
1015 CMSampleTimingInfo time_info;
1016 GstClockTime timestamp, avf_timestamp, duration, input_clock_now, input_clock_diff, running_time;
1017 CMItemCount num_timings;
1021 timestamp = GST_CLOCK_TIME_NONE;
1022 duration = GST_CLOCK_TIME_NONE;
1023 if (CMSampleBufferGetOutputSampleTimingInfoArray(sbuf, 1, &time_info, &num_timings) == noErr) {
/* Rescale CMTime (value/timescale) to GStreamer nanoseconds. */
1024 avf_timestamp = gst_util_uint64_scale (GST_SECOND,
1025 time_info.presentationTimeStamp.value, time_info.presentationTimeStamp.timescale);
1027 if (CMTIME_IS_VALID (time_info.duration) && time_info.duration.timescale != 0)
1028 duration = gst_util_uint64_scale (GST_SECOND,
1029 time_info.duration.value, time_info.duration.timescale);
1031 now = CMClockGetTime(inputClock);
1032 input_clock_now = gst_util_uint64_scale (GST_SECOND,
1033 now.value, now.timescale);
/* Time the frame spent inside AVF between capture and delivery. */
1034 input_clock_diff = input_clock_now - avf_timestamp;
1036 GST_OBJECT_LOCK (element);
1037 clock = GST_ELEMENT_CLOCK (element);
1039 running_time = gst_clock_get_time (clock) - element->base_time;
1040 /* We use presentationTimeStamp to determine how much time it took
1041 * between capturing and receiving the frame in our delegate
1042 * (e.g. how long it spent in AVF queues), then we subtract that time
1043 * from our running time to get the actual timestamp.
/* Clamp: never produce a timestamp before running time zero. */
1045 if (running_time >= input_clock_diff)
1046 timestamp = running_time - input_clock_diff;
1048 timestamp = running_time;
1050 GST_DEBUG_OBJECT (element, "AVF clock: %"GST_TIME_FORMAT ", AVF PTS: %"GST_TIME_FORMAT
1051 ", AVF clock diff: %"GST_TIME_FORMAT
1052 ", running time: %"GST_TIME_FORMAT ", out PTS: %"GST_TIME_FORMAT,
1053 GST_TIME_ARGS (input_clock_now), GST_TIME_ARGS (avf_timestamp),
1054 GST_TIME_ARGS (input_clock_diff),
1055 GST_TIME_ARGS (running_time), GST_TIME_ARGS (timestamp));
1057 /* no clock, can't set timestamps */
1058 timestamp = GST_CLOCK_TIME_NONE;
1060 GST_OBJECT_UNLOCK (element);
1063 *outTimestamp = timestamp;
1064 *outDuration = duration;
1067 - (void)updateStatistics
1071 GST_OBJECT_LOCK (element);
1072 clock = GST_ELEMENT_CLOCK (element);
1074 gst_object_ref (clock);
1075 GST_OBJECT_UNLOCK (element);
1077 if (clock != NULL) {
1078 GstClockTime now = gst_clock_get_time (clock);
1079 gst_object_unref (clock);
1083 if (GST_CLOCK_TIME_IS_VALID (lastSampling)) {
1084 if (now - lastSampling >= GST_SECOND) {
1085 GST_OBJECT_LOCK (element);
1087 GST_OBJECT_UNLOCK (element);
1089 g_object_notify (G_OBJECT (element), "fps");
1113 PROP_CAPTURE_SCREEN,
1114 PROP_CAPTURE_SCREEN_CURSOR,
1115 PROP_CAPTURE_SCREEN_MOUSE_CLICKS,
1120 static void gst_avf_video_src_finalize (GObject * obj);
1121 static void gst_avf_video_src_get_property (GObject * object, guint prop_id,
1122 GValue * value, GParamSpec * pspec);
1123 static void gst_avf_video_src_set_property (GObject * object, guint prop_id,
1124 const GValue * value, GParamSpec * pspec);
1125 static GstStateChangeReturn gst_avf_video_src_change_state (
1126 GstElement * element, GstStateChange transition);
1127 static GstCaps * gst_avf_video_src_get_caps (GstBaseSrc * basesrc,
1129 static gboolean gst_avf_video_src_set_caps (GstBaseSrc * basesrc,
1131 static gboolean gst_avf_video_src_start (GstBaseSrc * basesrc);
1132 static gboolean gst_avf_video_src_stop (GstBaseSrc * basesrc);
1133 static gboolean gst_avf_video_src_query (GstBaseSrc * basesrc,
1135 static gboolean gst_avf_video_src_unlock (GstBaseSrc * basesrc);
1136 static gboolean gst_avf_video_src_unlock_stop (GstBaseSrc * basesrc);
1137 static GstFlowReturn gst_avf_video_src_create (GstPushSrc * pushsrc,
1139 static gboolean gst_avf_video_src_negotiate (GstBaseSrc * basesrc);
1140 static GstCaps * gst_avf_video_src_fixate (GstBaseSrc * bsrc,
1145 gst_avf_video_src_class_init (GstAVFVideoSrcClass * klass)
1147 GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
1148 GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
1149 GstBaseSrcClass *gstbasesrc_class = GST_BASE_SRC_CLASS (klass);
1150 GstPushSrcClass *gstpushsrc_class = GST_PUSH_SRC_CLASS (klass);
1152 gobject_class->finalize = gst_avf_video_src_finalize;
1153 gobject_class->get_property = gst_avf_video_src_get_property;
1154 gobject_class->set_property = gst_avf_video_src_set_property;
1156 gstelement_class->change_state = gst_avf_video_src_change_state;
1158 gstbasesrc_class->get_caps = gst_avf_video_src_get_caps;
1159 gstbasesrc_class->set_caps = gst_avf_video_src_set_caps;
1160 gstbasesrc_class->start = gst_avf_video_src_start;
1161 gstbasesrc_class->stop = gst_avf_video_src_stop;
1162 gstbasesrc_class->query = gst_avf_video_src_query;
1163 gstbasesrc_class->unlock = gst_avf_video_src_unlock;
1164 gstbasesrc_class->unlock_stop = gst_avf_video_src_unlock_stop;
1165 gstbasesrc_class->fixate = gst_avf_video_src_fixate;
1166 gstbasesrc_class->negotiate = gst_avf_video_src_negotiate;
1168 gstpushsrc_class->create = gst_avf_video_src_create;
1170 gst_element_class_set_metadata (gstelement_class,
1171 "Video Source (AVFoundation)", "Source/Video",
1172 "Reads frames from an iOS AVFoundation device",
1173 "Ole André Vadla Ravnås <oleavr@soundrop.com>");
1175 gst_element_class_add_pad_template (gstelement_class,
1176 gst_static_pad_template_get (&src_template));
1178 g_object_class_install_property (gobject_class, PROP_DEVICE_INDEX,
1179 g_param_spec_int ("device-index", "Device Index",
1180 "The zero-based device index",
1181 -1, G_MAXINT, DEFAULT_DEVICE_INDEX,
1182 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
1183 g_object_class_install_property (gobject_class, PROP_DO_STATS,
1184 g_param_spec_boolean ("do-stats", "Enable statistics",
1185 "Enable logging of statistics", DEFAULT_DO_STATS,
1186 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
1187 g_object_class_install_property (gobject_class, PROP_FPS,
1188 g_param_spec_int ("fps", "Frames per second",
1189 "Last measured framerate, if statistics are enabled",
1190 -1, G_MAXINT, -1, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
1192 g_object_class_install_property (gobject_class, PROP_CAPTURE_SCREEN,
1193 g_param_spec_boolean ("capture-screen", "Enable screen capture",
1194 "Enable screen capture functionality", FALSE,
1195 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
1196 g_object_class_install_property (gobject_class, PROP_CAPTURE_SCREEN_CURSOR,
1197 g_param_spec_boolean ("capture-screen-cursor", "Capture screen cursor",
1198 "Enable cursor capture while capturing screen", FALSE,
1199 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
1200 g_object_class_install_property (gobject_class, PROP_CAPTURE_SCREEN_MOUSE_CLICKS,
1201 g_param_spec_boolean ("capture-screen-mouse-clicks", "Enable mouse clicks capture",
1202 "Enable mouse clicks capture while capturing screen", FALSE,
1203 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
1206 GST_DEBUG_CATEGORY_INIT (gst_avf_video_src_debug, "avfvideosrc",
1207 0, "iOS AVFoundation video source");
1210 #define OBJC_CALLOUT_BEGIN() \
1211 NSAutoreleasePool *pool; \
1213 pool = [[NSAutoreleasePool alloc] init]
1214 #define OBJC_CALLOUT_END() \
1219 gst_avf_video_src_init (GstAVFVideoSrc * src)
1221 OBJC_CALLOUT_BEGIN ();
1222 src->impl = [[GstAVFVideoSrcImpl alloc] initWithSrc:GST_PUSH_SRC (src)];
1223 OBJC_CALLOUT_END ();
1227 gst_avf_video_src_finalize (GObject * obj)
1229 OBJC_CALLOUT_BEGIN ();
1230 [GST_AVF_VIDEO_SRC_IMPL (obj) release];
1231 OBJC_CALLOUT_END ();
1233 G_OBJECT_CLASS (parent_class)->finalize (obj);
1237 gst_avf_video_src_get_property (GObject * object, guint prop_id, GValue * value,
1240 GstAVFVideoSrcImpl *impl = GST_AVF_VIDEO_SRC_IMPL (object);
1244 case PROP_CAPTURE_SCREEN:
1245 g_value_set_boolean (value, impl.captureScreen);
1247 case PROP_CAPTURE_SCREEN_CURSOR:
1248 g_value_set_boolean (value, impl.captureScreenCursor);
1250 case PROP_CAPTURE_SCREEN_MOUSE_CLICKS:
1251 g_value_set_boolean (value, impl.captureScreenMouseClicks);
1254 case PROP_DEVICE_INDEX:
1255 g_value_set_int (value, impl.deviceIndex);
1258 g_value_set_boolean (value, impl.doStats);
1261 GST_OBJECT_LOCK (object);
1262 g_value_set_int (value, impl.fps);
1263 GST_OBJECT_UNLOCK (object);
1266 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
1272 gst_avf_video_src_set_property (GObject * object, guint prop_id,
1273 const GValue * value, GParamSpec * pspec)
1275 GstAVFVideoSrcImpl *impl = GST_AVF_VIDEO_SRC_IMPL (object);
1279 case PROP_CAPTURE_SCREEN:
1280 impl.captureScreen = g_value_get_boolean (value);
1282 case PROP_CAPTURE_SCREEN_CURSOR:
1283 impl.captureScreenCursor = g_value_get_boolean (value);
1285 case PROP_CAPTURE_SCREEN_MOUSE_CLICKS:
1286 impl.captureScreenMouseClicks = g_value_get_boolean (value);
1289 case PROP_DEVICE_INDEX:
1290 impl.deviceIndex = g_value_get_int (value);
1293 impl.doStats = g_value_get_boolean (value);
1296 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
/* ---- C vmethod wrappers -------------------------------------------------
 * Each GstBaseSrc/GstPushSrc/GstElement vmethod below is a thin shim that
 * wraps the call to the GstAVFVideoSrcImpl Objective-C object in an
 * autorelease pool (OBJC_CALLOUT_BEGIN/END).
 * NOTE(review): opening braces, local `ret' declarations and `return ret;'
 * lines are missing throughout from this extraction. */
1301 static GstStateChangeReturn
1302 gst_avf_video_src_change_state (GstElement * element, GstStateChange transition)
1304 GstStateChangeReturn ret;
1306 OBJC_CALLOUT_BEGIN ();
1307 ret = [GST_AVF_VIDEO_SRC_IMPL (element) changeState: transition];
1308 OBJC_CALLOUT_END ();
/* get_caps: `filter' is unused here; the impl returns full device caps. */
1314 gst_avf_video_src_get_caps (GstBaseSrc * basesrc, GstCaps * filter)
1318 OBJC_CALLOUT_BEGIN ();
1319 ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) getCaps];
1320 OBJC_CALLOUT_END ();
1326 gst_avf_video_src_set_caps (GstBaseSrc * basesrc, GstCaps * caps)
1330 OBJC_CALLOUT_BEGIN ();
1331 ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) setCaps:caps];
1332 OBJC_CALLOUT_END ();
1338 gst_avf_video_src_start (GstBaseSrc * basesrc)
1342 OBJC_CALLOUT_BEGIN ();
1343 ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) start];
1344 OBJC_CALLOUT_END ();
1350 gst_avf_video_src_stop (GstBaseSrc * basesrc)
1354 OBJC_CALLOUT_BEGIN ();
1355 ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) stop];
1356 OBJC_CALLOUT_END ();
1362 gst_avf_video_src_query (GstBaseSrc * basesrc, GstQuery * query)
1366 OBJC_CALLOUT_BEGIN ();
1367 ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) query:query];
1368 OBJC_CALLOUT_END ();
1374 gst_avf_video_src_unlock (GstBaseSrc * basesrc)
1378 OBJC_CALLOUT_BEGIN ();
1379 ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) unlock];
1380 OBJC_CALLOUT_END ();
1386 gst_avf_video_src_unlock_stop (GstBaseSrc * basesrc)
1390 OBJC_CALLOUT_BEGIN ();
1391 ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) unlockStop];
1392 OBJC_CALLOUT_END ();
1397 static GstFlowReturn
1398 gst_avf_video_src_create (GstPushSrc * pushsrc, GstBuffer ** buf)
1402 OBJC_CALLOUT_BEGIN ();
1403 ret = [GST_AVF_VIDEO_SRC_IMPL (pushsrc) create: buf];
1404 OBJC_CALLOUT_END ();
/* negotiate: once caps are set they stick -- renegotiation unsupported. */
1410 gst_avf_video_src_negotiate (GstBaseSrc * basesrc)
1412 /* FIXME: We don't support reconfiguration yet */
1413 if (gst_pad_has_current_caps (GST_BASE_SRC_PAD (basesrc)))
1416 return GST_BASE_SRC_CLASS (parent_class)->negotiate (basesrc);
1421 gst_avf_video_src_fixate (GstBaseSrc * bsrc, GstCaps * caps)
1425 OBJC_CALLOUT_BEGIN ();
1426 ret = [GST_AVF_VIDEO_SRC_IMPL (bsrc) fixate:caps];
1427 OBJC_CALLOUT_END ();