2 * Copyright (C) 2010 Ole André Vadla Ravnås <oleavr@soundrop.com>
4 * This library is free software; you can redistribute it and/or
5 * modify it under the terms of the GNU Library General Public
6 * License as published by the Free Software Foundation; either
7 * version 2 of the License, or (at your option) any later version.
9 * This library is distributed in the hope that it will be useful,
10 * but WITHOUT ANY WARRANTY; without even the implied warranty of
11 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12 * Library General Public License for more details.
14 * You should have received a copy of the GNU Library General Public
15 * License along with this library; if not, write to the
16 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
17 * Boston, MA 02110-1301, USA.
24 #include "avfvideosrc.h"
26 #import <AVFoundation/AVFoundation.h>
28 #import <AppKit/AppKit.h>
#include <math.h>
30 #include <gst/video/video.h>
31 #include <gst/gl/gstglcontext.h>
32 #include "coremediabuffer.h"
33 #include "corevideotexturecache.h"
35 #define DEFAULT_DEVICE_INDEX -1
36 #define DEFAULT_DO_STATS FALSE
38 #define DEVICE_FPS_N 25
39 #define DEVICE_FPS_D 1
41 #define BUFFER_QUEUE_SIZE 2
43 GST_DEBUG_CATEGORY (gst_avf_video_src_debug);
44 #define GST_CAT_DEFAULT gst_avf_video_src_debug
/* Source pad template: system-memory NV12/UYVY/YUY2, system-memory BGRA,
 * plus a GL-memory caps entry (fragment: template text is not fully
 * visible here). */
46 static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
49 GST_STATIC_CAPS ("video/x-raw, "
50 "format = (string) { NV12, UYVY, YUY2 }, "
51 "framerate = " GST_VIDEO_FPS_RANGE ", "
52 "width = " GST_VIDEO_SIZE_RANGE ", "
53 "height = " GST_VIDEO_SIZE_RANGE "; "
56 "format = (string) BGRA, "
57 "framerate = " GST_VIDEO_FPS_RANGE ", "
58 "width = " GST_VIDEO_SIZE_RANGE ", "
59 "height = " GST_VIDEO_SIZE_RANGE "; "
61 GST_VIDEO_CAPS_MAKE_WITH_FEATURES
62 (GST_CAPS_FEATURE_MEMORY_GL_MEMORY,
/* Condition values for bufQueueLock.  NO_BUFFERS (used when the queue is
 * created in start) is the other state; its enumerator line is not
 * visible in this fragment. */
66 typedef enum _QueueState {
68 HAS_BUFFER_OR_STOP_REQUEST,
71 #define gst_avf_video_src_parent_class parent_class
72 G_DEFINE_TYPE (GstAVFVideoSrc, gst_avf_video_src, GST_TYPE_PUSH_SRC);
/* Private Obj-C capture implementation wrapped by the GstAVFVideoSrc
 * GObject; it is the AVCaptureVideoDataOutput sample-buffer delegate.
 * (Fragment: several ivar and declaration lines are not visible.) */
74 @interface GstAVFVideoSrcImpl : NSObject <AVCaptureVideoDataOutputSampleBufferDelegate> {
/* AVFoundation capture graph */
82 AVCaptureSession *session;
83 AVCaptureInput *input;
84 AVCaptureVideoDataOutput *output;
85 AVCaptureDevice *device;
86 CMClockRef inputClock;
/* Dispatch queues: session control (main) and sample delivery (worker) */
88 dispatch_queue_t mainQueue;
89 dispatch_queue_t workerQueue;
/* Bounded FIFO of captured sample buffers, guarded by a condition lock */
90 NSConditionLock *bufQueueLock;
91 NSMutableArray *bufQueue;
95 GstVideoFormat format;
/* Timestamping / statistics bookkeeping */
99 GstClockTime startAVFTimestamp;
100 GstClockTime startTimestamp;
102 GstClockTime lastSampling;
/* Screen-capture options */
106 BOOL captureScreenCursor;
107 BOOL captureScreenMouseClicks;
/* CoreVideo texture cache used when downstream wants GL memory */
110 GstCoreVideoTextureCache *textureCache;
114 - (id)initWithSrc:(GstPushSrc *)src;
117 @property int deviceIndex;
118 @property BOOL doStats;
120 @property BOOL captureScreen;
121 @property BOOL captureScreenCursor;
122 @property BOOL captureScreenMouseClicks;
124 - (BOOL)openScreenInput;
125 - (BOOL)openDeviceInput;
128 - (GstVideoFormat)getGstVideoFormat:(NSNumber *)pixel_format;
130 - (CGDirectDisplayID)getDisplayIdFromDeviceIndex;
132 - (BOOL)getDeviceCaps:(GstCaps *)result;
133 - (BOOL)setDeviceCaps:(GstVideoInfo *)info;
134 - (BOOL)getSessionPresetCaps:(GstCaps *)result;
135 - (BOOL)setSessionPresetCaps:(GstVideoInfo *)info;
136 - (GstCaps *)getCaps;
137 - (BOOL)setCaps:(GstCaps *)new_caps;
142 - (BOOL)query:(GstQuery *)query;
143 - (GstStateChangeReturn)changeState:(GstStateChange)transition;
144 - (GstFlowReturn)create:(GstBuffer **)buf;
145 - (void)updateStatistics;
146 - (void)captureOutput:(AVCaptureOutput *)captureOutput
147 didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
148 fromConnection:(AVCaptureConnection *)connection;
152 @implementation GstAVFVideoSrcImpl
154 @synthesize deviceIndex, doStats, fps, captureScreen,
155 captureScreenCursor, captureScreenMouseClicks;
/* (-init fragment) plain init delegates to the designated initializer. */
159 return [self initWithSrc:NULL];
/* Designated initializer: remembers the owning GStreamer element, sets
 * property defaults and creates the dispatch queues used for session
 * control (mainQueue) and sample-buffer delivery (workerQueue).
 * (Fragment: queue variable assignments are split across missing lines.) */
162 - (id)initWithSrc:(GstPushSrc *)src
164 if ((self = [super init])) {
165 element = GST_ELEMENT_CAST (src);
166 baseSrc = GST_BASE_SRC_CAST (src);
169 deviceIndex = DEFAULT_DEVICE_INDEX;
171 captureScreenCursor = NO;
172 captureScreenMouseClicks = NO;
177 dispatch_queue_create ("org.freedesktop.gstreamer.avfvideosrc.main", NULL);
179 dispatch_queue_create ("org.freedesktop.gstreamer.avfvideosrc.output", NULL);
181 gst_base_src_set_live (baseSrc, TRUE);
182 gst_base_src_set_format (baseSrc, GST_FORMAT_TIME);
/* (dealloc fragment) release the dispatch queues created above;
 * manual retain/release style, pre-ARC. */
190 dispatch_release (mainQueue);
192 dispatch_release (workerQueue);
/* Select and open the AVCaptureDevice denoted by deviceIndex; the default
 * index (-1) picks the system default video device.  Error paths emit
 * GST_ELEMENT_ERROR.  (Fragment: return statements and the `err`
 * declaration are not visible.) */
198 - (BOOL)openDeviceInput
200 NSString *mediaType = AVMediaTypeVideo;
203 if (deviceIndex == DEFAULT_DEVICE_INDEX) {
204 device = [AVCaptureDevice defaultDeviceWithMediaType:mediaType];
206 GST_ELEMENT_ERROR (element, RESOURCE, NOT_FOUND,
207 ("No video capture devices found"), (NULL));
211 NSArray *devices = [AVCaptureDevice devicesWithMediaType:mediaType];
212 if (deviceIndex >= [devices count]) {
213 GST_ELEMENT_ERROR (element, RESOURCE, NOT_FOUND,
214 ("Invalid video capture device index"), (NULL));
217 device = [devices objectAtIndex:deviceIndex];
219 g_assert (device != nil);
222 GST_INFO ("Opening '%s'", [[device localizedName] UTF8String]);
224 input = [AVCaptureDeviceInput deviceInputWithDevice:device
227 GST_ELEMENT_ERROR (element, RESOURCE, BUSY,
228 ("Failed to open device: %s",
229 [[err localizedDescription] UTF8String]),
/* Create an AVCaptureScreenInput for the display selected by deviceIndex.
 * capturesCursor is set via KVC so the code degrades gracefully on OS X
 * < 10.8, where the key does not exist (NSUndefinedKeyException is
 * expected and merely logged).  (Fragment: return paths not visible.) */
239 - (BOOL)openScreenInput
244 CGDirectDisplayID displayId;
246 GST_DEBUG_OBJECT (element, "Opening screen input");
248 displayId = [self getDisplayIdFromDeviceIndex];
252 AVCaptureScreenInput *screenInput =
253 [[AVCaptureScreenInput alloc] initWithDisplayID:displayId];
257 [screenInput setValue:[NSNumber numberWithBool:captureScreenCursor]
258 forKey:@"capturesCursor"];
260 } @catch (NSException *exception) {
261 if (![[exception name] isEqualToString:NSUndefinedKeyException]) {
/* Fixed log-message typo: "occured" -> "occurred". */
262 GST_WARNING ("An unexpected error occurred: %s",
263 [[exception reason] UTF8String]);
265 GST_WARNING ("Capturing cursor is only supported in OS X >= 10.8");
267 screenInput.capturesMouseClicks = captureScreenMouseClicks;
/* (openDevice fragment) Open the configured input (screen or camera) on
 * the main queue, then build the AVCaptureSession with a video data
 * output delivering frames to workerQueue in the device-native format. */
276 BOOL success = NO, *successPtr = &success;
278 GST_DEBUG_OBJECT (element, "Opening device");
280 dispatch_sync (mainQueue, ^{
284 ret = [self openScreenInput];
286 ret = [self openDeviceInput];
291 output = [[AVCaptureVideoDataOutput alloc] init];
292 [output setSampleBufferDelegate:self
294 output.alwaysDiscardsLateVideoFrames = YES;
295 output.videoSettings = nil; /* device native format */
297 session = [[AVCaptureSession alloc] init];
298 [session addInput:input];
299 [session addOutput:output];
/* Fixed log-message grammar: "succeed" -> "succeeded". */
304 GST_DEBUG_OBJECT (element, "Opening device %s", success ? "succeeded" : "failed");
/* (closeDevice fragment) Tear down the capture session on the main queue;
 * the session must already be stopped when this runs. */
311 GST_DEBUG_OBJECT (element, "Closing device");
313 dispatch_sync (mainQueue, ^{
314 g_assert (![session isRunning]);
316 [session removeInput:input];
317 [session removeOutput:output];
/* Non-screen capture presumably also drops the AVCaptureDevice here —
 * the lines inside this branch are not visible; confirm against the
 * full file. */
328 if (!captureScreen) {
334 gst_caps_unref (caps);
/* Build one "video/x-raw" caps structure for the given format, size and
 * framerate.  (Fragment: the macro's closing line is not visible.) */
340 #define GST_AVF_CAPS_NEW(format, w, h, fps_n, fps_d) \
341 (gst_caps_new_simple ("video/x-raw", \
342 "width", G_TYPE_INT, w, \
343 "height", G_TYPE_INT, h, \
344 "format", G_TYPE_STRING, gst_video_format_to_string (format), \
345 "framerate", GST_TYPE_FRACTION, (fps_n), (fps_d), \
/* Map a CoreVideo pixel-format code (boxed in an NSNumber) to the
 * corresponding GstVideoFormat; unhandled codes log and yield
 * GST_VIDEO_FORMAT_UNKNOWN.  (Fragment: break/return lines not visible.) */
348 - (GstVideoFormat)getGstVideoFormat:(NSNumber *)pixel_format
350 GstVideoFormat gst_format = GST_VIDEO_FORMAT_UNKNOWN;
352 switch ([pixel_format integerValue]) {
353 case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange: /* 420v */
354 gst_format = GST_VIDEO_FORMAT_NV12;
356 case kCVPixelFormatType_422YpCbCr8: /* 2vuy */
357 gst_format = GST_VIDEO_FORMAT_UYVY;
359 case kCVPixelFormatType_32BGRA: /* BGRA */
360 gst_format = GST_VIDEO_FORMAT_BGRA;
362 case kCVPixelFormatType_32RGBA: /* RGBA */
363 gst_format = GST_VIDEO_FORMAT_RGBA;
365 case kCVPixelFormatType_422YpCbCr8_yuvs: /* yuvs */
366 gst_format = GST_VIDEO_FORMAT_YUY2;
369 GST_LOG_OBJECT (element, "Pixel format %s is not handled by avfvideosrc",
370 [[pixel_format stringValue] UTF8String]);
/* Resolve deviceIndex to a CGDirectDisplayID for screen capture; the
 * default index selects the main display, an out-of-range index raises
 * an element error.  (Fragment: the `displayId` declaration and the
 * error-path return are not visible.) */
378 - (CGDirectDisplayID)getDisplayIdFromDeviceIndex
380 NSDictionary *description;
382 NSArray *screens = [NSScreen screens];
384 if (deviceIndex == DEFAULT_DEVICE_INDEX)
385 return kCGDirectMainDisplay;
386 if (deviceIndex >= [screens count]) {
387 GST_ELEMENT_ERROR (element, RESOURCE, NOT_FOUND,
388 ("Invalid screen capture device index"), (NULL));
/* NSScreenNumber is the documented key holding the display ID. */
391 description = [[screens objectAtIndex:deviceIndex] deviceDescription];
392 displayId = [description objectForKey:@"NSScreenNumber"];
393 return [displayId unsignedIntegerValue];
/* Append one caps structure to `result` for every (device format x
 * frame rate x pixel format) combination; a BGRA match additionally
 * yields an RGBA GL-memory variant.  Uses KVC / performSelector instead
 * of AVCaptureDeviceFormat so the code also builds against older SDKs.
 * (Fragment: fps_n/fps_d/max_fps declarations and the return are not
 * visible.) */
397 - (BOOL)getDeviceCaps:(GstCaps *)result
399 NSArray *formats = [device valueForKey:@"formats"];
400 NSArray *pixel_formats = output.availableVideoCVPixelFormatTypes;
402 GST_DEBUG_OBJECT (element, "Getting device caps");
404 /* Do not use AVCaptureDeviceFormat or AVFrameRateRange only
405 * available in iOS >= 7.0. We use a dynamic approach with key-value
406 * coding or performSelector */
407 for (NSObject *f in [formats reverseObjectEnumerator]) {
408 CMFormatDescriptionRef formatDescription;
409 CMVideoDimensions dimensions;
411 /* formatDescription can't be retrieved with valueForKey so use a selector here */
412 formatDescription = (CMFormatDescriptionRef) [f performSelector:@selector(formatDescription)];
413 dimensions = CMVideoFormatDescriptionGetDimensions(formatDescription);
414 for (NSObject *rate in [f valueForKey:@"videoSupportedFrameRateRanges"]) {
418 [[rate valueForKey:@"maxFrameRate"] getValue:&max_fps];
419 gst_util_double_to_fraction (max_fps, &fps_n, &fps_d);
421 for (NSNumber *pixel_format in pixel_formats) {
422 GstVideoFormat gst_format = [self getGstVideoFormat:pixel_format];
423 if (gst_format != GST_VIDEO_FORMAT_UNKNOWN)
424 gst_caps_append (result, GST_AVF_CAPS_NEW (gst_format, dimensions.width, dimensions.height, fps_n, fps_d));
426 if (gst_format == GST_VIDEO_FORMAT_BGRA) {
427 GstCaps *rgba_caps = GST_AVF_CAPS_NEW (GST_VIDEO_FORMAT_RGBA, dimensions.width, dimensions.height, fps_n, fps_d);
428 gst_caps_set_features (rgba_caps, 0, gst_caps_features_new (GST_CAPS_FEATURE_MEMORY_GL_MEMORY, NULL));
429 gst_caps_append (result, rgba_caps);
434 GST_LOG_OBJECT (element, "Device returned the following caps %" GST_PTR_FORMAT, result);
/* Configure the capture device's activeFormat and frame durations to
 * match the negotiated GstVideoInfo, using key-value coding so the code
 * still builds/runs on pre-10.7 / pre-iOS-7 systems; keys missing at
 * runtime surface as NSUndefinedKeyException, which is tolerated.
 * (Fragment: the `framerate` declaration, loop braces and return paths
 * are not visible.) */
438 - (BOOL)setDeviceCaps:(GstVideoInfo *)info
441 gboolean found_format = FALSE, found_framerate = FALSE;
442 NSArray *formats = [device valueForKey:@"formats"];
443 gst_util_fraction_to_double (info->fps_n, info->fps_d, &framerate);
445 GST_DEBUG_OBJECT (element, "Setting device caps");
447 if ([device lockForConfiguration:NULL] == YES) {
448 for (NSObject *f in formats) {
449 CMFormatDescriptionRef formatDescription;
450 CMVideoDimensions dimensions;
452 formatDescription = (CMFormatDescriptionRef) [f performSelector:@selector(formatDescription)];
453 dimensions = CMVideoFormatDescriptionGetDimensions(formatDescription);
454 if (dimensions.width == info->width && dimensions.height == info->height) {
456 [device setValue:f forKey:@"activeFormat"];
457 for (NSObject *rate in [f valueForKey:@"videoSupportedFrameRateRanges"]) {
458 gdouble max_frame_rate;
460 [[rate valueForKey:@"maxFrameRate"] getValue:&max_frame_rate];
/* BUGFIX: use fabs(), not the integer abs() — the operands are doubles,
 * and abs() truncates the difference to an int, so e.g. 29.97 vs 30
 * compared equal and non-integer framerates could never match. */
461 if (fabs (framerate - max_frame_rate) < 0.00001) {
462 NSValue *min_frame_duration, *max_frame_duration;
464 found_framerate = TRUE;
465 min_frame_duration = [rate valueForKey:@"minFrameDuration"];
466 max_frame_duration = [rate valueForKey:@"maxFrameDuration"];
467 [device setValue:min_frame_duration forKey:@"activeVideoMinFrameDuration"];
469 /* Only available on OSX >= 10.8 and iOS >= 7.0 */
470 // Restrict activeVideoMaxFrameDuration to the minimum value so we get a better capture frame rate
471 [device setValue:min_frame_duration forKey:@"activeVideoMaxFrameDuration"];
472 } @catch (NSException *exception) {
473 if (![[exception name] isEqualToString:NSUndefinedKeyException]) {
/* Fixed log-message typos: "unexcepted" -> "unexpected", "occured" ->
 * "occurred". */
474 GST_WARNING ("An unexpected error occurred: %s",
475 [exception.reason UTF8String]);
484 GST_WARNING ("Unsupported capture dimensions %dx%d", info->width, info->height);
487 if (!found_framerate) {
488 GST_WARNING ("Unsupported capture framerate %d/%d", info->fps_n, info->fps_d);
492 GST_WARNING ("Couldn't lock device for configuration");
/* Fallback caps enumeration via AVCaptureSession presets (used when the
 * formats KVC API is unavailable, i.e. iOS < 7.0 / OS X < 10.7): for
 * every recognised pixel format, append one structure per supported
 * preset resolution at the fixed DEVICE_FPS rate.  (Fragment: loop
 * braces / `continue` and the return are not visible.) */
498 - (BOOL)getSessionPresetCaps:(GstCaps *)result
500 NSArray *pixel_formats = output.availableVideoCVPixelFormatTypes;
501 for (NSNumber *pixel_format in pixel_formats) {
502 GstVideoFormat gst_format = [self getGstVideoFormat:pixel_format];
503 if (gst_format == GST_VIDEO_FORMAT_UNKNOWN)
507 if ([session canSetSessionPreset:AVCaptureSessionPreset1920x1080])
508 gst_caps_append (result, GST_AVF_CAPS_NEW (gst_format, 1920, 1080, DEVICE_FPS_N, DEVICE_FPS_D));
510 if ([session canSetSessionPreset:AVCaptureSessionPreset1280x720])
511 gst_caps_append (result, GST_AVF_CAPS_NEW (gst_format, 1280, 720, DEVICE_FPS_N, DEVICE_FPS_D));
512 if ([session canSetSessionPreset:AVCaptureSessionPreset640x480])
513 gst_caps_append (result, GST_AVF_CAPS_NEW (gst_format, 640, 480, DEVICE_FPS_N, DEVICE_FPS_D));
514 if ([session canSetSessionPreset:AVCaptureSessionPresetMedium])
515 gst_caps_append (result, GST_AVF_CAPS_NEW (gst_format, 480, 360, DEVICE_FPS_N, DEVICE_FPS_D));
516 if ([session canSetSessionPreset:AVCaptureSessionPreset352x288])
517 gst_caps_append (result, GST_AVF_CAPS_NEW (gst_format, 352, 288, DEVICE_FPS_N, DEVICE_FPS_D));
518 if ([session canSetSessionPreset:AVCaptureSessionPresetLow])
519 gst_caps_append (result, GST_AVF_CAPS_NEW (gst_format, 192, 144, DEVICE_FPS_N, DEVICE_FPS_D));
522 GST_LOG_OBJECT (element, "Session presets returned the following caps %" GST_PTR_FORMAT, result);
/* Fallback configuration via AVCaptureSession presets: pick the preset
 * matching the negotiated width.  (Fragment: case labels, braces and
 * return statements are not visible.) */
/* Removed the stray `;` after the method signature (it preceded the
 * method body — deprecated syntax clang warns about). */
527 - (BOOL)setSessionPresetCaps:(GstVideoInfo *)info
/* Fixed log-message typo: "presset" -> "preset". */
529 GST_DEBUG_OBJECT (element, "Setting session preset caps");
531 if ([device lockForConfiguration:NULL] != YES) {
532 GST_WARNING ("Couldn't lock device for configuration");
536 switch (info->width) {
538 session.sessionPreset = AVCaptureSessionPresetLow;
541 session.sessionPreset = AVCaptureSessionPreset352x288;
544 session.sessionPreset = AVCaptureSessionPresetMedium;
547 session.sessionPreset = AVCaptureSessionPreset640x480;
550 session.sessionPreset = AVCaptureSessionPreset1280x720;
554 session.sessionPreset = AVCaptureSessionPreset1920x1080;
558 GST_WARNING ("Unsupported capture dimensions %dx%d", info->width, info->height);
/* (getCaps fragment — the method header line is outside this view)
 * Build the caps supported by the opened device.  Screen capture (OS X
 * only) reports the display bounds; otherwise the formats KVC API is
 * tried first, falling back to session presets when the `formats` key
 * does not exist (iOS < 7.0). */
567 NSArray *pixel_formats;
570 return NULL; /* BaseSrc will return template caps */
572 result = gst_caps_new_empty ();
573 pixel_formats = output.availableVideoCVPixelFormatTypes;
577 CGRect rect = CGDisplayBounds ([self getDisplayIdFromDeviceIndex]);
578 for (NSNumber *pixel_format in pixel_formats) {
579 GstVideoFormat gst_format = [self getGstVideoFormat:pixel_format];
580 if (gst_format != GST_VIDEO_FORMAT_UNKNOWN)
581 gst_caps_append (result, gst_caps_new_simple ("video/x-raw",
582 "width", G_TYPE_INT, (int)rect.size.width,
583 "height", G_TYPE_INT, (int)rect.size.height,
584 "format", G_TYPE_STRING, gst_video_format_to_string (gst_format),
588 GST_WARNING ("Screen capture is not supported by iOS");
595 [self getDeviceCaps:result];
597 } @catch (NSException *exception) {
599 if (![[exception name] isEqualToString:NSUndefinedKeyException]) {
/* Fixed log-message typos: "unexcepted" -> "unexpected", "occured" ->
 * "occurred". */
600 GST_WARNING ("An unexpected error occurred: %s", [exception.reason UTF8String]);
604 /* Fallback on session presets API for iOS < 7.0 */
605 [self getSessionPresetCaps:result];
/* GstBaseSrc::set_caps — apply the negotiated caps to the device/session
 * and start capture.  The reconfiguration runs synchronously on the main
 * queue; the CoreVideo pixel format is chosen from the negotiated
 * GstVideoFormat (RGBA deliberately maps to BGRA, see the comment in the
 * switch and in create:).  (Fragment: width assignment, block braces and
 * some switch labels are not visible.) */
611 - (BOOL)setCaps:(GstCaps *)new_caps
614 BOOL success = YES, *successPtr = &success;
616 gst_video_info_init (&info);
617 gst_video_info_from_caps (&info, new_caps);
620 height = info.height;
621 format = info.finfo->format;
/* One-frame latency derived from the negotiated framerate. */
622 latency = gst_util_uint64_scale (GST_SECOND, info.fps_d, info.fps_n);
624 dispatch_sync (mainQueue, ^{
627 g_assert (![session isRunning]);
631 AVCaptureScreenInput *screenInput = (AVCaptureScreenInput *)input;
632 screenInput.minFrameDuration = CMTimeMake(info.fps_d, info.fps_n);
634 GST_WARNING ("Screen capture is not supported by iOS");
641 /* formats and activeFormat keys are only available on OSX >= 10.7 and iOS >= 7.0 */
642 *successPtr = [self setDeviceCaps:(GstVideoInfo *)&info];
643 if (*successPtr != YES)
646 } @catch (NSException *exception) {
648 if (![[exception name] isEqualToString:NSUndefinedKeyException]) {
/* Fixed log-message typos: "unexcepted" -> "unexpected", "occured" ->
 * "occurred". */
649 GST_WARNING ("An unexpected error occurred: %s", [exception.reason UTF8String]);
654 /* Fallback on session presets API for iOS < 7.0 */
655 *successPtr = [self setSessionPresetCaps:(GstVideoInfo *)&info];
656 if (*successPtr != YES)
662 case GST_VIDEO_FORMAT_NV12:
663 newformat = kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange;
665 case GST_VIDEO_FORMAT_UYVY:
666 newformat = kCVPixelFormatType_422YpCbCr8;
668 case GST_VIDEO_FORMAT_YUY2:
669 newformat = kCVPixelFormatType_422YpCbCr8_yuvs;
671 case GST_VIDEO_FORMAT_RGBA:
672 /* In order to do RGBA, we negotiate BGRA (since RGBA is not supported
673 * if not in textures) and then we get RGBA textures via
674 * CVOpenGL*TextureCacheCreateTextureFromImage. Computers. */
675 case GST_VIDEO_FORMAT_BGRA:
676 newformat = kCVPixelFormatType_32BGRA;
680 GST_WARNING ("Unsupported output format %s",
681 gst_video_format_to_string (format));
685 GST_DEBUG_OBJECT(element,
686 "Width: %d Height: %d Format: %" GST_FOURCC_FORMAT,
688 GST_FOURCC_ARGS (gst_video_format_to_fourcc (format)));
690 output.videoSettings = [NSDictionary
691 dictionaryWithObject:[NSNumber numberWithInt:newformat]
692 forKey:(NSString*)kCVPixelBufferPixelFormatTypeKey];
695 gst_caps_unref (caps);
696 caps = gst_caps_copy (new_caps);
697 [session startRunning];
699 /* Unlock device configuration only after session is started so the session
700 * won't reset the capture formats */
701 [device unlockForConfiguration];
/* (GstBaseSrc::start fragment) Allocate the bounded sample queue and
 * reset latency / timestamp / statistics bookkeeping. */
709 bufQueueLock = [[NSConditionLock alloc] initWithCondition:NO_BUFFERS];
710 bufQueue = [[NSMutableArray alloc] initWithCapacity:BUFFER_QUEUE_SIZE];
714 latency = GST_CLOCK_TIME_NONE;
715 startAVFTimestamp = GST_CLOCK_TIME_NONE;
716 startTimestamp = GST_CLOCK_TIME_NONE;
719 lastSampling = GST_CLOCK_TIME_NONE;
/* (GstBaseSrc::stop fragment) Stop the session on the main queue, drain
 * any in-flight delegate callbacks by syncing the worker queue, then
 * release the queue lock and free the texture cache. */
728 dispatch_sync (mainQueue, ^{ [session stopRunning]; });
729 dispatch_sync (workerQueue, ^{});
731 [bufQueueLock release];
738 gst_core_video_texture_cache_free (textureCache);
/* Answer GST_QUERY_LATENCY with the one-frame latency computed in
 * setCaps:; everything else is delegated to the base class.  (Fragment:
 * the `result` declaration and return are not visible.) */
744 - (BOOL)query:(GstQuery *)query
748 if (GST_QUERY_TYPE (query) == GST_QUERY_LATENCY) {
749 if (device != nil && caps != NULL) {
750 GstClockTime min_latency, max_latency;
752 min_latency = max_latency = latency;
755 GST_DEBUG_OBJECT (element, "reporting latency of min %" GST_TIME_FORMAT
756 " max %" GST_TIME_FORMAT,
757 GST_TIME_ARGS (min_latency), GST_TIME_ARGS (max_latency));
758 gst_query_set_latency (query, TRUE, min_latency, max_latency);
761 result = GST_BASE_SRC_CLASS (parent_class)->query (baseSrc, query);
/* GstBaseSrc::decide_allocation — record whether downstream supports
 * GstVideoMeta, and when a GL texture-upload meta carrying a GstGLContext
 * is offered together with GL-memory caps, create the CoreVideo texture
 * cache later used by create:.  (Fragment: `idx`/`query_caps`
 * declarations and the return are not visible.) */
769 useVideoMeta = gst_query_find_allocation_meta (query,
770 GST_VIDEO_META_API_TYPE, NULL);
773 if (gst_query_find_allocation_meta (query,
774 GST_VIDEO_GL_TEXTURE_UPLOAD_META_API_TYPE, &idx)) {
775 GstGLContext *context;
776 const GstStructure *upload_meta_params;
778 gst_query_parse_nth_allocation_meta (query, idx, &upload_meta_params);
779 if (gst_structure_get (upload_meta_params, "gst.gl.GstGLContext",
780 GST_GL_TYPE_CONTEXT, &context, NULL) && context) {
782 GstCapsFeatures *features;
784 gst_query_parse_allocation (query, &query_caps, NULL);
785 features = gst_caps_get_features (query_caps, 0);
786 if (gst_caps_features_contains (features, GST_CAPS_FEATURE_MEMORY_GL_MEMORY)) {
787 textureCache = gst_core_video_texture_cache_new (context);
788 gst_core_video_texture_cache_set_format (textureCache,
791 gst_object_unref (context);
/* (unlock fragment) Signal the queue condition so a create: blocked in
 * lockWhenCondition: wakes up — presumably paired with a stopRequest
 * flag set on non-visible lines; confirm against the full file. */
802 [bufQueueLock unlockWithCondition:HAS_BUFFER_OR_STOP_REQUEST];
/* (unlockStop fragment) Release the lock without changing the condition. */
811 [bufQueueLock unlock];
/* Open the capture device on NULL->READY (failing the transition if that
 * fails) and tear it down after READY->NULL; all other work is done by
 * the parent class.  (Fragment: the close call and return not visible.) */
816 - (GstStateChangeReturn)changeState:(GstStateChange)transition
818 GstStateChangeReturn ret;
820 if (transition == GST_STATE_CHANGE_NULL_TO_READY) {
821 if (![self openDevice])
822 return GST_STATE_CHANGE_FAILURE;
825 ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
827 if (transition == GST_STATE_CHANGE_READY_TO_NULL)
/* AVCaptureVideoDataOutput delegate, invoked on workerQueue for every
 * captured frame.  Latches the input port's clock on first use,
 * timestamps the sample, and pushes it onto the bounded FIFO (dropping
 * the oldest entry when full) before signalling waiters.  (Fragment:
 * the early-exit/stop check lines are not visible.) */
833 - (void)captureOutput:(AVCaptureOutput *)captureOutput
834 didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
835 fromConnection:(AVCaptureConnection *)connection
837 GstClockTime timestamp, duration;
842 [bufQueueLock unlock];
846 if (inputClock == nil)
847 inputClock = ((AVCaptureInputPort *)connection.inputPorts[0]).clock;
/* BUGFIX: restored `&timestamp` — the source had been mangled by an HTML
 * entity (`&times;tamp` rendered as a multiplication sign), which does
 * not compile; `&duration` on the same call shows the intended form. */
848 [self getSampleBuffer:sampleBuffer timestamp:&timestamp duration:&duration];
850 if ([bufQueue count] == BUFFER_QUEUE_SIZE)
851 [bufQueue removeLastObject];
853 [bufQueue insertObject:@{@"sbuf": (id)sampleBuffer,
854 @"timestamp": @(timestamp),
855 @"duration": @(duration)}
858 [bufQueueLock unlockWithCondition:HAS_BUFFER_OR_STOP_REQUEST];
/* GstPushSrc::create — block until a sample buffer is queued (or a stop
 * is requested, returning GST_FLOW_FLUSHING), pop the oldest entry, wrap
 * it in a GstBuffer, update caps if the frame size changed, and apply
 * timestamp/duration/offset metadata.  (Fragment: the stopRequest check,
 * the `offset` ivar and some braces are on non-visible lines.) */
861 - (GstFlowReturn)create:(GstBuffer **)buf
863 CMSampleBufferRef sbuf;
864 CVImageBufferRef image_buf;
865 CVPixelBufferRef pixel_buf;
866 size_t cur_width, cur_height;
867 GstClockTime timestamp, duration;
869 [bufQueueLock lockWhenCondition:HAS_BUFFER_OR_STOP_REQUEST];
871 [bufQueueLock unlock];
872 return GST_FLOW_FLUSHING;
875 NSDictionary *dic = (NSDictionary *) [bufQueue lastObject];
876 sbuf = (CMSampleBufferRef) dic[@"sbuf"];
877 timestamp = (GstClockTime) [dic[@"timestamp"] longLongValue];
878 duration = (GstClockTime) [dic[@"duration"] longLongValue];
880 [bufQueue removeLastObject];
881 [bufQueueLock unlockWithCondition:
882 ([bufQueue count] == 0) ? NO_BUFFERS : HAS_BUFFER_OR_STOP_REQUEST];
884 /* Check output frame size dimensions */
885 image_buf = CMSampleBufferGetImageBuffer (sbuf);
887 pixel_buf = (CVPixelBufferRef) image_buf;
888 cur_width = CVPixelBufferGetWidth (pixel_buf);
889 cur_height = CVPixelBufferGetHeight (pixel_buf);
891 if (width != cur_width || height != cur_height) {
892 /* Set new caps according to current frame dimensions */
893 GST_WARNING ("Output frame size has changed %dx%d -> %dx%d, updating caps",
894 width, height, (int)cur_width, (int)cur_height);
897 gst_caps_set_simple (caps,
898 "width", G_TYPE_INT, width,
899 "height", G_TYPE_INT, height,
/* BUGFIX: push the updated caps on our own *source* pad — this element
 * is a basesrc, so GST_BASE_SRC_PAD, not GST_BASE_SINK_PAD (which casts
 * the basesrc to a basesink). */
901 gst_pad_push_event (GST_BASE_SRC_PAD (baseSrc), gst_event_new_caps (caps));
905 *buf = gst_core_media_buffer_new (sbuf, useVideoMeta, textureCache == NULL);
906 if (format == GST_VIDEO_FORMAT_RGBA) {
907 /* So now buf contains BGRA data (!) . Since downstream is actually going to
908 * use the GL upload meta to get RGBA textures (??), we need to override the
909 * VideoMeta format (!!!). Yes this is confusing, see setCaps: */
910 GstVideoMeta *video_meta = gst_buffer_get_video_meta (*buf);
912 video_meta->format = format;
917 if (textureCache != NULL)
918 *buf = gst_core_video_texture_cache_get_gl_buffer (textureCache, *buf);
920 GST_BUFFER_OFFSET (*buf) = offset++;
/* BUGFIX: dereference the out-parameter — GST_BUFFER_OFFSET (buf) cast
 * the GstBuffer ** itself to a buffer and read garbage instead of the
 * offset just stored on *buf. */
921 GST_BUFFER_OFFSET_END (*buf) = GST_BUFFER_OFFSET (*buf) + 1;
922 GST_BUFFER_TIMESTAMP (*buf) = timestamp;
923 GST_BUFFER_DURATION (*buf) = duration;
926 [self updateStatistics];
/* Extract the sample's presentation timestamp and duration (from its
 * CMSampleTimingInfo) and convert the timestamp from the AVF input
 * clock's timeline to the element clock's running time, adjusting by the
 * sample's age on the input clock.  Outputs GST_CLOCK_TIME_NONE when no
 * timing info is available.  (Fragment: `now`/`clock` declarations and
 * some braces are not visible.) */
931 - (void)getSampleBuffer:(CMSampleBufferRef)sbuf
932 timestamp:(GstClockTime *)outTimestamp
933 duration:(GstClockTime *)outDuration
935 CMSampleTimingInfo time_info;
936 GstClockTime timestamp, duration, inputClockNow, running_time;
937 CMItemCount num_timings;
941 timestamp = GST_CLOCK_TIME_NONE;
942 duration = GST_CLOCK_TIME_NONE;
943 if (CMSampleBufferGetOutputSampleTimingInfoArray(sbuf, 1, &time_info, &num_timings) == noErr) {
944 timestamp = gst_util_uint64_scale (GST_SECOND,
945 time_info.presentationTimeStamp.value, time_info.presentationTimeStamp.timescale);
/* Guard against CMTime with a zero timescale before dividing. */
947 if (CMTIME_IS_VALID (time_info.duration) && time_info.duration.timescale != 0)
948 duration = gst_util_uint64_scale (GST_SECOND,
949 time_info.duration.value, time_info.duration.timescale);
951 now = CMClockGetTime(inputClock);
952 inputClockNow = gst_util_uint64_scale (GST_SECOND,
953 now.value, now.timescale);
955 GST_OBJECT_LOCK (element);
956 clock = GST_ELEMENT_CLOCK (element);
957 running_time = gst_clock_get_time (clock) - element->base_time;
958 timestamp = running_time + (inputClockNow - timestamp);
959 GST_OBJECT_UNLOCK (element);
962 *outTimestamp = timestamp;
963 *outDuration = duration;
/* Once per second of element-clock time, publish the measured frame rate
 * through the "fps" GObject property notification.  (Fragment: the
 * do-stats guard, frame counting and fps computation lines are not
 * visible.) */
966 - (void)updateStatistics
970 GST_OBJECT_LOCK (element);
971 clock = GST_ELEMENT_CLOCK (element);
973 gst_object_ref (clock);
974 GST_OBJECT_UNLOCK (element);
977 GstClockTime now = gst_clock_get_time (clock);
978 gst_object_unref (clock);
982 if (GST_CLOCK_TIME_IS_VALID (lastSampling)) {
983 if (now - lastSampling >= GST_SECOND) {
984 GST_OBJECT_LOCK (element);
986 GST_OBJECT_UNLOCK (element);
988 g_object_notify (G_OBJECT (element), "fps");
/* GObject property IDs (fragment: the enum header and the PROP_0 /
 * PROP_DEVICE_INDEX / PROP_DO_STATS / PROP_FPS enumerators referenced by
 * get/set_property below are on non-visible lines). */
1012 PROP_CAPTURE_SCREEN,
1013 PROP_CAPTURE_SCREEN_CURSOR,
1014 PROP_CAPTURE_SCREEN_MOUSE_CLICKS,
/* Forward declarations of the GObject / GstElement / GstBaseSrc /
 * GstPushSrc vfuncs implemented further down; each is a thin wrapper
 * delegating to the GstAVFVideoSrcImpl Obj-C object. */
1019 static void gst_avf_video_src_finalize (GObject * obj);
1020 static void gst_avf_video_src_get_property (GObject * object, guint prop_id,
1021 GValue * value, GParamSpec * pspec);
1022 static void gst_avf_video_src_set_property (GObject * object, guint prop_id,
1023 const GValue * value, GParamSpec * pspec);
1024 static GstStateChangeReturn gst_avf_video_src_change_state (
1025 GstElement * element, GstStateChange transition);
1026 static GstCaps * gst_avf_video_src_get_caps (GstBaseSrc * basesrc,
1028 static gboolean gst_avf_video_src_set_caps (GstBaseSrc * basesrc,
1030 static gboolean gst_avf_video_src_start (GstBaseSrc * basesrc);
1031 static gboolean gst_avf_video_src_stop (GstBaseSrc * basesrc);
1032 static gboolean gst_avf_video_src_query (GstBaseSrc * basesrc,
1034 static gboolean gst_avf_video_src_decide_allocation (GstBaseSrc * basesrc,
1036 static gboolean gst_avf_video_src_unlock (GstBaseSrc * basesrc);
1037 static gboolean gst_avf_video_src_unlock_stop (GstBaseSrc * basesrc);
1038 static GstFlowReturn gst_avf_video_src_create (GstPushSrc * pushsrc,
1040 static gboolean gst_avf_video_src_negotiate (GstBaseSrc * basesrc);
/* Class initializer: install the vfuncs declared above, set element
 * metadata, register the src pad template and install the element's
 * GObject properties.  (Fragment: the trailing debug-category init lines
 * appear to belong to a following function whose header is not visible.) */
1044 gst_avf_video_src_class_init (GstAVFVideoSrcClass * klass)
1046 GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
1047 GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
1048 GstBaseSrcClass *gstbasesrc_class = GST_BASE_SRC_CLASS (klass);
1049 GstPushSrcClass *gstpushsrc_class = GST_PUSH_SRC_CLASS (klass);
1051 gobject_class->finalize = gst_avf_video_src_finalize;
1052 gobject_class->get_property = gst_avf_video_src_get_property;
1053 gobject_class->set_property = gst_avf_video_src_set_property;
1055 gstelement_class->change_state = gst_avf_video_src_change_state;
1057 gstbasesrc_class->get_caps = gst_avf_video_src_get_caps;
1058 gstbasesrc_class->set_caps = gst_avf_video_src_set_caps;
1059 gstbasesrc_class->start = gst_avf_video_src_start;
1060 gstbasesrc_class->stop = gst_avf_video_src_stop;
1061 gstbasesrc_class->query = gst_avf_video_src_query;
1062 gstbasesrc_class->unlock = gst_avf_video_src_unlock;
1063 gstbasesrc_class->unlock_stop = gst_avf_video_src_unlock_stop;
1064 gstbasesrc_class->decide_allocation = gst_avf_video_src_decide_allocation;
1065 gstbasesrc_class->negotiate = gst_avf_video_src_negotiate;
1067 gstpushsrc_class->create = gst_avf_video_src_create;
1069 gst_element_class_set_metadata (gstelement_class,
1070 "Video Source (AVFoundation)", "Source/Video",
1071 "Reads frames from an iOS AVFoundation device",
1072 "Ole André Vadla Ravnås <oleavr@soundrop.com>");
1074 gst_element_class_add_pad_template (gstelement_class,
1075 gst_static_pad_template_get (&src_template));
1077 g_object_class_install_property (gobject_class, PROP_DEVICE_INDEX,
1078 g_param_spec_int ("device-index", "Device Index",
1079 "The zero-based device index",
1080 -1, G_MAXINT, DEFAULT_DEVICE_INDEX,
1081 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
1082 g_object_class_install_property (gobject_class, PROP_DO_STATS,
1083 g_param_spec_boolean ("do-stats", "Enable statistics",
1084 "Enable logging of statistics", DEFAULT_DO_STATS,
1085 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
1086 g_object_class_install_property (gobject_class, PROP_FPS,
1087 g_param_spec_int ("fps", "Frames per second",
1088 "Last measured framerate, if statistics are enabled",
1089 -1, G_MAXINT, -1, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
1091 g_object_class_install_property (gobject_class, PROP_CAPTURE_SCREEN,
1092 g_param_spec_boolean ("capture-screen", "Enable screen capture",
1093 "Enable screen capture functionality", FALSE,
1094 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
1095 g_object_class_install_property (gobject_class, PROP_CAPTURE_SCREEN_CURSOR,
1096 g_param_spec_boolean ("capture-screen-cursor", "Capture screen cursor",
1097 "Enable cursor capture while capturing screen", FALSE,
1098 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
1099 g_object_class_install_property (gobject_class, PROP_CAPTURE_SCREEN_MOUSE_CLICKS,
1100 g_param_spec_boolean ("capture-screen-mouse-clicks", "Enable mouse clicks capture",
1101 "Enable mouse clicks capture while capturing screen", FALSE,
1102 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
/* (fragment) debug-category registration for this element. */
1105 GST_DEBUG_CATEGORY_INIT (gst_avf_video_src_debug, "avfvideosrc",
1106 0, "iOS AVFoundation video source");
/* Wrap every Obj-C callout made from GLib/GStreamer C code in an
 * autorelease pool (manual retain/release style, pre-ARC).  (Fragment:
 * the END macro's pool-release line is not visible.) */
1109 #define OBJC_CALLOUT_BEGIN() \
1110 NSAutoreleasePool *pool; \
1112 pool = [[NSAutoreleasePool alloc] init]
1113 #define OBJC_CALLOUT_END() \
/* Instance init: create the backing Obj-C implementation object. */
1118 gst_avf_video_src_init (GstAVFVideoSrc * src)
1120 OBJC_CALLOUT_BEGIN ();
1121 src->impl = [[GstAVFVideoSrcImpl alloc] initWithSrc:GST_PUSH_SRC (src)];
1122 OBJC_CALLOUT_END ();
/* Finalize: release the Obj-C implementation, then chain up. */
1126 gst_avf_video_src_finalize (GObject * obj)
1128 OBJC_CALLOUT_BEGIN ();
1129 [GST_AVF_VIDEO_SRC_IMPL (obj) release];
1130 OBJC_CALLOUT_END ();
1132 G_OBJECT_CLASS (parent_class)->finalize (obj);
/* GObject::get_property — read the corresponding property straight from
 * the Obj-C impl; the fps read is taken under the object lock because
 * updateStatistics writes it from the capture thread.  (Fragment: the
 * switch header and several case/break lines are not visible.) */
1136 gst_avf_video_src_get_property (GObject * object, guint prop_id, GValue * value,
1139 GstAVFVideoSrcImpl *impl = GST_AVF_VIDEO_SRC_IMPL (object);
1143 case PROP_CAPTURE_SCREEN:
1144 g_value_set_boolean (value, impl.captureScreen);
1146 case PROP_CAPTURE_SCREEN_CURSOR:
1147 g_value_set_boolean (value, impl.captureScreenCursor);
1149 case PROP_CAPTURE_SCREEN_MOUSE_CLICKS:
1150 g_value_set_boolean (value, impl.captureScreenMouseClicks);
1153 case PROP_DEVICE_INDEX:
1154 g_value_set_int (value, impl.deviceIndex);
1157 g_value_set_boolean (value, impl.doStats);
1160 GST_OBJECT_LOCK (object);
1161 g_value_set_int (value, impl.fps);
1162 GST_OBJECT_UNLOCK (object);
1165 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
/* GObject::set_property — forward the value to the matching property on
 * the Obj-C impl.  (Fragment: switch header and break lines not
 * visible.) */
1171 gst_avf_video_src_set_property (GObject * object, guint prop_id,
1172 const GValue * value, GParamSpec * pspec)
1174 GstAVFVideoSrcImpl *impl = GST_AVF_VIDEO_SRC_IMPL (object);
1178 case PROP_CAPTURE_SCREEN:
1179 impl.captureScreen = g_value_get_boolean (value);
1181 case PROP_CAPTURE_SCREEN_CURSOR:
1182 impl.captureScreenCursor = g_value_get_boolean (value);
1184 case PROP_CAPTURE_SCREEN_MOUSE_CLICKS:
1185 impl.captureScreenMouseClicks = g_value_get_boolean (value);
1188 case PROP_DEVICE_INDEX:
1189 impl.deviceIndex = g_value_get_int (value);
1192 impl.doStats = g_value_get_boolean (value);
1195 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
/* GstElement::change_state — delegate to the impl's changeState: inside
 * an autorelease pool.  (Fragment: the return is not visible.) */
1200 static GstStateChangeReturn
1201 gst_avf_video_src_change_state (GstElement * element, GstStateChange transition)
1203 GstStateChangeReturn ret;
1205 OBJC_CALLOUT_BEGIN ();
1206 ret = [GST_AVF_VIDEO_SRC_IMPL (element) changeState: transition];
1207 OBJC_CALLOUT_END ();
/* GstBaseSrc::get_caps vfunc — delegate to the impl (fragment: the `ret`
 * declaration and return are not visible). */
1213 gst_avf_video_src_get_caps (GstBaseSrc * basesrc, GstCaps * filter)
1217 OBJC_CALLOUT_BEGIN ();
1218 ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) getCaps];
1219 OBJC_CALLOUT_END ();
/* GstBaseSrc::set_caps vfunc — delegate to the impl (fragment). */
1225 gst_avf_video_src_set_caps (GstBaseSrc * basesrc, GstCaps * caps)
1229 OBJC_CALLOUT_BEGIN ();
1230 ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) setCaps:caps];
1231 OBJC_CALLOUT_END ();
/* GstBaseSrc::start vfunc — delegate to the impl (fragment). */
1237 gst_avf_video_src_start (GstBaseSrc * basesrc)
1241 OBJC_CALLOUT_BEGIN ();
1242 ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) start];
1243 OBJC_CALLOUT_END ();
/* GstBaseSrc::stop vfunc — delegate to the impl (fragment). */
1249 gst_avf_video_src_stop (GstBaseSrc * basesrc)
1253 OBJC_CALLOUT_BEGIN ();
1254 ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) stop];
1255 OBJC_CALLOUT_END ();
/* GstBaseSrc::query vfunc — delegate to the impl (fragment). */
1261 gst_avf_video_src_query (GstBaseSrc * basesrc, GstQuery * query)
1265 OBJC_CALLOUT_BEGIN ();
1266 ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) query:query];
1267 OBJC_CALLOUT_END ();
/* GstBaseSrc::decide_allocation vfunc — delegate to the impl (fragment). */
1273 gst_avf_video_src_decide_allocation (GstBaseSrc * basesrc, GstQuery * query)
1277 OBJC_CALLOUT_BEGIN ();
1278 ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) decideAllocation:query];
1279 OBJC_CALLOUT_END ();
/* GstBaseSrc::unlock vfunc — delegate to the impl (fragment). */
1285 gst_avf_video_src_unlock (GstBaseSrc * basesrc)
1289 OBJC_CALLOUT_BEGIN ();
1290 ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) unlock];
1291 OBJC_CALLOUT_END ();
/* GstBaseSrc::unlock_stop vfunc — delegate to the impl (fragment). */
1297 gst_avf_video_src_unlock_stop (GstBaseSrc * basesrc)
1301 OBJC_CALLOUT_BEGIN ();
1302 ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) unlockStop];
1303 OBJC_CALLOUT_END ();
/* GstPushSrc::create vfunc — delegate to the impl (fragment). */
1308 static GstFlowReturn
1309 gst_avf_video_src_create (GstPushSrc * pushsrc, GstBuffer ** buf)
1313 OBJC_CALLOUT_BEGIN ();
1314 ret = [GST_AVF_VIDEO_SRC_IMPL (pushsrc) create: buf];
1315 OBJC_CALLOUT_END ();
/* GstBaseSrc::negotiate — once caps are set, keep them (no renegotiation
 * support); otherwise run the default negotiation.  (Fragment: the
 * early-return line for the has-caps branch is not visible.) */
1321 gst_avf_video_src_negotiate (GstBaseSrc * basesrc)
1323 /* FIXME: We don't support reconfiguration yet */
1324 if (gst_pad_has_current_caps (GST_BASE_SRC_PAD (basesrc)))
1327 return GST_BASE_SRC_CLASS (parent_class)->negotiate (basesrc);