2 * Copyright (C) 2010 Ole André Vadla Ravnås <oleavr@soundrop.com>
4 * This library is free software; you can redistribute it and/or
5 * modify it under the terms of the GNU Library General Public
6 * License as published by the Free Software Foundation; either
7 * version 2 of the License, or (at your option) any later version.
9 * This library is distributed in the hope that it will be useful,
10 * but WITHOUT ANY WARRANTY; without even the implied warranty of
11 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12 * Library General Public License for more details.
14 * You should have received a copy of the GNU Library General Public
15 * License along with this library; if not, write to the
16 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
17 * Boston, MA 02110-1301, USA.
24 #include "avfvideosrc.h"
26 #import <AVFoundation/AVFoundation.h>
27 #include <gst/video/video.h>
28 #include "coremediabuffer.h"
30 #define DEFAULT_DEVICE_INDEX -1
31 #define DEFAULT_DO_STATS FALSE
33 #define DEVICE_FPS_N 25
34 #define DEVICE_FPS_D 1
36 #define BUFFER_QUEUE_SIZE 2
38 GST_DEBUG_CATEGORY (gst_avf_video_src_debug);
39 #define GST_CAT_DEFAULT gst_avf_video_src_debug
41 static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
44 GST_STATIC_CAPS ("video/x-raw, "
45 "format = (string) { NV12, UYVY, YUY2 }, "
46 "framerate = " GST_VIDEO_FPS_RANGE ", "
47 "width = " GST_VIDEO_SIZE_RANGE ", "
48 "height = " GST_VIDEO_SIZE_RANGE "; "
51 "format = (string) BGRA, "
52 "framerate = " GST_VIDEO_FPS_RANGE ", "
53 "width = " GST_VIDEO_SIZE_RANGE ", "
54 "height = " GST_VIDEO_SIZE_RANGE "; "
57 typedef enum _QueueState {
59 HAS_BUFFER_OR_STOP_REQUEST,
62 #define gst_avf_video_src_parent_class parent_class
63 G_DEFINE_TYPE (GstAVFVideoSrc, gst_avf_video_src, GST_TYPE_PUSH_SRC);
65 @interface GstAVFVideoSrcImpl : NSObject <AVCaptureVideoDataOutputSampleBufferDelegate> {
73 CGDirectDisplayID displayId;
76 AVCaptureSession *session;
77 AVCaptureInput *input;
78 AVCaptureVideoDataOutput *output;
79 AVCaptureDevice *device;
81 dispatch_queue_t mainQueue;
82 dispatch_queue_t workerQueue;
83 NSConditionLock *bufQueueLock;
84 NSMutableArray *bufQueue;
88 GstVideoFormat format;
90 GstClockTime duration;
93 GstClockTime lastSampling;
97 BOOL captureScreenCursor;
98 BOOL captureScreenMouseClicks;
104 - (id)initWithSrc:(GstPushSrc *)src;
107 @property int deviceIndex;
108 @property BOOL doStats;
110 @property BOOL captureScreen;
111 @property BOOL captureScreenCursor;
112 @property BOOL captureScreenMouseClicks;
114 - (BOOL)openScreenInput;
115 - (BOOL)openDeviceInput;
118 - (GstVideoFormat)getGstVideoFormat:(NSNumber *)pixel_format;
119 - (BOOL)getDeviceCaps:(GstCaps *)result;
120 - (BOOL)setDeviceCaps:(GstVideoInfo *)info;
121 - (BOOL)getSessionPresetCaps:(GstCaps *)result;
122 - (BOOL)setSessionPresetCaps:(GstVideoInfo *)info;
123 - (GstCaps *)getCaps;
124 - (BOOL)setCaps:(GstCaps *)new_caps;
129 - (BOOL)query:(GstQuery *)query;
130 - (GstStateChangeReturn)changeState:(GstStateChange)transition;
131 - (GstFlowReturn)create:(GstBuffer **)buf;
132 - (void)timestampBuffer:(GstBuffer *)buf;
133 - (void)updateStatistics;
134 - (void)captureOutput:(AVCaptureOutput *)captureOutput
135 didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
136 fromConnection:(AVCaptureConnection *)connection;
140 @implementation GstAVFVideoSrcImpl
142 @synthesize deviceIndex, doStats, fps, captureScreen,
143 captureScreenCursor, captureScreenMouseClicks;
147 return [self initWithSrc:NULL];
150 - (id)initWithSrc:(GstPushSrc *)src
152 if ((self = [super init])) {
153 element = GST_ELEMENT_CAST (src);
154 baseSrc = GST_BASE_SRC_CAST (src);
157 deviceIndex = DEFAULT_DEVICE_INDEX;
159 captureScreenCursor = NO;
160 captureScreenMouseClicks = NO;
163 displayId = kCGDirectMainDisplay;
167 dispatch_queue_create ("org.freedesktop.gstreamer.avfvideosrc.main", NULL);
169 dispatch_queue_create ("org.freedesktop.gstreamer.avfvideosrc.output", NULL);
171 gst_base_src_set_live (baseSrc, TRUE);
172 gst_base_src_set_format (baseSrc, GST_FORMAT_TIME);
180 dispatch_release (mainQueue);
182 dispatch_release (workerQueue);
188 - (BOOL)openDeviceInput
190 NSString *mediaType = AVMediaTypeVideo;
193 if (deviceIndex == -1) {
194 device = [AVCaptureDevice defaultDeviceWithMediaType:mediaType];
196 GST_ELEMENT_ERROR (element, RESOURCE, NOT_FOUND,
197 ("No video capture devices found"), (NULL));
201 NSArray *devices = [AVCaptureDevice devicesWithMediaType:mediaType];
202 if (deviceIndex >= [devices count]) {
203 GST_ELEMENT_ERROR (element, RESOURCE, NOT_FOUND,
204 ("Invalid video capture device index"), (NULL));
207 device = [devices objectAtIndex:deviceIndex];
209 g_assert (device != nil);
212 GST_INFO ("Opening '%s'", [[device localizedName] UTF8String]);
214 input = [AVCaptureDeviceInput deviceInputWithDevice:device
217 GST_ELEMENT_ERROR (element, RESOURCE, BUSY,
218 ("Failed to open device: %s",
219 [[err localizedDescription] UTF8String]),
/* Open an AVCaptureScreenInput for the configured display and store it in
 * the `input` ivar.  Cursor capture is set through KVC because the
 * `capturesCursor` key only exists on OS X >= 10.8; an
 * NSUndefinedKeyException there is expected on older systems and merely
 * downgraded to a warning.  Returns YES on success.
 * NOTE(review): reconstructed from a partial view — confirm the HAVE_IOS
 * guard matches the build system's macro. */
- (BOOL)openScreenInput
{
#if HAVE_IOS
  return NO;
#else
  GST_DEBUG_OBJECT (element, "Opening screen input");

  AVCaptureScreenInput *screenInput =
      [[AVCaptureScreenInput alloc] initWithDisplayID:displayId];

  @try {
    [screenInput setValue:[NSNumber numberWithBool:captureScreenCursor]
                   forKey:@"capturesCursor"];
  } @catch (NSException *exception) {
    if (![[exception name] isEqualToString:NSUndefinedKeyException]) {
      /* typo fixed: "occured" -> "occurred" */
      GST_WARNING ("An unexpected error occurred: %s",
          [[exception reason] UTF8String]);
      return NO;
    }
    GST_WARNING ("Capturing cursor is only supported in OS X >= 10.8");
  }

  screenInput.capturesMouseClicks = captureScreenMouseClicks;
  input = screenInput;
  return YES;
#endif
}
260 BOOL success = NO, *successPtr = &success;
262 GST_DEBUG_OBJECT (element, "Opening device");
264 dispatch_sync (mainQueue, ^{
268 ret = [self openScreenInput];
270 ret = [self openDeviceInput];
275 output = [[AVCaptureVideoDataOutput alloc] init];
276 [output setSampleBufferDelegate:self
278 output.alwaysDiscardsLateVideoFrames = YES;
279 output.videoSettings = nil; /* device native format */
281 session = [[AVCaptureSession alloc] init];
282 [session addInput:input];
283 [session addOutput:output];
288 GST_DEBUG_OBJECT (element, "Opening device %s", success ? "succeed" : "failed");
295 GST_DEBUG_OBJECT (element, "Closing device");
297 dispatch_sync (mainQueue, ^{
298 g_assert (![session isRunning]);
300 [session removeInput:input];
301 [session removeOutput:output];
312 if (!captureScreen) {
318 gst_caps_unref (caps);
322 #define GST_AVF_CAPS_NEW(format, w, h, fps_n, fps_d) \
323 (gst_caps_new_simple ("video/x-raw", \
324 "width", G_TYPE_INT, w, \
325 "height", G_TYPE_INT, h, \
326 "format", G_TYPE_STRING, gst_video_format_to_string (format), \
327 "framerate", GST_TYPE_FRACTION, (fps_n), (fps_d), \
/* Map a CoreVideo pixel-format FourCC (boxed in an NSNumber, as returned by
 * -[AVCaptureVideoDataOutput availableVideoCVPixelFormatTypes]) to the
 * matching GstVideoFormat.
 *
 * Returns GST_VIDEO_FORMAT_UNKNOWN for any format not handled by this
 * element, after logging it at LOG level so unsupported devices can be
 * diagnosed.  Each case ends in `break` — the visible fragment was missing
 * them, which would have made every case fall through. */
- (GstVideoFormat)getGstVideoFormat:(NSNumber *)pixel_format
{
  GstVideoFormat gst_format = GST_VIDEO_FORMAT_UNKNOWN;

  switch ([pixel_format integerValue]) {
    case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange: /* 420v */
      gst_format = GST_VIDEO_FORMAT_NV12;
      break;
    case kCVPixelFormatType_422YpCbCr8: /* 2vuy */
      gst_format = GST_VIDEO_FORMAT_UYVY;
      break;
    case kCVPixelFormatType_32BGRA: /* BGRA */
      gst_format = GST_VIDEO_FORMAT_BGRA;
      break;
    case kCVPixelFormatType_422YpCbCr8_yuvs: /* yuvs */
      gst_format = GST_VIDEO_FORMAT_YUY2;
      break;
    default:
      GST_LOG_OBJECT (element, "Pixel format %s is not handled by avfvideosrc",
          [[pixel_format stringValue] UTF8String]);
      break;
  }

  return gst_format;
}
/* Append one caps structure to @result for every combination of
 * (device format x supported frame-rate range x supported pixel format)
 * the capture device reports.  Always returns YES; an empty device list
 * simply appends nothing.
 *
 * Fix: the visible fragment used fps_n/fps_d/max_fps without declaring
 * them — declarations restored in the rate loop where they are written. */
- (BOOL)getDeviceCaps:(GstCaps *)result
{
  NSArray *formats = [device valueForKey:@"formats"];
  NSArray *pixel_formats = output.availableVideoCVPixelFormatTypes;

  GST_DEBUG_OBJECT (element, "Getting device caps");

  /* Do not use AVCaptureDeviceFormat or AVFrameRateRange only
   * available in iOS >= 7.0. We use a dynamic approach with key-value
   * coding or performSelector */
  for (NSObject *f in [formats reverseObjectEnumerator]) {
    CMFormatDescriptionRef formatDescription;
    CMVideoDimensions dimensions;

    /* formatDescription can't be retrieved with valueForKey so use a selector here */
    formatDescription = (CMFormatDescriptionRef) [f performSelector:@selector(formatDescription)];
    dimensions = CMVideoFormatDescriptionGetDimensions (formatDescription);
    for (NSObject *rate in [f valueForKey:@"videoSupportedFrameRateRanges"]) {
      int fps_n, fps_d;
      gdouble max_fps;

      /* maxFrameRate is a double boxed in an NSNumber/NSValue */
      [[rate valueForKey:@"maxFrameRate"] getValue:&max_fps];
      gst_util_double_to_fraction (max_fps, &fps_n, &fps_d);

      for (NSNumber *pixel_format in pixel_formats) {
        GstVideoFormat gst_format = [self getGstVideoFormat:pixel_format];
        if (gst_format != GST_VIDEO_FORMAT_UNKNOWN)
          gst_caps_append (result, GST_AVF_CAPS_NEW (gst_format,
              dimensions.width, dimensions.height, fps_n, fps_d));
      }
    }
  }
  GST_LOG_OBJECT (element, "Device returned the following caps %" GST_PTR_FORMAT, result);
  return YES;
}
/* Configure the capture device's activeFormat and frame durations to match
 * the negotiated GstVideoInfo, using KVC so the code still compiles against
 * pre-iOS-7 SDKs.  The device stays locked for configuration on success —
 * setCaps unlocks it only after the session has started, so the session
 * cannot reset the format.  Returns NO if the dimensions or framerate are
 * unsupported or the device cannot be locked.
 *
 * BUG FIX: the framerate comparison used the integer abs() on a double
 * difference; the implicit double->int conversion truncates toward zero, so
 * any framerate within ~1 fps of a range's maximum compared as "equal"
 * (e.g. 29.97 matched a 30 fps range).  GLib's type-generic ABS() keeps the
 * comparison in floating point. */
- (BOOL)setDeviceCaps:(GstVideoInfo *)info
{
  double framerate;
  gboolean found_format = FALSE, found_framerate = FALSE;
  NSArray *formats = [device valueForKey:@"formats"];
  gst_util_fraction_to_double (info->fps_n, info->fps_d, &framerate);

  GST_DEBUG_OBJECT (element, "Setting device caps");

  if ([device lockForConfiguration:NULL] == YES) {
    for (NSObject *f in formats) {
      CMFormatDescriptionRef formatDescription;
      CMVideoDimensions dimensions;

      formatDescription = (CMFormatDescriptionRef) [f performSelector:@selector(formatDescription)];
      dimensions = CMVideoFormatDescriptionGetDimensions (formatDescription);
      if (dimensions.width == info->width && dimensions.height == info->height) {
        found_format = TRUE;
        [device setValue:f forKey:@"activeFormat"];
        for (NSObject *rate in [f valueForKey:@"videoSupportedFrameRateRanges"]) {
          gdouble max_frame_rate;

          [[rate valueForKey:@"maxFrameRate"] getValue:&max_frame_rate];
          /* was: abs (...) — integer abs truncated the double difference */
          if (ABS (framerate - max_frame_rate) < 0.00001) {
            NSValue *min_frame_duration, *max_frame_duration;

            found_framerate = TRUE;
            min_frame_duration = [rate valueForKey:@"minFrameDuration"];
            max_frame_duration = [rate valueForKey:@"maxFrameDuration"];
            [device setValue:min_frame_duration forKey:@"activeVideoMinFrameDuration"];
            @try {
              /* Only available on OSX >= 10.8 and iOS >= 7.0 */
              [device setValue:max_frame_duration forKey:@"activeVideoMaxFrameDuration"];
            } @catch (NSException *exception) {
              if (![[exception name] isEqualToString:NSUndefinedKeyException]) {
                /* typos fixed: "unexcepted ... occured" */
                GST_WARNING ("An unexpected error occurred: %s",
                    [exception.reason UTF8String]);
                return NO;
              }
            }
            break;
          }
        }
      }
    }
    if (!found_format) {
      GST_WARNING ("Unsupported capture dimensions %dx%d", info->width, info->height);
      return NO;
    }
    if (!found_framerate) {
      GST_WARNING ("Unsupported capture framerate %d/%d", info->fps_n, info->fps_d);
      return NO;
    }
  } else {
    GST_WARNING ("Couldn't lock device for configuration");
    return NO;
  }
  return YES;
}
/* Fallback caps enumeration for systems without the `formats` key
 * (iOS < 7.0): append one caps structure per supported session preset for
 * each pixel format the output supports.  Frame rate is fixed at
 * DEVICE_FPS_N/DEVICE_FPS_D since presets don't expose rate ranges.
 * Always returns YES.
 *
 * Fix: the UNKNOWN-format guard had no `continue` in the visible fragment,
 * so unknown pixel formats would still have had caps appended. */
- (BOOL)getSessionPresetCaps:(GstCaps *)result
{
  NSArray *pixel_formats = output.availableVideoCVPixelFormatTypes;
  for (NSNumber *pixel_format in pixel_formats) {
    GstVideoFormat gst_format = [self getGstVideoFormat:pixel_format];
    if (gst_format == GST_VIDEO_FORMAT_UNKNOWN)
      continue;

    if ([session canSetSessionPreset:AVCaptureSessionPreset1920x1080])
      gst_caps_append (result, GST_AVF_CAPS_NEW (gst_format, 1920, 1080, DEVICE_FPS_N, DEVICE_FPS_D));
    if ([session canSetSessionPreset:AVCaptureSessionPreset1280x720])
      gst_caps_append (result, GST_AVF_CAPS_NEW (gst_format, 1280, 720, DEVICE_FPS_N, DEVICE_FPS_D));
    if ([session canSetSessionPreset:AVCaptureSessionPreset640x480])
      gst_caps_append (result, GST_AVF_CAPS_NEW (gst_format, 640, 480, DEVICE_FPS_N, DEVICE_FPS_D));
    if ([session canSetSessionPreset:AVCaptureSessionPresetMedium])
      gst_caps_append (result, GST_AVF_CAPS_NEW (gst_format, 480, 360, DEVICE_FPS_N, DEVICE_FPS_D));
    if ([session canSetSessionPreset:AVCaptureSessionPreset352x288])
      gst_caps_append (result, GST_AVF_CAPS_NEW (gst_format, 352, 288, DEVICE_FPS_N, DEVICE_FPS_D));
    if ([session canSetSessionPreset:AVCaptureSessionPresetLow])
      gst_caps_append (result, GST_AVF_CAPS_NEW (gst_format, 192, 144, DEVICE_FPS_N, DEVICE_FPS_D));
  }

  GST_LOG_OBJECT (element, "Session presets returned the following caps %" GST_PTR_FORMAT, result);

  return YES;
}
/* Fallback configuration path for iOS < 7.0: pick the AVCaptureSessionPreset
 * matching the negotiated width.  The device is left locked for
 * configuration on success (unlocked by setCaps after the session starts).
 * Returns NO for unsupported dimensions or if the lock fails.
 *
 * Fixes: stray ';' after the method signature removed; debug-message typo
 * "presset" -> "preset"; case widths/breaks restored to mirror the preset
 * sizes used by getSessionPresetCaps. */
- (BOOL)setSessionPresetCaps:(GstVideoInfo *)info
{
  GST_DEBUG_OBJECT (element, "Setting session preset caps");

  if ([device lockForConfiguration:NULL] != YES) {
    GST_WARNING ("Couldn't lock device for configuration");
    return NO;
  }

  switch (info->width) {
    case 192:
      session.sessionPreset = AVCaptureSessionPresetLow;
      break;
    case 352:
      session.sessionPreset = AVCaptureSessionPreset352x288;
      break;
    case 480:
      session.sessionPreset = AVCaptureSessionPresetMedium;
      break;
    case 640:
      session.sessionPreset = AVCaptureSessionPreset640x480;
      break;
    case 1280:
      session.sessionPreset = AVCaptureSessionPreset1280x720;
      break;
    case 1920:
      session.sessionPreset = AVCaptureSessionPreset1920x1080;
      break;
    default:
      GST_WARNING ("Unsupported capture dimensions %dx%d", info->width, info->height);
      return NO;
  }
  return YES;
}
519 NSArray *pixel_formats;
522 return NULL; /* BaseSrc will return template caps */
524 result = gst_caps_new_empty ();
525 pixel_formats = output.availableVideoCVPixelFormatTypes;
529 CGRect rect = CGDisplayBounds (displayId);
530 for (NSNumber *pixel_format in pixel_formats) {
531 GstVideoFormat gst_format = [self getGstVideoFormat:pixel_format];
532 if (gst_format != GST_VIDEO_FORMAT_UNKNOWN)
533 gst_caps_append (result, gst_caps_new_simple ("video/x-raw",
534 "width", G_TYPE_INT, (int)rect.size.width,
535 "height", G_TYPE_INT, (int)rect.size.height,
536 "format", G_TYPE_STRING, gst_video_format_to_string (gst_format),
540 GST_WARNING ("Screen capture is not supported by iOS");
547 [self getDeviceCaps:result];
549 } @catch (NSException *exception) {
551 if (![[exception name] isEqualToString:NSUndefinedKeyException]) {
552 GST_WARNING ("An unexcepted error occured: %s", [exception.reason UTF8String]);
556 /* Fallback on session presets API for iOS < 7.0 */
557 [self getSessionPresetCaps:result];
563 - (BOOL)setCaps:(GstCaps *)new_caps
566 BOOL success = YES, *successPtr = &success;
568 gst_video_info_init (&info);
569 gst_video_info_from_caps (&info, new_caps);
572 height = info.height;
573 format = info.finfo->format;
575 dispatch_sync (mainQueue, ^{
578 g_assert (![session isRunning]);
582 AVCaptureScreenInput *screenInput = (AVCaptureScreenInput *)input;
583 screenInput.minFrameDuration = CMTimeMake(info.fps_d, info.fps_n);
585 GST_WARNING ("Screen capture is not supported by iOS");
592 /* formats and activeFormat keys are only available on OSX >= 10.7 and iOS >= 7.0 */
593 *successPtr = [self setDeviceCaps:(GstVideoInfo *)&info];
594 if (*successPtr != YES)
597 } @catch (NSException *exception) {
599 if (![[exception name] isEqualToString:NSUndefinedKeyException]) {
600 GST_WARNING ("An unexcepted error occured: %s", [exception.reason UTF8String]);
605 /* Fallback on session presets API for iOS < 7.0 */
606 *successPtr = [self setSessionPresetCaps:(GstVideoInfo *)&info];
607 if (*successPtr != YES)
613 case GST_VIDEO_FORMAT_NV12:
614 newformat = kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange;
616 case GST_VIDEO_FORMAT_UYVY:
617 newformat = kCVPixelFormatType_422YpCbCr8;
619 case GST_VIDEO_FORMAT_YUY2:
620 newformat = kCVPixelFormatType_422YpCbCr8_yuvs;
622 case GST_VIDEO_FORMAT_BGRA:
623 newformat = kCVPixelFormatType_32BGRA;
627 GST_WARNING ("Unsupported output format %s",
628 gst_video_format_to_string (format));
632 GST_DEBUG_OBJECT(element,
633 "Width: %d Height: %d Format: %" GST_FOURCC_FORMAT,
635 GST_FOURCC_ARGS (gst_video_format_to_fourcc (format)));
637 output.videoSettings = [NSDictionary
638 dictionaryWithObject:[NSNumber numberWithInt:newformat]
639 forKey:(NSString*)kCVPixelBufferPixelFormatTypeKey];
641 caps = gst_caps_copy (new_caps);
642 [session startRunning];
644 /* Unlock device configuration only after session is started so the session
645 * won't reset the capture formats */
646 [device unlockForConfiguration];
654 bufQueueLock = [[NSConditionLock alloc] initWithCondition:NO_BUFFERS];
655 bufQueue = [[NSMutableArray alloc] initWithCapacity:BUFFER_QUEUE_SIZE];
658 duration = gst_util_uint64_scale (GST_SECOND, DEVICE_FPS_D, DEVICE_FPS_N);
661 lastSampling = GST_CLOCK_TIME_NONE;
670 dispatch_sync (mainQueue, ^{ [session stopRunning]; });
671 dispatch_sync (workerQueue, ^{});
673 [bufQueueLock release];
681 - (BOOL)query:(GstQuery *)query
685 if (GST_QUERY_TYPE (query) == GST_QUERY_LATENCY) {
687 GstClockTime min_latency, max_latency;
689 min_latency = max_latency = duration; /* for now */
692 GST_DEBUG_OBJECT (element, "reporting latency of min %" GST_TIME_FORMAT
693 " max %" GST_TIME_FORMAT,
694 GST_TIME_ARGS (min_latency), GST_TIME_ARGS (max_latency));
695 gst_query_set_latency (query, TRUE, min_latency, max_latency);
698 result = GST_BASE_SRC_CLASS (parent_class)->query (baseSrc, query);
704 - (BOOL)decideAllocation:(GstQuery *)query
706 useVideoMeta = gst_query_find_allocation_meta (query,
707 GST_VIDEO_META_API_TYPE, NULL);
716 [bufQueueLock unlockWithCondition:HAS_BUFFER_OR_STOP_REQUEST];
725 [bufQueueLock unlock];
730 - (GstStateChangeReturn)changeState:(GstStateChange)transition
732 GstStateChangeReturn ret;
734 if (transition == GST_STATE_CHANGE_NULL_TO_READY) {
735 if (![self openDevice])
736 return GST_STATE_CHANGE_FAILURE;
739 ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
741 if (transition == GST_STATE_CHANGE_READY_TO_NULL)
747 - (void)captureOutput:(AVCaptureOutput *)captureOutput
748 didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
749 fromConnection:(AVCaptureConnection *)connection
754 [bufQueueLock unlock];
758 if ([bufQueue count] == BUFFER_QUEUE_SIZE)
759 [bufQueue removeLastObject];
761 [bufQueue insertObject:(id)sampleBuffer
764 [bufQueueLock unlockWithCondition:HAS_BUFFER_OR_STOP_REQUEST];
767 - (GstFlowReturn)create:(GstBuffer **)buf
769 CMSampleBufferRef sbuf;
770 CVImageBufferRef image_buf;
771 CVPixelBufferRef pixel_buf;
772 size_t cur_width, cur_height;
774 [bufQueueLock lockWhenCondition:HAS_BUFFER_OR_STOP_REQUEST];
776 [bufQueueLock unlock];
777 return GST_FLOW_FLUSHING;
780 sbuf = (CMSampleBufferRef) [bufQueue lastObject];
782 [bufQueue removeLastObject];
783 [bufQueueLock unlockWithCondition:
784 ([bufQueue count] == 0) ? NO_BUFFERS : HAS_BUFFER_OR_STOP_REQUEST];
786 /* Check output frame size dimensions */
787 image_buf = CMSampleBufferGetImageBuffer (sbuf);
789 pixel_buf = (CVPixelBufferRef) image_buf;
790 cur_width = CVPixelBufferGetWidth (pixel_buf);
791 cur_height = CVPixelBufferGetHeight (pixel_buf);
793 if (width != cur_width || height != cur_height) {
794 /* Set new caps according to current frame dimensions */
795 GST_WARNING ("Output frame size has changed %dx%d -> %dx%d, updating caps",
796 width, height, (int)cur_width, (int)cur_height);
799 gst_caps_set_simple (caps,
800 "width", G_TYPE_INT, width,
801 "height", G_TYPE_INT, height,
803 gst_pad_push_event (GST_BASE_SINK_PAD (baseSrc), gst_event_new_caps (caps));
807 *buf = gst_core_media_buffer_new (sbuf, useVideoMeta);
810 [self timestampBuffer:*buf];
813 [self updateStatistics];
818 - (void)timestampBuffer:(GstBuffer *)buf
821 GstClockTime timestamp;
823 GST_OBJECT_LOCK (element);
824 clock = GST_ELEMENT_CLOCK (element);
826 gst_object_ref (clock);
827 timestamp = element->base_time;
829 timestamp = GST_CLOCK_TIME_NONE;
831 GST_OBJECT_UNLOCK (element);
834 timestamp = gst_clock_get_time (clock) - timestamp;
835 if (timestamp > duration)
836 timestamp -= duration;
840 gst_object_unref (clock);
844 GST_BUFFER_OFFSET (buf) = offset++;
845 GST_BUFFER_OFFSET_END (buf) = GST_BUFFER_OFFSET (buf) + 1;
846 GST_BUFFER_TIMESTAMP (buf) = timestamp;
847 GST_BUFFER_DURATION (buf) = duration;
850 - (void)updateStatistics
854 GST_OBJECT_LOCK (element);
855 clock = GST_ELEMENT_CLOCK (element);
857 gst_object_ref (clock);
858 GST_OBJECT_UNLOCK (element);
861 GstClockTime now = gst_clock_get_time (clock);
862 gst_object_unref (clock);
866 if (GST_CLOCK_TIME_IS_VALID (lastSampling)) {
867 if (now - lastSampling >= GST_SECOND) {
868 GST_OBJECT_LOCK (element);
870 GST_OBJECT_UNLOCK (element);
872 g_object_notify (G_OBJECT (element), "fps");
897 PROP_CAPTURE_SCREEN_CURSOR,
898 PROP_CAPTURE_SCREEN_MOUSE_CLICKS,
903 static void gst_avf_video_src_finalize (GObject * obj);
904 static void gst_avf_video_src_get_property (GObject * object, guint prop_id,
905 GValue * value, GParamSpec * pspec);
906 static void gst_avf_video_src_set_property (GObject * object, guint prop_id,
907 const GValue * value, GParamSpec * pspec);
908 static GstStateChangeReturn gst_avf_video_src_change_state (
909 GstElement * element, GstStateChange transition);
910 static GstCaps * gst_avf_video_src_get_caps (GstBaseSrc * basesrc,
912 static gboolean gst_avf_video_src_set_caps (GstBaseSrc * basesrc,
914 static gboolean gst_avf_video_src_start (GstBaseSrc * basesrc);
915 static gboolean gst_avf_video_src_stop (GstBaseSrc * basesrc);
916 static gboolean gst_avf_video_src_query (GstBaseSrc * basesrc,
918 static gboolean gst_avf_video_src_decide_allocation (GstBaseSrc * basesrc,
920 static gboolean gst_avf_video_src_unlock (GstBaseSrc * basesrc);
921 static gboolean gst_avf_video_src_unlock_stop (GstBaseSrc * basesrc);
922 static GstFlowReturn gst_avf_video_src_create (GstPushSrc * pushsrc,
927 gst_avf_video_src_class_init (GstAVFVideoSrcClass * klass)
929 GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
930 GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
931 GstBaseSrcClass *gstbasesrc_class = GST_BASE_SRC_CLASS (klass);
932 GstPushSrcClass *gstpushsrc_class = GST_PUSH_SRC_CLASS (klass);
934 gobject_class->finalize = gst_avf_video_src_finalize;
935 gobject_class->get_property = gst_avf_video_src_get_property;
936 gobject_class->set_property = gst_avf_video_src_set_property;
938 gstelement_class->change_state = gst_avf_video_src_change_state;
940 gstbasesrc_class->get_caps = gst_avf_video_src_get_caps;
941 gstbasesrc_class->set_caps = gst_avf_video_src_set_caps;
942 gstbasesrc_class->start = gst_avf_video_src_start;
943 gstbasesrc_class->stop = gst_avf_video_src_stop;
944 gstbasesrc_class->query = gst_avf_video_src_query;
945 gstbasesrc_class->unlock = gst_avf_video_src_unlock;
946 gstbasesrc_class->unlock_stop = gst_avf_video_src_unlock_stop;
947 gstbasesrc_class->decide_allocation = gst_avf_video_src_decide_allocation;
949 gstpushsrc_class->create = gst_avf_video_src_create;
951 gst_element_class_set_metadata (gstelement_class,
952 "Video Source (AVFoundation)", "Source/Video",
953 "Reads frames from an iOS AVFoundation device",
954 "Ole André Vadla Ravnås <oleavr@soundrop.com>");
956 gst_element_class_add_pad_template (gstelement_class,
957 gst_static_pad_template_get (&src_template));
959 g_object_class_install_property (gobject_class, PROP_DEVICE_INDEX,
960 g_param_spec_int ("device-index", "Device Index",
961 "The zero-based device index",
962 -1, G_MAXINT, DEFAULT_DEVICE_INDEX,
963 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
964 g_object_class_install_property (gobject_class, PROP_DO_STATS,
965 g_param_spec_boolean ("do-stats", "Enable statistics",
966 "Enable logging of statistics", DEFAULT_DO_STATS,
967 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
968 g_object_class_install_property (gobject_class, PROP_FPS,
969 g_param_spec_int ("fps", "Frames per second",
970 "Last measured framerate, if statistics are enabled",
971 -1, G_MAXINT, -1, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
973 g_object_class_install_property (gobject_class, PROP_CAPTURE_SCREEN,
974 g_param_spec_boolean ("capture-screen", "Enable screen capture",
975 "Enable screen capture functionality", FALSE,
976 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
977 g_object_class_install_property (gobject_class, PROP_CAPTURE_SCREEN_CURSOR,
978 g_param_spec_boolean ("capture-screen-cursor", "Capture screen cursor",
979 "Enable cursor capture while capturing screen", FALSE,
980 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
981 g_object_class_install_property (gobject_class, PROP_CAPTURE_SCREEN_MOUSE_CLICKS,
982 g_param_spec_boolean ("capture-screen-mouse-clicks", "Enable mouse clicks capture",
983 "Enable mouse clicks capture while capturing screen", FALSE,
984 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
987 GST_DEBUG_CATEGORY_INIT (gst_avf_video_src_debug, "avfvideosrc",
988 0, "iOS AVFoundation video source");
991 #define OBJC_CALLOUT_BEGIN() \
992 NSAutoreleasePool *pool; \
994 pool = [[NSAutoreleasePool alloc] init]
995 #define OBJC_CALLOUT_END() \
1000 gst_avf_video_src_init (GstAVFVideoSrc * src)
1002 OBJC_CALLOUT_BEGIN ();
1003 src->impl = [[GstAVFVideoSrcImpl alloc] initWithSrc:GST_PUSH_SRC (src)];
1004 OBJC_CALLOUT_END ();
1008 gst_avf_video_src_finalize (GObject * obj)
1010 OBJC_CALLOUT_BEGIN ();
1011 [GST_AVF_VIDEO_SRC_IMPL (obj) release];
1012 OBJC_CALLOUT_END ();
1014 G_OBJECT_CLASS (parent_class)->finalize (obj);
1018 gst_avf_video_src_get_property (GObject * object, guint prop_id, GValue * value,
1021 GstAVFVideoSrcImpl *impl = GST_AVF_VIDEO_SRC_IMPL (object);
1025 case PROP_CAPTURE_SCREEN:
1026 g_value_set_boolean (value, impl.captureScreen);
1028 case PROP_CAPTURE_SCREEN_CURSOR:
1029 g_value_set_boolean (value, impl.captureScreenCursor);
1031 case PROP_CAPTURE_SCREEN_MOUSE_CLICKS:
1032 g_value_set_boolean (value, impl.captureScreenMouseClicks);
1035 case PROP_DEVICE_INDEX:
1036 g_value_set_int (value, impl.deviceIndex);
1039 g_value_set_boolean (value, impl.doStats);
1042 GST_OBJECT_LOCK (object);
1043 g_value_set_int (value, impl.fps);
1044 GST_OBJECT_UNLOCK (object);
1047 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
1053 gst_avf_video_src_set_property (GObject * object, guint prop_id,
1054 const GValue * value, GParamSpec * pspec)
1056 GstAVFVideoSrcImpl *impl = GST_AVF_VIDEO_SRC_IMPL (object);
1060 case PROP_CAPTURE_SCREEN:
1061 impl.captureScreen = g_value_get_boolean (value);
1063 case PROP_CAPTURE_SCREEN_CURSOR:
1064 impl.captureScreenCursor = g_value_get_boolean (value);
1066 case PROP_CAPTURE_SCREEN_MOUSE_CLICKS:
1067 impl.captureScreenMouseClicks = g_value_get_boolean (value);
1070 case PROP_DEVICE_INDEX:
1071 impl.deviceIndex = g_value_get_int (value);
1074 impl.doStats = g_value_get_boolean (value);
1077 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
1082 static GstStateChangeReturn
1083 gst_avf_video_src_change_state (GstElement * element, GstStateChange transition)
1085 GstStateChangeReturn ret;
1087 OBJC_CALLOUT_BEGIN ();
1088 ret = [GST_AVF_VIDEO_SRC_IMPL (element) changeState: transition];
1089 OBJC_CALLOUT_END ();
1095 gst_avf_video_src_get_caps (GstBaseSrc * basesrc, GstCaps * filter)
1099 OBJC_CALLOUT_BEGIN ();
1100 ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) getCaps];
1101 OBJC_CALLOUT_END ();
1107 gst_avf_video_src_set_caps (GstBaseSrc * basesrc, GstCaps * caps)
1111 OBJC_CALLOUT_BEGIN ();
1112 ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) setCaps:caps];
1113 OBJC_CALLOUT_END ();
1119 gst_avf_video_src_start (GstBaseSrc * basesrc)
1123 OBJC_CALLOUT_BEGIN ();
1124 ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) start];
1125 OBJC_CALLOUT_END ();
1131 gst_avf_video_src_stop (GstBaseSrc * basesrc)
1135 OBJC_CALLOUT_BEGIN ();
1136 ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) stop];
1137 OBJC_CALLOUT_END ();
1143 gst_avf_video_src_query (GstBaseSrc * basesrc, GstQuery * query)
1147 OBJC_CALLOUT_BEGIN ();
1148 ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) query:query];
1149 OBJC_CALLOUT_END ();
1155 gst_avf_video_src_decide_allocation (GstBaseSrc * basesrc, GstQuery * query)
1159 OBJC_CALLOUT_BEGIN ();
1160 ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) decideAllocation:query];
1161 OBJC_CALLOUT_END ();
1167 gst_avf_video_src_unlock (GstBaseSrc * basesrc)
1171 OBJC_CALLOUT_BEGIN ();
1172 ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) unlock];
1173 OBJC_CALLOUT_END ();
1179 gst_avf_video_src_unlock_stop (GstBaseSrc * basesrc)
1183 OBJC_CALLOUT_BEGIN ();
1184 ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) unlockStop];
1185 OBJC_CALLOUT_END ();
1190 static GstFlowReturn
1191 gst_avf_video_src_create (GstPushSrc * pushsrc, GstBuffer ** buf)
1195 OBJC_CALLOUT_BEGIN ();
1196 ret = [GST_AVF_VIDEO_SRC_IMPL (pushsrc) create: buf];
1197 OBJC_CALLOUT_END ();