/*
 * Copyright (C) 2010 Ole André Vadla Ravnås <oleavr@soundrop.com>
 * Copyright (C) 2016 Alessandro Decina <twi@centricular.com>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public
 * License along with this library; if not, write to the
 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
 * Boston, MA 02110-1301, USA.
 */
25 #include "avfvideosrc.h"
26 #include "glcontexthelper.h"
28 #import <AVFoundation/AVFoundation.h>
30 #import <AppKit/AppKit.h>
32 #include <gst/video/video.h>
33 #include <gst/gl/gstglcontext.h>
34 #include "coremediabuffer.h"
35 #include "videotexturecache-gl.h"
/* Default values of the element's configurable properties. */
#define DEFAULT_DEVICE_INDEX  -1
#define DEFAULT_POSITION      GST_AVF_VIDEO_SOURCE_POSITION_DEFAULT
#define DEFAULT_ORIENTATION   GST_AVF_VIDEO_SOURCE_ORIENTATION_DEFAULT
#define DEFAULT_DEVICE_TYPE   GST_AVF_VIDEO_SOURCE_DEVICE_TYPE_DEFAULT
#define DEFAULT_DO_STATS      FALSE

/* Nominal framerate advertised for the session-preset caps below. */
#define DEVICE_FPS_N          25
#define DEVICE_FPS_D          1

/* Maximum number of captured sample buffers held between the AVFoundation
 * callback and create: — when full, the oldest buffer is dropped. */
#define BUFFER_QUEUE_SIZE     2

GST_DEBUG_CATEGORY (gst_avf_video_src_debug);
#define GST_CAT_DEFAULT gst_avf_video_src_debug

/* Helpers shared with the device provider (defined later in this file). */
static GstVideoFormat get_gst_video_format (NSNumber * pixel_format);
static CMVideoDimensions
get_oriented_dimensions (GstAVFVideoSourceOrientation orientation,
    CMVideoDimensions dimensions);
/* Source pad template: GLMemory (rectangle and 2D texture targets) plus
 * system-memory raw video in the formats the capture pipeline can emit.
 * NOTE(review): this view of the file is decimated (the embedded original
 * line numbers skip), so several macro-argument lines of the template are
 * not visible here — do not assume the caps string below is complete. */
55 static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
60 GST_VIDEO_CAPS_MAKE_WITH_FEATURES
61 (GST_CAPS_FEATURE_MEMORY_GL_MEMORY,
63 "texture-target = " GST_GL_TEXTURE_TARGET_RECTANGLE_STR ";"
65 GST_VIDEO_CAPS_MAKE_WITH_FEATURES
66 (GST_CAPS_FEATURE_MEMORY_GL_MEMORY,
68 "texture-target = " GST_GL_TEXTURE_TARGET_2D_STR "; "
71 "format = (string) { NV12, UYVY, YUY2 }, "
72 "framerate = " GST_VIDEO_FPS_RANGE ", "
73 "width = " GST_VIDEO_SIZE_RANGE ", "
74 "height = " GST_VIDEO_SIZE_RANGE "; "
77 "format = (string) BGRA, "
78 "framerate = " GST_VIDEO_FPS_RANGE ", "
79 "width = " GST_VIDEO_SIZE_RANGE ", "
80 "height = " GST_VIDEO_SIZE_RANGE "; "
/* Condition values for bufQueueLock: the buffer queue is either empty,
 * or it holds at least one sample buffer / a stop has been requested. */
typedef enum _QueueState {
  NO_BUFFERS = 1,
  HAS_BUFFER_OR_STOP_REQUEST,
} QueueState;

#define gst_avf_video_src_parent_class parent_class
G_DEFINE_TYPE (GstAVFVideoSrc, gst_avf_video_src, GST_TYPE_PUSH_SRC);
#define GST_TYPE_AVF_VIDEO_SOURCE_POSITION (gst_avf_video_source_position_get_type ())
/* Registers (on first call) and returns the GEnum type backing the
 * "position" property (front / back / default camera). */
static GType
gst_avf_video_source_position_get_type (void)
{
  static GType avf_video_source_position_type = 0;

  if (!avf_video_source_position_type) {
    static GEnumValue position_types[] = {
      { GST_AVF_VIDEO_SOURCE_POSITION_FRONT, "Front-facing camera", "front" },
      { GST_AVF_VIDEO_SOURCE_POSITION_BACK, "Back-facing camera", "back" },
      { GST_AVF_VIDEO_SOURCE_POSITION_DEFAULT, "Default", "default" },
      { 0, NULL, NULL },
    };

    avf_video_source_position_type =
        g_enum_register_static ("GstAVFVideoSourcePosition", position_types);
  }
  return avf_video_source_position_type;
}
#define GST_TYPE_AVF_VIDEO_SOURCE_ORIENTATION (gst_avf_video_source_orientation_get_type ())
/* Registers (on first call) and returns the GEnum type backing the
 * "orientation" property.  The misspelled nick "portrat-upside-down" is
 * deliberately left as-is: enum nicks are public API (gst-launch lines,
 * serialized pipelines) and renaming it would break existing users. */
static GType
gst_avf_video_source_orientation_get_type (void)
{
  static GType avf_video_source_orientation_type = 0;

  if (!avf_video_source_orientation_type) {
    static GEnumValue orientation_types[] = {
      { GST_AVF_VIDEO_SOURCE_ORIENTATION_PORTRAIT, "Indicates that video should be oriented vertically, top at the top.", "portrait" },
      { GST_AVF_VIDEO_SOURCE_ORIENTATION_PORTRAIT_UPSIDE_DOWN, "Indicates that video should be oriented vertically, top at the bottom.", "portrat-upside-down" },
      { GST_AVF_VIDEO_SOURCE_ORIENTATION_LANDSCAPE_RIGHT, "Indicates that video should be oriented horizontally, top on the left.", "landscape-right" },
      { GST_AVF_VIDEO_SOURCE_ORIENTATION_LANDSCAPE_LEFT, "Indicates that video should be oriented horizontally, top on the right.", "landscape-left" },
      { GST_AVF_VIDEO_SOURCE_ORIENTATION_DEFAULT, "Default", "default" },
      { 0, NULL, NULL },
    };

    avf_video_source_orientation_type =
        g_enum_register_static ("GstAVFVideoSourceOrientation",
            orientation_types);
  }
  return avf_video_source_orientation_type;
}
#define GST_TYPE_AVF_VIDEO_SOURCE_DEVICE_TYPE (gst_avf_video_source_device_type_get_type ())
/* Registers (on first call) and returns the GEnum type backing the
 * "device-type" property (wide-angle / telephoto / dual / default). */
static GType
gst_avf_video_source_device_type_get_type (void)
{
  static GType avf_video_source_device_type_type = 0;

  if (!avf_video_source_device_type_type) {
    static GEnumValue device_type_types[] = {
      { GST_AVF_VIDEO_SOURCE_DEVICE_TYPE_BUILT_IN_WIDE_ANGLE_CAMERA, "A built-in wide angle camera. These devices are suitable for general purpose use.", "wide-angle" },
      { GST_AVF_VIDEO_SOURCE_DEVICE_TYPE_BUILT_IN_TELEPHOTO_CAMERA, "A built-in camera device with a longer focal length than a wide-angle camera.", "telephoto" },
      { GST_AVF_VIDEO_SOURCE_DEVICE_TYPE_BUILT_IN_DUAL_CAMERA, "A dual camera device, combining built-in wide-angle and telephoto cameras that work together as a single capture device.", "dual" },
      { GST_AVF_VIDEO_SOURCE_DEVICE_TYPE_DEFAULT, "Default", "default" },
      { 0, NULL, NULL },
    };

    avf_video_source_device_type_type =
        g_enum_register_static ("GstAVFVideoSourceDeviceType",
            device_type_types);
  }
  return avf_video_source_device_type_type;
}
/* Delegate object implementing the actual capture logic for avfvideosrc;
 * one instance per element.  It owns the AVCaptureSession graph and the
 * buffer queue bridging AVFoundation's callback to GstPushSrc::create.
 * NOTE(review): this view is decimated — several ivar declarations (e.g.
 * element/baseSrc, width/height, stopRequest, fps fields) fall on hidden
 * lines and are not visible here. */
160 @interface GstAVFVideoSrcImpl : NSObject <AVCaptureVideoDataOutputSampleBufferDelegate> {
166 const gchar *deviceName;
167 GstAVFVideoSourcePosition position;
168 GstAVFVideoSourceOrientation orientation;
169 GstAVFVideoSourceDeviceType deviceType;
/* AVFoundation capture graph; connection and inputClock are derived from
 * the assembled session in openDevice. */
172 AVCaptureSession *session;
173 AVCaptureInput *input;
174 AVCaptureVideoDataOutput *output;
175 AVCaptureDevice *device;
176 AVCaptureConnection *connection;
177 CMClockRef inputClock;
/* Serializes the macOS 10.14+ camera-permission handshake between the
 * authorization completion handler and setCaps/unlock. */
179 NSCondition *permissionCond;
180 BOOL permissionRequestPending;
181 BOOL permissionStopRequest;
183 dispatch_queue_t mainQueue;
184 dispatch_queue_t workerQueue;
/* bufQueueLock's condition carries a QueueState value; bufQueue holds up
 * to BUFFER_QUEUE_SIZE pending sample-buffer dictionaries. */
185 NSConditionLock *bufQueueLock;
186 NSMutableArray *bufQueue;
190 GstVideoFormat format;
192 GstClockTime latency;
195 GstClockTime lastSampling;
199 BOOL captureScreenCursor;
200 BOOL captureScreenMouseClicks;
/* GL context negotiation and the CoreVideo->GL texture cache used when
 * downstream selects GLMemory caps. */
207 GstGLContextHelper *ctxh;
208 GstVideoTextureCache *textureCache;
212 - (id)initWithSrc:(GstPushSrc *)src;
215 @property int deviceIndex;
216 @property const gchar *deviceName;
217 @property GstAVFVideoSourcePosition position;
218 @property GstAVFVideoSourceOrientation orientation;
219 @property GstAVFVideoSourceDeviceType deviceType;
220 @property BOOL doStats;
222 @property BOOL captureScreen;
223 @property BOOL captureScreenCursor;
224 @property BOOL captureScreenMouseClicks;
225 @property guint cropX;
226 @property guint cropY;
227 @property guint cropWidth;
228 @property guint cropHeight;
230 - (BOOL)openScreenInput;
231 - (BOOL)openDeviceInput;
234 - (GstVideoFormat)getGstVideoFormat:(NSNumber *)pixel_format;
236 - (CGDirectDisplayID)getDisplayIdFromDeviceIndex;
237 - (float)getScaleFactorFromDeviceIndex;
239 - (GstCaps *)getDeviceCaps;
240 - (BOOL)setDeviceCaps:(GstVideoInfo *)info;
241 - (BOOL)getSessionPresetCaps:(GstCaps *)result;
242 - (BOOL)setSessionPresetCaps:(GstVideoInfo *)info;
243 - (GstCaps *)getCaps;
244 - (BOOL)setCaps:(GstCaps *)new_caps;
249 - (BOOL)query:(GstQuery *)query;
250 - (void)setContext:(GstContext *)context;
251 - (GstFlowReturn)create:(GstBuffer **)buf;
252 - (GstCaps *)fixate:(GstCaps *)caps;
253 - (BOOL)decideAllocation:(GstQuery *)query;
254 - (void)updateStatistics;
255 - (void)captureOutput:(AVCaptureOutput *)captureOutput
256 didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
257 fromConnection:(AVCaptureConnection *)connection;
/* Maps the element's device-type enum onto the corresponding
 * AVCaptureDeviceType constant.  Callers must filter out ..._DEFAULT
 * before calling (the default case is asserted unreachable). */
static AVCaptureDeviceType GstAVFVideoSourceDeviceType2AVCaptureDeviceType(GstAVFVideoSourceDeviceType deviceType) {
  switch (deviceType) {
    case GST_AVF_VIDEO_SOURCE_DEVICE_TYPE_BUILT_IN_WIDE_ANGLE_CAMERA:
      return AVCaptureDeviceTypeBuiltInWideAngleCamera;
    case GST_AVF_VIDEO_SOURCE_DEVICE_TYPE_BUILT_IN_TELEPHOTO_CAMERA:
      return AVCaptureDeviceTypeBuiltInTelephotoCamera;
    case GST_AVF_VIDEO_SOURCE_DEVICE_TYPE_BUILT_IN_DUAL_CAMERA:
      /* "Duo" is the older AVFoundation spelling of the dual camera,
       * kept here for deployment-target compatibility. */
      return AVCaptureDeviceTypeBuiltInDuoCamera;
    case GST_AVF_VIDEO_SOURCE_DEVICE_TYPE_DEFAULT:
      g_assert_not_reached();
  }
  /* g_assert_not_reached() compiles to nothing with G_DISABLE_ASSERT;
   * without this fallback the function would fall off the end (undefined
   * behaviour in a non-void function). */
  return AVCaptureDeviceTypeBuiltInWideAngleCamera;
}
/* Maps the element's position enum onto AVCaptureDevicePosition.
 * Callers must filter out ..._DEFAULT before calling. */
static AVCaptureDevicePosition GstAVFVideoSourcePosition2AVCaptureDevicePosition(GstAVFVideoSourcePosition position) {
  switch (position) {
    case GST_AVF_VIDEO_SOURCE_POSITION_FRONT:
      return AVCaptureDevicePositionFront;
    case GST_AVF_VIDEO_SOURCE_POSITION_BACK:
      return AVCaptureDevicePositionBack;
    case GST_AVF_VIDEO_SOURCE_POSITION_DEFAULT:
      g_assert_not_reached();
  }
  /* Defined fallback for G_DISABLE_ASSERT builds; previously the function
   * could fall off the end (undefined behaviour). */
  return AVCaptureDevicePositionUnspecified;
}
/* Maps the element's orientation enum onto AVCaptureVideoOrientation.
 * Callers must filter out ..._DEFAULT before calling. */
static AVCaptureVideoOrientation GstAVFVideoSourceOrientation2AVCaptureVideoOrientation(GstAVFVideoSourceOrientation orientation) {
  switch (orientation) {
    case GST_AVF_VIDEO_SOURCE_ORIENTATION_PORTRAIT:
      return AVCaptureVideoOrientationPortrait;
    case GST_AVF_VIDEO_SOURCE_ORIENTATION_PORTRAIT_UPSIDE_DOWN:
      return AVCaptureVideoOrientationPortraitUpsideDown;
    case GST_AVF_VIDEO_SOURCE_ORIENTATION_LANDSCAPE_LEFT:
      return AVCaptureVideoOrientationLandscapeLeft;
    case GST_AVF_VIDEO_SOURCE_ORIENTATION_LANDSCAPE_RIGHT:
      return AVCaptureVideoOrientationLandscapeRight;
    case GST_AVF_VIDEO_SOURCE_ORIENTATION_DEFAULT:
      g_assert_not_reached();
  }
  /* Defined fallback for G_DISABLE_ASSERT builds; previously the function
   * could fall off the end (undefined behaviour). */
  return AVCaptureVideoOrientationPortrait;
}
@implementation GstAVFVideoSrcImpl

@synthesize deviceIndex, deviceName, position, orientation, deviceType, doStats,
    fps, captureScreen, captureScreenCursor, captureScreenMouseClicks,
    cropX, cropY, cropWidth, cropHeight;

/* Bare -init delegates to the designated initializer with no element
 * attached; only useful for internal/standalone use. */
- (id)init
{
  return [self initWithSrc:NULL];
}
/* Designated initializer: caches the owning element, applies property
 * defaults, creates the dispatch queues and the permission condition, and
 * configures the base class as a live, TIME-format source.
 * NOTE(review): decimated view — the assignment targets of the two
 * dispatch_queue_create calls (presumably mainQueue / workerQueue) and
 * the enclosing dealloc for the trailing line are on hidden lines. */
315 - (id)initWithSrc:(GstPushSrc *)src
317 if ((self = [super init])) {
318 element = GST_ELEMENT_CAST (src);
319 baseSrc = GST_BASE_SRC_CAST (src);
322 deviceIndex = DEFAULT_DEVICE_INDEX;
324 position = DEFAULT_POSITION;
325 orientation = DEFAULT_ORIENTATION;
326 deviceType = DEFAULT_DEVICE_TYPE;
328 captureScreenCursor = NO;
329 captureScreenMouseClicks = NO;
332 ctxh = gst_gl_context_helper_new (element);
334 dispatch_queue_create ("org.freedesktop.gstreamer.avfvideosrc.main", NULL);
336 dispatch_queue_create ("org.freedesktop.gstreamer.avfvideosrc.output", NULL);
338 permissionCond = [[NSCondition alloc] init];
340 gst_base_src_set_live (baseSrc, TRUE);
341 gst_base_src_set_format (baseSrc, GST_FORMAT_TIME);
/* NOTE(review): this line appears to belong to a hidden dealloc/teardown
 * method, not to initWithSrc:. */
352 permissionCond = nil;
/* Selects and opens the capture camera: handles the macOS 10.14+
 * authorization flow, then picks a device either by explicit index or by
 * device-type/position (falling back to the system default), and wraps it
 * in an AVCaptureDeviceInput.  Returns YES on success.
 * NOTE(review): decimated view — the early `return NO;` statements after
 * the GST_ELEMENT_ERROR calls, the `break;`s between switch cases, and
 * the completion-handler's `granted` check are on hidden lines. */
355 - (BOOL)openDeviceInput
357 NSString *mediaType = AVMediaTypeVideo;
360 // Since Mojave, permissions are now supposed to be explicitly granted
361 // before capturing from the camera
362 if (@available(macOS 10.14, *)) {
363 // Check if permission has already been granted (or denied)
364 AVAuthorizationStatus authStatus = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
365 switch (authStatus) {
366 case AVAuthorizationStatusDenied:
367 // The user has explicitly denied permission for media capture.
368 GST_ELEMENT_ERROR (element, RESOURCE, NOT_AUTHORIZED,
369 ("Device video access permission has been explicitly denied before"), ("Authorization status: %d", (int)authStatus));
371 case AVAuthorizationStatusRestricted:
372 // The user is not allowed to access media capture devices.
373 GST_ELEMENT_ERROR (element, RESOURCE, NOT_AUTHORIZED,
374 ("Device video access permission cannot be granted by the user"), ("Authorization status: %d", (int)authStatus));
376 case AVAuthorizationStatusAuthorized:
377 // The user has explicitly granted permission for media capture,
378 // or explicit user permission is not necessary for the media type in question.
379 GST_DEBUG_OBJECT (element, "Device video access permission has already been granted");
381 case AVAuthorizationStatusNotDetermined:
382 // Explicit user permission is required for media capture,
383 // but the user has not yet granted or denied such permission.
384 GST_DEBUG_OBJECT (element, "Requesting device video access permission");
/* Mark the request pending under the condition lock; setCaps waits on
 * permissionCond before starting the session. */
386 [permissionCond lock];
387 permissionRequestPending = YES;
388 [permissionCond unlock];
390 [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
391 GST_DEBUG_OBJECT (element, "Device video access permission %s", granted ? "granted" : "not granted");
392 // Check if permission has been granted
394 GST_ELEMENT_ERROR (element, RESOURCE, NOT_AUTHORIZED,
395 ("Device video access permission has been denied"), ("Authorization status: %d", (int)AVAuthorizationStatusDenied));
/* Wake any waiter (setCaps) regardless of the outcome. */
397 [permissionCond lock];
398 permissionRequestPending = NO;
399 [permissionCond broadcast];
400 [permissionCond unlock];
/* Device selection: default index -> by type+position when both are set,
 * otherwise the system default device for video. */
406 if (deviceIndex == DEFAULT_DEVICE_INDEX) {
408 if (deviceType != DEFAULT_DEVICE_TYPE && position != DEFAULT_POSITION) {
409 device = [AVCaptureDevice
410 defaultDeviceWithDeviceType:GstAVFVideoSourceDeviceType2AVCaptureDeviceType(deviceType)
412 position:GstAVFVideoSourcePosition2AVCaptureDevicePosition(position)];
414 device = [AVCaptureDevice defaultDeviceWithMediaType:mediaType];
417 device = [AVCaptureDevice defaultDeviceWithMediaType:mediaType];
420 GST_ELEMENT_ERROR (element, RESOURCE, NOT_FOUND,
421 ("No video capture devices found"), (NULL));
424 } else { // deviceIndex takes priority over position and deviceType
425 NSArray *devices = [AVCaptureDevice devicesWithMediaType:mediaType];
426 if (deviceIndex >= [devices count]) {
427 GST_ELEMENT_ERROR (element, RESOURCE, NOT_FOUND,
428 ("Invalid video capture device index"), (NULL));
431 device = [devices objectAtIndex:deviceIndex];
433 g_assert (device != nil);
435 deviceName = [[device localizedName] UTF8String];
436 GST_INFO ("Opening '%s'", deviceName);
438 input = [AVCaptureDeviceInput deviceInputWithDevice:device
441 GST_ELEMENT_ERROR (element, RESOURCE, BUSY,
442 ("Failed to open device: %s",
443 [[err localizedDescription] UTF8String]),
/* Opens an AVCaptureScreenInput for the display selected by deviceIndex,
 * applies the cursor/mouse-click capture options and the optional crop
 * rectangle (cropX/cropY/cropWidth/cropHeight properties).
 * NOTE(review): decimated view — the @try opening, the assignment of
 * screenInput to the `input` ivar and the final return are hidden. */
451 - (BOOL)openScreenInput
456 CGDirectDisplayID displayId;
457 int screenHeight, screenWidth;
459 GST_DEBUG_OBJECT (element, "Opening screen input");
461 displayId = [self getDisplayIdFromDeviceIndex];
465 AVCaptureScreenInput *screenInput =
466 [[AVCaptureScreenInput alloc] initWithDisplayID:displayId];
/* capturesCursor is set via KVC so the code still loads on OS X < 10.8,
 * where the key does not exist (caught below). */
469 [screenInput setValue:[NSNumber numberWithBool:captureScreenCursor]
470 forKey:@"capturesCursor"];
472 } @catch (NSException *exception) {
473 if (![[exception name] isEqualToString:NSUndefinedKeyException]) {
474 GST_WARNING ("An unexpected error occurred: %s",
475 [[exception reason] UTF8String]);
477 GST_WARNING ("Capturing cursor is only supported in OS X >= 10.8");
480 screenHeight = CGDisplayPixelsHigh (displayId);
481 screenWidth = CGDisplayPixelsWide (displayId);
/* Out-of-bounds crop regions are ignored rather than treated as errors. */
483 if (cropX + cropWidth > screenWidth || cropY + cropHeight > screenHeight) {
484 GST_WARNING ("Capture region outside of screen bounds, ignoring");
486 /* If width/height is not specified, assume max possible values */
487 int rectWidth = cropWidth ? cropWidth : (screenWidth - cropX);
488 int rectHeight = cropHeight ? cropHeight : (screenHeight - cropY);
490 /* cropRect (0,0) is bottom left, which feels counterintuitive.
491 * Make cropY relative to the top edge instead */
492 CGRect cropRect = CGRectMake (cropX, screenHeight - cropY - rectHeight,
493 rectWidth, rectHeight);
494 [screenInput setCropRect:cropRect];
497 screenInput.capturesMouseClicks = captureScreenMouseClicks;
/* Body of -openDevice (declaration on a hidden line): on the serial
 * mainQueue, opens either the screen or the camera input, creates the
 * video-data output and the session, wires them together, and captures
 * the connection and its input clock for timestamping. */
505 BOOL success = NO, *successPtr = &success;
507 GST_DEBUG_OBJECT (element, "Opening device");
/* All session mutation happens synchronously on mainQueue. */
509 dispatch_sync (mainQueue, ^{
513 ret = [self openScreenInput];
515 ret = [self openDeviceInput];
520 output = [[AVCaptureVideoDataOutput alloc] init];
521 [output setSampleBufferDelegate:self
523 output.alwaysDiscardsLateVideoFrames = YES;
524 output.videoSettings = nil; /* device native format */
526 session = [[AVCaptureSession alloc] init];
527 [session addInput:input];
528 [session addOutput:output];
530 /* retained by session */
531 connection = [[output connections] firstObject];
533 if (orientation != DEFAULT_ORIENTATION)
534 connection.videoOrientation = GstAVFVideoSourceOrientation2AVCaptureVideoOrientation(orientation);
/* The input port's clock is the timebase AVFoundation stamps buffers
 * with; getSampleBuffer uses it to translate to running time. */
536 inputClock = ((AVCaptureInputPort *)connection.inputPorts[0]).clock;
540 GST_DEBUG_OBJECT (element, "Opening device %s", success ? "succeeded" : "failed");
/* Body of -closeDevice (declaration on a hidden line): tears down the
 * capture graph on mainQueue; the session must already be stopped.
 * NOTE(review): decimated view — the nil-ing of session/input/output and
 * the non-screen device cleanup details are on hidden lines. */
547 GST_DEBUG_OBJECT (element, "Closing device");
549 dispatch_sync (mainQueue, ^{
550 g_assert (![session isRunning]);
555 [session removeInput:input];
556 [session removeOutput:output];
564 if (!captureScreen) {
569 gst_caps_unref (caps);
/* Builds raw-video caps for one format/size with a fixed framerate. */
#define GST_AVF_CAPS_NEW(format, w, h, fps_n, fps_d)                  \
    (gst_caps_new_simple ("video/x-raw",                              \
        "width", G_TYPE_INT, w,                                       \
        "height", G_TYPE_INT, h,                                      \
        "format", G_TYPE_STRING, gst_video_format_to_string (format), \
        "framerate", GST_TYPE_FRACTION, (fps_n), (fps_d),             \
        NULL))

/* Same as above, but advertising a framerate range. */
#define GST_AVF_FPS_RANGE_CAPS_NEW(format, w, h, min_fps_n, min_fps_d, max_fps_n, max_fps_d) \
    (gst_caps_new_simple ("video/x-raw",                              \
        "width", G_TYPE_INT, w,                                       \
        "height", G_TYPE_INT, h,                                      \
        "format", G_TYPE_STRING, gst_video_format_to_string (format), \
        "framerate", GST_TYPE_FRACTION_RANGE, (min_fps_n), (min_fps_d), (max_fps_n), (max_fps_d), \
        NULL))
/* Translates a CoreVideo pixel-format NSNumber into a GstVideoFormat,
 * logging (once per query) formats this element does not handle.
 * Returns GST_VIDEO_FORMAT_UNKNOWN for unsupported formats. */
- (GstVideoFormat)getGstVideoFormat:(NSNumber *)pixel_format
{
  GstVideoFormat gst_format = get_gst_video_format(pixel_format);

  if (gst_format == GST_VIDEO_FORMAT_UNKNOWN) {
    GST_LOG_OBJECT (element, "Pixel format %s is not handled by avfvideosrc",
        [[pixel_format stringValue] UTF8String]);
  }

  return gst_format;
}
/* Resolves deviceIndex to a CoreGraphics display ID for screen capture.
 * The default index maps to the main display; an out-of-range index
 * raises a NOT_FOUND element error and yields 0. */
- (CGDirectDisplayID)getDisplayIdFromDeviceIndex
{
  NSDictionary *description;
  NSNumber *displayId;
  NSArray *screens = [NSScreen screens];

  if (deviceIndex == DEFAULT_DEVICE_INDEX)
    return kCGDirectMainDisplay;

  if (deviceIndex >= [screens count]) {
    GST_ELEMENT_ERROR (element, RESOURCE, NOT_FOUND,
        ("Invalid screen capture device index"), (NULL));
    return 0;
  }

  /* NSScreen exposes its CGDirectDisplayID under the "NSScreenNumber"
   * key of its device description. */
  description = [[screens objectAtIndex:deviceIndex] deviceDescription];
  displayId = [description objectForKey:@"NSScreenNumber"];
  return [displayId unsignedIntegerValue];
}
/* Returns the HiDPI backing scale factor of the screen selected by
 * deviceIndex (main screen for the default index).  An out-of-range
 * index raises a NOT_FOUND element error and yields 1.0. */
- (float)getScaleFactorFromDeviceIndex
{
  NSArray *screens = [NSScreen screens];

  if (deviceIndex == DEFAULT_DEVICE_INDEX)
    return [[NSScreen mainScreen] backingScaleFactor];

  if (deviceIndex >= [screens count]) {
    GST_ELEMENT_ERROR (element, RESOURCE, NOT_FOUND,
        ("Invalid screen capture device index"), (NULL));
    return 1.0;
  }

  return [[screens objectAtIndex:deviceIndex] backingScaleFactor];
}
/* Swaps width/height as needed for the configured orientation
 * (thin wrapper around the shared get_oriented_dimensions helper). */
- (CMVideoDimensions)orientedDimensions:(CMVideoDimensions)dimensions
{
  return get_oriented_dimensions (orientation, dimensions);
}
/* Queries the opened AVCaptureDevice for the caps it can produce through
 * the current output, adjusted for the configured orientation. */
- (GstCaps *)getDeviceCaps
{
  GstCaps *device_caps;

  GST_DEBUG_OBJECT (element, "Getting device caps");
  device_caps = gst_av_capture_device_get_caps (device, output, orientation);
  GST_DEBUG_OBJECT (element, "Device returned the following caps %" GST_PTR_FORMAT, device_caps);

  return device_caps;
}
/* Locks the device for configuration and walks its format list looking
 * for one matching the negotiated width/height, then a supported frame
 * rate range containing the requested rate; applies activeFormat and the
 * min/max frame durations via KVC (keys only exist on OSX >= 10.7 /
 * iOS >= 7.0, hence the NSUndefinedKeyException handling).
 * NOTE(review): decimated view — found_format assignment, loop break
 * statements, closing braces and the final return are on hidden lines.
 * "unexcepted" in the warning string is a pre-existing typo (should be
 * "unexpected"); left untouched here as this is a comment-only pass. */
649 - (BOOL)setDeviceCaps:(GstVideoInfo *)info
652 gboolean found_format = FALSE, found_framerate = FALSE;
653 NSArray *formats = [device valueForKey:@"formats"];
654 gst_util_fraction_to_double (info->fps_n, info->fps_d, &framerate);
656 GST_DEBUG_OBJECT (element, "Setting device caps");
658 if ([device lockForConfiguration:NULL] == YES) {
659 for (NSObject *f in formats) {
660 CMFormatDescriptionRef formatDescription = (__bridge CMFormatDescriptionRef) [f performSelector:@selector(formatDescription)];
661 CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(formatDescription);
662 dimensions = [self orientedDimensions:dimensions];
663 if (dimensions.width == info->width && dimensions.height == info->height) {
665 [device setValue:f forKey:@"activeFormat"];
666 for (NSObject *rate in [f valueForKey:@"videoSupportedFrameRateRanges"]) {
667 gdouble min_frame_rate, max_frame_rate;
669 [[rate valueForKey:@"minFrameRate"] getValue:&min_frame_rate];
670 [[rate valueForKey:@"maxFrameRate"] getValue:&max_frame_rate];
/* Small epsilon absorbs the double rounding of fps_n/fps_d. */
671 if ((framerate >= min_frame_rate - 0.00001) &&
672 (framerate <= max_frame_rate + 0.00001)) {
673 NSValue *frame_duration_value;
674 found_framerate = TRUE;
675 if (min_frame_rate == max_frame_rate) {
676 /* on mac we get tight ranges and an exception is raised if the
677 * frame duration doesn't match the one reported in the range to
678 * the last decimal point
680 frame_duration_value = [rate valueForKey:@"minFrameDuration"];
682 // Invert fps_n and fps_d to get frame duration value and timescale (or numerator and denominator)
683 frame_duration_value = [NSValue valueWithCMTime:CMTimeMake (info->fps_d, info->fps_n)];
685 [device setValue:frame_duration_value forKey:@"activeVideoMinFrameDuration"];
687 /* Only available on OSX >= 10.8 and iOS >= 7.0 */
688 [device setValue:frame_duration_value forKey:@"activeVideoMaxFrameDuration"];
689 } @catch (NSException *exception) {
690 if (![[exception name] isEqualToString:NSUndefinedKeyException]) {
691 GST_WARNING ("An unexcepted error occurred: %s",
692 [exception.reason UTF8String]);
699 if (found_framerate) {
705 GST_WARNING ("Unsupported capture dimensions %dx%d", info->width, info->height);
708 if (!found_framerate) {
709 GST_WARNING ("Unsupported capture framerate %d/%d", info->fps_n, info->fps_d);
713 GST_WARNING ("Couldn't lock device for configuration");
/* Legacy caps probing via AVCaptureSession presets (pre activeFormat API):
 * for every pixel format the output supports, appends one caps structure
 * per session preset the session accepts, at the nominal DEVICE_FPS rate.
 * NOTE(review): decimated view — the `continue;` for unknown formats and
 * any platform #if guards between presets are on hidden lines. */
719 - (BOOL)getSessionPresetCaps:(GstCaps *)result
721 NSArray *pixel_formats = output.availableVideoCVPixelFormatTypes;
722 for (NSNumber *pixel_format in pixel_formats) {
723 GstVideoFormat gst_format = [self getGstVideoFormat:pixel_format];
724 if (gst_format == GST_VIDEO_FORMAT_UNKNOWN)
728 if ([session canSetSessionPreset:AVCaptureSessionPreset1920x1080])
729 gst_caps_append (result, GST_AVF_CAPS_NEW (gst_format, 1920, 1080, DEVICE_FPS_N, DEVICE_FPS_D));
731 if ([session canSetSessionPreset:AVCaptureSessionPreset1280x720])
732 gst_caps_append (result, GST_AVF_CAPS_NEW (gst_format, 1280, 720, DEVICE_FPS_N, DEVICE_FPS_D));
733 if ([session canSetSessionPreset:AVCaptureSessionPreset640x480])
734 gst_caps_append (result, GST_AVF_CAPS_NEW (gst_format, 640, 480, DEVICE_FPS_N, DEVICE_FPS_D));
735 if ([session canSetSessionPreset:AVCaptureSessionPresetMedium])
736 gst_caps_append (result, GST_AVF_CAPS_NEW (gst_format, 480, 360, DEVICE_FPS_N, DEVICE_FPS_D))
;
737 if ([session canSetSessionPreset:AVCaptureSessionPreset352x288])
738 gst_caps_append (result, GST_AVF_CAPS_NEW (gst_format, 352, 288, DEVICE_FPS_N, DEVICE_FPS_D));
739 if ([session canSetSessionPreset:AVCaptureSessionPresetLow])
740 gst_caps_append (result, GST_AVF_CAPS_NEW (gst_format, 192, 144, DEVICE_FPS_N, DEVICE_FPS_D));
743 GST_LOG_OBJECT (element, "Session presets returned the following caps %" GST_PTR_FORMAT, result);
/* Legacy caps application via session presets: maps the negotiated width
 * onto the closest AVCaptureSessionPreset.  The device must be lockable.
 * NOTE(review): decimated view — the `case NNN:` labels, `break;`s and
 * returns of the switch are on hidden lines.  The stray `;` after the
 * method signature and the "presset" typo in the debug string are
 * pre-existing; left untouched in this comment-only pass. */
748 - (BOOL)setSessionPresetCaps:(GstVideoInfo *)info;
750 GST_DEBUG_OBJECT (element, "Setting session presset caps");
752 if ([device lockForConfiguration:NULL] != YES) {
753 GST_WARNING ("Couldn't lock device for configuration");
757 switch (info->width) {
759 session.sessionPreset = AVCaptureSessionPresetLow;
762 session.sessionPreset = AVCaptureSessionPreset352x288;
765 session.sessionPreset = AVCaptureSessionPresetMedium;
768 session.sessionPreset = AVCaptureSessionPreset640x480;
771 session.sessionPreset = AVCaptureSessionPreset1280x720;
775 session.sessionPreset = AVCaptureSessionPreset1920x1080;
779 GST_WARNING ("Unsupported capture dimensions %dx%d", info->width, info->height);
/* Body of -getCaps (declaration on a hidden line): builds the caps this
 * source can currently produce.  Screen capture computes a single size
 * from the (scaled) display or crop rect per supported pixel format;
 * camera capture merges the device's native caps, falling back to the
 * session-preset API when the formats/activeFormat KVC keys are missing
 * (OSX < 10.7 / iOS < 7.0). */
788 NSArray *pixel_formats;
791 return NULL; /* BaseSrc will return template caps */
793 result = gst_caps_new_empty ();
794 pixel_formats = output.availableVideoCVPixelFormatTypes;
799 AVCaptureScreenInput *screenInput = (AVCaptureScreenInput *)input;
/* Empty crop rect means "whole display". */
800 if (CGRectIsEmpty (screenInput.cropRect)) {
801 rect = CGDisplayBounds ([self getDisplayIdFromDeviceIndex]);
803 rect = screenInput.cropRect;
/* Caps are expressed in physical pixels, hence the backing scale. */
806 float scale = [self getScaleFactorFromDeviceIndex];
807 for (NSNumber *pixel_format in pixel_formats) {
808 GstVideoFormat gst_format = [self getGstVideoFormat:pixel_format];
809 if (gst_format != GST_VIDEO_FORMAT_UNKNOWN)
810 gst_caps_append (result, gst_caps_new_simple ("video/x-raw",
811 "width", G_TYPE_INT, (int)(rect.size.width * scale),
812 "height", G_TYPE_INT, (int)(rect.size.height * scale),
813 "format", G_TYPE_STRING, gst_video_format_to_string (gst_format),
817 GST_WARNING ("Screen capture is not supported by iOS");
823 result = gst_caps_merge (result, [self getDeviceCaps]);
824 } @catch (NSException *exception) {
825 if (![[exception name] isEqualToString:NSUndefinedKeyException]) {
826 GST_WARNING ("An unexcepted error occurred: %s", [exception.reason UTF8String]);
830 /* Fallback on session presets API for iOS < 7.0 */
831 [self getSessionPresetCaps:result];
837 - (BOOL)setCaps:(GstCaps *)new_caps
840 BOOL success = YES, *successPtr = &success;
842 gst_video_info_init (&info);
843 gst_video_info_from_caps (&info, new_caps);
846 height = info.height;
847 format = info.finfo->format;
848 latency = gst_util_uint64_scale (GST_SECOND, info.fps_d, info.fps_n);
850 dispatch_sync (mainQueue, ^{
855 AVCaptureScreenInput *screenInput = (AVCaptureScreenInput *)input;
856 screenInput.minFrameDuration = CMTimeMake(info.fps_d, info.fps_n);
858 GST_WARNING ("Screen capture is not supported by iOS");
865 /* formats and activeFormat keys are only available on OSX >= 10.7 and iOS >= 7.0 */
866 *successPtr = [self setDeviceCaps:(GstVideoInfo *)&info];
867 if (*successPtr != YES)
870 } @catch (NSException *exception) {
872 if (![[exception name] isEqualToString:NSUndefinedKeyException]) {
873 GST_WARNING ("An unexcepted error occurred: %s", [exception.reason UTF8String]);
878 /* Fallback on session presets API for iOS < 7.0 */
879 *successPtr = [self setSessionPresetCaps:(GstVideoInfo *)&info];
880 if (*successPtr != YES)
886 case GST_VIDEO_FORMAT_NV12:
887 newformat = kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange;
889 case GST_VIDEO_FORMAT_UYVY:
890 newformat = kCVPixelFormatType_422YpCbCr8;
892 case GST_VIDEO_FORMAT_YUY2:
893 newformat = kCVPixelFormatType_422YpCbCr8_yuvs;
895 case GST_VIDEO_FORMAT_BGRA:
896 newformat = kCVPixelFormatType_32BGRA;
900 GST_WARNING ("Unsupported output format %s",
901 gst_video_format_to_string (format));
905 GST_INFO_OBJECT (element,
906 "width: %d height: %d format: %s", width, height,
907 gst_video_format_to_string (format));
909 output.videoSettings = [NSDictionary
910 dictionaryWithObject:[NSNumber numberWithInt:newformat]
911 forKey:(NSString*)kCVPixelBufferPixelFormatTypeKey];
913 gst_caps_replace (&caps, new_caps);
914 GST_INFO_OBJECT (element, "configured caps %"GST_PTR_FORMAT, caps);
916 if (![session isRunning]) {
919 /* If permissions are still pending, wait for a response before
920 * starting the capture running, or else we'll get black frames */
921 [permissionCond lock];
922 if (permissionRequestPending && !permissionStopRequest) {
923 GST_DEBUG_OBJECT (element, "Waiting for pending device access permission.");
925 [permissionCond wait];
926 } while (permissionRequestPending && !permissionStopRequest);
928 stopping = permissionStopRequest;
929 [permissionCond unlock];
932 [session startRunning];
935 /* Unlock device configuration only after session is started so the session
936 * won't reset the capture formats */
937 [device unlockForConfiguration];
/* Body of -start (declaration on a hidden line): resets the permission
 * flags, opens the device, and initializes the buffer queue and the
 * timing/statistics state for a fresh capture run. */
945 [permissionCond lock];
946 permissionRequestPending = NO;
947 permissionStopRequest = NO;
948 [permissionCond unlock];
950 if (![self openDevice])
/* Queue starts in the NO_BUFFERS condition; capacity matches the
 * BUFFER_QUEUE_SIZE drop policy in the capture callback. */
953 bufQueueLock = [[NSConditionLock alloc] initWithCondition:NO_BUFFERS];
954 bufQueue = [[NSMutableArray alloc] initWithCapacity:BUFFER_QUEUE_SIZE];
958 latency = GST_CLOCK_TIME_NONE;
960 lastSampling = GST_CLOCK_TIME_NONE;
/* Body of -stop (declaration on a hidden line): stops the session on
 * mainQueue, drains workerQueue with an empty barrier block so no
 * delegate callback is still in flight, then releases GL resources. */
969 dispatch_sync (mainQueue, ^{ [session stopRunning]; });
970 dispatch_sync (workerQueue, ^{});
976 g_object_unref (textureCache);
980 gst_gl_context_helper_free (ctxh);
/* GstBaseSrc::query handler: answers LATENCY queries with the one-frame
 * latency computed in setCaps (only once a device is open and caps are
 * set); everything else is forwarded to the parent class. */
988 - (BOOL)query:(GstQuery *)query
992 if (GST_QUERY_TYPE (query) == GST_QUERY_LATENCY) {
993 if (input != nil && caps != NULL) {
994 GstClockTime min_latency, max_latency;
996 min_latency = max_latency = latency;
999 GST_DEBUG_OBJECT (element, "reporting latency of min %" GST_TIME_FORMAT
1000 " max %" GST_TIME_FORMAT,
1001 GST_TIME_ARGS (min_latency), GST_TIME_ARGS (max_latency));
1002 gst_query_set_latency (query, TRUE, min_latency, max_latency);
1005 result = GST_BASE_SRC_CLASS (parent_class)->query (baseSrc, query);
/* Bodies of -unlock / -unlockStop (declarations on hidden lines).
 * unlock: flags a stop request and wakes both the create: waiter (via the
 * HAS_BUFFER_OR_STOP_REQUEST condition) and any permission waiter.
 * unlockStop: clears the flag and restores the queue condition to match
 * the actual queue contents. */
1013 [bufQueueLock lock];
1015 [bufQueueLock unlockWithCondition:HAS_BUFFER_OR_STOP_REQUEST];
1017 [permissionCond lock];
1018 permissionStopRequest = YES;
1019 [permissionCond broadcast];
1020 [permissionCond unlock];
1027 [bufQueueLock lock];
1029 [bufQueueLock unlockWithCondition:([bufQueue count] == 0) ? NO_BUFFERS : HAS_BUFFER_OR_STOP_REQUEST];
1031 [permissionCond lock];
1032 permissionStopRequest = NO;
1033 [permissionCond unlock];
/* AVCaptureVideoDataOutputSampleBufferDelegate callback (runs on
 * workerQueue): timestamps the incoming sample buffer and enqueues it at
 * the head of bufQueue for create: to consume, dropping the oldest entry
 * when the queue is full.  Fixes a mojibake in the original source where
 * "&timestamp" had been corrupted to "×tamp" (a swallowed HTML
 * "&times;" entity). */
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)aConnection
{
  GstClockTime timestamp, duration;

  [bufQueueLock lock];

  if (stopRequest) {
    [bufQueueLock unlock];
    return;
  }

  [self getSampleBuffer:sampleBuffer timestamp:&timestamp duration:&duration];

  /* Buffers that cannot be timestamped are dropped. */
  if (timestamp == GST_CLOCK_TIME_NONE) {
    [bufQueueLock unlockWithCondition:([bufQueue count] == 0) ? NO_BUFFERS : HAS_BUFFER_OR_STOP_REQUEST];
    return;
  }

  if ([bufQueue count] == BUFFER_QUEUE_SIZE)
    [bufQueue removeLastObject];

  [bufQueue insertObject:@{@"sbuf": (__bridge id)sampleBuffer,
                           @"timestamp": @(timestamp),
                           @"duration": @(duration)}
                 atIndex:0];

  [bufQueueLock unlockWithCondition:HAS_BUFFER_OR_STOP_REQUEST];
}
/* GstPushSrc::create: blocks until the queue has a buffer (or a stop is
 * requested), pops the oldest entry, renegotiates caps if the frame size
 * changed mid-stream, wraps the CMSampleBuffer in a GstBuffer and stamps
 * offset/timestamp/duration.
 * NOTE(review): decimated view — the stop-request check, CFRetain/CFRelease
 * of sbuf, the NULL-buffer error path and the final return are hidden. */
1069 - (GstFlowReturn)create:(GstBuffer **)buf
1071 CMSampleBufferRef sbuf;
1072 CVImageBufferRef image_buf;
1073 CVPixelBufferRef pixel_buf;
1074 size_t cur_width, cur_height;
1075 GstClockTime timestamp, duration;
1077 [bufQueueLock lockWhenCondition:HAS_BUFFER_OR_STOP_REQUEST];
1079 [bufQueueLock unlock];
1080 return GST_FLOW_FLUSHING;
1083 NSDictionary *dic = (NSDictionary *) [bufQueue lastObject];
1084 sbuf = (__bridge CMSampleBufferRef) dic[@"sbuf"];
1085 timestamp = (GstClockTime) [dic[@"timestamp"] longLongValue];
1086 duration = (GstClockTime) [dic[@"duration"] longLongValue];
1088 [bufQueue removeLastObject];
1089 [bufQueueLock unlockWithCondition:
1090 ([bufQueue count] == 0) ? NO_BUFFERS : HAS_BUFFER_OR_STOP_REQUEST];
1092 /* Check output frame size dimensions */
1093 image_buf = CMSampleBufferGetImageBuffer (sbuf);
1095 pixel_buf = (CVPixelBufferRef) image_buf;
1096 cur_width = CVPixelBufferGetWidth (pixel_buf);
1097 cur_height = CVPixelBufferGetHeight (pixel_buf);
1099 if (width != cur_width || height != cur_height) {
1100 /* Set new caps according to current frame dimensions */
1101 GST_WARNING ("Output frame size has changed %dx%d -> %dx%d, updating caps",
1102 width, height, (int)cur_width, (int)cur_height);
1104 height = cur_height;
1105 gst_caps_set_simple (caps,
1106 "width", G_TYPE_INT, width,
1107 "height", G_TYPE_INT, height,
/* NOTE(review): GST_BASE_SINK_PAD applied to a GstBaseSrc looks wrong —
 * GST_BASE_SRC_PAD is the matching macro.  It likely "works" only because
 * the pad member sits at the same struct offset in both base classes;
 * confirm against upstream before changing. */
1109 gst_pad_push_event (GST_BASE_SINK_PAD (baseSrc), gst_event_new_caps (caps));
1113 *buf = gst_core_media_buffer_new (sbuf, useVideoMeta, textureCache);
1116 return GST_FLOW_ERROR;
1120 GST_BUFFER_OFFSET (*buf) = offset++;
1121 GST_BUFFER_OFFSET_END (*buf) = GST_BUFFER_OFFSET (*buf) + 1;
1122 GST_BUFFER_TIMESTAMP (*buf) = timestamp;
1123 GST_BUFFER_DURATION (*buf) = duration;
1126 [self updateStatistics];
/* GstBaseSrc::fixate vmethod: truncates to the first structure and steers
 * unfixed fields toward the largest height and a framerate near 30/1
 * before letting the default fixation finish the job. */
- (GstCaps *)fixate:(GstCaps *)new_caps
{
  GstStructure *structure;

  new_caps = gst_caps_make_writable (new_caps);
  new_caps = gst_caps_truncate (new_caps);
  structure = gst_caps_get_structure (new_caps, 0);
  /* crank up to 11. This is what the presets do, but we don't use the presets
   * in ios >= 7.0 */
  gst_structure_fixate_field_nearest_int (structure, "height", G_MAXINT);
  gst_structure_fixate_field_nearest_fraction (structure, "framerate", 30, 1);

  return gst_caps_fixate (new_caps);
}
/* GstBaseSrc::decide_allocation vmethod: after the parent's decision,
 * if downstream negotiated GLMemory caps, makes sure the GL context is
 * available and (re)creates the CoreVideo->GL texture cache whenever the
 * context changed since the last negotiation. */
- (BOOL)decideAllocation:(GstQuery *)query
{
  GstCaps *alloc_caps;
  GstCapsFeatures *features;
  gboolean ret;

  ret = GST_BASE_SRC_CLASS (parent_class)->decide_allocation (baseSrc, query);
  if (!ret)
    return ret;

  gst_query_parse_allocation (query, &alloc_caps, NULL);
  features = gst_caps_get_features (alloc_caps, 0);
  if (gst_caps_features_contains (features, GST_CAPS_FEATURE_MEMORY_GL_MEMORY)) {
    GstVideoTextureCacheGL *cache_gl;

    cache_gl = textureCache ? GST_VIDEO_TEXTURE_CACHE_GL (textureCache) : NULL;

    gst_gl_context_helper_ensure_context (ctxh);
    GST_INFO_OBJECT (element, "pushing textures, context %p old context %p",
        ctxh->context, cache_gl ? cache_gl->ctx : NULL);
    /* Drop a stale cache tied to a different GL context. */
    if (cache_gl && cache_gl->ctx != ctxh->context) {
      g_object_unref (textureCache);
      textureCache = NULL;
    }
    if (!textureCache)
      textureCache = gst_video_texture_cache_gl_new (ctxh->context);
    gst_video_texture_cache_set_format (textureCache, format, alloc_caps);
  }

  return TRUE;
}
/* GstElement::set_context — hand the incoming GstContext to the GL helper so
 * it can pick up a shared GstGLDisplay / external GL context, then chain up
 * to the parent class. */
1178 - (void)setContext:(GstContext *)context
1180 GST_INFO_OBJECT (element, "setting context %s",
1181 gst_context_get_context_type (context));
1182 gst_gl_handle_set_context (element, context,
1183 &ctxh->display, &ctxh->other_context);
1184 GST_ELEMENT_CLASS (parent_class)->set_context (element, context);
/* Compute the GStreamer timestamp and duration for a captured sample buffer.
 *
 * The AVF presentation timestamp and the capture input clock are both scaled
 * to GstClockTime; their difference is how long the frame sat in AVF queues
 * between capture and delivery to our delegate.  That latency is subtracted
 * from the element's current running time to produce the output PTS.  With
 * no pipeline clock, GST_CLOCK_TIME_NONE is returned for the timestamp.
 * NOTE(review): some lines (else branches, closing braces) are elided in
 * this excerpt. */
1187 - (void)getSampleBuffer:(CMSampleBufferRef)sbuf
1188 timestamp:(GstClockTime *)outTimestamp
1189 duration:(GstClockTime *)outDuration
1191 CMSampleTimingInfo time_info;
1192 GstClockTime timestamp, avf_timestamp, duration, input_clock_now, input_clock_diff, running_time;
1193 CMItemCount num_timings;
1197 timestamp = GST_CLOCK_TIME_NONE;
1198 duration = GST_CLOCK_TIME_NONE;
1199 if (CMSampleBufferGetOutputSampleTimingInfoArray(sbuf, 1, &time_info, &num_timings) == noErr) {
/* Scale CMTime (value/timescale) into nanosecond GstClockTime. */
1200 avf_timestamp = gst_util_uint64_scale (GST_SECOND,
1201 time_info.presentationTimeStamp.value, time_info.presentationTimeStamp.timescale);
1203 if (CMTIME_IS_VALID (time_info.duration) && time_info.duration.timescale != 0)
1204 duration = gst_util_uint64_scale (GST_SECOND,
1205 time_info.duration.value, time_info.duration.timescale);
1207 now = CMClockGetTime(inputClock);
1208 input_clock_now = gst_util_uint64_scale (GST_SECOND,
1209 now.value, now.timescale);
1210 input_clock_diff = input_clock_now - avf_timestamp;
1212 GST_OBJECT_LOCK (element);
1213 clock = GST_ELEMENT_CLOCK (element);
1215 running_time = gst_clock_get_time (clock) - element->base_time;
1216 /* We use presentationTimeStamp to determine how much time it took
1217 * between capturing and receiving the frame in our delegate
1218 * (e.g. how long it spent in AVF queues), then we subtract that time
1219 * from our running time to get the actual timestamp.
/* Guard against underflow: never produce a timestamp before 0. */
1221 if (running_time >= input_clock_diff)
1222 timestamp = running_time - input_clock_diff;
1224 timestamp = running_time;
1226 GST_DEBUG_OBJECT (element, "AVF clock: %"GST_TIME_FORMAT ", AVF PTS: %"GST_TIME_FORMAT
1227 ", AVF clock diff: %"GST_TIME_FORMAT
1228 ", running time: %"GST_TIME_FORMAT ", out PTS: %"GST_TIME_FORMAT,
1229 GST_TIME_ARGS (input_clock_now), GST_TIME_ARGS (avf_timestamp),
1230 GST_TIME_ARGS (input_clock_diff),
1231 GST_TIME_ARGS (running_time), GST_TIME_ARGS (timestamp));
1233 /* no clock, can't set timestamps */
1234 timestamp = GST_CLOCK_TIME_NONE;
1236 GST_OBJECT_UNLOCK (element);
1239 *outTimestamp = timestamp;
1240 *outDuration = duration;
/* Periodically (once per second of clock time) publish capture statistics:
 * when a pipeline clock is present, sample it and — if at least GST_SECOND
 * elapsed since the last sampling — update the fps counter under the object
 * lock and emit a "fps" property notification.
 * NOTE(review): the counter-update statements and method tail are elided in
 * this excerpt — confirm against the full file. */
1243 - (void)updateStatistics
1247 GST_OBJECT_LOCK (element);
1248 clock = GST_ELEMENT_CLOCK (element);
1250 gst_object_ref (clock);
1251 GST_OBJECT_UNLOCK (element);
1253 if (clock != NULL) {
1254 GstClockTime now = gst_clock_get_time (clock);
/* Drop the ref taken above; we only needed a consistent snapshot. */
1255 gst_object_unref (clock);
1259 if (GST_CLOCK_TIME_IS_VALID (lastSampling)) {
1260 if (now - lastSampling >= GST_SECOND) {
1261 GST_OBJECT_LOCK (element);
1263 GST_OBJECT_UNLOCK (element);
1265 g_object_notify (G_OBJECT (element), "fps");
1293 PROP_CAPTURE_SCREEN,
1294 PROP_CAPTURE_SCREEN_CURSOR,
1295 PROP_CAPTURE_SCREEN_MOUSE_CLICKS,
1296 PROP_CAPTURE_SCREEN_CROP_X,
1297 PROP_CAPTURE_SCREEN_CROP_Y,
1298 PROP_CAPTURE_SCREEN_CROP_WIDTH,
1299 PROP_CAPTURE_SCREEN_CROP_HEIGHT,
/* Forward declarations of the GObject / GstElement / GstBaseSrc / GstPushSrc
 * vfunc implementations; each is a thin wrapper that delegates to the
 * Objective-C GstAVFVideoSrcImpl object (see definitions below). */
1304 static void gst_avf_video_src_finalize (GObject * obj);
1305 static void gst_avf_video_src_get_property (GObject * object, guint prop_id,
1306 GValue * value, GParamSpec * pspec);
1307 static void gst_avf_video_src_set_property (GObject * object, guint prop_id,
1308 const GValue * value, GParamSpec * pspec);
1309 static GstCaps * gst_avf_video_src_get_caps (GstBaseSrc * basesrc,
1311 static gboolean gst_avf_video_src_set_caps (GstBaseSrc * basesrc,
1313 static gboolean gst_avf_video_src_start (GstBaseSrc * basesrc);
1314 static gboolean gst_avf_video_src_stop (GstBaseSrc * basesrc);
1315 static gboolean gst_avf_video_src_query (GstBaseSrc * basesrc,
1317 static gboolean gst_avf_video_src_unlock (GstBaseSrc * basesrc);
1318 static gboolean gst_avf_video_src_unlock_stop (GstBaseSrc * basesrc);
1319 static GstFlowReturn gst_avf_video_src_create (GstPushSrc * pushsrc,
1321 static GstCaps * gst_avf_video_src_fixate (GstBaseSrc * bsrc,
1323 static gboolean gst_avf_video_src_decide_allocation (GstBaseSrc * bsrc,
1325 static void gst_avf_video_src_set_context (GstElement * element,
1326 GstContext * context);
/* Class initializer: wires up all GObject/GstElement/GstBaseSrc/GstPushSrc
 * vfuncs, registers element metadata and the src pad template, installs the
 * element properties (device selection, orientation, stats, screen-capture
 * options), initializes the debug category, and marks the custom enum types
 * as plugin API for documentation/introspection. */
1329 gst_avf_video_src_class_init (GstAVFVideoSrcClass * klass)
1331 GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
1332 GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
1333 GstBaseSrcClass *gstbasesrc_class = GST_BASE_SRC_CLASS (klass);
1334 GstPushSrcClass *gstpushsrc_class = GST_PUSH_SRC_CLASS (klass);
1336 gobject_class->finalize = gst_avf_video_src_finalize;
1337 gobject_class->get_property = gst_avf_video_src_get_property;
1338 gobject_class->set_property = gst_avf_video_src_set_property;
1340 gstelement_class->set_context = gst_avf_video_src_set_context;
1342 gstbasesrc_class->get_caps = gst_avf_video_src_get_caps;
1343 gstbasesrc_class->set_caps = gst_avf_video_src_set_caps;
1344 gstbasesrc_class->start = gst_avf_video_src_start;
1345 gstbasesrc_class->stop = gst_avf_video_src_stop;
1346 gstbasesrc_class->query = gst_avf_video_src_query;
1347 gstbasesrc_class->unlock = gst_avf_video_src_unlock;
1348 gstbasesrc_class->unlock_stop = gst_avf_video_src_unlock_stop;
1349 gstbasesrc_class->fixate = gst_avf_video_src_fixate;
1350 gstbasesrc_class->decide_allocation = gst_avf_video_src_decide_allocation;
1352 gstpushsrc_class->create = gst_avf_video_src_create;
1354 gst_element_class_set_metadata (gstelement_class,
1355 "Video Source (AVFoundation)", "Source/Video/Hardware",
1356 "Reads frames from an iOS/MacOS AVFoundation device",
1357 "Ole André Vadla Ravnås <oleavr@soundrop.com>");
1359 gst_element_class_add_static_pad_template (gstelement_class, &src_template);
/* Device-selection and statistics properties. */
1361 g_object_class_install_property (gobject_class, PROP_DEVICE_INDEX,
1362 g_param_spec_int ("device-index", "Device Index",
1363 "The zero-based device index",
1364 -1, G_MAXINT, DEFAULT_DEVICE_INDEX,
1365 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
1366 g_object_class_install_property (gobject_class, PROP_DEVICE_NAME,
1367 g_param_spec_string ("device-name", "Device Name",
1368 "The name of the currently opened capture device",
1369 NULL, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
1370 g_object_class_install_property (gobject_class, PROP_POSITION,
1371 g_param_spec_enum ("position", "Position",
1372 "The position of the capture device (front or back-facing)",
1373 GST_TYPE_AVF_VIDEO_SOURCE_POSITION, DEFAULT_POSITION,
1374 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
1375 g_object_class_install_property (gobject_class, PROP_ORIENTATION,
1376 g_param_spec_enum ("orientation", "Orientation",
1377 "The orientation of the video",
1378 GST_TYPE_AVF_VIDEO_SOURCE_ORIENTATION, DEFAULT_ORIENTATION,
1379 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
1380 g_object_class_install_property (gobject_class, PROP_DEVICE_TYPE,
1381 g_param_spec_enum ("device-type", "Device Type",
1382 "The general type of a video capture device",
1383 GST_TYPE_AVF_VIDEO_SOURCE_DEVICE_TYPE, DEFAULT_DEVICE_TYPE,
1384 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
1385 g_object_class_install_property (gobject_class, PROP_DO_STATS,
1386 g_param_spec_boolean ("do-stats", "Enable statistics",
1387 "Enable logging of statistics", DEFAULT_DO_STATS,
1388 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
1389 g_object_class_install_property (gobject_class, PROP_FPS,
1390 g_param_spec_int ("fps", "Frames per second",
1391 "Last measured framerate, if statistics are enabled",
1392 -1, G_MAXINT, -1, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
/* Screen-capture properties (macOS only; presumably ignored on iOS —
 * confirm against the guarded sections of the full file). */
1394 g_object_class_install_property (gobject_class, PROP_CAPTURE_SCREEN,
1395 g_param_spec_boolean ("capture-screen", "Enable screen capture",
1396 "Enable screen capture functionality", FALSE,
1397 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
1398 g_object_class_install_property (gobject_class, PROP_CAPTURE_SCREEN_CURSOR,
1399 g_param_spec_boolean ("capture-screen-cursor", "Capture screen cursor",
1400 "Enable cursor capture while capturing screen", FALSE,
1401 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
1402 g_object_class_install_property (gobject_class, PROP_CAPTURE_SCREEN_MOUSE_CLICKS,
1403 g_param_spec_boolean ("capture-screen-mouse-clicks", "Enable mouse clicks capture",
1404 "Enable mouse clicks capture while capturing screen", FALSE,
1405 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
1406 g_object_class_install_property (gobject_class, PROP_CAPTURE_SCREEN_CROP_X,
1407 g_param_spec_uint ("screen-crop-x", "Screen capture crop X",
1408 "Horizontal coordinate of top left corner of the screen capture area",
1409 0, G_MAXUINT, 0, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
1410 g_object_class_install_property (gobject_class, PROP_CAPTURE_SCREEN_CROP_Y,
1411 g_param_spec_uint ("screen-crop-y", "Screen capture crop Y",
1412 "Vertical coordinate of top left corner of the screen capture area",
1413 0, G_MAXUINT, 0, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
1414 g_object_class_install_property (gobject_class, PROP_CAPTURE_SCREEN_CROP_WIDTH,
1415 g_param_spec_uint ("screen-crop-width", "Screen capture crop width",
1416 "Width of the screen capture area (0 = maximum)",
1417 0, G_MAXUINT, 0, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
1418 g_object_class_install_property (gobject_class, PROP_CAPTURE_SCREEN_CROP_HEIGHT,
1419 g_param_spec_uint ("screen-crop-height", "Screen capture crop height",
1420 "Height of the screen capture area (0 = maximum)",
1421 0, G_MAXUINT, 0, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
1424 GST_DEBUG_CATEGORY_INIT (gst_avf_video_src_debug, "avfvideosrc",
1425 0, "iOS/MacOS AVFoundation video source");
1427 gst_type_mark_as_plugin_api (GST_TYPE_AVF_VIDEO_SOURCE_POSITION, 0);
1428 gst_type_mark_as_plugin_api (GST_TYPE_AVF_VIDEO_SOURCE_ORIENTATION, 0);
1429 gst_type_mark_as_plugin_api (GST_TYPE_AVF_VIDEO_SOURCE_DEVICE_TYPE, 0);
/* Instance initializer: create the Objective-C implementation object and
 * store it as an ARC-retained opaque pointer (__bridge_retained); the
 * matching CFBridgingRelease happens in gst_avf_video_src_finalize. */
1433 gst_avf_video_src_init (GstAVFVideoSrc * src)
1435 src->impl = (__bridge_retained gpointer)[[GstAVFVideoSrcImpl alloc] initWithSrc:GST_PUSH_SRC (src)];
/* Finalizer: transfer the impl pointer back to ARC (releasing the retain
 * taken in _init), then chain up to the parent finalize. */
1439 gst_avf_video_src_finalize (GObject * obj)
1441 CFBridgingRelease(GST_AVF_VIDEO_SRC_CAST(obj)->impl);
1443 G_OBJECT_CLASS (parent_class)->finalize (obj);
/* GObject::get_property — read each property straight from the ObjC impl
 * object's properties.  "fps" is read under the object lock because it is
 * updated concurrently by updateStatistics.
 * NOTE(review): the switch braces and per-case break statements are elided
 * in this excerpt. */
1447 gst_avf_video_src_get_property (GObject * object, guint prop_id, GValue * value,
1450 GstAVFVideoSrcImpl *impl = GST_AVF_VIDEO_SRC_IMPL (object);
1454 case PROP_CAPTURE_SCREEN:
1455 g_value_set_boolean (value, impl.captureScreen);
1457 case PROP_CAPTURE_SCREEN_CURSOR:
1458 g_value_set_boolean (value, impl.captureScreenCursor);
1460 case PROP_CAPTURE_SCREEN_MOUSE_CLICKS:
1461 g_value_set_boolean (value, impl.captureScreenMouseClicks);
1463 case PROP_CAPTURE_SCREEN_CROP_X:
1464 g_value_set_uint (value, impl.cropX);
1466 case PROP_CAPTURE_SCREEN_CROP_Y:
1467 g_value_set_uint (value, impl.cropY);
1469 case PROP_CAPTURE_SCREEN_CROP_WIDTH:
1470 g_value_set_uint (value, impl.cropWidth);
1472 case PROP_CAPTURE_SCREEN_CROP_HEIGHT:
1473 g_value_set_uint (value, impl.cropHeight);
1476 case PROP_DEVICE_INDEX:
1477 g_value_set_int (value, impl.deviceIndex);
1479 case PROP_DEVICE_NAME:
1480 g_value_set_string (value, impl.deviceName);
1483 g_value_set_enum (value, impl.position);
1485 case PROP_ORIENTATION:
1486 g_value_set_enum (value, impl.orientation);
1488 case PROP_DEVICE_TYPE:
1489 g_value_set_enum (value, impl.deviceType);
1492 g_value_set_boolean (value, impl.doStats);
/* fps is written from the streaming thread; take the lock to read it. */
1495 GST_OBJECT_LOCK (object);
1496 g_value_set_int (value, impl.fps);
1497 GST_OBJECT_UNLOCK (object);
1500 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
/* GObject::set_property — forward each property write to the corresponding
 * property of the ObjC impl object.
 * NOTE(review): the switch braces and per-case break statements are elided
 * in this excerpt. */
1506 gst_avf_video_src_set_property (GObject * object, guint prop_id,
1507 const GValue * value, GParamSpec * pspec)
1509 GstAVFVideoSrcImpl *impl = GST_AVF_VIDEO_SRC_IMPL (object);
1513 case PROP_CAPTURE_SCREEN:
1514 impl.captureScreen = g_value_get_boolean (value);
1516 case PROP_CAPTURE_SCREEN_CURSOR:
1517 impl.captureScreenCursor = g_value_get_boolean (value);
1519 case PROP_CAPTURE_SCREEN_MOUSE_CLICKS:
1520 impl.captureScreenMouseClicks = g_value_get_boolean (value);
1522 case PROP_CAPTURE_SCREEN_CROP_X:
1523 impl.cropX = g_value_get_uint (value);
1525 case PROP_CAPTURE_SCREEN_CROP_Y:
1526 impl.cropY = g_value_get_uint (value);
1528 case PROP_CAPTURE_SCREEN_CROP_WIDTH:
1529 impl.cropWidth = g_value_get_uint (value);
1531 case PROP_CAPTURE_SCREEN_CROP_HEIGHT:
1532 impl.cropHeight = g_value_get_uint (value);
1535 case PROP_DEVICE_INDEX:
1536 impl.deviceIndex = g_value_get_int (value);
1539 impl.position = g_value_get_enum(value);
1541 case PROP_ORIENTATION:
1542 impl.orientation = g_value_get_enum(value);
1544 case PROP_DEVICE_TYPE:
1545 impl.deviceType = g_value_get_enum(value);
1548 impl.doStats = g_value_get_boolean (value);
1551 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
/* Thin C vfunc wrappers: each one casts the GObject to the ObjC impl via
 * GST_AVF_VIDEO_SRC_IMPL and forwards the call as a message, returning the
 * result unchanged.  No logic lives here. */
1557 gst_avf_video_src_get_caps (GstBaseSrc * basesrc, GstCaps * filter)
1561 ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) getCaps];
1567 gst_avf_video_src_set_caps (GstBaseSrc * basesrc, GstCaps * caps)
1571 ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) setCaps:caps];
1577 gst_avf_video_src_start (GstBaseSrc * basesrc)
1581 ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) start];
1587 gst_avf_video_src_stop (GstBaseSrc * basesrc)
1591 ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) stop];
1597 gst_avf_video_src_query (GstBaseSrc * basesrc, GstQuery * query)
1601 ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) query:query];
1607 gst_avf_video_src_unlock (GstBaseSrc * basesrc)
1611 ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) unlock];
1617 gst_avf_video_src_unlock_stop (GstBaseSrc * basesrc)
1621 ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) unlockStop];
1626 static GstFlowReturn
1627 gst_avf_video_src_create (GstPushSrc * pushsrc, GstBuffer ** buf)
1631 ret = [GST_AVF_VIDEO_SRC_IMPL (pushsrc) create: buf];
1638 gst_avf_video_src_fixate (GstBaseSrc * bsrc, GstCaps * caps)
1642 ret = [GST_AVF_VIDEO_SRC_IMPL (bsrc) fixate:caps];
1648 gst_avf_video_src_decide_allocation (GstBaseSrc * bsrc,
1653 ret = [GST_AVF_VIDEO_SRC_IMPL (bsrc) decideAllocation:query];
1659 gst_avf_video_src_set_context (GstElement * element, GstContext * context)
1661 [GST_AVF_VIDEO_SRC_IMPL (element) setContext:context];
/* Build the caps supported by an AVCaptureDevice + video data output.
 *
 * Device formats and frame-rate ranges are read through key-value coding /
 * performSelector rather than the typed AVCaptureDeviceFormat API, to stay
 * compatible with iOS < 7 (see the original comment below).  For every
 * (format, fps-range, pixel-format) combination a raw-video caps structure
 * is appended; for the one pixel format matching gl_format a parallel
 * GLMemory caps structure (with texture-target) is built as well.  The raw
 * and GL caps are merged (GL first, so it is preferred) and simplified.
 * Dimensions are swapped for portrait orientations via
 * get_oriented_dimensions().
 * NOTE(review): several closing braces and the #if/#else lines selecting
 * gl_format and the texture target are elided in this excerpt. */
1665 gst_av_capture_device_get_caps (AVCaptureDevice *device, AVCaptureVideoDataOutput *output, GstAVFVideoSourceOrientation orientation)
1667 NSArray *formats = [device valueForKey:@"formats"];
1668 NSArray *pixel_formats = output.availableVideoCVPixelFormatTypes;
1669 GstCaps *result_caps, *result_gl_caps;
1671 GstVideoFormat gl_format = GST_VIDEO_FORMAT_UYVY;
1673 GstVideoFormat gl_format = GST_VIDEO_FORMAT_NV12;
1676 result_caps = gst_caps_new_empty ();
1677 result_gl_caps = gst_caps_new_empty ();
1679 /* Do not use AVCaptureDeviceFormat or AVFrameRateRange only
1680 * available in iOS >= 7.0. We use a dynamic approach with key-value
1681 * coding or performSelector */
1682 for (NSObject *f in [formats reverseObjectEnumerator]) {
1683 /* formatDescription can't be retrieved with valueForKey so use a selector here */
1684 CMFormatDescriptionRef formatDescription = (__bridge CMFormatDescriptionRef) [f performSelector:@selector(formatDescription)];
1685 CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions (formatDescription);
1686 dimensions = get_oriented_dimensions (orientation, dimensions);
1688 for (NSObject *rate in [f valueForKey:@"videoSupportedFrameRateRanges"]) {
1689 int min_fps_n, min_fps_d, max_fps_n, max_fps_d;
1690 gdouble min_fps, max_fps;
/* min/maxFrameRate are doubles; convert each to an exact fraction. */
1692 [[rate valueForKey:@"minFrameRate"] getValue:&min_fps];
1693 gst_util_double_to_fraction (min_fps, &min_fps_n, &min_fps_d);
1695 [[rate valueForKey:@"maxFrameRate"] getValue:&max_fps];
1696 gst_util_double_to_fraction (max_fps, &max_fps_n, &max_fps_d);
1698 for (NSNumber *pixel_format in pixel_formats) {
1699 GstVideoFormat gst_format = get_gst_video_format (pixel_format);
1701 if (gst_format != GST_VIDEO_FORMAT_UNKNOWN) {
/* A range collapses to a single framerate when min == max. */
1702 if (min_fps != max_fps)
1703 gst_caps_append (result_caps, GST_AVF_FPS_RANGE_CAPS_NEW (gst_format, dimensions.width, dimensions.height, min_fps_n, min_fps_d, max_fps_n, max_fps_d));
1705 gst_caps_append (result_caps, GST_AVF_CAPS_NEW (gst_format, dimensions.width, dimensions.height, max_fps_n, max_fps_d));
1708 if (gst_format == gl_format) {
1710 if (min_fps != max_fps) {
1711 gl_caps = GST_AVF_FPS_RANGE_CAPS_NEW (gl_format,
1712 dimensions.width, dimensions.height,
1713 min_fps_n, min_fps_d,
1714 max_fps_n, max_fps_d);
1716 gl_caps = GST_AVF_CAPS_NEW (gl_format,
1717 dimensions.width, dimensions.height,
1718 max_fps_n, max_fps_d);
1720 gst_caps_set_features (gl_caps, 0,
1721 gst_caps_features_new (GST_CAPS_FEATURE_MEMORY_GL_MEMORY,
1723 gst_caps_set_simple (gl_caps,
1724 "texture-target", G_TYPE_STRING,
1726 GST_GL_TEXTURE_TARGET_RECTANGLE_STR,
1728 GST_GL_TEXTURE_TARGET_2D_STR,
1731 gst_caps_append (result_gl_caps, gl_caps);
/* Merge GL caps ahead of raw caps so GLMemory is preferred downstream. */
1737 result_gl_caps = gst_caps_simplify (gst_caps_merge (result_gl_caps, result_caps));
1739 return result_gl_caps;
/* Map a CoreVideo pixel-format FourCC (wrapped in an NSNumber) to the
 * corresponding GstVideoFormat; unknown formats yield
 * GST_VIDEO_FORMAT_UNKNOWN.  NOTE(review): the per-case break statements,
 * default case and return are elided in this excerpt. */
1742 static GstVideoFormat
1743 get_gst_video_format (NSNumber *pixel_format)
1745 GstVideoFormat gst_format = GST_VIDEO_FORMAT_UNKNOWN;
1747 switch ([pixel_format integerValue]) {
1748 case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange: /* 420v */
1749 gst_format = GST_VIDEO_FORMAT_NV12;
1751 case kCVPixelFormatType_422YpCbCr8: /* 2vuy */
1752 gst_format = GST_VIDEO_FORMAT_UYVY;
1754 case kCVPixelFormatType_32BGRA: /* BGRA */
1755 gst_format = GST_VIDEO_FORMAT_BGRA;
1757 case kCVPixelFormatType_422YpCbCr8_yuvs: /* yuvs */
1758 gst_format = GST_VIDEO_FORMAT_YUY2;
/* Return the frame dimensions adjusted for the requested orientation:
 * portrait orientations swap width and height, landscape (and default)
 * orientations pass the dimensions through unchanged. */
1767 static CMVideoDimensions
1768 get_oriented_dimensions (GstAVFVideoSourceOrientation orientation, CMVideoDimensions dimensions)
1770 CMVideoDimensions orientedDimensions;
1771 if (orientation == GST_AVF_VIDEO_SOURCE_ORIENTATION_PORTRAIT_UPSIDE_DOWN ||
1772 orientation == GST_AVF_VIDEO_SOURCE_ORIENTATION_PORTRAIT) {
1773 orientedDimensions.width = dimensions.height;
1774 orientedDimensions.height = dimensions.width;
1776 orientedDimensions = dimensions;
1778 return orientedDimensions;