2 * Copyright (C) 2010 Ole André Vadla Ravnås <oleavr@soundrop.com>
3 * Copyright (C) 2016 Alessandro Decina <twi@centricular.com>
5 * This library is free software; you can redistribute it and/or
6 * modify it under the terms of the GNU Library General Public
7 * License as published by the Free Software Foundation; either
8 * version 2 of the License, or (at your option) any later version.
10 * This library is distributed in the hope that it will be useful,
11 * but WITHOUT ANY WARRANTY; without even the implied warranty of
12 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 * Library General Public License for more details.
15 * You should have received a copy of the GNU Library General Public
16 * License along with this library; if not, write to the
17 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
18 * Boston, MA 02110-1301, USA.
25 #include "avfvideosrc.h"
26 #include "glcontexthelper.h"
28 #import <AVFoundation/AVFoundation.h>
30 #import <AppKit/AppKit.h>
32 #include <gst/video/video.h>
33 #include <gst/gl/gstglcontext.h>
34 #include "coremediabuffer.h"
35 #include "videotexturecache.h"
37 #define DEFAULT_DEVICE_INDEX -1
38 #define DEFAULT_POSITION GST_AVF_VIDEO_SOURCE_POSITION_DEFAULT
39 #define DEFAULT_ORIENTATION GST_AVF_VIDEO_SOURCE_ORIENTATION_DEFAULT
40 #define DEFAULT_DEVICE_TYPE GST_AVF_VIDEO_SOURCE_DEVICE_TYPE_DEFAULT
41 #define DEFAULT_DO_STATS FALSE
43 #define DEVICE_FPS_N 25
44 #define DEVICE_FPS_D 1
46 #define BUFFER_QUEUE_SIZE 2
48 GST_DEBUG_CATEGORY (gst_avf_video_src_debug);
49 #define GST_CAT_DEFAULT gst_avf_video_src_debug
51 static GstVideoFormat get_gst_video_format(NSNumber *pixel_format);
52 static CMVideoDimensions
53 get_oriented_dimensions(GstAVFVideoSourceOrientation orientation, CMVideoDimensions dimensions);
55 static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
60 GST_VIDEO_CAPS_MAKE_WITH_FEATURES
61 (GST_CAPS_FEATURE_MEMORY_GL_MEMORY,
63 "texture-target = " GST_GL_TEXTURE_TARGET_RECTANGLE_STR ";"
65 GST_VIDEO_CAPS_MAKE_WITH_FEATURES
66 (GST_CAPS_FEATURE_MEMORY_GL_MEMORY,
68 "texture-target = " GST_GL_TEXTURE_TARGET_2D_STR "; "
71 "format = (string) { NV12, UYVY, YUY2 }, "
72 "framerate = " GST_VIDEO_FPS_RANGE ", "
73 "width = " GST_VIDEO_SIZE_RANGE ", "
74 "height = " GST_VIDEO_SIZE_RANGE "; "
77 "format = (string) BGRA, "
78 "framerate = " GST_VIDEO_FPS_RANGE ", "
79 "width = " GST_VIDEO_SIZE_RANGE ", "
80 "height = " GST_VIDEO_SIZE_RANGE "; "
/* Condition values for bufQueueLock: the lock sits in NO_BUFFERS while the
 * sample queue is empty and flips to HAS_BUFFER_OR_STOP_REQUEST as soon as a
 * frame is queued or an unlock (flush/stop) is requested, waking -create:. */
typedef enum _QueueState {
  NO_BUFFERS = 1,
  HAS_BUFFER_OR_STOP_REQUEST,
} QueueState;
88 #define gst_avf_video_src_parent_class parent_class
89 G_DEFINE_TYPE (GstAVFVideoSrc, gst_avf_video_src, GST_TYPE_PUSH_SRC);
91 #define GST_TYPE_AVF_VIDEO_SOURCE_POSITION (gst_avf_video_source_position_get_type ())
/* Registers (once) and returns the GEnum type backing the "position"
 * property. Nick strings are the values accepted on the command line. */
static GType
gst_avf_video_source_position_get_type (void)
{
  static GType avf_video_source_position_type = 0;

  if (!avf_video_source_position_type) {
    static GEnumValue position_types[] = {
      { GST_AVF_VIDEO_SOURCE_POSITION_FRONT, "Front-facing camera", "front" },
      { GST_AVF_VIDEO_SOURCE_POSITION_BACK, "Back-facing camera", "back" },
      { GST_AVF_VIDEO_SOURCE_POSITION_DEFAULT, "Default", "default" },
      { 0, NULL, NULL },
    };

    avf_video_source_position_type =
        g_enum_register_static ("GstAVFVideoSourcePosition",
            position_types);
  }

  return avf_video_source_position_type;
}
113 #define GST_TYPE_AVF_VIDEO_SOURCE_ORIENTATION (gst_avf_video_source_orientation_get_type ())
/* Registers (once) and returns the GEnum type backing the "orientation"
 * property. NOTE: the "portrat-upside-down" nick is a historical typo that is
 * kept as-is for backward compatibility with existing pipelines. */
static GType
gst_avf_video_source_orientation_get_type (void)
{
  static GType avf_video_source_orientation_type = 0;

  if (!avf_video_source_orientation_type) {
    static GEnumValue orientation_types[] = {
      { GST_AVF_VIDEO_SOURCE_ORIENTATION_PORTRAIT, "Indicates that video should be oriented vertically, top at the top.", "portrait" },
      { GST_AVF_VIDEO_SOURCE_ORIENTATION_PORTRAIT_UPSIDE_DOWN, "Indicates that video should be oriented vertically, top at the bottom.", "portrat-upside-down" },
      { GST_AVF_VIDEO_SOURCE_ORIENTATION_LANDSCAPE_RIGHT, "Indicates that video should be oriented horizontally, top on the left.", "landscape-right" },
      { GST_AVF_VIDEO_SOURCE_ORIENTATION_LANDSCAPE_LEFT, "Indicates that video should be oriented horizontally, top on the right.", "landscape-left" },
      { GST_AVF_VIDEO_SOURCE_ORIENTATION_DEFAULT, "Default", "default" },
      { 0, NULL, NULL },
    };

    avf_video_source_orientation_type =
        g_enum_register_static ("GstAVFVideoSourceOrientation",
            orientation_types);
  }

  return avf_video_source_orientation_type;
}
137 #define GST_TYPE_AVF_VIDEO_SOURCE_DEVICE_TYPE (gst_avf_video_source_device_type_get_type ())
/* Registers (once) and returns the GEnum type backing the "device-type"
 * property (which physical camera to select on multi-camera devices). */
static GType
gst_avf_video_source_device_type_get_type (void)
{
  static GType avf_video_source_device_type_type = 0;

  if (!avf_video_source_device_type_type) {
    static GEnumValue device_type_types[] = {
      { GST_AVF_VIDEO_SOURCE_DEVICE_TYPE_BUILT_IN_WIDE_ANGLE_CAMERA, "A built-in wide angle camera. These devices are suitable for general purpose use.", "wide-angle" },
      { GST_AVF_VIDEO_SOURCE_DEVICE_TYPE_BUILT_IN_TELEPHOTO_CAMERA, "A built-in camera device with a longer focal length than a wide-angle camera.", "telephoto" },
      { GST_AVF_VIDEO_SOURCE_DEVICE_TYPE_BUILT_IN_DUAL_CAMERA, "A dual camera device, combining built-in wide-angle and telephoto cameras that work together as a single capture device.", "dual" },
      { GST_AVF_VIDEO_SOURCE_DEVICE_TYPE_DEFAULT, "Default", "default" },
      { 0, NULL, NULL },
    };

    avf_video_source_device_type_type =
        g_enum_register_static ("GstAVFVideoSourceDeviceType",
            device_type_types);
  }

  return avf_video_source_device_type_type;
}
160 @interface GstAVFVideoSrcImpl : NSObject <AVCaptureVideoDataOutputSampleBufferDelegate> {
166 const gchar *deviceName;
167 GstAVFVideoSourcePosition position;
168 GstAVFVideoSourceOrientation orientation;
169 GstAVFVideoSourceDeviceType deviceType;
172 AVCaptureSession *session;
173 AVCaptureInput *input;
174 AVCaptureVideoDataOutput *output;
175 AVCaptureDevice *device;
176 AVCaptureConnection *connection;
177 CMClockRef inputClock;
179 dispatch_queue_t mainQueue;
180 dispatch_queue_t workerQueue;
181 NSConditionLock *bufQueueLock;
182 NSMutableArray *bufQueue;
186 GstVideoFormat format;
188 GstClockTime latency;
191 GstClockTime lastSampling;
195 BOOL captureScreenCursor;
196 BOOL captureScreenMouseClicks;
199 GstGLContextHelper *ctxh;
200 GstVideoTextureCache *textureCache;
204 - (id)initWithSrc:(GstPushSrc *)src;
207 @property int deviceIndex;
208 @property const gchar *deviceName;
209 @property GstAVFVideoSourcePosition position;
210 @property GstAVFVideoSourceOrientation orientation;
211 @property GstAVFVideoSourceDeviceType deviceType;
212 @property BOOL doStats;
214 @property BOOL captureScreen;
215 @property BOOL captureScreenCursor;
216 @property BOOL captureScreenMouseClicks;
218 - (BOOL)openScreenInput;
219 - (BOOL)openDeviceInput;
222 - (GstVideoFormat)getGstVideoFormat:(NSNumber *)pixel_format;
224 - (CGDirectDisplayID)getDisplayIdFromDeviceIndex;
225 - (float)getScaleFactorFromDeviceIndex;
227 - (GstCaps *)getDeviceCaps;
228 - (BOOL)setDeviceCaps:(GstVideoInfo *)info;
229 - (BOOL)getSessionPresetCaps:(GstCaps *)result;
230 - (BOOL)setSessionPresetCaps:(GstVideoInfo *)info;
231 - (GstCaps *)getCaps;
232 - (BOOL)setCaps:(GstCaps *)new_caps;
237 - (BOOL)query:(GstQuery *)query;
238 - (void)setContext:(GstContext *)context;
239 - (GstStateChangeReturn)changeState:(GstStateChange)transition;
240 - (GstFlowReturn)create:(GstBuffer **)buf;
241 - (GstCaps *)fixate:(GstCaps *)caps;
242 - (BOOL)decideAllocation:(GstQuery *)query;
243 - (void)updateStatistics;
244 - (void)captureOutput:(AVCaptureOutput *)captureOutput
245 didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
246 fromConnection:(AVCaptureConnection *)connection;
/* Maps the element's device-type enum onto AVFoundation's
 * AVCaptureDeviceType string constants. The DEFAULT value must be resolved
 * by the caller before reaching this function. */
static AVCaptureDeviceType GstAVFVideoSourceDeviceType2AVCaptureDeviceType(GstAVFVideoSourceDeviceType deviceType) {
  switch (deviceType) {
    case GST_AVF_VIDEO_SOURCE_DEVICE_TYPE_BUILT_IN_WIDE_ANGLE_CAMERA:
      return AVCaptureDeviceTypeBuiltInWideAngleCamera;
    case GST_AVF_VIDEO_SOURCE_DEVICE_TYPE_BUILT_IN_TELEPHOTO_CAMERA:
      return AVCaptureDeviceTypeBuiltInTelephotoCamera;
    case GST_AVF_VIDEO_SOURCE_DEVICE_TYPE_BUILT_IN_DUAL_CAMERA:
      return AVCaptureDeviceTypeBuiltInDuoCamera;
    case GST_AVF_VIDEO_SOURCE_DEVICE_TYPE_DEFAULT:
      g_assert_not_reached();
  }

  /* Not reached; keeps the compiler's missing-return warning quiet. */
  return nil;
}
/* Maps the element's position enum onto AVFoundation's
 * AVCaptureDevicePosition. DEFAULT must be resolved by the caller. */
static AVCaptureDevicePosition GstAVFVideoSourcePosition2AVCaptureDevicePosition(GstAVFVideoSourcePosition position) {
  switch (position) {
    case GST_AVF_VIDEO_SOURCE_POSITION_FRONT:
      return AVCaptureDevicePositionFront;
    case GST_AVF_VIDEO_SOURCE_POSITION_BACK:
      return AVCaptureDevicePositionBack;
    case GST_AVF_VIDEO_SOURCE_POSITION_DEFAULT:
      g_assert_not_reached();
  }

  /* Not reached; keeps the compiler's missing-return warning quiet. */
  return AVCaptureDevicePositionUnspecified;
}
/* Maps the element's orientation enum onto AVFoundation's
 * AVCaptureVideoOrientation. DEFAULT must be resolved by the caller. */
static AVCaptureVideoOrientation GstAVFVideoSourceOrientation2AVCaptureVideoOrientation(GstAVFVideoSourceOrientation orientation) {
  switch (orientation) {
    case GST_AVF_VIDEO_SOURCE_ORIENTATION_PORTRAIT:
      return AVCaptureVideoOrientationPortrait;
    case GST_AVF_VIDEO_SOURCE_ORIENTATION_PORTRAIT_UPSIDE_DOWN:
      return AVCaptureVideoOrientationPortraitUpsideDown;
    case GST_AVF_VIDEO_SOURCE_ORIENTATION_LANDSCAPE_LEFT:
      return AVCaptureVideoOrientationLandscapeLeft;
    case GST_AVF_VIDEO_SOURCE_ORIENTATION_LANDSCAPE_RIGHT:
      return AVCaptureVideoOrientationLandscapeRight;
    case GST_AVF_VIDEO_SOURCE_ORIENTATION_DEFAULT:
      g_assert_not_reached();
  }

  /* Not reached; keeps the compiler's missing-return warning quiet. */
  return AVCaptureVideoOrientationPortrait;
}
294 @implementation GstAVFVideoSrcImpl
296 @synthesize deviceIndex, deviceName, position, orientation, deviceType, doStats,
297 fps, captureScreen, captureScreenCursor, captureScreenMouseClicks;
301 return [self initWithSrc:NULL];
304 - (id)initWithSrc:(GstPushSrc *)src
306 if ((self = [super init])) {
307 element = GST_ELEMENT_CAST (src);
308 baseSrc = GST_BASE_SRC_CAST (src);
311 deviceIndex = DEFAULT_DEVICE_INDEX;
313 position = DEFAULT_POSITION;
314 orientation = DEFAULT_ORIENTATION;
315 deviceType = DEFAULT_DEVICE_TYPE;
317 captureScreenCursor = NO;
318 captureScreenMouseClicks = NO;
321 ctxh = gst_gl_context_helper_new (element);
323 dispatch_queue_create ("org.freedesktop.gstreamer.avfvideosrc.main", NULL);
325 dispatch_queue_create ("org.freedesktop.gstreamer.avfvideosrc.output", NULL);
327 gst_base_src_set_live (baseSrc, TRUE);
328 gst_base_src_set_format (baseSrc, GST_FORMAT_TIME);
340 - (BOOL)openDeviceInput
342 NSString *mediaType = AVMediaTypeVideo;
345 if (deviceIndex == DEFAULT_DEVICE_INDEX) {
347 if (deviceType != DEFAULT_DEVICE_TYPE && position != DEFAULT_POSITION) {
348 device = [AVCaptureDevice
349 defaultDeviceWithDeviceType:GstAVFVideoSourceDeviceType2AVCaptureDeviceType(deviceType)
351 position:GstAVFVideoSourcePosition2AVCaptureDevicePosition(position)];
353 device = [AVCaptureDevice defaultDeviceWithMediaType:mediaType];
356 device = [AVCaptureDevice defaultDeviceWithMediaType:mediaType];
359 GST_ELEMENT_ERROR (element, RESOURCE, NOT_FOUND,
360 ("No video capture devices found"), (NULL));
363 } else { // deviceIndex takes priority over position and deviceType
364 NSArray *devices = [AVCaptureDevice devicesWithMediaType:mediaType];
365 if (deviceIndex >= [devices count]) {
366 GST_ELEMENT_ERROR (element, RESOURCE, NOT_FOUND,
367 ("Invalid video capture device index"), (NULL));
370 device = [devices objectAtIndex:deviceIndex];
372 g_assert (device != nil);
374 deviceName = [[device localizedName] UTF8String];
375 GST_INFO ("Opening '%s'", deviceName);
377 input = [AVCaptureDeviceInput deviceInputWithDevice:device
380 GST_ELEMENT_ERROR (element, RESOURCE, BUSY,
381 ("Failed to open device: %s",
382 [[err localizedDescription] UTF8String]),
390 - (BOOL)openScreenInput
395 CGDirectDisplayID displayId;
397 GST_DEBUG_OBJECT (element, "Opening screen input");
399 displayId = [self getDisplayIdFromDeviceIndex];
403 AVCaptureScreenInput *screenInput =
404 [[AVCaptureScreenInput alloc] initWithDisplayID:displayId];
408 [screenInput setValue:[NSNumber numberWithBool:captureScreenCursor]
409 forKey:@"capturesCursor"];
411 } @catch (NSException *exception) {
412 if (![[exception name] isEqualToString:NSUndefinedKeyException]) {
413 GST_WARNING ("An unexpected error occured: %s",
414 [[exception reason] UTF8String]);
416 GST_WARNING ("Capturing cursor is only supported in OS X >= 10.8");
418 screenInput.capturesMouseClicks = captureScreenMouseClicks;
426 BOOL success = NO, *successPtr = &success;
428 GST_DEBUG_OBJECT (element, "Opening device");
430 dispatch_sync (mainQueue, ^{
434 ret = [self openScreenInput];
436 ret = [self openDeviceInput];
441 output = [[AVCaptureVideoDataOutput alloc] init];
442 [output setSampleBufferDelegate:self
444 output.alwaysDiscardsLateVideoFrames = YES;
445 output.videoSettings = nil; /* device native format */
447 session = [[AVCaptureSession alloc] init];
448 [session addInput:input];
449 [session addOutput:output];
451 /* retained by session */
452 connection = [[output connections] firstObject];
454 if (orientation != DEFAULT_ORIENTATION)
455 connection.videoOrientation = GstAVFVideoSourceOrientation2AVCaptureVideoOrientation(orientation);
457 inputClock = ((AVCaptureInputPort *)connection.inputPorts[0]).clock;
462 GST_DEBUG_OBJECT (element, "Opening device %s", success ? "succeed" : "failed");
469 GST_DEBUG_OBJECT (element, "Closing device");
471 dispatch_sync (mainQueue, ^{
472 g_assert (![session isRunning]);
477 [session removeInput:input];
478 [session removeOutput:output];
486 if (!captureScreen) {
491 gst_caps_unref (caps);
/* Builds fixed-framerate video/x-raw caps for one format/resolution. */
#define GST_AVF_CAPS_NEW(format, w, h, fps_n, fps_d)                  \
    (gst_caps_new_simple ("video/x-raw",                              \
        "width", G_TYPE_INT, w,                                       \
        "height", G_TYPE_INT, h,                                      \
        "format", G_TYPE_STRING, gst_video_format_to_string (format), \
        "framerate", GST_TYPE_FRACTION, (fps_n), (fps_d),             \
        NULL))
/* Builds video/x-raw caps with a framerate *range* (device reports a
 * min/max rate rather than a single fixed rate). */
#define GST_AVF_FPS_RANGE_CAPS_NEW(format, w, h, min_fps_n, min_fps_d, max_fps_n, max_fps_d) \
    (gst_caps_new_simple ("video/x-raw",                                                     \
        "width", G_TYPE_INT, w,                                                              \
        "height", G_TYPE_INT, h,                                                             \
        "format", G_TYPE_STRING, gst_video_format_to_string (format),                        \
        "framerate", GST_TYPE_FRACTION_RANGE, (min_fps_n), (min_fps_d), (max_fps_n), (max_fps_d), \
        NULL))
/* Translates a CoreVideo pixel format (boxed FourCC) to a GstVideoFormat,
 * logging (but not failing) when the format is unsupported.
 * Returns GST_VIDEO_FORMAT_UNKNOWN for unhandled formats. */
- (GstVideoFormat)getGstVideoFormat:(NSNumber *)pixel_format
{
  GstVideoFormat gst_format = get_gst_video_format(pixel_format);
  if (gst_format == GST_VIDEO_FORMAT_UNKNOWN) {
    GST_LOG_OBJECT (element, "Pixel format %s is not handled by avfvideosrc",
        [[pixel_format stringValue] UTF8String]);
  }

  return gst_format;
}
/* Resolves deviceIndex to a CoreGraphics display ID for screen capture.
 * DEFAULT_DEVICE_INDEX means the main display; an out-of-range index posts
 * an element error and returns 0 (an invalid display ID). */
- (CGDirectDisplayID)getDisplayIdFromDeviceIndex
{
  NSDictionary *description;
  NSNumber *displayId;
  NSArray *screens = [NSScreen screens];

  if (deviceIndex == DEFAULT_DEVICE_INDEX)
    return kCGDirectMainDisplay;
  if (deviceIndex >= [screens count]) {
    GST_ELEMENT_ERROR (element, RESOURCE, NOT_FOUND,
        ("Invalid screen capture device index"), (NULL));
    return 0;
  }
  description = [[screens objectAtIndex:deviceIndex] deviceDescription];
  /* NSScreenNumber holds the CGDirectDisplayID for this screen. */
  displayId = [description objectForKey:@"NSScreenNumber"];
  return [displayId unsignedIntegerValue];
}
/* Returns the HiDPI backing scale factor of the screen selected by
 * deviceIndex (used to convert point sizes to pixel sizes for caps).
 * Falls back to 1.0 on an invalid index after posting an element error. */
- (float)getScaleFactorFromDeviceIndex
{
  NSArray *screens = [NSScreen screens];

  if (deviceIndex == DEFAULT_DEVICE_INDEX)
    return [[NSScreen mainScreen] backingScaleFactor];
  if (deviceIndex >= [screens count]) {
    GST_ELEMENT_ERROR (element, RESOURCE, NOT_FOUND,
        ("Invalid screen capture device index"), (NULL));
    return 1.0;
  }
  return [[screens objectAtIndex:deviceIndex] backingScaleFactor];
}
/* Returns the capture dimensions adjusted for the configured orientation
 * (portrait orientations swap width and height). */
- (CMVideoDimensions)orientedDimensions:(CMVideoDimensions)dimensions
{
  return get_oriented_dimensions(orientation, dimensions);
}
/* Queries the capture device (via its format list) for the caps it can
 * produce, taking the configured orientation into account.
 * Returns a new caps reference owned by the caller. */
- (GstCaps *)getDeviceCaps
{
  GST_DEBUG_OBJECT (element, "Getting device caps");
  GstCaps *device_caps = gst_av_capture_device_get_caps (device, output, orientation);
  GST_DEBUG_OBJECT (element, "Device returned the following caps %" GST_PTR_FORMAT, device_caps);

  return device_caps;
}
571 - (BOOL)setDeviceCaps:(GstVideoInfo *)info
574 gboolean found_format = FALSE, found_framerate = FALSE;
575 NSArray *formats = [device valueForKey:@"formats"];
576 gst_util_fraction_to_double (info->fps_n, info->fps_d, &framerate);
578 GST_DEBUG_OBJECT (element, "Setting device caps");
580 if ([device lockForConfiguration:NULL] == YES) {
581 for (NSObject *f in formats) {
582 CMFormatDescriptionRef formatDescription = (__bridge CMFormatDescriptionRef) [f performSelector:@selector(formatDescription)];
583 CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(formatDescription);
584 dimensions = [self orientedDimensions:dimensions];
585 if (dimensions.width == info->width && dimensions.height == info->height) {
587 [device setValue:f forKey:@"activeFormat"];
588 for (NSObject *rate in [f valueForKey:@"videoSupportedFrameRateRanges"]) {
589 gdouble min_frame_rate, max_frame_rate;
591 [[rate valueForKey:@"minFrameRate"] getValue:&min_frame_rate];
592 [[rate valueForKey:@"maxFrameRate"] getValue:&max_frame_rate];
593 if ((framerate >= min_frame_rate - 0.00001) &&
594 (framerate <= max_frame_rate + 0.00001)) {
595 NSValue *frame_duration_value;
596 found_framerate = TRUE;
597 if (min_frame_rate == max_frame_rate) {
598 /* on mac we get tight ranges and an exception is raised if the
599 * frame duration doesn't match the one reported in the range to
600 * the last decimal point
602 frame_duration_value = [rate valueForKey:@"minFrameDuration"];
604 // Invert fps_n and fps_d to get frame duration value and timescale (or numerator and denominator)
605 frame_duration_value = [NSValue valueWithCMTime:CMTimeMake (info->fps_d, info->fps_n)];
607 [device setValue:frame_duration_value forKey:@"activeVideoMinFrameDuration"];
609 /* Only available on OSX >= 10.8 and iOS >= 7.0 */
610 [device setValue:frame_duration_value forKey:@"activeVideoMaxFrameDuration"];
611 } @catch (NSException *exception) {
612 if (![[exception name] isEqualToString:NSUndefinedKeyException]) {
613 GST_WARNING ("An unexcepted error occured: %s",
614 [exception.reason UTF8String]);
623 GST_WARNING ("Unsupported capture dimensions %dx%d", info->width, info->height);
626 if (!found_framerate) {
627 GST_WARNING ("Unsupported capture framerate %d/%d", info->fps_n, info->fps_d);
631 GST_WARNING ("Couldn't lock device for configuration");
/* Appends, for every pixel format the output supports, one caps structure
 * per AVCaptureSession preset the session can use. Fallback caps path for
 * platforms without the per-format device API. Always returns YES. */
- (BOOL)getSessionPresetCaps:(GstCaps *)result
{
  NSArray *pixel_formats = output.availableVideoCVPixelFormatTypes;
  for (NSNumber *pixel_format in pixel_formats) {
    GstVideoFormat gst_format = [self getGstVideoFormat:pixel_format];
    if (gst_format == GST_VIDEO_FORMAT_UNKNOWN)
      continue;

    if ([session canSetSessionPreset:AVCaptureSessionPreset1920x1080])
      gst_caps_append (result, GST_AVF_CAPS_NEW (gst_format, 1920, 1080, DEVICE_FPS_N, DEVICE_FPS_D));
    if ([session canSetSessionPreset:AVCaptureSessionPreset1280x720])
      gst_caps_append (result, GST_AVF_CAPS_NEW (gst_format, 1280, 720, DEVICE_FPS_N, DEVICE_FPS_D));
    if ([session canSetSessionPreset:AVCaptureSessionPreset640x480])
      gst_caps_append (result, GST_AVF_CAPS_NEW (gst_format, 640, 480, DEVICE_FPS_N, DEVICE_FPS_D));
    if ([session canSetSessionPreset:AVCaptureSessionPresetMedium])
      gst_caps_append (result, GST_AVF_CAPS_NEW (gst_format, 480, 360, DEVICE_FPS_N, DEVICE_FPS_D));
    if ([session canSetSessionPreset:AVCaptureSessionPreset352x288])
      gst_caps_append (result, GST_AVF_CAPS_NEW (gst_format, 352, 288, DEVICE_FPS_N, DEVICE_FPS_D));
    if ([session canSetSessionPreset:AVCaptureSessionPresetLow])
      gst_caps_append (result, GST_AVF_CAPS_NEW (gst_format, 192, 144, DEVICE_FPS_N, DEVICE_FPS_D));
  }

  GST_LOG_OBJECT (element, "Session presets returned the following caps %" GST_PTR_FORMAT, result);

  return YES;
}
/* Configures the capture session via session presets, keyed off the
 * requested width. Fallback configuration path for platforms without the
 * per-format device API. The device stays locked on success; the lock is
 * released later (after the session starts) so the session cannot reset the
 * chosen format. Returns NO on lock failure or unsupported dimensions. */
- (BOOL)setSessionPresetCaps:(GstVideoInfo *)info
{
  GST_DEBUG_OBJECT (element, "Setting session preset caps");

  if ([device lockForConfiguration:NULL] != YES) {
    GST_WARNING ("Couldn't lock device for configuration");
    return NO;
  }

  switch (info->width) {
  case 192:
    session.sessionPreset = AVCaptureSessionPresetLow;
    break;
  case 352:
    session.sessionPreset = AVCaptureSessionPreset352x288;
    break;
  case 480:
    session.sessionPreset = AVCaptureSessionPresetMedium;
    break;
  case 640:
    session.sessionPreset = AVCaptureSessionPreset640x480;
    break;
  case 1280:
    session.sessionPreset = AVCaptureSessionPreset1280x720;
    break;
  case 1920:
    session.sessionPreset = AVCaptureSessionPreset1920x1080;
    break;
  default:
    GST_WARNING ("Unsupported capture dimensions %dx%d", info->width, info->height);
    return NO;
  }

  return YES;
}
706 NSArray *pixel_formats;
709 return NULL; /* BaseSrc will return template caps */
711 result = gst_caps_new_empty ();
712 pixel_formats = output.availableVideoCVPixelFormatTypes;
716 CGRect rect = CGDisplayBounds ([self getDisplayIdFromDeviceIndex]);
717 float scale = [self getScaleFactorFromDeviceIndex];
718 for (NSNumber *pixel_format in pixel_formats) {
719 GstVideoFormat gst_format = [self getGstVideoFormat:pixel_format];
720 if (gst_format != GST_VIDEO_FORMAT_UNKNOWN)
721 gst_caps_append (result, gst_caps_new_simple ("video/x-raw",
722 "width", G_TYPE_INT, (int)(rect.size.width * scale),
723 "height", G_TYPE_INT, (int)(rect.size.height * scale),
724 "format", G_TYPE_STRING, gst_video_format_to_string (gst_format),
728 GST_WARNING ("Screen capture is not supported by iOS");
734 result = gst_caps_merge (result, [self getDeviceCaps]);
735 } @catch (NSException *exception) {
736 if (![[exception name] isEqualToString:NSUndefinedKeyException]) {
737 GST_WARNING ("An unexcepted error occured: %s", [exception.reason UTF8String]);
741 /* Fallback on session presets API for iOS < 7.0 */
742 [self getSessionPresetCaps:result];
748 - (BOOL)setCaps:(GstCaps *)new_caps
751 BOOL success = YES, *successPtr = &success;
753 gst_video_info_init (&info);
754 gst_video_info_from_caps (&info, new_caps);
757 height = info.height;
758 format = info.finfo->format;
759 latency = gst_util_uint64_scale (GST_SECOND, info.fps_d, info.fps_n);
761 dispatch_sync (mainQueue, ^{
766 AVCaptureScreenInput *screenInput = (AVCaptureScreenInput *)input;
767 screenInput.minFrameDuration = CMTimeMake(info.fps_d, info.fps_n);
769 GST_WARNING ("Screen capture is not supported by iOS");
776 /* formats and activeFormat keys are only available on OSX >= 10.7 and iOS >= 7.0 */
777 *successPtr = [self setDeviceCaps:(GstVideoInfo *)&info];
778 if (*successPtr != YES)
781 } @catch (NSException *exception) {
783 if (![[exception name] isEqualToString:NSUndefinedKeyException]) {
784 GST_WARNING ("An unexcepted error occured: %s", [exception.reason UTF8String]);
789 /* Fallback on session presets API for iOS < 7.0 */
790 *successPtr = [self setSessionPresetCaps:(GstVideoInfo *)&info];
791 if (*successPtr != YES)
797 case GST_VIDEO_FORMAT_NV12:
798 newformat = kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange;
800 case GST_VIDEO_FORMAT_UYVY:
801 newformat = kCVPixelFormatType_422YpCbCr8;
803 case GST_VIDEO_FORMAT_YUY2:
804 newformat = kCVPixelFormatType_422YpCbCr8_yuvs;
806 case GST_VIDEO_FORMAT_BGRA:
807 newformat = kCVPixelFormatType_32BGRA;
811 GST_WARNING ("Unsupported output format %s",
812 gst_video_format_to_string (format));
816 GST_INFO_OBJECT (element,
817 "width: %d height: %d format: %s", width, height,
818 gst_video_format_to_string (format));
820 output.videoSettings = [NSDictionary
821 dictionaryWithObject:[NSNumber numberWithInt:newformat]
822 forKey:(NSString*)kCVPixelBufferPixelFormatTypeKey];
824 gst_caps_replace (&caps, new_caps);
825 GST_INFO_OBJECT (element, "configured caps %"GST_PTR_FORMAT, caps);
827 if (![session isRunning])
828 [session startRunning];
830 /* Unlock device configuration only after session is started so the session
831 * won't reset the capture formats */
832 [device unlockForConfiguration];
840 bufQueueLock = [[NSConditionLock alloc] initWithCondition:NO_BUFFERS];
841 bufQueue = [[NSMutableArray alloc] initWithCapacity:BUFFER_QUEUE_SIZE];
845 latency = GST_CLOCK_TIME_NONE;
847 lastSampling = GST_CLOCK_TIME_NONE;
856 dispatch_sync (mainQueue, ^{ [session stopRunning]; });
857 dispatch_sync (workerQueue, ^{});
863 gst_video_texture_cache_free (textureCache);
867 gst_gl_context_helper_free (ctxh);
/* GstBaseSrc::query vfunc body: answers LATENCY queries from the configured
 * frame duration (min == max == one frame); everything else chains up. */
- (BOOL)query:(GstQuery *)query
{
  BOOL result = NO;

  if (GST_QUERY_TYPE (query) == GST_QUERY_LATENCY) {
    /* Only answer once a device is open and caps are negotiated, otherwise
     * `latency` is not meaningful yet. */
    if (device != nil && caps != NULL) {
      GstClockTime min_latency, max_latency;

      min_latency = max_latency = latency;
      result = YES;

      GST_DEBUG_OBJECT (element, "reporting latency of min %" GST_TIME_FORMAT
          " max %" GST_TIME_FORMAT,
          GST_TIME_ARGS (min_latency), GST_TIME_ARGS (max_latency));
      gst_query_set_latency (query, TRUE, min_latency, max_latency);
    }
  } else {
    result = GST_BASE_SRC_CLASS (parent_class)->query (baseSrc, query);
  }

  return result;
}
/* GstBaseSrc::unlock vfunc body: interrupts a -create: blocked on the
 * condition lock by raising stopRequest and signalling the condition. */
- (BOOL)unlock
{
  [bufQueueLock lock];
  stopRequest = YES;
  [bufQueueLock unlockWithCondition:HAS_BUFFER_OR_STOP_REQUEST];

  return YES;
}

/* GstBaseSrc::unlock_stop vfunc body: clears the stop request and restores
 * the condition to reflect the actual queue occupancy. */
- (BOOL)unlockStop
{
  [bufQueueLock lock];
  stopRequest = NO;
  [bufQueueLock unlockWithCondition:([bufQueue count] == 0) ? NO_BUFFERS : HAS_BUFFER_OR_STOP_REQUEST];

  return YES;
}
/* GstElement::change_state vfunc body: opens the capture device going
 * NULL->READY (before chaining up) and closes it after READY->NULL. */
- (GstStateChangeReturn)changeState:(GstStateChange)transition
{
  GstStateChangeReturn ret;

  if (transition == GST_STATE_CHANGE_NULL_TO_READY) {
    if (![self openDevice])
      return GST_STATE_CHANGE_FAILURE;
  }

  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);

  if (transition == GST_STATE_CHANGE_READY_TO_NULL)
    [self closeDevice];

  return ret;
}
/* AVCaptureVideoDataOutputSampleBufferDelegate callback, invoked on
 * workerQueue for every captured frame. Timestamps the sample against the
 * pipeline clock and pushes it onto the front of bufQueue (dropping the
 * oldest entry when full), then signals -create:.
 * NOTE(review): the sample buffer is retained by the NSDictionary via the
 * __bridge cast while it sits in the queue. */
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)aConnection
{
  GstClockTime timestamp, duration;

  [bufQueueLock lock];

  if (stopRequest) {
    [bufQueueLock unlock];
    return;
  }

  [self getSampleBuffer:sampleBuffer timestamp:&timestamp duration:&duration];

  if (timestamp == GST_CLOCK_TIME_NONE) {
    /* No pipeline clock yet: drop the frame and restore the condition. */
    [bufQueueLock unlockWithCondition:([bufQueue count] == 0) ? NO_BUFFERS : HAS_BUFFER_OR_STOP_REQUEST];
    return;
  }

  /* Bounded queue: discard the oldest frame when full (leaky downstream). */
  if ([bufQueue count] == BUFFER_QUEUE_SIZE)
    [bufQueue removeLastObject];

  [bufQueue insertObject:@{@"sbuf": (__bridge id)sampleBuffer,
                           @"timestamp": @(timestamp),
                           @"duration": @(duration)}
                 atIndex:0];

  [bufQueueLock unlockWithCondition:HAS_BUFFER_OR_STOP_REQUEST];
}
/* GstPushSrc::create vfunc body: blocks until a frame (or a stop request)
 * is available, wraps the CMSampleBuffer in a GstBuffer and timestamps it.
 * If the device silently changed its output size, updates and re-pushes
 * caps before delivering the buffer.
 * FIX: pushes the caps event on GST_BASE_SRC_PAD — the original used
 * GST_BASE_SINK_PAD, which is wrong for a source element. */
- (GstFlowReturn)create:(GstBuffer **)buf
{
  CMSampleBufferRef sbuf;
  CVImageBufferRef image_buf;
  CVPixelBufferRef pixel_buf;
  size_t cur_width, cur_height;
  GstClockTime timestamp, duration;

  [bufQueueLock lockWhenCondition:HAS_BUFFER_OR_STOP_REQUEST];
  if (stopRequest) {
    [bufQueueLock unlock];
    return GST_FLOW_FLUSHING;
  }

  NSDictionary *dic = (NSDictionary *) [bufQueue lastObject];
  sbuf = (__bridge CMSampleBufferRef) dic[@"sbuf"];
  timestamp = (GstClockTime) [dic[@"timestamp"] longLongValue];
  duration = (GstClockTime) [dic[@"duration"] longLongValue];
  /* Keep the sample alive past removal from the queue (the queue held the
   * only strong reference). */
  CFRetain (sbuf);
  [bufQueue removeLastObject];
  [bufQueueLock unlockWithCondition:
      ([bufQueue count] == 0) ? NO_BUFFERS : HAS_BUFFER_OR_STOP_REQUEST];

  /* Check output frame size dimensions */
  image_buf = CMSampleBufferGetImageBuffer (sbuf);
  if (image_buf) {
    pixel_buf = (CVPixelBufferRef) image_buf;
    cur_width = CVPixelBufferGetWidth (pixel_buf);
    cur_height = CVPixelBufferGetHeight (pixel_buf);

    if (width != cur_width || height != cur_height) {
      /* Set new caps according to current frame dimensions */
      GST_WARNING ("Output frame size has changed %dx%d -> %dx%d, updating caps",
          width, height, (int)cur_width, (int)cur_height);
      width = cur_width;
      height = cur_height;
      gst_caps_set_simple (caps,
          "width", G_TYPE_INT, width,
          "height", G_TYPE_INT, height,
          NULL);
      gst_pad_push_event (GST_BASE_SRC_PAD (baseSrc), gst_event_new_caps (caps));
    }
  }

  *buf = gst_core_media_buffer_new (sbuf, useVideoMeta, textureCache);
  CFRelease (sbuf);
  if (*buf == NULL)
    return GST_FLOW_ERROR;

  GST_BUFFER_OFFSET (*buf) = offset++;
  GST_BUFFER_OFFSET_END (*buf) = GST_BUFFER_OFFSET (*buf) + 1;
  GST_BUFFER_TIMESTAMP (*buf) = timestamp;
  GST_BUFFER_DURATION (*buf) = duration;

  if (doStats)
    [self updateStatistics];

  return GST_FLOW_OK;
}
/* GstBaseSrc::fixate vfunc body: prefers the largest height and the highest
 * framerate the device offers (width follows from the structure). */
- (GstCaps *)fixate:(GstCaps *)new_caps
{
  GstStructure *structure;

  new_caps = gst_caps_make_writable (new_caps);
  new_caps = gst_caps_truncate (new_caps);
  structure = gst_caps_get_structure (new_caps, 0);
  /* crank up to 11. This is what the presets do, but we don't use the presets
   * on newer systems, so mimic their behaviour when fixating. */
  gst_structure_fixate_field_nearest_int (structure, "height", G_MAXINT);
  gst_structure_fixate_field_nearest_fraction (structure, "framerate", G_MAXINT, 1);

  return gst_caps_fixate (new_caps);
}
/* GstBaseSrc::decide_allocation vfunc body: chains up, then — when
 * downstream negotiated GLMemory — (re)creates the GL texture cache bound
 * to the current GL context.
 * FIX: only allocate a new cache when none exists; the original
 * unconditionally created a fresh cache, leaking the old one whenever the
 * GL context had not changed. */
- (BOOL)decideAllocation:(GstQuery *)query
{
  GstCaps *alloc_caps;
  GstCapsFeatures *features;
  gboolean ret;

  ret = GST_BASE_SRC_CLASS (parent_class)->decide_allocation (baseSrc, query);
  if (!ret)
    return ret;

  gst_query_parse_allocation (query, &alloc_caps, NULL);
  features = gst_caps_get_features (alloc_caps, 0);
  if (gst_caps_features_contains (features, GST_CAPS_FEATURE_MEMORY_GL_MEMORY)) {
    gst_gl_context_helper_ensure_context (ctxh);
    GST_INFO_OBJECT (element, "pushing textures, context %p old context %p",
        ctxh->context, textureCache ? textureCache->ctx : NULL);
    /* Drop the cache if it was built against a stale GL context. */
    if (textureCache && textureCache->ctx != ctxh->context) {
      gst_video_texture_cache_free (textureCache);
      textureCache = NULL;
    }
    if (!textureCache)
      textureCache = gst_video_texture_cache_new (ctxh->context);
    gst_video_texture_cache_set_format (textureCache, format, alloc_caps);
  }

  return TRUE;
}
/* GstElement::set_context vfunc body: harvests GL display/context info from
 * the supplied context into the helper, then chains up. */
- (void)setContext:(GstContext *)context
{
  GST_INFO_OBJECT (element, "setting context %s",
      gst_context_get_context_type (context));
  gst_gl_handle_set_context (element, context,
      &ctxh->display, &ctxh->other_context);
  GST_ELEMENT_CLASS (parent_class)->set_context (element, context);
}
/* Derives a pipeline-clock timestamp and a duration for a captured sample.
 * The AVF presentation timestamp tells us how long the frame spent in AVF's
 * queues (measured against the capture input clock); that delay is
 * subtracted from our current running time. Outputs GST_CLOCK_TIME_NONE
 * when no pipeline clock is available or no timing info can be read. */
- (void)getSampleBuffer:(CMSampleBufferRef)sbuf
              timestamp:(GstClockTime *)outTimestamp
               duration:(GstClockTime *)outDuration
{
  CMSampleTimingInfo time_info;
  GstClockTime timestamp, avf_timestamp, duration, input_clock_now, input_clock_diff, running_time;
  CMItemCount num_timings;
  GstClock *clock;
  CMTime now;

  timestamp = GST_CLOCK_TIME_NONE;
  duration = GST_CLOCK_TIME_NONE;
  if (CMSampleBufferGetOutputSampleTimingInfoArray(sbuf, 1, &time_info, &num_timings) == noErr) {
    avf_timestamp = gst_util_uint64_scale (GST_SECOND,
        time_info.presentationTimeStamp.value, time_info.presentationTimeStamp.timescale);

    if (CMTIME_IS_VALID (time_info.duration) && time_info.duration.timescale != 0)
      duration = gst_util_uint64_scale (GST_SECOND,
          time_info.duration.value, time_info.duration.timescale);

    now = CMClockGetTime(inputClock);
    input_clock_now = gst_util_uint64_scale (GST_SECOND,
        now.value, now.timescale);
    input_clock_diff = input_clock_now - avf_timestamp;

    GST_OBJECT_LOCK (element);
    clock = GST_ELEMENT_CLOCK (element);
    if (clock) {
      running_time = gst_clock_get_time (clock) - element->base_time;
      /* We use presentationTimeStamp to determine how much time it took
       * between capturing and receiving the frame in our delegate
       * (e.g. how long it spent in AVF queues), then we subtract that time
       * from our running time to get the actual timestamp.
       */
      if (running_time >= input_clock_diff)
        timestamp = running_time - input_clock_diff;
      else
        timestamp = running_time;

      GST_DEBUG_OBJECT (element, "AVF clock: %"GST_TIME_FORMAT ", AVF PTS: %"GST_TIME_FORMAT
          ", AVF clock diff: %"GST_TIME_FORMAT
          ", running time: %"GST_TIME_FORMAT ", out PTS: %"GST_TIME_FORMAT,
          GST_TIME_ARGS (input_clock_now), GST_TIME_ARGS (avf_timestamp),
          GST_TIME_ARGS (input_clock_diff),
          GST_TIME_ARGS (running_time), GST_TIME_ARGS (timestamp));
    } else {
      /* no clock, can't set timestamps */
      timestamp = GST_CLOCK_TIME_NONE;
    }
    GST_OBJECT_UNLOCK (element);
  }

  *outTimestamp = timestamp;
  *outDuration = duration;
}
/* Per-buffer statistics bookkeeping (only called when doStats is set):
 * counts frames and, once per second of clock time, publishes the count via
 * the "fps" property and notifies listeners.
 * NOTE(review): assumes `count` and `fps` ivars exist as in upstream — the
 * ivar declarations are outside this excerpt; confirm against the class. */
- (void)updateStatistics
{
  GstClock *clock;

  GST_OBJECT_LOCK (element);
  clock = GST_ELEMENT_CLOCK (element);
  if (clock != NULL)
    gst_object_ref (clock);
  GST_OBJECT_UNLOCK (element);

  if (clock != NULL) {
    GstClockTime now = gst_clock_get_time (clock);
    gst_object_unref (clock);

    count++;

    if (GST_CLOCK_TIME_IS_VALID (lastSampling)) {
      if (now - lastSampling >= GST_SECOND) {
        GST_OBJECT_LOCK (element);
        fps = count;
        GST_OBJECT_UNLOCK (element);

        g_object_notify (G_OBJECT (element), "fps");

        lastSampling = now;
        count = 0;
      }
    } else {
      lastSampling = now;
    }
  }
}
1181 PROP_CAPTURE_SCREEN,
1182 PROP_CAPTURE_SCREEN_CURSOR,
1183 PROP_CAPTURE_SCREEN_MOUSE_CLICKS,
/* Forward declarations of the GObject / GstElement / GstBaseSrc /
 * GstPushSrc virtual-function implementations.  Each of these is a thin
 * C wrapper that forwards to the Objective-C GstAVFVideoSrcImpl object
 * stored in the element's `impl` field. */
1188 static void gst_avf_video_src_finalize (GObject * obj);
1189 static void gst_avf_video_src_get_property (GObject * object, guint prop_id,
1190 GValue * value, GParamSpec * pspec);
1191 static void gst_avf_video_src_set_property (GObject * object, guint prop_id,
1192 const GValue * value, GParamSpec * pspec);
1193 static GstStateChangeReturn gst_avf_video_src_change_state (
1194 GstElement * element, GstStateChange transition);
1195 static GstCaps * gst_avf_video_src_get_caps (GstBaseSrc * basesrc,
1197 static gboolean gst_avf_video_src_set_caps (GstBaseSrc * basesrc,
1199 static gboolean gst_avf_video_src_start (GstBaseSrc * basesrc);
1200 static gboolean gst_avf_video_src_stop (GstBaseSrc * basesrc);
1201 static gboolean gst_avf_video_src_query (GstBaseSrc * basesrc,
1203 static gboolean gst_avf_video_src_unlock (GstBaseSrc * basesrc);
1204 static gboolean gst_avf_video_src_unlock_stop (GstBaseSrc * basesrc);
1205 static GstFlowReturn gst_avf_video_src_create (GstPushSrc * pushsrc,
1207 static GstCaps * gst_avf_video_src_fixate (GstBaseSrc * bsrc,
1209 static gboolean gst_avf_video_src_decide_allocation (GstBaseSrc * bsrc,
1211 static void gst_avf_video_src_set_context (GstElement * element,
1212 GstContext * context);
/* Class init: wire up GObject property handling, element state handling,
 * the basesrc/pushsrc virtual methods, element metadata, the src pad
 * template, all properties, and the debug category. */
1215 gst_avf_video_src_class_init (GstAVFVideoSrcClass * klass)
1217 GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
1218 GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
1219 GstBaseSrcClass *gstbasesrc_class = GST_BASE_SRC_CLASS (klass);
1220 GstPushSrcClass *gstpushsrc_class = GST_PUSH_SRC_CLASS (klass);
/* GObject vfuncs: lifetime + property access. */
1222 gobject_class->finalize = gst_avf_video_src_finalize;
1223 gobject_class->get_property = gst_avf_video_src_get_property;
1224 gobject_class->set_property = gst_avf_video_src_set_property;
/* GstElement vfuncs: state changes and GL context propagation. */
1226 gstelement_class->change_state = gst_avf_video_src_change_state;
1227 gstelement_class->set_context = gst_avf_video_src_set_context;
/* GstBaseSrc / GstPushSrc vfuncs; all forward to the ObjC impl. */
1229 gstbasesrc_class->get_caps = gst_avf_video_src_get_caps;
1230 gstbasesrc_class->set_caps = gst_avf_video_src_set_caps;
1231 gstbasesrc_class->start = gst_avf_video_src_start;
1232 gstbasesrc_class->stop = gst_avf_video_src_stop;
1233 gstbasesrc_class->query = gst_avf_video_src_query;
1234 gstbasesrc_class->unlock = gst_avf_video_src_unlock;
1235 gstbasesrc_class->unlock_stop = gst_avf_video_src_unlock_stop;
1236 gstbasesrc_class->fixate = gst_avf_video_src_fixate;
1237 gstbasesrc_class->decide_allocation = gst_avf_video_src_decide_allocation;
1239 gstpushsrc_class->create = gst_avf_video_src_create;
1241 gst_element_class_set_metadata (gstelement_class,
1242 "Video Source (AVFoundation)", "Source/Video/Hardware",
1243 "Reads frames from an iOS AVFoundation device",
1244 "Ole André Vadla Ravnås <oleavr@soundrop.com>");
1246 gst_element_class_add_static_pad_template (gstelement_class, &src_template);
/* Device-selection properties. */
1248 g_object_class_install_property (gobject_class, PROP_DEVICE_INDEX,
1249 g_param_spec_int ("device-index", "Device Index",
1250 "The zero-based device index",
1251 -1, G_MAXINT, DEFAULT_DEVICE_INDEX,
1252 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
1253 g_object_class_install_property (gobject_class, PROP_DEVICE_NAME,
1254 g_param_spec_string ("device-name", "Device Name",
1255 "The name of the currently opened capture device",
1256 NULL, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
1257 g_object_class_install_property (gobject_class, PROP_POSITION,
1258 g_param_spec_enum ("position", "Position",
1259 "The position of the capture device (front or back-facing)",
1260 GST_TYPE_AVF_VIDEO_SOURCE_POSITION, DEFAULT_POSITION,
1261 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
1262 g_object_class_install_property (gobject_class, PROP_ORIENTATION,
1263 g_param_spec_enum ("orientation", "Orientation",
1264 "The orientation of the video",
1265 GST_TYPE_AVF_VIDEO_SOURCE_ORIENTATION, DEFAULT_ORIENTATION,
1266 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
1267 g_object_class_install_property (gobject_class, PROP_DEVICE_TYPE,
1268 g_param_spec_enum ("device-type", "Device Type",
1269 "The general type of a video capture device",
1270 GST_TYPE_AVF_VIDEO_SOURCE_DEVICE_TYPE, DEFAULT_DEVICE_TYPE,
1271 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
/* Statistics: do-stats enables measurement, fps is the read-only result
 * (-1 until the first sample is taken). */
1272 g_object_class_install_property (gobject_class, PROP_DO_STATS,
1273 g_param_spec_boolean ("do-stats", "Enable statistics",
1274 "Enable logging of statistics", DEFAULT_DO_STATS,
1275 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
1276 g_object_class_install_property (gobject_class, PROP_FPS,
1277 g_param_spec_int ("fps", "Frames per second",
1278 "Last measured framerate, if statistics are enabled",
1279 -1, G_MAXINT, -1, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
/* Screen-capture mode and its sub-options. */
1281 g_object_class_install_property (gobject_class, PROP_CAPTURE_SCREEN,
1282 g_param_spec_boolean ("capture-screen", "Enable screen capture",
1283 "Enable screen capture functionality", FALSE,
1284 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
1285 g_object_class_install_property (gobject_class, PROP_CAPTURE_SCREEN_CURSOR,
1286 g_param_spec_boolean ("capture-screen-cursor", "Capture screen cursor",
1287 "Enable cursor capture while capturing screen", FALSE,
1288 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
1289 g_object_class_install_property (gobject_class, PROP_CAPTURE_SCREEN_MOUSE_CLICKS,
1290 g_param_spec_boolean ("capture-screen-mouse-clicks", "Enable mouse clicks capture",
1291 "Enable mouse clicks capture while capturing screen", FALSE,
1292 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
1295 GST_DEBUG_CATEGORY_INIT (gst_avf_video_src_debug, "avfvideosrc",
1296 0, "iOS AVFoundation video source");
/* Instance init: create the backing Objective-C implementation object.
 * __bridge_retained transfers ownership out of ARC into the plain C
 * struct; the matching CFBridgingRelease() is in
 * gst_avf_video_src_finalize(). */
1300 gst_avf_video_src_init (GstAVFVideoSrc * src)
1302 src->impl = (__bridge_retained gpointer)[[GstAVFVideoSrcImpl alloc] initWithSrc:GST_PUSH_SRC (src)];
/* GObject finalize: release the retained reference to the ObjC impl
 * taken in gst_avf_video_src_init(), then chain up to the parent. */
1306 gst_avf_video_src_finalize (GObject * obj)
1308 CFBridgingRelease(GST_AVF_VIDEO_SRC_CAST(obj)->impl);
1310 G_OBJECT_CLASS (parent_class)->finalize (obj);
/* Property getter: every value is read straight off the ObjC impl's
 * properties via dot syntax.  Only "fps" needs the object lock, because
 * it is concurrently written by the statistics sampling code. */
1314 gst_avf_video_src_get_property (GObject * object, guint prop_id, GValue * value,
1317 GstAVFVideoSrcImpl *impl = GST_AVF_VIDEO_SRC_IMPL (object);
1321 case PROP_CAPTURE_SCREEN:
1322 g_value_set_boolean (value, impl.captureScreen);
1324 case PROP_CAPTURE_SCREEN_CURSOR:
1325 g_value_set_boolean (value, impl.captureScreenCursor);
1327 case PROP_CAPTURE_SCREEN_MOUSE_CLICKS:
1328 g_value_set_boolean (value, impl.captureScreenMouseClicks);
1331 case PROP_DEVICE_INDEX:
1332 g_value_set_int (value, impl.deviceIndex);
1334 case PROP_DEVICE_NAME:
1335 g_value_set_string (value, impl.deviceName);
1338 g_value_set_enum(value, impl.position);
1340 case PROP_ORIENTATION:
1341 g_value_set_enum(value, impl.orientation);
1343 case PROP_DEVICE_TYPE:
1344 g_value_set_enum(value, impl.deviceType);
1347 g_value_set_boolean (value, impl.doStats);
/* fps is updated from the capture thread; read it under the lock. */
1350 GST_OBJECT_LOCK (object);
1351 g_value_set_int (value, impl.fps);
1352 GST_OBJECT_UNLOCK (object);
1355 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
/* Property setter: mirrors the getter — each writable property is stored
 * directly on the ObjC impl object.  Read-only properties (device-name,
 * fps) intentionally have no case here. */
1361 gst_avf_video_src_set_property (GObject * object, guint prop_id,
1362 const GValue * value, GParamSpec * pspec)
1364 GstAVFVideoSrcImpl *impl = GST_AVF_VIDEO_SRC_IMPL (object);
1368 case PROP_CAPTURE_SCREEN:
1369 impl.captureScreen = g_value_get_boolean (value);
1371 case PROP_CAPTURE_SCREEN_CURSOR:
1372 impl.captureScreenCursor = g_value_get_boolean (value);
1374 case PROP_CAPTURE_SCREEN_MOUSE_CLICKS:
1375 impl.captureScreenMouseClicks = g_value_get_boolean (value);
1378 case PROP_DEVICE_INDEX:
1379 impl.deviceIndex = g_value_get_int (value);
1382 impl.position = g_value_get_enum(value);
1384 case PROP_ORIENTATION:
1385 impl.orientation = g_value_get_enum(value);
1387 case PROP_DEVICE_TYPE:
1388 impl.deviceType = g_value_get_enum(value);
1391 impl.doStats = g_value_get_boolean (value);
1394 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
/* GstElement::change_state — delegates the whole state machine to the
 * ObjC impl's -changeState: method. */
1399 static GstStateChangeReturn
1400 gst_avf_video_src_change_state (GstElement * element, GstStateChange transition)
1402 GstStateChangeReturn ret;
1404 ret = [GST_AVF_VIDEO_SRC_IMPL (element) changeState: transition];
/* The following functions are one-line GstBaseSrc/GstPushSrc vfunc
 * wrappers: each converts the C call into the corresponding message on
 * the element's GstAVFVideoSrcImpl object and returns its result. */
1410 gst_avf_video_src_get_caps (GstBaseSrc * basesrc, GstCaps * filter)
1414 ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) getCaps];
1420 gst_avf_video_src_set_caps (GstBaseSrc * basesrc, GstCaps * caps)
1424 ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) setCaps:caps];
1430 gst_avf_video_src_start (GstBaseSrc * basesrc)
1434 ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) start];
1440 gst_avf_video_src_stop (GstBaseSrc * basesrc)
1444 ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) stop];
1450 gst_avf_video_src_query (GstBaseSrc * basesrc, GstQuery * query)
1454 ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) query:query];
1460 gst_avf_video_src_unlock (GstBaseSrc * basesrc)
1464 ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) unlock];
1470 gst_avf_video_src_unlock_stop (GstBaseSrc * basesrc)
1474 ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) unlockStop];
/* GstPushSrc::create — produces the next buffer. */
1479 static GstFlowReturn
1480 gst_avf_video_src_create (GstPushSrc * pushsrc, GstBuffer ** buf)
1484 ret = [GST_AVF_VIDEO_SRC_IMPL (pushsrc) create: buf];
1491 gst_avf_video_src_fixate (GstBaseSrc * bsrc, GstCaps * caps)
1495 ret = [GST_AVF_VIDEO_SRC_IMPL (bsrc) fixate:caps];
1501 gst_avf_video_src_decide_allocation (GstBaseSrc * bsrc,
1506 ret = [GST_AVF_VIDEO_SRC_IMPL (bsrc) decideAllocation:query];
/* GstElement::set_context — forwards GL (and other) contexts. */
1512 gst_avf_video_src_set_context (GstElement * element, GstContext * context)
1514 [GST_AVF_VIDEO_SRC_IMPL (element) setContext:context];
/* Build the full set of GstCaps a capture device can produce: one
 * structure per (format x framerate-range x pixel-format) combination,
 * plus GLMemory-featured caps for the platform's GL-uploadable format.
 * Dimensions are swapped for portrait orientations.
 * NOTE(review): the two gl_format initializers below appear to be
 * alternatives selected by an elided preprocessor conditional
 * (presumably macOS=UYVY vs iOS=NV12) — confirm against the full file. */
1518 gst_av_capture_device_get_caps (AVCaptureDevice *device, AVCaptureVideoDataOutput *output, GstAVFVideoSourceOrientation orientation)
1520 NSArray *formats = [device valueForKey:@"formats"];
1521 NSArray *pixel_formats = output.availableVideoCVPixelFormatTypes;
1522 GstCaps *result_caps, *result_gl_caps;
1524 GstVideoFormat gl_format = GST_VIDEO_FORMAT_UYVY;
1526 GstVideoFormat gl_format = GST_VIDEO_FORMAT_NV12;
1529 result_caps = gst_caps_new_empty ();
1530 result_gl_caps = gst_caps_new_empty ();
1532 /* Do not use AVCaptureDeviceFormat or AVFrameRateRange only
1533 * available in iOS >= 7.0. We use a dynamic approach with key-value
1534 * coding or performSelector */
1535 for (NSObject *f in [formats reverseObjectEnumerator]) {
1536 /* formatDescription can't be retrieved with valueForKey so use a selector here */
1537 CMFormatDescriptionRef formatDescription = (__bridge CMFormatDescriptionRef) [f performSelector:@selector(formatDescription)];
1538 CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions (formatDescription);
1539 dimensions = get_oriented_dimensions (orientation, dimensions);
1541 for (NSObject *rate in [f valueForKey:@"videoSupportedFrameRateRanges"]) {
1542 int min_fps_n, min_fps_d, max_fps_n, max_fps_d;
1543 gdouble min_fps, max_fps;
/* min/maxFrameRate are doubles; convert to GStreamer fractions. */
1545 [[rate valueForKey:@"minFrameRate"] getValue:&min_fps];
1546 gst_util_double_to_fraction (min_fps, &min_fps_n, &min_fps_d);
1548 [[rate valueForKey:@"maxFrameRate"] getValue:&max_fps];
1549 gst_util_double_to_fraction (max_fps, &max_fps_n, &max_fps_d);
1551 for (NSNumber *pixel_format in pixel_formats) {
1552 GstVideoFormat gst_format = get_gst_video_format (pixel_format);
1554 if (gst_format != GST_VIDEO_FORMAT_UNKNOWN) {
/* Range caps when the device supports a variable rate, fixed otherwise. */
1555 if (min_fps != max_fps)
1556 gst_caps_append (result_caps, GST_AVF_FPS_RANGE_CAPS_NEW (gst_format, dimensions.width, dimensions.height, min_fps_n, min_fps_d, max_fps_n, max_fps_d));
1558 gst_caps_append (result_caps, GST_AVF_CAPS_NEW (gst_format, dimensions.width, dimensions.height, max_fps_n, max_fps_d));
/* For the GL-uploadable format, also advertise GLMemory caps with the
 * platform's texture target. */
1561 if (gst_format == gl_format) {
1563 if (min_fps != max_fps) {
1564 gl_caps = GST_AVF_FPS_RANGE_CAPS_NEW (gl_format,
1565 dimensions.width, dimensions.height,
1566 min_fps_n, min_fps_d,
1567 max_fps_n, max_fps_d);
1569 gl_caps = GST_AVF_CAPS_NEW (gl_format,
1570 dimensions.width, dimensions.height,
1571 max_fps_n, max_fps_d);
1573 gst_caps_set_features (gl_caps, 0,
1574 gst_caps_features_new (GST_CAPS_FEATURE_MEMORY_GL_MEMORY,
1576 gst_caps_set_simple (gl_caps,
1577 "texture-target", G_TYPE_STRING,
1579 GST_GL_TEXTURE_TARGET_RECTANGLE_STR,
1581 GST_GL_TEXTURE_TARGET_2D_STR,
1584 gst_caps_append (result_gl_caps, gl_caps);
/* GL caps first so downstream prefers zero-copy GLMemory; then merge in
 * the system-memory caps and simplify. */
1590 result_gl_caps = gst_caps_simplify (gst_caps_merge (result_gl_caps, result_caps));
1592 return result_gl_caps;
/* Map a CoreVideo pixel-format FourCC (wrapped in an NSNumber) to the
 * equivalent GstVideoFormat; unrecognized formats yield
 * GST_VIDEO_FORMAT_UNKNOWN (the initial value). */
1595 static GstVideoFormat
1596 get_gst_video_format (NSNumber *pixel_format)
1598 GstVideoFormat gst_format = GST_VIDEO_FORMAT_UNKNOWN;
1600 switch ([pixel_format integerValue]) {
1601 case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange: /* 420v */
1602 gst_format = GST_VIDEO_FORMAT_NV12;
1604 case kCVPixelFormatType_422YpCbCr8: /* 2vuy */
1605 gst_format = GST_VIDEO_FORMAT_UYVY;
1607 case kCVPixelFormatType_32BGRA: /* BGRA */
1608 gst_format = GST_VIDEO_FORMAT_BGRA;
1610 case kCVPixelFormatType_422YpCbCr8_yuvs: /* yuvs */
1611 gst_format = GST_VIDEO_FORMAT_YUY2;
/* Return @dimensions adjusted for @orientation: portrait orientations get
 * width and height swapped (the sensor reports landscape dimensions);
 * every other orientation is returned unchanged. */
static CMVideoDimensions
get_oriented_dimensions (GstAVFVideoSourceOrientation orientation, CMVideoDimensions dimensions)
{
  CMVideoDimensions oriented = dimensions;

  switch (orientation) {
    case GST_AVF_VIDEO_SOURCE_ORIENTATION_PORTRAIT:
    case GST_AVF_VIDEO_SOURCE_ORIENTATION_PORTRAIT_UPSIDE_DOWN:
      /* Swap axes for portrait output. */
      oriented.width = dimensions.height;
      oriented.height = dimensions.width;
      break;
    default:
      break;
  }

  return oriented;
}