avfvideosrc: Explicitly request device video permissions for macOS 10.14+
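
Since macOS 10.14 (Mojave), capturing video from a device requires explicit
user consent: authorization has to be checked, and if necessary requested,
via AVCaptureDevice before a capture session is started. Teach openDevice
to do that. In essence, the flow added below is (minimal sketch, error
handling omitted):

    if ([AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo] ==
        AVAuthorizationStatusNotDetermined) {
      [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo
                               completionHandler:^(BOOL granted) { /* ... */ }];
    }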
diff --git a/sys/applemedia/avfvideosrc.m b/sys/applemedia/avfvideosrc.m
index f83c8c0..5a91d1d 100644
@@ -1,5 +1,6 @@
 /*
  * Copyright (C) 2010 Ole André Vadla Ravnås <oleavr@soundrop.com>
+ * Copyright (C) 2016 Alessandro Decina <twi@centricular.com>
  *
  * This library is free software; you can redistribute it and/or
  * modify it under the terms of the GNU Library General Public
 #endif
 
 #include "avfvideosrc.h"
+#include "glcontexthelper.h"
 
 #import <AVFoundation/AVFoundation.h>
+#if !HAVE_IOS
+#import <AppKit/AppKit.h>
+#endif
 #include <gst/video/video.h>
+#include <gst/gl/gstglcontext.h>
 #include "coremediabuffer.h"
+#include "videotexturecache.h"
 
 #define DEFAULT_DEVICE_INDEX  -1
+#define DEFAULT_POSITION      GST_AVF_VIDEO_SOURCE_POSITION_DEFAULT
+#define DEFAULT_ORIENTATION   GST_AVF_VIDEO_SOURCE_ORIENTATION_DEFAULT
+#define DEFAULT_DEVICE_TYPE   GST_AVF_VIDEO_SOURCE_DEVICE_TYPE_DEFAULT
 #define DEFAULT_DO_STATS      FALSE
 
 #define DEVICE_FPS_N          25
 GST_DEBUG_CATEGORY (gst_avf_video_src_debug);
 #define GST_CAT_DEFAULT gst_avf_video_src_debug
 
+static GstVideoFormat get_gst_video_format(NSNumber *pixel_format);
+static CMVideoDimensions
+get_oriented_dimensions(GstAVFVideoSourceOrientation orientation, CMVideoDimensions dimensions);
+
 static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
     GST_PAD_SRC,
     GST_PAD_ALWAYS,
-    GST_STATIC_CAPS ("video/x-raw, "
+    GST_STATIC_CAPS (
+#if !HAVE_IOS
+        GST_VIDEO_CAPS_MAKE_WITH_FEATURES
+        (GST_CAPS_FEATURE_MEMORY_GL_MEMORY,
+            "UYVY") ", "
+        "texture-target = " GST_GL_TEXTURE_TARGET_RECTANGLE_STR ";"
+#else
+        GST_VIDEO_CAPS_MAKE_WITH_FEATURES
+        (GST_CAPS_FEATURE_MEMORY_GL_MEMORY,
+            "NV12") ", "
+        "texture-target = " GST_GL_TEXTURE_TARGET_2D_STR "; "
+#endif
+        "video/x-raw, "
         "format = (string) { NV12, UYVY, YUY2 }, "
         "framerate = " GST_VIDEO_FPS_RANGE ", "
         "width = " GST_VIDEO_SIZE_RANGE ", "
@@ -62,21 +88,92 @@ typedef enum _QueueState {
 #define gst_avf_video_src_parent_class parent_class
 G_DEFINE_TYPE (GstAVFVideoSrc, gst_avf_video_src, GST_TYPE_PUSH_SRC);
 
+#define GST_TYPE_AVF_VIDEO_SOURCE_POSITION (gst_avf_video_source_position_get_type ())
+static GType
+gst_avf_video_source_position_get_type (void)
+{
+  static GType avf_video_source_position_type = 0;
+
+  if (!avf_video_source_position_type) {
+    static GEnumValue position_types[] = {
+      { GST_AVF_VIDEO_SOURCE_POSITION_FRONT, "Front-facing camera", "front" },
+      { GST_AVF_VIDEO_SOURCE_POSITION_BACK,  "Back-facing camera", "back"  },
+      { GST_AVF_VIDEO_SOURCE_POSITION_DEFAULT,  "Default", "default"  },
+      { 0, NULL, NULL },
+    };
+
+    avf_video_source_position_type =
+    g_enum_register_static ("GstAVFVideoSourcePosition",
+                            position_types);
+  }
+
+  return avf_video_source_position_type;
+}
+
+#define GST_TYPE_AVF_VIDEO_SOURCE_ORIENTATION (gst_avf_video_source_orientation_get_type ())
+static GType
+gst_avf_video_source_orientation_get_type (void)
+{
+  static GType avf_video_source_orientation_type = 0;
+
+  if (!avf_video_source_orientation_type) {
+    static GEnumValue orientation_types[] = {
+      { GST_AVF_VIDEO_SOURCE_ORIENTATION_PORTRAIT, "Indicates that video should be oriented vertically, top at the top.", "portrait" },
+      { GST_AVF_VIDEO_SOURCE_ORIENTATION_PORTRAIT_UPSIDE_DOWN, "Indicates that video should be oriented vertically, top at the bottom.", "portrait-upside-down" },
+      { GST_AVF_VIDEO_SOURCE_ORIENTATION_LANDSCAPE_RIGHT, "Indicates that video should be oriented horizontally, top on the left.", "landscape-right" },
+      { GST_AVF_VIDEO_SOURCE_ORIENTATION_LANDSCAPE_LEFT, "Indicates that video should be oriented horizontally, top on the right.", "landscape-left" },
+      { GST_AVF_VIDEO_SOURCE_ORIENTATION_DEFAULT, "Default", "default" },
+      { 0, NULL, NULL },
+    };
+
+    avf_video_source_orientation_type =
+    g_enum_register_static ("GstAVFVideoSourceOrientation",
+                            orientation_types);
+  }
+
+  return avf_video_source_orientation_type;
+}
+
+#define GST_TYPE_AVF_VIDEO_SOURCE_DEVICE_TYPE (gst_avf_video_source_device_type_get_type ())
+static GType
+gst_avf_video_source_device_type_get_type (void)
+{
+  static GType avf_video_source_device_type_type = 0;
+
+  if (!avf_video_source_device_type_type) {
+    static GEnumValue device_type_types[] = {
+      { GST_AVF_VIDEO_SOURCE_DEVICE_TYPE_BUILT_IN_WIDE_ANGLE_CAMERA, "A built-in wide angle camera. These devices are suitable for general purpose use.", "wide-angle" },
+      { GST_AVF_VIDEO_SOURCE_DEVICE_TYPE_BUILT_IN_TELEPHOTO_CAMERA, "A built-in camera device with a longer focal length than a wide-angle camera.", "telephoto" },
+      { GST_AVF_VIDEO_SOURCE_DEVICE_TYPE_BUILT_IN_DUAL_CAMERA, "A dual camera device, combining built-in wide-angle and telephoto cameras that work together as a single capture device.", "dual" },
+      { GST_AVF_VIDEO_SOURCE_DEVICE_TYPE_DEFAULT, "Default", "default" },
+      { 0, NULL, NULL },
+    };
+
+    avf_video_source_device_type_type =
+    g_enum_register_static ("GstAVFVideoSourceDeviceType",
+                            device_type_types);
+  }
+
+  return avf_video_source_device_type_type;
+}
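+
+/* These enums back the new "position", "orientation" and "device-type"
+ * element properties installed in gst_avf_video_src_class_init below. A
+ * hypothetical application would select, say, the front wide-angle camera
+ * like this (sketch):
+ *
+ *   g_object_set (avfvideosrc, "position", GST_AVF_VIDEO_SOURCE_POSITION_FRONT,
+ *       "device-type", GST_AVF_VIDEO_SOURCE_DEVICE_TYPE_BUILT_IN_WIDE_ANGLE_CAMERA,
+ *       NULL);
+ */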
+
 @interface GstAVFVideoSrcImpl : NSObject <AVCaptureVideoDataOutputSampleBufferDelegate> {
   GstElement *element;
   GstBaseSrc *baseSrc;
   GstPushSrc *pushSrc;
 
   gint deviceIndex;
+  const gchar *deviceName;
+  GstAVFVideoSourcePosition position;
+  GstAVFVideoSourceOrientation orientation;
+  GstAVFVideoSourceDeviceType deviceType;
   BOOL doStats;
-#if !HAVE_IOS
-  CGDirectDisplayID displayId;
-#endif
 
   AVCaptureSession *session;
   AVCaptureInput *input;
   AVCaptureVideoDataOutput *output;
   AVCaptureDevice *device;
+  AVCaptureConnection *connection;
   CMClockRef inputClock;
 
   dispatch_queue_t mainQueue;
@@ -90,8 +187,6 @@ G_DEFINE_TYPE (GstAVFVideoSrc, gst_avf_video_src, GST_TYPE_PUSH_SRC);
   gint width, height;
   GstClockTime latency;
   guint64 offset;
-  GstClockTime startAVFTimestamp;
-  GstClockTime startTimestamp;
 
   GstClockTime lastSampling;
   guint count;
@@ -101,6 +196,8 @@ G_DEFINE_TYPE (GstAVFVideoSrc, gst_avf_video_src, GST_TYPE_PUSH_SRC);
   BOOL captureScreenMouseClicks;
 
   BOOL useVideoMeta;
+  GstGLContextHelper *ctxh;
+  GstVideoTextureCache *textureCache;
 }
 
 - (id)init;
@@ -108,6 +205,10 @@ G_DEFINE_TYPE (GstAVFVideoSrc, gst_avf_video_src, GST_TYPE_PUSH_SRC);
 - (void)finalize;
 
 @property int deviceIndex;
+@property const gchar *deviceName;
+@property GstAVFVideoSourcePosition position;
+@property GstAVFVideoSourceOrientation orientation;
+@property GstAVFVideoSourceDeviceType deviceType;
 @property BOOL doStats;
 @property int fps;
 @property BOOL captureScreen;
@@ -119,7 +220,11 @@ G_DEFINE_TYPE (GstAVFVideoSrc, gst_avf_video_src, GST_TYPE_PUSH_SRC);
 - (BOOL)openDevice;
 - (void)closeDevice;
 - (GstVideoFormat)getGstVideoFormat:(NSNumber *)pixel_format;
-- (BOOL)getDeviceCaps:(GstCaps *)result;
+#if !HAVE_IOS
+- (CGDirectDisplayID)getDisplayIdFromDeviceIndex;
+- (float)getScaleFactorFromDeviceIndex;
+#endif
+- (GstCaps *)getDeviceCaps;
 - (BOOL)setDeviceCaps:(GstVideoInfo *)info;
 - (BOOL)getSessionPresetCaps:(GstCaps *)result;
 - (BOOL)setSessionPresetCaps:(GstVideoInfo *)info;
@@ -130,8 +235,11 @@ G_DEFINE_TYPE (GstAVFVideoSrc, gst_avf_video_src, GST_TYPE_PUSH_SRC);
 - (BOOL)unlock;
 - (BOOL)unlockStop;
 - (BOOL)query:(GstQuery *)query;
+- (void)setContext:(GstContext *)context;
 - (GstStateChangeReturn)changeState:(GstStateChange)transition;
 - (GstFlowReturn)create:(GstBuffer **)buf;
+- (GstCaps *)fixate:(GstCaps *)caps;
+- (BOOL)decideAllocation:(GstQuery *)query;
 - (void)updateStatistics;
 - (void)captureOutput:(AVCaptureOutput *)captureOutput
 didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
@@ -139,10 +247,54 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
 
 @end
 
+#if HAVE_IOS
+
+static AVCaptureDeviceType GstAVFVideoSourceDeviceType2AVCaptureDeviceType(GstAVFVideoSourceDeviceType deviceType) {
+  switch (deviceType) {
+    case GST_AVF_VIDEO_SOURCE_DEVICE_TYPE_BUILT_IN_WIDE_ANGLE_CAMERA:
+      return AVCaptureDeviceTypeBuiltInWideAngleCamera;
+    case GST_AVF_VIDEO_SOURCE_DEVICE_TYPE_BUILT_IN_TELEPHOTO_CAMERA:
+      return AVCaptureDeviceTypeBuiltInTelephotoCamera;
+    case GST_AVF_VIDEO_SOURCE_DEVICE_TYPE_BUILT_IN_DUAL_CAMERA:
+      return AVCaptureDeviceTypeBuiltInDuoCamera;
+    case GST_AVF_VIDEO_SOURCE_DEVICE_TYPE_DEFAULT:
+      g_assert_not_reached();
+  }
+}
+
+static AVCaptureDevicePosition GstAVFVideoSourcePosition2AVCaptureDevicePosition(GstAVFVideoSourcePosition position) {
+  switch (position) {
+    case GST_AVF_VIDEO_SOURCE_POSITION_FRONT:
+      return AVCaptureDevicePositionFront;
+    case GST_AVF_VIDEO_SOURCE_POSITION_BACK:
+      return AVCaptureDevicePositionBack;
+    case GST_AVF_VIDEO_SOURCE_POSITION_DEFAULT:
+      g_assert_not_reached();
+  }
+}
+
+static AVCaptureVideoOrientation GstAVFVideoSourceOrientation2AVCaptureVideoOrientation(GstAVFVideoSourceOrientation orientation) {
+  switch (orientation) {
+    case GST_AVF_VIDEO_SOURCE_ORIENTATION_PORTRAIT:
+      return AVCaptureVideoOrientationPortrait;
+    case GST_AVF_VIDEO_SOURCE_ORIENTATION_PORTRAIT_UPSIDE_DOWN:
+      return AVCaptureVideoOrientationPortraitUpsideDown;
+    case GST_AVF_VIDEO_SOURCE_ORIENTATION_LANDSCAPE_LEFT:
+      return AVCaptureVideoOrientationLandscapeLeft;
+    case GST_AVF_VIDEO_SOURCE_ORIENTATION_LANDSCAPE_RIGHT:
+      return AVCaptureVideoOrientationLandscapeRight;
+    case GST_AVF_VIDEO_SOURCE_ORIENTATION_DEFAULT:
+      g_assert_not_reached();
+  }
+}
+
+#endif
+
 @implementation GstAVFVideoSrcImpl
 
-@synthesize deviceIndex, doStats, fps, captureScreen,
-            captureScreenCursor, captureScreenMouseClicks;
+@synthesize deviceIndex, deviceName, position, orientation, deviceType, doStats,
+    fps, captureScreen, captureScreenCursor, captureScreenMouseClicks;
 
 - (id)init
 {
@@ -157,14 +309,16 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
     pushSrc = src;
 
     deviceIndex = DEFAULT_DEVICE_INDEX;
+    deviceName = NULL;
+    position = DEFAULT_POSITION;
+    orientation = DEFAULT_ORIENTATION;
+    deviceType = DEFAULT_DEVICE_TYPE;
     captureScreen = NO;
     captureScreenCursor = NO;
     captureScreenMouseClicks = NO;
     useVideoMeta = NO;
-#if !HAVE_IOS
-    displayId = kCGDirectMainDisplay;
-#endif
-
+    textureCache = NULL;
+    ctxh = gst_gl_context_helper_new (element);
     mainQueue =
         dispatch_queue_create ("org.freedesktop.gstreamer.avfvideosrc.main", NULL);
     workerQueue =
@@ -179,12 +333,8 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
 
 - (void)finalize
 {
-  dispatch_release (mainQueue);
   mainQueue = NULL;
-  dispatch_release (workerQueue);
   workerQueue = NULL;
-
-  [super finalize];
 }
 
 - (BOOL)openDeviceInput
@@ -192,14 +342,25 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
   NSString *mediaType = AVMediaTypeVideo;
   NSError *err;
 
-  if (deviceIndex == -1) {
-    device = [AVCaptureDevice defaultDeviceWithMediaType:mediaType];
+  if (deviceIndex == DEFAULT_DEVICE_INDEX) {
+#if HAVE_IOS
+    if (deviceType != DEFAULT_DEVICE_TYPE && position != DEFAULT_POSITION) {
+      device = [AVCaptureDevice
+                defaultDeviceWithDeviceType:GstAVFVideoSourceDeviceType2AVCaptureDeviceType(deviceType)
+                mediaType:mediaType
+                position:GstAVFVideoSourcePosition2AVCaptureDevicePosition(position)];
+    } else {
+      device = [AVCaptureDevice defaultDeviceWithMediaType:mediaType];
+    }
+#else
+    device = [AVCaptureDevice defaultDeviceWithMediaType:mediaType];
+#endif
     if (device == nil) {
       GST_ELEMENT_ERROR (element, RESOURCE, NOT_FOUND,
                           ("No video capture devices found"), (NULL));
       return NO;
     }
-  } else {
+  } else { // deviceIndex takes priority over position and deviceType
     NSArray *devices = [AVCaptureDevice devicesWithMediaType:mediaType];
     if (deviceIndex >= [devices count]) {
       GST_ELEMENT_ERROR (element, RESOURCE, NOT_FOUND,
@@ -209,9 +370,9 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
     device = [devices objectAtIndex:deviceIndex];
   }
   g_assert (device != nil);
-  [device retain];
 
-  GST_INFO ("Opening '%s'", [[device localizedName] UTF8String]);
+  deviceName = [[device localizedName] UTF8String];
+  GST_INFO ("Opening '%s'", deviceName);
 
   input = [AVCaptureDeviceInput deviceInputWithDevice:device
                                                 error:&err];
@@ -220,11 +381,9 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
         ("Failed to open device: %s",
         [[err localizedDescription] UTF8String]),
         (NULL));
-    [device release];
     device = nil;
     return NO;
   }
-  [input retain];
   return YES;
 }
 
@@ -233,8 +392,14 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
 #if HAVE_IOS
   return NO;
 #else
+  CGDirectDisplayID displayId;
+
   GST_DEBUG_OBJECT (element, "Opening screen input");
 
+  displayId = [self getDisplayIdFromDeviceIndex];
+  if (displayId == 0)
+    return NO;
+
   AVCaptureScreenInput *screenInput =
       [[AVCaptureScreenInput alloc] initWithDisplayID:displayId];
 
@@ -252,7 +417,6 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
   }
   screenInput.capturesMouseClicks = captureScreenMouseClicks;
   input = screenInput;
-  [input retain];
   return YES;
 #endif
 }
@@ -263,6 +427,45 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
 
   GST_DEBUG_OBJECT (element, "Opening device");
 
+  // Since macOS Mojave (10.14), the user must explicitly grant an
+  // application permission before it can capture from a device. Note that
+  // requesting access also requires the application to carry an
+  // NSCameraUsageDescription entry in its Info.plist.
+  if (@available(macOS 10.14, *)) {
+    // Check if permission has already been granted (or denied)
+    AVAuthorizationStatus authStatus = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
+    switch (authStatus) {
+      case AVAuthorizationStatusDenied:
+        // The user has explicitly denied permission for media capture.
+        GST_ELEMENT_ERROR (element, RESOURCE, NOT_AUTHORIZED,
+          ("Device video access permission has previously been denied by the user"), ("Authorization status: %d", (int)authStatus));
+        return success;
+      case AVAuthorizationStatusRestricted:
+        // The user is not allowed to access media capture devices.
+        GST_ELEMENT_ERROR (element, RESOURCE, NOT_AUTHORIZED,
+          ("Device video access permission cannot be granted by the user"), ("Authorization status: %d", (int)authStatus));
+        return success;
+      case AVAuthorizationStatusAuthorized:
+        // The user has explicitly granted permission for media capture,
+        // or explicit user permission is not necessary for the media type in question.
+        GST_DEBUG_OBJECT (element, "Device video access permission has already been granted");
+        break;
+      case AVAuthorizationStatusNotDetermined: {
+        // Explicit user permission is required for media capture,
+        // but the user has not yet granted or denied such permission.
+        // requestAccessForMediaType: returns immediately and calls its
+        // completion handler asynchronously once the user has answered the
+        // permission dialog, so wait for the answer on a semaphore.
+        dispatch_semaphore_t sema = dispatch_semaphore_create (0);
+        dispatch_sync (mainQueue, ^{
+          [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
+            GST_DEBUG_OBJECT (element, "Device video access permission %s", granted ? "granted" : "not granted");
+            dispatch_semaphore_signal (sema);
+          }];
+        });
+        if (![NSThread isMainThread]) {
+          // Blocking the main thread would prevent the permission dialog
+          // from being serviced, so only wait on other threads.
+          dispatch_semaphore_wait (sema, DISPATCH_TIME_FOREVER);
+        }
+        // Check whether permission has been granted
+        AVAuthorizationStatus videoAuthorizationStatus = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
+        if (videoAuthorizationStatus != AVAuthorizationStatusAuthorized) {
+          GST_ELEMENT_ERROR (element, RESOURCE, NOT_AUTHORIZED,
+            ("Device video access permission has just been denied"), ("Authorization status: %d", (int)videoAuthorizationStatus));
+          return success;
+        }
+        break;
+      }
+    }
+  }
+
   dispatch_sync (mainQueue, ^{
     BOOL ret;
 
@@ -284,6 +487,14 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
     [session addInput:input];
     [session addOutput:output];
 
+    /* retained by session */
+    connection = [[output connections] firstObject];
+#if HAVE_IOS
+    if (orientation != DEFAULT_ORIENTATION)
+      connection.videoOrientation = GstAVFVideoSourceOrientation2AVCaptureVideoOrientation(orientation);
+#endif
+    inputClock = ((AVCaptureInputPort *)connection.inputPorts[0]).clock;
+
     *successPtr = YES;
   });
 
@@ -299,25 +510,25 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
   dispatch_sync (mainQueue, ^{
     g_assert (![session isRunning]);
 
+    connection = nil;
+    inputClock = nil;
+
     [session removeInput:input];
     [session removeOutput:output];
 
-    [session release];
     session = nil;
 
-    [input release];
     input = nil;
 
-    [output release];
     output = nil;
 
     if (!captureScreen) {
-      [device release];
       device = nil;
     }
 
     if (caps)
       gst_caps_unref (caps);
+    caps = NULL;
   });
 }
 
@@ -329,65 +540,71 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
         "framerate", GST_TYPE_FRACTION, (fps_n), (fps_d),             \
         NULL))
 
+#define GST_AVF_FPS_RANGE_CAPS_NEW(format, w, h, min_fps_n, min_fps_d, max_fps_n, max_fps_d) \
+    (gst_caps_new_simple ("video/x-raw",                              \
+        "width", G_TYPE_INT, w,                                       \
+        "height", G_TYPE_INT, h,                                      \
+        "format", G_TYPE_STRING, gst_video_format_to_string (format), \
+        "framerate", GST_TYPE_FRACTION_RANGE, (min_fps_n), (min_fps_d), (max_fps_n), (max_fps_d), \
+        NULL))
+
 - (GstVideoFormat)getGstVideoFormat:(NSNumber *)pixel_format
 {
-  GstVideoFormat gst_format = GST_VIDEO_FORMAT_UNKNOWN;
-
-  switch ([pixel_format integerValue]) {
-  case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange: /* 420v */
-    gst_format = GST_VIDEO_FORMAT_NV12;
-    break;
-  case kCVPixelFormatType_422YpCbCr8: /* 2vuy */
-    gst_format = GST_VIDEO_FORMAT_UYVY;
-    break;
-  case kCVPixelFormatType_32BGRA: /* BGRA */
-    gst_format = GST_VIDEO_FORMAT_BGRA;
-    break;
-  case kCVPixelFormatType_422YpCbCr8_yuvs: /* yuvs */
-    gst_format = GST_VIDEO_FORMAT_YUY2;
-    break;
-  default:
+  GstVideoFormat gst_format = get_gst_video_format(pixel_format);
+  if (gst_format == GST_VIDEO_FORMAT_UNKNOWN) {
     GST_LOG_OBJECT (element, "Pixel format %s is not handled by avfvideosrc",
         [[pixel_format stringValue] UTF8String]);
-    break;
   }
-
   return gst_format;
 }
 
-- (BOOL)getDeviceCaps:(GstCaps *)result
+#if !HAVE_IOS
+- (CGDirectDisplayID)getDisplayIdFromDeviceIndex
 {
-  NSArray *formats = [device valueForKey:@"formats"];
-  NSArray *pixel_formats = output.availableVideoCVPixelFormatTypes;
+  NSDictionary *description;
+  NSNumber *displayId;
+  NSArray *screens = [NSScreen screens];
+
+  if (deviceIndex == DEFAULT_DEVICE_INDEX)
+    return kCGDirectMainDisplay;
+  if (deviceIndex >= [screens count]) {
+    GST_ELEMENT_ERROR (element, RESOURCE, NOT_FOUND,
+                        ("Invalid screen capture device index"), (NULL));
+    return 0;
+  }
+  description = [[screens objectAtIndex:deviceIndex] deviceDescription];
+  displayId = [description objectForKey:@"NSScreenNumber"];
+  return [displayId unsignedIntegerValue];
+}
 
-  GST_DEBUG_OBJECT (element, "Getting device caps");
+- (float)getScaleFactorFromDeviceIndex
+{
+  NSArray *screens = [NSScreen screens];
+
+  if (deviceIndex == DEFAULT_DEVICE_INDEX)
+    return [[NSScreen mainScreen] backingScaleFactor];
+  if (deviceIndex >= [screens count]) {
+    GST_ELEMENT_ERROR (element, RESOURCE, NOT_FOUND,
+                        ("Invalid screen capture device index"), (NULL));
+    return 1.0;
+  }
+  return [[screens objectAtIndex:deviceIndex] backingScaleFactor];
+}
+#endif
 
-  /* Do not use AVCaptureDeviceFormat or AVFrameRateRange only
-   * available in iOS >= 7.0. We use a dynamic approach with key-value
-   * coding or performSelector */
-  for (NSObject *f in [formats reverseObjectEnumerator]) {
-    CMFormatDescriptionRef formatDescription;
-    CMVideoDimensions dimensions;
 
-    /* formatDescription can't be retrieved with valueForKey so use a selector here */
-    formatDescription = (CMFormatDescriptionRef) [f performSelector:@selector(formatDescription)];
-    dimensions = CMVideoFormatDescriptionGetDimensions(formatDescription);
-    for (NSObject *rate in [f valueForKey:@"videoSupportedFrameRateRanges"]) {
-      int fps_n, fps_d;
-      gdouble max_fps;
+- (CMVideoDimensions)orientedDimensions:(CMVideoDimensions)dimensions
+{
+  return get_oriented_dimensions(orientation, dimensions);
+}
 
-      [[rate valueForKey:@"maxFrameRate"] getValue:&max_fps];
-      gst_util_double_to_fraction (max_fps, &fps_n, &fps_d);
+- (GstCaps *)getDeviceCaps
+{
+  GST_DEBUG_OBJECT (element, "Getting device caps");
+  GstCaps *device_caps = gst_av_capture_device_get_caps (device, output, orientation);
+  GST_DEBUG_OBJECT (element, "Device returned the following caps %" GST_PTR_FORMAT, device_caps);
 
-      for (NSNumber *pixel_format in pixel_formats) {
-        GstVideoFormat gst_format = [self getGstVideoFormat:pixel_format];
-        if (gst_format != GST_VIDEO_FORMAT_UNKNOWN)
-          gst_caps_append (result, GST_AVF_CAPS_NEW (gst_format, dimensions.width, dimensions.height, fps_n, fps_d));
-      }
-    }
-  }
-  GST_LOG_OBJECT (element, "Device returned the following caps %" GST_PTR_FORMAT, result);
-  return YES;
+  return device_caps;
 }
 
 - (BOOL)setDeviceCaps:(GstVideoInfo *)info
@@ -401,28 +618,35 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
 
   if ([device lockForConfiguration:NULL] == YES) {
     for (NSObject *f in formats) {
-      CMFormatDescriptionRef formatDescription;
-      CMVideoDimensions dimensions;
-
-      formatDescription = (CMFormatDescriptionRef) [f performSelector:@selector(formatDescription)];
-      dimensions = CMVideoFormatDescriptionGetDimensions(formatDescription);
+      CMFormatDescriptionRef formatDescription = (__bridge CMFormatDescriptionRef) [f performSelector:@selector(formatDescription)];
+      CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(formatDescription);
+      dimensions = [self orientedDimensions:dimensions];
       if (dimensions.width == info->width && dimensions.height == info->height) {
         found_format = TRUE;
         [device setValue:f forKey:@"activeFormat"];
         for (NSObject *rate in [f valueForKey:@"videoSupportedFrameRateRanges"]) {
-          gdouble max_frame_rate;
+          gdouble min_frame_rate, max_frame_rate;
 
+          [[rate valueForKey:@"minFrameRate"] getValue:&min_frame_rate];
           [[rate valueForKey:@"maxFrameRate"] getValue:&max_frame_rate];
-          if (abs (framerate - max_frame_rate) < 0.00001) {
-            NSValue *min_frame_duration, *max_frame_duration;
-
+          if ((framerate >= min_frame_rate - 0.00001) &&
+              (framerate <= max_frame_rate + 0.00001)) {
+            NSValue *frame_duration_value;
             found_framerate = TRUE;
-            min_frame_duration = [rate valueForKey:@"minFrameDuration"];
-            max_frame_duration = [rate valueForKey:@"maxFrameDuration"];
-            [device setValue:min_frame_duration forKey:@"activeVideoMinFrameDuration"];
+            if (min_frame_rate == max_frame_rate) {
+              /* on mac we get tight ranges and an exception is raised if the
+               * frame duration doesn't match the one reported in the range to
+               * the last decimal point
+               */
+              frame_duration_value = [rate valueForKey:@"minFrameDuration"];
+            } else {
+              // Invert fps_n and fps_d to get frame duration value and timescale (or numerator and denominator)
+              frame_duration_value = [NSValue valueWithCMTime:CMTimeMake (info->fps_d, info->fps_n)];
+            }
+            [device setValue:frame_duration_value forKey:@"activeVideoMinFrameDuration"];
             @try {
               /* Only available on OSX >= 10.8 and iOS >= 7.0 */
-              [device setValue:max_frame_duration forKey:@"activeVideoMaxFrameDuration"];
+              [device setValue:frame_duration_value forKey:@"activeVideoMaxFrameDuration"];
             } @catch (NSException *exception) {
               if (![[exception name] isEqualToString:NSUndefinedKeyException]) {
                 GST_WARNING ("An unexcepted error occured: %s",
@@ -528,13 +752,14 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
 
   if (captureScreen) {
 #if !HAVE_IOS
-    CGRect rect = CGDisplayBounds (displayId);
+    CGRect rect = CGDisplayBounds ([self getDisplayIdFromDeviceIndex]);
+    float scale = [self getScaleFactorFromDeviceIndex];
     for (NSNumber *pixel_format in pixel_formats) {
       GstVideoFormat gst_format = [self getGstVideoFormat:pixel_format];
       if (gst_format != GST_VIDEO_FORMAT_UNKNOWN)
         gst_caps_append (result, gst_caps_new_simple ("video/x-raw",
-            "width", G_TYPE_INT, (int)rect.size.width,
-            "height", G_TYPE_INT, (int)rect.size.height,
+            "width", G_TYPE_INT, (int)(rect.size.width * scale),
+            "height", G_TYPE_INT, (int)(rect.size.height * scale),
             "format", G_TYPE_STRING, gst_video_format_to_string (gst_format),
             NULL));
     }
@@ -545,11 +770,8 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
   }
 
   @try {
-
-    [self getDeviceCaps:result];
-
+    result = gst_caps_merge (result, [self getDeviceCaps]);
   } @catch (NSException *exception) {
-
     if (![[exception name] isEqualToString:NSUndefinedKeyException]) {
       GST_WARNING ("An unexcepted error occured: %s", [exception.reason UTF8String]);
       return result;
@@ -578,8 +800,6 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
   dispatch_sync (mainQueue, ^{
     int newformat;
 
-    g_assert (![session isRunning]);
-
     if (captureScreen) {
 #if !HAVE_IOS
       AVCaptureScreenInput *screenInput = (AVCaptureScreenInput *)input;
@@ -617,13 +837,13 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
         newformat = kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange;
         break;
       case GST_VIDEO_FORMAT_UYVY:
-         newformat = kCVPixelFormatType_422YpCbCr8;
+        newformat = kCVPixelFormatType_422YpCbCr8;
         break;
       case GST_VIDEO_FORMAT_YUY2:
-         newformat = kCVPixelFormatType_422YpCbCr8_yuvs;
+        newformat = kCVPixelFormatType_422YpCbCr8_yuvs;
         break;
       case GST_VIDEO_FORMAT_BGRA:
-         newformat = kCVPixelFormatType_32BGRA;
+        newformat = kCVPixelFormatType_32BGRA;
         break;
       default:
         *successPtr = NO;
@@ -632,17 +852,19 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
         return;
     }
 
-    GST_DEBUG_OBJECT(element,
-       "Width: %d Height: %d Format: %" GST_FOURCC_FORMAT,
-       width, height,
-       GST_FOURCC_ARGS (gst_video_format_to_fourcc (format)));
+    GST_INFO_OBJECT (element,
+        "width: %d height: %d format: %s", width, height,
+        gst_video_format_to_string (format));
 
     output.videoSettings = [NSDictionary
         dictionaryWithObject:[NSNumber numberWithInt:newformat]
         forKey:(NSString*)kCVPixelBufferPixelFormatTypeKey];
 
-    caps = gst_caps_copy (new_caps);
-    [session startRunning];
+    gst_caps_replace (&caps, new_caps);
+    GST_INFO_OBJECT (element, "configured caps %"GST_PTR_FORMAT, caps);
+
+    if (![session isRunning])
+      [session startRunning];
 
     /* Unlock device configuration only after session is started so the session
      * won't reset the capture formats */
@@ -660,9 +882,6 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
 
   offset = 0;
   latency = GST_CLOCK_TIME_NONE;
-  startAVFTimestamp = GST_CLOCK_TIME_NONE;
-  startTimestamp = GST_CLOCK_TIME_NONE;
-  inputClock = nil;
 
   lastSampling = GST_CLOCK_TIME_NONE;
   count = 0;
@@ -676,11 +895,16 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
   dispatch_sync (mainQueue, ^{ [session stopRunning]; });
   dispatch_sync (workerQueue, ^{});
 
-  [bufQueueLock release];
   bufQueueLock = nil;
-  [bufQueue release];
   bufQueue = nil;
-  inputClock = nil;
+
+  if (textureCache)
+    gst_video_texture_cache_free (textureCache);
+  textureCache = NULL;
+
+  if (ctxh)
+    gst_gl_context_helper_free (ctxh);
+  ctxh = NULL;
 
   return YES;
 }
@@ -690,7 +914,7 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
   BOOL result = NO;
 
   if (GST_QUERY_TYPE (query) == GST_QUERY_LATENCY) {
-    if (device != nil) {
+    if (device != nil && caps != NULL) {
       GstClockTime min_latency, max_latency;
 
       min_latency = max_latency = latency;
@@ -708,14 +932,6 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
   return result;
 }
 
-- (BOOL)decideAllocation:(GstQuery *)query
-{
-  useVideoMeta = gst_query_find_allocation_meta (query,
-      GST_VIDEO_META_API_TYPE, NULL);
-
-  return YES;
-}
-
 - (BOOL)unlock
 {
   [bufQueueLock lock];
@@ -729,7 +945,7 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
 {
   [bufQueueLock lock];
   stopRequest = NO;
-  [bufQueueLock unlock];
+  [bufQueueLock unlockWithCondition:([bufQueue count] == 0) ? NO_BUFFERS : HAS_BUFFER_OR_STOP_REQUEST];
 
   return YES;
 }
@@ -753,7 +969,7 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
 
 - (void)captureOutput:(AVCaptureOutput *)captureOutput
 didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
-       fromConnection:(AVCaptureConnection *)connection
+       fromConnection:(AVCaptureConnection *)aConnection
 {
   GstClockTime timestamp, duration;
 
@@ -764,14 +980,17 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
     return;
   }
 
-  if (inputClock == nil)
-    inputClock = ((AVCaptureInputPort *)connection.inputPorts[0]).clock;
   [self getSampleBuffer:sampleBuffer timestamp:&timestamp duration:&duration];
 
+  if (timestamp == GST_CLOCK_TIME_NONE) {
+    [bufQueueLock unlockWithCondition:([bufQueue count] == 0) ? NO_BUFFERS : HAS_BUFFER_OR_STOP_REQUEST];
+    return;
+  }
+
   if ([bufQueue count] == BUFFER_QUEUE_SIZE)
     [bufQueue removeLastObject];
 
-  [bufQueue insertObject:@{@"sbuf": (id)sampleBuffer,
+  [bufQueue insertObject:@{@"sbuf": (__bridge id)sampleBuffer,
                            @"timestamp": @(timestamp),
                            @"duration": @(duration)}
                  atIndex:0];
@@ -794,7 +1013,7 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
   }
 
   NSDictionary *dic = (NSDictionary *) [bufQueue lastObject];
-  sbuf = (CMSampleBufferRef) dic[@"sbuf"];
+  sbuf = (__bridge CMSampleBufferRef) dic[@"sbuf"];
   timestamp = (GstClockTime) [dic[@"timestamp"] longLongValue];
   duration = (GstClockTime) [dic[@"duration"] longLongValue];
   CFRetain (sbuf);
@@ -823,11 +1042,15 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
     }
   }
 
-  *buf = gst_core_media_buffer_new (sbuf, useVideoMeta);
+  *buf = gst_core_media_buffer_new (sbuf, useVideoMeta, textureCache);
+  if (*buf == NULL) {
+    CFRelease (sbuf);
+    return GST_FLOW_ERROR;
+  }
   CFRelease (sbuf);
 
   GST_BUFFER_OFFSET (*buf) = offset++;
-  GST_BUFFER_OFFSET_END (*buf) = GST_BUFFER_OFFSET (buf) + 1;
+  GST_BUFFER_OFFSET_END (*buf) = GST_BUFFER_OFFSET (*buf) + 1;
   GST_BUFFER_TIMESTAMP (*buf) = timestamp;
   GST_BUFFER_DURATION (*buf) = duration;
 
@@ -837,12 +1060,63 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
   return GST_FLOW_OK;
 }
 
+- (GstCaps *)fixate:(GstCaps *)new_caps
+{
+  GstStructure *structure;
+
+  new_caps = gst_caps_make_writable (new_caps);
+  new_caps = gst_caps_truncate (new_caps);
+  structure = gst_caps_get_structure (new_caps, 0);
+  /* Crank it up to 11. This is what the presets do, but we don't use the
+   * presets on iOS >= 7.0 */
+  gst_structure_fixate_field_nearest_int (structure, "height", G_MAXINT);
+  gst_structure_fixate_field_nearest_fraction (structure, "framerate", G_MAXINT, 1);
+
+  return gst_caps_fixate (new_caps);
+}
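+
+/* Illustrative example (hypothetical caps): "video/x-raw, width=1280,
+ * height=[ 480, 1080 ], framerate=[ 15/1, 30/1 ]" fixates to height=1080
+ * and framerate=30/1, i.e. the largest values on offer. */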
+
+- (BOOL)decideAllocation:(GstQuery *)query
+{
+  GstCaps *alloc_caps;
+  GstCapsFeatures *features;
+  gboolean ret;
+
+  ret = GST_BASE_SRC_CLASS (parent_class)->decide_allocation (baseSrc, query);
+  if (!ret)
+    return ret;
+
+  gst_query_parse_allocation (query, &alloc_caps, NULL);
+  features = gst_caps_get_features (alloc_caps, 0);
+  if (gst_caps_features_contains (features, GST_CAPS_FEATURE_MEMORY_GL_MEMORY)) {
+    gst_gl_context_helper_ensure_context (ctxh);
+    GST_INFO_OBJECT (element, "pushing textures, context %p old context %p",
+        ctxh->context, textureCache ? textureCache->ctx : NULL);
+    if (textureCache && textureCache->ctx != ctxh->context) {
+      gst_video_texture_cache_free (textureCache);
+      textureCache = NULL;
+    }
+    if (!textureCache)
+      textureCache = gst_video_texture_cache_new (ctxh->context);
+    gst_video_texture_cache_set_format (textureCache, format, alloc_caps);
+  }
+
+  return TRUE;
+}
+
+- (void)setContext:(GstContext *)context
+{
+  GST_INFO_OBJECT (element, "setting context %s",
+          gst_context_get_context_type (context));
+  gst_gl_handle_set_context (element, context,
+          &ctxh->display, &ctxh->other_context);
+  GST_ELEMENT_CLASS (parent_class)->set_context (element, context);
+}
+
 - (void)getSampleBuffer:(CMSampleBufferRef)sbuf
               timestamp:(GstClockTime *)outTimestamp
                duration:(GstClockTime *)outDuration
 {
   CMSampleTimingInfo time_info;
-  GstClockTime timestamp, duration, inputClockNow, running_time;
+  GstClockTime timestamp, avf_timestamp, duration, input_clock_now, input_clock_diff, running_time;
   CMItemCount num_timings;
   GstClock *clock;
   CMTime now;
@@ -850,19 +1124,42 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
   timestamp = GST_CLOCK_TIME_NONE;
   duration = GST_CLOCK_TIME_NONE;
   if (CMSampleBufferGetOutputSampleTimingInfoArray(sbuf, 1, &time_info, &num_timings) == noErr) {
-    timestamp = gst_util_uint64_scale (GST_SECOND,
+    avf_timestamp = gst_util_uint64_scale (GST_SECOND,
             time_info.presentationTimeStamp.value, time_info.presentationTimeStamp.timescale);
-    duration = gst_util_uint64_scale (GST_SECOND,
-            time_info.duration.value, time_info.duration.timescale);
+
+    if (CMTIME_IS_VALID (time_info.duration) && time_info.duration.timescale != 0)
+      duration = gst_util_uint64_scale (GST_SECOND,
+          time_info.duration.value, time_info.duration.timescale);
 
     now = CMClockGetTime(inputClock);
-    inputClockNow = gst_util_uint64_scale (GST_SECOND,
+    input_clock_now = gst_util_uint64_scale (GST_SECOND,
         now.value, now.timescale);
+    input_clock_diff = input_clock_now - avf_timestamp;
 
     GST_OBJECT_LOCK (element);
     clock = GST_ELEMENT_CLOCK (element);
-    running_time = gst_clock_get_time (clock) - element->base_time;
-    timestamp = running_time + (inputClockNow - timestamp);
+    if (clock) {
+      running_time = gst_clock_get_time (clock) - element->base_time;
+      /* We use presentationTimeStamp to determine how much time it took
+       * between capturing and receiving the frame in our delegate
+       * (e.g. how long it spent in AVF queues), then we subtract that time
+       * from our running time to get the actual timestamp.
+       */
+      if (running_time >= input_clock_diff)
+        timestamp = running_time - input_clock_diff;
+      else
+        timestamp = running_time;
+
+      GST_DEBUG_OBJECT (element, "AVF clock: %"GST_TIME_FORMAT ", AVF PTS: %"GST_TIME_FORMAT
+          ", AVF clock diff: %"GST_TIME_FORMAT
+          ", running time: %"GST_TIME_FORMAT ", out PTS: %"GST_TIME_FORMAT,
+          GST_TIME_ARGS (input_clock_now), GST_TIME_ARGS (avf_timestamp),
+          GST_TIME_ARGS (input_clock_diff),
+          GST_TIME_ARGS (running_time), GST_TIME_ARGS (timestamp));
+    } else {
+      /* no clock, can't set timestamps */
+      timestamp = GST_CLOCK_TIME_NONE;
+    }
     GST_OBJECT_UNLOCK (element);
   }
 
@@ -913,6 +1210,10 @@ enum
 {
   PROP_0,
   PROP_DEVICE_INDEX,
+  PROP_DEVICE_NAME,
+  PROP_POSITION,
+  PROP_ORIENTATION,
+  PROP_DEVICE_TYPE,
   PROP_DO_STATS,
   PROP_FPS,
 #if !HAVE_IOS
@@ -938,14 +1239,16 @@ static gboolean gst_avf_video_src_start (GstBaseSrc * basesrc);
 static gboolean gst_avf_video_src_stop (GstBaseSrc * basesrc);
 static gboolean gst_avf_video_src_query (GstBaseSrc * basesrc,
     GstQuery * query);
-static gboolean gst_avf_video_src_decide_allocation (GstBaseSrc * basesrc,
-    GstQuery * query);
 static gboolean gst_avf_video_src_unlock (GstBaseSrc * basesrc);
 static gboolean gst_avf_video_src_unlock_stop (GstBaseSrc * basesrc);
 static GstFlowReturn gst_avf_video_src_create (GstPushSrc * pushsrc,
     GstBuffer ** buf);
-static gboolean gst_avf_video_src_negotiate (GstBaseSrc * basesrc);
-
+static GstCaps * gst_avf_video_src_fixate (GstBaseSrc * bsrc,
+    GstCaps * caps);
+static gboolean gst_avf_video_src_decide_allocation (GstBaseSrc * bsrc,
+    GstQuery * query);
+static void gst_avf_video_src_set_context (GstElement * element,
+        GstContext * context);
 
 static void
 gst_avf_video_src_class_init (GstAVFVideoSrcClass * klass)
@@ -960,6 +1263,7 @@ gst_avf_video_src_class_init (GstAVFVideoSrcClass * klass)
   gobject_class->set_property = gst_avf_video_src_set_property;
 
   gstelement_class->change_state = gst_avf_video_src_change_state;
+  gstelement_class->set_context = gst_avf_video_src_set_context;
 
   gstbasesrc_class->get_caps = gst_avf_video_src_get_caps;
   gstbasesrc_class->set_caps = gst_avf_video_src_set_caps;
@@ -968,24 +1272,42 @@ gst_avf_video_src_class_init (GstAVFVideoSrcClass * klass)
   gstbasesrc_class->query = gst_avf_video_src_query;
   gstbasesrc_class->unlock = gst_avf_video_src_unlock;
   gstbasesrc_class->unlock_stop = gst_avf_video_src_unlock_stop;
+  gstbasesrc_class->fixate = gst_avf_video_src_fixate;
   gstbasesrc_class->decide_allocation = gst_avf_video_src_decide_allocation;
-  gstbasesrc_class->negotiate = gst_avf_video_src_negotiate;
 
   gstpushsrc_class->create = gst_avf_video_src_create;
 
   gst_element_class_set_metadata (gstelement_class,
-      "Video Source (AVFoundation)", "Source/Video",
+      "Video Source (AVFoundation)", "Source/Video/Hardware",
       "Reads frames from an iOS AVFoundation device",
       "Ole André Vadla Ravnås <oleavr@soundrop.com>");
 
-  gst_element_class_add_pad_template (gstelement_class,
-      gst_static_pad_template_get (&src_template));
+  gst_element_class_add_static_pad_template (gstelement_class, &src_template);
 
   g_object_class_install_property (gobject_class, PROP_DEVICE_INDEX,
       g_param_spec_int ("device-index", "Device Index",
           "The zero-based device index",
           -1, G_MAXINT, DEFAULT_DEVICE_INDEX,
           G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (gobject_class, PROP_DEVICE_NAME,
+      g_param_spec_string ("device-name", "Device Name",
+          "The name of the currently opened capture device",
+          NULL, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (gobject_class, PROP_POSITION,
+      g_param_spec_enum ("position", "Position",
+          "The position of the capture device (front or back-facing)",
+          GST_TYPE_AVF_VIDEO_SOURCE_POSITION, DEFAULT_POSITION,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (gobject_class, PROP_ORIENTATION,
+      g_param_spec_enum ("orientation", "Orientation",
+          "The orientation of the video",
+          GST_TYPE_AVF_VIDEO_SOURCE_ORIENTATION, DEFAULT_ORIENTATION,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (gobject_class, PROP_DEVICE_TYPE,
+      g_param_spec_enum ("device-type", "Device Type",
+          "The general type of a video capture device",
+          GST_TYPE_AVF_VIDEO_SOURCE_DEVICE_TYPE, DEFAULT_DEVICE_TYPE,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
   g_object_class_install_property (gobject_class, PROP_DO_STATS,
       g_param_spec_boolean ("do-stats", "Enable statistics",
           "Enable logging of statistics", DEFAULT_DO_STATS,
@@ -1013,28 +1335,16 @@ gst_avf_video_src_class_init (GstAVFVideoSrcClass * klass)
       0, "iOS AVFoundation video source");
 }
 
-#define OBJC_CALLOUT_BEGIN() \
-  NSAutoreleasePool *pool; \
-  \
-  pool = [[NSAutoreleasePool alloc] init]
-#define OBJC_CALLOUT_END() \
-  [pool release]
-
-
 static void
 gst_avf_video_src_init (GstAVFVideoSrc * src)
 {
-  OBJC_CALLOUT_BEGIN ();
-  src->impl = [[GstAVFVideoSrcImpl alloc] initWithSrc:GST_PUSH_SRC (src)];
-  OBJC_CALLOUT_END ();
+  src->impl = (__bridge_retained gpointer)[[GstAVFVideoSrcImpl alloc] initWithSrc:GST_PUSH_SRC (src)];
 }
 
 static void
 gst_avf_video_src_finalize (GObject * obj)
 {
-  OBJC_CALLOUT_BEGIN ();
-  [GST_AVF_VIDEO_SRC_IMPL (obj) release];
-  OBJC_CALLOUT_END ();
+  CFBridgingRelease(GST_AVF_VIDEO_SRC_CAST(obj)->impl);
 
   G_OBJECT_CLASS (parent_class)->finalize (obj);
 }
@@ -1060,6 +1370,18 @@ gst_avf_video_src_get_property (GObject * object, guint prop_id, GValue * value,
     case PROP_DEVICE_INDEX:
       g_value_set_int (value, impl.deviceIndex);
       break;
+    case PROP_DEVICE_NAME:
+      g_value_set_string (value, impl.deviceName);
+      break;
+    case PROP_POSITION:
+      g_value_set_enum (value, impl.position);
+      break;
+    case PROP_ORIENTATION:
+      g_value_set_enum (value, impl.orientation);
+      break;
+    case PROP_DEVICE_TYPE:
+      g_value_set_enum (value, impl.deviceType);
+      break;
     case PROP_DO_STATS:
       g_value_set_boolean (value, impl.doStats);
       break;
@@ -1095,6 +1417,15 @@ gst_avf_video_src_set_property (GObject * object, guint prop_id,
     case PROP_DEVICE_INDEX:
       impl.deviceIndex = g_value_get_int (value);
       break;
+    case PROP_POSITION:
+      impl.position = g_value_get_enum (value);
+      break;
+    case PROP_ORIENTATION:
+      impl.orientation = g_value_get_enum (value);
+      break;
+    case PROP_DEVICE_TYPE:
+      impl.deviceType = g_value_get_enum (value);
+      break;
     case PROP_DO_STATS:
       impl.doStats = g_value_get_boolean (value);
       break;
@@ -1109,9 +1440,7 @@ gst_avf_video_src_change_state (GstElement * element, GstStateChange transition)
 {
   GstStateChangeReturn ret;
 
-  OBJC_CALLOUT_BEGIN ();
   ret = [GST_AVF_VIDEO_SRC_IMPL (element) changeState: transition];
-  OBJC_CALLOUT_END ();
 
   return ret;
 }
@@ -1121,9 +1450,7 @@ gst_avf_video_src_get_caps (GstBaseSrc * basesrc, GstCaps * filter)
 {
   GstCaps *ret;
 
-  OBJC_CALLOUT_BEGIN ();
   ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) getCaps];
-  OBJC_CALLOUT_END ();
 
   return ret;
 }
@@ -1133,9 +1460,7 @@ gst_avf_video_src_set_caps (GstBaseSrc * basesrc, GstCaps * caps)
 {
   gboolean ret;
 
-  OBJC_CALLOUT_BEGIN ();
   ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) setCaps:caps];
-  OBJC_CALLOUT_END ();
 
   return ret;
 }
@@ -1145,9 +1470,7 @@ gst_avf_video_src_start (GstBaseSrc * basesrc)
 {
   gboolean ret;
 
-  OBJC_CALLOUT_BEGIN ();
   ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) start];
-  OBJC_CALLOUT_END ();
 
   return ret;
 }
@@ -1157,9 +1480,7 @@ gst_avf_video_src_stop (GstBaseSrc * basesrc)
 {
   gboolean ret;
 
-  OBJC_CALLOUT_BEGIN ();
   ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) stop];
-  OBJC_CALLOUT_END ();
 
   return ret;
 }
@@ -1169,21 +1490,7 @@ gst_avf_video_src_query (GstBaseSrc * basesrc, GstQuery * query)
 {
   gboolean ret;
 
-  OBJC_CALLOUT_BEGIN ();
   ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) query:query];
-  OBJC_CALLOUT_END ();
-
-  return ret;
-}
-
-static gboolean
-gst_avf_video_src_decide_allocation (GstBaseSrc * basesrc, GstQuery * query)
-{
-  gboolean ret;
-
-  OBJC_CALLOUT_BEGIN ();
-  ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) decideAllocation:query];
-  OBJC_CALLOUT_END ();
 
   return ret;
 }
@@ -1193,9 +1500,7 @@ gst_avf_video_src_unlock (GstBaseSrc * basesrc)
 {
   gboolean ret;
 
-  OBJC_CALLOUT_BEGIN ();
   ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) unlock];
-  OBJC_CALLOUT_END ();
 
   return ret;
 }
@@ -1205,9 +1510,7 @@ gst_avf_video_src_unlock_stop (GstBaseSrc * basesrc)
 {
   gboolean ret;
 
-  OBJC_CALLOUT_BEGIN ();
   ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) unlockStop];
-  OBJC_CALLOUT_END ();
 
   return ret;
 }
@@ -1217,20 +1520,152 @@ gst_avf_video_src_create (GstPushSrc * pushsrc, GstBuffer ** buf)
 {
   GstFlowReturn ret;
 
-  OBJC_CALLOUT_BEGIN ();
   ret = [GST_AVF_VIDEO_SRC_IMPL (pushsrc) create: buf];
-  OBJC_CALLOUT_END ();
+
+  return ret;
+}
+
+static GstCaps *
+gst_avf_video_src_fixate (GstBaseSrc * bsrc, GstCaps * caps)
+{
+  GstCaps *ret;
+
+  ret = [GST_AVF_VIDEO_SRC_IMPL (bsrc) fixate:caps];
 
   return ret;
 }
 
 static gboolean
-gst_avf_video_src_negotiate (GstBaseSrc * basesrc)
+gst_avf_video_src_decide_allocation (GstBaseSrc * bsrc,
+    GstQuery * query)
 {
-  /* FIXME: We don't support reconfiguration yet */
-  if (gst_pad_has_current_caps (GST_BASE_SRC_PAD (basesrc)))
-    return TRUE;
+  gboolean ret;
+
+  ret = [GST_AVF_VIDEO_SRC_IMPL (bsrc) decideAllocation:query];
 
-  return GST_BASE_SRC_CLASS (parent_class)->negotiate (basesrc);
+  return ret;
+}
+
+static void
+gst_avf_video_src_set_context (GstElement * element, GstContext * context)
+{
+  [GST_AVF_VIDEO_SRC_IMPL (element) setContext:context];
 }
 
+GstCaps*
+gst_av_capture_device_get_caps (AVCaptureDevice *device, AVCaptureVideoDataOutput *output, GstAVFVideoSourceOrientation orientation)
+{
+  NSArray *formats = [device valueForKey:@"formats"];
+  NSArray *pixel_formats = output.availableVideoCVPixelFormatTypes;
+  GstCaps *result_caps, *result_gl_caps;
+#if !HAVE_IOS
+  GstVideoFormat gl_format = GST_VIDEO_FORMAT_UYVY;
+#else
+  GstVideoFormat gl_format = GST_VIDEO_FORMAT_NV12;
+#endif
+
+  result_caps = gst_caps_new_empty ();
+  result_gl_caps = gst_caps_new_empty ();
+
+  /* Do not use AVCaptureDeviceFormat or AVFrameRateRange only
+   * available in iOS >= 7.0. We use a dynamic approach with key-value
+   * coding or performSelector */
+  for (NSObject *f in [formats reverseObjectEnumerator]) {
+    /* formatDescription can't be retrieved with valueForKey so use a selector here */
+    CMFormatDescriptionRef formatDescription = (__bridge CMFormatDescriptionRef) [f performSelector:@selector(formatDescription)];
+    CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions (formatDescription);
+    dimensions = get_oriented_dimensions (orientation, dimensions);
+
+    for (NSObject *rate in [f valueForKey:@"videoSupportedFrameRateRanges"]) {
+      int min_fps_n, min_fps_d, max_fps_n, max_fps_d;
+      gdouble min_fps, max_fps;
+
+      [[rate valueForKey:@"minFrameRate"] getValue:&min_fps];
+      gst_util_double_to_fraction (min_fps, &min_fps_n, &min_fps_d);
+
+      [[rate valueForKey:@"maxFrameRate"] getValue:&max_fps];
+      gst_util_double_to_fraction (max_fps, &max_fps_n, &max_fps_d);
+
+      for (NSNumber *pixel_format in pixel_formats) {
+        GstVideoFormat gst_format = get_gst_video_format (pixel_format);
+
+        if (gst_format != GST_VIDEO_FORMAT_UNKNOWN) {
+          if (min_fps != max_fps)
+            gst_caps_append (result_caps, GST_AVF_FPS_RANGE_CAPS_NEW (gst_format, dimensions.width, dimensions.height, min_fps_n, min_fps_d, max_fps_n, max_fps_d));
+          else
+            gst_caps_append (result_caps, GST_AVF_CAPS_NEW (gst_format, dimensions.width, dimensions.height, max_fps_n, max_fps_d));
+        }
+
+        if (gst_format == gl_format) {
+          GstCaps *gl_caps;
+          if (min_fps != max_fps) {
+            gl_caps = GST_AVF_FPS_RANGE_CAPS_NEW (gl_format,
+                                                  dimensions.width, dimensions.height,
+                                                  min_fps_n, min_fps_d,
+                                                  max_fps_n, max_fps_d);
+          } else {
+            gl_caps = GST_AVF_CAPS_NEW (gl_format,
+                                        dimensions.width, dimensions.height,
+                                        max_fps_n, max_fps_d);
+          }
+          gst_caps_set_features (gl_caps, 0,
+                                 gst_caps_features_new (GST_CAPS_FEATURE_MEMORY_GL_MEMORY,
+                                                        NULL));
+          gst_caps_set_simple (gl_caps,
+                               "texture-target", G_TYPE_STRING,
+#if !HAVE_IOS
+                               GST_GL_TEXTURE_TARGET_RECTANGLE_STR,
+#else
+                               GST_GL_TEXTURE_TARGET_2D_STR,
+#endif
+                               NULL);
+          gst_caps_append (result_gl_caps, gl_caps);
+        }
+      }
+    }
+  }
+
+  result_gl_caps = gst_caps_simplify (gst_caps_merge (result_gl_caps, result_caps));
+
+  return result_gl_caps;
+}
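+
+/* For a hypothetical device format of 1280x720 supporting 1-30 fps, the
+ * result on macOS would contain caps along the lines of (sketch):
+ *
+ *   video/x-raw(memory:GLMemory), format=UYVY, width=1280, height=720,
+ *       framerate=[ 1/1, 30/1 ], texture-target=rectangle;
+ *   video/x-raw, format=UYVY, width=1280, height=720, framerate=[ 1/1, 30/1 ]
+ */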
+
+static GstVideoFormat
+get_gst_video_format (NSNumber *pixel_format)
+{
+  GstVideoFormat gst_format = GST_VIDEO_FORMAT_UNKNOWN;
+
+  switch ([pixel_format integerValue]) {
+    case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange: /* 420v */
+      gst_format = GST_VIDEO_FORMAT_NV12;
+      break;
+    case kCVPixelFormatType_422YpCbCr8: /* 2vuy */
+      gst_format = GST_VIDEO_FORMAT_UYVY;
+      break;
+    case kCVPixelFormatType_32BGRA: /* BGRA */
+      gst_format = GST_VIDEO_FORMAT_BGRA;
+      break;
+    case kCVPixelFormatType_422YpCbCr8_yuvs: /* yuvs */
+      gst_format = GST_VIDEO_FORMAT_YUY2;
+      break;
+    default:
+      break;
+  }
+
+  return gst_format;
+}
+
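+/* Swap width and height for portrait orientations: e.g. a format natively
+ * reported as 1920x1080 is exposed as 1080x1920 when the requested
+ * orientation is portrait or portrait-upside-down. */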
+static CMVideoDimensions
+get_oriented_dimensions (GstAVFVideoSourceOrientation orientation, CMVideoDimensions dimensions)
+{
+  CMVideoDimensions orientedDimensions;
+  if (orientation == GST_AVF_VIDEO_SOURCE_ORIENTATION_PORTRAIT_UPSIDE_DOWN ||
+      orientation == GST_AVF_VIDEO_SOURCE_ORIENTATION_PORTRAIT) {
+    orientedDimensions.width = dimensions.height;
+    orientedDimensions.height = dimensions.width;
+  } else {
+    orientedDimensions = dimensions;
+  }
+  return orientedDimensions;
+}