GstBuffer *next_buffer;
GstBuffer *next_sync;
- GstGLCAOpenGLLayer *layer;
+ gpointer layer;
gboolean keep_aspect_ratio;
static void
gst_ca_opengl_layer_sink_bin_init (GstCAOpenGLLayerSinkBin * self)
{
- GstGLCAOpenGLLayer *sink = g_object_new (GST_TYPE_CA_OPENGL_LAYER_SINK, NULL);
+ gpointer *sink = g_object_new (GST_TYPE_CA_OPENGL_LAYER_SINK, NULL);
g_signal_connect (sink, "notify::layer", G_CALLBACK (_on_notify_layer), self);
g_mutex_clear (&ca_sink->drawing_lock);
+ if (ca_sink->layer) {
+ CFRelease(ca_sink->layer);
+ ca_sink->layer = NULL;
+ }
+
GST_DEBUG ("finalized");
G_OBJECT_CLASS (parent_class)->finalize (object);
}
_create_layer (gpointer data)
{
GstCAOpenGLLayerSink *ca_sink = data;
+ id layer;
if (!ca_sink->layer) {
- ca_sink->layer = [[NSClassFromString(@"GstGLCAOpenGLLayer") alloc]
+ layer = [[NSClassFromString(@"GstGLCAOpenGLLayer") alloc]
initWithGstGLContext:GST_GL_CONTEXT_COCOA (ca_sink->context)];
- [ca_sink->layer setDrawCallback:(GstGLWindowCB)gst_ca_opengl_layer_sink_on_draw
+
+ ca_sink->layer = (__bridge_retained gpointer)layer;
+ [layer setDrawCallback:(GstGLWindowCB)gst_ca_opengl_layer_sink_on_draw
data:ca_sink notify:NULL];
- [ca_sink->layer setResizeCallback:(GstGLWindowResizeCB)gst_ca_opengl_layer_sink_on_resize
+ [layer setResizeCallback:(GstGLWindowResizeCB)gst_ca_opengl_layer_sink_on_resize
data:ca_sink notify:NULL];
g_object_notify (G_OBJECT (ca_sink), "layer");
}
break;
}
case GST_STATE_CHANGE_READY_TO_NULL:
+ if (ca_sink->layer) {
+ CFRelease(ca_sink->layer);
+ ca_sink->layer = NULL;
+ }
break;
default:
break;
/* The layer will automatically call the draw callback to draw the new
* content */
[CATransaction begin];
- [ca_sink->layer setNeedsDisplay];
+ [(__bridge GstGLCAOpenGLLayer *)(ca_sink->layer) setNeedsDisplay];
[CATransaction commit];
GST_TRACE ("post redisplay");
libgstgl_cocoa_la_OBJCFLAGS = \
-I$(top_srcdir)/gst-libs \
-I$(top_builddir)/gst-libs \
+ -fobjc-arc \
$(GL_OBJCFLAGS) \
$(GST_PLUGINS_BASE_CFLAGS) \
$(GST_BASE_CFLAGS) \
gst_object_unref (self->draw_context);
GST_TRACE ("dealloc GstGLCAOpenGLLayer %p context %p", self, self->gst_gl_context);
-
- [super dealloc];
}
static void
_context_ready (gpointer data)
{
+  /* data is the GstGLCAOpenGLLayer handed through the GstGLWindow message
+   * API as a gpointer.  Plain __bridge cast: no ownership transfer here —
+   * the retain taken by the sender (__bridge_retained) is balanced by the
+   * CFRelease destroy-notify it registered. */
-  GstGLCAOpenGLLayer *ca_layer = data;
+  GstGLCAOpenGLLayer *ca_layer = (__bridge GstGLCAOpenGLLayer *) data;
  g_atomic_int_set (&ca_layer->can_draw, 1);
}
- (id)initWithGstGLContext:(GstGLContextCocoa *)parent_gl_context {
-  [super init];
+  /* Assign and check self per the standard init idiom.  The check also
+   * protects the async message below: bridging a nil self would pass NULL
+   * as data, and CFRelease(NULL) in the destroy-notify is undefined. */
+  self = [super init];
+  if (!self)
+    return nil;
  _init_debug();
  self.needsDisplayOnBoundsChange = YES;
+  /* Retain self across the async delivery; CFRelease as the notify
+   * balances the __bridge_retained transfer. */
  gst_gl_window_send_message_async (GST_GL_CONTEXT (parent_gl_context)->window,
-      (GstGLWindowCB) _context_ready, self, NULL);
+      (GstGLWindowCB) _context_ready, (__bridge_retained gpointer)self, (GDestroyNotify)CFRelease);
  return self;
}
- (void)setResizeCallback:(GstGLWindowResizeCB)cb data:(gpointer)data
notify:(GDestroyNotify)notify {
if (self->resize_notify)
- self->resize_notify (self->resize_notify);
+ self->resize_notify (self->resize_data);
self->resize_cb = cb;
self->resize_data = data;
GstGLWindowCocoa *window_cocoa = GST_GL_WINDOW_COCOA (window);
GstGLAPI context_api = GST_GL_API_NONE;
const GLint swapInterval = 1;
- NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
CGLPixelFormatObj fmt = NULL;
CGLContextObj glContext;
CGLPixelFormatAttribute attribs[] = {
if (window)
gst_object_unref (window);
- [pool release];
return TRUE;
{
if (window)
gst_object_unref (window);
- [pool release];
return FALSE;
}
}
static gboolean
gst_gl_display_cocoa_nsapp_iteration (gpointer data)
{
- NSAutoreleasePool *pool = nil;
NSEvent *event = nil;
if (![NSThread isMainThread]) {
return FALSE;
}
- pool = [[NSAutoreleasePool alloc] init];
while ((event = ([NSApp nextEventMatchingMask:NSAnyEventMask
untilDate:[NSDate dateWithTimeIntervalSinceNow:0.05]
[NSApp sendEvent:event];
}
- [pool release];
-
return TRUE;
}
static void
gst_gl_display_cocoa_open_and_attach_source (gpointer data)
{
- NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
-
if ([NSThread isMainThread]) {
/* The sharedApplication class method initializes
* the display environment and connects your program
GST_DEBUG ("NSApp iteration loop attached, id %d", nsapp_source_id);
}
-
- [pool release];
}
static gboolean
struct _GstGLWindowCocoaPrivate
{
+  /* ARC forbids Objective-C object pointers in plain C structs, so these
+   * are stored as gpointer with explicit __bridge_* casts at each use.
+   * internal_win_id holds a __bridge_retained reference released in
+   * gst_gl_window_cocoa_close(); external_view stores the caller-supplied
+   * window handle unretained. */
-  GstGLNSWindow *internal_win_id;
-  NSView *external_view;
+  gpointer internal_win_id;
+  gpointer external_view;
  gboolean visible;
  gint preferred_width;
  gint preferred_height;
  /* atomic set when the internal NSView has been created */
  int view_ready;
+  /* dispatch_queue_t, retained via __bridge_retained at init time */
-  dispatch_queue_t gl_queue;
+  gpointer gl_queue;
};
static void
window->priv->preferred_width = 320;
window->priv->preferred_height = 240;
- window->priv->gl_queue =
- dispatch_queue_create ("org.freedesktop.gstreamer.glwindow", NULL);
+ window->priv->gl_queue = (__bridge_retained gpointer)
+ (dispatch_queue_create ("org.freedesktop.gstreamer.glwindow", NULL));
}
static void
gst_gl_window_cocoa_finalize (GObject * object)
{
  GstGLWindowCocoa *window = GST_GL_WINDOW_COCOA (object);
-  dispatch_release (window->priv->gl_queue);
+  /* gl_queue was taken with __bridge_retained in _init; just nulling the
+   * pointer would leak the queue.  Balance the retain here, mirroring the
+   * CFRelease in gst_gl_window_eagl_finalize, and guard against NULL
+   * since CFRelease(NULL) is undefined. */
+  if (window->priv->gl_queue)
+    CFRelease (window->priv->gl_queue);
+  window->priv->gl_queue = NULL;
  G_OBJECT_CLASS (parent_class)->finalize (object);
}
{
GstGLWindowCocoaPrivate *priv = window_cocoa->priv;
GstGLWindow *window = GST_GL_WINDOW (window_cocoa);
+ GstGLNSWindow *internal_win_id;
NSRect mainRect = [[NSScreen mainScreen] visibleFrame];
gint h = priv->preferred_height;
gint y = mainRect.size.height > h ? (mainRect.size.height - h) * 0.5 : 0;
gst_object_unref (context);
- priv->internal_win_id = [[GstGLNSWindow alloc] initWithContentRect:rect styleMask:
+ internal_win_id = [[GstGLNSWindow alloc] initWithContentRect:rect styleMask:
(NSTitledWindowMask | NSClosableWindowMask |
NSResizableWindowMask | NSMiniaturizableWindowMask)
backing: NSBackingStoreBuffered defer: NO screen: nil gstWin: window_cocoa];
- GST_DEBUG ("NSWindow id: %"G_GUINTPTR_FORMAT, (guintptr) priv->internal_win_id);
+ priv->internal_win_id = (__bridge_retained gpointer)internal_win_id;
+
+ GST_DEBUG ("NSWindow id: %"G_GUINTPTR_FORMAT, (guintptr) priv->internal_win_id);
- [priv->internal_win_id setContentView:glView];
+ [internal_win_id setContentView:glView];
g_atomic_int_set (&window_cocoa->priv->view_ready, 1);
gst_gl_window_cocoa_close (GstGLWindow *window)
{
  GstGLWindowCocoa *window_cocoa = GST_GL_WINDOW_COCOA (window);
+  /* Borrow the NSWindow without transferring ownership; the retain taken
+   * at creation (__bridge_retained) is dropped below. */
  GstGLNSWindow *internal_win_id = (__bridge GstGLNSWindow *)window_cocoa->priv->internal_win_id;
-  [[window_cocoa->priv->internal_win_id contentView] removeFromSuperview];
-  [window_cocoa->priv->internal_win_id release];
-  window_cocoa->priv->internal_win_id = nil;
+  [[internal_win_id contentView] removeFromSuperview];
+  /* CFBridgingRelease hands the +1 back to ARC, which drops it at the end
+   * of this statement — the ARC replacement for the explicit release. */
+  CFBridgingRelease(window_cocoa->priv->internal_win_id);
+  window_cocoa->priv->internal_win_id = NULL;
}
static guintptr
if (priv->internal_win_id) {
if (handle) {
- priv->external_view = (NSView *) handle;
+ priv->external_view = (gpointer)handle;
priv->visible = TRUE;
} else {
/* bring back our internal window */
dispatch_async (dispatch_get_main_queue (), ^{
- NSView *view = [window_cocoa->priv->internal_win_id contentView];
- [window_cocoa->priv->internal_win_id orderOut:window_cocoa->priv->internal_win_id];
+ GstGLNSWindow *internal_win_id =
+ (__bridge GstGLNSWindow *)window_cocoa->priv->internal_win_id;
+ NSView *external_view =
+ (__bridge NSView *)window_cocoa->priv->external_view;
- [window_cocoa->priv->external_view addSubview: view];
+ NSView *view = [internal_win_id contentView];
+ [internal_win_id orderOut:internal_win_id];
- [view setFrame: [window_cocoa->priv->external_view bounds]];
+ [external_view addSubview: view];
+
+ [view setFrame: [external_view bounds]];
[view setAutoresizingMask: NSViewWidthSizable|NSViewHeightSizable];
});
} else {
/* no internal window yet so delay it to the next drawing */
- priv->external_view = (NSView*) handle;
+ priv->external_view = (gpointer)handle;
priv->visible = FALSE;
}
}
{
GstGLWindowCocoa *window_cocoa = GST_GL_WINDOW_COCOA (data);
GstGLWindowCocoaPrivate *priv = window_cocoa->priv;
+ GstGLNSWindow *internal_win_id = (__bridge GstGLNSWindow *)priv->internal_win_id;
GST_DEBUG_OBJECT (window_cocoa, "make the window available\n");
- [priv->internal_win_id makeMainWindow];
- [priv->internal_win_id orderFrontRegardless];
- [priv->internal_win_id setViewsNeedDisplay:YES];
+ [internal_win_id makeMainWindow];
+ [internal_win_id orderFrontRegardless];
+ [internal_win_id setViewsNeedDisplay:YES];
priv->visible = TRUE;
}
{
GstGLWindowCocoa *window_cocoa = GST_GL_WINDOW_COCOA (window);
GstGLNSView *view;
+ GstGLWindowCocoaPrivate *priv = window_cocoa->priv;
+ GstGLNSWindow *internal_win_id = (__bridge GstGLNSWindow *)priv->internal_win_id;
if (!g_atomic_int_get (&window_cocoa->priv->view_ready))
return;
- view = (GstGLNSView *)[window_cocoa->priv->internal_win_id contentView];
+ view = (GstGLNSView *)[internal_win_id contentView];
[view->layer queueResize];
}
{
GstGLWindowCocoa *window_cocoa = GST_GL_WINDOW_COCOA (window);
GstGLNSView *view;
+ GstGLWindowCocoaPrivate *priv = window_cocoa->priv;
+ GstGLNSWindow *internal_win_id = (__bridge GstGLNSWindow *)priv->internal_win_id;
/* As the view is created asynchronously in the main thread we cannot know
* exactly when it will be ready to draw to */
if (!g_atomic_int_get (&window_cocoa->priv->view_ready))
return;
- view = (GstGLNSView *)[window_cocoa->priv->internal_win_id contentView];
+ view = (GstGLNSView *)[internal_win_id contentView];
/* this redraws the GstGLCAOpenGLLayer which calls
* gst_gl_window_cocoa_draw_thread(). Use an explicit CATransaction since we
{
GstGLWindowCocoaPrivate *priv = window_cocoa->priv;
GstGLWindow *window = GST_GL_WINDOW (window_cocoa);
+ GstGLNSWindow *internal_win_id = (__bridge GstGLNSWindow *)priv->internal_win_id;
- if (window_cocoa->priv->internal_win_id && ![priv->internal_win_id isClosed]) {
+ if (internal_win_id && ![internal_win_id isClosed]) {
GstGLWindow *window = GST_GL_WINDOW (window_cocoa);
/* draw opengl scene in the back buffer */
static void
gst_gl_cocoa_resize_cb (GstGLNSView * view, guint width, guint height)
{
- NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
GstGLWindowCocoa *window_cocoa = view->window_cocoa;
GstGLWindow *window = GST_GL_WINDOW (window_cocoa);
GstGLContext *context = gst_gl_window_get_context (window);
+ GstGLWindowCocoaPrivate *priv = window_cocoa->priv;
+ GstGLNSWindow *internal_win_id = (__bridge GstGLNSWindow *)priv->internal_win_id;
- if (window_cocoa->priv->internal_win_id && ![window_cocoa->priv->internal_win_id isClosed]) {
+ if (internal_win_id && ![internal_win_id isClosed]) {
const GstGLFuncs *gl;
NSRect bounds = [view bounds];
NSRect visibleRect = [view visibleRect];
}
gst_object_unref (context);
- [pool release];
}
static void
GstGLWindowCocoa *window_cocoa = (GstGLWindowCocoa *) window;
GstGLContext *context = gst_gl_window_get_context (window);
GThread *thread = gst_gl_context_get_thread (context);
+ GstGLWindowCocoaPrivate *priv = window_cocoa->priv;
+ dispatch_queue_t gl_queue = (__bridge dispatch_queue_t)priv->gl_queue;
if (thread == g_thread_self()) {
/* this case happens for nested calls happening from inside the GCD queue */
destroy (data);
gst_object_unref (context);
} else {
- dispatch_async (window_cocoa->priv->gl_queue, ^{
+ dispatch_async (gl_queue, ^{
gst_gl_context_activate (context, TRUE);
gst_object_unref (context);
callback (data);
m_isClosed = NO;
window_cocoa = cocoa;
+ GstGLWindowCocoaPrivate *priv = window_cocoa->priv;
+ GstGLNSWindow *internal_win_id = (__bridge GstGLNSWindow *)priv->internal_win_id;
+ NSView *external_view = (__bridge NSView *)priv->external_view;
self = [super initWithContentRect: contentRect
styleMask: styleMask backing: bufferingType
[self setBackgroundColor:[NSColor blackColor]];
- [self orderOut:window_cocoa->priv->internal_win_id];
+ [self orderOut:internal_win_id];
return self;
}
/* Called in the main thread which is never the gl thread */
- (BOOL) windowShouldClose:(id)sender {
+ GstGLWindowCocoaPrivate *priv = window_cocoa->priv;
+ GstGLNSWindow *internal_win_id = (__bridge GstGLNSWindow *)priv->internal_win_id;
GST_DEBUG ("user clicked the close button\n");
- [window_cocoa->priv->internal_win_id setClosed];
+ [internal_win_id setClosed];
gst_gl_window_send_message_async (GST_GL_WINDOW (window_cocoa),
(GstGLWindowCB) close_window_cb, gst_object_ref (window_cocoa),
(GDestroyNotify) gst_object_unref);
[self->layer setDrawCallback:(GstGLWindowCB)gst_gl_cocoa_draw_cb
data:window notify:NULL];
[self->layer setResizeCallback:(GstGLWindowResizeCB)gst_gl_cocoa_resize_cb
- data:self notify:NULL];
+ data:(__bridge_retained gpointer)self notify:(GDestroyNotify)CFRelease];
[self setLayerContentsRedrawPolicy:NSViewLayerContentsRedrawOnSetNeedsDisplay];
}
- (void) dealloc {
-  [self->layer release];
-
-  [super dealloc];
+  /* Under ARC explicit release and [super dealloc] are compile errors;
+   * clearing the strong ivar releases the layer, and the super call is
+   * inserted automatically. */
+  self->layer = nil;
}
- (void)renewGState {
libgstgl_eagl_la_OBJCFLAGS = \
-I$(top_srcdir)/gst-libs \
-I$(top_builddir)/gst-libs \
+ -fobjc-arc \
$(GL_CFLAGS) \
$(GL_OBJCFLAGS) \
$(GST_PLUGINS_BASE_CFLAGS) \
#define GST_IS_GL_CONTEXT_EAGL_CLASS(k) (G_TYPE_CHECK_CLASS_TYPE((k), GST_TYPE_GL_CONTEXT_EAGL))
#define GST_GL_CONTEXT_EAGL_GET_CLASS(o) (G_TYPE_INSTANCE_GET_CLASS((o), GST_TYPE_GL_CONTEXT_EAGL, GstGLContextEaglClass))
+#define GS_GL_CONTEXT_EAGL_CONTEXT(obj) \
+ ((__bridge EAGLContext *)(obj->priv->eagl_context))
+#define GS_GL_CONTEXT_EAGL_LAYER(obj) \
+ ((__bridge CAEAGLLayer *)(obj->priv->eagl_layer))
+
typedef struct _GstGLContextEagl GstGLContextEagl;
typedef struct _GstGLContextEaglPrivate GstGLContextEaglPrivate;
typedef struct _GstGLContextEaglClass GstGLContextEaglClass;
struct _GstGLContextEaglPrivate
{
- EAGLContext *eagl_context;
+ gpointer eagl_context;
/* Used if we render to a window */
- CAEAGLLayer *eagl_layer;
+ gpointer eagl_layer;
GLuint framebuffer;
GLuint color_renderbuffer;
GLuint depth_renderbuffer;
int width, height;
glBindRenderbuffer (GL_RENDERBUFFER, eagl_context->priv->color_renderbuffer);
- [eagl_context->priv->eagl_context renderbufferStorage:GL_RENDERBUFFER fromDrawable:eagl_context->priv->eagl_layer];
+ [GS_GL_CONTEXT_EAGL_CONTEXT(eagl_context) renderbufferStorage:GL_RENDERBUFFER fromDrawable:GS_GL_CONTEXT_EAGL_LAYER(eagl_context)];
glGetRenderbufferParameteriv (GL_RENDERBUFFER,
GL_RENDERBUFFER_WIDTH, &width);
glGetRenderbufferParameteriv (GL_RENDERBUFFER,
if (context_eagl->priv->eagl_layer) {
gst_gl_context_eagl_activate (context, TRUE);
- [context_eagl->priv->eagl_context renderbufferStorage: GL_RENDERBUFFER fromDrawable:nil];
+ [GS_GL_CONTEXT_EAGL_CONTEXT(context_eagl) renderbufferStorage: GL_RENDERBUFFER fromDrawable:nil];
glDeleteFramebuffers (1, &context_eagl->priv->framebuffer);
context_eagl->priv->framebuffer = 0;
UIView *window_handle = nil;
GstGLWindow *window = gst_gl_context_get_window (context);
if (window)
- window_handle = (UIView *) gst_gl_window_get_window_handle (window);
+ window_handle = (__bridge UIView *)((void *)gst_gl_window_get_window_handle (window));
if (!window_handle) {
GST_INFO_OBJECT (context, "window handle not set yet, not updating layer");
gst_gl_context_eagl_release_layer (context);
eagl_layer = (CAEAGLLayer *)[window_handle layer];
- [EAGLContext setCurrentContext:priv->eagl_context];
+ [EAGLContext setCurrentContext:GS_GL_CONTEXT_EAGL_CONTEXT(context_eagl)];
/* Allocate framebuffer */
glGenFramebuffers (1, &framebuffer);
/* Allocate color render buffer */
glGenRenderbuffers (1, &color_renderbuffer);
glBindRenderbuffer (GL_RENDERBUFFER, color_renderbuffer);
- [priv->eagl_context renderbufferStorage: GL_RENDERBUFFER fromDrawable:eagl_layer];
+ [GS_GL_CONTEXT_EAGL_CONTEXT(context_eagl) renderbufferStorage: GL_RENDERBUFFER fromDrawable:eagl_layer];
glFramebufferRenderbuffer (GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0,
GL_RENDERBUFFER, color_renderbuffer);
/* Get renderbuffer width/height */
glBindRenderbuffer (GL_RENDERBUFFER, 0);
glBindFramebuffer (GL_FRAMEBUFFER, 0);
- priv->eagl_layer = eagl_layer;
+ priv->eagl_layer = (__bridge_retained gpointer)eagl_layer;
priv->framebuffer = framebuffer;
priv->color_renderbuffer = color_renderbuffer;
priv->depth_renderbuffer = depth_renderbuffer;
EAGLSharegroup *share_group;
if (other_context) {
- EAGLContext *external_gl_context = (EAGLContext *)
+ EAGLContext *external_gl_context = (__bridge EAGLContext *)(void *)
gst_gl_context_get_gl_context (other_context);
share_group = [external_gl_context sharegroup];
} else {
share_group = nil;
}
- priv->eagl_context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES3 sharegroup:share_group];
+ priv->eagl_context = (__bridge_retained gpointer)[[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES3 sharegroup:share_group];
if (!priv->eagl_context) {
- priv->eagl_context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2 sharegroup:share_group];
+ priv->eagl_context = (__bridge_retained gpointer)[[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2 sharegroup:share_group];
}
if (!priv->eagl_context) {
g_set_error_literal (error, GST_GL_CONTEXT_ERROR,
return FALSE;
}
- if (share_group)
- [share_group release];
-
priv->eagl_layer = NULL;
priv->framebuffer = 0;
priv->color_renderbuffer = 0;
gst_gl_context_eagl_release_layer (context);
- [context_eagl->priv->eagl_context release];
- context_eagl->priv->eagl_context = nil;
+ CFRelease(context_eagl->priv->eagl_context);
+ context_eagl->priv->eagl_context = NULL;
}
static gboolean
return TRUE;
if (window)
- window_handle = (UIView *) gst_gl_window_get_window_handle (window);
+ window_handle = (__bridge UIView *)(void *)gst_gl_window_get_window_handle (window);
if (!window_handle) {
gst_object_unref (window);
if (!context_eagl->priv->eagl_layer)
return;
- [context_eagl->priv->eagl_context presentRenderbuffer:GL_RENDERBUFFER];
+ [GS_GL_CONTEXT_EAGL_CONTEXT(context_eagl) presentRenderbuffer:GL_RENDERBUFFER];
}
static gboolean
}
GST_DEBUG ("Attaching context to thread %p", g_thread_self ());
- if ([EAGLContext setCurrentContext:context_eagl->priv->eagl_context] == NO) {
+ if ([EAGLContext setCurrentContext:GS_GL_CONTEXT_EAGL_CONTEXT(context_eagl)] == NO) {
GST_ERROR ("Couldn't make context current");
return FALSE;
}
#define GST_IS_GL_WINDOW_EAGL_CLASS(k) (G_TYPE_CHECK_CLASS_TYPE((k), GST_TYPE_GL_WINDOW_EAGL))
#define GST_GL_WINDOW_EAGL_GET_CLASS(o) (G_TYPE_INSTANCE_GET_CLASS((o), GST_TYPE_GL_WINDOW_EAGL, GstGLWindowEaglClass))
+#define GS_GL_WINDOW_EAGL_VIEW(obj) \
+ ((__bridge UIView *)(obj->priv->view))
+#define GS_GL_WINDOW_EAGL_QUEUE(obj) \
+ ((__bridge dispatch_queue_t)(obj->priv->gl_queue))
+
typedef struct _GstGLWindowEagl GstGLWindowEagl;
typedef struct _GstGLWindowEaglPrivate GstGLWindowEaglPrivate;
typedef struct _GstGLWindowEaglClass GstGLWindowEaglClass;
struct _GstGLWindowEaglPrivate
{
- UIView *view;
+ gpointer view;
gint window_width, window_height;
gint preferred_width, preferred_height;
- dispatch_queue_t gl_queue;
+ gpointer gl_queue;
};
static void
{
window->priv = GST_GL_WINDOW_EAGL_GET_PRIVATE (window);
window->priv->gl_queue =
- dispatch_queue_create ("org.freedesktop.gstreamer.glwindow", NULL);
+ (__bridge_retained gpointer)dispatch_queue_create ("org.freedesktop.gstreamer.glwindow", NULL);
}
static void
gst_gl_window_eagl_finalize (GObject * object)
{
  GstGLWindowEagl *window = GST_GL_WINDOW_EAGL (object);
-  dispatch_release (window->priv->gl_queue);
+  /* Balance the __bridge_retained taken in _init.  Guard against NULL
+   * (CFRelease(NULL) is undefined) and clear the pointer so finalize
+   * cannot leave it dangling — matching the cocoa window finalize. */
+  if (window->priv->gl_queue)
+    CFRelease (window->priv->gl_queue);
+  window->priv->gl_queue = NULL;
  G_OBJECT_CLASS (parent_class)->finalize (object);
}
window_eagl = GST_GL_WINDOW_EAGL (window);
context = gst_gl_window_get_context (window);
- window_eagl->priv->view = (UIView *) handle;
+ window_eagl->priv->view = (gpointer)handle;
GST_INFO_OBJECT (context, "handle set, updating layer");
gst_gl_context_eagl_update_layer (context);
destroy (data);
gst_object_unref (context);
} else {
- dispatch_async (window_eagl->priv->gl_queue, ^{
+ dispatch_async ((__bridge dispatch_queue_t)(window_eagl->priv->gl_queue), ^{
gst_gl_context_activate (context, TRUE);
- gst_object_unref (context);
callback (data);
+ gst_object_unref (context);
if (destroy)
destroy (data);
});
CGSize size;
CAEAGLLayer *eagl_layer;
- eagl_layer = (CAEAGLLayer *)[window_eagl->priv->view layer];
+ eagl_layer = (CAEAGLLayer *)[GS_GL_WINDOW_EAGL_VIEW(window_eagl) layer];
size = eagl_layer.frame.size;
if (window->queue_resize || window_eagl->priv->window_width != size.width ||
libgstapplemedia_la_OBJCFLAGS = \
-I$(top_srcdir)/gst-libs \
-I$(top_builddir)/gst-libs \
+ -fobjc-arc \
$(GST_OBJCFLAGS_WITH_VERSION) \
$(GST_BASE_CFLAGS) \
$(GST_PLUGINS_BASE_CFLAGS)
#define GST_TYPE_AVF_ASSET_SRC \
(gst_avf_asset_src_get_type())
+#define GST_AVF_ASSET_SRC_READER(obj) \
+ ((__bridge GstAVFAssetReader *)(obj->reader))
#define GST_AVF_ASSET_SRC(obj) \
(G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_AVF_ASSET_SRC,GstAVFAssetSrc))
#define GST_AVF_ASSET_SRC_CLASS(klass) \
- (void) start : (GError **) error;
- (void) stop;
- (void) seekTo: (guint64) start : (guint64) stop : (GError **) error;
-- (bool) hasMediaType: (GstAVFAssetReaderMediaType) type;
+- (BOOL) hasMediaType: (GstAVFAssetReaderMediaType) type;
- (GstCaps *) getCaps: (GstAVFAssetReaderMediaType) type;
-- (bool) selectTrack: (GstAVFAssetReaderMediaType) type : (gint) index;
+- (BOOL) selectTrack: (GstAVFAssetReaderMediaType) type : (gint) index;
- (GstBuffer *) nextBuffer: (GstAVFAssetReaderMediaType) type : (GError **) error;
@end
gint selected_video_track;
gint selected_audio_track;
- GstAVFAssetReader *reader;
+ /* NOTE: ARC no longer allows Objective-C pointers in structs. */
+ /* Instead, use gpointer with explicit __bridge_* calls */
+ gpointer reader;
+
GstAVFAssetSrcState state;
GMutex lock;
GstEvent *seek_event;
#define MEDIA_TYPE_TO_STR(x) \
(x == GST_AVF_ASSET_READER_MEDIA_TYPE_AUDIO ? "audio" : "video")
#define AVF_ASSET_READER_HAS_AUDIO(x) \
- ([self->reader hasMediaType:GST_AVF_ASSET_READER_MEDIA_TYPE_AUDIO])
+ ([GST_AVF_ASSET_SRC_READER(self) hasMediaType:GST_AVF_ASSET_READER_MEDIA_TYPE_AUDIO])
#define AVF_ASSET_READER_HAS_VIDEO(x) \
- ([self->reader hasMediaType:GST_AVF_ASSET_READER_MEDIA_TYPE_VIDEO])
-#define OBJC_CALLOUT_BEGIN() \
- NSAutoreleasePool *pool; \
- \
- pool = [[NSAutoreleasePool alloc] init]
-#define OBJC_CALLOUT_END() \
- [pool release]
+ ([GST_AVF_ASSET_SRC_READER(self) hasMediaType:GST_AVF_ASSET_READER_MEDIA_TYPE_VIDEO])
enum
{
gst_element_state_get_name (GST_STATE_TRANSITION_CURRENT (transition)),
gst_element_state_get_name (GST_STATE_TRANSITION_NEXT (transition)));
- OBJC_CALLOUT_BEGIN ();
switch (transition) {
case GST_STATE_CHANGE_NULL_TO_READY: {
self->state = GST_AVF_ASSET_SRC_STATE_STOPPED;
gst_avf_asset_src_stop_all (self);
return GST_STATE_CHANGE_FAILURE;
}
- self->reader = [[GstAVFAssetReader alloc] initWithURI:self->uri:&error];
+ self->reader = (__bridge_retained gpointer)([[GstAVFAssetReader alloc] initWithURI:self->uri:&error]);
if (error) {
GST_ELEMENT_ERROR (element, RESOURCE, FAILED, ("AVFAssetReader error"),
("%s", error->message));
gst_avf_asset_src_stop (self);
break;
case GST_STATE_CHANGE_READY_TO_NULL:
- [self->reader release];
+ CFBridgingRelease(self->reader);
break;
default:
break;
}
- OBJC_CALLOUT_END ();
return ret;
}
ret = TRUE;
break;
case GST_QUERY_DURATION:
- gst_query_set_duration (query, GST_FORMAT_TIME, self->reader.duration);
+ gst_query_set_duration (query, GST_FORMAT_TIME, GST_AVF_ASSET_SRC_READER(self).duration);
ret = TRUE;
break;
case GST_QUERY_POSITION:
- gst_query_set_position (query, GST_FORMAT_TIME, self->reader.position);
+ gst_query_set_position (query, GST_FORMAT_TIME, GST_AVF_ASSET_SRC_READER(self).position);
ret = TRUE;
break;
case GST_QUERY_SEEKING: {
GstFormat fmt;
gst_query_parse_seeking (query, &fmt, NULL, NULL, NULL);
if (fmt == GST_FORMAT_TIME) {
- gst_query_set_seeking (query, GST_FORMAT_TIME, TRUE, 0, self->reader.duration);
+ gst_query_set_seeking (query, GST_FORMAT_TIME, TRUE, 0, GST_AVF_ASSET_SRC_READER(self).duration);
ret = TRUE;
}
break;
gboolean res = TRUE;
GError *error = NULL;
- OBJC_CALLOUT_BEGIN ();
self = GST_AVF_ASSET_SRC (gst_pad_get_parent_element (pad));
switch (GST_EVENT_TYPE (event)) {
stop = GST_CLOCK_TIME_NONE;
}
gst_avf_asset_src_send_event (self, gst_event_new_flush_start ());
- [self->reader seekTo: start: stop: &error];
+ [GST_AVF_ASSET_SRC_READER(self) seekTo: start: stop: &error];
gst_segment_init (&segment, GST_FORMAT_TIME);
segment.rate = rate;
}
gst_object_unref (self);
- OBJC_CALLOUT_END ();
return res;
}
GstFlowReturn ret, combined_ret;
GError *error;
- OBJC_CALLOUT_BEGIN ();
GST_AVF_ASSET_SRC_LOCK (self);
if (self->state != GST_AVF_ASSET_SRC_STATE_READING) {
GST_AVF_ASSET_SRC_UNLOCK (self);
- goto exit;
+ return;
}
- buf = [self->reader nextBuffer:type:&error];
+ buf = [GST_AVF_ASSET_SRC_READER(self) nextBuffer:type:&error];
GST_AVF_ASSET_SRC_UNLOCK (self);
if (buf == NULL) {
gst_avf_asset_src_combine_flows (self, type, GST_FLOW_ERROR);
gst_pad_pause_task (pad);
- goto exit;
+ return;
}
gst_pad_push_event (pad, gst_event_new_eos ());
gst_avf_asset_src_combine_flows (self, type, GST_FLOW_EOS);
gst_pad_pause_task (pad);
- goto exit;
+ return;
}
ret = gst_pad_push (pad, buf);
gst_pad_pause_task (pad);
}
-exit:
- OBJC_CALLOUT_END ();
}
static void
GError *error = NULL;
gboolean ret = TRUE;
- OBJC_CALLOUT_BEGIN ();
- [self->reader start: &error];
+ [GST_AVF_ASSET_SRC_READER(self) start: &error];
if (error != NULL) {
GST_ELEMENT_ERROR (self, RESOURCE, FAILED,
("AVFAssetReader could not start reading"), ("%s", error->message));
}
exit:
- OBJC_CALLOUT_END ();
return ret;
}
{
gboolean ret = TRUE;
- OBJC_CALLOUT_BEGIN ();
if (AVF_ASSET_READER_HAS_VIDEO (self)) {
ret |= gst_pad_push_event (self->videopad, gst_event_ref (event));
}
gst_event_unref (event);
- OBJC_CALLOUT_END ();
return ret;
}
{
GstSegment segment;
- OBJC_CALLOUT_BEGIN ();
if (self->state == GST_AVF_ASSET_SRC_STATE_STARTED) {
- goto exit;
+ return;
}
GST_DEBUG_OBJECT (self, "Creating pads and starting reader");
gst_segment_init (&segment, GST_FORMAT_TIME);
- segment.duration = self->reader.duration;
+ segment.duration = GST_AVF_ASSET_SRC_READER(self).duration;
/* We call AVFAssetReader's startReading when the pads are linked
* and no outputs can be added afterwards, so the tracks must be
* selected before adding any of the new pads */
if (AVF_ASSET_READER_HAS_AUDIO (self)) {
- [self->reader selectTrack: GST_AVF_ASSET_READER_MEDIA_TYPE_AUDIO:
+ [GST_AVF_ASSET_SRC_READER(self) selectTrack: GST_AVF_ASSET_READER_MEDIA_TYPE_AUDIO:
self->selected_audio_track];
}
if (AVF_ASSET_READER_HAS_VIDEO (self)) {
- [self->reader selectTrack: GST_AVF_ASSET_READER_MEDIA_TYPE_VIDEO:
+ [GST_AVF_ASSET_SRC_READER(self) selectTrack: GST_AVF_ASSET_READER_MEDIA_TYPE_VIDEO:
self->selected_video_track];
}
gst_pad_set_active (self->audiopad, TRUE);
gst_avf_asset_src_send_start_stream (self, self->audiopad);
gst_pad_set_caps (self->audiopad,
- [self->reader getCaps: GST_AVF_ASSET_READER_MEDIA_TYPE_AUDIO]);
+ [GST_AVF_ASSET_SRC_READER(self) getCaps: GST_AVF_ASSET_READER_MEDIA_TYPE_AUDIO]);
gst_pad_push_event (self->audiopad, gst_event_new_caps (
- [self->reader getCaps: GST_AVF_ASSET_READER_MEDIA_TYPE_AUDIO]));
+ [GST_AVF_ASSET_SRC_READER(self) getCaps: GST_AVF_ASSET_READER_MEDIA_TYPE_AUDIO]));
gst_pad_push_event (self->audiopad, gst_event_new_segment (&segment));
gst_element_add_pad (GST_ELEMENT (self), self->audiopad);
}
gst_pad_set_active (self->videopad, TRUE);
gst_avf_asset_src_send_start_stream (self, self->videopad);
gst_pad_set_caps (self->videopad,
- [self->reader getCaps: GST_AVF_ASSET_READER_MEDIA_TYPE_VIDEO]);
+ [GST_AVF_ASSET_SRC_READER(self) getCaps: GST_AVF_ASSET_READER_MEDIA_TYPE_VIDEO]);
gst_pad_push_event (self->videopad, gst_event_new_caps (
- [self->reader getCaps: GST_AVF_ASSET_READER_MEDIA_TYPE_VIDEO]));
+ [GST_AVF_ASSET_SRC_READER(self) getCaps: GST_AVF_ASSET_READER_MEDIA_TYPE_VIDEO]));
gst_pad_push_event (self->videopad, gst_event_new_segment (&segment));
gst_element_add_pad (GST_ELEMENT (self), self->videopad);
}
gst_element_no_more_pads (GST_ELEMENT (self));
self->state = GST_AVF_ASSET_SRC_STATE_STARTED;
-
-exit:
- OBJC_CALLOUT_END ();
}
static void
gst_avf_asset_src_stop (GstAVFAssetSrc *self)
{
gboolean has_audio, has_video;
- OBJC_CALLOUT_BEGIN();
if (self->state == GST_AVF_ASSET_SRC_STATE_STOPPED) {
- goto exit;
+ return;
}
GST_DEBUG ("Stopping tasks and removing pads");
has_audio = AVF_ASSET_READER_HAS_AUDIO (self);
has_video = AVF_ASSET_READER_HAS_VIDEO (self);
- [self->reader stop];
+ [GST_AVF_ASSET_SRC_READER(self) stop];
if (has_audio) {
gst_pad_stop_task (self->audiopad);
}
self->state = GST_AVF_ASSET_SRC_STATE_STOPPED;
-
-exit:
- OBJC_CALLOUT_END ();
}
static gboolean
AVAsset *asset;
gboolean ret = FALSE;
- OBJC_CALLOUT_BEGIN ();
str = [NSString stringWithUTF8String: uri];
url = [[NSURL alloc] initWithString: str];
asset = [AVAsset assetWithURL: url];
g_set_error (error, GST_URI_ERROR, GST_URI_ERROR_BAD_URI,
"Invalid URI '%s' for avfassetsrc", uri);
}
- OBJC_CALLOUT_END ();
return ret;
}
- (void) releaseReader
{
+  /* Under ARC, assigning nil to a strong ivar releases the object; the
+   * MRC-era explicit release calls are no longer allowed. */
-  [video_track release];
-  [audio_track release];
-  [video_tracks release];
-  [audio_tracks release];
-  [reader release];
+  video_track = nil;
+  audio_track = nil;
+  video_tracks = nil;
+  audio_tracks = nil;
+  reader = nil;
}
- (void) initReader: (GError **) error
[nserror.description UTF8String]);
*error = g_error_new (GST_AVF_ASSET_SRC_ERROR, GST_AVF_ASSET_ERROR_INIT, "%s",
[nserror.description UTF8String]);
- [asset release];
- [reader release];
+
return;
}
- audio_tracks = [[asset tracksWithMediaType:AVMediaTypeAudio] retain];
- video_tracks = [[asset tracksWithMediaType:AVMediaTypeVideo] retain];
+ audio_tracks = [asset tracksWithMediaType:AVMediaTypeAudio];
+ video_tracks = [asset tracksWithMediaType:AVMediaTypeVideo];
reader.timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration);
GST_INFO ("Found %lu video tracks and %lu audio tracks",
(unsigned long)[video_tracks count], (unsigned long)[audio_tracks count]);
str = [NSString stringWithUTF8String: uri];
url = [[NSURL alloc] initWithString: str];
- asset = [[AVAsset assetWithURL: url] retain];
+ asset = [AVAsset assetWithURL: url];
if (!asset.playable) {
*error = g_error_new (GST_AVF_ASSET_SRC_ERROR, GST_AVF_ASSET_ERROR_NOT_PLAYABLE,
"Media is not playable");
- [asset release];
+ asset = nil;
return nil;
}
return self;
}
-- (bool) selectTrack: (GstAVFAssetReaderMediaType) type : (gint) index
+- (BOOL) selectTrack: (GstAVFAssetReaderMediaType) type : (gint) index
{
NSArray *tracks;
AVAssetTrack *track;
- AVAssetReaderOutput **output;
+ AVAssetReaderOutput * __strong *output;
NSDictionary *settings;
NSString *mediaType;
gint *selected_track;
*output = [AVAssetReaderTrackOutput
assetReaderTrackOutputWithTrack:track
outputSettings:settings];
- [*output retain];
[reader addOutput:*output];
return TRUE;
}
- (void) stop
{
+  /* Cancel any in-flight reads; messaging a nil reader is a harmless
+   * no-op if releaseReader already ran.  Bare ivar access is equivalent
+   * to self->reader. */
-  [self->reader cancelReading];
+  [reader cancelReading];
  reading = FALSE;
}
-- (bool) hasMediaType: (GstAVFAssetReaderMediaType) type
+- (BOOL) hasMediaType: (GstAVFAssetReaderMediaType) type
{
if (type == GST_AVF_ASSET_READER_MEDIA_TYPE_AUDIO) {
return [audio_tracks count] != 0;
return caps;
}
-- (oneway void) release
+- (void) dealloc
{
- [asset release];
-
+ asset = nil;
[self releaseReader];
if (audio_caps != NULL) {
#define GST_AVF_VIDEO_SRC_CLASS(klass) \
(G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_AVF_VIDEO_SRC, GstAVFVideoSrcClass))
#define GST_AVF_VIDEO_SRC_IMPL(obj) \
- ((GstAVFVideoSrcImpl *) GST_AVF_VIDEO_SRC_CAST (obj)->impl)
+ ((__bridge GstAVFVideoSrcImpl *) GST_AVF_VIDEO_SRC_CAST (obj)->impl)
#define GST_IS_AVF_VIDEO_SRC(obj) \
(G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_AVF_VIDEO_SRC))
#define GST_IS_AVF_VIDEO_SRC_CLASS(klass) \
{
GstPushSrc push_src;
+ /* NOTE: ARC no longer allows Objective-C pointers in structs. */
+ /* Instead, use gpointer with explicit __bridge_* calls */
gpointer impl;
};
- (void)finalize
{
+  /* With OS_OBJECT_USE_OBJC (the default on modern SDKs) dispatch objects
+   * are ARC-managed, so dispatch_release is forbidden; clearing the ivars
+   * releases the queues. */
+  /* NOTE(review): -finalize is the GC-era teardown hook; confirm it is
+   * still invoked on the ARC path, otherwise this cleanup belongs in
+   * -dealloc. */
-  dispatch_release (mainQueue);
  mainQueue = NULL;
-  dispatch_release (workerQueue);
  workerQueue = NULL;
-
-  [super finalize];
}
- (BOOL)openDeviceInput
device = [devices objectAtIndex:deviceIndex];
}
g_assert (device != nil);
- [device retain];
GST_INFO ("Opening '%s'", [[device localizedName] UTF8String]);
("Failed to open device: %s",
[[err localizedDescription] UTF8String]),
(NULL));
- [device release];
device = nil;
return NO;
}
- [input retain];
return YES;
}
}
screenInput.capturesMouseClicks = captureScreenMouseClicks;
input = screenInput;
- [input retain];
return YES;
#endif
}
[session removeInput:input];
[session removeOutput:output];
- [session release];
session = nil;
- [input release];
input = nil;
- [output release];
output = nil;
if (!captureScreen) {
- [device release];
device = nil;
}
CMVideoDimensions dimensions;
/* formatDescription can't be retrieved with valueForKey so use a selector here */
- formatDescription = (CMFormatDescriptionRef) [f performSelector:@selector(formatDescription)];
+ formatDescription = (__bridge CMFormatDescriptionRef) [f performSelector:@selector(formatDescription)];
dimensions = CMVideoFormatDescriptionGetDimensions(formatDescription);
for (NSObject *rate in [f valueForKey:@"videoSupportedFrameRateRanges"]) {
int min_fps_n, min_fps_d, max_fps_n, max_fps_d;
CMFormatDescriptionRef formatDescription;
CMVideoDimensions dimensions;
- formatDescription = (CMFormatDescriptionRef) [f performSelector:@selector(formatDescription)];
+ formatDescription = (__bridge CMFormatDescriptionRef) [f performSelector:@selector(formatDescription)];
dimensions = CMVideoFormatDescriptionGetDimensions(formatDescription);
if (dimensions.width == info->width && dimensions.height == info->height) {
found_format = TRUE;
dispatch_sync (mainQueue, ^{ [session stopRunning]; });
dispatch_sync (workerQueue, ^{});
- [bufQueueLock release];
bufQueueLock = nil;
- [bufQueue release];
bufQueue = nil;
if (textureCache)
if ([bufQueue count] == BUFFER_QUEUE_SIZE)
[bufQueue removeLastObject];
- [bufQueue insertObject:@{@"sbuf": (id)sampleBuffer,
+ [bufQueue insertObject:@{@"sbuf": (__bridge id)sampleBuffer,
@"timestamp": @(timestamp),
@"duration": @(duration)}
atIndex:0];
}
NSDictionary *dic = (NSDictionary *) [bufQueue lastObject];
- sbuf = (CMSampleBufferRef) dic[@"sbuf"];
+ sbuf = (__bridge CMSampleBufferRef) dic[@"sbuf"];
timestamp = (GstClockTime) [dic[@"timestamp"] longLongValue];
duration = (GstClockTime) [dic[@"duration"] longLongValue];
CFRetain (sbuf);
0, "iOS AVFoundation video source");
}
-#define OBJC_CALLOUT_BEGIN() \
- NSAutoreleasePool *pool; \
- \
- pool = [[NSAutoreleasePool alloc] init]
-#define OBJC_CALLOUT_END() \
- [pool release]
-
-
static void
gst_avf_video_src_init (GstAVFVideoSrc * src)
{
- OBJC_CALLOUT_BEGIN ();
- src->impl = [[GstAVFVideoSrcImpl alloc] initWithSrc:GST_PUSH_SRC (src)];
- OBJC_CALLOUT_END ();
+ src->impl = (__bridge_retained gpointer)[[GstAVFVideoSrcImpl alloc] initWithSrc:GST_PUSH_SRC (src)];
}
static void
gst_avf_video_src_finalize (GObject * obj)
{
- OBJC_CALLOUT_BEGIN ();
- [GST_AVF_VIDEO_SRC_IMPL (obj) release];
- OBJC_CALLOUT_END ();
+ CFBridgingRelease(GST_AVF_VIDEO_SRC_CAST(obj)->impl);
G_OBJECT_CLASS (parent_class)->finalize (obj);
}
{
GstStateChangeReturn ret;
- OBJC_CALLOUT_BEGIN ();
ret = [GST_AVF_VIDEO_SRC_IMPL (element) changeState: transition];
- OBJC_CALLOUT_END ();
return ret;
}
{
GstCaps *ret;
- OBJC_CALLOUT_BEGIN ();
ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) getCaps];
- OBJC_CALLOUT_END ();
return ret;
}
{
gboolean ret;
- OBJC_CALLOUT_BEGIN ();
ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) setCaps:caps];
- OBJC_CALLOUT_END ();
return ret;
}
{
gboolean ret;
- OBJC_CALLOUT_BEGIN ();
ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) start];
- OBJC_CALLOUT_END ();
return ret;
}
{
gboolean ret;
- OBJC_CALLOUT_BEGIN ();
ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) stop];
- OBJC_CALLOUT_END ();
return ret;
}
{
gboolean ret;
- OBJC_CALLOUT_BEGIN ();
ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) query:query];
- OBJC_CALLOUT_END ();
return ret;
}
{
gboolean ret;
- OBJC_CALLOUT_BEGIN ();
ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) unlock];
- OBJC_CALLOUT_END ();
return ret;
}
{
gboolean ret;
- OBJC_CALLOUT_BEGIN ();
ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) unlockStop];
- OBJC_CALLOUT_END ();
return ret;
}
{
GstFlowReturn ret;
- OBJC_CALLOUT_BEGIN ();
ret = [GST_AVF_VIDEO_SRC_IMPL (pushsrc) create: buf];
- OBJC_CALLOUT_END ();
return ret;
}
{
GstCaps *ret;
- OBJC_CALLOUT_BEGIN ();
ret = [GST_AVF_VIDEO_SRC_IMPL (bsrc) fixate:caps];
- OBJC_CALLOUT_END ();
return ret;
}
{
gboolean ret;
- OBJC_CALLOUT_BEGIN ();
ret = [GST_AVF_VIDEO_SRC_IMPL (bsrc) decideAllocation:query];
- OBJC_CALLOUT_END ();
return ret;
}
static void
gst_avf_video_src_set_context (GstElement * element, GstContext * context)
{
- OBJC_CALLOUT_BEGIN ();
[GST_AVF_VIDEO_SRC_IMPL (element) setContext:context];
- OBJC_CALLOUT_END ();
}
(G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_AV_SAMPLE_VIDEO_SINK))
#define GST_IS_AV_SAMPLE_VIDEO_SINK_CLASS(klass) \
(G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_AV_SAMPLE_VIDEO_SINK))
+#define GST_AV_SAMPLE_VIDEO_SINK_LAYER(obj) \
+ ((__bridge AVSampleBufferDisplayLayer *)(obj->layer))
typedef struct _GstAVSampleVideoSink GstAVSampleVideoSink;
typedef struct _GstAVSampleVideoSinkClass GstAVSampleVideoSinkClass;
{
GstVideoSink video_sink;
- AVSampleBufferDisplayLayer *layer;
+ /* NOTE: ARC forbids Objective-C object pointers inside plain C structs. */
+ /* Store the layer as a gpointer and convert with explicit __bridge_* casts. */
+ gpointer layer;
GstVideoInfo info;
gst_av_sample_video_sink_finalize (GObject * object)
{
GstAVSampleVideoSink *av_sink = GST_AV_SAMPLE_VIDEO_SINK (object);
- __block AVSampleBufferDisplayLayer *layer = av_sink->layer;
+ __block gpointer layer = av_sink->layer;
if (layer) {
dispatch_async (dispatch_get_main_queue (), ^{
- [layer release];
+ CFBridgingRelease(layer);
});
}
GstAVSampleVideoSink *av_sink = GST_AV_SAMPLE_VIDEO_SINK (bsink);
if ([NSThread isMainThread]) {
- av_sink->layer = [[AVSampleBufferDisplayLayer alloc] init];
+ AVSampleBufferDisplayLayer *layer = [[AVSampleBufferDisplayLayer alloc] init];
+ av_sink->layer = (__bridge_retained gpointer)layer;
if (av_sink->keep_aspect_ratio)
- av_sink->layer.videoGravity = AVLayerVideoGravityResizeAspect;
+ layer.videoGravity = AVLayerVideoGravityResizeAspect;
else
- av_sink->layer.videoGravity = AVLayerVideoGravityResize;
+ layer.videoGravity = AVLayerVideoGravityResize;
g_object_notify (G_OBJECT (av_sink), "layer");
} else {
dispatch_sync (dispatch_get_main_queue (), ^{
- av_sink->layer = [[AVSampleBufferDisplayLayer alloc] init];
+ AVSampleBufferDisplayLayer *layer = [[AVSampleBufferDisplayLayer alloc] init];
+ av_sink->layer = (__bridge_retained gpointer)layer;
if (av_sink->keep_aspect_ratio)
- av_sink->layer.videoGravity = AVLayerVideoGravityResizeAspect;
+ layer.videoGravity = AVLayerVideoGravityResizeAspect;
else
- av_sink->layer.videoGravity = AVLayerVideoGravityResize;
+ layer.videoGravity = AVLayerVideoGravityResize;
g_object_notify (G_OBJECT (av_sink), "layer");
});
}
{
if (av_sink->layer) {
if (av_sink->layer_requesting_data)
- [av_sink->layer stopRequestingMediaData];
+ [GST_AV_SAMPLE_VIDEO_SINK_LAYER(av_sink) stopRequestingMediaData];
av_sink->layer_requesting_data = FALSE;
}
}
g_mutex_lock (&av_sink->render_lock);
_stop_requesting_data (av_sink);
g_mutex_unlock (&av_sink->render_lock);
- [av_sink->layer flushAndRemoveImage];
+ [GST_AV_SAMPLE_VIDEO_SINK_LAYER(av_sink) flushAndRemoveImage];
}
return TRUE;
kCFBooleanTrue);
}
+ AVSampleBufferDisplayLayer *layer = GST_AV_SAMPLE_VIDEO_SINK_LAYER(av_sink);
if (av_sink->keep_aspect_ratio)
- av_sink->layer.videoGravity = AVLayerVideoGravityResizeAspect;
+ layer.videoGravity = AVLayerVideoGravityResizeAspect;
else
- av_sink->layer.videoGravity = AVLayerVideoGravityResize;
- [av_sink->layer enqueueSampleBuffer:sample_buf];
+ layer.videoGravity = AVLayerVideoGravityResize;
+ [layer enqueueSampleBuffer:sample_buf];
CFRelease (pbuf);
CFRelease (sample_buf);
{
av_sink->layer_requesting_data = TRUE;
- [av_sink->layer requestMediaDataWhenReadyOnQueue:
+ AVSampleBufferDisplayLayer *layer = GST_AV_SAMPLE_VIDEO_SINK_LAYER(av_sink);
+ [layer requestMediaDataWhenReadyOnQueue:
dispatch_get_global_queue (DISPATCH_QUEUE_PRIORITY_DEFAULT, 0)
usingBlock:^{
while (TRUE) {
/* don't needlessly fill up avsamplebufferdisplaylayer's queue.
* This also allows us to skip displaying late frames */
- if (!av_sink->layer.readyForMoreMediaData)
+ if (!layer.readyForMoreMediaData)
break;
g_mutex_lock (&av_sink->render_lock);
MAC_OS_X_VERSION_MAX_ALLOWED >= 1010 && \
defined(MAC_OS_X_VERSION_MIN_REQUIRED) && \
MAC_OS_X_VERSION_MIN_REQUIRED <= MAC_OS_X_VERSION_10_4
- if ([av_sink->layer status] == AVQueuedSampleBufferRenderingStatusFailed) {
+ AVSampleBufferDisplayLayer *layer = GST_AV_SAMPLE_VIDEO_SINK_LAYER(av_sink);
+ if ([layer status] == AVQueuedSampleBufferRenderingStatusFailed) {
GST_ERROR_OBJECT (av_sink, "failed to enqueue buffer on layer, %s",
- [[[av_sink->layer error] description] UTF8String]);
+ [[[layer error] description] UTF8String]);
return GST_FLOW_ERROR;
}
#endif
#define GST_IS_IOS_ASSET_SRC_CLASS(klass) \
(G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_IOS_ASSET_SRC))
#define GST_IOS_ASSET_SRC_CAST(obj) ((GstIOSAssetSrc*) obj)
+#define GST_IOS_ASSET_SRC_ASSET(obj) \
+ (__bridge ALAssetRepresentation *)(obj->asset)
+#define GST_IOS_ASSET_SRC_LIBRARY(obj) \
+ (__bridge GstAssetsLibrary *)(obj->library)
+#define GST_IOS_ASSET_SRC_URL(obj) \
+ (__bridge NSURL *)(obj->url)
typedef struct _GstIOSAssetSrc GstIOSAssetSrc;
typedef struct _GstIOSAssetSrcClass GstIOSAssetSrcClass;
/*< private >*/
gchar * uri; /* asset uri */
- NSURL * url; /* asset url */
- ALAssetRepresentation * asset; /* asset representation */
- GstAssetsLibrary * library; /* assets library */
+
+ /* NOTE: ARC forbids Objective-C object pointers inside plain C structs. */
+ /* Store them as gpointers and convert with explicit __bridge_* casts. */
+ gpointer url; /* asset url */
+ gpointer asset; /* asset representation */
+ gpointer library; /* assets library */
};
struct _GstIOSAssetSrcClass {
#define DEFAULT_BLOCKSIZE 4*1024
-#define OBJC_CALLOUT_BEGIN() \
- NSAutoreleasePool *pool; \
- \
- pool = [[NSAutoreleasePool alloc] init]
-#define OBJC_CALLOUT_END() \
- [pool release]
enum
{
static void
gst_ios_asset_src_init (GstIOSAssetSrc * src)
{
- OBJC_CALLOUT_BEGIN ();
src->uri = NULL;
src->asset = NULL;
- src->library = [[[GstAssetsLibrary alloc] init] retain];
+ src->library = (__bridge_retained gpointer)[[GstAssetsLibrary alloc] init];
gst_base_src_set_blocksize (GST_BASE_SRC (src), DEFAULT_BLOCKSIZE);
- OBJC_CALLOUT_END ();
}
static void
gst_ios_asset_src_free_resources (GstIOSAssetSrc *src)
{
- OBJC_CALLOUT_BEGIN ();
if (src->asset != NULL) {
- [src->asset release];
+ CFBridgingRelease(src->asset);
src->asset = NULL;
}
if (src->url != NULL) {
- [src->url release];
+ CFBridgingRelease(src->url);
src->url = NULL;
}
g_free (src->uri);
src->uri = NULL;
}
- OBJC_CALLOUT_END ();
}
static void
{
GstIOSAssetSrc *src;
- OBJC_CALLOUT_BEGIN ();
src = GST_IOS_ASSET_SRC (object);
gst_ios_asset_src_free_resources (src);
- [src->library release];
+ CFBridgingRelease(src->library);
- OBJC_CALLOUT_END ();
G_OBJECT_CLASS (gst_ios_asset_src_parent_class)->finalize (object);
}
NSString *nsuristr;
NSURL *url;
- OBJC_CALLOUT_BEGIN ();
/* the element must be stopped in order to do this */
GST_OBJECT_LOCK (src);
state = GST_STATE (src);
}
GST_INFO_OBJECT (src, "URI : %s", src->uri);
- src->url = url;
+ src->url = (__bridge_retained gpointer)url;
src->uri = g_strdup (uri);
g_object_notify (G_OBJECT (src), "uri");
- OBJC_CALLOUT_END ();
return TRUE;
/* ERROR */
"Changing the 'uri' property on iosassetsrc when an asset is "
"open is not supported.");
GST_OBJECT_UNLOCK (src);
- OBJC_CALLOUT_END ();
return FALSE;
}
}
GstFlowReturn ret;
GstIOSAssetSrc *src = GST_IOS_ASSET_SRC (basesrc);
- OBJC_CALLOUT_BEGIN ();
buf = gst_buffer_new_and_alloc (length);
if (G_UNLIKELY (buf == NULL && length > 0)) {
GST_ERROR_OBJECT (src, "Failed to allocate %u bytes", length);
gst_buffer_map (buf, &info, GST_MAP_READWRITE);
/* No need to read anything if length is 0 */
- bytes_read = [src->asset getBytes: info.data
- fromOffset:offset
- length:length
- error:&err];
+ bytes_read = [GST_IOS_ASSET_SRC_ASSET(src) getBytes: info.data
+ fromOffset:offset
+ length:length
+ error:&err];
if (G_UNLIKELY (err != NULL)) {
goto could_not_read;
}
}
exit:
{
- OBJC_CALLOUT_END ();
return ret;
}
src = GST_IOS_ASSET_SRC (basesrc);
- OBJC_CALLOUT_BEGIN ();
- *size = (guint64) [src->asset size];
- OBJC_CALLOUT_END ();
+ *size = (guint64) [GST_IOS_ASSET_SRC_ASSET(src) size];
return TRUE;
}
GstIOSAssetSrc *src = GST_IOS_ASSET_SRC (basesrc);
gboolean ret = TRUE;
- OBJC_CALLOUT_BEGIN ();
- src->asset = [[src->library assetForURLSync: src->url] retain];
+ src->asset = (__bridge_retained gpointer)[GST_IOS_ASSET_SRC_LIBRARY(src) assetForURLSync: GST_IOS_ASSET_SRC_URL(src)];
if (src->asset == NULL) {
GST_ELEMENT_ERROR (src, RESOURCE, OPEN_READ,
ret = FALSE;
};
- OBJC_CALLOUT_END ();
return ret;
}
{
GstIOSAssetSrc *src = GST_IOS_ASSET_SRC (basesrc);
- OBJC_CALLOUT_BEGIN ();
- [src->asset release];
- OBJC_CALLOUT_END ();
+ CFBridgingRelease(src->asset);
+ src->asset = NULL;
return TRUE;
}
dispatch_async(queue, ^{
[self assetForURL:uri resultBlock:
- ^(ALAsset *myasset)
- {
- self.asset = myasset;
- self.result = [myasset defaultRepresentation];
-
- dispatch_semaphore_signal(sema);
- }
- failureBlock:
- ^(NSError *myerror)
- {
- self.result = nil;
- dispatch_semaphore_signal(sema);
- }
+ ^(ALAsset *myasset)
+ {
+ self.asset = myasset;
+ self.result = [myasset defaultRepresentation];
+
+ dispatch_semaphore_signal(sema);
+ }
+ failureBlock:
+ ^(NSError *myerror)
+ {
+ self.result = nil;
+ dispatch_semaphore_signal(sema);
+ }
];
});
dispatch_semaphore_wait(sema, DISPATCH_TIME_FOREVER);
- dispatch_release(sema);
return self.result;
}
{
NSThread * th = [[NSThread alloc] init];
[th start];
- [th release];
g_assert ([NSThread isMultiThreaded]);
}
#endif
typedef struct _TextureWrapper
{
#if HAVE_IOS
- CVOpenGLESTextureCacheRef *cache;
+ CVOpenGLESTextureCacheRef cache;
CVOpenGLESTextureRef texture;
#else
- CVOpenGLTextureCacheRef *cache;
+ CVOpenGLTextureCacheRef cache;
CVOpenGLTextureRef texture;
#endif
CFDictionaryCreateMutable (NULL, 0, &kCFTypeDictionaryKeyCallBacks,
&kCFTypeDictionaryValueCallBacks);
CVOpenGLESTextureCacheCreate (kCFAllocatorDefault, (CFDictionaryRef) cache_attrs,
- (CVEAGLContext) gst_gl_context_get_gl_context (ctx), NULL, &cache->cache);
+ (__bridge CVEAGLContext) (gpointer)gst_gl_context_get_gl_context (ctx), NULL, &cache->cache);
#else
gst_ios_surface_memory_init ();
#if 0
success: {
TextureWrapper *texture_data = g_new(TextureWrapper, 1);
- texture_data->cache = CFRetain(cache->cache);
+ CFRetain(cache->cache);
+ texture_data->cache = cache->cache;
texture_data->texture = texture;
gl_target = gst_gl_texture_target_from_gl (CVOpenGLESTextureGetTarget (texture));
memory = gst_apple_core_video_memory_new_wrapped (gpixbuf, plane, size);
cocoa_videooverlay_SOURCES = cocoa-videooverlay.m
cocoa_videooverlay_OBJCFLAGS=-Wno-error=unused-command-line-argument \
+ -fobjc-arc \
-I$(top_srcdir)/gst-libs -I$(top_builddir)/gst-libs \
$(GST_PLUGINS_BASE_CFLAGS) $(GST_CFLAGS) \
$(GL_CFLAGS) $(GL_OBJCFLAGS)
static void end_stream_cb(GstBus* bus, GstMessage* message, MainWindow* window)
{
- NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
-
g_print ("end of stream\n");
gst_element_set_state ([window pipeline], GST_STATE_NULL);
g_main_loop_quit ([window loop]);
[window performSelectorOnMainThread:@selector(customClose) withObject:nil waitUntilDone:YES];
-
- [pool release];
}
static gpointer thread_func (MainWindow* window)
gboolean ok=FALSE;
GstBus *bus=NULL;
GThread *loop_thread=NULL;
- NSAutoreleasePool *pool=nil;
NSRect rect;
MainWindow *window=nil;
- pool = [[NSAutoreleasePool alloc] init];
[NSApplication sharedApplication];
g_print("app created\n");
bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
gst_bus_add_signal_watch (bus);
- g_signal_connect(bus, "message::error", G_CALLBACK(end_stream_cb), window);
- g_signal_connect(bus, "message::warning", G_CALLBACK(end_stream_cb), window);
- g_signal_connect(bus, "message::eos", G_CALLBACK(end_stream_cb), window);
- gst_bus_set_sync_handler (bus, (GstBusSyncHandler) create_window, window, NULL);
+ /* NOTE: window is passed as an unretained __bridge pointer: g_thread_join()
+ * below guarantees every consumer finishes before this function returns,
+ * so the local strong reference keeps the window alive long enough. */
+ g_signal_connect(bus, "message::error", G_CALLBACK(end_stream_cb), (__bridge gpointer)window);
+ g_signal_connect(bus, "message::warning", G_CALLBACK(end_stream_cb), (__bridge gpointer)window);
+ g_signal_connect(bus, "message::eos", G_CALLBACK(end_stream_cb), (__bridge gpointer)window);
+ gst_bus_set_sync_handler (bus, (GstBusSyncHandler) create_window, (__bridge gpointer)window, NULL);
gst_object_unref (bus);
loop_thread = g_thread_new (NULL,
- (GThreadFunc) thread_func, window);
+ (GThreadFunc) thread_func, (__bridge gpointer)window);
gst_element_set_state (pipeline, GST_STATE_PLAYING);
g_thread_join (loop_thread);
- [window release];
-
- [pool release];
-
return 0;
}