2 * Initially based on gst-omx/omx/gstomxvideodec.c
4 * Copyright (C) 2011, Hewlett-Packard Development Company, L.P.
5 * Author: Sebastian Dröge <sebastian.droege@collabora.co.uk>, Collabora Ltd.
7 * Copyright (C) 2012, Collabora Ltd.
8 * Author: Sebastian Dröge <sebastian.droege@collabora.co.uk>
10 * Copyright (C) 2012, Rafaël Carré <funman@videolan.org>
12 * Copyright (C) 2015, Sebastian Dröge <sebastian@centricular.com>
14 * Copyright (C) 2014-2015, Collabora Ltd.
15 * Author: Matthieu Bouron <matthieu.bouron@collabora.com>
17 * Copyright (C) 2015, Edward Hervey
18 * Author: Edward Hervey <bilboed@gmail.com>
20 * Copyright (C) 2015, Matthew Waters <matthew@centricular.com>
22 * This library is free software; you can redistribute it and/or
23 * modify it under the terms of the GNU Lesser General Public
24 * License as published by the Free Software Foundation
25 * version 2.1 of the License.
27 * This library is distributed in the hope that it will be useful,
28 * but WITHOUT ANY WARRANTY; without even the implied warranty of
29 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
30 * Lesser General Public License for more details.
32 * You should have received a copy of the GNU Lesser General Public
33 * License along with this library; if not, write to the Free Software
34 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
43 #include <gst/gl/gl.h>
44 #include <gst/video/gstvideometa.h>
45 #include <gst/video/gstvideoaffinetransformationmeta.h>
46 #include <gst/video/gstvideopool.h>
52 #define orc_memcpy memcpy
55 #include "gstamcvideodec.h"
56 #include "gstamc-constants.h"
58 GST_DEBUG_CATEGORY_STATIC (gst_amc_video_dec_debug_category);
59 #define GST_CAT_DEFAULT gst_amc_video_dec_debug_category
61 #define GST_VIDEO_DECODER_ERROR_FROM_ERROR(el, err) G_STMT_START { \
62 gchar *__dbg = g_strdup (err->message); \
63 GstVideoDecoder *__dec = GST_VIDEO_DECODER (el); \
64 GST_WARNING_OBJECT (el, "error: %s", __dbg); \
65 _gst_video_decoder_error (__dec, 1, \
66 err->domain, err->code, \
67 NULL, __dbg, __FILE__, GST_FUNCTION, __LINE__); \
68 g_clear_error (&err); \
71 #if GLIB_SIZEOF_VOID_P == 8
72 #define JLONG_TO_GST_AMC_VIDEO_DEC(value) (GstAmcVideoDec *)(value)
73 #define GST_AMC_VIDEO_DEC_TO_JLONG(value) (jlong)(value)
75 #define JLONG_TO_GST_AMC_VIDEO_DEC(value) (GstAmcVideoDec *)(jint)(value)
76 #define GST_AMC_VIDEO_DEC_TO_JLONG(value) (jlong)(jint)(value)
79 typedef struct _BufferIdentification BufferIdentification;
80 struct _BufferIdentification
88 gint64 frame_available_ts;
89 gboolean updated; /* only every call update_tex_image once */
90 gboolean released; /* only every call release_output_buffer once */
91 gboolean rendered; /* whether the release resulted in a render */
94 static struct gl_sync_result *
95 _gl_sync_result_ref (struct gl_sync_result *result)
97 g_assert (result != NULL);
99 g_atomic_int_inc (&result->refcount);
101 GST_TRACE ("gl_sync result %p ref", result);
107 _gl_sync_result_unref (struct gl_sync_result *result)
109 g_assert (result != NULL);
111 GST_TRACE ("gl_sync result %p unref", result);
113 if (g_atomic_int_dec_and_test (&result->refcount)) {
114 GST_TRACE ("freeing gl_sync result %p", result);
122 GstAmcVideoDec *sink; /* back reference for statistics, lock, cond, etc */
123 gint buffer_idx; /* idx of the AMC buffer we should render */
124 GstBuffer *buffer; /* back reference to the buffer */
125 GstGLMemory *oes_mem; /* where amc is rendering into. The same for every gl_sync */
126 GstAmcSurface *surface; /* java wrapper for where amc is rendering into */
127 guint gl_frame_no; /* effectively the frame id */
128 gint64 released_ts; /* microseconds from g_get_monotonic_time() */
129 struct gl_sync_result *result;
132 static struct gl_sync *
133 _gl_sync_ref (struct gl_sync *sync)
135 g_assert (sync != NULL);
137 g_atomic_int_inc (&sync->refcount);
139 GST_TRACE ("gl_sync %p ref", sync);
145 _gl_sync_unref (struct gl_sync *sync)
147 g_assert (sync != NULL);
149 GST_TRACE ("gl_sync %p unref", sync);
151 if (g_atomic_int_dec_and_test (&sync->refcount)) {
152 GST_TRACE ("freeing gl_sync %p", sync);
154 _gl_sync_result_unref (sync->result);
156 g_object_unref (sync->sink);
157 g_object_unref (sync->surface);
158 gst_memory_unref ((GstMemory *) sync->oes_mem);
165 _queue_compare_gl_sync (gconstpointer a, gconstpointer b)
167 const struct gl_sync *sync = a;
168 guint frame = GPOINTER_TO_INT (b);
170 return sync->gl_frame_no - frame;
174 _find_gl_sync_for_frame (GstAmcVideoDec * dec, guint frame)
176 return g_queue_find_custom (dec->gl_queue, GINT_TO_POINTER (frame),
177 (GCompareFunc) _queue_compare_gl_sync);
181 _attach_mem_to_context (GstGLContext * context, GstAmcVideoDec * self)
183 GST_TRACE_OBJECT (self, "attaching texture %p id %u to current context",
184 self->surface->texture, self->oes_mem->tex_id);
185 if (!gst_amc_surface_texture_attach_to_gl_context (self->surface->texture,
186 self->oes_mem->tex_id, &self->gl_error)) {
187 GST_ERROR_OBJECT (self, "Failed to attach texture to the GL context");
188 GST_ELEMENT_ERROR_FROM_ERROR (self, self->gl_error);
190 self->gl_mem_attached = TRUE;
195 _dettach_mem_from_context (GstGLContext * context, GstAmcVideoDec * self)
198 guint tex_id = self->oes_mem ? self->oes_mem->tex_id : 0;
200 GST_TRACE_OBJECT (self, "detaching texture %p id %u from current context",
201 self->surface->texture, tex_id);
203 if (!gst_amc_surface_texture_detach_from_gl_context (self->surface->texture,
205 GST_ERROR_OBJECT (self, "Failed to attach texture to the GL context");
206 GST_ELEMENT_ERROR_FROM_ERROR (self, self->gl_error);
209 self->gl_mem_attached = FALSE;
212 static BufferIdentification *
213 buffer_identification_new (GstClockTime timestamp)
215 BufferIdentification *id = g_slice_new (BufferIdentification);
217 id->timestamp = timestamp;
223 buffer_identification_free (BufferIdentification * id)
225 g_slice_free (BufferIdentification, id);
229 static void gst_amc_video_dec_finalize (GObject * object);
231 static GstStateChangeReturn
232 gst_amc_video_dec_change_state (GstElement * element,
233 GstStateChange transition);
234 static void gst_amc_video_dec_set_context (GstElement * element,
235 GstContext * context);
237 static gboolean gst_amc_video_dec_open (GstVideoDecoder * decoder);
238 static gboolean gst_amc_video_dec_close (GstVideoDecoder * decoder);
239 static gboolean gst_amc_video_dec_start (GstVideoDecoder * decoder);
240 static gboolean gst_amc_video_dec_stop (GstVideoDecoder * decoder);
241 static gboolean gst_amc_video_dec_set_format (GstVideoDecoder * decoder,
242 GstVideoCodecState * state);
243 static gboolean gst_amc_video_dec_flush (GstVideoDecoder * decoder);
244 static GstFlowReturn gst_amc_video_dec_handle_frame (GstVideoDecoder * decoder,
245 GstVideoCodecFrame * frame);
246 static GstFlowReturn gst_amc_video_dec_finish (GstVideoDecoder * decoder);
247 static gboolean gst_amc_video_dec_decide_allocation (GstVideoDecoder * bdec,
249 static gboolean gst_amc_video_dec_src_query (GstVideoDecoder * bdec,
252 static GstFlowReturn gst_amc_video_dec_drain (GstAmcVideoDec * self);
253 static gboolean gst_amc_video_dec_check_codec_config (GstAmcVideoDec * self);
255 gst_amc_video_dec_on_frame_available (JNIEnv * env, jobject thiz,
256 long long context, jobject surfaceTexture);
263 /* class initialization */
265 static void gst_amc_video_dec_class_init (GstAmcVideoDecClass * klass);
266 static void gst_amc_video_dec_init (GstAmcVideoDec * self);
267 static void gst_amc_video_dec_base_init (gpointer g_class);
269 static GstVideoDecoderClass *parent_class = NULL;
272 gst_amc_video_dec_get_type (void)
274 static volatile gsize type = 0;
276 if (g_once_init_enter (&type)) {
278 static const GTypeInfo info = {
279 sizeof (GstAmcVideoDecClass),
280 gst_amc_video_dec_base_init,
282 (GClassInitFunc) gst_amc_video_dec_class_init,
285 sizeof (GstAmcVideoDec),
287 (GInstanceInitFunc) gst_amc_video_dec_init,
291 _type = g_type_register_static (GST_TYPE_VIDEO_DECODER, "GstAmcVideoDec",
294 GST_DEBUG_CATEGORY_INIT (gst_amc_video_dec_debug_category, "amcvideodec", 0,
295 "Android MediaCodec video decoder");
297 g_once_init_leave (&type, _type);
303 caps_to_mime (GstCaps * caps)
308 s = gst_caps_get_structure (caps, 0);
312 name = gst_structure_get_name (s);
314 if (strcmp (name, "video/mpeg") == 0) {
317 if (!gst_structure_get_int (s, "mpegversion", &mpegversion))
320 if (mpegversion == 4)
321 return "video/mp4v-es";
322 else if (mpegversion == 1 || mpegversion == 2)
323 return "video/mpeg2";
324 } else if (strcmp (name, "video/x-h263") == 0) {
326 } else if (strcmp (name, "video/x-h264") == 0) {
328 } else if (strcmp (name, "video/x-h265") == 0) {
330 } else if (strcmp (name, "video/x-vp8") == 0) {
331 return "video/x-vnd.on2.vp8";
332 } else if (strcmp (name, "video/x-vp9") == 0) {
333 return "video/x-vnd.on2.vp9";
334 } else if (strcmp (name, "video/x-divx") == 0) {
335 return "video/mp4v-es";
342 gst_amc_video_dec_base_init (gpointer g_class)
344 GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
345 GstAmcVideoDecClass *amcvideodec_class = GST_AMC_VIDEO_DEC_CLASS (g_class);
346 const GstAmcCodecInfo *codec_info;
347 GstPadTemplate *templ;
348 GstCaps *sink_caps, *src_caps, *all_src_caps;
352 g_type_get_qdata (G_TYPE_FROM_CLASS (g_class), gst_amc_codec_info_quark);
353 /* This happens for the base class and abstract subclasses */
357 amcvideodec_class->codec_info = codec_info;
359 gst_amc_codec_info_to_caps (codec_info, &sink_caps, &src_caps);
362 gst_caps_from_string ("video/x-raw(" GST_CAPS_FEATURE_MEMORY_GL_MEMORY
363 "), format = (string) RGBA, texture-target = (string) external-oes");
365 if (codec_info->gl_output_only) {
366 gst_caps_unref (src_caps);
368 gst_caps_append (all_src_caps, src_caps);
371 /* Add pad templates */
373 gst_pad_template_new ("sink", GST_PAD_SINK, GST_PAD_ALWAYS, sink_caps);
374 gst_element_class_add_pad_template (element_class, templ);
375 gst_caps_unref (sink_caps);
378 gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS, all_src_caps);
379 gst_element_class_add_pad_template (element_class, templ);
380 gst_caps_unref (all_src_caps);
382 longname = g_strdup_printf ("Android MediaCodec %s", codec_info->name);
383 gst_element_class_set_metadata (element_class,
385 "Codec/Decoder/Video/Hardware",
386 longname, "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
391 gst_amc_video_dec_class_init (GstAmcVideoDecClass * klass)
393 GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
394 GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
395 GstVideoDecoderClass *videodec_class = GST_VIDEO_DECODER_CLASS (klass);
397 parent_class = g_type_class_peek_parent (klass);
399 gobject_class->finalize = gst_amc_video_dec_finalize;
401 element_class->change_state =
402 GST_DEBUG_FUNCPTR (gst_amc_video_dec_change_state);
403 element_class->set_context =
404 GST_DEBUG_FUNCPTR (gst_amc_video_dec_set_context);
406 videodec_class->start = GST_DEBUG_FUNCPTR (gst_amc_video_dec_start);
407 videodec_class->stop = GST_DEBUG_FUNCPTR (gst_amc_video_dec_stop);
408 videodec_class->open = GST_DEBUG_FUNCPTR (gst_amc_video_dec_open);
409 videodec_class->close = GST_DEBUG_FUNCPTR (gst_amc_video_dec_close);
410 videodec_class->flush = GST_DEBUG_FUNCPTR (gst_amc_video_dec_flush);
411 videodec_class->set_format = GST_DEBUG_FUNCPTR (gst_amc_video_dec_set_format);
412 videodec_class->handle_frame =
413 GST_DEBUG_FUNCPTR (gst_amc_video_dec_handle_frame);
414 videodec_class->finish = GST_DEBUG_FUNCPTR (gst_amc_video_dec_finish);
415 videodec_class->decide_allocation =
416 GST_DEBUG_FUNCPTR (gst_amc_video_dec_decide_allocation);
417 videodec_class->src_query = GST_DEBUG_FUNCPTR (gst_amc_video_dec_src_query);
421 gst_amc_video_dec_init (GstAmcVideoDec * self)
423 gst_video_decoder_set_packetized (GST_VIDEO_DECODER (self), TRUE);
424 gst_video_decoder_set_needs_format (GST_VIDEO_DECODER (self), TRUE);
426 g_mutex_init (&self->drain_lock);
427 g_cond_init (&self->drain_cond);
429 g_mutex_init (&self->gl_lock);
430 g_cond_init (&self->gl_cond);
432 self->gl_queue = g_queue_new ();
436 gst_amc_video_dec_open (GstVideoDecoder * decoder)
438 GstAmcVideoDec *self = GST_AMC_VIDEO_DEC (decoder);
439 GstAmcVideoDecClass *klass = GST_AMC_VIDEO_DEC_GET_CLASS (self);
442 GST_DEBUG_OBJECT (self, "Opening decoder");
444 self->codec = gst_amc_codec_new (klass->codec_info->name, &err);
446 GST_ELEMENT_ERROR_FROM_ERROR (self, err);
449 self->codec_config = AMC_CODEC_CONFIG_NONE;
451 self->started = FALSE;
452 self->flushing = TRUE;
454 GST_DEBUG_OBJECT (self, "Opened decoder");
460 gst_amc_video_dec_close (GstVideoDecoder * decoder)
462 GstAmcVideoDec *self = GST_AMC_VIDEO_DEC (decoder);
464 GST_DEBUG_OBJECT (self, "Closing decoder");
466 if (self->downstream_supports_gl
467 && self->codec_config == AMC_CODEC_CONFIG_WITH_SURFACE) {
468 g_mutex_lock (&self->gl_lock);
469 GST_INFO_OBJECT (self, "shutting down gl queue pushed %u ready %u "
470 "released %u", self->gl_pushed_frame_count, self->gl_ready_frame_count,
471 self->gl_released_frame_count);
473 g_queue_free_full (self->gl_queue, (GDestroyNotify) _gl_sync_unref);
474 self->gl_queue = g_queue_new ();
475 g_mutex_unlock (&self->gl_lock);
477 if (self->gl_mem_attached)
478 gst_gl_context_thread_add (self->gl_context,
479 (GstGLContextThreadFunc) _dettach_mem_from_context, self);
481 self->gl_pushed_frame_count = 0;
482 self->gl_ready_frame_count = 0;
483 self->gl_released_frame_count = 0;
484 self->gl_last_rendered_frame = 0;
487 gst_object_unref (self->surface);
488 self->surface = NULL;
491 if (self->listener) {
492 JNIEnv *env = gst_amc_jni_get_env ();
495 if (!gst_amc_jni_call_void_method (env, &err, self->listener,
496 self->set_context_id, GST_AMC_VIDEO_DEC_TO_JLONG (NULL))) {
497 GST_ERROR_OBJECT (self, "Failed to unset back pointer on the listener. "
498 "crashes/hangs may ensue: %s", err ? err->message : "Unknown");
499 GST_ELEMENT_ERROR_FROM_ERROR (self, err);
502 gst_amc_jni_object_unref (env, self->listener);
504 self->listener = NULL;
509 gst_amc_codec_release (self->codec, &err);
511 GST_ELEMENT_WARNING_FROM_ERROR (self, err);
513 gst_amc_codec_free (self->codec);
516 self->started = FALSE;
517 self->flushing = TRUE;
518 self->downstream_supports_gl = FALSE;
521 self->codec_config = AMC_CODEC_CONFIG_NONE;
523 GST_DEBUG_OBJECT (self, "Freeing GL context: %" GST_PTR_FORMAT,
525 if (self->gl_context) {
526 gst_object_unref (self->gl_context);
527 self->gl_context = NULL;
531 gst_memory_unref ((GstMemory *) self->oes_mem);
532 self->oes_mem = NULL;
535 if (self->gl_display) {
536 gst_object_unref (self->gl_display);
537 self->gl_display = NULL;
540 if (self->other_gl_context) {
541 gst_object_unref (self->other_gl_context);
542 self->other_gl_context = NULL;
545 GST_DEBUG_OBJECT (self, "Closed decoder");
551 gst_amc_video_dec_finalize (GObject * object)
553 GstAmcVideoDec *self = GST_AMC_VIDEO_DEC (object);
555 g_mutex_clear (&self->drain_lock);
556 g_cond_clear (&self->drain_cond);
558 g_mutex_clear (&self->gl_lock);
559 g_cond_clear (&self->gl_cond);
561 if (self->gl_queue) {
562 g_queue_free_full (self->gl_queue, (GDestroyNotify) _gl_sync_unref);
563 self->gl_queue = NULL;
566 G_OBJECT_CLASS (parent_class)->finalize (object);
570 gst_amc_video_dec_set_context (GstElement * element, GstContext * context)
572 GstAmcVideoDec *self = GST_AMC_VIDEO_DEC (element);
574 gst_gl_handle_set_context (element, context, &self->gl_display,
575 &self->other_gl_context);
577 GST_ELEMENT_CLASS (parent_class)->set_context (element, context);
580 static GstStateChangeReturn
581 gst_amc_video_dec_change_state (GstElement * element, GstStateChange transition)
583 GstAmcVideoDec *self;
584 GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;
587 g_return_val_if_fail (GST_IS_AMC_VIDEO_DEC (element),
588 GST_STATE_CHANGE_FAILURE);
589 self = GST_AMC_VIDEO_DEC (element);
591 GST_DEBUG_OBJECT (element, "changing state: %s => %s",
592 gst_element_state_get_name (GST_STATE_TRANSITION_CURRENT (transition)),
593 gst_element_state_get_name (GST_STATE_TRANSITION_NEXT (transition)));
595 switch (transition) {
596 case GST_STATE_CHANGE_NULL_TO_READY:
598 case GST_STATE_CHANGE_READY_TO_PAUSED:
599 self->downstream_flow_ret = GST_FLOW_OK;
600 self->draining = FALSE;
601 self->started = FALSE;
603 case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
605 case GST_STATE_CHANGE_PAUSED_TO_READY:
606 self->flushing = TRUE;
608 gst_amc_codec_flush (self->codec, &err);
610 GST_ELEMENT_WARNING_FROM_ERROR (self, err);
612 g_mutex_lock (&self->drain_lock);
613 self->draining = FALSE;
614 g_cond_broadcast (&self->drain_cond);
615 g_mutex_unlock (&self->drain_lock);
621 if (ret == GST_STATE_CHANGE_FAILURE)
624 ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
626 if (ret == GST_STATE_CHANGE_FAILURE)
629 switch (transition) {
630 case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
632 case GST_STATE_CHANGE_PAUSED_TO_READY:
633 self->downstream_flow_ret = GST_FLOW_FLUSHING;
634 self->started = FALSE;
643 #define MAX_FRAME_DIST_TIME (5 * GST_SECOND)
644 #define MAX_FRAME_DIST_FRAMES (100)
646 static GstVideoCodecFrame *
647 _find_nearest_frame (GstAmcVideoDec * self, GstClockTime reference_timestamp)
649 GList *l, *best_l = NULL;
650 GList *finish_frames = NULL;
651 GstVideoCodecFrame *best = NULL;
652 guint64 best_timestamp = 0;
653 guint64 best_diff = G_MAXUINT64;
654 BufferIdentification *best_id = NULL;
657 frames = gst_video_decoder_get_frames (GST_VIDEO_DECODER (self));
659 for (l = frames; l; l = l->next) {
660 GstVideoCodecFrame *tmp = l->data;
661 BufferIdentification *id = gst_video_codec_frame_get_user_data (tmp);
662 guint64 timestamp, diff;
664 /* This happens for frames that were just added but
665 * which were not passed to the component yet. Ignore
671 timestamp = id->timestamp;
673 if (timestamp > reference_timestamp)
674 diff = timestamp - reference_timestamp;
676 diff = reference_timestamp - timestamp;
678 if (best == NULL || diff < best_diff) {
680 best_timestamp = timestamp;
685 /* For frames without timestamp we simply take the first frame */
686 if ((reference_timestamp == 0 && !GST_CLOCK_TIME_IS_VALID (timestamp))
693 for (l = frames; l && l != best_l; l = l->next) {
694 GstVideoCodecFrame *tmp = l->data;
695 BufferIdentification *id = gst_video_codec_frame_get_user_data (tmp);
696 guint64 diff_time, diff_frames;
698 if (id->timestamp > best_timestamp)
701 if (id->timestamp == 0 || best_timestamp == 0)
704 diff_time = best_timestamp - id->timestamp;
705 diff_frames = best->system_frame_number - tmp->system_frame_number;
707 if (diff_time > MAX_FRAME_DIST_TIME
708 || diff_frames > MAX_FRAME_DIST_FRAMES) {
710 g_list_prepend (finish_frames, gst_video_codec_frame_ref (tmp));
716 g_warning ("%s: Too old frames, bug in decoder -- please file a bug",
717 GST_ELEMENT_NAME (self));
718 for (l = finish_frames; l; l = l->next) {
719 gst_video_decoder_drop_frame (GST_VIDEO_DECODER (self), l->data);
724 gst_video_codec_frame_ref (best);
726 g_list_foreach (frames, (GFunc) gst_video_codec_frame_unref, NULL);
727 g_list_free (frames);
733 gst_amc_video_dec_check_codec_config (GstAmcVideoDec * self)
735 gboolean ret = (self->codec_config == AMC_CODEC_CONFIG_NONE
736 || (self->codec_config == AMC_CODEC_CONFIG_WITH_SURFACE
737 && self->downstream_supports_gl)
738 || (self->codec_config == AMC_CODEC_CONFIG_WITHOUT_SURFACE
739 && !self->downstream_supports_gl));
744 "Codec configuration (%d) is not compatible with downstream which %s support GL output",
745 self->codec_config, self->downstream_supports_gl ? "does" : "does not");
752 gst_amc_video_dec_set_src_caps (GstAmcVideoDec * self, GstAmcFormat * format)
754 GstVideoCodecState *output_state;
756 gint color_format, width, height;
757 gint stride, slice_height;
758 gint crop_left, crop_right;
759 gint crop_top, crop_bottom;
760 GstVideoFormat gst_format;
761 GstAmcVideoDecClass *klass = GST_AMC_VIDEO_DEC_GET_CLASS (self);
765 if (!gst_amc_format_get_int (format, "color-format", &color_format, &err) ||
766 !gst_amc_format_get_int (format, "width", &width, &err) ||
767 !gst_amc_format_get_int (format, "height", &height, &err)) {
768 GST_ERROR_OBJECT (self, "Failed to get output format metadata: %s",
770 g_clear_error (&err);
774 if (!gst_amc_format_get_int (format, "stride", &stride, &err) ||
775 !gst_amc_format_get_int (format, "slice-height", &slice_height, &err)) {
776 GST_ERROR_OBJECT (self, "Failed to get stride and slice-height: %s",
778 g_clear_error (&err);
782 if (!gst_amc_format_get_int (format, "crop-left", &crop_left, &err) ||
783 !gst_amc_format_get_int (format, "crop-right", &crop_right, &err) ||
784 !gst_amc_format_get_int (format, "crop-top", &crop_top, &err) ||
785 !gst_amc_format_get_int (format, "crop-bottom", &crop_bottom, &err)) {
786 GST_ERROR_OBJECT (self, "Failed to get crop rectangle: %s", err->message);
787 g_clear_error (&err);
791 if (width == 0 || height == 0) {
792 GST_ERROR_OBJECT (self, "Height or width not set");
797 height = height - (height - crop_bottom - 1);
799 height = height - crop_top;
802 width = width - (width - crop_right - 1);
804 width = width - crop_left;
806 mime = caps_to_mime (self->input_state->caps);
808 GST_ERROR_OBJECT (self, "Failed to convert caps to mime");
812 if (self->codec_config == AMC_CODEC_CONFIG_WITH_SURFACE) {
813 gst_format = GST_VIDEO_FORMAT_RGBA;
816 gst_amc_color_format_to_video_format (klass->codec_info, mime,
820 if (gst_format == GST_VIDEO_FORMAT_UNKNOWN) {
821 GST_ERROR_OBJECT (self, "Unknown color format 0x%08x", color_format);
825 output_state = gst_video_decoder_set_output_state (GST_VIDEO_DECODER (self),
826 gst_format, width, height, self->input_state);
828 /* FIXME: Special handling for multiview, untested */
829 if (color_format == COLOR_QCOM_FormatYVU420SemiPlanar32mMultiView) {
830 gst_video_multiview_video_info_change_mode (&output_state->info,
831 GST_VIDEO_MULTIVIEW_MODE_TOP_BOTTOM, GST_VIDEO_MULTIVIEW_FLAGS_NONE);
834 memset (&self->color_format_info, 0, sizeof (self->color_format_info));
835 if (self->codec_config == AMC_CODEC_CONFIG_WITH_SURFACE) {
836 if (output_state->caps)
837 gst_caps_unref (output_state->caps);
838 output_state->caps = gst_video_info_to_caps (&output_state->info);
839 gst_caps_set_features (output_state->caps, 0,
840 gst_caps_features_new (GST_CAPS_FEATURE_MEMORY_GL_MEMORY, NULL));
841 gst_caps_set_simple (output_state->caps, "texture-target", G_TYPE_STRING,
842 "external-oes", NULL);
843 GST_DEBUG_OBJECT (self, "Configuring for Surface output");
845 /* The width/height values are used in other places for
846 * checking if the resolution changed. Set everything
847 * that makes sense here
849 self->color_format_info.color_format = COLOR_FormatAndroidOpaque;
850 self->color_format_info.width = width;
851 self->color_format_info.height = height;
852 self->color_format_info.crop_left = crop_left;
853 self->color_format_info.crop_right = crop_right;
854 self->color_format_info.crop_top = crop_top;
855 self->color_format_info.crop_bottom = crop_bottom;
860 self->format = gst_format;
862 self->height = height;
863 if (!gst_amc_color_format_info_set (&self->color_format_info,
864 klass->codec_info, mime, color_format, width, height, stride,
865 slice_height, crop_left, crop_right, crop_top, crop_bottom)) {
866 GST_ERROR_OBJECT (self, "Failed to set up GstAmcColorFormatInfo");
870 GST_DEBUG_OBJECT (self,
871 "Color format info: {color_format=%d (0x%08x), width=%d, height=%d, "
872 "stride=%d, slice-height=%d, crop-left=%d, crop-top=%d, "
873 "crop-right=%d, crop-bottom=%d, frame-size=%d}",
874 self->color_format_info.color_format,
875 self->color_format_info.color_format, self->color_format_info.width,
876 self->color_format_info.height, self->color_format_info.stride,
877 self->color_format_info.slice_height, self->color_format_info.crop_left,
878 self->color_format_info.crop_top, self->color_format_info.crop_right,
879 self->color_format_info.crop_bottom, self->color_format_info.frame_size);
882 ret = gst_video_decoder_negotiate (GST_VIDEO_DECODER (self));
884 gst_video_codec_state_unref (output_state);
885 self->input_state_changed = FALSE;
891 gst_amc_video_dec_fill_buffer (GstAmcVideoDec * self, GstAmcBuffer * buf,
892 const GstAmcBufferInfo * buffer_info, GstBuffer * outbuf)
894 GstVideoCodecState *state =
895 gst_video_decoder_get_output_state (GST_VIDEO_DECODER (self));
896 GstVideoInfo *info = &state->info;
897 gboolean ret = FALSE;
899 if (self->color_format_info.color_format == COLOR_FormatAndroidOpaque)
903 gst_amc_color_format_copy (&self->color_format_info, buf, buffer_info,
904 info, outbuf, COLOR_FORMAT_COPY_OUT);
906 gst_video_codec_state_unref (state);
910 static const gfloat yflip_matrix[16] = {
911 1.0f, 0.0f, 0.0f, 0.0f,
912 0.0f, -1.0f, 0.0f, 0.0f,
913 0.0f, 0.0f, 1.0f, 0.0f,
914 0.0f, 1.0f, 0.0f, 1.0f
918 _amc_gl_set_sync (GstGLSyncMeta * sync_meta, GstGLContext * context)
923 _gl_sync_release_buffer (struct gl_sync *sync, gboolean render)
925 GError *error = NULL;
927 if (!sync->result->released) {
928 sync->released_ts = g_get_monotonic_time ();
930 if ((gint) (sync->sink->gl_released_frame_count -
931 sync->sink->gl_ready_frame_count) > 0) {
933 sync->sink->gl_released_frame_count -
934 sync->sink->gl_ready_frame_count - 1u;
935 sync->sink->gl_ready_frame_count += diff;
936 GST_LOG ("gl_sync %p possible \'on_frame_available\' listener miss "
937 "detected, attempting to work around. Jumping forward %u "
938 "frames for frame %u", sync, diff, sync->gl_frame_no);
941 GST_TRACE ("gl_sync %p release_output_buffer idx %u frame %u render %s",
942 sync, sync->buffer_idx, sync->gl_frame_no, render ? "TRUE" : "FALSE");
944 /* Release the frame into the surface */
945 sync->sink->gl_released_frame_count++;
947 /* Advance the ready counter ourselves if we aren't going to render
948 * and therefore receive a listener callback */
949 sync->sink->gl_ready_frame_count++;
952 if (!gst_amc_codec_release_output_buffer (sync->sink->codec,
953 sync->buffer_idx, render, &error)) {
954 GST_ERROR_OBJECT (sync->sink,
955 "gl_sync %p Failed to render buffer, index %d frame %u", sync,
956 sync->buffer_idx, sync->gl_frame_no);
959 sync->result->released = TRUE;
960 sync->result->rendered = render;
965 if (sync->sink->gl_error == NULL)
966 sync->sink->gl_error = error;
968 g_clear_error (&error);
973 _gl_sync_release_next_buffer (struct gl_sync *sync, gboolean render)
977 if ((l = _find_gl_sync_for_frame (sync->sink, sync->gl_frame_no + 1))) {
978 struct gl_sync *next = l->data;
980 _gl_sync_release_buffer (next, render);
982 GST_TRACE ("gl_sync %p no next frame available", sync);
986 #define I(x,y) ((y)*4+(x))
988 affine_inverse (float in[], float out[])
990 float s0, s1, s2, s3, s4, s5;
991 float c0, c1, c2, c3, c4, c5;
994 s0 = in[0] * in[I (1, 1)] - in[I (1, 0)] * in[I (0, 1)];
995 s1 = in[0] * in[I (1, 2)] - in[I (1, 0)] * in[I (0, 2)];
996 s2 = in[0] * in[I (1, 3)] - in[I (1, 0)] * in[I (0, 3)];
997 s3 = in[1] * in[I (1, 2)] - in[I (1, 1)] * in[I (0, 2)];
998 s4 = in[1] * in[I (1, 3)] - in[I (1, 1)] * in[I (0, 3)];
999 s5 = in[2] * in[I (1, 3)] - in[I (1, 2)] * in[I (0, 3)];
1001 c0 = in[I (2, 0)] * in[I (3, 1)] - in[I (3, 0)] * in[I (2, 1)];
1002 c1 = in[I (2, 0)] * in[I (3, 2)] - in[I (3, 0)] * in[I (2, 2)];
1003 c2 = in[I (2, 0)] * in[I (3, 3)] - in[I (3, 0)] * in[I (2, 3)];
1004 c3 = in[I (2, 1)] * in[I (3, 2)] - in[I (3, 1)] * in[I (2, 2)];
1005 c4 = in[I (2, 1)] * in[I (3, 3)] - in[I (3, 1)] * in[I (2, 3)];
1006 c5 = in[I (2, 2)] * in[I (3, 3)] - in[I (3, 2)] * in[I (2, 3)];
1008 det = s0 * c5 - s1 * c4 + s2 * c3 + s3 * c2 - s4 * c1 + s5 * c0;
1014 (in[I (1, 1)] * c5 - in[I (1, 2)] * c4 + in[I (1, 3)] * c3) * invdet;
1016 (-in[I (0, 1)] * c5 + in[I (0, 2)] * c4 - in[I (0, 3)] * c3) * invdet;
1018 (in[I (3, 1)] * s5 - in[I (3, 2)] * s4 + in[I (3, 3)] * s3) * invdet;
1020 (-in[I (2, 1)] * s5 + in[I (2, 2)] * s4 - in[I (2, 3)] * s3) * invdet;
1023 (-in[I (1, 0)] * c5 + in[I (1, 2)] * c2 - in[I (1, 3)] * c1) * invdet;
1025 (in[I (0, 0)] * c5 - in[I (0, 2)] * c2 + in[I (0, 3)] * c1) * invdet;
1027 (-in[I (3, 0)] * s5 + in[I (3, 2)] * s2 - in[I (3, 3)] * s1) * invdet;
1029 (in[I (2, 0)] * s5 - in[I (2, 2)] * s2 + in[I (2, 3)] * s1) * invdet;
1032 (in[I (1, 0)] * c4 - in[I (1, 1)] * c2 + in[I (1, 3)] * c0) * invdet;
1034 (-in[I (0, 0)] * c4 + in[I (0, 1)] * c2 - in[I (0, 3)] * c0) * invdet;
1036 (in[I (3, 0)] * s4 - in[I (3, 1)] * s2 + in[I (3, 3)] * s0) * invdet;
1038 (-in[I (2, 0)] * s4 + in[I (2, 1)] * s2 - in[I (2, 3)] * s0) * invdet;
1041 (-in[I (1, 0)] * c3 + in[I (1, 1)] * c1 - in[I (1, 2)] * c0) * invdet;
1043 (in[I (0, 0)] * c3 - in[I (0, 1)] * c1 + in[I (0, 2)] * c0) * invdet;
1045 (-in[I (3, 0)] * s3 + in[I (3, 1)] * s1 - in[I (3, 2)] * s0) * invdet;
1047 (in[I (2, 0)] * s3 - in[I (2, 1)] * s1 + in[I (2, 2)] * s0) * invdet;
1054 /* caller should remove from the gl_queue after calling this function.
1055 * _gl_sync_release_buffer must be called before this function */
1057 _gl_sync_render_unlocked (struct gl_sync *sync)
1059 GstVideoAffineTransformationMeta *af_meta;
1060 GError *error = NULL;
1064 GST_TRACE ("gl_sync %p result %p render (updated:%u)", sync, sync->result,
1065 sync->result->updated);
1067 if (sync->result->updated || !sync->result->rendered)
1070 /* FIXME: if this ever starts returning valid values we should attempt
1072 if (!gst_amc_surface_texture_get_timestamp (sync->surface->texture, &ts,
1074 GST_ERROR_OBJECT (sync->sink, "Failed to update texture image");
1075 GST_ELEMENT_ERROR_FROM_ERROR (sync->sink, error);
1078 GST_TRACE ("gl_sync %p rendering timestamp before update %" G_GINT64_FORMAT,
1081 GST_TRACE ("gl_sync %p update_tex_image", sync);
1082 if (!gst_amc_surface_texture_update_tex_image (sync->surface->texture,
1084 GST_ERROR_OBJECT (sync->sink, "Failed to update texture image");
1085 GST_ELEMENT_ERROR_FROM_ERROR (sync->sink, error);
1088 GST_TRACE ("gl_sync result %p updated", sync->result);
1089 sync->result->updated = TRUE;
1090 sync->sink->gl_last_rendered_frame = sync->gl_frame_no;
1092 if (!gst_amc_surface_texture_get_timestamp (sync->surface->texture, &ts,
1094 GST_ERROR_OBJECT (sync->sink, "Failed to update texture image");
1095 GST_ELEMENT_ERROR_FROM_ERROR (sync->sink, error);
1098 GST_TRACE ("gl_sync %p rendering timestamp after update %" G_GINT64_FORMAT,
1101 af_meta = gst_buffer_get_video_affine_transformation_meta (sync->buffer);
1103 GST_WARNING ("Failed to retreive the transformation meta from the "
1104 "gl_sync %p buffer %p", sync, sync->buffer);
1105 } else if (gst_amc_surface_texture_get_transform_matrix (sync->surface->
1106 texture, matrix, &error)) {
1109 /* The transform from mediacodec applies to the texture coords, but
1110 * GStreamer affine meta applies to the video geometry, which is the
1111 * opposite - so we invert it */
1112 if (affine_inverse (matrix, inv_mat)) {
1113 gst_video_affine_transformation_meta_apply_matrix (af_meta, inv_mat);
1116 ("Failed to invert display transform - the video won't display right. "
1117 "Transform matrix [ %f %f %f %f, %f %f %f %f, %f %f %f %f, %f %f %f %f ]",
1118 matrix[0], matrix[1], matrix[2], matrix[3], matrix[4], matrix[5],
1119 matrix[6], matrix[7], matrix[8], matrix[9], matrix[10], matrix[11],
1120 matrix[12], matrix[13], matrix[14], matrix[15]);
1122 gst_video_affine_transformation_meta_apply_matrix (af_meta, yflip_matrix);
1125 GST_LOG ("gl_sync %p successfully updated SurfaceTexture %p into "
1126 "OES texture %u", sync, sync->surface->texture, sync->oes_mem->tex_id);
1130 if (sync->sink->gl_error == NULL)
1131 sync->sink->gl_error = error;
1133 g_clear_error (&error);
1136 _gl_sync_release_next_buffer (sync, TRUE);
/* Wait (bounded by @end_time; pass -1 to not wait at all) until the
 * SurfaceTexture frame-available listener advances gl_ready_frame_count past
 * this sync's frame number, or the sync's result is marked updated.
 * Caller must hold sink->gl_lock (g_cond_wait_until below relies on it).
 * NOTE(review): several lines of this function (return statements, braces)
 * are missing from this excerpt; comments describe only the visible code. */
1140 _amc_gl_possibly_wait_for_gl_sync (struct gl_sync *sync, gint64 end_time)
1142 GST_TRACE ("gl_sync %p waiting for frame %u current %u updated %u ", sync,
1143 sync->gl_frame_no, sync->sink->gl_ready_frame_count,
1144 sync->result->updated);
/* Serial-number style comparison: a later frame was already rendered, so
 * waiting for this one can never succeed — report the out-of-order wait. */
1146 if ((gint) (sync->sink->gl_last_rendered_frame - sync->gl_frame_no) > 0) {
1147 GST_ERROR ("gl_sync %p unsuccessfully waited for frame %u. out of order "
1148 "wait detected", sync, sync->gl_frame_no);
1152 /* The number of frame callbacks (gl_ready_frame_count) is not a direct
1153 * relationship with the number of pushed buffers (gl_pushed_frame_count)
1154 * or even, the number of released buffers (gl_released_frame_count)
1155 * as, from the frameworks/native/include/gui/ConsumerBase.h file,
1157 * "...frames that are queued while in asynchronous mode only trigger the
1158 * callback if no previous frames are pending."
1160 * As a result, we need to advance the ready counter somehow ourselves when
1161 * such events happen. There is no reliable way of knowing when/if the frame
1162 * listener is going to fire. The only unique identifier,
1163 * SurfaceTexture::get_timestamp seems to always return 0.
1165 * The maximum queue size as defined in
1166 * frameworks/native/include/gui/BufferQueue.h
1167 * is 32 of which a maximum of 30 can be acquired at a time so we picked a
1168 * number less than that to wait for before updating the ready frame count.
/* Sleep until the listener signals gl_cond, the result is updated, or the
 * deadline passes. */
1171 while (!sync->result->updated
1172 && (gint) (sync->sink->gl_ready_frame_count - sync->gl_frame_no) < 0) {
1173 /* The time limit is needed, otherwise when amc decides to not emit the
1174 * frame listener (say, on orientation changes) we don't wait forever */
1175 if (end_time == -1 || !g_cond_wait_until (&sync->sink->gl_cond,
1176 &sync->sink->gl_lock, end_time)) {
1177 GST_LOG ("gl_sync %p unsuccessfully waited for frame %u", sync,
1182 GST_LOG ("gl_sync %p successfully waited for frame %u", sync,
/* Drain the sink's gl_queue up to (and including) the sync behind @sync_meta:
 * release each queued buffer back to the codec, optionally wait for its
 * frame-available callback (@wait), render it, then pop and unref it.
 * Caller must hold sink->gl_lock.
 * NOTE(review): loop-exit lines and the final return are missing from this
 * excerpt; `ret` presumably reports whether all waits succeeded — confirm. */
1189 _amc_gl_iterate_queue_unlocked (GstGLSyncMeta * sync_meta, gboolean wait)
1191 struct gl_sync *sync = sync_meta->data;
1192 struct gl_sync *tmp;
1193 gboolean ret = TRUE;
1196 while ((tmp = g_queue_peek_head (sync->sink->gl_queue))) {
1197 /* skip frames that are ahead of the current wait frame */
1198 if ((gint) (sync->gl_frame_no - tmp->gl_frame_no) < 0) {
1199 GST_TRACE ("gl_sync %p frame %u is ahead of gl_sync %p frame %u", tmp,
1200 tmp->gl_frame_no, sync, sync->gl_frame_no);
1204 _gl_sync_release_buffer (tmp, wait);
1206 /* Frames are currently pushed in order and waits need to be performed
1207 * in the same order */
/* 30ms grace period after release before giving up on the callback. */
1209 end_time = wait ? 30 * G_TIME_SPAN_MILLISECOND + tmp->released_ts : -1;
1210 if (!_amc_gl_possibly_wait_for_gl_sync (tmp, end_time))
1213 _gl_sync_render_unlocked (tmp);
1215 g_queue_pop_head (tmp->sink->gl_queue);
1216 _gl_sync_unref (tmp);
1224 GstGLSyncMeta *sync_meta;
/* GL-thread half of _amc_gl_wait(): takes the sink's gl_lock and iterates the
 * pending queue with waiting enabled, storing the result in wait->ret. */
1229 _amc_gl_wait_gl (GstGLContext * context, struct gl_wait *wait)
1231 struct gl_sync *sync = wait->sync_meta->data;
1233 g_mutex_lock (&sync->sink->gl_lock);
1234 wait->ret = _amc_gl_iterate_queue_unlocked (wait->sync_meta, TRUE);
1235 g_mutex_unlock (&sync->sink->gl_lock);
/* GstGLSyncMeta wait/wait_cpu vfunc: marshals the actual wait onto the GL
 * thread via _amc_gl_wait_gl and logs a warning when it timed out. */
1239 _amc_gl_wait (GstGLSyncMeta * sync_meta, GstGLContext * context)
1241 struct gl_sync *sync = sync_meta->data;
1242 struct gl_wait wait;
1244 wait.sync_meta = sync_meta;
/* Blocks until _amc_gl_wait_gl has run on the GL thread. */
1246 gst_gl_context_thread_add (context,
1247 (GstGLContextThreadFunc) _amc_gl_wait_gl, &wait);
1250 GST_WARNING ("gl_sync %p could not wait for frame, took too long", sync);
/* GstGLSyncMeta copy vfunc: duplicates the gl_sync for @dbuffer under the
 * sink's gl_lock. The copy takes new refs on the OES memory, the surface and
 * the shared gl_sync_result, so both metas observe the same render state.
 * NOTE(review): lines attaching `tmp` to @dest are missing from this excerpt. */
1254 _amc_gl_copy (GstGLSyncMeta * src, GstBuffer * sbuffer, GstGLSyncMeta * dest,
1255 GstBuffer * dbuffer)
1257 struct gl_sync *sync = src->data;
1258 struct gl_sync *tmp;
1260 tmp = g_new0 (struct gl_sync, 1);
1262 GST_TRACE ("copying gl_sync %p to %p", sync, tmp);
1264 g_mutex_lock (&sync->sink->gl_lock);
/* Note: sink is shared without an extra ref here — TODO confirm ownership. */
1267 tmp->sink = sync->sink;
1268 tmp->buffer = dbuffer;
1269 tmp->oes_mem = (GstGLMemory *) gst_memory_ref ((GstMemory *) sync->oes_mem);
1270 tmp->surface = g_object_ref (sync->surface);
1271 tmp->gl_frame_no = sync->gl_frame_no;
1272 tmp->released_ts = sync->released_ts;
1273 tmp->result = sync->result;
1274 _gl_sync_result_ref (tmp->result);
1277 g_mutex_unlock (&sync->sink->gl_lock);
/* GL-thread helper invoked from _amc_gl_free(): flushes whatever frames are
 * still queued, without waiting for their frame-available callbacks. */
1281 _amc_gl_render_on_free (GstGLContext * context, GstGLSyncMeta * sync_meta)
1283 struct gl_sync *sync = sync_meta->data;
1285 g_mutex_lock (&sync->sink->gl_lock);
1286 /* just render as many frames as we have */
1287 _amc_gl_iterate_queue_unlocked (sync_meta, FALSE);
1288 g_mutex_unlock (&sync->sink->gl_lock);
/* GstGLSyncMeta free vfunc: first renders any still-queued frames on the GL
 * thread (see comment below for why), then drops this meta's gl_sync ref. */
1292 _amc_gl_free (GstGLSyncMeta * sync_meta, GstGLContext * context)
1294 struct gl_sync *sync = sync_meta->data;
1296 /* The wait render queue inside android is not very deep so when we drop
1297 * frames we need to signal that we have rendered them if we have any chance
1298 * of keeping up between the decoder, the android GL queue and downstream
1299 * OpenGL. If we don't do this, once we start dropping frames downstream,
1300 * it is very near to impossible for the pipeline to catch up. */
1301 gst_gl_context_thread_add (context,
1302 (GstGLContextThreadFunc) _amc_gl_render_on_free, sync_meta);
1303 _gl_sync_unref (sync);
/* Source-pad task: repeatedly dequeues decoded output from the MediaCodec and
 * pushes it downstream. Handles output-format changes, the GL (surface)
 * output path via a custom GstGLSyncMeta, the raw (no-surface) copy path,
 * QoS frame dropping, EOS/draining, and a set of goto error exits.
 * NOTE(review): this excerpt is missing many lines (declarations of idx, err,
 * buf, outbuf, is_eos; braces; several labels) — comments below describe only
 * the visible code. */
1307 gst_amc_video_dec_loop (GstAmcVideoDec * self)
1309 GstVideoCodecFrame *frame;
1310 GstFlowReturn flow_ret = GST_FLOW_OK;
1311 GstClockTimeDiff deadline;
1314 GstAmcBufferInfo buffer_info;
1317 gboolean release_buffer = TRUE;
1319 GST_VIDEO_DECODER_STREAM_LOCK (self);
1322 /*if (self->input_state_changed) {
1323 idx = INFO_OUTPUT_FORMAT_CHANGED;
1325 GST_DEBUG_OBJECT (self, "Waiting for available output buffer");
1326 GST_VIDEO_DECODER_STREAM_UNLOCK (self);
1327 /* Wait at most 100ms here, some codecs don't fail dequeueing if
1328 * the codec is flushing, causing deadlocks during shutdown */
1330 gst_amc_codec_dequeue_output_buffer (self->codec, &buffer_info, 100000,
1332 GST_VIDEO_DECODER_STREAM_LOCK (self);
1335 GST_DEBUG_OBJECT (self, "dequeueOutputBuffer() returned %d (0x%x)", idx, idx);
/* Status codes from the dequeue are dispatched below; while flushing, errors
 * are discarded silently. */
1338 if (self->flushing) {
1339 g_clear_error (&err);
1344 case INFO_OUTPUT_BUFFERS_CHANGED:
1345 /* Handled internally */
1346 g_assert_not_reached ();
1348 case INFO_OUTPUT_FORMAT_CHANGED:{
1349 GstAmcFormat *format;
1350 gchar *format_string;
1352 GST_DEBUG_OBJECT (self, "Output format has changed");
1354 format = gst_amc_codec_get_output_format (self->codec, &err);
1358 format_string = gst_amc_format_to_string (format, &err);
1360 gst_amc_format_free (format);
1363 GST_DEBUG_OBJECT (self, "Got new output format: %s", format_string);
1364 g_free (format_string);
/* Renegotiate src caps to match the codec's new output format. */
1366 if (!gst_amc_video_dec_set_src_caps (self, format)) {
1367 gst_amc_format_free (format);
1370 gst_amc_format_free (format);
1374 case INFO_TRY_AGAIN_LATER:
1375 GST_DEBUG_OBJECT (self, "Dequeueing output buffer timed out");
1378 GST_ERROR_OBJECT (self, "Failure dequeueing output buffer");
1381 g_assert_not_reached ();
1388 GST_DEBUG_OBJECT (self,
1389 "Got output buffer at index %d: offset %d size %d time %" G_GINT64_FORMAT
1390 " flags 0x%08x", idx, buffer_info.offset, buffer_info.size,
1391 buffer_info.presentation_time_us, buffer_info.flags);
1393 buf = gst_amc_codec_get_output_buffer (self->codec, idx, &err);
1395 if (self->flushing) {
1396 g_clear_error (&err);
1399 goto failed_to_get_output_buffer;
/* With surface output, a NULL Java-side buffer is expected and tolerated. */
1402 if (self->codec_config != AMC_CODEC_CONFIG_WITH_SURFACE && !buf)
1403 goto got_null_output_buffer;
/* Match this output buffer to a pending GstVideoCodecFrame by timestamp. */
1406 _find_nearest_frame (self,
1407 gst_util_uint64_scale (buffer_info.presentation_time_us, GST_USECOND, 1));
1409 is_eos = ! !(buffer_info.flags & BUFFER_FLAG_END_OF_STREAM);
/* QoS: drop the frame if it is already past its decode deadline. */
1413 gst_video_decoder_get_max_decode_time (GST_VIDEO_DECODER (self),
1415 GST_WARNING_OBJECT (self,
1416 "Frame is too late, dropping (deadline %" GST_STIME_FORMAT ")",
1417 GST_STIME_ARGS (deadline));
1418 flow_ret = gst_video_decoder_drop_frame (GST_VIDEO_DECODER (self), frame);
/* GL path: wrap the shared external-OES texture in an output buffer carrying
 * a custom GstGLSyncMeta; release/render are deferred to the GL thread. */
1419 } else if (frame && self->codec_config == AMC_CODEC_CONFIG_WITH_SURFACE) {
1421 GstGLSyncMeta *sync_meta;
1422 GstVideoCodecState *state;
1423 struct gl_sync *sync;
1424 gboolean first_buffer = FALSE;
1426 g_mutex_lock (&self->gl_lock);
1427 if (self->gl_error) {
1428 GST_ELEMENT_ERROR_FROM_ERROR (self, self->gl_error);
1429 g_mutex_unlock (&self->gl_lock);
1430 goto gl_output_error;
1432 g_mutex_unlock (&self->gl_lock);
1434 outbuf = gst_buffer_new ();
1436 state = gst_video_decoder_get_output_state (GST_VIDEO_DECODER (self));
/* Lazily allocate the single OES GstGLMemory shared by all output buffers
 * and attach it to the decoder's SurfaceTexture on the GL thread. */
1438 if (!self->oes_mem) {
1439 GstGLBaseMemoryAllocator *base_mem_alloc;
1440 GstGLVideoAllocationParams *params;
1443 GST_GL_BASE_MEMORY_ALLOCATOR (gst_allocator_find
1444 (GST_GL_MEMORY_ALLOCATOR_NAME));
1446 params = gst_gl_video_allocation_params_new (self->gl_context, NULL,
1447 &state->info, 0, NULL, GST_GL_TEXTURE_TARGET_EXTERNAL_OES,
1450 self->oes_mem = (GstGLMemory *) gst_gl_base_memory_alloc (base_mem_alloc,
1451 (GstGLAllocationParams *) params);
1452 gst_gl_allocation_params_free ((GstGLAllocationParams *) params);
1453 gst_object_unref (base_mem_alloc);
1455 gst_gl_context_thread_add (self->gl_context,
1456 (GstGLContextThreadFunc) _attach_mem_to_context, self);
1458 first_buffer = TRUE;
1461 gst_video_codec_state_unref (state);
1463 gst_buffer_append_memory (outbuf,
1464 gst_memory_ref ((GstMemory *) self->oes_mem));
/* Build the gl_sync bookkeeping object for this output buffer. */
1466 sync = g_new0 (struct gl_sync, 1);
1468 sync->sink = g_object_ref (self);
1469 sync->buffer = outbuf;
1470 sync->surface = g_object_ref (self->surface);
1472 (GstGLMemory *) gst_memory_ref ((GstMemory *) self->oes_mem);
1473 sync->buffer_idx = idx;
1474 sync->result = g_new0 (struct gl_sync_result, 1);
1475 sync->result->refcount = 1;
1476 sync->result->updated = FALSE;
1478 GST_TRACE ("new gl_sync %p result %p", sync, sync->result);
/* Install the custom sync-meta vfuncs defined earlier in this file. */
1480 sync_meta = gst_buffer_add_gl_sync_meta_full (self->gl_context, outbuf,
1482 sync_meta->set_sync = _amc_gl_set_sync;
1483 sync_meta->wait = _amc_gl_wait;
1484 sync_meta->wait_cpu = _amc_gl_wait;
1485 sync_meta->copy = _amc_gl_copy;
1486 sync_meta->free = _amc_gl_free;
1488 /* The meta needs to be created now:
1489 * Later (in _gl_sync_render_unlocked) the buffer will be locked.
1491 gst_buffer_add_video_affine_transformation_meta (outbuf);
1493 g_mutex_lock (&self->gl_lock);
/* Assign a monotonically increasing frame number and queue the sync. */
1495 self->gl_pushed_frame_count++;
1496 sync->gl_frame_no = self->gl_pushed_frame_count;
1497 g_queue_push_tail (self->gl_queue, _gl_sync_ref (sync));
1500 _gl_sync_release_buffer (sync, TRUE);
1501 if (self->gl_error) {
1502 gst_buffer_unref (outbuf);
1503 g_mutex_unlock (&self->gl_lock);
1504 goto gl_output_error;
1507 g_mutex_unlock (&self->gl_lock);
1509 GST_DEBUG_OBJECT (self, "push GL frame %u", sync->gl_frame_no);
1510 frame->output_buffer = outbuf;
1511 flow_ret = gst_video_decoder_finish_frame (GST_VIDEO_DECODER (self), frame);
/* The GL path released the codec buffer itself; skip the common release. */
1513 release_buffer = FALSE;
/* Raw path, no matching frame: allocate a buffer ourselves and push it. */
1514 } else if (self->codec_config == AMC_CODEC_CONFIG_WITHOUT_SURFACE && !frame
1515 && buffer_info.size > 0) {
1518 /* This sometimes happens at EOS or if the input is not properly framed,
1519 * let's handle it gracefully by allocating a new buffer for the current
1520 * caps and filling it
1522 GST_ERROR_OBJECT (self, "No corresponding frame found");
1525 gst_video_decoder_allocate_output_buffer (GST_VIDEO_DECODER (self));
1527 if (!gst_amc_video_dec_fill_buffer (self, buf, &buffer_info, outbuf)) {
1528 gst_buffer_unref (outbuf);
1529 if (!gst_amc_codec_release_output_buffer (self->codec, idx, FALSE, &err))
1530 GST_ERROR_OBJECT (self, "Failed to release output buffer index %d",
1532 if (err && !self->flushing)
1533 GST_ELEMENT_WARNING_FROM_ERROR (self, err);
1534 g_clear_error (&err);
1535 gst_amc_buffer_free (buf);
1537 goto invalid_buffer;
1540 GST_BUFFER_PTS (outbuf) =
1541 gst_util_uint64_scale (buffer_info.presentation_time_us, GST_USECOND,
1543 flow_ret = gst_pad_push (GST_VIDEO_DECODER_SRC_PAD (self), outbuf);
/* Raw path with matching frame: fill the frame's output buffer and finish. */
1544 } else if (self->codec_config == AMC_CODEC_CONFIG_WITHOUT_SURFACE && frame
1545 && buffer_info.size > 0) {
1547 gst_video_decoder_allocate_output_frame (GST_VIDEO_DECODER (self),
1548 frame)) != GST_FLOW_OK) {
1549 GST_ERROR_OBJECT (self, "Failed to allocate buffer");
1550 if (!gst_amc_codec_release_output_buffer (self->codec, idx, FALSE, &err))
1551 GST_ERROR_OBJECT (self, "Failed to release output buffer index %d",
1553 if (err && !self->flushing)
1554 GST_ELEMENT_WARNING_FROM_ERROR (self, err);
1555 g_clear_error (&err);
1556 gst_amc_buffer_free (buf);
1561 if (!gst_amc_video_dec_fill_buffer (self, buf, &buffer_info,
1562 frame->output_buffer)) {
1563 gst_buffer_replace (&frame->output_buffer, NULL);
1564 gst_video_decoder_drop_frame (GST_VIDEO_DECODER (self), frame);
1565 if (!gst_amc_codec_release_output_buffer (self->codec, idx, FALSE, &err))
1566 GST_ERROR_OBJECT (self, "Failed to release output buffer index %d",
1568 if (err && !self->flushing)
1569 GST_ELEMENT_WARNING_FROM_ERROR (self, err);
1570 g_clear_error (&err);
1571 gst_amc_buffer_free (buf);
1573 goto invalid_buffer;
1576 flow_ret = gst_video_decoder_finish_frame (GST_VIDEO_DECODER (self), frame);
/* No payload for this frame (e.g. size == 0): just drop it. */
1577 } else if (frame != NULL) {
1578 flow_ret = gst_video_decoder_drop_frame (GST_VIDEO_DECODER (self), frame);
1582 gst_amc_buffer_free (buf);
/* Return the codec buffer unless the GL path already took ownership. */
1586 if (release_buffer) {
1587 if (!gst_amc_codec_release_output_buffer (self->codec, idx, FALSE, &err)) {
1588 if (self->flushing) {
1589 g_clear_error (&err);
1592 goto failed_release;
/* EOS / drain handling: wake up anyone waiting in _drain(). */
1596 if (is_eos || flow_ret == GST_FLOW_EOS) {
1597 GST_VIDEO_DECODER_STREAM_UNLOCK (self);
1598 g_mutex_lock (&self->drain_lock);
1599 if (self->draining) {
1600 GST_DEBUG_OBJECT (self, "Drained");
1601 self->draining = FALSE;
1602 g_cond_broadcast (&self->drain_cond);
1603 } else if (flow_ret == GST_FLOW_OK) {
1604 GST_DEBUG_OBJECT (self, "Component signalled EOS");
1605 flow_ret = GST_FLOW_EOS;
1607 g_mutex_unlock (&self->drain_lock);
1608 GST_VIDEO_DECODER_STREAM_LOCK (self);
1610 GST_DEBUG_OBJECT (self, "Finished frame: %s", gst_flow_get_name (flow_ret));
1613 self->downstream_flow_ret = flow_ret;
1615 if (flow_ret != GST_FLOW_OK)
1618 GST_VIDEO_DECODER_STREAM_UNLOCK (self);
/* Error exit paths. Each posts an error/EOS, pauses the task, records the
 * failure in downstream_flow_ret and unblocks any pending drain. Some labels
 * are on lines missing from this excerpt. */
1624 GST_ELEMENT_ERROR_FROM_ERROR (self, err);
1625 gst_pad_push_event (GST_VIDEO_DECODER_SRC_PAD (self), gst_event_new_eos ());
1626 gst_pad_pause_task (GST_VIDEO_DECODER_SRC_PAD (self));
1627 self->downstream_flow_ret = GST_FLOW_ERROR;
1628 GST_VIDEO_DECODER_STREAM_UNLOCK (self);
1629 g_mutex_lock (&self->drain_lock);
1630 self->draining = FALSE;
1631 g_cond_broadcast (&self->drain_cond);
1632 g_mutex_unlock (&self->drain_lock);
1639 GST_ELEMENT_ERROR_FROM_ERROR (self, err);
1641 GST_ELEMENT_ERROR (self, LIBRARY, FAILED, (NULL),
1642 ("Failed to handle format"));
1643 gst_pad_push_event (GST_VIDEO_DECODER_SRC_PAD (self), gst_event_new_eos ());
1644 gst_pad_pause_task (GST_VIDEO_DECODER_SRC_PAD (self));
1645 self->downstream_flow_ret = GST_FLOW_ERROR;
1646 GST_VIDEO_DECODER_STREAM_UNLOCK (self);
1647 g_mutex_lock (&self->drain_lock);
1648 self->draining = FALSE;
1649 g_cond_broadcast (&self->drain_cond);
1650 g_mutex_unlock (&self->drain_lock);
1655 GST_VIDEO_DECODER_ERROR_FROM_ERROR (self, err);
1656 gst_pad_push_event (GST_VIDEO_DECODER_SRC_PAD (self), gst_event_new_eos ());
1657 gst_pad_pause_task (GST_VIDEO_DECODER_SRC_PAD (self));
1658 self->downstream_flow_ret = GST_FLOW_ERROR;
1659 GST_VIDEO_DECODER_STREAM_UNLOCK (self);
1660 g_mutex_lock (&self->drain_lock);
1661 self->draining = FALSE;
1662 g_cond_broadcast (&self->drain_cond);
1663 g_mutex_unlock (&self->drain_lock);
1668 GST_DEBUG_OBJECT (self, "Flushing -- stopping task");
1669 gst_pad_pause_task (GST_VIDEO_DECODER_SRC_PAD (self));
1670 self->downstream_flow_ret = GST_FLOW_FLUSHING;
1671 GST_VIDEO_DECODER_STREAM_UNLOCK (self);
/* Non-OK flow from downstream: translate into EOS/flushing/error handling. */
1677 if (flow_ret == GST_FLOW_EOS) {
1678 GST_DEBUG_OBJECT (self, "EOS");
1679 gst_pad_push_event (GST_VIDEO_DECODER_SRC_PAD (self),
1680 gst_event_new_eos ());
1681 gst_pad_pause_task (GST_VIDEO_DECODER_SRC_PAD (self));
1682 } else if (flow_ret < GST_FLOW_EOS) {
1683 GST_ELEMENT_FLOW_ERROR (self, flow_ret);
1684 gst_pad_push_event (GST_VIDEO_DECODER_SRC_PAD (self),
1685 gst_event_new_eos ());
1686 gst_pad_pause_task (GST_VIDEO_DECODER_SRC_PAD (self));
1687 } else if (flow_ret == GST_FLOW_FLUSHING) {
1688 GST_DEBUG_OBJECT (self, "Flushing -- stopping task");
1689 gst_pad_pause_task (GST_VIDEO_DECODER_SRC_PAD (self));
1691 GST_VIDEO_DECODER_STREAM_UNLOCK (self);
1692 g_mutex_lock (&self->drain_lock);
1693 self->draining = FALSE;
1694 g_cond_broadcast (&self->drain_cond);
1695 g_mutex_unlock (&self->drain_lock);
1699 failed_to_get_output_buffer:
1701 GST_VIDEO_DECODER_ERROR_FROM_ERROR (self, err);
1702 gst_pad_push_event (GST_VIDEO_DECODER_SRC_PAD (self), gst_event_new_eos ());
1703 gst_pad_pause_task (GST_VIDEO_DECODER_SRC_PAD (self));
1704 self->downstream_flow_ret = GST_FLOW_ERROR;
1705 GST_VIDEO_DECODER_STREAM_UNLOCK (self);
1706 g_mutex_lock (&self->drain_lock);
1707 self->draining = FALSE;
1708 g_cond_broadcast (&self->drain_cond);
1709 g_mutex_unlock (&self->drain_lock);
1713 got_null_output_buffer:
1715 GST_ELEMENT_ERROR (self, LIBRARY, SETTINGS, (NULL),
1716 ("Got no output buffer"));
1717 gst_pad_push_event (GST_VIDEO_DECODER_SRC_PAD (self), gst_event_new_eos ());
1718 gst_pad_pause_task (GST_VIDEO_DECODER_SRC_PAD (self));
1719 self->downstream_flow_ret = GST_FLOW_ERROR;
1720 GST_VIDEO_DECODER_STREAM_UNLOCK (self);
1721 g_mutex_lock (&self->drain_lock);
1722 self->draining = FALSE;
1723 g_cond_broadcast (&self->drain_cond);
1724 g_mutex_unlock (&self->drain_lock);
1730 GST_ELEMENT_ERROR (self, LIBRARY, SETTINGS, (NULL),
1731 ("Invalid sized input buffer"));
1732 gst_pad_push_event (GST_VIDEO_DECODER_SRC_PAD (self), gst_event_new_eos ());
1733 gst_pad_pause_task (GST_VIDEO_DECODER_SRC_PAD (self));
1734 self->downstream_flow_ret = GST_FLOW_NOT_NEGOTIATED;
1735 GST_VIDEO_DECODER_STREAM_UNLOCK (self);
1736 g_mutex_lock (&self->drain_lock);
1737 self->draining = FALSE;
1738 g_cond_broadcast (&self->drain_cond);
1739 g_mutex_unlock (&self->drain_lock);
1745 gst_amc_buffer_free (buf);
1748 gst_pad_push_event (GST_VIDEO_DECODER_SRC_PAD (self), gst_event_new_eos ());
1749 gst_pad_pause_task (GST_VIDEO_DECODER_SRC_PAD (self));
1750 self->downstream_flow_ret = GST_FLOW_NOT_NEGOTIATED;
1751 GST_VIDEO_DECODER_STREAM_UNLOCK (self);
1752 g_mutex_lock (&self->drain_lock);
1753 self->draining = FALSE;
1754 g_cond_broadcast (&self->drain_cond);
1755 g_mutex_unlock (&self->drain_lock);
/* GstVideoDecoder start vfunc: resets per-session state. Note the codec is
 * not configured here — that happens in set_format; flushing stays TRUE until
 * set_format starts the srcpad task. */
1761 gst_amc_video_dec_start (GstVideoDecoder * decoder)
1763 GstAmcVideoDec *self;
1765 self = GST_AMC_VIDEO_DEC (decoder);
1766 self->last_upstream_ts = 0;
1767 self->drained = TRUE;
1768 self->downstream_flow_ret = GST_FLOW_OK;
1769 self->started = FALSE;
1770 self->flushing = TRUE;
/* GstVideoDecoder stop vfunc: flushes and stops the MediaCodec if it was
 * started, stops the srcpad task, unblocks any pending drain, and frees the
 * cached codec_data and input state. Codec-level errors here are only
 * warnings since we are shutting down anyway. */
1776 gst_amc_video_dec_stop (GstVideoDecoder * decoder)
1778 GstAmcVideoDec *self;
1781 self = GST_AMC_VIDEO_DEC (decoder);
1782 GST_DEBUG_OBJECT (self, "Stopping decoder");
1783 self->flushing = TRUE;
1784 if (self->started) {
1785 gst_amc_codec_flush (self->codec, &err);
1787 GST_ELEMENT_WARNING_FROM_ERROR (self, err);
1788 gst_amc_codec_stop (self->codec, &err);
1790 GST_ELEMENT_WARNING_FROM_ERROR (self, err);
1791 self->started = FALSE;
1793 gst_pad_stop_task (GST_VIDEO_DECODER_SRC_PAD (decoder));
1795 self->downstream_flow_ret = GST_FLOW_FLUSHING;
1796 self->drained = TRUE;
/* Wake anyone blocked in _drain() waiting on drain_cond. */
1797 g_mutex_lock (&self->drain_lock);
1798 self->draining = FALSE;
1799 g_cond_broadcast (&self->drain_cond);
1800 g_mutex_unlock (&self->drain_lock);
1801 g_free (self->codec_data);
1802 self->codec_data_size = 0;
1803 if (self->input_state)
1804 gst_video_codec_state_unref (self->input_state);
1805 self->input_state = NULL;
1806 GST_DEBUG_OBJECT (self, "Stopped decoder");
/* Creates a Java GstAmcOnFrameAvailableListener instance bound to @decoder:
 * registers the native_onFrameAvailable callback on the listener class,
 * instantiates it, and stores the decoder pointer in the object via its
 * setContext(long) method. Returns the new listener (global ref) or NULL on
 * error (reported through @err).
 * NOTE(review): several return/cleanup lines are missing from this excerpt. */
1811 gst_amc_video_dec_new_on_frame_available_listener (GstAmcVideoDec * decoder,
1812 JNIEnv * env, GError ** err)
1814 jobject listener = NULL;
1815 jclass listener_cls = NULL;
1816 jmethodID constructor_id = 0;
/* Native method table bridging SurfaceTexture callbacks into C. */
1818 JNINativeMethod amcOnFrameAvailableListener = {
1819 "native_onFrameAvailable",
1820 "(JLandroid/graphics/SurfaceTexture;)V",
1821 (void *) gst_amc_video_dec_on_frame_available,
1825 gst_amc_jni_get_application_class (env,
1826 "org/freedesktop/gstreamer/androidmedia/GstAmcOnFrameAvailableListener",
1828 if (!listener_cls) {
1832 (*env)->RegisterNatives (env, listener_cls, &amcOnFrameAvailableListener, 1);
1833 if ((*env)->ExceptionCheck (env)) {
1834 (*env)->ExceptionClear (env);
1839 gst_amc_jni_get_method_id (env, err, listener_cls, "<init>", "()V");
1840 if (!constructor_id) {
1844 decoder->set_context_id =
1845 gst_amc_jni_get_method_id (env, err, listener_cls, "setContext", "(J)V");
1846 if (!decoder->set_context_id) {
1851 gst_amc_jni_new_object (env, err, TRUE, listener_cls, constructor_id);
/* Hand the decoder pointer to the Java listener so callbacks find us. */
1856 if (!gst_amc_jni_call_void_method (env, err, listener,
1857 decoder->set_context_id, GST_AMC_VIDEO_DEC_TO_JLONG (decoder))) {
1858 gst_amc_jni_object_unref (env, listener);
1863 gst_amc_jni_object_unref (env, listener_cls);
/* GstVideoDecoder set_format vfunc: decides whether the new caps are a real
 * format change, restarts the codec if necessary, probes downstream for
 * GL-memory (RGBA) support, optionally creates the SurfaceTexture + listener
 * for surface output, then configures and starts the MediaCodec and the
 * srcpad task.
 * NOTE(review): this excerpt is missing many lines (declarations of mime/err/
 * i/n, braces, return statements) — comments describe only the visible code. */
1869 gst_amc_video_dec_set_format (GstVideoDecoder * decoder,
1870 GstVideoCodecState * state)
1872 GstAmcVideoDec *self;
1873 GstAmcVideoDecClass *klass;
1874 GstAmcFormat *format;
1876 gboolean is_format_change = FALSE;
1877 gboolean needs_disable = FALSE;
1878 gchar *format_string;
1879 guint8 *codec_data = NULL;
1880 gsize codec_data_size = 0;
1882 jobject jsurface = NULL;
1884 self = GST_AMC_VIDEO_DEC (decoder);
1885 klass = GST_AMC_VIDEO_DEC_GET_CLASS (self);
1887 GST_DEBUG_OBJECT (self, "Setting new caps %" GST_PTR_FORMAT, state->caps);
1889 /* Check if the caps change is a real format change or if only irrelevant
1890 * parts of the caps have changed or nothing at all.
1892 is_format_change |= self->color_format_info.width != state->info.width;
1893 is_format_change |= self->color_format_info.height != state->info.height;
/* codec_data (e.g. SPS/PPS) differences also count as a format change. */
1894 if (state->codec_data) {
1897 gst_buffer_map (state->codec_data, &cminfo, GST_MAP_READ);
1898 codec_data = g_memdup (cminfo.data, cminfo.size);
1899 codec_data_size = cminfo.size;
1901 is_format_change |= (!self->codec_data
1902 || self->codec_data_size != codec_data_size
1903 || memcmp (self->codec_data, codec_data, codec_data_size) != 0);
1904 gst_buffer_unmap (state->codec_data, &cminfo);
1905 } else if (self->codec_data) {
1906 is_format_change |= TRUE;
1909 needs_disable = self->started;
1911 /* If the component is not started and a real format change happens
1912 * we have to restart the component. If no real format change
1913 * happened we can just exit here.
1915 if (needs_disable && !is_format_change) {
1916 g_free (codec_data);
1918 codec_data_size = 0;
1920 /* Framerate or something minor changed */
1921 self->input_state_changed = TRUE;
1922 if (self->input_state)
1923 gst_video_codec_state_unref (self->input_state);
1924 self->input_state = gst_video_codec_state_ref (state);
1925 GST_DEBUG_OBJECT (self,
1926 "Already running and caps did not change the format");
/* Real format change while running: drain, stop, close and reopen codec. */
1930 if (needs_disable && is_format_change) {
1931 gst_amc_video_dec_drain (self);
1932 GST_VIDEO_DECODER_STREAM_UNLOCK (self);
1933 gst_amc_video_dec_stop (GST_VIDEO_DECODER (self));
1934 GST_VIDEO_DECODER_STREAM_LOCK (self);
1935 gst_amc_video_dec_close (GST_VIDEO_DECODER (self));
1936 if (!gst_amc_video_dec_open (GST_VIDEO_DECODER (self))) {
1937 GST_ERROR_OBJECT (self, "Failed to open codec again");
1941 if (!gst_amc_video_dec_start (GST_VIDEO_DECODER (self))) {
1942 GST_ERROR_OBJECT (self, "Failed to start codec again");
1945 /* srcpad task is not running at this point */
1946 if (self->input_state)
1947 gst_video_codec_state_unref (self->input_state);
1948 self->input_state = NULL;
/* Take ownership of the freshly duplicated codec_data. */
1950 g_free (self->codec_data);
1951 self->codec_data = codec_data;
1952 self->codec_data_size = codec_data_size;
1954 mime = caps_to_mime (state->caps);
1956 GST_ERROR_OBJECT (self, "Failed to convert caps to mime");
1961 gst_amc_format_new_video (mime, state->info.width, state->info.height,
1964 GST_ERROR_OBJECT (self, "Failed to create video format");
1965 GST_ELEMENT_ERROR_FROM_ERROR (self, err);
1969 /* FIXME: This buffer needs to be valid until the codec is stopped again */
1970 if (self->codec_data) {
1971 gst_amc_format_set_buffer (format, "csd-0", self->codec_data,
1972 self->codec_data_size, &err);
1974 GST_ELEMENT_WARNING_FROM_ERROR (self, err);
/* Probe downstream caps for video/x-raw(memory:GLMemory),format=RGBA to
 * decide between surface (GL) and raw output. */
1978 gboolean downstream_supports_gl = FALSE;
1979 GstVideoDecoder *decoder = GST_VIDEO_DECODER (self);
1980 GstPad *src_pad = GST_VIDEO_DECODER_SRC_PAD (decoder);
1981 GstCaps *templ_caps = gst_pad_get_pad_template_caps (src_pad);
1982 GstCaps *downstream_caps = gst_pad_peer_query_caps (src_pad, templ_caps);
1984 gst_caps_unref (templ_caps);
1986 if (downstream_caps) {
1988 GstStaticCaps static_caps =
1989 GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE_WITH_FEATURES
1990 (GST_CAPS_FEATURE_MEMORY_GL_MEMORY, "RGBA"));
1991 GstCaps *gl_memory_caps = gst_static_caps_get (&static_caps);
1993 GST_DEBUG_OBJECT (self, "Available downstream caps: %" GST_PTR_FORMAT,
1996 /* Check if downstream caps supports
1997 * video/x-raw(memory:GLMemory),format=RGBA */
1998 n = gst_caps_get_size (downstream_caps);
1999 for (i = 0; i < n; i++) {
2000 GstCaps *caps = NULL;
2001 GstStructure *structure = gst_caps_get_structure (downstream_caps, i);
2002 GstCapsFeatures *features = gst_caps_get_features (downstream_caps, i);
2004 caps = gst_caps_new_full (gst_structure_copy (structure), NULL);
2008 gst_caps_set_features (caps, 0, gst_caps_features_copy (features));
2010 if (gst_caps_can_intersect (caps, gl_memory_caps)) {
2011 downstream_supports_gl = TRUE;
2014 gst_caps_unref (caps);
2015 if (downstream_supports_gl)
2019 gst_caps_unref (gl_memory_caps);
2021 /* If video/x-raw(memory:GLMemory),format=RGBA is supported,
2022 * update the video decoder output state accordingly and negotiate */
2023 if (downstream_supports_gl) {
2024 GstVideoCodecState *output_state = NULL;
2025 GstVideoCodecState *prev_output_state = NULL;
2027 prev_output_state = gst_video_decoder_get_output_state (decoder);
2030 gst_video_decoder_set_output_state (decoder, GST_VIDEO_FORMAT_RGBA,
2031 state->info.width, state->info.height, state);
2033 if (output_state->caps) {
2034 gst_caps_unref (output_state->caps);
2037 output_state->caps = gst_video_info_to_caps (&output_state->info);
2038 gst_caps_set_features (output_state->caps, 0,
2039 gst_caps_features_new (GST_CAPS_FEATURE_MEMORY_GL_MEMORY, NULL));
2041 /* gst_amc_video_dec_decide_allocation will update
2042 * self->downstream_supports_gl */
2043 if (!gst_video_decoder_negotiate (decoder)) {
2044 GST_ERROR_OBJECT (self, "Failed to negotiate");
2046 /* Rollback output state changes */
2047 if (prev_output_state) {
2048 output_state->info = prev_output_state->info;
2049 gst_caps_replace (&output_state->caps, prev_output_state->caps);
2051 gst_video_info_init (&output_state->info);
2052 gst_caps_replace (&output_state->caps, NULL);
2055 if (prev_output_state) {
2056 gst_video_codec_state_unref (prev_output_state);
2060 gst_caps_unref (downstream_caps);
2064 GST_INFO_OBJECT (self, "GL output: %s",
2065 self->downstream_supports_gl ? "enabled" : "disabled");
/* Some codecs can only output to a surface; fail early if downstream
 * cannot take GL memory in that case. */
2067 if (klass->codec_info->gl_output_only && !self->downstream_supports_gl) {
2068 GST_ERROR_OBJECT (self,
2069 "Codec only supports GL output but downstream does not");
2073 if (self->downstream_supports_gl && self->surface) {
2074 jsurface = self->surface->jobject;
/* GL output requested but no surface yet: create a SurfaceTexture and
 * (re)attach the frame-available listener pointing at this decoder. */
2075 } else if (self->downstream_supports_gl && !self->surface) {
2078 GstAmcSurfaceTexture *surface_texture = NULL;
2080 env = gst_amc_jni_get_env ();
2081 surface_texture = gst_amc_surface_texture_new (&err);
2082 if (!surface_texture) {
2083 GST_ELEMENT_ERROR_FROM_ERROR (self, err);
/* Detach a previous listener from its old decoder context first. */
2087 if (self->listener) {
2088 if (!gst_amc_jni_call_void_method (env, &err, self->listener,
2089 self->set_context_id, GST_AMC_VIDEO_DEC_TO_JLONG (NULL))) {
2094 gst_amc_jni_object_unref (env, self->listener);
2097 gst_amc_video_dec_new_on_frame_available_listener (self, env, &err);
2098 if (!self->listener) {
2103 if (!gst_amc_surface_texture_set_on_frame_available_listener
2104 (surface_texture, self->listener, &err)) {
2109 self->surface = gst_amc_surface_new (surface_texture, &err);
2110 jsurface = self->surface->jobject;
2113 g_object_unref (surface_texture);
2115 GST_ELEMENT_ERROR_FROM_ERROR (self, err);
2120 format_string = gst_amc_format_to_string (format, &err);
2122 GST_ELEMENT_WARNING_FROM_ERROR (self, err);
2123 GST_DEBUG_OBJECT (self, "Configuring codec with format: %s",
2124 GST_STR_NULL (format_string));
2125 g_free (format_string);
/* Configure with the surface (or NULL for raw byte-buffer output). */
2127 if (!gst_amc_codec_configure (self->codec, format, jsurface, 0, &err)) {
2128 GST_ERROR_OBJECT (self, "Failed to configure codec");
2129 GST_ELEMENT_ERROR_FROM_ERROR (self, err);
2133 self->codec_config = AMC_CODEC_CONFIG_WITH_SURFACE;
2135 self->codec_config = AMC_CODEC_CONFIG_WITHOUT_SURFACE;
2138 gst_amc_format_free (format);
2140 if (!gst_amc_codec_start (self->codec, &err)) {
2141 GST_ERROR_OBJECT (self, "Failed to start codec");
2142 GST_ELEMENT_ERROR_FROM_ERROR (self, err);
2146 self->started = TRUE;
2147 self->input_state = gst_video_codec_state_ref (state);
2148 self->input_state_changed = TRUE;
2150 /* Start the srcpad loop again */
2151 self->flushing = FALSE;
2152 self->downstream_flow_ret = GST_FLOW_OK;
2153 gst_pad_start_task (GST_VIDEO_DECODER_SRC_PAD (self),
2154 (GstTaskFunction) gst_amc_video_dec_loop, decoder, NULL);
/* GstVideoDecoder flush vfunc: signals the srcpad loop to stop via
 * self->flushing, waits for it to exit by cycling the pad stream lock,
 * flushes the MediaCodec, resets session state and restarts the loop task. */
2160 gst_amc_video_dec_flush (GstVideoDecoder * decoder)
2162 GstAmcVideoDec *self;
2165 self = GST_AMC_VIDEO_DEC (decoder);
2167 GST_DEBUG_OBJECT (self, "Flushing decoder");
/* Nothing to flush if the codec was never configured/started. */
2169 if (!self->started) {
2170 GST_DEBUG_OBJECT (self, "Codec not started yet");
2174 self->flushing = TRUE;
2175 /* Wait until the srcpad loop is finished,
2176 * unlock GST_VIDEO_DECODER_STREAM_LOCK to prevent deadlocks
2177 * caused by using this lock from inside the loop function */
2178 GST_VIDEO_DECODER_STREAM_UNLOCK (self);
2179 GST_PAD_STREAM_LOCK (GST_VIDEO_DECODER_SRC_PAD (self));
2180 GST_PAD_STREAM_UNLOCK (GST_VIDEO_DECODER_SRC_PAD (self));
2181 GST_VIDEO_DECODER_STREAM_LOCK (self);
2182 gst_amc_codec_flush (self->codec, &err);
2184 GST_ELEMENT_WARNING_FROM_ERROR (self, err);
2185 self->flushing = FALSE;
2187 /* Start the srcpad loop again */
2188 self->last_upstream_ts = 0;
2189 self->drained = TRUE;
2190 self->downstream_flow_ret = GST_FLOW_OK;
2191 gst_pad_start_task (GST_VIDEO_DECODER_SRC_PAD (self),
2192 (GstTaskFunction) gst_amc_video_dec_loop, decoder, NULL);
2194 GST_DEBUG_OBJECT (self, "Flushed decoder");
2199 static GstFlowReturn
/* gst_amc_video_dec_handle_frame():
 * Submit one encoded frame to the Android MediaCodec input side.
 *
 * Called by the GstVideoDecoder base class with the stream lock held.
 * The mapped input buffer is copied into codec input buffers in as many
 * chunks as MediaCodec hands out; timestamps are interpolated across
 * chunks.  On success returns the last flow return observed from the
 * output loop (self->downstream_flow_ret).
 *
 * NOTE(review): this extraction is missing source lines (the local
 * declarations of idx/offset/buf/err/minfo, several braces and goto
 * labels).  Only comments were added here; the surviving code is
 * byte-identical.
 */
2200 gst_amc_video_dec_handle_frame (GstVideoDecoder * decoder,
2201 GstVideoCodecFrame * frame)
2203 GstAmcVideoDec *self;
2206 GstAmcBufferInfo buffer_info;
2208 GstClockTime timestamp, duration, timestamp_offset = 0;
/* Zero the map info so an early error path can unmap safely. */
2212 memset (&minfo, 0, sizeof (minfo));
2214 self = GST_AMC_VIDEO_DEC (decoder);
2216 GST_DEBUG_OBJECT (self, "Handling frame");
/* Frames arriving before set_format()/start succeed cannot be decoded. */
2218 if (!self->started) {
2219 GST_ERROR_OBJECT (self, "Codec not started yet");
2220 gst_video_codec_frame_unref (frame);
2221 return GST_FLOW_NOT_NEGOTIATED;
/* Propagate a pending error/EOS from the output loop immediately. */
2227 if (self->downstream_flow_ret != GST_FLOW_OK)
2228 goto downstream_error;
2230 timestamp = frame->pts;
2231 duration = frame->duration;
2233 gst_buffer_map (frame->input_buffer, &minfo, GST_MAP_READ);
/* Feed the whole mapped input, possibly across several codec buffers. */
2235 while (offset < minfo.size) {
2236 /* Make sure to release the base class stream lock, otherwise
2237 * _loop() can't call _finish_frame() and we might block forever
2238 * because no input buffers are released */
2239 GST_VIDEO_DECODER_STREAM_UNLOCK (self);
2240 /* Wait at most 100ms here, some codecs don't fail dequeueing if
2241 * the codec is flushing, causing deadlocks during shutdown */
2242 idx = gst_amc_codec_dequeue_input_buffer (self->codec, 100000, &err);
2243 GST_VIDEO_DECODER_STREAM_LOCK (self);
/* While unlocked, a flush may have started; bail out quietly. */
2246 if (self->flushing || self->downstream_flow_ret == GST_FLOW_FLUSHING) {
2247 g_clear_error (&err);
/* Negative idx values are MediaCodec status codes, not buffer indices. */
2252 case INFO_TRY_AGAIN_LATER:
2253 GST_DEBUG_OBJECT (self, "Dequeueing input buffer timed out");
2254 continue; /* next try */
2257 GST_ERROR_OBJECT (self, "Failed to dequeue input buffer");
2260 g_assert_not_reached ();
/* Got a buffer but we are flushing: return it empty and give up. */
2267 if (self->flushing) {
2268 memset (&buffer_info, 0, sizeof (buffer_info));
2269 gst_amc_codec_queue_input_buffer (self->codec, idx, &buffer_info, NULL);
/* Got a buffer but downstream already failed: return it, report error. */
2273 if (self->downstream_flow_ret != GST_FLOW_OK) {
2274 memset (&buffer_info, 0, sizeof (buffer_info));
2275 gst_amc_codec_queue_input_buffer (self->codec, idx, &buffer_info, &err);
2276 if (err && !self->flushing)
2277 GST_ELEMENT_WARNING_FROM_ERROR (self, err);
2278 g_clear_error (&err);
2279 goto downstream_error;
2282 /* Now handle the frame */
2284 /* Copy the buffer content in chunks of size as requested
2286 buf = gst_amc_codec_get_input_buffer (self->codec, idx, &err);
2288 goto failed_to_get_input_buffer;
2290 goto got_null_input_buffer;
/* Fill as much of the remaining input as fits into this codec buffer. */
2292 memset (&buffer_info, 0, sizeof (buffer_info));
2293 buffer_info.offset = 0;
2294 buffer_info.size = MIN (minfo.size - offset, buf->size);
2295 gst_amc_buffer_set_position_and_limit (buf, NULL, buffer_info.offset,
2298 orc_memcpy (buf->data, minfo.data + offset, buffer_info.size);
2300 gst_amc_buffer_free (buf);
2303 /* Interpolate timestamps if we're passing the buffer
2304 * in multiple chunks */
2305 if (offset != 0 && duration != GST_CLOCK_TIME_NONE) {
2306 timestamp_offset = gst_util_uint64_scale (offset, duration, minfo.size);
/* MediaCodec wants microseconds; also track the last upstream ts so
 * drain() can stamp its EOS buffer. */
2309 if (timestamp != GST_CLOCK_TIME_NONE) {
2310 buffer_info.presentation_time_us =
2311 gst_util_uint64_scale (timestamp + timestamp_offset, 1, GST_USECOND);
2312 self->last_upstream_ts = timestamp + timestamp_offset;
2314 if (duration != GST_CLOCK_TIME_NONE)
2315 self->last_upstream_ts += duration;
/* Attach a BufferIdentification so the output loop can map decoded
 * buffers back to this GstVideoCodecFrame (presumably only on the
 * first chunk -- the guarding condition is elided here; confirm). */
2318 BufferIdentification *id =
2319 buffer_identification_new (timestamp + timestamp_offset);
2320 if (GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame))
2321 buffer_info.flags |= BUFFER_FLAG_SYNC_FRAME;
2322 gst_video_codec_frame_set_user_data (frame, id,
2323 (GDestroyNotify) buffer_identification_free);
2326 offset += buffer_info.size;
2327 GST_DEBUG_OBJECT (self,
2328 "Queueing buffer %d: size %d time %" G_GINT64_FORMAT
2329 " flags 0x%08x", idx, buffer_info.size,
2330 buffer_info.presentation_time_us, buffer_info.flags);
2331 if (!gst_amc_codec_queue_input_buffer (self->codec, idx, &buffer_info,
2333 if (self->flushing) {
2334 g_clear_error (&err);
/* New input means the codec will produce output again. */
2339 self->drained = FALSE;
2342 gst_buffer_unmap (frame->input_buffer, &minfo);
2343 gst_video_codec_frame_unref (frame);
2345 return self->downstream_flow_ret;
/* --- error/flush exits; each unmaps and drops the frame ref --- */
2349 GST_ERROR_OBJECT (self, "Downstream returned %s",
2350 gst_flow_get_name (self->downstream_flow_ret));
2352 gst_buffer_unmap (frame->input_buffer, &minfo);
2353 gst_video_codec_frame_unref (frame);
2354 return self->downstream_flow_ret;
2356 failed_to_get_input_buffer:
2358 GST_ELEMENT_ERROR_FROM_ERROR (self, err);
2360 gst_buffer_unmap (frame->input_buffer, &minfo);
2361 gst_video_codec_frame_unref (frame);
2362 return GST_FLOW_ERROR;
2364 got_null_input_buffer:
2366 GST_ELEMENT_ERROR (self, LIBRARY, SETTINGS, (NULL),
2367 ("Got no input buffer"));
2369 gst_buffer_unmap (frame->input_buffer, &minfo);
2370 gst_video_codec_frame_unref (frame);
2371 return GST_FLOW_ERROR;
/* dequeue failure: fatal element error */
2375 GST_ELEMENT_ERROR_FROM_ERROR (self, err);
2377 gst_buffer_unmap (frame->input_buffer, &minfo);
2378 gst_video_codec_frame_unref (frame);
2379 return GST_FLOW_ERROR;
/* queue failure: non-fatal decoder error (may be ignored per policy) */
2383 GST_VIDEO_DECODER_ERROR_FROM_ERROR (self, err);
2385 gst_buffer_unmap (frame->input_buffer, &minfo);
2386 gst_video_codec_frame_unref (frame);
2387 return GST_FLOW_ERROR;
2391 GST_DEBUG_OBJECT (self, "Flushing -- returning FLUSHING");
2393 gst_buffer_unmap (frame->input_buffer, &minfo);
2394 gst_video_codec_frame_unref (frame);
2395 return GST_FLOW_FLUSHING;
2399 static GstFlowReturn
2400 gst_amc_video_dec_finish (GstVideoDecoder * decoder)
2402 GstAmcVideoDec *self;
2404 self = GST_AMC_VIDEO_DEC (decoder);
2406 return gst_amc_video_dec_drain (self);
/* gst_amc_video_dec_drain():
 * Queue an empty, EOS-flagged input buffer into the codec and wait on
 * drain_cond until the matching EOS shows up at the output side
 * (presumably signalled by the output loop -- the waking side is
 * outside this chunk; confirm).  Returns a GstFlowReturn.
 *
 * NOTE(review): this extraction is missing source lines (the locals
 * 'idx'/'err'/'buf'/'ret', several braces and early returns).  Only
 * comments were added; the surviving code is byte-identical.
 */
2409 static GstFlowReturn
2410 gst_amc_video_dec_drain (GstAmcVideoDec * self)
2416 GST_DEBUG_OBJECT (self, "Draining codec");
/* Nothing queued yet -- nothing to drain. */
2417 if (!self->started) {
2418 GST_DEBUG_OBJECT (self, "Codec not started yet");
2422 /* Don't send drain buffer twice, this doesn't work */
2423 if (self->drained) {
2424 GST_DEBUG_OBJECT (self, "Codec is drained already");
2428 /* Make sure to release the base class stream lock, otherwise
2429 * _loop() can't call _finish_frame() and we might block forever
2430 * because no input buffers are released */
2431 GST_VIDEO_DECODER_STREAM_UNLOCK (self);
2432 /* Send an EOS buffer to the component and let the base
2433 * class drop the EOS event. We will send it later when
2434 * the EOS buffer arrives on the output port.
2435 * Wait at most 0.5s here. */
2436 idx = gst_amc_codec_dequeue_input_buffer (self->codec, 500000, &err);
2437 GST_VIDEO_DECODER_STREAM_LOCK (self);
2441 GstAmcBufferInfo buffer_info;
2443 buf = gst_amc_codec_get_input_buffer (self->codec, idx, &err);
/* Take drain_lock (stream lock released first to avoid lock inversion
 * with the output loop) and mark ourselves as draining before queueing
 * the EOS buffer, so the wakeup cannot be missed. */
2445 GST_VIDEO_DECODER_STREAM_UNLOCK (self);
2446 g_mutex_lock (&self->drain_lock);
2447 self->draining = TRUE;
/* Zero-sized buffer stamped with the last upstream timestamp (in
 * microseconds, as MediaCodec expects) and flagged END_OF_STREAM. */
2449 memset (&buffer_info, 0, sizeof (buffer_info));
2450 buffer_info.size = 0;
2451 buffer_info.presentation_time_us =
2452 gst_util_uint64_scale (self->last_upstream_ts, 1, GST_USECOND);
2453 buffer_info.flags |= BUFFER_FLAG_END_OF_STREAM;
2455 gst_amc_buffer_set_position_and_limit (buf, NULL, 0, 0);
2456 gst_amc_buffer_free (buf);
2459 if (gst_amc_codec_queue_input_buffer (self->codec, idx, &buffer_info,
/* Block until the output loop reports the EOS buffer. */
2461 GST_DEBUG_OBJECT (self, "Waiting until codec is drained");
2462 g_cond_wait (&self->drain_cond, &self->drain_lock);
2463 GST_DEBUG_OBJECT (self, "Drained codec");
/* Queueing the EOS buffer failed. */
2466 GST_ERROR_OBJECT (self, "Failed to queue input buffer");
2467 if (self->flushing) {
2468 g_clear_error (&err);
2469 ret = GST_FLOW_FLUSHING;
2471 GST_ELEMENT_WARNING_FROM_ERROR (self, err);
2472 ret = GST_FLOW_ERROR;
/* Either way the codec now counts as drained; re-take the stream lock
 * before returning to the base class. */
2476 self->drained = TRUE;
2477 self->draining = FALSE;
2478 g_mutex_unlock (&self->drain_lock);
2479 GST_VIDEO_DECODER_STREAM_LOCK (self);
/* Dequeue returned a bad index (no buffer within 0.5s). */
2481 GST_ERROR_OBJECT (self, "Failed to get buffer for EOS: %d", idx);
2483 GST_ELEMENT_WARNING_FROM_ERROR (self, err);
2484 ret = GST_FLOW_ERROR;
/* get_input_buffer() failed for the dequeued index. */
2487 GST_ERROR_OBJECT (self, "Failed to acquire buffer for EOS: %d", idx);
2489 GST_ELEMENT_WARNING_FROM_ERROR (self, err);
2490 ret = GST_FLOW_ERROR;
2497 gst_amc_video_dec_src_query (GstVideoDecoder * bdec, GstQuery * query)
2499 GstAmcVideoDec *self = GST_AMC_VIDEO_DEC (bdec);
2501 switch (GST_QUERY_TYPE (query)) {
2502 case GST_QUERY_CONTEXT:
2504 if (gst_gl_handle_context_query ((GstElement *) self, query,
2505 self->gl_display, self->gl_context, self->other_gl_context))
2513 return GST_VIDEO_DECODER_CLASS (parent_class)->src_query (bdec, query);
2517 _caps_are_rgba_with_gl_memory (GstCaps * caps)
2520 GstCapsFeatures *features;
2525 if (!gst_video_info_from_caps (&info, caps))
2528 if (info.finfo->format != GST_VIDEO_FORMAT_RGBA)
2531 if (!(features = gst_caps_get_features (caps, 0)))
2534 return gst_caps_features_contains (features,
2535 GST_CAPS_FEATURE_MEMORY_GL_MEMORY);
2539 _find_local_gl_context (GstAmcVideoDec * self)
2541 if (gst_gl_query_local_gl_context (GST_ELEMENT (self), GST_PAD_SRC,
2548 gst_amc_video_dec_decide_allocation (GstVideoDecoder * bdec, GstQuery * query)
2550 GstAmcVideoDec *self = GST_AMC_VIDEO_DEC (bdec);
2551 gboolean need_pool = FALSE;
2552 GstCaps *caps = NULL;
2553 // GError *error = NULL;
2555 if (!GST_VIDEO_DECODER_CLASS (parent_class)->decide_allocation (bdec, query))
2558 self->downstream_supports_gl = FALSE;
2559 gst_query_parse_allocation (query, &caps, &need_pool);
2560 if (_caps_are_rgba_with_gl_memory (caps)) {
2562 if (!gst_gl_ensure_element_data (self, &self->gl_display,
2563 &self->other_gl_context))
2566 if (!_find_local_gl_context (self))
2569 if (!self->gl_context) {
2570 GST_OBJECT_LOCK (self->gl_display);
2572 if (self->gl_context) {
2573 gst_object_unref (self->gl_context);
2574 self->gl_context = NULL;
2576 /* just get a GL context. we don't care */
2578 gst_gl_display_get_gl_context_for_thread (self->gl_display, NULL);
2579 if (!self->gl_context) {
2580 if (!gst_gl_display_create_context (self->gl_display,
2581 self->other_gl_context, &self->gl_context, &error)) {
2582 GST_OBJECT_UNLOCK (mix->display);
2586 } while (!gst_gl_display_add_context (self->gl_display,
2588 GST_OBJECT_UNLOCK (self->gl_display);
2592 self->downstream_supports_gl = TRUE;
2596 return gst_amc_video_dec_check_codec_config (self);
2600 GST_ELEMENT_ERROR (self, RESOURCE, NOT_FOUND, ("%s", error->message),
2602 g_clear_error (&error);
/* JNI callback fired from the Java SurfaceTexture onFrameAvailable
 * listener when a new decoded frame is ready in the codec's surface
 * texture.  'context' carries the GstAmcVideoDec pointer packed into a
 * jlong; 'env'/'thiz'/'surfaceTexture' are unused here.
 *
 * Under gl_lock it bumps the ready-frame counter and broadcasts
 * gl_cond to wake whoever is waiting to render the frame.
 *
 * NOTE(review): the line(s) elided after the comment below presumably
 * early-return when 'self' is NULL before it is dereferenced --
 * confirm against the full source. */
2609 gst_amc_video_dec_on_frame_available (JNIEnv * env, jobject thiz,
2610 long long context, jobject surfaceTexture)
2612 GstAmcVideoDec *self = JLONG_TO_GST_AMC_VIDEO_DEC (context);
2614 /* apparently we can be called after the decoder has been closed */
2618 g_mutex_lock (&self->gl_lock);
2619 self->gl_ready_frame_count++;
2620 GST_LOG_OBJECT (self, "frame %u available", self->gl_ready_frame_count);
2621 g_cond_broadcast (&self->gl_cond);
2622 g_mutex_unlock (&self->gl_lock);