exit 1;
}
-./configure || {
- echo 'configure failed';
- exit 1;
-}
+#./configure || {
+# echo 'configure failed';
+# exit 1;
+#}
-echo
-echo "Now type 'make' to compile this module."
-echo
+#echo
+#echo "Now type 'make' to compile this module."
+#echo
AC_PREREQ([2.53])
dnl TODO: fill in your package name and package version here
-AC_INIT([my-plugin-package],[0.10.0])
+AC_INIT([my-plugin-package],[1.0.0])
dnl required versions of gstreamer and plugins-base
-GST_REQUIRED=0.10.16
-GSTPB_REQUIRED=0.10.16
+GST_REQUIRED=1.0.0
+GSTPB_REQUIRED=1.0.0
AC_CONFIG_SRCDIR([src/gstmaru.c])
AC_CONFIG_HEADERS([config.h])
dnl This will export GST_CFLAGS and GST_LIBS variables for use in Makefile.am
dnl
dnl If you need libraries from gst-plugins-base here, also add:
-dnl for libgstaudio-0.10: gstreamer-audio-0.10 >= $GST_REQUIRED
-dnl for libgstvideo-0.10: gstreamer-video-0.10 >= $GST_REQUIRED
-dnl for libgsttag-0.10: gstreamer-tag-0.10 >= $GST_REQUIRED
-dnl for libgstpbutils-0.10: gstreamer-pbutils-0.10 >= $GST_REQUIRED
-dnl for libgstfft-0.10: gstreamer-fft-0.10 >= $GST_REQUIRED
-dnl for libgstinterfaces-0.10: gstreamer-interfaces-0.10 >= $GST_REQUIRED
-dnl for libgstrtp-0.10: gstreamer-rtp-0.10 >= $GST_REQUIRED
-dnl for libgstrtsp-0.10: gstreamer-rtsp-0.10 >= $GST_REQUIRED
+dnl for libgstaudio-1.0: gstreamer-audio-1.0 >= $GST_REQUIRED
+dnl for libgstvideo-1.0: gstreamer-video-1.0 >= $GST_REQUIRED
+dnl for libgsttag-1.0: gstreamer-tag-1.0 >= $GST_REQUIRED
+dnl for libgstpbutils-1.0: gstreamer-pbutils-1.0 >= $GST_REQUIRED
+dnl for libgstfft-1.0: gstreamer-fft-1.0 >= $GST_REQUIRED
+dnl for libgstinterfaces-1.0: gstreamer-interfaces-1.0 >= $GST_REQUIRED
+dnl for libgstrtp-1.0: gstreamer-rtp-1.0 >= $GST_REQUIRED
+dnl for libgstrtsp-1.0: gstreamer-rtsp-1.0 >= $GST_REQUIRED
dnl etc.
PKG_CHECK_MODULES(GST, [
- gstreamer-0.10 >= $GST_REQUIRED
- gstreamer-base-0.10 >= $GST_REQUIRED
- gstreamer-controller-0.10 >= $GST_REQUIRED
+ gstreamer-1.0 >= $GST_REQUIRED
+ gstreamer-base-1.0 >= $GST_REQUIRED
+ gstreamer-controller-1.0 >= $GST_REQUIRED
], [
AC_SUBST(GST_CFLAGS)
AC_SUBST(GST_LIBS)
AC_MSG_ERROR([
You need to install or upgrade the GStreamer development
packages on your system. On debian-based systems these are
- libgstreamer0.10-dev and libgstreamer-plugins-base0.10-dev.
- on RPM-based systems gstreamer0.10-devel, libgstreamer0.10-devel
+ libgstreamer1.0-dev and libgstreamer-plugins-base1.0-dev.
+ on RPM-based systems gstreamer1.0-devel, libgstreamer1.0-devel
or similar. The minimum version required is $GST_REQUIRED.
])
])
dnl set the plugindir where plugins should be installed (for src/Makefile.am)
if test "x${prefix}" = "x$HOME"; then
- plugindir="$HOME/.gstreamer-0.10/plugins"
+ plugindir="$HOME/.gstreamer-1.0/plugins"
else
- plugindir="\$(libdir)/gstreamer-0.10"
+ plugindir="\$(libdir)/gstreamer-1.0"
fi
AC_SUBST(plugindir)
AC_CONFIG_FILES([Makefile src/Makefile])
AC_OUTPUT
-
License: LGPL-2.0+
Source0: %{name}-%{version}.tar.gz
Source1001: packaging/%{name}.manifest
-BuildRequires: pkgconfig(gstreamer-0.10)
-BuildRequires: pkgconfig(gstreamer-plugins-base-0.10)
+BuildRequires: pkgconfig(gstreamer-1.0)
+BuildRequires: pkgconfig(gstreamer-plugins-base-1.0)
BuildRequires: pkgconfig(glib-2.0)
%description
%files
%manifest gst-plugins-emulator.manifest
%defattr(-,root,root,-)
-%{_libdir}/gstreamer-0.10/libgstemul.so
+%{_libdir}/gstreamer-1.0/libgstemul.so
/usr/share/license/%{name}
# sources used to compile this plug-in
libgstemul_la_SOURCES = gstmaru.c \
gstmaruutils.c \
- gstmarudec.c \
- gstmaruenc.c \
+ gstmaruviddec.c \
+ gstmaruauddec.c \
+ gstmaruvidenc.c \
+ gstmaruaudenc.c \
gstmaruinterface.c \
gstmaruinterface3.c \
gstmarudevice.c \
# compiler and linker flags used to compile this plugin, set in configure.ac
libgstemul_la_CFLAGS = $(GST_CFLAGS) -g
-libgstemul_la_LIBADD = $(GST_LIBS) -lgstaudio-0.10 -lgstpbutils-0.10
+libgstemul_la_LIBADD = $(GST_LIBS) -lgstaudio-1.0 -lgstpbutils-1.0
libgstemul_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
libgstemul_la_LIBTOOLFLAGS = --tag=disable-static
#define GST_IS_MARUDEC_CLASS(klass) \
(G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_MARUDEC))
-gboolean gst_marudec_register (GstPlugin *plugin, GList *element);
-gboolean gst_maruenc_register (GstPlugin *plugin, GList *element);
+gboolean gst_maruviddec_register (GstPlugin *plugin, GList *element);
+gboolean gst_maruvidenc_register (GstPlugin *plugin, GList *element);
+gboolean gst_maruauddec_register (GstPlugin *plugin, GList *element);
+gboolean gst_maruaudenc_register (GstPlugin *plugin, GList *element);
static GList *elements = NULL;
static gboolean codec_element_init = FALSE;
}
}
g_mutex_unlock (&gst_maru_mutex);
-
- if (!gst_marudec_register (plugin, elements)) {
+ if (!gst_maruviddec_register (plugin, elements)) {
+ GST_ERROR ("failed to register decoder elements");
+ return FALSE;
+ }
+ if (!gst_maruauddec_register (plugin, elements)) {
GST_ERROR ("failed to register decoder elements");
return FALSE;
}
- if (!gst_maruenc_register (plugin, elements)) {
+ if (!gst_maruvidenc_register (plugin, elements)) {
+ GST_ERROR ("failed to register encoder elements");
+ return FALSE;
+ }
+ if (!gst_maruaudenc_register (plugin, elements)) {
GST_ERROR ("failed to register encoder elements");
return FALSE;
}
-
return TRUE;
}
GST_PLUGIN_DEFINE (
  GST_VERSION_MAJOR,
  GST_VERSION_MINOR,
-  "tizen-emul",
+  tizen_emul,
  "Codecs for Tizen Emulator",
  plugin_init,
-  "0.3.0",
+  "1.2.0",
  "LGPL",
  "gst-plugins-emulator",
  "http://www.tizen.org"
#include <sys/mman.h>
#include <glib.h>
#include <gst/gst.h>
+#include <gst/audio/audio.h>
+#include <gst/audio/gstaudiodecoder.h>
+#include <gst/video/gstvideodecoder.h>
#include "pixfmt.h"
GST_DEBUG_CATEGORY_EXTERN (maru_debug);
};
} __attribute__((packed)) CodecElement;
+typedef struct AVRational{
+ int num; ///< numerator
+ int den; ///< denominator
+} AVRational;
+
typedef struct {
int32_t width, height;
int32_t fps_n, fps_d;
CODEC_TYPE_ENCODE,
};
-enum AUDIO_SAMPLE_FORMAT {
+enum SampleFormat {
SAMPLE_FMT_NONE = -1,
SAMPLE_FMT_U8,
SAMPLE_FMT_S16,
--- /dev/null
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ * Copyright (C) 2013 Samsung Electronics Co., Ltd.
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+/* Modifications by Samsung Electronics Co., Ltd.
+ * 1. Provide a hardware buffer in order to avoid additional memcpy operations.
+ */
+
+
+
+#include "gstmarudevice.h"
+#include "gstmaruutils.h"
+#include "gstmaruinterface.h"
+
+#define GST_MARUDEC_PARAMS_QDATA g_quark_from_static_string("marudec-params")
+
+typedef struct _GstMaruAudDecClass
+{
+ GstAudioDecoderClass parent_class;
+
+ CodecElement *codec;
+ GstPadTemplate *sinktempl;
+ GstPadTemplate *srctempl;
+} GstMaruAudDecClass;
+
+static GstElementClass *parent_class = NULL;
+
+static void gst_maruauddec_base_init (GstMaruAudDecClass *klass);
+static void gst_maruauddec_class_init (GstMaruAudDecClass *klass);
+static void gst_maruauddec_init (GstMaruAudDec *maruauddec);
+static void gst_maruauddec_finalize (GObject *object);
+
+static gboolean gst_maruauddec_start (GstAudioDecoder *decoder);
+static gboolean gst_maruauddec_stop (GstAudioDecoder *decoder);
+static void gst_maruauddec_flush (GstAudioDecoder *decoder, gboolean hard);
+static gboolean gst_maruauddec_set_format (GstAudioDecoder *decoder,
+ GstCaps *caps);
+static GstFlowReturn gst_maruauddec_handle_frame (GstAudioDecoder *decoder,
+ GstBuffer *inbuf);
+
+static gboolean gst_maruauddec_negotiate (GstMaruAudDec *maruauddec,
+ gboolean force);
+
+static void
+gst_maruauddec_base_init (GstMaruAudDecClass *klass)
+{
+ GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstCaps *sinkcaps = NULL, *srccaps = NULL;
+ GstPadTemplate *sinktempl, *srctempl;
+ gchar *longname, *description;
+
+  CodecElement *codec = NULL;
+
+  codec =
+    (CodecElement *)g_type_get_qdata (G_OBJECT_CLASS_TYPE (klass),
+      GST_MARUDEC_PARAMS_QDATA);
+  g_assert (codec != NULL);
+
+ longname = g_strdup_printf ("Maru %s Decoder", codec->longname);
+ description = g_strdup_printf("Maru %s Decoder", codec->name);
+
+ gst_element_class_set_details_simple (element_class,
+ longname,
+ "Codec/Decoder/Audio",
+ description,
+ "SooYoung Ha <yoosah.ha@samsung.com>");
+
+ g_free (longname);
+ g_free (description);
+
+ sinkcaps = gst_maru_codecname_to_caps (codec->name, NULL, FALSE);
+ if (!sinkcaps) {
+ GST_DEBUG ("Couldn't get sink caps for decoder %s", codec->name);
+ sinkcaps = gst_caps_from_string ("unknown/unknown");
+ }
+
+ srccaps = gst_maru_codectype_to_audio_caps (NULL, codec->name, FALSE, codec);
+ if (!srccaps) {
+ GST_DEBUG ("Couldn't get src caps for decoder %s", codec->name);
+ srccaps = gst_caps_from_string ("audio/x-raw");
+ }
+
+ sinktempl = gst_pad_template_new ("sink", GST_PAD_SINK,
+ GST_PAD_ALWAYS, sinkcaps);
+ srctempl = gst_pad_template_new ("src", GST_PAD_SRC,
+ GST_PAD_ALWAYS, srccaps);
+
+ gst_element_class_add_pad_template (element_class, srctempl);
+ gst_element_class_add_pad_template (element_class, sinktempl);
+
+ klass->codec = codec;
+ klass->sinktempl = sinktempl;
+ klass->srctempl = srctempl;
+}
+
+static void
+gst_maruauddec_class_init (GstMaruAudDecClass *klass)
+{
+ GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
+ GstAudioDecoderClass *gstauddecoder_class = GST_AUDIO_DECODER_CLASS (klass);
+
+ parent_class = g_type_class_peek_parent (klass);
+
+ gobject_class->finalize = gst_maruauddec_finalize;
+
+ gstauddecoder_class->start = GST_DEBUG_FUNCPTR (gst_maruauddec_start);
+ gstauddecoder_class->stop = GST_DEBUG_FUNCPTR (gst_maruauddec_stop);
+ gstauddecoder_class->set_format = GST_DEBUG_FUNCPTR (gst_maruauddec_set_format);
+ gstauddecoder_class->handle_frame = GST_DEBUG_FUNCPTR (gst_maruauddec_handle_frame);
+ gstauddecoder_class->flush = GST_DEBUG_FUNCPTR (gst_maruauddec_flush);
+}
+
+static void
+gst_maruauddec_init (GstMaruAudDec *maruauddec)
+{
+ maruauddec->context = g_malloc0 (sizeof(CodecContext));
+ maruauddec->context->audio.sample_fmt = SAMPLE_FMT_NONE;
+ maruauddec->opened = FALSE;
+
+ // TODO: check why
+ gst_audio_decoder_set_drainable (GST_AUDIO_DECODER (maruauddec), TRUE);
+ gst_audio_decoder_set_needs_format (GST_AUDIO_DECODER (maruauddec), TRUE);
+}
+
+static void
+gst_maruauddec_finalize (GObject *object)
+{
+ GstMaruAudDec *maruauddec = (GstMaruAudDec *) object;
+
+ GST_DEBUG_OBJECT (maruauddec, "finalize object and release context");
+
+ // TODO: check why
+ if (maruauddec->opened) {
+ gst_maru_avcodec_close (maruauddec->context, maruauddec->dev);
+ maruauddec->opened = FALSE;
+ }
+
+ g_free (maruauddec->context);
+ maruauddec->context = NULL;
+
+ g_free (maruauddec->dev);
+ maruauddec->dev = NULL;
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+static gboolean
+gst_maruauddec_close (GstMaruAudDec *maruauddec, gboolean reset)
+{
+ GST_LOG_OBJECT (maruauddec, "closing maru codec");
+
+ gst_caps_replace (&maruauddec->last_caps, NULL);
+ gst_buffer_replace (&maruauddec->outbuf, NULL);
+
+ gst_maru_avcodec_close (maruauddec->context, maruauddec->dev);
+ maruauddec->opened = FALSE;
+
+ if (maruauddec->context) {
+ g_free(maruauddec->context->codecdata);
+ maruauddec->context->codecdata = NULL;
+ }
+
+ return TRUE;
+}
+
+gboolean gst_maruauddec_start(GstAudioDecoder *decoder)
+{
+ GstMaruAudDec *maruauddec = (GstMaruAudDec *) decoder;
+
+ GST_OBJECT_LOCK (maruauddec);
+ gst_maru_avcodec_close (maruauddec->context, maruauddec->dev);
+
+ GST_OBJECT_UNLOCK (maruauddec);
+
+ return TRUE;
+}
+
+gboolean gst_maruauddec_stop(GstAudioDecoder *decoder)
+{
+ GstMaruAudDec *maruauddec = (GstMaruAudDec *) decoder;
+
+ GST_OBJECT_LOCK (maruauddec);
+ gst_maruauddec_close (maruauddec, FALSE);
+ GST_OBJECT_UNLOCK (maruauddec);
+
+ // initialize 'GstAudioInfo' with default values
+ gst_audio_info_init (&maruauddec->info);
+
+ // TODO: check why
+ gst_caps_replace (&maruauddec->last_caps, NULL);
+
+ return TRUE;
+}
+
+static gboolean
+gst_maruauddec_open (GstMaruAudDec *maruauddec)
+{
+ GstMaruAudDecClass *oclass;
+
+ oclass = (GstMaruAudDecClass *) (G_OBJECT_GET_CLASS (maruauddec));
+
+ maruauddec->dev = g_try_malloc0 (sizeof(CodecDevice));
+ if (!maruauddec->dev) {
+ GST_ERROR_OBJECT (maruauddec, "failed to allocate memory for CodecDevice");
+ return FALSE;
+ }
+
+ if (gst_maru_avcodec_open (maruauddec->context, oclass->codec, maruauddec->dev) < 0) {
+ gst_maruauddec_close(maruauddec, TRUE);
+ GST_DEBUG_OBJECT (maruauddec,
+ "maru_%sdec: Failed to open codec", oclass->codec->name);
+ return FALSE;
+ }
+
+ maruauddec->opened = TRUE;
+ GST_LOG_OBJECT (maruauddec, "Opened codec %s", oclass->codec->name);
+
+ gst_audio_info_init (&maruauddec->info);
+
+ maruauddec->audio.sample_rate = 0;
+ maruauddec->audio.channels = 0;
+ maruauddec->audio.depth = 0;
+
+ return TRUE;
+}
+
+static gint
+gst_maruauddec_audio_frame (GstMaruAudDec *marudec,
+    CodecElement *codec, guint8 *data, guint size, GstTSInfo *dec_info,
+    GstBuffer **outbuf, GstFlowReturn *ret)
+{
+  GST_DEBUG (" >> ENTER");
+
+  gint len = -1;
+  gint have_data = FF_MAX_AUDIO_FRAME_SIZE; /* in: capacity of buf; out: decoded byte count */
+  GstClockTime out_timestamp, out_duration;
+  gint64 out_offset;
+  GstMapInfo outmapinfo;
+
+  void* buf = g_malloc0(FF_MAX_AUDIO_FRAME_SIZE);
+
+  GST_DEBUG_OBJECT (marudec, "decode audio, input buffer size %d", size);
+
+  len = interface->decode_audio (marudec->context, (int16_t *) buf, &have_data,
+      data, size, marudec->dev);
+
+  if (len >= 0 && have_data > 0) {
+    GST_DEBUG_OBJECT (marudec, "Creating output buffer");
+    if (!gst_maruauddec_negotiate (marudec, FALSE)) {
+      GST_DEBUG ("negotiation failed.");
+      g_free (buf); return len; /* fix: do not leak decode buffer on early return */
+    } else {
+      GST_DEBUG ("negotiation passed.");
+    }
+
+    /* fix: 'len' is consumed *input* bytes; 'have_data' is the decoded output size */
+    *outbuf = gst_audio_decoder_allocate_output_buffer (GST_AUDIO_DECODER (marudec), have_data);
+    gst_buffer_map (*outbuf, &outmapinfo, GST_MAP_READWRITE);
+    memcpy (outmapinfo.data, buf, have_data);
+
+    gst_buffer_unmap (*outbuf, &outmapinfo);
+    /* fix: dec_info is NULL when called from gst_maruauddec_drain — guard deref */
+    out_timestamp = dec_info ? dec_info->timestamp : GST_CLOCK_TIME_NONE;
+
+    /* calculate based on number of samples */
+    out_duration = gst_util_uint64_scale (have_data, GST_SECOND,
+        marudec->info.finfo->depth * marudec->info.channels *
+        marudec->context->audio.sample_rate); /* NOTE(review): finfo->depth is bits, have_data bytes — confirm units */
+
+    out_offset = dec_info ? dec_info->offset : GST_BUFFER_OFFSET_NONE;
+
+    GST_DEBUG_OBJECT (marudec,
+        "Buffer created. Size: %d, timestamp: %" GST_TIME_FORMAT
+        ", duration: %" GST_TIME_FORMAT, have_data,
+        GST_TIME_ARGS (out_timestamp), GST_TIME_ARGS (out_duration));
+
+    GST_BUFFER_TIMESTAMP (*outbuf) = out_timestamp;
+    GST_BUFFER_DURATION (*outbuf) = out_duration;
+    GST_BUFFER_OFFSET (*outbuf) = out_offset;
+  }
+
+  if (len == -1 && !strcmp(codec->name, "aac")) {
+    GST_ELEMENT_ERROR (marudec, STREAM, DECODE, (NULL),
+        ("Decoding of AAC stream failed."));
+    *ret = GST_FLOW_ERROR;
+  }
+  g_free (buf); /* fix: free decode buffer on all remaining paths */
+  GST_DEBUG_OBJECT (marudec, "return flow %d, out %p, len %d",
+      *ret, *outbuf, len);
+
+  return len;
+}
+
+static gint
+gst_maruauddec_frame (GstMaruAudDec *marudec,
+ guint8 *data, guint size, GstTSInfo *dec_info, gint *got_data, GstFlowReturn *ret)
+{
+ GST_DEBUG (" >> ENTER ");
+
+ GstMaruAudDecClass *oclass;
+ GstBuffer *outbuf = marudec->outbuf;
+ gint have_data = 0, len = 0;
+
+ if (G_UNLIKELY (marudec->context->codec == NULL)) {
+ GST_ERROR_OBJECT (marudec, "no codec context");
+ return -1;
+ }
+ GST_LOG_OBJECT (marudec, "data:%p, size:%d", data, size);
+
+ *ret = GST_FLOW_OK;
+ oclass = (GstMaruAudDecClass *) (G_OBJECT_GET_CLASS (marudec));
+
+ switch (oclass->codec->media_type) {
+ case AVMEDIA_TYPE_AUDIO:
+ len = gst_maruauddec_audio_frame (marudec, oclass->codec, data, size,
+ dec_info, &marudec->outbuf, ret);
+ if (marudec->outbuf == NULL ) {
+ GST_DEBUG_OBJECT (marudec, "no buffer but keeping timestamp");
+ }
+ break;
+ default:
+ GST_ERROR_OBJECT (marudec, "Asked to decode non-audio/video frame!");
+ g_assert_not_reached ();
+ break;
+ }
+
+ if (marudec->outbuf) {
+ have_data = 1;
+ }
+
+ if (len < 0 || have_data < 0) {
+ GST_WARNING_OBJECT (marudec,
+ "maru_%sdec: decoding error (len: %d, have_data: %d)",
+ oclass->codec->name, len, have_data);
+ *got_data = 0;
+ return len;
+ } else if (len == 0 && have_data == 0) {
+ *got_data = 0;
+ return len;
+ } else {
+ *got_data = 1;
+ }
+ if (marudec->outbuf) {
+ GST_DEBUG_OBJECT (marudec, "Decoded data, now storing buffer %p", outbuf);
+ } else {
+ GST_DEBUG_OBJECT (marudec, "We didn't get a decoded buffer");
+ }
+ return len;
+}
+
+static void
+gst_maruauddec_drain (GstMaruAudDec *maruauddec)
+{
+ GST_DEBUG_OBJECT (maruauddec, "drain frame");
+
+ gint have_data, len;
+
+ do {
+ GstFlowReturn ret;
+
+ len =
+ gst_maruauddec_frame (maruauddec, NULL, 0, NULL, &have_data, &ret);
+
+ } while (len >= 0 && have_data == 1);
+
+ if (maruauddec->outbuf) {
+ gst_audio_decoder_finish_frame (GST_AUDIO_DECODER (maruauddec),
+ maruauddec->outbuf, 1);
+ maruauddec->outbuf = NULL;
+ }
+}
+
+gboolean gst_maruauddec_set_format(GstAudioDecoder *decoder, GstCaps *caps)
+{
+ GstMaruAudDec *maruauddec = NULL;
+ GstMaruAudDecClass *oclass;
+ gboolean ret = TRUE;
+
+ maruauddec = (GstMaruAudDec *) decoder;
+ if (!maruauddec) {
+ GST_DEBUG ("maruauddec is NULL");
+ return FALSE;
+ }
+
+ oclass = (GstMaruAudDecClass *) (G_OBJECT_GET_CLASS (maruauddec));
+
+ GST_DEBUG_OBJECT (maruauddec, "set_format called.");
+
+ GST_OBJECT_LOCK (maruauddec);
+
+ // TODO: check why
+ if (maruauddec->last_caps && gst_caps_is_equal (maruauddec->last_caps, caps)) {
+ GST_DEBUG_OBJECT (maruauddec, "same caps");
+ GST_OBJECT_UNLOCK (maruauddec);
+ return TRUE;
+ }
+
+ gst_caps_replace (&maruauddec->last_caps, caps);
+
+ if (maruauddec->opened) {
+ GST_OBJECT_UNLOCK (maruauddec);
+ gst_maruauddec_drain (maruauddec);
+ GST_OBJECT_LOCK (maruauddec);
+
+ if (!gst_maruauddec_close (maruauddec, TRUE)) {
+ GST_OBJECT_UNLOCK (maruauddec);
+ return FALSE;
+ }
+ }
+
+ gst_maru_caps_with_codecname (oclass->codec->name, oclass->codec->media_type,
+ caps, maruauddec->context);
+
+ if (!gst_maruauddec_open (maruauddec)) {
+ GST_DEBUG_OBJECT (maruauddec, "Failed to open");
+ GST_OBJECT_UNLOCK (maruauddec);
+ return FALSE;
+ }
+
+ GST_OBJECT_UNLOCK (maruauddec);
+
+ return ret;
+}
+
+static GstTSInfo *
+gst_ts_info_store (GstMaruAudDec *dec, GstClockTime timestamp,
+ GstClockTime duration, gint64 offset)
+{
+ GST_DEBUG (" >> ENTER ");
+ gint idx = dec->ts_idx;
+ dec->ts_info[idx].idx = idx;
+ dec->ts_info[idx].timestamp = timestamp;
+ dec->ts_info[idx].duration = duration;
+ dec->ts_info[idx].offset = offset;
+ dec->ts_idx = (idx + 1) & MAX_TS_MASK;
+
+ return &dec->ts_info[idx];
+}
+
+GstFlowReturn gst_maruauddec_handle_frame(GstAudioDecoder *decoder, GstBuffer *inbuf)
+{
+ GST_DEBUG (" >> ENTER ");
+
+ GstMaruAudDec *marudec = (GstMaruAudDec *) decoder;
+ gint have_data;
+ GstMapInfo mapinfo;
+ GstFlowReturn ret = GST_FLOW_OK;
+
+ guint8 *in_buf;
+ gint in_size;
+ GstClockTime in_timestamp;
+ GstClockTime in_duration;
+ gint64 in_offset;
+ GstTSInfo *in_info;
+ GstTSInfo *dec_info;
+
+ if (inbuf == NULL) {
+ gst_maruauddec_drain (marudec);
+ return GST_FLOW_OK;
+ }
+ inbuf = gst_buffer_ref (inbuf);
+
+ if (!gst_buffer_map (inbuf, &mapinfo, GST_MAP_READ)) {
+ GST_ERROR_OBJECT (marudec, "Failed to map buffer");
+ gst_buffer_unref (inbuf);
+ return GST_FLOW_ERROR;
+ }
+
+ in_timestamp = GST_BUFFER_TIMESTAMP (inbuf);
+ in_duration = GST_BUFFER_DURATION (inbuf);
+ in_offset = GST_BUFFER_OFFSET (inbuf);
+
+ in_info = gst_ts_info_store (marudec, in_timestamp, in_duration, in_offset);
+ GST_LOG_OBJECT (marudec,
+ "Received new data of size %u, offset: %" G_GUINT64_FORMAT ", ts:%"
+ GST_TIME_FORMAT ", dur: %" GST_TIME_FORMAT ", info %d",
+ mapinfo.size, GST_BUFFER_OFFSET (inbuf),
+ GST_TIME_ARGS (in_timestamp), GST_TIME_ARGS (in_duration), in_info->idx);
+
+ in_size = mapinfo.size;
+ in_buf = (guint8 *) mapinfo.data;
+
+ dec_info = in_info;
+
+ gst_maruauddec_frame (marudec, in_buf, in_size, dec_info, &have_data, &ret);
+
+ gst_buffer_unmap (inbuf, &mapinfo);
+ gst_buffer_unref (inbuf);
+
+ if (marudec->outbuf) {
+ ret = gst_audio_decoder_finish_frame (GST_AUDIO_DECODER (marudec),
+ marudec->outbuf, 1);
+ } else {
+ GST_DEBUG ("There is NO valid marudec->output");
+ }
+ marudec->outbuf = NULL;
+
+ return ret;
+}
+
+void gst_maruauddec_flush(GstAudioDecoder *decoder, gboolean hard)
+{
+ GstMaruAudDec *maruauddec = (GstMaruAudDec *) decoder;
+
+ GST_DEBUG_OBJECT (maruauddec, "flush decoded buffers");
+ interface->flush_buffers (maruauddec->context, maruauddec->dev);
+}
+
+static gboolean
+gst_maruauddec_negotiate (GstMaruAudDec *maruauddec, gboolean force)
+{
+ GstMaruAudDecClass *oclass;
+
+ gint depth;
+ GstAudioFormat format;
+ GstAudioChannelPosition pos[64] = { 0, };
+
+ oclass = (GstMaruAudDecClass *) (G_OBJECT_GET_CLASS (maruauddec));
+
+ depth = gst_maru_smpfmt_depth (maruauddec->context->audio.sample_fmt);
+ format = gst_maru_smpfmt_to_audioformat (maruauddec->context->audio.sample_fmt);
+ if (format == GST_AUDIO_FORMAT_UNKNOWN) {
+ GST_ELEMENT_ERROR (maruauddec, CORE, NEGOTIATION,
+ ("Could not find GStreamer caps mapping for codec '%s'.",
+ oclass->codec->name), (NULL));
+ return FALSE;
+ }
+
+ if (!force && maruauddec->info.rate ==
+ maruauddec->context->audio.sample_rate &&
+ maruauddec->info.channels == maruauddec->context->audio.channels &&
+ maruauddec->info.finfo->depth == depth) {
+ return TRUE;
+ }
+
+ GST_DEBUG_OBJECT (maruauddec,
+ "Renegotiating audio from %dHz@%dchannels (%d) to %dHz@%dchannels (%d)",
+ maruauddec->info.rate, maruauddec->info.channels,
+ maruauddec->info.finfo->depth,
+ maruauddec->context->audio.sample_rate, maruauddec->context->audio.channels, depth);
+
+ gst_maru_channel_layout_to_gst (maruauddec->context->audio.channel_layout,
+ maruauddec->context->audio.channels, pos);
+ memcpy (maruauddec->layout, pos,
+ sizeof (GstAudioChannelPosition) * maruauddec->context->audio.channels);
+
+ /* Get GStreamer channel layout */
+ gst_audio_channel_positions_to_valid_order (pos, maruauddec->context->audio.channels);
+
+ // TODO: purpose of needs_reorder ?
+ maruauddec->needs_reorder =
+ memcmp (pos, maruauddec->layout,
+ sizeof (pos[0]) * maruauddec->context->audio.channels) != 0;
+
+ gst_audio_info_set_format (&maruauddec->info, format,
+ maruauddec->context->audio.sample_rate, maruauddec->context->audio.channels, pos);
+
+ if (!gst_audio_decoder_set_output_format (GST_AUDIO_DECODER (maruauddec),
+ &maruauddec->info)) {
+ GST_ELEMENT_ERROR (maruauddec, CORE, NEGOTIATION, (NULL),
+ ("Could not set caps for maru decoder (%s), not fixed?",
+ oclass->codec->name));
+ memset (&maruauddec->info, 0, sizeof (maruauddec->info));
+ return FALSE;
+ }
+
+ return TRUE;
+}
+
+gboolean
+gst_maruauddec_register (GstPlugin *plugin, GList *element)
+{
+ GTypeInfo typeinfo = {
+ sizeof (GstMaruAudDecClass),
+ (GBaseInitFunc) gst_maruauddec_base_init,
+ NULL,
+ (GClassInitFunc) gst_maruauddec_class_init,
+ NULL,
+ NULL,
+ sizeof (GstMaruAudDec),
+ 0,
+ (GInstanceInitFunc) gst_maruauddec_init,
+ };
+
+ GType type;
+ gchar *type_name;
+ gint rank = GST_RANK_PRIMARY;
+ GList *elem = element;
+ CodecElement *codec = NULL;
+
+ if (!elem) {
+ return FALSE;
+ }
+
+ /* register element */
+ do {
+ codec = (CodecElement *)(elem->data);
+ if (!codec) {
+ return FALSE;
+ }
+
+ if ((codec->media_type != AVMEDIA_TYPE_AUDIO) || (codec->codec_type != CODEC_TYPE_DECODE)) {
+ continue;
+ }
+
+ type_name = g_strdup_printf ("maru_%sdec", codec->name);
+ type = g_type_from_name (type_name);
+ if (!type) {
+ type = g_type_register_static (GST_TYPE_AUDIO_DECODER, type_name, &typeinfo, 0);
+ g_type_set_qdata (type, GST_MARUDEC_PARAMS_QDATA, (gpointer) codec);
+ }
+
+ if (!gst_element_register (plugin, type_name, rank, type)) {
+ g_free (type_name);
+ return FALSE;
+ }
+
+ g_free (type_name);
+ } while ((elem = elem->next));
+
+ GST_LOG ("Finished Registering decoders");
+
+ return TRUE;
+}
--- /dev/null
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ * Copyright (C) 2013 Samsung Electronics Co., Ltd.
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#include "gstmarudevice.h"
+#include "gstmaruutils.h"
+#include "gstmaruinterface.h"
+#include <gst/base/gstadapter.h>
+
+#define GST_MARUENC_PARAMS_QDATA g_quark_from_static_string("maruaudenc-params")
+
+enum
+{
+ PROP_0,
+ PROP_BIT_RATE
+};
+
+typedef struct _GstMaruAudEnc
+{
+ GstAudioEncoder parent;
+
+ // cache
+ gint bitrate;
+ gint rtp_payload_size;
+ gint compliance;
+
+ GstAudioChannelPosition layout[64];
+ gboolean needs_reorder;
+
+ CodecContext *context;
+ CodecDevice *dev;
+ gboolean opened;
+
+} GstMaruAudEnc;
+
+typedef struct _GstMaruAudEncClass
+{
+ GstAudioEncoderClass parent_class;
+
+ CodecElement *codec;
+ GstPadTemplate *sinktempl;
+ GstPadTemplate *srctempl;
+} GstMaruAudEncClass;
+
+static GstElementClass *parent_class = NULL;
+
+static void gst_maruaudenc_base_init (GstMaruAudEncClass *klass);
+static void gst_maruaudenc_class_init (GstMaruAudEncClass *klass);
+static void gst_maruaudenc_init (GstMaruAudEnc *maruaudenc);
+static void gst_maruaudenc_finalize (GObject *object);
+
+static GstCaps *gst_maruaudenc_getcaps (GstAudioEncoder *encoder, GstCaps *filter);
+static gboolean gst_maruaudenc_set_format (GstAudioEncoder *encoder, GstAudioInfo *info);
+static GstFlowReturn gst_maruaudenc_handle_frame (GstAudioEncoder *encoder, GstBuffer *inbuf);
+static gboolean gst_maruaudenc_start (GstAudioEncoder *encoder);
+static gboolean gst_maruaudenc_stop (GstAudioEncoder *encoder);
+static void gst_maruaudenc_flush (GstAudioEncoder *encoder);
+
+static void gst_maruaudenc_set_property(GObject *object, guint prop_id,
+ const GValue *value, GParamSpec *pspec);
+static void gst_maruaudenc_get_property(GObject *object, guint prop_id,
+ GValue *value, GParamSpec *pspec);
+
+#define DEFAULT_AUDIO_BITRATE 128000
+
+#define MARU_DEFAULT_COMPLIANCE 0
+
+/*
+ * Implementation
+ */
+static void
+gst_maruaudenc_base_init (GstMaruAudEncClass *klass)
+{
+ GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstPadTemplate *sinktempl = NULL, *srctempl = NULL;
+ GstCaps *sinkcaps = NULL, *srccaps = NULL;
+ gchar *longname, *description;
+
+ CodecElement *codec = NULL;
+
+ codec =
+ (CodecElement *)g_type_get_qdata (G_OBJECT_CLASS_TYPE (klass),
+ GST_MARUENC_PARAMS_QDATA);
+ g_assert (codec != NULL);
+
+ longname = g_strdup_printf ("Maru %s Encoder", codec->longname);
+ description = g_strdup_printf ("Maru %s Encoder", codec->name);
+
+ gst_element_class_set_metadata (element_class,
+ longname,
+ "Codec/Encoder/Audio",
+ description,
+ "SooYoung Ha <yoosah.ha@samsung.com>");
+
+ g_free (longname);
+ g_free (description);
+
+ if (!(srccaps = gst_maru_codecname_to_caps (codec->name, NULL, TRUE))) {
+ GST_DEBUG ("Couldn't get source caps for encoder '%s'", codec->name);
+ srccaps = gst_caps_new_empty_simple ("unknown/unknown");
+ }
+
+ sinkcaps = gst_maru_codectype_to_audio_caps (NULL, codec->name, TRUE, codec);
+ if (!sinkcaps) {
+ GST_DEBUG ("Couldn't get sink caps for encoder '%s'", codec->name);
+ sinkcaps = gst_caps_new_empty_simple ("unknown/unknown");
+ }
+
+ sinktempl = gst_pad_template_new ("sink", GST_PAD_SINK,
+ GST_PAD_ALWAYS, sinkcaps);
+ srctempl = gst_pad_template_new ("src", GST_PAD_SRC,
+ GST_PAD_ALWAYS, srccaps);
+
+ gst_element_class_add_pad_template (element_class, srctempl);
+ gst_element_class_add_pad_template (element_class, sinktempl);
+
+ klass->codec = codec;
+ klass->sinktempl = sinktempl;
+ klass->srctempl = srctempl;
+}
+
+
+static void
+gst_maruaudenc_class_init (GstMaruAudEncClass *klass)
+{
+ GObjectClass *gobject_class;
+ GstAudioEncoderClass *gstaudioencoder_class;
+
+ gobject_class = (GObjectClass *) klass;
+ gstaudioencoder_class = (GstAudioEncoderClass *) klass;
+
+ gobject_class->set_property = gst_maruaudenc_set_property;
+ gobject_class->get_property = gst_maruaudenc_get_property;
+
+ g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_BIT_RATE,
+ g_param_spec_int ("bitrate", "Bit Rate",
+ "Target Audio Bitrate", 0, G_MAXINT, DEFAULT_AUDIO_BITRATE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ gobject_class->finalize = gst_maruaudenc_finalize;
+
+ gstaudioencoder_class->start = GST_DEBUG_FUNCPTR (gst_maruaudenc_start);
+ gstaudioencoder_class->stop = GST_DEBUG_FUNCPTR (gst_maruaudenc_stop);
+ gstaudioencoder_class->getcaps = GST_DEBUG_FUNCPTR (gst_maruaudenc_getcaps);
+ gstaudioencoder_class->flush = GST_DEBUG_FUNCPTR (gst_maruaudenc_flush);
+ gstaudioencoder_class->set_format = GST_DEBUG_FUNCPTR (gst_maruaudenc_set_format);
+ gstaudioencoder_class->handle_frame = GST_DEBUG_FUNCPTR (gst_maruaudenc_handle_frame);
+}
+
+static void
+gst_maruaudenc_init (GstMaruAudEnc *maruaudenc)
+{
+ // instead of AVCodecContext
+ maruaudenc->context = g_malloc0 (sizeof(CodecContext));
+ maruaudenc->context->audio.sample_fmt = SAMPLE_FMT_NONE;
+
+ maruaudenc->opened = FALSE;
+
+ maruaudenc->dev = g_malloc0 (sizeof(CodecDevice));
+
+ maruaudenc->compliance = MARU_DEFAULT_COMPLIANCE;
+
+ gst_audio_encoder_set_drainable (GST_AUDIO_ENCODER (maruaudenc), TRUE);
+}
+
+static void
+gst_maruaudenc_finalize (GObject *object)
+{
+ // Deinit Decoder
+ GstMaruAudEnc *maruaudenc = (GstMaruAudEnc *) object;
+
+ if (maruaudenc->opened) {
+ gst_maru_avcodec_close (maruaudenc->context, maruaudenc->dev);
+ maruaudenc->opened = FALSE;
+ }
+
+ g_free (maruaudenc->context);
+ maruaudenc->context = NULL;
+
+ g_free (maruaudenc->dev);
+ maruaudenc->dev = NULL;
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+static gboolean
+gst_maruaudenc_start (GstAudioEncoder * encoder)
+{
+/*
+ GstMaruAudEnc *maruaudenc = (GstMaruAudEnc *) encoder;
+ GstMaruAudEncClass *oclass =
+ (GstMaruAudEncClass *) G_OBJECT_GET_CLASS (maruaudenc);
+
+ gst_maru_avcodec_close (maruaudenc->context, maruaudenc->dev);
+*/
+ return TRUE;
+}
+
+static gboolean
+gst_maruaudenc_stop (GstAudioEncoder * encoder)
+{
+ GstMaruAudEnc *maruaudenc = (GstMaruAudEnc *) encoder;
+
+ /* close old session */
+ gst_maru_avcodec_close (maruaudenc->context, maruaudenc->dev);
+ maruaudenc->opened = FALSE;
+
+ return TRUE;
+}
+
+static void
+gst_maruaudenc_flush (GstAudioEncoder * encoder)
+{
+ GstMaruAudEnc *maruaudenc = (GstMaruAudEnc *) encoder;
+
+ if (maruaudenc->opened) {
+ interface->flush_buffers (maruaudenc->context, maruaudenc->dev);
+ }
+}
+
+static GstCaps *
+gst_maruaudenc_getcaps (GstAudioEncoder * encoder, GstCaps * filter)
+{
+ GstMaruAudEnc *maruenc = (GstMaruAudEnc *) encoder;
+ GstCaps *caps = NULL;
+
+ GST_DEBUG_OBJECT (maruenc, "getting caps");
+
+ /* audio needs no special care */
+ caps = gst_audio_encoder_proxy_getcaps (encoder, NULL, filter);
+
+ GST_DEBUG_OBJECT (maruenc, "audio caps, return %" GST_PTR_FORMAT, caps);
+
+ return caps;
+}
+
+static gboolean
+gst_maruaudenc_set_format (GstAudioEncoder *encoder, GstAudioInfo *info)
+{
+ GstMaruAudEnc *maruaudenc = (GstMaruAudEnc *) encoder;
+ GstCaps *other_caps;
+ GstCaps *allowed_caps;
+ GstCaps *icaps;
+ gsize frame_size;
+
+ GstMaruAudEncClass *oclass =
+ (GstMaruAudEncClass *) (G_OBJECT_GET_CLASS (maruaudenc));
+
+ if (maruaudenc->opened) {
+ gst_maru_avcodec_close (maruaudenc->context, maruaudenc->dev);
+ maruaudenc->opened = FALSE;
+ }
+
+ if (maruaudenc->bitrate > 0) {
+ GST_DEBUG_OBJECT (maruaudenc, "Setting context to bitrate %d",
+ maruaudenc->bitrate);
+ maruaudenc->context->bit_rate = maruaudenc->bitrate;
+ } else {
+ GST_INFO_OBJECT (maruaudenc, "Using context default bitrate %d",
+ maruaudenc->context->bit_rate);
+ }
+
+ // TODO: need to verify this code
+ /*
+ if (maruaudenc->rtp_payload_size) {
+ maruaudenc->context->rtp_payload_size = maruaudenc->rtp_payload_size;
+ }
+ */
+
+ // TODO: set these values in qemu layer.
+ /*
+ maruaudenc->context->rc_strategy = 2;
+ maruaudenc->context->b_frame_strategy = 0;
+ maruaudenc->context->coder_type = 0;
+ maruaudenc->context->context_model = 0;
+ */
+ if (!maruaudenc->context) {
+ GST_ERROR("ctx NULL");
+ }
+ if (!maruaudenc->context->codec) {
+ GST_ERROR("codec NULL");
+ }
+ gst_maru_audioinfo_to_context (info, maruaudenc->context);
+
+ // open codec
+ if (gst_maru_avcodec_open (maruaudenc->context,
+ oclass->codec, maruaudenc->dev) < 0) {
+ gst_maru_avcodec_close (maruaudenc->context, maruaudenc->dev);
+ GST_DEBUG_OBJECT (maruaudenc, "maru_%senc: Failed to open codec",
+ oclass->codec->name);
+
+ return FALSE;
+ }
+
+ GST_DEBUG_OBJECT (maruaudenc, "picking an output format.");
+ allowed_caps = gst_pad_get_allowed_caps (GST_AUDIO_ENCODER_SRC_PAD (encoder));
+ if (!allowed_caps) {
+ GST_DEBUG_OBJECT (maruaudenc, "but no peer, using template caps");
+ allowed_caps =
+ gst_caps_copy (gst_pad_get_pad_template_caps (GST_AUDIO_ENCODER_SRC_PAD (encoder)));
+ }
+
+ GST_DEBUG_OBJECT (maruaudenc, "chose caps %" GST_PTR_FORMAT, allowed_caps);
+ gst_maru_caps_with_codecname (oclass->codec->name,
+ oclass->codec->media_type, allowed_caps, maruaudenc->context);
+
+ other_caps =
+ gst_maru_codecname_to_caps (oclass->codec->name, maruaudenc->context, TRUE);
+ if (!other_caps) {
+ gst_caps_unref (allowed_caps);
+ gst_maru_avcodec_close (maruaudenc->context, maruaudenc->dev);
+ GST_DEBUG ("Unsupported codec - no caps found");
+ return FALSE;
+ }
+
+ icaps = gst_caps_intersect (allowed_caps, other_caps);
+ gst_caps_unref (allowed_caps);
+ gst_caps_unref (other_caps);
+ if (gst_caps_is_empty (icaps)) {
+ gst_caps_unref (icaps);
+ return FALSE;
+ }
+ icaps = gst_caps_truncate (icaps);
+
+  if (!gst_audio_encoder_set_output_format (GST_AUDIO_ENCODER (maruaudenc),
+ icaps)) {
+ gst_maru_avcodec_close (maruaudenc->context, maruaudenc->dev);
+ gst_caps_unref (icaps);
+
+ return FALSE;
+ }
+ gst_caps_unref (icaps);
+
+ frame_size = maruaudenc->context->audio.frame_size;
+ if (frame_size > 1) {
+ gst_audio_encoder_set_frame_samples_min (GST_AUDIO_ENCODER (maruaudenc),
+ frame_size);
+ gst_audio_encoder_set_frame_samples_max (GST_AUDIO_ENCODER (maruaudenc),
+ frame_size);
+ gst_audio_encoder_set_frame_max (GST_AUDIO_ENCODER (maruaudenc), 1);
+ } else {
+ gst_audio_encoder_set_frame_samples_min (GST_AUDIO_ENCODER (maruaudenc),
+ 0);
+ gst_audio_encoder_set_frame_samples_max (GST_AUDIO_ENCODER (maruaudenc),
+ 0);
+ gst_audio_encoder_set_frame_max (GST_AUDIO_ENCODER (maruaudenc), 0);
+ }
+
+ /* success! */
+ maruaudenc->opened = TRUE;
+
+ return TRUE;
+}
+
+static GstFlowReturn
+gst_maruaudenc_encode_audio (GstMaruAudEnc *maruaudenc, guint8 *audio_in,
+ guint in_size, gint *have_data)
+{
+ GstAudioEncoder *enc;
+ gint res;
+ GstFlowReturn ret;
+ guint8 * audio_out;
+
+ enc = GST_AUDIO_ENCODER (maruaudenc);
+
+ GST_LOG_OBJECT (maruaudenc, "encoding buffer %p size %u", audio_in, in_size);
+
+ audio_out = g_malloc0 (FF_MAX_AUDIO_FRAME_SIZE);
+ res = interface->encode_audio (maruaudenc->context, audio_out, 0,
+ audio_in, in_size, 0, maruaudenc->dev);
+
+ if (res < 0) {
+ GST_ERROR_OBJECT (enc, "Failed to encode buffer: %d", res);
+ g_free (audio_out); return GST_FLOW_OK; /* free unconsumed output buffer on encode failure */
+ }
+
+ GST_LOG_OBJECT (maruaudenc, "got output size %d", res);
+
+ if (*have_data) {
+ GstBuffer *outbuf;
+
+ GST_LOG_OBJECT (maruaudenc, "pushing size %d", res);
+
+ outbuf =
+ gst_buffer_new_wrapped_full (0, audio_out, res, 0, res,
+ audio_out, g_free);
+
+ ret = gst_audio_encoder_finish_frame (enc, outbuf, maruaudenc->context->audio.frame_size);
+ } else {
+ GST_LOG_OBJECT (maruaudenc, "no output produced");
+ g_free (audio_out); ret = GST_FLOW_OK; /* no frame produced: output buffer was never wrapped, free it */
+ }
+
+ return ret;
+}
+
+static void
+gst_maruaudenc_drain (GstMaruAudEnc *maruaudenc)
+{
+ gint have_data, try = 0;
+
+ GST_LOG_OBJECT (maruaudenc,
+ "codec has delay capabilities, calling until libav has drained everything");
+
+ do {
+ GstFlowReturn ret;
+
+ ret = gst_maruaudenc_encode_audio (maruaudenc, NULL, 0, &have_data);
+ if (ret != GST_FLOW_OK || have_data == 0)
+ break;
+ } while (try++ < 10);
+
+}
+
+static GstFlowReturn
+gst_maruaudenc_handle_frame (GstAudioEncoder *encoder, GstBuffer *inbuf)
+{
+ GstMaruAudEnc *maruaudenc;
+
+ GstFlowReturn ret;
+ guint8 *in_data;
+ guint size;
+ gint have_data;
+ GstMapInfo map;
+
+ maruaudenc = (GstMaruAudEnc *) encoder;
+
+ if (G_UNLIKELY (!maruaudenc->opened)) {
+ GST_ELEMENT_ERROR (maruaudenc, CORE, NEGOTIATION, (NULL),
+ ("not configured to input format before data start"));
+ gst_buffer_unref (inbuf);
+ return GST_FLOW_NOT_NEGOTIATED;
+ }
+
+ if (!inbuf) {
+ gst_maruaudenc_drain (maruaudenc);
+ return GST_FLOW_OK;
+ }
+
+ inbuf = gst_buffer_ref (inbuf);
+
+ GST_DEBUG_OBJECT (maruaudenc,
+ "Received time %" GST_TIME_FORMAT ", duration %" GST_TIME_FORMAT
+ ", size %" G_GSIZE_FORMAT, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (inbuf)),
+ GST_TIME_ARGS (GST_BUFFER_DURATION (inbuf)), gst_buffer_get_size (inbuf));
+
+ if (maruaudenc->needs_reorder) {
+ GstAudioInfo *info = gst_audio_encoder_get_audio_info (encoder);
+
+ inbuf = gst_buffer_make_writable (inbuf);
+ gst_audio_buffer_reorder_channels (inbuf, info->finfo->format,
+ info->channels, info->position, maruaudenc->layout);
+ }
+
+ gst_buffer_map (inbuf, &map, GST_MAP_READ);
+
+ in_data = map.data;
+ size = map.size;
+ ret = gst_maruaudenc_encode_audio (maruaudenc, in_data, size, &have_data);
+ gst_buffer_unmap (inbuf, &map);
+ gst_buffer_unref (inbuf);
+
+ if (ret != GST_FLOW_OK) {
+ GST_DEBUG_OBJECT (maruaudenc, "Failed to push buffer %d (%s)", ret,
+ gst_flow_get_name (ret));
+ return ret;
+ }
+
+ return GST_FLOW_OK;
+}
+
+
+static void
+gst_maruaudenc_set_property (GObject *object,
+ guint prop_id, const GValue *value, GParamSpec *pspec)
+{
+ GstMaruAudEnc *maruaudenc;
+
+ maruaudenc = (GstMaruAudEnc *) (object);
+
+ if (maruaudenc->opened) {
+ GST_WARNING_OBJECT (maruaudenc,
+ "Can't change properties one decoder is setup !");
+ return;
+ }
+
+ switch (prop_id) {
+ case PROP_BIT_RATE:
+ maruaudenc->bitrate = g_value_get_int (value);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+static void
+gst_maruaudenc_get_property (GObject *object,
+ guint prop_id, GValue *value, GParamSpec *pspec)
+{
+ GstMaruAudEnc *maruaudenc;
+
+ maruaudenc = (GstMaruAudEnc *) (object);
+
+ switch (prop_id) {
+ case PROP_BIT_RATE:
+ g_value_set_int (value, maruaudenc->bitrate);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+gboolean
+gst_maruaudenc_register (GstPlugin *plugin, GList *element)
+{
+ GTypeInfo typeinfo = {
+ sizeof (GstMaruAudEncClass),
+ (GBaseInitFunc) gst_maruaudenc_base_init,
+ NULL,
+ (GClassInitFunc) gst_maruaudenc_class_init,
+ NULL,
+ NULL,
+ sizeof (GstMaruAudEnc),
+ 0,
+ (GInstanceInitFunc) gst_maruaudenc_init,
+ };
+
+ GType type;
+ gchar *type_name;
+ gint rank = GST_RANK_PRIMARY * 2;
+ GList *elem = element;
+ CodecElement *codec = NULL;
+
+ if (!elem) {
+ return FALSE;
+ }
+
+ /* register element */
+ do {
+ codec = (CodecElement *)(elem->data);
+ if (!codec) {
+ return FALSE;
+ }
+
+ if ((codec->media_type != AVMEDIA_TYPE_AUDIO) || (codec->codec_type != CODEC_TYPE_ENCODE)) {
+ continue;
+ }
+
+ type_name = g_strdup_printf ("maru_%senc", codec->name);
+ type = g_type_from_name (type_name);
+ if (!type) {
+ type = g_type_register_static (GST_TYPE_AUDIO_ENCODER, type_name, &typeinfo, 0);
+ g_type_set_qdata (type, GST_MARUENC_PARAMS_QDATA, (gpointer) codec);
+
+ {
+ static const GInterfaceInfo preset_info = {
+ NULL,
+ NULL,
+ NULL
+ };
+ g_type_add_interface_static (type, GST_TYPE_PRESET, &preset_info);
+ }
+ }
+
+ if (!gst_element_register (plugin, type_name, rank, type)) {
+ g_free (type_name);
+ return FALSE;
+ }
+
+ g_free (type_name);
+ } while ((elem = elem->next));
+
+ GST_LOG ("Finished registering encoders");
+
+ return TRUE;
+}
+++ /dev/null
-/* GStreamer
- * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
- * Copyright (C) 2013 Samsung Electronics Co., Ltd.
- *
- * This library is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Library General Public
- * License as published by the Free Software Foundation; either
- * version 2 of the License, or (at your option) any later version.
- *
- * This library is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Library General Public License for more details.
- *
- * You should have received a copy of the GNU Library General Public
- * License along with this library; if not, write to the
- * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
- * Boston, MA 02111-1307, USA.
- */
-
-/* Modifications by Samsung Electronics Co., Ltd.
- * 1. Provide a hardware buffer in order to avoid additional memcpy operations.
- */
-
-#include "gstmarudevice.h"
-#include "gstmaruutils.h"
-#include "gstmaruinterface.h"
-
-#define GST_MARUDEC_PARAMS_QDATA g_quark_from_static_string("marudec-params")
-
-/* indicate dts, pts, offset in the stream */
-#define GST_TS_INFO_NONE &ts_info_none
-static const GstTSInfo ts_info_none = { -1, -1, -1, -1 };
-
-typedef struct _GstMaruDecClass
-{
- GstElementClass parent_class;
-
- CodecElement *codec;
- GstPadTemplate *sinktempl;
- GstPadTemplate *srctempl;
-} GstMaruDecClass;
-
-
-static GstElementClass *parent_class = NULL;
-
-static void gst_marudec_base_init (GstMaruDecClass *klass);
-static void gst_marudec_class_init (GstMaruDecClass *klass);
-static void gst_marudec_init (GstMaruDec *marudec);
-static void gst_marudec_finalize (GObject *object);
-
-static gboolean gst_marudec_setcaps (GstPad *pad, GstCaps *caps);
-
-// sinkpad
-static gboolean gst_marudec_sink_event (GstPad *pad, GstEvent *event);
-static GstFlowReturn gst_marudec_chain (GstPad *pad, GstBuffer *buffer);
-
-// srcpad
-static gboolean gst_marudec_src_event (GstPad *pad, GstEvent *event);
-static GstStateChangeReturn gst_marudec_change_state (GstElement *element,
- GstStateChange transition);
-
-static gboolean gst_marudec_negotiate (GstMaruDec *dec, gboolean force);
-
-static gint gst_marudec_frame (GstMaruDec *marudec, guint8 *data,
- guint size, gint *got_data,
- const GstTSInfo *dec_info, gint64 in_offset, GstFlowReturn *ret);
-
-static gboolean gst_marudec_open (GstMaruDec *marudec);
-static void gst_marudec_close (GstMaruDec *marudec);
-
-// for profile
-static GTimer* profile_decode_timer = NULL;
-static gdouble elapsed_decode_time = 0;
-static int decoded_frame_cnt = 0;
-static int last_frame_cnt = 0;
-static GMutex profile_mutex;
-static int profile_init = 0;
-
-static gboolean
-maru_profile_cb (gpointer user_data)
-{
- int decoding_fps = 0;
- gdouble decoding_time = 0;
-
- g_mutex_lock (&profile_mutex);
- if (decoded_frame_cnt < 0) {
- decoded_frame_cnt = 0;
- last_frame_cnt = 0;
- elapsed_decode_time = 0;
- g_mutex_unlock (&profile_mutex);
- return FALSE;
- }
-
- decoding_fps = decoded_frame_cnt - last_frame_cnt;
- last_frame_cnt = decoded_frame_cnt;
-
- decoding_time = elapsed_decode_time;
- elapsed_decode_time = 0;
- g_mutex_unlock (&profile_mutex);
-
- GST_DEBUG ("decoding fps=%d, latency=%f\n", decoding_fps, decoding_time/decoding_fps);
- return TRUE;
-}
-
-static void init_codec_profile(void)
-{
- if (!profile_init) {
- profile_init = 1;
- profile_decode_timer = g_timer_new();
- }
-}
-
-static void reset_codec_profile(void)
-{
- g_mutex_lock (&profile_mutex);
- decoded_frame_cnt = -1;
- g_mutex_lock (&profile_mutex);
-}
-
-static void begin_video_decode_profile(void)
-{
- g_timer_start(profile_decode_timer);
-}
-
-static void end_video_decode_profile(void)
-{
- g_timer_stop(profile_decode_timer);
-
- g_mutex_lock (&profile_mutex);
- if (decoded_frame_cnt == 0) {
- g_timeout_add_seconds(1, maru_profile_cb, NULL);
- }
-
- elapsed_decode_time += g_timer_elapsed(profile_decode_timer, NULL);
- decoded_frame_cnt++;
- g_mutex_unlock (&profile_mutex);
-}
-
-#define INIT_CODEC_PROFILE(fd) \
- if (interface->get_profile_status(fd)) { \
- init_codec_profile(); \
- }
-#define RESET_CODEC_PROFILE(s) \
- if (profile_init) { \
- reset_codec_profile(); \
- }
-#define BEGIN_VIDEO_DECODE_PROFILE() \
- if (profile_init) { \
- begin_video_decode_profile(); \
- }
-#define END_VIDEO_DECODE_PROFILE() \
- if (profile_init) { \
- end_video_decode_profile(); \
- }
-
-
-static const GstTSInfo *
-gst_ts_info_store (GstMaruDec *dec, GstClockTime timestamp,
- GstClockTime duration, gint64 offset)
-{
- gint idx = dec->ts_idx;
- dec->ts_info[idx].idx = idx;
- dec->ts_info[idx].timestamp = timestamp;
- dec->ts_info[idx].duration = duration;
- dec->ts_info[idx].offset = offset;
- dec->ts_idx = (idx + 1) & MAX_TS_MASK;
-
- return &dec->ts_info[idx];
-}
-
-static const GstTSInfo *
-gst_ts_info_get (GstMaruDec *dec, gint idx)
-{
- if (G_UNLIKELY (idx < 0 || idx > MAX_TS_MASK))
- return GST_TS_INFO_NONE;
-
- return &dec->ts_info[idx];
-}
-
-static void
-gst_marudec_reset_ts (GstMaruDec *marudec)
-{
- marudec->next_out = GST_CLOCK_TIME_NONE;
-}
-
-static void
-gst_marudec_update_qos (GstMaruDec *marudec, gdouble proportion,
- GstClockTime timestamp)
-{
- GST_LOG_OBJECT (marudec, "update QOS: %f, %" GST_TIME_FORMAT,
- proportion, GST_TIME_ARGS (timestamp));
-
- GST_OBJECT_LOCK (marudec);
- marudec->proportion = proportion;
- marudec->earliest_time = timestamp;
- GST_OBJECT_UNLOCK (marudec);
-}
-
-static void
-gst_marudec_reset_qos (GstMaruDec *marudec)
-{
- gst_marudec_update_qos (marudec, 0.5, GST_CLOCK_TIME_NONE);
- marudec->processed = 0;
- marudec->dropped = 0;
-}
-
-static gboolean
-gst_marudec_do_qos (GstMaruDec *marudec, GstClockTime timestamp,
- gboolean *mode_switch)
-{
- GstClockTimeDiff diff;
- gdouble proportion;
- GstClockTime qostime, earliest_time;
- gboolean res = TRUE;
-
- *mode_switch = FALSE;
-
- if (G_UNLIKELY (!GST_CLOCK_TIME_IS_VALID (timestamp))) {
- marudec->processed++;
- return TRUE;
- }
-
- proportion = marudec->proportion;
- earliest_time = marudec->earliest_time;
-
- qostime = gst_segment_to_running_time (&marudec->segment, GST_FORMAT_TIME,
- timestamp);
-
- if (G_UNLIKELY (!GST_CLOCK_TIME_IS_VALID (qostime))) {
- marudec->processed++;
- return TRUE;
- }
-
- diff = GST_CLOCK_DIFF (qostime, earliest_time);
-
- if (proportion < 0.4 && diff < 0 ){
- marudec->processed++;
- return TRUE;
- } else {
- if (diff >= 0) {
- if (0) {
- res = FALSE;
- }
-
- GstClockTime stream_time, jitter;
- GstMessage *qos_msg;
-
- marudec->dropped++;
- stream_time =
- gst_segment_to_stream_time (&marudec->segment, GST_FORMAT_TIME,
- timestamp);
- jitter = GST_CLOCK_DIFF (qostime, earliest_time);
- qos_msg =
- gst_message_new_qos (GST_OBJECT_CAST (marudec), FALSE, qostime,
- stream_time, timestamp, GST_CLOCK_TIME_NONE);
- gst_message_set_qos_values (qos_msg, jitter, proportion, 1000000);
- gst_message_set_qos_stats (qos_msg, GST_FORMAT_BUFFERS,
- marudec->processed, marudec->dropped);
- gst_element_post_message (GST_ELEMENT_CAST (marudec), qos_msg);
-
- return res;
- }
- }
-
- marudec->processed++;
- return TRUE;
-}
-
-static void
-clear_queued (GstMaruDec *marudec)
-{
- g_list_foreach (marudec->queued, (GFunc) gst_mini_object_unref, NULL);
- g_list_free (marudec->queued);
- marudec->queued = NULL;
-}
-
-static GstFlowReturn
-flush_queued (GstMaruDec *marudec)
-{
- GstFlowReturn res = GST_FLOW_OK;
-
- GST_DEBUG_OBJECT (marudec, "flush queued");
-
- while (marudec->queued) {
- GstBuffer *buf = GST_BUFFER_CAST (marudec->queued->data);
-
- GST_LOG_OBJECT (marudec, "pushing buffer %p, offset %"
- G_GUINT64_FORMAT ", timestamp %"
- GST_TIME_FORMAT ", duration %" GST_TIME_FORMAT, buf,
- GST_BUFFER_OFFSET (buf),
- GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
- GST_TIME_ARGS (GST_BUFFER_DURATION (buf)));
-
- res = gst_pad_push (marudec->srcpad, buf);
-
- marudec->queued =
- g_list_delete_link (marudec->queued, marudec->queued);
- }
-
- return res;
-}
-
-static void
-gst_marudec_drain (GstMaruDec *marudec)
-{
- GST_DEBUG_OBJECT (marudec, "drain frame");
-
- {
- gint have_data, len, try = 0;
-
- do {
- GstFlowReturn ret;
-
- len =
- gst_marudec_frame (marudec, NULL, 0, &have_data, &ts_info_none, 0, &ret);
-
- if (len < 0 || have_data == 0) {
- break;
- }
- } while (try++ < 10);
- }
-
- if (marudec->segment.rate < 0.0) {
- GST_DEBUG_OBJECT (marudec, "reverse playback");
- flush_queued (marudec);
- }
-}
-
-/*
- * Implementation
- */
-static void
-gst_marudec_base_init (GstMaruDecClass *klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
- GstCaps *sinkcaps = NULL, *srccaps = NULL;
- GstPadTemplate *sinktempl, *srctempl;
- CodecElement *codec;
- gchar *longname, *classification, *description;
-
- codec =
- (CodecElement *)g_type_get_qdata (G_OBJECT_CLASS_TYPE (klass),
- GST_MARUDEC_PARAMS_QDATA);
-
- longname = g_strdup_printf ("%s Decoder", codec->longname);
- classification = g_strdup_printf ("Codec/Decoder/%s",
- (codec->media_type == AVMEDIA_TYPE_VIDEO) ?
- "Video" : "Audio");
- description = g_strdup_printf("%s Decoder", codec->name);
-
- gst_element_class_set_details_simple (element_class,
- longname,
- classification,
- description,
- "Erik Walthinsen <omega@cse.ogi.edu>");
-
- g_free (longname);
- g_free (classification);
- g_free (description);
-
- sinkcaps = gst_maru_codecname_to_caps (codec->name, NULL, FALSE);
- if (!sinkcaps) {
- sinkcaps = gst_caps_from_string ("unknown/unknown");
- }
-
- switch (codec->media_type) {
- case AVMEDIA_TYPE_VIDEO:
- srccaps = gst_caps_from_string ("video/x-raw-rgb; video/x-raw-yuv");
- break;
- case AVMEDIA_TYPE_AUDIO:
- srccaps = gst_maru_codectype_to_audio_caps (NULL, codec->name, FALSE, codec);
- break;
- default:
- GST_LOG("unknown media type");
- break;
- }
-
- if (!srccaps) {
- srccaps = gst_caps_from_string ("unknown/unknown");
- }
-
- sinktempl = gst_pad_template_new ("sink", GST_PAD_SINK,
- GST_PAD_ALWAYS, sinkcaps);
- srctempl = gst_pad_template_new ("src", GST_PAD_SRC,
- GST_PAD_ALWAYS, srccaps);
-
- gst_element_class_add_pad_template (element_class, srctempl);
- gst_element_class_add_pad_template (element_class, sinktempl);
-
- klass->codec = codec;
- klass->sinktempl = sinktempl;
- klass->srctempl = srctempl;
-}
-
-static void
-gst_marudec_class_init (GstMaruDecClass *klass)
-{
- GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
- GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
-
- parent_class = g_type_class_peek_parent (klass);
-
-#if 0
- // use these function when defines new properties.
-
- gobject_class->set_property = gst_marudec_set_property
- gobject_class->get_property = gst_marudec_get_property
-#endif
-
- gobject_class->finalize = gst_marudec_finalize;
- gstelement_class->change_state = gst_marudec_change_state;
-}
-
-static void
-gst_marudec_init (GstMaruDec *marudec)
-{
- GstMaruDecClass *oclass;
-
- oclass = (GstMaruDecClass*) (G_OBJECT_GET_CLASS(marudec));
-
- marudec->sinkpad = gst_pad_new_from_template (oclass->sinktempl, "sink");
- gst_pad_set_setcaps_function (marudec->sinkpad,
- GST_DEBUG_FUNCPTR(gst_marudec_setcaps));
- gst_pad_set_event_function (marudec->sinkpad,
- GST_DEBUG_FUNCPTR(gst_marudec_sink_event));
- gst_pad_set_chain_function (marudec->sinkpad,
- GST_DEBUG_FUNCPTR(gst_marudec_chain));
-
- marudec->srcpad = gst_pad_new_from_template (oclass->srctempl, "src") ;
- gst_pad_use_fixed_caps (marudec->srcpad);
- gst_pad_set_event_function (marudec->srcpad,
- GST_DEBUG_FUNCPTR(gst_marudec_src_event));
-
- gst_element_add_pad (GST_ELEMENT(marudec), marudec->sinkpad);
- gst_element_add_pad (GST_ELEMENT(marudec), marudec->srcpad);
-
- marudec->context = g_malloc0 (sizeof(CodecContext));
- marudec->context->video.pix_fmt = PIX_FMT_NONE;
- marudec->context->audio.sample_fmt = SAMPLE_FMT_NONE;
-
- marudec->opened = FALSE;
- marudec->format.video.par_n = -1;
- marudec->format.video.fps_n = -1;
- marudec->format.video.old_fps_n = -1;
-
- marudec->queued = NULL;
- gst_segment_init (&marudec->segment, GST_FORMAT_TIME);
-}
-
-static void
-gst_marudec_finalize (GObject *object)
-{
- GstMaruDec *marudec = (GstMaruDec *) object;
-
- GST_DEBUG_OBJECT (marudec, "finalize object and release context");
- g_free (marudec->context);
- marudec->context = NULL;
-
- G_OBJECT_CLASS (parent_class)->finalize (object);
-}
-
-static gboolean
-gst_marudec_src_event (GstPad *pad, GstEvent *event)
-{
- GstMaruDec *marudec;
- gboolean res;
-
- marudec = (GstMaruDec *) gst_pad_get_parent (pad);
-
- switch (GST_EVENT_TYPE (event)) {
- /* Quality Of Service (QOS) event contains a report
- about the current real-time performance of the stream.*/
- case GST_EVENT_QOS:
- {
- gdouble proportion;
- GstClockTimeDiff diff;
- GstClockTime timestamp;
-
- gst_event_parse_qos (event, &proportion, &diff, ×tamp);
-
- /* update our QoS values */
- gst_marudec_update_qos (marudec, proportion, timestamp + diff);
- break;
- }
- default:
- break;
- }
-
- /* forward upstream */
- res = gst_pad_push_event (marudec->sinkpad, event);
-
- gst_object_unref (marudec);
-
- return res;
-}
-
-static gboolean
-gst_marudec_sink_event (GstPad *pad, GstEvent *event)
-{
- GstMaruDec *marudec;
- gboolean ret = FALSE;
-
- marudec = (GstMaruDec *) gst_pad_get_parent (pad);
- if (!marudec) {
- return FALSE;
- }
-
- GST_DEBUG_OBJECT (marudec, "Handling %s event",
- GST_EVENT_TYPE_NAME (event));
-
- switch (GST_EVENT_TYPE (event)) {
- case GST_EVENT_EOS:
- gst_marudec_drain (marudec);
- break;
- case GST_EVENT_FLUSH_STOP:
- {
- if (marudec->opened) {
- GST_DEBUG_OBJECT (marudec, "flush decoded buffers");
- interface->flush_buffers (marudec->context, marudec->dev);
- }
-
- gst_marudec_reset_ts (marudec);
- gst_marudec_reset_qos (marudec);
- gst_segment_init (&marudec->segment, GST_FORMAT_TIME);
- clear_queued (marudec);
- }
- break;
- case GST_EVENT_NEWSEGMENT:
- {
- gboolean update;
- GstFormat format;
- gint64 start, stop, time;
- gdouble rate, arate;
-
- gst_event_parse_new_segment_full (event, &update, &rate, &arate, &format,
- &start, &stop, &time);
-
- switch (format) {
- case GST_FORMAT_TIME:
- break;
- case GST_FORMAT_BYTES:
- {
- gint bit_rate;
- bit_rate = marudec->context->bit_rate;
-
- if (!bit_rate) {
- GST_WARNING_OBJECT (marudec, "no bitrate to convert BYTES to TIME");
- gst_event_unref (event);
- gst_object_unref (marudec);
- return ret;
- }
-
- GST_DEBUG_OBJECT (marudec, "bitrate: %d", bit_rate);
-
- if (start != -1) {
- start = gst_util_uint64_scale_int (start, GST_SECOND, bit_rate);
- }
- if (stop != -1) {
- stop = gst_util_uint64_scale_int (stop, GST_SECOND, bit_rate);
- }
- if (time != -1) {
- time = gst_util_uint64_scale_int (time, GST_SECOND, bit_rate);
- }
-
- gst_event_unref (event);
-
- format = GST_FORMAT_TIME;
-
- stop = -1;
- event = gst_event_new_new_segment (update, rate, format,
- start, stop, time);
- break;
- }
- default:
- GST_WARNING_OBJECT (marudec, "unknown format received in NEWSEGMENT");
- gst_event_unref (event);
- gst_object_unref (marudec);
- return ret;
- }
-
- if (marudec->opened) {
- gst_marudec_drain (marudec);
- }
-
- GST_DEBUG_OBJECT (marudec,
- "NEWSEGMENT in time start %" GST_TIME_FORMAT " -- stop %"
- GST_TIME_FORMAT, GST_TIME_ARGS (start), GST_TIME_ARGS (stop));
-
- gst_segment_set_newsegment_full (&marudec->segment, update,
- rate, arate, format, start, stop, time);
- break;
- }
- default:
- break;
- }
-
- ret = gst_pad_push_event (marudec->srcpad, event);
-
- gst_object_unref (marudec);
-
- return ret;
-}
-
-
-
-static gboolean
-gst_marudec_setcaps (GstPad *pad, GstCaps *caps)
-{
- GstMaruDec *marudec;
- GstMaruDecClass *oclass;
- GstStructure *structure;
- const GValue *par;
- const GValue *fps;
- gboolean ret = TRUE;
-
- GST_DEBUG_OBJECT (pad, "setcaps called.");
-
- marudec = (GstMaruDec *) (gst_pad_get_parent (pad));
- if (!marudec) {
- return FALSE;
- }
-
- oclass = (GstMaruDecClass *) (G_OBJECT_GET_CLASS (marudec));
-
- GST_OBJECT_LOCK (marudec);
-
- if (marudec->opened) {
- GST_OBJECT_UNLOCK (marudec);
- gst_marudec_drain (marudec);
- GST_OBJECT_LOCK (marudec);
- }
- gst_marudec_close (marudec);
-
- GST_LOG_OBJECT (marudec, "size %dx%d", marudec->context->video.width,
- marudec->context->video.height);
-
- if (!strcmp(oclass->codec->name, "wmv3") ||
- !strcmp(oclass->codec->name, "vc1")) {
- gst_maru_caps_to_codecname (caps, oclass->codec->name, NULL);
- }
-
- gst_maru_caps_with_codecname (oclass->codec->name, oclass->codec->media_type,
- caps, marudec->context);
-
- GST_LOG_OBJECT (marudec, "size after %dx%d", marudec->context->video.width,
- marudec->context->video.height);
-
- if (!marudec->context->video.fps_d || !marudec->context->video.fps_n) {
- GST_DEBUG_OBJECT (marudec, "forcing 25/1 framerate");
- marudec->context->video.fps_n = 1;
- marudec->context->video.fps_d = 25;
- }
-
- structure = gst_caps_get_structure (caps, 0);
-
- par = gst_structure_get_value (structure, "pixel-aspect-ratio");
- if (par) {
- GST_DEBUG_OBJECT (marudec, "sink caps have pixel-aspect-ratio of %d:%d",
- gst_value_get_fraction_numerator (par),
- gst_value_get_fraction_denominator (par));
- }
-
- fps = gst_structure_get_value (structure, "framerate");
- if (fps != NULL && GST_VALUE_HOLDS_FRACTION (fps)) {
- marudec->format.video.fps_n = gst_value_get_fraction_numerator (fps);
- marudec->format.video.fps_d = gst_value_get_fraction_denominator (fps);
- GST_DEBUG_OBJECT (marudec, "Using framerate %d/%d from incoming",
- marudec->format.video.fps_n, marudec->format.video.fps_d);
- } else {
- marudec->format.video.fps_n = -1;
- GST_DEBUG_OBJECT (marudec, "Using framerate from codec");
- }
-
- if (!gst_marudec_open (marudec)) {
- GST_DEBUG_OBJECT (marudec, "Failed to open");
- GST_OBJECT_UNLOCK (marudec);
- gst_object_unref (marudec);
-
- return FALSE;
- }
-
- gst_structure_get_int (structure, "width",
- &marudec->format.video.clip_width);
- gst_structure_get_int (structure, "height",
- &marudec->format.video.clip_height);
-
- GST_DEBUG_OBJECT (pad, "clipping to %dx%d",
- marudec->format.video.clip_width, marudec->format.video.clip_height);
-
- GST_OBJECT_UNLOCK (marudec);
- gst_object_unref (marudec);
-
- return ret;
-}
-
-static gboolean
-gst_marudec_open (GstMaruDec *marudec)
-{
- GstMaruDecClass *oclass;
-
- oclass = (GstMaruDecClass *) (G_OBJECT_GET_CLASS (marudec));
-
- marudec->dev = g_try_malloc0 (sizeof(CodecDevice));
- if (!marudec->dev) {
- GST_ERROR_OBJECT (marudec, "failed to allocate memory for CodecDevice");
- return FALSE;
- }
-
- if (gst_maru_avcodec_open (marudec->context, oclass->codec, marudec->dev) < 0) {
- g_free(marudec->dev);
- marudec->dev = NULL;
- GST_ERROR_OBJECT (marudec,
- "maru_%sdec: Failed to open codec", oclass->codec->name);
- return FALSE;
- }
-
- marudec->opened = TRUE;
- GST_LOG_OBJECT (marudec, "Opened codec %s", oclass->codec->name);
-
- switch (oclass->codec->media_type) {
- case AVMEDIA_TYPE_VIDEO:
- marudec->format.video.width = 0;
- marudec->format.video.height = 0;
- marudec->format.video.clip_width = -1;
- marudec->format.video.clip_height = -1;
- marudec->format.video.pix_fmt = PIX_FMT_NB;
- marudec->format.video.interlaced = FALSE;
- break;
- case AVMEDIA_TYPE_AUDIO:
- marudec->format.audio.samplerate = 0;
- marudec->format.audio.channels = 0;
- marudec->format.audio.depth = 0;
- break;
- default:
- break;
- }
-
- gst_marudec_reset_ts (marudec);
-
- marudec->proportion = 0.0;
- marudec->earliest_time = -1;
-
- // initialize profile resource
- INIT_CODEC_PROFILE(marudec->dev->fd);
-
- return TRUE;
-}
-
-static void
-gst_marudec_close (GstMaruDec *marudec)
-{
- if (!marudec->opened) {
- GST_DEBUG_OBJECT (marudec, "not opened yet");
- return;
- }
-
- if (marudec->context) {
- g_free(marudec->context->codecdata);
- marudec->context->codecdata = NULL;
- }
-
- if (!marudec->dev) {
- return;
- }
-
- gst_maru_avcodec_close (marudec->context, marudec->dev);
- marudec->opened = FALSE;
-
- if (marudec->dev) {
- g_free(marudec->dev);
- marudec->dev = NULL;
- }
-
- // reset profile resource
- RESET_CODEC_PROFILE();
-}
-
-
-static gboolean
-gst_marudec_negotiate (GstMaruDec *marudec, gboolean force)
-{
- GstMaruDecClass *oclass;
- GstCaps *caps;
-
- oclass = (GstMaruDecClass *) (G_OBJECT_GET_CLASS (marudec));
-
- switch (oclass->codec->media_type) {
- case AVMEDIA_TYPE_VIDEO:
- if (!force && marudec->format.video.width == marudec->context->video.width
- && marudec->format.video.height == marudec->context->video.height
- && marudec->format.video.fps_n == marudec->format.video.old_fps_n
- && marudec->format.video.fps_d == marudec->format.video.old_fps_d
- && marudec->format.video.pix_fmt == marudec->context->video.pix_fmt
- && marudec->format.video.par_n == marudec->context->video.par_n
- && marudec->format.video.par_d == marudec->context->video.par_d) {
- return TRUE;
- }
- marudec->format.video.width = marudec->context->video.width;
- marudec->format.video.height = marudec->context->video.height;
- marudec->format.video.old_fps_n = marudec->format.video.fps_n;
- marudec->format.video.old_fps_d = marudec->format.video.fps_d;
- marudec->format.video.pix_fmt = marudec->context->video.pix_fmt;
- marudec->format.video.par_n = marudec->context->video.par_n;
- marudec->format.video.par_d = marudec->context->video.par_d;
- break;
- case AVMEDIA_TYPE_AUDIO:
- {
- gint depth = gst_maru_smpfmt_depth (marudec->context->audio.sample_fmt);
- if (!force && marudec->format.audio.samplerate ==
- marudec->context->audio.sample_rate &&
- marudec->format.audio.channels == marudec->context->audio.channels &&
- marudec->format.audio.depth == depth) {
- return TRUE;
- }
- marudec->format.audio.samplerate = marudec->context->audio.sample_rate;
- marudec->format.audio.channels = marudec->context->audio.channels;
- marudec->format.audio.depth = depth;
- }
- break;
- default:
- break;
- }
-
- caps =
- gst_maru_codectype_to_caps (oclass->codec->media_type, marudec->context,
- oclass->codec->name, FALSE);
-
- if (caps == NULL) {
- GST_ELEMENT_ERROR (marudec, CORE, NEGOTIATION,
- ("Could not find GStreamer caps mapping for codec '%s'.",
- oclass->codec->name), (NULL));
- return FALSE;
- }
-
- switch (oclass->codec->media_type) {
- case AVMEDIA_TYPE_VIDEO:
- {
- gint width, height;
- gboolean interlaced;
-
- width = marudec->format.video.clip_width;
- height = marudec->format.video.clip_height;
- interlaced = marudec->format.video.interlaced;
-
- if (width != -1 && height != -1) {
- if (width < marudec->context->video.width) {
- gst_caps_set_simple (caps, "width", G_TYPE_INT, width, NULL);
- }
- if (height < marudec->context->video.height) {
- gst_caps_set_simple (caps, "height", G_TYPE_INT, height, NULL);
- }
- gst_caps_set_simple (caps, "interlaced", G_TYPE_BOOLEAN, interlaced,
- NULL);
-
- if (marudec->format.video.fps_n != -1) {
- gst_caps_set_simple (caps, "framerate",
- GST_TYPE_FRACTION, marudec->format.video.fps_n,
- marudec->format.video.fps_d, NULL);
- }
- }
- }
- break;
- case AVMEDIA_TYPE_AUDIO:
- break;
- default:
- break;
- }
-
- if (!gst_pad_set_caps (marudec->srcpad, caps)) {
- GST_ELEMENT_ERROR (marudec, CORE, NEGOTIATION, (NULL),
- ("Could not set caps for decoder (%s), not fixed?",
- oclass->codec->name));
- gst_caps_unref (caps);
- return FALSE;
- }
-
- gst_caps_unref (caps);
-
- return TRUE;
-}
-
-GstBuffer *
-new_aligned_buffer (gint size, GstCaps *caps)
-{
- GstBuffer *buf;
-
- buf = gst_buffer_new ();
- GST_BUFFER_DATA (buf) = GST_BUFFER_MALLOCDATA (buf) = g_malloc0 (size);
- GST_BUFFER_SIZE (buf) = size;
- GST_BUFFER_FREE_FUNC (buf) = g_free;
-
- if (caps) {
- gst_buffer_set_caps (buf, caps);
- }
-
- return buf;
-}
-
-static GstFlowReturn
-get_output_buffer (GstMaruDec *marudec, GstBuffer **outbuf)
-{
- gint pict_size;
- GstFlowReturn ret;
-
- ret = GST_FLOW_OK;
-
- *outbuf = NULL;
-
- if (G_UNLIKELY (!gst_marudec_negotiate (marudec, FALSE))) {
- GST_DEBUG_OBJECT (marudec, "negotiate failed");
- return GST_FLOW_NOT_NEGOTIATED;
- }
-
- pict_size = gst_maru_avpicture_size (marudec->context->video.pix_fmt,
- marudec->context->video.width, marudec->context->video.height);
- if (pict_size < 0) {
- GST_DEBUG_OBJECT (marudec, "size of a picture is negative. "
- "pixel format: %d, width: %d, height: %d",
- marudec->context->video.pix_fmt, marudec->context->video.width,
- marudec->context->video.height);
- return GST_FLOW_ERROR;
- }
-
- GST_DEBUG_OBJECT (marudec, "outbuf size of decoded video %d", pict_size);
-
- gst_pad_set_element_private(GST_PAD_PEER(marudec->srcpad), (gpointer)marudec);
-
- /* GstPadBufferAllocFunction is mostly overridden by elements that can
- * provide a hardware buffer in order to avoid additional memcpy operations.
- */
- gst_pad_set_bufferalloc_function(
- GST_PAD_PEER(marudec->srcpad),
- (GstPadBufferAllocFunction) interface->buffer_alloc_and_copy);
-
- ret = gst_pad_alloc_buffer_and_set_caps (marudec->srcpad,
- GST_BUFFER_OFFSET_NONE, pict_size,
- GST_PAD_CAPS (marudec->srcpad), outbuf);
- if (G_UNLIKELY (ret != GST_FLOW_OK)) {
- GST_DEBUG_OBJECT (marudec, "pad_alloc failed %d (%s)", ret,
- gst_flow_get_name (ret));
- return ret;
- }
-
- return ret;
-}
-
-static gboolean
-clip_video_buffer (GstMaruDec *dec, GstBuffer *buf,
- GstClockTime in_ts, GstClockTime in_dur)
-{
- gboolean res = TRUE;
-
- return res;
-}
-
-static gboolean
-clip_audio_buffer (GstMaruDec *dec, GstBuffer *buf,
- GstClockTime in_ts, GstClockTime in_dur)
-{
- GstClockTime stop;
- gint64 diff, cstart, cstop;
- gboolean res = TRUE;
-
- if (G_UNLIKELY (dec->segment.format != GST_FORMAT_TIME)) {
- GST_LOG_OBJECT (dec, "%sdropping", (res ? "not " : ""));
- return res;
- }
-
- // in_ts: in_timestamp. check a start time.
- if (G_UNLIKELY (!GST_CLOCK_TIME_IS_VALID (in_ts))) {
- GST_LOG_OBJECT (dec, "%sdropping", (res ? "not " : ""));
- return res;
- }
-
- stop =
- GST_CLOCK_TIME_IS_VALID (in_dur) ? (in_ts + in_dur) : GST_CLOCK_TIME_NONE;
-
- res = gst_segment_clip (&dec->segment, GST_FORMAT_TIME, in_ts,
- stop, &cstart, &cstop);
- if (G_UNLIKELY (!res)) {
- GST_LOG_OBJECT (dec, "out of segment");
- GST_LOG_OBJECT (dec, "%sdropping", (res ? "not " : ""));
- return res;
- }
-
- if (G_UNLIKELY ((diff = cstart - in_ts) > 0)) {
- diff =
- gst_util_uint64_scale_int (diff, dec->format.audio.samplerate, GST_SECOND) *
- (dec->format.audio.depth * dec->format.audio.channels);
-
- GST_DEBUG_OBJECT (dec, "clipping start to %" GST_TIME_FORMAT " %"
- G_GINT64_FORMAT " bytes", GST_TIME_ARGS (cstart), diff);
-
- GST_BUFFER_SIZE (buf) -= diff;
- GST_BUFFER_DATA (buf) += diff;
-
- }
-
- if (G_UNLIKELY ((diff = stop - cstop) > 0)) {
- diff =
- gst_util_uint64_scale_int (diff, dec->format.audio.samplerate, GST_SECOND) *
- (dec->format.audio.depth * dec->format.audio.channels);
-
- GST_DEBUG_OBJECT (dec, "clipping stop to %" GST_TIME_FORMAT " %"
- G_GINT64_FORMAT " bytes", GST_TIME_ARGS (cstop), diff);
-
- GST_BUFFER_SIZE (buf) -= diff;
- }
-
- GST_BUFFER_TIMESTAMP (buf) = cstart;
- GST_BUFFER_DURATION (buf) = cstop - cstart;
-
- GST_LOG_OBJECT (dec, "%sdropping", (res ? "not " : ""));
- return res;
-}
-
-static gint
-gst_marudec_video_frame (GstMaruDec *marudec, guint8 *data, guint size,
- const GstTSInfo *dec_info, gint64 in_offset, GstBuffer **outbuf,
- GstFlowReturn *ret)
-{
- gint len = -1;
- gboolean mode_switch;
- gboolean decode;
- GstClockTime out_timestamp, out_duration, out_pts;
- gint64 out_offset;
- const GstTSInfo *out_info;
- int have_data;
-
- decode = gst_marudec_do_qos (marudec, dec_info->timestamp, &mode_switch);
-
- GST_DEBUG_OBJECT (marudec, "decode video: input buffer size %d", size);
-
- // begin video decode profile
- BEGIN_VIDEO_DECODE_PROFILE();
-
- len = interface->decode_video (marudec, data, size,
- dec_info->idx, in_offset, outbuf, &have_data);
- if (len < 0 || !have_data) {
- return len;
- }
-
- // end video decode profile
- END_VIDEO_DECODE_PROFILE();
-
- *ret = get_output_buffer (marudec, outbuf);
- if (G_UNLIKELY (*ret != GST_FLOW_OK)) {
- GST_DEBUG_OBJECT (marudec, "no output buffer");
- len = -1;
- GST_DEBUG_OBJECT (marudec, "return flow %d, out %p, len %d",
- *ret, *outbuf, len);
- return len;
- }
-
- out_info = gst_ts_info_get (marudec, dec_info->idx);
- out_pts = out_info->timestamp;
- out_duration = out_info->duration;
- out_offset = out_info->offset;
-
- /* Timestamps */
- out_timestamp = -1;
- if (out_pts != -1) {
- out_timestamp = (GstClockTime) out_pts;
- GST_LOG_OBJECT (marudec, "using timestamp %" GST_TIME_FORMAT
- " returned by ffmpeg", GST_TIME_ARGS (out_timestamp));
- }
-
- if (!GST_CLOCK_TIME_IS_VALID (out_timestamp) && marudec->next_out != -1) {
- out_timestamp = marudec->next_out;
- GST_LOG_OBJECT (marudec, "using next timestamp %" GST_TIME_FORMAT,
- GST_TIME_ARGS (out_timestamp));
- }
-
- if (!GST_CLOCK_TIME_IS_VALID (out_timestamp)) {
- out_timestamp = dec_info->timestamp;
- GST_LOG_OBJECT (marudec, "using in timestamp %" GST_TIME_FORMAT,
- GST_TIME_ARGS (out_timestamp));
- }
- GST_BUFFER_TIMESTAMP (*outbuf) = out_timestamp;
-
- /* Offset */
- if (out_offset != GST_BUFFER_OFFSET_NONE) {
- GST_LOG_OBJECT (marudec, "Using offset returned by ffmpeg");
- } else if (out_timestamp != GST_CLOCK_TIME_NONE) {
- GstFormat out_fmt = GST_FORMAT_DEFAULT;
- GST_LOG_OBJECT (marudec, "Using offset converted from timestamp");
-
- gst_pad_query_peer_convert (marudec->sinkpad,
- GST_FORMAT_TIME, out_timestamp, &out_fmt, &out_offset);
- } else if (dec_info->offset != GST_BUFFER_OFFSET_NONE) {
- GST_LOG_OBJECT (marudec, "using in_offset %" G_GINT64_FORMAT,
- dec_info->offset);
- out_offset = dec_info->offset;
- } else {
- GST_LOG_OBJECT (marudec, "no valid offset found");
- out_offset = GST_BUFFER_OFFSET_NONE;
- }
- GST_BUFFER_OFFSET (*outbuf) = out_offset;
-
- /* Duration */
- if (GST_CLOCK_TIME_IS_VALID (out_duration)) {
- GST_LOG_OBJECT (marudec, "Using duration returned by ffmpeg");
- } else if (GST_CLOCK_TIME_IS_VALID (dec_info->duration)) {
- GST_LOG_OBJECT (marudec, "Using in_duration");
- out_duration = dec_info->duration;
- } else {
- if (marudec->format.video.fps_n != -1 &&
- (marudec->format.video.fps_n != 1000 &&
- marudec->format.video.fps_d != 1)) {
- GST_LOG_OBJECT (marudec, "using input framerate for duration");
- out_duration = gst_util_uint64_scale_int (GST_SECOND,
- marudec->format.video.fps_d, marudec->format.video.fps_n);
- } else {
- if (marudec->context->video.fps_n != 0 &&
- (marudec->context->video.fps_d > 0 &&
- marudec->context->video.fps_d < 1000)) {
- GST_LOG_OBJECT (marudec, "using decoder's framerate for duration");
- out_duration = gst_util_uint64_scale_int (GST_SECOND,
- marudec->context->video.fps_n * 1,
- marudec->context->video.fps_d);
- } else {
- GST_LOG_OBJECT (marudec, "no valid duration found");
- }
- }
- }
-
- if (G_UNLIKELY (!clip_video_buffer (marudec, *outbuf, out_timestamp,
- out_duration))) {
- GST_DEBUG_OBJECT (marudec, "buffer clipped");
- gst_buffer_unref (*outbuf);
- *outbuf = NULL;
- GST_DEBUG_OBJECT (marudec, "return flow %d, out %p, len %d",
- *ret, *outbuf, len);
- return len;
- }
-
- GST_DEBUG_OBJECT (marudec, "return flow %d, out %p, len %d",
- *ret, *outbuf, len);
- return len;
-}
-
-static gint
-gst_marudec_audio_frame (GstMaruDec *marudec, CodecElement *codec,
- guint8 *data, guint size,
- const GstTSInfo *dec_info, GstBuffer **outbuf,
- GstFlowReturn *ret)
-{
- gint len = -1;
- gint have_data = FF_MAX_AUDIO_FRAME_SIZE;
- GstClockTime out_timestamp, out_duration;
- gint64 out_offset;
-
- *outbuf =
- new_aligned_buffer (FF_MAX_AUDIO_FRAME_SIZE,
- GST_PAD_CAPS (marudec->srcpad));
-
- GST_DEBUG_OBJECT (marudec, "decode audio, input buffer size %d", size);
-
- len = interface->decode_audio (marudec->context,
- (int16_t *) GST_BUFFER_DATA (*outbuf), &have_data,
- data, size, marudec->dev);
-
- GST_DEBUG_OBJECT (marudec,
- "Decode audio: len=%d, have_data=%d", len, have_data);
-
- if (len >= 0 && have_data > 0) {
- GST_DEBUG_OBJECT (marudec, "Creating output buffer");
- if (!gst_marudec_negotiate (marudec, FALSE)) {
- gst_buffer_unref (*outbuf);
- *outbuf = NULL;
- len = -1;
- GST_DEBUG_OBJECT (marudec, "return flow %d, out %p, len %d",
- *ret, *outbuf, len);
- return len;
- }
-
- GST_BUFFER_SIZE (*outbuf) = len;
-
- if (GST_CLOCK_TIME_IS_VALID (dec_info->timestamp)) {
- out_timestamp = dec_info->timestamp;
- } else {
- out_timestamp = marudec->next_out;
- }
-
- /* calculate based on number of samples */
- out_duration = gst_util_uint64_scale (have_data, GST_SECOND,
- marudec->format.audio.depth * marudec->format.audio.channels *
- marudec->format.audio.samplerate);
-
- out_offset = dec_info->offset;
-
- GST_DEBUG_OBJECT (marudec,
- "Buffer created. Size: %d, timestamp: %" GST_TIME_FORMAT
- ", duration: %" GST_TIME_FORMAT, have_data,
- GST_TIME_ARGS (out_timestamp), GST_TIME_ARGS (out_duration));
-
- GST_BUFFER_TIMESTAMP (*outbuf) = out_timestamp;
- GST_BUFFER_DURATION (*outbuf) = out_duration;
- GST_BUFFER_OFFSET (*outbuf) = out_offset;
- gst_buffer_set_caps (*outbuf, GST_PAD_CAPS (marudec->srcpad));
-
- if (GST_CLOCK_TIME_IS_VALID (out_timestamp)) {
- marudec->next_out = out_timestamp + out_duration;
- }
-
- if (G_UNLIKELY (!clip_audio_buffer (marudec, *outbuf,
- out_timestamp, out_duration))) {
- GST_DEBUG_OBJECT (marudec, "buffer_clipped");
- gst_buffer_unref (*outbuf);
- *outbuf = NULL;
- GST_DEBUG_OBJECT (marudec, "return flow %d, out %p, len %d", *ret, *outbuf, len);
- return len;
- }
- } else {
- gst_buffer_unref (*outbuf);
- *outbuf = NULL;
- }
-
- if (len == -1 && !strcmp(codec->name, "aac")) {
- GST_ELEMENT_ERROR (marudec, STREAM, DECODE, (NULL),
- ("Decoding of AAC stream by FFMPEG failed."));
- *ret = GST_FLOW_ERROR;
- }
-
- GST_DEBUG_OBJECT (marudec, "return flow %d, out %p, len %d",
- *ret, *outbuf, len);
- return len;
-}
-
-static gint
-gst_marudec_frame (GstMaruDec *marudec, guint8 *data, guint size,
- gint *got_data, const GstTSInfo *dec_info, gint64 in_offset, GstFlowReturn *ret)
-{
- GstMaruDecClass *oclass;
- GstBuffer *outbuf = NULL;
- gint have_data = 0, len = 0;
-
- if (G_UNLIKELY (marudec->context->codec == NULL)) {
- GST_ERROR_OBJECT (marudec, "no codec context");
- return -1;
- }
-
- *ret = GST_FLOW_OK;
- oclass = (GstMaruDecClass *) (G_OBJECT_GET_CLASS (marudec));
-
- switch (oclass->codec->media_type) {
- case AVMEDIA_TYPE_VIDEO:
- len = gst_marudec_video_frame (marudec, data, size,
- dec_info, in_offset, &outbuf, ret);
- break;
- case AVMEDIA_TYPE_AUDIO:
- len = gst_marudec_audio_frame (marudec, oclass->codec, data, size,
- dec_info, &outbuf, ret);
- if (outbuf == NULL && marudec->discont) {
- GST_DEBUG_OBJECT (marudec, "no buffer but keeping timestamp");
- }
- break;
- default:
- GST_ERROR_OBJECT (marudec, "Asked to decode non-audio/video frame!");
- g_assert_not_reached ();
- break;
- }
-
- if (outbuf) {
- have_data = 1;
- }
-
- if (len < 0 || have_data < 0) {
- GST_WARNING_OBJECT (marudec,
- "maru_%sdec: decoding error (len: %d, have_data: %d)",
- oclass->codec->name, len, have_data);
- *got_data = 0;
- return len;
- } else if (len == 0 && have_data == 0) {
- *got_data = 0;
- return len;
- } else {
- *got_data = 1;
- }
-
- if (outbuf) {
- GST_LOG_OBJECT (marudec,
- "Decoded data, now pushing buffer %p with offset %" G_GINT64_FORMAT
- ", timestamp %" GST_TIME_FORMAT " and duration %" GST_TIME_FORMAT,
- outbuf, GST_BUFFER_OFFSET (outbuf),
- GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)),
- GST_TIME_ARGS (GST_BUFFER_DURATION (outbuf)));
-
- if (marudec->discont) {
- /* GST_BUFFER_FLAG_DISCONT :
- * the buffer marks a data discontinuity in the stream. This typically
- * occurs after a seek or a dropped buffer from a live or network source.
- */
- GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
- marudec->discont = FALSE;
- }
-
- if (marudec->segment.rate > 0.0) {
- // push forward
- *ret = gst_pad_push (marudec->srcpad, outbuf);
- } else {
- // push reverse
- GST_DEBUG_OBJECT (marudec, "queued frame");
- marudec->queued = g_list_prepend (marudec->queued, outbuf);
- *ret = GST_FLOW_OK;
- }
- } else {
- GST_DEBUG_OBJECT (marudec, "Didn't get a decoded buffer");
- }
-
- return len;
-}
-
-static GstFlowReturn
-gst_marudec_chain (GstPad *pad, GstBuffer *buffer)
-{
- GstMaruDec *marudec;
- GstMaruDecClass *oclass;
- guint8 *in_buf;
- gint in_size, have_data;
- GstFlowReturn ret = GST_FLOW_OK;
- GstClockTime in_timestamp;
- GstClockTime in_duration;
- gboolean discont;
- gint64 in_offset;
- const GstTSInfo *in_info;
- const GstTSInfo *dec_info;
-
- marudec = (GstMaruDec *) (GST_PAD_PARENT (pad));
-
- if (G_UNLIKELY (!marudec->opened)) {
- // not_negotiated
- oclass = (GstMaruDecClass *) (G_OBJECT_GET_CLASS (marudec));
- GST_ELEMENT_ERROR (marudec, CORE, NEGOTIATION, (NULL),
- ("maru_%sdec: input format was not set before data start",
- oclass->codec->name));
- gst_buffer_unref (buffer);
- return GST_FLOW_NOT_NEGOTIATED;
- }
-
- discont = GST_BUFFER_IS_DISCONT (buffer);
- if (G_UNLIKELY (discont)) {
- GST_DEBUG_OBJECT (marudec, "received DISCONT");
- gst_marudec_drain (marudec);
- interface->flush_buffers (marudec->context, marudec->dev);
- marudec->discont = TRUE;
- gst_marudec_reset_ts (marudec);
- }
-
- oclass = (GstMaruDecClass *) (G_OBJECT_GET_CLASS (marudec));
-
- in_timestamp = GST_BUFFER_TIMESTAMP (buffer);
- in_duration = GST_BUFFER_DURATION (buffer);
- in_offset = GST_BUFFER_OFFSET (buffer);
-
- in_info = gst_ts_info_store (marudec, in_timestamp, in_duration, in_offset);
- GST_LOG_OBJECT (marudec,
- "Received new data of size %u, offset: %" G_GUINT64_FORMAT ", ts:%"
- GST_TIME_FORMAT ", dur: %" GST_TIME_FORMAT ", info %d",
- GST_BUFFER_SIZE (buffer), GST_BUFFER_OFFSET (buffer),
- GST_TIME_ARGS (in_timestamp), GST_TIME_ARGS (in_duration), in_info->idx);
-
- in_buf = GST_BUFFER_DATA (buffer);
- in_size = GST_BUFFER_SIZE (buffer);
-
- dec_info = in_info;
-
- gst_marudec_frame (marudec, in_buf, in_size, &have_data, dec_info, in_offset, &ret);
-
- gst_buffer_unref (buffer);
-
- return ret;
-}
-
-static GstStateChangeReturn
-gst_marudec_change_state (GstElement *element, GstStateChange transition)
-{
- GstMaruDec *marudec = (GstMaruDec *) element;
- GstStateChangeReturn ret;
-
- ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
-
- switch (transition) {
- case GST_STATE_CHANGE_PAUSED_TO_READY:
- GST_OBJECT_LOCK (marudec);
- gst_marudec_close (marudec);
- GST_OBJECT_UNLOCK (marudec);
-
- /* clear queue */
- clear_queued (marudec);
- break;
- default:
- break;
- }
-
- return ret;
-}
-
-gboolean
-gst_marudec_register (GstPlugin *plugin, GList *element)
-{
- GTypeInfo typeinfo = {
- sizeof (GstMaruDecClass),
- (GBaseInitFunc) gst_marudec_base_init,
- NULL,
- (GClassInitFunc) gst_marudec_class_init,
- NULL,
- NULL,
- sizeof (GstMaruDec),
- 0,
- (GInstanceInitFunc) gst_marudec_init,
- };
-
- GType type;
- gchar *type_name;
- gint rank = GST_RANK_PRIMARY;
- GList *elem = element;
- CodecElement *codec = NULL;
-
- if (!elem) {
- return FALSE;
- }
-
- /* register element */
- do {
- codec = (CodecElement *)(elem->data);
- if (!codec) {
- return FALSE;
- }
-
- if (codec->codec_type != CODEC_TYPE_DECODE) {
- continue;
- }
-
- type_name = g_strdup_printf ("maru_%sdec", codec->name);
- type = g_type_from_name (type_name);
- if (!type) {
- type = g_type_register_static (GST_TYPE_ELEMENT, type_name, &typeinfo, 0);
- g_type_set_qdata (type, GST_MARUDEC_PARAMS_QDATA, (gpointer) codec);
- }
-
- if (!gst_element_register (plugin, type_name, rank, type)) {
- g_free (type_name);
- return FALSE;
- }
- g_free (type_name);
- } while ((elem = elem->next));
-
- return TRUE;
-}
+++ /dev/null
-/* GStreamer
- * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
- * Copyright (C) 2013 Samsung Electronics Co., Ltd.
- *
- * This library is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Library General Public
- * License as published by the Free Software Foundation; either
- * version 2 of the License, or (at your option) any later version.
- *
- * This library is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Library General Public License for more details.
- *
- * You should have received a copy of the GNU Library General Public
- * License along with this library; if not, write to the
- * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
- * Boston, MA 02111-1307, USA.
- */
-
-#include "gstmarudevice.h"
-#include "gstmaruutils.h"
-#include "gstmaruinterface.h"
-#include <gst/base/gstadapter.h>
-
-#define GST_MARUENC_PARAMS_QDATA g_quark_from_static_string("maruenc-params")
-
-enum
-{
- ARG_0,
- ARG_BIT_RATE
-};
-
-typedef struct _GstMaruEnc
-{
- GstElement element;
-
- GstPad *srcpad;
- GstPad *sinkpad;
-
- CodecContext *context;
- CodecDevice *dev;
- gboolean opened;
- GstClockTime adapter_ts;
- guint64 adapter_consumed;
- GstAdapter *adapter;
- gboolean discont;
-
- // cache
- gulong bitrate;
- gint gop_size;
- gulong buffer_size;
-
- guint8 *working_buf;
- gulong working_buf_size;
-
- GQueue *delay;
-
-} GstMaruEnc;
-
-typedef struct _GstMaruEncClass
-{
- GstElementClass parent_class;
-
- CodecElement *codec;
- GstPadTemplate *sinktempl;
- GstPadTemplate *srctempl;
- GstCaps *sinkcaps;
-} GstMaruEncClass;
-
-static GstElementClass *parent_class = NULL;
-
-static void gst_maruenc_base_init (GstMaruEncClass *klass);
-static void gst_maruenc_class_init (GstMaruEncClass *klass);
-static void gst_maruenc_init (GstMaruEnc *maruenc);
-static void gst_maruenc_finalize (GObject *object);
-
-static gboolean gst_maruenc_setcaps (GstPad *pad, GstCaps *caps);
-static GstCaps *gst_maruenc_getcaps (GstPad *pad);
-
-static GstCaps *gst_maruenc_get_possible_sizes (GstMaruEnc *maruenc,
- GstPad *pad, const GstCaps *caps);
-
-static GstFlowReturn gst_maruenc_chain_video (GstPad *pad, GstBuffer *buffer);
-static GstFlowReturn gst_maruenc_chain_audio (GstPad *pad, GstBuffer *buffer);
-
-static gboolean gst_maruenc_event_video (GstPad *pad, GstEvent *event);
-static gboolean gst_maruenc_event_src (GstPad *pad, GstEvent *event);
-
-GstStateChangeReturn gst_maruenc_change_state (GstElement *element, GstStateChange transition);
-
-#define DEFAULT_VIDEO_BITRATE 300000
-#define DEFAULT_VIDEO_GOP_SIZE 15
-#define DEFAULT_AUDIO_BITRATE 128000
-
-#define DEFAULT_WIDTH 352
-#define DEFAULT_HEIGHT 288
-
-/*
- * Implementation
- */
-static void
-gst_maruenc_base_init (GstMaruEncClass *klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
- GstPadTemplate *sinktempl = NULL, *srctempl = NULL;
- GstCaps *sinkcaps = NULL, *srccaps = NULL;
- CodecElement *codec;
- gchar *longname, *classification, *description;
-
- codec =
- (CodecElement *)g_type_get_qdata (G_OBJECT_CLASS_TYPE (klass),
- GST_MARUENC_PARAMS_QDATA);
-
- longname = g_strdup_printf ("%s Encoder", codec->longname);
- classification = g_strdup_printf ("Codec/Encoder/%s",
- (codec->media_type == AVMEDIA_TYPE_VIDEO) ? "Video" : "Audio");
- description = g_strdup_printf ("%s Encoder", codec->name);
-
- gst_element_class_set_details_simple (element_class,
- longname,
- classification,
- description,
- "Erik Walthinsen <omega@cse.ogi.edu>");
-
- g_free (longname);
- g_free (classification);
- g_free (description);
-
- if (!(srccaps = gst_maru_codecname_to_caps (codec->name, NULL, TRUE))) {
- GST_DEBUG ("Couldn't get source caps for encoder '%s'", codec->name);
- srccaps = gst_caps_new_simple ("unknown/unknown", NULL);
- }
-
- switch (codec->media_type) {
- case AVMEDIA_TYPE_VIDEO:
- sinkcaps = gst_caps_from_string ("video/x-raw-rgb; video/x-raw-yuv; video/x-raw-gray");
- break;
- case AVMEDIA_TYPE_AUDIO:
- sinkcaps = gst_maru_codectype_to_audio_caps (NULL, codec->name, TRUE, codec);
- break;
- default:
- GST_LOG("unknown media type");
- break;
- }
-
- if (!sinkcaps) {
- GST_DEBUG ("Couldn't get sink caps for encoder '%s'", codec->name);
- sinkcaps = gst_caps_new_simple ("unknown/unknown", NULL);
- }
-
- sinktempl = gst_pad_template_new ("sink", GST_PAD_SINK,
- GST_PAD_ALWAYS, sinkcaps);
- srctempl = gst_pad_template_new ("src", GST_PAD_SRC,
- GST_PAD_ALWAYS, srccaps);
-
- gst_element_class_add_pad_template (element_class, srctempl);
- gst_element_class_add_pad_template (element_class, sinktempl);
-
- klass->codec = codec;
- klass->sinktempl = sinktempl;
- klass->srctempl = srctempl;
- klass->sinkcaps = NULL;
-}
-
-static void
-gst_maruenc_set_property (GObject *object,
- guint prop_id, const GValue *value, GParamSpec *pspec)
-{
- GstMaruEnc *maruenc;
-
- maruenc = (GstMaruEnc *) (object);
-
- if (maruenc->opened) {
- GST_WARNING_OBJECT (maruenc,
- "Can't change properties one decoder is setup !");
- return;
- }
-
- switch (prop_id) {
- case ARG_BIT_RATE:
- maruenc->bitrate = g_value_get_ulong (value);
- break;
- default:
- break;
- }
-}
-
-static void
-gst_maruenc_get_property (GObject *object,
- guint prop_id, GValue *value, GParamSpec *pspec)
-{
- GstMaruEnc *maruenc;
-
- maruenc = (GstMaruEnc *) (object);
-
- switch (prop_id) {
- case ARG_BIT_RATE:
- g_value_set_ulong (value, maruenc->bitrate);
- break;
- default:
- break;
- }
-}
-
-static void
-gst_maruenc_class_init (GstMaruEncClass *klass)
-{
- GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
- GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
-
- parent_class = g_type_class_peek_parent (klass);
-
- gobject_class->set_property = gst_maruenc_set_property;
- gobject_class->get_property = gst_maruenc_get_property;
-
- if (klass->codec->media_type == AVMEDIA_TYPE_VIDEO) {
- g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_BIT_RATE,
- g_param_spec_ulong ("bitrate", "Bit Rate",
- "Target VIDEO Bitrate", 0, G_MAXULONG, DEFAULT_VIDEO_BITRATE,
- G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
- } else if (klass->codec->media_type == AVMEDIA_TYPE_AUDIO) {
- g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_BIT_RATE,
- g_param_spec_ulong ("bitrate", "Bit Rate",
- "Target Audio Bitrate", 0, G_MAXULONG, DEFAULT_AUDIO_BITRATE,
- G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
- }
-
- gstelement_class->change_state = gst_maruenc_change_state;
-
- gobject_class->finalize = gst_maruenc_finalize;
-}
-
-static void
-gst_maruenc_init (GstMaruEnc *maruenc)
-{
- GstMaruEncClass *oclass;
- oclass = (GstMaruEncClass*) (G_OBJECT_GET_CLASS(maruenc));
-
- maruenc->sinkpad = gst_pad_new_from_template (oclass->sinktempl, "sink");
- gst_pad_set_setcaps_function (maruenc->sinkpad,
- GST_DEBUG_FUNCPTR(gst_maruenc_setcaps));
- gst_pad_set_getcaps_function (maruenc->sinkpad,
- GST_DEBUG_FUNCPTR(gst_maruenc_getcaps));
-
- maruenc->srcpad = gst_pad_new_from_template (oclass->srctempl, "src");
- gst_pad_use_fixed_caps (maruenc->srcpad);
-
- maruenc->delay = g_queue_new ();
-
- // instead of AVCodecContext
- maruenc->context = g_malloc0 (sizeof(CodecContext));
- maruenc->context->video.pix_fmt = PIX_FMT_NONE;
- maruenc->context->audio.sample_fmt = SAMPLE_FMT_NONE;
-
- maruenc->opened = FALSE;
-
- maruenc->dev = g_malloc0 (sizeof(CodecDevice));
-
- if (oclass->codec->media_type == AVMEDIA_TYPE_VIDEO) {
- gst_pad_set_chain_function (maruenc->sinkpad, gst_maruenc_chain_video);
- gst_pad_set_event_function (maruenc->sinkpad, gst_maruenc_event_video);
- gst_pad_set_event_function (maruenc->srcpad, gst_maruenc_event_src);
-
- maruenc->bitrate = DEFAULT_VIDEO_BITRATE;
- maruenc->buffer_size = 512 * 1024;
- maruenc->gop_size = DEFAULT_VIDEO_GOP_SIZE;
-
- } else if (oclass->codec->media_type == AVMEDIA_TYPE_AUDIO){
- gst_pad_set_chain_function (maruenc->sinkpad, gst_maruenc_chain_audio);
- maruenc->bitrate = DEFAULT_AUDIO_BITRATE;
- }
-
- gst_element_add_pad (GST_ELEMENT (maruenc), maruenc->sinkpad);
- gst_element_add_pad (GST_ELEMENT (maruenc), maruenc->srcpad);
-
- // TODO: need to know what adapter does.
- maruenc->adapter = gst_adapter_new ();
-}
-
-static void
-gst_maruenc_finalize (GObject *object)
-{
- // Deinit Decoder
- GstMaruEnc *maruenc = (GstMaruEnc *) object;
-
- if (maruenc->opened) {
- gst_maru_avcodec_close (maruenc->context, maruenc->dev);
- maruenc->opened = FALSE;
- }
-
- if (maruenc->context) {
- g_free (maruenc->context);
- maruenc->context = NULL;
- }
-
- if (maruenc->dev) {
- g_free (maruenc->dev);
- maruenc->dev = NULL;
- }
-
- g_queue_free (maruenc->delay);
-
- g_object_unref (maruenc->adapter);
-
- G_OBJECT_CLASS (parent_class)->finalize (object);
-}
-
-static GstCaps *
-gst_maruenc_get_possible_sizes (GstMaruEnc *maruenc, GstPad *pad,
- const GstCaps *caps)
-{
- GstCaps *othercaps = NULL;
- GstCaps *tmpcaps = NULL;
- GstCaps *intersect = NULL;
- guint i;
-
- othercaps = gst_pad_peer_get_caps (maruenc->srcpad);
- if (!othercaps) {
- return gst_caps_copy (caps);
- }
-
- intersect = gst_caps_intersect (othercaps,
- gst_pad_get_pad_template_caps (maruenc->srcpad));
- gst_caps_unref (othercaps);
-
- if (gst_caps_is_empty (intersect)) {
- return intersect;
- }
-
- if (gst_caps_is_any (intersect)) {
- return gst_caps_copy (caps);
- }
-
- tmpcaps = gst_caps_new_empty ();
-
- for (i = 0; i <gst_caps_get_size (intersect); i++) {
- GstStructure *s = gst_caps_get_structure (intersect, i);
- const GValue *height = NULL;
- const GValue *width = NULL;
- const GValue *framerate = NULL;
- GstStructure *tmps;
-
- height = gst_structure_get_value (s, "height");
- width = gst_structure_get_value (s, "width");
- framerate = gst_structure_get_value (s, "framerate");
-
- tmps = gst_structure_new ("video/x-rwa-rgb", NULL);
- if (width) {
- gst_structure_set_value (tmps, "width", width);
- }
- if (height) {
- gst_structure_set_value (tmps, "height", height);
- }
- if (framerate) {
- gst_structure_set_value (tmps, "framerate", framerate);
- }
- gst_caps_merge_structure (tmpcaps, gst_structure_copy (tmps));
-
- gst_structure_set_name (tmps, "video/x-raw-yuv");
- gst_caps_merge_structure (tmpcaps, gst_structure_copy (tmps));
-
- gst_structure_set_name (tmps, "video/x-raw-gray");
- gst_caps_merge_structure (tmpcaps, tmps);
- }
- gst_caps_unref (intersect);
-
- intersect = gst_caps_intersect (caps, tmpcaps);
- gst_caps_unref (tmpcaps);
-
- return intersect;
-}
-
-static GstCaps *
-gst_maruenc_getcaps (GstPad *pad)
-{
- GstMaruEnc *maruenc = (GstMaruEnc *) GST_PAD_PARENT (pad);
- GstMaruEncClass *oclass =
- (GstMaruEncClass *) G_OBJECT_GET_CLASS (maruenc);
- CodecContext *ctx = NULL;
- enum PixelFormat pixfmt;
- GstCaps *caps = NULL;
- GstCaps *finalcaps = NULL;
- gint i;
-
- GST_DEBUG_OBJECT (maruenc, "getting caps");
-
- if (!oclass->codec) {
- GST_ERROR_OBJECT (maruenc, "codec element is null.");
- return NULL;
- }
-
- if (oclass->codec->media_type == AVMEDIA_TYPE_AUDIO) {
- caps = gst_caps_copy (gst_pad_get_pad_template_caps (pad));
-
- GST_DEBUG_OBJECT (maruenc, "audio caps, return template %" GST_PTR_FORMAT,
- caps);
- return caps;
- }
-
- if (!strcmp(oclass->codec->name, "libx264")) {
- GstPad *peer;
-
- peer = gst_pad_get_peer (maruenc->srcpad);
- if (peer) {
- const GstCaps *templcaps;
- GstCaps *peercaps;
- guint i, n;
-
- peercaps = gst_pad_get_caps (peer);
-
- peercaps = gst_caps_make_writable (peercaps);
- n = gst_caps_get_size (peercaps);
- for (i = 0; i < n; i++) {
- GstStructure *s = gst_caps_get_structure (peercaps, i);
-
- gst_structure_set_name (s, "video/x-raw-yuv");
- gst_structure_remove_field (s, "stream-format");
- gst_structure_remove_field (s, "alignment");
- }
-
- templcaps = gst_pad_get_pad_template_caps (pad);
-
- caps = gst_caps_intersect (peercaps, templcaps);
- gst_caps_unref (peercaps);
- gst_object_unref (peer);
- peer = NULL;
- } else {
- caps = gst_caps_copy (gst_pad_get_pad_template_caps (pad));
- }
-
- if (GST_PAD_CAPS (pad) && gst_caps_can_intersect (GST_PAD_CAPS (pad), caps)) {
- GstCaps *tmpcaps = gst_caps_copy (GST_PAD_CAPS (pad));
-
- gst_caps_merge (tmpcaps, caps);
- caps = tmpcaps;
- }
-
- return caps;
- }
-
- // cached
- if (oclass->sinkcaps) {
- caps = gst_maruenc_get_possible_sizes (maruenc, pad, oclass->sinkcaps);
- GST_DEBUG_OBJECT (maruenc, "return cached caps %" GST_PTR_FORMAT, caps);
- return caps;
- }
-
- GST_DEBUG_OBJECT (maruenc, "probing caps");
- i = pixfmt = 0;
-
- for (pixfmt = 0;; pixfmt++) {
- GstCaps *tmpcaps;
-
- if ((pixfmt = oclass->codec->pix_fmts[i++]) == PIX_FMT_NONE) {
- GST_DEBUG_OBJECT (maruenc,
- "At the end of official pixfmt for this codec, breaking out");
- break;
- }
-
- GST_DEBUG_OBJECT (maruenc,
- "Got an official pixfmt [%d], attempting to get caps", pixfmt);
- tmpcaps = gst_maru_pixfmt_to_caps (pixfmt, NULL, oclass->codec->name);
- if (tmpcaps) {
- GST_DEBUG_OBJECT (maruenc, "Got caps, breaking out");
- if (!caps) {
- caps = gst_caps_new_empty ();
- }
- gst_caps_append (caps, tmpcaps);
- continue;
- }
-
- GST_DEBUG_OBJECT (maruenc,
- "Couldn't figure out caps without context, trying again with a context");
-
- GST_DEBUG_OBJECT (maruenc, "pixfmt: %d", pixfmt);
- if (pixfmt >= PIX_FMT_NB) {
- GST_WARNING ("Invalid pixfmt, breaking out");
- break;
- }
-
- ctx = g_malloc0 (sizeof(CodecContext));
-
- ctx->video.width = DEFAULT_WIDTH;
- ctx->video.height = DEFAULT_HEIGHT;
- ctx->video.fps_n = 1;
- ctx->video.fps_d = 25;
- ctx->video.ticks_per_frame = 1;
- ctx->bit_rate = DEFAULT_VIDEO_BITRATE;
- ctx->video.pix_fmt = pixfmt;
-
- GST_DEBUG ("Attempting to open codec");
- if (gst_maru_avcodec_open (ctx, oclass->codec, maruenc->dev) >= 0
- && ctx->video.pix_fmt == pixfmt) {
- ctx->video.width = -1;
- if (!caps) {
- caps = gst_caps_new_empty ();
- }
- tmpcaps = gst_maru_codectype_to_caps (oclass->codec->media_type, ctx,
- oclass->codec->name, TRUE);
- if (tmpcaps) {
- gst_caps_append (caps, tmpcaps);
- } else {
- GST_LOG_OBJECT (maruenc,
- "Couldn't get caps for codec: %s", oclass->codec->name);
- }
- gst_maru_avcodec_close (ctx, maruenc->dev);
- } else {
- GST_DEBUG_OBJECT (maruenc, "Opening codec failed with pixfmt: %d", pixfmt);
- }
-
- g_free (ctx);
- }
-
- if (!caps) {
- caps = gst_maruenc_get_possible_sizes (maruenc, pad,
- gst_pad_get_pad_template_caps (pad));
- GST_DEBUG_OBJECT (maruenc, "probing gave nothing, "
- "return template %" GST_PTR_FORMAT, caps);
- return caps;
- }
-
- GST_DEBUG_OBJECT (maruenc, "probed caps gave %" GST_PTR_FORMAT, caps);
- oclass->sinkcaps = gst_caps_copy (caps);
-
- finalcaps = gst_maruenc_get_possible_sizes (maruenc, pad, caps);
- gst_caps_unref (caps);
-
- return finalcaps;
-}
-
-static gboolean
-gst_maruenc_setcaps (GstPad *pad, GstCaps *caps)
-{
- GstMaruEnc *maruenc;
- GstMaruEncClass *oclass;
- GstCaps *other_caps;
- GstCaps *allowed_caps;
- GstCaps *icaps;
- enum PixelFormat pix_fmt;
-
- maruenc = (GstMaruEnc *) (gst_pad_get_parent (pad));
- oclass = (GstMaruEncClass *) (G_OBJECT_GET_CLASS (maruenc));
-
- if (maruenc->opened) {
- gst_maru_avcodec_close (maruenc->context, maruenc->dev);
- maruenc->opened = FALSE;
-
- gst_pad_set_caps (maruenc->srcpad, NULL);
- }
-
- maruenc->context->bit_rate = maruenc->bitrate;
- GST_DEBUG_OBJECT (maruenc, "Setting context to bitrate %lu, gop_size %d",
- maruenc->bitrate, maruenc->gop_size);
-
- gst_maru_caps_with_codectype (oclass->codec->media_type, caps, maruenc->context);
-
- if (!maruenc->context->video.fps_d) {
- maruenc->context->video.fps_d = 25;
- maruenc->context->video.fps_n = 1;
- } else if (!strcmp(oclass->codec->name ,"mpeg4")
- && (maruenc->context->video.fps_d > 65535)) {
- maruenc->context->video.fps_n =
- (gint) gst_util_uint64_scale_int (maruenc->context->video.fps_n,
- 65535, maruenc->context->video.fps_d);
- maruenc->context->video.fps_d = 65535;
- GST_LOG_OBJECT (maruenc, "MPEG4 : scaled down framerate to %d / %d",
- maruenc->context->video.fps_d, maruenc->context->video.fps_n);
- }
-
- pix_fmt = maruenc->context->video.pix_fmt;
-
- // open codec
- if (gst_maru_avcodec_open (maruenc->context,
- oclass->codec, maruenc->dev) < 0) {
- GST_DEBUG_OBJECT (maruenc, "maru_%senc: Failed to open codec",
- oclass->codec->name);
- return FALSE;
- }
-
- if (pix_fmt != maruenc->context->video.pix_fmt) {
- gst_maru_avcodec_close (maruenc->context, maruenc->dev);
- GST_DEBUG_OBJECT (maruenc,
- "maru_%senc: AV wants different colorspace (%d given, %d wanted)",
- oclass->codec->name, pix_fmt, maruenc->context->video.pix_fmt);
- return FALSE;
- }
-
- if (oclass->codec->media_type == AVMEDIA_TYPE_VIDEO
- && pix_fmt == PIX_FMT_NONE) {
- GST_DEBUG_OBJECT (maruenc, "maru_%senc: Failed to determine input format",
- oclass->codec->name);
- return FALSE;
- }
-
- GST_DEBUG_OBJECT (maruenc, "picking an output format.");
- allowed_caps = gst_pad_get_allowed_caps (maruenc->srcpad);
- if (!allowed_caps) {
- GST_DEBUG_OBJECT (maruenc, "but no peer, using template caps");
- allowed_caps =
- gst_caps_copy (gst_pad_get_pad_template_caps (maruenc->srcpad));
- }
-
- GST_DEBUG_OBJECT (maruenc, "chose caps %" GST_PTR_FORMAT, allowed_caps);
- gst_maru_caps_with_codecname (oclass->codec->name,
- oclass->codec->media_type, allowed_caps, maruenc->context);
-
- other_caps =
- gst_maru_codecname_to_caps (oclass->codec->name, maruenc->context, TRUE);
- if (!other_caps) {
- GST_DEBUG ("Unsupported codec - no caps found");
- gst_maru_avcodec_close (maruenc->context, maruenc->dev);
- return FALSE;
- }
-
- icaps = gst_caps_intersect (allowed_caps, other_caps);
- gst_caps_unref (allowed_caps);
- gst_caps_unref (other_caps);
- if (gst_caps_is_empty (icaps)) {
- gst_caps_unref (icaps);
- return FALSE;
- }
-
- if (gst_caps_get_size (icaps) > 1) {
- GstCaps *newcaps;
-
- newcaps =
- gst_caps_new_full (gst_structure_copy (gst_caps_get_structure (icaps,
- 0)), NULL);
- gst_caps_unref (icaps);
- icaps = newcaps;
- }
-
- if (!gst_pad_set_caps (maruenc->srcpad, icaps)) {
- gst_maru_avcodec_close (maruenc->context, maruenc->dev);
- gst_caps_unref (icaps);
- return FALSE;
- }
- gst_object_unref (maruenc);
-
- maruenc->opened = TRUE;
-
- return TRUE;
-}
-
-static void
-gst_maruenc_setup_working_buf (GstMaruEnc *maruenc)
-{
- guint wanted_size =
- maruenc->context->video.width * maruenc->context->video.height * 6 +
- FF_MIN_BUFFER_SIZE;
-
- if (maruenc->working_buf == NULL ||
- maruenc->working_buf_size != wanted_size) {
- if (maruenc->working_buf) {
- g_free (maruenc->working_buf);
- }
- maruenc->working_buf_size = wanted_size;
- maruenc->working_buf = g_malloc0 (maruenc->working_buf_size);
- }
- maruenc->buffer_size = wanted_size;
-}
-
-GstFlowReturn
-gst_maruenc_chain_video (GstPad *pad, GstBuffer *buffer)
-{
- GstMaruEnc *maruenc = (GstMaruEnc *) (GST_PAD_PARENT (pad));
- GstBuffer *outbuf = NULL;
- gint ret_size = 0, frame_size = 0;
- int coded_frame = 0, is_keyframe = 0;
- uint32_t mem_offset = 0;
-
- GST_DEBUG_OBJECT (maruenc,
- "Received buffer of time %" GST_TIME_FORMAT,
- GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer)));
-
- frame_size = gst_maru_avpicture_size (maruenc->context->video.pix_fmt,
- maruenc->context->video.width, maruenc->context->video.height);
- g_return_val_if_fail (frame_size == GST_BUFFER_SIZE (buffer),
- GST_FLOW_ERROR);
-
- gst_maruenc_setup_working_buf (maruenc);
-
- ret_size =
- interface->encode_video (maruenc->context, maruenc->working_buf,
- maruenc->working_buf_size, GST_BUFFER_DATA (buffer),
- GST_BUFFER_SIZE (buffer), GST_BUFFER_TIMESTAMP (buffer),
- &coded_frame, &is_keyframe, maruenc->dev);
-
- if (ret_size < 0) {
- GstMaruEncClass *oclass =
- (GstMaruEncClass *) (G_OBJECT_GET_CLASS (maruenc));
- GST_ERROR_OBJECT (maruenc,
- "maru_%senc: failed to encode buffer", oclass->codec->name);
- gst_buffer_unref (buffer);
- return GST_FLOW_OK;
- }
-
- g_queue_push_tail (maruenc->delay, buffer);
- if (ret_size) {
- buffer = g_queue_pop_head (maruenc->delay);
- } else {
- return GST_FLOW_OK;
- }
-
- GST_DEBUG_OBJECT (maruenc, "encoded video. mem_offset = 0x%x", mem_offset);
-
- // encode_video copies output buffers twice.
- // device memory to working_buf and working_buf to GstBuffer
- outbuf = gst_buffer_new_and_alloc (ret_size);
- memcpy (GST_BUFFER_DATA (outbuf), maruenc->working_buf, ret_size);
- GST_BUFFER_TIMESTAMP (outbuf) = GST_BUFFER_TIMESTAMP (buffer);
- GST_BUFFER_DURATION (outbuf) = GST_BUFFER_DURATION (buffer);
-
- if (coded_frame) {
- if (!is_keyframe) {
- GST_DEBUG_OBJECT (maruenc, "this frame is not a keyframe");
-
- /* GST_BUFFER_FLAG_DELTA_UNIT
- * - this unit cannot be decoded independently.
- */
- GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DELTA_UNIT);
- } else {
- GST_BUFFER_FLAG_UNSET (outbuf, GST_BUFFER_FLAG_DELTA_UNIT);
- }
- } else {
- GST_WARNING_OBJECT (maruenc, "codec did not provide keyframe info");
- }
- gst_buffer_set_caps (outbuf, GST_PAD_CAPS (maruenc->srcpad));
- gst_buffer_unref (buffer);
-
- return gst_pad_push (maruenc->srcpad, outbuf);
-}
-
-GstFlowReturn
-gst_maruenc_encode_audio (GstMaruEnc *maruenc, guint8 *audio_in,
- guint in_size, guint max_size, GstClockTime timestamp,
- GstClockTime duration, gboolean discont)
-{
- GstBuffer *outbuf;
- guint8 *audio_out;
- gint res;
- GstFlowReturn ret;
-
- outbuf = gst_buffer_new_and_alloc (max_size + FF_MIN_BUFFER_SIZE);
- audio_out = GST_BUFFER_DATA (outbuf);
-
- GST_LOG_OBJECT (maruenc, "encoding buffer of max size %d", max_size);
- if (maruenc->buffer_size != max_size) {
- maruenc->buffer_size = max_size;
- }
-
- res = interface->encode_audio (maruenc->context, audio_out, max_size,
- audio_in, in_size, timestamp, maruenc->dev);
-
- if (res < 0) {
- GST_ERROR_OBJECT (maruenc, "Failed to encode buffer: %d", res);
- gst_buffer_unref (outbuf);
- return GST_FLOW_OK;
- }
- GST_LOG_OBJECT (maruenc, "got output size %d", res);
-
- GST_BUFFER_SIZE (outbuf) = res;
- GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
- GST_BUFFER_DURATION (outbuf) = duration;
- if (discont) {
- GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
- }
- gst_buffer_set_caps (outbuf, GST_PAD_CAPS (maruenc->srcpad));
-
- GST_LOG_OBJECT (maruenc, "pushing size %d, timestamp %",
- GST_TIME_FORMAT, res, GST_TIME_ARGS (timestamp));
-
- ret = gst_pad_push (maruenc->srcpad, outbuf);
-
- return ret;
-}
-
-static GstFlowReturn
-gst_maruenc_chain_audio (GstPad *pad, GstBuffer *buffer)
-{
- GstMaruEnc *maruenc;
- GstClockTime timestamp, duration;
- guint in_size, frame_size;
- gint osize;
- GstFlowReturn ret;
- gint out_size = 0;
- gboolean discont;
- guint8 *in_data;
- CodecContext *ctx;
-
- maruenc = (GstMaruEnc *) (GST_OBJECT_PARENT (pad));
-
- ctx = maruenc->context;
-
- in_size = GST_BUFFER_SIZE (buffer);
- timestamp = GST_BUFFER_TIMESTAMP (buffer);
- duration = GST_BUFFER_DURATION (buffer);
- discont = GST_BUFFER_IS_DISCONT (buffer);
-
- GST_DEBUG_OBJECT (maruenc,
- "Received time %" GST_TIME_FORMAT ", duration %" GST_TIME_FORMAT
- ", size %d", GST_TIME_ARGS (timestamp), GST_TIME_ARGS (duration), in_size);
-
- frame_size = ctx->audio.frame_size;
- osize = ctx->audio.bits_per_sample_fmt;
-
- if (frame_size > 1) {
- guint avail, frame_bytes;
-
- if (discont) {
- GST_LOG_OBJECT (maruenc, "DISCONT, clear adapter");
- gst_adapter_clear (maruenc->adapter);
- maruenc->discont = TRUE;
- }
-
- if (gst_adapter_available (maruenc->adapter) == 0) {
- GST_LOG_OBJECT (maruenc, "taking buffer timestamp %" GST_TIME_FORMAT,
- GST_TIME_ARGS (timestamp));
- maruenc->adapter_ts = timestamp;
- maruenc->adapter_consumed = 0;
- } else {
- GstClockTime upstream_time;
- GstClockTime consumed_time;
- guint64 bytes;
-
- consumed_time =
- gst_util_uint64_scale (maruenc->adapter_consumed, GST_SECOND,
- ctx->audio.sample_rate);
- timestamp = maruenc->adapter_ts + consumed_time;
- GST_LOG_OBJECT (maruenc, "taking adapter timestamp %" GST_TIME_FORMAT
- " and adding consumed time %" GST_TIME_FORMAT,
- GST_TIME_ARGS (maruenc->adapter_ts), GST_TIME_ARGS (consumed_time));
-
- upstream_time = gst_adapter_prev_timestamp (maruenc->adapter, &bytes);
- if (GST_CLOCK_TIME_IS_VALID (upstream_time)) {
- GstClockTimeDiff diff;
-
- upstream_time +=
- gst_util_uint64_scale (bytes, GST_SECOND,
- ctx->audio.sample_rate * osize * ctx->audio.channels);
- diff = upstream_time - timestamp;
-
- if (diff > GST_SECOND / 10 || diff < -GST_SECOND / 10) {
- GST_DEBUG_OBJECT (maruenc, "adapter timestamp drifting, "
- "taking upstream timestamp %" GST_TIME_FORMAT,
- GST_TIME_ARGS (upstream_time));
- timestamp = upstream_time;
-
- maruenc->adapter_consumed = bytes / (osize * ctx->audio.channels);
- maruenc->adapter_ts =
- upstream_time - gst_util_uint64_scale (maruenc->adapter_consumed,
- GST_SECOND, ctx->audio.sample_rate);
- maruenc->discont = TRUE;
- }
- }
- }
-
- GST_LOG_OBJECT (maruenc, "pushing buffer in adapter");
- gst_adapter_push (maruenc->adapter, buffer);
-
- frame_bytes = frame_size * osize * ctx->audio.channels;
- avail = gst_adapter_available (maruenc->adapter);
-
- GST_LOG_OBJECT (maruenc, "frame_bytes %u, avail %u", frame_bytes, avail);
-
- while (avail >= frame_bytes) {
- GST_LOG_OBJECT (maruenc, "taking %u bytes from the adapter", frame_bytes);
-
- in_data = (guint8 *) gst_adapter_peek (maruenc->adapter, frame_bytes);
- maruenc->adapter_consumed += frame_size;
-
- duration =
- gst_util_uint64_scale (maruenc->adapter_consumed, GST_SECOND,
- ctx->audio.sample_rate);
- duration -= (timestamp - maruenc->adapter_ts);
-
- out_size = frame_bytes * 4;
-
- ret =
- gst_maruenc_encode_audio (maruenc, in_data, frame_bytes, out_size,
- timestamp, duration, maruenc->discont);
-
- gst_adapter_flush (maruenc->adapter, frame_bytes);
- if (ret != GST_FLOW_OK) {
- GST_DEBUG_OBJECT (maruenc, "Failed to push buffer %d (%s)", ret,
- gst_flow_get_name (ret));
- }
-
- timestamp += duration;
-
- maruenc->discont = FALSE;
- avail = gst_adapter_available (maruenc->adapter);
- }
- GST_LOG_OBJECT (maruenc, "%u bytes left in the adapter", avail);
- } else {
- in_data = (guint8 *) GST_BUFFER_DATA (buffer);
- ret = gst_maruenc_encode_audio (maruenc, in_data, in_size, out_size,
- timestamp, duration, discont);
- gst_buffer_unref (buffer);
- if (ret != GST_FLOW_OK) {
- GST_DEBUG_OBJECT (maruenc, "Failed to push buffer %d (%s)", ret,
- gst_flow_get_name (ret));
- }
- }
-
- return GST_FLOW_OK;
-}
-
-static void
-gst_maruenc_flush_buffers (GstMaruEnc *maruenc, gboolean send)
-{
- GST_DEBUG_OBJECT (maruenc, "flushing buffers with sending %d", send);
-
- if (!maruenc->opened) {
- while (!g_queue_is_empty (maruenc->delay)) {
- gst_buffer_unref (g_queue_pop_head (maruenc->delay));
- }
- }
-
-}
-
-static gboolean
-gst_maruenc_event_video (GstPad *pad, GstEvent *event)
-{
- GstMaruEnc *maruenc;
- maruenc = (GstMaruEnc *) gst_pad_get_parent (pad);
-
- switch (GST_EVENT_TYPE (event)) {
- case GST_EVENT_EOS:
- gst_maruenc_flush_buffers (maruenc, TRUE);
- break;
- case GST_EVENT_CUSTOM_DOWNSTREAM:
- break;
- default:
- break;
- }
-
- return gst_pad_push_event (maruenc->srcpad, event);
-}
-
-static gboolean
-gst_maruenc_event_src (GstPad *pad, GstEvent *event)
-{
- GstMaruEnc *maruenc = (GstMaruEnc *) (GST_PAD_PARENT (pad));
- gboolean forward = TRUE;
-
- switch (GST_EVENT_TYPE (event)) {
- case GST_EVENT_CUSTOM_UPSTREAM:
- {
- const GstStructure *s;
- s = gst_event_get_structure (event);
-
- if (gst_structure_has_name (s, "GstForceKeyUnit")) {
- forward = FALSE;
- gst_event_unref (event);
- }
- }
- break;
- default:
- break;
- }
-
- if (forward) {
- return gst_pad_push_event (maruenc->sinkpad, event);
- }
-
- return TRUE;
-}
-
-GstStateChangeReturn
-gst_maruenc_change_state (GstElement *element, GstStateChange transition)
-{
- GstMaruEnc *maruenc = (GstMaruEnc*)element;
- GstStateChangeReturn ret;
-
- switch (transition) {
- default:
- break;
- }
-
- ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
-
- switch (transition) {
- case GST_STATE_CHANGE_PAUSED_TO_READY:
- gst_maruenc_flush_buffers (maruenc, FALSE);
- if (maruenc->opened) {
- GST_DEBUG_OBJECT (maruenc, "change_state: PAUSED_TO_READY, close context");
- gst_maru_avcodec_close (maruenc->context, maruenc->dev);
- maruenc->opened = FALSE;
- }
- gst_adapter_clear (maruenc->adapter);
-
- if (maruenc->working_buf) {
- g_free (maruenc->working_buf);
- maruenc->working_buf = NULL;
- }
- break;
- default:
- break;
- }
-
- return ret;
-}
-
-gboolean
-gst_maruenc_register (GstPlugin *plugin, GList *element)
-{
- GTypeInfo typeinfo = {
- sizeof (GstMaruEncClass),
- (GBaseInitFunc) gst_maruenc_base_init,
- NULL,
- (GClassInitFunc) gst_maruenc_class_init,
- NULL,
- NULL,
- sizeof (GstMaruEnc),
- 0,
- (GInstanceInitFunc) gst_maruenc_init,
- };
-
- GType type;
- gchar *type_name;
- gint rank = GST_RANK_PRIMARY;
- GList *elem = element;
- CodecElement *codec = NULL;
-
- if (!elem) {
- return FALSE;
- }
-
- /* register element */
- do {
- codec = (CodecElement *)(elem->data);
- if (!codec) {
- return FALSE;
- }
-
- if (codec->codec_type != CODEC_TYPE_ENCODE) {
- continue;
- }
-
- type_name = g_strdup_printf ("maru_%senc", codec->name);
- type = g_type_from_name (type_name);
- if (!type) {
- type = g_type_register_static (GST_TYPE_ELEMENT, type_name, &typeinfo, 0);
- g_type_set_qdata (type, GST_MARUENC_PARAMS_QDATA, (gpointer) codec);
- }
-
- if (!gst_element_register (plugin, type_name, rank, type)) {
- g_free (type_name);
- return FALSE;
- }
- g_free (type_name);
- } while ((elem = elem->next));
-
- return TRUE;
-}
}
}
-static void
-codec_buffer_free (gpointer start)
-{
- release_device_mem (device_fd, start);
-}
-
static GstFlowReturn
codec_buffer_alloc_and_copy (GstPad *pad, guint64 offset, guint size,
GstCaps *caps, GstBuffer **buf)
GstMaruDec *marudec;
CodecContext *ctx;
CodecDevice *dev;
-
- *buf = gst_buffer_new ();
+ GstMapInfo mapinfo;
marudec = (GstMaruDec *)gst_pad_get_element_private(pad);
ctx = marudec->context;
// FIXME: we must aligned buffer offset.
buffer = g_malloc (size);
- GST_BUFFER_FREE_FUNC (*buf) = g_free;
-
memcpy (buffer, device_mem + mem_offset, size);
release_device_mem(dev->fd, device_mem + mem_offset);
} else {
// address of "device_mem" and "opaque" is aleady aligned.
buffer = (gpointer)(device_mem + mem_offset);
- GST_BUFFER_FREE_FUNC (*buf) = codec_buffer_free;
- GST_DEBUG ("device memory start: 0x%p, offset 0x%x", (intptr_t)buffer, mem_offset);
+ GST_DEBUG ("device memory start: 0x%p, offset 0x%x", (void *)buffer, mem_offset);
}
- GST_BUFFER_DATA (*buf) = GST_BUFFER_MALLOCDATA (*buf) = (void *)buffer;
- GST_BUFFER_SIZE (*buf) = size;
+ *buf = gst_buffer_new_and_alloc (size);
+ gst_buffer_map (*buf, &mapinfo, GST_MAP_READWRITE);
+ mapinfo.data = (guint8 *)buffer;
+ mapinfo.size = size;
GST_BUFFER_OFFSET (*buf) = offset;
-
- if (caps) {
- gst_buffer_set_caps (*buf, caps);
- }
+ gst_buffer_unmap (*buf, &mapinfo);
return GST_FLOW_OK;
}
}
GST_DEBUG ("decode_audio 2. ctx_id: %d, buffer = 0x%x",
- ctx->index, device_mem + opaque.buffer_size);
+ ctx->index, (unsigned int) (device_mem + opaque.buffer_size));
len = codec_decode_audio_data_from (have_data, samples,
&ctx->audio, device_mem + opaque.buffer_size);
gint64 offset;
} GstTSInfo;
-typedef struct _GstMaruDec
+typedef struct _GstMaruVidDec
{
- GstElement element;
-
- GstPad *srcpad;
- GstPad *sinkpad;
+ GstVideoDecoder element;
CodecContext *context;
CodecDevice *dev;
- union {
- struct {
- gint width, height;
- gint clip_width, clip_height;
- gint par_n, par_d;
- gint fps_n, fps_d;
- gint old_fps_n, old_fps_d;
- gboolean interlaced;
-
- enum PixelFormat pix_fmt;
- } video;
- struct {
- gint channels;
- gint samplerate;
- gint depth;
- } audio;
- } format;
+ GstVideoCodecState *input_state;
+ GstVideoCodecState *output_state;
+
+ /* current context */
+ enum PixelFormat ctx_pix_fmt;
+ gint ctx_width;
+ gint ctx_height;
+ gint ctx_par_n;
+ gint ctx_par_d;
+ gint ctx_ticks;
+ gint ctx_time_d;
+ gint ctx_time_n;
+ gint ctx_interlaced;
+ GstBuffer *palette;
gboolean opened;
gboolean discont;
gint64 processed;
gint64 dropped;
-
- /* GstSegment can be used for two purposes:
- * 1. performing seeks (handling seek events)
- * 2. tracking playback regions (handling newsegment events)
- */
- GstSegment segment;
-
GstTSInfo ts_info[MAX_TS_MASK + 1];
gint ts_idx;
- /* reverse playback queue */
- GList *queued;
+ // decode result
+ bool is_last_buffer;
+ int mem_offset;
+ bool is_using_new_decode_api;
+
+ int max_threads;
+
+ GstCaps *last_caps;
+} GstMaruVidDec;
+
+typedef struct _GstMaruDec
+{
+ GstAudioDecoder parent;
+ GstElement element;
+
+ /* decoding */
+ CodecContext *context;
+ CodecDevice *dev;
+ gboolean opened;
+
+  /* prevent reopening the decoder on GST_EVENT_CAPS when caps are the same as last time. */
+ GstCaps *last_caps;
+
+ /* Stores current buffers to push as GstAudioDecoder wants 1:1 mapping for input/output buffers */
+ GstBuffer *outbuf;
+
+ /* current output format */
+ GstAudioInfo info;
+ GstAudioChannelPosition ffmpeg_layout[64];
+ gboolean needs_reorder;
// decode result
bool is_last_buffer;
bool is_using_new_decode_api;
} GstMaruDec;
+typedef struct _GstMaruAudDec
+{
+ GstAudioDecoder parent;
+
+  /* prevent reopening the decoder on GST_EVENT_CAPS when caps are the same as last time. */
+ GstCaps *last_caps;
+
+ /* Stores current buffers to push as GstAudioDecoder wants 1:1 mapping for input/output buffers */
+ GstBuffer *outbuf;
+
+ /* current output format */
+ GstAudioInfo info;
+ GstAudioChannelPosition layout[64];
+ gboolean needs_reorder;
+
+ /* decoding */
+ CodecContext *context;
+ gboolean opened;
+
+ struct {
+ gint channels;
+ gint sample_rate;
+ gint depth;
+ } audio;
+
+ CodecDevice *dev;
+
+ GstTSInfo ts_info[MAX_TS_MASK + 1];
+ gint ts_idx;
+ // decode result
+ bool is_last_buffer;
+ int mem_offset;
+ bool is_using_new_decode_api;
+
+} GstMaruAudDec;
+
+
typedef struct {
int
(*init) (CodecContext *ctx, CodecElement *codec, CodecDevice *dev);
void
(*deinit) (CodecContext *ctx, CodecDevice *dev);
int
- (*decode_video) (GstMaruDec *marudec, uint8_t *in_buf, int in_size,
+ (*decode_video) (GstMaruVidDec *marudec, uint8_t *in_buf, int in_size,
gint idx, gint64 in_offset, GstBuffer **out_buf, int *have_data);
int
(*decode_audio) (CodecContext *ctx, int16_t *samples,
invoke_device_api(int fd, int32_t ctx_index, int32_t api_index,
uint32_t *mem_offset, int32_t buffer_size)
{
+ GST_DEBUG (" >> Enter");
IOCTL_Data ioctl_data = { 0, };
int ret = -1;
*mem_offset = ioctl_data.mem_offset;
}
+ GST_DEBUG (" >> Leave");
return ret;
}
static int
secure_device_mem (int fd, guint ctx_id, guint buf_size, gpointer* buffer)
{
+ GST_DEBUG (" >> Enter");
int ret = 0;
IOCTL_Data data;
*buffer = (gpointer)((uint32_t)device_mem + data.mem_offset);
GST_DEBUG ("device_mem %p, offset_size 0x%x", device_mem, data.mem_offset);
+ GST_DEBUG (" >> Leave");
return ret;
}
static void
release_device_mem (int fd, gpointer start)
{
+ GST_DEBUG (" >> Enter");
int ret;
uint32_t offset = start - device_mem;
if (ret < 0) {
GST_ERROR ("failed to release buffer\n");
}
+ GST_DEBUG (" >> Leave");
}
static int
return ctx_index;
}
-
+// TODO: check whether this code is still needed
+#if 0
static void
buffer_free (gpointer start)
{
{
release_device_mem (device_fd, start - OFFSET_PICTURE_BUFFER);
}
-
+#endif
static inline void fill_size_header(void *buffer, size_t size)
{
*((uint32_t *)buffer) = (uint32_t)size;
int ret;
uint32_t mem_offset;
+ GST_DEBUG (" >> Enter");
if ((ctx->index = get_context_index(dev->fd)) <= 0) {
GST_ERROR ("failed to get a context index");
return -1;
ret = invoke_device_api (dev->fd, ctx->index, CODEC_INIT, &mem_offset, SMALLDATA);
if (ret < 0) {
+ GST_ERROR ("invoke_device_api failed");
return -1;
}
release_device_mem(dev->fd, device_mem + mem_offset);
+ GST_DEBUG (" >> Leave");
return opened;
}
} __attribute__((packed));
static int
-decode_video (GstMaruDec *marudec, uint8_t *inbuf, int inbuf_size,
+decode_video (GstMaruVidDec *marudec, uint8_t *inbuf, int inbuf_size,
gint idx, gint64 in_offset, GstBuffer **out_buf, int *have_data)
{
+ GST_DEBUG (" >> Enter");
CodecContext *ctx = marudec->context;
CodecDevice *dev = marudec->dev;
int len = 0, ret = 0;
release_device_mem(dev->fd, device_mem + mem_offset);
}
+ GST_DEBUG (" >> Leave");
return len;
}
+GstFlowReturn
+alloc_and_copy (GstMaruVidDec *marudec, guint64 offset, guint size,
+ GstCaps *caps, GstBuffer **buf)
+{
+ GST_DEBUG (" >> enter");
+ bool is_last_buffer = 0;
+ uint32_t mem_offset;
+ CodecContext *ctx;
+ CodecDevice *dev;
+ GstMapInfo mapinfo;
+
+ ctx = marudec->context;
+ dev = marudec->dev;
+
+ if (marudec->is_using_new_decode_api) {
+ is_last_buffer = marudec->is_last_buffer;
+ mem_offset = marudec->mem_offset;
+ } else {
+ ctx = marudec->context;
+
+ mem_offset = 0;
+
+ int ret = invoke_device_api(dev->fd, ctx->index, CODEC_PICTURE_COPY, &mem_offset, size);
+ if (ret < 0) {
+ GST_DEBUG ("failed to get available buffer");
+ return GST_FLOW_ERROR;
+ }
+ is_last_buffer = ret;
+ }
+
+ gpointer *buffer = NULL;
+ is_last_buffer = 1;
+ if (is_last_buffer) {
+ // FIXME: we must aligned buffer offset.
+ //buffer = g_malloc (size);
+ gst_buffer_map (*buf, &mapinfo, GST_MAP_READWRITE);
+
+ if (marudec->is_using_new_decode_api) {
+ memcpy (mapinfo.data, device_mem + mem_offset + OFFSET_PICTURE_BUFFER, size);
+ } else {
+ memcpy (mapinfo.data, device_mem + mem_offset, size);
+ }
+ release_device_mem(dev->fd, device_mem + mem_offset);
+
+ GST_DEBUG ("secured last buffer!! Use heap buffer");
+ } else {
+    // address of "device_mem" and "opaque" is already aligned.
+ if (marudec->is_using_new_decode_api) {
+ buffer = (gpointer)(device_mem + mem_offset + OFFSET_PICTURE_BUFFER);
+ //GST_BUFFER_FREE_FUNC (*buf) = buffer_free2;
+ } else {
+ buffer = (gpointer)(device_mem + mem_offset);
+ //GST_BUFFER_FREE_FUNC (*buf) = buffer_free;
+ }
+
+
+ GST_DEBUG ("device memory start: 0x%p, offset 0x%x", (void *) buffer, mem_offset);
+ }
+
+ gst_buffer_unmap (*buf, &mapinfo);
+
+ GST_DEBUG (" >> leave");
+ return GST_FLOW_OK;
+}
+
static GstFlowReturn
buffer_alloc_and_copy (GstPad *pad, guint64 offset, guint size,
GstCaps *caps, GstBuffer **buf)
{
+ GST_DEBUG (" >> enter");
bool is_last_buffer = 0;
uint32_t mem_offset;
GstMaruDec *marudec;
CodecContext *ctx;
CodecDevice *dev;
-
- *buf = gst_buffer_new ();
+ GstMapInfo mapinfo;
marudec = (GstMaruDec *)gst_pad_get_element_private(pad);
ctx = marudec->context;
}
gpointer *buffer = NULL;
+ is_last_buffer = 1;
if (is_last_buffer) {
// FIXME: we must aligned buffer offset.
buffer = g_malloc (size);
- GST_BUFFER_FREE_FUNC (*buf) = g_free;
-
if (marudec->is_using_new_decode_api) {
memcpy (buffer, device_mem + mem_offset + OFFSET_PICTURE_BUFFER, size);
} else {
// address of "device_mem" and "opaque" is aleady aligned.
if (marudec->is_using_new_decode_api) {
buffer = (gpointer)(device_mem + mem_offset + OFFSET_PICTURE_BUFFER);
- GST_BUFFER_FREE_FUNC (*buf) = buffer_free2;
+ //GST_BUFFER_FREE_FUNC (*buf) = buffer_free2;
} else {
buffer = (gpointer)(device_mem + mem_offset);
- GST_BUFFER_FREE_FUNC (*buf) = buffer_free;
+ //GST_BUFFER_FREE_FUNC (*buf) = buffer_free;
}
- GST_DEBUG ("device memory start: 0x%p, offset 0x%x", (intptr_t)buffer, mem_offset);
+ GST_DEBUG ("device memory start: 0x%p, offset 0x%x", (void *) buffer, mem_offset);
}
- GST_BUFFER_DATA (*buf) = GST_BUFFER_MALLOCDATA (*buf) = (void *)buffer;
- GST_BUFFER_SIZE (*buf) = size;
+ *buf = gst_buffer_new ();
+ //*buf = gst_buffer_new_and_alloc (size);
+ gst_buffer_map (*buf, &mapinfo, GST_MAP_READWRITE);
+ mapinfo.data = (guint8 *)buffer;
+ mapinfo.size = size;
GST_BUFFER_OFFSET (*buf) = offset;
+ gst_buffer_unmap (*buf, &mapinfo);
- if (caps) {
- gst_buffer_set_caps (*buf, caps);
- }
-
+ GST_DEBUG (" >> leave");
return GST_FLOW_OK;
}
encode_input->inbuf_size = inbuf_size;
encode_input->in_timestamp = in_timestamp;
memcpy(&encode_input->inbuf, inbuf, inbuf_size);
+ GST_DEBUG ("insize: %d, inpts: %lld", encode_input->inbuf_size,(long long) encode_input->in_timestamp);
mem_offset = GET_OFFSET(buffer);
}
GST_DEBUG ("decode_audio. ctx_id: %d, buffer = 0x%x",
- ctx->index, device_mem + mem_offset);
+ ctx->index, (unsigned int) (device_mem + mem_offset));
struct audio_decode_output *decode_output = device_mem + mem_offset;
len = decode_output->len;
memcpy (samples, device_mem + mem_offset + OFFSET_PICTURE_BUFFER, len);
- GST_DEBUG ("decode_audio. sample_fmt %d sample_rate %d, channels %d, ch_layout %lld",
- ctx->audio.sample_fmt, ctx->audio.sample_rate, ctx->audio.channels, ctx->audio.channel_layout);
+ GST_DEBUG ("decode_audio. sample_fmt %d sample_rate %d, channels %d, ch_layout %lld, len %d",
+ ctx->audio.sample_fmt, ctx->audio.sample_rate, ctx->audio.channels,
+ ctx->audio.channel_layout, len);
release_device_mem(dev->fd, device_mem + mem_offset);
*/
#include "gstmaruutils.h"
-#include <gst/audio/multichannel.h>
+#include <gst/audio/audio-channels.h>
#include <gst/pbutils/codec-utils.h>
+typedef struct
+{
+ GstVideoFormat format;
+ enum PixelFormat pixfmt;
+} PixToFmt;
+
+/* FIXME : FILLME */
+static const PixToFmt pixtofmttable[] = {
+ /* GST_VIDEO_FORMAT_I420, */
+ {GST_VIDEO_FORMAT_I420, PIX_FMT_YUV420P},
+ /* Note : this should use a different chroma placement */
+ {GST_VIDEO_FORMAT_I420, PIX_FMT_YUVJ420P},
+
+ /* GST_VIDEO_FORMAT_YV12, */
+ /* GST_VIDEO_FORMAT_YUY2, */
+ {GST_VIDEO_FORMAT_YUY2, PIX_FMT_YUYV422},
+ /* GST_VIDEO_FORMAT_UYVY, */
+ {GST_VIDEO_FORMAT_UYVY, PIX_FMT_UYVY422},
+ /* GST_VIDEO_FORMAT_AYUV, */
+ /* GST_VIDEO_FORMAT_RGBx, */
+ /* GST_VIDEO_FORMAT_BGRx, */
+ /* GST_VIDEO_FORMAT_xRGB, */
+ /* GST_VIDEO_FORMAT_xBGR, */
+ /* GST_VIDEO_FORMAT_RGBA, */
+ {GST_VIDEO_FORMAT_RGBA, PIX_FMT_RGBA},
+ /* GST_VIDEO_FORMAT_BGRA, */
+ {GST_VIDEO_FORMAT_BGRA, PIX_FMT_BGRA},
+ /* GST_VIDEO_FORMAT_ARGB, */
+ {GST_VIDEO_FORMAT_ARGB, PIX_FMT_ARGB},
+ /* GST_VIDEO_FORMAT_ABGR, */
+ {GST_VIDEO_FORMAT_ABGR, PIX_FMT_ABGR},
+ /* GST_VIDEO_FORMAT_RGB, */
+ {GST_VIDEO_FORMAT_RGB, PIX_FMT_RGB24},
+ /* GST_VIDEO_FORMAT_BGR, */
+ {GST_VIDEO_FORMAT_BGR, PIX_FMT_BGR24},
+ /* GST_VIDEO_FORMAT_Y41B, */
+ {GST_VIDEO_FORMAT_Y41B, PIX_FMT_YUV411P},
+ /* GST_VIDEO_FORMAT_Y42B, */
+ {GST_VIDEO_FORMAT_Y42B, PIX_FMT_YUV422P},
+ {GST_VIDEO_FORMAT_Y42B, PIX_FMT_YUVJ422P},
+ /* GST_VIDEO_FORMAT_YVYU, */
+ /* GST_VIDEO_FORMAT_Y444, */
+ {GST_VIDEO_FORMAT_Y444, PIX_FMT_YUV444P},
+ {GST_VIDEO_FORMAT_Y444, PIX_FMT_YUVJ444P},
+ /* GST_VIDEO_FORMAT_v210, */
+ /* GST_VIDEO_FORMAT_v216, */
+ /* GST_VIDEO_FORMAT_NV12, */
+ {GST_VIDEO_FORMAT_NV12, PIX_FMT_NV12},
+ /* GST_VIDEO_FORMAT_NV21, */
+ {GST_VIDEO_FORMAT_NV21, PIX_FMT_NV21},
+ /* GST_VIDEO_FORMAT_GRAY8, */
+ {GST_VIDEO_FORMAT_GRAY8, PIX_FMT_GRAY8},
+ /* GST_VIDEO_FORMAT_GRAY16_BE, */
+ {GST_VIDEO_FORMAT_GRAY16_BE, PIX_FMT_GRAY16BE},
+ /* GST_VIDEO_FORMAT_GRAY16_LE, */
+ {GST_VIDEO_FORMAT_GRAY16_LE, PIX_FMT_GRAY16LE},
+ /* GST_VIDEO_FORMAT_v308, */
+ /* GST_VIDEO_FORMAT_Y800, */
+ /* GST_VIDEO_FORMAT_Y16, */
+ /* GST_VIDEO_FORMAT_RGB16, */
+ {GST_VIDEO_FORMAT_RGB16, PIX_FMT_RGB565},
+ /* GST_VIDEO_FORMAT_BGR16, */
+ /* GST_VIDEO_FORMAT_RGB15, */
+ {GST_VIDEO_FORMAT_RGB15, PIX_FMT_RGB555},
+ /* GST_VIDEO_FORMAT_BGR15, */
+ /* GST_VIDEO_FORMAT_UYVP, */
+ /* GST_VIDEO_FORMAT_A420, */
+ {GST_VIDEO_FORMAT_A420, PIX_FMT_YUVA420P},
+ /* GST_VIDEO_FORMAT_RGB8_PALETTED, */
+ {GST_VIDEO_FORMAT_RGB8P, PIX_FMT_PAL8},
+ /* GST_VIDEO_FORMAT_YUV9, */
+ {GST_VIDEO_FORMAT_YUV9, PIX_FMT_YUV410P},
+ /* GST_VIDEO_FORMAT_YVU9, */
+ /* GST_VIDEO_FORMAT_IYU1, */
+ /* GST_VIDEO_FORMAT_ARGB64, */
+ /* GST_VIDEO_FORMAT_AYUV64, */
+ /* GST_VIDEO_FORMAT_r210, */
+ {GST_VIDEO_FORMAT_I420_10LE, PIX_FMT_YUV420P10LE},
+ {GST_VIDEO_FORMAT_I420_10BE, PIX_FMT_YUV420P10BE},
+ {GST_VIDEO_FORMAT_I422_10LE, PIX_FMT_YUV422P10LE},
+ {GST_VIDEO_FORMAT_I422_10BE, PIX_FMT_YUV422P10BE},
+ {GST_VIDEO_FORMAT_Y444_10LE, PIX_FMT_YUV444P10LE},
+ {GST_VIDEO_FORMAT_Y444_10BE, PIX_FMT_YUV444P10BE},
+};
+
gint
-gst_maru_smpfmt_depth (int smp_fmt)
+gst_maru_smpfmt_depth (int32_t smp_fmt)
{
+ GST_DEBUG (" >> ENTER ");
gint depth = -1;
switch (smp_fmt) {
CH_FRONT_LEFT, GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT}, {
CH_FRONT_RIGHT, GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT}, {
CH_FRONT_CENTER, GST_AUDIO_CHANNEL_POSITION_FRONT_CENTER}, {
- CH_LOW_FREQUENCY, GST_AUDIO_CHANNEL_POSITION_LFE}, {
+ CH_LOW_FREQUENCY, GST_AUDIO_CHANNEL_POSITION_LFE1}, {
CH_BACK_LEFT, GST_AUDIO_CHANNEL_POSITION_REAR_LEFT}, {
CH_BACK_RIGHT, GST_AUDIO_CHANNEL_POSITION_REAR_RIGHT}, {
CH_FRONT_LEFT_OF_CENTER, GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT_OF_CENTER}, {
CH_STEREO_RIGHT, GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT}
};
-static GstAudioChannelPosition *
-gst_ff_channel_layout_to_gst (guint64 channel_layout, guint channels)
+static guint64
+gst_ffmpeg_channel_positions_to_layout (GstAudioChannelPosition * pos,
+ gint channels)
{
- guint nchannels = 0, i, j;
- GstAudioChannelPosition *pos = NULL;
- gboolean none_layout = FALSE;
-
- for (i = 0; i < 64; i++) {
- if ((channel_layout & (G_GUINT64_CONSTANT (1) << i)) != 0) {
- nchannels++;
+ gint i, j;
+ guint64 ret = 0;
+ gint channels_found = 0;
+
+ if (!pos)
+ return 0;
+
+ for (i = 0; i < channels; i++) {
+ for (j = 0; j < G_N_ELEMENTS (_ff_to_gst_layout); j++) {
+ if (_ff_to_gst_layout[j].gst == pos[i]) {
+ ret |= _ff_to_gst_layout[j].ff;
+ channels_found++;
+ break;
+ }
}
}
+ if (channels_found != channels)
+ return 0;
+ return ret;
+}
+
+gboolean
+gst_ffmpeg_channel_layout_to_gst (guint64 channel_layout, gint channels,
+ GstAudioChannelPosition * pos)
+{
+ guint nchannels = 0;
+ gboolean none_layout = FALSE;
+
if (channel_layout == 0) {
nchannels = channels;
none_layout = TRUE;
- }
+ } else {
+ guint i, j;
- if (nchannels != channels) {
- GST_ERROR ("Number of channels is different (%u != %u)", channels,
- nchannels);
- return NULL;
- }
+ for (i = 0; i < 64; i++) {
+ if ((channel_layout & (G_GUINT64_CONSTANT (1) << i)) != 0) {
+ nchannels++;
+ }
+ }
+
+ if (nchannels != channels) {
+ GST_ERROR ("Number of channels is different (%u != %u)", channels,
+ nchannels);
+ nchannels = channels;
+ none_layout = TRUE;
+ } else {
- pos = g_new (GstAudioChannelPosition, nchannels);
+ for (i = 0, j = 0; i < G_N_ELEMENTS (_ff_to_gst_layout); i++) {
+ if ((channel_layout & _ff_to_gst_layout[i].ff) != 0) {
+ pos[j++] = _ff_to_gst_layout[i].gst;
- for (i = 0, j = 0; i < G_N_ELEMENTS (_ff_to_gst_layout); i++) {
- if ((channel_layout & _ff_to_gst_layout[i].ff) != 0) {
- pos[j++] = _ff_to_gst_layout[i].gst;
+ if (_ff_to_gst_layout[i].gst == GST_AUDIO_CHANNEL_POSITION_NONE)
+ none_layout = TRUE;
+ }
+ }
- if (_ff_to_gst_layout[i].gst == GST_AUDIO_CHANNEL_POSITION_NONE) {
+ if (j != nchannels) {
+ GST_WARNING
+ ("Unknown channels in channel layout - assuming NONE layout");
none_layout = TRUE;
}
}
}
- if (j != nchannels) {
- GST_WARNING ("Unknown channels in channel layout - assuming NONE layout");
- none_layout = TRUE;
- }
-
- if (!none_layout && !gst_audio_check_channel_positions (pos, nchannels)) {
+ if (!none_layout
+ && !gst_audio_check_valid_channel_positions (pos, nchannels, FALSE)) {
GST_ERROR ("Invalid channel layout %" G_GUINT64_FORMAT
- " - assuming NONE layout", channel_layout);
+ " - assuming NONE layout", channel_layout);
none_layout = TRUE;
}
if (none_layout) {
if (nchannels == 1) {
- pos[0] = GST_AUDIO_CHANNEL_POSITION_FRONT_MONO;
+ pos[0] = GST_AUDIO_CHANNEL_POSITION_MONO;
} else if (nchannels == 2) {
pos[0] = GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT;
pos[1] = GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT;
- } else if (channel_layout == 0) {
- g_free (pos);
- pos = NULL;
} else {
- for (i = 0; i < nchannels; i++) {
+ guint i;
+
+ for (i = 0; i < nchannels; i++)
pos[i] = GST_AUDIO_CHANNEL_POSITION_NONE;
- }
}
}
- if (nchannels == 1 && pos[0] == GST_AUDIO_CHANNEL_POSITION_FRONT_CENTER) {
- GST_DEBUG ("mono common case; won't set channel positions");
- g_free (pos);
- pos = NULL;
- } else if (nchannels == 2 && pos[0] == GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT
- && pos[1] == GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT) {
- GST_DEBUG ("stereo common case; won't set channel positions");
- g_free (pos);
- pos = NULL;
+ return TRUE;
+}
+
+static gboolean
+_gst_value_list_contains (const GValue * list, const GValue * value)
+{
+ guint i, n;
+ const GValue *tmp;
+
+ n = gst_value_list_get_size (list);
+ for (i = 0; i < n; i++) {
+ tmp = gst_value_list_get_value (list, i);
+ if (gst_value_compare (value, tmp) == GST_VALUE_EQUAL)
+ return TRUE;
}
- return pos;
+ return FALSE;
+}
+
+static void
+gst_maru_video_set_pix_fmts (GstCaps * caps, const int32_t *fmts)
+{
+ GValue va = { 0, };
+ GValue v = { 0, };
+ GstVideoFormat format;
+ gint i;
+
+ if (!fmts || fmts[0] == -1) {
+
+ g_value_init (&va, GST_TYPE_LIST);
+ g_value_init (&v, G_TYPE_STRING);
+ for (i = 0; i <= PIX_FMT_NB; i++) {
+ format = gst_maru_pixfmt_to_videoformat (i);
+ if (format == GST_VIDEO_FORMAT_UNKNOWN)
+ continue;
+ g_value_set_string (&v, gst_video_format_to_string (format));
+ gst_value_list_append_value (&va, &v);
+ }
+ gst_caps_set_value (caps, "format", &va);
+ g_value_unset (&v);
+ g_value_unset (&va);
+ return;
+ }
+
+ /* Only a single format */
+ g_value_init (&va, GST_TYPE_LIST);
+ g_value_init (&v, G_TYPE_STRING);
+ i = 0;
+ while (i < 4) {
+ format = gst_maru_pixfmt_to_videoformat (fmts[i]);
+ if (format != GST_VIDEO_FORMAT_UNKNOWN) {
+ g_value_set_string (&v, gst_video_format_to_string (format));
+ /* Only append values we don't have yet */
+ if (!_gst_value_list_contains (&va, &v))
+ gst_value_list_append_value (&va, &v);
+ }
+ i++;
+ }
+ if (gst_value_list_get_size (&va) == 1) {
+ /* The single value is still in v */
+ gst_caps_set_value (caps, "format", &v);
+ } else if (gst_value_list_get_size (&va) > 1) {
+ gst_caps_set_value (caps, "format", &va);
+ }
+ g_value_unset (&v);
+ g_value_unset (&va);
+}
+
+static gboolean
+caps_has_field (GstCaps * caps, const gchar * field)
+{
+ guint i, n;
+
+ n = gst_caps_get_size (caps);
+ for (i = 0; i < n; i++) {
+ GstStructure *s = gst_caps_get_structure (caps, i);
+
+ if (gst_structure_has_field (s, field))
+ return TRUE;
+ }
+
+ return FALSE;
}
GstCaps*
gst_maru_codectype_to_video_caps (CodecContext *ctx, const char *name,
gboolean encode, CodecElement *codec)
{
+ GST_DEBUG (" >> ENTER ");
GstCaps *caps;
- GST_DEBUG ("context: %p, codec: %s, encode: %d, pixel format: %d",
- ctx, name, encode, ctx->video.pix_fmt);
+ if (ctx) {
+ GST_DEBUG ("context: %p, codec: %s, encode: %d, pixel format: %d",
+ ctx, name, encode, ctx->video.pix_fmt);
+ } else {
+ GST_DEBUG ("context: %p, codec: %s, encode: %d",
+ ctx, name, encode);
+ }
if (ctx) {
caps = gst_maru_pixfmt_to_caps (ctx->video.pix_fmt, ctx, name);
} else {
- GstCaps *temp;
- enum PixelFormat i;
- CodecContext ctx;
-
- caps = gst_caps_new_empty ();
- for (i = 0; i <= PIX_FMT_NB; i++) {
- temp = gst_maru_pixfmt_to_caps (i, encode ? &ctx : NULL, name);
- if (temp != NULL) {
- gst_caps_append (caps, temp);
- }
- }
+ caps =
+ gst_maru_video_caps_new (ctx, name, "video/x-raw", NULL);
+ if (!caps_has_field (caps, "format"))
+ gst_maru_video_set_pix_fmts (caps, codec ? codec->pix_fmts : NULL);
}
return caps;
gst_maru_codectype_to_audio_caps (CodecContext *ctx, const char *name,
gboolean encode, CodecElement *codec)
{
+ GST_DEBUG (" >> ENTER ");
GstCaps *caps = NULL;
GST_DEBUG ("context: %p, codec: %s, encode: %d, codec: %p",
gst_maru_codectype_to_caps (int media_type, CodecContext *ctx,
const char *name, gboolean encode)
{
+ GST_DEBUG (" >> ENTER ");
GstCaps *caps;
switch (media_type) {
void
gst_maru_caps_to_pixfmt (const GstCaps *caps, CodecContext *ctx, gboolean raw)
{
+ GST_DEBUG (" >> ENTER ");
GstStructure *str;
const GValue *fps;
const GValue *par = NULL;
g_return_if_fail (fps != NULL && GST_VALUE_HOLDS_FRACTION (fps));
if (strcmp (gst_structure_get_name (str), "video/x-raw-yuv") == 0) {
- guint32 fourcc;
-
- if (gst_structure_get_fourcc (str, "format", &fourcc)) {
- switch (fourcc) {
- case GST_MAKE_FOURCC ('Y', 'U', 'Y', '2'):
- ctx->video.pix_fmt = PIX_FMT_YUYV422;
- break;
- case GST_MAKE_FOURCC ('I', '4', '2', '0'):
- ctx->video.pix_fmt = PIX_FMT_YUV420P;
- break;
- case GST_MAKE_FOURCC ('A', '4', '2', '0'):
- ctx->video.pix_fmt = PIX_FMT_YUVA420P;
- break;
- case GST_MAKE_FOURCC ('Y', '4', '1', 'B'):
- ctx->video.pix_fmt = PIX_FMT_YUV411P;
- break;
- case GST_MAKE_FOURCC ('Y', '4', '2', 'B'):
- ctx->video.pix_fmt = PIX_FMT_YUV422P;
- break;
- case GST_MAKE_FOURCC ('Y', 'U', 'V', '9'):
- ctx->video.pix_fmt = PIX_FMT_YUV410P;
- break;
- }
+ const gchar *format;
+
+ if ((format = gst_structure_get_string (str, "format"))) {
+ if (g_str_equal (format, "YUY2"))
+ ctx->video.pix_fmt = PIX_FMT_YUYV422;
+ else if (g_str_equal (format, "I420"))
+ ctx->video.pix_fmt = PIX_FMT_YUV420P;
+ else if (g_str_equal (format, "A420"))
+ ctx->video.pix_fmt = PIX_FMT_YUVA420P;
+ else if (g_str_equal (format, "Y41B"))
+ ctx->video.pix_fmt = PIX_FMT_YUV411P;
+ else if (g_str_equal (format, "Y42B"))
+ ctx->video.pix_fmt = PIX_FMT_YUV422P;
+ else if (g_str_equal (format, "YUV9"))
+ ctx->video.pix_fmt = PIX_FMT_YUV410P;
+ else {
+ GST_WARNING ("couldn't convert format %s" " to a pixel format",
+ format);
+ }
}
} else if (strcmp (gst_structure_get_name (str), "video/x-raw-rgb") == 0) {
gint bpp = 0, rmask = 0, endianness = 0;
void
gst_maru_caps_to_smpfmt (const GstCaps *caps, CodecContext *ctx, gboolean raw)
{
+ GST_DEBUG (" >> ENTER ");
GstStructure *str;
gint depth = 0, width = 0, endianness = 0;
gboolean signedness = FALSE;
gst_maru_caps_with_codecname (const char *name, int media_type,
const GstCaps *caps, CodecContext *ctx)
{
+ GST_DEBUG (" >> ENTER ");
GstStructure *structure;
const GValue *value;
- const GstBuffer *buf;
+ GstBuffer *buf;
if (!ctx || !gst_caps_get_size (caps)) {
return;
structure = gst_caps_get_structure (caps, 0);
if ((value = gst_structure_get_value (structure, "codec_data"))) {
+ GstMapInfo mapinfo;
guint size;
guint8 *data;
- buf = GST_BUFFER_CAST (gst_value_get_mini_object (value));
- size = GST_BUFFER_SIZE (buf);
- data = GST_BUFFER_DATA (buf);
+ buf = (GstBuffer *) gst_value_get_buffer (value);
+ gst_buffer_map (buf, &mapinfo, GST_MAP_READ);
+ size = mapinfo.size;
+ data = mapinfo.data;
+ gst_buffer_unmap (buf, &mapinfo);
GST_DEBUG ("extradata: %p, size: %d", data, size);
if (ctx->codecdata) {
gchar *codec_name,
CodecContext *context)
{
+ GST_DEBUG (" >> ENTER ");
const gchar *mimetype;
const GstStructure *str;
int media_type = AVMEDIA_TYPE_UNKNOWN;
break;
case 3:
{
- guint32 fourcc;
-
g_strlcpy (codec_name, "wmv3", CODEC_NAME_BUFFER_SIZE);
- if (gst_structure_get_fourcc (str, "format", &fourcc)) {
- if ((fourcc == GST_MAKE_FOURCC ('W', 'V', 'C', '1')) ||
- (fourcc == GST_MAKE_FOURCC ('W', 'M', 'V', 'A'))) {
+ const gchar *format;
+ if ((format = gst_structure_get_string (str, "format"))) {
+ if ((g_str_equal (format, "WVC1")) || (g_str_equal (format, "WMVA"))) {
g_strlcpy (codec_name, "vc1", CODEC_NAME_BUFFER_SIZE);
}
}
void
gst_maru_caps_with_codectype (int media_type, const GstCaps *caps, CodecContext *ctx)
{
+ GST_DEBUG (" >> ENTER ");
if (ctx == NULL) {
return;
}
gst_maru_video_caps_new (CodecContext *ctx, const char *name,
const char *mimetype, const char *fieldname, ...)
{
- GstStructure *structure = NULL;
+ GST_DEBUG (" >> ENTER ");
GstCaps *caps = NULL;
va_list var_args;
gint i;
- GST_LOG ("context: %p, name: %s, mimetype: %s", ctx, name, mimetype);
+ GST_DEBUG ("context: %p, name: %s, mimetype: %s", ctx, name, mimetype);
if (ctx != NULL && ctx->video.width != -1) {
gint num, denom;
denom = ctx->video.fps_n;
if (!denom) {
- GST_LOG ("invalid framerate: %d/0, -> %d/1", num, num);
+ GST_DEBUG ("invalid framerate: %d/0, -> %d/1", num, num);
+ denom = 1;
}
if (gst_util_fraction_compare (num, denom, 1000, 1) > 0) {
- GST_LOG ("excessive framerate: %d/%d, -> 0/1", num, denom);
+ GST_DEBUG ("excessive framerate: %d/%d, -> 0/1", num, denom);
num = 0;
denom = 1;
}
- GST_LOG ("setting framerate: %d/%d", num, denom);
+ GST_DEBUG ("setting framerate: %d/%d", num, denom);
gst_caps_set_simple (caps,
"framerate", GST_TYPE_FRACTION, num, denom, NULL);
} else {
gst_caps_append (caps, temp);
}
} else if (strcmp (name, "none") == 0) {
- GST_LOG ("default caps");
+ GST_DEBUG ("default caps");
}
}
* default unfixed setting */
if (!caps) {
GST_DEBUG ("Creating default caps");
- caps = gst_caps_new_simple (mimetype,
- "width", GST_TYPE_INT_RANGE, 16, 4096,
- "height", GST_TYPE_INT_RANGE, 16, 4096,
- "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1, NULL);
+ caps = gst_caps_new_empty_simple (mimetype);
}
- for (i = 0; i < gst_caps_get_size (caps); i++) {
- va_start (var_args, fieldname);
- structure = gst_caps_get_structure (caps, i);
- gst_structure_set_valist (structure, fieldname, var_args);
- va_end (var_args);
- }
+ va_start (var_args, fieldname);
+ gst_caps_set_simple_valist (caps, fieldname, var_args);
+ va_end (var_args);
return caps;
}
gst_maru_audio_caps_new (CodecContext *ctx, const char *name,
const char *mimetype, const char *fieldname, ...)
{
- GstStructure *structure = NULL;
+ GST_DEBUG (" >> ENTER ");
+
GstCaps *caps = NULL;
gint i;
va_list var_args;
if (ctx != NULL && ctx->audio.channels != -1) {
- GstAudioChannelPosition *pos;
- guint64 channel_layout = ctx->audio.channel_layout;
-
- if (channel_layout == 0) {
- const guint64 default_channel_set[] = {
- 0, 0, CH_LAYOUT_SURROUND, CH_LAYOUT_QUAD, CH_LAYOUT_5POINT0,
- CH_LAYOUT_5POINT1, 0, CH_LAYOUT_7POINT1
- };
-
- if (strcmp (name, "ac3") == 0) {
- if (ctx->audio.channels > 0 &&
- ctx->audio.channels < G_N_ELEMENTS (default_channel_set)) {
- channel_layout = default_channel_set[ctx->audio.channels - 1];
- }
- } else {
- // TODO
- }
- }
+ GstAudioChannelPosition pos[64];
+ guint64 mask;
caps = gst_caps_new_simple (mimetype,
"rate", G_TYPE_INT, ctx->audio.sample_rate,
"channels", G_TYPE_INT, ctx->audio.channels, NULL);
- pos = gst_ff_channel_layout_to_gst (channel_layout, ctx->audio.channels);
- if (pos != NULL) {
- gst_audio_set_channel_positions (gst_caps_get_structure (caps, 0), pos);
- g_free (pos);
+ if (ctx->audio.channels > 1 &&
+ gst_ffmpeg_channel_layout_to_gst (ctx->audio.channel_layout,
+ ctx->audio.channels, pos) &&
+ gst_audio_channel_positions_to_mask (pos, ctx->audio.channels, FALSE,
+ &mask)) {
+ gst_caps_set_simple (caps, "channel-mask", GST_TYPE_BITMASK, mask, NULL);
}
} else {
gint maxchannels = 2;
if (n_rates) {
GValue list = { 0, };
- GstStructure *structure;
+ //GstStructure *structure;
g_value_init(&list, GST_TYPE_LIST);
for (i = 0; i < n_rates; i++) {
gst_value_list_append_value(&list, &v);
g_value_unset(&v);
}
- structure = gst_caps_get_structure(caps, 0);
- gst_structure_set_value(structure, "rate", &list);
+ gst_caps_set_value (caps, "rate", &list);
g_value_unset(&list);
} else {
gst_caps_set_simple(caps, "rate", GST_TYPE_INT_RANGE, 4000, 96000, NULL);
}
}
- for (i = 0; i < gst_caps_get_size (caps); i++) {
- va_start (var_args, fieldname);
- structure = gst_caps_get_structure (caps, i);
- gst_structure_set_valist (structure, fieldname, var_args);
- va_end (var_args);
- }
+ va_start (var_args, fieldname);
+ gst_caps_set_simple_valist (caps, fieldname, var_args);
+ va_end (var_args);
return caps;
}
GstCaps *
gst_maru_pixfmt_to_caps (enum PixelFormat pix_fmt, CodecContext *ctx, const char *name)
{
+ GST_DEBUG (" >> ENTER ");
GstCaps *caps = NULL;
+ GstVideoFormat format;
- int bpp = 0, depth = 0, endianness = 0;
- gulong g_mask = 0, r_mask = 0, b_mask = 0, a_mask = 0;
- guint32 fmt = 0;
-
- switch (pix_fmt) {
- case PIX_FMT_YUV420P:
- fmt = GST_MAKE_FOURCC ('I', '4', '2', '0');
- break;
- case PIX_FMT_YUYV422:
- fmt = GST_MAKE_FOURCC ('A', '4', '2', '0');
- break;
- case PIX_FMT_RGB24:
- bpp = depth = 24;
- endianness = G_BIG_ENDIAN;
- r_mask = 0xff0000;
- g_mask = 0x00ff00;
- b_mask = 0x0000ff;
- break;
- case PIX_FMT_BGR24:
- bpp = depth = 24;
- endianness = G_BIG_ENDIAN;
- r_mask = 0x0000ff;
- g_mask = 0x00ff00;
- b_mask = 0xff0000;
- break;
- case PIX_FMT_YUV422P:
- fmt = GST_MAKE_FOURCC ('Y', '4', '2', 'B');
- break;
- case PIX_FMT_YUV444P:
- fmt = GST_MAKE_FOURCC ('Y', '4', '4', '4');
- break;
- case PIX_FMT_RGB32:
- bpp = 32;
- depth = 32;
- endianness = G_BIG_ENDIAN;
-#if (G_BYTE_ORDER == G_BIG_ENDIAN)
- r_mask = 0x00ff0000;
- g_mask = 0x0000ff00;
- b_mask = 0x000000ff;
- a_mask = 0xff000000;
-#else
- r_mask = 0x00ff0000;
- g_mask = 0x0000ff00;
- b_mask = 0x000000ff;
- a_mask = 0xff000000;
-#endif
- break;
- case PIX_FMT_YUV410P:
- fmt = GST_MAKE_FOURCC ('Y', 'U', 'V', '9');
- break;
- case PIX_FMT_YUV411P:
- fmt = GST_MAKE_FOURCC ('Y', '4', '1', 'b');
- break;
- case PIX_FMT_RGB565:
- bpp = depth = 16;
- endianness = G_BYTE_ORDER;
- r_mask = 0xf800;
- g_mask = 0x07e0;
- b_mask = 0x001f;
- break;
- case PIX_FMT_RGB555:
- bpp = 16;
- depth = 15;
- endianness = G_BYTE_ORDER;
- r_mask = 0x7c00;
- g_mask = 0x03e0;
- b_mask = 0x001f;
- break;
- default:
- break;
- }
+ format = gst_maru_pixfmt_to_videoformat (pix_fmt);
- if (caps == NULL) {
- if (bpp != 0) {
- if (r_mask != 0) {
- if (a_mask) {
- caps = gst_maru_video_caps_new (ctx, name, "video/x-raw-rgb",
- "bpp", G_TYPE_INT, bpp,
- "depth", G_TYPE_INT, depth,
- "red_mask", G_TYPE_INT, r_mask,
- "green_mask", G_TYPE_INT, g_mask,
- "blue_mask", G_TYPE_INT, b_mask,
- "alpha_mask", G_TYPE_INT, a_mask,
- "endianness", G_TYPE_INT, endianness, NULL);
- } else {
- caps = gst_maru_video_caps_new (ctx, name, "video/x-raw-rgb",
- "bpp", G_TYPE_INT, bpp,
- "depth", G_TYPE_INT, depth,
- "red_mask", G_TYPE_INT, r_mask,
- "green_mask", G_TYPE_INT, g_mask,
- "blue_mask", G_TYPE_INT, b_mask,
- "alpha_mask", G_TYPE_INT, a_mask,
- "endianness", G_TYPE_INT, endianness, NULL);
- }
- } else {
- caps = gst_maru_video_caps_new (ctx, name, "video/x-raw-rgb",
- "bpp", G_TYPE_INT, bpp,
- "depth", G_TYPE_INT, depth,
- "endianness", G_TYPE_INT, endianness, NULL);
- if (caps && ctx) {
- // set paletee
- }
- }
- } else if (fmt) {
- caps = gst_maru_video_caps_new (ctx, name, "video/x-raw-yuv",
- "format", GST_TYPE_FOURCC, fmt, NULL);
- }
+ if (format != GST_VIDEO_FORMAT_UNKNOWN) {
+ caps = gst_maru_video_caps_new (ctx, name, "video/x-raw",
+ "format", G_TYPE_STRING, gst_video_format_to_string (format), NULL);
}
if (caps != NULL) {
- GST_DEBUG ("caps for pix_fmt=%d: %", GST_PTR_FORMAT, pix_fmt, caps);
+ GST_DEBUG ("caps for pix_fmt=%d: %" GST_PTR_FORMAT, pix_fmt, caps);
} else {
- GST_LOG ("No caps found for pix_fmt=%d", pix_fmt);
+ GST_DEBUG ("No caps found for pix_fmt=%d", pix_fmt);
}
return caps;
}
-GstCaps *
-gst_maru_smpfmt_to_caps (int8_t sample_fmt, CodecContext *ctx, const char *name)
+GstVideoFormat
+gst_maru_pixfmt_to_videoformat (enum PixelFormat pixfmt)
{
- GstCaps *caps = NULL;
+ guint i;
- int bpp = 0;
- gboolean integer = TRUE;
- gboolean signedness = FALSE;
+ for (i = 0; i < G_N_ELEMENTS (pixtofmttable); i++)
+ if (pixtofmttable[i].pixfmt == pixfmt)
+ return pixtofmttable[i].format;
- switch (sample_fmt) {
- case SAMPLE_FMT_S16:
- case SAMPLE_FMT_S16P:
- signedness = TRUE;
- bpp = 16;
- break;
- case SAMPLE_FMT_S32:
- case SAMPLE_FMT_S32P:
- signedness = TRUE;
- bpp = 32;
- break;
- case SAMPLE_FMT_FLT:
- case SAMPLE_FMT_FLTP:
- integer = FALSE;
- bpp = 32;
- break;
- case SAMPLE_FMT_DBL:
- case SAMPLE_FMT_DBLP:
- integer = FALSE;
- bpp = 64;
- break;
- default:
- break;
- }
+ return GST_VIDEO_FORMAT_UNKNOWN;
+}
- GST_DEBUG ("sample format: %d", sample_fmt);
+enum PixelFormat
+gst_maru_videoformat_to_pixfmt (GstVideoFormat format)
+{
+ guint i;
- if (bpp) {
- if (integer) {
- caps = gst_maru_audio_caps_new (ctx, name, "audio/x-raw-int",
- "signed", G_TYPE_BOOLEAN, signedness,
- "endianness", G_TYPE_INT, G_BYTE_ORDER,
- "width", G_TYPE_INT, bpp, "depth", G_TYPE_INT, bpp, NULL);
- } else {
- caps = gst_maru_audio_caps_new (ctx, name, "audio/x-raw-float",
- "endianness", G_TYPE_INT, G_BYTE_ORDER,
- "width", G_TYPE_INT, bpp, NULL);
- }
+ for (i = 0; i < G_N_ELEMENTS (pixtofmttable); i++)
+ if (pixtofmttable[i].format == format)
+ return pixtofmttable[i].pixfmt;
+ return PIX_FMT_NONE;
+}
+
+void
+gst_maru_videoinfo_to_context (GstVideoInfo * info, CodecContext * context)
+{
+ gint i, bpp = 0;
+
+ context->video.width = GST_VIDEO_INFO_WIDTH (info);
+ context->video.height = GST_VIDEO_INFO_HEIGHT (info);
+ for (i = 0; i < GST_VIDEO_INFO_N_COMPONENTS (info); i++)
+ bpp += GST_VIDEO_INFO_COMP_DEPTH (info, i);
+ context->video.bpp = bpp;
+
+ context->video.ticks_per_frame = 1;
+ if (GST_VIDEO_INFO_FPS_N (info) == 0) {
+ GST_DEBUG ("Using 25/1 framerate");
+ context->video.fps_d = 25;
+ context->video.fps_n = 1;
+ } else {
+ context->video.fps_d = GST_VIDEO_INFO_FPS_N (info);
+ context->video.fps_n = GST_VIDEO_INFO_FPS_D (info);
}
- if (caps != NULL) {
+ context->video.par_n = GST_VIDEO_INFO_PAR_N (info);
+ context->video.par_d = GST_VIDEO_INFO_PAR_D (info);
+
+ context->video.pix_fmt =
+ gst_maru_videoformat_to_pixfmt (GST_VIDEO_INFO_FORMAT (info));
+}
+
+GstCaps *
+gst_maru_smpfmt_to_caps (int8_t sample_fmt, CodecContext *ctx, const char *name)
+{
+ GstCaps *caps = NULL;
+ GstAudioFormat format;
+
+ format = gst_maru_smpfmt_to_audioformat (sample_fmt);
+
+ if (format != GST_AUDIO_FORMAT_UNKNOWN) {
+ caps = gst_maru_audio_caps_new (ctx, name, "audio/x-raw",
+ "format", G_TYPE_STRING, gst_audio_format_to_string (format),
+ "layout", G_TYPE_STRING, "interleaved", NULL);
GST_LOG ("caps for sample_fmt=%d: %" GST_PTR_FORMAT, sample_fmt, caps);
} else {
GST_LOG ("No caps found for sample_fmt=%d", sample_fmt);
GstCaps *
gst_maru_codecname_to_caps (const char *name, CodecContext *ctx, gboolean encode)
{
+ GST_DEBUG (" >> ENTER");
GstCaps *caps = NULL;
- GST_LOG ("codec: %s, context: %p, encode: %d", name, ctx, encode);
+ GST_DEBUG ("codec: %s, context: %p, encode: %d", name, ctx, encode);
if (strcmp (name, "mpegvideo") == 0) {
caps = gst_maru_video_caps_new (ctx, name, "video/mpeg",
"wmvversion", G_TYPE_INT, 3, NULL);
} else if (strcmp (name, "vc1") == 0) {
caps = gst_maru_video_caps_new (ctx, name, "video/x-wmv",
- "wmvversion", G_TYPE_INT, 3, "format", GST_TYPE_FOURCC,
- GST_MAKE_FOURCC ('W', 'V', 'C', '1'), NULL);
+ "wmvversion", G_TYPE_INT, 3, "format",
+ G_TYPE_STRING, "WVC1", NULL);
} else if (strcmp (name, "aac") == 0) {
caps = gst_maru_audio_caps_new (ctx, name, "audio/mpeg", NULL);
if (!encode) {
}
if (caps != NULL) {
+ GST_DEBUG ("caps is NOT null");
if (ctx && ctx->codecdata_size > 0) {
GST_DEBUG ("codec_data size %d", ctx->codecdata_size);
GstBuffer *data = gst_buffer_new_and_alloc (ctx->codecdata_size);
- memcpy (GST_BUFFER_DATA(data), ctx->codecdata, ctx->codecdata_size);
+ gst_buffer_fill (data, 0, ctx->codecdata, ctx->codecdata_size);
gst_caps_set_simple (caps, "codec_data", GST_TYPE_BUFFER, data, NULL);
gst_buffer_unref (data);
}
- GST_LOG ("caps for codec %s %" GST_PTR_FORMAT, name, caps);
+ GST_DEBUG ("caps for codec %s %" GST_PTR_FORMAT, name, caps);
} else {
- GST_LOG ("No caps found for codec %s", name);
+ GST_DEBUG ("No caps found for codec %s", name);
}
return caps;
void
gst_maru_init_pix_fmt_info (void)
{
+ GST_DEBUG (" >> ENTER ");
pix_fmt_info[PIX_FMT_YUV420P].x_chroma_shift = 1,
pix_fmt_info[PIX_FMT_YUV420P].y_chroma_shift = 1;
int
gst_maru_avpicture_size (int pix_fmt, int width, int height)
{
+ GST_DEBUG (" >> ENTER ");
int size, w2, h2, size2;
int stride, stride2;
int fsize;
int
gst_maru_align_size (int buf_size)
{
+ GST_DEBUG (" >> ENTER ");
int i, align_size;
align_size = buf_size / 1024;
return align_size;
}
+
+/* Map a libav-style SampleFormat to the corresponding GstAudioFormat.
+ * Planar variants collapse onto the interleaved GStreamer format, since
+ * GStreamer 1.0 expresses layout separately (the "layout" caps field).
+ * Returns GST_AUDIO_FORMAT_UNKNOWN for any unmapped sample format. */
+GstAudioFormat
+gst_maru_smpfmt_to_audioformat(int32_t sample_fmt)
+{
+  switch (sample_fmt) {
+  case SAMPLE_FMT_U8:
+  case SAMPLE_FMT_U8P:
+    return GST_AUDIO_FORMAT_U8;
+  case SAMPLE_FMT_S16:
+  case SAMPLE_FMT_S16P:
+    return GST_AUDIO_FORMAT_S16;
+  case SAMPLE_FMT_S32:
+  case SAMPLE_FMT_S32P:
+    return GST_AUDIO_FORMAT_S32;
+  case SAMPLE_FMT_FLT:
+  case SAMPLE_FMT_FLTP:
+    return GST_AUDIO_FORMAT_F32;
+  case SAMPLE_FMT_DBL:
+  case SAMPLE_FMT_DBLP:
+    return GST_AUDIO_FORMAT_F64;
+  default:
+    /* unknown/unsupported sample format */
+    return GST_AUDIO_FORMAT_UNKNOWN;
+  }
+}
+
+/* Convert an ffmpeg-style channel-layout bitmask into an array of
+ * GstAudioChannelPosition (@pos must hold at least @channels entries;
+ * callers pass a 64-entry array).  Falls back to a NONE/default layout
+ * when the mask is empty, its popcount disagrees with @channels, it
+ * contains positions unknown to _ff_to_gst_layout, or the resulting
+ * position set is rejected by gst_audio_check_valid_channel_positions().
+ * Always returns TRUE. */
+gboolean
+gst_maru_channel_layout_to_gst (guint64 channel_layout, gint channels,
+    GstAudioChannelPosition * pos)
+{
+  guint nchannels = 0;
+  gboolean none_layout = FALSE;
+
+  if (channel_layout == 0) {
+    /* no layout info supplied: synthesize a default below */
+    nchannels = channels;
+    none_layout = TRUE;
+  } else {
+    guint i, j;
+
+    /* popcount of the 64-bit layout mask */
+    for (i = 0; i < 64; i++) {
+      if ((channel_layout & (G_GUINT64_CONSTANT (1) << i)) != 0) {
+        nchannels++;
+      }
+    }
+
+    /* NOTE(review): guint vs gint comparison and %u for a gint --
+     * harmless for sane channel counts, but worth confirming. */
+    if (nchannels != channels) {
+      GST_ERROR ("Number of channels is different (%u != %u)", channels,
+          nchannels);
+      nchannels = channels;
+      none_layout = TRUE;
+    } else {
+
+      /* map each set bit to its GStreamer position, preserving the
+       * table's order (which defines the channel order) */
+      for (i = 0, j = 0; i < G_N_ELEMENTS (_ff_to_gst_layout); i++) {
+        if ((channel_layout & _ff_to_gst_layout[i].ff) != 0) {
+          pos[j++] = _ff_to_gst_layout[i].gst;
+
+          if (_ff_to_gst_layout[i].gst == GST_AUDIO_CHANNEL_POSITION_NONE)
+            none_layout = TRUE;
+        }
+      }
+
+      if (j != nchannels) {
+        GST_WARNING
+            ("Unknown channels in channel layout - assuming NONE layout");
+        none_layout = TRUE;
+      }
+    }
+  }
+
+  if (!none_layout
+      && !gst_audio_check_valid_channel_positions (pos, nchannels, FALSE)) {
+    GST_ERROR ("Invalid channel layout %" G_GUINT64_FORMAT
+        " - assuming NONE layout", channel_layout);
+    none_layout = TRUE;
+  }
+
+  /* fallback: mono, stereo, or all-NONE positions */
+  if (none_layout) {
+    if (nchannels == 1) {
+      pos[0] = GST_AUDIO_CHANNEL_POSITION_MONO;
+    } else if (nchannels == 2) {
+      pos[0] = GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT;
+      pos[1] = GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT;
+    } else {
+      guint i;
+
+      for (i = 0; i < nchannels; i++)
+        pos[i] = GST_AUDIO_CHANNEL_POSITION_NONE;
+    }
+  }
+
+  return TRUE;
+}
+
+/* Fill @context's audio fields (channels, rate, channel_layout,
+ * sample_fmt) from a negotiated GstAudioInfo.  The sample format is
+ * chosen from the codec's supported sample_fmts list (max 4 entries,
+ * -1 terminated): an exact interleaved match wins, otherwise the
+ * planar variant is used; if the codec lists no formats the
+ * interleaved format implied by @info is assumed.
+ * NOTE(review): g_assert() below aborts the process when @info holds a
+ * format with no mapping (e.g. S8) -- confirm callers can never
+ * negotiate such a format. */
+void
+gst_maru_audioinfo_to_context (GstAudioInfo *info, CodecContext *context)
+{
+  const CodecElement *codec = context->codec;
+  enum SampleFormat smpl_fmts[4];
+  enum SampleFormat smpl_fmt = -1;
+  int i;
+
+  context->audio.channels = info->channels;
+  context->audio.sample_rate = info->rate;
+  context->audio.channel_layout =
+    gst_ffmpeg_channel_positions_to_layout (info->position, info->channels);
+
+  if (!codec) {
+    GST_ERROR ("invalid codec");
+    return ;
+  }
+
+  /* local copy of the codec's (-1 terminated) supported-format list */
+  for (i = 0; i < 4; i++) {
+    smpl_fmts[i] = codec->sample_fmts[i];
+  }
+  i = 0;
+  switch (info->finfo->format) {
+  case GST_AUDIO_FORMAT_F32:
+    if (smpl_fmts[0] != -1) {
+      while (smpl_fmts[i] != -1) {
+        if (smpl_fmts[i] == SAMPLE_FMT_FLT) {
+          /* exact interleaved match: stop searching */
+          smpl_fmt = smpl_fmts[i];
+          break;
+        } else if (smpl_fmts[i] == SAMPLE_FMT_FLTP) {
+          /* planar fallback: keep, but keep looking for FLT */
+          smpl_fmt = smpl_fmts[i];
+        }
+
+        i++;
+      }
+    } else {
+      smpl_fmt = SAMPLE_FMT_FLT;
+    }
+    break;
+  case GST_AUDIO_FORMAT_F64:
+    if (smpl_fmts[0] != -1) {
+      while (smpl_fmts[i] != -1) {
+        if (smpl_fmts[i] == SAMPLE_FMT_DBL) {
+          smpl_fmt = smpl_fmts[i];
+          break;
+        } else if (smpl_fmts[i] == SAMPLE_FMT_DBLP) {
+          smpl_fmt = smpl_fmts[i];
+        }
+
+        i++;
+      }
+    } else {
+      smpl_fmt = SAMPLE_FMT_DBL;
+    }
+    break;
+  case GST_AUDIO_FORMAT_S32:
+    if (smpl_fmts[0] != -1) {
+      while (smpl_fmts[i] != -1) {
+        if (smpl_fmts[i] == SAMPLE_FMT_S32) {
+          smpl_fmt = smpl_fmts[i];
+          break;
+        } else if (smpl_fmts[i] == SAMPLE_FMT_S32P) {
+          smpl_fmt = smpl_fmts[i];
+        }
+
+        i++;
+      }
+    } else {
+      smpl_fmt = SAMPLE_FMT_S32;
+    }
+    break;
+  case GST_AUDIO_FORMAT_S16:
+    if (smpl_fmts[0] != -1) {
+      while (smpl_fmts[i] != -1) {
+        if (smpl_fmts[i] == SAMPLE_FMT_S16) {
+          smpl_fmt = smpl_fmts[i];
+          break;
+        } else if (smpl_fmts[i] == SAMPLE_FMT_S16P) {
+          smpl_fmt = smpl_fmts[i];
+        }
+
+        i++;
+      }
+    } else {
+      smpl_fmt = SAMPLE_FMT_S16;
+    }
+    break;
+  case GST_AUDIO_FORMAT_U8:
+    if (smpl_fmts[0] != -1) {
+      while (smpl_fmts[i] != -1) {
+        if (smpl_fmts[i] == SAMPLE_FMT_U8) {
+          smpl_fmt = smpl_fmts[i];
+          break;
+        } else if (smpl_fmts[i] == SAMPLE_FMT_U8P) {
+          smpl_fmt = smpl_fmts[i];
+        }
+
+        i++;
+      }
+    } else {
+      smpl_fmt = SAMPLE_FMT_U8;
+    }
+    break;
+  default:
+    break;
+  }
+
+  g_assert (smpl_fmt != -1);
+
+  context->audio.sample_fmt = smpl_fmt;
+}
gint gst_maru_smpfmt_depth (int smp_fmt);
+GstVideoFormat gst_maru_pixfmt_to_videoformat (enum PixelFormat pixfmt);
+
+enum PixelFormat gst_maru_videoformat_to_pixfmt (GstVideoFormat format);
+
+GstAudioFormat gst_maru_smpfmt_to_audioformat(int32_t sample_fmt);
+
+gboolean gst_maru_channel_layout_to_gst (guint64 channel_layout, gint channels,
+ GstAudioChannelPosition * pos);
+
+void gst_maru_audioinfo_to_context (GstAudioInfo *info, CodecContext *context);
+
+void gst_maru_videoinfo_to_context (GstVideoInfo * info, CodecContext * context);
#endif
--- /dev/null
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ * Copyright (C) 2013 Samsung Electronics Co., Ltd.
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+/* Modifications by Samsung Electronics Co., Ltd.
+ * 1. Provide a hardware buffer in order to avoid additional memcpy operations.
+ */
+
+#include "gstmarudevice.h"
+#include "gstmaruutils.h"
+#include "gstmaruinterface.h"
+
+#define GST_MARUDEC_PARAMS_QDATA g_quark_from_static_string("marudec-params")
+
+/* indicate dts, pts, offset in the stream */
+#define GST_TS_INFO_NONE &ts_info_none
+static const GstTSInfo ts_info_none = { -1, -1, -1, -1 };
+
+/* Class structure for the GstVideoDecoder-based video decoder;
+ * @codec is the per-subclass codec description attached via qdata
+ * in base_init. */
+typedef struct _GstMaruVidDecClass
+{
+  GstVideoDecoderClass parent_class;
+
+  CodecElement *codec;
+} GstMaruVidDecClass;
+
+/* Legacy class structure for the plain-GstElement decoder variant;
+ * carries the codec description and its registered pad templates. */
+typedef struct _GstMaruDecClass
+{
+  GstElementClass parent_class;
+
+  CodecElement *codec;
+  GstPadTemplate *sinktempl;
+  GstPadTemplate *srctempl;
+} GstMaruDecClass;
+
+
+static GstElementClass *parent_class = NULL;
+
+static void gst_maruviddec_base_init (GstMaruVidDecClass *klass);
+static void gst_maruviddec_class_init (GstMaruVidDecClass *klass);
+static void gst_maruviddec_init (GstMaruVidDec *marudec);
+static void gst_maruviddec_finalize (GObject *object);
+
+static gboolean gst_marudec_set_format (GstVideoDecoder * decoder, GstVideoCodecState * state);
+static GstFlowReturn gst_maruviddec_handle_frame (GstVideoDecoder * decoder, GstVideoCodecFrame * frame);
+static gboolean gst_marudec_negotiate (GstMaruVidDec *dec, gboolean force);
+static gint gst_maruviddec_frame (GstMaruVidDec *marudec, guint8 *data, guint size, gint *got_data,
+ const GstTSInfo *dec_info, gint64 in_offset, GstVideoCodecFrame * frame, GstFlowReturn *ret);
+
+static gboolean gst_marudec_open (GstMaruVidDec *marudec);
+static gboolean gst_marudec_close (GstMaruVidDec *marudec);
+
+GstFlowReturn alloc_and_copy (GstMaruVidDec *marudec, guint64 offset, guint size,
+ GstCaps *caps, GstBuffer **buf);
+
+// for profile
+static GTimer* profile_decode_timer = NULL;
+static gdouble elapsed_decode_time = 0;
+static int decoded_frame_cnt = 0;
+static int last_frame_cnt = 0;
+static GMutex profile_mutex;
+static int profile_init = 0;
+
+/* Once-per-second GSource callback: reports decoded frames-per-second
+ * and mean per-frame decode latency since the last tick.  Returns
+ * FALSE (removing the source) when decoded_frame_cnt has been set to
+ * the -1 reset sentinel by reset_codec_profile(). */
+static gboolean
+maru_profile_cb (gpointer user_data)
+{
+  GST_DEBUG (" >> ENTER ");
+  int decoding_fps = 0;
+  gdouble decoding_time = 0;
+
+  g_mutex_lock (&profile_mutex);
+  if (decoded_frame_cnt < 0) {
+    /* reset requested: clear all counters and stop this timeout */
+    decoded_frame_cnt = 0;
+    last_frame_cnt = 0;
+    elapsed_decode_time = 0;
+    g_mutex_unlock (&profile_mutex);
+    return FALSE;
+  }
+
+  decoding_fps = decoded_frame_cnt - last_frame_cnt;
+  last_frame_cnt = decoded_frame_cnt;
+
+  decoding_time = elapsed_decode_time;
+  elapsed_decode_time = 0;
+  g_mutex_unlock (&profile_mutex);
+
+  /* NOTE(review): if no frame was decoded this second, decoding_fps is 0
+   * and the latency prints as inf/nan (floating division, no trap). */
+  GST_DEBUG ("decoding fps=%d, latency=%f\n", decoding_fps, decoding_time/decoding_fps);
+  return TRUE;
+}
+
+/* Lazily create the shared decode timer; idempotent via profile_init. */
+static void init_codec_profile(void)
+{
+  GST_DEBUG (" >> ENTER ");
+  if (!profile_init) {
+    profile_init = 1;
+    profile_decode_timer = g_timer_new();
+  }
+}
+
+/* Request a profiling reset: set the -1 sentinel that makes
+ * maru_profile_cb() zero all counters and remove its timeout. */
+static void reset_codec_profile(void)
+{
+  GST_DEBUG (" >> ENTER ");
+  g_mutex_lock (&profile_mutex);
+  decoded_frame_cnt = -1;
+  /* BUG FIX: this was a second g_mutex_lock(), which deadlocks --
+   * locking a non-recursive GMutex twice from the same thread is
+   * undefined behavior in GLib.  The critical section must be
+   * released here. */
+  g_mutex_unlock (&profile_mutex);
+}
+
+/* Start (or restart) the per-frame decode stopwatch. */
+static void begin_video_decode_profile(void)
+{
+  GST_DEBUG (" >> ENTER ");
+  g_timer_start(profile_decode_timer);
+}
+
+/* Stop the stopwatch, accumulate the elapsed decode time and frame
+ * count, and arm the 1-second reporting timeout on the first frame. */
+static void end_video_decode_profile(void)
+{
+  GST_DEBUG (" >> ENTER ");
+  g_timer_stop(profile_decode_timer);
+
+  g_mutex_lock (&profile_mutex);
+  if (decoded_frame_cnt == 0) {
+    /* first decoded frame: start periodic fps/latency reporting */
+    g_timeout_add_seconds(1, maru_profile_cb, NULL);
+  }
+
+  elapsed_decode_time += g_timer_elapsed(profile_decode_timer, NULL);
+  decoded_frame_cnt++;
+  g_mutex_unlock (&profile_mutex);
+}
+
+/* Profiling hooks, enabled only when the device reports profiling
+ * status / after init_codec_profile() has run.  Each macro is wrapped
+ * in do { } while (0) so it behaves as a single statement -- the
+ * previous bare-if form mis-binds a following `else` at call sites.
+ * NOTE(review): RESET_CODEC_PROFILE ignores its argument `s`;
+ * kept for call-site compatibility. */
+#define INIT_CODEC_PROFILE(fd) \
+  do { \
+    if (interface->get_profile_status(fd)) { \
+      init_codec_profile(); \
+    } \
+  } while (0)
+#define RESET_CODEC_PROFILE(s) \
+  do { \
+    if (profile_init) { \
+      reset_codec_profile(); \
+    } \
+  } while (0)
+#define BEGIN_VIDEO_DECODE_PROFILE() \
+  do { \
+    if (profile_init) { \
+      begin_video_decode_profile(); \
+    } \
+  } while (0)
+#define END_VIDEO_DECODE_PROFILE() \
+  do { \
+    if (profile_init) { \
+      end_video_decode_profile(); \
+    } \
+  } while (0)
+
+
+/* Store (timestamp, duration, offset) in the decoder's circular
+ * ts_info ring and return the stored slot.  The write index wraps
+ * via MAX_TS_MASK, so entries are recycled after MAX_TS_MASK + 1
+ * stores; the slot's idx field is used later by gst_ts_info_get(). */
+static const GstTSInfo *
+gst_ts_info_store (GstMaruVidDec *dec, GstClockTime timestamp,
+    GstClockTime duration, gint64 offset)
+{
+  GST_DEBUG (" >> ENTER ");
+  gint idx = dec->ts_idx;
+  dec->ts_info[idx].idx = idx;
+  dec->ts_info[idx].timestamp = timestamp;
+  dec->ts_info[idx].duration = duration;
+  dec->ts_info[idx].offset = offset;
+  dec->ts_idx = (idx + 1) & MAX_TS_MASK;
+
+  return &dec->ts_info[idx];
+}
+
+/* Look up a ring slot by index; returns the ts_info_none sentinel
+ * (all fields -1) for out-of-range indices instead of failing. */
+static const GstTSInfo *
+gst_ts_info_get (GstMaruVidDec *dec, gint idx)
+{
+  GST_DEBUG (" >> ENTER ");
+  if (G_UNLIKELY (idx < 0 || idx > MAX_TS_MASK)){
+    GST_DEBUG (" >> LEAVE 0");
+    return GST_TS_INFO_NONE;
+  }
+
+  GST_DEBUG (" >> LEAVE ");
+  return &dec->ts_info[idx];
+}
+
+/* Invalidate the decoder's next expected output timestamp. */
+static void
+gst_marudec_reset_ts (GstMaruVidDec *marudec)
+{
+  GST_DEBUG (" >> ENTER ");
+  marudec->next_out = GST_CLOCK_TIME_NONE;
+}
+
+/* QoS bookkeeping for one frame: bumps marudec->processed when the
+ * frame is present, has valid timing, and is not already late.
+ * NOTE(review): *mode_switch is always set FALSE and both branches of
+ * the final diff check only log -- no decoding-mode switch is actually
+ * performed; confirm whether frame-skipping was intended here. */
+static void
+gst_marudec_do_qos (GstMaruVidDec *marudec, GstVideoCodecFrame * frame,
+    GstClockTime timestamp, gboolean *mode_switch)
+{
+  GST_DEBUG (" >> ENTER ");
+  GstClockTimeDiff diff;
+
+  *mode_switch = FALSE;
+
+  if (G_UNLIKELY (frame == NULL)) {
+    marudec->processed++;
+    GST_DEBUG (" >> LEAVE ");
+    return ;
+  }
+
+  /* remaining time before the frame's deadline (negative = late) */
+  diff = gst_video_decoder_get_max_decode_time (GST_VIDEO_DECODER (marudec), frame);
+  /* if we don't have timing info, then we don't do QoS */
+  if (G_UNLIKELY (!GST_CLOCK_TIME_IS_VALID (diff))) {
+    marudec->processed++;
+    GST_DEBUG (" >> LEAVE ");
+    return ;
+  }
+
+  if (diff > 0) {
+    marudec->processed++;
+    GST_DEBUG (" >> LEAVE ");
+  } else if (diff <= 0) {
+    GST_DEBUG (" >> LEAVE ");
+  }
+}
+
+/* Flush delayed frames out of the codec by feeding it empty input
+ * (data=NULL, size=0) until it stops producing output or errors,
+ * capped at 10 iterations as a safety bound. */
+static void
+gst_marudec_drain (GstMaruVidDec *marudec)
+{
+  GST_DEBUG (" >> ENTER ");
+  GST_DEBUG_OBJECT (marudec, "drain frame");
+
+  {
+    gint have_data, len, try = 0;
+
+    do {
+      GstFlowReturn ret;
+
+      len =
+        gst_maruviddec_frame (marudec, NULL, 0, &have_data, &ts_info_none, 0, NULL, &ret);
+
+      if (len < 0 || have_data == 0) {
+        break;
+      }
+    } while (try++ < 10);
+  }
+}
+
+/*
+ * Implementation
+ */
+/* Per-subclass base_init: reads the CodecElement attached to the
+ * GType as qdata, fills in element metadata, and builds sink/src pad
+ * templates from the codec's compressed caps and the raw video caps
+ * it can produce.  Aborts (g_assert) if the qdata is missing. */
+static void
+gst_maruviddec_base_init (GstMaruVidDecClass *klass)
+{
+  GST_DEBUG (" >> ENTER ");
+  GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+  GstPadTemplate *sinktempl, *srctempl;
+  GstCaps *sinkcaps, *srccaps;
+  CodecElement *codec;
+  gchar *longname, *description;
+
+  codec =
+      (CodecElement *)g_type_get_qdata (G_OBJECT_CLASS_TYPE (klass),
+      GST_MARUDEC_PARAMS_QDATA);
+  g_assert (codec != NULL);
+
+  longname = g_strdup_printf ("%s Decoder long", codec->longname);
+  description = g_strdup_printf("%s Decoder desc", codec->name);
+
+  gst_element_class_set_metadata (element_class,
+      longname,
+      "Codec/Decoder/Video sims",
+      description,
+      "Sooyoung Ha <yoosah.ha@samsung.com>");
+
+  g_free (longname);
+  g_free (description);
+
+  /* get the caps */
+  sinkcaps = gst_maru_codecname_to_caps (codec->name, NULL, FALSE);
+  if (!sinkcaps) {
+    sinkcaps = gst_caps_new_empty_simple ("unknown/unknown");
+  }
+
+  srccaps = gst_maru_codectype_to_video_caps (NULL, codec->name, FALSE, codec);
+
+  if (!srccaps) {
+    srccaps = gst_caps_from_string ("video/x-raw");
+  }
+
+  /* pad templates */
+  sinktempl = gst_pad_template_new ("sink", GST_PAD_SINK,
+      GST_PAD_ALWAYS, sinkcaps);
+  srctempl = gst_pad_template_new ("src", GST_PAD_SRC,
+      GST_PAD_ALWAYS, srccaps);
+
+  gst_element_class_add_pad_template (element_class, srctempl);
+  gst_element_class_add_pad_template (element_class, sinktempl);
+
+  klass->codec = codec;
+}
+
+/* Class init: hooks finalize plus the GstVideoDecoder vfuncs
+ * (set_format / handle_frame) that drive this decoder. */
+static void
+gst_maruviddec_class_init (GstMaruVidDecClass *klass)
+{
+  GST_DEBUG (" >> ENTER ");
+  GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
+  GstVideoDecoderClass *viddec_class = GST_VIDEO_DECODER_CLASS (klass);
+
+  parent_class = g_type_class_peek_parent (klass);
+
+  gobject_class->finalize = gst_maruviddec_finalize;
+
+  /* use these function when defines new properties.
+  gobject_class->set_property = gst_marudec_set_property
+  gobject_class->get_property = gst_marudec_get_property
+  */
+
+  viddec_class->set_format = gst_marudec_set_format;
+  viddec_class->handle_frame = gst_maruviddec_handle_frame;
+}
+
+/* Instance init: allocate a zeroed CodecContext with both pixel and
+ * sample format marked as "none"; the codec session is opened later
+ * in set_format. */
+static void
+gst_maruviddec_init (GstMaruVidDec *marudec)
+{
+  GST_DEBUG (" >> ENTER ");
+  marudec->context = g_malloc0 (sizeof(CodecContext));
+  marudec->context->video.pix_fmt = PIX_FMT_NONE;
+  marudec->context->audio.sample_fmt = SAMPLE_FMT_NONE;
+
+  marudec->opened = FALSE;
+}
+
+/* Finalize: release the CodecContext and chain up.
+ * NOTE(review): marudec->last_caps is not unreffed here and
+ * context->codecdata is only freed in gst_marudec_close() -- confirm
+ * neither can leak when the element is finalized while open. */
+static void
+gst_maruviddec_finalize (GObject *object)
+{
+  GST_DEBUG (" >> ENTER ");
+  GstMaruVidDec *marudec = (GstMaruVidDec *) object;
+
+  GST_DEBUG_OBJECT (marudec, "finalize object and release context");
+  g_free (marudec->context);
+  marudec->context = NULL;
+
+  G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+/* GstVideoDecoder::set_format -- (re)configure the codec session for
+ * new sink caps: short-circuits on unchanged caps, drains and closes
+ * any open session, maps the caps into the CodecContext, opens the
+ * device-backed codec, and stores the new input state. Returns TRUE
+ * on success. */
+static gboolean
+gst_marudec_set_format (GstVideoDecoder * decoder, GstVideoCodecState * state)
+{
+  GST_DEBUG (" >> ENTER ");
+  GstMaruVidDec *marudec;
+  GstMaruVidDecClass *oclass;
+  gboolean ret = FALSE;
+
+  marudec = (GstMaruVidDec *) decoder;
+  oclass = (GstMaruVidDecClass *) (G_OBJECT_GET_CLASS (marudec));
+  /* NOTE(review): this NULL check is dead -- marudec is a cast of
+   * `decoder`, which was already dereferenced implicitly above and is
+   * guaranteed non-NULL by the base class. */
+  if (!marudec) {
+    GST_ERROR ("invalid marudec");
+    return FALSE;
+  }
+  /* same caps as last time: nothing to reconfigure */
+  if (marudec->last_caps != NULL &&
+      gst_caps_is_equal (marudec->last_caps, state->caps)) {
+    return TRUE;
+  }
+
+  GST_DEBUG_OBJECT (marudec, "setcaps called.");
+
+  GST_OBJECT_LOCK (marudec);
+
+  /* stupid check for VC1 */
+  if (!strcmp(oclass->codec->name, "wmv3") ||
+      !strcmp(oclass->codec->name, "vc1")) {
+    gst_maru_caps_to_codecname (state->caps, oclass->codec->name, NULL);
+  }
+
+  /* close old session */
+  if (marudec->opened) {
+    /* drain must run unlocked: it pushes data through the codec */
+    GST_OBJECT_UNLOCK (marudec);
+    gst_marudec_drain (marudec);
+    GST_OBJECT_LOCK (marudec);
+    if (!gst_marudec_close (marudec)) {
+      GST_OBJECT_UNLOCK (marudec);
+      return FALSE;
+    }
+  }
+
+  gst_caps_replace (&marudec->last_caps, state->caps);
+
+  GST_LOG_OBJECT (marudec, "size %dx%d", marudec->context->video.width,
+      marudec->context->video.height);
+
+  /* map caps fields (size, framerate, codec_data, ...) into the context */
+  gst_maru_caps_with_codecname (oclass->codec->name, oclass->codec->media_type,
+      state->caps, marudec->context);
+
+  GST_LOG_OBJECT (marudec, "size after %dx%d", marudec->context->video.width,
+      marudec->context->video.height);
+
+  if (!marudec->context->video.fps_d || !marudec->context->video.fps_n) {
+    GST_DEBUG_OBJECT (marudec, "forcing 25/1 framerate");
+    marudec->context->video.fps_n = 1;
+    marudec->context->video.fps_d = 25;
+  }
+
+  GstQuery *query;
+  gboolean is_live;
+
+  query = gst_query_new_latency ();
+  is_live = FALSE;
+  /* Check if upstream is live. If it isn't we can enable frame based
+   * threading, which is adding latency */
+  if (gst_pad_peer_query (GST_VIDEO_DECODER_SINK_PAD (marudec), query)) {
+    gst_query_parse_latency (query, &is_live, NULL, NULL);
+  }
+  gst_query_unref (query);
+
+  if (!gst_marudec_open (marudec)) {
+    GST_DEBUG_OBJECT (marudec, "Failed to open");
+
+    ret = FALSE;
+  } else {
+    ret = TRUE;
+  }
+  if (marudec->input_state) {
+    gst_video_codec_state_unref (marudec->input_state);
+  }
+  marudec->input_state = gst_video_codec_state_ref (state);
+
+  if (marudec->input_state->info.fps_n) {
+    GstVideoInfo *info = &marudec->input_state->info;
+    /* NOTE(review): the result of this frame-duration computation is
+     * discarded -- presumably it was meant to feed a latency setting;
+     * confirm against the original port. */
+    gst_util_uint64_scale_ceil (GST_SECOND, info->fps_d, info->fps_n);
+  }
+  GST_OBJECT_UNLOCK (marudec);
+
+  return ret;
+}
+
+/* Opens the codec session: allocates the CodecDevice, asks the backend to
+ * open the codec for this element's CodecElement, resets timestamp
+ * bookkeeping and initializes profiling.
+ * Returns TRUE on success; on failure the device struct is freed again. */
+static gboolean
+gst_marudec_open (GstMaruVidDec *marudec)
+{
+  GST_DEBUG (" >> ENTER ");
+  GstMaruVidDecClass *oclass;
+
+  oclass = (GstMaruVidDecClass *) (G_OBJECT_GET_CLASS (marudec));
+
+  marudec->dev = g_try_malloc0 (sizeof(CodecDevice));
+  if (!marudec->dev) {
+    GST_ERROR_OBJECT (marudec, "failed to allocate memory for CodecDevice");
+    return FALSE;
+  }
+
+  if (gst_maru_avcodec_open (marudec->context, oclass->codec, marudec->dev) < 0) {
+    /* Open failed: release the device we just allocated. */
+    g_free(marudec->dev);
+    marudec->dev = NULL;
+    GST_ERROR_OBJECT (marudec,
+      "maru_%sdec: Failed to open codec", oclass->codec->name);
+    return FALSE;
+  }
+
+  marudec->opened = TRUE;
+
+  GST_LOG_OBJECT (marudec, "Opened codec %s", oclass->codec->name);
+
+  gst_marudec_reset_ts (marudec);
+
+  // initialize profile resource
+  INIT_CODEC_PROFILE(marudec->dev->fd);
+
+  return TRUE;
+}
+
+/* Closes the codec session opened by gst_marudec_open(): frees the cached
+ * codec extradata, closes the backend session and releases the device.
+ * Returns FALSE when nothing was open (or the device is missing),
+ * TRUE after a successful close. */
+static gboolean
+gst_marudec_close (GstMaruVidDec *marudec)
+{
+  GST_DEBUG (" >> ENTER ");
+  if (!marudec->opened) {
+    GST_DEBUG_OBJECT (marudec, "not opened yet");
+    return FALSE;
+  }
+
+  if (marudec->context) {
+    g_free(marudec->context->codecdata);
+    marudec->context->codecdata = NULL;
+  }
+
+  /* NOTE(review): returning FALSE here leaves marudec->opened == TRUE even
+   * though the codecdata was already freed — confirm this inconsistent
+   * state is intended. */
+  if (!marudec->dev) {
+    return FALSE;
+  }
+
+  gst_maru_avcodec_close (marudec->context, marudec->dev);
+  marudec->opened = FALSE;
+
+  if (marudec->dev) {
+    g_free(marudec->dev);
+    marudec->dev = NULL;
+  }
+
+  // reset profile resource
+  RESET_CODEC_PROFILE();
+  return TRUE;
+}
+
+/* Copies the decoder-reported video parameters (size, ticks, framerate,
+ * pixel format, PAR) from the CodecContext into the element's cached
+ * ctx_* fields.
+ * Returns TRUE when anything changed (or when 'force' is set), i.e. when
+ * the caller must renegotiate output caps; FALSE when the cache already
+ * matches the context. */
+static gboolean
+update_video_context (GstMaruVidDec * marudec, CodecContext * context,
+    gboolean force)
+{
+  GST_DEBUG (" >> ENTER ");
+  /* Unchanged and not forced: no renegotiation needed. */
+  if (!force && marudec->ctx_width == context->video.width
+    && marudec->ctx_height == context->video.height
+    && marudec->ctx_ticks == context->video.ticks_per_frame
+    && marudec->ctx_time_n == context->video.fps_n
+    && marudec->ctx_time_d == context->video.fps_d
+    && marudec->ctx_pix_fmt == context->video.pix_fmt
+    && marudec->ctx_par_n == context->video.par_n
+    && marudec->ctx_par_d == context->video.par_d) {
+    return FALSE;
+  }
+  marudec->ctx_width = context->video.width;
+  marudec->ctx_height = context->video.height;
+  marudec->ctx_ticks = context->video.ticks_per_frame;
+  marudec->ctx_time_n = context->video.fps_n;
+  marudec->ctx_time_d = context->video.fps_d;
+  marudec->ctx_pix_fmt = context->video.pix_fmt;
+  marudec->ctx_par_n = context->video.par_n;
+  marudec->ctx_par_d = context->video.par_d;
+
+  return TRUE;
+}
+
+/* Chooses the output pixel-aspect-ratio for out_info, arbitrating between
+ * the PAR reported by the demuxer (in_info) and the one reported by the
+ * decoder (marudec->ctx_par_*).  Preference order: the only one that is
+ * set; otherwise the non-1:1 one; otherwise the demuxer's; 1:1 when
+ * neither provides a PAR. */
+static void
+gst_maruviddec_update_par (GstMaruVidDec * marudec,
+    GstVideoInfo * in_info, GstVideoInfo * out_info)
+{
+  GST_DEBUG (" >> ENTER");
+  gboolean demuxer_par_set = FALSE;
+  gboolean decoder_par_set = FALSE;
+  gint demuxer_num = 1, demuxer_denom = 1;
+  gint decoder_num = 1, decoder_denom = 1;
+
+  if (in_info->par_n && in_info->par_d) {
+    demuxer_num = in_info->par_n;
+    demuxer_denom = in_info->par_d;
+    demuxer_par_set = TRUE;
+    GST_DEBUG_OBJECT (marudec, "Demuxer PAR: %d:%d", demuxer_num,
+        demuxer_denom);
+  }
+
+  if (marudec->ctx_par_n && marudec->ctx_par_d) {
+    decoder_num = marudec->ctx_par_n;
+    decoder_denom = marudec->ctx_par_d;
+    decoder_par_set = TRUE;
+    GST_DEBUG_OBJECT (marudec, "Decoder PAR: %d:%d", decoder_num,
+        decoder_denom);
+  }
+
+  if (!demuxer_par_set && !decoder_par_set)
+    goto no_par;
+
+  if (demuxer_par_set && !decoder_par_set)
+    goto use_demuxer_par;
+
+  if (decoder_par_set && !demuxer_par_set)
+    goto use_decoder_par;
+
+  /* Both the demuxer and the decoder provide a PAR. If one of
+   * the two PARs is 1:1 and the other one is not, use the one
+   * that is not 1:1. */
+  if (demuxer_num == demuxer_denom && decoder_num != decoder_denom)
+    goto use_decoder_par;
+
+  if (decoder_num == decoder_denom && demuxer_num != demuxer_denom)
+    goto use_demuxer_par;
+
+  /* Both PARs are non-1:1, so use the PAR provided by the demuxer */
+  goto use_demuxer_par;
+
+use_decoder_par:
+  {
+    GST_DEBUG_OBJECT (marudec,
+        "Setting decoder provided pixel-aspect-ratio of %u:%u", decoder_num,
+        decoder_denom);
+    out_info->par_n = decoder_num;
+    out_info->par_d = decoder_denom;
+    return;
+  }
+use_demuxer_par:
+  {
+    GST_DEBUG_OBJECT (marudec,
+        "Setting demuxer provided pixel-aspect-ratio of %u:%u", demuxer_num,
+        demuxer_denom);
+    out_info->par_n = demuxer_num;
+    out_info->par_d = demuxer_denom;
+    return;
+  }
+no_par:
+  {
+    GST_DEBUG_OBJECT (marudec,
+        "Neither demuxer nor codec provide a pixel-aspect-ratio");
+    out_info->par_n = 1;
+    out_info->par_d = 1;
+    return;
+  }
+}
+
+
+/* (Re)negotiates the output format with downstream when the decoder's
+ * reported video parameters changed (or when 'force' is set): maps the
+ * pixel format, sets a new output state, picks a framerate (input first,
+ * decoder-derived as fallback), updates the PAR and calls
+ * gst_video_decoder_negotiate().
+ * Returns TRUE when negotiation succeeded or was not needed; FALSE on an
+ * unknown pixel format or failed negotiation (cache is reset in that case
+ * so the next call retries). */
+static gboolean
+gst_marudec_negotiate (GstMaruVidDec *marudec, gboolean force)
+{
+  GST_DEBUG (" >> ENTER ");
+  CodecContext *context = marudec->context;
+  GstVideoFormat fmt;
+  GstVideoInfo *in_info, *out_info;
+  GstVideoCodecState *output_state;
+  gint fps_n, fps_d;
+
+  /* No parameter change: keep the current output state. */
+  if (!update_video_context (marudec, context, force))
+    return TRUE;
+
+  fmt = gst_maru_pixfmt_to_videoformat (marudec->ctx_pix_fmt);
+  if (G_UNLIKELY (fmt == GST_VIDEO_FORMAT_UNKNOWN))
+    goto unknown_format;
+
+  output_state =
+    gst_video_decoder_set_output_state (GST_VIDEO_DECODER (marudec), fmt,
+    marudec->ctx_width, marudec->ctx_height, marudec->input_state);
+  if (marudec->output_state)
+    gst_video_codec_state_unref (marudec->output_state);
+  marudec->output_state = output_state;
+
+  in_info = &marudec->input_state->info;
+  out_info = &marudec->output_state->info;
+  out_info->interlace_mode = GST_VIDEO_INTERLACE_MODE_MIXED;
+
+  /* try to find a good framerate */
+  if ((in_info->fps_d && in_info->fps_n) ||
+      GST_VIDEO_INFO_FLAG_IS_SET (in_info, GST_VIDEO_FLAG_VARIABLE_FPS)) {
+    /* take framerate from input when it was specified (#313970) */
+    fps_n = in_info->fps_n;
+    fps_d = in_info->fps_d;
+  } else {
+    /* Derive from the decoder's time base; ctx_time is stored inverted
+     * (d/n) relative to fps, hence the swap here. */
+    fps_n = marudec->ctx_time_d / marudec->ctx_ticks;
+    fps_d = marudec->ctx_time_n;
+
+    if (!fps_d) {
+      GST_LOG_OBJECT (marudec, "invalid framerate: %d/0, -> %d/1", fps_n,
+          fps_n);
+      fps_d = 1;
+    }
+    if (gst_util_fraction_compare (fps_n, fps_d, 1000, 1) > 0) {
+      GST_LOG_OBJECT (marudec, "excessive framerate: %d/%d, -> 0/1", fps_n,
+          fps_d);
+      fps_n = 0;
+      fps_d = 1;
+    }
+  }
+  GST_LOG_OBJECT (marudec, "setting framerate: %d/%d", fps_n, fps_d);
+  out_info->fps_n = fps_n;
+  out_info->fps_d = fps_d;
+
+  /* calculate and update par now */
+  gst_maruviddec_update_par (marudec, in_info, out_info);
+
+  if (!gst_video_decoder_negotiate (GST_VIDEO_DECODER (marudec)))
+    goto negotiate_failed;
+
+  return TRUE;
+
+  /* ERRORS */
+unknown_format:
+  {
+    GST_ERROR_OBJECT (marudec,
+        "decoder requires a video format unsupported by GStreamer");
+    return FALSE;
+  }
+negotiate_failed:
+  {
+    /* Reset so we try again next time even if force==FALSE */
+    marudec->ctx_width = 0;
+    marudec->ctx_height = 0;
+    marudec->ctx_ticks = 0;
+    marudec->ctx_time_n = 0;
+    marudec->ctx_time_d = 0;
+    marudec->ctx_pix_fmt = 0;
+    marudec->ctx_par_n = 0;
+    marudec->ctx_par_d = 0;
+
+    GST_ERROR_OBJECT (marudec, "negotiation failed");
+    return FALSE;
+  }
+}
+
+/* Negotiates output caps if needed, allocates the output frame buffer and
+ * copies the decoded picture into it via alloc_and_copy().
+ * Returns GST_FLOW_OK on success, GST_FLOW_NOT_NEGOTIATED or
+ * GST_FLOW_ERROR otherwise. */
+static GstFlowReturn
+get_output_buffer (GstMaruVidDec *marudec, GstVideoCodecFrame * frame)
+{
+  GST_DEBUG (" >> ENTER ");
+  gint pict_size;
+  GstFlowReturn ret = GST_FLOW_OK;
+
+  if (G_UNLIKELY (!gst_marudec_negotiate (marudec, FALSE))) {
+    GST_DEBUG_OBJECT (marudec, "negotiate failed");
+    return GST_FLOW_NOT_NEGOTIATED;
+  }
+  pict_size = gst_maru_avpicture_size (marudec->context->video.pix_fmt,
+    marudec->context->video.width, marudec->context->video.height);
+  if (pict_size < 0) {
+    GST_DEBUG_OBJECT (marudec, "size of a picture is negative. "
+      "pixel format: %d, width: %d, height: %d",
+      marudec->context->video.pix_fmt, marudec->context->video.width,
+      marudec->context->video.height);
+    return GST_FLOW_ERROR;
+  }
+
+  GST_DEBUG_OBJECT (marudec, "outbuf size of decoded video %d", pict_size);
+
+  ret = gst_video_decoder_allocate_output_frame (GST_VIDEO_DECODER (marudec), frame);
+
+  /* NOTE(review): alloc_and_copy() is invoked before 'ret' is checked —
+   * if allocation failed, frame->output_buffer may be invalid here.
+   * Confirm whether the copy should be guarded by (ret == GST_FLOW_OK). */
+  alloc_and_copy(marudec, 0, pict_size, NULL, &(frame->output_buffer));
+
+  if (G_UNLIKELY (ret != GST_FLOW_OK)) {
+    GST_ERROR ("alloc output buffer failed");
+  }
+
+  return ret;
+}
+
+/* Decodes one compressed video frame through the backend interface,
+ * fetches an output buffer, reconstructs timestamp/offset/duration for the
+ * decoded picture and pushes it downstream via finish_frame.
+ * Returns the number of bytes consumed by the decoder, or -1 on failure;
+ * the flow result is reported through *ret. */
+static gint
+gst_maruviddec_video_frame (GstMaruVidDec *marudec, guint8 *data, guint size,
+    const GstTSInfo *dec_info, gint64 in_offset,
+    GstVideoCodecFrame * frame, GstFlowReturn *ret)
+{
+  GST_DEBUG (" >> ENTER ");
+  gint len = -1;
+  gboolean mode_switch;
+  GstClockTime out_timestamp, out_duration, out_pts;
+  gint64 out_offset;
+  const GstTSInfo *out_info;
+  int have_data;
+
+  /* run QoS code, we don't stop decoding the frame when we are late because
+   * else we might skip a reference frame */
+  gst_marudec_do_qos (marudec, frame, dec_info->timestamp, &mode_switch);
+
+  GST_DEBUG_OBJECT (marudec, "decode video: input buffer size %d", size);
+
+  // begin video decode profile
+  BEGIN_VIDEO_DECODE_PROFILE();
+
+  len = interface->decode_video (marudec, data, size,
+      dec_info->idx, in_offset, NULL, &have_data);
+  /* NOTE(review): this early return skips END_VIDEO_DECODE_PROFILE(), so
+   * the profile section stays open on decode failure — confirm intended. */
+  if (len < 0 || !have_data) {
+    GST_ERROR ("decode video failed, len = %d", len);
+    return len;
+  }
+
+  // end video decode profile
+  END_VIDEO_DECODE_PROFILE();
+
+  *ret = get_output_buffer (marudec, frame);
+  if (G_UNLIKELY (*ret != GST_FLOW_OK)) {
+    GST_DEBUG_OBJECT (marudec, "no output buffer");
+    len = -1;
+    GST_DEBUG_OBJECT (marudec, "return flow %d, out %p, len %d",
+        *ret, (void *) (frame->output_buffer), len);
+    return len;
+  }
+
+  /* Look up the timestamp info the decoder associated with this picture. */
+  out_info = gst_ts_info_get (marudec, dec_info->idx);
+  out_pts = out_info->timestamp;
+  out_duration = out_info->duration;
+  out_offset = out_info->offset;
+
+  /* Timestamps: prefer the decoder-returned PTS, then the running
+   * next_out estimate, then the input timestamp. */
+  out_timestamp = -1;
+  if (out_pts != -1) {
+    out_timestamp = (GstClockTime) out_pts;
+    GST_LOG_OBJECT (marudec, "using timestamp %" GST_TIME_FORMAT
+      " returned by ffmpeg", GST_TIME_ARGS (out_timestamp));
+  }
+
+  if (!GST_CLOCK_TIME_IS_VALID (out_timestamp) && marudec->next_out != -1) {
+    out_timestamp = marudec->next_out;
+    GST_LOG_OBJECT (marudec, "using next timestamp %" GST_TIME_FORMAT,
+      GST_TIME_ARGS (out_timestamp));
+  }
+
+  if (!GST_CLOCK_TIME_IS_VALID (out_timestamp)) {
+    out_timestamp = dec_info->timestamp;
+    GST_LOG_OBJECT (marudec, "using in timestamp %" GST_TIME_FORMAT,
+      GST_TIME_ARGS (out_timestamp));
+  }
+  GST_BUFFER_TIMESTAMP (frame->output_buffer) = out_timestamp;
+
+  /* Offset: decoder-provided, else derived from timestamp (disabled),
+   * else the input offset, else NONE. */
+  if (out_offset != GST_BUFFER_OFFSET_NONE) {
+    GST_LOG_OBJECT (marudec, "Using offset returned by ffmpeg");
+  } else if (out_timestamp != GST_CLOCK_TIME_NONE) {
+/* TODO: check this is needed.
+    GstFormat out_fmt = GST_FORMAT_DEFAULT;
+    GST_LOG_OBJECT (marudec, "Using offset converted from timestamp");
+
+    gst_pad_query_peer_convert (marudec->sinkpad,
+      GST_FORMAT_TIME, out_timestamp, &out_fmt, &out_offset);
+*/
+  } else if (dec_info->offset != GST_BUFFER_OFFSET_NONE) {
+    GST_LOG_OBJECT (marudec, "using in_offset %" G_GINT64_FORMAT,
+      dec_info->offset);
+    out_offset = dec_info->offset;
+  } else {
+    GST_LOG_OBJECT (marudec, "no valid offset found");
+    out_offset = GST_BUFFER_OFFSET_NONE;
+  }
+  GST_BUFFER_OFFSET (frame->output_buffer) = out_offset;
+
+  /* Duration: decoder-provided, else input duration, else derived from
+   * the cached time base or the context framerate. */
+  if (GST_CLOCK_TIME_IS_VALID (out_duration)) {
+    GST_LOG_OBJECT (marudec, "Using duration returned by ffmpeg");
+  } else if (GST_CLOCK_TIME_IS_VALID (dec_info->duration)) {
+    GST_LOG_OBJECT (marudec, "Using in_duration");
+    out_duration = dec_info->duration;
+  } else {
+    if (marudec->ctx_time_n != -1 &&
+        (marudec->ctx_time_n != 1000 &&
+        marudec->ctx_time_d != 1)) {
+      GST_LOG_OBJECT (marudec, "using input framerate for duration");
+      out_duration = gst_util_uint64_scale_int (GST_SECOND,
+        marudec->ctx_time_d, marudec->ctx_time_n);
+    } else {
+      if (marudec->context->video.fps_n != 0 &&
+          (marudec->context->video.fps_d > 0 &&
+            marudec->context->video.fps_d < 1000)) {
+        GST_LOG_OBJECT (marudec, "using decoder's framerate for duration");
+        out_duration = gst_util_uint64_scale_int (GST_SECOND,
+          marudec->context->video.fps_n * 1,
+          marudec->context->video.fps_d);
+      } else {
+        GST_LOG_OBJECT (marudec, "no valid duration found");
+      }
+    }
+  }
+
+  GST_DEBUG_OBJECT (marudec, "return flow %d, out %p, len %d",
+    *ret, (void *) (frame->output_buffer), len);
+
+  *ret = gst_video_decoder_finish_frame (GST_VIDEO_DECODER (marudec), frame);
+
+  return len;
+}
+
+/* Dispatches one input frame to the media-type-specific decode routine
+ * (only AVMEDIA_TYPE_VIDEO is supported here) and reports via *got_data
+ * whether an output buffer was produced.
+ * Returns the number of bytes consumed, or a negative value on error. */
+static gint
+gst_maruviddec_frame (GstMaruVidDec *marudec, guint8 *data, guint size,
+    gint *got_data, const GstTSInfo *dec_info, gint64 in_offset,
+    GstVideoCodecFrame * frame, GstFlowReturn *ret)
+{
+  GST_DEBUG (" >> ENTER ");
+  GstMaruVidDecClass *oclass;
+  gint have_data = 0, len = 0;
+
+  if (G_UNLIKELY (marudec->context->codec == NULL)) {
+    GST_ERROR_OBJECT (marudec, "no codec context");
+    return -1;
+  }
+  GST_LOG_OBJECT (marudec, "data:%p, size:%d", data, size);
+
+  *ret = GST_FLOW_OK;
+  oclass = (GstMaruVidDecClass *) (G_OBJECT_GET_CLASS (marudec));
+
+  switch (oclass->codec->media_type) {
+  case AVMEDIA_TYPE_VIDEO:
+    len = gst_maruviddec_video_frame (marudec, data, size,
+        dec_info, in_offset, frame, ret);
+    break;
+  default:
+    /* This element only registers video decoders; anything else is a bug. */
+    GST_ERROR_OBJECT (marudec, "Asked to decode non-audio/video frame!");
+    g_assert_not_reached ();
+    break;
+  }
+
+  /* An output buffer on the frame counts as produced data. */
+  if (frame && frame->output_buffer) {
+    have_data = 1;
+  }
+
+  if (len < 0 || have_data < 0) {
+    GST_WARNING_OBJECT (marudec,
+      "maru_%sdec: decoding error (len: %d, have_data: %d)",
+      oclass->codec->name, len, have_data);
+    *got_data = 0;
+    return len;
+  } else if (len == 0 && have_data == 0) {
+    *got_data = 0;
+    return len;
+  } else {
+    *got_data = 1;
+  }
+
+  return len;
+}
+
+/* GstVideoDecoder::handle_frame vfunc: maps the input buffer, records its
+ * timestamp info in the TS store and hands the raw bytes to
+ * gst_maruviddec_frame() for decoding. */
+static GstFlowReturn
+gst_maruviddec_handle_frame (GstVideoDecoder * decoder, GstVideoCodecFrame * frame)
+{
+  GST_DEBUG (" >> ENTER ");
+  GstMaruVidDec *marudec = (GstMaruVidDec *) decoder;
+  gint have_data;
+  GstMapInfo mapinfo;
+  GstFlowReturn ret = GST_FLOW_OK;
+
+  guint8 *in_buf;
+  gint in_size;
+  GstClockTime in_timestamp;
+  GstClockTime in_duration;
+  gint64 in_offset;
+  const GstTSInfo *in_info;
+  const GstTSInfo *dec_info;
+
+  if (!gst_buffer_map (frame->input_buffer, &mapinfo, GST_MAP_READ)) {
+    GST_ERROR_OBJECT (marudec, "Failed to map buffer");
+    return GST_FLOW_ERROR;
+  }
+
+  in_timestamp = GST_BUFFER_TIMESTAMP (frame->input_buffer);
+  in_duration = GST_BUFFER_DURATION (frame->input_buffer);
+  in_offset = GST_BUFFER_OFFSET (frame->input_buffer);
+
+  /* Remember the input timing so the decode path can recover it by index. */
+  in_info = gst_ts_info_store (marudec, in_timestamp, in_duration, in_offset);
+  GST_LOG_OBJECT (marudec,
+    "Received new data of size %u, offset: %" G_GUINT64_FORMAT ", ts:%"
+    GST_TIME_FORMAT ", dur: %" GST_TIME_FORMAT ", info %d",
+    mapinfo.size, GST_BUFFER_OFFSET (frame->input_buffer),
+    GST_TIME_ARGS (in_timestamp), GST_TIME_ARGS (in_duration), in_info->idx);
+
+  in_size = mapinfo.size;
+  in_buf = (guint8 *) mapinfo.data;
+
+  dec_info = in_info;
+
+  /* NOTE(review): the buffer is unmapped here but in_buf (the mapped data
+   * pointer) is passed to gst_maruviddec_frame() below — the pointer is not
+   * guaranteed valid after unmap.  Confirm whether the unmap should be
+   * moved after the decode call. */
+  gst_buffer_unmap (frame->input_buffer, &mapinfo);
+
+  gst_maruviddec_frame (marudec, in_buf, in_size, &have_data, dec_info, in_offset, frame, &ret);
+
+  return ret;
+}
+
+/* Registers one GStreamer decoder element per video-decode CodecElement in
+ * 'element'.  Each element type is named "maru_<codec>dec", derives from
+ * GST_TYPE_VIDEO_DECODER and carries its CodecElement as type qdata.
+ * Returns TRUE when all applicable codecs were registered. */
+gboolean
+gst_maruviddec_register (GstPlugin *plugin, GList *element)
+{
+  GTypeInfo typeinfo = {
+    sizeof (GstMaruVidDecClass),
+    (GBaseInitFunc) gst_maruviddec_base_init,
+    NULL,
+    (GClassInitFunc) gst_maruviddec_class_init,
+    NULL,
+    NULL,
+    sizeof (GstMaruVidDec),
+    0,
+    (GInstanceInitFunc) gst_maruviddec_init,
+  };
+
+  GType type;
+  gchar *type_name;
+  gint rank = GST_RANK_PRIMARY;
+  GList *elem = element;
+  CodecElement *codec = NULL;
+
+  if (!elem) {
+    return FALSE;
+  }
+
+  /* register element */
+  do {
+    codec = (CodecElement *)(elem->data);
+    if (!codec) {
+      return FALSE;
+    }
+
+    /* Only video decoders are handled by this register function. */
+    if (codec->codec_type != CODEC_TYPE_DECODE || codec->media_type != AVMEDIA_TYPE_VIDEO) {
+      continue;
+    }
+
+    type_name = g_strdup_printf ("maru_%sdec", codec->name);
+    type = g_type_from_name (type_name);
+    if (!type) {
+      /* First registration of this codec: create the GType and attach the
+       * CodecElement so base_init/class_init can find it. */
+      type = g_type_register_static (GST_TYPE_VIDEO_DECODER, type_name, &typeinfo, 0);
+      g_type_set_qdata (type, GST_MARUDEC_PARAMS_QDATA, (gpointer) codec);
+    }
+
+    if (!gst_element_register (plugin, type_name, rank, type)) {
+      g_free (type_name);
+      return FALSE;
+    }
+    g_free (type_name);
+  } while ((elem = elem->next));
+
+  return TRUE;
+}
--- /dev/null
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ * Copyright (C) 2013 Samsung Electronics Co., Ltd.
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#include "gstmarudevice.h"
+#include "gstmaruutils.h"
+#include "gstmaruinterface.h"
+#include <gst/base/gstadapter.h>
+
+#define GST_MARUENC_PARAMS_QDATA g_quark_from_static_string("maruenc-params")
+
+/* GObject property ids for the encoder element. */
+enum
+{
+  ARG_0,
+  ARG_BIT_RATE
+};
+
+/* Instance struct for the GstVideoEncoder-based "maru" video encoder. */
+typedef struct _GstMaruVidEnc
+{
+
+  GstVideoEncoder parent;
+
+  GstVideoCodecState *input_state;
+
+  CodecContext *context;   /* backend codec context (instead of AVCodecContext) */
+  CodecDevice *dev;        /* backend device handle */
+  gboolean opened;         /* TRUE while a codec session is open */
+  gboolean discont;
+
+  /* cache */
+  gulong bitrate;
+  gint gop_size;
+  gulong buffer_size;
+
+  guint8 *working_buf;     /* scratch buffer for encoded output */
+  gulong working_buf_size;
+
+  GQueue *delay;
+
+} GstMaruVidEnc;
+
+/* Legacy GstElement-based encoder instance struct.
+ * NOTE(review): appears unused by the code visible in this file — the
+ * implementation below only uses GstMaruVidEnc; confirm whether this can
+ * be removed. */
+typedef struct _GstMaruEnc
+{
+  GstElement element;
+
+  GstPad *srcpad;
+  GstPad *sinkpad;
+
+  CodecContext *context;
+  CodecDevice *dev;
+  gboolean opened;
+  GstClockTime adapter_ts;
+  guint64 adapter_consumed;
+  GstAdapter *adapter;
+  gboolean discont;
+
+  // cache
+  gulong bitrate;
+  gint gop_size;
+  gulong buffer_size;
+
+  guint8 *working_buf;
+  gulong working_buf_size;
+
+  GQueue *delay;
+
+} GstMaruEnc;
+
+/* Class struct: carries the per-codec CodecElement and pad templates. */
+typedef struct _GstMaruVidEncClass
+{
+  GstVideoEncoderClass parent_class;
+
+  CodecElement *codec;
+  GstPadTemplate *sinktempl;
+  GstPadTemplate *srctempl;
+} GstMaruVidEncClass;
+
+/* Legacy class struct matching GstMaruEnc above. */
+typedef struct _GstMaruEncClass
+{
+  GstElementClass parent_class;
+
+  CodecElement *codec;
+  GstPadTemplate *sinktempl;
+  GstPadTemplate *srctempl;
+  GstCaps *sinkcaps;
+} GstMaruEncClass;
+
+static GstElementClass *parent_class = NULL;
+
+/* Forward declarations for the encoder vfuncs and property handlers. */
+static void gst_maruvidenc_base_init (GstMaruVidEncClass *klass);
+static void gst_maruvidenc_class_init (GstMaruVidEncClass *klass);
+static void gst_maruvidenc_init (GstMaruVidEnc *maruenc);
+static void gst_maruvidenc_finalize (GObject *object);
+
+static gboolean gst_maruvidenc_set_format (GstVideoEncoder * encoder,
+    GstVideoCodecState * state);
+static gboolean gst_maruvidenc_propose_allocation (GstVideoEncoder * encoder,
+    GstQuery * query);
+
+static GstCaps *gst_maruvidenc_getcaps (GstVideoEncoder * encoder, GstCaps * filter);
+static GstFlowReturn gst_maruvidenc_handle_frame (GstVideoEncoder * encoder,
+    GstVideoCodecFrame * frame);
+
+static void gst_maruvidenc_set_property (GObject * object,
+    guint prop_id, const GValue * value, GParamSpec * pspec);
+static void gst_maruvidenc_get_property (GObject * object,
+    guint prop_id, GValue * value, GParamSpec * pspec);
+
+/* Defaults for the bitrate property and GOP size. */
+#define DEFAULT_VIDEO_BITRATE 300000
+#define DEFAULT_VIDEO_GOP_SIZE 15
+
+#define DEFAULT_WIDTH 352
+#define DEFAULT_HEIGHT 288
+
+/*
+ * Implementation
+ */
+/* GType base_init: builds the element metadata and sink/src pad templates
+ * from the CodecElement stashed on the type as qdata, and stores them on
+ * the class for later use. */
+static void
+gst_maruvidenc_base_init (GstMaruVidEncClass *klass)
+{
+  GST_DEBUG (" >> ENTER");
+  GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+  CodecElement *codec;
+  GstPadTemplate *sinktempl = NULL, *srctempl = NULL;
+  GstCaps *sinkcaps = NULL, *srccaps = NULL;
+  gchar *longname, *description;
+
+  codec =
+    (CodecElement *)g_type_get_qdata (G_OBJECT_CLASS_TYPE (klass),
+                    GST_MARUENC_PARAMS_QDATA);
+  g_assert (codec != NULL);
+
+  /* construct the element details struct */
+  longname = g_strdup_printf ("%s Encoder", codec->longname);
+  description = g_strdup_printf ("%s Encoder", codec->name);
+
+  gst_element_class_set_metadata (element_class,
+      longname,
+      "Codec/Encoder/Video",
+      description,
+      "Sooyoung Ha <yoosah.ha@samsung.com>");
+  g_free (longname);
+  g_free (description);
+
+  /* Fall back to a placeholder caps when the codec mapping fails, so the
+   * element still registers. */
+  if (!(srccaps = gst_maru_codecname_to_caps (codec->name, NULL, TRUE))) {
+    GST_DEBUG ("Couldn't get source caps for encoder '%s'", codec->name);
+    srccaps = gst_caps_new_empty_simple ("unknown/unknown");
+  }
+
+  sinkcaps = gst_maru_codectype_to_video_caps (NULL, codec->name, FALSE, codec);
+
+  if (!sinkcaps) {
+    GST_DEBUG ("Couldn't get sink caps for encoder '%s'", codec->name);
+    sinkcaps = gst_caps_new_empty_simple ("unknown/unknown");
+  }
+
+  /* pad templates */
+  sinktempl = gst_pad_template_new ("sink", GST_PAD_SINK,
+      GST_PAD_ALWAYS, sinkcaps);
+  srctempl = gst_pad_template_new ("src", GST_PAD_SRC,
+      GST_PAD_ALWAYS, srccaps);
+
+  gst_element_class_add_pad_template (element_class, srctempl);
+  gst_element_class_add_pad_template (element_class, sinktempl);
+
+  klass->codec = codec;
+  klass->sinktempl = sinktempl;
+  klass->srctempl = srctempl;
+}
+
+/* GType class_init: installs the "bitrate" property and wires up the
+ * GstVideoEncoder vfuncs and GObject property/finalize handlers. */
+static void
+gst_maruvidenc_class_init (GstMaruVidEncClass *klass)
+{
+  GST_DEBUG (" >> ENTER");
+  GObjectClass *gobject_class = (GObjectClass *) klass;
+  GstVideoEncoderClass *venc_class = (GstVideoEncoderClass *) klass;
+
+  parent_class = g_type_class_peek_parent (klass);
+
+  gobject_class->set_property = gst_maruvidenc_set_property;
+  gobject_class->get_property = gst_maruvidenc_get_property;
+
+  g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_BIT_RATE,
+    g_param_spec_ulong ("bitrate", "Bit Rate",
+      "Target VIDEO Bitrate", 0, G_MAXULONG, DEFAULT_VIDEO_BITRATE,
+      G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+  venc_class->handle_frame = gst_maruvidenc_handle_frame;
+  venc_class->getcaps = gst_maruvidenc_getcaps;
+  venc_class->set_format = gst_maruvidenc_set_format;
+  venc_class->propose_allocation = gst_maruvidenc_propose_allocation;
+
+  gobject_class->finalize = gst_maruvidenc_finalize;
+}
+
+/* Instance init: allocates the backend context/device structs and seeds
+ * the cached encoding parameters with their defaults. */
+static void
+gst_maruvidenc_init (GstMaruVidEnc *maruenc)
+{
+  GST_DEBUG (" >> ENTER");
+  // instead of AVCodecContext
+  maruenc->context = g_malloc0 (sizeof(CodecContext));
+  maruenc->context->video.pix_fmt = PIX_FMT_NONE;
+  maruenc->context->audio.sample_fmt = SAMPLE_FMT_NONE;
+
+  maruenc->opened = FALSE;
+
+  maruenc->dev = g_malloc0 (sizeof(CodecDevice));
+
+  maruenc->bitrate = DEFAULT_VIDEO_BITRATE;
+  maruenc->buffer_size = 512 * 1024;
+  maruenc->gop_size = DEFAULT_VIDEO_GOP_SIZE;
+}
+
+/* GObject finalize: closes the codec session if still open and frees the
+ * context and device structs, then chains up.
+ * NOTE(review): working_buf is not freed here — it is allocated in
+ * gst_maruenc_setup_working_buf(); confirm whether this leaks. */
+static void
+gst_maruvidenc_finalize (GObject *object)
+{
+  GST_DEBUG (" >> ENTER");
+  // Deinit Decoder
+  GstMaruVidEnc *maruenc = (GstMaruVidEnc *) object;
+
+  if (maruenc->opened) {
+    gst_maru_avcodec_close (maruenc->context, maruenc->dev);
+    maruenc->opened = FALSE;
+  }
+
+  if (maruenc->context) {
+    g_free (maruenc->context);
+    maruenc->context = NULL;
+  }
+
+  if (maruenc->dev) {
+    g_free (maruenc->dev);
+    maruenc->dev = NULL;
+  }
+
+  G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+/* GstVideoEncoder::getcaps vfunc: delegates to the base-class proxy
+ * getcaps with the supplied filter.  Returns a new caps ref. */
+static GstCaps *
+gst_maruvidenc_getcaps (GstVideoEncoder * encoder, GstCaps * filter)
+{
+  GST_DEBUG (" >> ENTER");
+  GstMaruVidEnc *maruenc = (GstMaruVidEnc *) encoder;
+  GstCaps *caps = NULL;
+
+  GST_DEBUG_OBJECT (maruenc, "getting caps");
+
+  caps = gst_video_encoder_proxy_getcaps (encoder, NULL, filter);
+  GST_DEBUG_OBJECT (maruenc, "return caps %" GST_PTR_FORMAT, caps);
+  return caps;
+}
+
+/* GstVideoEncoder::set_format vfunc.
+ * Closes any previous session, maps the new input video info onto the
+ * CodecContext (with an MPEG4 time-base workaround), opens the codec,
+ * verifies the pixel format round-trips, then negotiates output caps by
+ * intersecting allowed downstream caps with what the codec produces.
+ * Returns TRUE on success. */
+static gboolean
+gst_maruvidenc_set_format (GstVideoEncoder * encoder,
+    GstVideoCodecState * state)
+{
+  GST_DEBUG (" >> ENTER");
+  GstCaps *other_caps;
+  GstCaps *allowed_caps;
+  GstCaps *icaps;
+  GstVideoCodecState *output_format;
+  enum PixelFormat pix_fmt;
+  GstMaruVidEnc *maruenc = (GstMaruVidEnc *) encoder;
+  GstMaruVidEncClass *oclass =
+    (GstMaruVidEncClass *) G_OBJECT_GET_CLASS (maruenc);
+
+  /* close old session */
+  if (maruenc->opened) {
+    gst_maru_avcodec_close (maruenc->context, maruenc->dev);
+    maruenc->opened = FALSE;
+  }
+
+  /* user defined properties */
+  maruenc->context->bit_rate = maruenc->bitrate;
+  GST_DEBUG_OBJECT (maruenc, "Setting avcontext to bitrate %lu, gop_size %d",
+      maruenc->bitrate, maruenc->gop_size);
+
+  GST_DEBUG_OBJECT (maruenc, "Extracting common video information");
+  /* fetch pix_fmt, fps, par, width, height... */
+  gst_maru_videoinfo_to_context (&state->info, maruenc->context);
+
+  if (!strcmp(oclass->codec->name ,"mpeg4")
+      && (maruenc->context->video.fps_d > 65535)) {
+    /* MPEG4 Standards do not support time_base denominator greater than
+     * (1<<16) - 1 . We therefore scale them down.
+     * Agreed, it will not be the exact framerate... but the difference
+     * shouldn't be that noticeable */
+    maruenc->context->video.fps_n =
+      (gint) gst_util_uint64_scale_int (maruenc->context->video.fps_n,
+          65535, maruenc->context->video.fps_d);
+    maruenc->context->video.fps_d = 65535;
+    /* NOTE(review): arguments appear to be fps_d, fps_n — order looks
+     * swapped relative to the "%d / %d" framerate format; verify. */
+    GST_DEBUG_OBJECT (maruenc, "MPEG4 : scaled down framerate to %d / %d",
+        maruenc->context->video.fps_d, maruenc->context->video.fps_n);
+  }
+
+  /* Remember the requested pixel format so we can detect whether the
+   * codec changed it during open. */
+  pix_fmt = maruenc->context->video.pix_fmt;
+
+  /* open codec */
+  if (gst_maru_avcodec_open (maruenc->context,
+      oclass->codec, maruenc->dev) < 0) {
+    GST_DEBUG_OBJECT (maruenc, "maru_%senc: Failed to open codec",
+        oclass->codec->name);
+    return FALSE;
+  }
+
+  /* is the colourspace correct? */
+  if (pix_fmt != maruenc->context->video.pix_fmt) {
+    gst_maru_avcodec_close (maruenc->context, maruenc->dev);
+    GST_DEBUG_OBJECT (maruenc,
+      "maru_%senc: AV wants different colorspace (%d given, %d wanted)",
+      oclass->codec->name, pix_fmt, maruenc->context->video.pix_fmt);
+    return FALSE;
+  }
+
+  /* we may have failed mapping caps to a pixfmt,
+   * and quite some codecs do not make up their own mind about that
+   * in any case, _NONE can never work out later on */
+  if (oclass->codec->media_type == AVMEDIA_TYPE_VIDEO
+    && pix_fmt == PIX_FMT_NONE) {
+    GST_DEBUG_OBJECT (maruenc, "maru_%senc: Failed to determine input format",
+      oclass->codec->name);
+    return FALSE;
+  }
+
+  /* some codecs support more than one format, first auto-choose one */
+  GST_DEBUG_OBJECT (maruenc, "picking an output format ...");
+  allowed_caps = gst_pad_get_allowed_caps (GST_VIDEO_ENCODER_SRC_PAD (encoder));
+  if (!allowed_caps) {
+    GST_DEBUG_OBJECT (maruenc, "... but no peer, using template caps");
+    /* we need to copy because get_allowed_caps returns a ref, and
+     * get_pad_template_caps doesn't */
+    allowed_caps =
+      gst_pad_get_pad_template_caps (GST_VIDEO_ENCODER_SRC_PAD (encoder));
+  }
+  GST_DEBUG_OBJECT (maruenc, "chose caps %" GST_PTR_FORMAT, allowed_caps);
+  gst_maru_caps_with_codecname (oclass->codec->name,
+    oclass->codec->media_type, allowed_caps, maruenc->context);
+
+  /* try to set this caps on the other side */
+  other_caps =
+  gst_maru_codecname_to_caps (oclass->codec->name, maruenc->context, TRUE);
+  if (!other_caps) {
+    GST_DEBUG ("Unsupported codec - no caps found");
+    gst_maru_avcodec_close (maruenc->context, maruenc->dev);
+    return FALSE;
+  }
+
+  icaps = gst_caps_intersect (allowed_caps, other_caps);
+  gst_caps_unref (allowed_caps);
+  gst_caps_unref (other_caps);
+  if (gst_caps_is_empty (icaps)) {
+    /* NOTE(review): the codec session stays open on this failure path,
+     * unlike the other_caps failure above — confirm intended. */
+    gst_caps_unref (icaps);
+    return FALSE;
+  }
+  icaps = gst_caps_truncate (icaps);
+
+  /* Store input state and set output state */
+  if (maruenc->input_state)
+    gst_video_codec_state_unref (maruenc->input_state);
+  maruenc->input_state = gst_video_codec_state_ref (state);
+
+  output_format = gst_video_encoder_set_output_state (encoder, icaps, state);
+  gst_video_codec_state_unref (output_format);
+
+  /* success! */
+  maruenc->opened = TRUE;
+
+  return TRUE;
+}
+
+/* GstVideoEncoder::propose_allocation vfunc: advertises support for
+ * GstVideoMeta on upstream buffers, then chains up. */
+static gboolean
+gst_maruvidenc_propose_allocation (GstVideoEncoder * encoder,
+    GstQuery * query)
+{
+  GST_DEBUG (" >> ENTER");
+  gst_query_add_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL);
+
+  return GST_VIDEO_ENCODER_CLASS (parent_class)->propose_allocation (encoder,
+    query);
+}
+
+/* (Re)allocates the scratch output buffer sized for the current frame
+ * dimensions (width * height * 6 plus FF_MIN_BUFFER_SIZE headroom), and
+ * mirrors the chosen size into maruenc->buffer_size.  The old buffer is
+ * freed whenever the wanted size differs. */
+static void
+gst_maruenc_setup_working_buf (GstMaruVidEnc *maruenc)
+{
+  GST_DEBUG (" >> ENTER");
+  guint wanted_size =
+    maruenc->context->video.width * maruenc->context->video.height * 6 +
+    FF_MIN_BUFFER_SIZE;
+
+  if (maruenc->working_buf == NULL ||
+    maruenc->working_buf_size != wanted_size) {
+    if (maruenc->working_buf) {
+      g_free (maruenc->working_buf);
+    }
+    maruenc->working_buf_size = wanted_size;
+    maruenc->working_buf = g_malloc0 (maruenc->working_buf_size);
+  }
+  maruenc->buffer_size = wanted_size;
+}
+
+/* GstVideoEncoder::handle_frame vfunc.
+ * Encodes one raw frame into the working buffer via the backend interface,
+ * then (if the encoder produced output) takes the oldest pending frame,
+ * allocates an output buffer of the encoded size, copies the payload and
+ * marks keyframes before finishing the frame.
+ * Returns GST_FLOW_OK also when the encoder errors or needs more data. */
+static GstFlowReturn
+gst_maruvidenc_handle_frame (GstVideoEncoder * encoder,
+    GstVideoCodecFrame * frame)
+{
+  GST_DEBUG (" >> ENTER");
+  GstMaruVidEnc *maruenc = (GstMaruVidEnc *) encoder;
+  GstBuffer *outbuf;
+  gint ret_size = 0;
+  int coded_frame = 0, is_keyframe = 0;
+  GstMapInfo mapinfo;
+
+  gst_buffer_map (frame->input_buffer, &mapinfo, GST_MAP_READ);
+
+  gst_maruenc_setup_working_buf (maruenc);
+
+  ret_size =
+    interface->encode_video (maruenc->context, maruenc->working_buf,
+                maruenc->working_buf_size, mapinfo.data,
+                mapinfo.size, GST_BUFFER_TIMESTAMP (frame->input_buffer),
+                &coded_frame, &is_keyframe, maruenc->dev);
+  gst_buffer_unmap (frame->input_buffer, &mapinfo);
+
+  if (ret_size < 0) {
+    GstMaruVidEncClass *oclass =
+      (GstMaruVidEncClass *) (G_OBJECT_GET_CLASS (maruenc));
+    GST_ERROR_OBJECT (maruenc,
+        "maru_%senc: failed to encode buffer", oclass->codec->name);
+    /* NOTE(review): the frame ref is not released on this error path,
+     * unlike the success path below — confirm whether this leaks. */
+    return GST_FLOW_OK;
+  }
+
+  /* Encoder needs more data */
+  if (!ret_size) {
+    return GST_FLOW_OK;
+  }
+
+  gst_video_codec_frame_unref (frame);
+
+  /* Get oldest frame */
+  frame = gst_video_encoder_get_oldest_frame (encoder);
+
+  /* Allocate output buffer */
+  if (gst_video_encoder_allocate_output_frame (encoder, frame,
+          ret_size) != GST_FLOW_OK) {
+    gst_video_codec_frame_unref (frame);
+    GstMaruVidEncClass *oclass =
+      (GstMaruVidEncClass *) (G_OBJECT_GET_CLASS (maruenc));
+    GST_ERROR_OBJECT (maruenc,
+        "maru_%senc: failed to alloc buffer", oclass->codec->name);
+    return GST_FLOW_ERROR;
+  }
+
+  outbuf = frame->output_buffer;
+  gst_buffer_fill (outbuf, 0, maruenc->working_buf, ret_size);
+
+  /* buggy codec may not set coded_frame */
+  if (coded_frame) {
+    if (is_keyframe)
+      GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT (frame);
+  } else
+    GST_WARNING_OBJECT (maruenc, "codec did not provide keyframe info");
+
+  return gst_video_encoder_finish_frame (encoder, frame);
+}
+
+
+/* GObject set_property handler: only "bitrate" is writable, and only
+ * before the codec session is opened (changes are refused afterwards). */
+static void
+gst_maruvidenc_set_property (GObject *object,
+  guint prop_id, const GValue *value, GParamSpec *pspec)
+{
+  GST_DEBUG (" >> ENTER");
+  GstMaruVidEnc *maruenc;
+
+  maruenc = (GstMaruVidEnc *) (object);
+
+  if (maruenc->opened) {
+    /* NOTE(review): message likely means "once the decoder is setup"
+     * (and this is an encoder) — runtime string left untouched here. */
+    GST_WARNING_OBJECT (maruenc,
+      "Can't change properties one decoder is setup !");
+    return;
+  }
+
+  switch (prop_id) {
+  case ARG_BIT_RATE:
+    maruenc->bitrate = g_value_get_ulong (value);
+    break;
+  default:
+    break;
+  }
+}
+
+/* GObject get_property handler: reports the cached "bitrate" value. */
+static void
+gst_maruvidenc_get_property (GObject *object,
+    guint prop_id, GValue *value, GParamSpec *pspec)
+{
+  GST_DEBUG (" >> ENTER");
+  GstMaruVidEnc *maruenc;
+
+  maruenc = (GstMaruVidEnc *) (object);
+
+  switch (prop_id) {
+    case ARG_BIT_RATE:
+      g_value_set_ulong (value, maruenc->bitrate);
+      break;
+    default:
+      break;
+  }
+}
+
+/* Registers one GStreamer encoder element per video-encode CodecElement in
+ * 'element'.  Each element type is named "maru_<codec>enc", derives from
+ * GST_TYPE_VIDEO_ENCODER and carries its CodecElement as type qdata.
+ * Mirrors gst_maruviddec_register() on the decoder side.
+ * Returns TRUE when all applicable codecs were registered. */
+gboolean
+gst_maruvidenc_register (GstPlugin *plugin, GList *element)
+{
+  GTypeInfo typeinfo = {
+    sizeof (GstMaruVidEncClass),
+    (GBaseInitFunc) gst_maruvidenc_base_init,
+    NULL,
+    (GClassInitFunc) gst_maruvidenc_class_init,
+    NULL,
+    NULL,
+    sizeof (GstMaruVidEnc),
+    0,
+    (GInstanceInitFunc) gst_maruvidenc_init,
+  };
+
+  GType type;
+  gchar *type_name;
+  gint rank = GST_RANK_PRIMARY;
+  GList *elem = element;
+  CodecElement *codec = NULL;
+
+  if (!elem) {
+    return FALSE;
+  }
+
+  /* register element */
+  do {
+    codec = (CodecElement *)(elem->data);
+    if (!codec) {
+      return FALSE;
+    }
+
+    /* Only video encoders are handled by this register function. */
+    if (codec->codec_type != CODEC_TYPE_ENCODE || codec->media_type != AVMEDIA_TYPE_VIDEO) {
+      continue;
+    }
+
+    type_name = g_strdup_printf ("maru_%senc", codec->name);
+    type = g_type_from_name (type_name);
+    if (!type) {
+      /* First registration of this codec: create the GType and attach the
+       * CodecElement so base_init/class_init can find it. */
+      type = g_type_register_static (GST_TYPE_VIDEO_ENCODER, type_name, &typeinfo, 0);
+      g_type_set_qdata (type, GST_MARUENC_PARAMS_QDATA, (gpointer) codec);
+    }
+
+    if (!gst_element_register (plugin, type_name, rank, type)) {
+      g_free (type_name);
+      return FALSE;
+    }
+    g_free (type_name);
+  } while ((elem = elem->next));
+
+  return TRUE;
+}
* this allows simpler detection of big vs little endian.
*/
enum PixelFormat {
+ PIX_FMT_NONE = -1,
+ PIX_FMT_YUV420P, ///< planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
+ PIX_FMT_YUYV422, ///< packed YUV 4:2:2, 16bpp, Y0 Cb Y1 Cr
+ PIX_FMT_RGB24, ///< packed RGB 8:8:8, 24bpp, RGBRGB...
+ PIX_FMT_BGR24, ///< packed RGB 8:8:8, 24bpp, BGRBGR...
+ PIX_FMT_YUV422P, ///< planar YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples)
+ PIX_FMT_YUV444P, ///< planar YUV 4:4:4, 24bpp, (1 Cr & Cb sample per 1x1 Y samples)
+ PIX_FMT_YUV410P, ///< planar YUV 4:1:0, 9bpp, (1 Cr & Cb sample per 4x4 Y samples)
+ PIX_FMT_YUV411P, ///< planar YUV 4:1:1, 12bpp, (1 Cr & Cb sample per 4x1 Y samples)
+ PIX_FMT_GRAY8, ///< Y , 8bpp
+ PIX_FMT_MONOWHITE, ///< Y , 1bpp, 0 is white, 1 is black, in each byte pixels are ordered from the msb to the lsb
+ PIX_FMT_MONOBLACK, ///< Y , 1bpp, 0 is black, 1 is white, in each byte pixels are ordered from the msb to the lsb
+ PIX_FMT_PAL8, ///< 8 bit with PIX_FMT_RGB32 palette
+ PIX_FMT_YUVJ420P, ///< planar YUV 4:2:0, 12bpp, full scale (JPEG), deprecated in favor of PIX_FMT_YUV420P and setting color_range
+ PIX_FMT_YUVJ422P, ///< planar YUV 4:2:2, 16bpp, full scale (JPEG), deprecated in favor of PIX_FMT_YUV422P and setting color_range
+ PIX_FMT_YUVJ444P, ///< planar YUV 4:4:4, 24bpp, full scale (JPEG), deprecated in favor of PIX_FMT_YUV444P and setting color_range
+ PIX_FMT_XVMC_MPEG2_MC,///< XVideo Motion Acceleration via common packet passing
+ PIX_FMT_XVMC_MPEG2_IDCT,
+ PIX_FMT_UYVY422, ///< packed YUV 4:2:2, 16bpp, Cb Y0 Cr Y1
+ PIX_FMT_UYYVYY411, ///< packed YUV 4:1:1, 12bpp, Cb Y0 Y1 Cr Y2 Y3
+ PIX_FMT_BGR8, ///< packed RGB 3:3:2, 8bpp, (msb)2B 3G 3R(lsb)
+ PIX_FMT_BGR4, ///< packed RGB 1:2:1 bitstream, 4bpp, (msb)1B 2G 1R(lsb), a byte contains two pixels, the first pixel in the byte is the one composed by the 4 msb bits
+ PIX_FMT_BGR4_BYTE, ///< packed RGB 1:2:1, 8bpp, (msb)1B 2G 1R(lsb)
+ PIX_FMT_RGB8, ///< packed RGB 3:3:2, 8bpp, (msb)2R 3G 3B(lsb)
+ PIX_FMT_RGB4, ///< packed RGB 1:2:1 bitstream, 4bpp, (msb)1R 2G 1B(lsb), a byte contains two pixels, the first pixel in the byte is the one composed by the 4 msb bits
+ PIX_FMT_RGB4_BYTE, ///< packed RGB 1:2:1, 8bpp, (msb)1R 2G 1B(lsb)
+ PIX_FMT_NV12, ///< planar YUV 4:2:0, 12bpp, 1 plane for Y and 1 plane for the UV components, which are interleaved (first byte U and the following byte V)
+ PIX_FMT_NV21, ///< as above, but U and V bytes are swapped
+
+ PIX_FMT_ARGB, ///< packed ARGB 8:8:8:8, 32bpp, ARGBARGB...
+ PIX_FMT_RGBA, ///< packed RGBA 8:8:8:8, 32bpp, RGBARGBA...
+ PIX_FMT_ABGR, ///< packed ABGR 8:8:8:8, 32bpp, ABGRABGR...
+ PIX_FMT_BGRA, ///< packed BGRA 8:8:8:8, 32bpp, BGRABGRA...
+
+ PIX_FMT_GRAY16BE, ///< Y , 16bpp, big-endian
+ PIX_FMT_GRAY16LE, ///< Y , 16bpp, little-endian
+ PIX_FMT_YUV440P, ///< planar YUV 4:4:0 (1 Cr & Cb sample per 1x2 Y samples)
+ PIX_FMT_YUVJ440P, ///< planar YUV 4:4:0 full scale (JPEG), deprecated in favor of PIX_FMT_YUV440P and setting color_range
+ PIX_FMT_YUVA420P, ///< planar YUV 4:2:0, 20bpp, (1 Cr & Cb sample per 2x2 Y & A samples)
+ PIX_FMT_VDPAU_H264,///< H.264 HW decoding with VDPAU, data[0] contains a vdpau_render_state struct which contains the bitstream of the slices as well as various fields extracted from headers
+ PIX_FMT_VDPAU_MPEG1,///< MPEG-1 HW decoding with VDPAU, data[0] contains a vdpau_render_state struct which contains the bitstream of the slices as well as various fields extracted from headers
+ PIX_FMT_VDPAU_MPEG2,///< MPEG-2 HW decoding with VDPAU, data[0] contains a vdpau_render_state struct which contains the bitstream of the slices as well as various fields extracted from headers
+ PIX_FMT_VDPAU_WMV3,///< WMV3 HW decoding with VDPAU, data[0] contains a vdpau_render_state struct which contains the bitstream of the slices as well as various fields extracted from headers
+ PIX_FMT_VDPAU_VC1, ///< VC-1 HW decoding with VDPAU, data[0] contains a vdpau_render_state struct which contains the bitstream of the slices as well as various fields extracted from headers
+ PIX_FMT_RGB48BE, ///< packed RGB 16:16:16, 48bpp, 16R, 16G, 16B, the 2-byte value for each R/G/B component is stored as big-endian
+ PIX_FMT_RGB48LE, ///< packed RGB 16:16:16, 48bpp, 16R, 16G, 16B, the 2-byte value for each R/G/B component is stored as little-endian
+
+ PIX_FMT_RGB565BE, ///< packed RGB 5:6:5, 16bpp, (msb) 5R 6G 5B(lsb), big-endian
+ PIX_FMT_RGB565LE, ///< packed RGB 5:6:5, 16bpp, (msb) 5R 6G 5B(lsb), little-endian
+ PIX_FMT_RGB555BE, ///< packed RGB 5:5:5, 16bpp, (msb)1A 5R 5G 5B(lsb), big-endian, most significant bit to 0
+ PIX_FMT_RGB555LE, ///< packed RGB 5:5:5, 16bpp, (msb)1A 5R 5G 5B(lsb), little-endian, most significant bit to 0
+
+ PIX_FMT_BGR565BE, ///< packed BGR 5:6:5, 16bpp, (msb) 5B 6G 5R(lsb), big-endian
+ PIX_FMT_BGR565LE, ///< packed BGR 5:6:5, 16bpp, (msb) 5B 6G 5R(lsb), little-endian
+ PIX_FMT_BGR555BE, ///< packed BGR 5:5:5, 16bpp, (msb)1A 5B 5G 5R(lsb), big-endian, most significant bit to 1
+ PIX_FMT_BGR555LE, ///< packed BGR 5:5:5, 16bpp, (msb)1A 5B 5G 5R(lsb), little-endian, most significant bit to 1
+
+ PIX_FMT_VAAPI_MOCO, ///< HW acceleration through VA API at motion compensation entry-point, Picture.data[3] contains a vaapi_render_state struct which contains macroblocks as well as various fields extracted from headers
+ PIX_FMT_VAAPI_IDCT, ///< HW acceleration through VA API at IDCT entry-point, Picture.data[3] contains a vaapi_render_state struct which contains fields extracted from headers
+ PIX_FMT_VAAPI_VLD, ///< HW decoding through VA API, Picture.data[3] contains a vaapi_render_state struct which contains the bitstream of the slices as well as various fields extracted from headers
+
+ PIX_FMT_YUV420P16LE, ///< planar YUV 4:2:0, 24bpp, (1 Cr & Cb sample per 2x2 Y samples), little-endian
+ PIX_FMT_YUV420P16BE, ///< planar YUV 4:2:0, 24bpp, (1 Cr & Cb sample per 2x2 Y samples), big-endian
+ PIX_FMT_YUV422P16LE, ///< planar YUV 4:2:2, 32bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian
+ PIX_FMT_YUV422P16BE, ///< planar YUV 4:2:2, 32bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian
+ PIX_FMT_YUV444P16LE, ///< planar YUV 4:4:4, 48bpp, (1 Cr & Cb sample per 1x1 Y samples), little-endian
+ PIX_FMT_YUV444P16BE, ///< planar YUV 4:4:4, 48bpp, (1 Cr & Cb sample per 1x1 Y samples), big-endian
+ PIX_FMT_VDPAU_MPEG4, ///< MPEG4 HW decoding with VDPAU, data[0] contains a vdpau_render_state struct which contains the bitstream of the slices as well as various fields extracted from headers
+ PIX_FMT_DXVA2_VLD, ///< HW decoding through DXVA2, Picture.data[3] contains a LPDIRECT3DSURFACE9 pointer
+
+ PIX_FMT_RGB444LE, ///< packed RGB 4:4:4, 16bpp, (msb)4A 4R 4G 4B(lsb), little-endian, most significant bits to 0
+ PIX_FMT_RGB444BE, ///< packed RGB 4:4:4, 16bpp, (msb)4A 4R 4G 4B(lsb), big-endian, most significant bits to 0
+ PIX_FMT_BGR444LE, ///< packed BGR 4:4:4, 16bpp, (msb)4A 4B 4G 4R(lsb), little-endian, most significant bits to 1
+ PIX_FMT_BGR444BE, ///< packed BGR 4:4:4, 16bpp, (msb)4A 4B 4G 4R(lsb), big-endian, most significant bits to 1
+ PIX_FMT_Y400A, ///< 8bit gray, 8bit alpha
+ PIX_FMT_BGR48BE, ///< packed RGB 16:16:16, 48bpp, 16B, 16G, 16R, the 2-byte value for each R/G/B component is stored as big-endian
+ PIX_FMT_BGR48LE, ///< packed RGB 16:16:16, 48bpp, 16B, 16G, 16R, the 2-byte value for each R/G/B component is stored as little-endian
+ PIX_FMT_YUV420P9BE, ///< planar YUV 4:2:0, 13.5bpp, (1 Cr & Cb sample per 2x2 Y samples), big-endian
+ PIX_FMT_YUV420P9LE, ///< planar YUV 4:2:0, 13.5bpp, (1 Cr & Cb sample per 2x2 Y samples), little-endian
+ PIX_FMT_YUV420P10BE,///< planar YUV 4:2:0, 15bpp, (1 Cr & Cb sample per 2x2 Y samples), big-endian
+ PIX_FMT_YUV420P10LE,///< planar YUV 4:2:0, 15bpp, (1 Cr & Cb sample per 2x2 Y samples), little-endian
+ PIX_FMT_YUV422P10BE,///< planar YUV 4:2:2, 20bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian
+ PIX_FMT_YUV422P10LE,///< planar YUV 4:2:2, 20bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian
+ PIX_FMT_YUV444P9BE, ///< planar YUV 4:4:4, 27bpp, (1 Cr & Cb sample per 1x1 Y samples), big-endian
+ PIX_FMT_YUV444P9LE, ///< planar YUV 4:4:4, 27bpp, (1 Cr & Cb sample per 1x1 Y samples), little-endian
+ PIX_FMT_YUV444P10BE,///< planar YUV 4:4:4, 30bpp, (1 Cr & Cb sample per 1x1 Y samples), big-endian
+ PIX_FMT_YUV444P10LE,///< planar YUV 4:4:4, 30bpp, (1 Cr & Cb sample per 1x1 Y samples), little-endian
+ PIX_FMT_YUV422P9BE, ///< planar YUV 4:2:2, 18bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian
+ PIX_FMT_YUV422P9LE, ///< planar YUV 4:2:2, 18bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian
+ PIX_FMT_VDA_VLD, ///< hardware decoding through VDA
+ PIX_FMT_GBRP, ///< planar GBR 4:4:4 24bpp
+ PIX_FMT_GBRP9BE, ///< planar GBR 4:4:4 27bpp, big-endian
+ PIX_FMT_GBRP9LE, ///< planar GBR 4:4:4 27bpp, little-endian
+ PIX_FMT_GBRP10BE, ///< planar GBR 4:4:4 30bpp, big-endian
+ PIX_FMT_GBRP10LE, ///< planar GBR 4:4:4 30bpp, little-endian
+ PIX_FMT_GBRP16BE, ///< planar GBR 4:4:4 48bpp, big-endian
+ PIX_FMT_GBRP16LE, ///< planar GBR 4:4:4 48bpp, little-endian
+ PIX_FMT_YUVA422P, ///< planar YUV 4:2:2 24bpp, (1 Cr & Cb sample per 2x1 Y & A samples)
+ PIX_FMT_YUVA444P, ///< planar YUV 4:4:4 32bpp, (1 Cr & Cb sample per 1x1 Y & A samples)
+ PIX_FMT_YUVA420P9BE, ///< planar YUV 4:2:0 22.5bpp, (1 Cr & Cb sample per 2x2 Y & A samples), big-endian
+ PIX_FMT_YUVA420P9LE, ///< planar YUV 4:2:0 22.5bpp, (1 Cr & Cb sample per 2x2 Y & A samples), little-endian
+ PIX_FMT_YUVA422P9BE, ///< planar YUV 4:2:2 27bpp, (1 Cr & Cb sample per 2x1 Y & A samples), big-endian
+ PIX_FMT_YUVA422P9LE, ///< planar YUV 4:2:2 27bpp, (1 Cr & Cb sample per 2x1 Y & A samples), little-endian
+ PIX_FMT_YUVA444P9BE, ///< planar YUV 4:4:4 36bpp, (1 Cr & Cb sample per 1x1 Y & A samples), big-endian
+ PIX_FMT_YUVA444P9LE, ///< planar YUV 4:4:4 36bpp, (1 Cr & Cb sample per 1x1 Y & A samples), little-endian
+ PIX_FMT_YUVA420P10BE, ///< planar YUV 4:2:0 25bpp, (1 Cr & Cb sample per 2x2 Y & A samples, big-endian)
+ PIX_FMT_YUVA420P10LE, ///< planar YUV 4:2:0 25bpp, (1 Cr & Cb sample per 2x2 Y & A samples, little-endian)
+ PIX_FMT_YUVA422P10BE, ///< planar YUV 4:2:2 30bpp, (1 Cr & Cb sample per 2x1 Y & A samples, big-endian)
+ PIX_FMT_YUVA422P10LE, ///< planar YUV 4:2:2 30bpp, (1 Cr & Cb sample per 2x1 Y & A samples, little-endian)
+ PIX_FMT_YUVA444P10BE, ///< planar YUV 4:4:4 40bpp, (1 Cr & Cb sample per 1x1 Y & A samples, big-endian)
+ PIX_FMT_YUVA444P10LE, ///< planar YUV 4:4:4 40bpp, (1 Cr & Cb sample per 1x1 Y & A samples, little-endian)
+ PIX_FMT_YUVA420P16BE, ///< planar YUV 4:2:0 40bpp, (1 Cr & Cb sample per 2x2 Y & A samples, big-endian)
+ PIX_FMT_YUVA420P16LE, ///< planar YUV 4:2:0 40bpp, (1 Cr & Cb sample per 2x2 Y & A samples, little-endian)
+ PIX_FMT_YUVA422P16BE, ///< planar YUV 4:2:2 48bpp, (1 Cr & Cb sample per 2x1 Y & A samples, big-endian)
+ PIX_FMT_YUVA422P16LE, ///< planar YUV 4:2:2 48bpp, (1 Cr & Cb sample per 2x1 Y & A samples, little-endian)
+ PIX_FMT_YUVA444P16BE, ///< planar YUV 4:4:4 64bpp, (1 Cr & Cb sample per 1x1 Y & A samples, big-endian)
+ PIX_FMT_YUVA444P16LE, ///< planar YUV 4:4:4 64bpp, (1 Cr & Cb sample per 1x1 Y & A samples, little-endian)
+ PIX_FMT_NB, ///< number of pixel formats, DO NOT USE THIS if you want to link with shared libav* because the number of formats might differ between versions
+
+#if FF_API_PIX_FMT
+#include "old_pix_fmts.h"
+#endif
+};
+#if yoosah_developing
+enum PixelFormat {
PIX_FMT_NONE= -1,
PIX_FMT_YUV420P, ///< planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
PIX_FMT_YUYV422, ///< packed YUV 4:2:2, 16bpp, Y0 Cb Y1 Cr
PIX_FMT_YUV444P10LE,///< planar YUV 4:4:4, 30bpp, (1 Cr & Cb sample per 1x1 Y samples), big-endian
PIX_FMT_NB, ///< number of pixel formats, DO NOT USE THIS if you want to link with shared libav* because the number of formats might differ between versions
};
-
+#endif
#if AV_HAVE_BIGENDIAN
# define PIX_FMT_NE(be, le) PIX_FMT_##be
#else