GST_REQUIRED=0.10.16
GSTPB_REQUIRED=0.10.16
-AC_CONFIG_SRCDIR([src/gstemul.c])
+AC_CONFIG_SRCDIR([src/gstmaru.c])
AC_CONFIG_HEADERS([config.h])
dnl required version of automake
+* Wed Sep 11 12:32:26 UTC 2013 Kitae Kim <kt920.kim@samsung.com>
+- changed a way to use device memory effectively.
+
* Wed Aug 21 06:04:11 UTC 2013 Kitae Kim <kt920.kim@samsung.com>
- fixed prevent issues.
- fixed prevent issues.
* Fri Jun 21 11:49:35 UTC 2013 Kitae Kim <kt920.kim@samsung.com>
-- improved architecture of gstreamer codec plugin.
Name: gst-plugins-emulator
-Version: 0.1.4
+Version: 0.1.5
Release: 2
Summary: GStreamer Streaming-media framework plug-in for Tizen emulator.
Group: TO_BE/FILLED_IN
##############################################################################
# sources used to compile this plug-in
-libgstemul_la_SOURCES = gstemul.c \
- gstemulutils.c \
- gstemuldec.c \
- gstemulenc.c \
- gstemulapi.c \
- gstemuldev.c \
- gstemulapi2.c
+libgstemul_la_SOURCES = gstmaru.c \
+ gstmaruutils.c \
+ gstmarudec.c \
+ gstmaruenc.c \
+ gstmaruinterface.c \
+ gstmarudevice.c \
+ gstmarumem.c
# compiler and linker flags used to compile this plugin, set in configure.ac
libgstemul_la_CFLAGS = $(GST_CFLAGS) -g
libgstemul_la_LIBTOOLFLAGS = --tag=disable-static
# headers we need but don't want installed
-#noinst_HEADERS = gstemul.h
+#noinst_HEADERS = gstmaru.h
*
*/
-#include "gstemulcommon.h"
+#include "gstmaru.h"
-GST_DEBUG_CATEGORY (emul_debug);
+GST_DEBUG_CATEGORY (maru_debug);
#define GST_TYPE_EMULDEC \
- (gst_emul_dec_get_type())
+ (gst_maru_dec_get_type())
#define GST_EMULDEC(obj) \
(G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_EMULDEC,GstEmulDec))
#define GST_EMULDEC_CLASS(klass) \
#define GST_IS_EMULDEC_CLASS(klass) \
(G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_EMULDEC))
-gboolean gst_emuldec_register (GstPlugin *plugin, GList *element);
-gboolean gst_emulenc_register (GstPlugin *plugin, GList *element);
+gboolean gst_marudec_register (GstPlugin *plugin, GList *element);
+gboolean gst_maruenc_register (GstPlugin *plugin, GList *element);
static GList *codec_element = NULL;
static gboolean
-gst_emul_codec_element_init ()
+gst_maru_codec_element_init ()
{
int fd = 0, size = 0;
int version = 0;
fd = open (CODEC_DEV, O_RDWR);
if (fd < 0) {
- perror ("[gst-emul] failed to open codec device");
+ perror ("[gst-maru] failed to open codec device");
return FALSE;
}
buffer = mmap (NULL, 4096, PROT_READ | PROT_WRITE, MAP_SHARED, fd, 0);
if (!buffer) {
- perror ("[gst-emul] failure memory mapping.");
+ perror ("[gst-maru] failure memory mapping.");
close (fd);
return FALSE;
}
CODEC_LOG (DEBUG, "request a device to get codec element.\n");
if (ioctl(fd, CODEC_CMD_GET_ELEMENT, NULL) < 0) {
- perror ("[gst-emul] failed to get codec elements");
+ perror ("[gst-maru] failed to get codec elements");
munmap (buffer, 4096);
close (fd);
return FALSE;
static gboolean
plugin_init (GstPlugin *plugin)
{
- GST_DEBUG_CATEGORY_INIT (emul_debug,
- "tizen-emul", 0, "Tizen Emulator Codec Elements");
+ GST_DEBUG_CATEGORY_INIT (maru_debug,
+ "tizen-maru", 0, "Tizen Emulator Codec Elements");
- gst_emul_init_pix_fmt_info ();
+ gst_maru_init_pix_fmt_info ();
- if (!gst_emul_codec_element_init ()) {
+ if (!gst_maru_codec_element_init ()) {
GST_ERROR ("failed to get codec elements from QEMU");
return FALSE;
}
- if (!gst_emuldec_register (plugin, codec_element)) {
+ if (!gst_marudec_register (plugin, codec_element)) {
GST_ERROR ("failed to register decoder elements");
return FALSE;
}
- if (!gst_emulenc_register (plugin, codec_element)) {
+ if (!gst_maruenc_register (plugin, codec_element)) {
GST_ERROR ("failed to register encoder elements");
return FALSE;
}
}
#ifndef PACKAGE
-#define PACKAGE "gst-plugins-emulator"
+/* Keep the package name consistent with the spec file's
+ * "Name: gst-plugins-emulator"; the mechanical emul->maru rename
+ * must not rewrite the "emul" substring inside "emulator"
+ * (which would yield the bogus "gst-plugins-maruator"). */
+#define PACKAGE "gst-plugins-emulator"
#endif
GST_PLUGIN_DEFINE (
*
*/
-#ifndef __GST_EMUL_H__
-#define __GST_EMUL_H__
+#ifndef __GST_MARU_H__
+#define __GST_MARU_H__
#include <stdint.h>
#include <stdio.h>
#include <fcntl.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
-// #include <semaphore.h>
#include <glib.h>
#include <gst/gst.h>
#include "pixfmt.h"
-GST_DEBUG_CATEGORY_EXTERN (emul_debug);
-#define GST_CAT_DEFAULT emul_debug
+GST_DEBUG_CATEGORY_EXTERN (maru_debug);
+#define GST_CAT_DEFAULT maru_debug
G_BEGIN_DECLS
DEBUG,
};
-#define CODEC_DEV "/dev/newcodec"
+#define CODEC_DEV "/dev/brillcodec"
#define CODEC_VER 1
#define CODEC_LOG(level, fmt, ...) \
do { \
if (level <= INFO) \
- printf("[gst-emul][%d] " fmt, __LINE__, ##__VA_ARGS__); \
+ printf("[gst-maru][%d] " fmt, __LINE__, ##__VA_ARGS__); \
} while (0)
#define FF_INPUT_BUFFER_PADDING_SIZE 8
} AudioData;
typedef struct _CodecContext {
- union {
+// union {
VideoData video;
AudioData audio;
- };
+// };
int32_t bit_rate;
- int32_t codec_tag;
+ int32_t codec_tag;
int32_t codecdata_size;
uint8_t *codecdata;
CODEC_DEINIT,
};
+#if 0
enum CODEC_IO_CMD {
CODEC_CMD_COPY_TO_DEVICE_MEM = 5,
CODEC_CMD_COPY_FROM_DEVICE_MEM,
CODEC_CMD_M_SECURE_BUFFER,
CODEC_CMD_L_SECURE_BUFFER,
};
+#endif
+enum CODEC_IO_CMD {
+ CODEC_CMD_GET_VERSION = 20,
+ CODEC_CMD_GET_ELEMENT,
+ CODEC_CMD_GET_CONTEXT_INDEX,
+ CODEC_CMD_USE_DEVICE_MEM = 40,
+ CODEC_CMD_GET_DATA_FROM_SMALL_BUFFER,
+ CODEC_CMD_GET_DATA_FROM_MEDIUM_BUFFER,
+ CODEC_CMD_GET_DATA_FROM_LARGE_BUFFER,
+ CODEC_CMD_SECURE_SMALL_BUFFER,
+ CODEC_CMD_SECURE_MEDIUM_BUFFER,
+ CODEC_CMD_SECURE_LARGE_BUFFER,
+ CODEC_CMD_RELEASE_BUFFER,
+};
-#define CODEC_META_DATA_SIZE 256
-
-// CODEC_CMD_REQ_TO_SMALL_MEMORY
-// CODEC_CMD_REQ_FROM_SMALL_MEMORY
-// CODEC_CMD_REQ_TO_LARGE_MEMORY
-// CODEC_CMD_REQ_FROM_LARGE_MEMORY
enum CODEC_MEDIA_TYPE {
AVMEDIA_TYPE_UNKNOWN = -1,
SAMPLE_FMT_NB
};
-/* Define codec types.
- * e.g. FFmpeg, x264, libvpx and etc.
- */
-enum {
- FFMPEG_TYPE = 1,
-};
-
G_END_DECLS
#endif
*
*/
-#include "gstemulcommon.h"
-#include "gstemulutils.h"
-#include "gstemulapi.h"
-#include "gstemuldev.h"
+#include "gstmaru.h"
+#include "gstmaruutils.h"
+#include "gstmaruinterface.h"
+#include "gstmarudevice.h"
-#define GST_EMULDEC_PARAMS_QDATA g_quark_from_static_string("marudec-params")
+#define GST_MARUDEC_PARAMS_QDATA g_quark_from_static_string("marudec-params")
/* indicate dts, pts, offset in the stream */
typedef struct
static GstElementClass *parent_class = NULL;
-static void gst_emuldec_base_init (GstEmulDecClass *klass);
-static void gst_emuldec_class_init (GstEmulDecClass *klass);
-static void gst_emuldec_init (GstEmulDec *emuldec);
-static void gst_emuldec_finalize (GObject *object);
+static void gst_marudec_base_init (GstEmulDecClass *klass);
+static void gst_marudec_class_init (GstEmulDecClass *klass);
+static void gst_marudec_init (GstEmulDec *marudec);
+static void gst_marudec_finalize (GObject *object);
-static gboolean gst_emuldec_setcaps (GstPad *pad, GstCaps *caps);
+static gboolean gst_marudec_setcaps (GstPad *pad, GstCaps *caps);
// sinkpad
-static gboolean gst_emuldec_sink_event (GstPad *pad, GstEvent *event);
-static GstFlowReturn gst_emuldec_chain (GstPad *pad, GstBuffer *buffer);
+static gboolean gst_marudec_sink_event (GstPad *pad, GstEvent *event);
+static GstFlowReturn gst_marudec_chain (GstPad *pad, GstBuffer *buffer);
// srcpad
-static gboolean gst_emuldec_src_event (GstPad *pad, GstEvent *event);
-static GstStateChangeReturn gst_emuldec_change_state (GstElement *element,
+static gboolean gst_marudec_src_event (GstPad *pad, GstEvent *event);
+static GstStateChangeReturn gst_marudec_change_state (GstElement *element,
GstStateChange transition);
-static gboolean gst_emuldec_negotiate (GstEmulDec *dec, gboolean force);
+static gboolean gst_marudec_negotiate (GstEmulDec *dec, gboolean force);
-static gint gst_emuldec_frame (GstEmulDec *emuldec, guint8 *data,
+static gint gst_marudec_frame (GstEmulDec *marudec, guint8 *data,
guint size, gint *got_data,
const GstTSInfo *dec_info, gint64 in_offset, GstFlowReturn *ret);
-static gboolean gst_emuldec_open (GstEmulDec *emuldec);
-static int gst_emuldec_close (GstEmulDec *emuldec);
+static gboolean gst_marudec_open (GstEmulDec *marudec);
+static int gst_marudec_close (GstEmulDec *marudec);
static const GstTSInfo *
}
static void
-gst_emuldec_reset_ts (GstEmulDec *emuldec)
+gst_marudec_reset_ts (GstEmulDec *marudec)
{
- emuldec->next_out = GST_CLOCK_TIME_NONE;
+ marudec->next_out = GST_CLOCK_TIME_NONE;
}
static void
-gst_emuldec_update_qos (GstEmulDec *emuldec, gdouble proportion,
+gst_marudec_update_qos (GstEmulDec *marudec, gdouble proportion,
GstClockTime timestamp)
{
- GST_LOG_OBJECT (emuldec, "update QOS: %f, %" GST_TIME_FORMAT,
+ GST_LOG_OBJECT (marudec, "update QOS: %f, %" GST_TIME_FORMAT,
proportion, GST_TIME_ARGS (timestamp));
- GST_OBJECT_LOCK (emuldec);
- emuldec->proportion = proportion;
- emuldec->earliest_time = timestamp;
- GST_OBJECT_UNLOCK (emuldec);
+ GST_OBJECT_LOCK (marudec);
+ marudec->proportion = proportion;
+ marudec->earliest_time = timestamp;
+ GST_OBJECT_UNLOCK (marudec);
}
static void
-gst_emuldec_reset_qos (GstEmulDec *emuldec)
+gst_marudec_reset_qos (GstEmulDec *marudec)
{
- gst_emuldec_update_qos (emuldec, 0.5, GST_CLOCK_TIME_NONE);
- emuldec->processed = 0;
- emuldec->dropped = 0;
+ gst_marudec_update_qos (marudec, 0.5, GST_CLOCK_TIME_NONE);
+ marudec->processed = 0;
+ marudec->dropped = 0;
}
static gboolean
-gst_emuldec_do_qos (GstEmulDec *emuldec, GstClockTime timestamp,
+gst_marudec_do_qos (GstEmulDec *marudec, GstClockTime timestamp,
gboolean *mode_switch)
{
GstClockTimeDiff diff;
*mode_switch = FALSE;
if (G_UNLIKELY (!GST_CLOCK_TIME_IS_VALID (timestamp))) {
- emuldec->processed++;
+ marudec->processed++;
return TRUE;
}
- proportion = emuldec->proportion;
- earliest_time = emuldec->earliest_time;
+ proportion = marudec->proportion;
+ earliest_time = marudec->earliest_time;
- qostime = gst_segment_to_running_time (&emuldec->segment, GST_FORMAT_TIME,
+ qostime = gst_segment_to_running_time (&marudec->segment, GST_FORMAT_TIME,
timestamp);
if (G_UNLIKELY (!GST_CLOCK_TIME_IS_VALID (qostime))) {
- emuldec->processed++;
+ marudec->processed++;
return TRUE;
}
diff = GST_CLOCK_DIFF (qostime, earliest_time);
if (proportion < 0.4 && diff < 0 ){
- emuldec->processed++;
+ marudec->processed++;
return TRUE;
} else {
if (diff >= 0) {
-// if (emuldec->waiting_for_key) {
+// if (marudec->waiting_for_key) {
if (0) {
res = FALSE;
} else {
GstClockTime stream_time, jitter;
GstMessage *qos_msg;
- emuldec->dropped++;
+ marudec->dropped++;
stream_time =
- gst_segment_to_stream_time (&emuldec->segment, GST_FORMAT_TIME,
+ gst_segment_to_stream_time (&marudec->segment, GST_FORMAT_TIME,
timestamp);
jitter = GST_CLOCK_DIFF (qostime, earliest_time);
qos_msg =
- gst_message_new_qos (GST_OBJECT_CAST (emuldec), FALSE, qostime,
+ gst_message_new_qos (GST_OBJECT_CAST (marudec), FALSE, qostime,
stream_time, timestamp, GST_CLOCK_TIME_NONE);
gst_message_set_qos_values (qos_msg, jitter, proportion, 1000000);
gst_message_set_qos_stats (qos_msg, GST_FORMAT_BUFFERS,
- emuldec->processed, emuldec->dropped);
- gst_element_post_message (GST_ELEMENT_CAST (emuldec), qos_msg);
+ marudec->processed, marudec->dropped);
+ gst_element_post_message (GST_ELEMENT_CAST (marudec), qos_msg);
return res;
}
}
- emuldec->processed++;
+ marudec->processed++;
return TRUE;
}
static void
-clear_queued (GstEmulDec *emuldec)
+clear_queued (GstEmulDec *marudec)
{
- g_list_foreach (emuldec->queued, (GFunc) gst_mini_object_unref, NULL);
- g_list_free (emuldec->queued);
- emuldec->queued = NULL;
+ g_list_foreach (marudec->queued, (GFunc) gst_mini_object_unref, NULL);
+ g_list_free (marudec->queued);
+ marudec->queued = NULL;
}
static GstFlowReturn
-flush_queued (GstEmulDec *emuldec)
+flush_queued (GstEmulDec *marudec)
{
GstFlowReturn res = GST_FLOW_OK;
CODEC_LOG (DEBUG, "flush queued\n");
- while (emuldec->queued) {
- GstBuffer *buf = GST_BUFFER_CAST (emuldec->queued->data);
+ while (marudec->queued) {
+ GstBuffer *buf = GST_BUFFER_CAST (marudec->queued->data);
- GST_LOG_OBJECT (emuldec, "pushing buffer %p, offset %"
+ GST_LOG_OBJECT (marudec, "pushing buffer %p, offset %"
G_GUINT64_FORMAT ", timestamp %"
GST_TIME_FORMAT ", duration %" GST_TIME_FORMAT, buf,
GST_BUFFER_OFFSET (buf),
GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
GST_TIME_ARGS (GST_BUFFER_DURATION (buf)));
- res = gst_pad_push (emuldec->srcpad, buf);
+ res = gst_pad_push (marudec->srcpad, buf);
- emuldec->queued =
- g_list_delete_link (emuldec->queued, emuldec->queued);
+ marudec->queued =
+ g_list_delete_link (marudec->queued, marudec->queued);
}
return res;
}
static void
-gst_emuldec_drain (GstEmulDec *emuldec)
+gst_marudec_drain (GstEmulDec *marudec)
{
GstEmulDecClass *oclass;
- oclass = (GstEmulDecClass *) (G_OBJECT_GET_CLASS (emuldec));
+ oclass = (GstEmulDecClass *) (G_OBJECT_GET_CLASS (marudec));
// TODO: drain
#if 1
GstFlowReturn ret;
len =
- gst_emuldec_frame (emuldec, NULL, 0, &have_data, &ts_info_none, 0, &ret);
+ gst_marudec_frame (marudec, NULL, 0, &have_data, &ts_info_none, 0, &ret);
if (len < 0 || have_data == 0) {
break;
}
#endif
- if (emuldec->segment.rate < 0.0) {
+ if (marudec->segment.rate < 0.0) {
CODEC_LOG (DEBUG, "reverse playback\n");
- flush_queued (emuldec);
+ flush_queued (marudec);
}
}
* Implementation
*/
static void
-gst_emuldec_base_init (GstEmulDecClass *klass)
+gst_marudec_base_init (GstEmulDecClass *klass)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
GstCaps *sinkcaps = NULL, *srccaps = NULL;
codec =
(CodecElement *)g_type_get_qdata (G_OBJECT_CLASS_TYPE (klass),
- GST_EMULDEC_PARAMS_QDATA);
+ GST_MARUDEC_PARAMS_QDATA);
longname = g_strdup_printf ("%s Decoder", codec->longname);
classification = g_strdup_printf ("Codec/Decoder/%s",
g_free (classification);
g_free (description);
- sinkcaps = gst_emul_codecname_to_caps (codec->name, NULL, FALSE);
+ sinkcaps = gst_maru_codecname_to_caps (codec->name, NULL, FALSE);
if (!sinkcaps) {
sinkcaps = gst_caps_from_string ("unknown/unknown");
}
srccaps = gst_caps_from_string ("video/x-raw-rgb; video/x-raw-yuv");
break;
case AVMEDIA_TYPE_AUDIO:
- srccaps = gst_emul_codectype_to_audio_caps (NULL, codec->name, FALSE, codec);
+ srccaps = gst_maru_codectype_to_audio_caps (NULL, codec->name, FALSE, codec);
break;
default:
GST_LOG("unknown media type.\n");
}
static void
-gst_emuldec_class_init (GstEmulDecClass *klass)
+gst_marudec_class_init (GstEmulDecClass *klass)
{
GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
parent_class = g_type_class_peek_parent (klass);
#if 0
- gobject_class->set_property = gst_emuldec_set_property
- gobject_class->get_property = gst_emuldec_get_property
+ gobject_class->set_property = gst_marudec_set_property
+ gobject_class->get_property = gst_marudec_get_property
#endif
- gobject_class->finalize = gst_emuldec_finalize;
- gstelement_class->change_state = gst_emuldec_change_state;
+ gobject_class->finalize = gst_marudec_finalize;
+ gstelement_class->change_state = gst_marudec_change_state;
}
static void
-gst_emuldec_init (GstEmulDec *emuldec)
+gst_marudec_init (GstEmulDec *marudec)
{
GstEmulDecClass *oclass;
- oclass = (GstEmulDecClass*) (G_OBJECT_GET_CLASS(emuldec));
+ oclass = (GstEmulDecClass*) (G_OBJECT_GET_CLASS(marudec));
- emuldec->sinkpad = gst_pad_new_from_template (oclass->sinktempl, "sink");
- gst_pad_set_setcaps_function (emuldec->sinkpad,
- GST_DEBUG_FUNCPTR(gst_emuldec_setcaps));
- gst_pad_set_event_function (emuldec->sinkpad,
- GST_DEBUG_FUNCPTR(gst_emuldec_sink_event));
- gst_pad_set_chain_function (emuldec->sinkpad,
- GST_DEBUG_FUNCPTR(gst_emuldec_chain));
+ marudec->sinkpad = gst_pad_new_from_template (oclass->sinktempl, "sink");
+ gst_pad_set_setcaps_function (marudec->sinkpad,
+ GST_DEBUG_FUNCPTR(gst_marudec_setcaps));
+ gst_pad_set_event_function (marudec->sinkpad,
+ GST_DEBUG_FUNCPTR(gst_marudec_sink_event));
+ gst_pad_set_chain_function (marudec->sinkpad,
+ GST_DEBUG_FUNCPTR(gst_marudec_chain));
- emuldec->srcpad = gst_pad_new_from_template (oclass->srctempl, "src") ;
- gst_pad_use_fixed_caps (emuldec->srcpad);
- gst_pad_set_event_function (emuldec->srcpad,
- GST_DEBUG_FUNCPTR(gst_emuldec_src_event));
+ marudec->srcpad = gst_pad_new_from_template (oclass->srctempl, "src") ;
+ gst_pad_use_fixed_caps (marudec->srcpad);
+ gst_pad_set_event_function (marudec->srcpad,
+ GST_DEBUG_FUNCPTR(gst_marudec_src_event));
- gst_element_add_pad (GST_ELEMENT(emuldec), emuldec->sinkpad);
- gst_element_add_pad (GST_ELEMENT(emuldec), emuldec->srcpad);
+ gst_element_add_pad (GST_ELEMENT(marudec), marudec->sinkpad);
+ gst_element_add_pad (GST_ELEMENT(marudec), marudec->srcpad);
- emuldec->context = g_malloc0 (sizeof(CodecContext));
- emuldec->context->video.pix_fmt = PIX_FMT_NONE;
- emuldec->context->audio.sample_fmt = SAMPLE_FMT_NONE;
+ marudec->context = g_malloc0 (sizeof(CodecContext));
+ marudec->context->video.pix_fmt = PIX_FMT_NONE;
+ marudec->context->audio.sample_fmt = SAMPLE_FMT_NONE;
- emuldec->opened = FALSE;
- emuldec->format.video.par_n = -1;
- emuldec->format.video.fps_n = -1;
- emuldec->format.video.old_fps_n = -1;
+ marudec->opened = FALSE;
+ marudec->format.video.par_n = -1;
+ marudec->format.video.fps_n = -1;
+ marudec->format.video.old_fps_n = -1;
- emuldec->queued = NULL;
- gst_segment_init (&emuldec->segment, GST_FORMAT_TIME);
+ marudec->queued = NULL;
+ gst_segment_init (&marudec->segment, GST_FORMAT_TIME);
- emuldec->dev = g_malloc0 (sizeof(CodecDevice));
- if (!emuldec->dev) {
+ marudec->dev = g_malloc0 (sizeof(CodecDevice));
+ if (!marudec->dev) {
CODEC_LOG (ERR, "failed to allocate memory.\n");
}
}
static void
-gst_emuldec_finalize (GObject *object)
+gst_marudec_finalize (GObject *object)
{
- GstEmulDec *emuldec = (GstEmulDec *) object;
+ GstEmulDec *marudec = (GstEmulDec *) object;
- if (emuldec->context) {
- g_free (emuldec->context);
- emuldec->context = NULL;
+ if (marudec->context) {
+ g_free (marudec->context);
+ marudec->context = NULL;
}
G_OBJECT_CLASS (parent_class)->finalize (object);
}
static gboolean
-gst_emuldec_src_event (GstPad *pad, GstEvent *event)
+gst_marudec_src_event (GstPad *pad, GstEvent *event)
{
- GstEmulDec *emuldec;
+ GstEmulDec *marudec;
gboolean res;
- emuldec = (GstEmulDec *) gst_pad_get_parent (pad);
+ marudec = (GstEmulDec *) gst_pad_get_parent (pad);
switch (GST_EVENT_TYPE (event)) {
/* Quality Of Service (QOS) event contains a report
gst_event_parse_qos (event, &proportion, &diff, ×tamp);
/* update our QoS values */
- gst_emuldec_update_qos (emuldec, proportion, timestamp + diff);
+ gst_marudec_update_qos (marudec, proportion, timestamp + diff);
break;
}
default:
}
/* forward upstream */
- res = gst_pad_push_event (emuldec->sinkpad, event);
+ res = gst_pad_push_event (marudec->sinkpad, event);
- gst_object_unref (emuldec);
+ gst_object_unref (marudec);
return res;
}
static gboolean
-gst_emuldec_sink_event (GstPad *pad, GstEvent *event)
+gst_marudec_sink_event (GstPad *pad, GstEvent *event)
{
- GstEmulDec *emuldec;
+ GstEmulDec *marudec;
gboolean ret = FALSE;
- emuldec = (GstEmulDec *) gst_pad_get_parent (pad);
+ marudec = (GstEmulDec *) gst_pad_get_parent (pad);
- GST_DEBUG_OBJECT (emuldec, "Handling %s event",
+ GST_DEBUG_OBJECT (marudec, "Handling %s event",
GST_EVENT_TYPE_NAME (event));
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_EOS:
- gst_emuldec_drain (emuldec);
+ gst_marudec_drain (marudec);
break;
case GST_EVENT_FLUSH_STOP:
{
- printf("[%s][%d] GST_EVET_FLUSH_STOP\n", __func__, __LINE__);
#if 0
- if (emuldec->opened) {
+ if (marudec->opened) {
// TODO: what does avcodec_flush_buffers do?
- emul_avcodec_flush_buffers (emuldec->context, emuldec->dev);
+ maru_avcodec_flush_buffers (marudec->context, marudec->dev);
}
#endif
- gst_emuldec_reset_ts (emuldec);
- gst_emuldec_reset_qos (emuldec);
+ gst_marudec_reset_ts (marudec);
+ gst_marudec_reset_qos (marudec);
#if 0
- gst_emuldec_flush_pcache (emuldec);
- emuldec->waiting_for_key = TRUE;
+ gst_marudec_flush_pcache (marudec);
+ marudec->waiting_for_key = TRUE;
#endif
- gst_segment_init (&emuldec->segment, GST_FORMAT_TIME);
- clear_queued (emuldec);
+ gst_segment_init (&marudec->segment, GST_FORMAT_TIME);
+ clear_queued (marudec);
}
break;
case GST_EVENT_NEWSEGMENT:
case GST_FORMAT_BYTES:
{
gint bit_rate;
- bit_rate = emuldec->context->bit_rate;
+ bit_rate = marudec->context->bit_rate;
if (!bit_rate) {
- GST_WARNING_OBJECT (emuldec, "no bitrate to convert BYTES to TIME");
+ GST_WARNING_OBJECT (marudec, "no bitrate to convert BYTES to TIME");
gst_event_unref (event);
- gst_object_unref (emuldec);
+ gst_object_unref (marudec);
return ret;
}
- GST_DEBUG_OBJECT (emuldec, "bitrate: %d", bit_rate);
+ GST_DEBUG_OBJECT (marudec, "bitrate: %d", bit_rate);
if (start != -1) {
start = gst_util_uint64_scale_int (start, GST_SECOND, bit_rate);
break;
}
default:
- GST_WARNING_OBJECT (emuldec, "unknown format received in NEWSEGMENT");
+ GST_WARNING_OBJECT (marudec, "unknown format received in NEWSEGMENT");
gst_event_unref (event);
- gst_object_unref (emuldec);
+ gst_object_unref (marudec);
return ret;
}
- if (emuldec->context->codec) {
- gst_emuldec_drain (emuldec);
+ if (marudec->context->codec) {
+ gst_marudec_drain (marudec);
}
- GST_DEBUG_OBJECT (emuldec,
+ GST_DEBUG_OBJECT (marudec,
"NEWSEGMENT in time start %" GST_TIME_FORMAT " -- stop %"
GST_TIME_FORMAT, GST_TIME_ARGS (start), GST_TIME_ARGS (stop));
- gst_segment_set_newsegment_full (&emuldec->segment, update,
+ gst_segment_set_newsegment_full (&marudec->segment, update,
rate, arate, format, start, stop, time);
break;
}
break;
}
- ret = gst_pad_push_event (emuldec->srcpad, event);
+ ret = gst_pad_push_event (marudec->srcpad, event);
- gst_object_unref (emuldec);
+ gst_object_unref (marudec);
return ret;
}
static gboolean
-gst_emuldec_setcaps (GstPad *pad, GstCaps *caps)
+gst_marudec_setcaps (GstPad *pad, GstCaps *caps)
{
- GstEmulDec *emuldec;
+ GstEmulDec *marudec;
GstEmulDecClass *oclass;
GstStructure *structure;
const GValue *par;
GST_DEBUG_OBJECT (pad, "setcaps called.");
- emuldec = (GstEmulDec *) (gst_pad_get_parent (pad));
- oclass = (GstEmulDecClass *) (G_OBJECT_GET_CLASS (emuldec));
+ marudec = (GstEmulDec *) (gst_pad_get_parent (pad));
+ oclass = (GstEmulDecClass *) (G_OBJECT_GET_CLASS (marudec));
- GST_OBJECT_LOCK (emuldec);
+ GST_OBJECT_LOCK (marudec);
- if (emuldec->opened) {
- GST_OBJECT_UNLOCK (emuldec);
- gst_emuldec_drain (emuldec);
- GST_OBJECT_LOCK (emuldec);
- gst_emuldec_close (emuldec);
+ if (marudec->opened) {
+ GST_OBJECT_UNLOCK (marudec);
+ gst_marudec_drain (marudec);
+ GST_OBJECT_LOCK (marudec);
+ gst_marudec_close (marudec);
}
- GST_LOG_OBJECT (emuldec, "size %dx%d", emuldec->context->video.width,
- emuldec->context->video.height);
+ GST_LOG_OBJECT (marudec, "size %dx%d", marudec->context->video.width,
+ marudec->context->video.height);
- gst_emul_caps_with_codecname (oclass->codec->name, oclass->codec->media_type,
- caps, emuldec->context);
+ if (!strcmp(oclass->codec->name, "wmv3") ||
+ !strcmp(oclass->codec->name, "vc1")) {
+ gst_maru_caps_to_codecname (caps, oclass->codec->name, NULL);
+ }
+
+ gst_maru_caps_with_codecname (oclass->codec->name, oclass->codec->media_type,
+ caps, marudec->context);
- GST_LOG_OBJECT (emuldec, "size after %dx%d", emuldec->context->video.width,
- emuldec->context->video.height);
+ GST_LOG_OBJECT (marudec, "size after %dx%d", marudec->context->video.width,
+ marudec->context->video.height);
- if (!emuldec->context->video.fps_d || !emuldec->context->video.fps_n) {
- GST_DEBUG_OBJECT (emuldec, "forcing 25/1 framerate");
- emuldec->context->video.fps_n = 1;
- emuldec->context->video.fps_d = 25;
+ if (!marudec->context->video.fps_d || !marudec->context->video.fps_n) {
+ GST_DEBUG_OBJECT (marudec, "forcing 25/1 framerate");
+ marudec->context->video.fps_n = 1;
+ marudec->context->video.fps_d = 25;
}
structure = gst_caps_get_structure (caps, 0);
par = gst_structure_get_value (structure, "pixel-aspect-ratio");
if (par) {
- GST_DEBUG_OBJECT (emuldec, "sink caps have pixel-aspect-ratio of %d:%d",
+ GST_DEBUG_OBJECT (marudec, "sink caps have pixel-aspect-ratio of %d:%d",
gst_value_get_fraction_numerator (par),
gst_value_get_fraction_denominator (par));
#if 0 // TODO
- if (emuldec->par) {
- g_free(emuldec->par);
+ if (marudec->par) {
+ g_free(marudec->par);
}
- emuldec->par = g_new0 (GValue, 1);
- gst_value_init_and_copy (emuldec->par, par);
+ marudec->par = g_new0 (GValue, 1);
+ gst_value_init_and_copy (marudec->par, par);
#endif
}
fps = gst_structure_get_value (structure, "framerate");
if (fps != NULL && GST_VALUE_HOLDS_FRACTION (fps)) {
- emuldec->format.video.fps_n = gst_value_get_fraction_numerator (fps);
- emuldec->format.video.fps_d = gst_value_get_fraction_denominator (fps);
- GST_DEBUG_OBJECT (emuldec, "Using framerate %d/%d from incoming",
- emuldec->format.video.fps_n, emuldec->format.video.fps_d);
+ marudec->format.video.fps_n = gst_value_get_fraction_numerator (fps);
+ marudec->format.video.fps_d = gst_value_get_fraction_denominator (fps);
+ GST_DEBUG_OBJECT (marudec, "Using framerate %d/%d from incoming",
+ marudec->format.video.fps_n, marudec->format.video.fps_d);
} else {
- emuldec->format.video.fps_n = -1;
- GST_DEBUG_OBJECT (emuldec, "Using framerate from codec");
+ marudec->format.video.fps_n = -1;
+ GST_DEBUG_OBJECT (marudec, "Using framerate from codec");
}
#if 0
if (strcmp (oclass->codec->name, "aac") == 0) {
const gchar *format = gst_structure_get_string (structure, "stream-format");
if (format == NULL || strcmp ("format", "raw") == 0) {
- emuldec->turnoff_parser = TRUE;
+ marudec->turnoff_parser = TRUE;
}
}
#endif
- if (!gst_emuldec_open (emuldec)) {
- GST_DEBUG_OBJECT (emuldec, "Failed to open");
+ if (!gst_marudec_open (marudec)) {
+ GST_DEBUG_OBJECT (marudec, "Failed to open");
#if 0
- if (emuldec->par) {
- g_free(emuldec->par);
- emuldec->par = NULL;
+ if (marudec->par) {
+ g_free(marudec->par);
+ marudec->par = NULL;
}
#endif
- GST_OBJECT_UNLOCK (emuldec);
- gst_object_unref (emuldec);
+ GST_OBJECT_UNLOCK (marudec);
+ gst_object_unref (marudec);
return FALSE;
}
gst_structure_get_int (structure, "width",
- &emuldec->format.video.clip_width);
+ &marudec->format.video.clip_width);
gst_structure_get_int (structure, "height",
- &emuldec->format.video.clip_height);
+ &marudec->format.video.clip_height);
GST_DEBUG_OBJECT (pad, "clipping to %dx%d",
- emuldec->format.video.clip_width, emuldec->format.video.clip_height);
+ marudec->format.video.clip_width, marudec->format.video.clip_height);
- GST_OBJECT_UNLOCK (emuldec);
- gst_object_unref (emuldec);
+ GST_OBJECT_UNLOCK (marudec);
+ gst_object_unref (marudec);
return ret;
}
static gboolean
-gst_emuldec_open (GstEmulDec *emuldec)
+gst_marudec_open (GstEmulDec *marudec)
{
GstEmulDecClass *oclass;
- oclass = (GstEmulDecClass *) (G_OBJECT_GET_CLASS (emuldec));
+ oclass = (GstEmulDecClass *) (G_OBJECT_GET_CLASS (marudec));
- if (!emuldec->dev) {
+ if (!marudec->dev) {
return FALSE;
}
- if (gst_emul_avcodec_open (emuldec->context,
- oclass->codec, emuldec->dev) < 0) {
- gst_emuldec_close (emuldec);
- GST_ERROR_OBJECT (emuldec,
+ if (gst_maru_avcodec_open (marudec->context,
+ oclass->codec, marudec->dev) < 0) {
+ gst_marudec_close (marudec);
+ GST_ERROR_OBJECT (marudec,
"maru_%sdec: Failed to open codec", oclass->codec->name);
return FALSE;
}
- emuldec->opened = TRUE;
- GST_LOG_OBJECT (emuldec, "Opened codec %s", oclass->codec->name);
+ marudec->opened = TRUE;
+ GST_LOG_OBJECT (marudec, "Opened codec %s", oclass->codec->name);
switch (oclass->codec->media_type) {
case AVMEDIA_TYPE_VIDEO:
- emuldec->format.video.width = 0;
- emuldec->format.video.height = 0;
- emuldec->format.video.clip_width = -1;
- emuldec->format.video.clip_height = -1;
- emuldec->format.video.pix_fmt = PIX_FMT_NB;
- emuldec->format.video.interlaced = FALSE;
+ marudec->format.video.width = 0;
+ marudec->format.video.height = 0;
+ marudec->format.video.clip_width = -1;
+ marudec->format.video.clip_height = -1;
+ marudec->format.video.pix_fmt = PIX_FMT_NB;
+ marudec->format.video.interlaced = FALSE;
break;
case AVMEDIA_TYPE_AUDIO:
- emuldec->format.audio.samplerate = 0;
- emuldec->format.audio.channels = 0;
- emuldec->format.audio.depth = 0;
+ marudec->format.audio.samplerate = 0;
+ marudec->format.audio.channels = 0;
+ marudec->format.audio.depth = 0;
break;
default:
break;
}
- gst_emuldec_reset_ts (emuldec);
+ gst_marudec_reset_ts (marudec);
- emuldec->proportion = 0.0;
- emuldec->earliest_time = -1;
+ marudec->proportion = 0.0;
+ marudec->earliest_time = -1;
return TRUE;
}
static int
-gst_emuldec_close (GstEmulDec *emuldec)
+gst_marudec_close (GstEmulDec *marudec)
{
int ret = 0;
- if (emuldec->context->codecdata) {
- g_free(emuldec->context->codecdata);
- emuldec->context->codecdata = NULL;
+ if (marudec->context->codecdata) {
+ g_free(marudec->context->codecdata);
+ marudec->context->codecdata = NULL;
}
- if (!emuldec->dev) {
+ if (!marudec->dev) {
return -1;
}
- ret = gst_emul_avcodec_close (emuldec->context, emuldec->dev);
+ ret = gst_maru_avcodec_close (marudec->context, marudec->dev);
- if (emuldec->dev) {
- g_free(emuldec->dev);
- emuldec->dev = NULL;
+ if (marudec->dev) {
+ g_free(marudec->dev);
+ marudec->dev = NULL;
}
return ret;
static gboolean
-gst_emuldec_negotiate (GstEmulDec *emuldec, gboolean force)
+gst_marudec_negotiate (GstEmulDec *marudec, gboolean force)
{
GstEmulDecClass *oclass;
GstCaps *caps;
- oclass = (GstEmulDecClass *) (G_OBJECT_GET_CLASS (emuldec));
+ oclass = (GstEmulDecClass *) (G_OBJECT_GET_CLASS (marudec));
switch (oclass->codec->media_type) {
case AVMEDIA_TYPE_VIDEO:
- if (!force && emuldec->format.video.width == emuldec->context->video.width
- && emuldec->format.video.height == emuldec->context->video.height
- && emuldec->format.video.fps_n == emuldec->format.video.old_fps_n
- && emuldec->format.video.fps_d == emuldec->format.video.old_fps_d
- && emuldec->format.video.pix_fmt == emuldec->context->video.pix_fmt
- && emuldec->format.video.par_n == emuldec->context->video.par_n
- && emuldec->format.video.par_d == emuldec->context->video.par_d) {
+ if (!force && marudec->format.video.width == marudec->context->video.width
+ && marudec->format.video.height == marudec->context->video.height
+ && marudec->format.video.fps_n == marudec->format.video.old_fps_n
+ && marudec->format.video.fps_d == marudec->format.video.old_fps_d
+ && marudec->format.video.pix_fmt == marudec->context->video.pix_fmt
+ && marudec->format.video.par_n == marudec->context->video.par_n
+ && marudec->format.video.par_d == marudec->context->video.par_d) {
return TRUE;
}
- emuldec->format.video.width = emuldec->context->video.width;
- emuldec->format.video.height = emuldec->context->video.height;
- emuldec->format.video.old_fps_n = emuldec->format.video.fps_n;
- emuldec->format.video.old_fps_d = emuldec->format.video.fps_d;
- emuldec->format.video.pix_fmt = emuldec->context->video.pix_fmt;
- emuldec->format.video.par_n = emuldec->context->video.par_n;
- emuldec->format.video.par_d = emuldec->context->video.par_d;
+ marudec->format.video.width = marudec->context->video.width;
+ marudec->format.video.height = marudec->context->video.height;
+ marudec->format.video.old_fps_n = marudec->format.video.fps_n;
+ marudec->format.video.old_fps_d = marudec->format.video.fps_d;
+ marudec->format.video.pix_fmt = marudec->context->video.pix_fmt;
+ marudec->format.video.par_n = marudec->context->video.par_n;
+ marudec->format.video.par_d = marudec->context->video.par_d;
break;
case AVMEDIA_TYPE_AUDIO:
{
- gint depth = gst_emul_smpfmt_depth (emuldec->context->audio.sample_fmt);
- if (!force && emuldec->format.audio.samplerate ==
- emuldec->context->audio.sample_rate &&
- emuldec->format.audio.channels == emuldec->context->audio.channels &&
- emuldec->format.audio.depth == depth) {
+ gint depth = gst_maru_smpfmt_depth (marudec->context->audio.sample_fmt);
+ if (!force && marudec->format.audio.samplerate ==
+ marudec->context->audio.sample_rate &&
+ marudec->format.audio.channels == marudec->context->audio.channels &&
+ marudec->format.audio.depth == depth) {
return TRUE;
}
- emuldec->format.audio.samplerate = emuldec->context->audio.sample_rate;
- emuldec->format.audio.channels = emuldec->context->audio.channels;
- emuldec->format.audio.depth = depth;
+ marudec->format.audio.samplerate = marudec->context->audio.sample_rate;
+ marudec->format.audio.channels = marudec->context->audio.channels;
+ marudec->format.audio.depth = depth;
}
break;
default:
}
caps =
- gst_emul_codectype_to_caps (oclass->codec->media_type, emuldec->context,
+ gst_maru_codectype_to_caps (oclass->codec->media_type, marudec->context,
oclass->codec->name, FALSE);
if (caps == NULL) {
- GST_ELEMENT_ERROR (emuldec, CORE, NEGOTIATION,
+ GST_ELEMENT_ERROR (marudec, CORE, NEGOTIATION,
("Could not find GStreamer caps mapping for codec '%s'.",
oclass->codec->name), (NULL));
return FALSE;
gint width, height;
gboolean interlaced;
- width = emuldec->format.video.clip_width;
- height = emuldec->format.video.clip_height;
- interlaced = emuldec->format.video.interlaced;
+ width = marudec->format.video.clip_width;
+ height = marudec->format.video.clip_height;
+ interlaced = marudec->format.video.interlaced;
if (width != -1 && height != -1) {
- if (width < emuldec->context->video.width) {
+ if (width < marudec->context->video.width) {
gst_caps_set_simple (caps, "width", G_TYPE_INT, width, NULL);
}
- if (height < emuldec->context->video.height) {
+ if (height < marudec->context->video.height) {
gst_caps_set_simple (caps, "height", G_TYPE_INT, height, NULL);
}
gst_caps_set_simple (caps, "interlaced", G_TYPE_BOOLEAN, interlaced,
NULL);
- if (emuldec->format.video.fps_n != -1) {
+ if (marudec->format.video.fps_n != -1) {
gst_caps_set_simple (caps, "framerate",
- GST_TYPE_FRACTION, emuldec->format.video.fps_n,
- emuldec->format.video.fps_d, NULL);
+ GST_TYPE_FRACTION, marudec->format.video.fps_n,
+ marudec->format.video.fps_d, NULL);
}
#if 0
- gst_emuldec_add_pixel_aspect_ratio (emuldec,
+ gst_marudec_add_pixel_aspect_ratio (marudec,
gst_caps_get_structure (caps, 0));
#endif
}
break;
}
- if (!gst_pad_set_caps (emuldec->srcpad, caps)) {
- GST_ELEMENT_ERROR (emuldec, CORE, NEGOTIATION, (NULL),
+ if (!gst_pad_set_caps (marudec->srcpad, caps)) {
+ GST_ELEMENT_ERROR (marudec, CORE, NEGOTIATION, (NULL),
("Could not set caps for decoder (%s), not fixed?",
oclass->codec->name));
gst_caps_unref (caps);
}
static GstFlowReturn
-get_output_buffer (GstEmulDec *emuldec, GstBuffer **outbuf)
+get_output_buffer (GstEmulDec *marudec, GstBuffer **outbuf)
{
gint pict_size;
GstFlowReturn ret;
*outbuf = NULL;
- if (G_UNLIKELY (!gst_emuldec_negotiate (emuldec, FALSE))) {
- GST_DEBUG_OBJECT (emuldec, "negotiate failed");
+ if (G_UNLIKELY (!gst_marudec_negotiate (marudec, FALSE))) {
+ GST_DEBUG_OBJECT (marudec, "negotiate failed");
return GST_FLOW_NOT_NEGOTIATED;
}
- pict_size = gst_emul_avpicture_size (emuldec->context->video.pix_fmt,
- emuldec->context->video.width, emuldec->context->video.height);
+ pict_size = gst_maru_avpicture_size (marudec->context->video.pix_fmt,
+ marudec->context->video.width, marudec->context->video.height);
if (pict_size < 0) {
- GST_DEBUG_OBJECT (emuldec, "size of a picture is negative. "
+ GST_DEBUG_OBJECT (marudec, "size of a picture is negative. "
"pixel format: %d, width: %d, height: %d",
- emuldec->context->video.pix_fmt, emuldec->context->video.width,
- emuldec->context->video.height);
+ marudec->context->video.pix_fmt, marudec->context->video.width,
+ marudec->context->video.height);
return GST_FLOW_ERROR;
}
* provide a hardware buffer in order to avoid additional memcpy operations.
*/
gst_pad_set_bufferalloc_function(
- GST_PAD_PEER(emuldec->srcpad),
+ GST_PAD_PEER(marudec->srcpad),
(GstPadBufferAllocFunction) codec_buffer_alloc);
} else {
CODEC_LOG (DEBUG, "request a large size of memory\n");
}
- ret = gst_pad_alloc_buffer_and_set_caps (emuldec->srcpad,
+ ret = gst_pad_alloc_buffer_and_set_caps (marudec->srcpad,
GST_BUFFER_OFFSET_NONE, pict_size,
- GST_PAD_CAPS (emuldec->srcpad), outbuf);
+ GST_PAD_CAPS (marudec->srcpad), outbuf);
if (G_UNLIKELY (ret != GST_FLOW_OK)) {
- GST_DEBUG_OBJECT (emuldec, "pad_alloc failed %d (%s)", ret,
+ GST_DEBUG_OBJECT (marudec, "pad_alloc failed %d (%s)", ret,
gst_flow_get_name (ret));
return ret;
}
if ((uintptr_t) GST_BUFFER_DATA (*outbuf) % 16) {
- GST_DEBUG_OBJECT (emuldec,
+ GST_DEBUG_OBJECT (marudec,
"Downstream can't allocate aligned buffers.");
gst_buffer_unref (*outbuf);
- *outbuf = new_aligned_buffer (pict_size, GST_PAD_CAPS (emuldec->srcpad));
+ *outbuf = new_aligned_buffer (pict_size, GST_PAD_CAPS (marudec->srcpad));
}
- codec_picture_copy (emuldec->context, GST_BUFFER_DATA (*outbuf),
- GST_BUFFER_SIZE (*outbuf), emuldec->dev);
+ codec_picture_copy (marudec->context, GST_BUFFER_DATA (*outbuf),
+ GST_BUFFER_SIZE (*outbuf), marudec->dev);
return ret;
}
}
static gint
-gst_emuldec_video_frame (GstEmulDec *emuldec, guint8 *data, guint size,
+gst_marudec_video_frame (GstEmulDec *marudec, guint8 *data, guint size,
const GstTSInfo *dec_info, gint64 in_offset, GstBuffer **outbuf,
GstFlowReturn *ret)
{
gint64 out_offset;
const GstTSInfo *out_info;
- decode = gst_emuldec_do_qos (emuldec, dec_info->timestamp, &mode_switch);
+ decode = gst_marudec_do_qos (marudec, dec_info->timestamp, &mode_switch);
CODEC_LOG (DEBUG, "decode video: input buffer size: %d\n", size);
len =
- codec_decode_video (emuldec->context, data, size,
+ codec_decode_video (marudec->context, data, size,
dec_info->idx, in_offset, outbuf,
- &have_data, emuldec->dev);
+ &have_data, marudec->dev);
if (!decode) {
// skip_frame
}
- GST_DEBUG_OBJECT (emuldec, "after decode: len %d, have_data %d",
+ GST_DEBUG_OBJECT (marudec, "after decode: len %d, have_data %d",
len, have_data);
#if 0
- if (len < 0 && (mode_switch || emuldec->context->skip_frame)) {
+ if (len < 0 && (mode_switch || marudec->context->skip_frame)) {
len = 0;
}
if (len > 0 && have_data <= 0 && (mode_switch
- || emuldec->context->skip_frame)) {
- emuldec->last_out = -1;
+ || marudec->context->skip_frame)) {
+ marudec->last_out = -1;
}
#endif
if (len < 0 || have_data <= 0) {
- GST_DEBUG_OBJECT (emuldec, "return flow %d, out %p, len %d",
+ GST_DEBUG_OBJECT (marudec, "return flow %d, out %p, len %d",
*ret, *outbuf, len);
return len;
}
- out_info = gst_ts_info_get (emuldec, dec_info->idx);
+ out_info = gst_ts_info_get (marudec, dec_info->idx);
out_pts = out_info->timestamp;
out_duration = out_info->duration;
out_offset = out_info->offset;
- *ret = get_output_buffer (emuldec, outbuf);
+ *ret = get_output_buffer (marudec, outbuf);
if (G_UNLIKELY (*ret != GST_FLOW_OK)) {
- GST_DEBUG_OBJECT (emuldec, "no output buffer");
+ GST_DEBUG_OBJECT (marudec, "no output buffer");
len = -1;
- GST_DEBUG_OBJECT (emuldec, "return flow %d, out %p, len %d",
+ GST_DEBUG_OBJECT (marudec, "return flow %d, out %p, len %d",
*ret, *outbuf, len);
return len;
}
out_timestamp = -1;
if (out_pts != -1) {
out_timestamp = (GstClockTime) out_pts;
- GST_LOG_OBJECT (emuldec, "using timestamp %" GST_TIME_FORMAT
+ GST_LOG_OBJECT (marudec, "using timestamp %" GST_TIME_FORMAT
" returned by ffmpeg", GST_TIME_ARGS (out_timestamp));
}
- if (!GST_CLOCK_TIME_IS_VALID (out_timestamp) && emuldec->next_out != -1) {
- out_timestamp = emuldec->next_out;
- GST_LOG_OBJECT (emuldec, "using next timestamp %" GST_TIME_FORMAT,
+ if (!GST_CLOCK_TIME_IS_VALID (out_timestamp) && marudec->next_out != -1) {
+ out_timestamp = marudec->next_out;
+ GST_LOG_OBJECT (marudec, "using next timestamp %" GST_TIME_FORMAT,
GST_TIME_ARGS (out_timestamp));
}
if (!GST_CLOCK_TIME_IS_VALID (out_timestamp)) {
out_timestamp = dec_info->timestamp;
- GST_LOG_OBJECT (emuldec, "using in timestamp %" GST_TIME_FORMAT,
+ GST_LOG_OBJECT (marudec, "using in timestamp %" GST_TIME_FORMAT,
GST_TIME_ARGS (out_timestamp));
}
GST_BUFFER_TIMESTAMP (*outbuf) = out_timestamp;
/* Offset */
if (out_offset != GST_BUFFER_OFFSET_NONE) {
- GST_LOG_OBJECT (emuldec, "Using offset returned by ffmpeg");
+ GST_LOG_OBJECT (marudec, "Using offset returned by ffmpeg");
} else if (out_timestamp != GST_CLOCK_TIME_NONE) {
GstFormat out_fmt = GST_FORMAT_DEFAULT;
- GST_LOG_OBJECT (emuldec, "Using offset converted from timestamp");
+ GST_LOG_OBJECT (marudec, "Using offset converted from timestamp");
- gst_pad_query_peer_convert (emuldec->sinkpad,
+ gst_pad_query_peer_convert (marudec->sinkpad,
GST_FORMAT_TIME, out_timestamp, &out_fmt, &out_offset);
} else if (dec_info->offset != GST_BUFFER_OFFSET_NONE) {
- GST_LOG_OBJECT (emuldec, "using in_offset %" G_GINT64_FORMAT,
+ GST_LOG_OBJECT (marudec, "using in_offset %" G_GINT64_FORMAT,
dec_info->offset);
out_offset = dec_info->offset;
} else {
- GST_LOG_OBJECT (emuldec, "no valid offset found");
+ GST_LOG_OBJECT (marudec, "no valid offset found");
out_offset = GST_BUFFER_OFFSET_NONE;
}
GST_BUFFER_OFFSET (*outbuf) = out_offset;
/* Duration */
if (GST_CLOCK_TIME_IS_VALID (out_duration)) {
- GST_LOG_OBJECT (emuldec, "Using duration returned by ffmpeg");
+ GST_LOG_OBJECT (marudec, "Using duration returned by ffmpeg");
} else if (GST_CLOCK_TIME_IS_VALID (dec_info->duration)) {
- GST_LOG_OBJECT (emuldec, "Using in_duration");
+ GST_LOG_OBJECT (marudec, "Using in_duration");
out_duration = dec_info->duration;
#if 0
- } else if (GST_CLOCK_TIME_IS_VALID (emuldec->last_diff)) {
- GST_LOG_OBJECT (emuldec, "Using last-diff");
- out_duration = emuldec->last_diff;
+ } else if (GST_CLOCK_TIME_IS_VALID (marudec->last_diff)) {
+ GST_LOG_OBJECT (marudec, "Using last-diff");
+ out_duration = marudec->last_diff;
#endif
} else {
- if (emuldec->format.video.fps_n != -1 &&
- (emuldec->format.video.fps_n != 1000 &&
- emuldec->format.video.fps_d != 1)) {
- GST_LOG_OBJECT (emuldec, "using input framerate for duration");
+ if (marudec->format.video.fps_n != -1 &&
+ (marudec->format.video.fps_n != 1000 &&
+ marudec->format.video.fps_d != 1)) {
+ GST_LOG_OBJECT (marudec, "using input framerate for duration");
out_duration = gst_util_uint64_scale_int (GST_SECOND,
- emuldec->format.video.fps_d, emuldec->format.video.fps_n);
+ marudec->format.video.fps_d, marudec->format.video.fps_n);
} else {
- if (emuldec->context->video.fps_n != 0 &&
- (emuldec->context->video.fps_d > 0 &&
- emuldec->context->video.fps_d < 1000)) {
- GST_LOG_OBJECT (emuldec, "using decoder's framerate for duration");
+ if (marudec->context->video.fps_n != 0 &&
+ (marudec->context->video.fps_d > 0 &&
+ marudec->context->video.fps_d < 1000)) {
+ GST_LOG_OBJECT (marudec, "using decoder's framerate for duration");
out_duration = gst_util_uint64_scale_int (GST_SECOND,
- emuldec->context->video.fps_n * 1,
- emuldec->context->video.fps_d);
+ marudec->context->video.fps_n * 1,
+ marudec->context->video.fps_d);
} else {
- GST_LOG_OBJECT (emuldec, "no valid duration found");
+ GST_LOG_OBJECT (marudec, "no valid duration found");
}
}
}
#if 0
if (GST_CLOCK_TIME_IS_VALID (out_duration)) {
- out_duration += out_duration * emuldec->picture->repeat_pict / 2;
+ out_duration += out_duration * marudec->picture->repeat_pict / 2;
}
GST_BUFFER_DURATION (*outbuf) = out_duration;
if (out_timestamp != -1 && out_duration != -1 && out_duration != 0) {
- emuldec->next_out = out_timestamp + out_duration;
+ marudec->next_out = out_timestamp + out_duration;
} else {
- emuldec->next_out = -1;
+ marudec->next_out = -1;
}
#endif
- if (G_UNLIKELY (!clip_video_buffer (emuldec, *outbuf, out_timestamp,
+ if (G_UNLIKELY (!clip_video_buffer (marudec, *outbuf, out_timestamp,
out_duration))) {
- GST_DEBUG_OBJECT (emuldec, "buffer clipped");
+ GST_DEBUG_OBJECT (marudec, "buffer clipped");
gst_buffer_unref (*outbuf);
*outbuf = NULL;
- GST_DEBUG_OBJECT (emuldec, "return flow %d, out %p, len %d",
+ GST_DEBUG_OBJECT (marudec, "return flow %d, out %p, len %d",
*ret, *outbuf, len);
return len;
}
- GST_DEBUG_OBJECT (emuldec, "return flow %d, out %p, len %d",
+ GST_DEBUG_OBJECT (marudec, "return flow %d, out %p, len %d",
*ret, *outbuf, len);
return len;
}
static gint
-gst_emuldec_audio_frame (GstEmulDec *emuldec, CodecElement *codec,
+gst_marudec_audio_frame (GstEmulDec *marudec, CodecElement *codec,
guint8 *data, guint size,
const GstTSInfo *dec_info, GstBuffer **outbuf,
GstFlowReturn *ret)
*outbuf =
new_aligned_buffer (FF_MAX_AUDIO_FRAME_SIZE,
- GST_PAD_CAPS (emuldec->srcpad));
+ GST_PAD_CAPS (marudec->srcpad));
CODEC_LOG (DEBUG, "decode audio, input buffer size: %d\n", size);
- len = codec_decode_audio (emuldec->context,
+ len = codec_decode_audio (marudec->context,
(int16_t *) GST_BUFFER_DATA (*outbuf), &have_data,
- data, size, emuldec->dev);
+ data, size, marudec->dev);
- GST_DEBUG_OBJECT (emuldec,
+ GST_DEBUG_OBJECT (marudec,
"Decode audio: len=%d, have_data=%d", len, have_data);
-// CODEC_LOG (INFO, "decode audio, sample_fmt: %d\n", emuldec->context->audio.sample_fmt);
-
if (len >= 0 && have_data > 0) {
- GST_DEBUG_OBJECT (emuldec, "Creating output buffer");
- if (!gst_emuldec_negotiate (emuldec, FALSE)) {
+ GST_DEBUG_OBJECT (marudec, "Creating output buffer");
+ if (!gst_marudec_negotiate (marudec, FALSE)) {
gst_buffer_unref (*outbuf);
*outbuf = NULL;
len = -1;
- GST_DEBUG_OBJECT (emuldec, "return flow %d, out %p, len %d",
+ GST_DEBUG_OBJECT (marudec, "return flow %d, out %p, len %d",
*ret, *outbuf, len);
return len;
}
if (GST_CLOCK_TIME_IS_VALID (dec_info->timestamp)) {
out_timestamp = dec_info->timestamp;
} else {
- out_timestamp = emuldec->next_out;
+ out_timestamp = marudec->next_out;
}
/* calculate based on number of samples */
out_duration = gst_util_uint64_scale (have_data, GST_SECOND,
- emuldec->format.audio.depth * emuldec->format.audio.channels *
- emuldec->format.audio.samplerate);
+ marudec->format.audio.depth * marudec->format.audio.channels *
+ marudec->format.audio.samplerate);
out_offset = dec_info->offset;
- GST_DEBUG_OBJECT (emuldec,
+ GST_DEBUG_OBJECT (marudec,
"Buffer created. Size: %d, timestamp: %" GST_TIME_FORMAT
", duration: %" GST_TIME_FORMAT, have_data,
GST_TIME_ARGS (out_timestamp), GST_TIME_ARGS (out_duration));
GST_BUFFER_TIMESTAMP (*outbuf) = out_timestamp;
GST_BUFFER_DURATION (*outbuf) = out_duration;
GST_BUFFER_OFFSET (*outbuf) = out_offset;
- gst_buffer_set_caps (*outbuf, GST_PAD_CAPS (emuldec->srcpad));
+ gst_buffer_set_caps (*outbuf, GST_PAD_CAPS (marudec->srcpad));
if (GST_CLOCK_TIME_IS_VALID (out_timestamp)) {
- emuldec->next_out = out_timestamp + out_duration;
+ marudec->next_out = out_timestamp + out_duration;
}
- if (G_UNLIKELY (!clip_audio_buffer (emuldec, *outbuf,
+ if (G_UNLIKELY (!clip_audio_buffer (marudec, *outbuf,
out_timestamp, out_duration))) {
- GST_DEBUG_OBJECT (emuldec, "buffer_clipped");
+ GST_DEBUG_OBJECT (marudec, "buffer_clipped");
gst_buffer_unref (*outbuf);
*outbuf = NULL;
- GST_DEBUG_OBJECT (emuldec, "return flow %d, out %p, len %d", *ret, *outbuf, len);
+ GST_DEBUG_OBJECT (marudec, "return flow %d, out %p, len %d", *ret, *outbuf, len);
return len;
}
} else {
}
if (len == -1 && !strcmp(codec->name, "aac")) {
- GST_ELEMENT_ERROR (emuldec, STREAM, DECODE, (NULL),
+ GST_ELEMENT_ERROR (marudec, STREAM, DECODE, (NULL),
("Decoding of AAC stream by FFMPEG failed."));
*ret = GST_FLOW_ERROR;
}
- GST_DEBUG_OBJECT (emuldec, "return flow %d, out %p, len %d",
+ GST_DEBUG_OBJECT (marudec, "return flow %d, out %p, len %d",
*ret, *outbuf, len);
return len;
}
static gint
-gst_emuldec_frame (GstEmulDec *emuldec, guint8 *data, guint size,
+gst_marudec_frame (GstEmulDec *marudec, guint8 *data, guint size,
gint *got_data, const GstTSInfo *dec_info, gint64 in_offset, GstFlowReturn *ret)
{
GstEmulDecClass *oclass;
GstBuffer *outbuf = NULL;
gint have_data = 0, len = 0;
- if (G_UNLIKELY (emuldec->context->codec == NULL)) {
- GST_ERROR_OBJECT (emuldec, "no codec context");
+ if (G_UNLIKELY (marudec->context->codec == NULL)) {
+ GST_ERROR_OBJECT (marudec, "no codec context");
return -1;
}
*ret = GST_FLOW_OK;
- oclass = (GstEmulDecClass *) (G_OBJECT_GET_CLASS (emuldec));
+ oclass = (GstEmulDecClass *) (G_OBJECT_GET_CLASS (marudec));
switch (oclass->codec->media_type) {
case AVMEDIA_TYPE_VIDEO:
- len = gst_emuldec_video_frame (emuldec, data, size,
+ len = gst_marudec_video_frame (marudec, data, size,
dec_info, in_offset, &outbuf, ret);
break;
case AVMEDIA_TYPE_AUDIO:
- len = gst_emuldec_audio_frame (emuldec, oclass->codec, data, size,
+ len = gst_marudec_audio_frame (marudec, oclass->codec, data, size,
dec_info, &outbuf, ret);
- if (outbuf == NULL && emuldec->discont) {
- GST_DEBUG_OBJECT (emuldec, "no buffer but keeping timestamp");
-// emuldec->clear_ts = FALSE;
+ if (outbuf == NULL && marudec->discont) {
+ GST_DEBUG_OBJECT (marudec, "no buffer but keeping timestamp");
+// marudec->clear_ts = FALSE;
}
break;
default:
- GST_ERROR_OBJECT (emuldec, "Asked to decode non-audio/video frame!");
+ GST_ERROR_OBJECT (marudec, "Asked to decode non-audio/video frame!");
g_assert_not_reached ();
break;
}
}
if (len < 0 || have_data < 0) {
- GST_WARNING_OBJECT (emuldec,
+ GST_WARNING_OBJECT (marudec,
"maru_%sdec: decoding error (len: %d, have_data: %d)",
oclass->codec->name, len, have_data);
*got_data = 0;
}
if (outbuf) {
- GST_LOG_OBJECT (emuldec,
+ GST_LOG_OBJECT (marudec,
"Decoded data, now pushing buffer %p with offset %" G_GINT64_FORMAT
", timestamp %" GST_TIME_FORMAT " and duration %" GST_TIME_FORMAT,
outbuf, GST_BUFFER_OFFSET (outbuf),
GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)),
GST_TIME_ARGS (GST_BUFFER_DURATION (outbuf)));
- if (emuldec->discont) {
+ if (marudec->discont) {
/* GST_BUFFER_FLAG_DISCONT :
* the buffer marks a data discontinuity in the stream. This typically
* occurs after a seek or a dropped buffer from a live or network source.
*/
GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
- emuldec->discont = FALSE;
+ marudec->discont = FALSE;
}
- if (emuldec->segment.rate > 0.0) {
+ if (marudec->segment.rate > 0.0) {
// push forward
- *ret = gst_pad_push (emuldec->srcpad, outbuf);
+ *ret = gst_pad_push (marudec->srcpad, outbuf);
} else {
// push reverse
- GST_DEBUG_OBJECT (emuldec, "queued frame");
- emuldec->queued = g_list_prepend (emuldec->queued, outbuf);
+ GST_DEBUG_OBJECT (marudec, "queued frame");
+ marudec->queued = g_list_prepend (marudec->queued, outbuf);
*ret = GST_FLOW_OK;
}
} else {
- GST_DEBUG_OBJECT (emuldec, "Didn't get a decoded buffer");
+ GST_DEBUG_OBJECT (marudec, "Didn't get a decoded buffer");
}
return len;
}
static GstFlowReturn
-gst_emuldec_chain (GstPad *pad, GstBuffer *buffer)
+gst_marudec_chain (GstPad *pad, GstBuffer *buffer)
{
- GstEmulDec *emuldec;
+ GstEmulDec *marudec;
GstEmulDecClass *oclass;
guint8 *in_buf;
gint in_size, len, have_data;
const GstTSInfo *in_info;
const GstTSInfo *dec_info;
- emuldec = (GstEmulDec *) (GST_PAD_PARENT (pad));
+ marudec = (GstEmulDec *) (GST_PAD_PARENT (pad));
- if (G_UNLIKELY (!emuldec->opened)) {
+ if (G_UNLIKELY (!marudec->opened)) {
// not_negotiated
- oclass = (GstEmulDecClass *) (G_OBJECT_GET_CLASS (emuldec));
- GST_ELEMENT_ERROR (emuldec, CORE, NEGOTIATION, (NULL),
+ oclass = (GstEmulDecClass *) (G_OBJECT_GET_CLASS (marudec));
+ GST_ELEMENT_ERROR (marudec, CORE, NEGOTIATION, (NULL),
("maru_%sdec: input format was not set before data start",
oclass->codec->name));
gst_buffer_unref (buffer);
// FIXME
if (G_UNLIKELY (discont)) {
- GST_DEBUG_OBJECT (emuldec, "received DISCONT");
- gst_emuldec_drain (emuldec);
-// gst_emuldec_flush_pcache (emuldec);
-// emul_avcodec_flush buffers (emuldec->context, emuldec->dev);
- emuldec->discont = TRUE;
- gst_emuldec_reset_ts (emuldec);
+ GST_DEBUG_OBJECT (marudec, "received DISCONT");
+ gst_marudec_drain (marudec);
+// gst_marudec_flush_pcache (marudec);
+// maru_avcodec_flush_buffers (marudec->context, marudec->dev);
+ marudec->discont = TRUE;
+ gst_marudec_reset_ts (marudec);
}
-// emuldec->clear_ts = TRUE;
+// marudec->clear_ts = TRUE;
- oclass = (GstEmulDecClass *) (G_OBJECT_GET_CLASS (emuldec));
+ oclass = (GstEmulDecClass *) (G_OBJECT_GET_CLASS (marudec));
#if 0
- if (G_UNLIKELY (emuldec->waiting_for_key)) {
+ if (G_UNLIKELY (marudec->waiting_for_key)) {
if (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DELTA_UNIT) &&
oclass->codec->media_type != AVMEDIA_TYPE_AUDIO) {
// skip_keyframe
}
- emuldec->waiting_for_key = FALSE;
+ marudec->waiting_for_key = FALSE;
}
- if (emuldec->pcache) {
- GST_LOG_OBJECT (emuldec, "join parse cache");
- buffer = gst_buffer_join (emuldec->pcache, buffer);
- emuldec->pcache = NULL;
+ if (marudec->pcache) {
+ GST_LOG_OBJECT (marudec, "join parse cache");
+ buffer = gst_buffer_join (marudec->pcache, buffer);
+ marudec->pcache = NULL;
}
#endif
in_duration = GST_BUFFER_DURATION (buffer);
in_offset = GST_BUFFER_OFFSET (buffer);
- in_info = gst_ts_info_store (emuldec, in_timestamp, in_duration, in_offset);
+ in_info = gst_ts_info_store (marudec, in_timestamp, in_duration, in_offset);
#if 0
if (in_timestamp != -1) {
- if (!emuldec->reordered_in && emuldec->last_in != -1) {
- if (in_timestamp < emuldec->last_in) {
- GST_LOG_OBJECT (emuldec, "detected reordered input timestamps");
- emuldec->reordered_in = TRUE;
- emuldec->last_diff = GST_CLOCK_TIME_NONE;
- } else if (in_timestamp > emuldec->last_in) {
+ if (!marudec->reordered_in && marudec->last_in != -1) {
+ if (in_timestamp < marudec->last_in) {
+ GST_LOG_OBJECT (marudec, "detected reordered input timestamps");
+ marudec->reordered_in = TRUE;
+ marudec->last_diff = GST_CLOCK_TIME_NONE;
+ } else if (in_timestamp > marudec->last_in) {
GstClockTime diff;
- diff = in_timestamp - emuldec->last_in;
- if (emuldec->last_frames) {
- diff /= emuldec->last_frames;
+ diff = in_timestamp - marudec->last_in;
+ if (marudec->last_frames) {
+ diff /= marudec->last_frames;
}
- GST_LOG_OBJECT (emuldec, "estimated duration %" GST_TIME_FORMAT " %u",
- GST_TIME_ARGS (diff), emuldec->last_frames);
+ GST_LOG_OBJECT (marudec, "estimated duration %" GST_TIME_FORMAT " %u",
+ GST_TIME_ARGS (diff), marudec->last_frames);
- emuldec->last_diff = diff;
+ marudec->last_diff = diff;
}
}
- emuldec->last_in = in_timestamp;
- emuldec->last_frames;
+ marudec->last_in = in_timestamp;
+ marudec->last_frames++;
}
#endif
- GST_LOG_OBJECT (emuldec,
+ GST_LOG_OBJECT (marudec,
"Received new data of size %u, offset: %" G_GUINT64_FORMAT ", ts:%"
GST_TIME_FORMAT ", dur: %" GST_TIME_FORMAT ", info %d",
GST_BUFFER_SIZE (buffer), GST_BUFFER_OFFSET (buffer),
dec_info = in_info;
len =
- gst_emuldec_frame (emuldec, in_buf, in_size, &have_data, dec_info, in_offset, &ret);
+ gst_marudec_frame (marudec, in_buf, in_size, &have_data, dec_info, in_offset, &ret);
#if 0
- if (emuldec->clear_ts) {
+ if (marudec->clear_ts) {
in_timestamp = GST_CLOCK_TIME_NONE;
in_duration = GST_CLOCK_TIME_NONE;
in_offset = GST_BUFFER_OFFSET_NONE;
in_info = GST_TS_INFO_NONE;
} else {
- emuldec->clear_ts = TRUE;
+ marudec->clear_ts = TRUE;
}
#endif
}
static GstStateChangeReturn
-gst_emuldec_change_state (GstElement *element, GstStateChange transition)
+gst_marudec_change_state (GstElement *element, GstStateChange transition)
{
- GstEmulDec *emuldec = (GstEmulDec *) element;
+ GstEmulDec *marudec = (GstEmulDec *) element;
GstStateChangeReturn ret;
ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
switch (transition) {
case GST_STATE_CHANGE_PAUSED_TO_READY:
- GST_OBJECT_LOCK (emuldec);
- gst_emuldec_close (emuldec);
- GST_OBJECT_UNLOCK (emuldec);
+ GST_OBJECT_LOCK (marudec);
+ gst_marudec_close (marudec);
+ GST_OBJECT_UNLOCK (marudec);
/* clear queue */
- clear_queued (emuldec);
+ clear_queued (marudec);
break;
default:
break;
}
gboolean
-gst_emuldec_register (GstPlugin *plugin, GList *element)
+gst_marudec_register (GstPlugin *plugin, GList *element)
{
GTypeInfo typeinfo = {
sizeof (GstEmulDecClass),
- (GBaseInitFunc) gst_emuldec_base_init,
+ (GBaseInitFunc) gst_marudec_base_init,
NULL,
- (GClassInitFunc) gst_emuldec_class_init,
+ (GClassInitFunc) gst_marudec_class_init,
NULL,
NULL,
sizeof (GstEmulDec),
0,
- (GInstanceInitFunc) gst_emuldec_init,
+ (GInstanceInitFunc) gst_marudec_init,
};
GType type;
gchar *type_name;
- gint rank = GST_RANK_NONE;
+ gint rank = GST_RANK_PRIMARY;
GList *elem = element;
CodecElement *codec = NULL;
type = g_type_from_name (type_name);
if (!type) {
type = g_type_register_static (GST_TYPE_ELEMENT, type_name, &typeinfo, 0);
- g_type_set_qdata (type, GST_EMULDEC_PARAMS_QDATA, (gpointer) codec);
+ g_type_set_qdata (type, GST_MARUDEC_PARAMS_QDATA, (gpointer) codec);
}
if (!gst_element_register (plugin, type_name, rank, type)) {
#include <sys/mman.h>
#include <sys/stat.h>
-#include "gstemulapi.h"
-#include "gstemuldev.h"
+#include "gstmaruinterface.h"
+#include "gstmarudevice.h"
static GStaticMutex gst_avcodec_mutex = G_STATIC_MUTEX_INIT;
int device_fd;
int
-gst_emul_codec_device_open (CodecDevice *dev, int media_type)
+gst_maru_codec_device_open (CodecDevice *dev, int media_type)
{
int fd;
void *mmapbuf;
}
int
-gst_emul_codec_device_close (CodecDevice *dev)
+gst_maru_codec_device_close (CodecDevice *dev)
{
int fd = 0;
void *mmapbuf = NULL;
}
dev->buf = NULL;
- ioctl(fd, CODEC_CMD_RELEASE_MEMORY, &dev->mem_info.offset);
+ ioctl(fd, CODEC_CMD_RELEASE_BUFFER, &dev->mem_info.offset);
CODEC_LOG (INFO, "close %s.\n", CODEC_DEV);
if (close(fd) != 0) {
}
int
-gst_emul_avcodec_open (CodecContext *ctx,
+gst_maru_avcodec_open (CodecContext *ctx,
CodecElement *codec,
CodecDevice *dev)
{
g_static_mutex_lock (&gst_avcodec_mutex);
- if (gst_emul_codec_device_open (dev, codec->media_type) < 0) {
+ if (gst_maru_codec_device_open (dev, codec->media_type) < 0) {
perror("failed to open device.\n");
return -1;
}
}
int
-gst_emul_avcodec_close (CodecContext *ctx, CodecDevice *dev)
+gst_maru_avcodec_close (CodecContext *ctx, CodecDevice *dev)
{
int ret;
g_static_mutex_lock (&gst_avcodec_mutex);
- CODEC_LOG (DEBUG, "gst_emul_avcodec_close\n");
+ CODEC_LOG (DEBUG, "gst_maru_avcodec_close\n");
codec_deinit (ctx, dev);
- ret = gst_emul_codec_device_close (dev);
+ ret = gst_maru_codec_device_close (dev);
g_static_mutex_unlock (&gst_avcodec_mutex);
return ret;
*
*/
-#ifndef __GST_EMUL_DEV_H__
-#define __GST_EMUL_DEV_H__
+#ifndef __GST_MARU_DEVICE_H__
+#define __GST_MARU_DEVICE_H__
-int gst_emul_codec_device_open (CodecDevice *dev, int media_type);
-int gst_emul_codec_device_close (CodecDevice *dev);
+int gst_maru_codec_device_open (CodecDevice *dev, int media_type);
+int gst_maru_codec_device_close (CodecDevice *dev);
*
*/
-#include "gstemulutils.h"
-#include "gstemulapi.h"
-#include "gstemuldev.h"
+#include "gstmaruutils.h"
+#include "gstmaruinterface.h"
+#include "gstmarudevice.h"
#include <gst/base/gstadapter.h>
-#define GST_EMULENC_PARAMS_QDATA g_quark_from_static_string("maruenc-params")
+#define GST_MARUENC_PARAMS_QDATA g_quark_from_static_string("maruenc-params")
typedef struct _GstEmulEnc
{
static GstElementClass *parent_class = NULL;
-static void gst_emulenc_base_init (GstEmulEncClass *klass);
-static void gst_emulenc_class_init (GstEmulEncClass *klass);
-static void gst_emulenc_init (GstEmulEnc *emulenc);
-static void gst_emulenc_finalize (GObject *object);
+static void gst_maruenc_base_init (GstEmulEncClass *klass);
+static void gst_maruenc_class_init (GstEmulEncClass *klass);
+static void gst_maruenc_init (GstEmulEnc *maruenc);
+static void gst_maruenc_finalize (GObject *object);
-static gboolean gst_emulenc_setcaps (GstPad *pad, GstCaps *caps);
-static GstCaps *gst_emulenc_getcaps (GstPad *pad);
+static gboolean gst_maruenc_setcaps (GstPad *pad, GstCaps *caps);
+static GstCaps *gst_maruenc_getcaps (GstPad *pad);
-static GstCaps *gst_emulenc_get_possible_sizes (GstEmulEnc *emulenc,
+static GstCaps *gst_maruenc_get_possible_sizes (GstEmulEnc *maruenc,
GstPad *pad, const GstCaps *caps);
-static GstFlowReturn gst_emulenc_chain_video (GstPad *pad, GstBuffer *buffer);
-static GstFlowReturn gst_emulenc_chain_audio (GstPad *pad, GstBuffer *buffer);
+static GstFlowReturn gst_maruenc_chain_video (GstPad *pad, GstBuffer *buffer);
+static GstFlowReturn gst_maruenc_chain_audio (GstPad *pad, GstBuffer *buffer);
-static gboolean gst_emulenc_event_video (GstPad *pad, GstEvent *event);
-static gboolean gst_emulenc_event_src (GstPad *pad, GstEvent *event);
+static gboolean gst_maruenc_event_video (GstPad *pad, GstEvent *event);
+static gboolean gst_maruenc_event_src (GstPad *pad, GstEvent *event);
-GstStateChangeReturn gst_emulenc_change_state (GstElement *element, GstStateChange transition);
+GstStateChangeReturn gst_maruenc_change_state (GstElement *element, GstStateChange transition);
#define DEFAULT_VIDEO_BITRATE 300000
#define DEFAULT_VIDEO_GOP_SIZE 15
* Implementation
*/
static void
-gst_emulenc_base_init (GstEmulEncClass *klass)
+gst_maruenc_base_init (GstEmulEncClass *klass)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
GstPadTemplate *sinktempl = NULL, *srctempl = NULL;
codec =
(CodecElement *)g_type_get_qdata (G_OBJECT_CLASS_TYPE (klass),
- GST_EMULENC_PARAMS_QDATA);
+ GST_MARUENC_PARAMS_QDATA);
longname = g_strdup_printf ("%s Encoder", codec->longname);
classification = g_strdup_printf ("Codec/Encoder/%s",
g_free (classification);
- if (!(srccaps = gst_emul_codecname_to_caps (codec->name, NULL, TRUE))) {
+ if (!(srccaps = gst_maru_codecname_to_caps (codec->name, NULL, TRUE))) {
GST_DEBUG ("Couldn't get source caps for encoder '%s'", codec->name);
srccaps = gst_caps_new_simple ("unknown/unknown", NULL);
}
sinkcaps = gst_caps_from_string ("video/x-raw-rgb; video/x-raw-yuv; video/x-raw-gray");
break;
case AVMEDIA_TYPE_AUDIO:
- sinkcaps = gst_emul_codectype_to_audio_caps (NULL, codec->name, TRUE, codec);
+ sinkcaps = gst_maru_codectype_to_audio_caps (NULL, codec->name, TRUE, codec);
break;
default:
GST_LOG("unknown media type.\n");
}
static void
-gst_emulenc_class_init (GstEmulEncClass *klass)
+gst_maruenc_class_init (GstEmulEncClass *klass)
{
GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
parent_class = g_type_class_peek_parent (klass);
#if 0
- gobject_class->set_property = gst_emulenc_set_property
- gobject_class->get_property = gst_emulenc_get_property
+ gobject_class->set_property = gst_maruenc_set_property
+ gobject_class->get_property = gst_maruenc_get_property
#endif
- gstelement_class->change_state = gst_emulenc_change_state;
+ gstelement_class->change_state = gst_maruenc_change_state;
- gobject_class->finalize = gst_emulenc_finalize;
+ gobject_class->finalize = gst_maruenc_finalize;
}
static void
-gst_emulenc_init (GstEmulEnc *emulenc)
+gst_maruenc_init (GstEmulEnc *maruenc)
{
GstEmulEncClass *oclass;
- oclass = (GstEmulEncClass*) (G_OBJECT_GET_CLASS(emulenc));
+ oclass = (GstEmulEncClass*) (G_OBJECT_GET_CLASS(maruenc));
- emulenc->sinkpad = gst_pad_new_from_template (oclass->sinktempl, "sink");
- gst_pad_set_setcaps_function (emulenc->sinkpad,
- GST_DEBUG_FUNCPTR(gst_emulenc_setcaps));
- gst_pad_set_getcaps_function (emulenc->sinkpad,
- GST_DEBUG_FUNCPTR(gst_emulenc_getcaps));
+ maruenc->sinkpad = gst_pad_new_from_template (oclass->sinktempl, "sink");
+ gst_pad_set_setcaps_function (maruenc->sinkpad,
+ GST_DEBUG_FUNCPTR(gst_maruenc_setcaps));
+ gst_pad_set_getcaps_function (maruenc->sinkpad,
+ GST_DEBUG_FUNCPTR(gst_maruenc_getcaps));
- emulenc->srcpad = gst_pad_new_from_template (oclass->srctempl, "src");
- gst_pad_use_fixed_caps (emulenc->srcpad);
+ maruenc->srcpad = gst_pad_new_from_template (oclass->srctempl, "src");
+ gst_pad_use_fixed_caps (maruenc->srcpad);
if (oclass->codec->media_type == AVMEDIA_TYPE_VIDEO) {
- gst_pad_set_chain_function (emulenc->sinkpad, gst_emulenc_chain_video);
- gst_pad_set_event_function (emulenc->sinkpad, gst_emulenc_event_video);
- gst_pad_set_event_function (emulenc->srcpad, gst_emulenc_event_src);
+ gst_pad_set_chain_function (maruenc->sinkpad, gst_maruenc_chain_video);
+ gst_pad_set_event_function (maruenc->sinkpad, gst_maruenc_event_video);
+ gst_pad_set_event_function (maruenc->srcpad, gst_maruenc_event_src);
- emulenc->bitrate = DEFAULT_VIDEO_BITRATE;
- emulenc->buffer_size = 512 * 1024;
- emulenc->gop_size = DEFAULT_VIDEO_GOP_SIZE;
+ maruenc->bitrate = DEFAULT_VIDEO_BITRATE;
+ maruenc->buffer_size = 512 * 1024;
+ maruenc->gop_size = DEFAULT_VIDEO_GOP_SIZE;
#if 0
- emulenc->lmin = 2;
- emulenc->lmax = 31;
+ maruenc->lmin = 2;
+ maruenc->lmax = 31;
#endif
} else if (oclass->codec->media_type == AVMEDIA_TYPE_AUDIO){
- gst_pad_set_chain_function (emulenc->sinkpad, gst_emulenc_chain_audio);
- emulenc->bitrate = DEFAULT_AUDIO_BITRATE;
+ gst_pad_set_chain_function (maruenc->sinkpad, gst_maruenc_chain_audio);
+ maruenc->bitrate = DEFAULT_AUDIO_BITRATE;
}
- gst_element_add_pad (GST_ELEMENT (emulenc), emulenc->sinkpad);
- gst_element_add_pad (GST_ELEMENT (emulenc), emulenc->srcpad);
+ gst_element_add_pad (GST_ELEMENT (maruenc), maruenc->sinkpad);
+ gst_element_add_pad (GST_ELEMENT (maruenc), maruenc->srcpad);
- emulenc->context = g_malloc0 (sizeof(CodecContext));
- emulenc->context->video.pix_fmt = PIX_FMT_NONE;
- emulenc->context->audio.sample_fmt = SAMPLE_FMT_NONE;
+ maruenc->context = g_malloc0 (sizeof(CodecContext));
+ maruenc->context->video.pix_fmt = PIX_FMT_NONE;
+ maruenc->context->audio.sample_fmt = SAMPLE_FMT_NONE;
- emulenc->opened = FALSE;
+ maruenc->opened = FALSE;
#if 0
- emulenc->file = NULL;
+ maruenc->file = NULL;
#endif
- emulenc->delay = g_queue_new ();
+ maruenc->delay = g_queue_new ();
- emulenc->dev = g_malloc0 (sizeof(CodecDevice));
- if (!emulenc->dev) {
- printf("failed to allocate memory.\n");
+ maruenc->dev = g_malloc0 (sizeof(CodecDevice));
+ if (!maruenc->dev) {
+ printf("[gst-maru][%d] failed to allocate memory\n", __LINE__);
}
// need to know what adapter does.
- emulenc->adapter = gst_adapter_new ();
+ maruenc->adapter = gst_adapter_new ();
}
static void
-gst_emulenc_finalize (GObject *object)
+gst_maruenc_finalize (GObject *object)
{
// Deinit Decoder
- GstEmulEnc *emulenc = (GstEmulEnc *) object;
+ GstEmulEnc *maruenc = (GstEmulEnc *) object;
- if (emulenc->opened) {
- gst_emul_avcodec_close (emulenc->context, emulenc->dev);
- emulenc->opened = FALSE;
+ if (maruenc->opened) {
+ gst_maru_avcodec_close (maruenc->context, maruenc->dev);
+ maruenc->opened = FALSE;
}
- if (emulenc->context) {
- g_free (emulenc->context);
- emulenc->context = NULL;
+ if (maruenc->context) {
+ g_free (maruenc->context);
+ maruenc->context = NULL;
}
- g_queue_free (emulenc->delay);
+ g_queue_free (maruenc->delay);
#if 0
- g_free (emulenc->filename);
+ g_free (maruenc->filename);
#endif
- g_object_unref (emulenc->adapter);
+ g_object_unref (maruenc->adapter);
G_OBJECT_CLASS (parent_class)->finalize (object);
}
static GstCaps *
-gst_emulenc_get_possible_sizes (GstEmulEnc *emulenc, GstPad *pad,
+gst_maruenc_get_possible_sizes (GstEmulEnc *maruenc, GstPad *pad,
const GstCaps *caps)
{
GstCaps *othercaps = NULL;
GstCaps *intersect = NULL;
guint i;
- othercaps = gst_pad_peer_get_caps (emulenc->srcpad);
+ othercaps = gst_pad_peer_get_caps (maruenc->srcpad);
if (!othercaps) {
return gst_caps_copy (caps);
}
intersect = gst_caps_intersect (othercaps,
- gst_pad_get_pad_template_caps (emulenc->srcpad));
+ gst_pad_get_pad_template_caps (maruenc->srcpad));
gst_caps_unref (othercaps);
if (gst_caps_is_empty (intersect)) {
}
static GstCaps *
-gst_emulenc_getcaps (GstPad *pad)
+gst_maruenc_getcaps (GstPad *pad)
{
- GstEmulEnc *emulenc = (GstEmulEnc *) GST_PAD_PARENT (pad);
+ GstEmulEnc *maruenc = (GstEmulEnc *) GST_PAD_PARENT (pad);
GstEmulEncClass *oclass =
- (GstEmulEncClass *) G_OBJECT_GET_CLASS (emulenc);
+ (GstEmulEncClass *) G_OBJECT_GET_CLASS (maruenc);
CodecContext *ctx = NULL;
enum PixelFormat pixfmt;
GstCaps *caps = NULL;
GstCaps *finalcaps = NULL;
gint i;
- GST_DEBUG_OBJECT (emulenc, "getting caps");
+ GST_DEBUG_OBJECT (maruenc, "getting caps");
if (!oclass->codec) {
- GST_ERROR_OBJECT (emulenc, "codec element is null.");
+ GST_ERROR_OBJECT (maruenc, "codec element is null.");
return NULL;
}
if (oclass->codec->media_type == AVMEDIA_TYPE_AUDIO) {
caps = gst_caps_copy (gst_pad_get_pad_template_caps (pad));
- GST_DEBUG_OBJECT (emulenc, "audio caps, return template %" GST_PTR_FORMAT,
+ GST_DEBUG_OBJECT (maruenc, "audio caps, return template %" GST_PTR_FORMAT,
caps);
return caps;
}
// cached
if (oclass->sinkcaps) {
- caps = gst_emulenc_get_possible_sizes (emulenc, pad, oclass->sinkcaps);
- GST_DEBUG_OBJECT (emulenc, "return cached caps %" GST_PTR_FORMAT, caps);
+ caps = gst_maruenc_get_possible_sizes (maruenc, pad, oclass->sinkcaps);
+ GST_DEBUG_OBJECT (maruenc, "return cached caps %" GST_PTR_FORMAT, caps);
return caps;
}
- GST_DEBUG_OBJECT (emulenc, "probing caps");
+ GST_DEBUG_OBJECT (maruenc, "probing caps");
i = pixfmt = 0;
for (pixfmt = 0;; pixfmt++) {
GstCaps *tmpcaps;
if ((pixfmt = oclass->codec->pix_fmts[i++]) == PIX_FMT_NONE) {
- GST_DEBUG_OBJECT (emulenc,
+ GST_DEBUG_OBJECT (maruenc,
"At the end of official pixfmt for this codec, breaking out");
break;
}
- GST_DEBUG_OBJECT (emulenc,
+ GST_DEBUG_OBJECT (maruenc,
"Got an official pixfmt [%d], attempting to get caps", pixfmt);
- tmpcaps = gst_emul_pixfmt_to_caps (pixfmt, NULL, oclass->codec->name);
+ tmpcaps = gst_maru_pixfmt_to_caps (pixfmt, NULL, oclass->codec->name);
if (tmpcaps) {
- GST_DEBUG_OBJECT (emulenc, "Got caps, breaking out");
+ GST_DEBUG_OBJECT (maruenc, "Got caps, breaking out");
if (!caps) {
caps = gst_caps_new_empty ();
}
continue;
}
- GST_DEBUG_OBJECT (emulenc,
+ GST_DEBUG_OBJECT (maruenc,
"Couldn't figure out caps without context, trying again with a context");
- GST_DEBUG_OBJECT (emulenc, "pixfmt: %d", pixfmt);
+ GST_DEBUG_OBJECT (maruenc, "pixfmt: %d", pixfmt);
if (pixfmt >= PIX_FMT_NB) {
GST_WARNING ("Invalid pixfmt, breaking out");
break;
ctx = g_malloc0 (sizeof(CodecContext));
if (!ctx) {
- GST_DEBUG_OBJECT (emulenc, "no context");
+ GST_DEBUG_OBJECT (maruenc, "no context");
break;
}
ctx->video.pix_fmt = pixfmt;
GST_DEBUG ("Attempting to open codec");
- if (gst_emul_avcodec_open (ctx, oclass->codec, emulenc->dev) >= 0
+ if (gst_maru_avcodec_open (ctx, oclass->codec, maruenc->dev) >= 0
&& ctx->video.pix_fmt == pixfmt) {
ctx->video.width = -1;
if (!caps) {
caps = gst_caps_new_empty ();
}
- tmpcaps = gst_emul_codectype_to_caps (oclass->codec->media_type, ctx,
+ tmpcaps = gst_maru_codectype_to_caps (oclass->codec->media_type, ctx,
oclass->codec->name, TRUE);
if (tmpcaps) {
gst_caps_append (caps, tmpcaps);
} else {
- GST_LOG_OBJECT (emulenc,
+ GST_LOG_OBJECT (maruenc,
"Couldn't get caps for codec: %s", oclass->codec->name);
}
- gst_emul_avcodec_close (ctx, emulenc->dev);
+ gst_maru_avcodec_close (ctx, maruenc->dev);
} else {
- GST_DEBUG_OBJECT (emulenc, "Opening codec failed with pixfmt: %d", pixfmt);
+ GST_DEBUG_OBJECT (maruenc, "Opening codec failed with pixfmt: %d", pixfmt);
}
- gst_emul_avcodec_close (ctx, emulenc->dev);
+ gst_maru_avcodec_close (ctx, maruenc->dev);
#if 0
if (ctx->priv_data) {
- gst_emul_avcodec_close (ctx, emulenc->dev);
+ gst_maru_avcodec_close (ctx, maruenc->dev);
}
#endif
g_free (ctx);
}
if (!caps) {
- caps = gst_emulenc_get_possible_sizes (emulenc, pad,
+ caps = gst_maruenc_get_possible_sizes (maruenc, pad,
gst_pad_get_pad_template_caps (pad));
- GST_DEBUG_OBJECT (emulenc, "probing gave nothing, "
+ GST_DEBUG_OBJECT (maruenc, "probing gave nothing, "
"return template %" GST_PTR_FORMAT, caps);
return caps;
}
- GST_DEBUG_OBJECT (emulenc, "probed caps gave %" GST_PTR_FORMAT, caps);
+ GST_DEBUG_OBJECT (maruenc, "probed caps gave %" GST_PTR_FORMAT, caps);
oclass->sinkcaps = gst_caps_copy (caps);
- finalcaps = gst_emulenc_get_possible_sizes (emulenc, pad, caps);
+ finalcaps = gst_maruenc_get_possible_sizes (maruenc, pad, caps);
gst_caps_unref (caps);
return finalcaps;
}
static gboolean
-gst_emulenc_setcaps (GstPad *pad, GstCaps *caps)
+gst_maruenc_setcaps (GstPad *pad, GstCaps *caps)
{
- GstEmulEnc *emulenc;
+ GstEmulEnc *maruenc;
GstEmulEncClass *oclass;
GstCaps *other_caps;
GstCaps *allowed_caps;
enum PixelFormat pix_fmt;
int32_t buf_size;
- emulenc = (GstEmulEnc *) (gst_pad_get_parent (pad));
- oclass = (GstEmulEncClass *) (G_OBJECT_GET_CLASS (emulenc));
+ maruenc = (GstEmulEnc *) (gst_pad_get_parent (pad));
+ oclass = (GstEmulEncClass *) (G_OBJECT_GET_CLASS (maruenc));
- if (emulenc->opened) {
- gst_emul_avcodec_close (emulenc->context, emulenc->dev);
- emulenc->opened = FALSE;
+ if (maruenc->opened) {
+ gst_maru_avcodec_close (maruenc->context, maruenc->dev);
+ maruenc->opened = FALSE;
- gst_pad_set_caps (emulenc->srcpad, NULL);
+ gst_pad_set_caps (maruenc->srcpad, NULL);
}
- emulenc->context->bit_rate = emulenc->bitrate;
- GST_DEBUG_OBJECT (emulenc, "Setting context to bitrate %lu, gop_size %d",
- emulenc->bitrate, emulenc->gop_size);
+ maruenc->context->bit_rate = maruenc->bitrate;
+ GST_DEBUG_OBJECT (maruenc, "Setting context to bitrate %lu, gop_size %d",
+ maruenc->bitrate, maruenc->gop_size);
#if 0
// user defined properties
- emulenc->context->gop_size = emulenc->gop_size;
- emulenc->context->lmin = (emulenc->lmin * FF_QP2LAMBDA + 0.5);
- emulenc->context->lmax = (emulenc->lmax * FF_QP2LAMBDA + 0.5);
+ maruenc->context->gop_size = maruenc->gop_size;
+ maruenc->context->lmin = (maruenc->lmin * FF_QP2LAMBDA + 0.5);
+ maruenc->context->lmax = (maruenc->lmax * FF_QP2LAMBDA + 0.5);
// some other defaults
- emulenc->context->b_frame_strategy = 0;
- emulenc->context->coder_type = 0;
- emulenc->context->context_model = 0;
- emulenc->context->scenechange_threshold = 0;
- emulenc->context->inter_threshold = 0;
-
- if (emulenc->interlaced) {
- emulenc->context->flags |=
+ maruenc->context->b_frame_strategy = 0;
+ maruenc->context->coder_type = 0;
+ maruenc->context->context_model = 0;
+ maruenc->context->scenechange_threshold = 0;
+ maruenc->context->inter_threshold = 0;
+
+ if (maruenc->interlaced) {
+ maruenc->context->flags |=
CODEC_FLAG_INTERLACED_DCT | CODEC_FLAG_INTERLACED_ME;
- emulenc->picture->interlaced_frame = TRUE;
+ maruenc->picture->interlaced_frame = TRUE;
- emulenc->picture->top_field_first = TRUE;
+ maruenc->picture->top_field_first = TRUE;
}
#endif
- gst_emul_caps_with_codectype (oclass->codec->media_type, caps, emulenc->context);
+ gst_maru_caps_with_codectype (oclass->codec->media_type, caps, maruenc->context);
- if (!emulenc->context->video.fps_d) {
- emulenc->context->video.fps_d = 25;
- emulenc->context->video.fps_n = 1;
+ if (!maruenc->context->video.fps_d) {
+ maruenc->context->video.fps_d = 25;
+ maruenc->context->video.fps_n = 1;
} else if (!strcmp(oclass->codec->name ,"mpeg4")
- && (emulenc->context->video.fps_d > 65535)) {
- emulenc->context->video.fps_n =
- (gint) gst_util_uint64_scale_int (emulenc->context->video.fps_n,
- 65535, emulenc->context->video.fps_d);
- emulenc->context->video.fps_d = 65535;
- GST_LOG_OBJECT (emulenc, "MPEG4 : scaled down framerate to %d / %d",
- emulenc->context->video.fps_d, emulenc->context->video.fps_n);
+ && (maruenc->context->video.fps_d > 65535)) {
+ maruenc->context->video.fps_n =
+ (gint) gst_util_uint64_scale_int (maruenc->context->video.fps_n,
+ 65535, maruenc->context->video.fps_d);
+ maruenc->context->video.fps_d = 65535;
+ GST_LOG_OBJECT (maruenc, "MPEG4 : scaled down framerate to %d / %d",
+ maruenc->context->video.fps_d, maruenc->context->video.fps_n);
}
- pix_fmt = emulenc->context->video.pix_fmt;
+ pix_fmt = maruenc->context->video.pix_fmt;
{
switch (oclass->codec->media_type) {
{
int width, height;
- width = emulenc->context->video.width;
- height = emulenc->context->video.height;
+ width = maruenc->context->video.width;
+ height = maruenc->context->video.height;
buf_size = width * height * 6 + FF_MIN_BUFFER_SIZE + 100;
break;
}
}
}
- emulenc->dev->buf_size = gst_emul_align_size(buf_size);
+ maruenc->dev->buf_size = gst_maru_align_size(buf_size);
// open codec
- if (gst_emul_avcodec_open (emulenc->context,
- oclass->codec, emulenc->dev) < 0) {
- GST_DEBUG_OBJECT (emulenc, "maru_%senc: Failed to open codec",
+ if (gst_maru_avcodec_open (maruenc->context,
+ oclass->codec, maruenc->dev) < 0) {
+ GST_DEBUG_OBJECT (maruenc, "maru_%senc: Failed to open codec",
oclass->codec->name);
return FALSE;
}
- if (pix_fmt != emulenc->context->video.pix_fmt) {
- gst_emul_avcodec_close (emulenc->context, emulenc->dev);
- GST_DEBUG_OBJECT (emulenc,
+ if (pix_fmt != maruenc->context->video.pix_fmt) {
+ gst_maru_avcodec_close (maruenc->context, maruenc->dev);
+ GST_DEBUG_OBJECT (maruenc,
"maru_%senc: AV wants different colorspace (%d given, %d wanted)",
- oclass->codec->name, pix_fmt, emulenc->context->video.pix_fmt);
+ oclass->codec->name, pix_fmt, maruenc->context->video.pix_fmt);
return FALSE;
}
if (oclass->codec->media_type == AVMEDIA_TYPE_VIDEO
&& pix_fmt == PIX_FMT_NONE) {
- GST_DEBUG_OBJECT (emulenc, "maru_%senc: Failed to determine input format",
+ GST_DEBUG_OBJECT (maruenc, "maru_%senc: Failed to determine input format",
oclass->codec->name);
return FALSE;
}
- GST_DEBUG_OBJECT (emulenc, "picking an output format.");
- allowed_caps = gst_pad_get_allowed_caps (emulenc->srcpad);
+ GST_DEBUG_OBJECT (maruenc, "picking an output format.");
+ allowed_caps = gst_pad_get_allowed_caps (maruenc->srcpad);
if (!allowed_caps) {
- GST_DEBUG_OBJECT (emulenc, "but no peer, using template caps");
+ GST_DEBUG_OBJECT (maruenc, "but no peer, using template caps");
allowed_caps =
- gst_caps_copy (gst_pad_get_pad_template_caps (emulenc->srcpad));
+ gst_caps_copy (gst_pad_get_pad_template_caps (maruenc->srcpad));
}
- GST_DEBUG_OBJECT (emulenc, "chose caps %" GST_PTR_FORMAT, allowed_caps);
- gst_emul_caps_with_codecname (oclass->codec->name,
- oclass->codec->media_type, allowed_caps, emulenc->context);
+ GST_DEBUG_OBJECT (maruenc, "chose caps %" GST_PTR_FORMAT, allowed_caps);
+ gst_maru_caps_with_codecname (oclass->codec->name,
+ oclass->codec->media_type, allowed_caps, maruenc->context);
other_caps =
- gst_emul_codecname_to_caps (oclass->codec->name, emulenc->context, TRUE);
+ gst_maru_codecname_to_caps (oclass->codec->name, maruenc->context, TRUE);
if (!other_caps) {
GST_DEBUG("Unsupported codec - no caps found");
- gst_emul_avcodec_close (emulenc->context, emulenc->dev);
+ gst_maru_avcodec_close (maruenc->context, maruenc->dev);
return FALSE;
}
icaps = newcaps;
}
- if (!gst_pad_set_caps (emulenc->srcpad, icaps)) {
- gst_emul_avcodec_close (emulenc->context, emulenc->dev);
+ if (!gst_pad_set_caps (maruenc->srcpad, icaps)) {
+ gst_maru_avcodec_close (maruenc->context, maruenc->dev);
gst_caps_unref (icaps);
return FALSE;
}
- gst_object_unref (emulenc);
+ gst_object_unref (maruenc);
- emulenc->opened = TRUE;
+ maruenc->opened = TRUE;
return TRUE;
}
static void
-gst_emulenc_setup_working_buf (GstEmulEnc *emulenc)
+gst_maruenc_setup_working_buf (GstEmulEnc *maruenc)
{
guint wanted_size =
- emulenc->context->video.width * emulenc->context->video.height * 6 +
+ maruenc->context->video.width * maruenc->context->video.height * 6 +
FF_MIN_BUFFER_SIZE;
- if (emulenc->working_buf == NULL ||
- emulenc->working_buf_size != wanted_size) {
- if (emulenc->working_buf) {
- g_free (emulenc->working_buf);
+ if (maruenc->working_buf == NULL ||
+ maruenc->working_buf_size != wanted_size) {
+ if (maruenc->working_buf) {
+ g_free (maruenc->working_buf);
}
- emulenc->working_buf_size = wanted_size;
- emulenc->working_buf = g_malloc0 (emulenc->working_buf_size);
+ maruenc->working_buf_size = wanted_size;
+ maruenc->working_buf = g_malloc0 (maruenc->working_buf_size);
}
- emulenc->buffer_size = wanted_size;
+ maruenc->buffer_size = wanted_size;
}
GstFlowReturn
-gst_emulenc_chain_video (GstPad *pad, GstBuffer *buffer)
+gst_maruenc_chain_video (GstPad *pad, GstBuffer *buffer)
{
- GstEmulEnc *emulenc = (GstEmulEnc *) (GST_PAD_PARENT (pad));
+ GstEmulEnc *maruenc = (GstEmulEnc *) (GST_PAD_PARENT (pad));
GstBuffer *outbuf;
gint ret_size = 0, frame_size;
- GST_DEBUG_OBJECT (emulenc,
+ GST_DEBUG_OBJECT (maruenc,
"Received buffer of time %" GST_TIME_FORMAT,
GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer)));
#if 0
- GST_OBJECT_LOCK (emulenc);
- force_keyframe = emulenc->force_keyframe;
- emulenc->force_keyframe = FALSE;
- GST_OBJECT_UNLOCK (emulenc);
+ GST_OBJECT_LOCK (maruenc);
+ force_keyframe = maruenc->force_keyframe;
+ maruenc->force_keyframe = FALSE;
+ GST_OBJECT_UNLOCK (maruenc);
if (force_keyframe) {
- emulenc->picture->pict_type = FF_I_TYPE;
+ maruenc->picture->pict_type = FF_I_TYPE;
}
#endif
- frame_size = gst_emul_avpicture_size (emulenc->context->video.pix_fmt,
- emulenc->context->video.width, emulenc->context->video.height);
+ frame_size = gst_maru_avpicture_size (maruenc->context->video.pix_fmt,
+ maruenc->context->video.width, maruenc->context->video.height);
g_return_val_if_fail (frame_size == GST_BUFFER_SIZE (buffer),
GST_FLOW_ERROR);
#if 0
- pts = gst_emul_time_gst_to_ff (GST_BUFFER_TIMESTAMP (buffer) /
- emulenc->context.video.ticks_per_frame,
- emulenc->context.video.fps_n, emulen->context.video.fps_d);
+ pts = gst_maru_time_gst_to_ff (GST_BUFFER_TIMESTAMP (buffer) /
+ maruenc->context->video.ticks_per_frame,
+ maruenc->context->video.fps_n, maruenc->context->video.fps_d);
#endif
// TODO: check whether this func needs or not.
- gst_emulenc_setup_working_buf (emulenc);
+ gst_maruenc_setup_working_buf (maruenc);
ret_size =
- codec_encode_video (emulenc->context, emulenc->working_buf,
- emulenc->working_buf_size, GST_BUFFER_DATA (buffer),
+ codec_encode_video (maruenc->context, maruenc->working_buf,
+ maruenc->working_buf_size, GST_BUFFER_DATA (buffer),
GST_BUFFER_SIZE (buffer), GST_BUFFER_TIMESTAMP (buffer),
- emulenc->dev);
+ maruenc->dev);
if (ret_size < 0) {
GstEmulEncClass *oclass =
- (GstEmulEncClass *) (G_OBJECT_GET_CLASS (emulenc));
- GST_ERROR_OBJECT (emulenc,
+ (GstEmulEncClass *) (G_OBJECT_GET_CLASS (maruenc));
+ GST_ERROR_OBJECT (maruenc,
"maru_%senc: failed to encode buffer", oclass->codec->name);
gst_buffer_unref (buffer);
return GST_FLOW_OK;
}
- g_queue_push_tail (emulenc->delay, buffer);
+ g_queue_push_tail (maruenc->delay, buffer);
if (ret_size) {
- buffer = g_queue_pop_head (emulenc->delay);
+ buffer = g_queue_pop_head (maruenc->delay);
} else {
return GST_FLOW_OK;
}
#if 0
- if (emulenc->file && emulenc->context->stats_out) {
- if (fprintf (emulenc->file, "%s", emulenc->context->stats_out) < 0) {
- GST_ELEMENT_ERROR (emulenc, RESOURCE, WRITE,
- (("Could not write to file \"%s\"."), emulenc->filename),
+ if (maruenc->file && maruenc->context->stats_out) {
+ if (fprintf (maruenc->file, "%s", maruenc->context->stats_out) < 0) {
+ GST_ELEMENT_ERROR (maruenc, RESOURCE, WRITE,
+ (("Could not write to file \"%s\"."), maruenc->filename),
GST_ERROR_SYSTEM);
}
}
uint32_t mem_offset;
uint8_t *working_buf = NULL;
- mem_offset = emulenc->dev->mem_info.offset;
- working_buf = emulenc->dev->buf + mem_offset;
+ mem_offset = maruenc->dev->mem_info.offset;
+ working_buf = maruenc->dev->buf + mem_offset;
if (!working_buf) {
} else {
CODEC_LOG (INFO,
"encoded video. mem_offset = 0x%x\n", mem_offset);
outbuf = gst_buffer_new_and_alloc (ret_size);
-// memcpy (GST_BUFFER_DATA (outbuf), emulenc->working_buf, ret_size);
+// memcpy (GST_BUFFER_DATA (outbuf), maruenc->working_buf, ret_size);
memcpy (GST_BUFFER_DATA (outbuf), working_buf, ret_size);
GST_BUFFER_TIMESTAMP (outbuf) = GST_BUFFER_TIMESTAMP (buffer);
GST_BUFFER_DURATION (outbuf) = GST_BUFFER_DURATION (buffer);
}
- ret = ioctl(emulenc->dev->fd, CODEC_CMD_RELEASE_MEMORY, &mem_offset);
+ ret = ioctl(maruenc->dev->fd, CODEC_CMD_RELEASE_BUFFER, &mem_offset);
if (ret < 0) {
CODEC_LOG (ERR, "failed to release used buffer\n");
}
#endif
#if 0
- if (emulenc->context->coded_frame) {
- if (!emulenc->context->coded_frame->key_frame) {
+ if (maruenc->context->coded_frame) {
+ if (!maruenc->context->coded_frame->key_frame) {
GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DELTA_UNIT);
}
} else {
- GST_WARNING_OBJECT (emulenc, "codec did not provide keyframe info");
+ GST_WARNING_OBJECT (maruenc, "codec did not provide keyframe info");
}
#endif
- gst_buffer_set_caps (outbuf, GST_PAD_CAPS (emulenc->srcpad));
+ gst_buffer_set_caps (outbuf, GST_PAD_CAPS (maruenc->srcpad));
gst_buffer_unref (buffer);
#if 0
if (force_keyframe) {
- gst_pad_push_event (emulenc->srcpad,
+ gst_pad_push_event (maruenc->srcpad,
gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM,
gst_structure_new ("GstForceKeyUnit", "timestamp",
G_TYPE_UINT64, GST_BUFFER_TIMESTAMP (outbuf), NULL)));
}
#endif
- return gst_pad_push (emulenc->srcpad, outbuf);
+ return gst_pad_push (maruenc->srcpad, outbuf);
}
GstFlowReturn
-gst_emulenc_encode_audio (GstEmulEnc *emulenc, guint8 *audio_in,
+gst_maruenc_encode_audio (GstEmulEnc *maruenc, guint8 *audio_in,
guint in_size, guint max_size, GstClockTime timestamp,
GstClockTime duration, gboolean discont)
{
outbuf = gst_buffer_new_and_alloc (max_size + FF_MIN_BUFFER_SIZE);
audio_out = GST_BUFFER_DATA (outbuf);
- GST_LOG_OBJECT (emulenc, "encoding buffer of max size %d", max_size);
- if (emulenc->buffer_size != max_size) {
- emulenc->buffer_size = max_size;
+ GST_LOG_OBJECT (maruenc, "encoding buffer of max size %d", max_size);
+ if (maruenc->buffer_size != max_size) {
+ maruenc->buffer_size = max_size;
}
- res = codec_encode_audio (emulenc->context, audio_out, max_size,
- audio_in, in_size, emulenc->dev);
+ res = codec_encode_audio (maruenc->context, audio_out, max_size,
+ audio_in, in_size, maruenc->dev);
if (res < 0) {
- GST_ERROR_OBJECT (emulenc, "Failed to encode buffer: %d", res);
+ GST_ERROR_OBJECT (maruenc, "Failed to encode buffer: %d", res);
gst_buffer_unref (outbuf);
return GST_FLOW_OK;
}
- GST_LOG_OBJECT (emulenc, "got output size %d", res);
+ GST_LOG_OBJECT (maruenc, "got output size %d", res);
GST_BUFFER_SIZE (outbuf) = res;
GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
if (discont) {
GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
}
- gst_buffer_set_caps (outbuf, GST_PAD_CAPS (emulenc->srcpad));
+ gst_buffer_set_caps (outbuf, GST_PAD_CAPS (maruenc->srcpad));
- GST_LOG_OBJECT (emulenc, "pushing size %d, timestamp %",
+ GST_LOG_OBJECT (maruenc, "pushing size %d, timestamp %"
GST_TIME_FORMAT, res, GST_TIME_ARGS (timestamp));
- ret = gst_pad_push (emulenc->srcpad, outbuf);
+ ret = gst_pad_push (maruenc->srcpad, outbuf);
return ret;
}
static GstFlowReturn
-gst_emulenc_chain_audio (GstPad *pad, GstBuffer *buffer)
+gst_maruenc_chain_audio (GstPad *pad, GstBuffer *buffer)
{
- GstEmulEnc *emulenc;
+ GstEmulEnc *maruenc;
GstEmulEncClass *oclass;
GstClockTime timestamp, duration;
guint in_size, frame_size;
guint8 *in_data;
CodecContext *ctx;
- emulenc = (GstEmulEnc *) (GST_OBJECT_PARENT (pad));
- oclass = (GstEmulEncClass *) G_OBJECT_GET_CLASS (emulenc);
+ maruenc = (GstEmulEnc *) (GST_OBJECT_PARENT (pad));
+ oclass = (GstEmulEncClass *) G_OBJECT_GET_CLASS (maruenc);
- ctx = emulenc->context;
+ ctx = maruenc->context;
in_size = GST_BUFFER_SIZE (buffer);
timestamp = GST_BUFFER_TIMESTAMP (buffer);
duration = GST_BUFFER_DURATION (buffer);
discont = GST_BUFFER_IS_DISCONT (buffer);
- GST_DEBUG_OBJECT (emulenc,
+ GST_DEBUG_OBJECT (maruenc,
"Received time %" GST_TIME_FORMAT ", duration %" GST_TIME_FORMAT
", size %d", GST_TIME_ARGS (timestamp), GST_TIME_ARGS (duration), in_size);
guint avail, frame_bytes;
if (discont) {
- GST_LOG_OBJECT (emulenc, "DISCONT, clear adapter");
- gst_adapter_clear (emulenc->adapter);
- emulenc->discont = TRUE;
+ GST_LOG_OBJECT (maruenc, "DISCONT, clear adapter");
+ gst_adapter_clear (maruenc->adapter);
+ maruenc->discont = TRUE;
}
- if (gst_adapter_available (emulenc->adapter) == 0) {
- GST_LOG_OBJECT (emulenc, "taking buffer timestamp %" GST_TIME_FORMAT,
+ if (gst_adapter_available (maruenc->adapter) == 0) {
+ GST_LOG_OBJECT (maruenc, "taking buffer timestamp %" GST_TIME_FORMAT,
GST_TIME_ARGS (timestamp));
- emulenc->adapter_ts = timestamp;
- emulenc->adapter_consumed = 0;
+ maruenc->adapter_ts = timestamp;
+ maruenc->adapter_consumed = 0;
} else {
GstClockTime upstream_time;
GstClockTime consumed_time;
guint64 bytes;
consumed_time =
- gst_util_uint64_scale (emulenc->adapter_consumed, GST_SECOND,
+ gst_util_uint64_scale (maruenc->adapter_consumed, GST_SECOND,
ctx->audio.sample_rate);
- timestamp = emulenc->adapter_ts + consumed_time;
- GST_LOG_OBJECT (emulenc, "taking adapter timestamp %" GST_TIME_FORMAT
+ timestamp = maruenc->adapter_ts + consumed_time;
+ GST_LOG_OBJECT (maruenc, "taking adapter timestamp %" GST_TIME_FORMAT
" and adding consumed time %" GST_TIME_FORMAT,
- GST_TIME_ARGS (emulenc->adapter_ts), GST_TIME_ARGS (consumed_time));
+ GST_TIME_ARGS (maruenc->adapter_ts), GST_TIME_ARGS (consumed_time));
- upstream_time = gst_adapter_prev_timestamp (emulenc->adapter, &bytes);
+ upstream_time = gst_adapter_prev_timestamp (maruenc->adapter, &bytes);
if (GST_CLOCK_TIME_IS_VALID (upstream_time)) {
GstClockTimeDiff diff;
diff = upstream_time - timestamp;
if (diff > GST_SECOND / 10 || diff < -GST_SECOND / 10) {
- GST_DEBUG_OBJECT (emulenc, "adapter timestamp drifting, "
+ GST_DEBUG_OBJECT (maruenc, "adapter timestamp drifting, "
"taking upstream timestamp %" GST_TIME_FORMAT,
GST_TIME_ARGS (upstream_time));
timestamp = upstream_time;
- emulenc->adapter_consumed = bytes / (osize * ctx->audio.channels);
- emulenc->adapter_ts =
- upstream_time - gst_util_uint64_scale (emulenc->adapter_consumed,
+ maruenc->adapter_consumed = bytes / (osize * ctx->audio.channels);
+ maruenc->adapter_ts =
+ upstream_time - gst_util_uint64_scale (maruenc->adapter_consumed,
GST_SECOND, ctx->audio.sample_rate);
- emulenc->discont = TRUE;
+ maruenc->discont = TRUE;
}
}
}
- GST_LOG_OBJECT (emulenc, "pushing buffer in adapter");
- gst_adapter_push (emulenc->adapter, buffer);
+ GST_LOG_OBJECT (maruenc, "pushing buffer in adapter");
+ gst_adapter_push (maruenc->adapter, buffer);
frame_bytes = frame_size * osize * ctx->audio.channels;
- avail = gst_adapter_available (emulenc->adapter);
+ avail = gst_adapter_available (maruenc->adapter);
- GST_LOG_OBJECT (emulenc, "frame_bytes %u, avail %u", frame_bytes, avail);
+ GST_LOG_OBJECT (maruenc, "frame_bytes %u, avail %u", frame_bytes, avail);
while (avail >= frame_bytes) {
- GST_LOG_OBJECT (emulenc, "taking %u bytes from the adapter", frame_bytes);
+ GST_LOG_OBJECT (maruenc, "taking %u bytes from the adapter", frame_bytes);
- in_data = (guint8 *) gst_adapter_peek (emulenc->adapter, frame_bytes);
- emulenc->adapter_consumed += frame_size;
+ in_data = (guint8 *) gst_adapter_peek (maruenc->adapter, frame_bytes);
+ maruenc->adapter_consumed += frame_size;
duration =
- gst_util_uint64_scale (emulenc->adapter_consumed, GST_SECOND,
+ gst_util_uint64_scale (maruenc->adapter_consumed, GST_SECOND,
ctx->audio.sample_rate);
- duration -= (timestamp - emulenc->adapter_ts);
+ duration -= (timestamp - maruenc->adapter_ts);
out_size = frame_bytes * 4;
ret =
- gst_emulenc_encode_audio (emulenc, in_data, frame_bytes, out_size,
- timestamp, duration, emulenc->discont);
+ gst_maruenc_encode_audio (maruenc, in_data, frame_bytes, out_size,
+ timestamp, duration, maruenc->discont);
- gst_adapter_flush (emulenc->adapter, frame_bytes);
+ gst_adapter_flush (maruenc->adapter, frame_bytes);
if (ret != GST_FLOW_OK) {
- GST_DEBUG_OBJECT (emulenc, "Failed to push buffer %d (%s)", ret,
+ GST_DEBUG_OBJECT (maruenc, "Failed to push buffer %d (%s)", ret,
gst_flow_get_name (ret));
}
timestamp += duration;
- emulenc->discont = FALSE;
- avail = gst_adapter_available (emulenc->adapter);
+ maruenc->discont = FALSE;
+ avail = gst_adapter_available (maruenc->adapter);
}
- GST_LOG_OBJECT (emulenc, "%u bytes left in the adapter", avail);
+ GST_LOG_OBJECT (maruenc, "%u bytes left in the adapter", avail);
} else {
#if 0
int coded_bps = av_get_bits_per_sample (oclass->codec->name);
- GST_LOG_OBJECT (emulenc, "coded bps %d, osize %d", coded_bps, osize);
+ GST_LOG_OBJECT (maruenc, "coded bps %d, osize %d", coded_bps, osize);
out_size = in_size / osize;
if (coded_bps) {
}
#endif
in_data = (guint8 *) GST_BUFFER_DATA (buffer);
- ret = gst_emulenc_encode_audio (emulenc, in_data, in_size, out_size,
+ ret = gst_maruenc_encode_audio (maruenc, in_data, in_size, out_size,
timestamp, duration, discont);
gst_buffer_unref (buffer);
if (ret != GST_FLOW_OK) {
- GST_DEBUG_OBJECT (emulenc, "Failed to push buffer %d (%s)", ret,
+ GST_DEBUG_OBJECT (maruenc, "Failed to push buffer %d (%s)", ret,
gst_flow_get_name (ret));
}
}
}
static void
-gst_emulenc_flush_buffers (GstEmulEnc *emulenc, gboolean send)
+gst_maruenc_flush_buffers (GstEmulEnc *maruenc, gboolean send)
{
GstBuffer *outbuf, *inbuf;
gint ret_size = 0;
- GST_DEBUG_OBJECT (emulenc, "flushing buffers with sending %d", send);
+ GST_DEBUG_OBJECT (maruenc, "flushing buffers with sending %d", send);
- if (!emulenc->opened) {
- while (!g_queue_is_empty (emulenc->delay)) {
- gst_buffer_unref (g_queue_pop_head (emulenc->delay));
+ if (!maruenc->opened) {
+ while (!g_queue_is_empty (maruenc->delay)) {
+ gst_buffer_unref (g_queue_pop_head (maruenc->delay));
}
}
#if 0
- while (!g_queue_is_empty (emulenc->delay)) {
- emulenc_setup_working_buf (emulenc);
+ while (!g_queue_is_empty (maruenc->delay)) {
+ gst_maruenc_setup_working_buf (maruenc);
- ret_size = codec_encode_video (emulenc->context,
- emulenc->working_buf, emulenc->working_buf_size, NULL, NULL, 0,
- emulenc->dev);
+ ret_size = codec_encode_video (maruenc->context,
+ maruenc->working_buf, maruenc->working_buf_size, NULL, 0, 0,
+ maruenc->dev);
if (ret_size < 0) {
GstEmulEncClass *oclass =
- (GstEmulEncClass *) (G_OBJECT_GET_CLASS (emulenc));
- GST_WARNING_OBJECT (emulenc,
+ (GstEmulEncClass *) (G_OBJECT_GET_CLASS (maruenc));
+ GST_WARNING_OBJECT (maruenc,
"maru_%senc: failed to flush buffer", oclass->codec->name);
break;
}
- if (emulenc->file && emulenc->context->stats_out) {
- if (fprintf (emulenc->file, "%s", emulenc->context->stats_out) < 0) {
+ if (maruenc->file && maruenc->context->stats_out) {
+ if (fprintf (maruenc->file, "%s", maruenc->context->stats_out) < 0) {
GST_ELEMENT_ERROR (emeulenc, RESOURCE, WRITE,
- (("Could not write to file \"%s\"."), emulenc->filename),
+ (("Could not write to file \"%s\"."), maruenc->filename),
GST_ERROR_SYSTEM);
}
}
- inbuf = g_queue_pop_head (emulenc->delay);
+ inbuf = g_queue_pop_head (maruenc->delay);
outbuf = gst_buffer_new_and_alloc (ret_size);
- memcpy (GST_BUFFER_DATA (outbuf), emulenc->working_buf, ret_size);
+ memcpy (GST_BUFFER_DATA (outbuf), maruenc->working_buf, ret_size);
GST_BUFFER_TIMESTAMP (outbuf) = GST_BUFFER_TIMESTAMP (inbuf);
GST_BUFFER_DURATION (outbuf) = GST_BUFFER_DURATION (inbuf);
- if (!emulenc->context->coded_frame->key_frame) {
+ if (!maruenc->context->coded_frame->key_frame) {
GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DELTA_UNIT);
}
- gst_buffer_set_caps (outbuf, GST_PAD_CAPS (emulenc->srcpad));
+ gst_buffer_set_caps (outbuf, GST_PAD_CAPS (maruenc->srcpad));
gst_buffer_unref (inbuf);
if (send) {
- gst_pad_push (emulenc->srcpad, outbuf);
+ gst_pad_push (maruenc->srcpad, outbuf);
} else {
gst_buffer_unref (outbuf);
}
}
- while (!g_queue_is_empty (emulenc->delay)) {
- gst_buffer_unref (g_queue_pop_head (emulenc->delay));
+ while (!g_queue_is_empty (maruenc->delay)) {
+ gst_buffer_unref (g_queue_pop_head (maruenc->delay));
}
#endif
}
static gboolean
-gst_emulenc_event_video (GstPad *pad, GstEvent *event)
+gst_maruenc_event_video (GstPad *pad, GstEvent *event)
{
- GstEmulEnc *emulenc;
- emulenc = (GstEmulEnc *) gst_pad_get_parent (pad);
+ GstEmulEnc *maruenc;
+ maruenc = (GstEmulEnc *) gst_pad_get_parent (pad);
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_EOS:
- gst_emulenc_flush_buffers (emulenc, TRUE);
+ gst_maruenc_flush_buffers (maruenc, TRUE);
break;
case GST_EVENT_CUSTOM_DOWNSTREAM:
{
if (gst_structure_has_name (s, "GstForceKeyUnit")) {
#if 0
- emulenc->picture->pict_type = FF_I_TYPE;
+ maruenc->picture->pict_type = FF_I_TYPE;
#endif
}
}
break;
}
- return gst_pad_push_event (emulenc->srcpad, event);
+ return gst_pad_push_event (maruenc->srcpad, event);
}
static gboolean
-gst_emulenc_event_src (GstPad *pad, GstEvent *event)
+gst_maruenc_event_src (GstPad *pad, GstEvent *event)
{
- GstEmulEnc *emulenc = (GstEmulEnc *) (GST_PAD_PARENT (pad));
+ GstEmulEnc *maruenc = (GstEmulEnc *) (GST_PAD_PARENT (pad));
gboolean forward = TRUE;
switch (GST_EVENT_TYPE (event)) {
if (gst_structure_has_name (s, "GstForceKeyUnit")) {
#if 0
- GST_OBJECT_LOCK (emulenc);
- emulenc->force_keyframe = TRUE;
- GST_OBJECT_UNLOCK (emulenc);
+ GST_OBJECT_LOCK (maruenc);
+ maruenc->force_keyframe = TRUE;
+ GST_OBJECT_UNLOCK (maruenc);
#endif
forward = FALSE;
gst_event_unref (event);
}
if (forward) {
- return gst_pad_push_event (emulenc->sinkpad, event);
+ return gst_pad_push_event (maruenc->sinkpad, event);
}
return TRUE;
}
GstStateChangeReturn
-gst_emulenc_change_state (GstElement *element, GstStateChange transition)
+gst_maruenc_change_state (GstElement *element, GstStateChange transition)
{
- GstEmulEnc *emulenc = (GstEmulEnc*)element;
+ GstEmulEnc *maruenc = (GstEmulEnc*)element;
GstStateChangeReturn ret;
switch (transition) {
switch (transition) {
case GST_STATE_CHANGE_PAUSED_TO_READY:
- gst_emulenc_flush_buffers (emulenc, FALSE);
- if (emulenc->opened) {
- gst_emul_avcodec_close (emulenc->context, emulenc->dev);
- emulenc->opened = FALSE;
+ gst_maruenc_flush_buffers (maruenc, FALSE);
+ if (maruenc->opened) {
+ gst_maru_avcodec_close (maruenc->context, maruenc->dev);
+ maruenc->opened = FALSE;
}
- gst_adapter_clear (emulenc->adapter);
+ gst_adapter_clear (maruenc->adapter);
#if 0
- if (emulenc->flie) {
- fclose (emulenc->file);
- emulenc->file = NULL;
+ if (maruenc->file) {
+ fclose (maruenc->file);
+ maruenc->file = NULL;
}
#endif
- if (emulenc->working_buf) {
- g_free (emulenc->working_buf);
- emulenc->working_buf = NULL;
+ if (maruenc->working_buf) {
+ g_free (maruenc->working_buf);
+ maruenc->working_buf = NULL;
}
break;
default:
}
gboolean
-gst_emulenc_register (GstPlugin *plugin, GList *element)
+gst_maruenc_register (GstPlugin *plugin, GList *element)
{
GTypeInfo typeinfo = {
sizeof (GstEmulEncClass),
- (GBaseInitFunc) gst_emulenc_base_init,
+ (GBaseInitFunc) gst_maruenc_base_init,
NULL,
- (GClassInitFunc) gst_emulenc_class_init,
+ (GClassInitFunc) gst_maruenc_class_init,
NULL,
NULL,
sizeof (GstEmulEnc),
0,
- (GInstanceInitFunc) gst_emulenc_init,
+ (GInstanceInitFunc) gst_maruenc_init,
};
GType type;
type = g_type_from_name (type_name);
if (!type) {
type = g_type_register_static (GST_TYPE_ELEMENT, type_name, &typeinfo, 0);
- g_type_set_qdata (type, GST_EMULENC_PARAMS_QDATA, (gpointer) codec);
+ g_type_set_qdata (type, GST_MARUENC_PARAMS_QDATA, (gpointer) codec);
}
if (!gst_element_register (plugin, type_name, rank, type)) {
*
*/
-#include "gstemulcommon.h"
-#include "gstemulapi.h"
-#include "gstemulapi2.h"
-#include "gstemuldev.h"
+#include "gstmaru.h"
+#include "gstmaruinterface.h"
+#include "gstmarumem.h"
+#include "gstmarudevice.h"
extern int device_fd;
extern gpointer device_mem;
uint32_t mem_offset;
} CodecHeader;
+#define SMALL_BUFFER (256 * 1024)
+#define MEDIUM_BUFFER (2 * 1024 * 1024)
+#define LARGE_BUFFER (4 * 1024 * 1024)
+
+#define CODEC_META_DATA_SIZE 256
+
static int
_codec_header (int32_t api_index, uint32_t mem_offset, uint8_t *device_buf)
{
}
}
-#define SMALL_BUFFER (256 * 1024)
-#define MEDIUM_BUFFER (2 * 1024 * 1024)
-#define LARGE_BUFFER (4 * 1024 * 1024)
-
static struct mem_info
secure_device_mem (guint buf_size)
{
CODEC_LOG (DEBUG, "enter: %s\n", __func__);
if (buf_size < SMALL_BUFFER) {
- cmd = CODEC_CMD_S_SECURE_BUFFER;
+ cmd = CODEC_CMD_SECURE_SMALL_BUFFER;
CODEC_LOG (DEBUG, "small buffer size\n");
} else if (buf_size < MEDIUM_BUFFER) {
// HD Video(2MB)
- cmd = CODEC_CMD_M_SECURE_BUFFER;
+ cmd = CODEC_CMD_SECURE_MEDIUM_BUFFER;
CODEC_LOG (DEBUG, "HD buffer size\n");
} else {
// FULL HD Video(4MB)
- cmd = CODEC_CMD_L_SECURE_BUFFER;
+ cmd = CODEC_CMD_SECURE_LARGE_BUFFER;
CODEC_LOG (DEBUG, "FULL HD buffer size\n");
}
CODEC_LOG (DEBUG, "enter: %s\n", __func__);
CODEC_LOG (DEBUG, "release device_mem start: %p, offset: 0x%x\n", start, offset);
- ret = ioctl (device_fd, CODEC_CMD_RELEASE_MEMORY, &offset);
+ ret = ioctl (device_fd, CODEC_CMD_RELEASE_BUFFER, &offset);
if (ret < 0) {
CODEC_LOG (ERR, "failed to release buffer\n");
}
GST_ERROR ("failed to get context index\n");
return -1;
}
- CODEC_LOG (INFO, "get context index: %d\n", ctx->index);
+ CODEC_LOG (DEBUG, "get context index: %d\n", ctx->index);
meta_offset = (ctx->index - 1) * CODEC_META_DATA_SIZE;
CODEC_LOG (DEBUG,
_codec_init_meta_from (ctx, codec->media_type, mmapbuf + meta_offset + size);
ctx->codec= codec;
- CODEC_LOG (DEBUG, "leave: %s, opened: %d\n", __func__, opened);
+ CODEC_LOG (DEBUG, "opened: %d\n", opened);
+
+ CODEC_LOG (DEBUG, "leave: %s\n", __func__);
return opened;
}
return -1;
}
- ret = ioctl (fd, CODEC_CMD_S_SECURE_BUFFER, &mem_offset);
+ ret = ioctl (fd, CODEC_CMD_SECURE_SMALL_BUFFER, &mem_offset);
if (ret < 0) {
CODEC_LOG (ERR,
"decode_audio. failed to get available memory to write inbuf\n");
return -1;
}
-// CODEC_LOG (INFO, "write, decode_video. mem_offset = 0x%x\n", mem_offset);
+ CODEC_LOG (DEBUG, "decode_video. mem_offset = 0x%x\n", mem_offset);
meta_offset = (ctx->index - 1) * CODEC_META_DATA_SIZE;
- CODEC_LOG (DEBUG, "decode_video. meta mem_offset = 0x%x\n", meta_offset);
+ CODEC_LOG (DEBUG, "decode_video. meta_offset = 0x%x\n", meta_offset);
// size = _codec_header (CODEC_DECODE_VIDEO, mem_offset, mmapbuf + meta_offset);
size = 8;
uint32_t mem_offset = 0;
CODEC_LOG (DEBUG, "require to use medium size of memory\n");
- ret = ioctl (fd, CODEC_CMD_REQ_FROM_MEDIUM_MEMORY, &mem_offset);
+ ret = ioctl (fd, CODEC_CMD_GET_DATA_FROM_MEDIUM_BUFFER, &mem_offset);
if (ret < 0) {
return;
}
memcpy (pict, mmapbuf + mem_offset, pict_size);
- ret = ioctl(fd, CODEC_CMD_RELEASE_MEMORY, &mem_offset);
+ ret = ioctl(fd, CODEC_CMD_RELEASE_BUFFER, &mem_offset);
if (ret < 0) {
CODEC_LOG (ERR, "failed release used memory\n");
}
uint32_t mem_offset = 0;
CODEC_LOG (DEBUG, "require to use large size of memory\n");
- ret = ioctl (fd, CODEC_CMD_REQ_FROM_LARGE_MEMORY, &mem_offset);
+ ret = ioctl (fd, CODEC_CMD_GET_DATA_FROM_LARGE_BUFFER, &mem_offset);
if (ret < 0) {
return;
}
memcpy (pict, mmapbuf + mem_offset, pict_size);
- ret = ioctl(fd, CODEC_CMD_RELEASE_MEMORY, &mem_offset);
+ ret = ioctl(fd, CODEC_CMD_RELEASE_BUFFER, &mem_offset);
if (ret < 0) {
CODEC_LOG (ERR, "failed release used memory\n");
}
return -1;
}
- ret = ioctl (fd, CODEC_CMD_S_SECURE_BUFFER, &mem_offset);
+ ret = ioctl (fd, CODEC_CMD_SECURE_SMALL_BUFFER, &mem_offset);
if (ret < 0) {
CODEC_LOG (ERR,
"decode_audio. failed to get available memory to write inbuf\n");
return -1;
}
-// CODEC_LOG (INFO, "decode audio1 mem_offset = 0x%x\n", mem_offset);
+ CODEC_LOG (DEBUG, "decode audio. mem_offset = 0x%x\n", mem_offset);
meta_offset = (ctx->index - 1) * CODEC_META_DATA_SIZE;
CODEC_LOG (DEBUG, "decode_audio. meta_offset = 0x%x\n", meta_offset);
dev->mem_info.offset = mem_offset;
_codec_write_to_qemu (ctx->index, CODEC_DECODE_AUDIO, mem_offset, fd);
- ret = ioctl (fd, CODEC_CMD_REQ_FROM_SMALL_MEMORY, &mem_offset);
+ ret = ioctl (fd, CODEC_CMD_GET_DATA_FROM_SMALL_BUFFER, &mem_offset);
if (ret < 0) {
return -1;
}
-// CODEC_LOG (INFO, "decode audio2. mem_offset = 0x%x\n", mem_offset);
len =
_codec_decode_audio_meta_from (&ctx->audio, have_data, mmapbuf + meta_offset + size);
}
memset(mmapbuf + mem_offset, 0x00, sizeof(len));
- ret = ioctl(fd, CODEC_CMD_RELEASE_MEMORY, &mem_offset);
+ ret = ioctl(fd, CODEC_CMD_RELEASE_BUFFER, &mem_offset);
if (ret < 0) {
CODEC_LOG (ERR, "failed release used memory\n");
}
if (in_size < SMALL_BUFFER) {
CODEC_LOG (DEBUG, "use small size of buffer\n");
- ret = ioctl (fd, CODEC_CMD_S_SECURE_BUFFER, &mem_offset);
+ ret = ioctl (fd, CODEC_CMD_SECURE_SMALL_BUFFER, &mem_offset);
if (ret < 0) {
CODEC_LOG (ERR, "failed to small size of buffer.\n");
return -1;
} else if (in_size < MEDIUM_BUFFER) {
CODEC_LOG (DEBUG, "use medium size of buffer\n");
- ret = ioctl (fd, CODEC_CMD_M_SECURE_BUFFER, &mem_offset);
+ ret = ioctl (fd, CODEC_CMD_SECURE_MEDIUM_BUFFER, &mem_offset);
if (ret < 0) {
CODEC_LOG (ERR, "failed to small size of buffer.\n");
return -1;
}
} else {
CODEC_LOG (DEBUG, "use large size of buffer\n");
- ret = ioctl (fd, CODEC_CMD_L_SECURE_BUFFER, &mem_offset);
+ ret = ioctl (fd, CODEC_CMD_SECURE_LARGE_BUFFER, &mem_offset);
if (ret < 0) {
CODEC_LOG (ERR, "failed to large size of buffer.\n");
return -1;
_codec_write_to_qemu (ctx->index, CODEC_ENCODE_VIDEO, mem_offset, fd);
#ifndef DIRECT_BUFFER
- ret = ioctl (fd, CODEC_CMD_REQ_FROM_SMALL_MEMORY, &mem_offset);
+ ret = ioctl (fd, CODEC_CMD_GET_DATA_FROM_SMALL_BUFFER, &mem_offset);
if (ret < 0) {
return -1;
}
#endif
#if 1
- ret = ioctl(fd, CODEC_CMD_RELEASE_MEMORY, &mem_offset);
+ ret = ioctl(fd, CODEC_CMD_RELEASE_BUFFER, &mem_offset);
if (ret < 0) {
CODEC_LOG (ERR, "failed release used memory\n");
}
return -1;
}
- ret = ioctl (fd, CODEC_CMD_S_SECURE_BUFFER, &mem_offset);
+ ret = ioctl (fd, CODEC_CMD_SECURE_SMALL_BUFFER, &mem_offset);
if (ret < 0) {
return -1;
}
dev->mem_info.offset = mem_offset;
_codec_write_to_qemu (ctx->index, CODEC_ENCODE_AUDIO, mem_offset, fd);
- ret = ioctl (fd, CODEC_CMD_REQ_FROM_SMALL_MEMORY, &mem_offset);
+ ret = ioctl (fd, CODEC_CMD_GET_DATA_FROM_SMALL_BUFFER, &mem_offset);
if (ret < 0) {
return -1;
}
len = _codec_encode_audio_outbuf (out_buf, mmapbuf + mem_offset);
memset(mmapbuf + mem_offset, 0x00, sizeof(len));
- ret = ioctl(fd, CODEC_CMD_RELEASE_MEMORY, &mem_offset);
+ ret = ioctl(fd, CODEC_CMD_RELEASE_BUFFER, &mem_offset);
if (ret < 0) {
return -1;
}
*
*/
-#ifndef __GST_EMUL_API_H__
-#define __GST_EMUL_API_H__
+#ifndef __GST_MARU_INTERFACE_H__
+#define __GST_MARU_INTERFACE_H__
-#include "gstemulcommon.h"
+#include "gstmaru.h"
int
codec_init (CodecContext *ctx, CodecElement *codec, CodecDevice *dev);
*
*/
-#include "gstemulapi2.h"
+#include "gstmarumem.h"
/*
* codec data such as codec name, longname, media type and etc.
size = _codec_info_data (codec, device_buf);
+ if (codec) { /* NOTE(review): guard is moot — codec->media_type is read unconditionally below; if codec can be NULL, bail out here instead */
+ CODEC_LOG (INFO, "name: %s, media type: %s\n",
+ codec->name, codec->media_type ? "AUDIO" : "VIDEO");
+ }
+
if (codec->media_type == AVMEDIA_TYPE_AUDIO) {
- CODEC_LOG (INFO,
- "before init. audio sample_fmt: %d\n", ctx->audio.sample_fmt);
+ CODEC_LOG (DEBUG,
+ "before init. audio sample_fmt: %d\n", ctx->audio.sample_fmt);
+ CODEC_LOG (DEBUG,
+ "before init. audio block_align: %d\n", ctx->audio.block_align);
}
CODEC_LOG (DEBUG, "init. write data to qemu, size: %d\n", size);
_codec_encode_audio_meta_to (int max_size, int in_size, uint8_t *device_buf)
{
int size = 0;
-
+
CODEC_LOG (DEBUG, "encode_audio. write data to device.\n");
memcpy (device_buf, &in_size, sizeof(in_size));
*
*/
-#include "gstemulcommon.h"
+#ifndef __GST_MARU_MEM_H__
+#define __GST_MARU_MEM_H__
+
+#include "gstmaru.h"
void _codec_init_meta_to (CodecContext *ctx, CodecElement *codec, uint8_t *device_buf);
void _codec_encode_video_inbuf (uint8_t *in_buf, int in_size,
uint8_t *device_buf);
-// int _codec_encode_video_outbuf (uint8_t *out_buf, uint8_t *device_buf);
void _codec_encode_video_outbuf (int len, uint8_t *outbuf, uint8_t *device_buf);
void _codec_encode_audio_meta_to (int max_size, int in_size, uint8_t *device_buf);
void _codec_encode_audio_inbuf (uint8_t *in_buf, int in_size, uint8_t *device_buf);
int _codec_encode_audio_outbuf (uint8_t *out_buf, uint8_t *device_buf);
+
+#endif
*
*/
-#include "gstemulutils.h"
+#include "gstmaruutils.h"
#include <gst/audio/multichannel.h>
#include <gst/pbutils/codec-utils.h>
gint
-gst_emul_smpfmt_depth (int smp_fmt)
+gst_maru_smpfmt_depth (int smp_fmt)
{
gint depth = -1;
}
GstCaps*
-gst_emul_codectype_to_video_caps (CodecContext *ctx, const char *name,
+gst_maru_codectype_to_video_caps (CodecContext *ctx, const char *name,
gboolean encode, CodecElement *codec)
{
GstCaps *caps;
ctx, name, encode, ctx->video.pix_fmt);
if (ctx) {
- caps = gst_emul_pixfmt_to_caps (ctx->video.pix_fmt, ctx, name);
+ caps = gst_maru_pixfmt_to_caps (ctx->video.pix_fmt, ctx, name);
} else {
GstCaps *temp;
enum PixelFormat i;
- CodecContext ctx = { 0 };
+ CodecContext ctx = { 0 }; /* keep zero-init: &ctx is passed to gst_maru_pixfmt_to_caps below */
caps = gst_caps_new_empty ();
for (i = 0; i <= PIX_FMT_NB; i++) {
- temp = gst_emul_pixfmt_to_caps (i, encode ? &ctx : NULL, name);
+ temp = gst_maru_pixfmt_to_caps (i, encode ? &ctx : NULL, name);
if (temp != NULL) {
gst_caps_append (caps, temp);
}
}
GstCaps *
-gst_emul_codectype_to_audio_caps (CodecContext *ctx, const char *name,
+gst_maru_codectype_to_audio_caps (CodecContext *ctx, const char *name,
gboolean encode, CodecElement *codec)
{
GstCaps *caps = NULL;
ctx, name, encode, codec);
if (ctx) {
- caps = gst_emul_smpfmt_to_caps (ctx->audio.sample_fmt, ctx, name);
+ caps = gst_maru_smpfmt_to_caps (ctx->audio.sample_fmt, ctx, name);
#if 1
} else if (codec && codec->sample_fmts[0] != -1){
GstCaps *temp;
caps = gst_caps_new_empty ();
for (i = 0; codec->sample_fmts[i] != -1; i++) {
temp =
- gst_emul_smpfmt_to_caps (codec->sample_fmts[i], ctx, name);
+ gst_maru_smpfmt_to_caps (codec->sample_fmts[i], ctx, name);
if (temp != NULL) {
gst_caps_append (caps, temp);
}
ctx.audio.channels = -1;
caps = gst_caps_new_empty ();
for (i = 0; i <= SAMPLE_FMT_DBL; i++) {
- temp = gst_emul_smpfmt_to_caps (i, encode ? &ctx : NULL, name);
+ temp = gst_maru_smpfmt_to_caps (i, encode ? &ctx : NULL, name);
if (temp != NULL) {
gst_caps_append (caps, temp);
}
}
GstCaps*
-gst_emul_codectype_to_caps (int media_type, CodecContext *ctx,
+gst_maru_codectype_to_caps (int media_type, CodecContext *ctx,
const char *name, gboolean encode)
{
GstCaps *caps;
switch (media_type) {
case AVMEDIA_TYPE_VIDEO:
caps =
- gst_emul_codectype_to_video_caps (ctx, name, encode, NULL);
+ gst_maru_codectype_to_video_caps (ctx, name, encode, NULL);
break;
case AVMEDIA_TYPE_AUDIO:
caps =
- gst_emul_codectype_to_audio_caps (ctx, name, encode, NULL);
+ gst_maru_codectype_to_audio_caps (ctx, name, encode, NULL);
break;
default:
caps = NULL;
}
void
-gst_emul_caps_to_pixfmt (const GstCaps *caps, CodecContext *ctx, gboolean raw)
+gst_maru_caps_to_pixfmt (const GstCaps *caps, CodecContext *ctx, gboolean raw)
{
GstStructure *str;
const GValue *fps;
}
void
-gst_emul_caps_to_smpfmt (const GstCaps *caps, CodecContext *ctx, gboolean raw)
+gst_maru_caps_to_smpfmt (const GstCaps *caps, CodecContext *ctx, gboolean raw)
{
GstStructure *str;
gint depth = 0, width = 0, endianness = 0;
}
void
-gst_emul_caps_with_codecname (const char *name, int media_type,
+gst_maru_caps_with_codecname (const char *name, int media_type,
const GstCaps *caps, CodecContext *ctx)
{
GstStructure *structure;
switch (media_type) {
case AVMEDIA_TYPE_VIDEO:
- gst_emul_caps_to_pixfmt (caps, ctx, FALSE);
+ gst_maru_caps_to_pixfmt (caps, ctx, FALSE);
// get_palette
break;
case AVMEDIA_TYPE_AUDIO:
- gst_emul_caps_to_smpfmt (caps, ctx, FALSE);
+ gst_maru_caps_to_smpfmt (caps, ctx, FALSE);
break;
default:
break;
}
+}
+
+void
+gst_maru_caps_to_codecname (const GstCaps *caps, gchar *codec_name, CodecContext *context)
+{
+ const gchar *mimetype;
+ const GstStructure *str;
+
+
+ str = gst_caps_get_structure (caps, 0);
+
+ mimetype = gst_structure_get_name (str);
+ if (!strcmp (mimetype, "video/x-wmv")) {
+ gint wmvversion = 0;
+
+ if (gst_structure_get_int (str, "wmvversion", &wmvversion)) {
+ switch (wmvversion) {
+ case 1:
+ g_strlcpy(codec_name, "wmv1", 32);
+ break;
+ case 2:
+ g_strlcpy(codec_name, "wmv2", 32);
+ break;
+ case 3:
+ {
+ guint32 fourcc;
+
+ g_strlcpy(codec_name, "wmv3", 32);
+
+ if (gst_structure_get_fourcc (str, "format", &fourcc)) {
+ if ((fourcc == GST_MAKE_FOURCC ('W', 'V', 'C', '1')) ||
+ (fourcc == GST_MAKE_FOURCC ('W', 'M', 'V', 'A'))) {
+ g_strlcpy(codec_name, "vc1", 32);
+ }
+ }
+ }
+ break;
+ }
+ }
+ }
+
+#if 0 // check for other types if needed.
+ } else if () {
+ }
+#endif
+
+#if 0
+ if (context != NULL) {
+ if (video == TRUE) {
+ context->codec_type = CODEC_TYPE_VIDEO;
+ } else if (audio == TRUE) {
+ context->codec_type = CODEC_TYPE_AUDIO;
+ } else {
+ context->codec_type = CODEC_TYPE_UNKNOWN;
+ }
+ context->codec_id = id;
+ gst_maru_caps_with_codecname (name, context->codec_type, caps, context);
+ }
+#endif
+
+ if (codec_name != NULL) {
+ GST_DEBUG ("The %s belongs to the caps %" GST_PTR_FORMAT, codec_name, caps);
+ } else {
+ GST_WARNING ("Couldn't figure out the name for caps %" GST_PTR_FORMAT, caps);
+ }
}
void
-gst_emul_caps_with_codectype (int media_type, const GstCaps *caps, CodecContext *ctx)
+gst_maru_caps_with_codectype (int media_type, const GstCaps *caps, CodecContext *ctx)
{
if (ctx == NULL) {
return;
switch (media_type) {
case AVMEDIA_TYPE_VIDEO:
- gst_emul_caps_to_pixfmt (caps, ctx, TRUE);
+ gst_maru_caps_to_pixfmt (caps, ctx, TRUE);
break;
case AVMEDIA_TYPE_AUDIO:
- gst_emul_caps_to_smpfmt (caps, ctx, TRUE);
+ gst_maru_caps_to_smpfmt (caps, ctx, TRUE);
break;
default:
break;
}
GstCaps *
-gst_emul_video_caps_new (CodecContext *ctx, const char *name,
+gst_maru_video_caps_new (CodecContext *ctx, const char *name,
const char *mimetype, const char *fieldname, ...)
{
GstStructure *structure = NULL;
}
GstCaps *
-gst_emul_audio_caps_new (CodecContext *ctx, const char *name,
+gst_maru_audio_caps_new (CodecContext *ctx, const char *name,
const char *mimetype, const char *fieldname, ...)
{
GstStructure *structure = NULL;
}
GstCaps *
-gst_emul_pixfmt_to_caps (enum PixelFormat pix_fmt, CodecContext *ctx, const char *name)
+gst_maru_pixfmt_to_caps (enum PixelFormat pix_fmt, CodecContext *ctx, const char *name)
{
GstCaps *caps = NULL;
if (bpp != 0) {
if (r_mask != 0) {
if (a_mask) {
- caps = gst_emul_video_caps_new (ctx, name, "video/x-raw-rgb",
+ caps = gst_maru_video_caps_new (ctx, name, "video/x-raw-rgb",
"bpp", G_TYPE_INT, bpp,
"depth", G_TYPE_INT, depth,
"red_mask", G_TYPE_INT, r_mask,
"alpha_mask", G_TYPE_INT, a_mask,
"endianness", G_TYPE_INT, endianness, NULL);
} else {
- caps = gst_emul_video_caps_new (ctx, name, "video/x-raw-rgb",
+ caps = gst_maru_video_caps_new (ctx, name, "video/x-raw-rgb",
"bpp", G_TYPE_INT, bpp,
"depth", G_TYPE_INT, depth,
"red_mask", G_TYPE_INT, r_mask,
"endianness", G_TYPE_INT, endianness, NULL);
}
} else {
- caps = gst_emul_video_caps_new (ctx, name, "video/x-raw-rgb",
+ caps = gst_maru_video_caps_new (ctx, name, "video/x-raw-rgb",
"bpp", G_TYPE_INT, bpp,
"depth", G_TYPE_INT, depth,
"endianness", G_TYPE_INT, endianness, NULL);
}
}
} else if (fmt) {
- caps = gst_emul_video_caps_new (ctx, name, "video/x-raw-yuv",
+ caps = gst_maru_video_caps_new (ctx, name, "video/x-raw-yuv",
"format", GST_TYPE_FOURCC, fmt, NULL);
}
}
}
GstCaps *
-gst_emul_smpfmt_to_caps (int8_t sample_fmt, CodecContext *ctx, const char *name)
+gst_maru_smpfmt_to_caps (int8_t sample_fmt, CodecContext *ctx, const char *name)
{
GstCaps *caps = NULL;
if (bpp) {
if (integer) {
- caps = gst_emul_audio_caps_new (ctx, name, "audio/x-raw-int",
+ caps = gst_maru_audio_caps_new (ctx, name, "audio/x-raw-int",
"signed", G_TYPE_BOOLEAN, signedness,
"endianness", G_TYPE_INT, G_BYTE_ORDER,
"width", G_TYPE_INT, bpp, "depth", G_TYPE_INT, bpp, NULL);
} else {
- caps = gst_emul_audio_caps_new (ctx, name, "audio/x-raw-float",
+ caps = gst_maru_audio_caps_new (ctx, name, "audio/x-raw-float",
"endianness", G_TYPE_INT, G_BYTE_ORDER,
"width", G_TYPE_INT, bpp, NULL);
}
}
GstCaps *
-gst_emul_codecname_to_caps (const char *name, CodecContext *ctx, gboolean encode)
+gst_maru_codecname_to_caps (const char *name, CodecContext *ctx, gboolean encode)
{
GstCaps *caps = NULL;
GST_LOG ("codec: %s, context: %p, encode: %d", name, ctx, encode);
if (strcmp (name, "mpegvideo") == 0) {
- caps = gst_emul_video_caps_new (ctx, name, "video/mpeg",
+ caps = gst_maru_video_caps_new (ctx, name, "video/mpeg",
"mpegversion", G_TYPE_INT, 1,
"systemstream", G_TYPE_BOOLEAN, FALSE, NULL);
} else if (strcmp (name, "h263") == 0) {
if (encode) {
- caps = gst_emul_video_caps_new (ctx, name, "video/x-h263",
+ caps = gst_maru_video_caps_new (ctx, name, "video/x-h263",
"variant", G_TYPE_STRING, "itu", NULL);
} else {
- caps = gst_emul_video_caps_new (ctx, "none", "video/x-h263",
+ caps = gst_maru_video_caps_new (ctx, "none", "video/x-h263",
"variant", G_TYPE_STRING, "itu", NULL);
}
} else if (strcmp (name, "h263p") == 0) {
- caps = gst_emul_video_caps_new (ctx, name, "video/x-h263",
+ caps = gst_maru_video_caps_new (ctx, name, "video/x-h263",
"variant", G_TYPE_STRING, "itu",
"h263version", G_TYPE_STRING, "h263p", NULL);
#if 0
// TODO
switch (ctx->codec_tag) {
case GST_MAKE_FOURCC ('D', 'I', 'V', 'X'):
- caps = gst_emul_video_caps_new (ctx, name, "video/x-divx",
+ caps = gst_maru_video_caps_new (ctx, name, "video/x-divx",
"divxversion", G_TYPE_INT, 5, NULL);
break;
case GST_MAKE_FOURCC ('m', 'p', '4', 'v'):
default:
- caps = gst_emul_video_caps_new (ctx, name, "video/mpeg",
+ caps = gst_maru_video_caps_new (ctx, name, "video/mpeg",
"systemstream", G_TYPE_BOOLEAN, FALSE,
"mpegversion", G_TYPE_INT, 4, NULL);
break;
}
} else {
- caps = gst_emul_video_caps_new (ctx, name, "video/mpeg",
+ caps = gst_maru_video_caps_new (ctx, name, "video/mpeg",
"mpegversion", G_TYPE_INT, 4,
"systemstream", G_TYPE_BOOLEAN, FALSE, NULL);
if (encode) {
- caps = gst_emul_video_caps_new (ctx, name, "video/mpeg",
+ caps = gst_maru_video_caps_new (ctx, name, "video/mpeg",
"mpegversion", G_TYPE_INT, 4,
"systemstream", G_TYPE_BOOLEAN, FALSE, NULL);
} else {
- gst_caps_append (caps, gst_emul_video_caps_new (ctx, name,
+ gst_caps_append (caps, gst_maru_video_caps_new (ctx, name,
"video/x-divx", "divxversion", GST_TYPE_INT_RANGE, 4, 5, NULL));
- gst_caps_append (caps, gst_emul_video_caps_new (ctx, name,
+ gst_caps_append (caps, gst_maru_video_caps_new (ctx, name,
"video/x-xvid", NULL));
- gst_caps_append (caps, gst_emul_video_caps_new (ctx, name,
+ gst_caps_append (caps, gst_maru_video_caps_new (ctx, name,
"video/x-3ivx", NULL));
}
}
} else if (strcmp (name, "h264") == 0) {
- caps = gst_emul_video_caps_new (ctx, name, "video/x-h264", NULL);
+ caps = gst_maru_video_caps_new (ctx, name, "video/x-h264", NULL);
} else if (g_str_has_prefix(name, "msmpeg4")) {
// msmpeg4v1,m msmpeg4v2, msmpeg4
gint version;
version = 43;
}
- caps = gst_emul_video_caps_new (ctx, name, "video/x-msmpeg",
+ caps = gst_maru_video_caps_new (ctx, name, "video/x-msmpeg",
"msmpegversion", G_TYPE_INT, version, NULL);
if (!encode && !strcmp (name, "msmpeg4")) {
- gst_caps_append (caps, gst_emul_video_caps_new (ctx, name,
+ gst_caps_append (caps, gst_maru_video_caps_new (ctx, name,
"video/x-divx", "divxversion", G_TYPE_INT, 3, NULL));
}
} else if (strcmp (name, "wmv3") == 0) {
- caps = gst_emul_video_caps_new (ctx, name, "video/x-wmv",
+ caps = gst_maru_video_caps_new (ctx, name, "video/x-wmv",
"wmvversion", G_TYPE_INT, 3, NULL);
} else if (strcmp (name, "vc1") == 0) {
- caps = gst_emul_video_caps_new (ctx, name, "video/x-wmv",
+ caps = gst_maru_video_caps_new (ctx, name, "video/x-wmv",
"wmvversion", G_TYPE_INT, 3, "format", GST_TYPE_FOURCC,
GST_MAKE_FOURCC ('W', 'V', 'C', '1'), NULL);
#if 0
mime_type = g_strdup ("video/x-vp8");
#endif
} else if (strcmp (name, "aac") == 0) {
- caps = gst_emul_audio_caps_new (ctx, name, "audio/mpeg", NULL);
+ caps = gst_maru_audio_caps_new (ctx, name, "audio/mpeg", NULL);
if (!encode) {
GValue arr = { 0, };
GValue item = { 0, };
}
}
} else if (strcmp (name, "ac3") == 0) {
- caps = gst_emul_audio_caps_new (ctx, name, "audio/x-ac3", NULL);
+ caps = gst_maru_audio_caps_new (ctx, name, "audio/x-ac3", NULL);
} else if (strcmp (name, "mp3") == 0) {
if (encode) {
- caps = gst_emul_audio_caps_new (ctx, name, "audio/mpeg",
+ caps = gst_maru_audio_caps_new (ctx, name, "audio/mpeg",
"mpegversion", G_TYPE_INT, 1,
"layer", GST_TYPE_INT_RANGE, 1, 3, NULL);
} else {
gchar *mime_type;
mime_type = g_strdup_printf ("audio/x-gst_ff-%s", name);
- caps = gst_emul_audio_caps_new (ctx, name, mime_type, NULL);
+ caps = gst_maru_audio_caps_new (ctx, name, mime_type, NULL);
if (mime_type) {
g_free(mime_type);
if (strcmp (name, "wmav2") == 0) {
version = 2;
}
- caps = gst_emul_audio_caps_new (ctx, name, "audio/x-wma", "wmaversion",
+ caps = gst_maru_audio_caps_new (ctx, name, "audio/x-wma", "wmaversion",
G_TYPE_INT, version, "block_align", GST_TYPE_INT_RANGE, 0, G_MAXINT,
"bitrate", GST_TYPE_INT_RANGE, 0, G_MAXINT, NULL);
} else {
static PixFmtInfo pix_fmt_info[PIX_FMT_NB];
void
-gst_emul_init_pix_fmt_info (void)
+gst_maru_init_pix_fmt_info (void)
{
pix_fmt_info[PIX_FMT_YUV420P].x_chroma_shift = 1,
pix_fmt_info[PIX_FMT_YUV420P].y_chroma_shift = 1;
}
int
-gst_emul_avpicture_size (int pix_fmt, int width, int height)
+gst_maru_avpicture_size (int pix_fmt, int width, int height)
{
int size, w2, h2, size2;
int stride, stride2;
}
int
-gst_emul_align_size (int buf_size)
+gst_maru_align_size (int buf_size)
{
int i, align_size;
*
*/
-#ifndef __GST_EMUL_UTIL_H__
-#define __GST_EMUL_UTIL_H__
+#ifndef __GST_MARU_UTIL_H__
+#define __GST_MARU_UTIL_H__
-#include "gstemulcommon.h"
+#include "gstmaru.h"
// FFmpeg
#include "audioconvert.h"
#define CH_LAYOUT_7POINT1_WIDE AV_CH_LAYOUT_7POINT1_WIDE
#define CH_LAYOUT_STEREO_DOWNMIX AV_CH_LAYOUT_STEREO_DOWNMIX
-GstCaps *gst_emul_codectype_to_video_caps (CodecContext *ctx, const char *name,
+GstCaps *gst_maru_codectype_to_video_caps (CodecContext *ctx, const char *name,
gboolean encode, CodecElement *codec);
-GstCaps *gst_emul_codectype_to_audio_caps (CodecContext *ctx, const char *name,
+GstCaps *gst_maru_codectype_to_audio_caps (CodecContext *ctx, const char *name,
gboolean encode, CodecElement *codec);
-GstCaps *gst_emul_codectype_to_caps (int media_type, CodecContext *ctx,
+GstCaps *gst_maru_codectype_to_caps (int media_type, CodecContext *ctx,
const char *name, gboolean encode);
-void gst_emul_caps_with_codecname (const char *name, int media_type,
+void gst_maru_caps_with_codecname (const char *name, int media_type,
const GstCaps *caps, CodecContext *ctx);
-void gst_emul_caps_with_codectype (int media_type, const GstCaps *caps, CodecContext *ctx);
+void gst_maru_caps_with_codectype (int media_type, const GstCaps *caps, CodecContext *ctx);
-GstCaps *gst_emul_video_caps_new (CodecContext *ctx, const char *name,
+GstCaps *gst_maru_video_caps_new (CodecContext *ctx, const char *name,
const char *mimetype, const char *fieldname, ...);
-GstCaps *gst_emul_audio_caps_new (CodecContext *ctx, const char *name,
+GstCaps *gst_maru_audio_caps_new (CodecContext *ctx, const char *name,
const char *mimetype, const char *fieldname, ...);
-GstCaps *gst_emul_pixfmt_to_caps (enum PixelFormat pix_fmt, CodecContext *ctx, const char *name);
+GstCaps *gst_maru_pixfmt_to_caps (enum PixelFormat pix_fmt, CodecContext *ctx, const char *name);
-GstCaps *gst_emul_smpfmt_to_caps (int8_t sample_fmt, CodecContext *ctx, const char *name);
+GstCaps *gst_maru_smpfmt_to_caps (int8_t sample_fmt, CodecContext *ctx, const char *name);
-GstCaps *gst_emul_codecname_to_caps (const char *name, CodecContext *ctx, gboolean encode);
+GstCaps *gst_maru_codecname_to_caps (const char *name, CodecContext *ctx, gboolean encode);
-void gst_emul_init_pix_fmt_info (void);
+void gst_maru_init_pix_fmt_info (void);
-int gst_emul_avpicture_size (int pix_fmt, int width, int height);
+int gst_maru_avpicture_size (int pix_fmt, int width, int height);
-int gst_emul_align_size (int buf_size);
+int gst_maru_align_size (int buf_size);
-gint gst_emul_smpfmt_depth (int smp_fmt);
+gint gst_maru_smpfmt_depth (int smp_fmt);
#endif