#include <string.h>
#include "mpegutil.h"
+#include "gstvdputils.h"
#include "gstvdpmpegdec.h"
GST_DEBUG_CATEGORY_STATIC (gst_vdp_mpeg_dec_debug);
enum
{
- PROP_0,
- PROP_DISPLAY
+ PROP_0
};
/* the capabilities of the inputs and outputs.
return gst_pad_push (mpeg_dec->src, GST_BUFFER (buffer));
}
+/* Allocate an output GstVdpVideoBuffer from the src pad and, on the first
+ * successful allocation, adopt the buffer's GstVdpDevice and create the
+ * VDPAU decoder for the negotiated profile/size.
+ *
+ * Returns GST_FLOW_OK on success.  On decoder-creation failure the freshly
+ * allocated buffer is released and *outbuf reset, so the caller cannot
+ * leak or accidentally use it. */
+static GstFlowReturn
+gst_vdp_mpeg_dec_alloc_buffer (GstVdpMpegDec * mpeg_dec, GstBuffer ** outbuf)
+{
+  GstFlowReturn ret;
+
+  ret = gst_pad_alloc_buffer_and_set_caps (mpeg_dec->src, 0, 0,
+      GST_PAD_CAPS (mpeg_dec->src), outbuf);
+  if (ret != GST_FLOW_OK)
+    return ret;
+
+  if (!mpeg_dec->device) {
+    GstVdpDevice *device;
+    VdpStatus status;
+
+    /* First buffer: keep a ref on the device downstream handed us. */
+    device = mpeg_dec->device =
+        g_object_ref (GST_VDP_VIDEO_BUFFER (*outbuf)->device);
+
+    status = device->vdp_decoder_create (device->device, mpeg_dec->profile,
+        mpeg_dec->width, mpeg_dec->height, 2, &mpeg_dec->decoder);
+    if (status != VDP_STATUS_OK) {
+      GST_ELEMENT_ERROR (mpeg_dec, RESOURCE, READ,
+          ("Could not create vdpau decoder"),
+          ("Error returned from vdpau was: %s",
+              device->vdp_get_error_string (status)));
+      /* Don't leak the buffer we just allocated: the caller only sees the
+       * error return and never unrefs *outbuf. */
+      gst_buffer_unref (*outbuf);
+      *outbuf = NULL;
+      ret = GST_FLOW_ERROR;
+    }
+  }
+
+  return ret;
+}
+
static GstFlowReturn
gst_vdp_mpeg_dec_decode (GstVdpMpegDec * mpeg_dec,
GstClockTime timestamp, gint64 size)
info->backward_reference = VDP_INVALID_HANDLE;
}
- if (gst_pad_alloc_buffer_and_set_caps (mpeg_dec->src, 0, 0,
- GST_PAD_CAPS (mpeg_dec->src), &outbuf) != GST_FLOW_OK) {
+ if (gst_vdp_mpeg_dec_alloc_buffer (mpeg_dec, &outbuf) != GST_FLOW_OK) {
gst_adapter_clear (mpeg_dec->adapter);
return GST_FLOW_ERROR;
}
device = GST_VDP_VIDEO_BUFFER (outbuf)->device;
- if (mpeg_dec->decoder == VDP_INVALID_HANDLE) {
- status = device->vdp_decoder_create (device->device, mpeg_dec->profile,
- mpeg_dec->width, mpeg_dec->height, 2, &mpeg_dec->decoder);
- if (status != VDP_STATUS_OK) {
- GST_ELEMENT_ERROR (mpeg_dec, RESOURCE, READ,
- ("Could not create vdpau decoder"),
- ("Error returned from vdpau was: %s",
- device->vdp_get_error_string (status)));
- gst_buffer_unref (outbuf);
- return GST_FLOW_ERROR;
- }
- mpeg_dec->device = g_object_ref (device);
- }
if (info->forward_reference != VDP_INVALID_HANDLE &&
info->picture_coding_type != I_FRAME)
gobject_class->finalize = gst_vdp_mpeg_dec_finalize;
- g_object_class_install_property (gobject_class, PROP_DISPLAY,
- g_param_spec_string ("display", "Display", "X Display name",
- NULL, G_PARAM_READWRITE | G_PARAM_CONSTRUCT));
-
gstelement_class->change_state = gst_vdp_mpeg_dec_change_state;
}
}
GstCaps *
-gst_vdp_video_to_yuv_caps (GstCaps * caps)
+gst_vdp_video_to_yuv_caps (GstCaps * caps, GstVdpDevice * device)
{
GstCaps *new_caps, *allowed_caps, *result;
gint i;
GstStructure *structure;
- const GValue *value;
- GstVdpDevice *device = NULL;
new_caps = gst_caps_new_empty ();
gst_structure_set_name (new_struct, "video/x-raw-yuv");
gst_structure_remove_field (new_struct, "chroma-type");
- gst_structure_remove_field (new_struct, "device");
gst_structure_set (new_struct, "format", GST_TYPE_FOURCC,
GPOINTER_TO_INT (iter->data), NULL);
}
structure = gst_caps_get_structure (caps, 0);
- value = gst_structure_get_value (structure, "device");
- if (value)
- device = g_value_get_object (value);
-
if (device) {
allowed_caps = gst_vdp_get_allowed_yuv_caps (device);
result = gst_caps_intersect (new_caps, allowed_caps);
NULL);
gst_structure_set_name (structure, "video/x-vdpau-video");
- if (device)
- gst_structure_set (structure, "device", G_TYPE_OBJECT, device, NULL);
}
if (device) {
},
};
-GstCaps *gst_vdp_video_to_yuv_caps (GstCaps *caps);
-GstCaps *gst_vdp_yuv_to_video_caps (GstCaps *caps, GstVdpDevice *device);
+GstCaps *gst_vdp_video_to_yuv_caps (GstCaps *caps, GstVdpDevice *device);
+GstCaps *gst_vdp_yuv_to_video_caps (GstCaps *caps, GstVdpDevice *device);
#endif /* _GST_VDP_UTILS_H_ */
\ No newline at end of file
GstCaps *result;
if (direction == GST_PAD_SINK)
- result = gst_vdp_video_to_yuv_caps (caps);
+ result = gst_vdp_video_to_yuv_caps (caps, video_yuv->device);
else if (direction == GST_PAD_SRC)
result = gst_vdp_yuv_to_video_caps (caps, video_yuv->device);
gst_vdp_video_yuv_class_init (GstVdpVideoYUVClass * klass)
{
GObjectClass *gobject_class;
+ GstElementClass *element_class;
GstBaseTransformClass *trans_class;
gobject_class = (GObjectClass *) klass;
VdpVideoSurface surface;
device = GST_VDP_VIDEO_BUFFER (outbuf)->device;
+ if (!yuv_video->device)
+ yuv_video->device = g_object_ref (device);
+
surface = GST_VDP_VIDEO_BUFFER (outbuf)->surface;
switch (yuv_video->format) {
gst_vdp_yuv_video_transform_caps (GstBaseTransform * trans,
GstPadDirection direction, GstCaps * caps)
{
+ GstVdpYUVVideo *yuv_video = GST_VDP_YUV_VIDEO (trans);
GstCaps *result;
if (direction == GST_PAD_SINK) {
- result = gst_vdp_yuv_to_video_caps (caps, NULL);
+ result = gst_vdp_yuv_to_video_caps (caps, yuv_video->device);
} else if (direction == GST_PAD_SRC) {
- result = gst_vdp_video_to_yuv_caps (caps);
+ result = gst_vdp_video_to_yuv_caps (caps, yuv_video->device);
}
GST_LOG ("transformed %" GST_PTR_FORMAT " to %" GST_PTR_FORMAT, caps, result);
return result;
}
+/* GstBaseTransform::start — clear the cached device; it is acquired
+ * lazily from the first output buffer during transform. */
+static gboolean
+gst_vdp_yuv_video_start (GstBaseTransform * trans)
+{
+  GST_VDP_YUV_VIDEO (trans)->device = NULL;
+
+  return TRUE;
+}
+
+/* GstBaseTransform::stop — drop our reference on the cached device.
+ * The pointer is reset to NULL so a later transform_caps() call or a
+ * repeated stop cannot touch a freed object. */
+static gboolean
+gst_vdp_yuv_video_stop (GstBaseTransform * trans)
+{
+  GstVdpYUVVideo *yuv_video = GST_VDP_YUV_VIDEO (trans);
+
+  if (yuv_video->device) {
+    g_object_unref (yuv_video->device);
+    yuv_video->device = NULL;
+  }
+
+  return TRUE;
+}
+
/* GObject vmethod implementations */
static void
gobject_class = (GObjectClass *) klass;
trans_class = (GstBaseTransformClass *) klass;
+ trans_class->start = gst_vdp_yuv_video_start;
+ trans_class->stop = gst_vdp_yuv_video_stop;
trans_class->transform_caps = gst_vdp_yuv_video_transform_caps;
trans_class->transform_size = gst_vdp_yuv_video_transform_size;
trans_class->set_caps = gst_vdp_yuv_video_set_caps;
struct _GstVdpYUVVideo {
  GstBaseTransform trans;
+  /* VDPAU device ref'd from the first output buffer in transform();
+   * NULLed in start(), released in stop(). */
+  GstVdpDevice *device;
+
  guint32 format;
  gint width, height;
};