--- /dev/null
+/* GStreamer\r
+ * Copyright (C) 2008 Michael Smith <msmith@songbirdnest.com>\r
+ *\r
+ * This library is free software; you can redistribute it and/or\r
+ * modify it under the terms of the GNU Library General Public\r
+ * License as published by the Free Software Foundation; either\r
+ * version 2 of the License, or (at your option) any later version.\r
+ *\r
+ * This library is distributed in the hope that it will be useful,\r
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of\r
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU\r
+ * Library General Public License for more details.\r
+ *\r
+ * You should have received a copy of the GNU Library General Public\r
+ * License along with this library; if not, write to the\r
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,\r
+ * Boston, MA 02111-1307, USA.\r
+ */\r
+\r
+#include "dshowvideofakesrc.h"\r
+\r
+// {A0A5CF33-BD0C-4158-9A56-3011DEE3AF6B}
+/* Private class ID for the fake source filter we insert into the DirectShow
+ * graph; it only needs to be unique, it is never COM-registered system-wide. */
+const GUID CLSID_VideoFakeSrc = 
+{ 0xa0a5cf33, 0xbd0c, 0x4158, { 0x9a, 0x56, 0x30, 0x11, 0xde, 0xe3, 0xaf, 0x6b } };
+\r
+/* output pin*/
+/* Output pin that pushes GStreamer buffers into the DirectShow renderer.
+ * All real work happens in PushBuffer(); construction just forwards to the
+ * base output-pin class with a fixed debug/pin name. */
+VideoFakeSrcPin::VideoFakeSrcPin (CBaseFilter *pFilter, CCritSec *sec, HRESULT *hres):
+  CBaseOutputPin("VideoFakeSrcPin", pFilter, sec, hres, L"output")
+{
+}
+
+VideoFakeSrcPin::~VideoFakeSrcPin()
+{
+}
+\r
+/* Enumerate the media types we can offer: exactly one, the type previously
+ * stored via SetMediaType() (derived from the negotiated GStreamer caps). */
+HRESULT VideoFakeSrcPin::GetMediaType(int iPosition, CMediaType *pMediaType)
+{
+  GST_DEBUG ("GetMediaType(%d) called", iPosition);
+  if(iPosition == 0) {
+    *pMediaType = m_MediaType;
+    return S_OK;
+  }
+  
+  /* Only one type is ever available */
+  return VFW_S_NO_MORE_ITEMS;
+}
+\r
+/* This seems to be called to notify us of the actual media type being used,
+ * even though SetMediaType isn't called. How bizarre! */
+/* Accept a media type proposed by the downstream renderer.  Returns S_OK if
+ * compatible with our stored type, S_FALSE otherwise.
+ * NOTE(review): pmt is dereferenced without a NULL check — assumes the base
+ * class never passes NULL; confirm against the base-class sources. */
+HRESULT VideoFakeSrcPin::CheckMediaType(const CMediaType *pmt)
+{
+  GST_DEBUG ("CheckMediaType called: %p", pmt);
+
+  /* The video renderer will request a different stride, which we must accept.
+   * So, we accept arbitrary strides (and do memcpy() to convert if needed),
+   * and require the rest of the media type to match
+   */
+  if (IsEqualGUID(pmt->majortype,m_MediaType.majortype) &&
+      IsEqualGUID(pmt->subtype,m_MediaType.subtype) &&
+      IsEqualGUID(pmt->formattype,m_MediaType.formattype) &&
+      pmt->cbFormat >= m_MediaType.cbFormat)
+  {
+    if (IsEqualGUID(pmt->formattype, FORMAT_VideoInfo)) {
+      VIDEOINFOHEADER *newvh = (VIDEOINFOHEADER *)pmt->pbFormat;
+      VIDEOINFOHEADER *curvh = (VIDEOINFOHEADER *)m_MediaType.pbFormat;
+
+      /* Same source/target rects, compression and height; biWidth (which
+       * doubles as the stride) is allowed to grow, never shrink. */
+      if ((memcmp ((void *)&newvh->rcSource, (void *)&curvh->rcSource, sizeof (RECT)) == 0) &&
+          (memcmp ((void *)&newvh->rcTarget, (void *)&curvh->rcTarget, sizeof (RECT)) == 0) &&
+          (newvh->bmiHeader.biCompression == curvh->bmiHeader.biCompression) &&
+          (newvh->bmiHeader.biHeight == curvh->bmiHeader.biHeight) &&
+          (newvh->bmiHeader.biWidth >= curvh->bmiHeader.biWidth))
+      {
+        GST_DEBUG ("CheckMediaType has same media type, width %d (%d image)", newvh->bmiHeader.biWidth, curvh->bmiHeader.biWidth);
+        
+        /* OK, compatible! */
+        return S_OK;
+      }
+      else {
+        GST_WARNING ("Looked similar, but aren't...");
+      }
+    }
+    
+  }
+  GST_WARNING ("Different media types, FAILING!");
+  return S_FALSE;
+}
+\r
+/* Negotiate the allocator's buffer layout: we need one buffer big enough
+ * for a single video frame of the configured media type.
+ * Fix: the results of SetProperties() and Commit() were previously computed
+ * and then ignored; failures are now propagated to the caller. */
+HRESULT VideoFakeSrcPin::DecideBufferSize (IMemAllocator *pAlloc, ALLOCATOR_PROPERTIES *ppropInputRequest)
+{
+  ALLOCATOR_PROPERTIES properties;
+  GST_DEBUG ("Required allocator properties: %d, %d, %d, %d", 
+        ppropInputRequest->cbAlign, ppropInputRequest->cbBuffer, 
+        ppropInputRequest->cbPrefix, ppropInputRequest->cBuffers);
+
+  ppropInputRequest->cbBuffer = m_SampleSize;
+  ppropInputRequest->cBuffers = 1;
+
+  /* First set the buffer descriptions we're interested in */
+  HRESULT hres = pAlloc->SetProperties(ppropInputRequest, &properties);
+  if (FAILED (hres)) {
+    GST_WARNING ("Failed to set allocator properties: %x", hres);
+    return hres;
+  }
+  GST_DEBUG ("Actual Allocator properties: %d, %d, %d, %d", 
+        properties.cbAlign, properties.cbBuffer, 
+        properties.cbPrefix, properties.cBuffers);
+
+  /* Then actually allocate the buffers */
+  hres = pAlloc->Commit();
+  if (FAILED (hres)) {
+    GST_WARNING ("Failed to commit allocator: %x", hres);
+    return hres;
+  }
+
+  return S_OK;
+}
+\r
+/* IQualityControl::Notify — quality-of-service feedback from the renderer.
+ * We deliberately ignore it (GStreamer does its own QoS upstream), but must
+ * override it because the base class asserts if it is ever reached. */
+STDMETHODIMP
+VideoFakeSrcPin::Notify(IBaseFilter * pSender, Quality q)
+{
+  /* Implementing this usefully is not required, but the base class
+   * has an assertion here... */
+  /* TODO: Map this to GStreamer QOS events? */
+  return E_NOTIMPL;
+}
+\r
+/* Store the media type this pin will advertise/deliver and cache the frame
+ * size derived from it.  CMediaType::Set() takes a deep copy, so the caller
+ * retains ownership of pmt. */
+STDMETHODIMP VideoFakeSrcPin::SetMediaType (AM_MEDIA_TYPE *pmt)
+{
+  m_MediaType.Set (*pmt);
+  m_SampleSize = m_MediaType.GetSampleSize();
+
+  GST_DEBUG ("SetMediaType called. SampleSize is %d", m_SampleSize);
+
+  return S_OK;
+}
+\r
+/* If the destination buffer is a different shape (strides, etc.) from the source\r
+ * buffer, we have to copy. Do that here, for supported video formats.\r
+ *\r
+ * TODO: When possible (when these things DON'T differ), we should buffer-alloc the\r
+ * final output buffer, and not do this copy */\r
+STDMETHODIMP VideoFakeSrcPin::CopyToDestinationBuffer (byte *srcbuf, byte *dstbuf)\r
+{\r
+ VIDEOINFOHEADER *vh = (VIDEOINFOHEADER *)m_MediaType.pbFormat;\r
+ GST_DEBUG ("Rendering a frame");\r
+\r
+ byte *src, *dst;\r
+ int dststride, srcstride, rows;\r
+ guint32 fourcc = vh->bmiHeader.biCompression;\r
+\r
+ /* biHeight is always negative; we don't want that. */\r
+ int height = ABS (vh->bmiHeader.biHeight);\r
+ int width = vh->bmiHeader.biWidth;\r
+\r
+ /* YUY2 is the preferred layout for DirectShow, so we will probably get this\r
+ * most of the time */\r
+ if ((fourcc == GST_MAKE_FOURCC ('Y', 'U', 'Y', '2')) ||\r
+ (fourcc == GST_MAKE_FOURCC ('Y', 'U', 'Y', 'V')) ||\r
+ (fourcc == GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y'))) \r
+ {\r
+ /* Nice and simple */\r
+ int srcstride = GST_ROUND_UP_4 (vh->rcSource.right * 2);\r
+ int dststride = width * 2;\r
+\r
+ for (int i = 0; i < height; i++) {\r
+ memcpy (dstbuf + dststride * i, srcbuf + srcstride * i, srcstride);\r
+ }\r
+ }\r
+ else if (fourcc == GST_MAKE_FOURCC ('Y', 'V', '1', '2')) {\r
+ for (int component = 0; component < 3; component++) {\r
+ // TODO: Get format properly rather than hard-coding it. Use gst_video_* APIs *?\r
+ if (component == 0) {\r
+ srcstride = GST_ROUND_UP_4 (vh->rcSource.right);\r
+ src = srcbuf;\r
+ }\r
+ else {\r
+ srcstride = GST_ROUND_UP_4 ( GST_ROUND_UP_2 (vh->rcSource.right) / 2);\r
+ if (component == 1)\r
+ src = srcbuf + GST_ROUND_UP_4 (vh->rcSource.right) * GST_ROUND_UP_2 (vh->rcSource.bottom);\r
+ else\r
+ src = srcbuf + GST_ROUND_UP_4 (vh->rcSource.right) * GST_ROUND_UP_2 (vh->rcSource.bottom) +\r
+ srcstride * (GST_ROUND_UP_2 (vh->rcSource.bottom) / 2);\r
+ }\r
+\r
+ /* Is there a better way to do this? This is ICK! */\r
+ if (component == 0) {\r
+ dststride = width;\r
+ dst = dstbuf;\r
+ rows = height;\r
+ } else if (component == 1) {\r
+ dststride = width / 2;\r
+ dst = dstbuf + width * height;\r
+ rows = height/2;\r
+ }\r
+ else {\r
+ dststride = width / 2;\r
+ dst = dstbuf + width * height +\r
+ width/2 * height/2;\r
+ rows = height/2;\r
+ }\r
+\r
+ for (int i = 0; i < rows; i++) {\r
+ memcpy (dst + i * dststride, src + i * srcstride, srcstride);\r
+ }\r
+ }\r
+ }\r
+\r
+ return S_OK;\r
+}\r
+\r
+\r
+/* Deliver one GStreamer buffer to the connected DirectShow input pin.
+ * Returns GST_FLOW_OK on success, GST_FLOW_NOT_LINKED if the pin is not
+ * connected, GST_FLOW_ERROR otherwise.
+ * Fix: IMediaSample::GetMediaType() allocates an AM_MEDIA_TYPE that the
+ * caller must release with DeleteMediaType(); it was previously leaked on
+ * every dynamic format change. */
+GstFlowReturn VideoFakeSrcPin::PushBuffer(GstBuffer *buffer)
+{
+  IMediaSample *pSample = NULL;
+
+  byte *data = GST_BUFFER_DATA (buffer);
+  
+  /* TODO: Use more of the arguments here? */
+  HRESULT hres = GetDeliveryBuffer(&pSample, NULL, NULL, 0);
+  if (SUCCEEDED (hres))
+  {
+    BYTE *sample_buffer;
+    AM_MEDIA_TYPE *mediatype;
+
+    pSample->GetPointer(&sample_buffer);
+    pSample->GetMediaType(&mediatype);
+    if (mediatype) {
+      /* The renderer changed the type (usually just the stride). Our
+       * SetMediaType() takes a deep copy, so free the returned struct. */
+      SetMediaType (mediatype);
+      DeleteMediaType (mediatype);
+    }
+
+    if(sample_buffer)
+    {
+      /* Copy to the destination stride. 
+       * This is not just a simple memcpy because of the different strides. 
+       * TODO: optimise for the same-stride case and avoid the copy entirely. 
+       */
+      CopyToDestinationBuffer (data, sample_buffer);
+    }
+
+    pSample->SetDiscontinuity(FALSE); /* Decoded frame; unimportant */
+    pSample->SetSyncPoint(TRUE); /* Decoded frame; always a valid syncpoint */
+    pSample->SetPreroll(FALSE); /* For non-displayed frames. 
+                                   Not used in GStreamer */
+
+    /* Disable synchronising on this sample. We instead let GStreamer handle 
+     * this at a higher level, inside BaseSink. */
+    pSample->SetTime(NULL, NULL);
+
+    hres = Deliver(pSample);
+    pSample->Release();
+
+    if (SUCCEEDED (hres))
+      return GST_FLOW_OK;
+    else if (hres == VFW_E_NOT_CONNECTED)
+      return GST_FLOW_NOT_LINKED;
+    else
+      return GST_FLOW_ERROR;
+  }
+  else {
+    GST_WARNING ("Could not get sample for delivery to sink: %x", hres);
+    return GST_FLOW_ERROR;
+  }
+}
+\r
+/* Flush the downstream pin: begin+end flush back-to-back, discarding any
+ * sample currently queued in the renderer.  Called on GStreamer flush. */
+STDMETHODIMP VideoFakeSrcPin::Flush ()
+{
+  DeliverBeginFlush();
+  DeliverEndFlush();
+  return S_OK;
+}
+\r
+/* Source filter wrapping our single output pin.
+ * Fix: the pin constructor takes a CBaseFilter *, and this class derives
+ * from CBaseFilter, not CSource — the old (CSource *) cast was bogus (it
+ * only worked by accident of object layout).  Pass `this` directly. */
+VideoFakeSrc::VideoFakeSrc() : CBaseFilter("VideoFakeSrc", NULL, &m_critsec, CLSID_VideoFakeSrc)
+{
+  HRESULT hr = S_OK;
+  m_pOutputPin = new VideoFakeSrcPin (this, &m_critsec, &hr);
+}
+\r
+/* We expose exactly one pin: the output pin created in the constructor. */
+int VideoFakeSrc::GetPinCount()
+{
+  return 1;
+}
+\r
+/* Return the n'th pin.  The DirectShow contract is to return NULL for an
+ * out-of-range index; the old code ignored n entirely and always returned
+ * the output pin. */
+CBasePin *VideoFakeSrc::GetPin(int n)
+{
+  if (n != 0)
+    return NULL;
+  return (CBasePin *)m_pOutputPin;
+}
+\r
+/* Typed accessor for the output pin, used by the sink to connect the graph
+ * and to push buffers without going through IEnumPins. */
+VideoFakeSrcPin *VideoFakeSrc::GetOutputPin()
+{
+  return m_pOutputPin;
+}
--- /dev/null
+/* GStreamer\r
+ * Copyright (C) 2008 Michael Smith <msmith@songbirdnest.com>\r
+ *\r
+ * This library is free software; you can redistribute it and/or\r
+ * modify it under the terms of the GNU Library General Public\r
+ * License as published by the Free Software Foundation; either\r
+ * version 2 of the License, or (at your option) any later version.\r
+ *\r
+ * This library is distributed in the hope that it will be useful,\r
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of\r
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU\r
+ * Library General Public License for more details.\r
+ *\r
+ * You should have received a copy of the GNU Library General Public\r
+ * License along with this library; if not, write to the\r
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,\r
+ * Boston, MA 02111-1307, USA.\r
+ */\r
+\r
+#ifdef HAVE_CONFIG_H\r
+#include "config.h"\r
+#endif\r
+\r
+#include "dshowvideosink.h"\r
+#include "dshowvideofakesrc.h"\r
+\r
+#include <gst/interfaces/xoverlay.h>\r
+\r
+#include "windows.h"\r
+\r
+static const GstElementDetails gst_dshowvideosink_details =\r
+GST_ELEMENT_DETAILS ("DirectShow video sink",\r
+ "Sink/Video",\r
+ "Display data using a DirectShow video renderer",\r
+ "Michael Smith <msmith@songbirdnest.com>");\r
+\r
+GST_DEBUG_CATEGORY_STATIC (dshowvideosink_debug);\r
+#define GST_CAT_DEFAULT dshowvideosink_debug\r
+\r
+static GstCaps * gst_directshow_media_type_to_caps (AM_MEDIA_TYPE *mediatype);\r
+static gboolean gst_caps_to_directshow_media_type (GstCaps *caps, AM_MEDIA_TYPE *mediatype);\r
+\r
+/* TODO: Support RGB! */
+/* Pad template for the sink pad.  Fix: "YUVY" was a typo — the fourcc the
+ * copy code (and DirectShow) actually handles is YUYV. */
+static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
+    GST_PAD_SINK,
+    GST_PAD_ALWAYS,
+    GST_STATIC_CAPS (
+        "video/x-raw-yuv,"
+        "width = (int) [ 1, MAX ],"
+        "height = (int) [ 1, MAX ],"
+        "framerate = (fraction) [ 0, MAX ]," 
+        "format = {(fourcc)YUY2, (fourcc)UYVY, (fourcc) YUYV, (fourcc)YV12 }")
+    );
+\r
+static void gst_dshowvideosink_init_interfaces (GType type);\r
+\r
+GST_BOILERPLATE_FULL (GstDshowVideoSink, gst_dshowvideosink, GstBaseSink,\r
+ GST_TYPE_BASE_SINK, gst_dshowvideosink_init_interfaces);\r
+\r
+enum\r
+{\r
+ PROP_0,\r
+ PROP_KEEP_ASPECT_RATIO,\r
+ PROP_FULL_SCREEN,\r
+ PROP_RENDERER\r
+};\r
+\r
+/* GObject methods */\r
+static void gst_dshowvideosink_finalize (GObject * gobject);\r
+static void gst_dshowvideosink_set_property (GObject * object, guint prop_id,\r
+ const GValue * value, GParamSpec * pspec);\r
+static void gst_dshowvideosink_get_property (GObject * object, guint prop_id,\r
+ GValue * value, GParamSpec * pspec);\r
+\r
+/* GstElement methods */\r
+static GstStateChangeReturn gst_dshowvideosink_change_state (GstElement * element, GstStateChange transition);\r
+\r
+/* GstBaseSink methods */\r
+static gboolean gst_dshowvideosink_start (GstBaseSink * bsink);\r
+static gboolean gst_dshowvideosink_stop (GstBaseSink * bsink);\r
+static gboolean gst_dshowvideosink_unlock (GstBaseSink * bsink);\r
+static gboolean gst_dshowvideosink_unlock_stop (GstBaseSink * bsink);\r
+static gboolean gst_dshowvideosink_set_caps (GstBaseSink * bsink, GstCaps * caps);\r
+static GstCaps *gst_dshowvideosink_get_caps (GstBaseSink * bsink);\r
+static GstFlowReturn gst_dshowvideosink_render (GstBaseSink *sink, GstBuffer *buffer);\r
+\r
+/* GstXOverlay methods */\r
+static void gst_dshowvideosink_set_window_id (GstXOverlay * overlay, ULONG window_id);\r
+\r
+/* TODO: event, preroll, buffer_alloc? \r
+ * buffer_alloc won't generally be all that useful because the renderers require a \r
+ * different stride to GStreamer's implicit values. \r
+ */\r
+\r
+/* GstImplementsInterface: the only interface we implement is XOverlay, so
+ * assert that's what is being asked about and answer yes. */
+static gboolean
+gst_dshowvideosink_interface_supported (GstImplementsInterface * iface,
+    GType type)
+{
+  g_assert (type == GST_TYPE_X_OVERLAY);
+  return TRUE;
+}
+\r
+/* Hook up the GstImplementsInterface vtable. */
+static void
+gst_dshowvideosink_interface_init (GstImplementsInterfaceClass * klass)
+{
+  klass->supported = gst_dshowvideosink_interface_supported;
+}
+\r
+\r
+/* Hook up the GstXOverlay vtable: applications hand us an HWND through
+ * set_xwindow_id. */
+static void
+gst_dshowvideosink_xoverlay_interface_init (GstXOverlayClass * iface)
+{
+  iface->set_xwindow_id = gst_dshowvideosink_set_window_id;
+}
+\r
+/* GST_BOILERPLATE_FULL callback: register the two interfaces on our type and
+ * initialise the debug category used throughout this file. */
+static void
+gst_dshowvideosink_init_interfaces (GType type)
+{
+  static const GInterfaceInfo iface_info = {
+    (GInterfaceInitFunc) gst_dshowvideosink_interface_init,
+    NULL,
+    NULL,
+  };
+
+  static const GInterfaceInfo xoverlay_info = {
+    (GInterfaceInitFunc) gst_dshowvideosink_xoverlay_interface_init,
+    NULL,
+    NULL,
+  };
+
+  g_type_add_interface_static (type, GST_TYPE_IMPLEMENTS_INTERFACE,
+      &iface_info);
+  g_type_add_interface_static (type, GST_TYPE_X_OVERLAY, &xoverlay_info);
+
+  GST_DEBUG_CATEGORY_INIT (dshowvideosink_debug, "dshowvideosink", 0, \
+      "DirectShow video sink");
+}
+/* base_init: register the pad template and element details (0.10-style). */
+static void
+gst_dshowvideosink_base_init (gpointer klass)
+{
+  GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+
+  gst_element_class_add_pad_template (element_class,
+      gst_static_pad_template_get (&sink_template));
+
+  gst_element_class_set_details (element_class, &gst_dshowvideosink_details);
+}
+\r
+/* class_init: wire up GObject property handling, the element state-change
+ * hook, the GstBaseSink virtual methods, and install our three properties. */
+static void
+gst_dshowvideosink_class_init (GstDshowVideoSinkClass * klass)
+{
+  GObjectClass *gobject_class;
+  GstElementClass *gstelement_class;
+  GstBaseSinkClass *gstbasesink_class;
+
+  gobject_class = (GObjectClass *) klass;
+  gstelement_class = (GstElementClass *) klass;
+  gstbasesink_class = (GstBaseSinkClass *) klass;
+
+  gobject_class->finalize = GST_DEBUG_FUNCPTR (gst_dshowvideosink_finalize);
+  gobject_class->set_property =
+      GST_DEBUG_FUNCPTR (gst_dshowvideosink_set_property);
+  gobject_class->get_property =
+      GST_DEBUG_FUNCPTR (gst_dshowvideosink_get_property);
+
+  gstelement_class->change_state = GST_DEBUG_FUNCPTR (gst_dshowvideosink_change_state);
+
+  gstbasesink_class->get_caps = GST_DEBUG_FUNCPTR (gst_dshowvideosink_get_caps);
+  gstbasesink_class->set_caps = GST_DEBUG_FUNCPTR (gst_dshowvideosink_set_caps);
+  gstbasesink_class->start = GST_DEBUG_FUNCPTR (gst_dshowvideosink_start);
+  gstbasesink_class->stop = GST_DEBUG_FUNCPTR (gst_dshowvideosink_stop);
+  gstbasesink_class->unlock = GST_DEBUG_FUNCPTR (gst_dshowvideosink_unlock);
+  gstbasesink_class->unlock_stop =
+      GST_DEBUG_FUNCPTR (gst_dshowvideosink_unlock_stop);
+  gstbasesink_class->render = GST_DEBUG_FUNCPTR (gst_dshowvideosink_render);
+
+  /* Add properties */
+  g_object_class_install_property (G_OBJECT_CLASS (klass),
+      PROP_KEEP_ASPECT_RATIO, g_param_spec_boolean ("force-aspect-ratio",
+          "Force aspect ratio",
+          "When enabled, scaling will respect original aspect ratio", FALSE,
+          (GParamFlags)G_PARAM_READWRITE));
+  g_object_class_install_property (G_OBJECT_CLASS (klass),
+      PROP_FULL_SCREEN, g_param_spec_boolean ("fullscreen",
+          "Full screen mode",
+          "Use full-screen mode (not available when using XOverlay)", FALSE,
+          (GParamFlags)G_PARAM_READWRITE));
+
+  g_object_class_install_property (G_OBJECT_CLASS (klass),
+      PROP_RENDERER, g_param_spec_string ("renderer", "Renderer", 
+          "Force usage of specific DirectShow renderer (VMR9 or VMR)",
+          NULL, (GParamFlags)G_PARAM_READWRITE));
+}
+\r
+/* Reset all per-instance state to its defaults.  Called from _init, where
+ * the GObject instance memory is already zeroed.
+ * NOTE(review): preferredrenderer is not reset here — safe at init time
+ * (zeroed memory) but would leak if this were ever called on a live
+ * instance; confirm there are no other callers. */
+static void
+gst_dshowvideosink_clear (GstDshowVideoSink *sink)
+{
+  sink->renderersupport = NULL;
+  sink->fakesrc = NULL;
+  sink->filter_graph = NULL;
+
+  sink->keep_aspect_ratio = FALSE;
+  sink->full_screen = FALSE;
+
+  sink->window_closed = FALSE;
+  sink->window_id = NULL;
+
+  sink->connected = FALSE;
+}
+\r
+/* Instance init: clear state, bring up COM for this thread, and configure
+ * base-sink lateness/QoS defaults.
+ * NOTE(review): the CoInitializeEx return value is ignored — presumably
+ * RPC_E_CHANGED_MODE is considered acceptable here; confirm. */
+static void
+gst_dshowvideosink_init (GstDshowVideoSink * sink, GstDshowVideoSinkClass * klass)
+{
+  gst_dshowvideosink_clear (sink);
+
+  CoInitializeEx (NULL, COINIT_MULTITHREADED);
+
+  /* TODO: Copied from GstVideoSink; should we use that as base class? */
+  /* 20ms is more than enough, 80-130ms is noticable */
+  gst_base_sink_set_max_lateness (GST_BASE_SINK (sink), 20 * GST_MSECOND);
+  gst_base_sink_set_qos_enabled (GST_BASE_SINK (sink), TRUE);
+}
+\r
+/* finalize: release the renderer-name string, tear down COM for this thread
+ * and chain up.  (g_free on NULL is a no-op, so the guard is belt-and-braces.) */
+static void
+gst_dshowvideosink_finalize (GObject * gobject)
+{
+  GstDshowVideoSink *sink = GST_DSHOWVIDEOSINK (gobject);
+
+  if (sink->preferredrenderer)
+    g_free (sink->preferredrenderer);
+
+  CoUninitialize ();
+
+  G_OBJECT_CLASS (parent_class)->finalize (gobject);
+}
+\r
+/* GObject property setter for "renderer", "force-aspect-ratio" and
+ * "fullscreen".  The renderer string replaces (and frees) any previous
+ * value. */
+static void
+gst_dshowvideosink_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec)
+{
+  GstDshowVideoSink *sink = GST_DSHOWVIDEOSINK (object);
+
+  switch (prop_id) {
+    case PROP_RENDERER:
+      if (sink->preferredrenderer)
+        g_free (sink->preferredrenderer);
+
+      sink->preferredrenderer = g_value_dup_string (value);
+      break;
+    case PROP_KEEP_ASPECT_RATIO:
+      sink->keep_aspect_ratio = g_value_get_boolean (value);
+      break;
+    case PROP_FULL_SCREEN:
+      sink->full_screen = g_value_get_boolean (value);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+\r
+/* GObject property getter.
+ * Fix: g_value_take_string() transfers ownership of the string to the
+ * GValue, leaving sink->preferredrenderer dangling (later use or finalize
+ * would double-free).  g_value_set_string() copies, which is what a getter
+ * must do. */
+static void
+gst_dshowvideosink_get_property (GObject * object, guint prop_id,
+    GValue * value, GParamSpec * pspec)
+{
+  GstDshowVideoSink *sink = GST_DSHOWVIDEOSINK (object);
+
+  switch (prop_id) {
+    case PROP_RENDERER:
+      g_value_set_string (value, sink->preferredrenderer);
+      break;
+    case PROP_KEEP_ASPECT_RATIO:
+      g_value_set_boolean (value, sink->keep_aspect_ratio);
+      break;
+    case PROP_FULL_SCREEN:
+      g_value_set_boolean (value, sink->full_screen);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+\r
+/* BaseSink get_caps vfunc.  Returning NULL tells the base class to fall
+ * back to the pad template caps.  The local is currently unused.
+ * TODO(review): should this query the chosen renderer for its supported
+ * formats instead of always deferring to the template? */
+static GstCaps *
+gst_dshowvideosink_get_caps (GstBaseSink * basesink)
+{
+  GstDshowVideoSink *sink = GST_DSHOWVIDEOSINK (basesink);
+
+  return NULL;
+}
+\r
+/* Debug helper: enumerate every media type a pin offers and log the GstCaps
+ * equivalent of each.
+ * Fix: the result of EnumMediaTypes() was ignored; on failure `enumerator`
+ * stayed NULL and the subsequent Next() call dereferenced it. */
+static void dump_available_media_types (IPin *pin)
+{
+  /* Enumerate all media types on this pin, output info about them */
+  IEnumMediaTypes *enumerator = NULL;
+  AM_MEDIA_TYPE *type;
+  GstCaps *caps;
+  int i = 0;
+  HRESULT hres;
+
+  GST_INFO ("Enumerating media types on pin %p", pin);
+
+  hres = pin->EnumMediaTypes (&enumerator);
+  if (FAILED (hres) || enumerator == NULL) {
+    GST_WARNING ("Cannot enumerate media types on pin %p: %x", pin, hres);
+    return;
+  }
+
+  while (enumerator->Next (1, &type, NULL) == S_OK) {
+    i++;
+    caps = gst_directshow_media_type_to_caps (type);
+
+    if (caps) {
+      gchar *str = gst_caps_to_string (caps);
+      GST_INFO ("Type %d: converted to caps \"%s\"", i, str);
+      g_free (str);
+
+      gst_caps_unref (caps);
+    }
+    else
+      GST_INFO ("Failed to convert type to GstCaps");
+
+    /* Each AM_MEDIA_TYPE returned by Next() must be freed by us */
+    DeleteMediaType (type);
+  }
+  GST_INFO ("Enumeration complete");
+
+  enumerator->Release();
+}
+\r
+/* Debug helper: walk every pin on a filter, log its direction, its media
+ * types, and whether it supports push mode (IMemInputPin). */
+static void
+dump_all_pin_media_types (IBaseFilter *filter)
+{
+  IEnumPins *enumpins = NULL;
+  IPin *pin = NULL;
+  HRESULT hres; 
+
+  hres = filter->EnumPins (&enumpins);
+  if (FAILED(hres)) {
+    GST_WARNING ("Cannot enumerate pins on filter");
+    return;
+  }
+
+  GST_INFO ("Enumerating pins on filter %p", filter);
+  while (enumpins->Next (1, &pin, NULL) == S_OK)
+  {
+    IMemInputPin *meminputpin;
+    PIN_DIRECTION pindir;
+    hres = pin->QueryDirection (&pindir);
+
+    GST_INFO ("Found a pin with direction: %s", (pindir == PINDIR_INPUT)? "input": "output");
+    dump_available_media_types (pin);
+
+    /* Push-mode pins expose IMemInputPin; release it immediately, we only
+     * wanted to know whether the interface exists */
+    hres = pin->QueryInterface (
+          IID_IMemInputPin, (void **) &meminputpin);
+    if (hres == S_OK) {
+      GST_INFO ("Pin is a MemInputPin (push mode): %p", meminputpin);
+      meminputpin->Release();
+    }
+    else
+      GST_INFO ("Pin is not a MemInputPin (pull mode?): %p", pin);
+
+    pin->Release();
+  }
+  enumpins->Release();
+}
+\r
+/* Find the first pin on `filter` with the requested direction.
+ * On success returns TRUE and stores an addref'd pin in *pin (caller must
+ * Release()); on failure returns FALSE with *pin set to NULL. */
+gboolean 
+gst_dshow_get_pin_from_filter (IBaseFilter *filter, PIN_DIRECTION pindir, IPin **pin)
+{
+  gboolean ret = FALSE;
+  IEnumPins *enumpins = NULL;
+  IPin *pintmp = NULL;
+  HRESULT hres; 
+  *pin = NULL;
+
+  hres = filter->EnumPins (&enumpins);
+  if (FAILED(hres)) {
+    return ret;
+  }
+
+  while (enumpins->Next (1, &pintmp, NULL) == S_OK)
+  {
+    PIN_DIRECTION pindirtmp;
+    hres = pintmp->QueryDirection (&pindirtmp);
+    if (hres == S_OK && pindir == pindirtmp) {
+      /* Transfer the reference from Next() to the caller */
+      *pin = pintmp;
+      ret = TRUE;
+      break;
+    }
+    pintmp->Release ();
+  }
+  enumpins->Release ();
+
+  return ret;
+}
+\r
+/* WNDPROC for application-supplied windows */
+/* Subclassing hook installed by set_window_for_renderer(): intercept the
+ * paint/move/resize/display-change messages the renderer cares about, then
+ * forward everything to the application's original window procedure.
+ * NOTE(review): assumes GetProp always yields a valid sink — true while the
+ * hook is installed only between SetProp and RemoveProp; confirm teardown
+ * ordering. */
+LRESULT APIENTRY WndProcHook (HWND hWnd, UINT message, WPARAM wParam, LPARAM lParam)
+{
+  /* Handle certain actions specially on the window passed to us.
+   * Then forward back to the original window.
+   */
+  GstDshowVideoSink *sink = (GstDshowVideoSink *)GetProp (hWnd, L"GstDShowVideoSink");
+
+  switch (message) {
+    case WM_PAINT:
+      sink->renderersupport->PaintWindow ();
+      break;
+    case WM_MOVE:
+    case WM_SIZE:
+      sink->renderersupport->MoveWindow ();
+      break;
+    case WM_DISPLAYCHANGE:
+      sink->renderersupport->DisplayModeChanged();
+      break;
+    case WM_ERASEBKGND:
+      /* DirectShow docs recommend ignoring this message to avoid flicker */
+      return TRUE;
+    case WM_CLOSE:
+      sink->window_closed = TRUE;
+  }
+  return CallWindowProc (sink->prevWndProc, hWnd, message, wParam, lParam);
+}
+\r
+/* WndProc for our default window, if the application didn't supply one */
+/* Same renderer notifications as WndProcHook, but for the window we own:
+ * unhandled messages go to DefWindowProc, and WM_CLOSE destroys the window. */
+LRESULT APIENTRY 
+WndProc (HWND hWnd, UINT message, WPARAM wParam, LPARAM lParam)
+{
+  GstDshowVideoSink *sink = (GstDshowVideoSink *)GetWindowLongPtr (hWnd, GWLP_USERDATA);
+
+  if (!sink) {
+    /* I think these happen before we have a chance to set our userdata pointer */
+    GST_DEBUG ("No sink!");
+    return DefWindowProc (hWnd, message, wParam, lParam);
+  }
+
+  GST_DEBUG_OBJECT (sink, "Got a window message for %x, %x", hWnd, message);
+
+  switch (message) {
+    case WM_PAINT:
+      sink->renderersupport->PaintWindow ();
+      break;
+    case WM_MOVE:
+    case WM_SIZE:
+      sink->renderersupport->MoveWindow ();
+      break;
+    case WM_DISPLAYCHANGE:
+      sink->renderersupport->DisplayModeChanged();
+      break;
+    case WM_ERASEBKGND:
+      /* DirectShow docs recommend ignoring this message */
+      return TRUE;
+    case WM_CLOSE:
+      sink->renderersupport->DestroyWindow ();
+      sink->window_closed = TRUE;
+      return 0;
+  }
+
+  return DefWindowProc (hWnd, message, wParam, lParam);
+}
+\r
+static gpointer\r
+gst_dshowvideosink_window_thread (GstDshowVideoSink * sink)\r
+{\r
+ WNDCLASS WndClass;\r
+ int width, height;\r
+ int offx, offy;\r
+ DWORD exstyle, style;\r
+\r
+ memset (&WndClass, 0, sizeof (WNDCLASS));\r
+ WndClass.style = CS_HREDRAW | CS_VREDRAW;\r
+ WndClass.hInstance = GetModuleHandle (NULL);\r
+ WndClass.lpszClassName = L"GST-DShowSink";\r
+ WndClass.hbrBackground = (HBRUSH) GetStockObject (BLACK_BRUSH);\r
+ WndClass.cbClsExtra = 0;\r
+ WndClass.cbWndExtra = 0;\r
+ WndClass.lpfnWndProc = WndProc;\r
+ WndClass.hCursor = LoadCursor (NULL, IDC_ARROW);\r
+ RegisterClass (&WndClass);\r
+\r
+ if (sink->full_screen) {\r
+ /* This doesn't seem to work, it returns the wrong values! But when we\r
+ * later use ShowWindow to show it maximized, it goes to full-screen\r
+ * anyway. TODO: Figure out why. */\r
+ width = GetSystemMetrics (SM_CXFULLSCREEN);\r
+ height = GetSystemMetrics (SM_CYFULLSCREEN);\r
+ offx = 0;\r
+ offy = 0;\r
+\r
+ style = WS_POPUP; /* No window decorations */\r
+ exstyle = 0;\r
+ }\r
+ else {\r
+ /* By default, create a normal top-level window, the size \r
+ * of the video.\r
+ */\r
+ RECT rect;\r
+ VIDEOINFOHEADER *vi = (VIDEOINFOHEADER *)sink->mediatype.pbFormat;\r
+\r
+ /* rcTarget is the aspect-ratio-corrected size of the video. */\r
+ width = vi->rcTarget.right + GetSystemMetrics (SM_CXSIZEFRAME) * 2;\r
+ height = vi->rcTarget.bottom + GetSystemMetrics (SM_CYCAPTION) +\r
+ (GetSystemMetrics (SM_CYSIZEFRAME) * 2);\r
+\r
+ SystemParametersInfo (SPI_GETWORKAREA, NULL, &rect, 0);\r
+ int screenwidth = rect.right - rect.left;\r
+ int screenheight = rect.bottom - rect.top;\r
+ offx = rect.left;\r
+ offy = rect.top;\r
+\r
+ /* Make it fit into the screen without changing the\r
+ * aspect ratio. */\r
+ if (width > screenwidth) {\r
+ double ratio = (double)screenwidth/(double)width;\r
+ width = screenwidth;\r
+ height = (int)(height * ratio);\r
+ }\r
+ if (height > screenheight) {\r
+ double ratio = (double)screenheight/(double)height;\r
+ height = screenheight;\r
+ width = (int)(width * ratio);\r
+ }\r
+\r
+ style = WS_OVERLAPPEDWINDOW; /* Normal top-level window */\r
+ exstyle = 0;\r
+ }\r
+\r
+ HWND video_window = CreateWindowEx (exstyle, L"GST-DShowSink",\r
+ L"GStreamer DirectShow sink default window",\r
+ style, offx, offy, width, height, NULL, NULL,\r
+ WndClass.hInstance, NULL);\r
+ if (video_window == NULL) {\r
+ GST_ERROR_OBJECT (sink, "Failed to create window!");\r
+ return NULL;\r
+ }\r
+\r
+ SetWindowLongPtr (video_window, GWLP_USERDATA, (LONG)sink);\r
+\r
+ /* signal application we created a window */\r
+ gst_x_overlay_got_xwindow_id (GST_X_OVERLAY (sink),\r
+ (gulong)video_window);\r
+\r
+ /* Set the renderer's clipping window */\r
+ if (!sink->renderersupport->SetRendererWindow (video_window)) {\r
+ GST_WARNING_OBJECT (sink, "Failed to set video clipping window on filter %p", sink->renderersupport);\r
+ }\r
+\r
+ /* Now show the window, as appropriate */\r
+ if (sink->full_screen) {\r
+ ShowWindow (video_window, SW_SHOWMAXIMIZED);\r
+ ShowCursor (FALSE);\r
+ }\r
+ else\r
+ ShowWindow (video_window, SW_SHOWNORMAL);\r
+\r
+ /* Trigger the initial paint of the window */\r
+ UpdateWindow (video_window);\r
+\r
+ ReleaseSemaphore (sink->window_created_signal, 1, NULL);\r
+\r
+ /* start message loop processing our default window messages */\r
+ while (1) {\r
+ MSG msg;\r
+\r
+ if (GetMessage (&msg, video_window, 0, 0) <= 0) {\r
+ GST_LOG_OBJECT (sink, "our window received WM_QUIT or error.");\r
+ break;\r
+ }\r
+ DispatchMessage (&msg);\r
+ }\r
+\r
+ return NULL;\r
+}\r
+\r
+/* Spawn the window thread and wait (up to 10s) for it to signal that the
+ * window exists.  Returns TRUE on success.
+ * Fixes: the failure path used to call CloseHandle(NULL) when semaphore
+ * creation itself failed, and a failed g_thread_create was only detected
+ * after the full 10-second timeout. */
+static gboolean
+gst_dshowvideosink_create_default_window (GstDshowVideoSink * sink)
+{
+  sink->window_created_signal = CreateSemaphore (NULL, 0, 1, NULL);
+  if (sink->window_created_signal == NULL)
+    goto failed;
+
+  sink->window_thread = g_thread_create (
+      (GThreadFunc) gst_dshowvideosink_window_thread, sink, TRUE, NULL);
+  if (sink->window_thread == NULL)
+    goto failed;
+
+  /* wait maximum 10 seconds for window to be created */
+  if (WaitForSingleObject (sink->window_created_signal,
+          10000) != WAIT_OBJECT_0)
+    goto failed;
+
+  CloseHandle (sink->window_created_signal);
+  return TRUE;
+
+failed:
+  /* The semaphore may never have been created; only close a valid handle */
+  if (sink->window_created_signal != NULL)
+    CloseHandle (sink->window_created_signal);
+  GST_ELEMENT_ERROR (sink, RESOURCE, WRITE,
+      ("Error creating our default window"), (NULL));
+
+  return FALSE;
+}
+\r
+/* GstXOverlay::set_xwindow_id — remember the application-supplied HWND.
+ * The window is actually hooked up later, in prepare_window(). */
+static void gst_dshowvideosink_set_window_id (GstXOverlay * overlay, ULONG window_id)
+{
+  GstDshowVideoSink *sink = GST_DSHOWVIDEOSINK (overlay);
+  HWND videowindow = (HWND)window_id;
+
+  if (videowindow == sink->window_id) {
+    GST_DEBUG_OBJECT (sink, "Window already set");
+    return;
+  }
+
+  /* TODO: What if we already have a window? What if we're already playing? */
+  sink->window_id = videowindow;
+}
+\r
+/* Subclass the application-supplied window so we see its paint/size
+ * messages, then hand it to the renderer.
+ * Fix: SetWindowLong with a (LONG) cast truncates function pointers on
+ * 64-bit Windows; SetWindowLongPtr/GWLP_WNDPROC with (LONG_PTR)/(WNDPROC)
+ * is the pointer-safe form on both 32- and 64-bit. */
+static void gst_dshowvideosink_set_window_for_renderer (GstDshowVideoSink *sink)
+{
+  /* Application has requested a specific window ID */
+  sink->prevWndProc = (WNDPROC) SetWindowLongPtr (sink->window_id, GWLP_WNDPROC, (LONG_PTR)WndProcHook);
+  GST_DEBUG_OBJECT (sink, "Set wndproc to %p from %p", WndProcHook, sink->prevWndProc);
+  SetProp (sink->window_id, L"GstDShowVideoSink", sink);
+  /* This causes the new WNDPROC to become active */
+  SetWindowPos (sink->window_id, 0, 0, 0, 0, 0, SWP_NOMOVE | SWP_NOSIZE | SWP_NOZORDER | SWP_FRAMECHANGED);
+
+  if (!sink->renderersupport->SetRendererWindow (sink->window_id)) {
+    GST_WARNING_OBJECT (sink, "Failed to set HWND %x on renderer", sink->window_id);
+    return;
+  }
+
+  /* This tells the renderer where the window is located, needed to 
+   * start drawing in the right place. */
+  sink->renderersupport->MoveWindow();
+  GST_INFO_OBJECT (sink, "Set renderer window to %x", sink->window_id);
+}
+\r
+/* Ensure we have a window for the renderer: ask the application once more
+ * via prepare_xwindow_id, then either adopt its HWND or create our own. */
+static void
+gst_dshowvideosink_prepare_window (GstDshowVideoSink *sink)
+{
+  /* Give the app a last chance to supply a window id */
+  if (!sink->window_id) {
+    gst_x_overlay_prepare_xwindow_id (GST_X_OVERLAY (sink));
+  }
+
+  /* If the app supplied one, use it. Otherwise, go ahead
+   * and create (and use) our own window */
+  if (sink->window_id) {
+    gst_dshowvideosink_set_window_for_renderer (sink);
+  }
+  else {
+    gst_dshowvideosink_create_default_window (sink);
+  }
+}
+\r
+/* Connect our fake source's output pin directly to the renderer's input
+ * pin.  Returns TRUE on success.  Must run on the main thread (see the
+ * deadlock warning below). */
+static gboolean
+gst_dshowvideosink_connect_graph (GstDshowVideoSink *sink)
+{
+  HRESULT hres;
+  IPin *srcpin;
+  IPin *sinkpin;
+
+  GST_INFO_OBJECT (sink, "Connecting DirectShow pins");
+
+  srcpin = sink->fakesrc->GetOutputPin();
+
+  gst_dshow_get_pin_from_filter (sink->renderersupport->GetFilter(), PINDIR_INPUT, 
+      &sinkpin);
+  if (!sinkpin) {
+    GST_WARNING_OBJECT (sink, "Cannot get input pin from Renderer");
+    return FALSE;
+  }
+
+  /* Be warned that this call WILL deadlock unless you call it from
+   * the main thread. Thus, we call this from the state change, not from
+   * setcaps (which happens in a streaming thread).
+   */
+  hres = sink->filter_graph->ConnectDirect (
+           srcpin, sinkpin, NULL);
+  if (FAILED (hres)) {
+    GST_WARNING_OBJECT (sink, "Could not connect pins: %x", hres);
+    sinkpin->Release();
+    return FALSE;
+  }
+  sinkpin->Release();
+  return TRUE;
+}
+\r
+/* Bring the DirectShow graph to running: on first call also create/adopt
+ * the window and connect the pins, then IMediaControl::Run() the graph.
+ * Returns GST_STATE_CHANGE_SUCCESS or GST_STATE_CHANGE_FAILURE. */
+static GstStateChangeReturn
+gst_dshowvideosink_start_graph (GstDshowVideoSink *sink)
+{
+  IMediaControl *control = NULL;
+  HRESULT hres;
+  GstStateChangeReturn ret;
+
+  GST_DEBUG_OBJECT (sink, "Connecting and starting DirectShow graph");
+
+  if (!sink->connected) {
+    /* This is fine; this just means we haven't connected yet.
+     * That's normal for the first time this is called. 
+     * So, create a window (or start using an application-supplied
+     * one, then connect the graph */
+    gst_dshowvideosink_prepare_window (sink);
+    if (!gst_dshowvideosink_connect_graph (sink)) {
+      ret = GST_STATE_CHANGE_FAILURE;
+      goto done;
+    }
+    sink->connected = TRUE;
+  }
+
+  hres = sink->filter_graph->QueryInterface(
+          IID_IMediaControl, (void **) &control);
+
+  if (FAILED (hres)) {
+    GST_WARNING_OBJECT (sink, "Failed to get IMediaControl interface");
+    ret = GST_STATE_CHANGE_FAILURE;
+    goto done;
+  }
+
+  GST_INFO_OBJECT (sink, "Running DirectShow graph");
+  hres = control->Run();
+  if (FAILED (hres)) {
+    GST_ERROR_OBJECT (sink,
+        "Failed to run the directshow graph (error=%x)", hres);
+    ret = GST_STATE_CHANGE_FAILURE;
+    goto done;
+  }
+  
+  GST_DEBUG_OBJECT (sink, "DirectShow graph is now running");
+  ret = GST_STATE_CHANGE_SUCCESS;
+
+done:
+  if (control)
+    control->Release();
+
+  return ret;
+}
+static GstStateChangeReturn\r
+gst_dshowvideosink_pause_graph (GstDshowVideoSink *sink)\r
+{\r
+ IMediaControl *control = NULL;\r
+ GstStateChangeReturn ret;\r
+ HRESULT hres;\r
+\r
+ hres = sink->filter_graph->QueryInterface(\r
+ IID_IMediaControl, (void **) &control);\r
+ if (FAILED (hres)) {\r
+ GST_WARNING_OBJECT (sink, "Failed to get IMediaControl interface");\r
+ ret = GST_STATE_CHANGE_FAILURE;\r
+ goto done;\r
+ }\r
+\r
+ GST_INFO_OBJECT (sink, "Pausing DirectShow graph");\r
+ hres = control->Pause();\r
+ if (FAILED (hres)) {\r
+ GST_WARNING_OBJECT (sink,\r
+ "Can't pause the directshow graph (error=%x)", hres);\r
+ ret = GST_STATE_CHANGE_FAILURE;\r
+ goto done;\r
+ }\r
+\r
+ ret = GST_STATE_CHANGE_SUCCESS;\r
+\r
+done:\r
+ if (control)\r
+ control->Release();\r
+\r
+ return ret;\r
+}\r
+\r
+static GstStateChangeReturn\r
+gst_dshowvideosink_stop_graph (GstDshowVideoSink *sink)\r
+{\r
+ IMediaControl *control = NULL;\r
+ GstStateChangeReturn ret;\r
+ HRESULT hres;\r
+ IPin *sinkpin;\r
+\r
+ hres = sink->filter_graph->QueryInterface(\r
+ IID_IMediaControl, (void **) &control);\r
+ if (FAILED (hres)) {\r
+ GST_WARNING_OBJECT (sink, "Failed to get IMediaControl interface");\r
+ ret = GST_STATE_CHANGE_FAILURE;\r
+ goto done;\r
+ }\r
+\r
+ GST_INFO_OBJECT (sink, "Stopping DirectShow graph");\r
+ hres = control->Stop();\r
+ if (FAILED (hres)) {\r
+ GST_WARNING_OBJECT (sink,\r
+ "Can't stop the directshow graph (error=%x)", hres);\r
+ ret = GST_STATE_CHANGE_FAILURE;\r
+ goto done;\r
+ }\r
+\r
+ sink->filter_graph->Disconnect(sink->fakesrc->GetOutputPin());\r
+\r
+ gst_dshow_get_pin_from_filter (sink->renderersupport->GetFilter(), PINDIR_INPUT, \r
+ &sinkpin);\r
+ sink->filter_graph->Disconnect(sinkpin);\r
+ sinkpin->Release();\r
+\r
+ GST_DEBUG_OBJECT (sink, "DirectShow graph has stopped");\r
+\r
+ if (sink->window_id) {\r
+ /* Return control of application window */\r
+ SetWindowLong (sink->window_id, GWL_WNDPROC, (LONG)sink->prevWndProc);\r
+ RemoveProp (sink->window_id, L"GstDShowVideoSink");\r
+ SetWindowPos (sink->window_id, 0, 0, 0, 0, 0, SWP_NOMOVE | SWP_NOSIZE | SWP_NOZORDER | SWP_FRAMECHANGED);\r
+ sink->prevWndProc = NULL;\r
+ }\r
+ sink->connected = FALSE;\r
+\r
+ ret = GST_STATE_CHANGE_SUCCESS;\r
+\r
+done:\r
+ if (control)\r
+ control->Release();\r
+\r
+ return ret;\r
+}\r
+\r
+static GstStateChangeReturn\r
+gst_dshowvideosink_change_state (GstElement * element, GstStateChange transition)\r
+{\r
+ GstDshowVideoSink *sink = GST_DSHOWVIDEOSINK (element);\r
+ GstStateChangeReturn ret;\r
+\r
+ switch (transition) {\r
+ case GST_STATE_CHANGE_NULL_TO_READY:\r
+ break;\r
+ case GST_STATE_CHANGE_READY_TO_PAUSED:\r
+ break;\r
+ case GST_STATE_CHANGE_PAUSED_TO_PLAYING:\r
+ ret = gst_dshowvideosink_start_graph (sink);\r
+ if (ret != GST_STATE_CHANGE_SUCCESS)\r
+ return ret;\r
+ break;\r
+ }\r
+\r
+ ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);\r
+\r
+ switch (transition) {\r
+ case GST_STATE_CHANGE_PLAYING_TO_PAUSED:\r
+ ret = gst_dshowvideosink_pause_graph (sink);\r
+ break;\r
+ case GST_STATE_CHANGE_PAUSED_TO_READY:\r
+ ret = gst_dshowvideosink_stop_graph (sink);\r
+ break;\r
+ case GST_STATE_CHANGE_READY_TO_NULL:\r
+ gst_dshowvideosink_clear (sink);\r
+ break;\r
+ }\r
+\r
+ return ret;\r
+}\r
+\r
+class VMR9Support : public RendererSupport\r
+{\r
+private:\r
+ GstDshowVideoSink *sink;\r
+ IBaseFilter *filter;\r
+ IVMRWindowlessControl9 *control;\r
+ IVMRFilterConfig9 *config;\r
+ HWND video_window;\r
+\r
+public:\r
+ VMR9Support (GstDshowVideoSink *sink) : \r
+ sink(sink), \r
+ filter(NULL),\r
+ control(NULL),\r
+ config(NULL)\r
+ {\r
+ }\r
+\r
+ ~VMR9Support() {\r
+ if (control)\r
+ control->Release();\r
+ if (config)\r
+ config->Release();\r
+ if (filter)\r
+ filter->Release();\r
+ }\r
+\r
+ const char *GetName() {\r
+ return "VideoMixingRenderer9";\r
+ }\r
+\r
+ IBaseFilter *GetFilter() {\r
+ return filter;\r
+ }\r
+\r
+ gboolean Configure() {\r
+ HRESULT hres;\r
+\r
+ hres = CoCreateInstance (CLSID_VideoMixingRenderer9, NULL, CLSCTX_INPROC,\r
+ IID_IBaseFilter, (LPVOID *) &filter);\r
+ if (FAILED (hres)) {\r
+ GST_ERROR_OBJECT (sink, \r
+ "Can't create an instance of renderer (error=%x)",\r
+ hres);\r
+ return FALSE;\r
+ }\r
+\r
+ hres = filter->QueryInterface (\r
+ IID_IVMRFilterConfig9, (void **) &config);\r
+ if (FAILED (hres)) {\r
+ GST_WARNING_OBJECT (sink, "VMR9 filter config interface missing: %x", hres);\r
+ return FALSE;\r
+ }\r
+\r
+ hres = config->SetRenderingMode (VMR9Mode_Windowless);\r
+ if (FAILED (hres)) {\r
+ GST_WARNING_OBJECT (sink, "VMR9 couldn't be set to windowless mode: %x", hres);\r
+ return FALSE;\r
+ }\r
+ else {\r
+ GST_DEBUG_OBJECT (sink, "Set VMR9 (%p) to windowless mode!", filter);\r
+ }\r
+\r
+ /* We can't QI to this until _after_ we've been set to windowless mode. \r
+ * Apparently this is against the rules in COM, but that's how it is... */\r
+ hres = filter->QueryInterface (\r
+ IID_IVMRWindowlessControl9, (void **) &control);\r
+ if (FAILED (hres)) {\r
+ GST_WARNING_OBJECT (sink, "VMR9 windowless control interface missing: %x", hres);\r
+ return FALSE;\r
+ }\r
+\r
+ if (sink->keep_aspect_ratio) {\r
+ control->SetAspectRatioMode(VMR9ARMode_LetterBox);\r
+ }\r
+ else {\r
+ control->SetAspectRatioMode(VMR9ARMode_None);\r
+ }\r
+ return TRUE;\r
+ }\r
+\r
+ gboolean SetRendererWindow(HWND window) {\r
+ video_window = window;\r
+ HRESULT hres = control->SetVideoClippingWindow (video_window);\r
+ if (FAILED (hres)) {\r
+ GST_WARNING_OBJECT (sink, "Failed to set video clipping window on filter %p: %x", filter, hres);\r
+ return FALSE;\r
+ }\r
+ return TRUE;\r
+ }\r
+\r
+ void PaintWindow()\r
+ {\r
+ HRESULT hr;\r
+ PAINTSTRUCT ps;\r
+ HDC hdc;\r
+ RECT rcClient;\r
+\r
+ GetClientRect(video_window, &rcClient);\r
+ hdc = BeginPaint(video_window, &ps);\r
+\r
+ hr = control->RepaintVideo(video_window, hdc);\r
+\r
+ EndPaint(video_window, &ps);\r
+ }\r
+\r
+ void MoveWindow()\r
+ {\r
+ HRESULT hr;\r
+ RECT rect;\r
+\r
+ // Track the movement of the container window and resize as needed\r
+ GetClientRect(video_window, &rect);\r
+ hr = control->SetVideoPosition(NULL, &rect);\r
+ }\r
+\r
+ void DisplayModeChanged() {\r
+ control->DisplayModeChanged();\r
+ }\r
+\r
+ void DestroyWindow() {\r
+ ::DestroyWindow (video_window);\r
+ }\r
+};\r
+\r
+class VMR7Support : public RendererSupport\r
+{\r
+private:\r
+ GstDshowVideoSink *sink;\r
+ IBaseFilter *filter;\r
+ IVMRWindowlessControl *control;\r
+ IVMRFilterConfig *config;\r
+ HWND video_window;\r
+\r
+public:\r
+ VMR7Support (GstDshowVideoSink *sink) : \r
+ sink(sink), \r
+ filter(NULL),\r
+ control(NULL),\r
+ config(NULL)\r
+ {\r
+ }\r
+\r
+ ~VMR7Support() {\r
+ if (control)\r
+ control->Release();\r
+ if (config)\r
+ config->Release();\r
+ if (filter)\r
+ filter->Release();\r
+ }\r
+\r
+ const char *GetName() {\r
+ return "VideoMixingRenderer";\r
+ }\r
+\r
+ IBaseFilter *GetFilter() {\r
+ return filter;\r
+ }\r
+\r
+ gboolean Configure() {\r
+ HRESULT hres;\r
+\r
+ hres = CoCreateInstance (CLSID_VideoMixingRenderer, NULL, CLSCTX_INPROC,\r
+ IID_IBaseFilter, (LPVOID *) &filter);\r
+ if (FAILED (hres)) {\r
+ GST_ERROR_OBJECT (sink, \r
+ "Can't create an instance of renderer (error=%x)",\r
+ hres);\r
+ return FALSE;\r
+ }\r
+\r
+ hres = filter->QueryInterface (\r
+ IID_IVMRFilterConfig, (void **) &config);\r
+ if (FAILED (hres)) {\r
+ GST_WARNING_OBJECT (sink, "VMR filter config interface missing: %x", hres);\r
+ return FALSE;\r
+ }\r
+\r
+ hres = config->SetRenderingMode (VMRMode_Windowless);\r
+ if (FAILED (hres)) {\r
+ GST_WARNING_OBJECT (sink, "VMR couldn't be set to windowless mode: %x", hres);\r
+ return FALSE;\r
+ }\r
+ else {\r
+ GST_DEBUG_OBJECT (sink, "Set VMR (%p) to windowless mode!", filter);\r
+ }\r
+\r
+ hres = filter->QueryInterface (\r
+ IID_IVMRWindowlessControl, (void **) &control);\r
+ if (FAILED (hres)) {\r
+ GST_WARNING_OBJECT (sink, "VMR windowless control interface missing: %x", hres);\r
+ return FALSE;\r
+ }\r
+\r
+ if (sink->keep_aspect_ratio) {\r
+ control->SetAspectRatioMode(VMR_ARMODE_LETTER_BOX);\r
+ }\r
+ else {\r
+ control->SetAspectRatioMode(VMR_ARMODE_NONE);\r
+ }\r
+ return TRUE;\r
+ }\r
+\r
+ gboolean SetRendererWindow(HWND window) {\r
+ video_window = window;\r
+ HRESULT hres = control->SetVideoClippingWindow (video_window);\r
+ if (FAILED (hres)) {\r
+ GST_WARNING_OBJECT (sink, "Failed to set video clipping window on filter %p: %x", filter, hres);\r
+ return FALSE;\r
+ }\r
+ return TRUE;\r
+ }\r
+\r
+ void PaintWindow()\r
+ {\r
+ HRESULT hr;\r
+ PAINTSTRUCT ps;\r
+ HDC hdc;\r
+ RECT rcClient;\r
+\r
+ GetClientRect(video_window, &rcClient);\r
+ hdc = BeginPaint(video_window, &ps);\r
+\r
+ hr = control->RepaintVideo(video_window, hdc);\r
+\r
+ EndPaint(video_window, &ps);\r
+ }\r
+\r
+ void MoveWindow()\r
+ {\r
+ HRESULT hr;\r
+ RECT rect;\r
+\r
+ // Track the movement of the container window and resize as needed\r
+ GetClientRect(video_window, &rect);\r
+ hr = control->SetVideoPosition(NULL, &rect);\r
+ }\r
+\r
+ void DisplayModeChanged() {\r
+ control->DisplayModeChanged();\r
+ }\r
+\r
+ void DestroyWindow() {\r
+ ::DestroyWindow (video_window);\r
+ }\r
+};\r
+\r
+static gboolean \r
+gst_dshowvideosink_create_renderer (GstDshowVideoSink *sink) \r
+{\r
+ GST_DEBUG_OBJECT (sink, "Trying to create renderer '%s'", "VMR9");\r
+\r
+ RendererSupport *support = NULL;\r
+\r
+ if (sink->preferredrenderer) {\r
+ if (!strcmp (sink->preferredrenderer, "VMR9")) {\r
+ GST_INFO_OBJECT (sink, "Forcing use of VMR9");\r
+ support = new VMR9Support (sink);\r
+ }\r
+ else if (!strcmp (sink->preferredrenderer, "VMR")) {\r
+ GST_INFO_OBJECT (sink, "Forcing use of VMR");\r
+ support = new VMR7Support (sink);\r
+ }\r
+ else {\r
+ GST_ERROR_OBJECT (sink, "Unknown sink type '%s'", sink->preferredrenderer);\r
+ return FALSE;\r
+ }\r
+\r
+ if (!support->Configure()) {\r
+ GST_ERROR_OBJECT (sink, "Couldn't configure selected renderer");\r
+ delete support;\r
+ return FALSE;\r
+ }\r
+ goto done;\r
+ }\r
+\r
+ support = new VMR9Support (sink);\r
+ if (!support->Configure()) {\r
+ GST_INFO_OBJECT (sink, "Failed to configure VMR9, trying VMR7");\r
+ delete support;\r
+ support = new VMR7Support (sink);\r
+ if (!support->Configure()) {\r
+ GST_ERROR_OBJECT (sink, "Failed to configure VMR9 or VMR7");\r
+ delete support;\r
+ return FALSE;\r
+ }\r
+ }\r
+\r
+done:\r
+ sink->renderersupport = support;\r
+ return TRUE;\r
+}\r
+\r
+static gboolean\r
+gst_dshowvideosink_build_filtergraph (GstDshowVideoSink *sink)\r
+{\r
+ HRESULT hres;\r
+\r
+ /* Build our DirectShow FilterGraph, looking like: \r
+ *\r
+ * [ fakesrc ] -> [ sink filter ]\r
+ *\r
+ * so we can feed data in through the fakesrc.\r
+ *\r
+ * The sink filter can be one of our supported filters: VMR9 (VMR7?, EMR?)\r
+ */\r
+\r
+ hres = CoCreateInstance (CLSID_FilterGraph, NULL, CLSCTX_INPROC,\r
+ IID_IFilterGraph, (LPVOID *) & sink->filter_graph);\r
+ if (FAILED (hres)) {\r
+ GST_ERROR_OBJECT (sink, \r
+ "Can't create an instance of the dshow graph manager (error=%x)", hres);\r
+ goto error;\r
+ }\r
+\r
+ sink->fakesrc = new VideoFakeSrc();\r
+\r
+ IBaseFilter *filter;\r
+ hres = sink->fakesrc->QueryInterface (\r
+ IID_IBaseFilter, (void **) &filter);\r
+ if (FAILED (hres)) {\r
+ GST_ERROR_OBJECT (sink, "Could not QI fakesrc to IBaseFilter");\r
+ goto error;\r
+ }\r
+\r
+ hres = sink->filter_graph->AddFilter (filter, L"fakesrc");\r
+ if (FAILED (hres)) {\r
+ GST_ERROR_OBJECT (sink,\r
+ "Can't add our fakesrc filter to the graph (error=%x)", hres);\r
+ goto error;\r
+ }\r
+\r
+ if (!gst_dshowvideosink_create_renderer (sink)) {\r
+ GST_ERROR_OBJECT (sink, "Could not create a video renderer");\r
+ goto error;\r
+ }\r
+\r
+ /* dump_all_pin_media_types (sink->renderer); */\r
+\r
+ hres =\r
+ sink->filter_graph->AddFilter (sink->renderersupport->GetFilter(),\r
+ L"renderer");\r
+ if (FAILED (hres)) {\r
+ GST_ERROR_OBJECT (sink, \r
+ "Can't add renderer to the graph (error=%x)", hres);\r
+ goto error;\r
+ }\r
+\r
+ return TRUE;\r
+\r
+error:\r
+ if (sink->fakesrc) {\r
+ sink->fakesrc->Release();\r
+ sink->fakesrc = NULL;\r
+ }\r
+\r
+ if (sink->filter_graph) {\r
+ sink->filter_graph->Release();\r
+ sink->filter_graph = NULL;\r
+ }\r
+\r
+ return FALSE;\r
+}\r
+\r
+static gboolean\r
+gst_dshowvideosink_start (GstBaseSink * bsink)\r
+{\r
+ HRESULT hres = S_FALSE;\r
+ GstDshowVideoSink *sink = GST_DSHOWVIDEOSINK (bsink);\r
+\r
+ /* Just build the filtergraph; we don't link or otherwise configure it yet */\r
+ return gst_dshowvideosink_build_filtergraph (sink);\r
+}\r
+\r
+static gboolean\r
+gst_dshowvideosink_set_caps (GstBaseSink * bsink, GstCaps * caps)\r
+{\r
+ GstDshowVideoSink *sink = GST_DSHOWVIDEOSINK (bsink);\r
+\r
+ /* TODO: What do we want to do if the caps change while we're running?\r
+ * Find out if we can handle this or not... */\r
+\r
+ if (!gst_caps_to_directshow_media_type (caps, &sink->mediatype)) {\r
+ GST_WARNING_OBJECT (sink, "Cannot convert caps to AM_MEDIA_TYPE, rejecting");\r
+ return FALSE;\r
+ }\r
+\r
+ /* Now we have an AM_MEDIA_TYPE describing what we're going to send.\r
+ * We set this on our DirectShow fakesrc's output pin. \r
+ */\r
+ sink->fakesrc->GetOutputPin()->SetMediaType (&sink->mediatype);\r
+\r
+ return TRUE;\r
+}\r
+\r
+static gboolean\r
+gst_dshowvideosink_stop (GstBaseSink * bsink)\r
+{\r
+ IPin *input_pin = NULL, *output_pin = NULL;\r
+ HRESULT hres = S_FALSE;\r
+ GstDshowVideoSink *sink = GST_DSHOWVIDEOSINK (bsink);\r
+\r
+ if (!sink->filter_graph) {\r
+ GST_WARNING_OBJECT (sink, "Cannot destroy filter graph; it doesn't exist");\r
+ return TRUE;\r
+ }\r
+\r
+ /* Release the renderer */\r
+ if (sink->renderersupport) {\r
+ delete sink->renderersupport;\r
+ sink->renderersupport = NULL;\r
+ }\r
+\r
+ /* Release our dshow fakesrc */\r
+ if (sink->fakesrc) {\r
+ sink->fakesrc->Release();\r
+ sink->fakesrc = NULL;\r
+ }\r
+\r
+ /* Release the filter graph manager */\r
+ if (sink->filter_graph) {\r
+ sink->filter_graph->Release();\r
+ sink->filter_graph = NULL;\r
+ }\r
+\r
+ return TRUE;\r
+}\r
+\r
+static GstFlowReturn \r
+gst_dshowvideosink_render (GstBaseSink *bsink, GstBuffer *buffer)\r
+{\r
+ GstDshowVideoSink *sink = GST_DSHOWVIDEOSINK (bsink);\r
+ GstFlowReturn ret;\r
+\r
+ if (sink->window_closed) {\r
+ GST_WARNING_OBJECT (sink, "Window has been closed, stopping");\r
+ return GST_FLOW_ERROR;\r
+ }\r
+\r
+ GST_DEBUG_OBJECT (sink, "Pushing buffer through fakesrc->renderer");\r
+ ret = sink->fakesrc->GetOutputPin()->PushBuffer (buffer);\r
+ GST_DEBUG_OBJECT (sink, "Done pushing buffer through fakesrc->renderer");\r
+\r
+ return ret;\r
+}\r
+\r
+/* TODO: How can we implement these? Figure that out... */\r
/* GstBaseSink::unlock vfunc — stub (see the TODO above): does nothing yet.
 * A real implementation would need to make a blocked PushBuffer() return. */
static gboolean
gst_dshowvideosink_unlock (GstBaseSink * bsink)
{
  GstDshowVideoSink *sink = GST_DSHOWVIDEOSINK (bsink);

  return TRUE;
}
+\r
/* GstBaseSink::unlock_stop vfunc — stub (see the TODO above): does nothing
 * yet; would undo whatever unlock() ends up doing. */
static gboolean
gst_dshowvideosink_unlock_stop (GstBaseSink * bsink)
{
  GstDshowVideoSink *sink = GST_DSHOWVIDEOSINK (bsink);

  return TRUE;
}
+\r
+/* TODO: Move all of this into generic code? */\r
+\r
/* Helpers to format GUIDs the same way we find them in the source.
 * GUID_FORMAT expects the 11 arguments produced by GUID_ARGS(guid):
 * Data1, Data2, Data3, then all eight Data4 bytes in order.
 * (Previously Data4[2] was skipped and the out-of-bounds Data4[8] was
 * read instead.) */
#define GUID_FORMAT "{%.8x, %.4x, %.4x, { %.2x, %.2x, %.2x, %.2x, %.2x, %.2x, %.2x, %.2x }}"
#define GUID_ARGS(guid) \
    guid.Data1, guid.Data2, guid.Data3, \
    guid.Data4[0], guid.Data4[1], guid.Data4[2], guid.Data4[3], \
    guid.Data4[4], guid.Data4[5], guid.Data4[6], guid.Data4[7]
+\r
/* Stub: audio AM_MEDIA_TYPE -> GstCaps conversion is not implemented.
 * Always returns NULL, which callers treat as "no known mapping". */
static GstCaps *
audio_media_type_to_caps (AM_MEDIA_TYPE *mediatype)
{
  return NULL;
}
+\r
+static GstCaps *\r
+video_media_type_to_caps (AM_MEDIA_TYPE *mediatype)\r
+{\r
+ GstCaps *caps = NULL;\r
+\r
+ /* TODO: Add RGB types. */\r
+ if (IsEqualGUID (mediatype->subtype, MEDIASUBTYPE_YUY2))\r
+ caps = gst_caps_new_simple ("video/x-raw-yuv", \r
+ "format", GST_TYPE_FOURCC, GST_MAKE_FOURCC ('Y', 'U', 'Y', '2'), NULL);\r
+ else if (IsEqualGUID (mediatype->subtype, MEDIASUBTYPE_UYVY))\r
+ caps = gst_caps_new_simple ("video/x-raw-yuv", \r
+ "format", GST_TYPE_FOURCC, GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y'), NULL);\r
+ else if (IsEqualGUID (mediatype->subtype, MEDIASUBTYPE_YUYV))\r
+ caps = gst_caps_new_simple ("video/x-raw-yuv", \r
+ "format", GST_TYPE_FOURCC, GST_MAKE_FOURCC ('Y', 'U', 'Y', 'V'), NULL);\r
+ else if (IsEqualGUID (mediatype->subtype, MEDIASUBTYPE_YV12))\r
+ caps = gst_caps_new_simple ("video/x-raw-yuv", \r
+ "format", GST_TYPE_FOURCC, GST_MAKE_FOURCC ('Y', 'V', '1', '2'), NULL);\r
+\r
+ if (!caps) {\r
+ GST_DEBUG ("No subtype known; cannot continue");\r
+ return NULL;\r
+ }\r
+\r
+ if (IsEqualGUID (mediatype->formattype, FORMAT_VideoInfo) &&\r
+ mediatype->cbFormat >= sizeof(VIDEOINFOHEADER))\r
+ {\r
+ VIDEOINFOHEADER *vh = (VIDEOINFOHEADER *)mediatype->pbFormat;\r
+\r
+ /* TODO: Set PAR here. Based on difference between source and target RECTs? \r
+ * Do we want framerate? Based on AvgTimePerFrame? */\r
+ gst_caps_set_simple (caps, \r
+ "width", G_TYPE_INT, vh->bmiHeader.biWidth,\r
+ "height", G_TYPE_INT, vh->bmiHeader.biHeight,\r
+ NULL);\r
+ }\r
+\r
+ return caps;\r
+}\r
+\r
+\r
+/* Create a GstCaps object representing the same media type as\r
+ * this AM_MEDIA_TYPE.\r
+ *\r
+ * Returns NULL if no corresponding GStreamer type is known.\r
+ *\r
+ * May modify mediatype.\r
+ */\r
+static GstCaps *\r
+gst_directshow_media_type_to_caps (AM_MEDIA_TYPE *mediatype)\r
+{\r
+ GstCaps *caps = NULL;\r
+\r
+ if (IsEqualGUID (mediatype->majortype, MEDIATYPE_Video))\r
+ caps = video_media_type_to_caps (mediatype);\r
+ else if (IsEqualGUID (mediatype->majortype, MEDIATYPE_Audio))\r
+ caps = audio_media_type_to_caps (mediatype);\r
+ else {\r
+ GST_DEBUG ("Non audio/video media types not yet recognised, please add me: "\r
+ GUID_FORMAT, GUID_ARGS(mediatype->majortype));\r
+ }\r
+\r
+ if (caps) {\r
+ gchar *capsstring = gst_caps_to_string (caps);\r
+ GST_DEBUG ("Converted AM_MEDIA_TYPE to \"%s\"", capsstring);\r
+ g_free (capsstring);\r
+ }\r
+ else {\r
+ GST_WARNING ("Failed to convert AM_MEDIA_TYPE to caps");\r
+ }\r
+\r
+ return caps;\r
+}\r
+\r
+/* Fill in a DirectShow AM_MEDIA_TYPE structure representing the same media\r
+ * type as this GstCaps object.\r
+ *\r
+ * Returns FALSE if no corresponding type is known.\r
+ *\r
+ * Only operates on simple (single structure) caps.\r
+ */\r
static gboolean
gst_caps_to_directshow_media_type (GstCaps *caps, AM_MEDIA_TYPE *mediatype)
{
  /* Only the first structure of the caps is examined (simple caps only). */
  GstStructure *s = gst_caps_get_structure (caps, 0);
  const gchar *name = gst_structure_get_name (s);

  gchar *capsstring = gst_caps_to_string (caps);
  GST_DEBUG ("Converting caps \"%s\" to AM_MEDIA_TYPE", capsstring);
  g_free (capsstring);

  /* Start from an all-zero AM_MEDIA_TYPE; fields not set below stay 0. */
  memset (mediatype, 0, sizeof (AM_MEDIA_TYPE));

  if (!strcmp (name, "video/x-raw-yuv")) {
    guint32 fourcc;
    int width, height;
    int bpp;                   /* bits per pixel for the chosen fourcc */

    /* format, width and height are all mandatory; fail without them. */
    if (!gst_structure_get_fourcc (s, "format", &fourcc)) {
      GST_WARNING ("Failed to convert caps, no fourcc");
      return FALSE;
    }

    if (!gst_structure_get_int (s, "width", &width)) {
      GST_WARNING ("Failed to convert caps, no width");
      return FALSE;
    }
    if (!gst_structure_get_int (s, "height", &height)) {
      GST_WARNING ("Failed to convert caps, no height");
      return FALSE;
    }

    /* Map the GStreamer fourcc to the DirectShow subtype GUID and record
     * the bit depth needed for the size calculations below. */
    mediatype->majortype = MEDIATYPE_Video;
    switch (fourcc) {
      case GST_MAKE_FOURCC ('Y', 'U', 'Y', '2'):
        mediatype->subtype = MEDIASUBTYPE_YUY2;
        bpp = 16;
        break;
      case GST_MAKE_FOURCC ('Y', 'U', 'Y', 'V'):
        mediatype->subtype = MEDIASUBTYPE_YUYV;
        bpp = 16;
        break;
      case GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y'):
        mediatype->subtype = MEDIASUBTYPE_UYVY;
        bpp = 16;
        break;
      case GST_MAKE_FOURCC ('Y', 'V', '1', '2'):
        mediatype->subtype = MEDIASUBTYPE_YV12;
        bpp = 12;
        break;
      default:
        GST_WARNING ("Failed to convert caps, not a known fourcc");
        return FALSE;
    }

    mediatype->bFixedSizeSamples = TRUE; /* Always true for raw video */
    mediatype->bTemporalCompression = FALSE; /* Likewise, always false */

    {
      int par_n, par_d;
      /* Allocated with CoTaskMemAlloc as COM requires for AM_MEDIA_TYPE
       * format blocks; ownership passes to whoever frees mediatype.
       * NOTE(review): the allocation result is not checked — presumably
       * acceptable here, but confirm. */
      VIDEOINFOHEADER *vi = (VIDEOINFOHEADER *)CoTaskMemAlloc (sizeof (VIDEOINFOHEADER));
      memset (vi, 0, sizeof (VIDEOINFOHEADER));

      mediatype->formattype = FORMAT_VideoInfo;
      mediatype->cbFormat = sizeof (VIDEOINFOHEADER);
      mediatype->pbFormat = (BYTE *)vi;

      /* Frame size in bytes for this width/height/depth. */
      mediatype->lSampleSize = width * height * bpp / 8;

      GST_INFO ("Set mediatype format: size %d, sample size %d", mediatype->cbFormat, mediatype->lSampleSize);

      vi->rcSource.top = 0;
      vi->rcSource.left = 0;
      vi->rcSource.bottom = height;
      vi->rcSource.right = width;

      vi->rcTarget.top = 0;
      vi->rcTarget.left = 0;
      if (gst_structure_get_fraction (s, "pixel-aspect-ratio", &par_n, &par_d)) {
        /* To handle non-square pixels, we set the target rectangle to a
         * different size than the source rectangle.
         * There might be a better way, but this seems to work. */
        vi->rcTarget.bottom = height;
        vi->rcTarget.right = width * par_n / par_d;
        GST_DEBUG ("Got PAR: set target right to %d from width %d", vi->rcTarget.right, width);
      }
      else {
        GST_DEBUG ("No PAR found");
        vi->rcTarget.bottom = height;
        vi->rcTarget.right = width;
      }

      vi->bmiHeader.biSize = sizeof (BITMAPINFOHEADER);
      vi->bmiHeader.biWidth = width;
      vi->bmiHeader.biHeight = -height; /* Required to be negative. */
      vi->bmiHeader.biPlanes = 1; /* Required to be 1 */
      vi->bmiHeader.biBitCount = bpp;
      vi->bmiHeader.biCompression = fourcc;
      vi->bmiHeader.biSizeImage = width * height * bpp / 8;

      /* We can safely zero these; they don't matter for our uses */
      vi->bmiHeader.biXPelsPerMeter = 0;
      vi->bmiHeader.biYPelsPerMeter = 0;
      vi->bmiHeader.biClrUsed = 0;
      vi->bmiHeader.biClrImportant = 0;
    }

    GST_DEBUG ("Successfully built AM_MEDIA_TYPE from caps");
    return TRUE;
  }

  GST_WARNING ("Failed to convert caps, not a known caps type");
  /* Only YUV supported so far */

  return FALSE;
}
+\r
+/* Plugin entry point */\r
+extern "C" static gboolean\r
+plugin_init (GstPlugin * plugin)\r
+{\r
+ /* PRIMARY: this is the best videosink to use on windows */\r
+ if (!gst_element_register (plugin, "dshowvideosink",\r
+ GST_RANK_PRIMARY, GST_TYPE_DSHOWVIDEOSINK))\r
+ return FALSE;\r
+\r
+ return TRUE;\r
+}\r
+\r
/* Register the plugin descriptor with GStreamer (expands to the exported
 * gst_plugin_desc structure pointing at plugin_init above). */
extern "C" GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
    GST_VERSION_MINOR,
    "dshowsinkwrapper",
    "DirectShow sink wrapper plugin",
    plugin_init, VERSION, "LGPL", GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)