Many GStreamer objects were not properly managed or never released.
Change-Id: I38b3854e8b9e2264b5b647f331d3bb16b886e2d6
Reviewed-by: Andrew den Exter <andrew.den.exter@qinetic.com.au>
if (m_setup || m_stream == 0 || appsrc == 0)
return false;
+ if (m_appSrc)
+ gst_object_unref(G_OBJECT(m_appSrc));
+
m_appSrc = GST_APP_SRC(appsrc);
+ gst_object_ref(G_OBJECT(m_appSrc));
gst_app_src_set_callbacks(m_appSrc, (GstAppSrcCallbacks*)&m_callbacks, this, (GDestroyNotify)&QGstAppSrc::destroy_notify);
g_object_get(G_OBJECT(m_appSrc), "max-bytes", &m_maxBytes, NULL);
gst_caps_remove_structure(caps, 0);
}
+
+ gst_caps_unref(caps);
+ gst_caps_unref(allCaps);
#else
Q_UNUSED(elementType);
#endif // GST_CHECK_VERSION(0,10,31)
padTemplates = padTemplates->next;
if (padTemplate->direction == padDirection) {
- const GstCaps *caps = gst_static_caps_get(&padTemplate->static_caps);
+ GstCaps *caps = gst_static_caps_get(&padTemplate->static_caps);
for (uint i=0; i<gst_caps_get_size(caps); i++) {
const GstStructure *structure = gst_caps_get_structure(caps, i);
gst_caps_merge_structure(res, newStructure);
}
+ gst_caps_unref(caps);
}
}
}
GstPad *pad = gst_element_get_static_pad(m_videoSink,"sink");
m_bufferProbeId = gst_pad_add_buffer_probe(pad, G_CALLBACK(padBufferProbe), this);
+ gst_object_unref(GST_OBJECT(pad));
}
}
#include "qgstreamervideorenderer_p.h"
#include <private/qvideosurfacegstsink_p.h>
+#include <private/qgstutils_p.h>
#include <qabstractvideosurface.h>
#include <QDebug>
{
if (!m_videoSink && m_surface) {
m_videoSink = QVideoSurfaceGstSink::createSink(m_surface);
- gst_object_ref(GST_OBJECT(m_videoSink)); //Take ownership
- gst_object_sink(GST_OBJECT(m_videoSink));
+ qt_gst_object_ref_sink(GST_OBJECT(m_videoSink)); //Take ownership
}
return reinterpret_cast<GstElement*>(m_videoSink);
if (!m_videoSink)
m_videoSink = gst_element_factory_make ("ximagesink", NULL);
- gst_object_ref (GST_OBJECT (m_videoSink)); //Take ownership
- gst_object_sink (GST_OBJECT (m_videoSink));
+ qt_gst_object_ref_sink(GST_OBJECT (m_videoSink)); //Take ownership
}
//find video native size to update video widget size hint
GstPad *pad = gst_element_get_static_pad(m_videoSink,"sink");
GstCaps *caps = gst_pad_get_negotiated_caps(pad);
+ gst_object_unref(GST_OBJECT(pad));
if (caps) {
m_widget->setNativeSize(QGstUtils::capsCorrectedResolution(caps));
m_videoSink = gst_element_factory_make("xvimagesink", NULL);
if (m_videoSink) {
- gst_object_ref(GST_OBJECT(m_videoSink)); //Take ownership
- gst_object_sink(GST_OBJECT(m_videoSink));
+ qt_gst_object_ref_sink(GST_OBJECT(m_videoSink)); //Take ownership
GstPad *pad = gst_element_get_static_pad(m_videoSink,"sink");
m_bufferProbeId = gst_pad_add_buffer_probe(pad, G_CALLBACK(padBufferProbe), this);
+ gst_object_unref(GST_OBJECT(pad));
}
}
GstPad *pad = gst_element_get_static_pad(m_videoSink,"sink");
m_bufferProbeId = gst_pad_add_buffer_probe(pad, G_CALLBACK(padBufferProbe), this);
+ gst_object_unref(GST_OBJECT(pad));
return true;
}
//find video native size to update video widget size hint
GstPad *pad = gst_element_get_static_pad(m_videoSink,"sink");
GstCaps *caps = gst_pad_get_negotiated_caps(pad);
+ gst_object_unref(GST_OBJECT(pad));
if (caps) {
m_nativeSize = QGstUtils::capsCorrectedResolution(caps);
return QMultimedia::MaybeSupported;
}
+// Take ownership of a (possibly floating) GstObject reference.
+// Wraps gst_object_ref_sink(), which only exists from GStreamer
+// 0.10.24 onwards; older versions fall back to the manual
+// flag-clear / ref dance below.
+void qt_gst_object_ref_sink(gpointer object)
+{
+// NOTE: component-wise (MAJOR>=0 && MINOR>=10 && MICRO>=24) is wrong:
+// it rejects 0.11.x and 1.x. GST_CHECK_VERSION compares correctly.
+#if GST_CHECK_VERSION(0,10,24)
+ gst_object_ref_sink(object);
+#else
+ g_return_if_fail (GST_IS_OBJECT(object));
+
+ GST_OBJECT_LOCK(object);
+ if (G_LIKELY(GST_OBJECT_IS_FLOATING(object))) {
+ // Floating ref becomes our owned ref: just clear the flag,
+ // the refcount itself is unchanged.
+ GST_OBJECT_FLAG_UNSET(object, GST_OBJECT_FLOATING);
+ GST_OBJECT_UNLOCK(object);
+ } else {
+ // Already sunk by someone else: add a reference of our own.
+ GST_OBJECT_UNLOCK(object);
+ gst_object_ref(object);
+ }
+#endif
+}
+
QT_END_NAMESPACE
const QSet<QString> &supportedMimeTypeSet);
}
+void qt_gst_object_ref_sink(gpointer object);
+
QT_END_NAMESPACE
#endif
}
}
}
+ gst_caps_unref(caps);
}
}
gst_object_unref (factory);
if (!self->appsrc()->setup(appsrc))
qWarning()<<"Could not setup appsrc element";
+
+ g_object_unref(G_OBJECT(appsrc));
}
#endif
if (mFormat.isValid()) {
setAudioFlags(false);
GstCaps *caps = QGstUtils::capsForAudioFormat(mFormat);
- gst_app_sink_set_caps(m_appSink, caps); // appsink unrefs caps
+ gst_app_sink_set_caps(m_appSink, caps);
+ gst_caps_unref(caps);
} else {
// We want whatever the native audio format is
setAudioFlags(true);
else
caps = gst_caps_from_string(codec.toLatin1());
- return (GstEncodingProfile *)gst_encoding_audio_profile_new(
- caps,
- !preset.isEmpty() ? preset.toLatin1().constData() : NULL, //preset
- NULL, //restriction
- 0); //presence
+ GstEncodingProfile *profile = (GstEncodingProfile *)gst_encoding_audio_profile_new(
+ caps,
+ !preset.isEmpty() ? preset.toLatin1().constData() : NULL, //preset
+ NULL, //restriction
+ 0); //presence
+
+ gst_caps_unref(caps);
+
+ return profile;
}
QT_END_NAMESPACE
caps = gst_caps_from_string(format.toLatin1());
}
- return (GstEncodingContainerProfile *)gst_encoding_container_profile_new(
- "camerabin2_profile",
- (gchar *)"custom camera profile",
- caps,
- NULL); //preset
+ GstEncodingContainerProfile *profile = (GstEncodingContainerProfile *)gst_encoding_container_profile_new(
+ "camerabin2_profile",
+ (gchar *)"custom camera profile",
+ caps,
+ NULL); //preset
+
+ gst_caps_unref(caps);
+
+ return profile;
}
/*!
GstEncodingProfile *audioProfile = m_session->audioEncodeControl()->createProfile();
GstEncodingProfile *videoProfile = m_session->videoEncodeControl()->createProfile();
- if (audioProfile)
- gst_encoding_container_profile_add_profile(containerProfile, audioProfile);
- if (videoProfile)
- gst_encoding_container_profile_add_profile(containerProfile, videoProfile);
+ if (audioProfile) {
+ if (!gst_encoding_container_profile_add_profile(containerProfile, audioProfile))
+ gst_encoding_profile_unref(audioProfile);
+ }
+ if (videoProfile) {
+ if (!gst_encoding_container_profile_add_profile(containerProfile, videoProfile))
+ gst_encoding_profile_unref(videoProfile);
+ }
}
return containerProfile;
#include "camerabincapturebufferformat.h"
#include <private/qgstreamerbushelper_p.h>
#include <private/qgstreamervideorendererinterface_p.h>
+#include <private/qgstutils_p.h>
#include <qmediarecorder.h>
#ifdef HAVE_GST_PHOTOGRAPHY
#define CAMERABIN_IMAGE_MODE 1
#define CAMERABIN_VIDEO_MODE 2
-#define gstRef(element) { gst_object_ref(GST_OBJECT(element)); gst_object_sink(GST_OBJECT(element)); }
-#define gstUnref(element) { if (element) { gst_object_unref(GST_OBJECT(element)); element = 0; } }
-
#define PREVIEW_CAPS_4_3 \
"video/x-raw-rgb, width = (int) 640, height = (int) 480"
{
m_camerabin = gst_element_factory_make("camerabin2", "camerabin2");
g_signal_connect(G_OBJECT(m_camerabin), "notify::idle", G_CALLBACK(updateBusyStatus), this);
- gstRef(m_camerabin);
+ qt_gst_object_ref_sink(m_camerabin);
m_bus = gst_element_get_bus(m_camerabin);
gst_element_set_state(m_camerabin, GST_STATE_NULL);
gst_element_get_state(m_camerabin, NULL, NULL, GST_CLOCK_TIME_NONE);
- gstUnref(m_camerabin);
- gstUnref(m_viewfinderElement);
+ gst_object_unref(GST_OBJECT(m_bus));
+ gst_object_unref(GST_OBJECT(m_camerabin));
}
+ if (m_viewfinderElement)
+ gst_object_unref(GST_OBJECT(m_viewfinderElement));
}
#ifdef HAVE_GST_PHOTOGRAPHY
qWarning() << "Staring camera without viewfinder available";
m_viewfinderElement = gst_element_factory_make("fakesink", NULL);
}
- gst_object_ref(GST_OBJECT(m_viewfinderElement));
+ qt_gst_object_ref_sink(GST_OBJECT(m_viewfinderElement));
gst_element_set_state(m_camerabin, GST_STATE_NULL);
g_object_set(G_OBJECT(m_camerabin), VIEWFINDER_SINK_PROPERTY, m_viewfinderElement, NULL);
}
if (m_videoSrc != videoSrc)
g_object_set(G_OBJECT(m_camerabin), CAMERA_SOURCE_PROPERTY, m_videoSrc, NULL);
+ if (videoSrc)
+ gst_object_unref(GST_OBJECT(videoSrc));
+
return m_videoSrc;
}
m_recorderControl->applySettings();
+ GstEncodingContainerProfile *profile = m_recorderControl->videoProfile();
g_object_set (G_OBJECT(m_camerabin),
"video-profile",
- m_recorderControl->videoProfile(),
+ profile,
NULL);
+ gst_encoding_profile_unref(profile);
setAudioCaptureCaps();
}
}
}
+ gst_iterator_free(elements);
}
}
NULL, //restriction
1); //presence
+ gst_caps_unref(caps);
+
gst_encoding_video_profile_set_pass(profile, 0);
gst_encoding_video_profile_set_variableframerate(profile, TRUE);
//qDebug() << "set caps filter:" << gst_caps_to_string(caps);
g_object_set(G_OBJECT(capsFilter), "caps", caps, NULL);
+
+ gst_caps_unref(caps);
}
if (encoderElement) {
}
}
}
+ gst_caps_unref(caps);
}
}
gst_object_unref (factory);
#include <private/qgstreamervideorendererinterface_p.h>
#include <private/qgstreameraudioprobecontrol_p.h>
#include <private/qgstreamerbushelper_p.h>
+#include <private/qgstutils_p.h>
#include <gst/gsttagsetter.h>
#include <gst/gstversion.h>
QT_BEGIN_NAMESPACE
-#define gstRef(element) { gst_object_ref(GST_OBJECT(element)); gst_object_sink(GST_OBJECT(element)); }
-#define gstUnref(element) { if (element) { gst_object_unref(GST_OBJECT(element)); element = 0; } }
-
QGstreamerCaptureSession::QGstreamerCaptureSession(QGstreamerCaptureSession::CaptureMode captureMode, QObject *parent)
:QObject(parent),
m_state(StoppedState),
m_passPrerollImage(false)
{
m_pipeline = gst_pipeline_new("media-capture-pipeline");
- gstRef(m_pipeline);
+ qt_gst_object_ref_sink(m_pipeline);
m_bus = gst_element_get_bus(m_pipeline);
m_busHelper = new QGstreamerBusHelper(m_bus, this);
{
setState(StoppedState);
gst_element_set_state(m_pipeline, GST_STATE_NULL);
+ gst_object_unref(GST_OBJECT(m_bus));
gst_object_unref(GST_OBJECT(m_pipeline));
}
gst_bin_add(GST_BIN(encodeBin), audioEncoder);
if (!gst_element_link_many(audioConvert, audioQueue, m_audioVolume, audioEncoder, muxer, NULL)) {
+ m_audioVolume = 0;
gst_object_unref(encodeBin);
return 0;
}
g_object_set(G_OBJECT(capsFilter), "caps", caps, NULL);
+ gst_caps_unref(caps);
}
// add ghostpads
GstPad *pad = gst_element_get_static_pad(queue, "src");
Q_ASSERT(pad);
gst_pad_add_buffer_probe(pad, G_CALLBACK(passImageFilter), this);
+ gst_object_unref(GST_OBJECT(pad));
g_object_set(G_OBJECT(sink), "signal-handoffs", TRUE, NULL);
g_signal_connect(G_OBJECT(sink), "handoff",
#define REMOVE_ELEMENT(element) { if (element) {gst_bin_remove(GST_BIN(m_pipeline), element); element = 0;} }
+#define UNREF_ELEMENT(element) { if (element) { gst_object_unref(GST_OBJECT(element)); element = 0; } }
bool QGstreamerCaptureSession::rebuildGraph(QGstreamerCaptureSession::PipelineMode newMode)
{
if (ok) {
gst_bin_add_many(GST_BIN(m_pipeline), m_audioSrc, m_audioPreview, NULL);
ok &= gst_element_link(m_audioSrc, m_audioPreview);
+ } else {
+ UNREF_ELEMENT(m_audioSrc);
+ UNREF_ELEMENT(m_audioPreview);
}
}
if (m_captureMode & Video || m_captureMode & Image) {
ok &= gst_element_link(m_videoTee, m_videoPreviewQueue);
ok &= gst_element_link(m_videoPreviewQueue, m_videoPreview);
ok &= gst_element_link(m_videoTee, m_imageCaptureBin);
+ } else {
+ UNREF_ELEMENT(m_videoSrc);
+ UNREF_ELEMENT(m_videoTee);
+ UNREF_ELEMENT(m_videoPreviewQueue);
+ UNREF_ELEMENT(m_videoPreview);
+ UNREF_ELEMENT(m_imageCaptureBin);
}
}
break;
ok &= gst_element_link(m_audioTee, m_audioPreviewQueue);
ok &= gst_element_link(m_audioPreviewQueue, m_audioPreview);
ok &= gst_element_link(m_audioTee, m_encodeBin);
+ } else {
+ UNREF_ELEMENT(m_audioSrc);
+ UNREF_ELEMENT(m_audioPreview);
+ UNREF_ELEMENT(m_audioTee);
+ UNREF_ELEMENT(m_audioPreviewQueue);
}
}
ok &= gst_element_link(m_videoSrc, m_videoTee);
ok &= gst_element_link(m_videoTee, m_videoPreviewQueue);
ok &= gst_element_link(m_videoPreviewQueue, m_videoPreview);
+ } else {
+ UNREF_ELEMENT(m_videoSrc);
+ UNREF_ELEMENT(m_videoTee);
+ UNREF_ELEMENT(m_videoPreviewQueue);
+ UNREF_ELEMENT(m_videoPreview);
}
if (ok && (m_captureMode & Video))
}
}
+ gst_iterator_free(elements);
}
}
return;
GstPad *pad = getAudioProbePad();
- if (pad)
+ if (pad) {
gst_pad_remove_buffer_probe(pad, m_audioBufferProbeId);
+ gst_object_unref(G_OBJECT(pad));
+ }
m_audioBufferProbeId = -1;
}
Q_ASSERT(m_audioBufferProbeId == -1);
GstPad *pad = getAudioProbePad();
- if (pad)
+ if (pad) {
m_audioBufferProbeId = gst_pad_add_buffer_probe(pad, G_CALLBACK(padAudioBufferProbe), this);
+ gst_object_unref(G_OBJECT(pad));
+ }
}
QT_END_NAMESPACE
//qDebug() << "set video caps filter:" << gst_caps_to_string(caps);
g_object_set(G_OBJECT(capsFilter), "caps", caps, NULL);
+
+ gst_caps_unref(caps);
}
return GST_ELEMENT(encoderBin);
#include <private/gstvideoconnector_p.h>
#include <private/qgstutils_p.h>
#include <private/playlistfileparser_p.h>
+#include <private/qgstutils_p.h>
#include <gst/gstvalue.h>
#include <gst/base/gstbasesrc.h>
}
}
- m_videoOutputBin = gst_bin_new("video-output-bin");
- gst_object_ref(GST_OBJECT(m_videoOutputBin));
-
- m_videoIdentity = GST_ELEMENT(g_object_new(gst_video_connector_get_type(), 0));
+ m_videoIdentity = GST_ELEMENT(g_object_new(gst_video_connector_get_type(), 0)); // floating ref
g_signal_connect(G_OBJECT(m_videoIdentity), "connection-failed", G_CALLBACK(insertColorSpaceElement), (gpointer)this);
+
m_colorSpace = gst_element_factory_make("ffmpegcolorspace", "ffmpegcolorspace-vo");
- gst_object_ref(GST_OBJECT(m_colorSpace));
+ // might not get a parent, take ownership to avoid leak
+ qt_gst_object_ref_sink(GST_OBJECT(m_colorSpace));
m_nullVideoSink = gst_element_factory_make("fakesink", NULL);
g_object_set(G_OBJECT(m_nullVideoSink), "sync", true, NULL);
gst_object_ref(GST_OBJECT(m_nullVideoSink));
+
+ m_videoOutputBin = gst_bin_new("video-output-bin");
+ // might not get a parent, take ownership to avoid leak
+ qt_gst_object_ref_sink(GST_OBJECT(m_videoOutputBin));
gst_bin_add_many(GST_BIN(m_videoOutputBin), m_videoIdentity, m_nullVideoSink, NULL);
gst_element_link(m_videoIdentity, m_nullVideoSink);
if (!self->appsrc()->setup(appsrc))
qWarning()<<"Could not setup appsrc element";
+
+ g_object_unref(G_OBJECT(appsrc));
}
#endif