* \brief Use GStreamer to read/write video
*/
#include "precomp.hpp"
+
+#include <opencv2/core/utils/logger.hpp>
+#include <opencv2/core/utils/filesystem.hpp>
+
#include <iostream>
-using namespace std;
-#ifndef _MSC_VER
-#include <unistd.h>
-#endif
#include <string.h>
+
#include <gst/gst.h>
#include <gst/gstbuffer.h>
#include <gst/video/video.h>
#endif
-#ifdef NDEBUG
-#define CV_WARN(message)
-#else
-#define CV_WARN(message) fprintf(stderr, "OpenCV | GStreamer warning: %s (%s:%d)\n", message, __FILE__, __LINE__)
-#endif
+#define CV_WARN(...) CV_LOG_WARNING(NULL, "OpenCV | GStreamer warning: " << __VA_ARGS__)
#if GST_VERSION_MAJOR == 0
#define COLOR_ELEM "ffmpegcolorspace"
#define COLOR_ELEM_NAME COLOR_ELEM
#endif
-#if defined(_WIN32) || defined(_WIN64)
-#if defined(__MINGW32__)
-inline char *realpath(const char *path, char *resolved_path)
+#if GST_VERSION_MAJOR == 0
+#define CV_GST_FORMAT(format) &(format)
+#else
+#define CV_GST_FORMAT(format) (format)
+#endif
+
+
+namespace cv {
+
+static void toFraction(double decimal, CV_OUT int& numerator, CV_OUT int& denominator);
+static void handleMessage(GstElement * pipeline);
+
+
+namespace {
+
+// GSafePtr support machinery: type-specific ref/unref helpers used by the
+// GSafePtr<T> RAII wrapper below to manage GLib/GStreamer object lifetimes.
+// GSafePtr_addref(): takes ownership of a "floating" reference (see
+// g_object_ref_sink) when a pointer is stored via GSafePtr::reset().
+template<typename T> static inline void GSafePtr_addref(T* ptr)
{
- return _fullpath(resolved_path,path,PATH_MAX);
+ if (ptr)
+ g_object_ref_sink(ptr);
}
+
+// GSafePtr_release(): primary template is declared but intentionally not
+// defined; each supported type provides an explicit specialization that
+// calls the matching unref/free function and NULLs the pointer.
+template<typename T> static inline void GSafePtr_release(T** pPtr);
+
+template<> inline void GSafePtr_release<GError>(GError** pPtr) { g_clear_error(pPtr); }
+template<> inline void GSafePtr_release<GstElement>(GstElement** pPtr) { if (pPtr) { gst_object_unref(G_OBJECT(*pPtr)); *pPtr = NULL; } }
+template<> inline void GSafePtr_release<GstElementFactory>(GstElementFactory** pPtr) { if (pPtr) { gst_object_unref(G_OBJECT(*pPtr)); *pPtr = NULL; } }
+template<> inline void GSafePtr_release<GstPad>(GstPad** pPtr) { if (pPtr) { gst_object_unref(G_OBJECT(*pPtr)); *pPtr = NULL; } }
+template<> inline void GSafePtr_release<GstCaps>(GstCaps** pPtr) { if (pPtr) { gst_caps_unref(*pPtr); *pPtr = NULL; } }
+template<> inline void GSafePtr_release<GstBuffer>(GstBuffer** pPtr) { if (pPtr) { gst_buffer_unref(*pPtr); *pPtr = NULL; } }
+#if GST_VERSION_MAJOR > 0
+template<> inline void GSafePtr_release<GstSample>(GstSample** pPtr) { if (pPtr) { gst_sample_unref(*pPtr); *pPtr = NULL; } }
#endif
-#define snprintf _snprintf
-#define vsnprintf _vsnprintf
-#define strcasecmp _stricmp
-#define strncasecmp _strnicmp
-#include <sys/stat.h>
+template<> inline void GSafePtr_release<GstBus>(GstBus** pPtr) { if (pPtr) { gst_object_unref(G_OBJECT(*pPtr)); *pPtr = NULL; } }
+template<> inline void GSafePtr_release<GstMessage>(GstMessage** pPtr) { if (pPtr) { gst_message_unref(*pPtr); *pPtr = NULL; } }
+
+#if FULL_GST_VERSION >= VERSION_NUM(0,10,32)
+template<> inline void GSafePtr_release<GstEncodingVideoProfile>(GstEncodingVideoProfile** pPtr) { if (pPtr) { gst_encoding_profile_unref(*pPtr); *pPtr = NULL; } }
+template<> inline void GSafePtr_release<GstEncodingContainerProfile>(GstEncodingContainerProfile** pPtr) { if (pPtr) { gst_object_unref(G_OBJECT(*pPtr)); *pPtr = NULL; } }
#endif
-void toFraction(double decimal, double &numerator, double &denominator);
-void handleMessage(GstElement * pipeline);
+// g_malloc'ed strings: addref is declared-only on purpose (link error if
+// GSafePtr<char>::reset() is ever used); release frees via g_free().
+template<> inline void GSafePtr_addref<char>(char* pPtr); // declaration only. not defined. should not be used
+template<> inline void GSafePtr_release<char>(char** pPtr) { if (pPtr) { g_free(*pPtr); *pPtr = NULL; } }
-using namespace cv;
+// RAII smart pointer for GLib/GStreamer heap objects. On destruction the
+// held pointer is released through the type-specific GSafePtr_release<T>()
+// specialization. Non-copyable; ownership moves only through explicit
+// attach()/detach()/reset()/swap() calls.
+template <typename T>
+class GSafePtr
+{
+protected:
+ T* ptr;
+public:
+ inline GSafePtr() CV_NOEXCEPT : ptr(NULL) { }
+ inline ~GSafePtr() CV_NOEXCEPT { release(); }
+ // Drop the current reference (if any) and reset the pointer to NULL.
+ inline void release() CV_NOEXCEPT
+ {
+#if 0
+ printf("release: %s:%d: %p\n", CV__TRACE_FUNCTION, __LINE__, ptr);
+ if (ptr) {
+ printf(" refcount: %d\n", (int)GST_OBJECT_REFCOUNT_VALUE(ptr)); \
+ }
+#endif
+ if (ptr)
+ GSafePtr_release<T>(&ptr);
+ }
+
+ inline operator T* () CV_NOEXCEPT { return ptr; }
+ inline operator /*const*/ T* () const CV_NOEXCEPT { return (T*)ptr; } // there is no const correctness in Gst C API
+
+ // NOTE: get() on a const wrapper asserts non-NULL; the non-const overload may return NULL.
+ inline T* get() CV_NOEXCEPT { return ptr; }
+ inline /*const*/ T* get() const CV_NOEXCEPT { CV_Assert(ptr); return (T*)ptr; } // there is no const correctness in Gst C API
+
+ inline const T* operator -> () const { CV_Assert(ptr); return ptr; }
+ inline operator bool () const CV_NOEXCEPT { return ptr != NULL; }
+ inline bool operator ! () const CV_NOEXCEPT { return ptr == NULL; }
-static cv::Mutex gst_initializer_mutex;
+ // Out-parameter helper for C APIs (e.g. GError**): must currently hold NULL.
+ inline T** getRef() { CV_Assert(ptr == NULL); return &ptr; }
+
+ inline GSafePtr& reset(T* p) CV_NOEXCEPT // pass result of functions with "transfer floating" ownership
+ {
+ //printf("reset: %s:%d: %p\n", CV__TRACE_FUNCTION, __LINE__, p);
+ release();
+ if (p)
+ {
+ GSafePtr_addref<T>(p);
+ ptr = p;
+ }
+ return *this;
+ }
+
+ inline GSafePtr& attach(T* p) CV_NOEXCEPT // pass result of functions with "transfer full" ownership
+ {
+ //printf("attach: %s:%d: %p\n", CV__TRACE_FUNCTION, __LINE__, p);
+ release(); ptr = p; return *this;
+ }
+ // Give up ownership without releasing: the caller becomes responsible.
+ inline T* detach() CV_NOEXCEPT { T* p = ptr; ptr = NULL; return p; }
+
+ inline void swap(GSafePtr& o) CV_NOEXCEPT { std::swap(ptr, o.ptr); }
+private:
+ GSafePtr(const GSafePtr&); // = disabled
+ GSafePtr& operator=(const T*); // = disabled
+};
+
+} // namespace
/*!
* \brief The gst_initializer class
class gst_initializer
{
public:
- static void init()
+ // Returns the process-wide initializer; initialization runs on the first
+ // call (thread-safe via C++11 static-local initialization). Throws
+ // Error::StsError if GStreamer could not be initialized.
+ static gst_initializer& init()
{
- gst_initializer_mutex.lock();
- static gst_initializer init;
- gst_initializer_mutex.unlock();
+ static gst_initializer g_init;
+ if (g_init.isFailed)
+ CV_Error(Error::StsError, "Can't initialize GStreamer");
+ return g_init;
}
private:
- gst_initializer()
+ bool isFailed;
+ bool call_deinit; // opt-in gst_deinit() at process shutdown (env var)
+ gst_initializer() :
+ isFailed(false)
{
- gst_init(NULL, NULL);
+ call_deinit = utils::getConfigurationParameterBool("OPENCV_VIDEOIO_GSTREAMER_CALL_DEINIT", false);
+
+ GSafePtr<GError> err;
+ gst_init_check(NULL, NULL, err.getRef());
+ if (err)
+ {
+ CV_WARN("Can't initialize GStreamer: " << err->message);
+ isFailed = true;
+ return;
+ }
guint major, minor, micro, nano;
gst_version(&major, &minor, &micro, &nano);
if (GST_VERSION_MAJOR != major)
{
- CV_WARN("incompatible gstreamer version");
+ CV_WARN("incompatible GStreamer version");
+ isFailed = true;
+ return;
+ }
+ }
+ ~gst_initializer()
+ {
+ if (call_deinit)
+ {
+ // Debug leaks: GST_LEAKS_TRACER_STACK_TRACE=1 GST_DEBUG="GST_TRACER:7" GST_TRACERS="leaks"
+ gst_deinit();
}
-// gst_debug_set_active(1);
-// gst_debug_set_colored(1);
-// gst_debug_set_default_threshold(GST_LEVEL_INFO);
}
};
+// Map an OpenCV capture property id to the corresponding GStreamer element
+// property name; returns an empty string for unsupported ids.
-inline static string get_gst_propname(int propId)
+inline static
+std::string get_gst_propname(int propId)
{
switch (propId)
{
case CV_CAP_PROP_CONTRAST: return "contrast";
case CV_CAP_PROP_SATURATION: return "saturation";
case CV_CAP_PROP_HUE: return "hue";
- default: return string();
+ default: return std::string();
}
}
+// Check whether a GStreamer element factory with the given name is available
+// in the registry (i.e. the required plugin is installed).
-inline static bool is_gst_element_exists(const std::string & name)
+inline static
+bool is_gst_element_exists(const std::string& name)
{
- GstElementFactory * testfac = gst_element_factory_find(name.c_str());
- if (!testfac)
- return false;
- g_object_unref(G_OBJECT(testfac));
- return true;
+ GSafePtr<GstElementFactory> testfac; testfac.attach(gst_element_factory_find(name.c_str()));
+ return (bool)testfac;
}
//==================================================================================================
+// IVideoCapture backend implemented on top of a GStreamer pipeline with an
+// appsink element; frames are pulled synchronously from the sink.
class GStreamerCapture CV_FINAL : public IVideoCapture
{
private:
- GstElement* pipeline;
- GstElement* v4l2src;
- GstElement* sink;
-#if GST_VERSION_MAJOR > 0
- GstSample* sample;
+ GSafePtr<GstElement> pipeline;
+ GSafePtr<GstElement> v4l2src;
+ GSafePtr<GstElement> sink;
+#if GST_VERSION_MAJOR == 0
+ GSafePtr<GstBuffer> buffer;
#else
- void * sample; // unused
- GstBuffer* buffer;
+ GSafePtr<GstSample> sample;
#endif
- GstCaps* caps;
+ GSafePtr<GstCaps> caps;
+
gint64 duration;
gint width;
gint height;
- gint channels;
double fps;
bool isPosFramesSupported;
bool isPosFramesEmulated;
gint64 emulatedFrameNumber;
- bool isOutputByteBuffer;
public:
GStreamerCapture();
- ~GStreamerCapture();
+ virtual ~GStreamerCapture() CV_OVERRIDE;
virtual bool grabFrame() CV_OVERRIDE;
virtual bool retrieveFrame(int /*unused*/, OutputArray dst) CV_OVERRIDE;
virtual double getProperty(int propId) const CV_OVERRIDE;
virtual bool setProperty(int propId, double value) CV_OVERRIDE;
- virtual bool isOpened() const CV_OVERRIDE;
+ virtual bool isOpened() const CV_OVERRIDE { return (bool)pipeline; }
virtual int getCaptureDomain() CV_OVERRIDE { return cv::CAP_GSTREAMER; }
+ // open() overloads: camera index or filename/URI/manual pipeline string.
bool open(int id);
bool open(const String &filename_);
static void newPad(GstElement * /*elem*/, GstPad *pad, gpointer data);
protected:
- bool determineFrameDims(Size & sz);
+ bool determineFrameDims(CV_OUT Size& sz, CV_OUT gint& channels, CV_OUT bool& isOutputByteBuffer);
bool isPipelinePlaying();
void startPipeline();
void stopPipeline();
void removeFilter(const char *filter);
};
-/*!
- * \brief CvCapture_GStreamer::init
- * inits the class
- */
+// Default constructor: GSafePtr members default to NULL; numeric state is
+// initialized to "unknown" sentinels (-1) until a pipeline is opened.
GStreamerCapture::GStreamerCapture() :
- pipeline(NULL), v4l2src(NULL), sink(NULL), sample(NULL),
-#if GST_VERSION_MAJOR == 0
- buffer(NULL),
-#endif
- caps(NULL),
- duration(-1), width(-1), height(-1), channels(0), fps(-1),
+ duration(-1), width(-1), height(-1), fps(-1),
isPosFramesSupported(false),
isPosFramesEmulated(false),
- emulatedFrameNumber(-1),
- isOutputByteBuffer(false)
+ emulatedFrameNumber(-1)
{
}
{
if (isPipelinePlaying())
stopPipeline();
- if (pipeline && GST_IS_ELEMENT(pipeline))
+ if (pipeline && GST_IS_ELEMENT(pipeline.get()))
{
- gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_NULL);
- gst_object_unref(GST_OBJECT(pipeline));
+ gst_element_set_state(pipeline, GST_STATE_NULL);
+ pipeline.release();
}
}
*/
bool GStreamerCapture::grabFrame()
{
- if(!pipeline)
+ if (!pipeline || !GST_IS_ELEMENT(pipeline.get()))
return false;
// start the pipeline if it was not in playing state yet
- if(!this->isPipelinePlaying())
+ if (!this->isPipelinePlaying())
this->startPipeline();
// bail out if EOS
- if(gst_app_sink_is_eos(GST_APP_SINK(sink)))
+ if (gst_app_sink_is_eos(GST_APP_SINK(sink.get())))
return false;
#if GST_VERSION_MAJOR == 0
- if(buffer)
- gst_buffer_unref(buffer);
- buffer = gst_app_sink_pull_buffer(GST_APP_SINK(sink));
- if(!buffer)
+ buffer.attach(gst_app_sink_pull_buffer(GST_APP_SINK(sink.get())));
+ if (!buffer)
return false;
#else
- if(sample)
- gst_sample_unref(sample);
- sample = gst_app_sink_pull_sample(GST_APP_SINK(sink));
- if(!sample)
+ sample.attach(gst_app_sink_pull_sample(GST_APP_SINK(sink.get())));
+ if (!sample)
return false;
#endif
if (!buffer)
return false;
#else
- if(!sample)
+ if (!sample)
return false;
#endif
Size sz;
- if (!determineFrameDims(sz))
+ gint channels = 0;
+ bool isOutputByteBuffer = false;
+ if (!determineFrameDims(sz, channels, isOutputByteBuffer))
return false;
// gstreamer expects us to handle the memory at this point
// so we can just wrap the raw buffer and be done with it
#if GST_VERSION_MAJOR == 0
- Mat src(sz, CV_8UC1, (uchar*)GST_BUFFER_DATA(buffer));
+ Mat src(sz, CV_MAKETYPE(CV_8U, channels), (uchar*)GST_BUFFER_DATA(buffer.get()));
src.copyTo(dst);
#else
- GstBuffer * buf = gst_sample_get_buffer(sample);
+ GstBuffer* buf = gst_sample_get_buffer(sample); // no lifetime transfer
if (!buf)
return false;
- GstMapInfo info;
+ GstMapInfo info = {};
if (!gst_buffer_map(buf, &info, GST_MAP_READ))
{
//something weird went wrong here. abort. abort.
- CV_WARN("Failed to map GStreamerbuffer to system memory");
+ CV_WARN("Failed to map GStreamer buffer to system memory");
return false;
}
+ try
{
Mat src;
if (isOutputByteBuffer)
CV_Assert(src.isContinuous());
src.copyTo(dst);
}
+ catch (...)
+ {
+ gst_buffer_unmap(buf, &info);
+ throw;
+ }
gst_buffer_unmap(buf, &info);
#endif
return true;
}
-bool GStreamerCapture::determineFrameDims(Size &sz)
+// Inspect the caps of the most recently pulled buffer/sample and derive the
+// frame size, the number of interleaved 8-bit channels and whether the data
+// must be exposed as a raw byte buffer (JPEG). Returns false when the caps
+// cannot be queried; throws StsNotImplemented for unsupported formats.
+bool GStreamerCapture::determineFrameDims(Size &sz, gint& channels, bool& isOutputByteBuffer)
{
#if GST_VERSION_MAJOR == 0
- GstCaps * frame_caps = gst_buffer_get_caps(buffer);
+ GstCaps * frame_caps = gst_buffer_get_caps(buffer); // no lifetime transfer
#else
- GstCaps * frame_caps = gst_sample_get_caps(sample);
+ GstCaps * frame_caps = gst_sample_get_caps(sample); // no lifetime transfer
#endif
// bail out in no caps
if (!GST_CAPS_IS_SIMPLE(frame_caps))
return false;
- GstStructure* structure = gst_caps_get_structure(frame_caps, 0);
+ GstStructure* structure = gst_caps_get_structure(frame_caps, 0); // no lifetime transfer
// bail out if width or height are 0
if (!gst_structure_get_int(structure, "width", &width)
|| !gst_structure_get_int(structure, "height", &height))
+ {
+ CV_WARN("Can't query frame size from GStreamer buffer");
return false;
+ }
sz = Size(width, height);
#if GST_VERSION_MAJOR > 0
- const gchar* name = gst_structure_get_name(structure);
-
- if (!name)
+ const gchar* name_ = gst_structure_get_name(structure);
+ if (!name_)
return false;
+ std::string name = toLowerCase(std::string(name_));
// we support 11 types of data:
// video/x-raw, format=BGR -> 8bit, 3 channels
// image/jpeg -> 8bit, mjpeg: buffer_size x 1 x 1
// bayer data is never decoded, the user is responsible for that
// everything is 8 bit, so we just test the caps for bit depth
- if (strcasecmp(name, "video/x-raw") == 0)
+ if (name == "video/x-raw")
{
- const gchar* format = gst_structure_get_string(structure, "format");
- if (!format)
+ const gchar* format_ = gst_structure_get_string(structure, "format");
+ if (!format_)
return false;
- if (strcasecmp(format, "BGR") == 0)
+ std::string format = toUpperCase(std::string(format_));
+
+ if (format == "BGR")
{
channels = 3;
}
- else if( (strcasecmp(format, "UYVY") == 0) || (strcasecmp(format, "YUY2") == 0) || (strcasecmp(format, "YVYU") == 0) )
+ else if (format == "UYVY" || format == "YUY2" || format == "YVYU")
{
channels = 2;
}
- else if( (strcasecmp(format, "NV12") == 0) || (strcasecmp(format, "NV21") == 0) || (strcasecmp(format, "YV12") == 0) || (strcasecmp(format, "I420") == 0) )
+ else if (format == "NV12" || format == "NV21" || format == "YV12" || format == "I420")
{
+ // planar 4:2:0 layouts: exposed as a single-channel buffer of 1.5x height
channels = 1;
sz.height = sz.height * 3 / 2;
}
- else if(strcasecmp(format, "GRAY8") == 0)
+ else if (format == "GRAY8")
{
channels = 1;
}
+ else
+ {
+ CV_Error_(Error::StsNotImplemented, ("Unsupported GStreamer format: %s", format.c_str()));
+ }
}
- else if (strcasecmp(name, "video/x-bayer") == 0)
+ else if (name == "video/x-bayer")
{
channels = 1;
}
- else if(strcasecmp(name, "image/jpeg") == 0)
+ else if (name == "image/jpeg")
{
// the correct size will be set once the first frame arrives
channels = 1;
isOutputByteBuffer = true;
}
+ else
+ {
+ CV_Error_(Error::StsNotImplemented, ("Unsupported GStreamer layer type: %s", name.c_str()));
+ }
#else
+ CV_UNUSED(isOutputByteBuffer);
// we support only video/x-raw, format=BGR -> 8bit, 3 channels
channels = 3;
#endif
return true;
}
-/*!
- * \brief CvCapture_GStreamer::isPipelinePlaying
- * \return if the pipeline is currently playing.
- */
bool GStreamerCapture::isPipelinePlaying()
{
+ if (!pipeline || !GST_IS_ELEMENT(pipeline.get()))
+ {
+ CV_WARN("GStreamer: pipeline have not been created");
+ return false;
+ }
GstState current, pending;
GstClockTime timeout = 5*GST_SECOND;
GstStateChangeReturn ret = gst_element_get_state(pipeline, ¤t, &pending, timeout);
if (!ret)
{
- CV_WARN("GStreamer: unable to query pipeline state");
+ CV_WARN("unable to query pipeline state");
return false;
}
return current == GST_STATE_PLAYING;
*/
void GStreamerCapture::startPipeline()
{
- //fprintf(stderr, "relinked, pausing\n");
- GstStateChangeReturn status = gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PLAYING);
+ if (!pipeline || !GST_IS_ELEMENT(pipeline.get()))
+ {
+ CV_WARN("GStreamer: pipeline have not been created");
+ return;
+ }
+ GstStateChangeReturn status = gst_element_set_state(pipeline, GST_STATE_PLAYING);
if (status == GST_STATE_CHANGE_ASYNC)
{
// wait for status update
if (status == GST_STATE_CHANGE_FAILURE)
{
handleMessage(pipeline);
- gst_object_unref(pipeline);
- pipeline = NULL;
- CV_WARN("GStreamer: unable to start pipeline");
+ pipeline.release();
+ CV_WARN("unable to start pipeline");
return;
}
if (isPosFramesEmulated)
emulatedFrameNumber = 0;
- //printf("state now playing\n");
handleMessage(pipeline);
}
-/*!
- * \brief CvCapture_GStreamer::stopPipeline
- * Stop the pipeline by setting it to NULL
- */
+// Stop the pipeline by switching it to the NULL state; on failure the
+// pipeline object is released.
void GStreamerCapture::stopPipeline()
{
- //fprintf(stderr, "restarting pipeline, going to ready\n");
- if(gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_NULL) == GST_STATE_CHANGE_FAILURE)
+ if (!pipeline || !GST_IS_ELEMENT(pipeline.get()))
+ {
+ CV_WARN("GStreamer: pipeline have not been created");
+ return;
+ }
+ if (gst_element_set_state(pipeline, GST_STATE_NULL) == GST_STATE_CHANGE_FAILURE)
{
- CV_WARN("GStreamer: unable to stop pipeline");
- gst_object_unref(pipeline);
- pipeline = NULL;
+ CV_WARN("unable to stop pipeline");
+ pipeline.release();
}
}
*/
+// Constrain the appsink caps with an additional (prop, v1[, v2]) entry,
+// creating a default BGR caps object first when none exists yet; the updated
+// caps are re-applied to the appsink.
void GStreamerCapture::setFilter(const char *prop, int type, int v1, int v2)
{
- //printf("GStreamer: setFilter \n");
- if(!caps || !( GST_IS_CAPS (caps) ))
+ if (!caps || !(GST_IS_CAPS(caps.get())))
{
- if(type == G_TYPE_INT)
+ if (type == G_TYPE_INT)
{
#if GST_VERSION_MAJOR == 0
- caps = gst_caps_new_simple("video/x-raw-rgb", prop, type, v1, NULL);
+ caps.attach(gst_caps_new_simple("video/x-raw-rgb", prop, type, v1, NULL));
#else
- caps = gst_caps_new_simple("video/x-raw","format",G_TYPE_STRING,"BGR", prop, type, v1, NULL);
+ caps.attach(gst_caps_new_simple("video/x-raw", "format", G_TYPE_STRING, "BGR", prop, type, v1, NULL));
#endif
}
else
{
#if GST_VERSION_MAJOR == 0
- caps = gst_caps_new_simple("video/x-raw-rgb", prop, type, v1, v2, NULL);
+ caps.attach(gst_caps_new_simple("video/x-raw-rgb", prop, type, v1, v2, NULL));
#else
- caps = gst_caps_new_simple("video/x-raw","format",G_TYPE_STRING,"BGR", prop, type, v1, v2, NULL);
+ caps.attach(gst_caps_new_simple("video/x-raw", "format", G_TYPE_STRING, "BGR", prop, type, v1, v2, NULL));
#endif
}
}
else
{
#if GST_VERSION_MAJOR > 0
- if (! gst_caps_is_writable(caps))
- caps = gst_caps_make_writable (caps);
+ if (!gst_caps_is_writable(caps.get()))
+ caps.attach(gst_caps_make_writable(caps.detach()));
#endif
- if(type == G_TYPE_INT){
+ if (type == G_TYPE_INT)
+ {
gst_caps_set_simple(caps, prop, type, v1, NULL);
- }else{
+ }
+ else
+ {
gst_caps_set_simple(caps, prop, type, v1, v2, NULL);
}
}
#if GST_VERSION_MAJOR > 0
- caps = gst_caps_fixate(caps);
+ caps.attach(gst_caps_fixate(caps.detach()));
#endif
- gst_app_sink_set_caps(GST_APP_SINK(sink), caps);
- //printf("filtering with %s\n", gst_caps_to_string(caps));
+ gst_app_sink_set_caps(GST_APP_SINK(sink.get()), caps);
+ GST_LOG("filtering with caps: %" GST_PTR_FORMAT, caps.get());
}
/*!
return;
#if GST_VERSION_MAJOR > 0
- if (! gst_caps_is_writable(caps))
- caps = gst_caps_make_writable (caps);
+ if (!gst_caps_is_writable(caps.get()))
+ caps.attach(gst_caps_make_writable(caps.detach()));
#endif
- GstStructure *s = gst_caps_get_structure(caps, 0);
+ GstStructure *s = gst_caps_get_structure(caps, 0); // no lifetime transfer
gst_structure_remove_field(s, filter);
- gst_app_sink_set_caps(GST_APP_SINK(sink), caps);
+#if GST_VERSION_MAJOR > 0
+ caps.attach(gst_caps_fixate(caps.detach()));
+#endif
+
+ gst_app_sink_set_caps(GST_APP_SINK(sink.get()), caps);
}
/*!
*/
+// "pad-added" callback: link the newly created decoder source pad to the
+// sink pad of the colorspace converter element passed via 'data'.
void GStreamerCapture::newPad(GstElement *, GstPad *pad, gpointer data)
{
- GstPad *sinkpad;
- GstElement *color = (GstElement *) data;
+ GSafePtr<GstPad> sinkpad;
+ GstElement* color = (GstElement*)data;
- sinkpad = gst_element_get_static_pad (color, "sink");
- if (!sinkpad){
- //fprintf(stderr, "Gstreamer: no pad named sink\n");
+ sinkpad.attach(gst_element_get_static_pad(color, "sink"));
+ if (!sinkpad) {
+ CV_WARN("no pad named sink");
return;
}
- gst_pad_link (pad, sinkpad);
- gst_object_unref (sinkpad);
-}
-
-bool GStreamerCapture::isOpened() const
-{
- return pipeline != NULL;
+ gst_pad_link(pad, sinkpad.get());
}
/*!
- * \brief CvCapture_GStreamer::open Open the given file with gstreamer
- * \param type CvCapture type. One of CV_CAP_GSTREAMER_*
+ * \brief Create GStreamer pipeline
* \param filename Filename to open in case of CV_CAP_GSTREAMER_FILE
* \return boolean. Specifies if opening was successful.
*
- * In case of CV_CAP_GSTREAMER_V4L(2), a pipelin is constructed as follows:
+ * In case of camera 'index', a pipeline is constructed as follows:
* v4l2src ! autoconvert ! appsink
*
*
* e.g. videotestsrc ! videoconvert ! appsink
* the appsink name should be either 'appsink0' (the default) or 'opencvsink'
*
- * When dealing with a file, CvCapture_GStreamer will not drop frames if the grabbing interval
- * larger than the framerate period. (Unlike the uri or manual pipeline description, which assume
- * a live source)
+ * GStreamer will not drop frames if the grabbing interval larger than the framerate period.
+ * To support dropping for live streams add appsink 'drop' parameter into your custom pipeline.
*
* The pipeline will only be started whenever the first frame is grabbed. Setting pipeline properties
* is really slow if we need to restart the pipeline over and over again.
*
- * TODO: the 'type' parameter is imo unneeded. for v4l2, filename 'v4l2:///dev/video0' can be used.
- * I expect this to be the same for CV_CAP_GSTREAMER_1394. Is anyone actually still using v4l (v1)?
- *
*/
+// Open capture device /dev/video<id> through a default
+// "v4l2src ! <colorconverter> ! appsink drop=true" pipeline.
bool GStreamerCapture::open(int id)
{
std::ostringstream desc;
desc << "v4l2src device=/dev/video" << id
<< " ! " << COLOR_ELEM
- << " ! appsink";
+ << " ! appsink drop=true";
return open(desc.str());
}
{
gst_initializer::init();
- const gchar * filename = filename_.c_str();
+ const gchar* filename = filename_.c_str();
bool file = false;
- //bool stream = false;
bool manualpipeline = false;
- char *uri = NULL;
- GstElement* uridecodebin = NULL;
- GstElement* color = NULL;
+ GSafePtr<char> uri;
+ GSafePtr<GstElement> uridecodebin;
+ GSafePtr<GstElement> color;
GstStateChangeReturn status;
// test if we have a valid uri. If so, open it with an uridecodebin
// ordinary file path.
if (!gst_uri_is_valid(filename))
{
-#ifdef _MSC_VER
- uri = new char[2048];
- DWORD pathSize = GetFullPathName(filename, 2048, uri, NULL);
- struct stat buf;
- if (pathSize == 0 || stat(uri, &buf) != 0)
- {
- delete[] uri;
- uri = NULL;
- }
-#else
- uri = realpath(filename, NULL);
-#endif
- //stream = false;
- if(uri)
+ if (utils::fs::exists(filename_))
{
- uri = g_filename_to_uri(uri, NULL, NULL);
- if(uri)
+ uri.attach(g_filename_to_uri(filename, NULL, NULL));
+ if (uri)
{
file = true;
}
else
{
- CV_WARN("GStreamer: Error opening file\n");
- CV_WARN(filename);
- CV_WARN(uri);
+ CV_WARN("Error opening file: " << filename << " (" << uri.get() << ")");
return false;
}
}
else
{
- GError *err = NULL;
- uridecodebin = gst_parse_launch(filename, &err);
- if(!uridecodebin)
+ GSafePtr<GError> err;
+ uridecodebin.attach(gst_parse_launch(filename, err.getRef()));
+ if (err)
{
- fprintf(stderr, "GStreamer: Error opening bin: %s\n", err->message);
+ CV_WARN("Error opening bin: " << err->message);
return false;
}
- //stream = true;
manualpipeline = true;
}
}
else
{
- //stream = true;
- uri = g_strdup(filename);
+ uri.attach(g_strdup(filename));
}
bool element_from_uri = false;
- if(!uridecodebin)
+ if (!uridecodebin)
{
// At this writing, the v4l2 element (and maybe others too) does not support caps renegotiation.
// This means that we cannot use an uridecodebin when dealing with v4l2, since setting
// capture properties will not work.
// The solution (probably only until gstreamer 1.2) is to make an element from uri when dealing with v4l2.
- gchar * protocol = gst_uri_get_protocol(uri);
- if (!strcasecmp(protocol , "v4l2"))
+ GSafePtr<gchar> protocol_; protocol_.attach(gst_uri_get_protocol(uri));
+ CV_Assert(protocol_);
+ std::string protocol = toLowerCase(std::string(protocol_.get()));
+ if (protocol == "v4l2")
{
#if GST_VERSION_MAJOR == 0
- uridecodebin = gst_element_make_from_uri(GST_URI_SRC, uri, "src");
+ uridecodebin.reset(gst_element_make_from_uri(GST_URI_SRC, uri.get(), "src"));
#else
- uridecodebin = gst_element_make_from_uri(GST_URI_SRC, uri, "src", NULL);
+ uridecodebin.reset(gst_element_make_from_uri(GST_URI_SRC, uri.get(), "src", NULL));
#endif
+ CV_Assert(uridecodebin);
element_from_uri = true;
}
else
{
- uridecodebin = gst_element_factory_make("uridecodebin", NULL);
- g_object_set(G_OBJECT(uridecodebin), "uri", uri, NULL);
+ uridecodebin.reset(gst_element_factory_make("uridecodebin", NULL));
+ CV_Assert(uridecodebin);
+ g_object_set(G_OBJECT(uridecodebin.get()), "uri", uri.get(), NULL);
}
- g_free(protocol);
- if(!uridecodebin)
+ if (!uridecodebin)
{
CV_WARN("Can not parse GStreamer URI bin");
return false;
if (manualpipeline)
{
- GstIterator *it = gst_bin_iterate_elements(GST_BIN(uridecodebin));
+ GstIterator *it = gst_bin_iterate_elements(GST_BIN(uridecodebin.get()));
- GstElement *element = NULL;
gboolean done = false;
- gchar* name = NULL;
#if GST_VERSION_MAJOR > 0
GValue value = G_VALUE_INIT;
#endif
while (!done)
{
+ GstElement *element = NULL;
+ GSafePtr<gchar> name;
#if GST_VERSION_MAJOR > 0
switch (gst_iterator_next (it, &value))
{
{
case GST_ITERATOR_OK:
#endif
- name = gst_element_get_name(element);
+ name.attach(gst_element_get_name(element));
if (name)
{
if (strstr(name, "opencvsink") != NULL || strstr(name, "appsink") != NULL)
{
- sink = GST_ELEMENT ( gst_object_ref (element) );
+ sink.attach(GST_ELEMENT(gst_object_ref(element)));
}
else if (strstr(name, COLOR_ELEM_NAME) != NULL)
{
- color = GST_ELEMENT ( gst_object_ref (element) );
+ color.attach(GST_ELEMENT(gst_object_ref(element)));
}
else if (strstr(name, "v4l") != NULL)
{
- v4l2src = GST_ELEMENT ( gst_object_ref (element) );
+ v4l2src.attach(GST_ELEMENT(gst_object_ref(element)));
}
- g_free(name);
+ name.release();
done = sink && color && v4l2src;
}
#if GST_VERSION_MAJOR > 0
g_value_unset (&value);
#endif
-
break;
case GST_ITERATOR_RESYNC:
gst_iterator_resync (it);
if (!sink)
{
- CV_WARN("GStreamer: cannot find appsink in manual pipeline\n");
+ CV_WARN("cannot find appsink in manual pipeline");
return false;
}
- pipeline = uridecodebin;
+ pipeline.swap(uridecodebin);
}
else
{
- pipeline = gst_pipeline_new(NULL);
+ pipeline.reset(gst_pipeline_new(NULL));
+ CV_Assert(pipeline);
+
// videoconvert (in 0.10: ffmpegcolorspace, in 1.x autovideoconvert)
//automatically selects the correct colorspace conversion based on caps.
- color = gst_element_factory_make(COLOR_ELEM, NULL);
- sink = gst_element_factory_make("appsink", NULL);
+ color.reset(gst_element_factory_make(COLOR_ELEM, NULL));
+ CV_Assert(color);
+
+ sink.reset(gst_element_factory_make("appsink", NULL));
+ CV_Assert(sink);
- gst_bin_add_many(GST_BIN(pipeline), uridecodebin, color, sink, NULL);
+ gst_bin_add_many(GST_BIN(pipeline.get()), uridecodebin.get(), color.get(), sink.get(), NULL);
- if(element_from_uri)
+ if (element_from_uri)
{
- if(!gst_element_link(uridecodebin, color))
+ if(!gst_element_link(uridecodebin, color.get()))
{
CV_WARN("cannot link color -> sink");
- gst_object_unref(pipeline);
- pipeline = NULL;
+ pipeline.release();
return false;
}
}
else
{
- g_signal_connect(uridecodebin, "pad-added", G_CALLBACK(newPad), color);
+ g_signal_connect(uridecodebin, "pad-added", G_CALLBACK(newPad), color.get());
}
- if(!gst_element_link(color, sink))
+ if (!gst_element_link(color.get(), sink.get()))
{
- CV_WARN("GStreamer: cannot link color -> sink\n");
- gst_object_unref(pipeline);
- pipeline = NULL;
+ CV_WARN("GStreamer: cannot link color -> sink");
+ pipeline.release();
return false;
}
}
- //TODO: is 1 single buffer really high enough?
- gst_app_sink_set_max_buffers (GST_APP_SINK(sink), 1);
-// gst_app_sink_set_drop (GST_APP_SINK(sink), stream);
+ if (!manualpipeline || strstr(filename, " max-buffers=") == NULL)
+ {
+ //TODO: is 1 single buffer really high enough?
+ gst_app_sink_set_max_buffers(GST_APP_SINK(sink.get()), 1);
+ }
+
//do not emit signals: all calls will be synchronous and blocking
- gst_app_sink_set_emit_signals (GST_APP_SINK(sink), FALSE);
-// gst_base_sink_set_sync(GST_BASE_SINK(sink), FALSE);
+ gst_app_sink_set_emit_signals (GST_APP_SINK(sink.get()), FALSE);
+
#if GST_VERSION_MAJOR == 0
- caps = gst_caps_new_simple("video/x-raw-rgb",
- "bpp", G_TYPE_INT, 24,
- "red_mask", G_TYPE_INT, 0x0000FF,
- "green_mask", G_TYPE_INT, 0x00FF00,
- "blue_mask", G_TYPE_INT, 0xFF0000,
- NULL);
+ caps.attach(gst_caps_new_simple("video/x-raw-rgb",
+ "bpp", G_TYPE_INT, 24,
+ "red_mask", G_TYPE_INT, 0x0000FF,
+ "green_mask", G_TYPE_INT, 0x00FF00,
+ "blue_mask", G_TYPE_INT, 0xFF0000,
+ NULL));
#else
- caps = gst_caps_from_string("video/x-raw, format=(string){BGR, GRAY8}; video/x-bayer,format=(string){rggb,bggr,grbg,gbrg}; image/jpeg");
+ caps.attach(gst_caps_from_string("video/x-raw, format=(string){BGR, GRAY8}; video/x-bayer,format=(string){rggb,bggr,grbg,gbrg}; image/jpeg"));
- if(manualpipeline){
- GstPad* sink_pad = gst_element_get_static_pad(sink, "sink");
- GstCaps* peer_caps = gst_pad_peer_query_caps(sink_pad,NULL);
+ if (manualpipeline)
+ {
+ GSafePtr<GstCaps> peer_caps;
+ GSafePtr<GstPad> sink_pad;
+ sink_pad.attach(gst_element_get_static_pad(sink, "sink"));
+ peer_caps.attach(gst_pad_peer_query_caps(sink_pad, NULL));
if (!gst_caps_can_intersect(caps, peer_caps)) {
- gst_caps_unref(caps);
- caps = gst_caps_from_string("video/x-raw, format=(string){UYVY,YUY2,YVYU,NV12,NV21,YV12,I420}");
+ caps.attach(gst_caps_from_string("video/x-raw, format=(string){UYVY,YUY2,YVYU,NV12,NV21,YV12,I420}"));
+ CV_Assert(caps);
}
- gst_object_unref(sink_pad);
- gst_caps_unref(peer_caps);
}
#endif
- gst_app_sink_set_caps(GST_APP_SINK(sink), caps);
- gst_caps_unref(caps);
+ gst_app_sink_set_caps(GST_APP_SINK(sink.get()), caps);
+ caps.release();
{
- GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-init");
+ GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(pipeline.get()), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-init");
- status = gst_element_set_state(GST_ELEMENT(pipeline),
+ status = gst_element_set_state(GST_ELEMENT(pipeline.get()),
file ? GST_STATE_PAUSED : GST_STATE_PLAYING);
if (status == GST_STATE_CHANGE_ASYNC)
{
}
if (status == GST_STATE_CHANGE_FAILURE)
{
- GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-error");
+ GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(pipeline.get()), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-error");
handleMessage(pipeline);
- gst_object_unref(pipeline);
- pipeline = NULL;
- CV_WARN("GStreamer: unable to start pipeline\n");
+ pipeline.release();
+ CV_WARN("unable to start pipeline");
return false;
}
#endif
{
handleMessage(pipeline);
- CV_WARN("GStreamer: unable to query duration of stream");
+ CV_WARN("unable to query duration of stream");
duration = -1;
}
handleMessage(pipeline);
- GstPad* pad = gst_element_get_static_pad(sink, "sink");
+ GSafePtr<GstPad> pad;
+ pad.attach(gst_element_get_static_pad(sink, "sink"));
+
+ GSafePtr<GstCaps> buffer_caps;
#if GST_VERSION_MAJOR == 0
- GstCaps* buffer_caps = gst_pad_get_caps(pad);
+ buffer_caps.attach(gst_pad_get_caps(pad));
#else
- GstCaps* buffer_caps = gst_pad_get_current_caps(pad);
+ buffer_caps.attach(gst_pad_get_current_caps(pad));
#endif
- const GstStructure *structure = gst_caps_get_structure (buffer_caps, 0);
-
- if (!gst_structure_get_int (structure, "width", &width))
- {
- CV_WARN("Cannot query video width\n");
- }
-
- if (!gst_structure_get_int (structure, "height", &height))
+ const GstStructure *structure = gst_caps_get_structure(buffer_caps, 0); // no lifetime transfer
+ if (!gst_structure_get_int (structure, "width", &width) ||
+ !gst_structure_get_int (structure, "height", &height))
{
- CV_WARN("Cannot query video height\n");
+ CV_WARN("cannot query video width/height");
}
gint num = 0, denom=1;
- if(!gst_structure_get_fraction(structure, "framerate", &num, &denom))
+ if (!gst_structure_get_fraction(structure, "framerate", &num, &denom))
{
- CV_WARN("Cannot query video fps\n");
+ CV_WARN("cannot query video fps");
}
fps = (double)num/(double)denom;
gboolean status_;
format_ = GST_FORMAT_DEFAULT;
-#if GST_VERSION_MAJOR == 0
-#define FORMAT &format_
-#else
-#define FORMAT format_
-#endif
- status_ = gst_element_query_position(sink, FORMAT, &value_);
-#undef FORMAT
+
+ status_ = gst_element_query_position(sink, CV_GST_FORMAT(format_), &value_);
if (!status_ || value_ != 0 || duration < 0)
{
- CV_WARN(cv::format("Cannot query video position: status=%d value=%lld duration=%lld\n",
- (int)status_, (long long int)value_, (long long int)duration).c_str());
+ CV_WARN("Cannot query video position: status=" << status_ << ", value=" << value_ << ", duration=" << duration);
isPosFramesSupported = false;
isPosFramesEmulated = true;
emulatedFrameNumber = 0;
isPosFramesSupported = true;
}
- GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline");
+ GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(pipeline.get()), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline");
}
return true;
gint64 value;
gboolean status;
-#if GST_VERSION_MAJOR == 0
-#define FORMAT &format
-#else
-#define FORMAT format
-#endif
-
if(!pipeline) {
CV_WARN("GStreamer: no pipeline");
return 0;
}
- switch(propId) {
+ switch(propId)
+ {
case CV_CAP_PROP_POS_MSEC:
format = GST_FORMAT_TIME;
- status = gst_element_query_position(sink, FORMAT, &value);
+ status = gst_element_query_position(sink.get(), CV_GST_FORMAT(format), &value);
if(!status) {
handleMessage(pipeline);
CV_WARN("GStreamer: unable to query position of stream");
return 0; // TODO getProperty() "unsupported" value should be changed
}
format = GST_FORMAT_DEFAULT;
- status = gst_element_query_position(sink, FORMAT, &value);
+ status = gst_element_query_position(sink.get(), CV_GST_FORMAT(format), &value);
if(!status) {
handleMessage(pipeline);
CV_WARN("GStreamer: unable to query position of stream");
return value;
case CV_CAP_PROP_POS_AVI_RATIO:
format = GST_FORMAT_PERCENT;
- status = gst_element_query_position(sink, FORMAT, &value);
+ status = gst_element_query_position(sink.get(), CV_GST_FORMAT(format), &value);
if(!status) {
handleMessage(pipeline);
CV_WARN("GStreamer: unable to query position of stream");
case CV_CAP_PROP_HUE:
if (v4l2src)
{
- string propName = get_gst_propname(propId);
+ std::string propName = get_gst_propname(propId);
if (!propName.empty())
{
gint32 val = 0;
- g_object_get(G_OBJECT(v4l2src), propName.c_str(), &val, NULL);
+ g_object_get(G_OBJECT(v4l2src.get()), propName.c_str(), &val, NULL);
return static_cast<double>(val);
}
}
CV_WARN("there is no sink yet");
return 0;
}
- return gst_app_sink_get_max_buffers(GST_APP_SINK(sink));
+ return gst_app_sink_get_max_buffers(GST_APP_SINK(sink.get()));
default:
- CV_WARN("GStreamer: unhandled property");
+ CV_WARN("unhandled property: " << propId);
break;
}
-#undef FORMAT
-
return 0;
}
switch(propId)
{
case CV_CAP_PROP_POS_MSEC:
- if(!gst_element_seek_simple(GST_ELEMENT(pipeline), GST_FORMAT_TIME,
+ if(!gst_element_seek_simple(GST_ELEMENT(pipeline.get()), GST_FORMAT_TIME,
flags, (gint64) (value * GST_MSECOND))) {
handleMessage(pipeline);
CV_WARN("GStreamer: unable to seek");
return false;
CV_WARN("unable to seek");
}
- if(!gst_element_seek_simple(GST_ELEMENT(pipeline), GST_FORMAT_DEFAULT,
+ if(!gst_element_seek_simple(GST_ELEMENT(pipeline.get()), GST_FORMAT_DEFAULT,
flags, (gint64) value)) {
handleMessage(pipeline);
CV_WARN("GStreamer: unable to seek");
return true;
}
case CV_CAP_PROP_POS_AVI_RATIO:
- if(!gst_element_seek_simple(GST_ELEMENT(pipeline), GST_FORMAT_PERCENT,
+ if(!gst_element_seek_simple(GST_ELEMENT(pipeline.get()), GST_FORMAT_PERCENT,
flags, (gint64) (value * GST_FORMAT_PERCENT_MAX))) {
handleMessage(pipeline);
CV_WARN("GStreamer: unable to seek");
break;
case CV_CAP_PROP_FPS:
if(value > 0) {
- double num=0, denom = 1;
- toFraction(value, num, denom);
+ int num = 0, denom = 1;
+ toFraction(value, num, denom);
setFilter("framerate", GST_TYPE_FRACTION, value, denom);
} else
removeFilter("framerate");
case CV_CAP_PROP_HUE:
if (v4l2src)
{
- string propName = get_gst_propname(propId);
+ std::string propName = get_gst_propname(propId);
if (!propName.empty())
{
gint32 val = cv::saturate_cast<gint32>(value);
- g_object_set(G_OBJECT(v4l2src), propName.c_str(), &val, NULL);
+ g_object_set(G_OBJECT(v4l2src.get()), propName.c_str(), &val, NULL);
return true;
}
}
CV_WARN("there is no sink yet");
return false;
}
- gst_app_sink_set_max_buffers(GST_APP_SINK(sink), (guint) value);
+ gst_app_sink_set_max_buffers(GST_APP_SINK(sink.get()), (guint) value);
return true;
}
default:
}
-Ptr<IVideoCapture> cv::createGStreamerCapture(const String& filename)
+Ptr<IVideoCapture> createGStreamerCapture(const String& filename)
{
Ptr<GStreamerCapture> cap = makePtr<GStreamerCapture>();
if (cap && cap->open(filename))
return Ptr<IVideoCapture>();
}
-Ptr<IVideoCapture> cv::createGStreamerCapture(int index)
+Ptr<IVideoCapture> createGStreamerCapture(int index)
{
Ptr<GStreamerCapture> cap = makePtr<GStreamerCapture>();
if (cap && cap->open(index))
/*!
* \brief The CvVideoWriter_GStreamer class
- * Use Gstreamer to write video
+ * Use GStreamer to write video
*/
class CvVideoWriter_GStreamer : public CvVideoWriter
{
public:
CvVideoWriter_GStreamer()
- : pipeline(0), source(0), encodebin(0), file(0), buffer(0), input_pix_fmt(0),
+ : input_pix_fmt(0),
num_frames(0), framerate(0)
{
}
virtual bool writeFrame( const IplImage* image ) CV_OVERRIDE;
protected:
const char* filenameToMimetype(const char* filename);
- GstElement* pipeline;
- GstElement* source;
- GstElement* encodebin;
- GstElement* file;
+ GSafePtr<GstElement> pipeline;
+ GSafePtr<GstElement> source;
- GstBuffer* buffer;
int input_pix_fmt;
int num_frames;
double framerate;
+
+ void close_();
};
/*!
* ends the pipeline by sending EOS and destroys the pipeline and all
* elements afterwards
*/
-void CvVideoWriter_GStreamer::close()
+void CvVideoWriter_GStreamer::close_()
{
GstStateChangeReturn status;
if (pipeline)
{
handleMessage(pipeline);
- if (gst_app_src_end_of_stream(GST_APP_SRC(source)) != GST_FLOW_OK)
+ if (gst_app_src_end_of_stream(GST_APP_SRC(source.get())) != GST_FLOW_OK)
{
- CV_WARN("Cannot send EOS to GStreamer pipeline\n");
- return;
+ CV_WARN("Cannot send EOS to GStreamer pipeline");
}
-
- //wait for EOS to trickle down the pipeline. This will let all elements finish properly
- GstBus* bus = gst_element_get_bus(pipeline);
- GstMessage *msg = gst_bus_timed_pop_filtered(bus, GST_CLOCK_TIME_NONE, (GstMessageType)(GST_MESSAGE_ERROR | GST_MESSAGE_EOS));
- if (!msg || GST_MESSAGE_TYPE(msg) == GST_MESSAGE_ERROR)
+ else
{
- CV_WARN("Error during VideoWriter finalization\n");
- if(msg != NULL)
+ //wait for EOS to trickle down the pipeline. This will let all elements finish properly
+ GSafePtr<GstBus> bus; bus.attach(gst_element_get_bus(pipeline));
+ if (bus)
{
- gst_message_unref(msg);
- g_object_unref(G_OBJECT(bus));
+ GSafePtr<GstMessage> msg; msg.attach(gst_bus_timed_pop_filtered(bus, GST_CLOCK_TIME_NONE, (GstMessageType)(GST_MESSAGE_ERROR | GST_MESSAGE_EOS)));
+ if (!msg || GST_MESSAGE_TYPE(msg.get()) == GST_MESSAGE_ERROR)
+ {
+ CV_WARN("Error during VideoWriter finalization");
+ handleMessage(pipeline);
+ }
+ }
+ else
+ {
+ CV_WARN("can't get GstBus");
}
- return;
}
- gst_message_unref(msg);
- g_object_unref(G_OBJECT(bus));
-
status = gst_element_set_state (pipeline, GST_STATE_NULL);
if (status == GST_STATE_CHANGE_ASYNC)
{
if (status == GST_STATE_CHANGE_FAILURE)
{
handleMessage (pipeline);
- gst_object_unref (GST_OBJECT (pipeline));
- pipeline = NULL;
- CV_WARN("Unable to stop gstreamer pipeline\n");
- return;
+ CV_WARN("Unable to stop writer pipeline");
}
-
- gst_object_unref (GST_OBJECT (pipeline));
- pipeline = NULL;
}
}
+void CvVideoWriter_GStreamer::close()
+{
+ close_();
+ source.release();
+ pipeline.release();
+}
/*!
 * \brief filenameToMimetype
 * \param filename
 * \return mimetype
 * Returns a container mime type for a given filename by looking at its extension
 */
const char* CvVideoWriter_GStreamer::filenameToMimetype(const char *filename)
{
    // Locate the extension: the text after the last '.' of the filename.
    const char* dot = strrchr(filename, '.');
    if (!dot || dot == filename)
        return NULL;  // no extension (or the whole name is a leading dot)

    // Lower-case once so the comparison below is case-insensitive.
    const std::string ext = toLowerCase(std::string(dot + 1));

    // Map the extension onto a container mime type.
    // GStreamer's own lookup returns too many possibilities, which is not useful to us.
    static const struct { const char* ext; const char* mime; } mime_table[] =
    {
        { "avi",  "video/x-msvideo" },
        { "mkv",  "video/x-matroska" },
        { "mk3d", "video/x-matroska" },
        { "webm", "video/x-matroska" },
        { "wmv",  "video/x-ms-asf" },
        { "mov",  "video/x-quicktime" },
        { "ogg",  "application/ogg" },
        { "ogv",  "application/ogg" },
        { "rm",   "vnd.rn-realmedia" },
        { "swf",  "application/x-shockwave-flash" },
        { "mp4",  "video/x-quicktime, variant=(string)iso" },
    };
    for (size_t i = 0; i < sizeof(mime_table) / sizeof(mime_table[0]); ++i)
    {
        if (ext == mime_table[i].ext)
            return mime_table[i].mime;
    }

    // Default to AVI when the extension is not recognized.
    return "video/x-msvideo";
}
/*!
* \return success
*
* We support 2 modes of operation. Either the user enters a filename and a fourcc
- * code, or enters a manual pipeline description like in CvVideoCapture_Gstreamer.
+ * code, or enters a manual pipeline description like in CvVideoCapture_GStreamer.
* In the latter case, we just push frames on the appsink with appropriate caps.
* In the former case, we try to deduce the correct container from the filename,
* and the correct encoder from the fourcc profile.
double fps, CvSize frameSize, bool is_color )
{
// check arguments
- assert (filename);
- assert (fps > 0);
- assert (frameSize.width > 0 && frameSize.height > 0);
+ CV_Assert(filename);
+ CV_Assert(fps > 0);
+ CV_Assert(frameSize.width > 0 && frameSize.height > 0);
// init gstreamer
gst_initializer::init();
// init vars
+ GSafePtr<GstElement> file;
+ GSafePtr<GstElement> encodebin;
+
bool manualpipeline = true;
int bufsize = 0;
- GError *err = NULL;
- const char* mime = NULL;
+ GSafePtr<GError> err;
GstStateChangeReturn stateret;
- GstCaps* caps = NULL;
- GstCaps* videocaps = NULL;
-
-#if FULL_GST_VERSION >= VERSION_NUM(0,10,32)
- GstCaps* containercaps = NULL;
- GstEncodingContainerProfile* containerprofile = NULL;
- GstEncodingVideoProfile* videoprofile = NULL;
-#endif
+ GSafePtr<GstCaps> caps;
GstIterator* it = NULL;
gboolean done = FALSE;
- GstElement *element = NULL;
- gchar* name = NULL;
#if GST_VERSION_MAJOR == 0
GstElement* splitter = NULL;
// we first try to construct a pipeline from the given string.
// if that fails, we assume it is an ordinary filename
- encodebin = gst_parse_launch(filename, &err);
- manualpipeline = (encodebin != NULL);
+ encodebin.attach(gst_parse_launch(filename, err.getRef()));
+ manualpipeline = (bool)encodebin;
- if(manualpipeline)
+ if (manualpipeline)
{
+ if (err)
+ {
+ CV_WARN("error opening writer pipeline: " << err->message);
+ if (encodebin)
+ {
+ gst_element_set_state(encodebin, GST_STATE_NULL);
+ }
+ handleMessage(encodebin);
+ encodebin.release();
+ return false;
+ }
#if GST_VERSION_MAJOR == 0
- it = gst_bin_iterate_sources(GST_BIN(encodebin));
- if(gst_iterator_next(it, (gpointer *)&source) != GST_ITERATOR_OK) {
+ it = gst_bin_iterate_sources(GST_BIN(encodebin.get()));
+ if (gst_iterator_next(it, (gpointer *)source.getRef()) != GST_ITERATOR_OK) {
CV_WARN("GStreamer: cannot find appsink in manual pipeline\n");
return false;
}
#else
- it = gst_bin_iterate_sources (GST_BIN(encodebin));
- GValue value = G_VALUE_INIT;
+ it = gst_bin_iterate_sources (GST_BIN(encodebin.get()));
- while (!done) {
+ while (!done)
+ {
+ GValue value = G_VALUE_INIT;
+ GSafePtr<gchar> name;
+ GstElement* element = NULL;
switch (gst_iterator_next (it, &value)) {
case GST_ITERATOR_OK:
- element = GST_ELEMENT (g_value_get_object (&value));
- name = gst_element_get_name(element);
- if (name){
- if(strstr(name, "opencvsrc") != NULL || strstr(name, "appsrc") != NULL) {
- source = GST_ELEMENT ( gst_object_ref (element) );
+ element = GST_ELEMENT (g_value_get_object (&value)); // no lifetime transfer
+ name.attach(gst_element_get_name(element));
+ if (name)
+ {
+ if (strstr(name.get(), "opencvsrc") != NULL || strstr(name.get(), "appsrc") != NULL)
+ {
+ source.attach(GST_ELEMENT(gst_object_ref(element)));
done = TRUE;
}
- g_free(name);
}
- g_value_unset (&value);
+ g_value_unset(&value);
break;
case GST_ITERATOR_RESYNC:
return false;
}
#endif
- pipeline = encodebin;
+ pipeline.swap(encodebin);
}
else
{
- pipeline = gst_pipeline_new (NULL);
+ err.release();
+ pipeline.reset(gst_pipeline_new(NULL));
// we just got a filename and a fourcc code.
// first, try to guess the container from the filename
- //encodebin = gst_element_factory_make("encodebin", NULL);
//proxy old non existing fourcc ids. These were used in previous opencv versions,
//but do not even exist in gstreamer any more
//create encoder caps from fourcc
-
- videocaps = gst_riff_create_video_caps(fourcc, NULL, NULL, NULL, NULL, NULL);
- if (!videocaps){
- CV_WARN("Gstreamer Opencv backend does not support this codec.");
+ GSafePtr<GstCaps> videocaps;
+ videocaps.attach(gst_riff_create_video_caps(fourcc, NULL, NULL, NULL, NULL, NULL));
+ if (!videocaps)
+ {
+ CV_WARN("OpenCV backend does not support passed FOURCC value");
return false;
}
//create container caps from file extension
- mime = filenameToMimetype(filename);
- if (!mime) {
- CV_WARN("Gstreamer Opencv backend does not support this file type.");
+ const char* mime = filenameToMimetype(filename);
+ if (!mime)
+ {
+ CV_WARN("OpenCV backend does not support this file type (extension): " << filename);
return false;
}
+ //create pipeline elements
+ encodebin.reset(gst_element_factory_make("encodebin", NULL));
+
#if FULL_GST_VERSION >= VERSION_NUM(0,10,32)
- containercaps = gst_caps_from_string(mime);
+ GSafePtr<GstCaps> containercaps;
+ GSafePtr<GstEncodingContainerProfile> containerprofile;
+ GSafePtr<GstEncodingVideoProfile> videoprofile;
- //create encodebin profile
- containerprofile = gst_encoding_container_profile_new("container", "container", containercaps, NULL);
- videoprofile = gst_encoding_video_profile_new(videocaps, NULL, NULL, 1);
- gst_encoding_container_profile_add_profile(containerprofile, (GstEncodingProfile *) videoprofile);
-#endif
+ containercaps.attach(gst_caps_from_string(mime));
- //create pipeline elements
- encodebin = gst_element_factory_make("encodebin", NULL);
+ //create encodebin profile
+ containerprofile.attach(gst_encoding_container_profile_new("container", "container", containercaps.get(), NULL));
+ videoprofile.reset(gst_encoding_video_profile_new(videocaps.get(), NULL, NULL, 1));
+ gst_encoding_container_profile_add_profile(containerprofile.get(), (GstEncodingProfile*)videoprofile.get());
-#if FULL_GST_VERSION >= VERSION_NUM(0,10,32)
- g_object_set(G_OBJECT(encodebin), "profile", containerprofile, NULL);
+ g_object_set(G_OBJECT(encodebin.get()), "profile", containerprofile.get(), NULL);
#endif
- source = gst_element_factory_make("appsrc", NULL);
- file = gst_element_factory_make("filesink", NULL);
- g_object_set(G_OBJECT(file), "location", filename, NULL);
+
+ source.reset(gst_element_factory_make("appsrc", NULL));
+ file.reset(gst_element_factory_make("filesink", NULL));
+ g_object_set(G_OBJECT(file.get()), "location", (const char*)filename, NULL);
}
+ int fps_num = 0, fps_denom = 1;
+ toFraction(fps, fps_num, fps_denom);
+
if (fourcc == CV_FOURCC('M','J','P','G') && frameSize.height == 1)
{
#if GST_VERSION_MAJOR > 0
input_pix_fmt = GST_VIDEO_FORMAT_ENCODED;
- caps = gst_caps_new_simple("image/jpeg",
- "framerate", GST_TYPE_FRACTION, int(fps), 1,
- NULL);
- caps = gst_caps_fixate(caps);
+ caps.attach(gst_caps_new_simple("image/jpeg",
+ "framerate", GST_TYPE_FRACTION, int(fps_num), int(fps_denom),
+ NULL));
+ caps.attach(gst_caps_fixate(caps.detach()));
#else
- CV_WARN("Gstreamer 0.10 Opencv backend does not support writing encoded MJPEG data.");
+ CV_WARN("GStreamer 0.10 OpenCV backend does not support writing encoded MJPEG data.");
return false;
#endif
}
- else if(is_color)
+ else if (is_color)
{
input_pix_fmt = GST_VIDEO_FORMAT_BGR;
bufsize = frameSize.width * frameSize.height * 3;
#if GST_VERSION_MAJOR == 0
- caps = gst_video_format_new_caps(GST_VIDEO_FORMAT_BGR,
- frameSize.width,
- frameSize.height,
- int(fps), 1,
- 1, 1);
+ caps.attach(gst_video_format_new_caps(GST_VIDEO_FORMAT_BGR,
+ frameSize.width,
+ frameSize.height,
+ gint(fps_num), gint(fps_denom),
+ 1, 1));
#else
- caps = gst_caps_new_simple("video/x-raw",
- "format", G_TYPE_STRING, "BGR",
- "width", G_TYPE_INT, frameSize.width,
- "height", G_TYPE_INT, frameSize.height,
- "framerate", GST_TYPE_FRACTION, int(fps), 1,
- NULL);
- caps = gst_caps_fixate(caps);
-
+ caps.attach(gst_caps_new_simple("video/x-raw",
+ "format", G_TYPE_STRING, "BGR",
+ "width", G_TYPE_INT, frameSize.width,
+ "height", G_TYPE_INT, frameSize.height,
+ "framerate", GST_TYPE_FRACTION, gint(fps_num), gint(fps_denom),
+ NULL));
+ CV_Assert(caps);
+ caps.attach(gst_caps_fixate(caps.detach()));
#endif
-
+ CV_Assert(caps);
}
else
{
bufsize = frameSize.width * frameSize.height;
#if GST_VERSION_MAJOR == 0
- caps = gst_video_format_new_caps(GST_VIDEO_FORMAT_GRAY8,
- frameSize.width,
- frameSize.height,
- int(fps), 1,
- 1, 1);
+ caps.attach(gst_video_format_new_caps(GST_VIDEO_FORMAT_GRAY8,
+ frameSize.width,
+ frameSize.height,
+ gint(fps_num), gint(fps_denom),
+ 1, 1));
#else
- caps = gst_caps_new_simple("video/x-raw",
- "format", G_TYPE_STRING, "GRAY8",
- "width", G_TYPE_INT, frameSize.width,
- "height", G_TYPE_INT, frameSize.height,
- "framerate", GST_TYPE_FRACTION, int(fps), 1,
- NULL);
- caps = gst_caps_fixate(caps);
+ caps.attach(gst_caps_new_simple("video/x-raw",
+ "format", G_TYPE_STRING, "GRAY8",
+ "width", G_TYPE_INT, frameSize.width,
+ "height", G_TYPE_INT, frameSize.height,
+ "framerate", GST_TYPE_FRACTION, gint(fps_num), gint(fps_denom),
+ NULL));
+ caps.attach(gst_caps_fixate(caps.detach()));
#endif
#else
CV_Error(Error::StsError,
- "Gstreamer 0.10.29 or newer is required for grayscale input");
+ "GStreamer 0.10.29 or newer is required for grayscale input");
#endif
}
- gst_app_src_set_caps(GST_APP_SRC(source), caps);
- gst_app_src_set_stream_type(GST_APP_SRC(source), GST_APP_STREAM_TYPE_STREAM);
- gst_app_src_set_size (GST_APP_SRC(source), -1);
+ gst_app_src_set_caps(GST_APP_SRC(source.get()), caps);
+ gst_app_src_set_stream_type(GST_APP_SRC(source.get()), GST_APP_STREAM_TYPE_STREAM);
+ gst_app_src_set_size (GST_APP_SRC(source.get()), -1);
- g_object_set(G_OBJECT(source), "format", GST_FORMAT_TIME, NULL);
- g_object_set(G_OBJECT(source), "block", 1, NULL);
- g_object_set(G_OBJECT(source), "is-live", 0, NULL);
+ g_object_set(G_OBJECT(source.get()), "format", GST_FORMAT_TIME, NULL);
+ g_object_set(G_OBJECT(source.get()), "block", 1, NULL);
+ g_object_set(G_OBJECT(source.get()), "is-live", 0, NULL);
- if(!manualpipeline)
+ if (!manualpipeline)
{
- g_object_set(G_OBJECT(file), "buffer-size", bufsize, NULL);
- gst_bin_add_many(GST_BIN(pipeline), source, encodebin, file, NULL);
- if(!gst_element_link_many(source, encodebin, file, NULL)) {
- CV_WARN("GStreamer: cannot link elements\n");
+ g_object_set(G_OBJECT(file.get()), "buffer-size", bufsize, NULL);
+ gst_bin_add_many(GST_BIN(pipeline.get()), source.get(), encodebin.get(), file.get(), NULL);
+ if (!gst_element_link_many(source.get(), encodebin.get(), file.get(), NULL))
+ {
+ CV_WARN("cannot link elements");
+ pipeline.release();
return false;
}
}
// encodebin pipeline to prevent early EOF event handling
    // We always fetch BGR or gray-scale frames, so combiner->splitter
    // edge in graph is useless.
- it = gst_bin_iterate_recurse (GST_BIN(encodebin));
+ it = gst_bin_iterate_recurse (GST_BIN(encodebin.get()));
while (!done) {
+ GSafePtr<gchar> name;
+ GstElement* element = NULL;
switch (gst_iterator_next (it, (void**)&element)) {
case GST_ITERATOR_OK:
- name = gst_element_get_name(element);
+ name.attach(gst_element_get_name(element));
if (strstr(name, "streamsplitter"))
splitter = element;
else if (strstr(name, "streamcombiner"))
}
#endif
- GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "write-pipeline");
+ GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(pipeline.get()), GST_DEBUG_GRAPH_SHOW_ALL, "write-pipeline");
- stateret = gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PLAYING);
- if(stateret == GST_STATE_CHANGE_FAILURE) {
+ stateret = gst_element_set_state(GST_ELEMENT(pipeline.get()), GST_STATE_PLAYING);
+ if (stateret == GST_STATE_CHANGE_FAILURE)
+ {
handleMessage(pipeline);
CV_WARN("GStreamer: cannot put pipeline to play\n");
+ pipeline.release();
return false;
}
//gst_app_src_push_buffer takes ownership of the buffer, so we need to supply it a copy
#if GST_VERSION_MAJOR == 0
- buffer = gst_buffer_try_new_and_alloc (size);
+ GstBuffer *buffer = gst_buffer_try_new_and_alloc (size);
if (!buffer)
{
CV_WARN("Cannot create GStreamer buffer");
GST_BUFFER_DURATION(buffer) = duration;
GST_BUFFER_TIMESTAMP(buffer) = timestamp;
#else
- buffer = gst_buffer_new_allocate (NULL, size, NULL);
+ GstBuffer *buffer = gst_buffer_new_allocate(NULL, size, NULL);
GstMapInfo info;
gst_buffer_map(buffer, &info, (GstMapFlags)GST_MAP_READ);
memcpy(info.data, (guint8*)image->imageData, size);
GST_BUFFER_DTS(buffer) = timestamp;
#endif
//set the current number in the frame
- GST_BUFFER_OFFSET(buffer) = num_frames;
+ GST_BUFFER_OFFSET(buffer) = num_frames;
- ret = gst_app_src_push_buffer(GST_APP_SRC(source), buffer);
- if (ret != GST_FLOW_OK) {
+ ret = gst_app_src_push_buffer(GST_APP_SRC(source.get()), buffer);
+ if (ret != GST_FLOW_OK)
+ {
CV_WARN("Error pushing buffer to GStreamer pipeline");
return false;
}
return true;
}
-/*!
- * \brief cvCreateVideoWriter_GStreamer
- * \param filename
- * \param fourcc
- * \param fps
- * \param frameSize
- * \param isColor
- * \return
- * Constructor
- */
CvVideoWriter* cvCreateVideoWriter_GStreamer(const char* filename, int fourcc, double fps,
CvSize frameSize, int isColor )
{
CvVideoWriter_GStreamer* wrt = new CvVideoWriter_GStreamer;
- if( wrt->open(filename, fourcc, fps,frameSize, isColor))
- return wrt;
+ try
+ {
+ if (wrt->open(filename, fourcc, fps, frameSize, isColor))
+ return wrt;
+ delete wrt;
+ }
+ catch (...)
+ {
+ delete wrt;
+ throw;
+ }
- delete wrt;
return 0;
}
// utility functions
/*!
 * \brief Approximate a positive floating point value by an integer fraction
 * \param decimal        value to approximate (e.g. an FPS value such as 29.97)
 * \param numerator_i    [out] numerator of the best fraction found
 * \param denominator_i  [out] denominator of the best fraction (in 1..100)
 *
 * Scans denominators 1..100 and keeps the one whose product with `decimal`
 * is closest to an integer, stopping early on an (almost) exact hit.
 */
void toFraction(const double decimal, int &numerator_i, int &denominator_i)
{
    double err = 1.0;
    int denominator = 1;
    double numerator = decimal;
    for (int check_denominator = 1; check_denominator <= 100 /* search limit */; check_denominator++)
    {
        const double check_numerator = (double)check_denominator * decimal;
        // Distance to the *nearest* integer. The previous modf()-based metric
        // measured only the fractional part, so a product just below an
        // integer (e.g. 3 * (1/3.) == 0.999999...) looked like a huge error
        // and the obvious fraction was rejected.
        const double nearest = floor(check_numerator + 0.5);
        const double check_err = fabs(check_numerator - nearest);
        if (check_err < err)
        {
            err = check_err;
            denominator = check_denominator;
            numerator = nearest;  // already rounded to the nearest integer
            if (err < FLT_EPSILON)
                break;
        }
    }
    numerator_i = (int)numerator;
    denominator_i = denominator;
    //printf("%g: %d/%d (err=%g)\n", decimal, numerator_i, denominator_i, err);
}
*/
void handleMessage(GstElement * pipeline)
{
- GError *err = NULL;
- gchar *debug = NULL;
- GstBus* bus = NULL;
+ GSafePtr<GstBus> bus;
GstStreamStatusType tp;
GstElement * elem = NULL;
- GstMessage* msg = NULL;
- bus = gst_element_get_bus(pipeline);
+ bus.attach(gst_element_get_bus(pipeline));
- while(gst_bus_have_pending(bus)) {
- msg = gst_bus_pop(bus);
- if (!msg || !GST_IS_MESSAGE(msg))
- {
+ while (gst_bus_have_pending(bus))
+ {
+ GSafePtr<GstMessage> msg;
+ msg.attach(gst_bus_pop(bus));
+ if (!msg || !GST_IS_MESSAGE(msg.get()))
continue;
- }
//printf("\t\tGot %s message\n", GST_MESSAGE_TYPE_NAME(msg));
- if(gst_is_missing_plugin_message(msg))
+ if (gst_is_missing_plugin_message(msg))
{
- CV_WARN("your gstreamer installation is missing a required plugin\n");
+ CV_WARN("your GStreamer installation is missing a required plugin");
}
else
{
// gst_element_state_get_name(newstate), gst_element_state_get_name(pendstate));
break;
case GST_MESSAGE_ERROR:
- gst_message_parse_error(msg, &err, &debug);
- //fprintf(stderr, "\t\tGStreamer Plugin: Embedded video playback halted; module %s reported: %s\n",
- // gst_element_get_name(GST_MESSAGE_SRC (msg)), err->message);
-
- g_error_free(err);
- g_free(debug);
+ {
+ GSafePtr<GError> err;
+ GSafePtr<gchar> debug;
+ gst_message_parse_error(msg, err.getRef(), debug.getRef());
+ GSafePtr<gchar> name; name.attach(gst_element_get_name(GST_MESSAGE_SRC (msg)));
+ CV_WARN("Embedded video playback halted; module " << name.get() <<
+ " reported: " << err->message);
+ CV_LOG_DEBUG(NULL, "GStreamer debug: " << debug.get());
gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_NULL);
break;
+ }
case GST_MESSAGE_EOS:
//fprintf(stderr, "\t\treached the end of the stream.");
break;
break;
}
}
- gst_message_unref(msg);
}
-
- gst_object_unref(GST_OBJECT(bus));
}
+
+
+} // namespace cv