GSList * streams);
static void gst_dash_demux_stream_free (GstDashDemuxStream * stream);
static void gst_dash_demux_reset (GstDashDemux * demux, gboolean dispose);
+#ifndef GST_DISABLE_GST_DEBUG
static GstClockTime gst_dash_demux_get_buffering_time (GstDashDemux * demux);
-static GstCaps *gst_dash_demux_get_input_caps (GstDashDemux * demux,
- GstActiveStream * stream);
static GstClockTime gst_dash_demux_stream_get_buffering_time (GstDashDemuxStream
* stream);
+#endif
+static GstCaps *gst_dash_demux_get_input_caps (GstDashDemux * demux,
+ GstActiveStream * stream);
static GstPad *gst_dash_demux_create_pad (GstDashDemux * demux);
#define gst_dash_demux_parent_class parent_class
demux->cancelled = FALSE;
}
+#ifndef GST_DISABLE_GST_DEBUG
static GstClockTime
gst_dash_demux_get_buffering_time (GstDashDemux * demux)
{
return (GstClockTime) level.time;
}
+#endif
static gboolean
gst_dash_demux_all_streams_have_data (GstDashDemux * demux)
/* Wake the download task up */
GST_TASK_SIGNAL (demux->download_task);
if (selected_stream) {
+#ifndef GST_DISABLE_GST_DEBUG
guint64 brate;
+#endif
diff = (GST_TIMEVAL_TO_TIME (now) - GST_TIMEVAL_TO_TIME (start));
gst_download_rate_add_rate (&selected_stream->dnl_rate, size_buffer, diff);
+#ifndef GST_DISABLE_GST_DEBUG
brate = (size_buffer * 8) / ((double) diff / GST_SECOND);
+#endif
GST_INFO_OBJECT (demux,
"Stream: %d Download rate = %" PRIu64 " Kbits/s (%" PRIu64
" Ko in %.2f s)", selected_stream->index,
{
xmlChar *prop_string;
gchar *str;
- gint ret, len, pos;
+ gint ret, pos;
gint year, month, day, hour, minute, second;
gboolean exists = FALSE;
prop_string = xmlGetProp (a_node, (const xmlChar *) property_name);
if (prop_string) {
- len = xmlStrlen (prop_string);
str = (gchar *) prop_string;
- GST_TRACE ("dateTime: %s, len %d", str, len);
+ GST_TRACE ("dateTime: %s, len %d", str, xmlStrlen (prop_string));
/* parse year */
ret = sscanf (str, "%d", &year);
if (ret != 1)
gchar *this_mimeType = NULL;
rep =
gst_mpdparser_get_lowest_representation (adapt_set->Representations);
+#ifndef GST_DISABLE_GST_DEBUG
if (rep && rep->BaseURLs) {
GstBaseURL *url = rep->BaseURLs->data;
GST_DEBUG ("%s", url->baseURL);
}
+#endif
if (rep->RepresentationBase)
this_mimeType = rep->RepresentationBase->mimeType;
if (!this_mimeType && adapt_set->RepresentationBase) {
void
gst_egl_adaptation_init_egl_exts (GstEglAdaptationContext * ctx)
{
+#ifndef GST_DISABLE_GST_DEBUG
const char *eglexts;
unsigned const char *glexts;
GST_STR_NULL (eglexts));
GST_DEBUG_OBJECT (ctx->element, "Available GLES extensions: %s\n",
GST_STR_NULL ((const char *) glexts));
-
+#endif
return;
}
gst_eglglessink_fill_texture (GstEglGlesSink * eglglessink, GstBuffer * buf)
{
GstVideoFrame vframe;
- gint w, h;
+#ifndef GST_DISABLE_GST_DEBUG
+ gint w;
+#endif
+ gint h;
memset (&vframe, 0, sizeof (vframe));
GST_ERROR_OBJECT (eglglessink, "Couldn't map frame");
goto HANDLE_ERROR;
}
-
+#ifndef GST_DISABLE_GST_DEBUG
w = GST_VIDEO_FRAME_WIDTH (&vframe);
+#endif
h = GST_VIDEO_FRAME_HEIGHT (&vframe);
GST_DEBUG_OBJECT (eglglessink,
GstAudioInfo * info);
static GstCaps *gst_faac_getcaps (GstAudioEncoder * enc, GstCaps * filter);
-static gboolean gst_faac_start (GstAudioEncoder * enc);
static gboolean gst_faac_stop (GstAudioEncoder * enc);
static gboolean gst_faac_set_format (GstAudioEncoder * enc,
GstAudioInfo * info);
"Free MPEG-2/4 AAC encoder",
"Ronald Bultje <rbultje@ronald.bitfreak.net>");
- base_class->start = GST_DEBUG_FUNCPTR (gst_faac_start);
base_class->stop = GST_DEBUG_FUNCPTR (gst_faac_stop);
base_class->set_format = GST_DEBUG_FUNCPTR (gst_faac_set_format);
base_class->handle_frame = GST_DEBUG_FUNCPTR (gst_faac_handle_frame);
}
static gboolean
-gst_faac_start (GstAudioEncoder * enc)
-{
- GstFaac *faac = GST_FAAC (enc);
-
- GST_DEBUG_OBJECT (faac, "start");
- return TRUE;
-}
-
-static gboolean
gst_faac_stop (GstAudioEncoder * enc)
{
GstFaac *faac = GST_FAAC (enc);
{
GstFlowReturn result;
GstClockTime pts = GST_CLOCK_TIME_NONE, dts = GST_CLOCK_TIME_NONE;
-  guint size;
+#ifndef GST_DISABLE_GST_DEBUG
+ gsize size;
+#endif
if (stream == NULL)
goto no_stream;
stream->discont = FALSE;
}
+#ifndef GST_DISABLE_GST_DEBUG
+ /* take the size now: the buffer is owned downstream once it has been pushed */
size = gst_buffer_get_size (buf);
+#endif
demux->next_pts = G_MAXUINT64;
demux->next_dts = G_MAXUINT64;
result = gst_pad_push (stream->pad, buf);
GST_DEBUG_OBJECT (demux, "pushed stream id 0x%02x type 0x%02x, pts time: %"
- GST_TIME_FORMAT ", size %d. result: %s",
+ GST_TIME_FORMAT ", size %" G_GSIZE_FORMAT ". result: %s",
stream->id, stream->type, GST_TIME_ARGS (pts),
size, gst_flow_get_name (result));
return result;
{
guint16 length;
const guint8 *data;
+#ifndef GST_DISABLE_GST_DEBUG
gboolean csps;
+#endif
if (gst_adapter_available (demux->adapter) < 6)
goto need_more_data;
/* audio_bound:6==1 ! fixed:1 | constrained:1 */
{
+#ifndef GST_DISABLE_GST_DEBUG
guint8 audio_bound;
gboolean fixed;
GST_DEBUG_OBJECT (demux, "audio_bound %d, fixed %d, constrained %d",
audio_bound, fixed, csps);
+#endif
data += 1;
}
/* audio_lock:1 | video_lock:1 | marker:1==1 | video_bound:5 */
{
+#ifndef GST_DISABLE_GST_DEBUG
gboolean audio_lock;
gboolean video_lock;
guint8 video_bound;
audio_lock = (data[0] & 0x80) == 0x80;
video_lock = (data[0] & 0x40) == 0x40;
-
+#endif
if ((data[0] & 0x20) != 0x20)
goto marker_expected;
-
+#ifndef GST_DISABLE_GST_DEBUG
/* max number of simultaneous video streams active */
video_bound = (data[0] & 0x1f);
GST_DEBUG_OBJECT (demux, "audio_lock %d, video_lock %d, video_bound %d",
audio_lock, video_lock, video_bound);
+#endif
data += 1;
}
/* packet_rate_restriction:1 | reserved:7==0x7F */
{
+#ifndef GST_DISABLE_GST_DEBUG
gboolean packet_rate_restriction;
-
+#endif
if ((data[0] & 0x7f) != 0x7f)
goto marker_expected;
-
+#ifndef GST_DISABLE_GST_DEBUG
/* only valid if csps is set */
if (csps) {
packet_rate_restriction = (data[0] & 0x80) == 0x80;
GST_DEBUG_OBJECT (demux, "packet_rate_restriction %d",
packet_rate_restriction);
}
+#endif
}
data += 1;
for (i = 0; i < stream_count; i++) {
guint8 stream_id;
+#ifndef GST_DISABLE_GST_DEBUG
gboolean STD_buffer_bound_scale;
guint16 STD_buffer_size_bound;
guint32 buf_byte_size_bound;
-
+#endif
stream_id = *data++;
if (!(stream_id & 0x80))
goto sys_len_error;
/* check marker bits */
if ((*data & 0xC0) != 0xC0)
goto no_placeholder_bits;
-
+#ifndef GST_DISABLE_GST_DEBUG
STD_buffer_bound_scale = *data & 0x20;
- STD_buffer_size_bound = ((guint16) (*data++ & 0x1F)) << 8;
- STD_buffer_size_bound |= *data++;
+ STD_buffer_size_bound = ((guint16) (data[0] & 0x1F)) << 8;
+ STD_buffer_size_bound |= data[1];
} else {
buf_byte_size_bound = STD_buffer_size_bound * 1024;
}
-
+#endif
+ /* the two STD_buffer bytes must still be consumed when debug is disabled */
+ data += 2;
GST_DEBUG_OBJECT (demux, "STD_buffer_bound_scale %d",
STD_buffer_bound_scale);
GST_DEBUG_OBJECT (demux, "STD_buffer_size_bound %d or %d bytes",
guint16 length = 0, info_length = 0, es_map_length = 0;
guint8 psm_version = 0;
const guint8 *data, *es_map_base;
+#ifndef GST_DISABLE_GST_DEBUG
gboolean applicable;
+#endif
if (gst_adapter_available (demux->adapter) < 6)
goto need_more_data;
/* Read PSM applicable bit together with version */
psm_version = GST_READ_UINT8 (data);
+#ifndef GST_DISABLE_GST_DEBUG
applicable = (psm_version & 0x80) >> 7;
+#endif
psm_version &= 0x1F;
GST_DEBUG_OBJECT (demux, "PSM version %u (applicable now %u)", psm_version,
applicable);
if (stream_type == -1) {
/* no stream type, if PS1, get the new id */
if (start_code == ID_PRIVATE_STREAM_1 && datalen >= 2) {
- guint8 nframes;
-
/* VDR writes A52 streams without any header bytes
* (see ftp://ftp.mplayerhq.hu/MPlayer/samples/MPEG-VOB/vdr-AC3) */
if (datalen >= 4) {
* take the first byte too, since it's the frame count in audio
* streams and our backwards compat convention is to strip it off */
if (stream_type != ST_PS_DVD_SUBPICTURE) {
+#ifndef GST_DISABLE_GST_DEBUG
+ guint8 nframes;
+
/* Number of audio frames in this packet */
- nframes = map.data[offset++];
+ nframes = map.data[offset];
+#endif
+ offset++;
datalen--;
GST_DEBUG_OBJECT (demux, "private type 0x%02x, %d frames", id,
nframes);
static guint
gst_mpeg_descriptor_parse_1 (guint8 * data, guint size)
{
+#ifndef GST_DISABLE_GST_DEBUG
guint8 tag;
+#endif
guint8 length;
/* need at least 2 bytes for tag and length */
if (size < 2)
return 0;
- tag = *data++;
+#ifndef GST_DISABLE_GST_DEBUG
+ tag = *data;
+#endif
+ data += 1;
length = *data++;
size -= 2;
push_out:
{
GstBuffer *out;
+#ifndef GST_DISABLE_GST_DEBUG
guint16 consumed;
consumed = avail - 6 - datalen;
+#endif
if (filter->unbounded_packet == FALSE) {
filter->length -= avail - 6;
gst_rsvg_dec_parse (GstVideoDecoder * decoder, GstVideoCodecFrame * frame,
GstAdapter * adapter, gboolean at_eos)
{
- GstRsvgDec *rsvg = GST_RSVG_DEC (decoder);
gboolean completed = FALSE;
const guint8 *data;
guint size;
guint i;
- GST_LOG_OBJECT (rsvg, "parse start");
+ GST_LOG_OBJECT (decoder, "parse start");
size = gst_adapter_available (adapter);
/* "<svg></svg>" */
data = gst_adapter_map (adapter, size);
if (data == NULL) {
- GST_ERROR_OBJECT (rsvg, "Unable to map memory");
+ GST_ERROR_OBJECT (decoder, "Unable to map memory");
return GST_FLOW_ERROR;
}
for (i = 0; i < size - 4; i++) {
return GST_VIDEO_DECODER_FLOW_NEED_DATA;
data = gst_adapter_map (adapter, size);
if (data == NULL) {
- GST_ERROR_OBJECT (rsvg, "Unable to map memory");
+ GST_ERROR_OBJECT (decoder, "Unable to map memory");
return GST_FLOW_ERROR;
}
break;
if (completed) {
- GST_LOG_OBJECT (rsvg, "have complete svg of %u bytes", size);
+ GST_LOG_OBJECT (decoder, "have complete svg of %u bytes", size);
gst_video_decoder_add_to_frame (decoder, size);
return gst_video_decoder_have_frame (decoder);
after_download = g_get_real_time ();
if (_buffer) {
+#ifndef GST_DISABLE_GST_DEBUG
guint64 bitrate = (8 * gst_buffer_get_size (_buffer) * 1000000LLU) /
(after_download - before_download);
+#endif
GST_DEBUG_OBJECT (mssdemux,
"Measured download bitrate: %s %" G_GUINT64_FORMAT " bps",
GstCaps * filter);
static gboolean gst_wayland_sink_set_caps (GstBaseSink * bsink, GstCaps * caps);
static gboolean gst_wayland_sink_start (GstBaseSink * bsink);
-static gboolean gst_wayland_sink_stop (GstBaseSink * bsink);
static gboolean gst_wayland_sink_preroll (GstBaseSink * bsink,
GstBuffer * buffer);
static gboolean
return -1;
}
+#ifndef GST_DISABLE_GST_DEBUG
static const gchar *
gst_wayland_format_to_string (uint32_t wl_format)
{
return gst_video_format_to_string (format);
}
+#endif
static void
gst_wayland_sink_class_init (GstWaylandSinkClass * klass)
gstbasesink_class->get_caps = GST_DEBUG_FUNCPTR (gst_wayland_sink_get_caps);
gstbasesink_class->set_caps = GST_DEBUG_FUNCPTR (gst_wayland_sink_set_caps);
gstbasesink_class->start = GST_DEBUG_FUNCPTR (gst_wayland_sink_start);
- gstbasesink_class->stop = GST_DEBUG_FUNCPTR (gst_wayland_sink_stop);
gstbasesink_class->preroll = GST_DEBUG_FUNCPTR (gst_wayland_sink_preroll);
gstbasesink_class->propose_allocation =
GST_DEBUG_FUNCPTR (gst_wayland_sink_propose_allocation);
}
static gboolean
-gst_wayland_sink_stop (GstBaseSink * bsink)
-{
- GstWaylandSink *sink = (GstWaylandSink *) bsink;
-
- GST_DEBUG_OBJECT (sink, "stop");
-
- return TRUE;
-}
-
-static gboolean
gst_wayland_sink_propose_allocation (GstBaseSink * bsink, GstQuery * query)
{
GstWaylandSink *sink = GST_WAYLAND_SINK (bsink);
guint32 payloadSize;
guint8 payload_type_byte, payload_size_byte;
+#ifndef GST_DISABLE_GST_DEBUG
guint remaining, payload_size;
+#endif
GstH264ParserResult res;
GST_DEBUG ("parsing \"Sei message\"");
}
while (payload_size_byte == 0xff);
+#ifndef GST_DISABLE_GST_DEBUG
remaining = nal_reader_get_remaining (&nr) * 8;
payload_size = payloadSize < remaining ? payloadSize : remaining;
GST_DEBUG ("SEI message received: payloadType %u, payloadSize = %u bytes",
sei->payloadType, payload_size);
+#endif
if (sei->payloadType == GST_H264_SEI_BUF_PERIOD) {
/* size not set; might depend on emulation_prevention_three_byte */
guint property_id, const GValue * value, GParamSpec * pspec);
static void gst_audio_channel_mix_get_property (GObject * object,
guint property_id, GValue * value, GParamSpec * pspec);
-static void gst_audio_channel_mix_dispose (GObject * object);
-static void gst_audio_channel_mix_finalize (GObject * object);
static gboolean gst_audio_channel_mix_setup (GstAudioFilter * filter,
const GstAudioInfo * info);
gobject_class->set_property = gst_audio_channel_mix_set_property;
gobject_class->get_property = gst_audio_channel_mix_get_property;
- gobject_class->dispose = gst_audio_channel_mix_dispose;
- gobject_class->finalize = gst_audio_channel_mix_finalize;
audio_filter_class->setup = GST_DEBUG_FUNCPTR (gst_audio_channel_mix_setup);
base_transform_class->transform_ip =
GST_DEBUG_FUNCPTR (gst_audio_channel_mix_transform_ip);
}
}
-void
-gst_audio_channel_mix_dispose (GObject * object)
-{
- GstAudioChannelMix *audiochannelmix = GST_AUDIO_CHANNEL_MIX (object);
-
- GST_DEBUG_OBJECT (audiochannelmix, "dispose");
-
- /* clean up as possible. may be called multiple times */
-
- G_OBJECT_CLASS (gst_audio_channel_mix_parent_class)->dispose (object);
-}
-
-void
-gst_audio_channel_mix_finalize (GObject * object)
-{
- GstAudioChannelMix *audiochannelmix = GST_AUDIO_CHANNEL_MIX (object);
-
- GST_DEBUG_OBJECT (audiochannelmix, "finalize");
-
- /* clean up object here */
-
- G_OBJECT_CLASS (gst_audio_channel_mix_parent_class)->finalize (object);
-}
-
static gboolean
gst_audio_channel_mix_setup (GstAudioFilter * filter, const GstAudioInfo * info)
{
+#ifndef GST_DISABLE_GST_DEBUG
GstAudioChannelMix *audiochannelmix = GST_AUDIO_CHANNEL_MIX (filter);
GST_DEBUG_OBJECT (audiochannelmix, "setup");
+#endif
return TRUE;
}
guint property_id, const GValue * value, GParamSpec * pspec);
static void gst_watchdog_get_property (GObject * object,
guint property_id, GValue * value, GParamSpec * pspec);
-static void gst_watchdog_dispose (GObject * object);
-static void gst_watchdog_finalize (GObject * object);
-static GstCaps *gst_watchdog_transform_caps (GstBaseTransform * trans,
- GstPadDirection direction, GstCaps * caps, GstCaps * filter);
static gboolean gst_watchdog_start (GstBaseTransform * trans);
static gboolean gst_watchdog_stop (GstBaseTransform * trans);
static gboolean gst_watchdog_sink_event (GstBaseTransform * trans,
gobject_class->set_property = gst_watchdog_set_property;
gobject_class->get_property = gst_watchdog_get_property;
- gobject_class->dispose = gst_watchdog_dispose;
- gobject_class->finalize = gst_watchdog_finalize;
- base_transform_class->transform_caps =
- GST_DEBUG_FUNCPTR (gst_watchdog_transform_caps);
base_transform_class->start = GST_DEBUG_FUNCPTR (gst_watchdog_start);
base_transform_class->stop = GST_DEBUG_FUNCPTR (gst_watchdog_stop);
base_transform_class->sink_event =
}
}
-void
-gst_watchdog_dispose (GObject * object)
-{
- GstWatchdog *watchdog = GST_WATCHDOG (object);
-
- GST_DEBUG_OBJECT (watchdog, "dispose");
-
- /* clean up as possible. may be called multiple times */
-
- G_OBJECT_CLASS (gst_watchdog_parent_class)->dispose (object);
-}
-
-void
-gst_watchdog_finalize (GObject * object)
-{
- GstWatchdog *watchdog = GST_WATCHDOG (object);
-
- GST_DEBUG_OBJECT (watchdog, "finalize");
-
- /* clean up object here */
-
- G_OBJECT_CLASS (gst_watchdog_parent_class)->finalize (object);
-}
-
-static GstCaps *
-gst_watchdog_transform_caps (GstBaseTransform * trans,
- GstPadDirection direction, GstCaps * caps, GstCaps * filter)
-{
- GstWatchdog *watchdog = GST_WATCHDOG (trans);
-
- GST_DEBUG_OBJECT (watchdog, "transform_caps");
-
- return gst_caps_ref (caps);
-}
-
static gpointer
gst_watchdog_thread (gpointer user_data)
{
GST_TIME_ARGS (state->next_ts), GST_TIME_ARGS (new_ts));
if (!gstspu_execute_event (dvdspu)) {
- GstClockTime vid_run_ts;
-
/* No current command buffer, try and get one */
SpuPacket *packet = (SpuPacket *) g_queue_pop_head (dvdspu->pending_spus);
if (packet == NULL)
return; /* No SPU packets available */
- vid_run_ts =
- gst_segment_to_running_time (&dvdspu->video_seg, GST_FORMAT_TIME,
- dvdspu->video_seg.position);
GST_LOG_OBJECT (dvdspu,
"Popped new SPU packet with TS %" GST_TIME_FORMAT
". Video position=%" GST_TIME_FORMAT " (%" GST_TIME_FORMAT
") type %s",
- GST_TIME_ARGS (packet->event_ts), GST_TIME_ARGS (vid_run_ts),
+ GST_TIME_ARGS (packet->event_ts),
+ GST_TIME_ARGS (gst_segment_to_running_time (&dvdspu->video_seg,
+ GST_FORMAT_TIME, dvdspu->video_seg.position)),
GST_TIME_ARGS (dvdspu->video_seg.position),
packet->buf ? "buffer" : "event");
gst_inter_audio_src_get_times (GstBaseSrc * src, GstBuffer * buffer,
GstClockTime * start, GstClockTime * end)
{
- GstInterAudioSrc *interaudiosrc = GST_INTER_AUDIO_SRC (src);
-
- GST_DEBUG_OBJECT (interaudiosrc, "get_times");
+ GST_DEBUG_OBJECT (src, "get_times");
/* for live sources, sync on the timestamp of the buffer */
if (gst_base_src_is_live (src)) {
static gboolean
gst_inter_audio_src_query (GstBaseSrc * src, GstQuery * query)
{
- GstInterAudioSrc *interaudiosrc = GST_INTER_AUDIO_SRC (src);
gboolean ret;
- GST_DEBUG_OBJECT (interaudiosrc, "query");
+ GST_DEBUG_OBJECT (src, "query");
switch (GST_QUERY_TYPE (query)) {
case GST_QUERY_LATENCY:{
static GstCaps *
gst_inter_audio_src_fixate (GstBaseSrc * src, GstCaps * caps)
{
- GstInterAudioSrc *interaudiosrc = GST_INTER_AUDIO_SRC (src);
GstStructure *structure;
+ GST_DEBUG_OBJECT (src, "fixate");
+
caps = gst_caps_make_writable (caps);
structure = gst_caps_get_structure (caps, 0);
- GST_DEBUG_OBJECT (interaudiosrc, "fixate");
-
gst_structure_fixate_field_nearest_int (structure, "channels", 2);
gst_structure_fixate_field_nearest_int (structure, "rate", 48000);
gst_inter_sub_src_get_times (GstBaseSrc * src, GstBuffer * buffer,
GstClockTime * start, GstClockTime * end)
{
- GstInterSubSrc *intersubsrc = GST_INTER_SUB_SRC (src);
-
- GST_DEBUG_OBJECT (intersubsrc, "get_times");
+ GST_DEBUG_OBJECT (src, "get_times");
/* for live sources, sync on the timestamp of the buffer */
if (gst_base_src_is_live (src)) {
gst_inter_video_src_get_times (GstBaseSrc * src, GstBuffer * buffer,
GstClockTime * start, GstClockTime * end)
{
- GstInterVideoSrc *intervideosrc = GST_INTER_VIDEO_SRC (src);
-
- GST_DEBUG_OBJECT (intervideosrc, "get_times");
+ GST_DEBUG_OBJECT (src, "get_times");
/* for live sources, sync on the timestamp of the buffer */
if (gst_base_src_is_live (src)) {
static GstCaps *
gst_inter_video_src_fixate (GstBaseSrc * src, GstCaps * caps)
{
- GstInterVideoSrc *intervideosrc = GST_INTER_VIDEO_SRC (src);
GstStructure *structure;
- GST_DEBUG_OBJECT (intervideosrc, "fixate");
+ GST_DEBUG_OBJECT (src, "fixate");
caps = gst_caps_make_writable (caps);
/* prototypes */
-static void gst_comb_detect_set_property (GObject * object,
- guint property_id, const GValue * value, GParamSpec * pspec);
-static void gst_comb_detect_get_property (GObject * object,
- guint property_id, GValue * value, GParamSpec * pspec);
-static void gst_comb_detect_dispose (GObject * object);
-static void gst_comb_detect_finalize (GObject * object);
-
static GstCaps *gst_comb_detect_transform_caps (GstBaseTransform * trans,
GstPadDirection direction, GstCaps * caps, GstCaps * filter);
-static gboolean gst_comb_detect_start (GstBaseTransform * trans);
-static gboolean gst_comb_detect_stop (GstBaseTransform * trans);
static gboolean gst_comb_detect_set_info (GstVideoFilter * filter,
GstCaps * incaps, GstVideoInfo * in_info, GstCaps * outcaps,
GstVideoInfo * out_info);
static void
gst_comb_detect_class_init (GstCombDetectClass * klass)
{
- GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
GstBaseTransformClass *base_transform_class =
GST_BASE_TRANSFORM_CLASS (klass);
GstVideoFilterClass *video_filter_class = GST_VIDEO_FILTER_CLASS (klass);
"Comb Detect", "Video/Filter", "Detect combing artifacts in video stream",
"David Schleef <ds@schleef.org>");
- gobject_class->set_property = gst_comb_detect_set_property;
- gobject_class->get_property = gst_comb_detect_get_property;
- gobject_class->dispose = gst_comb_detect_dispose;
- gobject_class->finalize = gst_comb_detect_finalize;
base_transform_class->transform_caps =
GST_DEBUG_FUNCPTR (gst_comb_detect_transform_caps);
- base_transform_class->start = GST_DEBUG_FUNCPTR (gst_comb_detect_start);
- base_transform_class->stop = GST_DEBUG_FUNCPTR (gst_comb_detect_stop);
video_filter_class->set_info = GST_DEBUG_FUNCPTR (gst_comb_detect_set_info);
video_filter_class->transform_frame =
GST_DEBUG_FUNCPTR (gst_comb_detect_transform_frame);
{
}
-void
-gst_comb_detect_set_property (GObject * object, guint property_id,
- const GValue * value, GParamSpec * pspec)
-{
- GstCombDetect *combdetect = GST_COMB_DETECT (object);
-
- GST_DEBUG_OBJECT (combdetect, "set_property");
-
- switch (property_id) {
- default:
- G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
- break;
- }
-}
-
-void
-gst_comb_detect_get_property (GObject * object, guint property_id,
- GValue * value, GParamSpec * pspec)
-{
- GstCombDetect *combdetect = GST_COMB_DETECT (object);
-
- GST_DEBUG_OBJECT (combdetect, "get_property");
-
- switch (property_id) {
- default:
- G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
- break;
- }
-}
-
-void
-gst_comb_detect_dispose (GObject * object)
-{
- GstCombDetect *combdetect = GST_COMB_DETECT (object);
-
- GST_DEBUG_OBJECT (combdetect, "dispose");
-
- /* clean up as possible. may be called multiple times */
-
- G_OBJECT_CLASS (gst_comb_detect_parent_class)->dispose (object);
-}
-
-void
-gst_comb_detect_finalize (GObject * object)
-{
- GstCombDetect *combdetect = GST_COMB_DETECT (object);
-
- GST_DEBUG_OBJECT (combdetect, "finalize");
-
- /* clean up object here */
-
- G_OBJECT_CLASS (gst_comb_detect_parent_class)->finalize (object);
-}
static GstCaps *
gst_comb_detect_transform_caps (GstBaseTransform * trans,
return othercaps;
}
-static gboolean
-gst_comb_detect_start (GstBaseTransform * trans)
-{
- GstCombDetect *combdetect = GST_COMB_DETECT (trans);
-
- GST_DEBUG_OBJECT (combdetect, "start");
-
- /* initialize processing */
- return TRUE;
-}
-
-static gboolean
-gst_comb_detect_stop (GstBaseTransform * trans)
-{
- GstCombDetect *combdetect = GST_COMB_DETECT (trans);
-
- GST_DEBUG_OBJECT (combdetect, "stop");
-
- /* finalize processing */
- return TRUE;
-}
static gboolean
gst_comb_detect_set_info (GstVideoFilter * filter,
/* prototypes */
-static void gst_ivtc_set_property (GObject * object,
- guint property_id, const GValue * value, GParamSpec * pspec);
-static void gst_ivtc_get_property (GObject * object,
- guint property_id, GValue * value, GParamSpec * pspec);
-static void gst_ivtc_dispose (GObject * object);
-static void gst_ivtc_finalize (GObject * object);
-
static GstCaps *gst_ivtc_transform_caps (GstBaseTransform * trans,
GstPadDirection direction, GstCaps * caps, GstCaps * filter);
static GstCaps *gst_ivtc_fixate_caps (GstBaseTransform * trans,
GstPadDirection direction, GstCaps * caps, GstCaps * othercaps);
static gboolean gst_ivtc_set_caps (GstBaseTransform * trans, GstCaps * incaps,
GstCaps * outcaps);
-static gboolean gst_ivtc_start (GstBaseTransform * trans);
static gboolean gst_ivtc_stop (GstBaseTransform * trans);
static gboolean gst_ivtc_sink_event (GstBaseTransform * trans,
GstEvent * event);
static GstFlowReturn gst_ivtc_transform (GstBaseTransform * trans,
static void
gst_ivtc_class_init (GstIvtcClass * klass)
{
- GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
GstBaseTransformClass *base_transform_class =
GST_BASE_TRANSFORM_CLASS (klass);
"Inverse Telecine", "Video/Filter", "Inverse Telecine Filter",
"David Schleef <ds@schleef.org>");
- gobject_class->set_property = gst_ivtc_set_property;
- gobject_class->get_property = gst_ivtc_get_property;
- gobject_class->dispose = gst_ivtc_dispose;
- gobject_class->finalize = gst_ivtc_finalize;
base_transform_class->transform_caps =
GST_DEBUG_FUNCPTR (gst_ivtc_transform_caps);
base_transform_class->fixate_caps = GST_DEBUG_FUNCPTR (gst_ivtc_fixate_caps);
base_transform_class->set_caps = GST_DEBUG_FUNCPTR (gst_ivtc_set_caps);
- base_transform_class->start = GST_DEBUG_FUNCPTR (gst_ivtc_start);
base_transform_class->stop = GST_DEBUG_FUNCPTR (gst_ivtc_stop);
base_transform_class->sink_event = GST_DEBUG_FUNCPTR (gst_ivtc_sink_event);
base_transform_class->transform = GST_DEBUG_FUNCPTR (gst_ivtc_transform);
}
{
}
-void
-gst_ivtc_set_property (GObject * object, guint property_id,
- const GValue * value, GParamSpec * pspec)
-{
- GstIvtc *ivtc = GST_IVTC (object);
-
- GST_DEBUG_OBJECT (ivtc, "set_property");
-
- switch (property_id) {
- default:
- G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
- break;
- }
-}
-
-void
-gst_ivtc_get_property (GObject * object, guint property_id,
- GValue * value, GParamSpec * pspec)
-{
- GstIvtc *ivtc = GST_IVTC (object);
-
- GST_DEBUG_OBJECT (ivtc, "get_property");
-
- switch (property_id) {
- default:
- G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
- break;
- }
-}
-
-void
-gst_ivtc_dispose (GObject * object)
-{
- GstIvtc *ivtc = GST_IVTC (object);
-
- GST_DEBUG_OBJECT (ivtc, "dispose");
-
- /* clean up as possible. may be called multiple times */
-
- G_OBJECT_CLASS (gst_ivtc_parent_class)->dispose (object);
-}
-
-void
-gst_ivtc_finalize (GObject * object)
-{
- GstIvtc *ivtc = GST_IVTC (object);
-
- GST_DEBUG_OBJECT (ivtc, "finalize");
-
- /* clean up object here */
-
- G_OBJECT_CLASS (gst_ivtc_parent_class)->finalize (object);
-}
-
static GstCaps *
gst_ivtc_transform_caps (GstBaseTransform * trans,
GstPadDirection direction, GstCaps * caps, GstCaps * filter)
return TRUE;
}
-/* states */
-static gboolean
-gst_ivtc_start (GstBaseTransform * trans)
-{
- GstIvtc *ivtc = GST_IVTC (trans);
-
- GST_DEBUG_OBJECT (ivtc, "start");
-
- return TRUE;
-}
-
static gboolean
gst_ivtc_stop (GstBaseTransform * trans)
{
GstIvtc *ivtc = GST_IVTC (trans);

GST_DEBUG_OBJECT (ivtc, "stop");
gst_ivtc_flush (ivtc);

return TRUE;
}
/* sink and src pad event handlers */
static gboolean
gst_ivtc_sink_event (GstBaseTransform * trans, GstEvent * event)
{
GstFlowReturn result;
GstClockTime pts = GST_CLOCK_TIME_NONE, dts = GST_CLOCK_TIME_NONE;
-  guint size;
+#ifndef GST_DISABLE_GST_DEBUG
+ gsize size;
+#endif
if (stream == NULL)
goto no_stream;
GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_DISCONT);
}
+#ifndef GST_DISABLE_GST_DEBUG
+ /* take the size now: the buffer is owned downstream once it has been pushed */
size = gst_buffer_get_size (buf);
+#endif
-
demux->next_pts = G_MAXUINT64;
demux->next_dts = G_MAXUINT64;
stream->last_flow = result = gst_pad_push (stream->pad, buf);
GST_LOG_OBJECT (demux, "pushed stream id 0x%02x type 0x%02x, pts time: %"
- GST_TIME_FORMAT ", size %d. result: %s",
+ GST_TIME_FORMAT ", size %" G_GSIZE_FORMAT ". result: %s",
stream->id, stream->type, GST_TIME_ARGS (pts),
size, gst_flow_get_name (result));
return result;
{
guint16 length;
const guint8 *data;
+#ifndef GST_DISABLE_GST_DEBUG
gboolean csps;
+#endif
if (gst_adapter_available (demux->adapter) < 6)
goto need_more_data;
/* audio_bound:6==1 ! fixed:1 | constrained:1 */
{
+#ifndef GST_DISABLE_GST_DEBUG
guint8 audio_bound;
gboolean fixed;
GST_DEBUG_OBJECT (demux, "audio_bound %d, fixed %d, constrained %d",
audio_bound, fixed, csps);
+#endif
data += 1;
}
/* audio_lock:1 | video_lock:1 | marker:1==1 | video_bound:5 */
{
+#ifndef GST_DISABLE_GST_DEBUG
gboolean audio_lock;
gboolean video_lock;
guint8 video_bound;
audio_lock = (data[0] & 0x80) == 0x80;
video_lock = (data[0] & 0x40) == 0x40;
+#endif
if ((data[0] & 0x20) != 0x20)
goto marker_expected;
+#ifndef GST_DISABLE_GST_DEBUG
/* max number of simultaneous video streams active */
video_bound = (data[0] & 0x1f);
GST_DEBUG_OBJECT (demux, "audio_lock %d, video_lock %d, video_bound %d",
audio_lock, video_lock, video_bound);
+#endif
data += 1;
}
/* packet_rate_restriction:1 | reserved:7==0x7F */
{
+#ifndef GST_DISABLE_GST_DEBUG
gboolean packet_rate_restriction;
-
+#endif
if ((data[0] & 0x7f) != 0x7f)
goto marker_expected;
-
+#ifndef GST_DISABLE_GST_DEBUG
/* only valid if csps is set */
if (csps) {
packet_rate_restriction = (data[0] & 0x80) == 0x80;
GST_DEBUG_OBJECT (demux, "packet_rate_restriction %d",
packet_rate_restriction);
}
+#endif
}
data += 1;
for (i = 0; i < stream_count; i++) {
guint8 stream_id;
+#ifndef GST_DISABLE_GST_DEBUG
gboolean STD_buffer_bound_scale;
guint16 STD_buffer_size_bound;
guint32 buf_byte_size_bound;
-
+#endif
stream_id = *data++;
if (!(stream_id & 0x80))
goto sys_len_error;
/* check marker bits */
if ((*data & 0xC0) != 0xC0)
goto no_placeholder_bits;
-
+#ifndef GST_DISABLE_GST_DEBUG
STD_buffer_bound_scale = *data & 0x20;
- STD_buffer_size_bound = ((guint16) (*data++ & 0x1F)) << 8;
- STD_buffer_size_bound |= *data++;
+ STD_buffer_size_bound = ((guint16) (data[0] & 0x1F)) << 8;
+ STD_buffer_size_bound |= data[1];
STD_buffer_bound_scale);
GST_DEBUG_OBJECT (demux, "STD_buffer_size_bound %d or %d bytes",
STD_buffer_size_bound, buf_byte_size_bound);
+#endif
+ /* the two STD_buffer bytes must still be consumed when debug is disabled */
+ data += 2;
}
}
guint16 length = 0, info_length = 0, es_map_length = 0;
guint8 psm_version = 0;
const guint8 *data, *es_map_base;
+#ifndef GST_DISABLE_GST_DEBUG
gboolean applicable;
+#endif
if (gst_adapter_available (demux->adapter) < 6)
goto need_more_data;
/* Read PSM applicable bit together with version */
psm_version = GST_READ_UINT8 (data);
+#ifndef GST_DISABLE_GST_DEBUG
applicable = (psm_version & 0x80) >> 7;
+#endif
psm_version &= 0x1F;
GST_DEBUG_OBJECT (demux, "PSM version %u (applicable now %u)", psm_version,
applicable);
if (stream_type == -1) {
/* no stream type, if PS1, get the new id */
if (start_code == ID_PRIVATE_STREAM_1 && datalen >= 2) {
- guint8 nframes;
-
/* VDR writes A52 streams without any header bytes
* (see ftp://ftp.mplayerhq.hu/MPlayer/samples/MPEG-VOB/vdr-AC3) */
if (datalen >= 4) {
* streams and our backwards compat convention is to strip it off */
if (stream_type != ST_PS_DVD_SUBPICTURE) {
/* Number of audio frames in this packet */
- nframes = map.data[offset++];
- datalen--;
+#ifndef GST_DISABLE_GST_DEBUG
+ guint8 nframes;
+
+ nframes = map.data[offset];
GST_LOG_OBJECT (demux, "private type 0x%02x, %d frames", id,
nframes);
+#endif
+ offset++;
+ datalen--;
} else {
GST_LOG_OBJECT (demux, "private type 0x%02x, stream type %d", id,
stream_type);
push_out:
{
GstBuffer *out;
+#ifndef GST_DISABLE_GST_DEBUG
guint16 consumed;
consumed = avail - 6 - datalen;
+#endif
if (filter->unbounded_packet == FALSE) {
filter->length -= avail - 6;
MpegTsMux *mux = GST_MPEG_TSMUX (user_data);
gboolean res = FALSE;
gboolean forward = TRUE;
+#ifndef GST_DISABLE_GST_DEBUG
GstPad *pad;
pad = data->pad;
+#endif
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_CUSTOM_DOWNSTREAM:
GstBuffer *out_buf;
guint8 *in_data, *out_data;
guint i, size, num_frames;
- gint out_size, in_size;
+ gint out_size;
+#ifndef GST_DISABLE_GST_DEBUG
+ gint in_size;
+#endif
gint decode_ret;
GstMapInfo inmap, outmap;
num_frames = size / 40;
/* this is the input/output size */
+#ifndef GST_DISABLE_GST_DEBUG
in_size = num_frames * 40;
+#endif
out_size = num_frames * 640;
GST_LOG_OBJECT (dec, "we have %u frames, %u in, %u out", num_frames, in_size,
GstBuffer *out_buf;
guint8 *in_data, *out_data;
guint i, size, num_frames;
- gint out_size, in_size;
+ gint out_size;
+#ifndef GST_DISABLE_GST_DEBUG
+ gint in_size;
+#endif
gint encode_ret;
GstMapInfo inmap, outmap;
num_frames = size / 640;
/* this is the input/output size */
+#ifndef GST_DISABLE_GST_DEBUG
in_size = num_frames * 640;
+#endif
out_size = num_frames * 40;
GST_LOG_OBJECT (enc, "we have %u frames, %u in, %u out", num_frames, in_size,
/* prototypes */
-static void gst_scene_change_set_property (GObject * object,
- guint property_id, const GValue * value, GParamSpec * pspec);
-static void gst_scene_change_get_property (GObject * object,
- guint property_id, GValue * value, GParamSpec * pspec);
-static void gst_scene_change_dispose (GObject * object);
-static void gst_scene_change_finalize (GObject * object);
-
-static gboolean gst_scene_change_set_info (GstVideoFilter * filter,
- GstCaps * incaps, GstVideoInfo * in_info, GstCaps * outcaps,
- GstVideoInfo * out_info);
static GstFlowReturn gst_scene_change_transform_frame_ip (GstVideoFilter *
filter, GstVideoFrame * frame);
static void
gst_scene_change_class_init (GstSceneChangeClass * klass)
{
- GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
GstVideoFilterClass *video_filter_class = GST_VIDEO_FILTER_CLASS (klass);
gst_element_class_add_pad_template (GST_ELEMENT_CLASS (klass),
"Video/Filter", "Detects scene changes in video",
"David Schleef <ds@entropywave.com>");
- gobject_class->set_property = gst_scene_change_set_property;
- gobject_class->get_property = gst_scene_change_get_property;
- gobject_class->dispose = gst_scene_change_dispose;
- gobject_class->finalize = gst_scene_change_finalize;
- video_filter_class->set_info = GST_DEBUG_FUNCPTR (gst_scene_change_set_info);
video_filter_class->transform_frame_ip =
GST_DEBUG_FUNCPTR (gst_scene_change_transform_frame_ip);
{
}
-void
-gst_scene_change_set_property (GObject * object, guint property_id,
- const GValue * value, GParamSpec * pspec)
-{
- GstSceneChange *scenechange = GST_SCENE_CHANGE (object);
-
- GST_DEBUG_OBJECT (scenechange, "set_property");
-
- switch (property_id) {
- default:
- G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
- break;
- }
-}
-
-void
-gst_scene_change_get_property (GObject * object, guint property_id,
- GValue * value, GParamSpec * pspec)
-{
- GstSceneChange *scenechange = GST_SCENE_CHANGE (object);
-
- GST_DEBUG_OBJECT (scenechange, "get_property");
-
- switch (property_id) {
- default:
- G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
- break;
- }
-}
-
-void
-gst_scene_change_dispose (GObject * object)
-{
- GstSceneChange *scenechange = GST_SCENE_CHANGE (object);
-
- GST_DEBUG_OBJECT (scenechange, "dispose");
-
- /* clean up as possible. may be called multiple times */
-
- G_OBJECT_CLASS (gst_scene_change_parent_class)->dispose (object);
-}
-
-void
-gst_scene_change_finalize (GObject * object)
-{
- GstSceneChange *scenechange = GST_SCENE_CHANGE (object);
-
- GST_DEBUG_OBJECT (scenechange, "finalize");
-
- /* clean up object here */
-
- G_OBJECT_CLASS (gst_scene_change_parent_class)->finalize (object);
-}
-
-static gboolean
-gst_scene_change_set_info (GstVideoFilter * filter, GstCaps * incaps,
- GstVideoInfo * in_info, GstCaps * outcaps, GstVideoInfo * out_info)
-{
- GstSceneChange *scenechange = GST_SCENE_CHANGE (filter);
-
- GST_DEBUG_OBJECT (scenechange, "set_info");
-
- return TRUE;
-}
-
static double
get_frame_score (GstVideoFrame * f1, GstVideoFrame * f2)
/* prototypes */
-
-static void gst_video_diff_set_property (GObject * object,
- guint property_id, const GValue * value, GParamSpec * pspec);
-static void gst_video_diff_get_property (GObject * object,
- guint property_id, GValue * value, GParamSpec * pspec);
-static void gst_video_diff_dispose (GObject * object);
-static void gst_video_diff_finalize (GObject * object);
-
-static gboolean gst_video_diff_start (GstBaseTransform * trans);
-static gboolean gst_video_diff_stop (GstBaseTransform * trans);
-static gboolean gst_video_diff_set_info (GstVideoFilter * filter,
- GstCaps * incaps, GstVideoInfo * in_info, GstCaps * outcaps,
- GstVideoInfo * out_info);
static GstFlowReturn gst_video_diff_transform_frame (GstVideoFilter * filter,
GstVideoFrame * inframe, GstVideoFrame * outframe);
static void
gst_video_diff_class_init (GstVideoDiffClass * klass)
{
- GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
- GstBaseTransformClass *base_transform_class =
- GST_BASE_TRANSFORM_CLASS (klass);
GstVideoFilterClass *video_filter_class = GST_VIDEO_FILTER_CLASS (klass);
/* Setting up pads and setting metadata should be moved to
"FIXME Long name", "Generic", "FIXME Description",
"FIXME <fixme@example.com>");
- gobject_class->set_property = gst_video_diff_set_property;
- gobject_class->get_property = gst_video_diff_get_property;
- gobject_class->dispose = gst_video_diff_dispose;
- gobject_class->finalize = gst_video_diff_finalize;
- base_transform_class->start = GST_DEBUG_FUNCPTR (gst_video_diff_start);
- base_transform_class->stop = GST_DEBUG_FUNCPTR (gst_video_diff_stop);
- video_filter_class->set_info = GST_DEBUG_FUNCPTR (gst_video_diff_set_info);
video_filter_class->transform_frame =
GST_DEBUG_FUNCPTR (gst_video_diff_transform_frame);
videodiff->threshold = 10;
}
-void
-gst_video_diff_set_property (GObject * object, guint property_id,
- const GValue * value, GParamSpec * pspec)
-{
- GstVideoDiff *videodiff = GST_VIDEO_DIFF (object);
-
- GST_DEBUG_OBJECT (videodiff, "set_property");
-
- switch (property_id) {
- default:
- G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
- break;
- }
-}
-
-void
-gst_video_diff_get_property (GObject * object, guint property_id,
- GValue * value, GParamSpec * pspec)
-{
- GstVideoDiff *videodiff = GST_VIDEO_DIFF (object);
-
- GST_DEBUG_OBJECT (videodiff, "get_property");
-
- switch (property_id) {
- default:
- G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
- break;
- }
-}
-
-void
-gst_video_diff_dispose (GObject * object)
-{
- GstVideoDiff *videodiff = GST_VIDEO_DIFF (object);
-
- GST_DEBUG_OBJECT (videodiff, "dispose");
-
- /* clean up as possible. may be called multiple times */
-
- G_OBJECT_CLASS (gst_video_diff_parent_class)->dispose (object);
-}
-
-void
-gst_video_diff_finalize (GObject * object)
-{
- GstVideoDiff *videodiff = GST_VIDEO_DIFF (object);
-
- GST_DEBUG_OBJECT (videodiff, "finalize");
-
- /* clean up object here */
-
- G_OBJECT_CLASS (gst_video_diff_parent_class)->finalize (object);
-}
-
-static gboolean
-gst_video_diff_start (GstBaseTransform * trans)
-{
- GstVideoDiff *videodiff = GST_VIDEO_DIFF (trans);
-
- GST_DEBUG_OBJECT (videodiff, "start");
-
- return TRUE;
-}
-
-static gboolean
-gst_video_diff_stop (GstBaseTransform * trans)
-{
- GstVideoDiff *videodiff = GST_VIDEO_DIFF (trans);
-
- GST_DEBUG_OBJECT (videodiff, "stop");
-
- return TRUE;
-}
-
-static gboolean
-gst_video_diff_set_info (GstVideoFilter * filter, GstCaps * incaps,
- GstVideoInfo * in_info, GstCaps * outcaps, GstVideoInfo * out_info)
-{
- GstVideoDiff *videodiff = GST_VIDEO_DIFF (filter);
-
- GST_DEBUG_OBJECT (videodiff, "set_info");
-
- return TRUE;
-}
-
static GstFlowReturn
gst_video_diff_transform_frame_ip_planarY (GstVideoDiff * videodiff,
GstVideoFrame * outframe, GstVideoFrame * inframe, GstVideoFrame * oldframe)
guint property_id, const GValue * value, GParamSpec * pspec);
static void gst_zebra_stripe_get_property (GObject * object,
guint property_id, GValue * value, GParamSpec * pspec);
-static void gst_zebra_stripe_dispose (GObject * object);
-static void gst_zebra_stripe_finalize (GObject * object);
static gboolean gst_zebra_stripe_start (GstBaseTransform * trans);
static gboolean gst_zebra_stripe_stop (GstBaseTransform * trans);
-static gboolean gst_zebra_stripe_set_info (GstVideoFilter * filter,
- GstCaps * incaps, GstVideoInfo * in_info, GstCaps * outcaps,
- GstVideoInfo * out_info);
static GstFlowReturn gst_zebra_stripe_transform_frame_ip (GstVideoFilter *
filter, GstVideoFrame * frame);
gobject_class->set_property = gst_zebra_stripe_set_property;
gobject_class->get_property = gst_zebra_stripe_get_property;
- gobject_class->dispose = gst_zebra_stripe_dispose;
- gobject_class->finalize = gst_zebra_stripe_finalize;
base_transform_class->start = GST_DEBUG_FUNCPTR (gst_zebra_stripe_start);
base_transform_class->stop = GST_DEBUG_FUNCPTR (gst_zebra_stripe_stop);
- video_filter_class->set_info = GST_DEBUG_FUNCPTR (gst_zebra_stripe_set_info);
video_filter_class->transform_frame_ip =
GST_DEBUG_FUNCPTR (gst_zebra_stripe_transform_frame_ip);
}
}
-void
-gst_zebra_stripe_dispose (GObject * object)
-{
- GstZebraStripe *zebrastripe = GST_ZEBRA_STRIPE (object);
-
- GST_DEBUG_OBJECT (zebrastripe, "dispose");
-
- /* clean up as possible. may be called multiple times */
-
- G_OBJECT_CLASS (gst_zebra_stripe_parent_class)->dispose (object);
-}
-
-void
-gst_zebra_stripe_finalize (GObject * object)
-{
- GstZebraStripe *zebrastripe = GST_ZEBRA_STRIPE (object);
-
- GST_DEBUG_OBJECT (zebrastripe, "finalize");
-
- /* clean up object here */
-
- G_OBJECT_CLASS (gst_zebra_stripe_parent_class)->finalize (object);
-}
-
static gboolean
gst_zebra_stripe_start (GstBaseTransform * trans)
{
+#ifndef GST_DISABLE_GST_DEBUG
GstZebraStripe *zebrastripe = GST_ZEBRA_STRIPE (trans);
GST_DEBUG_OBJECT (zebrastripe, "start");
+#endif
if (GST_BASE_TRANSFORM_CLASS (gst_zebra_stripe_parent_class)->start)
return
static gboolean
gst_zebra_stripe_stop (GstBaseTransform * trans)
{
+#ifndef GST_DISABLE_GST_DEBUG
GstZebraStripe *zebrastripe = GST_ZEBRA_STRIPE (trans);
GST_DEBUG_OBJECT (zebrastripe, "stop");
+#endif
if (GST_BASE_TRANSFORM_CLASS (gst_zebra_stripe_parent_class)->stop)
return
return TRUE;
}
-static gboolean
-gst_zebra_stripe_set_info (GstVideoFilter * filter, GstCaps * incaps,
- GstVideoInfo * in_info, GstCaps * outcaps, GstVideoInfo * out_info)
-{
- GstZebraStripe *zebrastripe = GST_ZEBRA_STRIPE (filter);
-
- GST_DEBUG_OBJECT (zebrastripe, "set_info");
-
- return TRUE;
-}
-
static GstFlowReturn
gst_zebra_stripe_transform_frame_ip_planarY (GstZebraStripe * zebrastripe,
GstVideoFrame * frame)
store[id] = buf;
}
-
+#ifndef GST_DISABLE_GST_DEBUG
static const gchar *nal_names[] = {
"Unknown",
"Slice",
return nal_names[nal_type];
return "Invalid";
}
+#endif
/* SPS/PPS/IDR considered key, all others DELTA;
* so downstream waiting for keyframe can pick up at SPS/PPS/IDR */
{
GstClockTime running_time;
guint count;
+#ifndef GST_DISABLE_GST_DEBUG
gboolean have_sps, have_pps;
gint i;
+#endif
parse->pending_key_unit_ts = GST_CLOCK_TIME_NONE;
gst_event_replace (&parse->force_key_unit_event, NULL);
GST_TIME_ARGS (running_time), count);
gst_pad_push_event (GST_BASE_PARSE_SRC_PAD (parse), event);
+#ifndef GST_DISABLE_GST_DEBUG
have_sps = have_pps = FALSE;
for (i = 0; i < GST_H264_MAX_SPS_COUNT; i++) {
if (parse->sps_nals[i] != NULL) {
GST_INFO_OBJECT (parse, "preparing key unit, have sps %d have pps %d",
have_sps, have_pps);
+#endif
/* set push_codec to TRUE so that pre_push_frame sends SPS/PPS again */
parse->push_codec = TRUE;
(value = gst_structure_get_value (str, "codec_data"))) {
GstMapInfo map;
guint8 *data;
- guint num_sps, num_pps, profile;
+ guint num_sps, num_pps;
+#ifndef GST_DISABLE_GST_DEBUG
+ guint profile;
+#endif
gint i;
GST_DEBUG_OBJECT (h264parse, "have packetized h264");
gst_buffer_unmap (codec_data, &map);
goto wrong_version;
}
-
+#ifndef GST_DISABLE_GST_DEBUG
/* AVCProfileIndication */
/* profile_compat */
/* AVCLevelIndication */
profile = (data[1] << 16) | (data[2] << 8) | data[3];
GST_DEBUG_OBJECT (h264parse, "profile %06x", profile);
+#endif
/* 6 bits reserved | 2 bits lengthSizeMinusOne */
/* this is the number of bytes in front of the NAL units to mark their
{1408, 1152}
};
+#ifndef GST_DISABLE_GST_DEBUG
static const gchar *source_format_name[] = {
"Forbidden",
"sub-QCIF",
"Reserved",
"Extended PType"
};
+#endif
GstBitReader br;
GstMapInfo map;
if (strcmp (name_str, "OMX.k3.video.decoder.avc") == 0)
if (n_elems == 1 && color_formats_elems[k] == COLOR_FormatYCbYCr) {
GST_INFO ("On HuaweiMediaPad it reports a wrong COLOR_FormatYCbYCr,"
- "should be COLOR_TI_FormatYUV420PackedSemiPlanar, fix it.");
+ "should be COLOR_TI_FormatYUV420PackedSemiPlanar, fix it.");
color_formats_elems[k] = COLOR_TI_FormatYUV420PackedSemiPlanar;
}
handle_application_info_reply (CamApplicationInfo * info,
CamSLSession * session, guint8 * buffer, guint length)
{
+#ifndef GST_DISABLE_GST_DEBUG
guint8 type;
guint8 menu_length;
gchar menu[256];
menu[menu_length] = 0;
GST_INFO ("application info reply, type: %d, menu: %s", type, menu);
-
+#endif
return CAM_RETURN_OK;
}
handle_conditional_access_info_reply (CamConditionalAccess * cas,
CamSLSession * session, guint8 * buffer, guint length)
{
+#ifndef GST_DISABLE_GST_DEBUG
int i;
guint16 cas_id;
}
+#endif
cas->ready = TRUE;
return CAM_RETURN_OK;
}
handle_conditional_access_pmt_reply (CamConditionalAccess * cas,
CamSLSession * session, guint8 * buffer, guint length)
{
+#ifndef GST_DISABLE_GST_DEBUG
guint16 program_num;
guint8 version_num, current_next_indicator;
length -= 3;
}
+#endif
return CAM_RETURN_OK;
}
VdpStatus status;
GstVdpVideoMemory *vmem;
+#ifndef GST_DISABLE_GST_DEBUG
GstClockTime before, after;
+#endif
GST_DEBUG_OBJECT (vdp_decoder, "n_bufs:%d, frame:%d", n_bufs,
frame->system_frame_number);
goto no_mem;
GST_DEBUG_OBJECT (vdp_decoder, "Calling VdpDecoderRender()");
+#ifndef GST_DISABLE_GST_DEBUG
before = gst_util_get_timestamp ();
+#endif
status =
vdp_decoder->device->vdp_decoder_render (vdp_decoder->decoder,
vmem->surface, info, n_bufs, bufs);
+#ifndef GST_DISABLE_GST_DEBUG
after = gst_util_get_timestamp ();
+#endif
if (status != VDP_STATUS_OK)
goto decode_error;
{
VdpStatus vdp_stat;
GstVideoInfo *info = vmem->info;
+#ifndef GST_DISABLE_GST_DEBUG
GstClockTime before, after;
+#endif
if (g_atomic_int_add (&vmem->refcount, 1) > 1)
return TRUE;
vmem->destination_pitches[0],
vmem->destination_pitches[1], vmem->destination_pitches[2]);
+#ifndef GST_DISABLE_GST_DEBUG
before = gst_util_get_timestamp ();
+#endif
vdp_stat =
vmem->device->vdp_video_surface_get_bits_ycbcr (vmem->surface,
vmem->ycbcr_format, vmem->cached_data, vmem->destination_pitches);
+#ifndef GST_DISABLE_GST_DEBUG
after = gst_util_get_timestamp ();
+#endif
GST_CAT_WARNING (GST_CAT_PERFORMANCE, "Downloading took %" GST_TIME_FORMAT,
GST_TIME_ARGS (after - before));