- rtpjitterbuffer has improved end-of-stream handling
-- rtpmp4vpay will be prefered over rtpmp4gpay for MPEG-4 video in
+- rtpmp4vpay will be preferred over rtpmp4gpay for MPEG-4 video in
autoplugging scenarios now
- rtspsrc now allows applications to send RTSP SET_PARAMETER and
used in order to reproduce a specific build. To set a manifest, you
can set manifest = 'my_manifest.xml' in your configuration file, or
use the --manifest command line option. The command line option will
- take precendence over anything specific in the configuration file.
+ take precedence over anything specific in the configuration file.
- The new build-deps command can be used to build only the
dependencies of a recipe, without the recipe itself.
},
"properties": {
"drain-on-changes": {
- "blurb": "Drains the filter when its coeficients change",
+ "blurb": "Drains the filter when its coefficients change",
"construct": false,
"construct-only": false,
"default": "true",
},
"properties": {
"drain-on-changes": {
- "blurb": "Drains the filter when its coeficients change",
+ "blurb": "Drains the filter when its coefficients change",
"construct": false,
"construct-only": false,
"default": "true",
"writable": true
},
"drain-on-changes": {
- "blurb": "Drains the filter when its coeficients change",
+ "blurb": "Drains the filter when its coefficients change",
"construct": false,
"construct-only": false,
"default": "true",
"writable": true
},
"min": {
- "blurb": "mininum buffer size",
+ "blurb": "minimum buffer size",
"construct": true,
"construct-only": false,
"default": "1",
"writable": true
},
"tls-interaction": {
- "blurb": "A GTlsInteraction object to promt the user for password or certificate",
+ "blurb": "A GTlsInteraction object to prompt the user for password or certificate",
"construct": false,
"construct-only": false,
"type-name": "GTlsInteraction",
"writable": true
},
"multicast-iface": {
- "blurb": "The network interface on which to join the multicast group.This allows multiple interfaces seperated by comma. (\"eth0,eth1\")",
+ "blurb": "The network interface on which to join the multicast group.This allows multiple interfaces separated by comma. (\"eth0,eth1\")",
"construct": false,
"construct-only": false,
"default": "NULL",
gst_dvdemux_push_event (demux, new_event);
}
- /* if successfull seek, we update our real segment and push
+ /* if successful seek, we update our real segment and push
* out the new segment. */
if (res) {
memcpy (&demux->time_segment, &seeksegment, sizeof (GstSegment));
demux->need_segment = FALSE;
}
- /* and restart the task in case it got paused explicitely or by
+ /* and restart the task in case it got paused explicitly or by
* the FLUSH_START event we pushed out. */
gst_pad_start_task (demux->sinkpad, (GstTaskFunction) gst_dvdemux_loop,
demux->sinkpad, NULL);
if (!gst_dvdemux_handle_pull_seek (dvdemux, dvdemux->videosrcpad,
event)) {
GST_ELEMENT_WARNING (dvdemux, STREAM, DECODE, (NULL),
- ("Error perfoming initial seek"));
+ ("Error performing initial seek"));
}
gst_event_unref (event);
* of the FLAC stream.
*
* Applications can set the tags to write using the #GstTagSetter interface.
- * Tags contained withing the FLAC bitstream will be picked up
+ * Tags contained within the FLAC bitstream will be picked up
* automatically (and merged according to the merge mode set via the tag
* setter interface).
*
/* ERRORS */
no_pixbuf:
{
- GST_ELEMENT_ERROR (filter, STREAM, DECODE, (NULL), ("error geting pixbuf"));
+ GST_ELEMENT_ERROR (filter, STREAM, DECODE, (NULL),
+ ("error getting pixbuf"));
return GST_FLOW_ERROR;
}
channels_not_supported:
"ignore-alpha", G_BINDING_BIDIRECTIONAL | G_BINDING_SYNC_CREATE);
/* Take the floating ref, otherwise the destruction of the container will
- * make this widget disapear possibly before we are done. */
+ * make this widget disappear possibly before we are done. */
gst_object_ref_sink (gtk_sink->widget);
gtk_sink->widget_destroy_id = g_signal_connect (gtk_sink->widget, "destroy",
G_CALLBACK (widget_destroy_cb), gtk_sink);
* @client: a #GstJackAudioClient
* @active: new mode for the client
*
- * Activate or deactive @client. When a client is activated it will receive
+ * Activate or deactivate @client. When a client is activated it will receive
* callbacks when data should be processed.
*
* Returns: 0 if all ok.
gstjpeg.c \
gstjpegenc.c \
gstjpegdec.c
-# deprected gstsmokeenc.c smokecodec.c gstsmokedec.c
+# deprecated gstsmokeenc.c smokecodec.c gstsmokedec.c
libgstjpeg_la_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(GST_BASE_CFLAGS) $(GST_CFLAGS)
libgstjpeg_la_LIBADD = $(GST_PLUGINS_BASE_LIBS) $(GST_BASE_LIBS) $(GST_LIBS) -lgstvideo-$(GST_API_VERSION) \
static GMutex pa_shared_resource_mutex;
/* We keep a custom ringbuffer that is backed up by data allocated by
- * pulseaudio. We must also overide the commit function to write into
+ * pulseaudio. We must also override the commit function to write into
* pulseaudio memory instead. */
struct _GstPulseRingBuffer
{
gst_pulsering_context_subscribe_cb, pctx);
/* try to connect to the server and wait for completion, we don't want to
- * autospawn a deamon */
+ * autospawn a daemon */
GST_LOG_OBJECT (psink, "connect to server %s",
GST_STR_NULL (psink->server));
if (pa_context_connect (pctx->context, psink->server,
if (pbuf->in_commit && (length >= rbuf->spec.segsize)) {
/* only signal when we are waiting in the commit thread
- * and got request for atleast a segment */
+ * and got request for at least a segment */
pa_threaded_mainloop_signal (mainloop, 0);
}
}
if (pbuf->is_pcm)
gst_pulse_cvolume_from_linear (&v, pbuf->channels, volume);
else
- /* FIXME: this will eventually be superceded by checks to see if the volume
+ /* FIXME: this will eventually be superseded by checks to see if the volume
* is readable/writable */
goto unlock;
static const gchar *const map[] = {
GST_TAG_TITLE, PA_PROP_MEDIA_TITLE,
- /* might get overriden in the next iteration by GST_TAG_ARTIST */
+ /* might get overridden in the next iteration by GST_TAG_ARTIST */
GST_TAG_PERFORMER, PA_PROP_MEDIA_ARTIST,
GST_TAG_ARTIST, PA_PROP_MEDIA_ARTIST,
#include <QtQuick/QQuickWindow>
#include <QOpenGLFramebufferObject>
-/* compatability definitions... */
+/* compatibility definitions... */
#ifndef GL_READ_FRAMEBUFFER
#define GL_READ_FRAMEBUFFER 0x8CA8
#endif
struct _GstDV1394Src {
GstPushSrc element;
- // consecutive=2, skip=4 will skip 4 frames, then let 2 consecutive ones thru
+ // consecutive=2, skip=4 will skip 4 frames, then let 2 consecutive ones through
gint consecutive;
gint skip;
gboolean drop_incomplete;
GST_LOG ("Processing tag %s (num=%u)", tag, num_tags);
if (num_tags > 1 && gst_tag_is_fixed (tag)) {
- GST_WARNING ("Multiple occurences of fixed tag '%s', ignoring some", tag);
+ GST_WARNING ("Multiple occurrences of fixed tag '%s', ignoring some", tag);
num_tags = 1;
}
gst_wavpack_enc_set_wp_config (GstWavpackEnc * enc)
{
enc->wp_config = g_new0 (WavpackConfig, 1);
- /* set general stream informations in the WavpackConfig */
+ /* set general stream information in the WavpackConfig */
enc->wp_config->bytes_per_sample = GST_ROUND_UP_8 (enc->depth) / 8;
enc->wp_config->bits_per_sample = enc->depth;
enc->wp_config->num_channels = enc->channels;
WavpackCloseFile (enc->wp_context);
goto config_failed;
}
- GST_DEBUG_OBJECT (enc, "setup of encoding context successfull");
+ GST_DEBUG_OBJECT (enc, "setup of encoding context successful");
}
if (enc->need_channel_remap) {
* f(x) = ax^2 + bx + c
*/
- /* FIXME: If treshold is the same as the maximum
+ /* FIXME: If threshold is the same as the maximum
* we need to raise it a bit to prevent
* division by zero. */
if (threshold == 1.0)
* plus some more space for the inverse FFT below. \
* \
* The samples are put at offset kernel_length, the inverse FFT \
- * overwrites everthing from offset 0 to length-kernel_length+1, keeping \
+ * overwrites everything from offset 0 to length-kernel_length+1, keeping \
* the last kernel_length-1 samples for copying to the next processing \
* step. \
*/ \
/**
* GstAudioFXBaseFIRFilter:drain-on-changes:
*
- * Whether the filter should be drained when its coeficients change
+ * Whether the filter should be drained when its coefficients change
*
* Note: Currently this only works if the kernel size is not changed!
* Support for drainless kernel size changes will be added in the future.
*/
g_object_class_install_property (gobject_class, PROP_DRAIN_ON_CHANGES,
g_param_spec_boolean ("drain-on-changes", "Drain on changes",
- "Drains the filter when its coeficients change",
+ "Drains the filter when its coefficients change",
DEFAULT_DRAIN_ON_CHANGES,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
gboolean low_latency; /* work in slower low latency mode */
gboolean drain_on_changes; /* If the filter should be drained when
- * coeficients change */
+ * coefficients change */
/* < private > */
GstAudioFXBaseFIRFilterProcessFunc process;
* for the best overlap position. Scaletempo uses a statistical cross
* correlation (roughly a dot-product). Scaletempo consumes most of its CPU
* cycles here. One can use the #GstScaletempo:search property to tune how far
- * the algoritm looks.
+ * the algorithm looks.
*
*/
* register the element factories and pad templates
* register the features
*
- * exchange the string 'plugin' with your elemnt name
+ * exchange the string 'plugin' with your element name
*/
static gboolean
return res;
}
-/* Hack to make initial linking work; ideally, this'd work even when
+/* Hack to make initial linking work; ideally, this would work even when
* no target has been assigned to the ghostpad yet. */
static void
gst_auto_detect_reset (GstAutoDetect * self)
if (map.size < 8)
goto too_small;
- /* check tag first before blindy trying to read 'size' bytes */
+ /* check tag first before blindly trying to read 'size' bytes */
tag = GST_READ_UINT32_LE (map.data);
size = GST_READ_UINT32_LE (map.data + 4);
if (tag == GST_RIFF_TAG_LIST) {
if (!gst_avi_demux_parse_avih (avi, sub, &avi->avih))
goto header_wrong_avih;
- GST_DEBUG_OBJECT (avi, "AVI header ok, reading elemnts from header");
+ GST_DEBUG_OBJECT (avi, "AVI header ok, reading elements from header");
/* now, read the elements from the header until the end */
while (gst_riff_parse_chunk (GST_ELEMENT_CAST (avi), buf, &offset, &tag,
}
if (avi->segment.rate > 0.0) {
- /* only check this for fowards playback for now */
+ /* only check this for forwards playback for now */
if (keyframe && GST_CLOCK_TIME_IS_VALID (avi->segment.stop)
&& (timestamp > avi->segment.stop)) {
goto eos_stop;
gst_tag_list_foreach (tags, gst_avi_mux_write_tag, &bw);
if (info + 8 == gst_byte_writer_get_pos (&bw)) {
- /* no tags writen, remove the empty INFO LIST as it is useless
+ /* no tags written, remove the empty INFO LIST as it is useless
* and prevents playback in vlc */
gst_byte_writer_set_pos (&bw, info - 4);
} else {
*
* The progressreport element can be put into a pipeline to report progress,
* which is done by doing upstream duration and position queries in regular
- * (real-time) intervals. Both the interval and the prefered query format
+ * (real-time) intervals. Both the interval and the preferred query format
* can be specified via the #GstProgressReport:update-freq and the
* #GstProgressReport:format property.
*
0, G_MAXUINT32, DEFAULT_SEED,
G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, PROP_MINIMUM,
- g_param_spec_int ("min", "mininum", "mininum buffer size",
+ g_param_spec_int ("min", "minimum", "minimum buffer size",
0, G_MAXINT32, DEFAULT_MIN,
G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, PROP_MAXIMUM,
*
* Some methods provide parameters which can be set by getting
* the "method" child via the #GstChildProxy interface and
- * setting the appropiate properties on it.
+ * setting the appropriate properties on it.
*
* * tomsmocomp Motion Adaptive: Motion Search
* * greedyh Motion Adaptive: Advanced Detection
break;
}
- /* make complete matches more signficant */
+ /* make complete matches more significant */
if (k == length)
k += GST_DEINTERLACE_MAX_BUFFER_STATE_HISTORY;
/* sse.h
- Streaming SIMD Extenstions (a.k.a. Katmai New Instructions)
+ Streaming SIMD Extensions (a.k.a. Katmai New Instructions)
GCC interface library for IA32.
To use this library, simply include this header file
/* Store FENCE - enforce ordering of stores before fence vs. stores
- occuring after fence in source code.
+ occurring after fence in source code.
*/
#ifdef SSE_TRACE
#define sfence() \
#ifndef IS_C
#ifdef SKIP_SEARCH
- "movq %%mm6, %%mm0\n\t" // just use the results of our wierd bob
+ "movq %%mm6, %%mm0\n\t" // just use the results of our weird bob
#else
return 0;
#else
#ifdef SKIP_SEARCH
- out[0] = best[0]; // just use the results of our wierd bob
+ out[0] = best[0]; // just use the results of our weird bob
out[1] = best[1];
#else
diff[0] = diff[0] - MIN (diff[0], 10) - 4;
// -*- c++ -*-
// First, get and save our possible Bob values
- // Assume our pixels are layed out as follows with x the calc'd bob value
+ // Assume our pixels are laid out as follows with x the calc'd bob value
// and the other pixels are from the current field
//
// j a b c k current field
// -*- c++ -*-
// First, get and save our possible Bob values
- // Assume our pixels are layed out as follows with x the calc'd bob value
+ // Assume our pixels are laid out as follows with x the calc'd bob value
// and the other pixels are from the current field
//
// j a b c k current field
/*
* The MPEG2 spec uses a slightly harsher filter, they specify
* [-1 8 2 8 -1]. ffmpeg uses a similar filter but with more of
- * a tendancy to blur than to use the local information. The
+ * a tendency to blur than to use the local information. The
* filter taps here are: [-1 4 2 4 -1].
*/
*
* * `type` (G_TYPE_INT, 0-1): The application uses this field to specify which of the two methods
* specified in RFC 2833 to use. The value should be 0 for tones and 1 for
- * named events. Tones are specified by their frequencies and events are specied
+ * named events. Tones are specified by their frequencies and events are specified
* by their number. This element can only take events as input. Do not confuse
* with "method" which specified the output.
*
*
* * `type` (G_TYPE_INT, 0-1): Which of the two methods
* specified in RFC 2833 to use. The value should be 0 for tones and 1 for
- * named events. Tones are specified by their frequencies and events are specied
+ * named events. Tones are specified by their frequencies and events are specified
* by their number. This element currently only recognizes events.
* Do not confuse with "method" which specified the output.
*
*
* * `type` (G_TYPE_INT, 0-1): The application uses this field to specify which of the two methods
* specified in RFC 2833 to use. The value should be 0 for tones and 1 for
- * named events. Tones are specified by their frequencies and events are specied
+ * named events. Tones are specified by their frequencies and events are specified
* by their number. This element can only take events as input. Do not confuse
* with "method" which specified the output.
*
g *= g;
b *= b;
r = r >> 5; /* To lack the lower bit for saturated addition, */
- g = g >> 5; /* devide the value with 32, instead of 16. It is */
+ g = g >> 5; /* divide the value with 32, instead of 16. It is */
b = b >> 4; /* same as `v2 &= 0xfefeff' */
if (r > 127)
r = 127;
#include "gstquark.h"
#include "gsteffectv.h"
-/* number of frames of time-buffer. It should be as a configurable paramater */
+/* number of frames of time-buffer. It should be as a configurable parameter */
/* This number also must be 2^n just for the speed. */
#define PLANES 16
gboolean ret = FALSE;
gint32 ddts = dts - *last;
if (!discont && ddts <= -RESYNC_THRESHOLD) {
- /* Theoretically, we should use substract the duration of the last buffer,
+ /* Theoretically, we should subtract the duration of the last buffer,
but this demuxer sends no durations on buffers, not sure if it cannot
know, or just does not care to calculate. */
*offset -= ddts * GST_MSECOND;
demux->seek_event = gst_event_ref (event);
demux->seek_time = seeksegment.position;
demux->state = FLV_STATE_SEEK;
- /* do not know about succes yet, but we did care and handled it */
+ /* do not know about success yet, but we did care and handled it */
ret = TRUE;
goto exit;
}
}
}
-/* If we can pull that's prefered */
+/* If we can pull that's preferred */
static gboolean
gst_flv_demux_sink_activate (GstPad * sinkpad, GstObject * parent)
{
* The application that wants to index the stream will create a new index object
* using gst_index_new() or gst_index_factory_make(). The index is assigned to a
* specific element, a bin or the whole pipeline. This will cause indexable
- * elements to add entires to the index while playing.
+ * elements to add entries to the index while playing.
*/
/* FIXME: complete gobject annotations */
/**
* gst_index_commit:
* @index: the index to commit
- * @id: the writer that commited the index
+ * @id: the writer that committed the index
*
* Tell the index that the writer with the given id is done
* with this index and is not going to write any more entries
* gst_index_add_associationv:
* @index: the index to add the entry to
* @id: the id of the index writer
- * @flags: optinal flags for this entry
+ * @flags: optional flags for this entry
* @n: number of associations
* @list: list of associations
*
* gst_index_add_association:
* @index: the index to add the entry to
* @id: the id of the index writer
- * @flags: optinal flags for this entry
+ * @flags: optional flags for this entry
* @format: the format of the value
* @value: the value
* @...: other format/value pairs or 0 to end the list
}
info_from_caps_failed:
{
- GST_ERROR_OBJECT (self, "coud not get info from caps");
+ GST_ERROR_OBJECT (self, "could not get info from caps");
return FALSE;
}
}
info_from_caps_failed:
{
- GST_ERROR_OBJECT (self, "coud not get info from caps");
+ GST_ERROR_OBJECT (self, "could not get info from caps");
return FALSE;
}
}
* to get all formats that are possible up- and downstream.
*
* For the pad for which the caps are requested we don't remove the channel
- * informations as they must be in the returned caps and incompatibilities
+ * information as they must be in the returned caps and incompatibilities
* will be detected here already
*/
ret = gst_caps_new_any ();
atom_moov_get_trak_count (qtmux->moov));
GST_OBJECT_UNLOCK (qtmux);
- /* Now that we know how much reserved space is targetted,
+ /* Now that we know how much reserved space is targeted,
* output a free atom to fill the extra reserved */
ret = gst_qt_mux_send_free_atom (qtmux, &qtmux->header_size,
qtmux->reserved_moov_size - qtmux->base_moov_size, FALSE);
#define QTSAMPLE_DTS(stream,sample) (QTSTREAMTIME_TO_GSTTIME((stream), (sample)->timestamp))
/* timestamp + offset + cslg_shift is the outgoing PTS */
#define QTSAMPLE_PTS(stream,sample) (QTSTREAMTIME_TO_GSTTIME((stream), (sample)->timestamp + (stream)->cslg_shift + (sample)->pts_offset))
-/* timestamp + offset is the PTS used for internal seek calcuations */
+/* timestamp + offset is the PTS used for internal seek calculations */
#define QTSAMPLE_PTS_NO_CSLG(stream,sample) (QTSTREAMTIME_TO_GSTTIME((stream), (sample)->timestamp + (sample)->pts_offset))
/* timestamp + duration - dts is the duration */
#define QTSAMPLE_DUR_DTS(stream, sample, dts) (QTSTREAMTIME_TO_GSTTIME ((stream), (sample)->timestamp + (sample)->duration) - (dts))
/* Maps the @segment to the qt edts internal segments and pushes
- * the correspnding segment event.
+ * the corresponding segment event.
*
* If it ends up being at a empty segment, a gap will be pushed and the next
* edts segment will be activated in sequence.
if (!qtdemux->upstream_format_is_time && !qtdemux->first_moof_already_parsed
&& !qtdemux->received_seek && GST_CLOCK_TIME_IS_VALID (min_dts)
&& min_dts != 0) {
- /* Unless the user has explictly requested another seek, perform an
+ /* Unless the user has explicitly requested another seek, perform an
* internal seek to the time specified in the tfdt.
*
* This way if the user opens a file where the first tfdt is 1 hour
GST_DEBUG_OBJECT (stream->pad, "here");
- /* Check if we have somethig compatible */
+ /* Check if we have something compatible */
stsd_entry = CUR_STREAM (stream);
switch (stsd_entry->fourcc) {
case FOURCC_c608:{
*
* To keep track of the current buffer timestamp and starting point
* we use gst_adapter_prev_pts that gives us the PTS and the distance
- * from the beggining of the buffer, with the distance and demux->offset
+ * from the beginning of the buffer, with the distance and demux->offset
* we know if it is still the same buffer or not.
*/
prev_pts = gst_adapter_prev_pts (demux->adapter, &dist);
* the same format. */
/* video sample description size is 86 bytes without extension.
* node_length have to be bigger than 86 bytes because video sample
- * description can include extenstions such as esds, fiel, glbl, etc. */
+ * description can include extensions such as esds, fiel, glbl, etc. */
if (node_length < 86) {
GST_WARNING_OBJECT (qtdemux, "%" GST_FOURCC_FORMAT
" sample description length too short (%u < 86)",
}
} else {
/* Ensure the cslg_shift value is consistent so we can use it
- * unconditionnally to produce TS and Segment */
+ * unconditionally to produce TS and Segment */
stream->cslg_shift = 0;
}
else
style = "iso";
- /* santize the name for the caps. */
+ /* sanitize the name for the caps. */
for (i = 0; i < 4; i++) {
guint8 d = data[4 + i];
if (g_ascii_isalnum (d))
* 026 Hungarian
* 027 Estonian
* 028 Latvian / Lettish
- * 029 Lappish / Saamish (used code for Nothern Sami)
+ * 029 Lappish / Saamish (used code for Northern Sami)
*/
"urd", "hin", "tha", "kor", "lit", "pol", "hun", "est", "lav", "sme",
* input sample data enters in *in_data and is not modified
* this filter only accepts signed audio data, so mid level is always 0
*
- * for integers, this code considers the non-existant positive max value to be
+ * for integers, this code considers the non-existent positive max value to be
* full-scale; so max-1 will not map to 1.0
*/
/**
* gst_ebml_write_buffer:
* @ebml: #GstEbmlWrite
- * @buf: #GstBuffer cointaining the data.
+ * @buf: #GstBuffer containing the data.
*
* Write binary element (see gst_ebml_write_buffer_header).
*/
/* QoS for video track with an index. the assumption is that
index entries point to keyframes, but if that is not true we
- will instad skip until the next keyframe. */
+ will instead skip until the next keyframe. */
if (GST_CLOCK_TIME_IS_VALID (lace_time) &&
stream->type == GST_MATROSKA_TRACK_TYPE_VIDEO &&
stream->index_table && demux->common.segment.rate > 0.0) {
}
}
-/* returns TRUE if we truely are in error state, and should give up */
+/* returns TRUE if we truly are in error state, and should give up */
static inline GstFlowReturn
gst_matroska_demux_check_parse_error (GstMatroskaDemux * demux)
{
/**
* gst_matroska_mux_reset:
- * @element: #GstMatroskaMux that should be reseted.
+ * @element: #GstMatroskaMux that should be reset.
*
* Reset matroska muxer back to initial state.
*/
if (cdata->pad == pad) {
/*
* observed duration, this will remain GST_CLOCK_TIME_NONE
- * only if the pad is resetted
+ * only if the pad is reset
*/
GstClockTime collected_duration = GST_CLOCK_TIME_NONE;
GstMatroskaPad *collect_pad;
/*
* observed duration, this will never remain GST_CLOCK_TIME_NONE
- * since this means buffer without timestamps that is not possibile
+ * since this means buffer without timestamps that is not possible
*/
GstClockTime collected_duration = GST_CLOCK_TIME_NONE;
* chained oggs. Fixes #334082
* TODO: Test samples: http://www.matroska.org/samples/matrix/index.html
* http://samples.mplayerhq.hu/Matroska/
- * TODO: check if parseing is done correct for all codecs according to spec
+ * TODO: check if parsing is done correctly for all codecs according to spec
* TODO: seeking with incomplete or without CUE
*/
/* QoS for video track with an index. the assumption is that
index entries point to keyframes, but if that is not true we
- will instad skip until the next keyframe. */
+ will instead skip until the next keyframe. */
if (GST_CLOCK_TIME_IS_VALID (lace_time) &&
stream->type == GST_MATROSKA_TRACK_TYPE_VIDEO &&
stream->index_table && parse->common.segment.rate > 0.0) {
}
#if 0
-/* returns TRUE if we truely are in error state, and should give up */
+/* returns TRUE if we truly are in error state, and should give up */
static inline gboolean
gst_matroska_parse_check_parse_error (GstMatroskaParse * parse)
{
goto exit_error;
switch (id) {
- /* is our read version uptodate? */
+ /* is our read version up-to-date? */
case GST_EBML_ID_EBMLREADVERSION:{
guint64 num;
/* ICRA The ICRA content rating for parental control. (Previously RSACi) */
/* Temporal Information */
- GST_MATROSKA_TAG_ID_DATE_RELEASED, GST_TAG_DATE}, { /* The time that the item was originaly released. This is akin to the TDRL tag in ID3. */
+ GST_MATROSKA_TAG_ID_DATE_RELEASED, GST_TAG_DATE}, { /* The time that the item was originally released. This is akin to the TDRL tag in ID3. */
GST_MATROSKA_TAG_ID_DATE_RECORDED, GST_TAG_DATE}, { /* The time that the recording began. This is akin to the TDRC tag in ID3. */
GST_MATROSKA_TAG_ID_DATE_ENCODED, GST_TAG_DATE}, { /* The time that the encoding of this item was completed. This is akin to the TDEN tag in ID3. */
GST_MATROSKA_TAG_ID_DATE_TAGGED, GST_TAG_DATE}, { /* The time that the tags were done for this item. This is akin to the TDTG tag in ID3. */
- GST_MATROSKA_TAG_ID_DATE_DIGITIZED, GST_TAG_DATE}, { /* The time that the item was tranfered to a digital medium. This is akin to the IDIT tag in RIFF. */
+ GST_MATROSKA_TAG_ID_DATE_DIGITIZED, GST_TAG_DATE}, { /* The time that the item was transferred to a digital medium. This is akin to the IDIT tag in RIFF. */
GST_MATROSKA_TAG_ID_DATE_WRITTEN, GST_TAG_DATE}, { /* The time that the writing of the music/script began. */
GST_MATROSKA_TAG_ID_DATE_PURCHASED, GST_TAG_DATE}, { /* Information on when the file was purchased (see also purchase tags). */
GST_MATROSKA_TAG_ID_DATE, GST_TAG_DATE}, { /* Matroska spec does NOT have this tag! Dunno what it was doing here, probably for compatibility. */
/* Spacial Information */
GST_MATROSKA_TAG_ID_RECORDING_LOCATION, GST_TAG_GEO_LOCATION_NAME}, { /* The location where the item was recorded. The countries corresponding to the string, same 2 octets as in Internet domains, or possibly ISO-3166. This code is followed by a comma, then more detailed information such as state/province, another comma, and then city. For example, "US, Texas, Austin". This will allow for easy sorting. It is okay to only store the country, or the country and the state/province. More detailed information can be added after the city through the use of additional commas. In cases where the province/state is unknown, but you want to store the city, simply leave a space between the two commas. For example, "US, , Austin". */
- /* COMPOSITION_LOCATION Location that the item was originaly designed/written. The countries corresponding to the string, same 2 octets as in Internet domains, or possibly ISO-3166. This code is followed by a comma, then more detailed information such as state/province, another comma, and then city. For example, "US, Texas, Austin". This will allow for easy sorting. It is okay to only store the country, or the country and the state/province. More detailed information can be added after the city through the use of additional commas. In cases where the province/state is unknown, but you want to store the city, simply leave a space between the two commas. For example, "US, , Austin". */
+ /* COMPOSITION_LOCATION Location that the item was originally designed/written. The countries corresponding to the string, same 2 octets as in Internet domains, or possibly ISO-3166. This code is followed by a comma, then more detailed information such as state/province, another comma, and then city. For example, "US, Texas, Austin". This will allow for easy sorting. It is okay to only store the country, or the country and the state/province. More detailed information can be added after the city through the use of additional commas. In cases where the province/state is unknown, but you want to store the city, simply leave a space between the two commas. For example, "US, , Austin". */
/* COMPOSER_NATIONALITY Nationality of the main composer of the item, mostly for classical music. The countries corresponding to the string, same 2 octets as in Internet domains, or possibly ISO-3166. */
/* Personal */
/* ENCODER_SETTINGS A list of the settings used for encoding this item. No specific format. */
GST_MATROSKA_TAG_ID_BPS, GST_TAG_BITRATE}, {
GST_MATROSKA_TAG_ID_BITSPS, GST_TAG_BITRATE}, { /* Matroska spec does NOT have this tag! Dunno what it was doing here, probably for compatibility. */
- /* WONTFIX (already handled in another way): FPS The average frames per second of the specified item. This is typically the average number of Blocks per second. In the event that lacing is used, each laced chunk is to be counted as a seperate frame. */
+ /* WONTFIX (already handled in another way): FPS The average frames per second of the specified item. This is typically the average number of Blocks per second. In the event that lacing is used, each laced chunk is to be counted as a separate frame. */
GST_MATROSKA_TAG_ID_BPM, GST_TAG_BEATS_PER_MINUTE}, {
/* MEASURE In music, a measure is a unit of time in Western music like "4/4". It represents a regular grouping of beats, a meter, as indicated in musical notation by the time signature.. The majority of the contemporary rock and pop music you hear on the radio these days is written in the 4/4 time signature. */
/* TUNING It is saved as a frequency in hertz to allow near-perfect tuning of instruments to the same tone as the musical piece (e.g. "441.34" in Hertz). The default value is 440.0 Hz. */
* Copyright (C) 1998-2001 Andy Lo A Foe <andy@alsaplayer.org>
* Original code by Tinic Uro
*
- * This code is copied from Alsaplayer. The orginal code was by Tinic Uro and under
+ * This code is copied from Alsaplayer. The original code was by Tinic Uro and under
* the BSD license without an advertising clause. Andy Lo A Foe then relicensed the
* code when he used it for Alsaplayer to GPL with Tinic's permission. Richard Boulton
* then took this code and made a GPL plugin out of it.
(st = gst_caps_get_structure (new_caps, 0))
&& gst_structure_get_fraction (st, "framerate", &src->fps_n,
&src->fps_d)) {
- GST_INFO_OBJECT (src, "Seting framerate to %d/%d", src->fps_n,
+ GST_INFO_OBJECT (src, "Setting framerate to %d/%d", src->fps_n,
src->fps_d);
} else {
src->fps_n = -1;
if (reader->prep_state == PART_STATE_PREPARING_COLLECT_STREAMS &&
!part_pad->seen_buffer) {
/* If this is the first buffer on the pad in the collect_streams state,
- * then calculate inital offset based on running time of this segment */
+ * then calculate initial offset based on running time of this segment */
part_pad->initial_ts_offset =
part_pad->orig_segment.start + part_pad->orig_segment.base -
part_pad->orig_segment.time;
/* On ENDING_FILE, the reference stream sends a command to start a new
* fragment, then releases the GOP for output in the new fragment.
- * If somes streams received no buffer during the last GOP that overran,
+ * If some streams received no buffer during the last GOP that overran,
* because its next buffer has a timestamp bigger than
* ctx->max_in_running_time, its queue is empty. In that case the only
* way to wakeup the output thread is by injecting an event in the
SPLITMUX_SRC_UNLOCK (splitmux);
}
case GST_EVENT_RECONFIGURE:{
- GST_DEBUG_OBJECT (splitmux, "reconfigure evnet on pad %" GST_PTR_FORMAT,
+ GST_DEBUG_OBJECT (splitmux, "reconfigure event on pad %" GST_PTR_FORMAT,
pad);
SPLITMUX_SRC_PADS_RLOCK (splitmux);
return TRUE;
if (next_wildcard_reached)
/* the forthcoming pattern substring up to the next wildcard has
- * been matched, but a mismatch occoured for the rest of the
+ * been matched, but a mismatch occurred for the rest of the
* pattern, following the next wildcard.
* there's no need to advance the current match position any
* further if the rest pattern will not match.
G_BEGIN_DECLS
/* Reference level (in dBSPL). The 2001 proposal specifies 83. This was
- * changed later in all implementations to 89, which is the new, offical value:
+ * changed later in all implementations to 89, which is the new, official value:
* David Robinson acknowledged the change but didn't update the website yet. */
#define RG_REFERENCE_LEVEL 89.
* payload: (int) [0, 127]
For audio and video, these will normally be a media payload type as
- defined in the RTP Audio/Video Profile. For dynamicaly allocated
+ defined in the RTP Audio/Video Profile. For dynamically allocated
payload types, this value will be >= 96 and the encoding-name must be
set.
The receiver now displays an h263 image. Since there is no jitterbuffer in the
pipeline, frames will be displayed at the time when they are received. This can
- result in jerky playback in the case of high network jitter or currupted video
+ result in jerky playback in the case of high network jitter or corrupted video
when packets are dropped or reordered.
Stream a quicktime file with mpeg4 video and AAC audio on port 5000 and port
recommended to use a gstrtpjitterbuffer after the udpsrc elements.
Even when sync is enabled, the two different streams will not play synchronised
- against eachother because the receiver does not have enough information to
+ against each other because the receiver does not have enough information to
perform this task. For this you need to add the rtpbin element in both the
sender and receiver pipeline and use additional sources and sinks to transmit
RTCP packets used for inter-stream synchronisation.
/* Process one RTP packet. Accumulate RTP payload in the proper place in a DV
* frame, and return that frame if we detect a new frame, or NULL otherwise.
- * We assume a DV frame is 144000 bytes. That should accomodate PAL as well as
+ * We assume a DV frame is 144000 bytes. That should accommodate PAL as well as
* NTSC.
*/
static GstBuffer *
encoding_name =
g_strdup (gst_structure_get_string (structure, "encoding-name"));
- /* if we managed to negotiate to AAL2, we definatly are going to do AAL2
+ /* if we managed to negotiate to AAL2, we definitely are going to do AAL2
* encoding. Else we only encode AAL2 when explicitly set by the
* property. */
if (g_str_has_prefix (encoding_name, "AAL2-"))
GstBuffer *paybuf;
- /* this will be the total lenght of the packet */
+ /* this will be the total length of the packet */
packet_len = gst_rtp_buffer_calc_packet_len (8 + avail, 0, 0);
/* fill one MTU or all available bytes */
return ret;
}
-/* Scans after all GOB start codes and initalizes the GOB structure with start
+/* Scans after all GOB start codes and initializes the GOB structure with start
* and end positions. */
static ParseReturn
gst_rtp_h261_pay_init_gobs (GstRtpH261Pay * pay, Gob * gobs, gint num_gobs,
gsize size, gint offset, gsize * newsize)
{
/* In order to read variable length codes at the very end of the buffer
- * wihout peeking into possibly unallocated data, we pad with extra 0's
+ * without peeking into possibly unallocated data, we pad with extra 0's
* which will generate an invalid code at the end of the buffer. */
guint pad = 4;
gsize allocsize = size + pad;
GST_DEBUG_OBJECT (rtph263pay, "Frame too large for MTU");
/*
- * Let's go trough all the data and fragment it untill end is reached
+ * Let's go through all the data and fragment it until end is reached
*/
gst_rtp_h263_pay_boundry_init (&bound, NULL, rtph263pay->data - 1, 0, 0);
* This algorithm separates large frames at synchronisation points (Segments)
* (See RFC 4629 section 6). It would be interesting to have a property such as network
* quality to select between both packetization methods */
- /* TODO Add VRC supprt (See RFC 4629 section 5.2) */
+ /* TODO Add VRC support (See RFC 4629 section 5.2) */
while (avail > 0) {
guint towrite;
*/
nalu_size = (payload[0] << 8) | payload[1];
- /* dont include nalu_size */
+ /* don't include nalu_size */
if (nalu_size > (payload_len - 2))
nalu_size = payload_len - 2;
avc = rtph264pay->stream_format == GST_H264_STREAM_FORMAT_AVC;
if (avc) {
- /* In AVC mode, there is no adapter, so nothign to drain */
+ /* In AVC mode, there is no adapter, so nothing to drain */
if (draining)
return GST_FLOW_OK;
gst_buffer_map (buffer, &map, GST_MAP_READ);
nalu_size = (payload[0] << 8) | payload[1];
- /* dont include nalu_size */
+ /* don't include nalu_size */
if (nalu_size > (payload_len - 2))
nalu_size = payload_len - 2;
*p++ = 0x11; /* huffman table 1 */
*p++ = 0; /* first DCT coeff */
*p++ = 63; /* last DCT coeff */
- *p++ = 0; /* sucessive approx. */
+ *p++ = 0; /* successive approx. */
return (p - start);
};
* @JPEG_MARKER_DRI: Define Restart Interval marker
* @JPEG_MARKER_H264: H264 marker
*
- * Identifers for markers in JPEG header
+ * Identifiers for markers in JPEG header
*/
enum _RtpJpegMarker
{
skip += data_len;
pos += data_len;
- /* update our pointers whith what we consumed */
+ /* update our pointers with what we consumed */
data += skip;
avail -= skip;
rtpmp4gdepay->last_AU_index = AU_index;
}
- /* keep track of the higest AU_index */
+ /* keep track of the highest AU_index */
if (rtpmp4gdepay->max_AU_index != -1
&& rtpmp4gdepay->max_AU_index <= AU_index) {
GST_DEBUG_OBJECT (rtpmp4gdepay, "new interleave group, flushing");
GstRTPBuffer rtp = { NULL };
GstBuffer *paybuf;
- /* this will be the total lenght of the packet */
+ /* this will be the total length of the packet */
packet_len = gst_rtp_buffer_calc_packet_len (avail, 0, 0);
/* fill one MTU or all available bytes, we need 4 spare bytes for
guint packet_len;
GstRTPBuffer rtp = { NULL };
- /* this will be the total lenght of the packet */
+ /* this will be the total length of the packet */
packet_len = gst_rtp_buffer_calc_packet_len (avail, 0, 0);
/* fill one MTU or all available bytes */
guint red_header_size = rtp_red_block_header_get_length (FALSE) +
(redundant_block ? rtp_red_block_header_get_length (TRUE) : 0);
- guint32 timestmap = gst_rtp_buffer_get_timestamp (inp_rtp);
+ guint32 timestamp = gst_rtp_buffer_get_timestamp (inp_rtp);
guint csrc_count = gst_rtp_buffer_get_csrc_count (inp_rtp);
GstBuffer *red = gst_rtp_buffer_new_allocate (red_header_size, 0, csrc_count);
guint8 *red_block_header;
gst_rtp_buffer_set_marker (&red_rtp, gst_rtp_buffer_get_marker (inp_rtp));
gst_rtp_buffer_set_payload_type (&red_rtp, self->pt);
gst_rtp_buffer_set_seq (&red_rtp, gst_rtp_buffer_get_seq (inp_rtp));
- gst_rtp_buffer_set_timestamp (&red_rtp, timestmap);
+ gst_rtp_buffer_set_timestamp (&red_rtp, timestamp);
gst_rtp_buffer_set_ssrc (&red_rtp, gst_rtp_buffer_get_ssrc (inp_rtp));
for (i = 0; i != csrc_count; ++i)
gst_rtp_buffer_set_csrc (&red_rtp, i,
rtp_red_block_set_is_redundant (red_block_header, TRUE);
rtp_red_block_set_payload_type (red_block_header, redundant_block->pt);
rtp_red_block_set_timestamp_offset (red_block_header,
- timestmap - redundant_block->timestamp);
+ timestamp - redundant_block->timestamp);
rtp_red_block_set_payload_length (red_block_header,
gst_buffer_get_size (redundant_block->payload));
return NULL;
- /* ERORRS */
+ /* ERRORS */
switch_failed:
{
GST_ELEMENT_WARNING (rtptheoradepay, STREAM, DECODE,
ret = gst_rtp_ulpfec_enc_stream_ctx_process (ctx, buffer);
- /* FIXME: does not work for mulitple ssrcs */
+ /* FIXME: does not work for multiple ssrcs */
fec->num_packets_protected = ctx->num_packets_protected;
return ret;
}
return NULL;
}
- /* ERORRS */
+ /* ERRORS */
switch_failed:
{
GST_ELEMENT_WARNING (rtpvorbisdepay, STREAM, DECODE,
goto next;
}
- /* calculate the maximim amount of bytes we can use per line */
+ /* calculate the maximum amount of bytes we can use per line */
if (offs + ((length / pgroup) * xinc) > width) {
plen = ((width - offs) * pgroup) / xinc;
GST_WARNING_OBJECT (depayload, "clipping length %d, offset %d, plen %d",
STORAGE_UNLOCK (self);
if (NULL == stream) {
- GST_ERROR_OBJECT (self, "Cant find ssrc = 0x08%x", ssrc);
+ GST_ERROR_OBJECT (self, "Can't find ssrc = 0x%08x", ssrc);
} else {
STREAM_LOCK (stream);
if (stream->queue.length > 0) {
STORAGE_UNLOCK (self);
if (NULL == stream) {
- GST_ERROR_OBJECT (self, "Cant find ssrc = 0x%x", ssrc);
+ GST_ERROR_OBJECT (self, "Can't find ssrc = 0x%x", ssrc);
} else {
STREAM_LOCK (stream);
if (stream->queue.length > 0) {
* @info: #RtpUlpFecMapInfo
*
* Unmap @info previously mapped with rtp_ulpfec_map_info_map() and unrefs the
- * buffer. For convinience can even be called even if rtp_ulpfec_map_info_map
+ * buffer. For convenience it can be called even if rtp_ulpfec_map_info_map
* returned FALSE
**/
void
#define RTP_ULPFEC_SEQ_BASE_OFFSET_MAX(L) (RTP_ULPFEC_PROTECTED_PACKETS_MAX(L) - 1)
/**
- * RtpUlpFecMapInfo: Helper wraper around GstRTPBuffer
+ * RtpUlpFecMapInfo: Helper wrapper around GstRTPBuffer
*
* @rtp: mapped RTP buffer
**/
typedef struct {
- // FIXME: it used to contain more fileds now we are left with only GstRTPBuffer.
+ // FIXME: it used to contain more fields; now we are left with only GstRTPBuffer.
// it will be nice to use it directly
GstRTPBuffer rtp;
} RtpUlpFecMapInfo;
* To use #GstRtpBin as a sender, request a send_rtp_sink_\%u pad, which will
* automatically create a send_rtp_src_\%u pad. If the session number is not provided,
* the pad from the lowest available session will be returned. The session manager will modify the
- * SSRC in the RTP packets to its own SSRC and wil forward the packets on the
+ * SSRC in the RTP packets to its own SSRC and will forward the packets on the
* send_rtp_src_\%u pad after updating its internal state.
*
* The session manager needs the clock-rate of the payload types it is handling
/* Manages the RTP stream for one SSRC.
*
- * We pipe the stream (comming from the SSRC demuxer) into a jitterbuffer.
+ * We pipe the stream (coming from the SSRC demuxer) into a jitterbuffer.
* If we see an SDES RTCP packet that links multiple SSRCs together based on a
* common CNAME, we create a GstRtpBinClient structure to group the SSRCs
* together (see below).
bin = session->bin;
- GST_DEBUG ("emiting signal for pt %u in session %u", pt, session->id);
+ GST_DEBUG ("emitting signal for pt %u in session %u", pt, session->id);
/* not in cache, send signal to request caps */
g_value_init (&args[0], GST_TYPE_ELEMENT);
/* For NTP sync we need to first get a snapshot of running_time and NTP
* time. We know at what running_time we play a certain RTP time, we also
* calculated when we would play the RTP time in the SR packet. Now we need
- * to know how the running_time and the NTP time relate to eachother. */
+ * to know how the running_time and the NTP time relate to each other. */
get_current_times (bin, &local_running_time, &local_ntpnstime);
/* see how far away the NTP time is. This is the difference between the
/* calculate the min of all deltas, ignoring streams that did not yet have a
* valid rt_delta because we did not yet receive an SR packet for those
* streams.
- * We calculate the mininum because we would like to only apply positive
+ * We calculate the minimum because we would like to only apply positive
* offsets to streams, delaying their playback instead of trying to speed up
- * other streams (which might be imposible when we have to create negative
+ * other streams (which might be impossible when we have to create negative
* latencies).
* The stream that has the smallest diff is selected as the reference stream,
* all other streams will have a positive offset to this difference. */
guint64 ext_base;
use_rtp = TRUE;
- /* signed version for convienience */
+ /* signed version for convenience */
clock_base = base_rtptime;
/* deal with possible wrap-around */
ext_base = base_rtptime;
/* ERRORS */
max_streams:
{
- GST_WARNING_OBJECT (rtpbin, "stream exeeds maximum (%d)",
+ GST_WARNING_OBJECT (rtpbin, "stream exceeds maximum (%d)",
rtpbin->max_streams);
return NULL;
}
}
}
-/* a new pad (SSRC) was created in @session. This signal is emited from the
+/* a new pad (SSRC) was created in @session. This signal is emitted from the
* payload demuxer. */
static void
new_payload_found (GstElement * element, guint pt, GstPad * pad,
payload_type_change (GstElement * element, guint pt, GstRtpBinSession * session)
{
GST_DEBUG_OBJECT (session->bin,
- "emiting signal for pt type changed to %u in session %u", pt,
+ "emitting signal for pt type changed to %u in session %u", pt,
session->id);
g_signal_emit (session->bin, gst_rtp_bin_signals[SIGNAL_PAYLOAD_TYPE_CHANGE],
}
/* If the requested name is NULL we should create a name with
- * the session number assuming we want the lowest posible session
+ * the session number assuming we want the lowest possible session
* with a free pad like the template */
static gchar *
gst_rtp_bin_get_free_pad_name (GstElement * element, GstPadTemplate * templ)
g_queue_init (&priv->gap_packets);
gst_segment_init (&priv->segment, GST_FORMAT_TIME);
- /* reset skew detection initialy */
+ /* reset skew detection initially */
rtp_jitter_buffer_reset_skew (priv->jbuf);
rtp_jitter_buffer_set_delay (priv->jbuf, priv->latency_ns);
rtp_jitter_buffer_set_buffering (priv->jbuf, FALSE);
}
/*
- * Must be called with JBUF_LOCK held, will release the LOCK when emiting the
+ * Must be called with JBUF_LOCK held, will release the LOCK when emitting the
* signal. The function returns GST_FLOW_ERROR when a parsing error happened and
* GST_FLOW_FLUSHING when the element is shutting down. On success
* GST_FLOW_OK is returned.
if (priv->rtx_delay == -1) {
/* the maximum delay for any RTX-packet is given by the latency, since
anything after that is considered lost. For various calulcations,
- (given large avg_jitter and/or packet_spacing), the resuling delay
+ (given large avg_jitter and/or packet_spacing), the resulting delay
could exceed the configured latency, ending up issuing an RTX-request
that would never arrive in time. To help this we cap the delay
for any RTX with the last possible time it could still arrive in time. */
* lost items (so that we can set discont flags and such) */
if (priv->do_lost) {
GstClockTime duration, timestamp;
- /* create paket lost event */
+ /* create packet lost event */
timestamp = apply_offset (jitterbuffer, get_pts_timeout (timer));
duration = timer->duration;
if (duration == GST_CLOCK_TIME_NONE && priv->packet_spacing > 0)
}
/*
- * This funcion implements the main pushing loop on the source pad.
+ * This function implements the main pushing loop on the source pad.
*
* It first tries to push as many buffers as possible. If there is a seqnum
* mismatch, we wait for the next timeouts.
}
}
-/* collect the info from the lastest RTCP packet and the jitterbuffer sync, do
+/* collect the info from the latest RTCP packet and the jitterbuffer sync, do
* some sanity checks and then emit the handle-sync signal with the parameters.
* This function must be called with the LOCK */
static void
* @pt: the payload type
* @pad: the pad with the new payload
*
- * Emited when a new payload type pad has been created in @demux.
+ * Emitted when a new payload type pad has been created in @demux.
*/
gst_rtp_pt_demux_signals[SIGNAL_NEW_PAYLOAD_TYPE] =
g_signal_new ("new-payload-type", G_TYPE_FROM_CLASS (klass),
* @demux: the object which received the signal
* @pt: the new payload type
*
- * Emited when the payload type changed.
+ * Emitted when the payload type changed.
*/
gst_rtp_pt_demux_signals[SIGNAL_PAYLOAD_TYPE_CHANGE] =
g_signal_new ("payload-type-change", G_TYPE_FROM_CLASS (klass),
/* get the caps for pt */
GstCaps* (*request_pt_map) (GstRtpPtDemux *demux, guint pt);
- /* signal emmited when a new PT is found from the incoming stream */
+ /* signal emitted when a new PT is found from the incoming stream */
void (*new_payload_type) (GstRtpPtDemux *demux, guint pt, GstPad * pad);
/* signal emitted when the payload type changes */
* It is an error, according to RFC4588 to have two retransmission requests for
* packets belonging to two different streams but with the same sequence number.
* Note that the default seqnum-offset value (-1, which means random) would
- * work just fine, but it is overriden here for illustration purposes.
+ * work just fine, but it is overridden here for illustration purposes.
*/
#ifdef HAVE_CONFIG_H
if (g_hash_table_lookup_extended (rtx->ssrc2_ssrc1_map,
GUINT_TO_POINTER (ssrc), NULL, &ssrc2)
&& GPOINTER_TO_UINT (ssrc2) != GPOINTER_TO_UINT (ssrc)) {
- GST_TRACE_OBJECT (rtx, "Retransmited stream %X already associated "
+ GST_TRACE_OBJECT (rtx, "Retransmitted stream %X already associated "
"to its master, %X", GPOINTER_TO_UINT (ssrc2), ssrc);
} else {
SsrcAssoc *assoc;
* The jitter may be too impatient of the rtx packet has been
* lost too.
* It does not mean we reject the event, we still want to forward
- * the request to the gstrtpsession to be translater into a FB NACK
+ * the request to the gstrtpsession to be translated into a FB NACK
*/
GST_LOG_OBJECT (rtx, "Duplicate request: seqnum: %u, ssrc: %X",
seqnum, ssrc);
GST_OBJECT_UNLOCK (rtx);
}
- /* Transfer event upstream so that the request can acutally by translated
+ /* Transfer event upstream so that the request can actually be translated
* through gstrtpsession through the network */
res = gst_pad_event_default (pad, parent, event);
break;
GST_OBJECT_LOCK (rtx);
- /* choose another ssrc for our retransmited stream */
+ /* choose another ssrc for our retransmitted stream */
if (g_hash_table_contains (rtx->rtx_ssrcs, GUINT_TO_POINTER (ssrc))) {
guint master_ssrc;
SSRCRtxData *data;
*
* * RTP packet validation based on consecutive sequence numbers.
*
- * * Maintainance of the SSRC participant database.
+ * * Maintenance of the SSRC participant database.
*
* * Keeping per participant statistics based on received RTCP packets.
*
* @ssrc: the SSRC of the pad
* @pad: the new pad.
*
- * Emited when a new SSRC pad has been created.
+ * Emitted when a new SSRC pad has been created.
*/
gst_rtp_ssrc_demux_signals[SIGNAL_NEW_SSRC_PAD] =
g_signal_new ("new-ssrc-pad",
* @ssrc: the SSRC of the pad
* @pad: the removed pad.
*
- * Emited when a SSRC pad has been removed.
+ * Emitted when a SSRC pad has been removed.
*/
gst_rtp_ssrc_demux_signals[SIGNAL_REMOVED_SSRC_PAD] =
g_signal_new ("removed-ssrc-pad",
* Cri : The time of the clock at the receiver for packet i
* D + ni : The jitter when receiving packet i
*
- * We see that the network delay is irrelevant here as we can elliminate D:
+ * We see that the network delay is irrelevant here as we can eliminate D:
*
* recv_diff(i) = (Cri + ni) - (Cr0 + n0))
*
if (!is_rtcp_time (sess, current_time, &data))
goto done;
- /* check if all the buffers are empty afer generation */
+ /* check if all the buffers are empty after generation */
all_empty = TRUE;
GST_DEBUG
GstFlowReturn rtp_session_on_timeout (RTPSession *sess, GstClockTime current_time,
guint64 ntpnstime, GstClockTime running_time);
-/* request the transmittion of an early RTCP packet */
+/* request the transmission of an early RTCP packet */
gboolean rtp_session_request_early_rtcp (RTPSession * sess, GstClockTime current_time,
GstClockTime max_delay);
* @reason: the reason for leaving
*
* Mark @src in the BYE state. This can happen when the source wants to
- * leave the sesssion or when a BYE packets has been received.
+ * leave the session or when a BYE packet has been received.
*
* This will make the source inactive.
*/
Transport header field. The server also includes its ports where RTP and RTCP
messages can be sent to.
- In the above example UDP was choosen as a transport. At this point the RTSPSrc element
- will furter configure its elements to process this stream.
+ In the above example UDP was chosen as a transport. At this point the RTSPSrc element
+ will further configure its elements to process this stream.
The RTSPSrc will create and connect an RTP session manager element and will
connect it to the src pads of the udp element. The data pad from the RTP session
*/
g_object_class_install_property (gobject_class, PROP_TLS_INTERACTION,
g_param_spec_object ("tls-interaction", "TLS interaction",
- "A GTlsInteraction object to promt the user for password or certificate",
+ "A GTlsInteraction object to prompt the user for password or certificate",
G_TYPE_TLS_INTERACTION, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
/**
}
continue;
case GST_RTSP_ENET:
- GST_DEBUG_OBJECT (src, "An ethernet problem occured.");
+ GST_DEBUG_OBJECT (src, "An ethernet problem occurred.");
default:
GST_ELEMENT_WARNING (src, RESOURCE, READ, (NULL),
("Unhandled return value %d.", res));
version_retry++;
break;
}
- /* falltrough */
+ /* fallthrough */
default:
break;
}
gst_rtsp_message_unset (&request);
gst_rtsp_message_unset (&response);
- /* exit early when we did agregate control */
+ /* exit early when we did aggregate control */
if (control)
break;
}
guint64 frames_per_interval; /* how many frames per interval */
guint64 frames_todo;
guint bands; /* number of spectrum bands */
- gint threshold; /* energy level treshold */
+ gint threshold; /* energy level threshold */
gboolean multi_channel; /* send separate channel results */
guint64 num_frames; /* frame count (1 sample per channel)
/**
* GstMultiUDPSink::send-duplicates:
*
- * When a host/port pair is added mutliple times, send the packet to the host
+ * When a host/port pair is added multiple times, send the packet to the host
* multiple times as well.
*/
g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_SEND_DUPLICATES,
sink->bytes_to_serve += size;
/* now copy the pre-filled num_buffer messages over to the next num_buffer
- * messages for the next client, where we also change the target adddress */
+ * messages for the next client, where we also change the target address */
for (i = 1; i < num_addr; ++i) {
for (j = 0; j < num_buffers; ++j) {
msgs[i * num_buffers + j] = msgs[j];
*
* The #GstUDPSrc:caps property is mainly used to give a type to the UDP packet
* so that they can be autoplugged in GStreamer pipelines. This is very useful
- * for RTP implementations where the contents of the UDP packets is transfered
+ * for RTP implementations where the contents of the UDP packets is transferred
* out-of-bounds using SDP or other means.
*
* The #GstUDPSrc:buffer-size property is used to change the default kernel
*
* A custom file descriptor can be configured with the
* #GstUDPSrc:socket property. The socket will be closed when setting
- * the element to READY by default. This behaviour can be overriden
+ * the element to READY by default. This behaviour can be overridden
* with the #GstUDPSrc:close-socket property, in which case the
* application is responsible for closing the file descriptor.
*
g_object_class_install_property (gobject_class, PROP_MULTICAST_IFACE,
g_param_spec_string ("multicast-iface", "Multicast Interface",
"The network interface on which to join the multicast group."
- "This allows multiple interfaces seperated by comma. (\"eth0,eth1\")",
+ "This allows multiple interfaces separated by comma. (\"eth0,eth1\")",
UDP_DEFAULT_MULTICAST_IFACE,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, PROP_URI,
* involves either cropping or padding.
*
* If you use autocrop there is little point in setting the other
- * properties manually because they will be overriden if the caps change,
+ * properties manually because they will be overridden if the caps change,
* but nothing stops you from doing so.
*
* Sample pipeline:
v = gst_structure_get_value (structure, "width");
if (!gst_video_box_transform_dimension_value (v, dw, &w_val)) {
GST_WARNING_OBJECT (video_box,
- "could not tranform width value with dw=%d" ", caps structure=%"
+ "could not transform width value with dw=%d" ", caps structure=%"
GST_PTR_FORMAT, dw, structure);
goto bail;
}
if (!gst_video_box_transform_dimension_value (v, dh, &h_val)) {
g_value_unset (&w_val);
GST_WARNING_OBJECT (video_box,
- "could not tranform height value with dh=%d" ", caps structure=%"
+ "could not transform height value with dh=%d" ", caps structure=%"
GST_PTR_FORMAT, dh, structure);
goto bail;
}
v = gst_structure_get_value (structure, "width");
if (!gst_video_crop_transform_dimension_value (v, dx, &w_val, direction,
w_dynamic)) {
- GST_WARNING_OBJECT (vcrop, "could not tranform width value with dx=%d"
+ GST_WARNING_OBJECT (vcrop, "could not transform width value with dx=%d"
", caps structure=%" GST_PTR_FORMAT, dx, structure);
continue;
}
if (!gst_video_crop_transform_dimension_value (v, dy, &h_val, direction,
h_dynamic)) {
g_value_unset (&w_val);
- GST_WARNING_OBJECT (vcrop, "could not tranform height value with dy=%d"
+ GST_WARNING_OBJECT (vcrop, "could not transform height value with dy=%d"
", caps structure=%" GST_PTR_FORMAT, dy, structure);
continue;
}
"height", G_TYPE_INT, height, NULL);
break;
case GST_VIDEO_ORIENTATION_CUSTOM:
- GST_WARNING_OBJECT (videoflip, "unsuported custom orientation");
+ GST_WARNING_OBJECT (videoflip, "unsupported custom orientation");
break;
default:
g_assert_not_reached ();
its color can be changed with a property.
The mixer can mix streams with different framerates and video sizes. It
uses the duration value of the buffer to schedule the rendering of the
-buffers. For streams with a different resoltion than the final output
+buffers. For streams with a different resolution than the final output
resolution one can specify the position of the top left corner where this
image should be placed with the pad properties xpos and ypos.
The overall alpha value of a stream can also be specified with a pad
* videomixer name=mix ! videoconvert ! ximagesink \
* videotestsrc ! \
* video/x-raw, framerate=\(fraction\)5/1, width=320, height=240 ! mix.
- * ]| A pipeline to demostrate bgra mixing. (This does not demonstrate alpha blending).
+ * ]| A pipeline to demonstrate bgra mixing. (This does not demonstrate alpha blending).
* |[
* gst-launch-1.0 videotestsrc pattern=1 ! \
* video/x-raw,format =I420, framerate=\(fraction\)10/1, width=100, height=100 ! \
*
* We don't do synchronized mixing so this really depends on where the
* streams where punched in and what their relative offsets are against
- * eachother which we can get from the first timestamps we see.
+ * each other which we can get from the first timestamps we see.
*
* When we add a new stream (or remove a stream) the duration might
* also become invalid again and we need to post a new DURATION
{
GstChildProxyInterface *iface = g_iface;
- GST_INFO ("intializing child proxy interface");
+ GST_INFO ("initializing child proxy interface");
iface->get_child_by_index = gst_videomixer2_child_proxy_get_child_by_index;
iface->get_children_count = gst_videomixer2_child_proxy_get_children_count;
}
wavenc->audio_length = 0x7FFF0000;
wavenc->meta_length = 0;
wavenc->sent_header = FALSE;
- /* its true because we haven't writen anything */
+ /* it's true because we haven't written anything */
wavenc->finished_properly = TRUE;
break;
default:
no_bytes_per_sample:
{
GST_ELEMENT_ERROR (wav, STREAM, FAILED, (NULL),
- ("Could not caluclate bytes per sample - invalid data"));
+ ("Could not calculate bytes per sample - invalid data"));
goto fail;
}
unknown_format:
G_GINT64_FORMAT, wav->offset, wav->end_offset, wav->dataleft);
if ((wav->dataleft == 0 || wav->dataleft < wav->blockalign)) {
- /* In case chunk size is not declared in the begining get size from the
+ /* In case chunk size is not declared in the beginning, get size from the
* file size directly */
if (wav->chunk_size == 0) {
gint64 upstream_size = 0;
if (upstream_size < wav->offset + wav->datastart)
goto found_eos;
- /* If file has updated since the beggining continue reading the file */
+ /* If the file has been updated since the beginning, continue reading the file */
wav->dataleft = upstream_size - wav->offset - wav->datastart;
wav->end_offset = upstream_size;
#!/bin/sh
#
-# Check that the code follows a consistant code style
+# Check that the code follows a consistent code style
#
# Check for existence of indent, and error out if not present.
echo "*** If you have an old version installed, it is best to remove it, although"
echo "*** you may also be able to get things to work by modifying LD_LIBRARY_PATH"],
[ echo "*** The test program failed to compile or link. See the file config.log for the"
- echo "*** exact error that occured. This usually means AALIB was incorrectly installed"
+ echo "*** exact error that occurred. This usually means AALIB was incorrectly installed"
echo "*** or that you have moved AALIB since it was installed. In the latter case, you"
echo "*** may want to edit the aalib-config script: $AALIB_CONFIG" ])
CFLAGS="$ac_save_CFLAGS"
echo "*** The FreeType test program failed to run. If your system uses"
echo "*** shared libraries and they are installed outside the normal"
echo "*** system library path, make sure the variable LD_LIBRARY_PATH"
- echo "*** (or whatever is appropiate for your system) is correctly set."
+ echo "*** (or whatever is appropriate for your system) is correctly set."
fi
fi
FT2_CFLAGS=""
glib_major_version, glib_minor_version, glib_micro_version);
printf ("*** was found! If glib-config was correct, then it is best\n");
printf ("*** to remove the old version of GLIB. You may also be able to fix the error\n");
- printf("*** by modifying your LD_LIBRARY_PATH enviroment variable, or by editing\n");
+ printf("*** by modifying your LD_LIBRARY_PATH environment variable, or by editing\n");
printf("*** /etc/ld.so.conf. Make sure you have run ldconfig if that is\n");
printf("*** required on your system.\n");
printf("*** If glib-config was wrong, set the environment variable GLIB_CONFIG\n");
printf("*** being found. The easiest way to fix this is to remove the old version\n");
printf("*** of GLIB, but you can also set the GLIB_CONFIG environment to point to the\n");
printf("*** correct copy of glib-config. (In this case, you will have to\n");
- printf("*** modify your LD_LIBRARY_PATH enviroment variable, or edit /etc/ld.so.conf\n");
+ printf("*** modify your LD_LIBRARY_PATH environment variable, or edit /etc/ld.so.conf\n");
printf("*** so that the correct libraries are found at run-time))\n");
}
}
echo "***"
echo "*** rpm --erase --nodeps gtk gtk-devel" ],
[ echo "*** The test program failed to compile or link. See the file config.log for the"
- echo "*** exact error that occured. This usually means GLIB was incorrectly installed"
+ echo "*** exact error that occurred. This usually means GLIB was incorrectly installed"
echo "*** or that you have moved GLIB since it was installed. In the latter case, you"
echo "*** may want to edit the glib-config script: $GLIB_CONFIG" ])
CFLAGS="$ac_save_CFLAGS"
AC_MSG_RESULT($_cv_gst_fionread_in_sys_ioctl)
if test "$_cv_gst_fionread_in_sys_ioctl" = "yes"; then
- AC_DEFINE([HAVE_FIONREAD_IN_SYS_IOCTL], 1, [FIONREAD ioctl found in sys/ioclt.h])
+ AC_DEFINE([HAVE_FIONREAD_IN_SYS_IOCTL], 1, [FIONREAD ioctl found in sys/ioctl.h])
else
echo "*** If you have an old version installed, it is best to remove it, although"
echo "*** you may also be able to get things to work by modifying LD_LIBRARY_PATH"],
[ echo "*** The test program failed to compile or link. See the file config.log for the"
- echo "*** exact error that occured. This usually means EBML was incorrectly installed"
+ echo "*** exact error that occurred. This usually means EBML was incorrectly installed"
echo "*** or that you have moved EBML since it was installed." ])
CFLAGS="$ac_save_CFLAGS"
LIBS="$ac_save_LIBS"
echo "*** If you have an old version installed, it is best to remove it, although"
echo "*** you may also be able to get things to work by modifying LD_LIBRARY_PATH"],
[ echo "*** The test program failed to compile or link. See the file config.log for the"
- echo "*** exact error that occured. This usually means SDL was incorrectly installed"
+ echo "*** exact error that occurred. This usually means SDL was incorrectly installed"
echo "*** or that you have moved SDL since it was installed. In the latter case, you"
echo "*** may want to edit the sdl-config script: $SDL_CONFIG" ])
CFLAGS="$ac_save_CFLAGS"
echo "*** If you have an old version installed, it is best to remove it, although"
echo "*** you may also be able to get things to work by modifying LD_LIBRARY_PATH"],
[ echo "*** The test program failed to compile or link. See the file config.log for the"
- echo "*** exact error that occured. This usually means Shout2 was incorrectly installed"
+ echo "*** exact error that occurred. This usually means Shout2 was incorrectly installed"
echo "*** or that you have moved Shout2 since it was installed. In the latter case, you"
echo "*** may want to edit the shout-config script: $SHOUT2_CONFIG" ])
CFLAGS="$ac_save_CFLAGS"
gtk_major_version, gtk_minor_version, gtk_micro_version);
printf ("*** was found! If gtk-config was correct, then it is best\n");
printf ("*** to remove the old version of GTK+. You may also be able to fix the error\n");
- printf("*** by modifying your LD_LIBRARY_PATH enviroment variable, or by editing\n");
+ printf("*** by modifying your LD_LIBRARY_PATH environment variable, or by editing\n");
printf("*** /etc/ld.so.conf. Make sure you have run ldconfig if that is\n");
printf("*** required on your system.\n");
printf("*** If gtk-config was wrong, set the environment variable GTK_CONFIG\n");
printf("*** being found. The easiest way to fix this is to remove the old version\n");
printf("*** of GTK+, but you can also set the GTK_CONFIG environment to point to the\n");
printf("*** correct copy of gtk-config. (In this case, you will have to\n");
- printf("*** modify your LD_LIBRARY_PATH enviroment variable, or edit /etc/ld.so.conf\n");
+ printf("*** modify your LD_LIBRARY_PATH environment variable, or edit /etc/ld.so.conf\n");
printf("*** so that the correct libraries are found at run-time))\n");
}
}
echo "***"
echo "*** rpm --erase --nodeps gtk gtk-devel" ],
[ echo "*** The test program failed to compile or link. See the file config.log for the"
- echo "*** exact error that occured. This usually means GTK was incorrectly installed"
+ echo "*** exact error that occurred. This usually means GTK was incorrectly installed"
echo "*** or that you have moved GTK since it was installed. In the latter case, you"
echo "*** may want to edit the gtk-config script: $GTK_CONFIG" ])
CFLAGS="$ac_save_CFLAGS"
libfame_major_version, libfame_minor_version, libfame_micro_version);
printf ("*** was found! If libfame-config was correct, then it is best\n");
printf ("*** to remove the old version of libfame. You may also be able to fix the error\n");
- printf("*** by modifying your LD_LIBRARY_PATH enviroment variable, or by editing\n");
+ printf("*** by modifying your LD_LIBRARY_PATH environment variable, or by editing\n");
printf("*** /etc/ld.so.conf. Make sure you have run ldconfig if that is\n");
printf("*** required on your system.\n");
printf("*** If libfame-config was wrong, set the environment variable LIBFAME_CONFIG\n");
printf("*** being found. The easiest way to fix this is to remove the old version\n");
printf("*** of libfame, but you can also set the LIBFAME_CONFIG environment to point to the\n");
printf("*** correct copy of libfame-config. (In this case, you will have to\n");
- printf("*** modify your LD_LIBRARY_PATH enviroment variable, or edit /etc/ld.so.conf\n");
+ printf("*** modify your LD_LIBRARY_PATH environment variable, or edit /etc/ld.so.conf\n");
printf("*** so that the correct libraries are found at run-time))\n");
}
}
echo "*** you may also be able to get things to work by modifying LD_LIBRARY_PATH"
echo "***" ],
[ echo "*** The test program failed to compile or link. See the file config.log for the"
- echo "*** exact error that occured. This usually means libfame was incorrectly installed"
+ echo "*** exact error that occurred. This usually means libfame was incorrectly installed"
echo "*** or that you have moved libfame since it was installed. In the latter case, you"
echo "*** may want to edit the libfame-config script: $LIBFAME_CONFIG" ])
CFLAGS="$ac_save_CFLAGS"
echo "*** If you have an old version installed, it is best to remove it, although"
echo "*** you may also be able to get things to work by modifying LD_LIBRARY_PATH"],
[ echo "*** The test program failed to compile or link. See the file config.log for the"
- echo "*** exact error that occured. This usually means Ogg was incorrectly installed"
+ echo "*** exact error that occurred. This usually means Ogg was incorrectly installed"
echo "*** or that you have moved Ogg since it was installed." ])
CFLAGS="$ac_save_CFLAGS"
LIBS="$ac_save_LIBS"
echo "*** If you have an old version installed, it is best to remove it, although"
echo "*** you may also be able to get things to work by modifying LD_LIBRARY_PATH"],
[ echo "*** The test program failed to compile or link. See the file config.log for the"
- echo "*** exact error that occured. This usually means Vorbis was incorrectly installed"
+ echo "*** exact error that occurred. This usually means Vorbis was incorrectly installed"
echo "*** or that you have moved Vorbis since it was installed." ])
CFLAGS="$ac_save_CFLAGS"
LIBS="$ac_save_LIBS"
/* *INDENT-OFF* */
static const GstOss4AudioFormat fmt_map[] = {
- /* note: keep sorted by preference, prefered formats first */
+ /* note: keep sorted by preference, preferred formats first */
{
GST_AUDIO_RING_BUFFER_FORMAT_TYPE_MU_LAW, 0,
AFMT_MU_LAW, "audio/x-mulaw"}, {
* use of the latest version is strongly recommended.
*
* {!notice This header file contains many obsolete definitions
- * (for compatibility with older applications that still ned them).
+ * (for compatibility with older applications that still need them).
* Do not use this file as a reference manual of OSS.
* Please check the OSS Programmer's guide for descriptions
* of the supported API details (http://manuals.opensound.com/developer).}
#define SEQ_MIDIPUTC 5
#define SEQ_DRUMON 6 /*** OBSOLETE ***/
#define SEQ_DRUMOFF 7 /*** OBSOLETE ***/
-#define SEQ_ECHO TMR_ECHO /* For synching programs with output */
+#define SEQ_ECHO TMR_ECHO /* For syncing programs with output */
#define SEQ_AFTERTOUCH 9
#define SEQ_CONTROLLER 10
#define SEQ_BALANCE 11
SEQ_CONTROL(dev, voice, CTL_PAN, (pos+128) / 2)
/*
- * Timing and syncronization macros
+ * Timing and synchronization macros
*/
#define _TIMER_EVENT(ev, parm) {_SEQ_NEEDBUF(8);\
* The SOUND_MIXER_READ_DEVMASK returns a bitmask which tells
* the devices supported by the particular mixer.
*
- * {!notice This "legacy" mixer API is obsolete. It has been superceded
+ * {!notice This "legacy" mixer API is obsolete. It has been superseded
* by a new one (see below).
*/
int numaudios; /* # of audio/dsp devices */
int openedaudio[8]; /* Bit mask telling which audio devices are busy */
- int numsynths; /* # of availavle synth devices */
+ int numsynths; /* # of available synth devices */
int nummidis; /* # of available MIDI ports */
int numtimers; /* # of available timer devices */
int nummixers; /* # of mixer devices */
got_outer_asbd =
_core_audio_get_stream_format (core_audio, &outer_asbd, TRUE);
- /* Collect info about the HW capabilites and preferences */
+ /* Collect info about the HW capabilities and preferences */
spdif_allowed =
gst_core_audio_audio_device_is_spdif_avail (core_audio->device_id);
if (!core_audio->is_src)
gst_caps_append_structure (caps, out_s);
gst_caps_append_structure (caps, mono);
} else {
- /* Otherwhise just add the caps */
+ /* Otherwise just add the caps */
gst_caps_append_structure (caps, out_s);
}
}
/**
* GstOSXVideoSink:embed
*
- * For ABI comatibility onyl, do not use
+ * For ABI compatibility only, do not use
*
**/
g_object_class_install_property (gobject_class, ARG_EMBED,
- g_param_spec_boolean ("embed", "embed", "For ABI compatiblity only, do not use",
+ g_param_spec_boolean ("embed", "embed", "For ABI compatibility only, do not use",
FALSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
/**
- (void)windowWillClose:(NSNotification *)notification {
/* Only handle close events if the window was closed manually by the user
- * and not becuase of a state change state to READY */
+ * and not because of a state change to READY */
if (osxvideosink->osxwindow == NULL) {
return;
}
GST_DEBUG_CATEGORY_INIT (v4l2_debug, "v4l2", 0, "V4L2 API calls");
- /* Add some depedency, so the dynamic features get updated upon changes in
+ /* Add some dependency, so the dynamic features get updated upon changes in
* /dev/video* */
gst_plugin_add_dependency (plugin,
NULL, paths, names, GST_PLUGIN_DEPENDENCY_FLAG_FILE_NAME_IS_PREFIX);
/* Save everything */
allocator->obj = v4l2object;
- /* Keep a ref on the elemnt so obj does not disapear */
+ /* Keep a ref on the element so obj does not disappear */
gst_object_ref (allocator->obj->element);
flags |= GST_V4L2_ALLOCATOR_PROBE (allocator, MMAP);
for (i = 0; i < group->n_mem; i++) {
gsize maxsize, psize;
- /* TODO request used size and maxsize seperatly */
+ /* TODO request used size and maxsize separately */
if (V4L2_TYPE_IS_MULTIPLANAR (obj->type))
maxsize = psize = size[i];
else
done:
ret = GST_BUFFER_POOL_CLASS (parent_class)->set_config (bpool, config);
- /* If anything was changed documentation recommand to return FALSE */
+ /* If anything was changed, the documentation recommends returning FALSE */
return !updated && ret;
/* ERRORS */
case GST_V4L2_IO_DMABUF_IMPORT:
if (!V4L2_TYPE_IS_OUTPUT (pool->obj->type)) {
/* For captures, we need to enqueue buffers before we start streaming,
- * so the driver don't underflow immediatly. As we have put then back
+ * so the driver doesn't underflow immediately. As we have put them back
* into the base class queue, resurrect them, then releasing will queue
* them back. */
while (gst_v4l2_buffer_pool_resurrect_buffer (pool) == GST_FLOW_OK)
/* V4L2 buffer pool are often very limited in the amount of buffers it
* can offer. The copy_threshold will workaround this limitation by
* falling back to copy if the pipeline needed more buffers. This also
- * prevent having to do REQBUFS(N)/REQBUFS(0) everytime configure is
+ * prevent having to do REQBUFS(N)/REQBUFS(0) every time configure is
* called. */
if (count != min_buffers || pool->enable_copy_threshold) {
GST_WARNING_OBJECT (pool,
gst_v4l2_buffer_pool_qbuf (pool, buffer, group) != GST_FLOW_OK)
pclass->release_buffer (bpool, buffer);
} else {
- /* Simply release invalide/modified buffer, the allocator will
+ /* Simply release invalid/modified buffer, the allocator will
* give it back later */
GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_TAG_MEMORY);
pclass->release_buffer (bpool, buffer);
guint index;
if (!gst_v4l2_is_buffer_valid (buffer, &group)) {
- /* Simply release invalide/modified buffer, the allocator will
+ /* Simply release invalid/modified buffer, the allocator will
* give it back later */
GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_TAG_MEMORY);
pclass->release_buffer (bpool, buffer);
goto prepare_failed;
}
- /* retreive the group */
+ /* retrieve the group */
gst_v4l2_is_buffer_valid (to_queue, &group);
}
/* This flow return is used to indicated that the last buffer of a
* drain or a resoltuion change has been found. This should normally
- * only occure for mem-2-mem devices. */
+ * only occur for mem-2-mem devices. */
#define GST_V4L2_FLOW_LAST_BUFFER GST_FLOW_CUSTOM_SUCCESS
/* This flow return is used to indicated that the returned buffer was marked
rank = DV_BASE_RANK;
break;
- case V4L2_PIX_FMT_WNVA: /* Winnov hw compres */
+ case V4L2_PIX_FMT_WNVA: /* Winnov hw compress */
rank = 0;
break;
structure = gst_structure_new ("video/mpegts",
"systemstream", G_TYPE_BOOLEAN, TRUE, NULL);
break;
- case V4L2_PIX_FMT_WNVA: /* Winnov hw compres */
+ case V4L2_PIX_FMT_WNVA: /* Winnov hw compress */
break;
case V4L2_PIX_FMT_SBGGR8:
case V4L2_PIX_FMT_SGBRG8:
fps_d = GST_VIDEO_INFO_FPS_D (&info);
/* if encoded format (GST_VIDEO_INFO_N_PLANES return 0)
- * or if contiguous is prefered */
+ * or if contiguous is preferred */
n_v4l_planes = GST_VIDEO_INFO_N_PLANES (&info);
if (!n_v4l_planes || !v4l2object->prefered_non_contiguous)
n_v4l_planes = 1;
break;
default:
GST_WARNING_OBJECT (v4l2object->dbg_obj,
- "Unknown colorimetry tranfer %d", info.colorimetry.transfer);
+ "Unknown colorimetry transfer %d", info.colorimetry.transfer);
break;
}
format.fmt.pix_mp.field = field;
format.fmt.pix_mp.num_planes = n_v4l_planes;
- /* try to ask our prefered stride but it's not a failure if not
+ /* try to ask our preferred stride but it's not a failure if not
* accepted */
for (i = 0; i < n_v4l_planes; i++) {
gint stride = GST_VIDEO_INFO_PLANE_STRIDE (&info, i);
stride = GST_VIDEO_TILE_X_TILES (stride) <<
GST_VIDEO_FORMAT_INFO_TILE_WS (info.finfo);
- /* try to ask our prefered stride */
+ /* try to ask our preferred stride */
format.fmt.pix.bytesperline = stride;
if (GST_VIDEO_INFO_FORMAT (&info) == GST_VIDEO_FORMAT_ENCODED)
* @v4l2object: the object
* @info: a GstVideoInfo to be filled
*
- * Acquire the driver choosen format. This is useful in decoder or encoder elements where
- * the output format is choosen by the HW.
+ * Acquire the driver chosen format. This is useful in decoder or encoder elements where
+ * the output format is chosen by the HW.
*
* Returns: %TRUE on success, %FALSE on failure.
*/
gst_structure_free (pref_s);
}
- GST_DEBUG_OBJECT (basesrc, "Prefered size %ix%i", pref.width, pref.height);
+ GST_DEBUG_OBJECT (basesrc, "Preferred size %ix%i", pref.width, pref.height);
/* Sort the structures to get the caps that is nearest to our preferences,
* first. Use single struct caps for sorting so we preserve the features. */
goto done;
}
- /* If all this failed, keep the height that was nearest to the orignal
+ /* If all this failed, keep the height that was nearest to the original
* height and the nearest possible width. This changes the DAR but
* there's not much else to do here.
*/
{
GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (decoder);
- /* We don't allow renegotiation without carefull disabling the pool */
+ /* We don't allow renegotiation without carefully disabling the pool */
if (self->v4l2capture->pool &&
gst_buffer_pool_is_active (GST_BUFFER_POOL (self->v4l2capture->pool)))
return TRUE;
}
/* and ensure the processing thread has stopped in case another error
- * occured. */
+ * occurred. */
gst_v4l2_object_unlock (self->v4l2capture);
gst_pad_stop_task (decoder->srcpad);
GST_VIDEO_DECODER_STREAM_LOCK (decoder);
/* We are running in byte-stream mode, so we don't know the headers, but
* we need to send something, otherwise the decoder will refuse to
- * intialize.
+ * initialize.
*/
if (codec_data) {
gst_buffer_ref (codec_data);
}
}
- /* No need to keep input arround */
+ /* No need to keep input around */
tmp = frame->input_buffer;
frame->input_buffer = gst_buffer_new ();
gst_buffer_copy_into (frame->input_buffer, tmp,
SET_META ("PWC2");
} else {
/* This code should be kept on sync with the exposed CODEC type of format
- * from gstv4l2object.c. This warning will only occure in case we forget
+ * from gstv4l2object.c. This warning will only occur in case we forget
* to also add a format here. */
gchar *s_str = gst_structure_to_string (s);
g_warning ("Missing fixed name mapping for caps '%s', this is a GStreamer "
}
/* and ensure the processing thread has stopped in case another error
- * occured. */
+ * occurred. */
gst_v4l2_object_unlock (self->v4l2capture);
gst_pad_stop_task (encoder->srcpad);
GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
}
/* FIXME This may not be entirely correct, as encoder may keep some
- * observation withouth delaying the encoding. Linux Media API need some
+ * observation without delaying the encoding. Linux Media API need some
* more work to explicitly expressed the decoder / encoder latency. This
* value will then become max latency, and the reported driver latency would
* become the min latency. */
GST_LOG_OBJECT (e, "card: '%s'", v4l2object->vcap.card);
GST_LOG_OBJECT (e, "bus_info: '%s'", v4l2object->vcap.bus_info);
GST_LOG_OBJECT (e, "version: %08x", v4l2object->vcap.version);
- GST_LOG_OBJECT (e, "capabilites: %08x", v4l2object->device_caps);
+ GST_LOG_OBJECT (e, "capabilities: %08x", v4l2object->device_caps);
return TRUE;
GST_ELEMENT_ERROR (e, RESOURCE, SETTINGS,
(_("Failed to query norm on device '%s'."),
v4l2object->videodev),
- ("Failed to get attributes for norm %d on devide '%s'. (%d - %s)",
+ ("Failed to get attributes for norm %d on device '%s'. (%d - %s)",
n, v4l2object->videodev, errno, strerror (errno)));
return FALSE;
}
MMRESULT mmresult;
guint index;
- /* setup waveformex struture with the input ringbuffer specs */
+ /* setup waveformex structure with the input ringbuffer specs */
memset (&wfx, 0, sizeof (wfx));
wfx.cbSize = 0;
wfx.wFormatTag = WAVE_FORMAT_PCM;
if (mmresult != MMSYSERR_NOERROR) {
waveOutGetErrorText (mmresult, wfsink->error_string, ERROR_LENGTH - 1);
GST_CAT_WARNING_OBJECT (waveformsink_debug, wfsink,
- "gst_waveform_sink_write: Error writting buffer to the device => %s",
+ "gst_waveform_sink_write: Error writing buffer to the device => %s",
wfsink->error_string);
}
waveheader->dwUser = 0;
wfsink->bytes_in_queue = 0;
GST_CAT_LOG_OBJECT (waveformsink_debug, wfsink,
- "gst_waveform_sink_write: Writting a buffer to the device (free buffers remaining=%d, write buffer=%d)",
+ "gst_waveform_sink_write: Writing a buffer to the device (free buffers remaining=%d, write buffer=%d)",
wfsink->free_buffers_count, wfsink->write_buffer);
}
}
if (mmresult != MMSYSERR_NOERROR) {
waveOutGetErrorText (mmresult, wfsink->error_string, ERROR_LENGTH - 1);
GST_CAT_WARNING_OBJECT (waveformsink_debug, wfsink,
- "gst_waveform_sink_reset: Error reseting waveform-audio device => %s",
+ "gst_waveform_sink_reset: Error resetting waveform-audio device => %s",
wfsink->error_string);
}
}
* known to work better with remote displays.
*/
g_object_class_install_property (gc, PROP_REMOTE,
- g_param_spec_boolean ("remote", "Remote dispay",
+ g_param_spec_boolean ("remote", "Remote display",
"Whether the display is remote", FALSE,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
/**
* if the Extension is present
* @caps: the #GstCaps that Display @disp can accept
*
- * Structure used to store various informations collected/calculated for a
+ * Structure used to store various information collected/calculated for a
* Display.
*/
struct _GstXContext {
* through the #GstXOverlay interface
* @gc: the Graphical Context of Window @win
*
- * Structure used to store informations about a Window.
+ * Structure used to store information about a Window.
*/
struct _GstXWindow {
Window win;
#include <gst/app/gstappsink.h>
#include <gst/pbutils/gstdiscoverer.h>
-/* Verify jpegdec is working when explictly requested by a pipeline. */
+/* Verify jpegdec is working when explicitly requested by a pipeline. */
GST_START_TEST (test_jpegdec_explicit)
{
GstElement *pipeline, *source, *dec, *sink;
fail_if ((outbuffer = (GstBuffer *) buffers->data) == NULL);
fail_unless (inbuffer == outbuffer);
- /* check that timestamp + duration is contigous to the next timestamp */
+ /* check that timestamp + duration is contiguous to the next timestamp */
message = gst_bus_poll (bus, GST_MESSAGE_ELEMENT, -1);
structure = gst_message_get_structure (message);
gst_structure_get_clock_time (structure, "timestamp", &ts1);
test_input_data_init (&input2);
/* Create the inputs, after calling the run below, all this data is
- * transfered to it and we have no need to clean up */
+ * transferred to it and we have no need to clean up */
input1.input = NULL;
input1.input =
g_list_append (input1.input, gst_event_new_stream_start ("test-1"));
test_input_data_init (&input2);
/* Create the inputs, after calling the run below, all this data is
- * transfered to it and we have no need to clean up */
+ * transferred to it and we have no need to clean up */
input1.input = NULL;
input1.input =
g_list_append (input1.input, gst_event_new_stream_start ("test-1"));
test_input_data_init (&input2);
/* Create the inputs, after calling the run below, all this data is
- * transfered to it and we have no need to clean up */
+ * transferred to it and we have no need to clean up */
input1.input = NULL;
input1.input =
g_list_append (input1.input, gst_event_new_stream_start ("test-1"));
test_input_data_init (&input2);
/* Create the inputs, after calling the run below, all this data is
- * transfered to it and we have no need to clean up */
+ * transferred to it and we have no need to clean up */
input1.input = NULL;
input1.input =
g_list_append (input1.input, gst_event_new_stream_start ("test-1"));
test_input_data_init (&input2);
/* Create the inputs, after calling the run below, all this data is
- * transfered to it and we have no need to clean up */
+ * transferred to it and we have no need to clean up */
input1.input = NULL;
input1.input =
g_list_append (input1.input, gst_event_new_stream_start ("test-1"));
GST_TAG_TRACK_GAIN, 0.00, GST_TAG_TRACK_PEAK, 0.2,
GST_TAG_REFERENCE_LEVEL, 83., NULL);
fail_unless (send_tag_event (element, gst_event_new_tag (tag_list)) == NULL);
- /* Because our authorative reference is 89 dB, we bump it up by +6 dB. */
+ /* Because our authoritative reference is 89 dB, we bump it up by +6 dB. */
fail_unless_gain (element, +6.00); /* pre-amp + track gain */
send_eos_event (element);
/* GStreamer RTP payloader unit tests
- * Copyright (C) 2008 Nokia Corporation and its subsidary(-ies)
+ * Copyright (C) 2008 Nokia Corporation and its subsidiary(-ies)
* contact: <stefan.kost@nokia.com>
*
* This library is free software; you can redistribute it and/or
/*
* Creates a RTP pipeline for one test.
- * @param frame_data Pointer to the frame data which is used to pass thru pay/depayloaders.
+ * @param frame_data Pointer to the frame data which is used to pass through pay/depayloaders.
* @param frame_data_size Frame data size in bytes.
* @param frame_count Frame count.
* @param filtercaps Caps filters.
/*
* Creates the RTP pipeline and runs the test using the pipeline.
- * @param frame_data Pointer to the frame data which is used to pass thru pay/depayloaders.
+ * @param frame_data Pointer to the frame data which is used to pass through pay/depayloaders.
* @param frame_data_size Frame data size in bytes.
* @param frame_count Frame count.
* @param filtercaps Caps filters.
fail_unless_equals_pointer (buffers->data, rtp_buffer);
gst_check_drop_buffers ();
- /* Advance clock twice and we shoudl have one RTCP packet at least */
+ /* Advance clock twice and we should have one RTCP packet at least */
gst_test_clock_crank (tclock);
gst_test_clock_crank (tclock);
/* GStreamer
*
- * Copyright (C) 2009 Nokia Corporation and its subsidary(-ies)
+ * Copyright (C) 2009 Nokia Corporation and its subsidiary(-ies)
* contact: <stefan.kost@nokia.com>
* Copyright (C) 2012 Cisco Systems, Inc
* Authors: Kelley Rogers <kelro@cisco.com>
gst_harness_push (h, buffer);
/* now push in the next regular buffer at its ideal time, and verify the
- rouge RTX-buffer did not mess things up */
+ rogue RTX-buffer did not mess things up */
push_test_buffer (h, next_seqnum);
now = gst_clock_get_time (GST_ELEMENT_CLOCK (h->element));
buffer = gst_harness_pull (h);
gst_rtp_buffer_unmap (&rtp);
_push_and_check_cant_pull_twice (h, bufinp, 3);
- /* Now we ts_offset points to the previous buffer we didnt loose */
+ /* Now ts_offset points to the previous buffer we didn't lose */
ts_offset = TIMESTAMP_DIFF;
red_in[1] = ts_offset >> 6;
red_in[2] = (ts_offset & 0x3f) << 2;
gst_event_unref (gst_harness_pull_upstream_event (hrecv));
fail_unless_equals_int (gst_harness_upstream_events_in_queue (hrecv), 0);
- /* Push 'packets_num' packets through rtxsend to rtxreceive loosing every
+ /* Push 'packets_num' packets through rtxsend to rtxreceive losing every
'drop_every_n_packets' packet. When we loose the packet we send RTX event
through rtxreceive to rtxsend, and verify the packet was retransmitted */
for (drop_nth_packet = 2; drop_nth_packet < 10; ++drop_nth_packet) {
gst_harness_push_upstream_event (h,
create_rtx_event (master_ssrc, master_pt, rtx_seqnum));
- /* Pull only the ones supposed to be retransmited */
+ /* Pull only the ones supposed to be retransmitted */
if (j >= i - half_buffers)
pull_and_verify (h, TRUE, rtx_ssrc, rtx_pt, rtx_seqnum);
}
/* Check there no extra buffers in the harness queue */
fail_unless_equals_int (gst_harness_buffers_in_queue (h), 0);
- /* We create RTP buffers with timestamps that will eventualy wrap around 0
+ /* We create RTP buffers with timestamps that will eventually wrap around 0
to be sure, rtprtxsend can handle it properly */
push_pull_and_verify (h,
create_rtp_buffer_with_timestamp (master_ssrc, master_pt, 0x100 + i,
fail_unless_equals_int (GST_FLOW_OK,
session_harness_recv_rtp (h, generate_test_buffer (0, 0x12345678)));
- /* When probation is disable, the packet should be produced immediatly */
+ /* When probation is disabled, the packet should be produced immediately */
fail_unless_equals_int (1, gst_harness_buffers_in_queue (h->recv_rtp_h));
session_harness_free (h);
gst_buffer_unref (session_harness_pull_rtcp (h));
/* request NACK immediately, but also advance the clock, so the request is
- * now late, but it should be kept to avoid sendign an early rtcp without
+ * now late, but it should be kept to avoid sending an early rtcp without
* NACK. This would otherwise lead to a stall if the late packet was cause
* by high RTT, we need to send some RTX in order to update that statistic. */
session_harness_rtp_retransmission_request (h, 0x12345678, 1234, 0, 0, 0);
g_ptr_array_add (bufs_in, create_rtp_packet (96, ssrc, RTP_TSTAMP (6),
seq_start + 11));
- /* Loosing one */
+ /* Losing one */
g_ptr_array_remove_index (bufs_in, nth_to_loose);
/* Push all of them through */
fail_unless (GST_MESSAGE_TYPE (msg) == GST_MESSAGE_EOS);
gst_message_unref (msg);
- /* unlink manually and relase request pad to ensure that we *can* do that
+ /* unlink manually and release request pad to ensure that we *can* do that
* - https://bugzilla.gnome.org/show_bug.cgi?id=753622 */
sink = gst_bin_get_by_name (GST_BIN (pipeline), "splitsink");
fail_if (sink == NULL);
gst_object_unref (enc_src_pad);
gst_element_release_request_pad (sink, splitmux_sink_pad);
gst_object_unref (splitmux_sink_pad);
- /* at this point the pad must be releaased - try to find it again to verify */
+ /* at this point the pad must be released - try to find it again to verify */
splitmux_sink_pad = gst_element_get_static_pad (sink, "video");
fail_if (splitmux_sink_pad != NULL);
g_object_unref (sink);
gchar *in_pattern;
/* This pipeline should start a new file every GOP, ie 1 second,
- * driven by the primary video stream and with 2 auxilliary video streams */
+ * driven by the primary video stream and with 2 auxiliary video streams */
pipeline =
gst_parse_launch
("splitmuxsink name=splitsink "
fail_unless (GST_MESSAGE_TYPE (msg) == GST_MESSAGE_EOS);
gst_message_unref (msg);
- /* unlink manually and relase request pad to ensure that we *can* do that
+ /* unlink manually and release request pad to ensure that we *can* do that
* - https://bugzilla.gnome.org/show_bug.cgi?id=753622 */
sink = gst_bin_get_by_name (GST_BIN (pipeline), "splitsink");
fail_if (sink == NULL);
gst_object_unref (enc_src_pad);
gst_element_release_request_pad (sink, splitmux_sink_pad);
gst_object_unref (splitmux_sink_pad);
- /* at this point the pad must be releaased - try to find it again to verify */
+ /* at this point the pad must be released - try to find it again to verify */
splitmux_sink_pad = gst_element_get_static_pad (sink, "video");
fail_if (splitmux_sink_pad != NULL);
g_object_unref (sink);
GST_MESSAGE_ANY & ~(GST_MESSAGE_ERROR | GST_MESSAGE_WARNING),
GST_MESSAGE_UNKNOWN, target_state);
- /* Cannot be tested with fakesrc becouse speex payloader requires a valid header?! */
+ /* Cannot be tested with fakesrc because speex payloader requires a valid header?! */
/*
s = PIPELINE_STRING(DEFAULT_BUFCOUNT, DEFAULT_BUFSIZE, "rtpspeexpay", "rtpspeexdepay");
run_pipeline (setup_pipeline (s), s,
/* If failing, the element could not be created */
g_assert (cairo_overlay);
- /* Hook up the neccesary signals for cairooverlay */
+ /* Hook up the necessary signals for cairooverlay */
g_signal_connect (cairo_overlay, "draw",
G_CALLBACK (draw_overlay), overlay_state);
g_signal_connect (cairo_overlay, "caps-changed",
return TRUE;
if (!gst_glsl_string_get_version_profile (src, &version, &profile)) {
- g_print ("Warning: failed to retreive GLSL version and profile for "
+ g_print ("Warning: failed to retrieve GLSL version and profile for "
"shader type 0x%x\nsrc:\n%s\n", type, src);
}
g_main_loop_run (loop);
- g_print ("stoping client pipeline\n");
+ g_print ("stopping client pipeline\n");
gst_element_set_state (GST_ELEMENT (pipe), GST_STATE_NULL);
gst_object_unref (pipe);
return -1;
}
- /* programm a pattern of events */
+ /* program a pattern of events */
#if 0
prog = gst_structure_from_string ("program"
", image00=(structure)\"image\\,contrast\\=0.0\\;\""