dnl GST_PLUGINS_BASE_CFLAGS: to link internally against the plugins base libs
dnl (compare to other modules) or for i18n
dnl GST_ALL_LDFLAGS: linker flags shared by all
-dnl GST_LIB_LDFLAGS: additional linker flags for all libaries
+dnl GST_LIB_LDFLAGS: additional linker flags for all libraries
dnl GST_LT_LDFLAGS: library versioning of our libraries
dnl GST_PLUGIN_LDFLAGS: flags to be used for all plugins
return GST_CLOCK_TIME_NONE;
}
- /* in case an xrun condition has occured we need to handle this */
+ /* in case an xrun condition has occurred we need to handle this */
if (snd_pcm_status_get_state (status) != SND_PCM_STATE_RUNNING) {
if (xrun_recovery (asrc, asrc->handle, err) < 0) {
GST_WARNING_OBJECT (asrc, "Could not recover from xrun condition !");
gst_gl_filter_render_to_target_with_shader (filter, effects->intexture,
effects->midtexture[3], shader);
- /* horizonal convolution */
+ /* horizontal convolution */
shader = gst_gl_effects_get_fragment_shader (effects, "sobel_hconv3",
sep_sobel_hconv3_fragment_source_gles2);
gst_gl_shader_use (shader);
"void main () {\n"
" vec4 yuva;\n"
/* operations translated from alpha and tested with glvideomixer
- * with one pad's paremeters blend-equation-rgb={subtract,reverse-subtract},
+ * with one pad's parameters blend-equation-rgb={subtract,reverse-subtract},
* blend-function-src-rgb=src-color and blend-function-dst-rgb=dst-color */
" vec4 rgba = texture2D (tex, v_texcoord);\n"
" yuva.xyz = rgb_to_yuv (rgba.rgb);\n"
"void main () {\n"
" vec3 yuv;\n"
/* operations translated from videobalanceand tested with glvideomixer
- * with one pad's paremeters blend-equation-rgb={subtract,reverse-subtract},
+ * with one pad's parameters blend-equation-rgb={subtract,reverse-subtract},
* blend-function-src-rgb=src-color and blend-function-dst-rgb=dst-color */
" float hue_cos = cos (PI * hue);\n"
" float hue_sin = sin (PI * hue);\n"
glClear (GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT | GL_STENCIL_BUFFER_BIT);
- //load identity befor tracing
+ //load identity before tracing
glLoadIdentity ();
//camera translation
glTranslatef (0.0f, 0.1f, -1.3f);
#endif
} else {
/* FIXME: Add support for unsigned ints, non 4x4 matrices, etc */
- GST_FIXME ("Don't know how to set the \'%s\' paramater. Unknown type",
+ GST_FIXME ("Don't know how to set the \'%s\' parameter. Unknown type",
field_name);
return TRUE;
}
gst_gl_shader_use (shader);
- /* FIXME: propertise these */
+ /* FIXME: turn these into properties */
gst_gl_shader_set_uniform_1i (shader, "tex", 0);
gst_gl_shader_set_uniform_1f (shader, "width",
GST_VIDEO_INFO_WIDTH (&filter->out_info));
GST_VIDEO_INFO_HEIGHT (&filter->out_info));
gst_gl_shader_set_uniform_1f (shader, "time", filtershader->time);
- /* FIXME: propertise these */
+ /* FIXME: turn these into properties */
filter->draw_attr_position_loc =
gst_gl_shader_get_attribute_location (shader, "a_position");
filter->draw_attr_texture_loc =
frag_stage = gst_glsl_stage_new_default_fragment (gl_sink->context);
}
if (!vert_stage || !frag_stage) {
- GST_ERROR_OBJECT (gl_sink, "Failed to retreive fragment shader for "
+ GST_ERROR_OBJECT (gl_sink, "Failed to retrieve fragment shader for "
"texture target");
if (vert_stage)
gst_object_unref (vert_stage);
gst_gl_sync_meta_wait (gl_sink->stored_sync_meta,
gst_gl_context_get_current ());
- /* make sure that the environnement is clean */
+ /* make sure that the environment is clean */
gst_gl_context_clear_shader (gl_sink->context);
gl->BindTexture (gl_target, 0);
GstGLStereoMix *mix = GST_GL_STEREO_MIX (vagg);
/* If we're operating in frame-by-frame mode, push
* the primary view now, and let the parent class
- * push the remaining auxilliary view */
+ * push the remaining auxiliary view */
if (GST_VIDEO_INFO_MULTIVIEW_MODE (&vagg->info) ==
GST_VIDEO_MULTIVIEW_MODE_FRAME_BY_FRAME) {
/* Transfer the timestamps video-agg put on the aux buffer */
* pages.
* Before extracting the packets out of the ogg pages, we push the raw vorbis
* header packets to the decoder.
- * We don't use the incomming timestamps but use the ganulepos on the ogg pages
+ * We don't use the incoming timestamps but use the granulepos on the ogg pages
* directly.
* This parser only does ogg/vorbis for now.
*/
beyond = ipad->map.n_index
&& ipad->map.index[ipad->map.n_index - 1].offset >= length;
if (beyond) {
- GST_WARNING_OBJECT (pad, "Index offsets beyong byte length");
+ GST_WARNING_OBJECT (pad, "Index offsets beyond byte length");
if (ipad->discont) {
/* hole - the index is most likely screwed up */
GST_WARNING_OBJECT (ogg, "Discarding entire index");
GST_DEBUG_OBJECT (ogg, "Raw best guess: %" G_GINT64_FORMAT, best);
/* offset the guess down as we need to capture the start of the
- page we are targetting - but only do so if we did not undershoot
+ page we are targeting - but only do so if we did not undershoot
last time, as we're likely to still do this time */
if (!ogg->seek_undershot) {
/* very small packets are packed on pages, so offset by at least
"bisect begin: %" G_GINT64_FORMAT ", searched: %" G_GINT64_FORMAT
", end %" G_GINT64_FORMAT ", chain: %p", begin, searched, end, chain);
- /* the below guards against garbage seperating the last and
+ /* the below guards against garbage separating the last and
* first pages of two links. */
while (searched < endsearched) {
gint64 bisect;
gint64 first_granule; /* the granulepos of first page == first sample in next page */
GstClockTime first_time; /* the timestamp of the second page or granuletime of first page */
- GstClockTime position; /* position when last push occured; used to detect when we
+ GstClockTime position; /* position when last push occurred; used to detect when we
* need to send a newsegment update event for sparse streams */
GList *continued;
}
if (upstream_has_meta || caps_has_meta) {
- /* Send caps immediatly, it's needed by GstBaseTransform to get a reply
+ /* Send caps immediately, it's needed by GstBaseTransform to get a reply
* from allocation query */
ret = gst_pad_set_caps (overlay->srcpad, overlay_caps);
overlay->ink_rect.y = tmp.x;
overlay->ink_rect.width = tmp.height;
overlay->ink_rect.height = tmp.width;
- /* We want the top left corect, but we now have the top right */
+ /* We want the top left correct, but we now have the top right */
overlay->ink_rect.x += overlay->ink_rect.width;
tmp = overlay->logical_rect;
if (overlay->use_vertical_render) {
gint tmp;
- /* tranlate to the center of the image, rotate, and tranlate the rotated
+ /* translate to the center of the image, rotate, and translate the rotated
* image back to the right place */
cairo_matrix_translate (&cairo_matrix, unscaled_height / 2.0l,
unscaled_width / 2.0l);
GstVideoCodecState *input_state;
GstVideoCodecState *output_state;
- /* telemetry debuging options */
+ /* telemetry debugging options */
gint telemetry_mv;
gint telemetry_mbmode;
gint telemetry_qi;
enc = GST_THEORA_ENC (benc);
/* we keep track of two timelines.
- * - The timestamps from the incomming buffers, which we copy to the outgoing
+ * - The timestamps from the incoming buffers, which we copy to the outgoing
* encoded buffers as-is. We need to do this as we simply forward the
* newsegment events.
* - The running_time of the buffers, which we use to construct the granulepos
* elements.
*
* Applications can set the tags to write using the #GstTagSetter interface.
- * Tags contained withing the vorbis bitstream will be picked up
+ * Tags contained within the vorbis bitstream will be picked up
* automatically (and merged according to the merge mode set via the tag
* setter interface).
*
*
 * When the memory is mappable for read and write requests then it is assumed
* to be a fast path and so this caps feature should not be used. Though
- * according to the dma-buf protocal, while it is mapped it prevents the
+ * according to the dma-buf protocol, while it is mapped it prevents the
* exporter to migrate the buffer.
*
* This caps feature should not serve at all the purpose of selecting the
* @GST_ALLOCATOR_DMABUF allocator during caps negotiation.
* When the exporter is the upstream element from the importer point of view,
- * the exporter should try to map the dma buffer at runtime (preferrably during
+ * the exporter should try to map the dma buffer at runtime (preferably during
* decide_allocation phase). When it succeeds for #GST_MAP_READWRITE this caps
* feature should not be used. This allows scalers, color converts and any image
* processing filters to work directly on the dma buffer.
- * In this case the importer element should check all incomming memory using
+ * In this case the importer element should check all incoming memory using
* gst_is_dmabuf_memory().
*
* Since: 1.12
*
* Check if appsink will emit the "new-preroll" and "new-sample" signals.
*
- * Returns: %TRUE if @appsink is emiting the "new-preroll" and "new-sample"
+ * Returns: %TRUE if @appsink is emitting the "new-preroll" and "new-sample"
* signals.
*/
gboolean
/**
* GstAppSrc::max-latency:
*
- * The maximum latency of the source. A value of -1 means an unlimited amout
+ * The maximum latency of the source. A value of -1 means an unlimited amount
* of latency.
*/
g_object_class_install_property (gobject_class, PROP_MAX_LATENCY,
* When the block property is TRUE, this function can block until free
* space becomes available in the queue.
*
- * Returns: #GST_FLOW_OK when the buffer was successfuly queued.
+ * Returns: #GST_FLOW_OK when the buffer was successfully queued.
* #GST_FLOW_FLUSHING when @appsrc is not PAUSED or PLAYING.
- * #GST_FLOW_EOS when EOS occured.
+ * #GST_FLOW_EOS when EOS occurred.
*/
GstFlowReturn
gst_app_src_push_buffer (GstAppSrc * appsrc, GstBuffer * buffer)
* When the block property is TRUE, this function can block until free
* space becomes available in the queue.
*
- * Returns: #GST_FLOW_OK when the buffer list was successfuly queued.
+ * Returns: #GST_FLOW_OK when the buffer list was successfully queued.
* #GST_FLOW_FLUSHING when @appsrc is not PAUSED or PLAYING.
- * #GST_FLOW_EOS when EOS occured.
+ * #GST_FLOW_EOS when EOS occurred.
*
* Since: 1.14
*/
* When the block property is TRUE, this function can block until free
* space becomes available in the queue.
*
- * Returns: #GST_FLOW_OK when the buffer was successfuly queued.
+ * Returns: #GST_FLOW_OK when the buffer was successfully queued.
* #GST_FLOW_FLUSHING when @appsrc is not PAUSED or PLAYING.
- * #GST_FLOW_EOS when EOS occured.
+ * #GST_FLOW_EOS when EOS occurred.
*
* Since: 1.6
*
* Indicates to the appsrc element that the last buffer queued in the
* element is the last buffer of the stream.
*
- * Returns: #GST_FLOW_OK when the EOS was successfuly queued.
+ * Returns: #GST_FLOW_OK when the EOS was successfully queued.
* #GST_FLOW_FLUSHING when @appsrc is not PAUSED or PLAYING.
*/
GstFlowReturn
* This is expressed in caps by having a channel mask with no bits set.
*
* As another special case it is allowed to have two channels without a channel mask.
- * This implicitely means that this is a stereo stream with a front left and front right
+ * This implicitly means that this is a stereo stream with a front left and front right
* channel.
*/
typedef enum {
gint8 *s;
gsize stride = GST_ROUND_UP_N (num_samples * chain->stride, ALIGN);
/* first part contains the pointers, second part the data, add some extra bytes
- * for alignement */
+ * for alignment */
gsize needed = (stride + sizeof (gpointer)) * chain->blocks + ALIGN - 1;
GST_DEBUG ("alloc samples %d %" G_GSIZE_FORMAT " %" G_GSIZE_FORMAT,
* @GST_AUDIO_PACK_FLAG_NONE: No flag
* @GST_AUDIO_PACK_FLAG_TRUNCATE_RANGE: When the source has a smaller depth
* than the target format, set the least significant bits of the target
- * to 0. This is likely sightly faster but less accurate. When this flag
+ * to 0. This is likely slightly faster but less accurate. When this flag
* is not specified, the most significant bits of the source are duplicated
* in the least significant bits of the destination.
*
* Get the number of output frames that would be currently available when
* @in_frames are given to @resampler.
*
- * Returns: The number of frames that would be availabe after giving
+ * Returns: The number of frames that would be available after giving
* @in_frames as input to @resampler.
*/
gsize
* GstAudioResamplerFilterInterpolation:
* @GST_AUDIO_RESAMPLER_FILTER_INTERPOLATION_NONE: no interpolation
* @GST_AUDIO_RESAMPLER_FILTER_INTERPOLATION_LINEAR: linear interpolation of the
- * filter coeficients.
+ * filter coefficients.
* @GST_AUDIO_RESAMPLER_FILTER_INTERPOLATION_CUBIC: cubic interpolation of the
- * filter coeficients.
+ * filter coefficients.
*
* The different filter interpolation methods.
*/
/**
* GST_AUDIO_RESAMPLER_OPT_FILTER_INTERPOLATION:
*
- * GST_TYPE_AUDIO_RESAMPLER_INTERPOLATION: how the filter coeficients should be
+ * GST_TYPE_AUDIO_RESAMPLER_INTERPOLATION: how the filter coefficients should be
* interpolated.
* GST_AUDIO_RESAMPLER_FILTER_INTERPOLATION_CUBIC is default.
*/
*
* We don't do synchronized mixing so this really depends on where the
 * streams were punched in and what their relative offsets are against
- * eachother which we can get from the first timestamps we see.
+ * each other which we can get from the first timestamps we see.
*
* When we add a new stream (or remove a stream) the duration might
* also become invalid again and we need to post a new DURATION
* the running time.
*
* 2) If the current pad's offset/offset_end overlaps with the output
- * offset/offset_end, mix it at the appropiate position in the output
+ * offset/offset_end, mix it at the appropriate position in the output
* buffer and advance the pad's position. Remember if this pad needs
* a new buffer to advance behind the output offset_end.
*
etime = etime > cexternal ? etime - cexternal : 0;
itime = itime > cinternal ? itime - cinternal : 0;
- /* don't do any skewing unless the callback explicitely requests one */
+ /* don't do any skewing unless the callback explicitly requests one */
requested_skew = 0;
if (sink->priv->custom_slaving_callback != NULL) {
/* FIXME, we can sample and add observations here or use the timeouts on the
* clock. No idea which one is better or more stable. The timeout seems more
* arbitrary but this one seems more demanding and does not work when there is
- * no data comming in to the sink. */
+ * no data coming in to the sink. */
#if 0
GstClockTime etime, itime;
gdouble r_squared;
goto flushing;
/* retry if we got unscheduled, which means we did not reach the timeout
- * yet. if some other error occures, we continue. */
+ * yet. if some other error occurs, we continue. */
} while (status == GST_CLOCK_UNSCHEDULED);
GST_DEBUG_OBJECT (sink, "latency synced");
gst_audio_ring_buffer_may_start (sink->ringbuffer, FALSE);
/* Only post clock-provide messages if this is the clock that
- * we've created. If the subclass has overriden it the subclass
+ * we've created. If the subclass has overridden it the subclass
 * should post these messages whenever necessary */
if (gst_audio_base_sink_is_self_provided_clock (sink))
gst_element_post_message (element,
gst_audio_ring_buffer_may_start (sink->ringbuffer, TRUE);
if (GST_BASE_SINK_CAST (sink)->pad_mode == GST_PAD_MODE_PULL ||
g_atomic_int_get (&sink->eos_rendering) || eos) {
- /* we always start the ringbuffer in pull mode immediatly */
+ /* we always start the ringbuffer in pull mode immediately */
/* sync rendering on eos needs running clock,
* and others need running clock when finished rendering eos */
gst_audio_ring_buffer_start (sink->ringbuffer);
break;
case GST_STATE_CHANGE_PAUSED_TO_READY:
/* Only post clock-lost messages if this is the clock that
- * we've created. If the subclass has overriden it the subclass
+ * we've created. If the subclass has overridden it the subclass
 * should post these messages whenever necessary */
if (gst_audio_base_sink_is_self_provided_clock (sink))
gst_element_post_message (element,
gst_audio_ring_buffer_set_flushing (src->ringbuffer, FALSE);
gst_audio_ring_buffer_may_start (src->ringbuffer, FALSE);
/* Only post clock-provide messages if this is the clock that
- * we've created. If the subclass has overriden it the subclass
+ * we've created. If the subclass has overridden it the subclass
 * should post these messages whenever necessary */
if (src->clock && GST_IS_AUDIO_CLOCK (src->clock) &&
GST_AUDIO_CLOCK_CAST (src->clock)->func ==
case GST_STATE_CHANGE_PAUSED_TO_READY:
GST_DEBUG_OBJECT (src, "PAUSED->READY");
/* Only post clock-lost messages if this is the clock that
- * we've created. If the subclass has overriden it the subclass
+ * we've created. If the subclass has overridden it the subclass
 * should post these messages whenever necessary */
if (src->clock && GST_IS_AUDIO_CLOCK (src->clock) &&
GST_AUDIO_CLOCK_CAST (src->clock)->func ==
/* sanity checking */
if (G_LIKELY (buf && ctx->info.bpf)) {
if (!meta || meta->info.layout == GST_AUDIO_LAYOUT_INTERLEAVED) {
- /* output shoud be whole number of sample frames */
+ /* output should be whole number of sample frames */
if (size % ctx->info.bpf)
goto wrong_buffer;
/* output should have no additional padding */
* arrives out of order.
*
* we first gather buffers in the gather queue until we get a DISCONT. We
- * prepend each incomming buffer so that they are in reversed order.
+ * prepend each incoming buffer so that they are in reversed order.
*
* gather queue: 9 8 7
* decode queue:
GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer)),
GST_TIME_ARGS (GST_BUFFER_DURATION (buffer)));
- /* input shoud be whole number of sample frames */
+ /* input should be whole number of sample frames */
if (size % ctx->info.bpf)
goto wrong_buffer;
* implementation uses another internal buffer between the audio
* device.
*
- * For playback ringbuffers this is the amount of samples transfered from the
+ * For playback ringbuffers this is the amount of samples transferred from the
* ringbuffer to the device but still not played.
*
* For capture ringbuffers this is the amount of samples in the device that are
- * not yet transfered to the ringbuffer.
+ * not yet transferred to the ringbuffer.
*
* Returns: The number of samples queued in the audio device.
*
left = len;
do {
written = writefunc (sink, readptr, left);
- GST_LOG_OBJECT (sink, "transfered %d bytes of %d from segment %d",
+ GST_LOG_OBJECT (sink, "transferred %d bytes of %d from segment %d",
written, left, readseg);
if (written < 0 || written > left) {
/* might not be critical, it e.g. happens when aborting playback */
left = len;
do {
read = readfunc (src, readptr, left, ×tamp);
- GST_LOG_OBJECT (src, "transfered %d bytes of %d to segment %d", read,
+ GST_LOG_OBJECT (src, "transferred %d bytes of %d to segment %d", read,
left, readseg);
if (read < 0 || read > left) {
GST_WARNING_OBJECT (src,
#if 0
GST_DEBUG ("stop, waiting...");
GST_AUDIO_SRC_RING_BUFFER_WAIT (buf);
- GST_DEBUG ("stoped");
+ GST_DEBUG ("stopped");
#endif
return TRUE;
* @rate: a new sample rate
*
* Sets @rate as new sample rate for the following processing. If the sample
- * rate differs this implicitely marks the next data as discontinuous.
+ * rate differs this implicitly marks the next data as discontinuous.
*
* Since: 1.14
*/
#include "_kiss_fft_guts_f32.h"
/* The guts header contains all the multiplication and addition macros that are defined for
- fixed or floating point complex numbers. It also delares the kf_ internal functions.
+ fixed or floating point complex numbers. It also declares the kf_ internal functions.
*/
static kiss_fft_f32_cpx *scratchbuf = NULL;
#include "_kiss_fft_guts_f64.h"
/* The guts header contains all the multiplication and addition macros that are defined for
- fixed or floating point complex numbers. It also delares the kf_ internal functions.
+ fixed or floating point complex numbers. It also declares the kf_ internal functions.
*/
static kiss_fft_f64_cpx *scratchbuf = NULL;
#include "_kiss_fft_guts_s16.h"
/* The guts header contains all the multiplication and addition macros that are defined for
- fixed or floating point complex numbers. It also delares the kf_ internal functions.
+ fixed or floating point complex numbers. It also declares the kf_ internal functions.
*/
static kiss_fft_s16_cpx *scratchbuf = NULL;
#include "_kiss_fft_guts_s32.h"
/* The guts header contains all the multiplication and addition macros that are defined for
- fixed or floating point complex numbers. It also delares the kf_ internal functions.
+ fixed or floating point complex numbers. It also declares the kf_ internal functions.
*/
static kiss_fft_s32_cpx *scratchbuf = NULL;
egl->egl_surface =
eglCreateWindowSurface (egl->egl_display, egl->egl_config,
(EGLNativeWindowType) window_handle, attrs);
- /* Store window handle for later comparision */
+ /* Store window handle for later comparison */
egl->window_handle = window_handle;
} else if (!gst_gl_check_extension ("EGL_KHR_surfaceless_context",
egl->egl_exts)) {
* unclear how to do that. By trying to create an EGL context? */
g_udev_enumerator_add_match_subsystem (gudev_enum, "drm");
devlist = g_udev_enumerator_execute (gudev_enum);
- GST_DEBUG ("Scanned for udev devices with a drm subsytem");
+ GST_DEBUG ("Scanned for udev devices with a drm subsystem");
if (devlist == NULL) {
GST_WARNING ("Found no matching DRM devices");
* gst_gl_api_to_string:
* @api: a #GstGLAPI to stringify
*
- * Returns: A space seperated string of the OpenGL api's enabled in @api
+ * Returns: A space separated string of the OpenGL api's enabled in @api
*/
gchar *
gst_gl_api_to_string (GstGLAPI api)
/**
* gst_gl_api_from_string:
- * @api_s: a space seperated string of OpenGL apis
+ * @api_s: a space separated string of OpenGL apis
*
* Returns: The #GstGLAPI represented by @api_s
*/
* gst_gl_platform_to_string:
* @platform: a #GstGLPlatform to stringify
*
- * Returns: A space seperated string of the OpenGL platforms enabled in @platform
+ * Returns: A space separated string of the OpenGL platforms enabled in @platform
*/
gchar *
gst_gl_platform_to_string (GstGLPlatform platform)
/**
* gst_gl_platform_from_string:
- * @platform_s: a space seperated string of OpenGL platformss
+ * @platform_s: a space separated string of OpenGL platforms
*
* Returns: The #GstGLPlatform represented by @platform_s
*/
* gst_gl_base_memory_alloc_data:
* @gl_mem: a #GstGLBaseMemory
*
- * Note: only intended for subclass usage to allocate the sytem memory buffer
+ * Note: only intended for subclass usage to allocate the system memory buffer
* on demand. If there is already a non-NULL data pointer in @gl_mem->data,
 * then this function simply returns TRUE.
*
* @offset: the offset to start at
* @size: the number of bytes to copy
*
- * Returns: whether the copy suceeded.
+ * Returns: whether the copy succeeded.
*
* Since: 1.8
*/
* @notify will be called once for each allocated memory using these @params
* when freeing the memory.
*
- * Returns: whether the paramaters could be initialized
+ * Returns: whether the parameters could be initialized
*
* Since: 1.8
*/
* @params: the source #GstGLAllocationParams
*
* Frees the dynamically allocated data in @params. Direct subclasses
- * should call this function in their own overriden free function.
+ * should call this function in their own overridden free function.
*
* Since: 1.8
*/
* @dest: the destination #GstGLAllocationParams
*
* Copies the dynamically allocated data from @src to @dest. Direct subclasses
- * should call this function in their own overriden copy function.
+ * should call this function in their own overridden copy function.
*
* Since: 1.8
*/
/**
* GstGLBaseMemoryError:
- * @GST_GL_BASE_MEMORY_ERROR_FAILED: generic faliure
+ * @GST_GL_BASE_MEMORY_ERROR_FAILED: generic failure
* @GST_GL_BASE_MEMORY_ERROR_OLD_LIBS: the implementation is too old and doesn't
* implement enough features
* @GST_GL_BASE_MEMORY_ERROR_RESOURCE_UNAVAILABLE: a resource could not be found
priv->gl_params->target = tex_target;
}
- /* Recalulate the size and offset as we don't add padding between planes. */
+ /* Recalculate the size and offset as we don't add padding between planes. */
priv->gl_params->v_info->size = 0;
for (p = 0; p < GST_VIDEO_INFO_N_PLANES (priv->gl_params->v_info); p++) {
priv->gl_params->v_info->offset[p] = priv->gl_params->v_info->size;
static const gfloat from_rgb_bt709_vcoeff[] = {0.440654f, -0.400285f, -0.040370f};
/* GRAY16 to RGB conversion
- * data transfered as GL_LUMINANCE_ALPHA then convert back to GRAY16
+ * data transferred as GL_LUMINANCE_ALPHA then convert back to GRAY16
* high byte weight as : 255*256/65535
* ([0~1] denormalize to [0~255],shift to high byte,normalize to [0~1])
* low byte weight as : 255/65535 (similar)
};
/* GRAY16 to RGB conversion
- * data transfered as GL_LUMINANCE_ALPHA then convert back to GRAY16
+ * data transferred as GL_LUMINANCE_ALPHA then convert back to GRAY16
* high byte weight as : 255*256/65535
* ([0~1] denormalize to [0~255],shift to high byte,normalize to [0~1])
* low byte weight as : 255/65535 (similar)
|| out_tex->tex_format == GST_GL_LUMINANCE_ALPHA
|| out_width != mem_width || out_height != mem_height) {
/* Luminance formats are not color renderable */
- /* renderering to a framebuffer only renders the intersection of all
+ /* rendering to a framebuffer only renders the intersection of all
* the attachments i.e. the smallest attachment size */
if (!convert->priv->out_tex[j]) {
GstGLVideoAllocationParams *params;
* can share GL resources, this is the next best thing.
*
* XXX: we may need a way to associate two wrapped GstGLContext's as being
- * shared however I have not come across a use case that requries this yet.
 * shared however I have not come across a use case that requires this yet.
*/
struct ContextShareGroup
{
* @name: the name of the function to retrieve
*
* Attempts to use the @context_type specific GetProcAddress implementations
- * to retreive @name.
+ * to retrieve @name.
*
* See also gst_gl_context_get_proc_address().
*
 * Get a function pointer to a specified opengl function, @name. If the
* specific function does not exist, NULL is returned instead.
*
- * Platform specfic functions (names starting 'egl', 'glX', 'wgl', etc) can also
+ * Platform specific functions (names starting 'egl', 'glX', 'wgl', etc) can also
* be retrieved using this method.
*
* Note: This function may return valid function pointers that may not be valid
* @min: (out): resulting minor version
*
* Returns the OpenGL version implemented by @context. See
- * gst_gl_context_get_gl_api() for retreiving the OpenGL api implemented by
+ * gst_gl_context_get_gl_api() for retrieving the OpenGL api implemented by
* @context.
*
* Since: 1.4
* @see_also: #GstContext, #GstGLContext, #GstGLWindow
*
* #GstGLDisplay represents a connection to the underlying windowing system.
- * Elements are required to make use of #GstContext to share and propogate
+ * Elements are required to make use of #GstContext to share and propagate
* a #GstGLDisplay.
*
* There are a number of environment variables that influence the choice of
*
* limit the use of OpenGL to the requested @gl_api. This is intended to allow
* application and elements to request a specific set of OpenGL API's based on
- * what they support. See gst_gl_context_get_gl_api() for the retreiving the
+ * what they support. See gst_gl_context_get_gl_api() for the retrieving the
* API supported by a #GstGLContext.
*/
void
* @compare_func: (scope call): a comparison function to run
*
* Execute @compare_func over the list of windows stored by @display. The
- * first argment to @compare_func is the #GstGLWindow being checked and the
+ * first argument to @compare_func is the #GstGLWindow being checked and the
* second argument is @data.
*
* Returns: (transfer none): The first #GstGLWindow that causes a match
* @name: the extension to search for
* @ext: the list of possible extensions
*
- * Returns: whether @name is in the space seperated list of @ext
+ * Returns: whether @name is in the space separated list of @ext
*/
gboolean
gst_gl_check_extension (const char *name, const gchar * ext)
goto done;
}
- /* If all this failed, keep the height that was nearest to the orignal
+ /* If all this failed, keep the height that was nearest to the original
* height and the nearest possible width. This changes the DAR but
* there's not much else to do here.
*/
/**
* gst_gl_texture_target_from_string:
- * @str: a string equivalant to one of the GST_GL_TEXTURE_TARGET_*_STR values
+ * @str: a string equivalent to one of the GST_GL_TEXTURE_TARGET_*_STR values
*
* Returns: the #GstGLTextureTarget represented by @str or
* %GST_GL_TEXTURE_TARGET_NONE
* @width: (out) (allow-none): output width
* @height: (out) (allow-none): output height
*
- * Retreive the effective dimensions from the current attachments attached to
+ * Retrieve the effective dimensions from the current attachments attached to
* @fb.
*
* Since: 1.10
GST_DEBUG_CATEGORY_STATIC (GST_CAT_GL_MEMORY);
#define GST_CAT_DEFAULT GST_CAT_GL_MEMORY
-/* compatability definitions... */
+/* compatibility definitions... */
#ifndef GL_UNPACK_ROW_LENGTH
#define GL_UNPACK_ROW_LENGTH 0x0CF2
#endif
 * Copies @gl_mem into the texture specified by @tex_id. The format of @tex_id
* is specified by @tex_format, @width and @height.
*
- * Returns: Whether the copy suceeded
+ * Returns: Whether the copy succeeded
*
* Since: 1.8
*/
static GstAllocator *_gl_allocator;
-/* compatability definitions... */
+/* compatibility definitions... */
#ifndef GL_PIXEL_PACK_BUFFER
#define GL_PIXEL_PACK_BUFFER 0x88EB
#endif
* using glCopyTexImage. See the OpenGL specification for details on the
* mappings between texture formats.
*
- * Returns: Whether the copy suceeded
+ * Returns: Whether the copy succeeded
*
* Since: 1.8
*/
g_return_if_fail (GST_IS_GLSL_STAGE (stage));
if (!_gst_glsl_funcs_fill (&shader->priv->vtable, shader->context)) {
- GST_WARNING_OBJECT (shader, "Failed to retreive required GLSL functions");
+ GST_WARNING_OBJECT (shader, "Failed to retrieve required GLSL functions");
return;
}
g_return_val_if_fail (GST_IS_GLSL_STAGE (stage), FALSE);
if (!_gst_glsl_funcs_fill (&shader->priv->vtable, shader->context)) {
- GST_WARNING_OBJECT (shader, "Failed to retreive required GLSL functions");
+ GST_WARNING_OBJECT (shader, "Failed to retrieve required GLSL functions");
gst_object_ref_sink (stage);
gst_object_unref (stage);
return FALSE;
if (!_gst_glsl_funcs_fill (&shader->priv->vtable, shader->context)) {
g_set_error (error, GST_GLSL_ERROR, GST_GLSL_ERROR_PROGRAM,
- "Failed to retreive required GLSL functions");
+ "Failed to retrieve required GLSL functions");
GST_OBJECT_UNLOCK (shader);
return FALSE;
}
shader->context->gl_vtable->GetAttribLocation (shader->priv->
program_handle, name);
- GST_TRACE_OBJECT (shader, "retreived program %i attribute \'%s\' location %i",
+ GST_TRACE_OBJECT (shader, "retrieved program %i attribute \'%s\' location %i",
(int) shader->priv->program_handle, name, ret);
return ret;
_init_debug ();
- /* search for #version while allowing for preceeding comments/whitespace as
+ /* search for #version while allowing for preceding comments/whitespace as
* permitted by the GLSL specification */
while (str && str[i] != '\0' && i < 1024) {
if (str[i] == '\n' || str[i] == '\r') {
if ((profile & GST_GLSL_PROFILE_COMPATIBILITY) == 0)
return FALSE;
} else if ((gst_gl_context_get_gl_api (context) & GST_GL_API_OPENGL3) != 0) {
- /* GL_ARB_es2_compatibility is requried for GL3 contexts */
+ /* GL_ARB_es2_compatibility is required for GL3 contexts */
if ((profile & (GST_GLSL_PROFILE_CORE | GST_GLSL_PROFILE_ES)) == 0)
return FALSE;
} else {
return FALSE;
if (gst_gl_context_check_gl_version (context, GST_GL_API_OPENGL3, 1, 0))
- /* GL_ARB_es2_compatibility is requried for GL3 contexts */
+ /* GL_ARB_es2_compatibility is required for GL3 contexts */
if (version < GST_GLSL_VERSION_150 && version != GST_GLSL_VERSION_100)
return FALSE;
/**
* GstGLSLError:
- * @GST_GLSL_ERROR_COMPILE: Compilation error occured
- * @GST_GLSL_ERROR_LINK: Link error occured
- * @GST_GLSL_ERROR_PROGRAM: General program error occured
+ * @GST_GLSL_ERROR_COMPILE: Compilation error occurred
+ * @GST_GLSL_ERROR_LINK: Link error occurred
+ * @GST_GLSL_ERROR_PROGRAM: General program error occurred
*
* Compilation stage that caused an error
*
case GL_FRAGMENT_SHADER:
return "fragment";
case GL_TESS_CONTROL_SHADER:
- return "tesselation control";
+ return "tessellation control";
case GL_TESS_EVALUATION_SHADER:
- return "tesselation evaluation";
+ return "tessellation evaluation";
case GL_GEOMETRY_SHADER:
return "geometry";
case GL_COMPUTE_SHADER:
* @stage: a #GstGLSLStage
* @error: a #GError to use on failure
*
- * Returns: whether the compilation suceeded
+ * Returns: whether the compilation succeeded
*
* Since: 1.8
*/
gboolean direct;
GstVideoInfo out_info;
- /* only used for pointer comparision */
+ /* only used for pointer comparison */
gpointer out_caps;
};
&_dma_buf_upload_free
};
-/* a variant of the DMABuf uploader that relies on HW color convertion instead
+/* a variant of the DMABuf uploader that relies on HW color conversion instead
* of shaders */
static gpointer
/**
* GstGLUploadReturn:
* @GST_GL_UPLOAD_DONE: No further processing required
- * @GST_GL_UPLOAD_ERROR: An unspecified error occured
+ * @GST_GL_UPLOAD_ERROR: An unspecified error occurred
* @GST_GL_UPLOAD_UNSUPPORTED: The configuration is unsupported.
* @GST_GL_UPLOAD_RECONFIGURE: This element requires a reconfiguration.
*/
if (gst_gl_display_found (element, *display_ptr))
goto get_gl_context;
- /* If no neighboor, or application not interested, use system default */
+ /* If no neighbor, or application not interested, use system default */
display = gst_gl_display_new ();
*display_ptr = display;
* Retrieves the stored 4x4 affine transformation matrix stored in @meta in
* NDC coordinates. if @meta is NULL, an identity matrix is returned.
*
- * NDC is a left-handed coordinate sytem
+ * NDC is a left-handed coordinate system
* - x - [-1, 1] - +ve X moves right
* - y - [-1, 1] - +ve Y moves up
* - z - [-1, 1] - +ve Z moves into
gst_gl_shader_use (viewconvert->shader);
- /* FIXME: the auxillary buffer could have a different transform matrix */
+ /* FIXME: the auxiliary buffer could have a different transform matrix */
{
GstVideoAffineTransformationMeta *af_meta;
gfloat matrix[16];
|| out_tex->tex_format == GST_GL_LUMINANCE_ALPHA
|| out_width != width || out_height != height) {
/* Luminance formats are not color renderable */
- /* renderering to a framebuffer only renders the intersection of all
+ /* rendering to a framebuffer only renders the intersection of all
* the attachments i.e. the smallest attachment size */
if (!priv->out_tex[j]) {
GstGLVideoAllocationParams *params;
target = &viewconvert->priv->primary_in;
/* For frame-by-frame mode, we need to collect the 2nd eye into
- * our auxilliary buffer */
+ * our auxiliary buffer */
if (mode == GST_VIDEO_MULTIVIEW_MODE_FRAME_BY_FRAME) {
if (!GST_BUFFER_FLAG_IS_SET (input, GST_VIDEO_BUFFER_FLAG_FIRST_IN_BUNDLE))
target = &viewconvert->priv->auxilliary_in;
* @callback: (scope async): function to invoke
* @data: (closure): data to invoke @callback with
*
- * Invoke @callback with data on the window thread. @callback is guarenteed to
+ * Invoke @callback with data on the window thread. @callback is guaranteed to
* have executed when this function returns.
*
* Since: 1.4
* @data: (closure): data to invoke @callback with
* @destroy_notify: called when @data is not needed any more
*
- * Sets the draw callback called everytime gst_gl_window_draw() is called
+ * Sets the draw callback called every time gst_gl_window_draw() is called
*
* Since: 1.4
*/
* @data: (closure): data to invoke @callback with
* @destroy_notify: called when @data is not needed any more
*
- * Sets the resize callback called everytime a resize of the window occurs.
+ * Sets the resize callback called every time a resize of the window occurs.
*
* Since: 1.4
*/
* GST_GL_WINDOW_CB:
* @f: the function to cast
*
- * Cast to the currect function type for generic window callbacks
+ * Cast to the correct function type for generic window callbacks
*/
#define GST_GL_WINDOW_CB(f) ((GstGLWindowCB) (f))
* GST_GL_WINDOW_RESIZE_CB:
* @f: the function to cast
*
- * Cast to the currect function type for window resize callbacks
+ * Cast to the correct function type for window resize callbacks
*/
#define GST_GL_WINDOW_RESIZE_CB(f) ((GstGLWindowResizeCB) (f))
window_egl->window_height);
GST_DEBUG
- ("Opened Vivante FB display succesfully, resolution is (%dx%d), display %p, window %p.",
+ ("Opened Vivante FB display successfully, resolution is (%dx%d), display %p, window %p.",
window_egl->window_width, window_egl->window_height, (gpointer) display,
(gpointer) window_egl->win_id);
}
default:
{
- /* transmit messages to the parrent (ex: mouse/keyboard input) */
+ /* transmit messages to the parent (ex: mouse/keyboard input) */
HWND parent_id = window_win32->parent_win_id;
if (parent_id)
PostMessage (parent_id, uMsg, wParam, lParam);
ret->xcb_connection = XGetXCBConnection (ret->display);
if (!ret->xcb_connection) {
- GST_ERROR ("Failed to open retieve XCB connection from X11 Display");
+ GST_ERROR ("Failed to retrieve XCB connection from X11 Display");
gst_object_unref (ret);
return NULL;
}
ret->xcb_connection = XGetXCBConnection (ret->display);
if (!ret->xcb_connection) {
- GST_ERROR ("Failed to open retieve XCB connection from X11 Display");
+ GST_ERROR ("Failed to retrieve XCB connection from X11 Display");
gst_object_unref (ret);
return NULL;
}
}
static const GstH265FormatRangeExtensionProfile h265_ext_profiles[] = {
- /* FIXME 2.0: Consider ':' seperated subsampling notation for consistency
+ /* FIXME 2.0: Consider ':' separated subsampling notation for consistency
* https://gitlab.freedesktop.org/gstreamer/gst-plugins-base/merge_requests/23
*/
/* *INDENT-OFF* */
guint extra_constraints = 0;
FormatRangeExtensionProfileMatch *m;
- /* Filter out all the profiles having constraints not satisified by
+ /* Filter out all the profiles having constraints not satisfied by
* @ext_profile.
- * Then pick the one having the least extra contraints. This allow us
+ * Then pick the one having the least extra constraints. This allows us
 * to match the closest profile if the bitstream contains non-standard
 * constraints. */
if (p.max_14bit_constraint_flag != ext_profile.max_14bit_constraint_flag) {
* gst_encoding_profile_get_enabled:
* @profile: a #GstEncodingProfile
*
- * Returns: Whther @profile is enabled or not
+ * Returns: Whether @profile is enabled or not
*
* Since: 1.6
*/
const gchar *preset);
-/* Invidual stream encodingprofile API */
+/* Individual stream encodingprofile API */
GST_PBUTILS_API
GstEncodingVideoProfile * gst_encoding_video_profile_new (GstCaps *format,
* different names, for example one for transcoding in full HD, another one for
* low res, etc.. which are defined in the same encoding target.
*
- * Basically if you wan to encode a stream to send it to, say, youtube you should
+ * Basically if you want to encode a stream to send it to, say, youtube you should
* have a Youtube encoding target defined in the "online-service" category.
*
* ## Encoding target serialization format
/**
* gst_encoding_target_load_from_file:
* @filepath: (type filename): The file location to load the #GstEncodingTarget from
- * @error: If an error occured, this field will be filled in.
+ * @error: If an error occurred, this field will be filled in.
*
* Opens the provided file and returns the contained #GstEncodingTarget.
*
* valid for target names).
* @category: (allow-none): the name of the target category, like
* #GST_ENCODING_CATEGORY_DEVICE. Can be %NULL
- * @error: If an error occured, this field will be filled in.
+ * @error: If an error occurred, this field will be filled in.
*
* Searches for the #GstEncodingTarget with the given name, loads it
* and returns it.
* gst_encoding_target_save_to_file:
* @target: a #GstEncodingTarget
* @filepath: (type filename): the location to store the @target at.
- * @error: If an error occured, this field will be filled in.
+ * @error: If an error occurred, this field will be filled in.
*
* Saves the @target to the provided file location.
*
/**
* gst_encoding_target_save:
* @target: a #GstEncodingTarget
- * @error: If an error occured, this field will be filled in.
+ * @error: If an error occurred, this field will be filled in.
*
* Saves the @target to a default user-local directory.
*
break;
}
case GST_EVENT_RECONFIGURE:
- /* dont't forward */
+ /* don't forward */
gst_event_unref (event);
res = TRUE;
break;
case GST_EVENT_SEGMENT:
{
/* the newsegment values are used to clip the input samples
- * and to convert the incomming timestamps to running time so
+ * and to convert the incoming timestamps to running time so
* we can do QoS */
gst_event_copy_segment (event, &scope->priv->segment);
* Get the installer details for missing elements
*
* Returns: (transfer none) (array zero-terminated=1): An array of strings
- * containing informations about how to install the various missing elements
+ * containing information about how to install the various missing elements
* for @info to be usable. If you wish to use the strings after the life-time
* of @info, you will need to copy them.
*
}
/* If a parent is non-NULL, collected stream information will be appended to it
- * (and where the information exists, it will be overriden)
+ * (and where the information exists, it will be overridden)
*/
static GstDiscovererStreamInfo *
parse_stream_topology (GstDiscoverer * dc, const GstStructure * topology,
if (nval == NULL) {
/* FIXME : aggregate with information from main streams */
- GST_DEBUG ("Coudn't find 'next' ! might be the last entry");
+ GST_DEBUG ("Couldn't find 'next' ! might be the last entry");
} else {
GstPad *srcpad;
* line arguments passed by GStreamer to the helper application into
* arguments that are understood by the real installer.
*
- * The helper application path defined at compile time can be overriden at
+ * The helper application path defined at compile time can be overridden at
* runtime by setting the GST_INSTALL_PLUGINS_HELPER environment
* variable. This can be useful for testing/debugging purposes.
*
* - 1 if no appropriate installation candidate for any of the requested
* plugins could be found. Only return this if nothing has been
* installed (#GST_INSTALL_PLUGINS_NOT_FOUND)
- * - 2 if an error occured during the installation. The application will
+ * - 2 if an error occurred during the installation. The application will
* assume that the user will already have seen an error message by the
* installer in this case and will usually not show another one
* (#GST_INSTALL_PLUGINS_ERROR)
* any of the requested plugins could be found. Only return this if nothing
* has been installed. Return #GST_INSTALL_PLUGINS_PARTIAL_SUCCESS if
* some (but not all) of the requested plugins could be installed.
- * @GST_INSTALL_PLUGINS_ERROR: an error occured during the installation. If
+ * @GST_INSTALL_PLUGINS_ERROR: an error occurred during the installation. If
* this happens, the user has already seen an error message and another
* one should not be displayed
* @GST_INSTALL_PLUGINS_CRASHED: the installer had an unclean exit code
* indicate that everything went fine so far and the provided callback
* will be called with the result of the installation later
* @GST_INSTALL_PLUGINS_INTERNAL_FAILURE: some internal failure has
- * occured when trying to start the installer
+ * occurred when trying to start the installer
* @GST_INSTALL_PLUGINS_HELPER_MISSING: the helper script to call the
* actual installer is not installed
* @GST_INSTALL_PLUGINS_INSTALL_IN_PROGRESS: a previously-started plugin
*
* 0x01 On: One Shot Off: Loop
* 0x02 On: Root note is Set Off: No root
- * 0x04 On: Stretch is On, Off: Strech is OFF
+ * 0x04 On: Stretch is On, Off: Stretch is OFF
* 0x08 On: Disk Based Off: Ram based
* 0x10 On: ?????????? Off: ????????? (Acidizer puts that ON)
*/
* data that is within the range of strf.size, but excluding any
 * additional data within this chunk but outside strf.size.
* @strf_data: a #GstBuffer containing the additional data in the strf
- * chunk outside reach of strf.size. Ususally a palette.
+ * chunk outside reach of strf.size. Usually a palette.
* @strd_data: a #GstBuffer containing the data in the strd stream header
* chunk. Usually codec initialization data.
* @codec_name: if given, will be filled with a human-readable codec name.
It is now possible to use all the gst_rtp_buffer_get_*() or
gst_rtp_buffer_set_*() functions to read or write the different parts of the
RTP header such as the payload type, the sequence number or the RTP
- timestamp. The use can also retreive a pointer to the actual RTP payload data
+ timestamp. The user can also retrieve a pointer to the actual RTP payload data
using the gst_rtp_buffer_get_payload() function.
RTP Base Payloader Class (GstBaseRTPPayload)
if (data_len < header_len)
goto wrong_length;
- /* move to next compount packet */
+ /* move to next compound packet */
data += header_len;
data_len -= header_len;
maxsize = rtcp->map.maxsize;
/* packet->offset is now pointing to the next free offset in the buffer to
- * start a compount packet. Next we figure out if we have enough free space in
+ * start a compound packet. Next we figure out if we have enough free space in
* the buffer to continue. */
len = rtcp_packet_min_length (type);
if (len == -1)
dts = GST_BUFFER_DTS (*buffer);
duration = GST_BUFFER_DURATION (*buffer);
- /* apply last incomming timestamp and duration to outgoing buffer if
+ /* apply last incoming timestamp and duration to outgoing buffer if
* not otherwise set. */
if (!GST_CLOCK_TIME_IS_VALID (pts))
GST_BUFFER_PTS (*buffer) = priv->pts;
* Push @out_buf to the peer of @filter. This function takes ownership of
* @out_buf.
*
- * This function will by default apply the last incomming timestamp on
+ * This function will by default apply the last incoming timestamp on
* the outgoing buffer when it didn't have a timestamp already.
*
* Returns: a #GstFlowReturn.
* RTP header. If there is already a RFC 5285 header extension with a one byte
* header, the new extension will be appended.
* It will not work if there is already a header extension that does not follow
- * the mecanism described in RFC 5285 or if there is a header extension with
+ * the mechanism described in RFC 5285 or if there is a header extension with
* a two bytes header as described in RFC 5285. In that case, use
* gst_rtp_buffer_add_extension_twobytes_header()
*
* RTP header. If there is already a RFC 5285 header extension with a two bytes
* header, the new extension will be appended.
* It will not work if there is already a header extension that does not follow
- * the mecanism described in RFC 5285 or if there is a header extension with
+ * the mechanism described in RFC 5285 or if there is a header extension with
* a one byte header as described in RFC 5285. In that case, use
* gst_rtp_buffer_add_extension_onebyte_header()
*
* mostly used to get the default clock-rate and bandwidth for dynamic payload
* types specified with @media and @encoding name.
*
- * The search for @encoding_name will be performed in a case insensitve way.
+ * The search for @encoding_name will be performed in a case insensitive way.
*
* Returns: a #GstRTPPayloadInfo or NULL when no info could be found.
*/
* GST_RTSP_OK when a complete message was read.
* GST_RTSP_EEOF: when the read socket is closed
* GST_RTSP_EINTR: when more data is needed.
- * GST_RTSP_..: some other error occured.
+ * GST_RTSP_..: some other error occurred.
*/
static GstRTSPResult
build_next (GstRTSPBuilder * builder, GstRTSPMessage * message,
* @param: authentication directive
* @value: value
*
- * Setup @conn with authentication directives. This is not necesary for
+ * Setup @conn with authentication directives. This is not necessary for
* methods #GST_RTSP_AUTH_NONE and #GST_RTSP_AUTH_BASIC. For
* #GST_RTSP_AUTH_DIGEST, directives should be taken from the digest challenge
* in the WWW-Authenticate response header and can include realm, domain,
* @message_received: callback when a message was received
* @message_sent: callback when a message was sent
* @closed: callback when the connection is closed
- * @error: callback when an error occured
+ * @error: callback when an error occurred
* @tunnel_start: a client started a tunneled connection. The tunnelid of the
* connection must be saved.
* @tunnel_complete: a client finished a tunneled connection. In this callback
* you usually pair the tunnelid of this connection with the saved one using
* gst_rtsp_connection_do_tunnel().
- * @error_full: callback when an error occured with more information than
+ * @error_full: callback when an error occurred with more information than
* the @error callback.
* @tunnel_lost: callback when the post connection of a tunnel is closed.
* @tunnel_http_response: callback when an HTTP response to the GET request
/**
* GstRTSPResult:
* @GST_RTSP_OK: no error
- * @GST_RTSP_ERROR: some unspecified error occured
+ * @GST_RTSP_ERROR: some unspecified error occurred
* @GST_RTSP_EINVAL: invalid arguments were provided to a function
* @GST_RTSP_EINTR: an operation was canceled
* @GST_RTSP_ENOMEM: no memory was available for the operation
- * @GST_RTSP_ERESOLV: a host resolve error occured
+ * @GST_RTSP_ERESOLV: a host resolve error occurred
* @GST_RTSP_ENOTIMPL: function not implemented
- * @GST_RTSP_ESYS: a system error occured, errno contains more details
- * @GST_RTSP_EPARSE: a parsing error occured
+ * @GST_RTSP_ESYS: a system error occurred, errno contains more details
+ * @GST_RTSP_EPARSE: a parsing error occurred
* @GST_RTSP_EWSASTART: windows networking could not start
* @GST_RTSP_EWSAVERSION: windows networking stack has wrong version
* @GST_RTSP_EEOF: end-of-file was reached
- * @GST_RTSP_ENET: a network problem occured, h_errno contains more details
+ * @GST_RTSP_ENET: a network problem occurred, h_errno contains more details
* @GST_RTSP_ENOTIP: the host is not an IP host
- * @GST_RTSP_ETIMEOUT: a timeout occured
+ * @GST_RTSP_ETIMEOUT: a timeout occurred
* @GST_RTSP_ETGET: the tunnel GET request has been performed
* @GST_RTSP_ETPOST: the tunnel POST request has been performed
* @GST_RTSP_ELAST: last error
GST_RTSP_HDR_PROXY_REQUIRE, /* Proxy-Require R req. all */
GST_RTSP_HDR_PUBLIC, /* Public r opt. all */
GST_RTSP_HDR_RANGE, /* Range Rr opt. PLAY, PAUSE, RECORD */
GST_RTSP_HDR_REFERER, /* Referer R opt. all */
GST_RTSP_HDR_REQUIRE, /* Require R req. all */
GST_RTSP_HDR_RETRY_AFTER, /* Retry-After r opt. all */
GST_RTSP_HDR_RTP_INFO, /* RTP-Info r req. PLAY */
GST_RTSP_API
GstRTSPResult gst_rtsp_extension_receive_request (GstRTSPExtension *ext, GstRTSPMessage *req);
-/* signal emision */
+/* signal emission */
GST_RTSP_API
GstRTSPResult gst_rtsp_extension_send (GstRTSPExtension *ext, GstRTSPMessage *req,
continue;
}
- /* Basic Authorization request has only an unformated blurb following, all
+ /* Basic Authorization request has only an unformatted blurb following, all
* other variants have comma-separated name=value pairs */
if (end[0] != '\0' && field == GST_RTSP_HDR_AUTHORIZATION
&& auth_credential->scheme == GST_RTSP_AUTH_BASIC) {
* @type: a #GstMIKEYPayloadType
* @nth: payload to find
*
- * Find the @nth occurence of the payload with @type in @msg.
+ * Find the @nth occurrence of the payload with @type in @msg.
*
* Returns: the @nth #GstMIKEYPayload of @type.
*
* @GST_MIKEY_PT_ID: ID payload
* @GST_MIKEY_PT_CERT: Certificate Payload
* @GST_MIKEY_PT_CHASH: Cert hash payload
- * @GST_MIKEY_PT_V: Verfication message payload
+ * @GST_MIKEY_PT_V: Verification message payload
* @GST_MIKEY_PT_SP: Security Policy payload
* @GST_MIKEY_PT_RAND: RAND payload
* @GST_MIKEY_PT_ERR: Error payload
* @len: specifies the length of @val
* @val: specifies the value of the parameter
*
- * A Type/Length/Value field for security paramaters
+ * A Type/Length/Value field for security parameters
*/
typedef struct {
guint8 type;
* The tag entry is the tag id, the tag type,
* the count and the offset.
*
- * The offset is the on the amount of data writen so far, as one
+ * The offset is based on the amount of data written so far, as one
 * can't predict the total bytes that the tag entries will take.
 * This means those fields require being updated later.
*/
* @merge_tags: merge start and end tags. Subclasses may want to override this
* vfunc to allow prioritising of start or end tag according to user
* preference. Note that both start_tags and end_tags may be NULL. By default
- * start tags are prefered over end tags.
+ * start tags are preferred over end tags.
*
* The #GstTagDemuxClass structure. See documentation at beginning of section
* for details about what subclasses need to override and do.
}
-/* look at this page for addtional schemas
+/* look at this page for additional schemas
* http://www.sno.phy.queensu.ca/~phil/exiftool/TagNames/XMP.html
*/
static gpointer
/*
* Stores extra namespaces for array tags
- * The namespaces should be writen in the form:
+ * The namespaces should be written in the form:
*
* xmlns:XpTo="http://some.org/your/ns/name/ (next ones)"
*/
before a formal standard is set. The formal standard will use another version
or revision number if not identical to what is described in this document. The
contents in this document may change for clarifications but never for added or
-altered functionallity.
+altered functionality.
Distribution of this document is unlimited.
1.2. Abstract
j - Encryption
- This flag indicates wether or not the frame is enrypted. If set one byte
+ This flag indicates whether or not the frame is encrypted. If set one byte
indicating with which method it was encrypted will be appended to the
frame header. See section_4.26. for more information about encryption
method registration.
Some flags indicates that the frame header is extended with additional
information. This information will be added to the frame header in the same
order as the flags indicating the additions. I.e. the four bytes of
-decompressed size will preceed the encryption method byte. These additions to
+decompressed size will precede the encryption method byte. These additions to
the frame header, while not included in the frame header size but are included
in the 'frame size' field, are not subject to encryption or compression.
TCOM
The 'Composer(s)' frame is intended for the name of the composer(s). They
- are seperated with the "/" character.
+ are separated with the "/" character.
TEXT
The 'Lyricist(s)/Text writer(s)' frame is intended for the writer(s) of
- the text or lyrics in the recording. They are seperated with the "/
+ the text or lyrics in the recording. They are separated with the "/
" character.
The 'Original lyricist(s)/text writer(s)' frame is intended for the text
writer(s) of the original recording, if for example the music in the file
should be a cover of a previously released song. The text writers are
- seperated with the "/" character.
+ separated with the "/" character.
The 'Original artist(s)/performer(s)' frame is intended for the performer
(s) of the original recording, if for example the music in the file
should be a cover of a previously released song. The performers are
- seperated with the "/" character.
+ separated with the "/" character.
TPE1
The 'Lead artist(s)/Lead performer(s)/Soloist(s)/Performing group' is
- used for the main artist(s). They are seperated with the "/" character.
+ used for the main artist(s). They are separated with the "/" character.
The 'Track number/Position in set' frame is a numeric string containing
the order number of the audio-file on its original recording. This may be
extended with a "/" character and a numeric string containing the total
- numer of tracks/elements on the original recording. E.g. "4/9".
+ number of tracks/elements on the original recording. E.g. "4/9".
4.11. Comments
-This frame is indended for any kind of full text information that does not fit
+This frame is intended for any kind of full text information that does not fit
in any other frame. It consists of a frame header followed by encoding,
language and content descriptors and is ended with the actual comment as a text
string. Newline characters are allowed in the comment text string. There may be
$03 Cover (front)
$04 Cover (back)
$05 Leaflet page
- $06 Media (e.g. lable side of CD)
+ $06 Media (e.g. label side of CD)
$07 Lead artist/lead performer/soloist
$08 Artist/performer
$09 Conductor
information from another ID3v2 tag that might reside in another audio file or
alone in a binary file. It is recommended that this method is only used when
the files are stored on a CD-ROM or other circumstances when the risk of file
-seperation is low. The frame contains a frame identifier, which is the frame
+separation is low. The frame contains a frame identifier, which is the frame
that should be linked into this tag, a URL field, where a reference to the file
where the frame is given, and additional ID data, if needed. Data should be
retrieved from the first tag found in the file to which this link points. There
conjunction with this one. The frame begins, after the frame ID, size and
encoding fields, with a 'price payed' field. The first three characters of this
field contains the currency used for the transaction, encoded according to ISO-
-4217 alphabetic currency code. Concatenated to this is the actual price payed,
+4217 alphabetic currency code. Concatenated to this is the actual price paid,
as a numerical string using "." as the decimal separator. Next is an 8
character date string (YYYYMMDD) followed by a string with the name of the
seller as the last field in the frame. There may only be one "OWNE" frame in a
<Header for 'Ownership frame', ID: "OWNE">
Text encoding $xx
- Price payed <text string> $00
+ Price paid <text string> $00
Date of purch. <text string>
Seller <text string according to encoding>
frame begins, after the frame ID, size and encoding fields, with a price string
field. A price is constructed by one three character currency code, encoded
according to ISO-4217 alphabetic currency code, followed by a numerical value
-where "." is used as decimal seperator. In the price string several prices may
-be concatenated, seperated by a "/" character, but there may only be one
+where "." is used as decimal separator. In the price string several prices may
+be concatenated, separated by a "/" character, but there may only be one
currency of each type.
The price string is followed by an 8 character date string in the format
YYYYMMDD, describing for how long the price is valid. After that is a contact
64. Native American
65. Cabaret
66. New Wave
- 67. Psychadelic
+ 67. Psychedelic
68. Rave
69. Showtunes
70. Trailer
*
* The "terminological" code is derived from the local name of the language
* (e.g. "deu" for German instead of "ger"). In most scenarios, the
- * "terminological" codes are prefered over the "bibliographic" ones.
+ * "terminological" codes are preferred over the "bibliographic" ones.
*
* Language codes are case-sensitive and expected to be lower case.
*
*
* The "bibliographic" code is derived from the English name of the language
* (e.g. "ger" for German instead of "de" or "deu"). In most scenarios, the
- * "terminological" codes are prefered.
+ * "terminological" codes are preferred.
*
* Language codes are case-sensitive and expected to be lower case.
*
Specifics:
- * Use a GInstancePrivate for extensability.
+ * Use a GInstancePrivate for extensibility.
* Try to move more common video objects to video.[ch]
*
* The width, height and pixel-aspect-ratio can also be specified in the output caps.
*
- * @callback will be called after conversion, when an error occured or if conversion didn't
+ * @callback will be called after conversion, when an error occurred or if conversion didn't
* finish after @timeout. @callback will always be called from the thread default
* %GMainContext, see g_main_context_get_thread_default(). If GLib before 2.22 is used,
* this will always be the global default main context.
/* If change are not acceptable, fallback to generic pool */
if (!gst_buffer_pool_config_validate_params (config, outcaps, size, min,
max)) {
- GST_DEBUG_OBJECT (decoder, "unsuported pool, making new pool");
+ GST_DEBUG_OBJECT (decoder, "unsupported pool, making new pool");
gst_object_unref (pool);
pool = gst_video_buffer_pool_new ();
gboolean tags_changed;
GstClockTime min_pts;
- /* adjustment needed on pts, dts, segment start and stop to accomodate
+ /* adjustment needed on pts, dts, segment start and stop to accommodate
* min_pts */
GstClockTime time_adjustment;
* Request minimal value for PTS passed to handle_frame.
*
* For streams with reordered frames this can be used to ensure that there
- * is enough time to accomodate first DTS, which may be less than first PTS
+ * is enough time to accommodate first DTS, which may be less than first PTS
*
* Since: 1.6
*/
videosink->width = 0;
videosink->height = 0;
- /* 20ms is more than enough, 80-130ms is noticable */
+ /* 20ms is more than enough, 80-130ms is noticeable */
gst_base_sink_set_processing_deadline (GST_BASE_SINK (videosink),
15 * GST_MSECOND);
gst_base_sink_set_max_lateness (GST_BASE_SINK (videosink), 5 * GST_MSECOND);
* GstVideoVBIParserResult:
 * @GST_VIDEO_VBI_PARSER_RESULT_DONE: No lines were provided, or no more Ancillary data was found.
* @GST_VIDEO_VBI_PARSER_RESULT_OK: A #GstVideoAncillary was found.
- * @GST_VIDEO_VBI_PARSER_RESULT_ERROR: An error occured
+ * @GST_VIDEO_VBI_PARSER_RESULT_ERROR: An error occurred
*
* Return values for #GstVideoVBIParser
*
GST_LOG ("blend src %dx%d onto dest %dx%d @ %d,%d", src_width, src_height,
dest_width, dest_height, x, y);
- /* In case overlay is completely outside the video, dont render */
+ /* In case overlay is completely outside the video, don't render */
if (x + src_width <= 0 || y + src_height <= 0
|| x >= dest_width || y >= dest_height) {
goto nothing_to_do;
/* conversion between GStreamer color{matrix,transfer,primaries} enum and
* values defined by ISO/IEC 23001-8 and ITU-T H.273 specification.
- * Also H264 and H265 specifications follow the color{matrix,trasfer,primaries}
+ * Also H264 and H265 specifications follow the color{matrix,transfer,primaries}
* values */
GST_VIDEO_API
* @convert: a #GstVideoConverter
* @config: (transfer full): a #GstStructure
*
- * Set @config as extra configuraion for @convert.
+ * Set @config as extra configuration for @convert.
*
* If the parameters in @config can not be set exactly, this function returns
* %FALSE and will try to update as much state as possible. The new state can
* algorithm described by @method.
*
* Each component will be quantized to a multiple of @quantizer. Better
- * performance is achived when @quantizer is a power of 2.
+ * performance is achieved when @quantizer is a power of 2.
*
* @width is the width of the lines that this ditherer will handle.
*
 * code is a reduced version of:
* 0: - Read first UV word (UVU)
* Unpack U and V
- * 1: - Resued U/V from 1 (sub-sampling)
+ * 1: - Reused U/V from 1 (sub-sampling)
* 2: - Unpack remaining U value
* - Read following UV word (VUV)
* - Unpack V value
 * code is a reduced version of:
* 0: - Read first UV word (UVU)
* Unpack U and V
- * 1: - Resued U/V from 1 (sub-sampling)
+ * 1: - Reused U/V from 1 (sub-sampling)
* 2: - Unpack remaining U value
* - Read following UV word (VUV)
* - Unpack V value
* @GST_VIDEO_PACK_FLAG_NONE: No flag
* @GST_VIDEO_PACK_FLAG_TRUNCATE_RANGE: When the source has a smaller depth
* than the target format, set the least significant bits of the target
- * to 0. This is likely sightly faster but less accurate. When this flag
+ * to 0. This is likely slightly faster but less accurate. When this flag
* is not specified, the most significant bits of the source are duplicated
* in the least significant bits of the destination.
* @GST_VIDEO_PACK_FLAG_INTERLACED: The source is interlaced. The unpacked
* @buffer: the mapped buffer
* @meta: pointer to metadata if any
* @id: id of the mapped frame. the id can for example be used to
- * indentify the frame in case of multiview video.
+ * identify the frame in case of multiview video.
* @data: pointers to the plane data
* @map: mappings of the planes
*
* such as left or right eye view. This flags is set on
* any buffer that contains non-mono content - even for
* streams that contain only a single viewpoint. In mixed
- * mono / non-mono streams, the absense of the flag marks
+ * mono / non-mono streams, the absence of the flag marks
* mono buffers.
* @GST_VIDEO_BUFFER_FLAG_FIRST_IN_BUNDLE: When conveying stereo/multiview content with
* frame-by-frame methods, this flag marks the first buffer
* gst_video_mastering_display_info_is_valid:
* @minfo: a #GstVideoMasteringDisplayInfo
*
- * Checks the minumum validity of @mininfo (not theoretical validation).
+ * Checks the minimum validity of @mininfo (not theoretical validation).
*
* Each x and y chromaticity coordinate should be in the range of [0, 1]
* min_luma should be less than max_luma.
for (i = 0; i < n_planes; i++) {
gint hedge;
- /* this is the amout of pixels to add as left padding */
+ /* this is the amount of pixels to add as left padding */
hedge = GST_VIDEO_FORMAT_INFO_SCALE_WIDTH (vinfo, i, align->padding_left);
hedge *= GST_VIDEO_FORMAT_INFO_PSTRIDE (vinfo, i);
* the per rectangle sequence number, which is misleading for renderers/
* consumers, that handle global-alpha themselves. For them the
* pixel-data returned by gst_video_overlay_rectangle_get_pixels_*()
- * wont be different for different global-alpha values. In this case a
+ * won't be different for different global-alpha values. In this case a
* renderer could also use the GstBuffer pointers as a hint for changed
* pixel-data.
*
/**
* gst_video_scaler_2d:
- * @hscale: a horzontal #GstVideoScaler
+ * @hscale: a horizontal #GstVideoScaler
* @vscale: a vertical #GstVideoScaler
* @format: a #GstVideoFormat for @srcs and @dest
* @src: source pixels
g_value_init (&string, G_TYPE_STRING);
g_value_transform (value, &string);
- g_critical ("Badly formated rectangle, must contains four gint (got '%s')",
+ g_critical ("Badly formatted rectangle, must contain four gint (got '%s')",
g_value_get_string (&string));
g_value_unset (&string);
*
* We don't do synchronized mixing so this really depends on where the
* streams where punched in and what their relative offsets are against
- * eachother which we can get from the first timestamps we see.
+ * each other which we can get from the first timestamps we see.
*
* When we add a new stream (or remove a stream) the duration might
* also become invalid again and we need to post a new DURATION
{
GstChildProxyInterface *iface = g_iface;
- GST_INFO ("intializing child proxy interface");
+ GST_INFO ("initializing child proxy interface");
iface->get_child_by_index = gst_adder_child_proxy_get_child_by_index;
iface->get_children_count = gst_adder_child_proxy_get_children_count;
}
{
GstChildProxyInterface *iface = g_iface;
- GST_INFO ("intializing child proxy interface");
+ GST_INFO ("initializing child proxy interface");
iface->get_child_by_index =
gst_audio_interleave_child_proxy_get_child_by_index;
iface->get_children_count =
{
GstChildProxyInterface *iface = g_iface;
- GST_INFO ("intializing child proxy interface");
+ GST_INFO ("initializing child proxy interface");
iface->get_child_by_index = gst_audiomixer_child_proxy_get_child_by_index;
iface->get_children_count = gst_audiomixer_child_proxy_get_children_count;
}
/* transform single caps into input_caps + input_caps with the rate
* field set to our supported range. This ensures that upstream knows
- * about downstream's prefered rate(s) and can negotiate accordingly. */
+ * about downstream's preferred rate(s) and can negotiate accordingly. */
res = gst_caps_new_empty ();
n = gst_caps_get_size (caps);
for (i = 0; i < n; i++) {
/* Gaussian white noise using Box-Muller algorithm. unit variance
* normally-distributed random numbers are generated in pairs as the real
- * and imaginary parts of a compex random variable with
+ * and imaginary parts of a complex random variable with
* uniformly-distributed argument and \chi^{2}-distributed modulus.
*/
* compositor name=comp ! videoconvert ! ximagesink \
* videotestsrc ! \
* video/x-raw, framerate=\(fraction\)5/1, width=320, height=240 ! comp.
- * ]| A pipeline to demostrate bgra comping. (This does not demonstrate alpha blending).
+ * ]| A pipeline to demonstrate bgra comping. (This does not demonstrate alpha blending).
* |[
* gst-launch-1.0 videotestsrc pattern=1 ! \
* video/x-raw,format =I420, framerate=\(fraction\)10/1, width=100, height=100 ! \
gst_element_class_set_static_metadata (gstelement_klass,
"streamcombiner", "Generic",
- "Recombines streams splitted by the streamsplitter element",
+ "Recombines streams split by the streamsplitter element",
"Edward Hervey <edward.hervey@collabora.co.uk>");
}
err->message);
g_clear_error (&err);
} else {
- GST_ERROR_OBJECT (src, "Seeking to the old position faile");
+ GST_ERROR_OBJECT (src, "Seeking to the old position failed");
}
return FALSE;
}
}
if (upstream_has_meta || caps_has_meta) {
- /* Send caps immediatly, it's needed by GstBaseTransform to get a reply
+ /* Send caps immediately, it's needed by GstBaseTransform to get a reply
* from allocation query */
ret = gst_pad_set_caps (self->srcpad, overlay_caps);
gboolean shutdown; /* if we are shutting down */
GList *blocked_pads; /* pads that have set to block */
- gboolean expose_allstreams; /* Whether to expose unknow type streams or not */
+ gboolean expose_allstreams; /* Whether to expose unknown type streams or not */
GList *filtered; /* elements for which error messages are filtered */
GList *filtered_errors; /* filtered error messages */
} else {
GST_WARNING_OBJECT (dbin,
"The connection speed property %" G_GUINT64_FORMAT " of type %s"
- " is not usefull not setting it", speed,
+ " is not useful not setting it", speed,
g_type_name (G_PARAM_SPEC_TYPE (pspec)));
wrong_type = TRUE;
}
if (!dbin->use_buffering)
return;
- GST_DEBUG_OBJECT (dbin, "Reseting multiqueues buffering");
+ GST_DEBUG_OBJECT (dbin, "Resetting multiqueues buffering");
if (dbin->decode_chain) {
CHAIN_MUTEX_LOCK (dbin->decode_chain);
gst_decode_chain_reset_buffering (dbin->decode_chain);
* and only creates new elements when streams change and an existing decoder
* is not capable of handling the new format.
*
- * * supports multiple input pads for the parallel decoding of auxilliary streams
+ * * supports multiple input pads for the parallel decoding of auxiliary streams
* not muxed with the primary stream.
*
* * does not handle network stream buffering. decodebin3 expects that network stream
/* counter for input */
guint32 input_counter;
/* Current stream group_id (default : GST_GROUP_ID_INVALID) */
- /* FIXME : Needs to be resetted appropriately (when upstream changes ?) */
+ /* FIXME : Needs to be reset appropriately (when upstream changes ?) */
guint32 current_group_id;
/* End of variables protected by input_lock */
* GstDecodebin3::about-to-finish:
*
* This signal is emitted when the data for the selected URI is
- * entirely buffered and it is safe to specify anothe URI.
+ * entirely buffered and it is safe to specify another URI.
*/
gst_decodebin3_signals[SIGNAL_ABOUT_TO_FINISH] =
g_signal_new ("about-to-finish", G_TYPE_FROM_CLASS (klass),
gboolean shutdown; /* if we are shutting down */
GList *blocked_pads; /* pads that have set to block */
- gboolean expose_allstreams; /* Whether to expose unknow type streams or not */
+ gboolean expose_allstreams; /* Whether to expose unknown type streams or not */
GList *filtered; /* elements for which error messages are filtered */
GList *filtered_errors; /* filtered error messages */
} else {
GST_WARNING_OBJECT (parsebin,
"The connection speed property %" G_GUINT64_FORMAT " of type %s"
- " is not usefull not setting it", speed,
+ " is not useful not setting it", speed,
g_type_name (G_PARAM_SPEC_TYPE (pspec)));
wrong_type = TRUE;
}
* Playback can be initiated by setting the element to PLAYING state using
* gst_element_set_state(). Note that the state change will take place in
* the background in a separate thread, when the function returns playback
- * is probably not happening yet and any errors might not have occured yet.
+ * is probably not happening yet and any errors might not have occurred yet.
* Applications using playbin should ideally be written to deal with things
* completely asynchroneous.
*
* When playback has finished (an EOS message has been received on the bus)
- * or an error has occured (an ERROR message has been received on the bus) or
+ * or an error has occurred (an ERROR message has been received on the bus) or
* the user wants to play a different track, playbin should be set back to
* READY or NULL state, then the #GstPlayBin:uri property should be set to the
* new location and then playbin be set to PLAYING state again.
/* also do missed state change down to READY */
if (do_save)
save_current_group (playbin);
- /* Deactive the groups, set the uridecodebins to NULL
+ /* Deactivate the groups, set the uridecodebins to NULL
* and unref them.
*/
for (i = 0; i < 2; i++) {
* * automatic file type recognition and based on that automatic
* selection and usage of the right audio/video/subtitle demuxers/decoders
*
- * * auxilliary files - such as external subtitles and audio tracks
+ * * auxiliary files - such as external subtitles and audio tracks
* * visualisations for audio files
* * subtitle support for video files. Subtitles can be store in external
* files.
* Playback can be initiated by setting the element to PLAYING state using
* gst_element_set_state(). Note that the state change will take place in
* the background in a separate thread, when the function returns playback
- * is probably not happening yet and any errors might not have occured yet.
+ * is probably not happening yet and any errors might not have occurred yet.
* Applications using playbin3 should ideally be written to deal with things
* completely asynchroneous.
*
* When playback has finished (an EOS message has been received on the bus)
- * or an error has occured (an ERROR message has been received on the bus) or
+ * or an error has occurred (an ERROR message has been received on the bus) or
* the user wants to play a different track, playbin3 should be set back to
* READY or NULL state, then the #GstPlayBin3:uri property should be set to the
* new location and then playbin3 be set to PLAYING state again.
/* also do missed state change down to READY */
if (do_save)
save_current_group (playbin);
- /* Deactive the groups, set uridecodebin to NULL and unref it */
+ /* Deactivate the groups, set uridecodebin to NULL and unref it */
for (i = 0; i < 2; i++) {
if (playbin->groups[i].active && playbin->groups[i].valid) {
deactivate_group (playbin, &playbin->groups[i]);
/* try to change the state of an element. This function returns the element when
* the state change could be performed. When this function returns NULL an error
- * occured and the element is unreffed if @unref is TRUE. */
+ * occurred and the element is unreffed if @unref is TRUE. */
static GstElement *
try_element (GstPlaySink * playsink, GstElement * element, gboolean unref)
{
return FALSE;
}
} else {
- /* Set src ghostpad target in the harware accelerated case */
+ /* Set src ghostpad target in the hardware accelerated case */
src = gst_element_get_static_pad (renderer, "src");
if (G_UNLIKELY (!src)) {
gboolean async_pending; /* async-start has been emitted */
- gboolean expose_allstreams; /* Whether to expose unknow type streams or not */
+ gboolean expose_allstreams; /* Whether to expose unknown type streams or not */
guint64 ring_buffer_max_size; /* 0 means disabled */
};
} else {
GST_WARNING_OBJECT (decoder,
"The connection speed property %" G_GUINT64_FORMAT
- " of type %s is not usefull not setting it", speed,
+ " of type %s is not useful not setting it", speed,
g_type_name (G_PARAM_SPEC_TYPE (pspec)));
wrong_type = TRUE;
}
* @is_dynamic: TRUE if the element will create (more) pads dynamically later
* on.
*
- * Returns: FALSE if a fatal error occured while scanning.
+ * Returns: FALSE if a fatal error occurred while scanning.
*/
static gboolean
analyse_source (GstURIDecodeBin * decoder, gboolean * is_raw,
switch (gst_iterator_next (pads_iter, &item)) {
case GST_ITERATOR_ERROR:
res = FALSE;
- /* FALLTROUGH */
+ /* FALLTHROUGH */
case GST_ITERATOR_DONE:
done = TRUE;
break;
break;
case GST_ITERATOR_OK:
pad = g_value_dup_object (&item);
- /* we now officially have an ouput pad */
+ /* we now officially have an output pad */
*have_out = TRUE;
/* if FALSE, this pad has no caps and we continue with the next pad. */
/* Downstream event probe id */
gulong probe_id;
- /* TRUE if the pad saw EOS. Resetted to FALSE on STREAM_START */
+ /* TRUE if the pad saw EOS. Reset to FALSE on STREAM_START */
gboolean is_eos;
/* The last seen (i.e. current) group_id
* GstURIDecodeBin3::about-to-finish:
*
* This signal is emitted when the data for the selected URI is
- * entirely buffered and it is safe to specify anothe URI.
+ * entirely buffered and it is safe to specify another URI.
*/
gst_uri_decode_bin3_signals[SIGNAL_ABOUT_TO_FINISH] =
g_signal_new ("about-to-finish", G_TYPE_FROM_CLASS (klass),
if (slot->linked_info) {
if (slot->is_eos) {
- /* linked_info is old input which is stil linked without removal */
+ /* linked_info is old input which is still linked without removal */
GST_DEBUG_OBJECT (pad, "push actual EOS");
seqnum = gst_event_get_seqnum (event);
eos = gst_event_new_eos ();
* @is_dynamic: TRUE if the element will create (more) pads dynamically later
* on.
*
- * Returns: FALSE if a fatal error occured while scanning.
+ * Returns: FALSE if a fatal error occurred while scanning.
*/
static gboolean
analyse_source (GstURISourceBin * urisrc, gboolean * is_raw,
switch (gst_iterator_next (pads_iter, &item)) {
case GST_ITERATOR_ERROR:
res = FALSE;
- /* FALLTROUGH */
+ /* FALLTHROUGH */
case GST_ITERATOR_DONE:
done = TRUE;
break;
break;
case GST_ITERATOR_OK:
pad = g_value_dup_object (&item);
- /* we now officially have an ouput pad */
+ /* we now officially have an output pad */
*have_out = TRUE;
/* if FALSE, this pad has no caps and we continue with the next pad. */
case GST_ITERATOR_ERROR:
GST_WARNING_OBJECT (urisrc,
"Error iterating pads on source element");
- /* FALLTROUGH */
+ /* FALLTHROUGH */
case GST_ITERATOR_DONE:
done = TRUE;
break;
/* Array of channel positions, one position per channel; its first
* num_channels values are valid. They are computed out of the number
- * of channels if no positions are explicitely given. */
+ * of channels if no positions are explicitly given. */
GstAudioChannelPosition channel_positions[64];
/* If the channel_positions are in a valid GStreamer channel order, then
* @get_current_config: Gets the current configuration. All return values except
* except GST_RAW_BASE_PARSE_CONFIG_CURRENT are valid.
* @set_config_from_caps: Parses the caps and copies its information to the configuration.
- * Returns FALSE if this failed, TRUE otheriwse. Specified caps
+ * Returns FALSE if this failed, TRUE otherwise. Specified caps
* are not unref'd.
* @get_caps_from_config: Creates a new caps structure out of the information from the
* specified configuration. Ownership over the returned caps are
{
gchar *write = read;
- /* Replace all occurences of '[br]' with a newline as version 2
+ /* Replace all occurrences of '[br]' with a newline as version 2
* of the subviewer format uses this for newlines */
if (read[0] == '\0' || read[1] == '\0' || read[2] == '\0' || read[3] == '\0')
}
if (text == end || errno != 0) {
- /* error occured. pass it */
+ /* error occurred. pass it */
goto next;
}
unescaped = g_string_append_unichar (unescaped, l);
/* handle a read on a client fd,
* which either indicates a close or should be ignored
- * returns FALSE if some error occured or the client closed. */
+ * returns FALSE if some error occurred or the client closed. */
static gboolean
gst_multi_fd_sink_handle_client_read (GstMultiFdSink * sink,
GstTCPClient * client)
* When the sending returns a partial buffer we stop sending more data as
* the next send operation could block.
*
- * This functions returns FALSE if some error occured.
+ * This function returns FALSE if some error occurred.
*/
static gboolean
gst_multi_fd_sink_handle_client_write (GstMultiFdSink * sink,
/* Get the number of buffers from the buffer queue needed to satisfy
* the maximum max in the configured units.
* If units are not BUFFERS, and there are insufficient buffers in the
- * queue to satify the limit, return len(queue) + 1 */
+ * queue to satisfy the limit, return len(queue) + 1 */
gint
get_buffers_max (GstMultiHandleSink * sink, gint64 max)
{
* is satisfied
*/
/* count the amount of data in the buffers and return the index
- * that satifies the given limits.
+ * that satisfies the given limits.
*
* Returns: index @idx in the buffer queue so that the given limits are
* satisfied. TRUE if all the limits could be satisfied, FALSE if not
/* handle a read on a client socket,
* which either indicates a close or should be ignored
- * returns FALSE if some error occured or the client closed. */
+ * returns FALSE if some error occurred or the client closed. */
static gboolean
gst_multi_socket_sink_handle_client_read (GstMultiSocketSink * sink,
GstSocketClient * client)
* When the sending returns a partial buffer we stop sending more data as
* the next send operation could block.
*
- * This functions returns FALSE if some error occured.
+ * This function returns FALSE if some error occurred.
*/
static gboolean
gst_multi_socket_sink_handle_client_write (GstMultiSocketSink * sink,
size = GST_READ_UINT32_BE (data);
if (size + offset >= G_MAXINT64)
break;
- /* check compatible brands rather than ever expaning major brands above */
+ /* check compatible brands rather than ever expanding major brands above */
if ((STRNCMP (&data[4], "ftyp", 4) == 0) && (size >= 16)) {
data = gst_type_find_peek (tf, offset, size);
if (data == NULL)
"endianness = (int) LITTLE_ENDIAN");
#define TIFF_LE_CAPS (gst_static_caps_get(&tiff_le_caps))
static void
-tiff_type_find (GstTypeFind * tf, gpointer ununsed)
+tiff_type_find (GstTypeFind * tf, gpointer unused)
{
const guint8 *data = gst_type_find_peek (tf, 0, 8);
guint8 le_header[4] = { 0x49, 0x49, 0x2A, 0x00 };
static GstStaticCaps exr_caps = GST_STATIC_CAPS ("image/x-exr");
#define EXR_CAPS (gst_static_caps_get(&exr_caps))
static void
-exr_type_find (GstTypeFind * tf, gpointer ununsed)
+exr_type_find (GstTypeFind * tf, gpointer unused)
{
const guint8 *data = gst_type_find_peek (tf, 0, 8);
((c) == ' ' || (c) == '\r' || (c) == '\n' || (c) == 't')
static void
-pnm_type_find (GstTypeFind * tf, gpointer ununsed)
+pnm_type_find (GstTypeFind * tf, gpointer unused)
{
const gchar *media_type = NULL;
DataScanCtx c = { 0, NULL, 0 };
#define SDS_CAPS (gst_static_caps_get(&sds_caps))
static void
-sds_type_find (GstTypeFind * tf, gpointer ununsed)
+sds_type_find (GstTypeFind * tf, gpointer unused)
{
const guint8 *data = gst_type_find_peek (tf, 0, 4);
guint8 mask[4] = { 0xFF, 0xFF, 0x80, 0xFF };
#define IRCAM_CAPS (gst_static_caps_get(&ircam_caps))
static void
-ircam_type_find (GstTypeFind * tf, gpointer ununsed)
+ircam_type_find (GstTypeFind * tf, gpointer unused)
{
const guint8 *data = gst_type_find_peek (tf, 0, 4);
guint8 mask[4] = { 0xFF, 0xFF, 0xF8, 0xFF };
#define MATROSKA_CAPS (gst_static_caps_get(&matroska_caps))
static void
-matroska_type_find (GstTypeFind * tf, gpointer ununsed)
+matroska_type_find (GstTypeFind * tf, gpointer unused)
{
GstTypeFindProbability prob;
GstMatroskaInfo info = { 0, };
* not contain the partition pack key.
*/
static void
-mxf_type_find (GstTypeFind * tf, gpointer ununsed)
+mxf_type_find (GstTypeFind * tf, gpointer unused)
{
static const guint8 partition_pack_key[] =
{ 0x06, 0x0e, 0x2b, 0x34, 0x02, 0x05, 0x01, 0x01, 0x0d, 0x01, 0x02, 0x01,
}
/*
- * This is an incomplete matrix of in formats and a score for the prefered output
+ * This is an incomplete matrix of in formats and a score for the preferred output
* format.
*
* out: RGB24 RGB16 ARGB AYUV YUV444 YUV422 YUV420 YUV411 YUV410 PAL GRAY
* PAL 1 3 2 6 4 6 7 8 9 0 10
* GRAY 1 4 3 2 1 5 6 7 8 9 0
*
- * PAL or GRAY are never prefered, if we can we would convert to PAL instead
+ * PAL or GRAY are never preferred, if we can we would convert to PAL instead
* of GRAY, though
- * less subsampling is prefered and if any, preferably horizontal
+ * less subsampling is preferred and if any, preferably horizontal
* We would like to keep the alpha, even if we would need to to colorspace conversion
* or lose depth.
*/
gst_structure_set (s3, "framerate", GST_TYPE_FRACTION, 0, 1, NULL);
}
} else if (max_num != 0 || max_denom != 1) {
- /* We can provide everything upto the maximum framerate at the src */
+ /* We can provide everything up to the maximum framerate at the src */
gst_structure_set (s2, "framerate", GST_TYPE_FRACTION_RANGE,
0, 1, max_num, max_denom, NULL);
}
(videorate->segment.rate < 0.0 && intime <= videorate->next_ts)) {
GstFlowReturn r;
- /* The buffer received from basetransform is garanteed to be writable.
+ /* The buffer received from basetransform is guaranteed to be writable.
* It just needs to be reffed so the buffer won't be consumed once pushed and
* GstBaseTransform can get its reference back. */
if ((r = gst_video_rate_push_buffer (videorate,
}
while (diff1 < diff2);
- /* if we outputed the first buffer more then once, we have dups */
+ /* if we outputted the first buffer more than once, we have dups */
if (count > 1) {
videorate->dup += count - 1;
if (!videorate->silent)
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, PROP_KY,
g_param_spec_int ("ky", "Zoneplate 1st order y phase",
- "Zoneplate 1st order y phase, for generating contant vertical frequencies",
+ "Zoneplate 1st order y phase, for generating constant vertical frequencies",
G_MININT32, G_MAXINT32, 0,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, PROP_KT,
#!/bin/sh
#
-# Check that the code follows a consistant code style
+# Check that the code follows a consistent code style
#
# Check for existence of indent, and error out if not present.
echo "*** If you have an old version installed, it is best to remove it, although"
echo "*** you may also be able to get things to work by modifying LD_LIBRARY_PATH"],
[ echo "*** The test program failed to compile or link. See the file config.log for the"
- echo "*** exact error that occured. This usually means Ogg was incorrectly installed"
+ echo "*** exact error that occurred. This usually means Ogg was incorrectly installed"
echo "*** or that you have moved Ogg since it was installed." ])
CFLAGS="$ac_save_CFLAGS"
LIBS="$ac_save_LIBS"
echo "*** If you have an old version installed, it is best to remove it, although"
echo "*** you may also be able to get things to work by modifying LD_LIBRARY_PATH"],
[ echo "*** The test program failed to compile or link. See the file config.log for the"
- echo "*** exact error that occured. This usually means Vorbis was incorrectly installed"
+ echo "*** exact error that occurred. This usually means Vorbis was incorrectly installed"
echo "*** or that you have moved Vorbis since it was installed." ])
CFLAGS="$ac_save_CFLAGS"
LIBS="$ac_save_LIBS"
align = 15;
mem->ximage->data = g_malloc (allocsize + align);
GST_LOG_OBJECT (ximagesink,
- "non-XShm image size is %" G_GSIZE_FORMAT " (alloced: %u), width %d, "
+ "non-XShm image size is %" G_GSIZE_FORMAT " (allocated: %u), width %d, "
"stride %d", mem->size, allocsize, width, mem->ximage->bytes_per_line);
XSync (xcontext->disp, FALSE);
/* We are not converting the pointer coordinates as there's no hardware
scaling done here. The only possible scaling is done by videoscale and
- videoscale will have to catch those events and tranform the coordinates
+ videoscale will have to catch those events and transform the coordinates
to match the applied scaling. So here we just add the offset if the image
is centered in the window. */
* @heightmm: the height in millimeters of Display @disp
* @par: the pixel aspect ratio calculated from @width, @widthmm and @height,
* @heightmm ratio
- * @use_xshm: used to known wether of not XShm extension is usable or not even
+ * @use_xshm: used to know whether or not XShm extension is usable or not even
* if the Extension is present
* @caps: the #GstCaps that Display @disp can accept
*
- * Structure used to store various informations collected/calculated for a
+ * Structure used to store various information collected/calculated for a
* Display.
*/
struct _GstXContext
* through the #GstVideoOverlay interface
* @gc: the Graphical Context of Window @win
*
- * Structure used to store informations about a Window.
+ * Structure used to store information about a Window.
*/
struct _GstXWindow
{
* @heightmm: the height in millimeters of Display @disp
* @par: the pixel aspect ratio calculated from @width, @widthmm and @height,
* @heightmm ratio
- * @use_xshm: used to known wether of not XShm extension is usable or not even
+ * @use_xshm: used to know whether or not XShm extension is usable or not even
* if the Extension is present
* @xv_port_id: the XVideo port ID
* @im_format: used to store at least a valid format for XShm calls checks
* @channels_list: list of #GstColorBalanceChannels
* @caps: the #GstCaps that Display @disp can accept
*
- * Structure used to store various informations collected/calculated for a
+ * Structure used to store various information collected/calculated for a
* Display.
*/
struct _GstXvContext
* through the #GstVideoOverlay interface
* @gc: the Graphical Context of Window @win
*
- * Structure used to store informations about a Window.
+ * Structure used to store information about a Window.
*/
struct _GstXWindow
{
GST_TAG_LICENSE_TRANSLATIONS_DICT="$(top_srcdir)/gst-libs/gst/tag/license-translations.dict"
-# ths core dumps of some machines have PIDs appended
+# the core dumps of some machines have PIDs appended
CLEANFILES = core.* test-registry.*
clean-local: clean-local-check
fail_unless (gst_bin_add (GST_BIN (pipe), decodebin));
/* to simulate the buffering scenarios we stuff 2 multiqueues inside
- * decodebin. This is hacky, but sould make decodebin handle its buffering
+ * decodebin. This is hacky, but should make decodebin handle its buffering
* messages all the same */
mq0 = gst_element_factory_make ("multiqueue", NULL);
mq1 = gst_element_factory_make ("multiqueue", NULL);
fail_unless_equals_int (gst_element_set_state (pipe, GST_STATE_PAUSED),
GST_STATE_CHANGE_ASYNC);
- /* currently we shoud have no buffering messages */
+ /* currently we should have no buffering messages */
msg = gst_bus_poll (GST_ELEMENT_BUS (pipe), GST_MESSAGE_BUFFERING, 0);
fail_unless (msg == NULL);
gst_object_unref (originpool);
/* At this point the gl pool contains all its buffers. We can
- * inactivate it to release the textures. Note that only the gl
+ * deactivate it to release the textures. Note that only the gl
* pool can release the textures properly because it has a
* reference on the gl context. */
fail_unless (gst_buffer_pool_set_active (pool, FALSE));
fail_unless_read ("client 1", pfd1[0], 16, "deadbee00000009");
/* second client only bursts 50 bytes = 4 buffers (we get 4 buffers since
- * the max alows it) */
+ * the max allows it) */
GST_DEBUG ("Reading from client 2");
fail_unless_read ("client 2", pfd2[0], 16, "deadbee00000006");
fail_unless_read ("client 2", pfd2[0], 16, "deadbee00000007");
g_value_init (&plane_strides_array, GST_TYPE_ARRAY);
if (set_properties) {
- /* When properties are explicitely set, we use Y444 as video format,
+ /* When properties are explicitly set, we use Y444 as video format,
* so in that case, plane stride values are all the same */
plane_offsets[0] = properties_ctx.plane_size * 0;
plane_offsets[1] = properties_ctx.plane_size * 1;
fail_unless_equals_int (g_list_length (buffers), 0);
assert_videorate_stats (videorate, "first buffer", 1, 0, 0, 0);
- /* second buffer; inbetween second and third output frame's timestamp */
+ /* second buffer; in between second and third output frame's timestamp */
second = gst_buffer_new_and_alloc (4);
GST_BUFFER_TIMESTAMP (second) = GST_SECOND * 3 / 50;
GST_BUFFER_OFFSET (second) = g_rand_int (rand);
}
{
- <ALSA unitialized access>
+ <ALSA uninitialized access>
Memcheck:Cond
obj:/usr/lib/libasound.so.*
obj:/usr/lib/libasound.so.*
...
}
{
- <unitialized values in nvidia driver>
+ <uninitialized values in nvidia driver>
Memcheck:Cond
obj:*libnvidia-glcore.so.*
}
expected[i]);
}
- /* now test the [0, 1]^3 matrix and update the test values acoordingly */
+ /* now test the [0, 1]^3 matrix and update the test values accordingly */
gst_gl_set_affine_transformation_meta_from_ndc (aff_meta, y_invert);
expected[1] = 0.;
"2D");
gst_caps_unref (out_caps);
- /* try setting the wrong type first tho */
+ /* try setting the wrong type first */
out_caps = gst_caps_from_string ("video/x-raw(memory:GLMemory),"
"format=RGBA,width=10,height=10,texture-target=RECTANGLE");
gst_gl_upload_set_caps (upload, in_caps, out_caps);
"duration", G_TYPE_UINT64, (guint64) 10 * GST_MSECOND, NULL)));
/* When a buffer is pushed, an updated (and more accurate) segment event
- * should aslo be sent. */
+ * should also be sent. */
gst_harness_push (h, gst_rtp_buffer_new_allocate (0, 0, 0));
/* Verify that setup events are sent before gap event */
* it should save these timestamps as they should affect the next segment event
* being pushed by the depayloader. a new segment event is not pushed by the
* depayloader until a flush_stop event and a succeeding segment event are
- * received. of course the intial event are unaffected, as is the incoming caps
+ * received. of course the initial events are unaffected, as is the incoming caps
* event.
*/
GST_START_TEST (rtp_base_depayload_npt_test)
* this rate as it should affect the next segment event being pushed by the
* depayloader. a new segment event is not pushed by the depayloader until a
* flush_stop event and a succeeding segment event are received. of course the
- * intial event are unaffected, as is the incoming caps event.
+ * initial events are unaffected, as is the incoming caps event.
*/
GST_START_TEST (rtp_base_depayload_play_scale_test)
{
* this rate as it should affect the next segment event being pushed by the
* depayloader. a new segment event is not pushed by the depayloader until a
* flush_stop event and a succeeding segment event are received. of course the
- * intial event are unaffected, as is the incoming caps event.
+ * initial events are unaffected, as is the incoming caps event.
*/
GST_START_TEST (rtp_base_depayload_play_speed_test)
{
GST_END_TEST
/* when a depayloader receives new caps events with npt-start, npt-stop and
* clock-base it should save these timestamps as they should affect the next
- * segment event being pushed by the depayloader. the produce segment should
- * make the positon of the stream reflect the postion form clock-base instead
+ * segment event being pushed by the depayloader. the produced segment should
+ * make the position of the stream reflect the position from clock-base instead
* of reflecting the running time (for RTSP).
*/
GST_START_TEST (rtp_base_depayload_clock_base_test)
/* push two buffers to the payloader which should successfully payload them
* into RTP packets. the first packet will have a random rtptime and sequence
* number, but the last packet should have an rtptime incremented by
- * DEFAULT_CLOCK_RATE and a sequence number incremented by one becuase the
+ * DEFAULT_CLOCK_RATE and a sequence number incremented by one because the
* packets are sequential. besides the two payloaded RTP packets there should
* be the three events initial events: stream-start, caps and segment.
*/
GST_END_TEST;
-/* validate that an upstream event different from GstRTPCollision is succesfully
+/* validate that an upstream event different from GstRTPCollision is successfully
* forwarded to upstream elements. in this test a caps reconfiguration event is
* pushed upstream to validate the behaviour.
*/
* timestamp updates that are not based on input buffer offsets as expected.
* lastly two buffers are pushed and the stats property retrieved after each
* time. here it is expected that the sequence numbers values are restarted at
- * the inital value while the timestamps and running-time reflect the input
+ * the initial value while the timestamps and running-time reflect the input
* buffers.
*/
GST_START_TEST (rtp_base_payload_property_stats_test)
tunnel_lost
};
-/* setts up a new tunnel, then disconnects the read connection and creates it
+/* sets up a new tunnel, then disconnects the read connection and creates it
* again */
GST_START_TEST (test_rtspconnection_tunnel_setup)
{
GST_END_TEST;
-/* setts up a new tunnel, starting with the read channel,
+/* sets up a new tunnel, starting with the read channel,
* then disconnects the read connection and creates it again
* ideally this test should be merged with test_rtspconnection_tunnel_setup but
* but it became quite messy */
/* test data */
list = gst_tag_list_new (GST_TAG_TITLE, "test title",
- GST_TAG_DESCRIPTION, "test decription",
+ GST_TAG_DESCRIPTION, "test description",
GST_TAG_KEYWORDS, "keyword1", GST_TAG_KEYWORDS, "keyword2", NULL);
buf = gst_tag_list_to_xmp_buffer (list, FALSE, NULL);
/* check the content */
fail_unless (g_strrstr_len (text, len, "<?xpacket begin") == text);
fail_unless (g_strrstr_len (text, len, ">test title<") != NULL);
- fail_unless (g_strrstr_len (text, len, ">test decription<") != NULL);
+ fail_unless (g_strrstr_len (text, len, ">test description<") != NULL);
fail_unless (g_strrstr_len (text, len, ">keyword1<") != NULL);
fail_unless (g_strrstr_len (text, len, ">keyword2<") != NULL);
fail_unless (g_strrstr_len (text, len, "<?xpacket end") != NULL);
fail_unless (gst_pad_push_event (mysrcpad, gst_event_new_segment (&segment)));
/* push a buffer, to have the segment attached to it.
- * unfortunatelly this buffer can't be decoded as it isn't a keyframe */
+ * unfortunately this buffer can't be decoded as it isn't a keyframe */
buffer = create_test_buffer (i++);
GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DELTA_UNIT);
fail_unless (gst_pad_push (mysrcpad, buffer) == GST_FLOW_OK);
* every number that is divisible by 10 is set as a discont,
* if it is divisible by 20 it is also a keyframe
*
- * The logic here is that hte current i is the target, and then
+ * The logic here is that the current i is the target, and then
* it pushes buffers from 'target - 10' up to target.
*/
for (j = MAX (target - 10, 0); j < target; j++) {
* every number that is divisible by 10 is set as a discont,
* if it is divisible by 20 it is also a keyframe
*
- * The logic here is that hte current i is the target, and then
+ * The logic here is that the current i is the target, and then
* it pushes buffers from 'target - 10' up to target.
*/
for (j = MAX (target - 10, 0); j < target; j++) {
g_object_set (data->pipeline, "suburi", uri, NULL);
g_free (uri);
} else {
- g_warning ("Could not parse auxilliary file argument. Ignoring");
+ g_warning ("Could not parse auxiliary file argument. Ignoring");
}
}
GINT_TO_POINTER (6));
break;
case 6:
- g_print ("quiting\n");
+ g_print ("quitting\n");
g_main_loop_quit (loop);
break;
default:
/* get the snapshot buffer format now. We set the caps on the appsink so
* that it can only be an rgb buffer. The only thing we have not specified
- * on the caps is the height, which is dependant on the pixel-aspect-ratio
+ * on the caps is the height, which is dependent on the pixel-aspect-ratio
* of the source material */
caps = gst_sample_get_caps (sample);
if (!caps) {
/* get the snapshot buffer format now. We set the caps on the appsink so
* that it can only be an rgb buffer. The only thing we have not specified
- * on the caps is the height, which is dependant on the pixel-aspect-ratio
+ * on the caps is the height, which is dependent on the pixel-aspect-ratio
* of the source material */
#if 0
caps = GST_BUFFER_CAPS (buffer);
/* get the snapshot buffer format now. We set the caps on the appsink so
* that it can only be an rgb buffer. The only thing we have not specified
- * on the caps is the height, which is dependant on the pixel-aspect-ratio
+ * on the caps is the height, which is dependent on the pixel-aspect-ratio
* of the source material */
caps = gst_sample_get_caps (sample);
if (!caps) {
g_printerr ("Got EOS\n");
break;
default:
- g_printerr ("Got unexpected %s messge\n", GST_MESSAGE_TYPE_NAME (msg));
+ g_printerr ("Got unexpected %s message\n", GST_MESSAGE_TYPE_NAME (msg));
break;
}
gst_message_unref (msg);