- rtpjitterbuffer has improved end-of-stream handling
-- rtpmp4vpay will be prefered over rtpmp4gpay for MPEG-4 video in
+- rtpmp4vpay will be preferred over rtpmp4gpay for MPEG-4 video in
autoplugging scenarios now
- rtspsrc now allows applications to send RTSP SET_PARAMETER and
used in order to re-produce a specific build. To set a manifest, you
can set manifest = 'my_manifest.xml' in your configuration file, or
use the --manifest command line option. The command line option will
- take precendence over anything specific in the configuration file.
+ take precedence over anything specified in the configuration file.
- The new build-deps command can be used to build only the
dependencies of a recipe, without the recipe itself.
"writable": true
},
"min-quantizer": {
- "blurb": "Mininum (best quality) quantizer",
+ "blurb": "Minimum (best quality) quantizer",
"construct": false,
"construct-only": false,
"default": "0",
"url": "Unknown package origin"
},
"autoconvert": {
- "description": "Selects convertor element based on caps",
+ "description": "Selects converter element based on caps",
"elements": {
"autoconvert": {
"author": "Olivier Crete <olivier.crete@collabora.com>",
"GObject"
],
"klass": "Generic/Bin",
- "long-name": "Select convertor based on caps",
+ "long-name": "Select converter based on caps",
"name": "autoconvert",
"pad-templates": {
"sink": {
},
"autovideoconvert": {
"author": "Benjamin Gaignard <benjamin.gaignard@stericsson.com>",
- "description": "Selects the right color space convertor based on the caps",
+ "description": "Selects the right color space converter based on the caps",
"hierarchy": [
"GstAutoVideoConvert",
"GstBin",
"GObject"
],
"klass": "Generic/Bin",
- "long-name": "Select color space convertor based on caps",
+ "long-name": "Select color space converter based on caps",
"name": "autovideoconvert",
"pad-templates": {
"sink": {
"value": "1"
},
{
- "desc": "Calculate and cache color replacement values on first occurence",
+ "desc": "Calculate and cache color replacement values on first occurrence",
"name": "cached",
"value": "2"
}
"writable": true
},
"signal-fps-measurements": {
- "blurb": "If the fps-measurements signal should be emited.",
+ "blurb": "If the fps-measurements signal should be emitted.",
"construct": false,
"construct-only": false,
"default": "false",
"writable": true
},
"frequency": {
- "blurb": "Center frequency to tune into. Measured in kHz for the satellite distribution standars and Hz for all the rest",
+ "blurb": "Center frequency to tune into. Measured in kHz for the satellite distribution standards and Hz for all the rest",
"construct": false,
"construct-only": false,
"default": "0",
"writable": true
},
"frequency": {
- "blurb": "Center frequency to tune into. Measured in kHz for the satellite distribution standars and Hz for all the rest",
+ "blurb": "Center frequency to tune into. Measured in kHz for the satellite distribution standards and Hz for all the rest",
"construct": false,
"construct-only": false,
"default": "0",
},
"properties": {
"brightness": {
- "blurb": "Brightnesss of image",
+ "blurb": "Brightness of image",
"construct": false,
"construct-only": false,
"default": "0.75",
"writable": true
},
"connector-properties": {
- "blurb": "Additionnal properties for the connector",
+ "blurb": "Additional properties for the connector",
"construct": false,
"construct-only": false,
"type-name": "GstStructure",
"writable": true
},
"plane-properties": {
- "blurb": "Additionnal properties for the plane",
+ "blurb": "Additional properties for the plane",
"construct": false,
"construct-only": false,
"type-name": "GstStructure",
"writable": true
},
"unit-coeff-elim": {
- "blurb": "How agressively small-unit picture blocks should be skipped",
+ "blurb": "How aggressively small-unit picture blocks should be skipped",
"construct": false,
"construct-only": false,
"default": "0",
"value": "0"
},
{
- "desc": "Dewarped image is splitted in two images displayed one below the other",
+ "desc": "Dewarped image is split into two images displayed one below the other",
"name": "double-panorama",
"value": "1"
},
{
- "desc": "Dewarped image is splitted in four images dysplayed as a quad view",
+ "desc": "Dewarped image is split into four images displayed as a quad view",
"name": "quad-view",
"value": "2"
}
"GObject"
],
"klass": "Filter/Effect/Video",
- "long-name": "Retinex image colour enhacement",
+ "long-name": "Retinex image colour enhancement",
"name": "retinex",
"pad-templates": {
"sink": {
"writable": true
},
"remove": {
- "blurb": "Set to true to remove silence from the stream, false otherwhise",
+ "blurb": "Set to true to remove silence from the stream, false otherwise",
"construct": false,
"construct-only": false,
"default": "false",
"writable": true
},
"bonding-addresses": {
- "blurb": "Comma (,) seperated list of <address>:<port> to send to. ",
+ "blurb": "Comma (,) separated list of <address>:<port> to send to. ",
"construct": false,
"construct-only": false,
"default": "localhost:5004",
"writable": true
},
"repeat": {
- "blurb": "Whether to repeat specified tone indefinitly",
+ "blurb": "Whether to repeat specified tone indefinitely",
"construct": false,
"construct-only": false,
"default": "false",
"writable": true
},
"poll-timeout": {
- "blurb": "Return poll wait after timeout miliseconds (-1 = infinite)",
+ "blurb": "Return poll wait after timeout milliseconds (-1 = infinite)",
"construct": false,
"construct-only": false,
"default": "-1",
"writable": true
},
"poll-timeout": {
- "blurb": "Return poll wait after timeout miliseconds (-1 = infinite)",
+ "blurb": "Return poll wait after timeout milliseconds (-1 = infinite)",
"construct": false,
"construct-only": false,
"default": "-1",
"writable": true
},
"drop": {
- "blurb": "Drop data untill valid configuration data is received either in the stream or through caps",
+ "blurb": "Drop data until valid configuration data is received either in the stream or through caps",
"construct": true,
"construct-only": false,
"default": "true",
"GObject"
],
"klass": "Generic/Audio",
- "long-name": "Accoustic Echo Canceller probe",
+ "long-name": "Acoustic Echo Canceller probe",
"name": "webrtcechoprobe",
"pad-templates": {
"sink": {
g_object_class_install_property (gobject_class, PROP_MIN_QUANTIZER,
g_param_spec_uint ("min-quantizer", "Minimum (best quality) quantizer",
- "Mininum (best quality) quantizer",
+ "Minimum (best quality) quantizer",
0, G_MAXUINT, DEFAULT_MIN_QUANTIZER,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
}
if (upstream_has_meta || caps_has_meta) {
- /* Send caps immediatly, it's needed by GstBaseTransform to get a reply
+ /* Send caps immediately, it's needed by GstBaseTransform to get a reply
* from allocation query */
ret = gst_pad_set_caps (render->srcpad, overlay_caps);
render->window_height = height;
gst_ass_render_update_render_size (render);
- /* For backward compatbility, we will prefer bliting if downstream
+ * For backward compatibility, we will prefer blitting if downstream
* allocation does not support the meta. In other case we will prefer
* attaching, and will fail the negotiation in the unlikely case we are
* force to blit, but format isn't supported. */
}
if (!display) {
- GST_DEBUG ("No visible text, skiping rendering");
+ GST_DEBUG ("No visible text, skipping rendering");
return;
}
* @overlay: The #GstCeaCcOverlay
* @user_data: The #GstMpegVideoCCData to decode
*
- * decode closed caption data and render when neccesary
+ * decode closed caption data and render when necessary
* in struct GstMpegVideoCCData type's user_data's data field, 3 byte's data construct 1 cc_data_pkt
*
* A cc_data_pkt is 3 bytes as follows:
* $return
- * Set of data services $rd will be decode after the change.
+ * Set of data services $rd will be decoded after the change.
* Can be zero if the sampling parameters are invalid or some
- * other error occured.
+ * other error occurred.
*/
/* Attn: strict must be int for compatibility with libzvbi 0.2 (-1 == 0) */
vbi_service_set
*/
typedef struct {
/**
- * A @ref VBI_SLICED_ symbol identifying the data service. Under cirumstances
+ * A @ref VBI_SLICED_ symbol identifying the data service. Under circumstances
* (see VBI_SLICED_TELETEXT_B) this can be a set of VBI_SLICED_ symbols.
*/
uint32_t id;
"Precalculate lookup table (takes a long time getting READY)",
"precalculated"},
{GST_LCMS_LOOKUP_METHOD_CACHED,
- "Calculate and cache color replacement values on first occurence",
+ "Calculate and cache color replacement values on first occurrence",
"cached"},
{0, NULL, NULL},
};
#ifdef CURL_VERSION_HTTP2
GSTCURL_HTTP_VERSION_2_0,
#endif
- GSTCURL_HTTP_NOT, /* For future use, incase not HTTP protocol! */
+ GSTCURL_HTTP_NOT, /* For future use if HTTP protocol not used! */
GSTCURL_HTTP_VERSION_MAX
} GstCurlHttpVersion;
* When requested (with GST_SEEK_FLAG_TRICKMODE_KEY_UNIT) and if the format
* is supported (ISOBMFF profiles), dashdemux can download only keyframes
* in order to provide fast forward/reverse playback without exceeding the
- * available bandwith/cpu/memory usage.
+ * available bandwidth/cpu/memory usage.
*
* This is done in two parts:
* 1) Parsing ISOBMFF atoms to detect the location of keyframes and only
*
* The main reason for doing keyframe-only downloads is for trick-modes
* (i.e. being able to do fast reverse/forward playback with limited
- * bandwith/cpu/memory).
+ * bandwidth/cpu/memory).
*
* Downloading all keyframes might not be the optimal solution, especially
* at high playback rates, since the time taken to download the keyframe
* buffering_level.
*
* The smaller the buffering level is (i.e. the closer we are between
- * current and downstream), the more aggresively we skip forward (and
+ * current and downstream), the more aggressively we skip forward (and
* guarantee the keyframe will be downloaded, decoded and displayed in
- * time). And the higher the buffering level, the least aggresivelly
+ * time). And the higher the buffering level, the less aggressively
* we need to skip forward (and therefore display more frames per
* second).
*
- * Right now the threshold for agressive switching is set to 3
+ * Right now the threshold for aggressive switching is set to 3
* average_download_time. Below that buffering level we set the target time
* to at least 3 average_download_time distance beyond the
* qos_earliest_time.
if (stream == NULL)
return NULL;
- /* if bitstreamSwitching is true we dont need to swich pads on resolution change */
+ /* if bitstreamSwitching is true we don't need to switch pads on resolution change */
if (!gst_mpd_client_get_bitstream_switching_flag (stream)) {
width = gst_mpd_client_get_video_stream_width (stream);
height = gst_mpd_client_get_video_stream_height (stream);
if (stream == NULL)
return NULL;
- /* if bitstreamSwitching is true we dont need to swich pads on rate/channels change */
+ /* if bitstreamSwitching is true we don't need to switch pads on rate/channels change */
if (!gst_mpd_client_get_bitstream_switching_flag (stream)) {
channels = gst_mpd_client_get_audio_stream_num_channels (stream);
rate = gst_mpd_client_get_audio_stream_rate (stream);
"MUST SKIP to at least %" GST_TIME_FORMAT " (was %" GST_TIME_FORMAT ")",
GST_TIME_ARGS (ret), GST_TIME_ARGS (min_position));
} else if (diff < 4 * dashstream->average_download_time) {
- /* Go forward a bit less aggresively (and at most 1s forward) */
+ /* Go forward a bit less aggressively (and at most 1s forward) */
ret = gst_segment_position_from_running_time (&stream->segment,
GST_FORMAT_TIME, min_running + MIN (GST_SECOND,
2 * dashstream->average_download_time));
* to decide which one to use, so we have to resolve them all here
*/
for (list = period->period->AdaptationSets; list;
- /* advanced explicitely below */ ) {
+ /* advanced explicitly below */ ) {
GstAdaptationSetNode *adapt_set = (GstAdaptationSetNode *) list->data;
GList *new_adapt_sets = NULL, *prev, *next;
* application, that means it won't handle navigation events and won't resize
* the #GstDfbVideoSink:surface to fit video
* frames geometry. Application has to implement the necessary code to grab
- * informations about the negotiated geometry and resize there
+ * information about the negotiated geometry and resize there
* #GstDfbVideoSink:surface accordingly.
*
* For both modes the element implements a buffer pool allocation system to
void gst_dtls_connection_set_send_callback(GstDtlsConnection *, GClosure *);
/*
- * Processes data that has been recevied, the transformation is done in-place.
+ * Processes data that has been received, the transformation is done in-place.
* Returns the length of the plaintext data that was decoded, if no data is available, 0<= will be returned.
*/
gint gst_dtls_connection_process(GstDtlsConnection *, gpointer ptr, gint len);
if (hlsdemux->current_variant != NULL) {
gint i;
- //#warning FIXME: Synching fragments across variants
+ //#warning FIXME: Syncing fragments across variants
// should be done based on media timestamps, and
// discont-sequence-numbers not sequence numbers.
variant->m3u8->sequence_position =
gst_hls_demux_set_current_variant (hlsdemux, variant); // FIXME: inline?
}
- /* get the selected media playlist (unless the inital list was one already) */
+ /* get the selected media playlist (unless the initial list was one already) */
if (!hlsdemux->master->is_simple) {
GError *err = NULL;
*
* ## Example pipeline
*
- * This explicitely decodes a Kate stream:
+ * This explicitly decodes a Kate stream:
* |[
* gst-launch-1.0 filesrc location=test.ogg ! oggdemux ! katedec ! fakesink silent=TRUE
* ]|
0, G_MAXINT, 0, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, ARG_KEEPALIVE_MIN_TIME,
- g_param_spec_float ("keepalive-min-time", "Keepalive mimimum time",
+ g_param_spec_float ("keepalive-min-time", "Keepalive minimum time",
"Minimum time to emit keepalive packets (0 disables keepalive packets)",
0.0f, FLT_MAX, DEFAULT_KEEPALIVE_MIN_TIME,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
* ## Example Sink/Audio/LADSPA line with this plugins
* |[
* gst-launch-1.0 autoaudiosrc ! ladspa-cmt-so-amp-mono gain=2 ! ladspa-caps-so-plate ! ladspa-tap-echo-so-tap-stereo-echo l-delay=500 r-haas-delay=500 ! tee name=myT myT. ! audioconvert ! audioresample ! queue ! ladspasink-cmt-so-null-ai myT. ! audioconvert ! audioresample ! queue ! goom ! videoconvert ! xvimagesink pixel-aspect-ratio=3/4
- * ]| Get audio input, filter it trhough Mono Amplifier, CAPS Plate LADSPA and TAP Stereo Echo, explicitily anulate audio with Null (Audio Output), and play a visualization (recommended hearphones).
+ * ]| Get audio input, filter it through Mono Amplifier, CAPS Plate LADSPA and TAP Stereo Echo, explicitly nullify audio with Null (Audio Output), and play a visualization (recommended headphones).
*
*/
return FALSE;
}
start = mmsx_get_current_pos (mmssrc->connection);
- GST_INFO_OBJECT (mmssrc, "sought to %" GST_TIME_FORMAT ", offset after "
- "seek: %" G_GINT64_FORMAT, GST_TIME_ARGS (segment->start), start);
+ GST_INFO_OBJECT (mmssrc,
+ "performed seek to %" GST_TIME_FORMAT ", offset after " "seek: %"
+ G_GINT64_FORMAT, GST_TIME_ARGS (segment->start), start);
} else if (segment->format == GST_FORMAT_BYTES) {
start = mmsx_seek (NULL, mmssrc->connection, segment->start, SEEK_SET);
/* mmsx_seek will close and reopen the connection when seeking with the
GST_DEBUG_OBJECT (mmssrc, "connection broken during seek");
return FALSE;
}
- GST_INFO_OBJECT (mmssrc, "sought to: %" G_GINT64_FORMAT " bytes, "
+ GST_INFO_OBJECT (mmssrc, "performed seek to: %" G_GINT64_FORMAT " bytes, "
"result: %" G_GINT64_FORMAT, segment->start, start);
} else {
GST_DEBUG_OBJECT (mmssrc, "unsupported seek segment format: %s",
}
#if 0
-/* Convert an LV2 port role to a Gst channel positon
+/* Convert an LV2 port role to a Gst channel position
* WARNING: If the group has only a single port,
* GST_AUDIO_CHANNEL_POSITION_FRONT_CENTER will be returned for pg:centerRole
* (which is used by LV2 for mono groups), but this is not correct. In this
if (!gst_lv2_setup (&self->lv2, GST_AUDIO_INFO_RATE (info)))
goto no_instance;
- /* FIXME Handle audio channel positionning while negotiating CAPS */
+ /* FIXME Handle audio channel positioning while negotiating CAPS */
#if 0
gint i;
/* set input group pad audio channel position */
optional_pred);
GstLV2Port desc = { j, GST_LV2_PORT_AUDIO, -1, };
LilvNodes *lv2group = lilv_port_get (lv2plugin, port, group_pred);
- /* FIXME Handle channels positionning
+ /* FIXME Handle channels positioning
* GstAudioChannelPosition position = GST_AUDIO_CHANNEL_POSITION_INVALID; */
if (lv2group) {
group->ports = g_array_new (FALSE, TRUE, sizeof (GstLV2Port));
}
- /* FIXME Handle channels positionning
+ /* FIXME Handle channels positioning
position = GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT;
sub_values = lilv_port_get_value (lv2plugin, port, designation_pred);
if (lilv_nodes_size (sub_values) > 0) {
* In particular, default property values are dependent on the format,
* and can even be forcibly restrained to certain pre-sets (and thereby ignored).
* Note that the (S)VCD profiles also restrict the image size, so some scaling
- * may be needed to accomodate this. The so-called generic profiles (as used
+ * may be needed to accommodate this. The so-called generic profiles (as used
* in the example above) allow most parameters to be adjusted.
*
* |[
(GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
g_object_class_install_property (klass, ARG_UNIT_COEFF_ELIM,
g_param_spec_int ("unit-coeff-elim", "Unit coefficience elimination",
- "How agressively small-unit picture blocks should be skipped",
+ "How aggressively small-unit picture blocks should be skipped",
-40, 40, 0,
(GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
goto done;
}
if (!gst_neonhttp_src_set_proxy (src, proxy)) {
- GST_WARNING ("badly formated proxy");
+ GST_WARNING ("badly formatted proxy");
goto done;
}
break;
goto done;
}
if (!gst_neonhttp_src_set_location (src, location, NULL)) {
- GST_WARNING ("badly formated location");
+ GST_WARNING ("badly formatted location");
goto done;
}
break;
static void
oom_callback (void)
{
- GST_ERROR ("memory exeception in neon");
+ GST_ERROR ("memory exception in neon");
}
static GstFlowReturn
cntpixelsnum++;
if ((((uchar *) (m_pbwImage.data + m_pbwImage.step[0] * i))[j]) > 0) {
cntmotionpixelnum++;
- if (cntmotionpixelnum >= thresholdmotionpixelnum) { //we dont needs calculate anymore
+ if (cntmotionpixelnum >= thresholdmotionpixelnum) { //we don't need to calculate anymore
*p_motionarea = cntmotionpixelnum;
return (cntmotionpixelnum / cntpixelsnum);
}
/* initialize the new element
* instantiate pads and add them to element
- * set pad calback functions
+ * set pad callback functions
* initialize instance structure
*/
static void
/* initialize the new element
* instantiate pads and add them to element
- * set pad calback functions
+ * set pad callback functions
* initialize instance structure
*/
static void
/* initialize the new element
* instantiate pads and add them to element
- * set pad calback functions
+ * set pad callback functions
* initialize instance structure
*/
static void
/* initialize the new element
* instantiate pads and add them to element
- * set pad calback functions
+ * set pad callback functions
* initialize instance structure
*/
static void
static GType dewarp_display_mode_type = 0;
static const GEnumValue dewarp_display_mode[] = {
{GST_DEWARP_DISPLAY_PANORAMA, "Single panorama image", "single-panorama"},
- {GST_DEWARP_DISPLAY_DOUBLE_PANORAMA, "Dewarped image is splitted in two "
+ {GST_DEWARP_DISPLAY_DOUBLE_PANORAMA, "Dewarped image is split into two "
"images displayed one below the other", "double-panorama"},
- {GST_DEWARP_DISPLAY_QUAD_VIEW, "Dewarped image is splitted in four images "
+ {GST_DEWARP_DISPLAY_QUAD_VIEW, "Dewarped image is split into four images "
-"dysplayed as a quad view",
+"displayed as a quad view",
"quad-view"},
{0, NULL, NULL},
filter->x_center = v;
filter->need_map_update = TRUE;
need_reconfigure = TRUE;
- GST_LOG_OBJECT (filter, "x center setted to %f", filter->x_center);
+ GST_LOG_OBJECT (filter, "x center set to %f", filter->x_center);
}
break;
case PROP_Y_CENTER:
filter->y_center = v;
filter->need_map_update = TRUE;
need_reconfigure = TRUE;
- GST_LOG_OBJECT (filter, "y center setted to %f", filter->y_center);
+ GST_LOG_OBJECT (filter, "y center set to %f", filter->y_center);
}
break;
case PROP_INNER_RADIUS:
filter->inner_radius = v;
filter->need_map_update = TRUE;
need_reconfigure = TRUE;
- GST_LOG_OBJECT (filter, "inner radius setted to %f",
+ GST_LOG_OBJECT (filter, "inner radius set to %f",
filter->inner_radius);
}
break;
filter->outer_radius = v;
filter->need_map_update = TRUE;
need_reconfigure = TRUE;
- GST_LOG_OBJECT (filter, "outer radius setted to %f",
+ GST_LOG_OBJECT (filter, "outer radius set to %f",
filter->outer_radius);
}
break;
filter->remap_correction_x = v;
filter->need_map_update = TRUE;
need_reconfigure = TRUE;
- GST_LOG_OBJECT (filter, "x remap correction setted to %f",
+ GST_LOG_OBJECT (filter, "x remap correction set to %f",
filter->remap_correction_x);
}
break;
filter->remap_correction_y = v;
filter->need_map_update = TRUE;
need_reconfigure = TRUE;
- GST_LOG_OBJECT (filter, "y remap correction setted to %f",
+ GST_LOG_OBJECT (filter, "y remap correction set to %f",
filter->remap_correction_y);
}
break;
case PROP_INTERPOLATION_MODE:
filter->interpolation_mode = g_value_get_enum (value);
- GST_LOG_OBJECT (filter, "interpolation mode setted to %" G_GINT32_FORMAT,
+ GST_LOG_OBJECT (filter, "interpolation mode set to %" G_GINT32_FORMAT,
filter->interpolation_mode);
break;
case PROP_DISPLAY_MODE:
if (disp_mode != filter->display_mode) {
filter->display_mode = disp_mode;
need_reconfigure = TRUE;
- GST_LOG_OBJECT (filter, "display mode setted to %" G_GINT32_FORMAT,
+ GST_LOG_OBJECT (filter, "display mode set to %" G_GINT32_FORMAT,
filter->display_mode);
}
break;
}
/* if outer_radius and inner radius are very close then width and height
- could be 0, we assume passtrough in this case
+ could be 0, we assume passthrough in this case
*/
if (G_UNLIKELY (*out_width == 0) || G_UNLIKELY (*out_height == 0)) {
GST_WARNING_OBJECT (filter,
GST_INFO_OBJECT (pad, " Negotiating caps via event %" GST_PTR_FORMAT,
caps);
if (!gst_pad_has_current_caps (fs->srcpad)) {
- /* Init image info (widht, height, etc) and all OpenCV matrices */
+ /* Init image info (width, height, etc) and all OpenCV matrices */
initialise_disparity (fs, info.width, info.height,
info.finfo->n_components);
/* initialize the new element
* instantiate pads and add them to element
- * set pad calback functions
+ * set pad callback functions
* initialize instance structure
*/
static void
DEFAULT_SCALE_FACTOR,
(GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
g_object_class_install_property (gobject_class, PROP_MIN_NEIGHBORS,
- g_param_spec_int ("min-neighbors", "Mininum neighbors",
+ g_param_spec_int ("min-neighbors", "Minimum neighbors",
"Minimum number (minus 1) of neighbor rectangles that makes up "
"an object", 0, G_MAXINT, DEFAULT_MIN_NEIGHBORS,
(GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
/* initialize the new element
* instantiate pads and add them to element
- * set pad calback functions
+ * set pad callback functions
* initialize instance structure
*/
static void
1.1, 10.0, DEFAULT_SCALE_FACTOR,
(GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
g_object_class_install_property (gobject_class, PROP_MIN_NEIGHBORS,
- g_param_spec_int ("min-neighbors", "Mininum neighbors",
+ g_param_spec_int ("min-neighbors", "Minimum neighbors",
"Minimum number (minus 1) of neighbor rectangles that makes up "
"an object", 0, G_MAXINT, DEFAULT_MIN_NEIGHBORS,
(GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
/* initialize the new element
* instantiate pads and add them to element
- * set pad calback functions
+ * set pad callback functions
* initialize instance structure
*/
static void
(GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
gst_element_class_set_static_metadata (element_class,
- "Retinex image colour enhacement", "Filter/Effect/Video",
+ "Retinex image colour enhancement", "Filter/Effect/Video",
"Multiscale retinex for colour image enhancement",
"Miguel Casas-Sanchez <miguelecasassanchez@gmail.com>");
/* initialize the new element
* instantiate pads and add them to element
- * set pad calback functions
+ * set pad callback functions
* initialize instance structure
*/
static void
img.convertTo (retinex->cvA, retinex->cvA.type ());
log (retinex->cvA, retinex->cvB);
- /* Compute log of blured image */
+ /* Compute log of blurred image */
filter_size = (int) floor (sigma * 6) / 2;
filter_size = filter_size * 2 + 1;
}
/* Multiscale retinex restoration. The image and a set of filtered images are
converted to the log domain and subtracted from the original with some set
- of weights. Typicaly called with three equally weighted scales of fine,
+ of weights. Typically called with three equally weighted scales of fine,
medium and wide standard deviations.
O = Log(I) - sum_i [ wi * Log(H(I)) ]
where O is the output, H is a gaussian 2d filter and I is the input image
* and adapted. Its license reads:
* "Oct. 3, 2008
* Right to use this code in any way you want without warrenty, support or
- * any guarentee of it working. "
+ * any guarantee of it working. "
*
*
* Permission is hereby granted, free of charge, to any person obtaining a
* mixture model for real-time tracking with shadow detection", Proc. 2nd
* European Workshop on Advanced Video-Based Surveillance Systems, 2001
* [5] http://opencv.itseez.com/modules/video/doc/motion_analysis_and_object_tracking.html#backgroundsubtractormog2
- * [6] Z.Zivkovic, "Improved adaptive Gausian mixture model for background
+ * [6] Z.Zivkovic, "Improved adaptive Gaussian mixture model for background
* subtraction", International Conference Pattern Recognition, UK, August, 2004.
* [7] Z.Zivkovic, F. van der Heijden, "Efficient Adaptive Density Estimation
* per Image Pixel for the Task of Background Subtraction", Pattern Recognition
/* initialize the new element
* instantiate pads and add them to element
- * set pad calback functions
+ * set pad callback functions
* initialize instance structure
*/
static void
* OpenCV MOG2 implements the algorithm described in [2] and [3].
*
* [1] http://opencv.itseez.com/modules/video/doc/motion_analysis_and_object_tracking.html#backgroundsubtractormog2
- * [2] Z.Zivkovic, "Improved adaptive Gausian mixture model for background
+ * [2] Z.Zivkovic, "Improved adaptive Gaussian mixture model for background
* subtraction", International Conference Pattern Recognition, UK, Aug 2004.
* [3] Z.Zivkovic, F. van der Heijden, "Efficient Adaptive Density Estimation
* per Image Pixel for the Task of Background Subtraction", Pattern
maxMod Add this (possibly negative) number onto
max level when determining if new pixel is foreground
- minMod Subract this (possibly negative) number from
+ minMod Subtract this (possibly negative) number from
min level when determining if new pixel is foreground
NOTES:
[2] and [3].
[1] http://opencv.itseez.com/modules/video/doc/motion_analysis_and_object_tracking.html#backgroundsubtractormog2
- [2] Z.Zivkovic, "Improved adaptive Gausian mixture model for background
+ [2] Z.Zivkovic, "Improved adaptive Gaussian mixture model for background
subtraction", International Conference Pattern Recognition, UK, August, 2004.
[3] Z.Zivkovic, F. van der Heijden, "Efficient Adaptive Density Estimation per
Image Pixel for the Task of Background Subtraction", Pattern Recognition
/* initialize the new element
* instantiate pads and add them to element
- * set pad calback functions
+ * set pad callback functions
* initialize instance structure
*/
static void
/* initialize the new element
* instantiate pads and add them to element
- * set pad calback functions
+ * set pad callback functions
* initialize instance structure
*/
static void
((sync & 0xe0) == 0xc0) || ((sync & 0xf0) == 0xe0);
}
-/* If we can pull that's prefered */
+/* If we can pull that's preferred */
static gboolean
gst_flups_demux_sink_activate (GstPad * sinkpad, GstObject * parent)
{
gboolean ret = FALSE;
if (segment->format == rsndvd_format || src->first_seek) {
- /* The internal format has alread served its purpose of waking
+ /* The internal format has already served its purpose of waking
* everything up and flushing, we just need to step to the next
* data block (below) so we know our new position */
ret = TRUE;
*
* The active pad may push more buffers than what is currently displayed/consumed
* and when changing pads those buffers will be discarded and the only way to
- * reactivate that pad without loosing the already consumed buffers is to enable cache.
+ * reactivate that pad without losing the already consumed buffers is to enable cache.
*/
g_object_class_install_property (gobject_class, PROP_CACHE_BUFFERS,
g_param_spec_boolean ("cache-buffers", "Cache Buffers",
/* This is set here so that the call to create() above doesn't clear it */
src->discont = TRUE;
- GST_DEBUG_OBJECT (src, "Seek to %" GST_TIME_FORMAT " successfull",
+ GST_DEBUG_OBJECT (src, "Seek to %" GST_TIME_FORMAT " successful",
GST_TIME_ARGS (segment->start));
return TRUE;
/* this function returns FALSE if not enough data is known to transform the
* segment into proper downstream values. If the function does return false
* the segment should be stalled until enough information is available.
- * If the funtion returns TRUE, event will be replaced by the new downstream
+ * If the function returns TRUE, event will be replaced by the new downstream
* compatible event.
*/
static gboolean
* * gint `type` (0-1): The application uses this field to specify which of the two methods
* specified in RFC 2833 to use. The value should be 0 for tones and 1 for
* named events. Tones are specified by their frequencies and events are
- * specfied by their number. This element can only take events as input.
+ * specified by their number. This element can only take events as input.
* Do not confuse with "method" which specified the output.
* * gint `number` (0-16): The event number.
* * gint `method` (2): This field will always been 2 (ie sound) from this element.
g_object_class_install_property (gobject_class, PROP_REPEAT,
g_param_spec_boolean ("repeat", "Repeat the specified tone period ",
- "Whether to repeat specified tone indefinitly", DEFAULT_REPEAT,
+ "Whether to repeat specified tone indefinitely", DEFAULT_REPEAT,
G_PARAM_READWRITE));
gst_element_class_add_static_pad_template (gstelement_class,
*/
g_object_class_install_property (gobject_class, PROP_POLL_TIMEOUT,
g_param_spec_int ("poll-timeout", "Poll timeout",
- "Return poll wait after timeout miliseconds (-1 = infinite)", -1,
+ "Return poll wait after timeout milliseconds (-1 = infinite)", -1,
G_MAXINT32, GST_SRT_DEFAULT_POLL_TIMEOUT,
G_PARAM_READWRITE | GST_PARAM_MUTABLE_READY |
G_PARAM_STATIC_STRINGS));
* @gstsrtpdec: the element on which the signal is emitted
* @ssrc: The unique SSRC of the stream
*
- * Signal emited to get the parameters relevant to stream
+ * Signal emitted to get the parameters relevant to stream
* with @ssrc. User should provide the key and the RTP and
* RTCP encryption ciphers and authentication, and return
* them wrapped in a GstCaps.
* @gstsrtpdec: the element on which the signal is emitted
* @ssrc: The unique SSRC of the stream
*
- * Signal emited when the stream with @ssrc has reached the
+ * Signal emitted when the stream with @ssrc has reached the
* soft limit of utilisation of it's master encryption key.
* User should provide a new key and new RTP and RTCP encryption
* ciphers and authentication, and return them wrapped in a
* @gstsrtpdec: the element on which the signal is emitted
* @ssrc: The unique SSRC of the stream
*
- * Signal emited when the stream with @ssrc has reached the
+ * Signal emitted when the stream with @ssrc has reached the
* hard limit of utilisation of it's master encryption key.
* User should provide a new key and new RTP and RTCP encryption
* ciphers and authentication, and return them wrapped in a
/* initialize the new element
* instantiate pads and add them to element
- * set pad calback functions
+ * set pad callback functions
* initialize instance structure
*/
static void
* An application can request multiple RTP and RTCP pads to protect,
* but every sink pad requested must receive packets from the same
* source (identical SSRC). If a packet received contains a different
- * SSRC, a warning is emited and the valid SSRC is forced on the packet.
+ * SSRC, a warning is emitted and the valid SSRC is forced on the packet.
*
* This element uses libsrtp library. When receiving the first packet,
* the library is initialized with a new stream (based on the SSRC). It
* GstSrtpEnc::soft-limit:
* @gstsrtpenc: the element on which the signal is emitted
*
- * Signal emited when the stream with @ssrc has reached the soft
+ * Signal emitted when the stream with @ssrc has reached the soft
* limit of utilisation of it's master encryption key. User should
* provide a new key by setting the #GstSrtpEnc:key property.
*/
return ret;
}
-/* Release ressources and set default values
+/* Release resources and set default values
*/
static void
gst_srtp_enc_reset_no_lock (GstSrtpEnc * filter)
* @GST_SUBTITLE_TEXT_DIRECTION_RTL: Text direction is right-to-left.
*
* Defines the progression direction of unicode text that is being treated by
- * the unicode bidirectional algorithm as embedded or overidden (see
+ * the unicode bidirectional algorithm as embedded or overridden (see
* http://unicode.org/reports/tr9/ for more details of the unicode
* bidirectional algorithm).
*/
/**
* GstSubtitleStyleSet:
* @text_direction: Defines the direction of text that has been declared by the
- * #GstSubtitleStyleSet:unicode_bidi attribute to be embbedded or overridden.
+ * #GstSubtitleStyleSet:unicode_bidi attribute to be embedded or overridden.
* Applies to both #GstSubtitleBlocks and #GstSubtitleElements.
* @font_family: The name of the font family that should be used to render the
* text of an inline element. Applies only to #GstSubtitleElements.
gunichar u = g_utf8_get_char (c);
gint nbytes = g_unichar_to_utf8 (u, buf);
- /* Repace each newline or tab with a space. */
+ /* Replace each newline or tab with a space. */
if (nbytes == 1 && (buf[0] == TTML_CHAR_LF || buf[0] == TTML_CHAR_TAB)) {
*c = ' ';
buf[0] = TTML_CHAR_SPACE;
if (!gst_vulkan_ensure_element_data (element, NULL,
&vk_download->instance)) {
GST_ELEMENT_ERROR (vk_download, RESOURCE, NOT_FOUND,
- ("Failed to retreive vulkan instance"), (NULL));
+ ("Failed to retrieve vulkan instance"), (NULL));
return GST_STATE_CHANGE_FAILURE;
}
if (!gst_vulkan_device_run_context_query (GST_ELEMENT (vk_download),
if (!gst_vulkan_ensure_element_data (GST_ELEMENT (bt), NULL,
&render->instance)) {
GST_ELEMENT_ERROR (render, RESOURCE, NOT_FOUND,
- ("Failed to retreive vulkan instance"), (NULL));
+ ("Failed to retrieve vulkan instance"), (NULL));
return FALSE;
}
if (!gst_vulkan_device_run_context_query (GST_ELEMENT (render),
g_return_val_if_fail (first_word == SPIRV_MAGIC_NUMBER_NE
|| first_word == SPIRV_MAGIC_NUMBER_OE, NULL);
if (first_word == SPIRV_MAGIC_NUMBER_OE) {
- /* endianess swap... */
+ /* endianness swap... */
guint32 *old_code = (guint32 *) code;
gsize i;
- GST_DEBUG ("performaing endianess conversion on spirv shader of size %"
+ GST_DEBUG ("performing endianness conversion on spirv shader of size %"
G_GSIZE_FORMAT, size);
new_code = g_new0 (guint32, size / 4);
case GST_STATE_CHANGE_READY_TO_PAUSED:
if (!gst_vulkan_ensure_element_data (element, NULL, &vk_upload->instance)) {
GST_ELEMENT_ERROR (vk_upload, RESOURCE, NOT_FOUND,
- ("Failed to retreive vulkan instance"), (NULL));
+ ("Failed to retrieve vulkan instance"), (NULL));
return GST_STATE_CHANGE_FAILURE;
}
if (!gst_vulkan_device_run_context_query (GST_ELEMENT (vk_upload),
goto done;
}
- /* If all this failed, keep the height that was nearest to the orignal
+ /* If all this failed, keep the height that was nearest to the original
* height and the nearest possible width. This changes the DAR but
* there's not much else to do here.
*/
gst_buffer_pool_config_set_params (config, caps, sink->video_info.size,
2, 0);
- /* This is a video pool, it should not fail with basic setings */
+ /* This is a video pool, it should not fail with basic settings */
if (!gst_buffer_pool_set_config (sink->pool, config) ||
!gst_buffer_pool_set_active (sink->pool, TRUE))
goto activate_failed;
* holds a reference to the GstWlBuffer, but without having an actual reference.
* When we kill the display, there is no way for the GstWlBuffer, the associated
* GstBuffer and the GstBufferPool to get destroyed, so we are going to leak a
- * fair ammount of memory.
+ * fair amount of memory.
*
* Normally, this rarely happens, because the compositor releases buffers
* almost immediately and when waylandsink stops, they are already released.
if (features.width < MIN_WIDTH || features.width > MAX_WIDTH
|| features.height < MIN_HEIGHT || features.height > MAX_HEIGHT) {
- GST_ERROR_OBJECT (dec, "Dimensions of the frame is unspported by libwebp");
+ GST_ERROR_OBJECT (dec, "Dimensions of the frame are unsupported by libwebp");
return GST_FLOW_ERROR;
}
* balanced bundle policy
* setting custom DTLS certificates
*
- * seperate session id's from mlineindex properly
+ * separate session id's from mlineindex properly
* how to deal with replacing a input/output track/stream
*/
WebRTCTransceiver *trans = (WebRTCTransceiver *) rtp_trans;
GstCaps *ret = NULL;
- GST_LOG_OBJECT (webrtc, "retreiving codec preferences from %" GST_PTR_FORMAT,
+ GST_LOG_OBJECT (webrtc, "retrieving codec preferences from %" GST_PTR_FORMAT,
trans);
if (rtp_trans && rtp_trans->codec_preferences) {
/*if (!gst_structure_has_field (s, "rtcp-fb-transport-cc"))
gst_structure_set (s, "rtcp-fb-nack-pli", G_TYPE_BOOLEAN, TRUE, NULL); */
- /* FIXME: codec-specific paramters? */
+ /* FIXME: codec-specific parameters? */
}
return ret;
* and is constantly changing these statistics may be changed to fit with
* the latest spec.
*
- * Each field key is a unique identifer for each RTCStats
+ * Each field key is a unique identifier for each RTCStats
* (https://www.w3.org/TR/webrtc/#rtcstats-dictionary) value (another
* GstStructure) in the RTCStatsReport
* (https://www.w3.org/TR/webrtc/#rtcstatsreport-object). Each supported
* members outlined https://www.w3.org/TR/webrtc/#dom-rtcdatachannelinit and
* and reproduced below
*
- * ordered G_TYPE_BOOLEAN Whether the channal will send data with guarenteed ordering
+ * ordered G_TYPE_BOOLEAN Whether the channel will send data with guaranteed ordering
* max-packet-lifetime G_TYPE_INT The time in milliseconds to attempt transmitting unacknowledged data. -1 for unset
* max-retransmits G_TYPE_INT The number of times data will be attempted to be transmitted without acknowledgement before dropping
* protocol G_TYPE_STRING The subprotocol used by this channel
- * negotiated G_TYPE_BOOLEAN Whether the created data channel should not perform in-band chnanel announcment. If %TRUE, then application must negotiate the channel itself and create the corresponding channel on the peer with the same id.
+ * negotiated G_TYPE_BOOLEAN Whether the created data channel should not perform in-band channel announcement. If %TRUE, then application must negotiate the channel itself and create the corresponding channel on the peer with the same id.
* id G_TYPE_INT Override the default identifier selection of this channel
* priority GST_TYPE_WEBRTC_PRIORITY_TYPE The priority to use for this channel
*
g_object_class_install_property (gobject_class,
PROP_STREAM,
g_param_spec_object ("stream", "Stream",
- "The TransportStream for this receiveing bin",
+ "The TransportStream for this receiving bin",
transport_stream_get_type (),
G_PARAM_READWRITE | G_PARAM_CONSTRUCT_ONLY | G_PARAM_STATIC_STRINGS));
}
}
if (!_media_has_setup (media, i, error))
goto fail;
- /* check paramaters in bundle are the same */
+ /* check parameters in bundle are the same */
if (media_in_bundle) {
const gchar *ice_ufrag =
gst_sdp_media_get_attribute_val (media, "ice-ufrag");
* a single probe and DSP.
*
* The probe can only be used within the same top level GstPipeline.
- * Additonally, to simplify the code, the probe element must be created
+ * Additionally, to simplify the code, the probe element must be created
* before the DSP sink pad is activated. It does not need to be in any
* particular state and does not even need to be added to the pipeline yet.
*
* # Example launch line
*
- * As a conveniance, the echo canceller can be tested using an echo loop. In
+ * As a convenience, the echo canceller can be tested using an echo loop. In
* this configuration, one would expect a single echo to be heard.
*
* |[
static const gchar *
webrtc_error_to_string (gint err)
{
- const gchar *str = "unkown error";
+ const gchar *str = "unknown error";
switch (err) {
case webrtc::AudioProcessing::kNoError:
&gst_webrtc_echo_probe_sink_template);
gst_element_class_set_static_metadata (element_class,
- "Accoustic Echo Canceller probe",
+ "Acoustic Echo Canceller probe",
"Generic/Audio",
"Gathers playback buffers for webrtcdsp",
"Nicolas Dufresne <nicolas.dufrsesne@collabora.com>");
* object lock and also lock the probe. The natural order for the DSP is
* to lock the DSP and then the echo probe. If we where using the probe
* object lock, we'd be racing with GstBin which will lock sink to src,
- * and may accidently reverse the order. */
+ * and may accidentally reverse the order. */
GMutex lock;
/* Protected by the lock */
return FALSE;
/* Allocate output buffer
- * Multiply by 2 to accomodate for the sample size (16 bit = 2 byte) */
+ * Multiply by 2 to accommodate for the sample size (16 bit = 2 byte) */
outbuf_size = wildmidi_dec->output_buffer_size * 2 * WILDMIDI_NUM_CHANNELS;
outbuf =
gst_nonstream_audio_decoder_allocate_output_buffer (dec, outbuf_size);
case GST_NAVIGATION_EVENT_KEY_RELEASE:
if (gst_navigation_event_parse_key_event (event, &key)) {
/* FIXME: This is wrong... The GstNavigation API should pass
- hardware-level informations, not high-level keysym strings */
+ hardware-level information, not high-level keysym strings */
uint32_t keysym =
(uint32_t) xkb_keysym_from_name (key, XKB_KEYSYM_NO_FLAGS);
struct wpe_input_keyboard_event wpe_event;
}
if (i == encoder->peer_profiles->len) {
- GST_ERROR_OBJECT (encoder, "Could't apply peer profile");
+ GST_ERROR_OBJECT (encoder, "Couldn't apply peer profile");
GST_OBJECT_UNLOCK (encoder);
return FALSE;
GST_DEBUG_OBJECT (encoder, "%d nal units in header", i_nal);
/* x265 returns also non header nal units with the call x265_encoder_headers.
- * The usefull headers are sequential (VPS, SPS and PPS), so we look for this
+ * The useful headers are sequential (VPS, SPS and PPS), so we look for this
* nal units and only copy these tree nal units as the header */
vps_idx = sps_idx = pps_idx = -1;
* If the .#GstZBar:attach-frame property is %TRUE, the posted barcode message
* includes a sample of the frame where the barcode was detected (Since 1.6).
*
- * The element generate messages named`barcode`. The structure containes these fields:
+ * The element generates messages named `barcode`. The structure contains these fields:
*
* * #GstClockTime `timestamp`: the timestamp of the buffer that triggered the message.
* * gchar * `type`: the symbol type.
- * * gchar * `symbol`: the deteted bar code data.
+ * * gchar * `symbol`: the detected bar code data.
* * gint `quality`: an unscaled, relative quantity: larger values are better than smaller
* values.
* * GstSample `frame`: the frame in which the barcode message was detected, if
* Subclasses:
* While GstAdaptiveDemux is responsible for the workflow, it knows nothing
* about the intrinsics of the subclass formats, so the subclasses are
- * resposible for maintaining the manifest data structures and stream
+ * responsible for maintaining the manifest data structures and stream
* information.
*/
demux_class->stream_seek (stream, rate >= 0, stream_seek_flags, ts, &ts);
}
- /* replace event with a new one without snaping to seek on all streams */
+ /* replace event with a new one without snapping to seek on all streams */
gst_event_unref (event);
if (rate >= 0) {
start = ts;
/* stop/duration members are not set, on purpose - in case of loops,
* new segments will be generated, which automatically put an implicit
- * end on the current segment (the segment implicitely "ends" when the
+ * end on the current segment (the segment implicitly "ends" when the
* new one starts), and having a stop value might cause very slight
* gaps occasionally due to slight jitter in the calculation of
* base times etc. */
* params are used */
if (!gst_nonstream_audio_decoder_negotiate (dec)) {
GST_ERROR_OBJECT (dec,
- "could not allocate output buffer because negotation failed");
+ "could not allocate output buffer because negotiation failed");
return NULL;
}
}
GST_DEBUG ("parsing \"Clock timestamp\"");
- /* defalt values */
+ /* default values */
tim->time_offset = 0;
READ_UINT8 (nr, tim->ct_type, 2);
GST_DEBUG ("parsing \"Recovery point\"");
if (!sps || !sps->valid) {
- GST_WARNING ("didn't get the associated sequence paramater set for the "
+ GST_WARNING ("didn't get the associated sequence parameter set for the "
"current access unit");
goto error;
}
READ_UINT8 (nr, use_delta_flag[j], 1);
}
- /* 7-47: calcuate NumNegativePics, DeltaPocS0 and UsedByCurrPicS0 */
+ /* 7-47: calculate NumNegativePics, DeltaPocS0 and UsedByCurrPicS0 */
i = 0;
for (j = (RefRPS->NumPositivePics - 1); j >= 0; j--) {
dPoc = RefRPS->DeltaPocS1[j] + deltaRps;
}
stRPS->NumNegativePics = i;
- /* 7-48: calcuate NumPositivePics, DeltaPocS1 and UsedByCurrPicS1 */
+ /* 7-48: calculate NumPositivePics, DeltaPocS1 and UsedByCurrPicS1 */
i = 0;
for (j = (RefRPS->NumNegativePics - 1); j >= 0; j--) {
dPoc = RefRPS->DeltaPocS0[j] + deltaRps;
GST_DEBUG ("parsing \"Picture timing\"");
if (!parser->last_sps || !parser->last_sps->valid) {
- GST_WARNING ("didn't get the associated sequence paramater set for the "
+ GST_WARNING ("didn't get the associated sequence parameter set for the "
"current access unit");
goto error;
}
GST_DEBUG ("parsing \"Recovery point\"");
if (!sps || !sps->valid) {
- GST_WARNING ("didn't get the associated sequence paramater set for the "
+ GST_WARNING ("didn't get the associated sequence parameter set for the "
"current access unit");
goto error;
}
CHECK_ALLOWED_MAX (vps->max_layer_id, 63);
READ_UE_MAX (&nr, vps->num_layer_sets_minus1, 1023);
- /* allowd range is 0 to 1023 */
+ /* allowed range is 0 to 1023 */
CHECK_ALLOWED_MAX (vps->num_layer_sets_minus1, 1023);
for (i = 1; i <= vps->num_layer_sets_minus1; i++) {
READ_UE_MAX (&nr, vps->num_ticks_poc_diff_one_minus1, G_MAXUINT32 - 1);
READ_UE_MAX (&nr, vps->num_hrd_parameters, 1024);
- /* allowd range is
+ /* allowed range is
* 0 to vps_num_layer_sets_minus1 + 1 */
CHECK_ALLOWED_MAX (vps->num_hrd_parameters, vps->num_layer_sets_minus1 + 1);
if (vps->num_hrd_parameters) {
READ_UE_MAX (&nr, vps->hrd_layer_set_idx, 1023);
- /* allowd range is
+ /* allowed range is
* ( vps_base_layer_internal_flag ? 0 : 1 ) to vps_num_layer_sets_minus1
*/
CHECK_ALLOWED_MAX (vps->hrd_layer_set_idx, vps->num_layer_sets_minus1);
guint extra_constraints = 0;
FormatRangeExtensionProfileMatch *m;
- /* Filter out all the profiles having constraints not satisified by @ptl.
- * Then pick the one having the least extra contraints. This allow us
+ /* Filter out all the profiles having constraints not satisfied by @ptl.
+ * Then pick the one having the least extra constraints. This allows us
* to match the closet profile if bitstream contains not standard
* constraints. */
if (p.max_14bit_constraint_flag != ptl->max_14bit_constraint_flag) {
/**
* GstH265ParserResult:
- * @GST_H265_PARSER_OK: The parsing succeded
+ * @GST_H265_PARSER_OK: The parsing succeeded
* @GST_H265_PARSER_BROKEN_DATA: The data to parse is broken
* @GST_H265_PARSER_BROKEN_LINK: The link to structure needed for the parsing couldn't be found
* @GST_H265_PARSER_ERROR: An error accured when parsing
* @progressive_source_flag: flag to indicate the type of stream
* @interlaced_source_flag: flag to indicate the type of stream
* @non_packed_constraint_flag: indicate the presence of frame packing
- * arragement sei message
+ * arrangement sei message
* @frame_only_constraint_flag: recognize the field_seq_flag
* @max_12bit_constraint_flag: used to define profile extensions, see Annex A
* @max_10bit_constraint_flag: used to define profile extensions, see Annex A
* @temporal_id_nesting_flag: specifies whether inter prediction is
* additionally restricted
* @profile_tier_level: ProfileTierLevel info
- * @sub_layer_ordering_info_present_flag: indicates the presense of
+ * @sub_layer_ordering_info_present_flag: indicates the presence of
* vps_max_dec_pic_buffering_minus1, vps_max_num_reorder_pics and
* vps_max_latency_increase_plus1
* @max_dec_pic_buffering_minus1: specifies the maximum required size
first_resync_marker);
first_resync_marker = FALSE;
- /* We found a complet slice */
+ /* We found a complete slice */
if (resync_res == GST_MPEG4_PARSER_OK)
return resync_res;
else if (resync_res == GST_MPEG4_PARSER_NO_PACKET_END) {
vop->fcode_forward = 1;
vop->fcode_backward = 1;
- /* Compute macroblock informations */
+ /* Compute macroblock information */
if (vol->interlaced)
vop->mb_height = (2 * (vol->height + 31) / 32);
else
gst_bit_reader_get_bits_uint16_unchecked (&br, 13);
MARKER_UNCHECKED (&br);
- /* Recompute the Macroblock informations
+ /* Recompute the Macroblock information
* accordingly to the new values */
if (vol->interlaced)
vop->mb_height = (2 * (vol->height + 31) / 32);
* gst_mpeg4_parse_video_packet_header:
* @videopackethdr: The #GstMpeg4VideoPacketHdr structure to fill
* @vol: The last parsed #GstMpeg4VideoObjectLayer, will be updated
- * with the informations found during the parsing
+ * with the information found during the parsing
* @vop: The last parsed #GstMpeg4VideoObjectPlane, will be updated
- * with the informations found during the parsing
+ * with the information found during the parsing
* @sprite_trajectory: A #GstMpeg4SpriteTrajectory to fill or %NULL
- * with the informations found during the parsing
+ * with the information found during the parsing
* @data: The data to parse, should be set after the resync marker.
* @size: The size of the data to parse
*
guint8 load_backward_shape;
guint8 ref_select_code;
- /* Computed macroblock informations */
+ /* Computed macroblock information */
guint16 mb_height;
guint16 mb_width;
guint mb_num;
/* Wmvp specific */
guint8 wmvp; /* Specify if the stream is wmp or not */
- /* In the wmvp case, the framerate is not computed but in the bistream */
+ /* In the wmvp case, the framerate is not computed but in the bitstream */
guint8 slice_code;
};
int seg_base = frame_hdr->quant_indices.y_ac_qi;
GstVp9ParserPrivate *priv = GST_VP9_PARSER_GET_PRIVATE (parser);
const GstVp9SegmentationInfoData *seg = priv->segmentation + segid;
- /* DEBUG("id = %d, seg_base = %d, seg enable = %d, alt eanble = %d, abs = %d, alt= %d\n",segid,
+ /* DEBUG("id = %d, seg_base = %d, seg enable = %d, alt enable = %d, abs = %d, alt= %d\n",segid,
seg_base, frame_hdr->segmentation.enabled, seg->alternate_quantizer_enabled, priv->segmentation_abs_delta, seg->alternate_quantizer);
*/
if (frame_hdr->segmentation.enabled && seg->alternate_quantizer_enabled) {
* GstVp9ParseResult:
* @GST_VP9_PARSER_OK: The parsing went well
* @GST_VP9_PARSER_BROKEN_DATA: The data to parse is broken
- * @GST_VP9_PARSER_NO_PACKET_ERROR: An error occured during the parsing
+ * @GST_VP9_PARSER_NO_PACKET_ERROR: An error occurred during the parsing
*
* Result type of any parsing function.
*
* @refresh_frame_context: refresh frame context indicator
* @frame_parallel_decoding_mode: enable or disable parallel decoding support.
* @loopfilter: loopfilter values
- * @quant_indices: quantization indeces
+ * @quant_indices: quantization indices
* @segmentation: segmentation info
* @log2_tile_rows: tile row indicator
* @log2_tile_columns: tile column indicator
* @reference_skip: a block skip mode that implies both the use of a (0,0)
* motion vector and that no residual will be coded
*
- * Segmentation info kept across multipe frames
+ * Segmentation info kept across multiple frames
*
* Since: 1.8
*/
*/
struct _GstVp9Parser
{
- /* private stuct for tracking state variables across frames */
+ /* private struct for tracking state variables across frames */
void *priv;
gint subsampling_x;
/**
* gst_photography_set_config:
* @photo: #GstPhotography interface of a #GstElement
- * @config: #GstPhotographySettings containg the configuration
+ * @config: #GstPhotographySettings containing the configuration
*
* Set all configuration settings at once.
*
/**
* gst_photography_get_config:
* @photo: #GstPhotography interface of a #GstElement
- * @config: #GstPhotographySettings containg the configuration
+ * @config: #GstPhotographySettings containing the configuration
*
* Get all configuration settings at once.
*
* GstPhotography:lens-focus:
*
* Manual changing of lens focus in diopter units.
- * Inteded use with GST_PHOTOGRAPHY_FOCUS_MODE_MANUAL focus mode, otherwise
+ * Intended use with GST_PHOTOGRAPHY_FOCUS_MODE_MANUAL focus mode, otherwise
* to be ignored.
*
*/
*
* Extracts the component tag from @descriptor.
*
- * Returns: %TRUE if the parsing happended correctly, else %FALSE.
+ * Returns: %TRUE if the parsing happened correctly, else %FALSE.
*/
gboolean
gst_mpegts_descriptor_parse_dvb_stream_identifier (const GstMpegtsDescriptor
* @priority: %TRUE High Priority %FALSE Low Priority
* @time_slicing: %TRUE no time slicing %FALSE time slicing
* @mpe_fec: %TRUE no mpe-fec is used %FALSE mpe-fec is use
- * @constellation: the constallation
+ * @constellation: the constellation
* @hierarchy: the hierarchy
* @code_rate_hp:
* @code_rate_lp:
* These are the base descriptor types and methods.
*
* For more details, refer to the ITU H.222.0 or ISO/IEC 13818-1 specifications
- * and other specifications mentionned in the documentation.
+ * and other specifications mentioned in the documentation.
*/
/* FIXME : Move this to proper file once we have a C file for ATSC/ISDB descriptors */
* Note: To look for descriptors that can be present more than once in an
* array of descriptors, iterate the #GArray manually.
*
- * Returns: (transfer none): the first descriptor matchin @tag, else %NULL.
+ * Returns: (transfer none): the first descriptor matching @tag, else %NULL.
*/
const GstMpegtsDescriptor *
gst_mpegts_find_descriptor (GPtrArray * descriptors, guint8 tag)
/**
* GstPlayerStreamInfo:
*
- * Base structure for information concering a media stream. Depending on
+ * Base structure for information concerning a media stream. Depending on
* the stream type, one can find more media-specific information in
* #GstPlayerVideoInfo, #GstPlayerAudioInfo, #GstPlayerSubtitleInfo.
*/
*
* Returns: %TRUE or %FALSE
*
- * Sets the subtitle strack @stream_index.
+ * Sets the subtitle track @stream_index.
*/
gboolean
gst_player_set_subtitle_track (GstPlayer * self, gint stream_index)
"Name of the fragment (eg:fragment-12.ts)", NULL, G_PARAM_READABLE));
g_object_class_install_property (gobject_class, PROP_DISCONTINOUS,
- g_param_spec_boolean ("discontinuous", "Discontinous",
+ g_param_spec_boolean ("discontinuous", "Discontinuous",
"Whether this fragment has a discontinuity or not",
FALSE, G_PARAM_READABLE));
} else {
GstQuery *query;
- /* Download successfull, let's query the URI */
+ /* Download successful, let's query the URI */
query = gst_query_new_uri ();
if (gst_element_query (urisrc, query)) {
gst_query_parse_uri (query, &download->uri);
* @instance: a #GstVulkanInstance
* @error: #GError
*
- * Returns: whether the instance vould be created
+ * Returns: whether the instance could be created
*
* Since: 1.18
*/
"vkCreateDebugReportCallbackEXT");
if (!instance->dbgCreateDebugReportCallback) {
g_set_error (error, GST_VULKAN_ERROR, VK_ERROR_INITIALIZATION_FAILED,
- "Failed to retreive vkCreateDebugReportCallback");
+ "Failed to retrieve vkCreateDebugReportCallback");
goto error;
}
instance->dbgDestroyDebugReportCallback =
"vkDestroyDebugReportCallbackEXT");
if (!instance->dbgDestroyDebugReportCallback) {
g_set_error (error, GST_VULKAN_ERROR, VK_ERROR_INITIALIZATION_FAILED,
- "Failed to retreive vkDestroyDebugReportCallback");
+ "Failed to retrieve vkDestroyDebugReportCallback");
goto error;
}
instance->dbgReportMessage = (PFN_vkDebugReportMessageEXT)
"vkDebugReportMessageEXT");
if (!instance->dbgReportMessage) {
g_set_error (error, GST_VULKAN_ERROR, VK_ERROR_INITIALIZATION_FAILED,
- "Failed to retreive vkDebugReportMessage");
+ "Failed to retrieve vkDebugReportMessage");
goto error;
}
_vk_format_to_video_format (VkFormat format)
{
switch (format) {
- /* double check endianess */
+ /* double check endianness */
case VK_FORMAT_R8G8B8A8_UNORM:
case VK_FORMAT_R8G8B8A8_SRGB:
return GST_VIDEO_FORMAT_RGBA;
* @window: a #GstVulkanWindow
* @error: a #GError
*
- * Returns: whether @window could be sucessfully opened
+ * Returns: whether @window could be successfully opened
*
* Since: 1.18
*/
}
default:
{
- /* transmit messages to the parrent (ex: mouse/keyboard input) */
+ /* transmit messages to the parent (ex: mouse/keyboard input) */
HWND parent_id = window_win32->parent_win_id;
if (parent_id)
PostMessage (parent_id, uMsg, wParam, lParam);
*
* Please note that any calls to this method MUST be matched by
* calls to end_geometry_change() and AFTER the parent surface has
- * commited its geometry changes.
+ * committed its geometry changes.
*/
void
gst_wayland_video_begin_geometry_change (GstWaylandVideo * video)
*
* Notifies the video sink that we just finished changing the
* geometry of both itself and its parent surface. This should
- * have been earlier preceeded by a call to begin_geometry_change()
+ * have been earlier preceded by a call to begin_geometry_change()
* which notified the sink before any of these changes had happened.
*
* It is important to call this method only AFTER the parent surface
- * has commited its geometry changes, otherwise no synchronization
+ * has committed its geometry changes, otherwise no synchronization
* is actually achieved.
*/
void
* and get the same tags in the writing function */
/**
* gst_asf_mux_get_content_description_tags:
- * @asfmux: #GstAsfMux to have its tags proccessed
+ * @asfmux: #GstAsfMux to have its tags processed
* @asftags: #GstAsfTags to hold the results
*
* Inspects the tags received by the GstTagSetter interface
* size needed for the default and extended content description objects.
* This results and a copy of the #GstTagList
* are stored in the #GstAsfTags. We store a copy so that
- * the sizes estimated here mantain the same until they are
+ * the sizes estimated here remain the same until they are
* written to the asf file.
*/
static void
*
* Writes the header of the header extension object. The buffer pointer
* is incremented to the next writing position (the header extension object
- * childs should be writen from that point)
+ * children should be written from that point)
*/
static void
gst_asf_mux_write_header_extension (GstAsfMux * asfmux, guint8 ** buf,
* gst_asf_mux_write_extended_stream_properties:
* @asfmux:
* @buf: pointer to the buffer pointer
- * @asfpad: Pad that handles the stream of the properties to be writen
+ * @asfpad: Pad that handles the stream of the properties to be written
*
* Writes the extended stream properties object (that is part of the
* header extension objects) for the stream handled by asfpad
* @asfmux:
* @size_buf: pointer to the memory position to write the size of the string
* @str_buf: pointer to the memory position to write the string
- * @str: the string to be writen (in UTF-8)
- * @use32: if the string size should be writen with 32 bits (if true)
+ * @str: the string to be written (in UTF-8)
+ * @use32: if the string size should be written with 32 bits (if true)
* or with 16 (if false)
*
* Writes a string with its size as it is needed in many asf objects.
- * The size is writen to size_buf as a WORD field if use32 is false, and
- * as a DWORD if use32 is true. The string is writen to str_buf in UTF16-LE.
+ * The size is written to size_buf as a WORD field if use32 is false, and
+ * as a DWORD if use32 is true. The string is written to str_buf in UTF16-LE.
* The string should be passed in UTF-8.
*
* The string size in UTF16-LE is returned.
* @data_buf:
*
* Checks if a string tag with tagname exists in the taglist. If it
- * exists it is writen as an UTF-16LE to data_buf and its size in bytes
- * is writen to size_buf. It is used for writing content description
+ * exists it is written as an UTF-16LE to data_buf and its size in bytes
+ * is written to size_buf. It is used for writing content description
* object fields.
*
* Returns: the size of the string
* @videopad:
*
* Adds a new entry to the simple index of the stream handler by videopad.
- * This functions doesn't check if the time ellapsed
+ * This function doesn't check if the time elapsed
* is larger than the established time interval between entries. The caller
* is responsible for verifying this.
*/
* gst_asf_payload_free:
* @payload: the #AsfPayload to be freed
*
- * Releases teh memory associated with this payload
+ * Releases the memory associated with this payload
*/
void
gst_asf_payload_free (AsfPayload * payload)
/**
* gst_asf_put_i32:
* @buf: the memory to write data to
- * @data: the value to be writen
+ * @data: the value to be written
*
* Writes a 32 bit signed integer to memory
*/
/**
* gst_asf_put_time:
* @buf: pointer to the buffer to write the value to
- * @time: value to be writen
+ * @time: value to be written
*
* Writes an asf time value to the buffer
*/
/**
* gst_asf_put_guid:
* @buf: the buffer to write the guid to
- * @guid: the guid to be writen
+ * @guid: the guid to be written
*
* Writes a GUID to the buffer
*/
/**
* gst_asf_put_payload:
* @buf: memory to write the payload to
- * @payload: #AsfPayload to be writen
+ * @payload: #AsfPayload to be written
*
* Writes the asf payload to the buffer. The #AsfPayload
* packet count is incremented.
/**
* gst_asf_put_subpayload:
* @buf: buffer to write the payload to
- * @payload: the payload to be writen
+ * @payload: the payload to be written
* @size: maximum size in bytes to write
*
* Serializes part of a payload to a buffer.
* The maximum size is checked against the payload length,
- * the minimum of this size and the payload length is writen
- * to the buffer and the writen size is returned.
+ * the minimum of this size and the payload length is written
+ * to the buffer and the written size is returned.
*
* It also updates the values of the payload to match the remaining
* data.
* In case there is not enough space to write the headers, nothing is done.
*
- * Returns: The writen size in bytes.
+ * Returns: The written size in bytes.
*/
guint16
gst_asf_put_subpayload (guint8 * buf, AsfPayload * payload, guint16 size)
* is the last one in an asf packet and the remaining data
* is probably uninteresting to the application.
*
- * Returns: true on success, false if some error occurrs
+ * Returns: true on success, false if some error occurs
*/
static gboolean
gst_asf_parse_mult_payload (GstByteReader * reader, gboolean * has_keyframe)
if (!gst_byte_reader_get_uint8 (reader, &stream_num))
goto error;
if ((stream_num & 0x80) != 0) {
- GST_LOG ("Keyframe found, stoping parse of payloads");
+ GST_LOG ("Keyframe found, stopping parse of payloads");
*has_keyframe = TRUE;
return TRUE;
}
* is the last one in an asf packet and the remaining data
* is probably uninteresting to the application.
*
- * Returns: true on success, false if some error occurrs
+ * Returns: true on success, false if some error occurs
*/
static gboolean
gst_asf_parse_single_payload (GstByteReader * reader, gboolean * has_keyframe)
return GST_FLOW_ERROR;
if (min_ps != max_ps) {
- GST_WARNING ("Mininum and maximum packet size differ "
+ GST_WARNING ("Minimum and maximum packet size differ "
"%" G_GUINT32_FORMAT " and %" G_GUINT32_FORMAT ", "
"ASF spec states they should be the same", min_ps, max_ps);
return FALSE;
fc = r + l;
x = (guint) (r * w / fc);
- /* the brighness scaling factor was picked by experimenting */
+ /* the brightness scaling factor was picked by experimenting */
br = b * fc * 0.01;
br1 = br * (clarity + 128) >> 8;
gst_element_class_add_static_pad_template (gstelement_class, &sinktemplate);
gst_element_class_set_static_metadata (gstelement_class,
- "Select convertor based on caps", "Generic/Bin",
+ "Select converter based on caps", "Generic/Bin",
"Selects the right transform element based on the caps",
"Olivier Crete <olivier.crete@collabora.com>");
gst_element_class_add_static_pad_template (gstelement_class, &sinktemplate);
gst_element_class_set_static_metadata (gstelement_class,
- "Select color space convertor based on caps", "Generic/Bin",
- "Selects the right color space convertor based on the caps",
+ "Select color space converter based on caps", "Generic/Bin",
+ "Selects the right color space converter based on the caps",
"Benjamin Gaignard <benjamin.gaignard@stericsson.com>");
gstelement_class->change_state =
GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
GST_VERSION_MINOR,
autoconvert,
- "Selects convertor element based on caps",
+ "Selects converter element based on caps",
plugin_init, VERSION, GST_LICENSE, GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)
* "nearest neighbor" principal, with some additional complexity for the
* calculation of the "green" element, where an "adaptive" pairing is used.
*
- * For purposes of documentation and indentification, each element of the
+ * For purposes of documentation and identification, each element of the
* original array can be put into one of four classes:
* R A red element
* B A blue element
has a smooth experience.
Note that basecamerasrc already has the mode property and start/stop-capture
-signals. It has functions that should be overriden by its child classes to
+signals. It has functions that should be overridden by its child classes to
implement the handling of these actions.
/* try to change the state of an element. This function returns the element
* when the state change could be performed. When this function returns NULL
- * an error occured and the element is unreffed. */
+ * an error occurred and the element is unreffed. */
static GstElement *
try_element (GstElement * bin, GstElement * element)
{
* show that jifmux is picked if image/jpeg is the caps of a container
* profile. So this could work.
* - There seems to be a problem with encodebin for images currently as
- * it autoplugs a videorate that only starts outputing buffers after
+ * it autoplugs a videorate that only starts outputting buffers after
* getting the 2nd buffer.
*/
g_object_class_install_property (object_class, PROP_IMAGE_ENCODING_PROFILE,
GST_PAD_LINK_CHECK_NOTHING);
vfbin->elements_created = TRUE;
- GST_DEBUG_OBJECT (vfbin, "Elements succesfully created and linked");
+ GST_DEBUG_OBJECT (vfbin, "Elements successfully created and linked");
updated_converters = TRUE;
}
/* After pipe was negotiated src_filter do not have any filter caps.
* In this situation we should compare negotiated caps on capsfilter pad
* with requested range of caps. If one of this caps intersect,
- * then we can avoid reseting.
+ * then we can avoid resetting.
*/
src_neg_caps = gst_pad_get_current_caps (self->srcfilter_pad);
if (src_neg_caps && new_filter_caps && gst_caps_is_fixed (new_filter_caps))
/* If new_filter_caps = NULL, then some body wont to empty
* capsfilter (set to ANY). In this case we will need to reset pipe,
* but if capsfilter is actually empthy, then we can avoid
- * one more reseting.
+ * one more resetting.
*/
GstCaps *old_filter_caps; /* range of caps on capsfilter */
/* keep a 'tee' element that has 2 source pads, one is linked to the
* vidsrc pad and the other is linked as needed to the viewfinder
- * when video recording is hapenning */
+ * when video recording is happening */
video_recording_tee = gst_element_factory_make ("tee", "video_rec_tee");
gst_bin_add (GST_BIN_CAST (self), video_recording_tee); /* TODO check returns */
self->video_tee_vf_pad =
{
GstWrapperCameraBinSrc *src = GST_WRAPPER_CAMERA_BIN_SRC (camerasrc);
- /* TODO shoud we access this directly? Maybe a macro is better? */
+ /* TODO should we access this directly? Maybe a macro is better? */
if (src->mode == MODE_VIDEO) {
if (src->video_rec_status == GST_VIDEO_RECORDING_STATUS_STARTING) {
GST_DEBUG_OBJECT (src, "Aborting, had not started recording");
g_object_class_install_property (gobject_klass, PROP_SIGNAL_FPS_MEASUREMENTS,
g_param_spec_boolean ("signal-fps-measurements",
"Signal fps measurements",
- "If the fps-measurements signal should be emited.",
+ "If the fps-measurements signal should be emitted.",
DEFAULT_SIGNAL_FPS_MEASUREMENTS,
G_PARAM_STATIC_STRINGS | G_PARAM_READWRITE));
/* initialize the new element
* instantiate pads and add them to element
- * set pad calback functions
+ * set pad callback functions
* initialize instance structure
*/
static void
int fgcolor;
int bgcolor;
- /* FIXME: Should we use GSList? The relating interaction and pointer assigment is quite complex and perhaps unsuited for a plain GSList anyway */
+ /* FIXME: Should we use GSList? The relating interaction and pointer assignment is quite complex and perhaps unsuited for a plain GSList anyway */
struct DVBSubObjectDisplay *region_list_next;
struct DVBSubObjectDisplay *object_list_next;
} DVBSubObjectDisplay;
* the PTS information).
*
* Return value: -1 if data was unhandled (e.g, not a subtitle packet),
- * -2 if data parsing was unsuccesful (e.g, length was invalid),
+ * -2 if data parsing was unsuccessful (e.g, length was invalid),
* 0 or positive if data was handled. If positive, then amount of data consumed on success. FIXME: List the positive return values.
*/
gint
* as an index into the @palette.
* @palette: the palette used for this subtitle rectangle, up to 256 items depending
* on the depth of the subpicture; each palette item is in ARGB form, 8-bits per channel.
- * @palette_bits_count: the amount of bits used in indeces into @palette in @data.
+ * @palette_bits_count: the amount of bits used in indices into @palette in @data.
* @rowstride: the number of bytes between the start of a row and the start of the next row.
*
* A structure representing the contents of a subtitle rectangle.
\r
* type 0x03 : '03wxyz' - 3 bytes\r
this one has the palette information ; it basically says 'encoded color 0\r
- is the wth color of the palette, encoded color 1 is the xth color, aso.\r
+ is the w-th color of the palette, encoded color 1 is the x-th color, aso.\r
\r
* type 0x04 : '04wxyz' - 3 bytes\r
I *think* this is the alpha channel information ; I only saw values of 0 or f\r
- for those nibbles, so I can't really be sure, but it seems plausable.\r
+ for those nibbles, so I can't really be sure, but it seems plausible.\r
\r
* type 0x05 : '05xxxXXXyyyYYY' - 7 bytes\r
the coordinates of the subtitle on the screen :\r
disp_end = state->vobsub.disp_rect.right + 1;
/* Work out the first pixel control info, which may point to the dummy entry if
- * the global palette/alpha need using initally */
+ * the global palette/alpha need using initially */
cur_pix_ctrl = chg_col->pix_ctrl_i;
end_pix_ctrl = chg_col->pix_ctrl_i + chg_col->n_changes;
{
/* Receive file (probably a waveform file) from socket using */
/* Festival key stuff technique, but long winded I know, sorry */
- /* but will receive any file without closeing the stream or */
+ /* but will receive any file without closing the stream or */
/* using OOB data */
static const char file_stuff_key[] = "ft_StUfF_key"; /* must == Festival's key */
char *buff;
* @subsection sec_icon_location Icon location
*
* The exact location where the application should look for the
- * plugin is platform dependant.
+ * plugin is platform dependent.
*
* For Windows platforms, the icon should be at the same place as
* the plugin containing the effect.
* List of supported color models.
*
* Note: the color models are endian independent, because the
- * color components are defined by their positon in memory, not
+ * color components are defined by their position in memory, not
* by their significance in an uint32_t value.
*
* For effects that work on the color components,
/**
* In PACKED32, each pixel is represented by 4 consecutive
- * bytes, but it is not defined how the color componets are
+ * bytes, but it is not defined how the color components are
* stored. The true color format could be RGBA8888,
* BGRA8888, a packed 32 bit YUV format, or any other
* color format that stores pixels in 32 bit.
instance = ftable.construct (640, 480);
if (!instance) {
- GST_WARNING ("Failed to instanciate plugin '%s'", info.name);
+ GST_WARNING ("Failed to instantiate plugin '%s'", info.name);
ftable.deinit ();
g_module_close (module);
return FALSE;
/* Initialize the element,
* instantiate pads and add them to element,
- * set pad calback functions, and
+ * set pad callback functions, and
* initialize instance structure.
*/
static void
/* Initialize the element,
* instantiate pads and add them to element,
- * set pad calback functions, and
+ * set pad callback functions, and
* initialize instance structure.
*/
static void
/* Initialize the element,
* instantiate pads and add them to element,
- * set pad calback functions, and
+ * set pad callback functions, and
* initialize instance structure.
*/
static void
/* Initialize the element,
* instantiate pads and add them to element,
- * set pad calback functions, and
+ * set pad callback functions, and
* initialize instance structure.
*/
static void
/* Initialize the element,
* instantiate pads and add them to element,
- * set pad calback functions, and
+ * set pad callback functions, and
* initialize instance structure.
*/
static void
#define DEFAULT_SIGMA 1.2
-/* Initalize the gaussianblur's class. */
+/* Initialize the gaussianblur's class. */
static void
gst_gaussianblur_class_init (GstGaussianBlurClass * klass)
{
/* Initialize the element,
* instantiate pads and add them to element,
- * set pad calback functions, and
+ * set pad callback functions, and
* initialize instance structure.
*/
static void
gdouble width = gt->width;
gdouble height = gt->height;
- /* normalize in ((-1.0, -1.0), (1.0, 1.0) and traslate the center */
+ /* normalize in ((-1.0, -1.0), (1.0, 1.0) and translate the center */
norm_x = 2.0 * (x / width - cgt->x_center);
norm_y = 2.0 * (y / height - cgt->y_center);
gdouble height = gt->height;
gdouble a, b;
- /* normalize in ((-1.0, -1.0), (1.0, 1.0) and traslate the center */
+ /* normalize in ((-1.0, -1.0), (1.0, 1.0) and translate the center */
norm_x = 2.0 * (x / width - cgt->x_center);
norm_y = 2.0 * (y / height - cgt->y_center);
gdouble r;
- /* normalize in ((-1.0, -1.0), (1.0, 1.0) and traslate the center */
+ /* normalize in ((-1.0, -1.0), (1.0, 1.0) and translate the center */
/* plus a little trick to obtain a perfect circle, normalize in a
* square with sides equal to MAX(width, height) */
norm_x = 2.0 * (x - cgt->x_center * width) / MAX (width, height);
GST_LOG ("Processing tag %s (num=%u)", tag, num_tags);
if (num_tags > 1 && gst_tag_is_fixed (tag)) {
- GST_WARNING ("Multiple occurences of fixed tag '%s', ignoring some", tag);
+ GST_WARNING ("Multiple occurrences of fixed tag '%s', ignoring some", tag);
num_tags = 1;
}
/* initialize the new element
* instantiate pads and add them to element
- * set pad calback functions
+ * set pad callback functions
* initialize instance structure
*/
static void
const gchar *id_str = NULL;
if (gst_byte_reader_peek_string_utf8 (reader, &id_str)) {
- GST_DEBUG_OBJECT (parse, "unhandled marker %x: '%s' skiping %u bytes",
+ GST_DEBUG_OBJECT (parse, "unhandled marker %x: '%s' skipping %u bytes",
marker, id_str ? id_str : "(NULL)", size);
} else {
- GST_DEBUG_OBJECT (parse, "unhandled marker %x skiping %u bytes", marker,
+ GST_DEBUG_OBJECT (parse, "unhandled marker %x skipping %u bytes", marker,
size);
}
}
#else
- GST_DEBUG_OBJECT (parse, "unhandled marker %x skiping %u bytes", marker,
+ GST_DEBUG_OBJECT (parse, "unhandled marker %x skipping %u bytes", marker,
size);
#endif // GST_DISABLE_GST_DEBUG
*
* Initializes the connection with the rfb server
*
- * Returns: TRUE if initialization was succesfull, FALSE on fail.
+ * Returns: TRUE if initialization was successful, FALSE on fail.
*/
gboolean
rfb_decoder_iterate (RfbDecoder * decoder)
return FALSE;
}
- GST_DEBUG ("Security handshaking succesful");
+ GST_DEBUG ("Security handshake successful");
decoder->state = rfb_decoder_state_send_client_initialisation;
return TRUE;
*
* Sends the encoding types that the client can decode to the server
*
- * Returns: TRUE if initialization was succesfull, FALSE on fail.
+ * Returns: TRUE if initialization was successful, FALSE on fail.
*/
static gboolean
rfb_decoder_state_set_encodings (RfbDecoder * decoder)
h = RFB_GET_UINT16 (decoder->data + 6);
encoding = RFB_GET_UINT32 (decoder->data + 8);
- GST_DEBUG ("update recieved");
+ GST_DEBUG ("update received");
GST_DEBUG ("x:%d y:%d", x, y);
GST_DEBUG ("w:%d h:%d", w, h);
GST_DEBUG ("encoding: %d", encoding);
}
}
-/* If we can pull that's prefered */
+/* If we can pull that's preferred */
static gboolean
gst_ps_demux_sink_activate (GstPad * sinkpad, GstObject * parent)
{
/* Time to write pack header */
/* FIXME: currently we write the mux rate of the PREVIOUS pack into the
* pack header, because of the incapability to calculate the mux_rate
- * before outputing the pack. To calculate the mux_rate for the current
+ * before outputting the pack. To calculate the mux_rate for the current
* pack, we need to put the whole pack into buffer, calculate the
* mux_rate, and then output the whole trunck.
*/
MIN (psmux_stream_bytes_in_buffer (stream), len - PSMUX_PES_MAX_HDR_LEN);
/* Note that we cannot make a better estimation of the header length for the
* time being; because the header length is dependent on whether we can find a
- * timestamp in the upcomming buffers, which in turn depends on
+ * timestamp in the upcoming buffers, which in turn depends on
* cur_pes_payload_size, which is exactly what we want to decide.
*/
if (stream->pi.flags & PSMUX_PACKET_FLAG_PES_EXT_STREAMID) {
guint8 ext_len;
- flags = 0x0f; /* preceeding flags all 0 | (reserved bits) | PES_extension_flag_2 */
+ flags = 0x0f; /* preceding flags all 0 | (reserved bits) | PES_extension_flag_2 */
*data++ = flags;
ext_len = 1; /* Only writing 1 byte into the extended fields */
/* get the pts of stream */
guint64 psmux_stream_get_pts (PsMuxStream *stream);
-/* stream_id assignemnt */
+/* stream_id assignment */
#define PSMUX_STREAM_ID_MPGA_INIT 0xc0
#define PSMUX_STREAM_ID_MPGA_MAX 0xcf
tsdemux/tsparse TODO
--------------------
-* Perfomance
+* Performance
* Bufferlist : Creating/Destroying very small buffers is too
costly. Switch to pre-/re-allocating outgoing buffers in which we
copy the data.
* mpegtsparser
* SERIOUS room for improvement performance-wise (see callgrind),
- mostly related to performance issues mentionned above.
+ mostly related to performance issues mentioned above.
* Random-access seeking
* Do minimal parsing of video headers to detect keyframes and use
Since no prerolling is happening downstream and the incoming buffers
do not have capture timestamps, we need to ensure the first buffer
- we push out corresponds to the base segment start runing time.
+ we push out corresponds to the base segment start running time.
=> The packetizer keeps track of PCR locations and offsets in
addition to the clock skew (in the case of upstream buffers
program = mpegts_base_new_program (base, program_number, section->pid);
program->patcount = old_program->patcount;
- /* Desactivate the old program */
+ /* Deactivate the old program */
/* FIXME : THIS IS BREAKING THE STREAM SWITCHING LOGIC !
* */
if (klass->can_remove_program (base, old_program)) {
GstFlowReturn (*drain) (MpegTSBase * base);
/* flush all streams
- * The hard inicator is used to flush completelly on FLUSH_STOP events
+ * The hard indicator is used to flush completely on FLUSH_STOP events
* or partially in pull mode seeks of tsdemux */
void (*flush) (MpegTSBase * base, gboolean hard);
* Cri : The time of the clock at the receiver for packet i
* D + ni : The jitter when receiving packet i
*
- * We see that the network delay is irrelevant here as we can elliminate D:
+ * We see that the network delay is irrelevant here as we can eliminate D:
*
* recv_diff(i) = (Cri + ni) - (Cr0 + n0))
*
* We will use raw (non-corrected/non-absolute) PCR values in a first time
* to detect wraparound/resets/gaps...
*
- * We will use the corrected/asolute PCR values to calculate
+ * We will use the corrected/absolute PCR values to calculate
* bitrate and estimate the target group pcr_offset.
* */
case ST_PS_VIDEO_MPEG2_DCII:
/* FIXME : Use DCII registration code (ETV1 ?) to handle that special
* Stream type (ST_PS_VIDEO_MPEG2_DCII) */
- /* FIXME : Use video decriptor (0x1) to refine caps with:
+ /* FIXME : Use video descriptor (0x1) to refine caps with:
* * frame_rate
* * profile_and_level
*/
switch (prop_id) {
case PROP_M2TS_MODE:
- /* set incase if the output stream need to be of 192 bytes */
+ /* set in case the output stream needs to be of 192 bytes */
mux->m2ts_mode = g_value_get_boolean (value);
gst_base_ts_mux_set_packet_size (GST_BASE_TS_MUX (mux),
mux->m2ts_mode ? M2TS_PACKET_LENGTH :
* tsmux_program_new:
* @mux: a #TsMux
*
- * Create a new program in the mising session @mux.
+ * Create a new program in the muxing session @mux.
*
* Returns: a new #TsMuxProgram or %NULL when the maximum number of programs has
* been reached.
* @mux: a #TsMux
* @pid: the PID to find.
*
- * Find the stream associated wih PID.
+ * Find the stream associated with PID.
*
* Returns: a #TsMuxStream with @pid or NULL when the stream was not found.
*/
g_ptr_array_add (pmt_stream->descriptors, descriptor);
break;
case TSMUX_ST_PS_DVB_SUBPICTURE:
- /* falltrough ...
+ /* fallthrough ...
* that should never happen anyway as
* dvb subtitles are private data */
case TSMUX_ST_PRIVATE_DATA:
GST_DEBUG (" rights holder = %s", GST_STR_NULL (self->rights_holder));
} else if (memcmp (tag_ul, &rights_managment_authority_ul, 16) == 0) {
self->rights_managment_authority = mxf_utf16_to_utf8 (tag_data, tag_size);
- GST_DEBUG (" rights managment authority = %s",
+ GST_DEBUG (" rights management authority = %s",
GST_STR_NULL (self->rights_managment_authority));
} else if (memcmp (tag_ul, ®ion_or_area_of_ip_license_ul, 16) == 0) {
self->region_or_area_of_ip_license = mxf_utf16_to_utf8 (tag_data, tag_size);
PROP_ALLOW_REORDERING,
};
-/* these numbers are nothing but wild guesses and dont reflect any reality */
+/* these numbers are nothing but wild guesses and don't reflect any reality */
#define DEFAULT_MIN_DELAY 200
#define DEFAULT_MAX_DELAY 400
#define DEFAULT_DELAY_DISTRIBUTION DISTRIBUTION_UNIFORM
switch (IRTSPParse->state) {
case IRTSP_SEARCH_FRAME:
- /* Use the first occurence of 0x24 as a start of interleaved frames.
+ /* Use the first occurrence of 0x24 as a start of interleaved frames.
* This 'trick' allows us to parse a dump that doesn't contain RTSP
* handshake. It's up to user to provide the data where the first 0x24
* is an RTSP frame */
const gchar *fmtstr;
pnmdec->size = pnmdec->mngr.info.width * pnmdec->mngr.info.height * 2;
- /* perform some basic negotiation to resolve which endianess,
+ /* perform some basic negotiation to resolve which endianness,
* if any, is supported by the component downstream. Query
* the peer caps, intersecting with our preferred caps
*/
g_object_class_install_property (gobject_class, PROP_REMOVE,
g_param_spec_boolean ("remove", "Remove",
- "Set to true to remove silence from the stream, false otherwhise",
+ "Set to true to remove silence from the stream, false otherwise",
FALSE, G_PARAM_READWRITE));
g_object_class_install_property (gobject_class, PROP_HYSTERESIS,
/* initialize the new element
* instantiate pads and add them to element
- * set pad calback functions
+ * set pad callback functions
* initialize instance structure
*/
static void
inbuf = gst_buffer_make_writable (inbuf);
GST_BUFFER_PTS (inbuf) -= filter->ts_offset;
} else {
- GST_WARNING ("Invalid buffer pts, update not possibile");
+ GST_WARNING ("Invalid buffer pts, update not possible");
}
}
data = g_hash_table_lookup (rtx->ssrc_data, GUINT_TO_POINTER (ssrc));
if (!data) {
- /* See 5.3.2 Retransmitted Packets, orignal packet have SSRC LSB set to
+ /* See 5.3.2 Retransmitted Packets, original packet have SSRC LSB set to
* 0, while RTX packet have LSB set to 1 */
rtx_ssrc = ssrc + 1;
data = ssrc_rtx_data_new (rtx_ssrc);
g_object_class_install_property (object_class, PROP_BONDING_ADDRESSES,
g_param_spec_string ("bonding-addresses", "Bonding Addresses",
- "Comma (,) seperated list of <address>:<port> to send to. ", NULL,
+ "Comma (,) separated list of <address>:<port> to send to. ", NULL,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (object_class, PROP_BONDING_METHOD,
* buffers over multiple src pads. This is the opposite of tee
* element, which duplicates buffers over all pads. This element
* can be used to distrute load across multiple branches when the buffer
- * can be processed indepently.
+ * can be processed independently.
*/
#include "gstroundrobin.h"
GstRtpSrc *self = GST_RTP_SRC (data);
GST_INFO_OBJECT (self,
- "Dectected an SSRC collision: session-id 0x%x, ssrc 0x%x.", session_id,
+ "Detected an SSRC collision: session-id 0x%x, ssrc 0x%x.", session_id,
ssrc);
}
{
GstRtpSrc *self = GST_RTP_SRC (data);
- GST_INFO_OBJECT (self, "Dectected a new SSRC: session-id 0x%x, ssrc 0x%x.",
+ GST_INFO_OBJECT (self, "Detected a new SSRC: session-id 0x%x, ssrc 0x%x.",
session_id, ssrc);
}
}
}
- /* FIXME We could fail the negotiation immediatly if caps are empty */
+ /* FIXME We could fail the negotiation immediately if caps are empty */
if (caps && !gst_caps_is_empty (caps)) {
/* fixate to avoid ambiguity with lists when parsing */
caps = gst_caps_fixate (caps);
GST_DEBUG_OBJECT (h264parse, "marking IDR in frame at offset %d",
h264parse->idr_pos);
}
- /* if SEI preceeds (faked) IDR, then we have to insert config there */
+ /* if SEI precedes (faked) IDR, then we have to insert config there */
if (h264parse->sei_pos >= 0 && h264parse->idr_pos > h264parse->sei_pos) {
h264parse->idr_pos = h264parse->sei_pos;
GST_DEBUG_OBJECT (h264parse, "moved IDR mark to SEI position %d",
h264parse->sent_codec_tag = TRUE;
}
- /* In case of byte-stream, insert au delimeter by default
+ /* In case of byte-stream, insert au delimiter by default
* if it doesn't exist */
if (h264parse->aud_insert && h264parse->format == GST_H264_PARSE_FORMAT_BYTE) {
if (h264parse->align == GST_H264_PARSE_ALIGN_AU) {
}
}
- /* Fixme: setting passthrough mode casuing multiple issues:
+ /* Fixme: setting passthrough mode causing multiple issues:
* For nal aligned multiresoluton streams, passthrough mode make h264parse
* unable to advertise the new resoultions. Also causing issues while
* parsing MVC streams when it has two layers.
}
}
- /* FIXME We could fail the negotiation immediatly if caps are empty */
+ /* FIXME We could fail the negotiation immediately if caps are empty */
if (caps && !gst_caps_is_empty (caps)) {
/* fixate to avoid ambiguity with lists when parsing */
caps = gst_caps_fixate (caps);
&& (buf = gst_value_get_buffer (value))) {
/* best possible parse attempt,
* src caps are based on sink caps so it will end up in there
- * whether sucessful or not */
+ * whether successful or not */
gst_buffer_map (buf, &map, GST_MAP_READ);
data = map.data;
size = map.size;
g_object_class_install_property (gobject_class, PROP_DROP,
g_param_spec_boolean ("drop", "drop",
- "Drop data untill valid configuration data is received either "
+ "Drop data until valid configuration data is received either "
"in the stream or through caps", DEFAULT_PROP_DROP,
G_PARAM_CONSTRUCT | G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
gst_buffer_map (buf, &map, GST_MAP_READ);
/* best possible parse attempt,
* src caps are based on sink caps so it will end up in there
- * whether sucessful or not */
+ * whether successful or not */
mpvparse->seq_offset = 4;
gst_mpegv_parse_process_config (mpvparse, &map, gst_buffer_get_size (buf));
gst_buffer_unmap (buf, &map);
/* FIXME: Annex P (Reference Picture Resampling) can be signaled implicitly
* as well as in the header. Should we set the field to false in caps if it
- * is not specfied by the header? */
+ * is not specified by the header? */
/* FIXME: Annex U (Enhanced Reference Picture Selection) poses a problem - we
* have no means of specifying what sub-modes, if any, are used. */
format = gst_video_format_from_masks (dec->format.depth, bpp, endianness,
redmask, greenmask, bluemask, 0);
- GST_DEBUG_OBJECT (dec, "From depth: %d bpp: %u endianess: %s redmask: %X "
+ GST_DEBUG_OBJECT (dec, "From depth: %d bpp: %u endianness: %s redmask: %X "
"greenmask: %X bluemask: %X got format %s",
dec->format.depth, bpp, endianness == G_BIG_ENDIAN ? "BE" : "LE",
GUINT32_FROM_BE (redmask), GUINT32_FROM_BE (greenmask),
GUINT32_FROM_BE (bluemask),
- format == GST_VIDEO_FORMAT_UNKNOWN ? "UNKOWN" :
+ format == GST_VIDEO_FORMAT_UNKNOWN ? "UNKNOWN" :
gst_video_format_to_string (format));
if (format == GST_VIDEO_FORMAT_UNKNOWN) {
#!/bin/sh
#
-# Check that the code follows a consistant code style
+# Check that the code follows a consistent code style
#
# Check for existence of indent, and error out if not present.
* directory.
* The source will look for the environment variable “TMP� which must contain
* the absolute path to a writable directory.
- * It can be retreived using the following Java code :
+ * It can be retrieved using the following Java code :
* |[
* context.getCacheDir().getAbsolutePath();
* ]|
af_meta = gst_buffer_get_video_affine_transformation_meta (sync->buffer);
if (!af_meta) {
- GST_WARNING ("Failed to retreive the transformation meta from the "
+ GST_WARNING ("Failed to retrieve the transformation meta from the "
"gl_sync %p buffer %p", sync, sync->buffer);
} else if (gst_amc_surface_texture_get_transform_matrix (sync->surface,
matrix, &error)) {
return NULL;
unsupported_profile:
- GST_ERROR_OBJECT (encoder, "Unsupport profile '%s'", profile_string);
+ GST_ERROR_OBJECT (encoder, "Unsupported profile '%s'", profile_string);
gst_amc_format_free (format);
return NULL;
unsupported_level:
- GST_ERROR_OBJECT (encoder, "Unsupport level '%s'", level_string);
+ GST_ERROR_OBJECT (encoder, "Unsupported level '%s'", level_string);
gst_amc_format_free (format);
return NULL;
}
return caps;
unsupported_profile:
- GST_ERROR ("Unsupport amc profile id %d", amc_profile);
+ GST_ERROR ("Unsupported amc profile id %d", amc_profile);
g_free (mime);
gst_caps_unref (caps);
return NULL;
unsupported_level:
- GST_ERROR ("Unsupport amc level id %d", amc_level);
+ GST_ERROR ("Unsupported amc level id %d", amc_level);
g_free (mime);
gst_caps_unref (caps);
if (!get_class_loader) {
g_set_error (err, GST_LIBRARY_ERROR, GST_LIBRARY_ERROR_FAILED,
- "Could not retreive application class loader function");
+ "Could not retrieve application class loader function");
goto done;
}
class_loader = get_class_loader ();
if (!class_loader) {
g_set_error (err, GST_LIBRARY_ERROR, GST_LIBRARY_ERROR_FAILED,
- "Could not retreive application class loader");
+ "Could not retrieve application class loader");
goto done;
}
class_loader_cls = (*env)->GetObjectClass (env, class_loader);
if (!class_loader_cls) {
g_set_error (err, GST_LIBRARY_ERROR, GST_LIBRARY_ERROR_FAILED,
- "Could not retreive application class loader java class");
+ "Could not retrieve application class loader java class");
goto done;
}
} @catch (NSException *exception) {
if (![[exception name] isEqualToString:NSUndefinedKeyException]) {
- GST_WARNING ("An unexpected error occured: %s",
+ GST_WARNING ("An unexpected error occurred: %s",
[[exception reason] UTF8String]);
}
GST_WARNING ("Capturing cursor is only supported in OS X >= 10.8");
[device setValue:frame_duration_value forKey:@"activeVideoMaxFrameDuration"];
} @catch (NSException *exception) {
if (![[exception name] isEqualToString:NSUndefinedKeyException]) {
- GST_WARNING ("An unexcepted error occured: %s",
+ GST_WARNING ("An unexpected error occurred: %s",
[exception.reason UTF8String]);
}
}
result = gst_caps_merge (result, [self getDeviceCaps]);
} @catch (NSException *exception) {
if (![[exception name] isEqualToString:NSUndefinedKeyException]) {
- GST_WARNING ("An unexcepted error occured: %s", [exception.reason UTF8String]);
+ GST_WARNING ("An unexcepted error occurred: %s", [exception.reason UTF8String]);
return result;
}
} @catch (NSException *exception) {
if (![[exception name] isEqualToString:NSUndefinedKeyException]) {
- GST_WARNING ("An unexcepted error occured: %s", [exception.reason UTF8String]);
+ GST_WARNING ("An unexcepted error occurred: %s", [exception.reason UTF8String]);
*successPtr = NO;
return;
}
gst_vtenc_finish_encoding (GstVTEnc * self, gboolean is_flushing)
{
GST_DEBUG_OBJECT (self,
- "complete enconding and clean buffer queue, is flushing %d", is_flushing);
+ "complete encoding and clean buffer queue, is flushing %d", is_flushing);
GstVideoCodecFrame *outframe;
GstFlowReturn ret = GST_FLOW_OK;
OSStatus vt_status;
* @device: a #GstD3D11Device
* @desc: a DXGI_SWAP_CHAIN_DESC structure for swapchain
*
- * Creat a IDXGISwapChain object. Caller must release returned swap chain object
+ * Create an IDXGISwapChain object. Caller must release returned swap chain object
* via IDXGISwapChain_Release()
*
* Returns: (transfer full) (nullable): a new IDXGISwapChain or %NULL
subdir_done()
endif
-# required for HDR meatadata
+# required for HDR metadata
if cc.has_header('dxgi1_5.h')
extra_c_args += ['-DHAVE_DXGI_1_5_H']
endif
goto end;
}
- /* Restore orignal WndProc for window_handle */
+ /* Restore original WndProc for window_handle */
if (!SetWindowLongPtr (sink->d3d.window_handle, GWLP_WNDPROC,
(LONG_PTR) sink->d3d.orig_wnd_proc)) {
GST_WARNING_OBJECT (sink, "D3D failed to set original WndProc");
/* PUBLIC FUNCTIONS */
-/* Iterface Registrations */
+/* Interface Registrations */
static void
gst_d3dvideosink_video_overlay_interface_init (GstVideoOverlayInterface * iface)
else if (res == RPC_E_CHANGED_MODE)
GST_WARNING ("The concurrency model of COM has changed.");
else
- GST_INFO ("COM intialized succesfully");
+ GST_INFO ("COM initialized successfully");
com_initialized = TRUE;
g_mutex_unlock (&com_init_lock);
- /* Wait until the unitialize condition is met to leave the COM apartement */
+ /* Wait until the uninitialize condition is met to leave the COM apartment */
g_mutex_lock (&com_deinit_lock);
g_cond_wait (&com_deinit_cond, &com_deinit_lock);
CoUninitialize ();
- GST_INFO ("COM unintialized succesfully");
+ GST_INFO ("COM uninitialized successfully");
com_initialized = FALSE;
g_cond_signal (&com_deinited_cond);
g_mutex_unlock (&com_deinit_lock);
* the parent device is also checked and configured accordingly.
*
* If
- * - full-duplex-mode is requsted and the device does not support it *or*
+ * - full-duplex-mode is requested and the device does not support it *or*
 * - half-duplex-mode is requested and there is no parent-device *or*
* - half-duplex-mode is requested and neither the device nor the parent device does support setting
 * the duplex-mode, DUPLEX_MODE_SET_UNSUPPORTED is returned.
*
* # Duplex-Mode:
 * Certain DeckLink Cards like the Duo2 or the Quad2 contain two or four
- * independant SDI units with two connectors each. These units can operate either
+ * independent SDI units with two connectors each. These units can operate either
* in half- or in full-duplex mode.
*
* The Duplex-Mode of a Card can be configured using the `duplex-mode`-Property.
*
* ## Half-Duplex-Mode (default):
* By default decklinkvideosink will configure them into half-duplex mode, so that
- * each connector acts as if it were an independant DeckLink Card which can either
+ * each connector acts as if it were an independent DeckLink Card which can either
 * be used as an Input or as an Output. In this mode the Duo2 can be used as 4 SDI
* In-/Outputs and the Quad2 as 8 SDI In-/Outputs.
*
*
* # Duplex-Mode:
 * Certain DeckLink Cards like the Duo2 or the Quad2 contain two or four
- * independant SDI units with two connectors each. These units can operate either
+ * independent SDI units with two connectors each. These units can operate either
* in half- or in full-duplex mode.
*
* The Duplex-Mode of a Card can be configured using the `duplex-mode`-Property.
*
* ## Half-Duplex-Mode (default):
* By default decklinkvideosrc will configure them into half-duplex mode, so that
- * each connector acts as if it were an independant DeckLink Card which can either
+ * each connector acts as if it were an independent DeckLink Card which can either
 * be used as an Input or as an Output. In this mode the Duo2 can be used as 4 SDI
* In-/Outputs and the Quad2 as 8 SDI In-/Outputs.
*
GST_INFO_OBJECT (asrc, "actual latency time: %" G_GUINT64_FORMAT,
spec->latency_time);
- /* Init secondary buffer desciption */
+ /* Init secondary buffer description */
memset (&descSecondary, 0, sizeof (DSCBUFFERDESC));
descSecondary.dwSize = sizeof (DSCBUFFERDESC);
descSecondary.dwFlags = 0;
else if (res == RPC_E_CHANGED_MODE)
GST_WARNING_OBJECT (adec, "The concurrency model of COM has changed.");
else
- GST_INFO_OBJECT (adec, "COM intialized succesfully");
+ GST_INFO_OBJECT (adec, "COM initialized successfully");
adec->comInitialized = TRUE;
g_mutex_unlock (&adec->com_init_lock);
- /* Wait until the unitialize condition is met to leave the COM apartement */
+ /* Wait until the uninitialize condition is met to leave the COM apartment */
g_mutex_lock (&adec->com_deinit_lock);
g_cond_wait (&adec->com_uninitialize, &adec->com_deinit_lock);
CoUninitialize ();
- GST_INFO_OBJECT (adec, "COM unintialized succesfully");
+ GST_INFO_OBJECT (adec, "COM uninitialized successfully");
adec->comInitialized = FALSE;
g_cond_signal (&adec->com_uninitialized);
g_mutex_unlock (&adec->com_deinit_lock);
output_pin = gst_dshow_get_pin_from_filter (adec->decfilter, PINDIR_OUTPUT);
if (!output_pin) {
GST_ELEMENT_ERROR (adec, CORE, NEGOTIATION,
- ("failed getting ouput pin from the decoder"), (NULL));
+ ("failed getting output pin from the decoder"), (NULL));
return FALSE;
}
else if (res == RPC_E_CHANGED_MODE)
GST_WARNING_OBJECT (vdec, "The concurrency model of COM has changed.");
else
- GST_INFO_OBJECT (vdec, "COM intialized succesfully");
+ GST_INFO_OBJECT (vdec, "COM initialized successfully");
vdec->comInitialized = TRUE;
g_mutex_unlock (&vdec->com_init_lock);
- /* Wait until the unitialize condition is met to leave the COM apartement */
+ /* Wait until the uninitialize condition is met to leave the COM apartment */
g_mutex_lock (&vdec->com_deinit_lock);
g_cond_wait (&vdec->com_uninitialize, &vdec->com_deinit_lock);
CoUninitialize ();
- GST_INFO_OBJECT (vdec, "COM unintialized succesfully");
+ GST_INFO_OBJECT (vdec, "COM uninitialized successfully");
vdec->comInitialized = FALSE;
g_cond_signal (&vdec->com_uninitialized);
g_mutex_unlock (&vdec->com_deinit_lock);
vdec->srcpad =
gst_pad_new_from_template (gst_element_class_get_pad_template
(element_class, "src"), "src");
-/* needed to implement caps negociation on our src pad */
+/* needed to implement caps negotiation on our src pad */
/* gst_pad_set_getcaps_function (vdec->srcpad, gst_dshowvideodec_src_getcaps);
gst_pad_set_setcaps_function (vdec->srcpad, gst_dshowvideodec_src_setcaps);*/
gst_element_add_pad (GST_ELEMENT (vdec), vdec->srcpad);
output_pin = gst_dshow_get_pin_from_filter (vdec->decfilter, PINDIR_OUTPUT);
if (!output_pin) {
GST_ELEMENT_ERROR (vdec, STREAM, FAILED,
- ("failed getting ouput pin from the decoder"), (NULL));
+ ("failed getting output pin from the decoder"), (NULL));
goto beach;
}
output_pin = gst_dshow_get_pin_from_filter (vdec->decfilter, PINDIR_OUTPUT);
if (!output_pin) {
GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
- ("failed getting ouput pin from the decoder"), (NULL));
+ ("failed getting output pin from the decoder"), (NULL));
return FALSE;
}
GstFlowReturn last_ret;
- /* list of dshow mediatypes coresponding to the caps list */
+ /* list of dshow mediatypes corresponding to the caps list */
GList *mediatypes;
/* filters interfaces */
pmt->pbFormat = NULL;
}
if (pmt->pUnk != NULL) {
- /* Unecessary because pUnk should not be used, but safest. */
+ /* Unnecessary because pUnk should not be used, but safest. */
pmt->pUnk->Release ();
pmt->pUnk = NULL;
}
if (filter)
*filter = NULL;
- /* create a private copy of prefered filter substring in upper case */
+ /* create a private copy of preferred filter substring in upper case */
if (prefered_filter_name) {
prefered_filter_upper = g_strdup (prefered_filter_name);
_strupr (prefered_filter_upper);
src->is_running = FALSE;
}
- /* search the negociated caps in our caps list to get its index and the corresponding mediatype */
+ /* search the negotiated caps in our caps list to get its index and the corresponding mediatype */
if (gst_caps_is_subset (spec->caps, src->caps)) {
guint i = 0;
gint res = -1;
static GstCaps *
gst_dshowvideosrc_src_fixate (GstBaseSrc * bsrc, GstCaps * caps)
{
- /* If there is no desired video size, set default video size to device preffered video size */
+ /* If there is no desired video size, set default video size to device preferred video size */
GstDshowVideoSrc *src = GST_DSHOWVIDEOSRC (bsrc);
GstStructure *structure = gst_caps_get_structure (caps, 0);
src->video_cap_filter, (GList**)&src->pins_mediatypes);
if (gst_caps_is_empty (src->caps)) {
GST_ELEMENT_ERROR (src, RESOURCE, FAILED,
- ("Failed to get any caps from devce"), (NULL));
+ ("Failed to get any caps from device"), (NULL));
return FALSE;
}
src->is_running = FALSE;
}
- /* search the negociated caps in our caps list to get its index and the corresponding mediatype */
+ /* search the negotiated caps in our caps list to get its index and the corresponding mediatype */
if (gst_caps_is_subset (caps, src->caps)) {
guint i = 0;
gint res = -1;
goto error;
}
- /* save width and height negociated */
+ /* save width and height negotiated */
gst_structure_get_int (s, "width", &src->width);
gst_structure_get_int (s, "height", &src->height);
else if (res == RPC_E_CHANGED_MODE)
GST_WARNING_OBJECT (sink, "The concurrency model of COM has changed.");
else
- GST_INFO_OBJECT (sink, "COM intialized succesfully");
+ GST_INFO_OBJECT (sink, "COM initialized successfully");
sink->comInitialized = TRUE;
g_mutex_unlock (&sink->com_init_lock);
- /* Wait until the unitialize condition is met to leave the COM apartement */
+ /* Wait until the uninitialize condition is met to leave the COM apartment */
g_mutex_lock (&sink->com_deinit_lock);
g_cond_wait (&sink->com_uninitialize, &sink->com_deinit_lock);
CoUninitialize ();
- GST_INFO_OBJECT (sink, "COM unintialized succesfully");
+ GST_INFO_OBJECT (sink, "COM uninitialized successfully");
sink->comInitialized = FALSE;
g_cond_signal (&sink->com_uninitialized);
g_mutex_unlock (&sink->com_deinit_lock);
sink->fakesrc->GetOutputPin()->SetMediaType (&sink->mediatype);
GST_DEBUG_OBJECT (sink, "Configured output pin media type");
- /* We have configured the ouput pin media type.
+ /* We have configured the output pin media type.
* So, create a window (or start using an application-supplied
* one, then connect the graph */
gst_dshowvideosink_prepare_window (sink);
tl->expected_tpdus += 1;
- GST_DEBUG ("Sucess writing tpdu 0x%x (%s)", buffer[2],
+ GST_DEBUG ("Success writing tpdu 0x%x (%s)", buffer[2],
tag_get_name (buffer[2]));
return CAM_RETURN_OK;
* DvbBaseBin::tuning-start:
* @dvbbasebin: the element on which the signal is emitted
*
- * Signal emited when the element first attempts to tune the
+ * Signal emitted when the element first attempts to tune the
 * frontend tuner to a given frequency.
*/
dvb_base_bin_signals[SIGNAL_TUNING_START] =
* DvbBaseBin::tuning-done:
* @dvbbasebin: the element on which the signal is emitted
*
- * Signal emited when the tunner has successfully got a lock on a signal.
+ * Signal emitted when the tuner has successfully got a lock on a signal.
*/
dvb_base_bin_signals[SIGNAL_TUNING_DONE] =
g_signal_new ("tuning-done", G_TYPE_FROM_CLASS (klass),
* DvbBaseBin::tuning-fail:
* @dvbbasebin: the element on which the signal is emitted
*
+ * Signal emitted when the tuner failed to get a lock on the
+ * Signal emitted when the tunner failed to get a lock on the
* signal.
*/
dvb_base_bin_signals[SIGNAL_TUNING_FAIL] =
* DvbBaseBin::tune:
* @dvbbasesink: the element on which the signal is emitted
*
- * Signal emited from the application to the element, instructing it
+ * Signal emitted from the application to the element, instructing it
* to tune.
*/
dvb_base_bin_signals[SIGNAL_TUNE] =
g_object_class_install_property (gobject_class, ARG_DVBSRC_FREQUENCY,
g_param_spec_uint ("frequency", "Center frequency",
"Center frequency to tune into. Measured in kHz for the satellite "
- "distribution standars and Hz for all the rest",
+ "distribution standards and Hz for all the rest",
0, G_MAXUINT, DEFAULT_FREQUENCY,
GST_PARAM_MUTABLE_PLAYING | G_PARAM_READWRITE));
* GstDvbSrc::tuning-start:
* @gstdvbsrc: the element on which the signal is emitted
*
- * Signal emited when the element first attempts to tune the
+ * Signal emitted when the element first attempts to tune the
 * frontend tuner to a given frequency.
*/
gst_dvbsrc_signals[SIGNAL_TUNING_START] =
* GstDvbSrc::tuning-done:
* @gstdvbsrc: the element on which the signal is emitted
*
- * Signal emited when the tunner has successfully got a lock on a signal.
+ * Signal emitted when the tuner has successfully got a lock on a signal.
*/
gst_dvbsrc_signals[SIGNAL_TUNING_DONE] =
g_signal_new ("tuning-done", G_TYPE_FROM_CLASS (klass),
* GstDvbSrc::tuning-fail:
* @gstdvbsrc: the element on which the signal is emitted
*
- * Signal emited when the tunner failed to get a lock on the
+ * Signal emitted when the tuner failed to get a lock on the
* signal.
*/
gst_dvbsrc_signals[SIGNAL_TUNING_FAIL] =
* GstDvbSrc::tune:
* @gstdvbsrc: the element on which the signal is emitted
*
- * Signal emited from the application to the element, instructing it
+ * Signal emitted from the application to the element, instructing it
* to tune.
*/
gst_dvbsrc_signals[SIGNAL_TUNE] =
return FALSE;
}
- /* If set, confirm the choosen delivery system is actually
+ /* If set, confirm the chosen delivery system is actually
* supported by the hardware */
if (object->delsys != SYS_UNDEFINED) {
GST_DEBUG_OBJECT (object, "Confirming delivery system '%u' is supported",
* serialized in a "packet" and sent over the socket. The sender then
* performs a blocking wait for a reply, if a return code is needed.
*
- * All objects that contan a GstStructure (messages, queries, events) are
+ * All objects that contain a GstStructure (messages, queries, events) are
* serialized by serializing the GstStructure to a string
* (gst_structure_to_string). This implies some limitations, of course.
* All fields of this structures that are not serializable to strings (ex.
/* change the state of the peer first */
/* If the fd out is -1, we do not actually call the peer. This will happen
- when we explicitely disconnected, and in that case we want to be able
+ when we explicitly disconnected, and in that case we want to be able
to bring the element down to NULL, so it can be restarted with a new
slave pipeline. */
if (sink->comm.fdout >= 0) {
}
/* the parent's (GstElement) state change func won't return ASYNC or
- * NO_PREROLL, so unless it has returned FAILURE, which we have catched above,
+ * NO_PREROLL, so unless it has returned FAILURE, which we have caught above,
* we are not interested in its return code... just return the peer's */
return peer_ret;
}
if (et == GST_TYPE_IPC_PIPELINE_SRC) {
g_signal_emit_by_name (G_OBJECT (e), "forward-message", message, &ret);
- /* if we succesfully sent this to the master and it's not ASYNC_DONE or EOS,
+ /* if we successfully sent this to the master and it's not ASYNC_DONE or EOS,
* we can skip sending it again through the other ipcpipelinesrcs */
if (ret && GST_MESSAGE_TYPE (message) != GST_MESSAGE_ASYNC_DONE &&
GST_MESSAGE_TYPE (message) != GST_MESSAGE_EOS)
self = GST_KMS_SINK (bsink);
/* We are going to change the internal buffer pool, which means it will no
- * longer be compatbile with the last_buffer size. Drain now, as we won't be
+ * longer be compatible with the last_buffer size. Drain now, as we won't be
* able to do that later on. */
gst_kms_sink_drain (self);
*/
g_properties[PROP_CONNECTOR_PROPS] =
g_param_spec_boxed ("connector-properties", "Connector Properties",
- "Additionnal properties for the connector",
+ "Additional properties for the connector",
GST_TYPE_STRUCTURE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS);
/**
*/
g_properties[PROP_PLANE_PROPS] =
g_param_spec_boxed ("plane-properties", "Connector Plane",
- "Additionnal properties for the plane",
+ "Additional properties for the plane",
GST_TYPE_STRUCTURE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS);
g_object_class_install_properties (gobject_class, PROP_N, g_properties);
msdk_mid = (GstMsdkMemoryID *) mfx_surface->Data.MemId;
dpy = gst_msdk_context_get_handle (context);
- /* Destory the underlined VAImage if already mapped */
+ /* Destroy the underlying VAImage if already mapped */
if (msdk_mid->image.image_id != VA_INVALID_ID
&& msdk_mid->image.buf != VA_INVALID_ID) {
status =
}
}
#ifndef _WIN32
- /* When using dmabuf, we should confirm that the fd of memeory and
+ /* When using dmabuf, we should confirm that the fd of memory and
* the fd of surface match, since there is no guarantee that fd matches
* between surface and memory.
*/
GST_DEBUG_OBJECT (thiz, "set target bitrate: %u kbit/sec", thiz->bitrate);
mfx->RateControlMethod = thiz->rate_control;
- /* No effect in CQP varient algorithms */
+ /* No effect in CQP variant algorithms */
if ((mfx->RateControlMethod != MFX_RATECONTROL_CQP) &&
(thiz->bitrate > G_MAXUINT16 || thiz->max_vbv_bitrate > G_MAXUINT16)) {
mfxU32 max_val = MAX (thiz->max_vbv_bitrate, thiz->bitrate);
g_object_class_install_property (gobject_class, PROP_B_PYRAMID,
g_param_spec_boolean ("b-pyramid", "B-pyramid",
- "Enable B-Pyramid Referene structure", FALSE,
+ "Enable B-Pyramid Reference structure", FALSE,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
gst_element_class_set_static_metadata (element_class,
if ((info->flags & GST_MAP_WRITE) && mem->surface
&& mem->surface->Data.Locked) {
- GST_WARNING ("The surface in memory %p is not still avaliable", mem);
+ GST_WARNING ("The surface in memory %p is not still available", mem);
return NULL;
}
decoder->param.mfx.CodecProfile = MFX_PROFILE_VC1_MAIN;
else {
decoder->param.mfx.CodecProfile = MFX_PROFILE_VC1_ADVANCED;
- /* asf advanced profile codec-data has 1 byte in the begining
+ /* asf advanced profile codec-data has 1 byte in the beginning
* which is the ASF binding byte. MediaSDK can't recognize this
* byte, so discard it */
if (decoder->input_state->codec_data) {
/*
* Every time releasing a gst buffer, we need to check the status of surface's lock,
- * so that we could manage locked surfaces seperatedly in the context.
+ * so that we could manage locked surfaces separately in the context.
* Otherwise, we put the surface to the available list.
*/
void
}
if ((flags & GST_MAP_WRITE) && mem->surface && mem->surface->Data.Locked) {
- GST_WARNING ("The surface in memory %p is not still avaliable", mem);
+ GST_WARNING ("The surface in memory %p is not still available", mem);
return FALSE;
}
if ((info->flags & GST_MAP_WRITE) && mem->surface
&& mem->surface->Data.Locked) {
- GST_WARNING ("The surface in memory %p is not still avaliable", mem);
+ GST_WARNING ("The surface in memory %p is not still available", mem);
return NULL;
}
if (!gst_buffer_pool_set_config (pool, config))
goto error_pool_config;
- /* Updating pool_info with algined info of allocator */
+ /* Updating pool_info with aligned info of allocator */
*pool_info = info;
return pool;
gst_object_unref (thiz->srcpad_buffer_pool);
/* Always create a pool for vpp out buffers. Each of the msdk element
- * has to create it's own mfxsurfacepool which is an msdk contraint.
+ * has to create its own mfxsurfacepool which is an msdk constraint.
* For eg: Each Msdk component (vpp, dec and enc) will invoke the external
* Frame allocator for video-memory usage.So sharing the pool between
* gst-msdk elements might not be a good idea, rather each element
gst_query_add_allocation_param (query, allocator, ¶ms);
gst_structure_free (config);
- /* if upstream does't have a pool requirement, set only
+ /* if upstream doesn't have a pool requirement, set only
* size, min_buffers and max_buffers in query */
gst_query_add_allocation_pool (query, need_pool ? pool : NULL, size,
min_buffers, 0);
error_more_data:
GST_WARNING_OBJECT (thiz,
- "MSDK Requries additional input for processing, "
+ "MSDK Requires additional input for processing, "
 "Returning FLOW_DROPPED since no output buffer was generated");
ret = GST_BASE_TRANSFORM_FLOW_DROPPED;
goto transform_end;
/* Enable the required filters */
ensure_filters (thiz);
- /* Add exteneded buffers */
+ /* Add extended buffers */
if (thiz->num_extra_params) {
thiz->param.NumExtParam = thiz->num_extra_params;
thiz->param.ExtParam = thiz->extra_params;
goto done;
}
- /* If all this failed, keep the height that was nearest to the orignal
+ /* If all this failed, keep the height that was nearest to the original
* height and the nearest possible width. This changes the DAR but
* there's not much else to do here.
*/
default:
break;
}
- return "undefiend error";
+ return "undefined error";
}
void
/**
* gst_cuda_ensure_element_context:
* @element: the #GstElement running the query
- * @device_id: prefered device-id, pass device_id >=0 when
+ * @device_id: preferred device-id, pass device_id >=0 when
* the device_id explicitly required. Otherwise, set -1.
* @cuda_ctx: (inout): the resulting #GstCudaContext
*
* gst_cuda_handle_set_context:
* @element: a #GstElement
* @context: a #GstContext
- * @device_id: prefered device-id, pass device_id >=0 when
+ * @device_id: preferred device-id, pass device_id >=0 when
* the device_id explicitly required. Otherwise, set -1.
* @cuda_ctx: (inout) (transfer full): location of a #GstCudaContext
*
/**
* gst_cuda_graphics_resource_new: (skip)
* @context: (transfer none): a #GstCudaContext
- * @graphics_context: (transfer none) (nullable): a grapics API specific context object
+ * @graphics_context: (transfer none) (nullable): a graphics API specific context object
* @type: a #GstCudaGraphicsResourceType of resource registration
*
* Create new #GstCudaGraphicsResource with given @context and @type
GstFlowReturn flow = GST_FLOW_OK;
/* overview of operation:
- * 1. retreive the next buffer submitted to the bitstream pool
+ * 1. retrieve the next buffer submitted to the bitstream pool
 * 2. wait for that buffer to be ready from nvenc (LockBitstream)
- * 3. retreive the GstVideoCodecFrame associated with that buffer
+ * 3. retrieve the GstVideoCodecFrame associated with that buffer
* 4. for each buffer in the frame
 * 4.1 (step 2): wait for that buffer to be ready from nvenc (LockBitstream)
* 4.2 create an output GstBuffer from the nvenc buffers
memset (&resource->nv_mapped_resource, 0,
sizeof (resource->nv_mapped_resource));
- /* scratch buffer for non-contigious planer into a contigious buffer */
+ /* scratch buffer for copying non-contiguous planar data into a contiguous buffer */
cu_ret =
CuMemAllocPitch (&resource->cuda_pointer,
&resource->cuda_stride, _get_plane_width (info, 0),
_get_frame_data_height (info), 16);
if (!gst_cuda_result (cu_ret)) {
- GST_ERROR_OBJECT (nvenc, "failed to alocate cuda scratch buffer "
+ GST_ERROR_OBJECT (nvenc, "failed to allocate cuda scratch buffer "
"ret %d", cu_ret);
g_assert_not_reached ();
}
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
GST_PARAM_MUTABLE_PLAYING));
g_object_class_install_property (gobject_class, PROP_LTR_ENCODER_CONTROL,
- g_param_spec_int ("ltr-encoder-control", "LTR frames controled by device",
- "Number of LTR frames the device can control (dynamic control)",
- 0, G_MAXUINT8, DEFAULT_LTR_ENCODER_CONTROL,
+ g_param_spec_int ("ltr-encoder-control",
+ "LTR frames controlled by device",
+ "Number of LTR frames the device can control (dynamic control)", 0,
+ G_MAXUINT8, DEFAULT_LTR_ENCODER_CONTROL,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
GST_PARAM_MUTABLE_PLAYING));
* SECTION:provider-uvch264deviceprovider
*
* Device provider for uvch264 devices, it basically contains
- * the same informations as the v4l2 device provider but on top
+ * the same information as the v4l2 device provider but on top
* set the following properties:
*
* ```
have_frames = length / (self->mix_format->nBlockAlign);
if (self->sharemode == AUDCLNT_SHAREMODE_EXCLUSIVE) {
- /* In exlusive mode we have to wait always */
+ /* In exclusive mode we have to wait always */
dwWaitResult = WaitForSingleObject (self->event_handle, INFINITE);
if (dwWaitResult != WAIT_OBJECT_0) {
GST_ERROR_OBJECT (self, "Error waiting for event handle: %x",
}
/*
- * Figure out how many simultanous requests it prefers.
+ * Figure out how many simultaneous requests it prefers.
*
* This is really important as it depends on the driver and the device.
* Doing too few will result in poor capture performance, whilst doing too
guess_aspect (gint width, gint height, gint * par_width, gint * par_height)
{
/*
- * As we dont have access to the actual pixel aspect, we will try to do a
+ * As we don't have access to the actual pixel aspect, we will try to do a
* best-effort guess. The guess is based on most sensors being either 4/3
* or 16/9, and most pixel aspects being close to 1/1.
*/
static void
gst_dx9screencapsrc_init (GstDX9ScreenCapSrc * src)
{
- /* Set src element inital values... */
+ /* Set src element initial values... */
src->surface = NULL;
src->d3d9_device = NULL;
src->capture_x = 0;
new_buf_size, width, height);
/* Do screen capture and put it into buffer...
- * Aquire front buffer, and lock it
+ * Acquire front buffer, and lock it
*/
hres =
IDirect3DDevice9_GetFrontBufferData (src->d3d9_device, 0, src->surface);
static void
gst_gdiscreencapsrc_init (GstGDIScreenCapSrc * src)
{
- /* Set src element inital values... */
+ /* Set src element initial values... */
src->dibMem = NULL;
src->hBitmap = (HBITMAP) INVALID_HANDLE_VALUE;
src->memDC = (HDC) INVALID_HANDLE_VALUE;
* appsink_received_data: called each time AppSink receives data
* @engine: #GstAdaptiveDemuxTestEngine
* @stream: #GstAdaptiveDemuxTestOutputStream
- * @buffer: the #GstBuffer that was recevied by #GstAppSink
+ * @buffer: the #GstBuffer that was received by #GstAppSink
* @user_data: the user_data passed to gst_adaptive_demux_test_run()
* Returns: %TRUE to continue processing, %FALSE to cause EOS
*
gst_buffer_unref (out);
/* For those wondering why DATA_LEN_4 is 55 and not 50 - or why
- * comment above states that NAL units are broken "rougly" at 100 bytes:
+ * comment above states that NAL units are broken "roughly" at 100 bytes:
* With mtu=128, there are only 100 bytes left for NAL units, so anything
* bigger will be broken. But AVTP NAL units fragments have a header with
* two bytes, so NAL units will use only 98 bytes. This leaves the last
file_uri_double_period =
gst_filename_to_uri (XLINK_DOUBLE_PERIOD_FILENAME, NULL);
- /* constructs inital mpd using external xml uri */
+ /* constructs initial mpd using external xml uri */
/* For invalid URI, mpdparser should be ignore it */
xml_joined = g_strjoin ("", xml_frag_start,
xml_uri_front, "http://404/ERROR/XML.period", xml_uri_rear,
GstHLSMasterPlaylist *master;
GstM3U8 *pl;
GstM3U8MediaFile *mf;
- gboolean discontinous;
+ gboolean discontinuous;
GstClockTime timestamp;
master = load_playlist (BYTE_RANGES_PLAYLIST);
pl = master->default_variant->m3u8;
/* Check the next fragment */
- mf = gst_m3u8_get_next_fragment (pl, TRUE, ×tamp, &discontinous);
+ mf = gst_m3u8_get_next_fragment (pl, TRUE, ×tamp, &discontinuous);
fail_unless (mf != NULL);
- assert_equals_int (discontinous, FALSE);
+ assert_equals_int (discontinuous, FALSE);
assert_equals_string (mf->uri, "http://media.example.com/all.ts");
assert_equals_uint64 (timestamp, 0);
assert_equals_uint64 (mf->duration, 10 * GST_SECOND);
gst_m3u8_advance_fragment (pl, TRUE);
/* Check next media segments */
- mf = gst_m3u8_get_next_fragment (pl, TRUE, ×tamp, &discontinous);
+ mf = gst_m3u8_get_next_fragment (pl, TRUE, ×tamp, &discontinuous);
fail_unless (mf != NULL);
- assert_equals_int (discontinous, FALSE);
+ assert_equals_int (discontinuous, FALSE);
assert_equals_string (mf->uri, "http://media.example.com/all.ts");
assert_equals_uint64 (timestamp, 10 * GST_SECOND);
assert_equals_uint64 (mf->duration, 10 * GST_SECOND);
gst_m3u8_advance_fragment (pl, TRUE);
/* Check next media segments */
- mf = gst_m3u8_get_next_fragment (pl, TRUE, ×tamp, &discontinous);
- assert_equals_int (discontinous, FALSE);
+ mf = gst_m3u8_get_next_fragment (pl, TRUE, ×tamp, &discontinuous);
+ assert_equals_int (discontinuous, FALSE);
assert_equals_string (mf->uri, "http://media.example.com/all.ts");
assert_equals_uint64 (timestamp, 20 * GST_SECOND);
assert_equals_uint64 (mf->duration, 10 * GST_SECOND);
/* iterate over our tag mapping */
for (i = 0; i < G_N_ELEMENTS (tag_map); i++) {
if (gst_tag_list_get_value_index (taglist, tag_map[i].gst_tag, 0)) {
- /* we have added this field to the taglist, check if it was writen in
+ /* we have added this field to the taglist, check if it was written in
* exif */
libexif_check_tag_exists (taglist, i, exif_data);
}
GST_BUFFER_DURATION (in_buf) = GST_SECOND;
- /* Push bufffers until get decoder output */
+ /* Push buffers until get decoder output */
do {
fail_if (i > MAX_PUSH_BUFFER);
/* when removing an element insertbin will look at the pending operations list
* and check if that element is pending and remove it before adding.
- * So we check that the callback count hapenned before the end, and it
+ * So we check that the callback count happened before the end, and it
* also happens from this same main thread. So we need to store the
* streaming thread to restore it after the check */
elem = gst_element_factory_make ("identity", NULL);
{
GList *list;
- /* gloabl tag */
+ /* global tag */
fail_unless (gst_player_media_info_is_seekable (media_info) == TRUE);
fail_unless (gst_player_media_info_get_tags (media_info) != NULL);
fail_unless_equals_string (gst_player_media_info_get_title (media_info),
}
} else if (GST_EVENT_TYPE (info->data) == GST_EVENT_SEGMENT) {
/* from the sink pipeline, we don't know whether the master issued a seek,
- as the seek_sent memory location isn't directly accesible to us, so we
+ as the seek_sent memory location isn't directly accessible to us, so we
look for a segment after a buffer to mean a seek was sent */
idx = pad2idx (pad, td->two_streams);
if (d->got_buffer_before_seek[idx])
/* state_change tests issue a number of state changes in
(hopefully) all interesting configurations, and checks
- the state changes occured on the slave pipeline. The links
+ the state changes occurred on the slave pipeline. The links
are disconnected and reconnected to check it all still
works after this. */
if (1) {
* while splitting the pipeline in two processes, running the source & demuxer
* on the master process and the decoders & sinks on the slave.
* See keyboard_cb() for the various keyboard shortcuts you can use to
- * interract with it while the video window is focused.
+ * interact with it while the video window is focused.
*/
#define _GNU_SOURCE
gulong video_pad_probe_block_id;
/* The current running time of this item; updated with every audio buffer if
- * this item has audio; otherwise it's updated withe very video buffer */
+ * this item has audio; otherwise it's updated with every video buffer */
guint64 running_time;
} PlayoutItem;
"GstEncodingProfile serialization format\n"
"---------------------------------------\n"
"\n"
- "GStreamer encoding profiles can be descibed with a quite extensive\n"
- "syntax which is descibed in the GstEncodingProfile documentation.\n"
+ "GStreamer encoding profiles can be described with a quite extensive\n"
+ "syntax which is described in the GstEncodingProfile documentation.\n"
"\n"
"The simple case looks like:\n"
"\n"
"msg-source-type", G_TYPE_GTYPE, &type, NULL) &&
type == g_type_from_name ("GstTranscodeBin")) {
error ("\nCould not setup transcoding pipeline,"
- " make sure that you transcoding format parametters"
+ " make sure that your transcoding format parameters"
" are compatible with the input stream.");
return;
else if (GST_IS_ENCODING_VIDEO_PROFILE (profile))
return "Video";
else
- return "Unkonwn";
+ return "Unknown";
}
static void