GstTask *task = encoder->srcpad->task;
/* Wait for the task to be drained */
+ GST_DEBUG_OBJECT (self, "Waiting for encoder stop");
GST_OBJECT_LOCK (task);
while (GST_TASK_STATE (task) == GST_TASK_STARTED)
GST_TASK_WAIT (task);
return FALSE;
}
- /* activating a capture pool will also call STREAMON. CODA driver will
- * refuse to configure the output if the capture is streaming. */
- if (!gst_buffer_pool_set_active (GST_BUFFER_POOL (self->v4l2capture->pool),
- TRUE)) {
- GST_WARNING_OBJECT (self, "Could not activate capture buffer pool.");
- return FALSE;
- }
-
self->input_state = gst_video_codec_state_ref (state);
GST_DEBUG_OBJECT (self, "output caps: %" GST_PTR_FORMAT, state->caps);
GST_DEBUG_OBJECT (self, "Negotiating %s profile and level.",
klass->codec_name);
+ /* Only renegotiate on upstream changes */
+ if (self->input_state)
+ return TRUE;
+
allowed_caps = gst_pad_get_allowed_caps (GST_VIDEO_ENCODER_SRC_PAD (encoder));
if (allowed_caps) {
frame = gst_v4l2_video_enc_get_oldest_frame (encoder);
if (frame) {
+ /* At this point, the delta-unit buffer flag has already been set
+ * correctly by gst_v4l2_buffer_pool_process. However,
+ * gst_video_encoder_finish_frame will overwrite it based on
+ * GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame), so propagate the
+ * flag into the frame's sync-point state here first.
+ */
+ if (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DELTA_UNIT))
+ GST_VIDEO_CODEC_FRAME_UNSET_SYNC_POINT (frame);
+ else
+ GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT (frame);
frame->output_buffer = buffer;
buffer = NULL;
ret = gst_video_encoder_finish_frame (encoder, frame);
if (G_UNLIKELY (!g_atomic_int_get (&self->active)))
goto flushing;
- task_state = gst_pad_get_task_state (GST_VIDEO_DECODER_SRC_PAD (self));
+ task_state = gst_pad_get_task_state (GST_VIDEO_ENCODER_SRC_PAD (self));
if (task_state == GST_TASK_STOPPED || task_state == GST_TASK_PAUSED) {
GstBufferPool *pool = GST_BUFFER_POOL (self->v4l2output->pool);
goto activate_failed;
}
+ if (!gst_buffer_pool_set_active
+ (GST_BUFFER_POOL (self->v4l2capture->pool), TRUE)) {
+ GST_WARNING_OBJECT (self, "Could not activate capture buffer pool.");
+ goto activate_failed;
+ }
+
GST_DEBUG_OBJECT (self, "Starting encoding thread");
/* Start the processing task, when it quits, the task will disable input
{
GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (encoder);
GstVideoCodecState *state = gst_video_encoder_get_output_state (encoder);
+ GstCaps *caps;
GstV4l2Error error = GST_V4L2_ERROR_INIT;
GstClockTime latency;
gboolean ret = FALSE;
* GstVideoEncoder have set the width, height and framerate into the state
* caps. These are needed by the driver to calculate the buffer size and to
* implement bitrate adaptation. */
- if (!gst_v4l2_object_set_format (self->v4l2capture, state->caps, &error)) {
+ caps = gst_caps_copy (state->caps);
+ gst_structure_remove_field (gst_caps_get_structure (caps, 0), "colorimetry");
+ if (!gst_v4l2_object_set_format (self->v4l2capture, caps, &error)) {
gst_v4l2_error (self, &error);
+ gst_caps_unref (caps);
ret = FALSE;
goto done;
}
+ gst_caps_unref (caps);
if (gst_v4l2_object_decide_allocation (self->v4l2capture, query)) {
GstVideoEncoderClass *enc_class = GST_VIDEO_ENCODER_CLASS (parent_class);
{
GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (encoder);
gboolean ret;
+ GstEventType type = GST_EVENT_TYPE (event);
- switch (GST_EVENT_TYPE (event)) {
+ switch (type) {
case GST_EVENT_FLUSH_START:
GST_DEBUG_OBJECT (self, "flush start");
gst_v4l2_object_unlock (self->v4l2output);
ret = GST_VIDEO_ENCODER_CLASS (parent_class)->sink_event (encoder, event);
- switch (GST_EVENT_TYPE (event)) {
+ switch (type) {
case GST_EVENT_FLUSH_START:
gst_pad_stop_task (encoder->srcpad);
GST_DEBUG_OBJECT (self, "flush start done");
GST_OBJECT (GST_VIDEO_ENCODER_SRC_PAD (self)),
V4L2_BUF_TYPE_VIDEO_CAPTURE, klass->default_device,
gst_v4l2_get_input, gst_v4l2_set_input, NULL);
- self->v4l2capture->no_initial_format = TRUE;
- self->v4l2output->keep_aspect = FALSE;
}
static void