goto caps_failed;
}
+ GST_DEBUG_OBJECT (self, "Setting output caps: %" GST_PTR_FORMAT, caps);
+
if (!gst_pad_set_caps (GST_AUDIO_ENCODER_SRC_PAD (self), caps)) {
gst_caps_unref (caps);
if (buf)
GstBuffer *codec_data;
GstMapInfo map = GST_MAP_INFO_INIT;
+ GST_DEBUG_OBJECT (self, "Handling codec data");
caps =
gst_caps_copy (gst_pad_get_current_caps (GST_AUDIO_ENCODER_SRC_PAD
(self)));
GstBuffer *outbuf;
guint n_samples;
+ GST_DEBUG_OBJECT (self, "Handling output data");
+
n_samples =
klass->get_num_samples (self, self->out_port,
gst_audio_encoder_get_audio_info (GST_AUDIO_ENCODER (self)), buf);
outbuf, n_samples);
}
+ GST_DEBUG_OBJECT (self, "Handled output data");
+
if (is_eos || flow_ret == GST_FLOW_EOS) {
g_mutex_lock (&self->drain_lock);
if (self->draining) {
* format change happened we can just exit here.
*/
if (needs_disable) {
+ GST_DEBUG_OBJECT (self, "Need to disable and drain encoder");
gst_omx_audio_enc_drain (self);
if (gst_omx_port_manual_reconfigure (self->in_port, TRUE) != OMX_ErrorNone)
return FALSE;
if (gst_omx_port_set_enabled (self->in_port, FALSE) != OMX_ErrorNone)
return FALSE;
+
+ GST_DEBUG_OBJECT (self, "Encoder drained and disabled");
}
port_def.format.audio.eEncoding = OMX_AUDIO_CodingPCM;
+ GST_DEBUG_OBJECT (self, "Setting inport port definition");
if (!gst_omx_port_update_port_definition (self->in_port, &port_def))
return FALSE;
+
+ GST_DEBUG_OBJECT (self, "Setting outport port definition");
if (!gst_omx_port_update_port_definition (self->out_port, NULL))
return FALSE;
pcm_param.eChannelMapping[i] = pos;
}
+ GST_DEBUG_OBJECT (self, "Setting PCM parameters");
err =
gst_omx_component_set_parameter (self->component, OMX_IndexParamAudioPcm,
&pcm_param);
}
}
+ GST_DEBUG_OBJECT (self, "Enabling component");
if (needs_disable) {
if (gst_omx_port_set_enabled (self->in_port, TRUE) != OMX_ErrorNone)
return FALSE;
}
/* Start the srcpad loop again */
+ GST_DEBUG_OBJECT (self, "Starting task again");
self->downstream_flow_ret = GST_FLOW_OK;
gst_pad_start_task (GST_AUDIO_ENCODER_SRC_PAD (self),
(GstTaskFunction) gst_omx_audio_enc_loop, encoder, NULL);
goto full_buffer;
}
+ GST_DEBUG_OBJECT (self, "Handling frame at offset %d", offset);
+
    /* Copy the buffer content in chunks of the size requested
     * by the port */
buf->omx_buf->nFilledLen =
gst_omx_port_release_buffer (self->in_port, buf);
}
+ GST_DEBUG_OBJECT (self, "Passed frame to component");
+
return self->downstream_flow_ret;
full_buffer:
switch (port_def.format.video.eColorFormat) {
case OMX_COLOR_FormatYUV420Planar:
case OMX_COLOR_FormatYUV420PackedPlanar:
+ GST_DEBUG_OBJECT (self, "Output is I420 (%d)",
+ port_def.format.video.eColorFormat);
format = GST_VIDEO_FORMAT_I420;
break;
case OMX_COLOR_FormatYUV420SemiPlanar:
+ GST_DEBUG_OBJECT (self, "Output is NV12 (%d)",
+ port_def.format.video.eColorFormat);
format = GST_VIDEO_FORMAT_NV12;
break;
default:
break;
}
+ GST_DEBUG_OBJECT (self,
+ "Setting output state: format %s, width %d, height %d",
+ gst_video_format_to_string (format), port_def.format.video.nFrameWidth,
+ port_def.format.video.nFrameHeight);
+
state = gst_video_decoder_set_output_state (GST_VIDEO_DECODER (self),
format, port_def.format.video.nFrameWidth,
port_def.format.video.nFrameHeight, self->input_state);
gst_video_decoder_finish_frame (GST_VIDEO_DECODER (self), frame);
}
+ GST_DEBUG_OBJECT (self, "Read frame from component");
+
if (is_eos || flow_ret == GST_FLOW_EOS) {
g_mutex_lock (&self->drain_lock);
if (self->draining) {
GstStructure *s;
const gchar *format_str;
+ GST_DEBUG_OBJECT (self, "Trying to negotiate a video format with downstream");
+
intersection = gst_pad_get_allowed_caps (GST_VIDEO_DECODER_SRC_PAD (self));
+ GST_DEBUG_OBJECT (self, "Allowed downstream caps: %" GST_PTR_FORMAT,
+ intersection);
+
  GST_OMX_INIT_STRUCT (&param);
param.nPortIndex = port->index;
param.nIndex = 0;
gst_caps_append_structure (comp_supported_caps,
gst_structure_new ("video/x-raw",
"format", G_TYPE_STRING, "I420", NULL));
+ GST_DEBUG_OBJECT (self, "Component supports I420 (%d) at index %d",
+ param.eColorFormat, param.nIndex);
break;
case OMX_COLOR_FormatYUV420SemiPlanar:
m = g_slice_new0 (VideoNegotiationMap);
gst_caps_append_structure (comp_supported_caps,
gst_structure_new ("video/x-raw",
"format", G_TYPE_STRING, "NV12", NULL));
+ GST_DEBUG_OBJECT (self, "Component supports NV12 (%d) at index %d",
+ param.eColorFormat, param.nIndex);
break;
default:
break;
}
}
+ GST_DEBUG_OBJECT (self, "Negotiating color format %s (%d)", format_str,
+ param.eColorFormat);
+
/* We must find something here */
g_assert (l != NULL);
g_list_free_full (negotiation_map,
}
if (needs_disable && is_format_change) {
+ GST_DEBUG_OBJECT (self, "Need to disable and drain decoder");
+
gst_omx_video_dec_drain (self, FALSE);
if (klass->cdata.hacks & GST_OMX_HACK_NO_COMPONENT_RECONFIGURE) {
if (self->input_state)
gst_video_codec_state_unref (self->input_state);
self->input_state = NULL;
+
+ GST_DEBUG_OBJECT (self, "Decoder drained and disabled");
}
port_def.format.video.nFrameWidth = info->width;
else
port_def.format.video.xFramerate = (info->fps_n << 16) / (info->fps_d);
+ GST_DEBUG_OBJECT (self, "Setting inport port definition");
+
if (!gst_omx_port_update_port_definition (self->in_port, &port_def))
return FALSE;
+
+ GST_DEBUG_OBJECT (self, "Setting outport port definition");
if (!gst_omx_port_update_port_definition (self->out_port, NULL))
return FALSE;
GST_LOG_OBJECT (self, "Negotiation failed, will get output format later");
}
+ GST_DEBUG_OBJECT (self, "Enabling component");
+
if (needs_disable) {
if (gst_omx_port_set_enabled (self->in_port, TRUE) != OMX_ErrorNone)
return FALSE;
}
/* Start the srcpad loop again */
+ GST_DEBUG_OBJECT (self, "Starting task again");
+
self->downstream_flow_ret = GST_FLOW_OK;
gst_pad_start_task (GST_VIDEO_DECODER_SRC_PAD (self),
(GstTaskFunction) gst_omx_video_dec_loop, decoder, NULL);
}
if (self->codec_data) {
+ GST_DEBUG_OBJECT (self, "Passing codec data to the component");
+
codec_data = self->codec_data;
if (buf->omx_buf->nAllocLen - buf->omx_buf->nOffset <
}
/* Now handle the frame */
+ GST_DEBUG_OBJECT (self, "Passing frame offset %d to the component", offset);
    /* Copy the buffer content in chunks of the size requested
     * by the port */
gst_omx_port_release_buffer (self->in_port, buf);
}
+ GST_DEBUG_OBJECT (self, "Passed frame to component");
+
return self->downstream_flow_ret;
full_buffer:
GstMapInfo map = GST_MAP_INFO_INIT;
GstCaps *caps;
+ GST_DEBUG_OBJECT (self, "Handling codec data");
+
caps = klass->get_caps (self, self->out_port, self->input_state);
codec_data = gst_buffer_new_and_alloc (buf->omx_buf->nFilledLen);
GstBuffer *outbuf;
GstMapInfo map = GST_MAP_INFO_INIT;
+ GST_DEBUG_OBJECT (self, "Handling output data");
+
if (buf->omx_buf->nFilledLen > 0) {
outbuf = gst_buffer_new_and_alloc (buf->omx_buf->nFilledLen);
GST_VIDEO_ENCODER_STREAM_UNLOCK (self);
goto caps_failed;
}
+
+ GST_DEBUG_OBJECT (self, "Setting output state: %" GST_PTR_FORMAT, caps);
+
state =
gst_video_encoder_set_output_state (GST_VIDEO_ENCODER (self), caps,
self->input_state);
flow_ret = GST_FLOW_EOS;
}
+ GST_DEBUG_OBJECT (self, "Read frame from component");
+
if (flow_ret != GST_FLOW_OK)
goto flow_error;
* format change happened we can just exit here.
*/
if (needs_disable) {
+ GST_DEBUG_OBJECT (self, "Need to disable and drain encoder");
gst_omx_video_enc_drain (self, FALSE);
if (gst_omx_port_manual_reconfigure (self->in_port, TRUE) != OMX_ErrorNone)
return FALSE;
if (gst_omx_port_set_enabled (self->in_port, FALSE) != OMX_ErrorNone)
return FALSE;
+
+ GST_DEBUG_OBJECT (self, "Encoder drained and disabled");
}
switch (info->finfo->format) {
port_def.format.video.xFramerate = (info->fps_n) / (info->fps_d);
}
+ GST_DEBUG_OBJECT (self, "Setting inport port definition");
if (!gst_omx_port_update_port_definition (self->in_port, &port_def))
return FALSE;
+
+ GST_DEBUG_OBJECT (self, "Setting outport port definition");
if (!gst_omx_port_update_port_definition (self->out_port, NULL))
return FALSE;
}
}
+ GST_DEBUG_OBJECT (self, "Enabling component");
if (needs_disable) {
if (gst_omx_port_set_enabled (self->in_port, TRUE) != OMX_ErrorNone)
return FALSE;
self->input_state = gst_video_codec_state_ref (self->input_state);
/* Start the srcpad loop again */
+ GST_DEBUG_OBJECT (self, "Starting task again");
self->downstream_flow_ret = GST_FLOW_OK;
gst_pad_start_task (GST_VIDEO_ENCODER_SRC_PAD (self),
(GstTaskFunction) gst_omx_video_enc_loop, encoder, NULL);
}
/* Now handle the frame */
+ GST_DEBUG_OBJECT (self, "Handling frame");
+
if (GST_VIDEO_CODEC_FRAME_IS_FORCE_KEYFRAME (frame)) {
OMX_ERRORTYPE err;
OMX_CONFIG_INTRAREFRESHVOPTYPE config;
config.nPortIndex = self->out_port->index;
config.IntraRefreshVOP = OMX_TRUE;
+ GST_DEBUG_OBJECT (self, "Forcing a keyframe");
err =
gst_omx_component_set_config (self->component,
OMX_IndexConfigVideoIntraVOPRefresh, &config);
self->started = TRUE;
gst_omx_port_release_buffer (self->in_port, buf);
+
+ GST_DEBUG_OBJECT (self, "Passed frame to component");
}
  return self->downstream_flow_ret;
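
/* For reference: the GST_DEBUG_OBJECT calls added above only produce output
 * through the element's registered debug category. Below is a minimal sketch
 * of that standard GStreamer boilerplate; the category name "myexample" and
 * the function example_init_debug() are hypothetical and not part of gst-omx,
 * which registers its own per-element categories.
 */
#include <gst/gst.h>

GST_DEBUG_CATEGORY_STATIC (my_example_debug_category);
#define GST_CAT_DEFAULT my_example_debug_category

static void
example_init_debug (void)
{
  /* Register the category once, typically from class_init or plugin_init;
   * afterwards GST_DEBUG_OBJECT (self, ...) in the same file logs under it. */
  GST_DEBUG_CATEGORY_INIT (my_example_debug_category, "myexample", 0,
      "Example debug category for the messages added in this patch");
}

/* At runtime the messages become visible with e.g. GST_DEBUG=myexample:5
 * (level 5 is DEBUG). */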