}
/* NOTE(review): this span reads as several concatenated diff hunks —
 * the handleSliceParameterBuffer() hunk followed by pieces of a
 * buffer-dispatch switch (presumably from vlVaRenderPicture(); the
 * `if (!ctx)` / VA_STATUS return and the VASliceParameterBufferType
 * case do not belong to handleSliceParameterBuffer()).  The braces
 * below do not balance as a single function; confirm the hunk headers
 * against the original patch before applying. */
static void
-handleSliceParameterBuffer(vlVaContext *context, vlVaBuffer *buf, unsigned num)
+handleSliceParameterBuffer(vlVaContext *context, vlVaBuffer *buf, unsigned num_slice_buffers, unsigned num_slices)
{
switch (u_reduce_video_profile(context->templat.profile)) {
case PIPE_VIDEO_FORMAT_MPEG12:
break;
case PIPE_VIDEO_FORMAT_AV1:
- vlVaHandleSliceParameterBufferAV1(context, buf, num);
+ vlVaHandleSliceParameterBufferAV1(context, buf, num_slice_buffers, num_slices);
break;
default:
/* NOTE(review): apparent hunk boundary — the lines from here down look
 * like they come from the caller's dispatch loop, not from
 * handleSliceParameterBuffer(). */
unsigned i;
unsigned slice_param_idx = 0;
+ unsigned slice_idx = 0;
if (!ctx)
return VA_STATUS_ERROR_INVALID_CONTEXT;
break;
case VASliceParameterBufferType:
- handleSliceParameterBuffer(context, buf, slice_param_idx++);
- break;
+ {
+ /* Some apps like gstreamer send all the slices at once
and some others send individual VASliceParameterBufferType buffers
+
+ slice_param_idx is the zero based count of VASliceParameterBufferType
+ (including multiple buffers with num_elements > 1) received
+ before this call to handleSliceParameterBuffer
+
+ slice_idx is the zero based number of total slices received
+ before this call to handleSliceParameterBuffer
+ */
+ /* NOTE(review): the parenthetical above is confusing —
 * slice_param_idx is incremented once per *buffer* below, while it is
 * slice_idx that advances by buf->num_elements, i.e. per *slice*.
 * Consider rewording the comment so the two counters aren't conflated. */
+ handleSliceParameterBuffer(context, buf, slice_param_idx++, slice_idx);
+ slice_idx += buf->num_elements;
+ } break;
case VASliceDataBufferType:
vaStatus = handleVASliceDataBufferType(context, buf);
}
}
/* Copy the AV1 slice parameters from a VASliceParameterBufferAV1 buffer
 * (which may carry several elements when num_elements > 1) into the
 * per-slice arrays of context->desc.av1.slice_parameter, starting at the
 * running slice count `num_slices` accumulated over previous calls.
 *
 * NOTE(review): the new `num_slice_buffers` parameter is never used in
 * this body — confirm it is needed (e.g. for a later bounds/consistency
 * check) or drop it from the signature.
 *
 * NOTE(review): `slice_index` is written into fixed-size arrays with no
 * bounds check; an application submitting more slice elements than the
 * slice_parameter arrays can hold would write out of bounds.  Presumably
 * this should clamp against the array capacity (ARRAY_SIZE of
 * slice_data_size) — TODO confirm and add the check. */
-void vlVaHandleSliceParameterBufferAV1(vlVaContext *context, vlVaBuffer *buf, unsigned int num)
+void vlVaHandleSliceParameterBufferAV1(vlVaContext *context, vlVaBuffer *buf, unsigned num_slice_buffers, unsigned num_slices)
{
- VASliceParameterBufferAV1 *av1 = buf->data;
-
- context->desc.av1.slice_parameter.slice_data_size[num] = av1->slice_data_size;
- context->desc.av1.slice_parameter.slice_data_offset[num] = av1->slice_data_offset;
- context->desc.av1.slice_parameter.slice_data_row[num] = av1->tile_row;
- context->desc.av1.slice_parameter.slice_data_col[num] = av1->tile_column;
- context->desc.av1.slice_parameter.slice_data_anchor_frame_idx[num] = av1->anchor_frame_idx;
+ for (uint32_t buffer_idx = 0; buffer_idx < buf->num_elements; buffer_idx++) {
+ uint32_t slice_index =
+ /* slices obtained so far from vaRenderPicture in previous calls */
+ num_slices +
+ /* current slice index processing this VASliceParameterBufferAV1 */
+ buffer_idx;
+
+ VASliceParameterBufferAV1 *av1 = &(((VASliceParameterBufferAV1*)buf->data)[buffer_idx]);
+ context->desc.av1.slice_parameter.slice_data_size[slice_index] = av1->slice_data_size;
+ context->desc.av1.slice_parameter.slice_data_offset[slice_index] = av1->slice_data_offset;
+ context->desc.av1.slice_parameter.slice_data_row[slice_index] = av1->tile_row;
+ context->desc.av1.slice_parameter.slice_data_col[slice_index] = av1->tile_column;
+ context->desc.av1.slice_parameter.slice_data_anchor_frame_idx[slice_index] = av1->anchor_frame_idx;
+ }
}
void vlVaHandleSliceParameterBufferVP9(vlVaContext *context, vlVaBuffer *buf);
void vlVaDecoderVP9BitstreamHeader(vlVaContext *context, vlVaBuffer *buf);
void vlVaHandlePictureParameterBufferAV1(vlVaDriver *drv, vlVaContext *context, vlVaBuffer *buf);
/* Prototype updated to match the new definition: `num_slice_buffers` is the
 * zero-based count of VASliceParameterBufferType buffers received so far and
 * `num_slices` the zero-based count of total slices received so far. */
-void vlVaHandleSliceParameterBufferAV1(vlVaContext *context, vlVaBuffer *buf, unsigned int num);
+void vlVaHandleSliceParameterBufferAV1(vlVaContext *context, vlVaBuffer *buf, unsigned num_slice_buffers, unsigned num_slices);
void getEncParamPresetH264(vlVaContext *context);
void getEncParamPresetH265(vlVaContext *context);
void vlVaHandleVAEncMiscParameterTypeQualityLevel(struct pipe_enc_quality_modes *p, vlVaQualityBits *in);