return true;
}
+/* Report whether the current decode job produces interlaced H.264 content.
+ *
+ * Returns true only for H.264/AVC bitstream decoding whose active SPS
+ * permits field coding, i.e. PAFF (field_pic_flag set on the picture) or
+ * MBAFF (mb_adaptive_frame_field_flag set in the SPS).  Any other codec,
+ * entrypoint, or a frame-only SPS yields false.
+ *
+ * NOTE(review): assumes context->decoder and h264->pps->sps are valid at
+ * this point — presumably guaranteed by the picture having been submitted;
+ * confirm against the caller.
+ */
+static bool vlVaQueryDecodeInterlacedH264(vlVaContext *context)
+{
+ struct pipe_h264_picture_desc *h264 = NULL;
+
+ /* Only H.264/AVC bitstream decode can hit the interlaced paths below. */
+ if (u_reduce_video_profile(context->templat.profile) != PIPE_VIDEO_FORMAT_MPEG4_AVC ||
+ context->decoder->entrypoint != PIPE_VIDEO_ENTRYPOINT_BITSTREAM)
+ return false;
+
+ h264 = &context->desc.h264;
+
+ /* frame_mbs_only_flag == 1 means the SPS forbids field coding entirely. */
+ if (h264->pps->sps->frame_mbs_only_flag)
+ return false;
+
+ return h264->field_pic_flag || /* PAFF */
+ h264->pps->sps->mb_adaptive_frame_field_flag; /* MBAFF */
+}
+
VAStatus
vlVaEndPicture(VADriverContextP ctx, VAContextID context_id)
{
enum pipe_format format;
struct pipe_video_buffer **out_target;
int output_id;
+ bool decode_interlaced;
if (!ctx)
return VA_STATUS_ERROR_INVALID_CONTEXT;
output_id = context->target_id;
out_target = &context->target;
apply_av1_fg = vlVaQueryApplyFilmGrainAV1(context, &output_id, &out_target);
+ decode_interlaced = vlVaQueryDecodeInterlacedH264(context);
mtx_lock(&drv->mutex);
surf = handle_table_get(drv->htab, output_id);
screen = context->decoder->context->screen;
supported = screen->get_video_param(screen, context->decoder->profile,
context->decoder->entrypoint,
- surf->buffer->interlaced ?
+ decode_interlaced || surf->buffer->interlaced ?
PIPE_VIDEO_CAP_SUPPORTS_INTERLACED :
PIPE_VIDEO_CAP_SUPPORTS_PROGRESSIVE);
context->decoder->profile,
context->decoder->entrypoint,
PIPE_VIDEO_CAP_PREFERS_INTERLACED);
+ realloc = surf->templat.interlaced != surf->buffer->interlaced;
+ } else if (decode_interlaced && !surf->buffer->interlaced) {
+ surf->templat.interlaced = true;
realloc = true;
}
}
if (surf->buffer)
surf->buffer->destroy(surf->buffer);
+ if (surf->deint_buffer)
+ surf->deint_buffer->destroy(surf->deint_buffer);
util_dynarray_fini(&surf->subpics);
FREE(surf);
handle_table_remove(drv->htab, surface_list[i]);
struct pipe_screen *screen;
VAStatus ret;
unsigned int usage;
+ struct pipe_video_buffer *buffer;
#ifdef _WIN32
if ((mem_type != VA_SURFACE_ATTRIB_MEM_TYPE_NTHANDLE)
return VA_STATUS_ERROR_INVALID_SURFACE;
}
- if (surf->buffer->interlaced) {
- struct pipe_video_buffer *interlaced = surf->buffer;
+ buffer = surf->buffer;
+
+ if (buffer->interlaced) {
+ struct pipe_video_buffer *interlaced = buffer;
struct u_rect src_rect, dst_rect;
- surf->templat.interlaced = false;
+ if (!surf->deint_buffer) {
+ surf->templat.interlaced = false;
- ret = vlVaHandleSurfaceAllocate(drv, surf, &surf->templat, NULL, 0);
- if (ret != VA_STATUS_SUCCESS) {
- mtx_unlock(&drv->mutex);
- return VA_STATUS_ERROR_ALLOCATION_FAILED;
+ ret = vlVaHandleSurfaceAllocate(drv, surf, &surf->templat, NULL, 0);
+ if (ret != VA_STATUS_SUCCESS) {
+ mtx_unlock(&drv->mutex);
+ return VA_STATUS_ERROR_ALLOCATION_FAILED;
+ }
+
+ surf->deint_buffer = surf->buffer;
+ surf->buffer = interlaced;
+ surf->templat.interlaced = true;
}
src_rect.x0 = dst_rect.x0 = 0;
src_rect.y1 = dst_rect.y1 = surf->templat.height;
vl_compositor_yuv_deint_full(&drv->cstate, &drv->compositor,
- interlaced, surf->buffer,
+ interlaced, surf->deint_buffer,
&src_rect, &dst_rect,
VL_COMPOSITOR_WEAVE);
- if (interlaced->codec && interlaced->codec->update_decoder_target)
- interlaced->codec->update_decoder_target(interlaced->codec, interlaced, surf->buffer);
- interlaced->destroy(interlaced);
+ buffer = surf->deint_buffer;
}
- surfaces = surf->buffer->get_surfaces(surf->buffer);
+ surfaces = buffer->get_surfaces(buffer);
usage = 0;
if (flags & VA_EXPORT_SURFACE_WRITE_ONLY)
#else
VADRMPRIMESurfaceDescriptor *desc = descriptor;
- desc->fourcc = PipeFormatToVaFourcc(surf->buffer->buffer_format);
+ desc->fourcc = PipeFormatToVaFourcc(buffer->buffer_format);
desc->width = surf->templat.width;
desc->height = surf->templat.height;
desc->num_objects = p;
if (flags & VA_EXPORT_SURFACE_COMPOSED_LAYERS) {
- uint32_t drm_format = pipe_format_to_drm_format(surf->buffer->buffer_format);
+ uint32_t drm_format = pipe_format_to_drm_format(buffer->buffer_format);
if (drm_format == DRM_FORMAT_INVALID) {
ret = VA_STATUS_ERROR_UNSUPPORTED_MEMORY_TYPE;
goto fail;