#include "i965_media.h"
static void
-i965_avc_bsd_free_private_surface_data(void **data)
+i965_avc_bsd_free_avc_bsd_surface(void **data)
{
struct i965_avc_bsd_surface *avc_bsd_surface = *data;
if (!avc_bsd_surface)
return;
- dri_bo_unreference(avc_bsd_surface->direct_mv_wr_top_bo);
- avc_bsd_surface->direct_mv_wr_top_bo = NULL;
- dri_bo_unreference(avc_bsd_surface->direct_mv_wr_bottom_bo);
- avc_bsd_surface->direct_mv_wr_bottom_bo = NULL;
+ dri_bo_unreference(avc_bsd_surface->dmv_top);
+ avc_bsd_surface->dmv_top = NULL;
+ dri_bo_unreference(avc_bsd_surface->dmv_bottom);
+ avc_bsd_surface->dmv_bottom = NULL;
+
free(avc_bsd_surface);
*data = NULL;
}
static void
-i965_avc_bsd_initialize_private_surface_data(VADriverContextP ctx, struct object_surface *obj_surface)
+i965_avc_bsd_init_avc_bsd_surface(VADriverContextP ctx,
+ struct object_surface *obj_surface,
+ VAPictureParameterBufferH264 *pic_param)
{
struct i965_driver_data *i965 = i965_driver_data(ctx);
+ struct i965_media_state *media_state = &i965->media_state;
+ struct i965_h264_context *i965_h264_context = (struct i965_h264_context *)media_state->private_context;
+ struct i965_avc_bsd_context *i965_avc_bsd_context = &i965_h264_context->i965_avc_bsd_context;
struct i965_avc_bsd_surface *avc_bsd_surface = obj_surface->private_data;
- obj_surface->free_private_data = i965_avc_bsd_free_private_surface_data;
+ obj_surface->free_private_data = i965_avc_bsd_free_avc_bsd_surface;
if (!avc_bsd_surface) {
avc_bsd_surface = calloc(sizeof(struct i965_avc_bsd_surface), 1);
-
- avc_bsd_surface->direct_mv_wr_top_bo = dri_bo_alloc(i965->intel.bufmgr,
- "direct mv wr top",
- 0x90000,
- 64);
- assert(avc_bsd_surface->direct_mv_wr_top_bo);
- avc_bsd_surface->direct_mv_wr_bottom_bo = dri_bo_alloc(i965->intel.bufmgr,
- "direct mv wr bottom",
- 0x90000,
- 64);
- assert(avc_bsd_surface->direct_mv_wr_bottom_bo);
+ assert((obj_surface->size & 0x3f) == 0);
obj_surface->private_data = avc_bsd_surface;
}
- avc_bsd_surface->direct_mv_flag = -1;
+ avc_bsd_surface->ctx = i965_avc_bsd_context;
+ avc_bsd_surface->dmv_bottom_flag = (pic_param->pic_fields.bits.field_pic_flag &&
+ !pic_param->seq_fields.bits.direct_8x8_inference_flag);
+
+ if (avc_bsd_surface->dmv_top == NULL) {
+ avc_bsd_surface->dmv_top = dri_bo_alloc(i965->intel.bufmgr,
+ "direct mv w/r buffer",
+ DMV_SIZE,
+ 0x1000);
+ }
+
+ if (avc_bsd_surface->dmv_bottom_flag &&
+ avc_bsd_surface->dmv_bottom == NULL) {
+ avc_bsd_surface->dmv_bottom = dri_bo_alloc(i965->intel.bufmgr,
+ "direct mv w/r buffer",
+ DMV_SIZE,
+ 0x1000);
+ }
}
static void
i965_avc_bsd_img_state(VADriverContextP ctx, struct decode_state *decode_state)
{
+ struct i965_driver_data *i965 = i965_driver_data(ctx);
+ struct i965_media_state *media_state = &i965->media_state;
+ struct i965_h264_context *i965_h264_context = (struct i965_h264_context *)media_state->private_context;
int qm_present_flag;
int img_struct;
int mbaff_frame_flag;
(0 << 16) | /* Un-SRT (Unsynchronized Root Thread) */
(0 << 12) | /* FIXME: no 16MV ??? */
(pic_param->seq_fields.bits.chroma_format_idc << 10) |
- (1 << 8) | /* Enable ILDB writing output */
+ (i965_h264_context->enable_avc_ildb << 8) | /* Enable ILDB writing output */
(pic_param->pic_fields.bits.entropy_coding_mode_flag << 7) |
((!pic_param->pic_fields.bits.reference_pic_flag) << 6) |
(pic_param->pic_fields.bits.constrained_intra_pred_flag << 5) |
OUT_BCS_RELOC(ctx, i965_h264_context->avc_it_data.bo,
I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
(i965_h264_context->avc_it_data.write_offset << 6));
- OUT_BCS_RELOC(ctx, i965_h264_context->avc_ildb_data.bo,
- I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
- 0);
+
+ if (i965_h264_context->enable_avc_ildb)
+ OUT_BCS_RELOC(ctx, i965_h264_context->avc_ildb_data.bo,
+ I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
+ 0);
+ else
+ OUT_BCS_BATCH(ctx, 0);
for (i = 0; i < ARRAY_ELEMS(i965_h264_context->fsid_list); i++) {
if (i965_h264_context->fsid_list[i].surface_id != VA_INVALID_ID) {
OUT_BCS_BATCH(ctx, 0);
OUT_BCS_BATCH(ctx, 0);
} else {
- assert(avc_bsd_surface->direct_mv_flag != -1);
-
- OUT_BCS_RELOC(ctx, avc_bsd_surface->direct_mv_wr_top_bo,
+ OUT_BCS_RELOC(ctx, avc_bsd_surface->dmv_top,
I915_GEM_DOMAIN_INSTRUCTION, 0,
0);
- if (avc_bsd_surface->direct_mv_flag == 1)
- OUT_BCS_RELOC(ctx, avc_bsd_surface->direct_mv_wr_bottom_bo,
+ if (avc_bsd_surface->dmv_bottom_flag == 1)
+ OUT_BCS_RELOC(ctx, avc_bsd_surface->dmv_bottom,
I915_GEM_DOMAIN_INSTRUCTION, 0,
0);
else
- OUT_BCS_RELOC(ctx, avc_bsd_surface->direct_mv_wr_top_bo,
+ OUT_BCS_RELOC(ctx, avc_bsd_surface->dmv_top,
I915_GEM_DOMAIN_INSTRUCTION, 0,
0);
}
assert(!(va_pic->flags & VA_PICTURE_H264_INVALID));
obj_surface = SURFACE(va_pic->picture_id);
assert(obj_surface);
- i965_avc_bsd_initialize_private_surface_data(ctx, obj_surface);
+ obj_surface->flags = (pic_param->pic_fields.bits.reference_pic_flag ? SURFACE_REFERENCED : 0);
+ i965_avc_bsd_init_avc_bsd_surface(ctx, obj_surface, pic_param);
avc_bsd_surface = obj_surface->private_data;
- avc_bsd_surface->direct_mv_flag = (pic_param->pic_fields.bits.field_pic_flag &&
- !pic_param->seq_fields.bits.direct_8x8_inference_flag);
- OUT_BCS_RELOC(ctx, avc_bsd_surface->direct_mv_wr_top_bo,
+ if (obj_surface->bo == NULL) {
+ obj_surface->bo = dri_bo_alloc(i965->intel.bufmgr,
+ "vaapi surface",
+ obj_surface->size,
+ 0x1000);
+ }
+
+ OUT_BCS_RELOC(ctx, avc_bsd_surface->dmv_top,
I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
0);
- if (avc_bsd_surface->direct_mv_flag == 1)
- OUT_BCS_RELOC(ctx, avc_bsd_surface->direct_mv_wr_bottom_bo,
+ if (avc_bsd_surface->dmv_bottom_flag == 1)
+ OUT_BCS_RELOC(ctx, avc_bsd_surface->dmv_bottom,
I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
0);
else
- OUT_BCS_RELOC(ctx, avc_bsd_surface->direct_mv_wr_top_bo,
+ OUT_BCS_RELOC(ctx, avc_bsd_surface->dmv_top,
I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
0);
}
if (!found) {
+ struct object_surface *obj_surface = SURFACE(i965_h264_context->fsid_list[i].surface_id);
+ obj_surface->flags &= ~SURFACE_REFERENCED;
+
+ if (obj_surface->flags & SURFACE_DISPLAYED) {
+ dri_bo_unreference(obj_surface->bo);
+ obj_surface->bo = NULL;
+ obj_surface->flags = 0;
+ }
+
+ if (obj_surface->free_private_data)
+ obj_surface->free_private_data(&obj_surface->private_data);
+
i965_h264_context->fsid_list[i].surface_id = VA_INVALID_ID;
i965_h264_context->fsid_list[i].frame_store_id = -1;
}
if (!found) {
int frame_idx;
+ struct object_surface *obj_surface = SURFACE(ref_pic->picture_id);
+ if (obj_surface->bo == NULL) {
+ /* Some broken sources such as conformance case FM2_SVA_C
+ * will get here !!!. Allocating a BO for it to avoid SEGMENT FAULT
+ */
+ obj_surface->bo = dri_bo_alloc(i965->intel.bufmgr,
+ "vaapi surface",
+ obj_surface->size,
+ 0x1000);
+ }
+
for (frame_idx = 0; frame_idx < ARRAY_ELEMS(i965_h264_context->fsid_list); frame_idx++) {
for (j = 0; j < ARRAY_ELEMS(i965_h264_context->fsid_list); j++) {
if (i965_h264_context->fsid_list[j].surface_id == VA_INVALID_ID)
struct i965_driver_data *i965 = i965_driver_data(ctx);
struct i965_media_state *media_state = &i965->media_state;
struct i965_h264_context *i965_h264_context = (struct i965_h264_context *)media_state->private_context;
- int i, j;
VAPictureParameterBufferH264 *pic_param;
VASliceParameterBufferH264 *slice_param;
+ unsigned int *object_command;
+ int i, j;
assert(decode_state->pic_param && decode_state->pic_param->buffer);
pic_param = (VAPictureParameterBufferH264 *)decode_state->pic_param->buffer;
i965_avc_bsd_frame_store_index(ctx, pic_param);
+
+ i965_h264_context->enable_avc_ildb = 0;
+ for (j = 0; j < decode_state->num_slice_params && i965_h264_context->enable_avc_ildb == 0; j++) {
+ assert(decode_state->slice_params && decode_state->slice_params[j]->buffer);
+ slice_param = (VASliceParameterBufferH264 *)decode_state->slice_params[j]->buffer;
+
+ assert(decode_state->slice_params[j]->num_elements == 1); /* FIXME */
+ for (i = 0; i < decode_state->slice_params[j]->num_elements; i++) {
+ assert(slice_param->slice_data_flag == VA_SLICE_DATA_FLAG_ALL);
+ assert((slice_param->slice_type == SLICE_TYPE_I) ||
+ (slice_param->slice_type == SLICE_TYPE_SI) ||
+ (slice_param->slice_type == SLICE_TYPE_P) ||
+ (slice_param->slice_type == SLICE_TYPE_SP) ||
+ (slice_param->slice_type == SLICE_TYPE_B));
+
+ if (slice_param->disable_deblocking_filter_idc != 1) {
+ i965_h264_context->enable_avc_ildb = 1;
+ break;
+ }
+
+ slice_param++;
+ }
+ }
+
intel_batchbuffer_start_atomic_bcs(ctx, 0x1000);
i965_avc_bsd_img_state(ctx, decode_state);
i965_avc_bsd_qm_state(ctx, decode_state);
- i965_h264_context->enable_avc_ildb = 0;
for (j = 0; j < decode_state->num_slice_params; j++) {
assert(decode_state->slice_params && decode_state->slice_params[j]->buffer);
slice_param = (VASliceParameterBufferH264 *)decode_state->slice_params[j]->buffer;
i965_avc_bsd_slice_state(ctx, pic_param, slice_param);
i965_avc_bsd_buf_base_state(ctx, pic_param, slice_param);
i965_avc_bsd_object(ctx, decode_state, pic_param, slice_param);
-
- if (slice_param->disable_deblocking_filter_idc != 1)
- i965_h264_context->enable_avc_ildb = 1;
-
slice_param++;
}
-
}
i965_avc_bsd_phantom_slice(ctx, decode_state, pic_param);
intel_batchbuffer_emit_mi_flush_bcs(ctx);
intel_batchbuffer_end_atomic_bcs(ctx);
intel_batchbuffer_flush_bcs(ctx);
+
+ dri_bo_map(i965_h264_context->avc_it_command_mb_info.bo, True);
+ assert(i965_h264_context->avc_it_command_mb_info.bo->virtual);
+ object_command = i965_h264_context->avc_it_command_mb_info.bo->virtual;
+ memset(object_command, 0, i965_h264_context->avc_it_command_mb_info.mbs * i965_h264_context->use_avc_hw_scoreboard * MB_CMD_IN_BYTES);
+ object_command += i965_h264_context->avc_it_command_mb_info.mbs * (1 + i965_h264_context->use_avc_hw_scoreboard) * MB_CMD_IN_DWS;
+ *object_command = MI_BATCH_BUFFER_END;
+ dri_bo_unmap(i965_h264_context->avc_it_command_mb_info.bo);
}
void
dri_bo_unreference(i965_avc_bsd_context->bsd_raw_store.bo);
bo = dri_bo_alloc(i965->intel.bufmgr,
"bsd raw store",
- 0x4000, /* at least 11520 bytes to support 120 MBs per row */
+ 0x3000, /* at least 11520 bytes to support 120 MBs per row */
64);
assert(bo);
i965_avc_bsd_context->bsd_raw_store.bo = bo;
64);
assert(bo);
i965_avc_bsd_context->mpr_row_store.bo = bo;
+
+ if (!i965_avc_bsd_context->init) {
+ i965_avc_bsd_context->init = 1;
+ }
}
Bool
#ifndef __I965_AVC_BSD_H__
#define __I965_AVC_BSD_H__
-struct i965_avc_bsd_surface
-{
- dri_bo *direct_mv_wr_top_bo;
- dri_bo *direct_mv_wr_bottom_bo;
- int direct_mv_flag;
-};
+#define DMV_SIZE 0x88000 /* 557056 bytes for a frame */
struct i965_avc_bsd_context
{
dri_bo *bo;
} mpr_row_store;
- struct {
- dri_bo *bo;
- } avc_it_command_mb_info;
+ int init;
+};
- struct {
- dri_bo *bo;
- long write_offset;
- } avc_it_data;
+struct i965_avc_bsd_surface
+{
+ struct i965_avc_bsd_context *ctx;
+ dri_bo *dmv_top;
+ dri_bo *dmv_bottom;
+ int dmv_bottom_flag;
};
void i965_avc_bsd_pipeline(VADriverContextP, struct decode_state *);
{
unsigned char *constant_buffer;
+ if (avc_hw_scoreboard_context->curbe.upload)
+ return;
+
dri_bo_map(avc_hw_scoreboard_context->curbe.bo, 1);
assert(avc_hw_scoreboard_context->curbe.bo->virtual);
constant_buffer = avc_hw_scoreboard_context->curbe.bo->virtual;
memcpy(constant_buffer, avc_hw_scoreboard_constants, sizeof(avc_hw_scoreboard_constants));
dri_bo_unmap(avc_hw_scoreboard_context->curbe.bo);
+ avc_hw_scoreboard_context->curbe.upload = 1;
}
static void
struct i965_avc_hw_scoreboard_context *avc_hw_scoreboard_context = &i965_h264_context->avc_hw_scoreboard_context;
dri_bo *bo;
- dri_bo_unreference(avc_hw_scoreboard_context->curbe.bo);
- bo = dri_bo_alloc(i965->intel.bufmgr,
- "constant buffer",
- 4096, 64);
- assert(bo);
- avc_hw_scoreboard_context->curbe.bo = bo;
+ if (avc_hw_scoreboard_context->curbe.bo == NULL) {
+ bo = dri_bo_alloc(i965->intel.bufmgr,
+ "constant buffer",
+ 4096, 64);
+ assert(bo);
+ avc_hw_scoreboard_context->curbe.bo = bo;
+ avc_hw_scoreboard_context->curbe.upload = 0;
+ }
dri_bo_unreference(avc_hw_scoreboard_context->surface.s_bo);
avc_hw_scoreboard_context->surface.s_bo = i965_h264_context->avc_it_command_mb_info.bo;
struct {
dri_bo *bo;
+ int upload;
} curbe;
struct {
obj_surface->width = ALIGN(width, 16);
obj_surface->height = ALIGN(height, 16);
obj_surface->size = SIZE_YUV420(obj_surface->width, obj_surface->height);
- obj_surface->bo = dri_bo_alloc(i965->intel.bufmgr,
- "vaapi surface",
- obj_surface->size,
- 64);
- assert(obj_surface->bo);
+ obj_surface->flags = SURFACE_REFERENCED;
+ obj_surface->bo = NULL;
obj_surface->private_data = NULL;
obj_surface->free_private_data = NULL;
}
VAStatus
i965_PutImage(VADriverContextP ctx,
- VASurfaceID surface,
- VAImageID image,
- int src_x,
- int src_y,
- unsigned int src_width,
- unsigned int src_height,
- int dest_x,
- int dest_y,
- unsigned int dest_width,
- unsigned int dest_height)
+ VASurfaceID surface,
+ VAImageID image,
+ int src_x,
+ int src_y,
+ unsigned int src_width,
+ unsigned int src_height,
+ int dest_x,
+ int dest_y,
+ unsigned int dest_width,
+ unsigned int dest_height)
{
return VA_STATUS_SUCCESS;
}
struct i965_driver_data *i965 = i965_driver_data(ctx);
VASubpictureID subpicID = NEW_SUBPIC_ID()
- struct object_subpic *obj_subpic = SUBPIC(subpicID);
+ struct object_subpic *obj_subpic = SUBPIC(subpicID);
if (!obj_subpic)
return VA_STATUS_ERROR_ALLOCATION_FAILED;
if (type == VASliceDataBufferType || type == VAImageBufferType) {
buffer_store->bo = dri_bo_alloc(i965->intel.bufmgr,
- "Buffer",
- size * num_elements, 64);
+ "Buffer",
+ size * num_elements, 64);
assert(buffer_store->bo);
if (data)
assert(obj_buffer->buffer_store->buffer);
i965_release_buffer_store(&obj_context->decode_state.bit_plane);
i965_reference_buffer_store(&obj_context->decode_state.bit_plane,
- obj_buffer->buffer_store);
+ obj_buffer->buffer_store);
return VA_STATUS_SUCCESS;
}
if (obj_context->decode_state.num_slice_params == obj_context->decode_state.max_slice_params) {
obj_context->decode_state.slice_params = realloc(obj_context->decode_state.slice_params,
- (obj_context->decode_state.max_slice_params + NUM_SLICES) * sizeof(*obj_context->decode_state.slice_params));
+ (obj_context->decode_state.max_slice_params + NUM_SLICES) * sizeof(*obj_context->decode_state.slice_params));
memset(obj_context->decode_state.slice_params + obj_context->decode_state.max_slice_params, 0, NUM_SLICES * sizeof(*obj_context->decode_state.slice_params));
obj_context->decode_state.max_slice_params += NUM_SLICES;
}
*out_image = *image;
return VA_STATUS_SUCCESS;
- error:
+error:
i965_DestroyImage(ctx, image_id);
return va_status;
}
for (i = 0; i < obj_image->image.num_palette_entries; i++)
obj_image->palette[i] = (((unsigned int)palette[3*i + 0] << 16) |
((unsigned int)palette[3*i + 1] << 8) |
- (unsigned int)palette[3*i + 2]);
+ (unsigned int)palette[3*i + 2]);
return VA_STATUS_SUCCESS;
}
union dri_buffer *buffer;
struct intel_region *dest_region;
struct object_surface *obj_surface;
- int ret;
+ int ret;
uint32_t name;
Bool new_region = False;
/* Currently don't support DRI1 */
if (dri_state->driConnectedFlag != VA_DRI2)
return VA_STATUS_ERROR_UNKNOWN;
+ /* Some broken sources such as H.264 conformance case FM2_SVA_C
+ * will get here
+ */
+ obj_surface = SURFACE(surface);
+ if (obj_surface->bo == NULL)
+ return VA_STATUS_SUCCESS;
+
dri_drawable = dri_get_drawable(ctx, draw);
assert(dri_drawable);
i965_render_put_surface(ctx, surface,
srcx, srcy, srcw, srch,
destx, desty, destw, desth);
- obj_surface = SURFACE(surface);
+
if(obj_surface->subpic != VA_INVALID_ID) {
i965_render_put_subpic(ctx, surface,
- srcx, srcy, srcw, srch,
- destx, desty, destw, desth);
+ srcx, srcy, srcw, srch,
+ destx, desty, destw, desth);
}
+
dri_swap_buffer(ctx, dri_drawable);
+ obj_surface->flags |= SURFACE_DISPLAYED;
+
+ if (!(obj_surface->flags & SURFACE_REFERENCED)) {
+ dri_bo_unreference(obj_surface->bo);
+ obj_surface->bo = NULL;
+ obj_surface->flags = 0;
+
+ if (obj_surface->free_private_data)
+ obj_surface->free_private_data(&obj_surface->private_data);
+ }
return VA_STATUS_SUCCESS;
}
sizeof(struct object_subpic),
SUBPIC_ID_OFFSET);
assert(result == 0);
-
+
return i965_Init(ctx);
}
struct decode_state decode_state;
};
+#define SURFACE_REFERENCED (1 << 0)
+#define SURFACE_DISPLAYED (1 << 1)
+
struct object_surface
{
struct object_base base;
int width;
int height;
int size;
+ int flags;
dri_bo *bo;
void (*free_private_data)(void **data);
void *private_data;
dri_bo *bo;
};
-
-
struct i965_driver_data
{
struct intel_driver_data intel;
}
static void
-i965_media_decode_init(VADriverContextP ctx, VAProfile profile)
+i965_media_decode_init(VADriverContextP ctx, VAProfile profile, struct decode_state *decode_state)
{
int i;
struct i965_driver_data *i965 = i965_driver_data(ctx);
switch (profile) {
case VAProfileMPEG2Simple:
case VAProfileMPEG2Main:
- i965_media_mpeg2_decode_init(ctx);
+ i965_media_mpeg2_decode_init(ctx, decode_state);
break;
case VAProfileH264Baseline:
case VAProfileH264Main:
case VAProfileH264High:
- i965_media_h264_decode_init(ctx);
+ i965_media_h264_decode_init(ctx, decode_state);
break;
default:
struct i965_driver_data *i965 = i965_driver_data(ctx);
struct i965_media_state *media_state = &i965->media_state;
- i965_media_decode_init(ctx, profile);
+ i965_media_decode_init(ctx, profile, decode_state);
assert(media_state->media_states_setup);
media_state->media_states_setup(ctx, decode_state);
i965_media_pipeline_setup(ctx, decode_state);
struct i965_driver_data *i965 = i965_driver_data(ctx);
struct i965_media_state *media_state = &i965->media_state;
struct i965_h264_context *i965_h264_context;
- VAPictureParameterBufferH264 *pic_param;
assert(media_state->private_context);
i965_h264_context = (struct i965_h264_context *)media_state->private_context;
- assert(decode_state->pic_param && decode_state->pic_param->buffer);
- pic_param = (VAPictureParameterBufferH264 *)decode_state->pic_param->buffer;
-
- i965_h264_context->picture.width_in_mbs = ((pic_param->picture_width_in_mbs_minus1 + 1) & 0xff);
- i965_h264_context->picture.height_in_mbs = ((pic_param->picture_height_in_mbs_minus1 + 1) & 0xff) /
- (1 + !!pic_param->pic_fields.bits.field_pic_flag); /* picture height */
- i965_h264_context->picture.mbaff_frame_flag = (pic_param->seq_fields.bits.mb_adaptive_frame_field_flag &&
- !pic_param->pic_fields.bits.field_pic_flag);
-
- i965_h264_context->avc_it_command_mb_info.mbs = (i965_h264_context->picture.width_in_mbs *
- i965_h264_context->picture.height_in_mbs);
i965_avc_bsd_pipeline(ctx, decode_state);
}
void
-i965_media_h264_decode_init(VADriverContextP ctx)
+i965_media_h264_decode_init(VADriverContextP ctx, struct decode_state *decode_state)
{
struct i965_driver_data *i965 = i965_driver_data(ctx);
struct i965_media_state *media_state = &i965->media_state;
struct i965_h264_context *i965_h264_context;
dri_bo *bo;
int i;
+ VAPictureParameterBufferH264 *pic_param;
i965_h264_context = media_state->private_context;
struct media_kernel *kernel = &h264_avc_kernels[i];
kernel->bo = dri_bo_alloc(i965->intel.bufmgr,
kernel->name,
- kernel->size, 64);
+ kernel->size, 0x1000);
assert(kernel->bo);
dri_bo_subdata(kernel->bo, 0, kernel->size, kernel->bin);
}
media_state->media_objects = i965_media_h264_objects;
}
- i965_h264_context->enable_avc_ildb = 0;
+ assert(decode_state->pic_param && decode_state->pic_param->buffer);
+ pic_param = (VAPictureParameterBufferH264 *)decode_state->pic_param->buffer;
+ i965_h264_context->picture.width_in_mbs = ((pic_param->picture_width_in_mbs_minus1 + 1) & 0xff);
+ i965_h264_context->picture.height_in_mbs = ((pic_param->picture_height_in_mbs_minus1 + 1) & 0xff) /
+ (1 + !!pic_param->pic_fields.bits.field_pic_flag); /* picture height */
+ i965_h264_context->picture.mbaff_frame_flag = (pic_param->seq_fields.bits.mb_adaptive_frame_field_flag &&
+ !pic_param->pic_fields.bits.field_pic_flag);
+ i965_h264_context->avc_it_command_mb_info.mbs = (i965_h264_context->picture.width_in_mbs *
+ i965_h264_context->picture.height_in_mbs);
dri_bo_unreference(i965_h264_context->avc_it_command_mb_info.bo);
bo = dri_bo_alloc(i965->intel.bufmgr,
"avc it command mb info",
- 0x80000 * (1 + i965_h264_context->use_avc_hw_scoreboard), /* at least 522240 bytes */
+ i965_h264_context->avc_it_command_mb_info.mbs * MB_CMD_IN_BYTES * (1 + i965_h264_context->use_avc_hw_scoreboard) + 4,
0x1000);
assert(bo);
i965_h264_context->avc_it_command_mb_info.bo = bo;
dri_bo_unreference(i965_h264_context->avc_it_data.bo);
bo = dri_bo_alloc(i965->intel.bufmgr,
"avc it data",
- 0x1000000, /* at least 16711680 bytes */
- 4096);
+ i965_h264_context->avc_it_command_mb_info.mbs *
+ 0x800 *
+ (1 + !!pic_param->pic_fields.bits.field_pic_flag),
+ 0x1000);
assert(bo);
i965_h264_context->avc_it_data.bo = bo;
i965_h264_context->avc_it_data.write_offset = 0;
dri_bo_unreference(i965_h264_context->avc_ildb_data.bo);
bo = dri_bo_alloc(i965->intel.bufmgr,
"AVC-ILDB Data Buffer",
- 0x100000, /* at least 1044480 bytes */
- 64);
+ i965_h264_context->avc_it_command_mb_info.mbs * 64 * 2,
+ 0x1000);
assert(bo);
i965_h264_context->avc_ildb_data.bo = bo;
} fsid_list[16];
};
-void i965_media_h264_decode_init(VADriverContextP ctx);
+void i965_media_h264_decode_init(VADriverContextP ctx, struct decode_state *decode_state);
#endif /* _I965_MEDIA_H264_H_ */
int w = obj_surface->width;
int h = obj_surface->height;
+ if (obj_surface->bo == NULL) {
+ struct i965_driver_data *i965 = i965_driver_data(ctx);
+
+ obj_surface->bo = dri_bo_alloc(i965->intel.bufmgr,
+ "vaapi surface",
+ obj_surface->size,
+ 0x1000);
+ }
+
if (picture_structure == MPEG_FRAME) {
i965_media_mpeg2_surface_state(ctx, base_index + 0, obj_surface,
0, w, h,
}
void
-i965_media_mpeg2_decode_init(VADriverContextP ctx)
+i965_media_mpeg2_decode_init(VADriverContextP ctx, struct decode_state *decode_state)
{
struct i965_driver_data *i965 = i965_driver_data(ctx);
struct i965_media_state *media_state = &i965->media_state;
struct decode_state;
-void i965_media_mpeg2_decode_init(VADriverContextP ctx);
+void i965_media_mpeg2_decode_init(VADriverContextP ctx, struct decode_state * decode_state);
#endif /* _I965_MEDIA_MPEG2_H_ */
struct i965_render_state *render_state = &i965->render_state;
unsigned short *constant_buffer;
+ if (render_state->curbe.upload)
+ return;
+
dri_bo_map(render_state->curbe.bo, 1);
assert(render_state->curbe.bo->virtual);
constant_buffer = render_state->curbe.bo->virtual;
*constant_buffer = 0;
dri_bo_unmap(render_state->curbe.bo);
+ render_state->curbe.upload = 1;
}
static void
struct render_kernel *kernel = &render_kernels[i];
kernel->bo = dri_bo_alloc(i965->intel.bufmgr,
kernel->name,
- kernel->size, 64);
+ kernel->size, 0x1000);
assert(kernel->bo);
dri_bo_subdata(kernel->bo, 0, kernel->size, kernel->bin);
}
"constant buffer",
4096, 64);
assert(render_state->curbe.bo);
+ render_state->curbe.upload = 0;
return True;
}
struct {
dri_bo *state;
- dri_bo *prog;
} sf;
struct {
dri_bo *surface[MAX_RENDER_SURFACES];
dri_bo *binding_table;
dri_bo *state;
- dri_bo *prog;
} wm;
struct {
struct {
dri_bo *bo;
+ int upload;
} curbe;
int interleaved_uv;