#include "i965_encoder.h"
static void
-gen6_mfc_pipe_mode_select(VADriverContextP ctx)
+gen6_mfc_pipe_mode_select(VADriverContextP ctx, struct gen6_encoder_context *gen6_encoder_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct intel_batchbuffer *batch = gen6_encoder_context->base.batch;
BEGIN_BCS_BATCH(batch,4);
static void
gen6_mfc_surface_state(VADriverContextP ctx, struct gen6_encoder_context *gen6_encoder_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct intel_batchbuffer *batch = gen6_encoder_context->base.batch;
struct gen6_mfc_context *mfc_context = &gen6_encoder_context->mfc_context;
BEGIN_BCS_BATCH(batch, 6);
static void
gen6_mfc_pipe_buf_addr_state(VADriverContextP ctx, struct gen6_encoder_context *gen6_encoder_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct intel_batchbuffer *batch = gen6_encoder_context->base.batch;
struct gen6_mfc_context *mfc_context = &gen6_encoder_context->mfc_context;
int i;
static void
gen6_mfc_ind_obj_base_addr_state(VADriverContextP ctx, struct gen6_encoder_context *gen6_encoder_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct intel_batchbuffer *batch = gen6_encoder_context->base.batch;
struct gen6_vme_context *vme_context = &gen6_encoder_context->vme_context;
BEGIN_BCS_BATCH(batch, 11);
static void
gen6_mfc_bsp_buf_base_addr_state(VADriverContextP ctx, struct gen6_encoder_context *gen6_encoder_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct intel_batchbuffer *batch = gen6_encoder_context->base.batch;
struct gen6_mfc_context *mfc_context = &gen6_encoder_context->mfc_context;
BEGIN_BCS_BATCH(batch, 4);
static void
gen6_mfc_avc_img_state(VADriverContextP ctx, struct gen6_encoder_context *gen6_encoder_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct intel_batchbuffer *batch = gen6_encoder_context->base.batch;
struct gen6_mfc_context *mfc_context = &gen6_encoder_context->mfc_context;
int width_in_mbs = (mfc_context->surface_state.width + 15) / 16;
}
-static void gen6_mfc_avc_directmode_state(VADriverContextP ctx)
+static void gen6_mfc_avc_directmode_state(VADriverContextP ctx, struct gen6_encoder_context *gen6_encoder_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct intel_batchbuffer *batch = gen6_encoder_context->base.batch;
int i;
BEGIN_BCS_BATCH(batch, 69);
int intra_slice,
struct gen6_encoder_context *gen6_encoder_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct intel_batchbuffer *batch = gen6_encoder_context->base.batch;
struct gen6_mfc_context *mfc_context = &gen6_encoder_context->mfc_context;
BEGIN_BCS_BATCH(batch, 11);;
ADVANCE_BCS_BATCH(batch);
}
-static void gen6_mfc_avc_qm_state(VADriverContextP ctx)
+static void gen6_mfc_avc_qm_state(VADriverContextP ctx, struct gen6_encoder_context *gen6_encoder_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct intel_batchbuffer *batch = gen6_encoder_context->base.batch;
int i;
BEGIN_BCS_BATCH(batch, 58);
ADVANCE_BCS_BATCH(batch);
}
-static void gen6_mfc_avc_fqm_state(VADriverContextP ctx)
+static void gen6_mfc_avc_fqm_state(VADriverContextP ctx, struct gen6_encoder_context *gen6_encoder_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct intel_batchbuffer *batch = gen6_encoder_context->base.batch;
int i;
BEGIN_BCS_BATCH(batch, 113);
ADVANCE_BCS_BATCH(batch);
}
-static void gen6_mfc_avc_ref_idx_state(VADriverContextP ctx)
+static void gen6_mfc_avc_ref_idx_state(VADriverContextP ctx, struct gen6_encoder_context *gen6_encoder_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct intel_batchbuffer *batch = gen6_encoder_context->base.batch;
int i;
BEGIN_BCS_BATCH(batch, 10);
static void
-gen6_mfc_avc_insert_object(VADriverContextP ctx, int flush_data)
+gen6_mfc_avc_insert_object(VADriverContextP ctx, int flush_data, struct gen6_encoder_context *gen6_encoder_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct intel_batchbuffer *batch = gen6_encoder_context->base.batch;
BEGIN_BCS_BATCH(batch, 4);
}
static int
-gen6_mfc_avc_pak_object_intra(VADriverContextP ctx, int x, int y, int end_mb, int qp,unsigned int *msg)
+gen6_mfc_avc_pak_object_intra(VADriverContextP ctx, int x, int y, int end_mb, int qp, unsigned int *msg,
+                              struct gen6_encoder_context *gen6_encoder_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct intel_batchbuffer *batch = gen6_encoder_context->base.batch;
int len_in_dwords = 11;
BEGIN_BCS_BATCH(batch, len_in_dwords);
return len_in_dwords;
}
-static int gen6_mfc_avc_pak_object_inter(VADriverContextP ctx, int x, int y, int end_mb, int qp, unsigned int offset)
+static int gen6_mfc_avc_pak_object_inter(VADriverContextP ctx, int x, int y, int end_mb, int qp, unsigned int offset,
+ struct gen6_encoder_context *gen6_encoder_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct intel_batchbuffer *batch = gen6_encoder_context->base.batch;
int len_in_dwords = 11;
BEGIN_BCS_BATCH(batch, len_in_dwords);
struct encode_state *encode_state,
struct gen6_encoder_context *gen6_encoder_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct intel_batchbuffer *batch = gen6_encoder_context->base.batch;
struct gen6_mfc_context *mfc_context = &gen6_encoder_context->mfc_context;
struct gen6_vme_context *vme_context = &gen6_encoder_context->vme_context;
VAEncSequenceParameterBufferH264 *pSequenceParameter = (VAEncSequenceParameterBufferH264 *)encode_state->seq_param->buffer;
if (emit_new_state) {
intel_batchbuffer_emit_mi_flush(batch);
- gen6_mfc_pipe_mode_select(ctx);
+ gen6_mfc_pipe_mode_select(ctx, gen6_encoder_context);
gen6_mfc_surface_state(ctx, gen6_encoder_context);
gen6_mfc_pipe_buf_addr_state(ctx, gen6_encoder_context);
gen6_mfc_ind_obj_base_addr_state(ctx, gen6_encoder_context);
gen6_mfc_bsp_buf_base_addr_state(ctx, gen6_encoder_context);
gen6_mfc_avc_img_state(ctx, gen6_encoder_context);
- gen6_mfc_avc_qm_state(ctx);
- gen6_mfc_avc_fqm_state(ctx);
- gen6_mfc_avc_ref_idx_state(ctx);
+ gen6_mfc_avc_qm_state(ctx, gen6_encoder_context);
+ gen6_mfc_avc_fqm_state(ctx, gen6_encoder_context);
+ gen6_mfc_avc_ref_idx_state(ctx, gen6_encoder_context);
/*gen6_mfc_avc_directmode_state(ctx);*/
gen6_mfc_avc_slice_state(ctx, is_intra, gen6_encoder_context);
/*gen6_mfc_avc_insert_object(ctx, 0);*/
if (is_intra) {
assert(msg);
- object_len_in_bytes = gen6_mfc_avc_pak_object_intra(ctx, x, y, last_mb, qp, msg);
+ object_len_in_bytes = gen6_mfc_avc_pak_object_intra(ctx, x, y, last_mb, qp, msg, gen6_encoder_context);
msg += 4;
} else {
- object_len_in_bytes = gen6_mfc_avc_pak_object_inter(ctx, x, y, last_mb, qp, offset);
+ object_len_in_bytes = gen6_mfc_avc_pak_object_inter(ctx, x, y, last_mb, qp, offset, gen6_encoder_context);
offset += 64;
}
struct encode_state *encode_state,
struct gen6_encoder_context *gen6_encoder_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct intel_batchbuffer *batch = gen6_encoder_context->base.batch;
intel_batchbuffer_flush(batch); //run the pipeline
int standard_select,
struct gen6_mfd_context *gen6_mfd_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct intel_batchbuffer *batch = gen6_mfd_context->base.batch;
assert(standard_select == MFX_FORMAT_MPEG2 ||
standard_select == MFX_FORMAT_AVC ||
static void
gen6_mfd_surface_state(VADriverContextP ctx,
struct decode_state *decode_state,
- int standard_select)
+ int standard_select,
+ struct gen6_mfd_context *gen6_mfd_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct intel_batchbuffer *batch = gen6_mfd_context->base.batch;
struct i965_driver_data *i965 = i965_driver_data(ctx);
struct object_surface *obj_surface = SURFACE(decode_state->current_render_target);
assert(obj_surface);
int standard_select,
struct gen6_mfd_context *gen6_mfd_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct intel_batchbuffer *batch = gen6_mfd_context->base.batch;
struct i965_driver_data *i965 = i965_driver_data(ctx);
int i;
static void
gen6_mfd_ind_obj_base_addr_state(VADriverContextP ctx,
dri_bo *slice_data_bo,
- int standard_select)
+ int standard_select,
+ struct gen6_mfd_context *gen6_mfd_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct intel_batchbuffer *batch = gen6_mfd_context->base.batch;
BEGIN_BCS_BATCH(batch, 11);
OUT_BCS_BATCH(batch, MFX_IND_OBJ_BASE_ADDR_STATE | (11 - 2));
int standard_select,
struct gen6_mfd_context *gen6_mfd_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct intel_batchbuffer *batch = gen6_mfd_context->base.batch;
BEGIN_BCS_BATCH(batch, 4);
OUT_BCS_BATCH(batch, MFX_BSP_BUF_BASE_ADDR_STATE | (4 - 2));
static void
gen6_mfd_wait(VADriverContextP ctx,
struct decode_state *decode_state,
- int standard_select)
+ int standard_select,
+ struct gen6_mfd_context *gen6_mfd_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct intel_batchbuffer *batch = gen6_mfd_context->base.batch;
BEGIN_BCS_BATCH(batch, 1);
OUT_BCS_BATCH(batch, MFX_WAIT | (1 << 8));
}
static void
-gen6_mfd_avc_img_state(VADriverContextP ctx, struct decode_state *decode_state)
+gen6_mfd_avc_img_state(VADriverContextP ctx,
+ struct decode_state *decode_state,
+ struct gen6_mfd_context *gen6_mfd_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct intel_batchbuffer *batch = gen6_mfd_context->base.batch;
int qm_present_flag;
int img_struct;
int mbaff_frame_flag;
}
static void
-gen6_mfd_avc_qm_state(VADriverContextP ctx, struct decode_state *decode_state)
+gen6_mfd_avc_qm_state(VADriverContextP ctx,
+ struct decode_state *decode_state,
+ struct gen6_mfd_context *gen6_mfd_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct intel_batchbuffer *batch = gen6_mfd_context->base.batch;
int cmd_len;
VAIQMatrixBufferH264 *iq_matrix;
VAPictureParameterBufferH264 *pic_param;
VASliceParameterBufferH264 *slice_param,
struct gen6_mfd_context *gen6_mfd_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
struct i965_driver_data *i965 = i965_driver_data(ctx);
+ struct intel_batchbuffer *batch = gen6_mfd_context->base.batch;
struct object_surface *obj_surface;
struct gen6_avc_surface *gen6_avc_surface;
VAPictureH264 *va_pic;
gen6_mfd_avc_slice_state(VADriverContextP ctx,
VAPictureParameterBufferH264 *pic_param,
VASliceParameterBufferH264 *slice_param,
- VASliceParameterBufferH264 *next_slice_param)
+ VASliceParameterBufferH264 *next_slice_param,
+ struct gen6_mfd_context *gen6_mfd_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct intel_batchbuffer *batch = gen6_mfd_context->base.batch;
int width_in_mbs = pic_param->picture_width_in_mbs_minus1 + 1;
int height_in_mbs = pic_param->picture_height_in_mbs_minus1 + 1;
int slice_hor_pos, slice_ver_pos, next_slice_hor_pos, next_slice_ver_pos;
}
static void
-gen6_mfd_avc_phantom_slice_state(VADriverContextP ctx, VAPictureParameterBufferH264 *pic_param)
+gen6_mfd_avc_phantom_slice_state(VADriverContextP ctx,
+ VAPictureParameterBufferH264 *pic_param,
+ struct gen6_mfd_context *gen6_mfd_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct intel_batchbuffer *batch = gen6_mfd_context->base.batch;
int width_in_mbs = pic_param->picture_width_in_mbs_minus1 + 1;
int height_in_mbs = pic_param->picture_height_in_mbs_minus1 + 1; /* frame height */
VASliceParameterBufferH264 *slice_param,
struct gen6_mfd_context *gen6_mfd_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct intel_batchbuffer *batch = gen6_mfd_context->base.batch;
int i, j, num_ref_list;
struct {
unsigned char bottom_idc:1;
static void
gen6_mfd_avc_weightoffset_state(VADriverContextP ctx,
VAPictureParameterBufferH264 *pic_param,
- VASliceParameterBufferH264 *slice_param)
+ VASliceParameterBufferH264 *slice_param,
+ struct gen6_mfd_context *gen6_mfd_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct intel_batchbuffer *batch = gen6_mfd_context->base.batch;
int i, j, num_weight_offset_table = 0;
short weightoffsets[32 * 6];
gen6_mfd_avc_bsd_object(VADriverContextP ctx,
VAPictureParameterBufferH264 *pic_param,
VASliceParameterBufferH264 *slice_param,
- dri_bo *slice_data_bo)
+ dri_bo *slice_data_bo,
+ struct gen6_mfd_context *gen6_mfd_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct intel_batchbuffer *batch = gen6_mfd_context->base.batch;
int slice_data_bit_offset;
uint8_t *slice_data = NULL;
}
static void
-gen6_mfd_avc_phantom_slice_bsd_object(VADriverContextP ctx, VAPictureParameterBufferH264 *pic_param)
+gen6_mfd_avc_phantom_slice_bsd_object(VADriverContextP ctx,
+ VAPictureParameterBufferH264 *pic_param,
+ struct gen6_mfd_context *gen6_mfd_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct intel_batchbuffer *batch = gen6_mfd_context->base.batch;
BEGIN_BCS_BATCH(batch, 6);
OUT_BCS_BATCH(batch, MFD_AVC_BSD_OBJECT | (6 - 2));
}
static void
-gen6_mfd_avc_phantom_slice(VADriverContextP ctx, VAPictureParameterBufferH264 *pic_param)
+gen6_mfd_avc_phantom_slice(VADriverContextP ctx,
+ VAPictureParameterBufferH264 *pic_param,
+ struct gen6_mfd_context *gen6_mfd_context)
{
- gen6_mfd_avc_phantom_slice_state(ctx, pic_param);
- gen6_mfd_avc_phantom_slice_bsd_object(ctx, pic_param);
+ gen6_mfd_avc_phantom_slice_state(ctx, pic_param, gen6_mfd_context);
+ gen6_mfd_avc_phantom_slice_bsd_object(ctx, pic_param, gen6_mfd_context);
}
static void
struct decode_state *decode_state,
struct gen6_mfd_context *gen6_mfd_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct intel_batchbuffer *batch = gen6_mfd_context->base.batch;
VAPictureParameterBufferH264 *pic_param;
VASliceParameterBufferH264 *slice_param, *next_slice_param;
dri_bo *slice_data_bo;
intel_batchbuffer_start_atomic_bcs(batch, 0x1000);
intel_batchbuffer_emit_mi_flush(batch);
gen6_mfd_pipe_mode_select(ctx, decode_state, MFX_FORMAT_AVC, gen6_mfd_context);
- gen6_mfd_surface_state(ctx, decode_state, MFX_FORMAT_AVC);
+ gen6_mfd_surface_state(ctx, decode_state, MFX_FORMAT_AVC, gen6_mfd_context);
gen6_mfd_pipe_buf_addr_state(ctx, decode_state, MFX_FORMAT_AVC, gen6_mfd_context);
gen6_mfd_bsp_buf_base_addr_state(ctx, decode_state, MFX_FORMAT_AVC, gen6_mfd_context);
- gen6_mfd_avc_img_state(ctx, decode_state);
- gen6_mfd_avc_qm_state(ctx, decode_state);
+ gen6_mfd_avc_img_state(ctx, decode_state, gen6_mfd_context);
+ gen6_mfd_avc_qm_state(ctx, decode_state, gen6_mfd_context);
for (j = 0; j < decode_state->num_slice_params; j++) {
assert(decode_state->slice_params && decode_state->slice_params[j]->buffer);
else
next_slice_param = (VASliceParameterBufferH264 *)decode_state->slice_params[j + 1]->buffer;
- gen6_mfd_ind_obj_base_addr_state(ctx, slice_data_bo, MFX_FORMAT_AVC);
+ gen6_mfd_ind_obj_base_addr_state(ctx, slice_data_bo, MFX_FORMAT_AVC, gen6_mfd_context);
assert(decode_state->slice_params[j]->num_elements == 1);
for (i = 0; i < decode_state->slice_params[j]->num_elements; i++) {
next_slice_param = slice_param + 1;
gen6_mfd_avc_directmode_state(ctx, pic_param, slice_param, gen6_mfd_context);
- gen6_mfd_avc_slice_state(ctx, pic_param, slice_param, next_slice_param);
+ gen6_mfd_avc_slice_state(ctx, pic_param, slice_param, next_slice_param, gen6_mfd_context);
gen6_mfd_avc_ref_idx_state(ctx, pic_param, slice_param, gen6_mfd_context);
- gen6_mfd_avc_weightoffset_state(ctx, pic_param, slice_param);
- gen6_mfd_avc_bsd_object(ctx, pic_param, slice_param, slice_data_bo);
+ gen6_mfd_avc_weightoffset_state(ctx, pic_param, slice_param, gen6_mfd_context);
+ gen6_mfd_avc_bsd_object(ctx, pic_param, slice_param, slice_data_bo, gen6_mfd_context);
slice_param++;
}
}
- gen6_mfd_avc_phantom_slice(ctx, pic_param);
+ gen6_mfd_avc_phantom_slice(ctx, pic_param, gen6_mfd_context);
intel_batchbuffer_end_atomic(batch);
intel_batchbuffer_flush(batch);
}
}
static void
-gen6_mfd_mpeg2_pic_state(VADriverContextP ctx, struct decode_state *decode_state)
+gen6_mfd_mpeg2_pic_state(VADriverContextP ctx,
+ struct decode_state *decode_state,
+ struct gen6_mfd_context *gen6_mfd_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct intel_batchbuffer *batch = gen6_mfd_context->base.batch;
VAPictureParameterBufferMPEG2 *pic_param;
assert(decode_state->pic_param && decode_state->pic_param->buffer);
}
static void
-gen6_mfd_mpeg2_qm_state(VADriverContextP ctx, struct decode_state *decode_state)
+gen6_mfd_mpeg2_qm_state(VADriverContextP ctx,
+ struct decode_state *decode_state,
+ struct gen6_mfd_context *gen6_mfd_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct intel_batchbuffer *batch = gen6_mfd_context->base.batch;
VAIQMatrixBufferMPEG2 *iq_matrix;
int i;
gen6_mfd_mpeg2_bsd_object(VADriverContextP ctx,
VAPictureParameterBufferMPEG2 *pic_param,
VASliceParameterBufferMPEG2 *slice_param,
- VASliceParameterBufferMPEG2 *next_slice_param)
+ VASliceParameterBufferMPEG2 *next_slice_param,
+ struct gen6_mfd_context *gen6_mfd_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct intel_batchbuffer *batch = gen6_mfd_context->base.batch;
unsigned int width_in_mbs = ALIGN(pic_param->horizontal_size, 16) / 16;
unsigned int height_in_mbs = ALIGN(pic_param->vertical_size, 16) / 16;
int mb_count;
struct decode_state *decode_state,
struct gen6_mfd_context *gen6_mfd_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct intel_batchbuffer *batch = gen6_mfd_context->base.batch;
VAPictureParameterBufferMPEG2 *pic_param;
VASliceParameterBufferMPEG2 *slice_param, *next_slice_param;
dri_bo *slice_data_bo;
intel_batchbuffer_start_atomic_bcs(batch, 0x1000);
intel_batchbuffer_emit_mi_flush(batch);
gen6_mfd_pipe_mode_select(ctx, decode_state, MFX_FORMAT_MPEG2, gen6_mfd_context);
- gen6_mfd_surface_state(ctx, decode_state, MFX_FORMAT_MPEG2);
+ gen6_mfd_surface_state(ctx, decode_state, MFX_FORMAT_MPEG2, gen6_mfd_context);
gen6_mfd_pipe_buf_addr_state(ctx, decode_state, MFX_FORMAT_MPEG2, gen6_mfd_context);
gen6_mfd_bsp_buf_base_addr_state(ctx, decode_state, MFX_FORMAT_MPEG2, gen6_mfd_context);
- gen6_mfd_mpeg2_pic_state(ctx, decode_state);
- gen6_mfd_mpeg2_qm_state(ctx, decode_state);
+ gen6_mfd_mpeg2_pic_state(ctx, decode_state, gen6_mfd_context);
+ gen6_mfd_mpeg2_qm_state(ctx, decode_state, gen6_mfd_context);
assert(decode_state->num_slice_params == 1);
for (j = 0; j < decode_state->num_slice_params; j++) {
assert(decode_state->slice_params && decode_state->slice_params[j]->buffer);
slice_param = (VASliceParameterBufferMPEG2 *)decode_state->slice_params[j]->buffer;
slice_data_bo = decode_state->slice_datas[j]->bo;
- gen6_mfd_ind_obj_base_addr_state(ctx, slice_data_bo, MFX_FORMAT_MPEG2);
+ gen6_mfd_ind_obj_base_addr_state(ctx, slice_data_bo, MFX_FORMAT_MPEG2, gen6_mfd_context);
for (i = 0; i < decode_state->slice_params[j]->num_elements; i++) {
assert(slice_param->slice_data_flag == VA_SLICE_DATA_FLAG_ALL);
else
next_slice_param = NULL;
- gen6_mfd_mpeg2_bsd_object(ctx, pic_param, slice_param, next_slice_param);
+ gen6_mfd_mpeg2_bsd_object(ctx, pic_param, slice_param, next_slice_param, gen6_mfd_context);
slice_param++;
}
}
}
static void
-gen6_mfd_vc1_pic_state(VADriverContextP ctx, struct decode_state *decode_state)
+gen6_mfd_vc1_pic_state(VADriverContextP ctx,
+ struct decode_state *decode_state,
+ struct gen6_mfd_context *gen6_mfd_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct intel_batchbuffer *batch = gen6_mfd_context->base.batch;
VAPictureParameterBufferVC1 *pic_param;
struct i965_driver_data *i965 = i965_driver_data(ctx);
struct object_surface *obj_surface;
}
static void
-gen6_mfd_vc1_pred_pipe_state(VADriverContextP ctx, struct decode_state *decode_state)
+gen6_mfd_vc1_pred_pipe_state(VADriverContextP ctx,
+ struct decode_state *decode_state,
+ struct gen6_mfd_context *gen6_mfd_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct intel_batchbuffer *batch = gen6_mfd_context->base.batch;
VAPictureParameterBufferVC1 *pic_param;
int interpolation_mode = 0;
int intensitycomp_single;
static void
-gen6_mfd_vc1_directmode_state(VADriverContextP ctx, struct decode_state *decode_state)
+gen6_mfd_vc1_directmode_state(VADriverContextP ctx,
+ struct decode_state *decode_state,
+ struct gen6_mfd_context *gen6_mfd_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct intel_batchbuffer *batch = gen6_mfd_context->base.batch;
VAPictureParameterBufferVC1 *pic_param;
struct i965_driver_data *i965 = i965_driver_data(ctx);
struct object_surface *obj_surface;
VAPictureParameterBufferVC1 *pic_param,
VASliceParameterBufferVC1 *slice_param,
VASliceParameterBufferVC1 *next_slice_param,
- dri_bo *slice_data_bo)
+ dri_bo *slice_data_bo,
+ struct gen6_mfd_context *gen6_mfd_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct intel_batchbuffer *batch = gen6_mfd_context->base.batch;
int next_slice_start_vert_pos;
int macroblock_offset;
uint8_t *slice_data = NULL;
struct decode_state *decode_state,
struct gen6_mfd_context *gen6_mfd_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct intel_batchbuffer *batch = gen6_mfd_context->base.batch;
VAPictureParameterBufferVC1 *pic_param;
VASliceParameterBufferVC1 *slice_param, *next_slice_param;
dri_bo *slice_data_bo;
intel_batchbuffer_start_atomic_bcs(batch, 0x1000);
intel_batchbuffer_emit_mi_flush(batch);
gen6_mfd_pipe_mode_select(ctx, decode_state, MFX_FORMAT_VC1, gen6_mfd_context);
- gen6_mfd_surface_state(ctx, decode_state, MFX_FORMAT_VC1);
+ gen6_mfd_surface_state(ctx, decode_state, MFX_FORMAT_VC1, gen6_mfd_context);
gen6_mfd_pipe_buf_addr_state(ctx, decode_state, MFX_FORMAT_VC1, gen6_mfd_context);
gen6_mfd_bsp_buf_base_addr_state(ctx, decode_state, MFX_FORMAT_VC1, gen6_mfd_context);
- gen6_mfd_vc1_pic_state(ctx, decode_state);
- gen6_mfd_vc1_pred_pipe_state(ctx, decode_state);
- gen6_mfd_vc1_directmode_state(ctx, decode_state);
+ gen6_mfd_vc1_pic_state(ctx, decode_state, gen6_mfd_context);
+ gen6_mfd_vc1_pred_pipe_state(ctx, decode_state, gen6_mfd_context);
+ gen6_mfd_vc1_directmode_state(ctx, decode_state, gen6_mfd_context);
assert(decode_state->num_slice_params == 1);
for (j = 0; j < decode_state->num_slice_params; j++) {
assert(decode_state->slice_params && decode_state->slice_params[j]->buffer);
slice_param = (VASliceParameterBufferVC1 *)decode_state->slice_params[j]->buffer;
slice_data_bo = decode_state->slice_datas[j]->bo;
- gen6_mfd_ind_obj_base_addr_state(ctx, slice_data_bo, MFX_FORMAT_VC1);
+ gen6_mfd_ind_obj_base_addr_state(ctx, slice_data_bo, MFX_FORMAT_VC1, gen6_mfd_context);
for (i = 0; i < decode_state->slice_params[j]->num_elements; i++) {
assert(slice_param->slice_data_flag == VA_SLICE_DATA_FLAG_ALL);
else
next_slice_param = NULL;
- gen6_mfd_vc1_bsd_object(ctx, pic_param, slice_param, next_slice_param, slice_data_bo);
+ gen6_mfd_vc1_bsd_object(ctx, pic_param, slice_param, next_slice_param, slice_data_bo, gen6_mfd_context);
slice_param++;
}
}
dri_bo_unreference(gen6_mfd_context->bitplane_read_buffer.bo);
gen6_mfd_context->bitplane_read_buffer.bo = NULL;
+ intel_batchbuffer_free(gen6_mfd_context->base.batch);
free(gen6_mfd_context);
}
struct hw_context *
gen6_dec_hw_context_init(VADriverContextP ctx, VAProfile profile)
{
+ struct intel_driver_data *intel = intel_driver_data(ctx);
struct gen6_mfd_context *gen6_mfd_context = calloc(1, sizeof(struct gen6_mfd_context));
int i;
gen6_mfd_context->base.destroy = gen6_mfd_context_destroy;
gen6_mfd_context->base.run = gen6_mfd_decode_picture;
+ gen6_mfd_context->base.batch = intel_batchbuffer_new(intel, I915_EXEC_RENDER);
for (i = 0; i < ARRAY_ELEMS(gen6_mfd_context->reference_surface); i++) {
gen6_mfd_context->reference_surface[i].surface_id = VA_INVALID_ID;
return VA_STATUS_SUCCESS;
}
-static void gen6_vme_pipeline_select(VADriverContextP ctx)
+static void gen6_vme_pipeline_select(VADriverContextP ctx, struct gen6_encoder_context *gen6_encoder_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct intel_batchbuffer *batch = gen6_encoder_context->base.batch;
BEGIN_BATCH(batch, 1);
OUT_BATCH(batch, CMD_PIPELINE_SELECT | PIPELINE_SELECT_MEDIA);
ADVANCE_BATCH(batch);
}
-static void gen6_vme_state_base_address(VADriverContextP ctx)
+static void gen6_vme_state_base_address(VADriverContextP ctx, struct gen6_encoder_context *gen6_encoder_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct intel_batchbuffer *batch = gen6_encoder_context->base.batch;
BEGIN_BATCH(batch, 10);
static void gen6_vme_vfe_state(VADriverContextP ctx, struct gen6_encoder_context *gen6_encoder_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct intel_batchbuffer *batch = gen6_encoder_context->base.batch;
struct gen6_vme_context *vme_context = &gen6_encoder_context->vme_context;
BEGIN_BATCH(batch, 8);
static void gen6_vme_curbe_load(VADriverContextP ctx, struct gen6_encoder_context *gen6_encoder_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct intel_batchbuffer *batch = gen6_encoder_context->base.batch;
struct gen6_vme_context *vme_context = &gen6_encoder_context->vme_context;
BEGIN_BATCH(batch, 4);
static void gen6_vme_idrt(VADriverContextP ctx, struct gen6_encoder_context *gen6_encoder_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct intel_batchbuffer *batch = gen6_encoder_context->base.batch;
struct gen6_vme_context *vme_context = &gen6_encoder_context->vme_context;
BEGIN_BATCH(batch, 4);
static int gen6_vme_media_object(VADriverContextP ctx,
struct encode_state *encode_state,
int mb_x, int mb_y,
- int kernel)
+ int kernel,
+ struct gen6_encoder_context *gen6_encoder_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
struct i965_driver_data *i965 = i965_driver_data(ctx);
+ struct intel_batchbuffer *batch = gen6_encoder_context->base.batch;
struct object_surface *obj_surface = SURFACE(encode_state->current_render_target);
int mb_width = ALIGN(obj_surface->orig_width, 16) / 16;
int len_in_dowrds = 6 + 1;
struct encode_state *encode_state,
struct gen6_encoder_context *gen6_encoder_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct intel_batchbuffer *batch = gen6_encoder_context->base.batch;
VAEncSliceParameterBuffer *pSliceParameter = (VAEncSliceParameterBuffer *)encode_state->slice_params[0]->buffer;
VAEncSequenceParameterBufferH264 *pSequenceParameter = (VAEncSequenceParameterBufferH264 *)encode_state->seq_param->buffer;
int is_intra = pSliceParameter->slice_flags.bits.is_intra;
ADVANCE_BATCH(batch);
/*Step2: State command PIPELINE_SELECT*/
- gen6_vme_pipeline_select(ctx);
+ gen6_vme_pipeline_select(ctx, gen6_encoder_context);
/*Step3: State commands configuring pipeline states*/
- gen6_vme_state_base_address(ctx);
+ gen6_vme_state_base_address(ctx, gen6_encoder_context);
gen6_vme_vfe_state(ctx, gen6_encoder_context);
gen6_vme_curbe_load(ctx, gen6_encoder_context);
gen6_vme_idrt(ctx, gen6_encoder_context);
}
/*Step4: Primitive commands*/
- object_len_in_bytes = gen6_vme_media_object(ctx, encode_state, x, y, is_intra ? VME_INTRA_SHADER : VME_INTER_SHADER);
+ object_len_in_bytes = gen6_vme_media_object(ctx, encode_state, x, y, is_intra ? VME_INTRA_SHADER : VME_INTER_SHADER, gen6_encoder_context);
if (intel_batchbuffer_check_free_space(batch, object_len_in_bytes) == 0) {
assert(0);
struct encode_state *encode_state,
struct gen6_encoder_context *gen6_encoder_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct intel_batchbuffer *batch = gen6_encoder_context->base.batch;
intel_batchbuffer_flush(batch);
}
static void
-i965_bsd_ind_obj_base_address(VADriverContextP ctx, struct decode_state *decode_state, int slice)
+i965_bsd_ind_obj_base_address(VADriverContextP ctx,
+ struct decode_state *decode_state,
+ int slice,
+                              struct i965_h264_context *i965_h264_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct intel_batchbuffer *batch = i965_h264_context->batch;
dri_bo *ind_bo = decode_state->slice_datas[slice]->bo;
struct decode_state *decode_state,
struct i965_h264_context *i965_h264_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct intel_batchbuffer *batch = i965_h264_context->batch;
int qm_present_flag;
int img_struct;
int mbaff_frame_flag;
}
static void
-i965_avc_bsd_qm_state(VADriverContextP ctx, struct decode_state *decode_state)
+i965_avc_bsd_qm_state(VADriverContextP ctx,
+ struct decode_state *decode_state,
+ struct i965_h264_context *i965_h264_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct intel_batchbuffer *batch = i965_h264_context->batch;
int cmd_len;
VAIQMatrixBufferH264 *iq_matrix;
VAPictureParameterBufferH264 *pic_param;
VASliceParameterBufferH264 *slice_param,
struct i965_h264_context *i965_h264_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct intel_batchbuffer *batch = i965_h264_context->batch;
int present_flag, cmd_len, list, j;
struct {
unsigned char bottom_idc:1;
VASliceParameterBufferH264 *slice_param,
struct i965_h264_context *i965_h264_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
struct i965_driver_data *i965 = i965_driver_data(ctx);
+ struct intel_batchbuffer *batch = i965_h264_context->batch;
struct i965_avc_bsd_context *i965_avc_bsd_context;
int i, j;
VAPictureH264 *va_pic;
int slice_index,
struct i965_h264_context *i965_h264_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct intel_batchbuffer *batch = i965_h264_context->batch;
int width_in_mbs = pic_param->picture_width_in_mbs_minus1 + 1;
int height_in_mbs = pic_param->picture_height_in_mbs_minus1 + 1; /* frame height */
int slice_index,
struct i965_h264_context *i965_h264_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct intel_batchbuffer *batch = i965_h264_context->batch;
int width_in_mbs = pic_param->picture_width_in_mbs_minus1 + 1;
int height_in_mbs = pic_param->picture_height_in_mbs_minus1 + 1; /* frame height */
void
i965_avc_bsd_pipeline(VADriverContextP ctx, struct decode_state *decode_state, void *h264_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
struct i965_h264_context *i965_h264_context = (struct i965_h264_context *)h264_context;
+ struct intel_batchbuffer *batch = i965_h264_context->batch;
VAPictureParameterBufferH264 *pic_param;
VASliceParameterBufferH264 *slice_param;
int i, j;
intel_batchbuffer_start_atomic_bcs(batch, 0x1000);
i965_avc_bsd_img_state(ctx, decode_state, i965_h264_context);
- i965_avc_bsd_qm_state(ctx, decode_state);
+ i965_avc_bsd_qm_state(ctx, decode_state, i965_h264_context);
for (j = 0; j < decode_state->num_slice_params; j++) {
assert(decode_state->slice_params && decode_state->slice_params[j]->buffer);
slice_param = (VASliceParameterBufferH264 *)decode_state->slice_params[j]->buffer;
- i965_bsd_ind_obj_base_address(ctx, decode_state, j);
+ i965_bsd_ind_obj_base_address(ctx, decode_state, j, i965_h264_context);
assert(decode_state->slice_params[j]->num_elements == 1); /* FIXME */
for (i = 0; i < decode_state->slice_params[j]->num_elements; i++) {
};
static void
-i965_avc_hw_scoreboard_surface_state(struct i965_avc_hw_scoreboard_context *avc_hw_scoreboard_context)
+i965_avc_hw_scoreboard_surface_state(struct i965_h264_context *i965_h264_context)
{
+ struct i965_avc_hw_scoreboard_context *avc_hw_scoreboard_context = &i965_h264_context->avc_hw_scoreboard_context;
struct i965_surface_state *ss;
dri_bo *bo;
}
static void
-i965_avc_hw_scoreboard_interface_descriptor_table(struct i965_avc_hw_scoreboard_context *avc_hw_scoreboard_context)
+i965_avc_hw_scoreboard_interface_descriptor_table(struct i965_h264_context *i965_h264_context)
{
+ struct i965_avc_hw_scoreboard_context *avc_hw_scoreboard_context = &i965_h264_context->avc_hw_scoreboard_context;
struct i965_interface_descriptor *desc;
dri_bo *bo;
}
static void
-i965_avc_hw_scoreboard_binding_table(struct i965_avc_hw_scoreboard_context *avc_hw_scoreboard_context)
+i965_avc_hw_scoreboard_binding_table(struct i965_h264_context *i965_h264_context)
{
+ struct i965_avc_hw_scoreboard_context *avc_hw_scoreboard_context = &i965_h264_context->avc_hw_scoreboard_context;
unsigned int *binding_table;
dri_bo *bo = avc_hw_scoreboard_context->binding_table.bo;
}
static void
-i965_avc_hw_scoreboard_vfe_state(struct i965_avc_hw_scoreboard_context *avc_hw_scoreboard_context)
+i965_avc_hw_scoreboard_vfe_state(struct i965_h264_context *i965_h264_context)
{
+ struct i965_avc_hw_scoreboard_context *avc_hw_scoreboard_context = &i965_h264_context->avc_hw_scoreboard_context;
struct i965_vfe_state *vfe_state;
dri_bo *bo;
}
static void
-i965_avc_hw_scoreboard_upload_constants(struct i965_avc_hw_scoreboard_context *avc_hw_scoreboard_context)
+i965_avc_hw_scoreboard_upload_constants(struct i965_h264_context *i965_h264_context)
{
+ struct i965_avc_hw_scoreboard_context *avc_hw_scoreboard_context = &i965_h264_context->avc_hw_scoreboard_context;
unsigned char *constant_buffer;
if (avc_hw_scoreboard_context->curbe.upload)
}
static void
-i965_avc_hw_scoreboard_states_setup(struct i965_avc_hw_scoreboard_context *avc_hw_scoreboard_context)
+i965_avc_hw_scoreboard_states_setup(struct i965_h264_context *i965_h264_context)
{
- i965_avc_hw_scoreboard_surface_state(avc_hw_scoreboard_context);
- i965_avc_hw_scoreboard_binding_table(avc_hw_scoreboard_context);
- i965_avc_hw_scoreboard_interface_descriptor_table(avc_hw_scoreboard_context);
- i965_avc_hw_scoreboard_vfe_state(avc_hw_scoreboard_context);
- i965_avc_hw_scoreboard_upload_constants(avc_hw_scoreboard_context);
+ i965_avc_hw_scoreboard_surface_state(i965_h264_context);
+ i965_avc_hw_scoreboard_binding_table(i965_h264_context);
+ i965_avc_hw_scoreboard_interface_descriptor_table(i965_h264_context);
+ i965_avc_hw_scoreboard_vfe_state(i965_h264_context);
+ i965_avc_hw_scoreboard_upload_constants(i965_h264_context);
}
static void
-i965_avc_hw_scoreboard_pipeline_select(VADriverContextP ctx)
+i965_avc_hw_scoreboard_pipeline_select(VADriverContextP ctx, struct i965_h264_context *i965_h264_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct intel_batchbuffer *batch = i965_h264_context->batch;
BEGIN_BATCH(batch, 1);
OUT_BATCH(batch, CMD_PIPELINE_SELECT | PIPELINE_SELECT_MEDIA);
}
static void
-i965_avc_hw_scoreboard_urb_layout(VADriverContextP ctx, struct i965_avc_hw_scoreboard_context *avc_hw_scoreboard_context)
+i965_avc_hw_scoreboard_urb_layout(VADriverContextP ctx, struct i965_h264_context *i965_h264_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct i965_avc_hw_scoreboard_context *avc_hw_scoreboard_context = &i965_h264_context->avc_hw_scoreboard_context;
struct i965_driver_data *i965 = i965_driver_data(ctx);
+ struct intel_batchbuffer *batch = i965_h264_context->batch;
unsigned int vfe_fence, cs_fence;
vfe_fence = avc_hw_scoreboard_context->urb.cs_start;
}
static void
-i965_avc_hw_scoreboard_state_base_address(VADriverContextP ctx)
+i965_avc_hw_scoreboard_state_base_address(VADriverContextP ctx, struct i965_h264_context *i965_h264_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct intel_batchbuffer *batch = i965_h264_context->batch;
BEGIN_BATCH(batch, 8);
OUT_BATCH(batch, CMD_STATE_BASE_ADDRESS | 6);
}
static void
-i965_avc_hw_scoreboard_state_pointers(VADriverContextP ctx, struct i965_avc_hw_scoreboard_context *avc_hw_scoreboard_context)
+i965_avc_hw_scoreboard_state_pointers(VADriverContextP ctx, struct i965_h264_context *i965_h264_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct i965_avc_hw_scoreboard_context *avc_hw_scoreboard_context = &i965_h264_context->avc_hw_scoreboard_context;
+ struct intel_batchbuffer *batch = i965_h264_context->batch;
BEGIN_BATCH(batch, 3);
OUT_BATCH(batch, CMD_MEDIA_STATE_POINTERS | 1);
}
static void
-i965_avc_hw_scoreboard_cs_urb_layout(VADriverContextP ctx, struct i965_avc_hw_scoreboard_context *avc_hw_scoreboard_context)
+i965_avc_hw_scoreboard_cs_urb_layout(VADriverContextP ctx, struct i965_h264_context *i965_h264_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct i965_avc_hw_scoreboard_context *avc_hw_scoreboard_context = &i965_h264_context->avc_hw_scoreboard_context;
+ struct intel_batchbuffer *batch = i965_h264_context->batch;
BEGIN_BATCH(batch, 2);
OUT_BATCH(batch, CMD_CS_URB_STATE | 0);
}
static void
-i965_avc_hw_scoreboard_constant_buffer(VADriverContextP ctx, struct i965_avc_hw_scoreboard_context *avc_hw_scoreboard_context)
+i965_avc_hw_scoreboard_constant_buffer(VADriverContextP ctx, struct i965_h264_context *i965_h264_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct i965_avc_hw_scoreboard_context *avc_hw_scoreboard_context = &i965_h264_context->avc_hw_scoreboard_context;
+ struct intel_batchbuffer *batch = i965_h264_context->batch;
BEGIN_BATCH(batch, 2);
OUT_BATCH(batch, CMD_CONSTANT_BUFFER | (1 << 8) | (2 - 2));
}
static void
-i965_avc_hw_scoreboard_objects(VADriverContextP ctx, struct i965_avc_hw_scoreboard_context *avc_hw_scoreboard_context)
+i965_avc_hw_scoreboard_objects(VADriverContextP ctx, struct i965_h264_context *i965_h264_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
-
+ struct i965_avc_hw_scoreboard_context *avc_hw_scoreboard_context = &i965_h264_context->avc_hw_scoreboard_context;
+ struct intel_batchbuffer *batch = i965_h264_context->batch;
int number_mb_cmds = 512;
int starting_mb_number = avc_hw_scoreboard_context->inline_data.starting_mb_number;
int i;
}
static void
-i965_avc_hw_scoreboard_pipeline_setup(VADriverContextP ctx, struct i965_avc_hw_scoreboard_context *avc_hw_scoreboard_context)
+i965_avc_hw_scoreboard_pipeline_setup(VADriverContextP ctx, struct i965_h264_context *i965_h264_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct intel_batchbuffer *batch = i965_h264_context->batch;
intel_batchbuffer_start_atomic(batch, 0x1000);
intel_batchbuffer_emit_mi_flush(batch);
- i965_avc_hw_scoreboard_pipeline_select(ctx);
- i965_avc_hw_scoreboard_state_base_address(ctx);
- i965_avc_hw_scoreboard_state_pointers(ctx, avc_hw_scoreboard_context);
- i965_avc_hw_scoreboard_urb_layout(ctx, avc_hw_scoreboard_context);
- i965_avc_hw_scoreboard_cs_urb_layout(ctx, avc_hw_scoreboard_context);
- i965_avc_hw_scoreboard_constant_buffer(ctx, avc_hw_scoreboard_context);
- i965_avc_hw_scoreboard_objects(ctx, avc_hw_scoreboard_context);
+ i965_avc_hw_scoreboard_pipeline_select(ctx, i965_h264_context);
+ i965_avc_hw_scoreboard_state_base_address(ctx, i965_h264_context);
+ i965_avc_hw_scoreboard_state_pointers(ctx, i965_h264_context);
+ i965_avc_hw_scoreboard_urb_layout(ctx, i965_h264_context);
+ i965_avc_hw_scoreboard_cs_urb_layout(ctx, i965_h264_context);
+ i965_avc_hw_scoreboard_constant_buffer(ctx, i965_h264_context);
+ i965_avc_hw_scoreboard_objects(ctx, i965_h264_context);
intel_batchbuffer_end_atomic(batch);
}
else
avc_hw_scoreboard_context->hw_kernel.offset = avc_hw_scoreboard_kernel_offset[AVC_HW_SCOREBOARD];
- i965_avc_hw_scoreboard_states_setup(avc_hw_scoreboard_context);
- i965_avc_hw_scoreboard_pipeline_setup(ctx, avc_hw_scoreboard_context);
+ i965_avc_hw_scoreboard_states_setup(i965_h264_context);
+ i965_avc_hw_scoreboard_pipeline_setup(ctx, i965_h264_context);
}
}
}
static void
-i965_avc_ildb_pipeline_select(VADriverContextP ctx)
+i965_avc_ildb_pipeline_select(VADriverContextP ctx, struct i965_h264_context *i965_h264_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct intel_batchbuffer *batch = i965_h264_context->batch;
BEGIN_BATCH(batch, 1);
OUT_BATCH(batch, CMD_PIPELINE_SELECT | PIPELINE_SELECT_MEDIA);
static void
i965_avc_ildb_urb_layout(VADriverContextP ctx, struct i965_h264_context *i965_h264_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
struct i965_driver_data *i965 = i965_driver_data(ctx);
struct i965_avc_ildb_context *avc_ildb_context = &i965_h264_context->avc_ildb_context;
-
+ struct intel_batchbuffer *batch = i965_h264_context->batch;
unsigned int vfe_fence, cs_fence;
vfe_fence = avc_ildb_context->urb.cs_start;
}
static void
-i965_avc_ildb_state_base_address(VADriverContextP ctx)
+i965_avc_ildb_state_base_address(VADriverContextP ctx, struct i965_h264_context *i965_h264_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
struct i965_driver_data *i965 = i965_driver_data(ctx);
+ struct intel_batchbuffer *batch = i965_h264_context->batch;
if (IS_IRONLAKE(i965->intel.device_id)) {
BEGIN_BATCH(batch, 8);
static void
i965_avc_ildb_state_pointers(VADriverContextP ctx, struct i965_h264_context *i965_h264_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
struct i965_avc_ildb_context *avc_ildb_context = &i965_h264_context->avc_ildb_context;
+ struct intel_batchbuffer *batch = i965_h264_context->batch;
BEGIN_BATCH(batch, 3);
OUT_BATCH(batch, CMD_MEDIA_STATE_POINTERS | 1);
static void
i965_avc_ildb_cs_urb_layout(VADriverContextP ctx, struct i965_h264_context *i965_h264_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
struct i965_avc_ildb_context *avc_ildb_context = &i965_h264_context->avc_ildb_context;
+ struct intel_batchbuffer *batch = i965_h264_context->batch;
BEGIN_BATCH(batch, 2);
OUT_BATCH(batch, CMD_CS_URB_STATE | 0);
static void
i965_avc_ildb_constant_buffer(VADriverContextP ctx, struct i965_h264_context *i965_h264_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
struct i965_avc_ildb_context *avc_ildb_context = &i965_h264_context->avc_ildb_context;
+ struct intel_batchbuffer *batch = i965_h264_context->batch;
BEGIN_BATCH(batch, 2);
OUT_BATCH(batch, CMD_CONSTANT_BUFFER | (1 << 8) | (2 - 2));
static void
i965_avc_ildb_objects(VADriverContextP ctx, struct i965_h264_context *i965_h264_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
struct i965_avc_ildb_context *avc_ildb_context = &i965_h264_context->avc_ildb_context;
+ struct intel_batchbuffer *batch = i965_h264_context->batch;
BEGIN_BATCH(batch, 6);
OUT_BATCH(batch, CMD_MEDIA_OBJECT | 4);
static void
i965_avc_ildb_pipeline_setup(VADriverContextP ctx, struct i965_h264_context *i965_h264_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct intel_batchbuffer *batch = i965_h264_context->batch;
intel_batchbuffer_emit_mi_flush(batch);
- i965_avc_ildb_pipeline_select(ctx);
- i965_avc_ildb_state_base_address(ctx);
+ i965_avc_ildb_pipeline_select(ctx, i965_h264_context);
+ i965_avc_ildb_state_base_address(ctx, i965_h264_context);
i965_avc_ildb_state_pointers(ctx, i965_h264_context);
i965_avc_ildb_urb_layout(ctx, i965_h264_context);
i965_avc_ildb_cs_urb_layout(ctx, i965_h264_context);
VASurfaceID render_target,
VASurfaceStatus *status) /* out */
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
struct i965_driver_data *i965 = i965_driver_data(ctx);
struct object_surface *obj_surface = SURFACE(render_target);
assert(obj_surface);
- /* Commit pending operations to the HW */
- intel_batchbuffer_flush(batch);
-
/* Usually GEM will handle synchronization with the graphics hardware */
#if 0
if (obj_surface->bo) {
if (i965_render_init(ctx) == False)
return VA_STATUS_ERROR_UNKNOWN;
+ i965->batch = intel_batchbuffer_new(&i965->intel, I915_EXEC_RENDER);
+
return VA_STATUS_SUCCESS;
}
unsigned int height,
VAImageID image)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
struct i965_driver_data *i965 = i965_driver_data(ctx);
struct i965_render_state *render_state = &i965->render_state;
y + height > obj_image->image.height)
return VA_STATUS_ERROR_INVALID_PARAMETER;
- /* Commit pending operations to the HW */
- intel_batchbuffer_flush(batch);
-
VAStatus va_status;
void *image_data = NULL;
{
struct i965_driver_data *i965 = i965_driver_data(ctx);
+ if (i965->batch)
+ intel_batchbuffer_free(i965->batch);
+
if (i965_render_terminate(ctx) == False)
return VA_STATUS_ERROR_UNKNOWN;
union codec_state *codec_state,
struct hw_context *hw_context);
void (*destroy)(void *);
+ struct intel_batchbuffer *batch;
};
struct object_context
struct object_heap buffer_heap;
struct object_heap image_heap;
struct object_heap subpic_heap;
- struct i965_render_state render_state;
struct hw_codec_info *codec_info;
+
+ struct intel_batchbuffer *batch;
+ struct i965_render_state render_state;
void *pp_context;
};
gen6_mfc_context_destroy(&gen6_encoder_context->mfc_context);
gen6_vme_context_destroy(&gen6_encoder_context->vme_context);
+ intel_batchbuffer_free(gen6_encoder_context->base.batch);
free(gen6_encoder_context);
}
struct hw_context *
gen6_enc_hw_context_init(VADriverContextP ctx, VAProfile profile)
{
+ struct intel_driver_data *intel = intel_driver_data(ctx);
struct gen6_encoder_context *gen6_encoder_context = calloc(1, sizeof(struct gen6_encoder_context));
gen6_encoder_context->base.destroy = gen6_encoder_context_destroy;
gen6_encoder_context->base.run = gen6_encoder_end_picture;
+ gen6_encoder_context->base.batch = intel_batchbuffer_new(intel, I915_EXEC_RENDER);
+
gen6_vme_context_init(ctx, &gen6_encoder_context->vme_context);
gen6_mfc_context_init(ctx, &gen6_encoder_context->mfc_context);
#include "i965_media_h264.h"
static void
-i965_media_pipeline_select(VADriverContextP ctx)
+i965_media_pipeline_select(VADriverContextP ctx, struct i965_media_context *media_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct intel_batchbuffer *batch = media_context->base.batch;
BEGIN_BATCH(batch, 1);
OUT_BATCH(batch, CMD_PIPELINE_SELECT | PIPELINE_SELECT_MEDIA);
static void
i965_media_urb_layout(VADriverContextP ctx, struct i965_media_context *media_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
-
struct i965_driver_data *i965 = i965_driver_data(ctx);
+ struct intel_batchbuffer *batch = media_context->base.batch;
unsigned int vfe_fence, cs_fence;
vfe_fence = media_context->urb.cs_start;
static void
i965_media_state_base_address(VADriverContextP ctx, struct i965_media_context *media_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
struct i965_driver_data *i965 = i965_driver_data(ctx);
+ struct intel_batchbuffer *batch = media_context->base.batch;
if (IS_IRONLAKE(i965->intel.device_id)) {
BEGIN_BATCH(batch, 8);
static void
i965_media_state_pointers(VADriverContextP ctx, struct i965_media_context *media_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct intel_batchbuffer *batch = media_context->base.batch;
BEGIN_BATCH(batch, 3);
OUT_BATCH(batch, CMD_MEDIA_STATE_POINTERS | 1);
static void
i965_media_cs_urb_layout(VADriverContextP ctx, struct i965_media_context *media_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct intel_batchbuffer *batch = media_context->base.batch;
BEGIN_BATCH(batch, 2);
OUT_BATCH(batch, CMD_CS_URB_STATE | 0);
static void
i965_media_constant_buffer(VADriverContextP ctx, struct decode_state *decode_state, struct i965_media_context *media_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct intel_batchbuffer *batch = media_context->base.batch;
BEGIN_BATCH(batch, 2);
OUT_BATCH(batch, CMD_CONSTANT_BUFFER | (1 << 8) | (2 - 2));
}
static void
-i965_media_depth_buffer(VADriverContextP ctx)
+i965_media_depth_buffer(VADriverContextP ctx, struct i965_media_context *media_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct intel_batchbuffer *batch = media_context->base.batch;
BEGIN_BATCH(batch, 6);
OUT_BATCH(batch, CMD_DEPTH_BUFFER | 4);
struct decode_state *decode_state,
struct i965_media_context *media_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct intel_batchbuffer *batch = media_context->base.batch;
intel_batchbuffer_start_atomic(batch, 0x1000);
- intel_batchbuffer_emit_mi_flush(batch); /* step 1 */
- i965_media_depth_buffer(ctx);
- i965_media_pipeline_select(ctx); /* step 2 */
+ intel_batchbuffer_emit_mi_flush(batch); /* step 1 */
+ i965_media_depth_buffer(ctx, media_context);
+ i965_media_pipeline_select(ctx, media_context); /* step 2 */
i965_media_urb_layout(ctx, media_context); /* step 3 */
i965_media_pipeline_state(ctx, media_context); /* step 4 */
i965_media_constant_buffer(ctx, decode_state, media_context); /* step 5 */
assert(media_context->media_states_setup);
media_context->media_states_setup(ctx, decode_state, media_context);
i965_media_pipeline_setup(ctx, decode_state, media_context);
+ intel_batchbuffer_flush(hw_context->batch);
}
static void
dri_bo_unreference(media_context->indirect_object.bo);
media_context->indirect_object.bo = NULL;
+ intel_batchbuffer_free(media_context->base.batch);
free(media_context);
}
struct hw_context *
g4x_dec_hw_context_init(VADriverContextP ctx, VAProfile profile)
{
+ struct intel_driver_data *intel = intel_driver_data(ctx);
struct i965_media_context *media_context = calloc(1, sizeof(struct i965_media_context));
media_context->base.destroy = i965_media_context_destroy;
media_context->base.run = i965_media_decode_picture;
+ media_context->base.batch = intel_batchbuffer_new(intel, I915_EXEC_RENDER);
switch (profile) {
case VAProfileMPEG2Simple:
struct hw_context *
ironlake_dec_hw_context_init(VADriverContextP ctx, VAProfile profile)
{
+ struct intel_driver_data *intel = intel_driver_data(ctx);
struct i965_media_context *media_context = calloc(1, sizeof(struct i965_media_context));
media_context->base.destroy = i965_media_context_destroy;
media_context->base.run = i965_media_decode_picture;
+ media_context->base.batch = intel_batchbuffer_new(intel, I915_EXEC_RENDER);
switch (profile) {
case VAProfileMPEG2Simple:
struct decode_state *decode_state,
struct i965_media_context *media_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct intel_batchbuffer *batch = media_context->base.batch;
struct i965_h264_context *i965_h264_context;
unsigned int *object_command;
i965_h264_context->fsid_list[i].frame_store_id = -1;
}
+ i965_h264_context->batch = media_context->base.batch;
+
media_context->private_context = i965_h264_context;
media_context->free_private_context = i965_media_h264_free_private_context;
} fsid_list[16];
struct i965_kernel avc_kernels[NUM_H264_AVC_KERNELS];
+ struct intel_batchbuffer *batch;
};
void i965_media_h264_decode_init(VADriverContextP ctx, struct decode_state *decode_state, struct i965_media_context *media_context);
struct decode_state *decode_state,
struct i965_media_context *media_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
- int i, j;
+ struct intel_batchbuffer *batch = media_context->base.batch;
VASliceParameterBufferMPEG2 *slice_param;
+ int i, j;
for (j = 0; j < decode_state->num_slice_params; j++) {
assert(decode_state->slice_params[j] && decode_state->slice_params[j]->buffer);
static void
ironlake_pp_pipeline_select(VADriverContextP ctx)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct i965_driver_data *i965 = i965_driver_data(ctx);
+ struct intel_batchbuffer *batch = i965->batch;
BEGIN_BATCH(batch, 1);
OUT_BATCH(batch, CMD_PIPELINE_SELECT | PIPELINE_SELECT_MEDIA);
static void
ironlake_pp_urb_layout(VADriverContextP ctx, struct i965_post_processing_context *pp_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct i965_driver_data *i965 = i965_driver_data(ctx);
+ struct intel_batchbuffer *batch = i965->batch;
unsigned int vfe_fence, cs_fence;
vfe_fence = pp_context->urb.cs_start;
static void
ironlake_pp_state_base_address(VADriverContextP ctx)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct i965_driver_data *i965 = i965_driver_data(ctx);
+ struct intel_batchbuffer *batch = i965->batch;
BEGIN_BATCH(batch, 8);
OUT_BATCH(batch, CMD_STATE_BASE_ADDRESS | 6);
static void
ironlake_pp_state_pointers(VADriverContextP ctx, struct i965_post_processing_context *pp_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct i965_driver_data *i965 = i965_driver_data(ctx);
+ struct intel_batchbuffer *batch = i965->batch;
BEGIN_BATCH(batch, 3);
OUT_BATCH(batch, CMD_MEDIA_STATE_POINTERS | 1);
static void
ironlake_pp_cs_urb_layout(VADriverContextP ctx, struct i965_post_processing_context *pp_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct i965_driver_data *i965 = i965_driver_data(ctx);
+ struct intel_batchbuffer *batch = i965->batch;
BEGIN_BATCH(batch, 2);
OUT_BATCH(batch, CMD_CS_URB_STATE | 0);
static void
ironlake_pp_constant_buffer(VADriverContextP ctx, struct i965_post_processing_context *pp_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct i965_driver_data *i965 = i965_driver_data(ctx);
+ struct intel_batchbuffer *batch = i965->batch;
BEGIN_BATCH(batch, 2);
OUT_BATCH(batch, CMD_CONSTANT_BUFFER | (1 << 8) | (2 - 2));
static void
ironlake_pp_object_walker(VADriverContextP ctx, struct i965_post_processing_context *pp_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct i965_driver_data *i965 = i965_driver_data(ctx);
+ struct intel_batchbuffer *batch = i965->batch;
int x, x_steps, y, y_steps;
x_steps = pp_context->pp_x_steps(&pp_context->private_context);
static void
ironlake_pp_pipeline_setup(VADriverContextP ctx)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
struct i965_driver_data *i965 = i965_driver_data(ctx);
+ struct intel_batchbuffer *batch = i965->batch;
struct i965_post_processing_context *pp_context = i965->pp_context;
intel_batchbuffer_start_atomic(batch, 0x1000);
static void
gen6_pp_pipeline_select(VADriverContextP ctx)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct i965_driver_data *i965 = i965_driver_data(ctx);
+ struct intel_batchbuffer *batch = i965->batch;
BEGIN_BATCH(batch, 1);
OUT_BATCH(batch, CMD_PIPELINE_SELECT | PIPELINE_SELECT_MEDIA);
static void
gen6_pp_state_base_address(VADriverContextP ctx)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct i965_driver_data *i965 = i965_driver_data(ctx);
+ struct intel_batchbuffer *batch = i965->batch;
BEGIN_BATCH(batch, 10);
OUT_BATCH(batch, CMD_STATE_BASE_ADDRESS | (10 - 2));
static void
gen6_pp_vfe_state(VADriverContextP ctx, struct i965_post_processing_context *pp_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct i965_driver_data *i965 = i965_driver_data(ctx);
+ struct intel_batchbuffer *batch = i965->batch;
BEGIN_BATCH(batch, 8);
OUT_BATCH(batch, CMD_MEDIA_VFE_STATE | (8 - 2));
static void
gen6_pp_curbe_load(VADriverContextP ctx, struct i965_post_processing_context *pp_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct i965_driver_data *i965 = i965_driver_data(ctx);
+ struct intel_batchbuffer *batch = i965->batch;
assert(pp_context->urb.size_cs_entry * pp_context->urb.num_cs_entries * 512 <= pp_context->curbe.bo->size);
static void
gen6_interface_descriptor_load(VADriverContextP ctx, struct i965_post_processing_context *pp_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct i965_driver_data *i965 = i965_driver_data(ctx);
+ struct intel_batchbuffer *batch = i965->batch;
BEGIN_BATCH(batch, 4);
OUT_BATCH(batch, CMD_MEDIA_INTERFACE_DESCRIPTOR_LOAD | (4 - 2));
static void
gen6_pp_object_walker(VADriverContextP ctx, struct i965_post_processing_context *pp_context)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct i965_driver_data *i965 = i965_driver_data(ctx);
+ struct intel_batchbuffer *batch = i965->batch;
int x, x_steps, y, y_steps;
x_steps = pp_context->pp_x_steps(&pp_context->private_context);
static void
gen6_pp_pipeline_setup(VADriverContextP ctx)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
struct i965_driver_data *i965 = i965_driver_data(ctx);
+ struct intel_batchbuffer *batch = i965->batch;
struct i965_post_processing_context *pp_context = i965->pp_context;
intel_batchbuffer_start_atomic(batch, 0x1000);
static void
i965_render_pipeline_select(VADriverContextP ctx)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct i965_driver_data *i965 = i965_driver_data(ctx);
+ struct intel_batchbuffer *batch = i965->batch;
BEGIN_BATCH(batch, 1);
OUT_BATCH(batch, CMD_PIPELINE_SELECT | PIPELINE_SELECT_3D);
static void
i965_render_state_sip(VADriverContextP ctx)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct i965_driver_data *i965 = i965_driver_data(ctx);
+ struct intel_batchbuffer *batch = i965->batch;
BEGIN_BATCH(batch, 2);
OUT_BATCH(batch, CMD_STATE_SIP | 0);
static void
i965_render_state_base_address(VADriverContextP ctx)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
struct i965_driver_data *i965 = i965_driver_data(ctx);
+ struct intel_batchbuffer *batch = i965->batch;
struct i965_render_state *render_state = &i965->render_state;
if (IS_IRONLAKE(i965->intel.device_id)) {
static void
i965_render_binding_table_pointers(VADriverContextP ctx)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct i965_driver_data *i965 = i965_driver_data(ctx);
+ struct intel_batchbuffer *batch = i965->batch;
BEGIN_BATCH(batch, 6);
OUT_BATCH(batch, CMD_BINDING_TABLE_POINTERS | 4);
static void
i965_render_constant_color(VADriverContextP ctx)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct i965_driver_data *i965 = i965_driver_data(ctx);
+ struct intel_batchbuffer *batch = i965->batch;
BEGIN_BATCH(batch, 5);
OUT_BATCH(batch, CMD_CONSTANT_COLOR | 3);
static void
i965_render_pipelined_pointers(VADriverContextP ctx)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
struct i965_driver_data *i965 = i965_driver_data(ctx);
+ struct intel_batchbuffer *batch = i965->batch;
struct i965_render_state *render_state = &i965->render_state;
BEGIN_BATCH(batch, 7);
static void
i965_render_urb_layout(VADriverContextP ctx)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct i965_driver_data *i965 = i965_driver_data(ctx);
+ struct intel_batchbuffer *batch = i965->batch;
int urb_vs_start, urb_vs_size;
int urb_gs_start, urb_gs_size;
int urb_clip_start, urb_clip_size;
static void
i965_render_cs_urb_layout(VADriverContextP ctx)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct i965_driver_data *i965 = i965_driver_data(ctx);
+ struct intel_batchbuffer *batch = i965->batch;
BEGIN_BATCH(batch, 2);
OUT_BATCH(batch, CMD_CS_URB_STATE | 0);
static void
i965_render_constant_buffer(VADriverContextP ctx)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
struct i965_driver_data *i965 = i965_driver_data(ctx);
+ struct intel_batchbuffer *batch = i965->batch;
struct i965_render_state *render_state = &i965->render_state;
BEGIN_BATCH(batch, 2);
static void
i965_render_drawing_rectangle(VADriverContextP ctx)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
- struct i965_driver_data *i965 = i965_driver_data(ctx);
+ struct i965_driver_data *i965 = i965_driver_data(ctx);
+ struct intel_batchbuffer *batch = i965->batch;
struct i965_render_state *render_state = &i965->render_state;
struct intel_region *dest_region = render_state->draw_region;
static void
i965_render_vertex_elements(VADriverContextP ctx)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
- struct i965_driver_data *i965 = i965_driver_data(ctx);
+ struct i965_driver_data *i965 = i965_driver_data(ctx);
+ struct intel_batchbuffer *batch = i965->batch;
if (IS_IRONLAKE(i965->intel.device_id)) {
BEGIN_BATCH(batch, 5);
unsigned int alpha
)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
struct i965_driver_data *i965 = i965_driver_data(ctx);
+ struct intel_batchbuffer *batch = i965->batch;
unsigned int i;
struct object_image *obj_image = IMAGE(image_id);
static void
i965_render_startup(VADriverContextP ctx)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
struct i965_driver_data *i965 = i965_driver_data(ctx);
+ struct intel_batchbuffer *batch = i965->batch;
struct i965_render_state *render_state = &i965->render_state;
BEGIN_BATCH(batch, 11);
static void
i965_clear_dest_region(VADriverContextP ctx)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
- struct i965_driver_data *i965 = i965_driver_data(ctx);
+ struct i965_driver_data *i965 = i965_driver_data(ctx);
+ struct intel_batchbuffer *batch = i965->batch;
struct i965_render_state *render_state = &i965->render_state;
struct intel_region *dest_region = render_state->draw_region;
unsigned int blt_cmd, br13;
static void
i965_surface_render_pipeline_setup(VADriverContextP ctx)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct i965_driver_data *i965 = i965_driver_data(ctx);
+ struct intel_batchbuffer *batch = i965->batch;
i965_clear_dest_region(ctx);
intel_batchbuffer_start_atomic(batch, 0x1000);
static void
i965_subpic_render_pipeline_setup(VADriverContextP ctx)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct i965_driver_data *i965 = i965_driver_data(ctx);
+ struct intel_batchbuffer *batch = i965->batch;
intel_batchbuffer_start_atomic(batch, 0x1000);
intel_batchbuffer_emit_mi_flush(batch);
unsigned short desth,
unsigned int flag)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct i965_driver_data *i965 = i965_driver_data(ctx);
+ struct intel_batchbuffer *batch = i965->batch;
i965_render_initialize(ctx);
i965_surface_render_state_setup(ctx, surface,
unsigned short destw,
unsigned short desth)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
struct i965_driver_data *i965 = i965_driver_data(ctx);
+ struct intel_batchbuffer *batch = i965->batch;
struct object_surface *obj_surface = SURFACE(surface);
struct object_subpic *obj_subpic = SUBPIC(obj_surface->subpic);
static void
gen6_emit_invarient_states(VADriverContextP ctx)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct i965_driver_data *i965 = i965_driver_data(ctx);
+ struct intel_batchbuffer *batch = i965->batch;
OUT_BATCH(batch, CMD_PIPELINE_SELECT | PIPELINE_SELECT_3D);
static void
gen6_emit_state_base_address(VADriverContextP ctx)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
struct i965_driver_data *i965 = i965_driver_data(ctx);
+ struct intel_batchbuffer *batch = i965->batch;
struct i965_render_state *render_state = &i965->render_state;
OUT_BATCH(batch, CMD_STATE_BASE_ADDRESS | (10 - 2));
static void
gen6_emit_viewport_state_pointers(VADriverContextP ctx)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
struct i965_driver_data *i965 = i965_driver_data(ctx);
+ struct intel_batchbuffer *batch = i965->batch;
struct i965_render_state *render_state = &i965->render_state;
OUT_BATCH(batch, GEN6_3DSTATE_VIEWPORT_STATE_POINTERS |
static void
gen6_emit_urb(VADriverContextP ctx)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct i965_driver_data *i965 = i965_driver_data(ctx);
+ struct intel_batchbuffer *batch = i965->batch;
OUT_BATCH(batch, GEN6_3DSTATE_URB | (3 - 2));
OUT_BATCH(batch, ((1 - 1) << GEN6_3DSTATE_URB_VS_SIZE_SHIFT) |
static void
gen6_emit_cc_state_pointers(VADriverContextP ctx)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
struct i965_driver_data *i965 = i965_driver_data(ctx);
+ struct intel_batchbuffer *batch = i965->batch;
struct i965_render_state *render_state = &i965->render_state;
OUT_BATCH(batch, GEN6_3DSTATE_CC_STATE_POINTERS | (4 - 2));
static void
gen6_emit_sampler_state_pointers(VADriverContextP ctx)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
struct i965_driver_data *i965 = i965_driver_data(ctx);
+ struct intel_batchbuffer *batch = i965->batch;
struct i965_render_state *render_state = &i965->render_state;
OUT_BATCH(batch, GEN6_3DSTATE_SAMPLER_STATE_POINTERS |
static void
gen6_emit_binding_table(VADriverContextP ctx)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct i965_driver_data *i965 = i965_driver_data(ctx);
+ struct intel_batchbuffer *batch = i965->batch;
/* Binding table pointers */
OUT_BATCH(batch, CMD_BINDING_TABLE_POINTERS |
static void
gen6_emit_depth_buffer_state(VADriverContextP ctx)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct i965_driver_data *i965 = i965_driver_data(ctx);
+ struct intel_batchbuffer *batch = i965->batch;
OUT_BATCH(batch, CMD_DEPTH_BUFFER | (7 - 2));
OUT_BATCH(batch, (I965_SURFACE_NULL << CMD_DEPTH_BUFFER_TYPE_SHIFT) |
static void
gen6_emit_vs_state(VADriverContextP ctx)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct i965_driver_data *i965 = i965_driver_data(ctx);
+ struct intel_batchbuffer *batch = i965->batch;
/* disable VS constant buffer */
OUT_BATCH(batch, GEN6_3DSTATE_CONSTANT_VS | (5 - 2));
static void
gen6_emit_gs_state(VADriverContextP ctx)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct i965_driver_data *i965 = i965_driver_data(ctx);
+ struct intel_batchbuffer *batch = i965->batch;
/* disable GS constant buffer */
OUT_BATCH(batch, GEN6_3DSTATE_CONSTANT_GS | (5 - 2));
static void
gen6_emit_clip_state(VADriverContextP ctx)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct i965_driver_data *i965 = i965_driver_data(ctx);
+ struct intel_batchbuffer *batch = i965->batch;
OUT_BATCH(batch, GEN6_3DSTATE_CLIP | (4 - 2));
OUT_BATCH(batch, 0);
static void
gen6_emit_sf_state(VADriverContextP ctx)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct i965_driver_data *i965 = i965_driver_data(ctx);
+ struct intel_batchbuffer *batch = i965->batch;
OUT_BATCH(batch, GEN6_3DSTATE_SF | (20 - 2));
OUT_BATCH(batch, (1 << GEN6_3DSTATE_SF_NUM_OUTPUTS_SHIFT) |
static void
gen6_emit_wm_state(VADriverContextP ctx, int kernel)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
struct i965_driver_data *i965 = i965_driver_data(ctx);
+ struct intel_batchbuffer *batch = i965->batch;
struct i965_render_state *render_state = &i965->render_state;
OUT_BATCH(batch, GEN6_3DSTATE_CONSTANT_PS |
static void
gen6_emit_vertex_element_state(VADriverContextP ctx)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct i965_driver_data *i965 = i965_driver_data(ctx);
+ struct intel_batchbuffer *batch = i965->batch;
/* Set up our vertex elements, sourced from the single vertex buffer. */
OUT_BATCH(batch, CMD_VERTEX_ELEMENTS | (5 - 2));
static void
gen6_emit_vertices(VADriverContextP ctx)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
struct i965_driver_data *i965 = i965_driver_data(ctx);
+ struct intel_batchbuffer *batch = i965->batch;
struct i965_render_state *render_state = &i965->render_state;
BEGIN_BATCH(batch, 11);
static void
gen6_render_emit_states(VADriverContextP ctx, int kernel)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct i965_driver_data *i965 = i965_driver_data(ctx);
+ struct intel_batchbuffer *batch = i965->batch;
intel_batchbuffer_start_atomic(batch, 0x1000);
intel_batchbuffer_emit_mi_flush(batch);
unsigned short desth,
unsigned int flag)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
+ struct i965_driver_data *i965 = i965_driver_data(ctx);
+ struct intel_batchbuffer *batch = i965->batch;
gen6_render_initialize(ctx);
gen6_render_setup_states(ctx, surface,
unsigned short destw,
unsigned short desth)
{
- struct intel_driver_data *intel = intel_driver_data(ctx);
- struct intel_batchbuffer *batch = intel->batch;
struct i965_driver_data *i965 = i965_driver_data(ctx);
+ struct intel_batchbuffer *batch = i965->batch;
struct object_surface *obj_surface = SURFACE(surface);
struct object_subpic *obj_subpic = SUBPIC(obj_surface->subpic);
intel->has_blt = has_blt;
intel_memman_init(intel);
- intel->batch = intel_batchbuffer_new(intel, I915_EXEC_RENDER);
return True;
}
struct intel_driver_data *intel = intel_driver_data(ctx);
intel_memman_terminate(intel);
- intel_batchbuffer_free(intel->batch);
pthread_mutex_destroy(&intel->ctxmutex);
return True;
pthread_mutex_t ctxmutex;
int locked;
- struct intel_batchbuffer *batch;
dri_bufmgr *bufmgr;
unsigned int has_exec2 : 1; /* Flag: has execbuffer2? */