i965_drv_video: [H.264] track frame store index
i965_drv_video / i965_avc_bsd.c
1 /*
2  * Copyright © 2010 Intel Corporation
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the
6  * "Software"), to deal in the Software without restriction, including
7  * without limitation the rights to use, copy, modify, merge, publish,
8  * distribute, sub license, and/or sell copies of the Software, and to
9  * permit persons to whom the Software is furnished to do so, subject to
10  * the following conditions:
11  *
12  * The above copyright notice and this permission notice (including the
13  * next paragraph) shall be included in all copies or substantial portions
14  * of the Software.
15  *
16  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
17  * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
18  * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
19  * IN NO EVENT SHALL PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR
20  * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
21  * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
22  * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
23  *
24  * Authors:
25  *    Xiang Haihao <haihao.xiang@intel.com>
26  *
27  */
28 #include <stdio.h>
29 #include <stdlib.h>
30 #include <string.h>
31 #include <assert.h>
32
33 #include "va_backend.h"
34
35 #include "intel_batchbuffer.h"
36 #include "intel_driver.h"
37
38 #include "i965_defines.h"
39 #include "i965_drv_video.h"
40 #include "i965_avc_bsd.h"
41 #include "i965_media_h264.h"
42 #include "i965_media.h"
43
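/*
 * Releases the per-surface private data: drops the references on the
 * direct MV write buffers and frees the structure itself.
 */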
44 static void 
45 i965_avc_bsd_free_private_surface_data(void **data)
46 {
47     struct i965_avc_bsd_surface *avc_bsd_surface = *data;
48
49     if (!avc_bsd_surface)
50         return;
51
52     dri_bo_unreference(avc_bsd_surface->direct_mv_wr_top_bo);
53     avc_bsd_surface->direct_mv_wr_top_bo = NULL;
54     dri_bo_unreference(avc_bsd_surface->direct_mv_wr_bottom_bo);
55     avc_bsd_surface->direct_mv_wr_bottom_bo = NULL;
56     free(avc_bsd_surface);
57     *data = NULL;
58 }
59
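/*
 * Lazily allocates the per-surface private data used by the BSD unit:
 * one direct MV write buffer for the top field and one for the bottom
 * field.  direct_mv_flag is reset to -1 until BUF_BASE_STATE decides
 * which buffer the current picture writes to.
 */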
60 static void
61 i965_avc_bsd_initialize_private_surface_data(VADriverContextP ctx, struct object_surface *obj_surface)
62 {
63     struct i965_driver_data *i965 = i965_driver_data(ctx);
64     struct i965_avc_bsd_surface *avc_bsd_surface = obj_surface->private_data;
65
66     obj_surface->free_private_data = i965_avc_bsd_free_private_surface_data;
67
68     if (!avc_bsd_surface) {
69         avc_bsd_surface = calloc(1, sizeof(struct i965_avc_bsd_surface));
70
71         avc_bsd_surface->direct_mv_wr_top_bo = dri_bo_alloc(i965->intel.bufmgr,
72                                                             "direct mv wr top",
73                                                             0x90000,
74                                                             64);
75         assert(avc_bsd_surface->direct_mv_wr_top_bo);
76         avc_bsd_surface->direct_mv_wr_bottom_bo = dri_bo_alloc(i965->intel.bufmgr,
77                                                                "direct mv wr bottom",
78                                                                0x90000,
79                                                                64);
80         assert(avc_bsd_surface->direct_mv_wr_bottom_bo);
81         obj_surface->private_data = avc_bsd_surface;
82     }
83
84     avc_bsd_surface->direct_mv_flag = -1;
85 }
86
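/*
 * Points the BSD indirect object base address at the buffer object that
 * holds the slice data for the given slice.
 */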
87 static void
88 i965_bsd_ind_obj_base_address(VADriverContextP ctx, struct decode_state *decode_state, int slice)
89 {
90     dri_bo *ind_bo = decode_state->slice_datas[slice]->bo;
91
92     BEGIN_BCS_BATCH(ctx, 3);
93     OUT_BCS_BATCH(ctx, CMD_BSD_IND_OBJ_BASE_ADDR | (3 - 2));
94     OUT_BCS_RELOC(ctx, ind_bo,
95                   I915_GEM_DOMAIN_INSTRUCTION, 0,
96                   0);
97     OUT_BCS_BATCH(ctx, 0);
98     ADVANCE_BCS_BATCH(ctx);
99 }
100
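/*
 * Emits AVC_BSD_IMG_STATE once per picture from the VA picture
 * parameters: picture size in macroblocks, image structure, MBAFF flag,
 * scan orders, whether application-supplied scaling matrices are
 * present, and the MEDIA_OBJECT_EX header used for the AVC IT commands.
 */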
101 static void
102 i965_avc_bsd_img_state(VADriverContextP ctx, struct decode_state *decode_state)
103 {
104     int qm_present_flag;
105     int img_struct;
106     int mbaff_frame_flag;
107     unsigned int avc_it_command_header;
108     unsigned int width_in_mbs, height_in_mbs;
109     VAPictureParameterBufferH264 *pic_param;
110
111     if (decode_state->iq_matrix && decode_state->iq_matrix->buffer)
112         qm_present_flag = 1;
113     else
114         qm_present_flag = 0; /* built-in QM matrices */
115
116     assert(decode_state->pic_param && decode_state->pic_param->buffer);
117     pic_param = (VAPictureParameterBufferH264 *)decode_state->pic_param->buffer;
118
119     assert(!(pic_param->CurrPic.flags & VA_PICTURE_H264_INVALID));
120
121     if (pic_param->CurrPic.flags & VA_PICTURE_H264_TOP_FIELD)
122         img_struct = 1;
123     else if (pic_param->CurrPic.flags & VA_PICTURE_H264_BOTTOM_FIELD)
124         img_struct = 3;
125     else
126         img_struct = 0;
127
128     if ((img_struct & 0x1) == 0x1) {
129         assert(pic_param->pic_fields.bits.field_pic_flag == 0x1);
130     } else {
131         assert(pic_param->pic_fields.bits.field_pic_flag == 0x0);
132     }
133
134     if (pic_param->seq_fields.bits.frame_mbs_only_flag) { /* a frame containing only frame macroblocks */
135         assert(pic_param->seq_fields.bits.mb_adaptive_frame_field_flag == 0);
136         assert(pic_param->pic_fields.bits.field_pic_flag == 0);
137     } else {
138         assert(pic_param->seq_fields.bits.direct_8x8_inference_flag == 1); /* see H.264 spec */
139     }
140
141     mbaff_frame_flag = (pic_param->seq_fields.bits.mb_adaptive_frame_field_flag &&
142                         !pic_param->pic_fields.bits.field_pic_flag);
143
144     width_in_mbs = ((pic_param->picture_width_in_mbs_minus1 + 1) & 0xff);
145     height_in_mbs = ((pic_param->picture_height_in_mbs_minus1 + 1) & 0xff); /* frame height */
146
147     assert(!((width_in_mbs * height_in_mbs) & 0x8000)); /* hardware requirement */
148
149     /* BSD unit doesn't support 4:2:2 and 4:4:4 pictures */
150     assert(pic_param->seq_fields.bits.chroma_format_idc == 0 || /* monochrome picture */
151            pic_param->seq_fields.bits.chroma_format_idc == 1);  /* 4:2:0 */
152     assert(pic_param->seq_fields.bits.residual_colour_transform_flag == 0); /* only available for 4:4:4 */
153
154     avc_it_command_header = (CMD_MEDIA_OBJECT_EX | (12 - 2));
155
156     BEGIN_BCS_BATCH(ctx, 6);
157     OUT_BCS_BATCH(ctx, CMD_AVC_BSD_IMG_STATE | (6 - 2));
158     OUT_BCS_BATCH(ctx, 
159                   ((width_in_mbs * height_in_mbs) & 0x7fff));
160     OUT_BCS_BATCH(ctx, 
161                   (height_in_mbs << 16) | 
162                   (width_in_mbs << 0));
163     OUT_BCS_BATCH(ctx, 
164                   (pic_param->second_chroma_qp_index_offset << 24) |
165                   (pic_param->chroma_qp_index_offset << 16) | 
166                   (SCAN_RASTER_ORDER << 15) | /* AVC ILDB Data */
167                   (SCAN_SPECIAL_ORDER << 14) | /* AVC IT Command */
168                   (SCAN_RASTER_ORDER << 13) | /* AVC IT Data */
169                   (1 << 12) | /* always 1, hardware requirement */
170                   (qm_present_flag << 10) |
171                   (img_struct << 8) |
172                   (16 << 0)); /* FIXME: always support 16 reference frames ??? */
173     OUT_BCS_BATCH(ctx,
174                   (RESIDUAL_DATA_OFFSET << 24) | /* residual data offset */
175                   (0 << 17) | /* don't overwrite SRT */
176                   (0 << 16) | /* Un-SRT (Unsynchronized Root Thread) */
177                   (0 << 12) | /* FIXME: no 16MV ??? */
178                   (pic_param->seq_fields.bits.chroma_format_idc << 10) |
179                   (1 << 8)  | /* Enable ILDB writing output */
180                   (pic_param->pic_fields.bits.entropy_coding_mode_flag << 7) |
181                   ((!pic_param->pic_fields.bits.reference_pic_flag) << 6) |
182                   (pic_param->pic_fields.bits.constrained_intra_pred_flag << 5) |
183                   (pic_param->seq_fields.bits.direct_8x8_inference_flag << 4) |
184                   (pic_param->pic_fields.bits.transform_8x8_mode_flag << 3) |
185                   (pic_param->seq_fields.bits.frame_mbs_only_flag << 2) |
186                   (mbaff_frame_flag << 1) |
187                   (pic_param->pic_fields.bits.field_pic_flag << 0));
188     OUT_BCS_BATCH(ctx, avc_it_command_header);
189     ADVANCE_BCS_BATCH(ctx);
190 }
191
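/*
 * Emits AVC_BSD_QM_STATE with the six 4x4 scaling lists and, when the
 * 8x8 transform is enabled, the two 8x8 scaling lists.  A no-op when the
 * application supplies no IQ matrix buffer (built-in matrices are used).
 */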
192 static void
193 i965_avc_bsd_qm_state(VADriverContextP ctx, struct decode_state *decode_state)
194 {
195     int cmd_len;
196     VAIQMatrixBufferH264 *iq_matrix;
197     VAPictureParameterBufferH264 *pic_param;
198
199     if (!decode_state->iq_matrix || !decode_state->iq_matrix->buffer)
200         return;
201
202     iq_matrix = (VAIQMatrixBufferH264 *)decode_state->iq_matrix->buffer;
203
204     assert(decode_state->pic_param && decode_state->pic_param->buffer);
205     pic_param = (VAPictureParameterBufferH264 *)decode_state->pic_param->buffer;
206
207     cmd_len = 2 + 6 * 4; /* always load six 4x4 scaling matrices */
208
209     if (pic_param->pic_fields.bits.transform_8x8_mode_flag)
210         cmd_len += 2 * 16; /* load two 8x8 scaling matrices */
211
212     BEGIN_BCS_BATCH(ctx, cmd_len);
213     OUT_BCS_BATCH(ctx, CMD_AVC_BSD_QM_STATE | (cmd_len - 2));
214
215     if (pic_param->pic_fields.bits.transform_8x8_mode_flag)
216         OUT_BCS_BATCH(ctx, 
217                       (0x0  << 8) | /* don't use default built-in matrices */
218                       (0xff << 0)); /* six 4x4 and two 8x8 scaling matrices */
219     else
220         OUT_BCS_BATCH(ctx, 
221                       (0x0  << 8) | /* don't use default built-in matrices */
222                       (0x3f << 0)); /* six 4x4 scaling matrices */
223
224     intel_batchbuffer_data_bcs(ctx, &iq_matrix->ScalingList4x4[0][0], 6 * 4 * 4);
225
226     if (pic_param->pic_fields.bits.transform_8x8_mode_flag)
227         intel_batchbuffer_data_bcs(ctx, &iq_matrix->ScalingList8x8[0][0], 2 * 16 * 4);
228
229     ADVANCE_BCS_BATCH(ctx);
230 }
231
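/*
 * Emits AVC_BSD_SLICE_STATE for P/B slices: packs reference picture
 * lists 0/1 (frame store index, bottom field, field/frame, long-term and
 * non-existing flags) and, when explicit weighted prediction is used,
 * the luma/chroma weight and offset tables.  Weights equal to 128 are
 * recorded for the weight-128 workaround used elsewhere in the driver.
 */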
232 static void
233 i965_avc_bsd_slice_state(VADriverContextP ctx, 
234                          VAPictureParameterBufferH264 *pic_param, 
235                          VASliceParameterBufferH264 *slice_param)
236 {
237     struct i965_driver_data *i965 = i965_driver_data(ctx);
238     struct i965_media_state *media_state = &i965->media_state;
239     struct i965_h264_context *i965_h264_context = (struct i965_h264_context *)media_state->private_context;
240     int present_flag, cmd_len, list, j;
241     struct {
242         unsigned char bottom_idc:1;
243         unsigned char frame_store_index:4;
244         unsigned char field_picture:1;
245         unsigned char long_term:1;
246         unsigned char non_exist:1;
247     } refs[32];
248     char weightoffsets[32 * 6];
249
250     /* don't issue SLICE_STATE for intra-prediction decoding */
251     if (slice_param->slice_type == SLICE_TYPE_I ||
252         slice_param->slice_type == SLICE_TYPE_SI)
253         return;
254
255     cmd_len = 2;
256
257     if (slice_param->slice_type == SLICE_TYPE_P ||
258         slice_param->slice_type == SLICE_TYPE_SP) {
259         present_flag = PRESENT_REF_LIST0;
260         cmd_len += 8;
261     } else { 
262         present_flag = PRESENT_REF_LIST0 | PRESENT_REF_LIST1;
263         cmd_len += 16;
264     }
265
266     if (slice_param->luma_weight_l0_flag | slice_param->chroma_weight_l0_flag) {
267         present_flag |= PRESENT_WEIGHT_OFFSET_L0;
268         cmd_len += 48;
269         assert((pic_param->pic_fields.bits.weighted_pred_flag == 1) || /* P slice */
270                (pic_param->pic_fields.bits.weighted_bipred_idc == 1)); /* B slice */
271     }
272
273     if (slice_param->luma_weight_l1_flag | slice_param->chroma_weight_l1_flag) {
274         present_flag |= PRESENT_WEIGHT_OFFSET_L1;
275         cmd_len += 48;
276         assert(slice_param->slice_type == SLICE_TYPE_B);
277         assert(pic_param->pic_fields.bits.weighted_bipred_idc == 1);
278     }
279
280     BEGIN_BCS_BATCH(ctx, cmd_len);
281     OUT_BCS_BATCH(ctx, CMD_AVC_BSD_SLICE_STATE | (cmd_len - 2));
282     OUT_BCS_BATCH(ctx, present_flag);
283
284     for (list = 0; list < 2; list++) {
285         int flag;
286         VAPictureH264 *va_pic;
287
288         if (list == 0) {
289             flag = PRESENT_REF_LIST0;
290             va_pic = slice_param->RefPicList0;
291         } else {
292             flag = PRESENT_REF_LIST1;
293             va_pic = slice_param->RefPicList1;
294         }
295
296         if (!(present_flag & flag))
297             continue;
298
299         for (j = 0; j < 32; j++) {
300             if (va_pic->flags & VA_PICTURE_H264_INVALID) {
301                 refs[j].non_exist = 1;
302                 refs[j].long_term = 1;
303                 refs[j].field_picture = 1;
304                 refs[j].frame_store_index = 0xf;
305                 refs[j].bottom_idc = 1;
306             } else {
307                 int frame_idx;
308                 
309                 for (frame_idx = 0; frame_idx < ARRAY_ELEMS(i965_h264_context->fsid_list); frame_idx++) {
310                     if (i965_h264_context->fsid_list[frame_idx].surface_id != VA_INVALID_ID &&
311                         va_pic->picture_id == i965_h264_context->fsid_list[frame_idx].surface_id) {
312                         assert(frame_idx == i965_h264_context->fsid_list[frame_idx].frame_store_id);
313                         break;
314                     }
315                 }
316
317                 assert(frame_idx < ARRAY_ELEMS(i965_h264_context->fsid_list));
318                 
319                 refs[j].non_exist = 0;
320                 refs[j].long_term = !!(va_pic->flags & VA_PICTURE_H264_LONG_TERM_REFERENCE);
321                 refs[j].field_picture = !!(va_pic->flags & 
322                                            (VA_PICTURE_H264_TOP_FIELD | 
323                                             VA_PICTURE_H264_BOTTOM_FIELD));
324                 refs[j].frame_store_index = frame_idx;
325                 refs[j].bottom_idc = !!(va_pic->flags & VA_PICTURE_H264_BOTTOM_FIELD);
326             }
327
328             va_pic++;
329         }
330         
331         intel_batchbuffer_data_bcs(ctx, refs, sizeof(refs));
332     }
333
334     i965_h264_context->weight128_luma_l0 = 0;
335     i965_h264_context->weight128_luma_l1 = 0;
336     i965_h264_context->weight128_chroma_l0 = 0;
337     i965_h264_context->weight128_chroma_l1 = 0;
338
339     i965_h264_context->weight128_offset0_flag = 0;
340     i965_h264_context->weight128_offset0 = 0;
341
342     if (present_flag & PRESENT_WEIGHT_OFFSET_L0) {
343         for (j = 0; j < 32; j++) {
344             weightoffsets[j * 6 + 0] = slice_param->luma_offset_l0[j];
345             weightoffsets[j * 6 + 1] = slice_param->luma_weight_l0[j];
346             weightoffsets[j * 6 + 2] = slice_param->chroma_offset_l0[j][0];
347             weightoffsets[j * 6 + 3] = slice_param->chroma_weight_l0[j][0];
348             weightoffsets[j * 6 + 4] = slice_param->chroma_offset_l0[j][1];
349             weightoffsets[j * 6 + 5] = slice_param->chroma_weight_l0[j][1];
350
351             if (pic_param->pic_fields.bits.weighted_bipred_idc == 1) {
352                 if (i965_h264_context->use_hw_w128) {
353                     if (slice_param->luma_weight_l0[j] == 128)
354                         i965_h264_context->weight128_luma_l0 |= (1 << j);
355
356                     if (slice_param->chroma_weight_l0[j][0] == 128 ||
357                         slice_param->chroma_weight_l0[j][1] == 128)
358                         i965_h264_context->weight128_chroma_l0 |= (1 << j);
359                 } else {
360                     /* FIXME: workaround for weight 128 */
361                     if (slice_param->luma_weight_l0[j] == 128 ||
362                         slice_param->chroma_weight_l0[j][0] == 128 ||
363                         slice_param->chroma_weight_l0[j][1] == 128)
364                         i965_h264_context->weight128_offset0_flag = 1;
365                 }
366             }
367         }
368
369         intel_batchbuffer_data_bcs(ctx, weightoffsets, sizeof(weightoffsets));
370     }
371
372     if (present_flag & PRESENT_WEIGHT_OFFSET_L1) {
373         for (j = 0; j < 32; j++) {
374             weightoffsets[j * 6 + 0] = slice_param->luma_offset_l1[j];
375             weightoffsets[j * 6 + 1] = slice_param->luma_weight_l1[j];
376             weightoffsets[j * 6 + 2] = slice_param->chroma_offset_l1[j][0];
377             weightoffsets[j * 6 + 3] = slice_param->chroma_weight_l1[j][0];
378             weightoffsets[j * 6 + 4] = slice_param->chroma_offset_l1[j][1];
379             weightoffsets[j * 6 + 5] = slice_param->chroma_weight_l1[j][1];
380
381             if (pic_param->pic_fields.bits.weighted_bipred_idc == 1) {
382                 if (i965_h264_context->use_hw_w128) {
383                     if (slice_param->luma_weight_l1[j] == 128)
384                         i965_h264_context->weight128_luma_l1 |= (1 << j);
385
386                     if (slice_param->chroma_weight_l1[j][0] == 128 ||
387                         slice_param->chroma_weight_l1[j][1] == 128)
388                         i965_h264_context->weight128_chroma_l1 |= (1 << j);
389                 } else {
390                     if (slice_param->luma_weight_l1[j] == 128 ||
391                         slice_param->chroma_weight_l1[j][0] == 128 ||
392                         slice_param->chroma_weight_l1[j][1] == 128)
393                         i965_h264_context->weight128_offset0_flag = 1;
394                 }
395             }
396         }
397
398         intel_batchbuffer_data_bcs(ctx, weightoffsets, sizeof(weightoffsets));
399     }
400
401     ADVANCE_BCS_BATCH(ctx);
402 }
403
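/*
 * Emits AVC_BSD_BUF_BASE_STATE: the BSD row store, MPR row store,
 * AVC IT command/data buffers, ILDB output buffer, the direct MV write
 * buffers of every tracked reference frame (in frame store order) and of
 * the current picture, followed by the POC list for the references and
 * the current picture.
 */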
404 static void
405 i965_avc_bsd_buf_base_state(VADriverContextP ctx,
406                             VAPictureParameterBufferH264 *pic_param, 
407                             VASliceParameterBufferH264 *slice_param)
408 {
409     struct i965_driver_data *i965 = i965_driver_data(ctx);
410     struct i965_media_state *media_state = &i965->media_state;
411     struct i965_h264_context *i965_h264_context;
412     struct i965_avc_bsd_context *i965_avc_bsd_context;
413     int i, j;
414     VAPictureH264 *va_pic;
415     struct object_surface *obj_surface;
416     struct i965_avc_bsd_surface *avc_bsd_surface;
417
418     assert(media_state->private_context);
419     i965_h264_context = (struct i965_h264_context *)media_state->private_context;
420     i965_avc_bsd_context = &i965_h264_context->i965_avc_bsd_context;
421
422     BEGIN_BCS_BATCH(ctx, 74);
423     OUT_BCS_BATCH(ctx, CMD_AVC_BSD_BUF_BASE_STATE | (74 - 2));
424     OUT_BCS_RELOC(ctx, i965_avc_bsd_context->bsd_raw_store.bo,
425                   I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
426                   0);
427     OUT_BCS_RELOC(ctx, i965_avc_bsd_context->mpr_row_store.bo,
428                   I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
429                   0);
430     OUT_BCS_RELOC(ctx, i965_h264_context->avc_it_command_mb_info.bo,
431                   I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
432                   i965_h264_context->avc_it_command_mb_info.mbs * i965_h264_context->use_avc_hw_scoreboard * MB_CMD_IN_BYTES);
433     OUT_BCS_RELOC(ctx, i965_h264_context->avc_it_data.bo,
434                   I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
435                   (i965_h264_context->avc_it_data.write_offset << 6));
436     OUT_BCS_RELOC(ctx, i965_avc_bsd_context->ildb_data.bo,
437                   I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
438                   0);
439
440     for (i = 0; i < ARRAY_ELEMS(i965_h264_context->fsid_list); i++) {
441         if (i965_h264_context->fsid_list[i].surface_id != VA_INVALID_ID) {
442             int found = 0;
443             for (j = 0; j < ARRAY_ELEMS(pic_param->ReferenceFrames); j++) {
444                 va_pic = &pic_param->ReferenceFrames[j];
445                 
446                 if (va_pic->flags & VA_PICTURE_H264_INVALID)
447                     continue;
448
449                 if (va_pic->picture_id == i965_h264_context->fsid_list[i].surface_id) {
450                     found = 1;
451                     break;
452                 }
453             }
454
455             assert(found == 1);
456
457             if (!(va_pic->flags & VA_PICTURE_H264_INVALID)) {
458                 obj_surface = SURFACE(va_pic->picture_id);
459                 assert(obj_surface);
460                 avc_bsd_surface = obj_surface->private_data;
461             
462                 if (avc_bsd_surface == NULL) {
463                     OUT_BCS_BATCH(ctx, 0);
464                     OUT_BCS_BATCH(ctx, 0);
465                 } else {
466                     assert(avc_bsd_surface->direct_mv_flag != -1);
467
468                     OUT_BCS_RELOC(ctx, avc_bsd_surface->direct_mv_wr_top_bo,
469                                   I915_GEM_DOMAIN_INSTRUCTION, 0,
470                                   0);
471
472                     if (avc_bsd_surface->direct_mv_flag == 1)
473                         OUT_BCS_RELOC(ctx, avc_bsd_surface->direct_mv_wr_bottom_bo,
474                                       I915_GEM_DOMAIN_INSTRUCTION, 0,
475                                       0);
476                     else
477                         OUT_BCS_RELOC(ctx, avc_bsd_surface->direct_mv_wr_top_bo,
478                                       I915_GEM_DOMAIN_INSTRUCTION, 0,
479                                       0);
480                 }
481             } 
482         } else {
483             OUT_BCS_BATCH(ctx, 0);
484             OUT_BCS_BATCH(ctx, 0);
485         }
486     }
487
488     va_pic = &pic_param->CurrPic;
489     assert(!(va_pic->flags & VA_PICTURE_H264_INVALID));
490     obj_surface = SURFACE(va_pic->picture_id);
491     assert(obj_surface);
492     i965_avc_bsd_initialize_private_surface_data(ctx, obj_surface);
493     avc_bsd_surface = obj_surface->private_data;
494     avc_bsd_surface->direct_mv_flag = (pic_param->pic_fields.bits.field_pic_flag &&
495                                        !pic_param->seq_fields.bits.direct_8x8_inference_flag);
496
497     OUT_BCS_RELOC(ctx, avc_bsd_surface->direct_mv_wr_top_bo,
498                   I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
499                   0);
500
501     if (avc_bsd_surface->direct_mv_flag == 1)
502         OUT_BCS_RELOC(ctx, avc_bsd_surface->direct_mv_wr_bottom_bo,
503                       I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
504                       0);
505     else
506         OUT_BCS_RELOC(ctx, avc_bsd_surface->direct_mv_wr_top_bo,
507                       I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
508                       0);
509
510     /* POC List */
511     for (i = 0; i < ARRAY_ELEMS(i965_h264_context->fsid_list); i++) {
512         if (i965_h264_context->fsid_list[i].surface_id != VA_INVALID_ID) {
513             int found = 0;
514             for (j = 0; j < ARRAY_ELEMS(pic_param->ReferenceFrames); j++) {
515                 va_pic = &pic_param->ReferenceFrames[j];
516                 
517                 if (va_pic->flags & VA_PICTURE_H264_INVALID)
518                     continue;
519
520                 if (va_pic->picture_id == i965_h264_context->fsid_list[i].surface_id) {
521                     found = 1;
522                     break;
523                 }
524             }
525
526             assert(found == 1);
527
528             if (!(va_pic->flags & VA_PICTURE_H264_INVALID)) {
529                 OUT_BCS_BATCH(ctx, va_pic->TopFieldOrderCnt);
530                 OUT_BCS_BATCH(ctx, va_pic->BottomFieldOrderCnt);
531             } 
532         } else {
533             OUT_BCS_BATCH(ctx, 0);
534             OUT_BCS_BATCH(ctx, 0);
535         }
536     }
537
538     va_pic = &pic_param->CurrPic;
539     OUT_BCS_BATCH(ctx, va_pic->TopFieldOrderCnt);
540     OUT_BCS_BATCH(ctx, va_pic->BottomFieldOrderCnt);
541
542     ADVANCE_BCS_BATCH(ctx);
543 }
544
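/*
 * Emits the per-slice AVC_BSD_OBJECT command for G4x: slice data size
 * and start address, error handling mode, reference index counts, weight
 * denominators, QP, deblocking parameters and the first macroblock
 * position.  A NULL slice_param emits the phantom slice that terminates
 * the picture.
 */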
545 static void
546 g4x_avc_bsd_object(VADriverContextP ctx, 
547                    struct decode_state *decode_state,
548                    VAPictureParameterBufferH264 *pic_param,
549                    VASliceParameterBufferH264 *slice_param)
550 {
551     int width_in_mbs = pic_param->picture_width_in_mbs_minus1 + 1;
552     int height_in_mbs = pic_param->picture_height_in_mbs_minus1 + 1; /* frame height */
553
554     if (slice_param) {
555         int encrypted, counter_value, cmd_len;
556         int slice_hor_pos, slice_ver_pos;
557         int num_ref_idx_l0, num_ref_idx_l1;
558         int mbaff_picture = (!pic_param->pic_fields.bits.field_pic_flag &&
559                              pic_param->seq_fields.bits.mb_adaptive_frame_field_flag);
560         int slice_data_bit_offset;
561         int weighted_pred_idc = 0;
562         int first_mb_in_slice = 0;
563         int slice_type;
564
565         encrypted = 0; /* FIXME: which flag in VAAPI is used for encryption? */
566
567         if (encrypted) {
568             cmd_len = 9;
569             counter_value = 0; /* FIXME: ??? */
570         } else 
571             cmd_len = 8;
572
573         slice_data_bit_offset = slice_param->slice_data_bit_offset;    
574
575         if (pic_param->pic_fields.bits.entropy_coding_mode_flag == ENTROPY_CABAC)
576             slice_data_bit_offset = ALIGN(slice_data_bit_offset, 0x8);
577
578         if (slice_param->slice_type == SLICE_TYPE_I ||
579             slice_param->slice_type == SLICE_TYPE_SI)
580             slice_type = SLICE_TYPE_I;
581         else if (slice_param->slice_type == SLICE_TYPE_P ||
582                  slice_param->slice_type == SLICE_TYPE_SP)
583             slice_type = SLICE_TYPE_P;
584         else {
585             assert(slice_param->slice_type == SLICE_TYPE_B);
586             slice_type = SLICE_TYPE_B;
587         }
588
589         if (slice_type == SLICE_TYPE_I) {
590             assert(slice_param->num_ref_idx_l0_active_minus1 == 0);
591             assert(slice_param->num_ref_idx_l1_active_minus1 == 0);
592             num_ref_idx_l0 = 0;
593             num_ref_idx_l1 = 0;
594         } else if (slice_type == SLICE_TYPE_P) {
595             assert(slice_param->num_ref_idx_l1_active_minus1 == 0);
596             num_ref_idx_l0 = slice_param->num_ref_idx_l0_active_minus1 + 1;
597             num_ref_idx_l1 = 0;
598         } else {
599             num_ref_idx_l0 = slice_param->num_ref_idx_l0_active_minus1 + 1;
600             num_ref_idx_l1 = slice_param->num_ref_idx_l1_active_minus1 + 1;
601         }
602
603         if (slice_type == SLICE_TYPE_P)
604             weighted_pred_idc = pic_param->pic_fields.bits.weighted_pred_flag;
605         else if (slice_type == SLICE_TYPE_B)
606             weighted_pred_idc = pic_param->pic_fields.bits.weighted_bipred_idc;
607
608         first_mb_in_slice = slice_param->first_mb_in_slice;
609         slice_hor_pos = first_mb_in_slice % width_in_mbs; 
610         slice_ver_pos = first_mb_in_slice / width_in_mbs;
611         first_mb_in_slice = (slice_ver_pos << mbaff_picture) * width_in_mbs + slice_hor_pos;
612         slice_hor_pos <<= mbaff_picture;
613
614         BEGIN_BCS_BATCH(ctx, cmd_len);
615         OUT_BCS_BATCH(ctx, CMD_AVC_BSD_OBJECT | (cmd_len - 2));
616         OUT_BCS_BATCH(ctx, 
617                       (encrypted << 31) |
618                       ((slice_param->slice_data_size - (slice_data_bit_offset >> 3)) << 0));
619         OUT_BCS_BATCH(ctx, 
620                       (slice_param->slice_data_offset +
621                        (slice_data_bit_offset >> 3)));
622         OUT_BCS_BATCH(ctx, 
623                       (0 << 31) | /* concealment mode: 0->intra 16x16 prediction, 1->inter P Copy */
624                       (0 << 14) | /* ignore BSDPrematureComplete Error handling */
625                       (0 << 13) | /* FIXME: ??? */
626                       (0 << 12) | /* ignore MPR Error handling */
627                       (0 << 10) | /* ignore Entropy Error handling */
628                       (0 << 8)  | /* ignore MB Header Error handling */
629                       (slice_type << 0));
630         OUT_BCS_BATCH(ctx, 
631                       (num_ref_idx_l1 << 24) |
632                       (num_ref_idx_l0 << 16) |
633                       (slice_param->chroma_log2_weight_denom << 8) |
634                       (slice_param->luma_log2_weight_denom << 0));
635         OUT_BCS_BATCH(ctx, 
636                       (weighted_pred_idc << 30) |
637                       (slice_param->direct_spatial_mv_pred_flag << 29) |
638                       (slice_param->disable_deblocking_filter_idc << 27) |
639                       (slice_param->cabac_init_idc << 24) |
640                       ((pic_param->pic_init_qp_minus26 + 26 + slice_param->slice_qp_delta) << 16) |
641                       ((slice_param->slice_beta_offset_div2 & 0xf) << 8) |
642                       ((slice_param->slice_alpha_c0_offset_div2 & 0xf) << 0));
643         OUT_BCS_BATCH(ctx, 
644                       (slice_ver_pos << 24) |
645                       (slice_hor_pos << 16) | 
646                       (first_mb_in_slice << 0));
647         OUT_BCS_BATCH(ctx, 
648                       (0 << 7) | /* FIXME: ??? */
649                       ((0x7 - (slice_data_bit_offset & 0x7)) << 0));
650
651         if (encrypted) {
652             OUT_BCS_BATCH(ctx, counter_value);
653         }
654
655         ADVANCE_BCS_BATCH(ctx); 
656     } else {
657         BEGIN_BCS_BATCH(ctx, 8); 
658         OUT_BCS_BATCH(ctx, CMD_AVC_BSD_OBJECT | (8 - 2));
659         OUT_BCS_BATCH(ctx, 0); /* indirect data length for phantom slice is 0 */
660         OUT_BCS_BATCH(ctx, 0); /* indirect data start address for phantom slice is 0 */
661         OUT_BCS_BATCH(ctx, 0);
662         OUT_BCS_BATCH(ctx, 0);
663         OUT_BCS_BATCH(ctx, 0);
664         OUT_BCS_BATCH(ctx, width_in_mbs * height_in_mbs / (1 + !!pic_param->pic_fields.bits.field_pic_flag));
665         OUT_BCS_BATCH(ctx, 0);
666         ADVANCE_BCS_BATCH(ctx);
667     }
668 }
669
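/*
 * Ironlake variant of the AVC_BSD_OBJECT command (16 dwords): same
 * per-slice fields as the G4x version plus the weight-128 bitmasks
 * collected in i965_avc_bsd_slice_state.
 */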
670 static void
671 ironlake_avc_bsd_object(VADriverContextP ctx, 
672                         struct decode_state *decode_state,
673                         VAPictureParameterBufferH264 *pic_param,
674                         VASliceParameterBufferH264 *slice_param)
675 {
676     int width_in_mbs = pic_param->picture_width_in_mbs_minus1 + 1;
677     int height_in_mbs = pic_param->picture_height_in_mbs_minus1 + 1; /* frame height */
678
679     if (slice_param) {
680         struct i965_driver_data *i965 = i965_driver_data(ctx);
681         struct i965_media_state *media_state = &i965->media_state;
682         struct i965_h264_context *i965_h264_context = (struct i965_h264_context *)media_state->private_context;
683         int encrypted, counter_value;
684         int slice_hor_pos, slice_ver_pos;
685         int num_ref_idx_l0, num_ref_idx_l1;
686         int mbaff_picture = (!pic_param->pic_fields.bits.field_pic_flag &&
687                              pic_param->seq_fields.bits.mb_adaptive_frame_field_flag);
688         int slice_data_bit_offset;
689         int weighted_pred_idc = 0;
690         int first_mb_in_slice;
691         int slice_type;
692         encrypted = 0; /* FIXME: which flag in VAAPI is used for encryption? */
693
694         if (encrypted) {
695             counter_value = 0; /* FIXME: ??? */
696         } else 
697             counter_value = 0;
698
699         slice_data_bit_offset = slice_param->slice_data_bit_offset;    
700
701         if (pic_param->pic_fields.bits.entropy_coding_mode_flag == ENTROPY_CABAC)
702             slice_data_bit_offset = ALIGN(slice_data_bit_offset, 0x8);
703
704         if (slice_param->slice_type == SLICE_TYPE_I ||
705             slice_param->slice_type == SLICE_TYPE_SI)
706             slice_type = SLICE_TYPE_I;
707         else if (slice_param->slice_type == SLICE_TYPE_P ||
708                  slice_param->slice_type == SLICE_TYPE_SP)
709             slice_type = SLICE_TYPE_P;
710         else {
711             assert(slice_param->slice_type == SLICE_TYPE_B);
712             slice_type = SLICE_TYPE_B;
713         }
714
715         if (slice_type == SLICE_TYPE_I) {
716             assert(slice_param->num_ref_idx_l0_active_minus1 == 0);
717             assert(slice_param->num_ref_idx_l1_active_minus1 == 0);
718             num_ref_idx_l0 = 0;
719             num_ref_idx_l1 = 0;
720         } else if (slice_type == SLICE_TYPE_P) {
721             assert(slice_param->num_ref_idx_l1_active_minus1 == 0);
722             num_ref_idx_l0 = slice_param->num_ref_idx_l0_active_minus1 + 1;
723             num_ref_idx_l1 = 0;
724         } else {
725             num_ref_idx_l0 = slice_param->num_ref_idx_l0_active_minus1 + 1;
726             num_ref_idx_l1 = slice_param->num_ref_idx_l1_active_minus1 + 1;
727         }
728
729         if (slice_type == SLICE_TYPE_P)
730             weighted_pred_idc = pic_param->pic_fields.bits.weighted_pred_flag;
731         else if (slice_type == SLICE_TYPE_B)
732             weighted_pred_idc = pic_param->pic_fields.bits.weighted_bipred_idc;
733
734         first_mb_in_slice = slice_param->first_mb_in_slice;
735         slice_hor_pos = first_mb_in_slice % width_in_mbs; 
736         slice_ver_pos = first_mb_in_slice / width_in_mbs;
737         first_mb_in_slice = (slice_ver_pos << mbaff_picture) * width_in_mbs + slice_hor_pos;
738         slice_hor_pos <<= mbaff_picture;
739
740         BEGIN_BCS_BATCH(ctx, 16);
741         OUT_BCS_BATCH(ctx, CMD_AVC_BSD_OBJECT | (16 - 2));
742         OUT_BCS_BATCH(ctx, 
743                       (encrypted << 31) |
744                       (0 << 30) | /* FIXME: packet based bit stream */
745                       (0 << 29) | /* FIXME: packet format */
746                       ((slice_param->slice_data_size - (slice_data_bit_offset >> 3)) << 0));
747         OUT_BCS_BATCH(ctx, 
748                       (slice_param->slice_data_offset +
749                        (slice_data_bit_offset >> 3)));
750         OUT_BCS_BATCH(ctx, 
751                       (0 << 31) | /* concealment mode: 0->intra 16x16 prediction, 1->inter P Copy */
752                       (0 << 14) | /* ignore BSDPrematureComplete Error handling */
753                       (0 << 13) | /* FIXME: ??? */
754                       (0 << 12) | /* ignore MPR Error handling */
755                       (0 << 10) | /* ignore Entropy Error handling */
756                       (0 << 8)  | /* ignore MB Header Error handling */
757                       (slice_type << 0));
758         OUT_BCS_BATCH(ctx, 
759                       (num_ref_idx_l1 << 24) |
760                       (num_ref_idx_l0 << 16) |
761                       (slice_param->chroma_log2_weight_denom << 8) |
762                       (slice_param->luma_log2_weight_denom << 0));
763         OUT_BCS_BATCH(ctx, 
764                       (weighted_pred_idc << 30) |
765                       (slice_param->direct_spatial_mv_pred_flag << 29) |
766                       (slice_param->disable_deblocking_filter_idc << 27) |
767                       (slice_param->cabac_init_idc << 24) |
768                       ((pic_param->pic_init_qp_minus26 + 26 + slice_param->slice_qp_delta) << 16) |
769                       ((slice_param->slice_beta_offset_div2 & 0xf) << 8) |
770                       ((slice_param->slice_alpha_c0_offset_div2 & 0xf) << 0));
771         OUT_BCS_BATCH(ctx, 
772                       (slice_ver_pos << 24) |
773                       (slice_hor_pos << 16) | 
774                       (first_mb_in_slice << 0));
775         OUT_BCS_BATCH(ctx, 
776                       (0 << 7) | /* FIXME: ??? */
777                       ((0x7 - (slice_data_bit_offset & 0x7)) << 0));
778         OUT_BCS_BATCH(ctx, counter_value);
779         
780         /* FIXME: dw9-dw11 */
781         OUT_BCS_BATCH(ctx, 0);
782         OUT_BCS_BATCH(ctx, 0);
783         OUT_BCS_BATCH(ctx, 0);
784         OUT_BCS_BATCH(ctx, i965_h264_context->weight128_luma_l0);
785         OUT_BCS_BATCH(ctx, i965_h264_context->weight128_luma_l1);
786         OUT_BCS_BATCH(ctx, i965_h264_context->weight128_chroma_l0);
787         OUT_BCS_BATCH(ctx, i965_h264_context->weight128_chroma_l1);
788
789         ADVANCE_BCS_BATCH(ctx); 
790     } else {
791         BEGIN_BCS_BATCH(ctx, 16);
792         OUT_BCS_BATCH(ctx, CMD_AVC_BSD_OBJECT | (16 - 2));
793         OUT_BCS_BATCH(ctx, 0); /* indirect data length for phantom slice is 0 */
794         OUT_BCS_BATCH(ctx, 0); /* indirect data start address for phantom slice is 0 */
795         OUT_BCS_BATCH(ctx, 0);
796         OUT_BCS_BATCH(ctx, 0);
797         OUT_BCS_BATCH(ctx, 0);
798         OUT_BCS_BATCH(ctx, width_in_mbs * height_in_mbs / (1 + !!pic_param->pic_fields.bits.field_pic_flag));
799         OUT_BCS_BATCH(ctx, 0);
800         OUT_BCS_BATCH(ctx, 0);
801         OUT_BCS_BATCH(ctx, 0);
802         OUT_BCS_BATCH(ctx, 0);
803         OUT_BCS_BATCH(ctx, 0);
804         OUT_BCS_BATCH(ctx, 0);
805         OUT_BCS_BATCH(ctx, 0);
806         OUT_BCS_BATCH(ctx, 0);
807         OUT_BCS_BATCH(ctx, 0);
808         ADVANCE_BCS_BATCH(ctx);
809     }
810 }
811
812 static void
813 i965_avc_bsd_object(VADriverContextP ctx, 
814                     struct decode_state *decode_state,
815                     VAPictureParameterBufferH264 *pic_param,
816                     VASliceParameterBufferH264 *slice_param)
817 {
818     struct i965_driver_data *i965 = i965_driver_data(ctx);
819
820     if (IS_IRONLAKE(i965->intel.device_id))
821         ironlake_avc_bsd_object(ctx, decode_state, pic_param, slice_param);
822     else
823         g4x_avc_bsd_object(ctx, decode_state, pic_param, slice_param);
824 }
825
826 static void
827 i965_avc_bsd_phantom_slice(VADriverContextP ctx, 
828                            struct decode_state *decode_state,
829                            VAPictureParameterBufferH264 *pic_param)
830 {
831     i965_avc_bsd_object(ctx, decode_state, pic_param, NULL);
832 }
833
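/*
 * Tracks the mapping between VA surface IDs and hardware frame store
 * indices across pictures: entries that are no longer referenced are
 * dropped, each new reference frame is assigned the lowest unused frame
 * store id in the first empty slot, and the list is finally reordered so
 * that slot i holds the entry whose frame store id is i.
 */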
834 static void
835 i965_avc_bsd_frame_store_index(VADriverContextP ctx,
836                                VAPictureParameterBufferH264 *pic_param)
837 {
838     struct i965_driver_data *i965 = i965_driver_data(ctx);
839     struct i965_media_state *media_state = &i965->media_state;
840     struct i965_h264_context *i965_h264_context = (struct i965_h264_context *)media_state->private_context;
841     int i, j;
842
843     assert(ARRAY_ELEMS(i965_h264_context->fsid_list) == ARRAY_ELEMS(pic_param->ReferenceFrames));
844
845     for (i = 0; i < ARRAY_ELEMS(i965_h264_context->fsid_list); i++) {
846         int found = 0;
847
848         if (i965_h264_context->fsid_list[i].surface_id == VA_INVALID_ID)
849             continue;
850
851         for (j = 0; j < ARRAY_ELEMS(pic_param->ReferenceFrames); j++) {
852             VAPictureH264 *ref_pic = &pic_param->ReferenceFrames[j];
853             if (ref_pic->flags & VA_PICTURE_H264_INVALID)
854                 continue;
855
856             if (i965_h264_context->fsid_list[i].surface_id == ref_pic->picture_id) {
857                 found = 1;
858                 break;
859             }
860         }
861
862         if (!found) {
863             i965_h264_context->fsid_list[i].surface_id = VA_INVALID_ID;
864             i965_h264_context->fsid_list[i].frame_store_id = -1;
865         }
866     }
867
868     for (i = 0; i < ARRAY_ELEMS(pic_param->ReferenceFrames); i++) {
869         VAPictureH264 *ref_pic = &pic_param->ReferenceFrames[i];
870         int found = 0;
871
872         if (ref_pic->flags & VA_PICTURE_H264_INVALID)
873             continue;
874
875         for (j = 0; j < ARRAY_ELEMS(i965_h264_context->fsid_list); j++) {
876             if (i965_h264_context->fsid_list[j].surface_id == VA_INVALID_ID)
877                 continue;
878             
879             if (i965_h264_context->fsid_list[j].surface_id == ref_pic->picture_id) {
880                 found = 1;
881                 break;
882             }
883         }
884
885         if (!found) {
886             int frame_idx;
887             
888             for (frame_idx = 0; frame_idx < ARRAY_ELEMS(i965_h264_context->fsid_list); frame_idx++) {
889                 for (j = 0; j < ARRAY_ELEMS(i965_h264_context->fsid_list); j++) {
890                     if (i965_h264_context->fsid_list[j].surface_id == VA_INVALID_ID)
891                         continue;
892
893                     if (i965_h264_context->fsid_list[j].frame_store_id == frame_idx)
894                         break;
895                 }
896
897                 if (j == ARRAY_ELEMS(i965_h264_context->fsid_list))
898                     break;
899             }
900
901             assert(frame_idx < ARRAY_ELEMS(i965_h264_context->fsid_list));
902
903             for (j = 0; j < ARRAY_ELEMS(i965_h264_context->fsid_list); j++) {
904                 if (i965_h264_context->fsid_list[j].surface_id == VA_INVALID_ID) {
905                     i965_h264_context->fsid_list[j].surface_id = ref_pic->picture_id;
906                     i965_h264_context->fsid_list[j].frame_store_id = frame_idx;
907                     break;
908                 }
909             }
910         }
911     }
912
913     for (i = 0; i < ARRAY_ELEMS(i965_h264_context->fsid_list) - 1; i++) {
914         if (i965_h264_context->fsid_list[i].surface_id != VA_INVALID_ID &&
915             i965_h264_context->fsid_list[i].frame_store_id == i)
916             continue;
917
918         for (j = i + 1; j < ARRAY_ELEMS(i965_h264_context->fsid_list); j++) {
919             if (i965_h264_context->fsid_list[j].surface_id != VA_INVALID_ID &&
920                 i965_h264_context->fsid_list[j].frame_store_id == i) {
921                 VASurfaceID id = i965_h264_context->fsid_list[i].surface_id;
922                 int frame_idx = i965_h264_context->fsid_list[i].frame_store_id;
923
924                 i965_h264_context->fsid_list[i].surface_id = i965_h264_context->fsid_list[j].surface_id;
925                 i965_h264_context->fsid_list[i].frame_store_id = i965_h264_context->fsid_list[j].frame_store_id;
926                 i965_h264_context->fsid_list[j].surface_id = id;
927                 i965_h264_context->fsid_list[j].frame_store_id = frame_idx;
928                 break;
929             }
930         }
931     }
932 }
933
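/*
 * Top-level BSD decoding for one picture: resolve the frame store
 * indices, then emit IMG and QM state followed by, for every slice, the
 * indirect object base address, slice state, buffer base state and BSD
 * object, and finish with a phantom slice and a BCS MI_FLUSH.
 */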
934 void 
935 i965_avc_bsd_pipeline(VADriverContextP ctx, struct decode_state *decode_state)
936 {
937     int i, j;
938     VAPictureParameterBufferH264 *pic_param;
939     VASliceParameterBufferH264 *slice_param;
940
941     assert(decode_state->pic_param && decode_state->pic_param->buffer);
942     pic_param = (VAPictureParameterBufferH264 *)decode_state->pic_param->buffer;
943     i965_avc_bsd_frame_store_index(ctx, pic_param);
944     intel_batchbuffer_start_atomic_bcs(ctx, 0x1000);
945
946     i965_avc_bsd_img_state(ctx, decode_state);
947     i965_avc_bsd_qm_state(ctx, decode_state);
948
949     for (j = 0; j < decode_state->num_slice_params; j++) {
950         assert(decode_state->slice_params && decode_state->slice_params[j]->buffer);
951         slice_param = (VASliceParameterBufferH264 *)decode_state->slice_params[j]->buffer;
952
953         i965_bsd_ind_obj_base_address(ctx, decode_state, j);
954
955         assert(decode_state->slice_params[j]->num_elements == 1);  /* FIXME */
956         for (i = 0; i < decode_state->slice_params[j]->num_elements; i++) {
957             assert(slice_param->slice_data_flag == VA_SLICE_DATA_FLAG_ALL);
958             assert((slice_param->slice_type == SLICE_TYPE_I) ||
959                    (slice_param->slice_type == SLICE_TYPE_SI) ||
960                    (slice_param->slice_type == SLICE_TYPE_P) ||
961                    (slice_param->slice_type == SLICE_TYPE_SP) ||
962                    (slice_param->slice_type == SLICE_TYPE_B));
963
964             i965_avc_bsd_slice_state(ctx, pic_param, slice_param);
965             i965_avc_bsd_buf_base_state(ctx, pic_param, slice_param);
966             i965_avc_bsd_object(ctx, decode_state, pic_param, slice_param);
967             slice_param++;
968         }
969
970     }
971
972     i965_avc_bsd_phantom_slice(ctx, decode_state, pic_param);
973     intel_batchbuffer_emit_mi_flush_bcs(ctx);
974     intel_batchbuffer_end_atomic_bcs(ctx);
975     intel_batchbuffer_flush_bcs(ctx);
976 }
977
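/*
 * (Re)allocates the fixed-size buffers shared by all pictures of the
 * context: the BSD raw store and MPR row store, sized for up to 120
 * macroblocks per row, and the ILDB data buffer.
 */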
978 void 
979 i965_avc_bsd_decode_init(VADriverContextP ctx)
980 {
981     struct i965_driver_data *i965 = i965_driver_data(ctx);
982     struct i965_media_state *media_state = &i965->media_state;
983     struct i965_h264_context *i965_h264_context = (struct i965_h264_context *)media_state->private_context;
984     struct i965_avc_bsd_context *i965_avc_bsd_context;
985     dri_bo *bo;
986
987     assert(i965_h264_context);
988     i965_avc_bsd_context = &i965_h264_context->i965_avc_bsd_context;
989
990     dri_bo_unreference(i965_avc_bsd_context->bsd_raw_store.bo);
991     bo = dri_bo_alloc(i965->intel.bufmgr,
992                       "bsd raw store",
993                       0x4000, /* at least 11520 bytes to support 120 MBs per row */
994                       64);
995     assert(bo);
996     i965_avc_bsd_context->bsd_raw_store.bo = bo;
997
998     dri_bo_unreference(i965_avc_bsd_context->mpr_row_store.bo);
999     bo = dri_bo_alloc(i965->intel.bufmgr,
1000                       "mpr row store",
1001                       0x2000, /* at least 7680 bytes to support 120 MBs per row */
1002                       64);
1003     assert(bo);
1004     i965_avc_bsd_context->mpr_row_store.bo = bo;
1005
1006     dri_bo_unreference(i965_avc_bsd_context->ildb_data.bo);
1007     bo = dri_bo_alloc(i965->intel.bufmgr,
1008                       "ildb data",
1009                       0x100000, /* at least 1044480 bytes */
1010                       64);
1011     assert(bo);
1012     i965_avc_bsd_context->ildb_data.bo = bo;
1013 }
1014
1015 Bool 
1016 i965_avc_bsd_ternimate(struct i965_avc_bsd_context *i965_avc_bsd_context)
1017 {
1018     dri_bo_unreference(i965_avc_bsd_context->bsd_raw_store.bo);
1019     dri_bo_unreference(i965_avc_bsd_context->mpr_row_store.bo);
1020     dri_bo_unreference(i965_avc_bsd_context->ildb_data.bo);
1021
1022     return True;
1023 }