VME uses reference frame parsed from slice_param instead of hacked DPB
[platform/upstream/libva-intel-driver.git] / src / gen7_vme.c
1 /*
2  * Copyright © 2010-2011 Intel Corporation
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the
6  * "Software"), to deal in the Software without restriction, including
7  * without limitation the rights to use, copy, modify, merge, publish,
8  * distribute, sub license, and/or sell copies of the Software, and to
9  * permit persons to whom the Software is furnished to do so, subject to
10  * the following conditions:
11  *
12  * The above copyright notice and this permission notice (including the
13  * next paragraph) shall be included in all copies or substantial portions
14  * of the Software.
15  *
16  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
17  * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
18  * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
19  * IN NO EVENT SHALL PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR
20  * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
21  * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
22  * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
23  *
24  * Authors:
25  *    Zhao Yakui <yakui.zhao@intel.com>
26  *
27  */
28
29 #include <stdio.h>
30 #include <stdlib.h>
31 #include <stdbool.h>
32 #include <string.h>
33 #include <assert.h>
34
35 #include "intel_batchbuffer.h"
36 #include "intel_driver.h"
37
38 #include "i965_defines.h"
39 #include "i965_drv_video.h"
40 #include "i965_encoder.h"
41 #include "gen6_vme.h"
42 #include "gen6_mfc.h"
43 #ifdef SURFACE_STATE_PADDED_SIZE
44 #undef SURFACE_STATE_PADDED_SIZE
45 #endif
46
#define VME_MSG_LENGTH          32      /* VME state message length, in dwords */

/* Pad every surface-state slot to the larger of the two gen7 surface
 * state layouts so either kind can live at a fixed stride. */
#define SURFACE_STATE_PADDED_SIZE_0_GEN7        ALIGN(sizeof(struct gen7_surface_state), 32)
#define SURFACE_STATE_PADDED_SIZE_1_GEN7        ALIGN(sizeof(struct gen7_surface_state2), 32)
#define SURFACE_STATE_PADDED_SIZE_GEN7          MAX(SURFACE_STATE_PADDED_SIZE_0_GEN7, SURFACE_STATE_PADDED_SIZE_1_GEN7)

#define SURFACE_STATE_PADDED_SIZE               SURFACE_STATE_PADDED_SIZE_GEN7
/* Byte offsets within the GPE surface-state buffer: all padded surface
 * state slots come first, then the binding-table entries. */
#define SURFACE_STATE_OFFSET(index)             (SURFACE_STATE_PADDED_SIZE * index)
#define BINDING_TABLE_OFFSET(index)             (SURFACE_STATE_OFFSET(MAX_MEDIA_SURFACES_GEN6) + sizeof(unsigned int) * index)

#define CURBE_ALLOCATION_SIZE   37              /* in 256-bit */
#define CURBE_TOTAL_DATA_LENGTH (4 * 32)        /* in byte, it should be less than or equal to CURBE_ALLOCATION_SIZE * 32 */
#define CURBE_URB_ENTRY_LENGTH  4               /* in 256-bit, it should be less than or equal to CURBE_TOTAL_DATA_LENGTH / 32 */
59
/* Codec selector for the VME context. */
enum VIDEO_CODING_TYPE{
    VIDEO_CODING_AVC = 0,
    VIDEO_CODING_MPEG2,
    VIDEO_CODING_SUM
};

/* Kernel indices for the AVC VME shaders; order must match the entries
 * in gen7_vme_kernels[] below. */
enum AVC_VME_KERNEL_TYPE{ 
    AVC_VME_INTRA_SHADER = 0,
    AVC_VME_INTER_SHADER,
    AVC_VME_BATCHBUFFER,
    AVC_VME_BINTER_SHADER,      /* inter search for B slices */
    AVC_VME_KERNEL_SUM
};

/* Kernel indices for the MPEG-2 VME shaders; order must match
 * gen7_vme_mpeg2_kernels[] below. */
enum MPEG2_VME_KERNEL_TYPE{
    MPEG2_VME_INTER_SHADER = 0,
    MPEG2_VME_BATCHBUFFER,
    MPEG2_VME_KERNEL_SUM
};
79  
80
/* Ivybridge (gen7) AVC VME kernel binaries, generated from the shader
 * sources under shaders/vme/. */
static const uint32_t gen7_vme_intra_frame[][4] = {
#include "shaders/vme/intra_frame_ivb.g7b"
};

static const uint32_t gen7_vme_inter_frame[][4] = {
#include "shaders/vme/inter_frame_ivb.g7b"
};

static const uint32_t gen7_vme_batchbuffer[][4] = {
#include "shaders/vme/batchbuffer.g7b"
};

static const uint32_t gen7_vme_binter_frame[][4] = {
#include "shaders/vme/inter_bframe_ivb.g7b"
};

/* AVC kernel table; entry order must match enum AVC_VME_KERNEL_TYPE. */
static struct i965_kernel gen7_vme_kernels[] = {
    {
        "AVC VME Intra Frame",
        AVC_VME_INTRA_SHADER,                   /*index*/
        gen7_vme_intra_frame,                   
        sizeof(gen7_vme_intra_frame),           
        NULL
    },
    {
        "AVC VME inter Frame",
        AVC_VME_INTER_SHADER,
        gen7_vme_inter_frame,
        sizeof(gen7_vme_inter_frame),
        NULL
    },
    {
        "AVC VME BATCHBUFFER",
        AVC_VME_BATCHBUFFER,
        gen7_vme_batchbuffer,
        sizeof(gen7_vme_batchbuffer),
        NULL
    },
    {
        "AVC VME binter Frame",
        AVC_VME_BINTER_SHADER,
        gen7_vme_binter_frame,
        sizeof(gen7_vme_binter_frame),
        NULL
    }
};
127
/* Ivybridge (gen7) MPEG-2 VME kernel binaries. */
static const uint32_t gen7_vme_mpeg2_inter_frame[][4] = {
#include "shaders/vme/mpeg2_inter_frame.g7b"
};

static const uint32_t gen7_vme_mpeg2_batchbuffer[][4] = {
#include "shaders/vme/batchbuffer.g7b"
};

/* MPEG-2 kernel table; entry order must match enum MPEG2_VME_KERNEL_TYPE. */
static struct i965_kernel gen7_vme_mpeg2_kernels[] = {
    {
        "MPEG2 VME inter Frame",
        MPEG2_VME_INTER_SHADER,
        gen7_vme_mpeg2_inter_frame,
        sizeof(gen7_vme_mpeg2_inter_frame),
        NULL
    },
    {
        "MPEG2 VME BATCHBUFFER",
        MPEG2_VME_BATCHBUFFER,
        gen7_vme_mpeg2_batchbuffer,
        sizeof(gen7_vme_mpeg2_batchbuffer),
        NULL
    },
};
152
153 /* only used for VME source surface state */
154 static void 
155 gen7_vme_source_surface_state(VADriverContextP ctx,
156                               int index,
157                               struct object_surface *obj_surface,
158                               struct intel_encoder_context *encoder_context)
159 {
160     struct gen6_vme_context *vme_context = encoder_context->vme_context;
161
162     vme_context->vme_surface2_setup(ctx,
163                                     &vme_context->gpe_context,
164                                     obj_surface,
165                                     BINDING_TABLE_OFFSET(index),
166                                     SURFACE_STATE_OFFSET(index));
167 }
168
169 static void
170 gen7_vme_media_source_surface_state(VADriverContextP ctx,
171                                     int index,
172                                     struct object_surface *obj_surface,
173                                     struct intel_encoder_context *encoder_context)
174 {
175     struct gen6_vme_context *vme_context = encoder_context->vme_context;
176
177     vme_context->vme_media_rw_surface_setup(ctx,
178                                             &vme_context->gpe_context,
179                                             obj_surface,
180                                             BINDING_TABLE_OFFSET(index),
181                                             SURFACE_STATE_OFFSET(index));
182 }
183
184 static void
185 gen7_vme_output_buffer_setup(VADriverContextP ctx,
186                              struct encode_state *encode_state,
187                              int index,
188                              struct intel_encoder_context *encoder_context)
189
190 {
191     struct i965_driver_data *i965 = i965_driver_data(ctx);
192     struct gen6_vme_context *vme_context = encoder_context->vme_context;
193     VAEncSequenceParameterBufferH264 *pSequenceParameter = (VAEncSequenceParameterBufferH264 *)encode_state->seq_param_ext->buffer;
194     VAEncSliceParameterBufferH264 *pSliceParameter = (VAEncSliceParameterBufferH264 *)encode_state->slice_params_ext[0]->buffer;
195     int is_intra = pSliceParameter->slice_type == SLICE_TYPE_I;
196     int width_in_mbs = pSequenceParameter->picture_width_in_mbs;
197     int height_in_mbs = pSequenceParameter->picture_height_in_mbs;
198
199     vme_context->vme_output.num_blocks = width_in_mbs * height_in_mbs;
200     vme_context->vme_output.pitch = 16; /* in bytes, always 16 */
201
202     if (is_intra)
203         vme_context->vme_output.size_block = INTRA_VME_OUTPUT_IN_BYTES;
204     else
205         vme_context->vme_output.size_block = INTER_VME_OUTPUT_IN_BYTES;
206
207     vme_context->vme_output.bo = dri_bo_alloc(i965->intel.bufmgr, 
208                                               "VME output buffer",
209                                               vme_context->vme_output.num_blocks * vme_context->vme_output.size_block,
210                                               0x1000);
211     assert(vme_context->vme_output.bo);
212     vme_context->vme_buffer_suface_setup(ctx,
213                                          &vme_context->gpe_context,
214                                          &vme_context->vme_output,
215                                          BINDING_TABLE_OFFSET(index),
216                                          SURFACE_STATE_OFFSET(index));
217 }
218
219 static void
220 gen7_vme_output_vme_batchbuffer_setup(VADriverContextP ctx,
221                                       struct encode_state *encode_state,
222                                       int index,
223                                       struct intel_encoder_context *encoder_context)
224
225 {
226     struct i965_driver_data *i965 = i965_driver_data(ctx);
227     struct gen6_vme_context *vme_context = encoder_context->vme_context;
228     VAEncSequenceParameterBufferH264 *pSequenceParameter = (VAEncSequenceParameterBufferH264 *)encode_state->seq_param_ext->buffer;
229     int width_in_mbs = pSequenceParameter->picture_width_in_mbs;
230     int height_in_mbs = pSequenceParameter->picture_height_in_mbs;
231
232     vme_context->vme_batchbuffer.num_blocks = width_in_mbs * height_in_mbs + 1;
233     vme_context->vme_batchbuffer.size_block = 32; /* 2 OWORDs */
234     vme_context->vme_batchbuffer.pitch = 16;
235     vme_context->vme_batchbuffer.bo = dri_bo_alloc(i965->intel.bufmgr, 
236                                                    "VME batchbuffer",
237                                                    vme_context->vme_batchbuffer.num_blocks * vme_context->vme_batchbuffer.size_block,
238                                                    0x1000);
239     vme_context->vme_buffer_suface_setup(ctx,
240                                          &vme_context->gpe_context,
241                                          &vme_context->vme_batchbuffer,
242                                          BINDING_TABLE_OFFSET(index),
243                                          SURFACE_STATE_OFFSET(index));
244 }
245
246 static VAStatus
247 gen7_vme_surface_setup(VADriverContextP ctx, 
248                        struct encode_state *encode_state,
249                        int is_intra,
250                        struct intel_encoder_context *encoder_context)
251 {
252     struct object_surface *obj_surface;
253     struct i965_driver_data *i965 = i965_driver_data(ctx);
254
255     /*Setup surfaces state*/
256     /* current picture for encoding */
257     obj_surface = encode_state->input_yuv_object;
258     gen7_vme_source_surface_state(ctx, 0, obj_surface, encoder_context);
259     gen7_vme_media_source_surface_state(ctx, 4, obj_surface, encoder_context);
260
261     if (!is_intra) {
262         VAEncSliceParameterBufferH264 *slice_param = (VAEncSliceParameterBufferH264 *)encode_state->slice_params_ext[0]->buffer;
263         int slice_type;
264         struct object_surface *slice_obj_surface;
265         int ref_surface_id;
266
267         slice_type = intel_avc_enc_slice_type_fixup(slice_param->slice_type);
268
269         if (slice_type == SLICE_TYPE_P || slice_type == SLICE_TYPE_B) {
270                 slice_obj_surface = NULL;
271                 ref_surface_id = slice_param->RefPicList0[0].picture_id;
272                 if (ref_surface_id != 0 && ref_surface_id != VA_INVALID_SURFACE) {
273                         slice_obj_surface = SURFACE(ref_surface_id);
274                 }
275                 if (slice_obj_surface && slice_obj_surface->bo) {
276                         obj_surface = slice_obj_surface;
277                 } else {
278                         obj_surface = encode_state->reference_objects[0];
279                 }
280                 /* reference 0 */
281                 if (obj_surface && obj_surface->bo)
282                     gen7_vme_source_surface_state(ctx, 1, obj_surface, encoder_context);
283         }
284         if (slice_type == SLICE_TYPE_B) {
285                 /* reference 1 */
286                 slice_obj_surface = NULL;
287                 ref_surface_id = slice_param->RefPicList1[0].picture_id;
288                 if (ref_surface_id != 0 && ref_surface_id != VA_INVALID_SURFACE) {
289                         slice_obj_surface = SURFACE(ref_surface_id);
290                 }
291                 if (slice_obj_surface && slice_obj_surface->bo) {
292                         obj_surface = slice_obj_surface;
293                 } else {
294                         obj_surface = encode_state->reference_objects[0];
295                 }
296
297                 obj_surface = encode_state->reference_objects[1];
298                 if (obj_surface && obj_surface->bo)
299                         gen7_vme_source_surface_state(ctx, 2, obj_surface, encoder_context);
300         }
301     }
302
303     /* VME output */
304     gen7_vme_output_buffer_setup(ctx, encode_state, 3, encoder_context);
305     gen7_vme_output_vme_batchbuffer_setup(ctx, encode_state, 5, encoder_context);
306
307     return VA_STATUS_SUCCESS;
308 }
309
310 static VAStatus gen7_vme_interface_setup(VADriverContextP ctx, 
311                                          struct encode_state *encode_state,
312                                          struct intel_encoder_context *encoder_context)
313 {
314     struct gen6_vme_context *vme_context = encoder_context->vme_context;
315     struct gen6_interface_descriptor_data *desc;   
316     int i;
317     dri_bo *bo;
318
319     bo = vme_context->gpe_context.idrt.bo;
320     dri_bo_map(bo, 1);
321     assert(bo->virtual);
322     desc = bo->virtual;
323
324     for (i = 0; i < vme_context->vme_kernel_sum; i++) {
325         struct i965_kernel *kernel;
326         kernel = &vme_context->gpe_context.kernels[i];
327         assert(sizeof(*desc) == 32);
328         /*Setup the descritor table*/
329         memset(desc, 0, sizeof(*desc));
330         desc->desc0.kernel_start_pointer = (kernel->bo->offset >> 6);
331         desc->desc2.sampler_count = 1; /* FIXME: */
332         desc->desc2.sampler_state_pointer = (vme_context->vme_state.bo->offset >> 5);
333         desc->desc3.binding_table_entry_count = 1; /* FIXME: */
334         desc->desc3.binding_table_pointer = (BINDING_TABLE_OFFSET(0) >> 5);
335         desc->desc4.constant_urb_entry_read_offset = 0;
336         desc->desc4.constant_urb_entry_read_length = CURBE_URB_ENTRY_LENGTH;
337                 
338         /*kernel start*/
339         dri_bo_emit_reloc(bo,   
340                           I915_GEM_DOMAIN_INSTRUCTION, 0,
341                           0,
342                           i * sizeof(*desc) + offsetof(struct gen6_interface_descriptor_data, desc0),
343                           kernel->bo);
344         /*Sampler State(VME state pointer)*/
345         dri_bo_emit_reloc(bo,
346                           I915_GEM_DOMAIN_INSTRUCTION, 0,
347                           (1 << 2),                                                                     //
348                           i * sizeof(*desc) + offsetof(struct gen6_interface_descriptor_data, desc2),
349                           vme_context->vme_state.bo);
350         desc++;
351     }
352     dri_bo_unmap(bo);
353
354     return VA_STATUS_SUCCESS;
355 }
356
357 static VAStatus gen7_vme_constant_setup(VADriverContextP ctx, 
358                                         struct encode_state *encode_state,
359                                         struct intel_encoder_context *encoder_context)
360 {
361     struct gen6_vme_context *vme_context = encoder_context->vme_context;
362     // unsigned char *constant_buffer;
363     unsigned int *vme_state_message;
364     int mv_num = 32;
365     if (vme_context->h264_level >= 30) {
366         mv_num = 16;
367         if (vme_context->h264_level >= 31)
368                 mv_num = 8;
369     } 
370
371     dri_bo_map(vme_context->gpe_context.curbe.bo, 1);
372     assert(vme_context->gpe_context.curbe.bo->virtual);
373     // constant_buffer = vme_context->curbe.bo->virtual;
374     vme_state_message = (unsigned int *)vme_context->gpe_context.curbe.bo->virtual;
375     vme_state_message[31] = mv_num;
376         
377     /*TODO copy buffer into CURB*/
378
379     dri_bo_unmap( vme_context->gpe_context.curbe.bo);
380
381     return VA_STATUS_SUCCESS;
382 }
383
/* Per-QP intra macroblock mode cost LUT, indexed by QP (0..51); written
 * into dword 16 of the VME state message by gen7_vme_state_setup_fixup(). */
static const unsigned int intra_mb_mode_cost_table[] = {
    0x31110001, // for qp0
    0x09110001, // for qp1
    0x15030001, // for qp2
    0x0b030001, // for qp3
    0x0d030011, // for qp4
    0x17210011, // for qp5
    0x41210011, // for qp6
    0x19210011, // for qp7
    0x25050003, // for qp8
    0x1b130003, // for qp9
    0x1d130003, // for qp10
    0x27070021, // for qp11
    0x51310021, // for qp12
    0x29090021, // for qp13
    0x35150005, // for qp14
    0x2b0b0013, // for qp15
    0x2d0d0013, // for qp16
    0x37170007, // for qp17
    0x61410031, // for qp18
    0x39190009, // for qp19
    0x45250015, // for qp20
    0x3b1b000b, // for qp21
    0x3d1d000d, // for qp22
    0x47270017, // for qp23
    0x71510041, // for qp24 ! center for qp=0..30
    0x49290019, // for qp25
    0x55350025, // for qp26
    0x4b2b001b, // for qp27
    0x4d2d001d, // for qp28
    0x57370027, // for qp29
    0x81610051, // for qp30
    0x57270017, // for qp31
    0x81510041, // for qp32 ! center for qp=31..51
    0x59290019, // for qp33
    0x65350025, // for qp34
    0x5b2b001b, // for qp35
    0x5d2d001d, // for qp36
    0x67370027, // for qp37
    0x91610051, // for qp38
    0x69390029, // for qp39
    0x75450035, // for qp40
    0x6b3b002b, // for qp41
    0x6d3d002d, // for qp42
    0x77470037, // for qp43
    0xa1710061, // for qp44
    0x79490039, // for qp45
    0x85550045, // for qp46
    0x7b4b003b, // for qp47
    0x7d4d003d, // for qp48
    0x87570047, // for qp49
    0xb1810071, // for qp50
    0x89590049  // for qp51
};
438
439 static void gen7_vme_state_setup_fixup(VADriverContextP ctx,
440                                        struct encode_state *encode_state,
441                                        struct intel_encoder_context *encoder_context,
442                                        unsigned int *vme_state_message)
443 {
444     struct gen6_mfc_context *mfc_context = encoder_context->mfc_context;
445     VAEncPictureParameterBufferH264 *pic_param = (VAEncPictureParameterBufferH264 *)encode_state->pic_param_ext->buffer;
446     VAEncSliceParameterBufferH264 *slice_param = (VAEncSliceParameterBufferH264 *)encode_state->slice_params_ext[0]->buffer;
447
448     if (slice_param->slice_type != SLICE_TYPE_I &&
449         slice_param->slice_type != SLICE_TYPE_SI)
450         return;
451     if (encoder_context->rate_control_mode == VA_RC_CQP)
452         vme_state_message[16] = intra_mb_mode_cost_table[pic_param->pic_init_qp + slice_param->slice_qp_delta];
453     else
454         vme_state_message[16] = intra_mb_mode_cost_table[mfc_context->bit_rate_control_context[SLICE_TYPE_I].QpPrimeY];
455 }
456
457 static VAStatus gen7_vme_avc_state_setup(VADriverContextP ctx,
458                                          struct encode_state *encode_state,
459                                          int is_intra,
460                                          struct intel_encoder_context *encoder_context)
461 {
462     struct gen6_vme_context *vme_context = encoder_context->vme_context;
463     unsigned int *vme_state_message;
464         unsigned int *mb_cost_table;
465     int i;
466     VAEncSliceParameterBufferH264 *slice_param = (VAEncSliceParameterBufferH264 *)encode_state->slice_params_ext[0]->buffer;
467
468         mb_cost_table = (unsigned int *)vme_context->vme_state_message;
469     //building VME state message
470     dri_bo_map(vme_context->vme_state.bo, 1);
471     assert(vme_context->vme_state.bo->virtual);
472     vme_state_message = (unsigned int *)vme_context->vme_state.bo->virtual;
473
474     if ((slice_param->slice_type == SLICE_TYPE_P) ||
475         (slice_param->slice_type == SLICE_TYPE_SP)) {
476             vme_state_message[0] = 0x01010101;
477             vme_state_message[1] = 0x10010101;
478             vme_state_message[2] = 0x0F0F0F0F;
479             vme_state_message[3] = 0x100F0F0F;
480             vme_state_message[4] = 0x01010101;
481             vme_state_message[5] = 0x10010101;
482             vme_state_message[6] = 0x0F0F0F0F;
483             vme_state_message[7] = 0x100F0F0F;
484             vme_state_message[8] = 0x01010101;
485             vme_state_message[9] = 0x10010101;
486             vme_state_message[10] = 0x0F0F0F0F;
487             vme_state_message[11] = 0x000F0F0F;
488             vme_state_message[12] = 0x00;
489             vme_state_message[13] = 0x00;
490         } else {
491             vme_state_message[0] = 0x10010101;
492             vme_state_message[1] = 0x100F0F0F;
493             vme_state_message[2] = 0x10010101;
494             vme_state_message[3] = 0x000F0F0F;
495             vme_state_message[4] = 0;
496             vme_state_message[5] = 0;
497             vme_state_message[6] = 0;
498             vme_state_message[7] = 0;
499             vme_state_message[8] = 0;
500             vme_state_message[9] = 0;
501             vme_state_message[10] = 0;
502             vme_state_message[11] = 0;
503             vme_state_message[12] = 0;
504             vme_state_message[13] = 0;
505         }
506
507     vme_state_message[14] = (mb_cost_table[2] & 0xFFFF);
508     vme_state_message[15] = 0;
509     vme_state_message[16] = mb_cost_table[0];
510     vme_state_message[17] = mb_cost_table[1];
511     vme_state_message[18] = mb_cost_table[3];
512     vme_state_message[19] = mb_cost_table[4];
513
514     for(i = 20; i < 32; i++) {
515         vme_state_message[i] = 0;
516     }
517
518     dri_bo_unmap( vme_context->vme_state.bo);
519     return VA_STATUS_SUCCESS;
520 }
521
522 static VAStatus gen7_vme_vme_state_setup(VADriverContextP ctx,
523                                          struct encode_state *encode_state,
524                                          int is_intra,
525                                          struct intel_encoder_context *encoder_context)
526 {
527     struct gen6_vme_context *vme_context = encoder_context->vme_context;
528     unsigned int *vme_state_message;
529     int i;
530         
531     //building VME state message
532     dri_bo_map(vme_context->vme_state.bo, 1);
533     assert(vme_context->vme_state.bo->virtual);
534     vme_state_message = (unsigned int *)vme_context->vme_state.bo->virtual;
535
536     vme_state_message[0] = 0x01010101;
537     vme_state_message[1] = 0x10010101;
538     vme_state_message[2] = 0x0F0F0F0F;
539     vme_state_message[3] = 0x100F0F0F;
540     vme_state_message[4] = 0x01010101;
541     vme_state_message[5] = 0x10010101;
542     vme_state_message[6] = 0x0F0F0F0F;
543     vme_state_message[7] = 0x100F0F0F;
544     vme_state_message[8] = 0x01010101;
545     vme_state_message[9] = 0x10010101;
546     vme_state_message[10] = 0x0F0F0F0F;
547     vme_state_message[11] = 0x000F0F0F;
548     vme_state_message[12] = 0x00;
549     vme_state_message[13] = 0x00;
550
551     vme_state_message[14] = 0x4a4a;
552     vme_state_message[15] = 0x0;
553     vme_state_message[16] = 0x4a4a4a4a;
554     vme_state_message[17] = 0x4a4a4a4a;
555     vme_state_message[18] = 0x21110100;
556     vme_state_message[19] = 0x61514131;
557
558     for(i = 20; i < 32; i++) {
559         vme_state_message[i] = 0;
560     }
561     //vme_state_message[16] = 0x42424242;                       //cost function LUT set 0 for Intra
562
563     gen7_vme_state_setup_fixup(ctx, encode_state, encoder_context, vme_state_message);
564
565     dri_bo_unmap( vme_context->vme_state.bo);
566     return VA_STATUS_SUCCESS;
567 }
568
/*
 * Emit one MEDIA_OBJECT command per macroblock into the software-built
 * VME batchbuffer, terminated by MI_BATCH_BUFFER_END.  Each command's
 * inline data carries the MB position and the intra-prediction neighbor
 * availability flags, which are masked off at slice boundaries.
 */
static void
gen7_vme_fill_vme_batchbuffer(VADriverContextP ctx, 
                              struct encode_state *encode_state,
                              int mb_width, int mb_height,
                              int kernel,
                              int transform_8x8_mode_flag,
                              struct intel_encoder_context *encoder_context)
{
    struct gen6_vme_context *vme_context = encoder_context->vme_context;
    int mb_x = 0, mb_y = 0;
    int i, s, j;
    unsigned int *command_ptr;


    dri_bo_map(vme_context->vme_batchbuffer.bo, 1);
    command_ptr = vme_context->vme_batchbuffer.bo->virtual;

    for (s = 0; s < encode_state->num_slice_params_ext; s++) {
        /* NOTE(review): this cast (and the slice_param++ stride below) uses
         * the MPEG-2 slice layout, yet the visible caller passes H.264 slice
         * parameters.  The leading macroblock_address/num_macroblocks fields
         * line up, but the element stride differs between the two structs,
         * so num_elements > 1 would walk the wrong offsets for H.264 —
         * confirm against the MPEG-2 path before relying on this. */
        VAEncSliceParameterBufferMPEG2 *slice_param = (VAEncSliceParameterBufferMPEG2 *)encode_state->slice_params_ext[s]->buffer;

        for (j = 0; j < encode_state->slice_params_ext[s]->num_elements; j++) {
            int slice_mb_begin = slice_param->macroblock_address;
            int slice_mb_number = slice_param->num_macroblocks;
            unsigned int mb_intra_ub;
            /* non-zero when the slice starts mid-row */
            int slice_mb_x = slice_param->macroblock_address % mb_width;

            for (i = 0; i < slice_mb_number;) {
                int mb_count = i + slice_mb_begin;    

                mb_x = mb_count % mb_width;
                mb_y = mb_count / mb_width;
                mb_intra_ub = 0;

                /* neighbor availability from frame geometry */
                if (mb_x != 0) {
                    mb_intra_ub |= INTRA_PRED_AVAIL_FLAG_AE;
                }

                if (mb_y != 0) {
                    mb_intra_ub |= INTRA_PRED_AVAIL_FLAG_B;

                    if (mb_x != 0)
                        mb_intra_ub |= INTRA_PRED_AVAIL_FLAG_D;

                    if (mb_x != (mb_width -1))
                        mb_intra_ub |= INTRA_PRED_AVAIL_FLAG_C;
                }

                /* first row of the slice: top/left neighbors belong to
                 * another slice and must not be used for prediction */
                if (i < mb_width) {
                    if (i == 0)
                        mb_intra_ub &= ~(INTRA_PRED_AVAIL_FLAG_AE);

                    mb_intra_ub &= ~(INTRA_PRED_AVAIL_FLAG_BCD_MASK);

                    if ((i == (mb_width - 1)) && slice_mb_x) {
                        mb_intra_ub |= INTRA_PRED_AVAIL_FLAG_C;
                    }
                }
                
                if ((i == mb_width) && slice_mb_x) {
                    mb_intra_ub &= ~(INTRA_PRED_AVAIL_FLAG_D);
                }

                /* MEDIA_OBJECT: 6 command dwords + 2 inline-data dwords */
                *command_ptr++ = (CMD_MEDIA_OBJECT | (8 - 2));
                *command_ptr++ = kernel;
                *command_ptr++ = 0;
                *command_ptr++ = 0;
                *command_ptr++ = 0;
                *command_ptr++ = 0;
   
                /*inline data */
                *command_ptr++ = (mb_width << 16 | mb_y << 8 | mb_x);
                *command_ptr++ = ( (1 << 16) | transform_8x8_mode_flag | (mb_intra_ub << 8));

                i += 1;
            }

            slice_param++;
        }
    }

    *command_ptr++ = 0;
    *command_ptr++ = MI_BATCH_BUFFER_END;

    dri_bo_unmap(vme_context->vme_batchbuffer.bo);
}
654
655
656 static void gen7_vme_media_init(VADriverContextP ctx, struct intel_encoder_context *encoder_context)
657 {
658     struct i965_driver_data *i965 = i965_driver_data(ctx);
659     struct gen6_vme_context *vme_context = encoder_context->vme_context;
660     dri_bo *bo;
661
662     i965_gpe_context_init(ctx, &vme_context->gpe_context);
663
664     /* VME output buffer */
665     dri_bo_unreference(vme_context->vme_output.bo);
666     vme_context->vme_output.bo = NULL;
667
668     dri_bo_unreference(vme_context->vme_batchbuffer.bo);
669     vme_context->vme_batchbuffer.bo = NULL;
670
671     /* VME state */
672     dri_bo_unreference(vme_context->vme_state.bo);
673     bo = dri_bo_alloc(i965->intel.bufmgr,
674                       "Buffer",
675                       1024*16, 64);
676     assert(bo);
677     vme_context->vme_state.bo = bo;
678 }
679
680 static void gen7_vme_pipeline_programing(VADriverContextP ctx, 
681                                          struct encode_state *encode_state,
682                                          struct intel_encoder_context *encoder_context)
683 {
684     struct gen6_vme_context *vme_context = encoder_context->vme_context;
685     struct intel_batchbuffer *batch = encoder_context->base.batch;
686     VAEncPictureParameterBufferH264 *pPicParameter = (VAEncPictureParameterBufferH264 *)encode_state->pic_param_ext->buffer;
687     VAEncSliceParameterBufferH264 *pSliceParameter = (VAEncSliceParameterBufferH264 *)encode_state->slice_params_ext[0]->buffer;
688     VAEncSequenceParameterBufferH264 *pSequenceParameter = (VAEncSequenceParameterBufferH264 *)encode_state->seq_param_ext->buffer;
689     int width_in_mbs = pSequenceParameter->picture_width_in_mbs;
690     int height_in_mbs = pSequenceParameter->picture_height_in_mbs;
691     int s;
692     bool allow_hwscore = true;
693     int kernel_shader;
694
695     for (s = 0; s < encode_state->num_slice_params_ext; s++) {
696         pSliceParameter = (VAEncSliceParameterBufferH264 *)encode_state->slice_params_ext[s]->buffer; 
697         if ((pSliceParameter->macroblock_address % width_in_mbs)) {
698                 allow_hwscore = false;
699                 break;
700         }
701     }
702
703     if ((pSliceParameter->slice_type == SLICE_TYPE_I) ||
704         (pSliceParameter->slice_type == SLICE_TYPE_I)) {
705         kernel_shader = AVC_VME_INTRA_SHADER;
706     } else if ((pSliceParameter->slice_type == SLICE_TYPE_P) ||
707         (pSliceParameter->slice_type == SLICE_TYPE_SP)) {
708         kernel_shader = AVC_VME_INTER_SHADER;
709     } else {
710         kernel_shader = AVC_VME_BINTER_SHADER;
711         if (!allow_hwscore)
712              kernel_shader = AVC_VME_INTER_SHADER;
713     }
714
715     if (allow_hwscore)
716         gen7_vme_walker_fill_vme_batchbuffer(ctx, 
717                                   encode_state,
718                                   width_in_mbs, height_in_mbs,
719                                   kernel_shader,
720                                   pPicParameter->pic_fields.bits.transform_8x8_mode_flag,
721                                   encoder_context);
722         
723     else
724         gen7_vme_fill_vme_batchbuffer(ctx, 
725                                   encode_state,
726                                   width_in_mbs, height_in_mbs,
727                                   kernel_shader, 
728                                   pPicParameter->pic_fields.bits.transform_8x8_mode_flag,
729                                   encoder_context);
730
731     intel_batchbuffer_start_atomic(batch, 0x1000);
732     gen6_gpe_pipeline_setup(ctx, &vme_context->gpe_context, batch);
733     BEGIN_BATCH(batch, 2);
734     OUT_BATCH(batch, MI_BATCH_BUFFER_START | (2 << 6));
735     OUT_RELOC(batch,
736               vme_context->vme_batchbuffer.bo,
737               I915_GEM_DOMAIN_COMMAND, 0, 
738               0);
739     ADVANCE_BATCH(batch);
740
741     intel_batchbuffer_end_atomic(batch);        
742 }
743
744 static VAStatus gen7_vme_prepare(VADriverContextP ctx, 
745                                  struct encode_state *encode_state,
746                                  struct intel_encoder_context *encoder_context)
747 {
748     VAStatus vaStatus = VA_STATUS_SUCCESS;
749     VAEncSliceParameterBufferH264 *pSliceParameter = (VAEncSliceParameterBufferH264 *)encode_state->slice_params_ext[0]->buffer;
750     int is_intra = pSliceParameter->slice_type == SLICE_TYPE_I;
751     VAEncSequenceParameterBufferH264 *pSequenceParameter = (VAEncSequenceParameterBufferH264 *)encode_state->seq_param_ext->buffer;
752     struct gen6_vme_context *vme_context = encoder_context->vme_context;
753
754     if (!vme_context->h264_level ||
755                 (vme_context->h264_level != pSequenceParameter->level_idc)) {
756         vme_context->h264_level = pSequenceParameter->level_idc;        
757     }
758         
759     intel_vme_update_mbmv_cost(ctx, encode_state, encoder_context);
760     /*Setup all the memory object*/
761     gen7_vme_surface_setup(ctx, encode_state, is_intra, encoder_context);
762     gen7_vme_interface_setup(ctx, encode_state, encoder_context);
763     gen7_vme_constant_setup(ctx, encode_state, encoder_context);
764     gen7_vme_avc_state_setup(ctx, encode_state, is_intra, encoder_context);
765
766     /*Programing media pipeline*/
767     gen7_vme_pipeline_programing(ctx, encode_state, encoder_context);
768
769     return vaStatus;
770 }
771
772 static VAStatus gen7_vme_run(VADriverContextP ctx, 
773                              struct encode_state *encode_state,
774                              struct intel_encoder_context *encoder_context)
775 {
776     struct intel_batchbuffer *batch = encoder_context->base.batch;
777
778     intel_batchbuffer_flush(batch);
779
780     return VA_STATUS_SUCCESS;
781 }
782
/* Post-run stage of the VME pipeline.  Intentionally a no-op on gen7:
 * no per-frame teardown is required after the batch has been flushed. */
static VAStatus gen7_vme_stop(VADriverContextP ctx, 
                              struct encode_state *encode_state,
                              struct intel_encoder_context *encoder_context)
{
    return VA_STATUS_SUCCESS;
}
789
790 static VAStatus
791 gen7_vme_pipeline(VADriverContextP ctx,
792                   VAProfile profile,
793                   struct encode_state *encode_state,
794                   struct intel_encoder_context *encoder_context)
795 {
796     gen7_vme_media_init(ctx, encoder_context);
797     gen7_vme_prepare(ctx, encode_state, encoder_context);
798     gen7_vme_run(ctx, encode_state, encoder_context);
799     gen7_vme_stop(ctx, encode_state, encoder_context);
800
801     return VA_STATUS_SUCCESS;
802 }
803
804 static void
805 gen7_vme_mpeg2_output_buffer_setup(VADriverContextP ctx,
806                                     struct encode_state *encode_state,
807                                     int index,
808                                     int is_intra,
809                                     struct intel_encoder_context *encoder_context)
810
811 {
812     struct i965_driver_data *i965 = i965_driver_data(ctx);
813     struct gen6_vme_context *vme_context = encoder_context->vme_context;
814     VAEncSequenceParameterBufferMPEG2 *seq_param = (VAEncSequenceParameterBufferMPEG2 *)encode_state->seq_param_ext->buffer;
815     int width_in_mbs = ALIGN(seq_param->picture_width, 16) / 16;
816     int height_in_mbs = ALIGN(seq_param->picture_height, 16) / 16;
817
818     vme_context->vme_output.num_blocks = width_in_mbs * height_in_mbs;
819     vme_context->vme_output.pitch = 16; /* in bytes, always 16 */
820
821     if (is_intra)
822         vme_context->vme_output.size_block = INTRA_VME_OUTPUT_IN_BYTES;
823     else
824         vme_context->vme_output.size_block = INTER_VME_OUTPUT_IN_BYTES;
825
826     vme_context->vme_output.bo = dri_bo_alloc(i965->intel.bufmgr,
827                                               "VME output buffer",
828                                               vme_context->vme_output.num_blocks * vme_context->vme_output.size_block,
829                                               0x1000);
830     assert(vme_context->vme_output.bo);
831     vme_context->vme_buffer_suface_setup(ctx,
832                                          &vme_context->gpe_context,
833                                          &vme_context->vme_output,
834                                          BINDING_TABLE_OFFSET(index),
835                                          SURFACE_STATE_OFFSET(index));
836 }
837
838 static void
839 gen7_vme_mpeg2_output_vme_batchbuffer_setup(VADriverContextP ctx,
840                                              struct encode_state *encode_state,
841                                              int index,
842                                              struct intel_encoder_context *encoder_context)
843
844 {
845     struct i965_driver_data *i965 = i965_driver_data(ctx);
846     struct gen6_vme_context *vme_context = encoder_context->vme_context;
847     VAEncSequenceParameterBufferMPEG2 *seq_param = (VAEncSequenceParameterBufferMPEG2 *)encode_state->seq_param_ext->buffer;
848     int width_in_mbs = ALIGN(seq_param->picture_width, 16) / 16;
849     int height_in_mbs = ALIGN(seq_param->picture_height, 16) / 16;
850
851     vme_context->vme_batchbuffer.num_blocks = width_in_mbs * height_in_mbs + 1;
852     vme_context->vme_batchbuffer.size_block = 32; /* 4 OWORDs */
853     vme_context->vme_batchbuffer.pitch = 16;
854     vme_context->vme_batchbuffer.bo = dri_bo_alloc(i965->intel.bufmgr, 
855                                                    "VME batchbuffer",
856                                                    vme_context->vme_batchbuffer.num_blocks * vme_context->vme_batchbuffer.size_block,
857                                                    0x1000);
858     vme_context->vme_buffer_suface_setup(ctx,
859                                          &vme_context->gpe_context,
860                                          &vme_context->vme_batchbuffer,
861                                          BINDING_TABLE_OFFSET(index),
862                                          SURFACE_STATE_OFFSET(index));
863 }
864
865 static VAStatus
866 gen7_vme_mpeg2_surface_setup(VADriverContextP ctx, 
867                               struct encode_state *encode_state,
868                               int is_intra,
869                               struct intel_encoder_context *encoder_context)
870 {
871     struct object_surface *obj_surface;
872
873     /*Setup surfaces state*/
874     /* current picture for encoding */
875     obj_surface = encode_state->input_yuv_object;
876     gen7_vme_source_surface_state(ctx, 0, obj_surface, encoder_context);
877     gen7_vme_media_source_surface_state(ctx, 4, obj_surface, encoder_context);
878
879     if (!is_intra) {
880         /* reference 0 */
881         obj_surface = encode_state->reference_objects[0];
882         if (obj_surface->bo != NULL)
883             gen7_vme_source_surface_state(ctx, 1, obj_surface, encoder_context);
884
885         /* reference 1 */
886         obj_surface = encode_state->reference_objects[1];
887         if (obj_surface && obj_surface->bo != NULL) 
888             gen7_vme_source_surface_state(ctx, 2, obj_surface, encoder_context);
889     }
890
891     /* VME output */
892     gen7_vme_mpeg2_output_buffer_setup(ctx, encode_state, 3, is_intra, encoder_context);
893     gen7_vme_mpeg2_output_vme_batchbuffer_setup(ctx, encode_state, 5, encoder_context);
894
895     return VA_STATUS_SUCCESS;
896 }
897
/*
 * Fill the secondary batch buffer with one MEDIA_OBJECT command per
 * group of macroblocks for every MPEG-2 slice, then terminate it with
 * MI_BATCH_BUFFER_END.  Each 8-dword command selects the VME kernel
 * and carries the MB position/count as inline data.
 */
static void
gen7_vme_mpeg2_fill_vme_batchbuffer(VADriverContextP ctx,
                                     struct encode_state *encode_state,
                                     int mb_width, int mb_height,
                                     int kernel,
                                     int transform_8x8_mode_flag,
                                     struct intel_encoder_context *encoder_context)
{
    struct gen6_vme_context *vme_context = encoder_context->vme_context;
    int number_mb_cmds;
    int mb_x = 0, mb_y = 0;
    int i, s, j;
    unsigned int *command_ptr;

    /* Map the buffer for CPU writes and emit commands directly into it. */
    dri_bo_map(vme_context->vme_batchbuffer.bo, 1);
    command_ptr = vme_context->vme_batchbuffer.bo->virtual;

    for (s = 0; s < encode_state->num_slice_params_ext; s++) {
        VAEncSliceParameterBufferMPEG2 *slice_param = (VAEncSliceParameterBufferMPEG2 *)encode_state->slice_params_ext[s]->buffer;

        /* A slice parameter buffer may hold several slice elements. */
        for (j = 0; j < encode_state->slice_params_ext[s]->num_elements; j++) {
            int slice_mb_begin = slice_param->macroblock_address;
            int slice_mb_number = slice_param->num_macroblocks;

            for (i = 0; i < slice_mb_number;) {
                int mb_count = i + slice_mb_begin;

                mb_x = mb_count % mb_width;
                mb_y = mb_count / mb_width;

                /* First group covers one MB row; later groups take up to
                 * 128 MBs, with a final short group for the remainder.
                 * NOTE(review): when i == 0 this emits mb_width MBs even
                 * if slice_mb_number < mb_width — presumably slices are
                 * at least one row long; verify against the caller. */
                if( i == 0) {
                    number_mb_cmds = mb_width;
                } else if ((i + 128) <= slice_mb_number) {
                    number_mb_cmds = 128;
                } else {
                    number_mb_cmds = slice_mb_number - i;
                }

                /* MEDIA_OBJECT header: 8 dwords total, length field = 8 - 2. */
                *command_ptr++ = (CMD_MEDIA_OBJECT | (8 - 2));
                *command_ptr++ = kernel;   /* interface descriptor offset */
                *command_ptr++ = 0;
                *command_ptr++ = 0;
                *command_ptr++ = 0;
                *command_ptr++ = 0;

                /*inline data */
                /* dword 6: frame width in MBs and starting MB coordinates;
                 * dword 7: MB count, 8x8 transform flag, first-group bit. */
                *command_ptr++ = (mb_width << 16 | mb_y << 8 | mb_x);
                *command_ptr++ = ( (number_mb_cmds << 16) | transform_8x8_mode_flag | ((i == 0) << 1));

                i += number_mb_cmds;
            }

            slice_param++;
        }
    }

    /* Terminate the secondary batch (padded to an even dword count). */
    *command_ptr++ = 0;
    *command_ptr++ = MI_BATCH_BUFFER_END;

    dri_bo_unmap(vme_context->vme_batchbuffer.bo);
}
959
960 static void
961 gen7_vme_mpeg2_pipeline_programing(VADriverContextP ctx, 
962                                     struct encode_state *encode_state,
963                                     int is_intra,
964                                     struct intel_encoder_context *encoder_context)
965 {
966     struct gen6_vme_context *vme_context = encoder_context->vme_context;
967     struct intel_batchbuffer *batch = encoder_context->base.batch;
968     VAEncSequenceParameterBufferMPEG2 *seq_param = (VAEncSequenceParameterBufferMPEG2 *)encode_state->seq_param_ext->buffer;
969     int width_in_mbs = ALIGN(seq_param->picture_width, 16) / 16;
970     int height_in_mbs = ALIGN(seq_param->picture_height, 16) / 16;
971
972     gen7_vme_mpeg2_fill_vme_batchbuffer(ctx, 
973                                          encode_state,
974                                          width_in_mbs, height_in_mbs,
975                                          MPEG2_VME_INTER_SHADER,
976                                          0,
977                                          encoder_context);
978
979     intel_batchbuffer_start_atomic(batch, 0x1000);
980     gen6_gpe_pipeline_setup(ctx, &vme_context->gpe_context, batch);
981     BEGIN_BATCH(batch, 2);
982     OUT_BATCH(batch, MI_BATCH_BUFFER_START | (2 << 6));
983     OUT_RELOC(batch,
984               vme_context->vme_batchbuffer.bo,
985               I915_GEM_DOMAIN_COMMAND, 0, 
986               0);
987     ADVANCE_BATCH(batch);
988
989     intel_batchbuffer_end_atomic(batch);
990 }
991
992 static VAStatus
993 gen7_vme_mpeg2_prepare(VADriverContextP ctx, 
994                         struct encode_state *encode_state,
995                         struct intel_encoder_context *encoder_context)
996 {
997     VAStatus vaStatus = VA_STATUS_SUCCESS;
998
999    /*Setup all the memory object*/
1000     gen7_vme_mpeg2_surface_setup(ctx, encode_state, 0, encoder_context);
1001     gen7_vme_interface_setup(ctx, encode_state, encoder_context);
1002     gen7_vme_vme_state_setup(ctx, encode_state, 0, encoder_context);
1003     gen7_vme_constant_setup(ctx, encode_state, encoder_context);
1004
1005     /*Programing media pipeline*/
1006     gen7_vme_mpeg2_pipeline_programing(ctx, encode_state, 0, encoder_context);
1007
1008     return vaStatus;
1009 }
1010
1011 static VAStatus
1012 gen7_vme_mpeg2_pipeline(VADriverContextP ctx,
1013                          VAProfile profile,
1014                          struct encode_state *encode_state,
1015                          struct intel_encoder_context *encoder_context)
1016 {
1017     struct i965_driver_data *i965 = i965_driver_data(ctx);
1018     struct gen6_vme_context *vme_context = encoder_context->vme_context;
1019     VAEncSliceParameterBufferMPEG2 *slice_param = 
1020         (VAEncSliceParameterBufferMPEG2 *)encode_state->slice_params_ext[0]->buffer;
1021     VAEncSequenceParameterBufferMPEG2 *seq_param = 
1022        (VAEncSequenceParameterBufferMPEG2 *)encode_state->seq_param_ext->buffer;
1023  
1024     /*No need of to exec VME for Intra slice */
1025     if (slice_param->is_intra_slice) {
1026          if(!vme_context->vme_output.bo) {
1027              int w_in_mbs = ALIGN(seq_param->picture_width, 16) / 16;
1028              int h_in_mbs = ALIGN(seq_param->picture_height, 16) / 16;
1029
1030              vme_context->vme_output.num_blocks = w_in_mbs * h_in_mbs;
1031              vme_context->vme_output.pitch = 16; /* in bytes, always 16 */
1032              vme_context->vme_output.size_block = INTRA_VME_OUTPUT_IN_BYTES;
1033              vme_context->vme_output.bo = dri_bo_alloc(i965->intel.bufmgr,
1034                                                        "MPEG2 VME output buffer",
1035                                                        vme_context->vme_output.num_blocks
1036                                                            * vme_context->vme_output.size_block,
1037                                                        0x1000);
1038          }
1039
1040          return VA_STATUS_SUCCESS;
1041     }
1042
1043     gen7_vme_media_init(ctx, encoder_context);
1044     gen7_vme_mpeg2_prepare(ctx, encode_state, encoder_context);
1045     gen7_vme_run(ctx, encode_state, encoder_context);
1046     gen7_vme_stop(ctx, encode_state, encoder_context);
1047
1048     return VA_STATUS_SUCCESS;
1049 }
1050
1051 static void
1052 gen7_vme_context_destroy(void *context)
1053 {
1054     struct gen6_vme_context *vme_context = context;
1055
1056     i965_gpe_context_destroy(&vme_context->gpe_context);
1057
1058     dri_bo_unreference(vme_context->vme_output.bo);
1059     vme_context->vme_output.bo = NULL;
1060
1061     dri_bo_unreference(vme_context->vme_state.bo);
1062     vme_context->vme_state.bo = NULL;
1063
1064     dri_bo_unreference(vme_context->vme_batchbuffer.bo);
1065     vme_context->vme_batchbuffer.bo = NULL;
1066
1067     if (vme_context->vme_state_message) {
1068         free(vme_context->vme_state_message);
1069         vme_context->vme_state_message = NULL;
1070     }
1071
1072     free(vme_context);
1073 }
1074
1075 Bool gen7_vme_context_init(VADriverContextP ctx, struct intel_encoder_context *encoder_context)
1076 {
1077     struct gen6_vme_context *vme_context = calloc(1, sizeof(struct gen6_vme_context));
1078     struct i965_kernel *vme_kernel_list = NULL;
1079
1080     vme_context->gpe_context.surface_state_binding_table.length =
1081               (SURFACE_STATE_PADDED_SIZE + sizeof(unsigned int)) * MAX_MEDIA_SURFACES_GEN6;
1082
1083     vme_context->gpe_context.idrt.max_entries = MAX_INTERFACE_DESC_GEN6;
1084     vme_context->gpe_context.idrt.entry_size = sizeof(struct gen6_interface_descriptor_data);
1085     vme_context->gpe_context.curbe.length = CURBE_TOTAL_DATA_LENGTH;
1086
1087     vme_context->gpe_context.vfe_state.max_num_threads = 60 - 1;
1088     vme_context->gpe_context.vfe_state.num_urb_entries = 16;
1089     vme_context->gpe_context.vfe_state.gpgpu_mode = 0;
1090     vme_context->gpe_context.vfe_state.urb_entry_size = 59 - 1;
1091     vme_context->gpe_context.vfe_state.curbe_allocation_size = CURBE_ALLOCATION_SIZE - 1;
1092
1093     gen7_vme_scoreboard_init(ctx, vme_context);
1094
1095     if(encoder_context->profile == VAProfileH264Baseline ||
1096        encoder_context->profile == VAProfileH264Main     ||
1097        encoder_context->profile == VAProfileH264High ){
1098         vme_kernel_list = gen7_vme_kernels;
1099         vme_context->video_coding_type = VIDEO_CODING_AVC;
1100         vme_context->vme_kernel_sum = AVC_VME_KERNEL_SUM; 
1101         encoder_context->vme_pipeline = gen7_vme_pipeline; 
1102     } else if (encoder_context->profile == VAProfileMPEG2Simple ||
1103                encoder_context->profile == VAProfileMPEG2Main ){
1104         vme_kernel_list = gen7_vme_mpeg2_kernels;
1105         vme_context->video_coding_type = VIDEO_CODING_MPEG2;
1106         vme_context->vme_kernel_sum = MPEG2_VME_KERNEL_SUM;
1107         encoder_context->vme_pipeline = gen7_vme_mpeg2_pipeline;
1108     } else {
1109         /* Unsupported encoding profile */
1110         assert(0);
1111     }
1112
1113     i965_gpe_load_kernels(ctx,
1114                           &vme_context->gpe_context,
1115                           vme_kernel_list,
1116                           vme_context->vme_kernel_sum);
1117
1118     vme_context->vme_surface2_setup = gen7_gpe_surface2_setup;
1119     vme_context->vme_media_rw_surface_setup = gen7_gpe_media_rw_surface_setup;
1120     vme_context->vme_buffer_suface_setup = gen7_gpe_buffer_suface_setup;
1121
1122     encoder_context->vme_context = vme_context;
1123     encoder_context->vme_context_destroy = gen7_vme_context_destroy;
1124     vme_context->vme_state_message = malloc(VME_MSG_LENGTH * sizeof(int));
1125
1126     return True;
1127 }