Encoder: directly use the surface object of the input surface
[platform/upstream/libva-intel-driver.git] / src / gen7_vme.c
1 /*
2  * Copyright © 2010-2011 Intel Corporation
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the
6  * "Software"), to deal in the Software without restriction, including
7  * without limitation the rights to use, copy, modify, merge, publish,
8  * distribute, sub license, and/or sell copies of the Software, and to
9  * permit persons to whom the Software is furnished to do so, subject to
10  * the following conditions:
11  *
12  * The above copyright notice and this permission notice (including the
13  * next paragraph) shall be included in all copies or substantial portions
14  * of the Software.
15  *
16  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
17  * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
18  * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
19  * IN NO EVENT SHALL PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR
20  * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
21  * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
22  * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
23  *
24  * Authors:
25  *    Zhao Yakui <yakui.zhao@intel.com>
26  *
27  */
28
29 #include <stdio.h>
30 #include <stdlib.h>
31 #include <stdbool.h>
32 #include <string.h>
33 #include <assert.h>
34
35 #include "intel_batchbuffer.h"
36 #include "intel_driver.h"
37
38 #include "i965_defines.h"
39 #include "i965_drv_video.h"
40 #include "i965_encoder.h"
41 #include "gen6_vme.h"
42 #include "gen6_mfc.h"
43 #ifdef SURFACE_STATE_PADDED_SIZE
44 #undef SURFACE_STATE_PADDED_SIZE
45 #endif
46
47 #define VME_MSG_LENGTH          32
48 #define SURFACE_STATE_PADDED_SIZE_0_GEN7        ALIGN(sizeof(struct gen7_surface_state), 32)
49 #define SURFACE_STATE_PADDED_SIZE_1_GEN7        ALIGN(sizeof(struct gen7_surface_state2), 32)
50 #define SURFACE_STATE_PADDED_SIZE_GEN7          MAX(SURFACE_STATE_PADDED_SIZE_0_GEN7, SURFACE_STATE_PADDED_SIZE_1_GEN7)
51
52 #define SURFACE_STATE_PADDED_SIZE               SURFACE_STATE_PADDED_SIZE_GEN7
53 #define SURFACE_STATE_OFFSET(index)             (SURFACE_STATE_PADDED_SIZE * index)
54 #define BINDING_TABLE_OFFSET(index)             (SURFACE_STATE_OFFSET(MAX_MEDIA_SURFACES_GEN6) + sizeof(unsigned int) * index)
55
56 #define CURBE_ALLOCATION_SIZE   37              /* in 256-bit */
57 #define CURBE_TOTAL_DATA_LENGTH (4 * 32)        /* in byte, it should be less than or equal to CURBE_ALLOCATION_SIZE * 32 */
58 #define CURBE_URB_ENTRY_LENGTH  4               /* in 256-bit, it should be less than or equal to CURBE_TOTAL_DATA_LENGTH / 32 */
59
/* Codec families handled by this gen7 VME implementation. */
enum VIDEO_CODING_TYPE{
    VIDEO_CODING_AVC = 0,
    VIDEO_CODING_MPEG2,
    VIDEO_CODING_SUM
};

/* Indices into gen7_vme_kernels[]; *_KERNEL_SUM is the kernel count. */
enum AVC_VME_KERNEL_TYPE{ 
    AVC_VME_INTRA_SHADER = 0,
    AVC_VME_INTER_SHADER,
    AVC_VME_BATCHBUFFER,
    AVC_VME_BINTER_SHADER,
    AVC_VME_KERNEL_SUM
};

/* Indices into gen7_vme_mpeg2_kernels[]. */
enum MPEG2_VME_KERNEL_TYPE{
    MPEG2_VME_INTER_SHADER = 0,
    MPEG2_VME_BATCHBUFFER,
    MPEG2_VME_KERNEL_SUM
};
79  
80
/* Precompiled VME kernel binaries for Ivybridge (gen7), generated from the
 * shader sources under shaders/vme/.  Each entry is a 128-bit instruction. */
static const uint32_t gen7_vme_intra_frame[][4] = {
#include "shaders/vme/intra_frame_ivb.g7b"
};

static const uint32_t gen7_vme_inter_frame[][4] = {
#include "shaders/vme/inter_frame_ivb.g7b"
};

static const uint32_t gen7_vme_batchbuffer[][4] = {
#include "shaders/vme/batchbuffer.g7b"
};

static const uint32_t gen7_vme_binter_frame[][4] = {
#include "shaders/vme/inter_bframe_ivb.g7b"
};

/* AVC kernel table; order must match enum AVC_VME_KERNEL_TYPE. */
static struct i965_kernel gen7_vme_kernels[] = {
    {
        "AVC VME Intra Frame",
        AVC_VME_INTRA_SHADER,                   /*index*/
        gen7_vme_intra_frame,                   
        sizeof(gen7_vme_intra_frame),           
        NULL
    },
    {
        "AVC VME inter Frame",
        AVC_VME_INTER_SHADER,
        gen7_vme_inter_frame,
        sizeof(gen7_vme_inter_frame),
        NULL
    },
    {
        "AVC VME BATCHBUFFER",
        AVC_VME_BATCHBUFFER,
        gen7_vme_batchbuffer,
        sizeof(gen7_vme_batchbuffer),
        NULL
    },
    {
        "AVC VME binter Frame",
        AVC_VME_BINTER_SHADER,
        gen7_vme_binter_frame,
        sizeof(gen7_vme_binter_frame),
        NULL
    }
};

static const uint32_t gen7_vme_mpeg2_inter_frame[][4] = {
#include "shaders/vme/mpeg2_inter_frame.g7b"
};

static const uint32_t gen7_vme_mpeg2_batchbuffer[][4] = {
#include "shaders/vme/batchbuffer.g7b"
};

/* MPEG-2 kernel table; order must match enum MPEG2_VME_KERNEL_TYPE. */
static struct i965_kernel gen7_vme_mpeg2_kernels[] = {
    {
        "MPEG2 VME inter Frame",
        MPEG2_VME_INTER_SHADER,
        gen7_vme_mpeg2_inter_frame,
        sizeof(gen7_vme_mpeg2_inter_frame),
        NULL
    },
    {
        "MPEG2 VME BATCHBUFFER",
        MPEG2_VME_BATCHBUFFER,
        gen7_vme_mpeg2_batchbuffer,
        sizeof(gen7_vme_mpeg2_batchbuffer),
        NULL
    },
};
152
153 /* only used for VME source surface state */
154 static void 
155 gen7_vme_source_surface_state(VADriverContextP ctx,
156                               int index,
157                               struct object_surface *obj_surface,
158                               struct intel_encoder_context *encoder_context)
159 {
160     struct gen6_vme_context *vme_context = encoder_context->vme_context;
161
162     vme_context->vme_surface2_setup(ctx,
163                                     &vme_context->gpe_context,
164                                     obj_surface,
165                                     BINDING_TABLE_OFFSET(index),
166                                     SURFACE_STATE_OFFSET(index));
167 }
168
169 static void
170 gen7_vme_media_source_surface_state(VADriverContextP ctx,
171                                     int index,
172                                     struct object_surface *obj_surface,
173                                     struct intel_encoder_context *encoder_context)
174 {
175     struct gen6_vme_context *vme_context = encoder_context->vme_context;
176
177     vme_context->vme_media_rw_surface_setup(ctx,
178                                             &vme_context->gpe_context,
179                                             obj_surface,
180                                             BINDING_TABLE_OFFSET(index),
181                                             SURFACE_STATE_OFFSET(index));
182 }
183
184 static void
185 gen7_vme_output_buffer_setup(VADriverContextP ctx,
186                              struct encode_state *encode_state,
187                              int index,
188                              struct intel_encoder_context *encoder_context)
189
190 {
191     struct i965_driver_data *i965 = i965_driver_data(ctx);
192     struct gen6_vme_context *vme_context = encoder_context->vme_context;
193     VAEncSequenceParameterBufferH264 *pSequenceParameter = (VAEncSequenceParameterBufferH264 *)encode_state->seq_param_ext->buffer;
194     VAEncSliceParameterBufferH264 *pSliceParameter = (VAEncSliceParameterBufferH264 *)encode_state->slice_params_ext[0]->buffer;
195     int is_intra = pSliceParameter->slice_type == SLICE_TYPE_I;
196     int width_in_mbs = pSequenceParameter->picture_width_in_mbs;
197     int height_in_mbs = pSequenceParameter->picture_height_in_mbs;
198
199     vme_context->vme_output.num_blocks = width_in_mbs * height_in_mbs;
200     vme_context->vme_output.pitch = 16; /* in bytes, always 16 */
201
202     if (is_intra)
203         vme_context->vme_output.size_block = INTRA_VME_OUTPUT_IN_BYTES;
204     else
205         vme_context->vme_output.size_block = INTER_VME_OUTPUT_IN_BYTES;
206
207     vme_context->vme_output.bo = dri_bo_alloc(i965->intel.bufmgr, 
208                                               "VME output buffer",
209                                               vme_context->vme_output.num_blocks * vme_context->vme_output.size_block,
210                                               0x1000);
211     assert(vme_context->vme_output.bo);
212     vme_context->vme_buffer_suface_setup(ctx,
213                                          &vme_context->gpe_context,
214                                          &vme_context->vme_output,
215                                          BINDING_TABLE_OFFSET(index),
216                                          SURFACE_STATE_OFFSET(index));
217 }
218
219 static void
220 gen7_vme_output_vme_batchbuffer_setup(VADriverContextP ctx,
221                                       struct encode_state *encode_state,
222                                       int index,
223                                       struct intel_encoder_context *encoder_context)
224
225 {
226     struct i965_driver_data *i965 = i965_driver_data(ctx);
227     struct gen6_vme_context *vme_context = encoder_context->vme_context;
228     VAEncSequenceParameterBufferH264 *pSequenceParameter = (VAEncSequenceParameterBufferH264 *)encode_state->seq_param_ext->buffer;
229     int width_in_mbs = pSequenceParameter->picture_width_in_mbs;
230     int height_in_mbs = pSequenceParameter->picture_height_in_mbs;
231
232     vme_context->vme_batchbuffer.num_blocks = width_in_mbs * height_in_mbs + 1;
233     vme_context->vme_batchbuffer.size_block = 32; /* 2 OWORDs */
234     vme_context->vme_batchbuffer.pitch = 16;
235     vme_context->vme_batchbuffer.bo = dri_bo_alloc(i965->intel.bufmgr, 
236                                                    "VME batchbuffer",
237                                                    vme_context->vme_batchbuffer.num_blocks * vme_context->vme_batchbuffer.size_block,
238                                                    0x1000);
239     vme_context->vme_buffer_suface_setup(ctx,
240                                          &vme_context->gpe_context,
241                                          &vme_context->vme_batchbuffer,
242                                          BINDING_TABLE_OFFSET(index),
243                                          SURFACE_STATE_OFFSET(index));
244 }
245
246 static VAStatus
247 gen7_vme_surface_setup(VADriverContextP ctx, 
248                        struct encode_state *encode_state,
249                        int is_intra,
250                        struct intel_encoder_context *encoder_context)
251 {
252     struct i965_driver_data *i965 = i965_driver_data(ctx);
253     struct object_surface *obj_surface;
254     VAEncPictureParameterBufferH264 *pPicParameter = (VAEncPictureParameterBufferH264 *)encode_state->pic_param_ext->buffer;
255
256     /*Setup surfaces state*/
257     /* current picture for encoding */
258     obj_surface = encode_state->input_yuv_object;
259     gen7_vme_source_surface_state(ctx, 0, obj_surface, encoder_context);
260     gen7_vme_media_source_surface_state(ctx, 4, obj_surface, encoder_context);
261
262     if (!is_intra) {
263         /* reference 0 */
264         obj_surface = SURFACE(pPicParameter->ReferenceFrames[0].picture_id);
265         assert(obj_surface);
266         if ( obj_surface->bo != NULL)
267             gen7_vme_source_surface_state(ctx, 1, obj_surface, encoder_context);
268
269         /* reference 1 */
270         obj_surface = SURFACE(pPicParameter->ReferenceFrames[1].picture_id);
271         assert(obj_surface);
272         if ( obj_surface->bo != NULL ) 
273             gen7_vme_source_surface_state(ctx, 2, obj_surface, encoder_context);
274     }
275
276     /* VME output */
277     gen7_vme_output_buffer_setup(ctx, encode_state, 3, encoder_context);
278     gen7_vme_output_vme_batchbuffer_setup(ctx, encode_state, 5, encoder_context);
279
280     return VA_STATUS_SUCCESS;
281 }
282
/* Fill the interface descriptor remap table (IDRT): one 32-byte descriptor
 * per VME kernel, each pointing at its kernel binary and at the shared VME
 * state buffer (exposed through the sampler-state pointer field).  The
 * relocation offsets must match the field layout of
 * struct gen6_interface_descriptor_data exactly.
 * Always returns VA_STATUS_SUCCESS. */
static VAStatus gen7_vme_interface_setup(VADriverContextP ctx, 
                                         struct encode_state *encode_state,
                                         struct intel_encoder_context *encoder_context)
{
    struct gen6_vme_context *vme_context = encoder_context->vme_context;
    struct gen6_interface_descriptor_data *desc;   
    int i;
    dri_bo *bo;

    bo = vme_context->gpe_context.idrt.bo;
    dri_bo_map(bo, 1);          /* map writable */
    assert(bo->virtual);
    desc = bo->virtual;

    for (i = 0; i < vme_context->vme_kernel_sum; i++) {
        struct i965_kernel *kernel;
        kernel = &vme_context->gpe_context.kernels[i];
        assert(sizeof(*desc) == 32);
        /*Setup the descritor table*/
        memset(desc, 0, sizeof(*desc));
        desc->desc0.kernel_start_pointer = (kernel->bo->offset >> 6);
        desc->desc2.sampler_count = 1; /* FIXME: */
        desc->desc2.sampler_state_pointer = (vme_context->vme_state.bo->offset >> 5);
        desc->desc3.binding_table_entry_count = 1; /* FIXME: */
        desc->desc3.binding_table_pointer = (BINDING_TABLE_OFFSET(0) >> 5);
        desc->desc4.constant_urb_entry_read_offset = 0;
        desc->desc4.constant_urb_entry_read_length = CURBE_URB_ENTRY_LENGTH;
                
        /*kernel start: relocate desc0 so the GPU sees the kernel's final
         * GTT offset rather than the presumed one written above */
        dri_bo_emit_reloc(bo,   
                          I915_GEM_DOMAIN_INSTRUCTION, 0,
                          0,
                          i * sizeof(*desc) + offsetof(struct gen6_interface_descriptor_data, desc0),
                          kernel->bo);
        /*Sampler State(VME state pointer): delta (1 << 2) encodes the
         * sampler_count bit written into desc2 above */
        dri_bo_emit_reloc(bo,
                          I915_GEM_DOMAIN_INSTRUCTION, 0,
                          (1 << 2),                                                                     //
                          i * sizeof(*desc) + offsetof(struct gen6_interface_descriptor_data, desc2),
                          vme_context->vme_state.bo);
        desc++;
    }
    dri_bo_unmap(bo);

    return VA_STATUS_SUCCESS;
}
329
330 static VAStatus gen7_vme_constant_setup(VADriverContextP ctx, 
331                                         struct encode_state *encode_state,
332                                         struct intel_encoder_context *encoder_context)
333 {
334     struct gen6_vme_context *vme_context = encoder_context->vme_context;
335     // unsigned char *constant_buffer;
336     unsigned int *vme_state_message;
337     int mv_num = 32;
338     if (vme_context->h264_level >= 30) {
339         mv_num = 16;
340         if (vme_context->h264_level >= 31)
341                 mv_num = 8;
342     } 
343
344     dri_bo_map(vme_context->gpe_context.curbe.bo, 1);
345     assert(vme_context->gpe_context.curbe.bo->virtual);
346     // constant_buffer = vme_context->curbe.bo->virtual;
347     vme_state_message = (unsigned int *)vme_context->gpe_context.curbe.bo->virtual;
348     vme_state_message[31] = mv_num;
349         
350     /*TODO copy buffer into CURB*/
351
352     dri_bo_unmap( vme_context->gpe_context.curbe.bo);
353
354     return VA_STATUS_SUCCESS;
355 }
356
/* Packed intra MB mode cost, one entry per QP value 0..51; written into
 * VME state dword 16 by gen7_vme_state_setup_fixup(). */
static const unsigned int intra_mb_mode_cost_table[] = {
    0x31110001, // for qp0
    0x09110001, // for qp1
    0x15030001, // for qp2
    0x0b030001, // for qp3
    0x0d030011, // for qp4
    0x17210011, // for qp5
    0x41210011, // for qp6
    0x19210011, // for qp7
    0x25050003, // for qp8
    0x1b130003, // for qp9
    0x1d130003, // for qp10
    0x27070021, // for qp11
    0x51310021, // for qp12
    0x29090021, // for qp13
    0x35150005, // for qp14
    0x2b0b0013, // for qp15
    0x2d0d0013, // for qp16
    0x37170007, // for qp17
    0x61410031, // for qp18
    0x39190009, // for qp19
    0x45250015, // for qp20
    0x3b1b000b, // for qp21
    0x3d1d000d, // for qp22
    0x47270017, // for qp23
    0x71510041, // for qp24 ! center for qp=0..30
    0x49290019, // for qp25
    0x55350025, // for qp26
    0x4b2b001b, // for qp27
    0x4d2d001d, // for qp28
    0x57370027, // for qp29
    0x81610051, // for qp30
    0x57270017, // for qp31
    0x81510041, // for qp32 ! center for qp=31..51
    0x59290019, // for qp33
    0x65350025, // for qp34
    0x5b2b001b, // for qp35
    0x5d2d001d, // for qp36
    0x67370027, // for qp37
    0x91610051, // for qp38
    0x69390029, // for qp39
    0x75450035, // for qp40
    0x6b3b002b, // for qp41
    0x6d3d002d, // for qp42
    0x77470037, // for qp43
    0xa1710061, // for qp44
    0x79490039, // for qp45
    0x85550045, // for qp46
    0x7b4b003b, // for qp47
    0x7d4d003d, // for qp48
    0x87570047, // for qp49
    0xb1810071, // for qp50
    0x89590049  // for qp51
};
411
412 static void gen7_vme_state_setup_fixup(VADriverContextP ctx,
413                                        struct encode_state *encode_state,
414                                        struct intel_encoder_context *encoder_context,
415                                        unsigned int *vme_state_message)
416 {
417     struct gen6_mfc_context *mfc_context = encoder_context->mfc_context;
418     VAEncPictureParameterBufferH264 *pic_param = (VAEncPictureParameterBufferH264 *)encode_state->pic_param_ext->buffer;
419     VAEncSliceParameterBufferH264 *slice_param = (VAEncSliceParameterBufferH264 *)encode_state->slice_params_ext[0]->buffer;
420
421     if (slice_param->slice_type != SLICE_TYPE_I &&
422         slice_param->slice_type != SLICE_TYPE_SI)
423         return;
424     if (encoder_context->rate_control_mode == VA_RC_CQP)
425         vme_state_message[16] = intra_mb_mode_cost_table[pic_param->pic_init_qp + slice_param->slice_qp_delta];
426     else
427         vme_state_message[16] = intra_mb_mode_cost_table[mfc_context->bit_rate_control_context[SLICE_TYPE_I].QpPrimeY];
428 }
429
430 static VAStatus gen7_vme_avc_state_setup(VADriverContextP ctx,
431                                          struct encode_state *encode_state,
432                                          int is_intra,
433                                          struct intel_encoder_context *encoder_context)
434 {
435     struct gen6_vme_context *vme_context = encoder_context->vme_context;
436     unsigned int *vme_state_message;
437         unsigned int *mb_cost_table;
438     int i;
439     VAEncSliceParameterBufferH264 *slice_param = (VAEncSliceParameterBufferH264 *)encode_state->slice_params_ext[0]->buffer;
440
441         mb_cost_table = (unsigned int *)vme_context->vme_state_message;
442     //building VME state message
443     dri_bo_map(vme_context->vme_state.bo, 1);
444     assert(vme_context->vme_state.bo->virtual);
445     vme_state_message = (unsigned int *)vme_context->vme_state.bo->virtual;
446
447     if ((slice_param->slice_type == SLICE_TYPE_P) ||
448         (slice_param->slice_type == SLICE_TYPE_SP)) {
449             vme_state_message[0] = 0x01010101;
450             vme_state_message[1] = 0x10010101;
451             vme_state_message[2] = 0x0F0F0F0F;
452             vme_state_message[3] = 0x100F0F0F;
453             vme_state_message[4] = 0x01010101;
454             vme_state_message[5] = 0x10010101;
455             vme_state_message[6] = 0x0F0F0F0F;
456             vme_state_message[7] = 0x100F0F0F;
457             vme_state_message[8] = 0x01010101;
458             vme_state_message[9] = 0x10010101;
459             vme_state_message[10] = 0x0F0F0F0F;
460             vme_state_message[11] = 0x000F0F0F;
461             vme_state_message[12] = 0x00;
462             vme_state_message[13] = 0x00;
463         } else {
464             vme_state_message[0] = 0x10010101;
465             vme_state_message[1] = 0x100F0F0F;
466             vme_state_message[2] = 0x10010101;
467             vme_state_message[3] = 0x000F0F0F;
468             vme_state_message[4] = 0;
469             vme_state_message[5] = 0;
470             vme_state_message[6] = 0;
471             vme_state_message[7] = 0;
472             vme_state_message[8] = 0;
473             vme_state_message[9] = 0;
474             vme_state_message[10] = 0;
475             vme_state_message[11] = 0;
476             vme_state_message[12] = 0;
477             vme_state_message[13] = 0;
478         }
479
480     vme_state_message[14] = (mb_cost_table[2] & 0xFFFF);
481     vme_state_message[15] = 0;
482     vme_state_message[16] = mb_cost_table[0];
483     vme_state_message[17] = mb_cost_table[1];
484     vme_state_message[18] = mb_cost_table[3];
485     vme_state_message[19] = mb_cost_table[4];
486
487     for(i = 20; i < 32; i++) {
488         vme_state_message[i] = 0;
489     }
490
491     dri_bo_unmap( vme_context->vme_state.bo);
492     return VA_STATUS_SUCCESS;
493 }
494
495 static VAStatus gen7_vme_vme_state_setup(VADriverContextP ctx,
496                                          struct encode_state *encode_state,
497                                          int is_intra,
498                                          struct intel_encoder_context *encoder_context)
499 {
500     struct gen6_vme_context *vme_context = encoder_context->vme_context;
501     unsigned int *vme_state_message;
502     int i;
503         
504     //building VME state message
505     dri_bo_map(vme_context->vme_state.bo, 1);
506     assert(vme_context->vme_state.bo->virtual);
507     vme_state_message = (unsigned int *)vme_context->vme_state.bo->virtual;
508
509     vme_state_message[0] = 0x01010101;
510     vme_state_message[1] = 0x10010101;
511     vme_state_message[2] = 0x0F0F0F0F;
512     vme_state_message[3] = 0x100F0F0F;
513     vme_state_message[4] = 0x01010101;
514     vme_state_message[5] = 0x10010101;
515     vme_state_message[6] = 0x0F0F0F0F;
516     vme_state_message[7] = 0x100F0F0F;
517     vme_state_message[8] = 0x01010101;
518     vme_state_message[9] = 0x10010101;
519     vme_state_message[10] = 0x0F0F0F0F;
520     vme_state_message[11] = 0x000F0F0F;
521     vme_state_message[12] = 0x00;
522     vme_state_message[13] = 0x00;
523
524     vme_state_message[14] = 0x4a4a;
525     vme_state_message[15] = 0x0;
526     vme_state_message[16] = 0x4a4a4a4a;
527     vme_state_message[17] = 0x4a4a4a4a;
528     vme_state_message[18] = 0x21110100;
529     vme_state_message[19] = 0x61514131;
530
531     for(i = 20; i < 32; i++) {
532         vme_state_message[i] = 0;
533     }
534     //vme_state_message[16] = 0x42424242;                       //cost function LUT set 0 for Intra
535
536     gen7_vme_state_setup_fixup(ctx, encode_state, encoder_context, vme_state_message);
537
538     dri_bo_unmap( vme_context->vme_state.bo);
539     return VA_STATUS_SUCCESS;
540 }
541
/*
 * Fill the second-level batchbuffer with one MEDIA_OBJECT command (8 dwords)
 * per macroblock of every slice.  The inline data carries the MB position
 * and the intra-prediction neighbor availability flags (A/E left, B above,
 * C above-right, D above-left), masked at slice boundaries.
 *
 * NOTE(review): slice parameters are read through the MPEG-2 struct, but
 * only macroblock_address/num_macroblocks are accessed, which the H.264
 * slice parameter buffer appears to lay out identically -- confirm against
 * the VA-API headers if these structs ever diverge.
 */
static void
gen7_vme_fill_vme_batchbuffer(VADriverContextP ctx, 
                              struct encode_state *encode_state,
                              int mb_width, int mb_height,
                              int kernel,
                              int transform_8x8_mode_flag,
                              struct intel_encoder_context *encoder_context)
{
    struct gen6_vme_context *vme_context = encoder_context->vme_context;
    int mb_x = 0, mb_y = 0;
    int i, s, j;
    unsigned int *command_ptr;


    dri_bo_map(vme_context->vme_batchbuffer.bo, 1);
    command_ptr = vme_context->vme_batchbuffer.bo->virtual;

    for (s = 0; s < encode_state->num_slice_params_ext; s++) {
        VAEncSliceParameterBufferMPEG2 *slice_param = (VAEncSliceParameterBufferMPEG2 *)encode_state->slice_params_ext[s]->buffer;

        for (j = 0; j < encode_state->slice_params_ext[s]->num_elements; j++) {
            int slice_mb_begin = slice_param->macroblock_address;
            int slice_mb_number = slice_param->num_macroblocks;
            unsigned int mb_intra_ub;
            /* non-zero when the slice starts mid-row */
            int slice_mb_x = slice_param->macroblock_address % mb_width;

            for (i = 0; i < slice_mb_number;) {
                int mb_count = i + slice_mb_begin;    

                mb_x = mb_count % mb_width;
                mb_y = mb_count / mb_width;
                mb_intra_ub = 0;

                /* frame-level neighbor availability */
                if (mb_x != 0) {
                    mb_intra_ub |= INTRA_PRED_AVAIL_FLAG_AE;
                }

                if (mb_y != 0) {
                    mb_intra_ub |= INTRA_PRED_AVAIL_FLAG_B;

                    if (mb_x != 0)
                        mb_intra_ub |= INTRA_PRED_AVAIL_FLAG_D;

                    if (mb_x != (mb_width -1))
                        mb_intra_ub |= INTRA_PRED_AVAIL_FLAG_C;
                }

                /* first MB row of the slice: neighbors above belong to the
                 * previous slice and must not be used for prediction */
                if (i < mb_width) {
                    if (i == 0)
                        mb_intra_ub &= ~(INTRA_PRED_AVAIL_FLAG_AE);

                    mb_intra_ub &= ~(INTRA_PRED_AVAIL_FLAG_BCD_MASK);

                    if ((i == (mb_width - 1)) && slice_mb_x) {
                        mb_intra_ub |= INTRA_PRED_AVAIL_FLAG_C;
                    }
                }
                
                /* second row of a mid-row slice: above-left is outside */
                if ((i == mb_width) && slice_mb_x) {
                    mb_intra_ub &= ~(INTRA_PRED_AVAIL_FLAG_D);
                }

                /* MEDIA_OBJECT: 6 command dwords + 2 inline-data dwords */
                *command_ptr++ = (CMD_MEDIA_OBJECT | (8 - 2));
                *command_ptr++ = kernel;
                *command_ptr++ = 0;
                *command_ptr++ = 0;
                *command_ptr++ = 0;
                *command_ptr++ = 0;
   
                /*inline data */
                *command_ptr++ = (mb_width << 16 | mb_y << 8 | mb_x);
                *command_ptr++ = ( (1 << 16) | transform_8x8_mode_flag | (mb_intra_ub << 8));

                i += 1;
            }

            slice_param++;
        }
    }

    *command_ptr++ = 0;
    *command_ptr++ = MI_BATCH_BUFFER_END;

    dri_bo_unmap(vme_context->vme_batchbuffer.bo);
}
627
628
629 static void gen7_vme_media_init(VADriverContextP ctx, struct intel_encoder_context *encoder_context)
630 {
631     struct i965_driver_data *i965 = i965_driver_data(ctx);
632     struct gen6_vme_context *vme_context = encoder_context->vme_context;
633     dri_bo *bo;
634
635     i965_gpe_context_init(ctx, &vme_context->gpe_context);
636
637     /* VME output buffer */
638     dri_bo_unreference(vme_context->vme_output.bo);
639     vme_context->vme_output.bo = NULL;
640
641     dri_bo_unreference(vme_context->vme_batchbuffer.bo);
642     vme_context->vme_batchbuffer.bo = NULL;
643
644     /* VME state */
645     dri_bo_unreference(vme_context->vme_state.bo);
646     bo = dri_bo_alloc(i965->intel.bufmgr,
647                       "Buffer",
648                       1024*16, 64);
649     assert(bo);
650     vme_context->vme_state.bo = bo;
651 }
652
653 static void gen7_vme_pipeline_programing(VADriverContextP ctx, 
654                                          struct encode_state *encode_state,
655                                          struct intel_encoder_context *encoder_context)
656 {
657     struct gen6_vme_context *vme_context = encoder_context->vme_context;
658     struct intel_batchbuffer *batch = encoder_context->base.batch;
659     VAEncPictureParameterBufferH264 *pPicParameter = (VAEncPictureParameterBufferH264 *)encode_state->pic_param_ext->buffer;
660     VAEncSliceParameterBufferH264 *pSliceParameter = (VAEncSliceParameterBufferH264 *)encode_state->slice_params_ext[0]->buffer;
661     VAEncSequenceParameterBufferH264 *pSequenceParameter = (VAEncSequenceParameterBufferH264 *)encode_state->seq_param_ext->buffer;
662     int width_in_mbs = pSequenceParameter->picture_width_in_mbs;
663     int height_in_mbs = pSequenceParameter->picture_height_in_mbs;
664     int s;
665     bool allow_hwscore = true;
666     int kernel_shader;
667
668     for (s = 0; s < encode_state->num_slice_params_ext; s++) {
669         pSliceParameter = (VAEncSliceParameterBufferH264 *)encode_state->slice_params_ext[s]->buffer; 
670         if ((pSliceParameter->macroblock_address % width_in_mbs)) {
671                 allow_hwscore = false;
672                 break;
673         }
674     }
675
676     if ((pSliceParameter->slice_type == SLICE_TYPE_I) ||
677         (pSliceParameter->slice_type == SLICE_TYPE_I)) {
678         kernel_shader = AVC_VME_INTRA_SHADER;
679     } else if ((pSliceParameter->slice_type == SLICE_TYPE_P) ||
680         (pSliceParameter->slice_type == SLICE_TYPE_SP)) {
681         kernel_shader = AVC_VME_INTER_SHADER;
682     } else {
683         kernel_shader = AVC_VME_BINTER_SHADER;
684         if (!allow_hwscore)
685              kernel_shader = AVC_VME_INTER_SHADER;
686     }
687
688     if (allow_hwscore)
689         gen7_vme_walker_fill_vme_batchbuffer(ctx, 
690                                   encode_state,
691                                   width_in_mbs, height_in_mbs,
692                                   kernel_shader,
693                                   pPicParameter->pic_fields.bits.transform_8x8_mode_flag,
694                                   encoder_context);
695         
696     else
697         gen7_vme_fill_vme_batchbuffer(ctx, 
698                                   encode_state,
699                                   width_in_mbs, height_in_mbs,
700                                   kernel_shader, 
701                                   pPicParameter->pic_fields.bits.transform_8x8_mode_flag,
702                                   encoder_context);
703
704     intel_batchbuffer_start_atomic(batch, 0x1000);
705     gen6_gpe_pipeline_setup(ctx, &vme_context->gpe_context, batch);
706     BEGIN_BATCH(batch, 2);
707     OUT_BATCH(batch, MI_BATCH_BUFFER_START | (2 << 6));
708     OUT_RELOC(batch,
709               vme_context->vme_batchbuffer.bo,
710               I915_GEM_DOMAIN_COMMAND, 0, 
711               0);
712     ADVANCE_BATCH(batch);
713
714     intel_batchbuffer_end_atomic(batch);        
715 }
716
717 static VAStatus gen7_vme_prepare(VADriverContextP ctx, 
718                                  struct encode_state *encode_state,
719                                  struct intel_encoder_context *encoder_context)
720 {
721     VAStatus vaStatus = VA_STATUS_SUCCESS;
722     VAEncSliceParameterBufferH264 *pSliceParameter = (VAEncSliceParameterBufferH264 *)encode_state->slice_params_ext[0]->buffer;
723     int is_intra = pSliceParameter->slice_type == SLICE_TYPE_I;
724     VAEncSequenceParameterBufferH264 *pSequenceParameter = (VAEncSequenceParameterBufferH264 *)encode_state->seq_param_ext->buffer;
725     struct gen6_vme_context *vme_context = encoder_context->vme_context;
726
727     if (!vme_context->h264_level ||
728                 (vme_context->h264_level != pSequenceParameter->level_idc)) {
729         vme_context->h264_level = pSequenceParameter->level_idc;        
730     }
731         
732     intel_vme_update_mbmv_cost(ctx, encode_state, encoder_context);
733     /*Setup all the memory object*/
734     gen7_vme_surface_setup(ctx, encode_state, is_intra, encoder_context);
735     gen7_vme_interface_setup(ctx, encode_state, encoder_context);
736     gen7_vme_constant_setup(ctx, encode_state, encoder_context);
737     gen7_vme_avc_state_setup(ctx, encode_state, is_intra, encoder_context);
738
739     /*Programing media pipeline*/
740     gen7_vme_pipeline_programing(ctx, encode_state, encoder_context);
741
742     return vaStatus;
743 }
744
745 static VAStatus gen7_vme_run(VADriverContextP ctx, 
746                              struct encode_state *encode_state,
747                              struct intel_encoder_context *encoder_context)
748 {
749     struct intel_batchbuffer *batch = encoder_context->base.batch;
750
751     intel_batchbuffer_flush(batch);
752
753     return VA_STATUS_SUCCESS;
754 }
755
756 static VAStatus gen7_vme_stop(VADriverContextP ctx, 
757                               struct encode_state *encode_state,
758                               struct intel_encoder_context *encoder_context)
759 {
760     return VA_STATUS_SUCCESS;
761 }
762
763 static VAStatus
764 gen7_vme_pipeline(VADriverContextP ctx,
765                   VAProfile profile,
766                   struct encode_state *encode_state,
767                   struct intel_encoder_context *encoder_context)
768 {
769     gen7_vme_media_init(ctx, encoder_context);
770     gen7_vme_prepare(ctx, encode_state, encoder_context);
771     gen7_vme_run(ctx, encode_state, encoder_context);
772     gen7_vme_stop(ctx, encode_state, encoder_context);
773
774     return VA_STATUS_SUCCESS;
775 }
776
777 static void
778 gen7_vme_mpeg2_output_buffer_setup(VADriverContextP ctx,
779                                     struct encode_state *encode_state,
780                                     int index,
781                                     int is_intra,
782                                     struct intel_encoder_context *encoder_context)
783
784 {
785     struct i965_driver_data *i965 = i965_driver_data(ctx);
786     struct gen6_vme_context *vme_context = encoder_context->vme_context;
787     VAEncSequenceParameterBufferMPEG2 *seq_param = (VAEncSequenceParameterBufferMPEG2 *)encode_state->seq_param_ext->buffer;
788     int width_in_mbs = ALIGN(seq_param->picture_width, 16) / 16;
789     int height_in_mbs = ALIGN(seq_param->picture_height, 16) / 16;
790
791     vme_context->vme_output.num_blocks = width_in_mbs * height_in_mbs;
792     vme_context->vme_output.pitch = 16; /* in bytes, always 16 */
793
794     if (is_intra)
795         vme_context->vme_output.size_block = INTRA_VME_OUTPUT_IN_BYTES;
796     else
797         vme_context->vme_output.size_block = INTER_VME_OUTPUT_IN_BYTES;
798
799     vme_context->vme_output.bo = dri_bo_alloc(i965->intel.bufmgr,
800                                               "VME output buffer",
801                                               vme_context->vme_output.num_blocks * vme_context->vme_output.size_block,
802                                               0x1000);
803     assert(vme_context->vme_output.bo);
804     vme_context->vme_buffer_suface_setup(ctx,
805                                          &vme_context->gpe_context,
806                                          &vme_context->vme_output,
807                                          BINDING_TABLE_OFFSET(index),
808                                          SURFACE_STATE_OFFSET(index));
809 }
810
811 static void
812 gen7_vme_mpeg2_output_vme_batchbuffer_setup(VADriverContextP ctx,
813                                              struct encode_state *encode_state,
814                                              int index,
815                                              struct intel_encoder_context *encoder_context)
816
817 {
818     struct i965_driver_data *i965 = i965_driver_data(ctx);
819     struct gen6_vme_context *vme_context = encoder_context->vme_context;
820     VAEncSequenceParameterBufferMPEG2 *seq_param = (VAEncSequenceParameterBufferMPEG2 *)encode_state->seq_param_ext->buffer;
821     int width_in_mbs = ALIGN(seq_param->picture_width, 16) / 16;
822     int height_in_mbs = ALIGN(seq_param->picture_height, 16) / 16;
823
824     vme_context->vme_batchbuffer.num_blocks = width_in_mbs * height_in_mbs + 1;
825     vme_context->vme_batchbuffer.size_block = 32; /* 4 OWORDs */
826     vme_context->vme_batchbuffer.pitch = 16;
827     vme_context->vme_batchbuffer.bo = dri_bo_alloc(i965->intel.bufmgr, 
828                                                    "VME batchbuffer",
829                                                    vme_context->vme_batchbuffer.num_blocks * vme_context->vme_batchbuffer.size_block,
830                                                    0x1000);
831     vme_context->vme_buffer_suface_setup(ctx,
832                                          &vme_context->gpe_context,
833                                          &vme_context->vme_batchbuffer,
834                                          BINDING_TABLE_OFFSET(index),
835                                          SURFACE_STATE_OFFSET(index));
836 }
837
838 static VAStatus
839 gen7_vme_mpeg2_surface_setup(VADriverContextP ctx, 
840                               struct encode_state *encode_state,
841                               int is_intra,
842                               struct intel_encoder_context *encoder_context)
843 {
844     struct i965_driver_data *i965 = i965_driver_data(ctx);
845     struct object_surface *obj_surface;
846     VAEncPictureParameterBufferMPEG2 *pic_param = (VAEncPictureParameterBufferMPEG2 *)encode_state->pic_param_ext->buffer;
847
848     /*Setup surfaces state*/
849     /* current picture for encoding */
850     obj_surface = encode_state->input_yuv_object;
851     gen7_vme_source_surface_state(ctx, 0, obj_surface, encoder_context);
852     gen7_vme_media_source_surface_state(ctx, 4, obj_surface, encoder_context);
853
854     if (!is_intra) {
855         /* reference 0 */
856         obj_surface = SURFACE(pic_param->forward_reference_picture);
857         assert(obj_surface);
858         if ( obj_surface->bo != NULL)
859             gen7_vme_source_surface_state(ctx, 1, obj_surface, encoder_context);
860
861         /* reference 1 */
862         obj_surface = SURFACE(pic_param->backward_reference_picture);
863         if (obj_surface && obj_surface->bo != NULL) 
864             gen7_vme_source_surface_state(ctx, 2, obj_surface, encoder_context);
865     }
866
867     /* VME output */
868     gen7_vme_mpeg2_output_buffer_setup(ctx, encode_state, 3, is_intra, encoder_context);
869     gen7_vme_mpeg2_output_vme_batchbuffer_setup(ctx, encode_state, 5, encoder_context);
870
871     return VA_STATUS_SUCCESS;
872 }
873
/*
 * Fill the chained VME batchbuffer with one MEDIA_OBJECT command per run of
 * macroblocks, walking every MPEG-2 slice of every slice-parameter buffer.
 *
 * Each command is 8 DWORDs: the MEDIA_OBJECT header (length field 8 - 2),
 * the kernel index, four zero DWORDs, then two DWORDs of inline data that
 * the EU kernel reads (MB position/stride and run length/flags).
 * The buffer is terminated with a NOOP DWORD plus MI_BATCH_BUFFER_END.
 *
 * transform_8x8_mode_flag is passed through to the kernel in the inline
 * data; kernel selects which VME shader the commands dispatch.
 */
static void
gen7_vme_mpeg2_fill_vme_batchbuffer(VADriverContextP ctx,
                                     struct encode_state *encode_state,
                                     int mb_width, int mb_height,
                                     int kernel,
                                     int transform_8x8_mode_flag,
                                     struct intel_encoder_context *encoder_context)
{
    struct gen6_vme_context *vme_context = encoder_context->vme_context;
    int number_mb_cmds;
    int mb_x = 0, mb_y = 0;
    int i, s, j;
    unsigned int *command_ptr;

    /* Map writable (second arg non-zero) and build the commands with the CPU. */
    dri_bo_map(vme_context->vme_batchbuffer.bo, 1);
    command_ptr = vme_context->vme_batchbuffer.bo->virtual;

    for (s = 0; s < encode_state->num_slice_params_ext; s++) {
        VAEncSliceParameterBufferMPEG2 *slice_param = (VAEncSliceParameterBufferMPEG2 *)encode_state->slice_params_ext[s]->buffer;

        /* Each buffer may hold several consecutive slice headers. */
        for (j = 0; j < encode_state->slice_params_ext[s]->num_elements; j++) {
            int slice_mb_begin = slice_param->macroblock_address;
            int slice_mb_number = slice_param->num_macroblocks;

            for (i = 0; i < slice_mb_number;) {
                int mb_count = i + slice_mb_begin;

                /* Convert the linear MB index to (x, y) in MB units. */
                mb_x = mb_count % mb_width;
                mb_y = mb_count / mb_width;

                /* First command of a slice covers one MB row; later commands
                 * cover up to 128 MBs each.
                 * NOTE(review): if a slice has fewer than mb_width MBs, the
                 * first command appears to overshoot the slice — confirm
                 * slices here are always whole MB rows. */
                if( i == 0) {
                    number_mb_cmds = mb_width;
                } else if ((i + 128) <= slice_mb_number) {
                    number_mb_cmds = 128;
                } else {
                    number_mb_cmds = slice_mb_number - i;
                }

                /* MEDIA_OBJECT header + interface descriptor (kernel) index,
                 * followed by four unused DWORDs. */
                *command_ptr++ = (CMD_MEDIA_OBJECT | (8 - 2));
                *command_ptr++ = kernel;
                *command_ptr++ = 0;
                *command_ptr++ = 0;
                *command_ptr++ = 0;
                *command_ptr++ = 0;

                /* inline data:
                 * DW0: mb_width[31:16] | mb_y[15:8] | mb_x[7:0]
                 * DW1: run length[31:16] | first-command-of-slice flag (bit 1)
                 *      | transform_8x8_mode_flag (bit 0) */
                *command_ptr++ = (mb_width << 16 | mb_y << 8 | mb_x);
                *command_ptr++ = ( (number_mb_cmds << 16) | transform_8x8_mode_flag | ((i == 0) << 1));

                i += number_mb_cmds;
            }

            slice_param++;
        }
    }

    /* Pad with a NOOP DWORD and terminate the chained batch. */
    *command_ptr++ = 0;
    *command_ptr++ = MI_BATCH_BUFFER_END;

    dri_bo_unmap(vme_context->vme_batchbuffer.bo);
}
935
936 static void
937 gen7_vme_mpeg2_pipeline_programing(VADriverContextP ctx, 
938                                     struct encode_state *encode_state,
939                                     int is_intra,
940                                     struct intel_encoder_context *encoder_context)
941 {
942     struct gen6_vme_context *vme_context = encoder_context->vme_context;
943     struct intel_batchbuffer *batch = encoder_context->base.batch;
944     VAEncSequenceParameterBufferMPEG2 *seq_param = (VAEncSequenceParameterBufferMPEG2 *)encode_state->seq_param_ext->buffer;
945     int width_in_mbs = ALIGN(seq_param->picture_width, 16) / 16;
946     int height_in_mbs = ALIGN(seq_param->picture_height, 16) / 16;
947
948     gen7_vme_mpeg2_fill_vme_batchbuffer(ctx, 
949                                          encode_state,
950                                          width_in_mbs, height_in_mbs,
951                                          MPEG2_VME_INTER_SHADER,
952                                          0,
953                                          encoder_context);
954
955     intel_batchbuffer_start_atomic(batch, 0x1000);
956     gen6_gpe_pipeline_setup(ctx, &vme_context->gpe_context, batch);
957     BEGIN_BATCH(batch, 2);
958     OUT_BATCH(batch, MI_BATCH_BUFFER_START | (2 << 6));
959     OUT_RELOC(batch,
960               vme_context->vme_batchbuffer.bo,
961               I915_GEM_DOMAIN_COMMAND, 0, 
962               0);
963     ADVANCE_BATCH(batch);
964
965     intel_batchbuffer_end_atomic(batch);
966 }
967
968 static VAStatus
969 gen7_vme_mpeg2_prepare(VADriverContextP ctx, 
970                         struct encode_state *encode_state,
971                         struct intel_encoder_context *encoder_context)
972 {
973     VAStatus vaStatus = VA_STATUS_SUCCESS;
974
975    /*Setup all the memory object*/
976     gen7_vme_mpeg2_surface_setup(ctx, encode_state, 0, encoder_context);
977     gen7_vme_interface_setup(ctx, encode_state, encoder_context);
978     gen7_vme_vme_state_setup(ctx, encode_state, 0, encoder_context);
979     gen7_vme_constant_setup(ctx, encode_state, encoder_context);
980
981     /*Programing media pipeline*/
982     gen7_vme_mpeg2_pipeline_programing(ctx, encode_state, 0, encoder_context);
983
984     return vaStatus;
985 }
986
987 static VAStatus
988 gen7_vme_mpeg2_pipeline(VADriverContextP ctx,
989                          VAProfile profile,
990                          struct encode_state *encode_state,
991                          struct intel_encoder_context *encoder_context)
992 {
993     struct i965_driver_data *i965 = i965_driver_data(ctx);
994     struct gen6_vme_context *vme_context = encoder_context->vme_context;
995     VAEncSliceParameterBufferMPEG2 *slice_param = 
996         (VAEncSliceParameterBufferMPEG2 *)encode_state->slice_params_ext[0]->buffer;
997     VAEncSequenceParameterBufferMPEG2 *seq_param = 
998        (VAEncSequenceParameterBufferMPEG2 *)encode_state->seq_param_ext->buffer;
999  
1000     /*No need of to exec VME for Intra slice */
1001     if (slice_param->is_intra_slice) {
1002          if(!vme_context->vme_output.bo) {
1003              int w_in_mbs = ALIGN(seq_param->picture_width, 16) / 16;
1004              int h_in_mbs = ALIGN(seq_param->picture_height, 16) / 16;
1005
1006              vme_context->vme_output.num_blocks = w_in_mbs * h_in_mbs;
1007              vme_context->vme_output.pitch = 16; /* in bytes, always 16 */
1008              vme_context->vme_output.size_block = INTRA_VME_OUTPUT_IN_BYTES;
1009              vme_context->vme_output.bo = dri_bo_alloc(i965->intel.bufmgr,
1010                                                        "MPEG2 VME output buffer",
1011                                                        vme_context->vme_output.num_blocks
1012                                                            * vme_context->vme_output.size_block,
1013                                                        0x1000);
1014          }
1015
1016          return VA_STATUS_SUCCESS;
1017     }
1018
1019     gen7_vme_media_init(ctx, encoder_context);
1020     gen7_vme_mpeg2_prepare(ctx, encode_state, encoder_context);
1021     gen7_vme_run(ctx, encode_state, encoder_context);
1022     gen7_vme_stop(ctx, encode_state, encoder_context);
1023
1024     return VA_STATUS_SUCCESS;
1025 }
1026
1027 static void
1028 gen7_vme_context_destroy(void *context)
1029 {
1030     struct gen6_vme_context *vme_context = context;
1031
1032     i965_gpe_context_destroy(&vme_context->gpe_context);
1033
1034     dri_bo_unreference(vme_context->vme_output.bo);
1035     vme_context->vme_output.bo = NULL;
1036
1037     dri_bo_unreference(vme_context->vme_state.bo);
1038     vme_context->vme_state.bo = NULL;
1039
1040     dri_bo_unreference(vme_context->vme_batchbuffer.bo);
1041     vme_context->vme_batchbuffer.bo = NULL;
1042
1043     if (vme_context->vme_state_message) {
1044         free(vme_context->vme_state_message);
1045         vme_context->vme_state_message = NULL;
1046     }
1047
1048     free(vme_context);
1049 }
1050
1051 Bool gen7_vme_context_init(VADriverContextP ctx, struct intel_encoder_context *encoder_context)
1052 {
1053     struct i965_driver_data *i965 = i965_driver_data(ctx);
1054     struct gen6_vme_context *vme_context = calloc(1, sizeof(struct gen6_vme_context));
1055
1056     vme_context->gpe_context.surface_state_binding_table.length =
1057               (SURFACE_STATE_PADDED_SIZE + sizeof(unsigned int)) * MAX_MEDIA_SURFACES_GEN6;
1058
1059     vme_context->gpe_context.idrt.max_entries = MAX_INTERFACE_DESC_GEN6;
1060     vme_context->gpe_context.idrt.entry_size = sizeof(struct gen6_interface_descriptor_data);
1061     vme_context->gpe_context.curbe.length = CURBE_TOTAL_DATA_LENGTH;
1062
1063     vme_context->gpe_context.vfe_state.max_num_threads = 60 - 1;
1064     vme_context->gpe_context.vfe_state.num_urb_entries = 16;
1065     vme_context->gpe_context.vfe_state.gpgpu_mode = 0;
1066     vme_context->gpe_context.vfe_state.urb_entry_size = 59 - 1;
1067     vme_context->gpe_context.vfe_state.curbe_allocation_size = CURBE_ALLOCATION_SIZE - 1;
1068
1069     gen7_vme_scoreboard_init(ctx, vme_context);
1070
1071     if(encoder_context->profile == VAProfileH264Baseline ||
1072        encoder_context->profile == VAProfileH264Main     ||
1073        encoder_context->profile == VAProfileH264High ){
1074        vme_context->video_coding_type = VIDEO_CODING_AVC;
1075        vme_context->vme_kernel_sum = AVC_VME_KERNEL_SUM; 
1076  
1077     } else if (encoder_context->profile == VAProfileMPEG2Simple ||
1078                encoder_context->profile == VAProfileMPEG2Main ){
1079        vme_context->video_coding_type = VIDEO_CODING_MPEG2;
1080        vme_context->vme_kernel_sum = MPEG2_VME_KERNEL_SUM; 
1081     } else {
1082         /* Unsupported encoding profile */
1083         assert(0);
1084     }
1085
1086     if (IS_GEN7(i965->intel.device_id)) {
1087         if (vme_context->video_coding_type == VIDEO_CODING_AVC) {
1088               i965_gpe_load_kernels(ctx,
1089                                     &vme_context->gpe_context,
1090                                     gen7_vme_kernels,
1091                                     vme_context->vme_kernel_sum);
1092               encoder_context->vme_pipeline = gen7_vme_pipeline;
1093  
1094         } else {
1095               i965_gpe_load_kernels(ctx,
1096                                     &vme_context->gpe_context,
1097                                     gen7_vme_mpeg2_kernels,
1098                                     vme_context->vme_kernel_sum);
1099               encoder_context->vme_pipeline = gen7_vme_mpeg2_pipeline;
1100  
1101         }
1102
1103         vme_context->vme_surface2_setup = gen7_gpe_surface2_setup;
1104         vme_context->vme_media_rw_surface_setup = gen7_gpe_media_rw_surface_setup;
1105         vme_context->vme_buffer_suface_setup = gen7_gpe_buffer_suface_setup;
1106     }
1107
1108     encoder_context->vme_context = vme_context;
1109     encoder_context->vme_context_destroy = gen7_vme_context_destroy;
1110     vme_context->vme_state_message = malloc(VME_MSG_LENGTH * sizeof(int));
1111
1112     return True;
1113 }