24a22c53ce1a632bbc6f21a2079058ce5301d3b3
[platform/upstream/libva-intel-driver.git] / src / gen7_vme.c
1 /*
2  * Copyright © 2010-2011 Intel Corporation
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the
6  * "Software"), to deal in the Software without restriction, including
7  * without limitation the rights to use, copy, modify, merge, publish,
8  * distribute, sub license, and/or sell copies of the Software, and to
9  * permit persons to whom the Software is furnished to do so, subject to
10  * the following conditions:
11  *
12  * The above copyright notice and this permission notice (including the
13  * next paragraph) shall be included in all copies or substantial portions
14  * of the Software.
15  *
16  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
17  * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
18  * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
19  * IN NO EVENT SHALL PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR
20  * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
21  * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
22  * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
23  *
24  * Authors:
25  *    Zhao Yakui <yakui.zhao@intel.com>
26  *
27  */
28
29 #include <stdio.h>
30 #include <stdlib.h>
31 #include <stdbool.h>
32 #include <string.h>
33 #include <assert.h>
34
35 #include "intel_batchbuffer.h"
36 #include "intel_driver.h"
37
38 #include "i965_defines.h"
39 #include "i965_drv_video.h"
40 #include "i965_encoder.h"
41 #include "gen6_vme.h"
42 #include "gen6_mfc.h"
43 #ifdef SURFACE_STATE_PADDED_SIZE
44 #undef SURFACE_STATE_PADDED_SIZE
45 #endif
46
47 #define VME_MSG_LENGTH          32
48 #define SURFACE_STATE_PADDED_SIZE_0_GEN7        ALIGN(sizeof(struct gen7_surface_state), 32)
49 #define SURFACE_STATE_PADDED_SIZE_1_GEN7        ALIGN(sizeof(struct gen7_surface_state2), 32)
50 #define SURFACE_STATE_PADDED_SIZE_GEN7          MAX(SURFACE_STATE_PADDED_SIZE_0_GEN7, SURFACE_STATE_PADDED_SIZE_1_GEN7)
51
52 #define SURFACE_STATE_PADDED_SIZE               SURFACE_STATE_PADDED_SIZE_GEN7
53 #define SURFACE_STATE_OFFSET(index)             (SURFACE_STATE_PADDED_SIZE * index)
54 #define BINDING_TABLE_OFFSET(index)             (SURFACE_STATE_OFFSET(MAX_MEDIA_SURFACES_GEN6) + sizeof(unsigned int) * index)
55
56 #define CURBE_ALLOCATION_SIZE   37              /* in 256-bit */
57 #define CURBE_TOTAL_DATA_LENGTH (4 * 32)        /* in byte, it should be less than or equal to CURBE_ALLOCATION_SIZE * 32 */
58 #define CURBE_URB_ENTRY_LENGTH  4               /* in 256-bit, it should be less than or equal to CURBE_TOTAL_DATA_LENGTH / 32 */
59
/* Codec families handled by this VME module. */
enum VIDEO_CODING_TYPE{
    VIDEO_CODING_AVC = 0,
    VIDEO_CODING_MPEG2,
    VIDEO_CODING_SUM    /* number of supported codecs */
};

/* Kernel indices for the AVC path; order matches gen7_vme_kernels[]. */
enum AVC_VME_KERNEL_TYPE{ 
    AVC_VME_INTRA_SHADER = 0,
    AVC_VME_INTER_SHADER,
    AVC_VME_BATCHBUFFER,
    AVC_VME_BINTER_SHADER,  /* inter kernel used for B frames */
    AVC_VME_KERNEL_SUM      /* number of AVC kernels */
};

/* Kernel indices for the MPEG2 path; order matches gen7_vme_mpeg2_kernels[]. */
enum MPEG2_VME_KERNEL_TYPE{
    MPEG2_VME_INTER_SHADER = 0,
    MPEG2_VME_BATCHBUFFER,
    MPEG2_VME_KERNEL_SUM    /* number of MPEG2 kernels */
};
79  
80
/*
 * Precompiled Gen7 (Ivybridge) EU kernel binaries.  Each .g7b file is a
 * comma-separated dword dump produced by the shader build step; every
 * row of the array is one 4-dword EU instruction.
 */
static const uint32_t gen7_vme_intra_frame[][4] = {
#include "shaders/vme/intra_frame_ivb.g7b"
};

static const uint32_t gen7_vme_inter_frame[][4] = {
#include "shaders/vme/inter_frame_ivb.g7b"
};

static const uint32_t gen7_vme_batchbuffer[][4] = {
#include "shaders/vme/batchbuffer.g7b"
};

static const uint32_t gen7_vme_binter_frame[][4] = {
#include "shaders/vme/inter_bframe_ivb.g7b"
};
96
/*
 * AVC VME kernel table, indexed by enum AVC_VME_KERNEL_TYPE.
 * Fields per entry: name, index, binary, binary size, bo
 * (bo is NULL here; presumably filled in when the kernels are
 * uploaded -- set up outside this file).
 */
static struct i965_kernel gen7_vme_kernels[] = {
    {
        "AVC VME Intra Frame",
        AVC_VME_INTRA_SHADER,                   /*index*/
        gen7_vme_intra_frame,                   
        sizeof(gen7_vme_intra_frame),           
        NULL
    },
    {
        "AVC VME inter Frame",
        AVC_VME_INTER_SHADER,
        gen7_vme_inter_frame,
        sizeof(gen7_vme_inter_frame),
        NULL
    },
    {
        "AVC VME BATCHBUFFER",
        AVC_VME_BATCHBUFFER,
        gen7_vme_batchbuffer,
        sizeof(gen7_vme_batchbuffer),
        NULL
    },
    {
        "AVC VME binter Frame",
        AVC_VME_BINTER_SHADER,
        gen7_vme_binter_frame,
        sizeof(gen7_vme_binter_frame),
        NULL
    }
};
127
/* MPEG2 inter-prediction kernel binary (same dword-dump format as above). */
static const uint32_t gen7_vme_mpeg2_inter_frame[][4] = {
#include "shaders/vme/mpeg2_inter_frame.g7b"
};

/* The batchbuffer-generation kernel is shared with the AVC path. */
static const uint32_t gen7_vme_mpeg2_batchbuffer[][4] = {
#include "shaders/vme/batchbuffer.g7b"
};

/* MPEG2 VME kernel table, indexed by enum MPEG2_VME_KERNEL_TYPE. */
static struct i965_kernel gen7_vme_mpeg2_kernels[] = {
    {
        "MPEG2 VME inter Frame",
        MPEG2_VME_INTER_SHADER,
        gen7_vme_mpeg2_inter_frame,
        sizeof(gen7_vme_mpeg2_inter_frame),
        NULL
    },
    {
        "MPEG2 VME BATCHBUFFER",
        MPEG2_VME_BATCHBUFFER,
        gen7_vme_mpeg2_batchbuffer,
        sizeof(gen7_vme_mpeg2_batchbuffer),
        NULL
    },
};
152
153 /* only used for VME source surface state */
154 static void 
155 gen7_vme_source_surface_state(VADriverContextP ctx,
156                               int index,
157                               struct object_surface *obj_surface,
158                               struct intel_encoder_context *encoder_context)
159 {
160     struct gen6_vme_context *vme_context = encoder_context->vme_context;
161
162     vme_context->vme_surface2_setup(ctx,
163                                     &vme_context->gpe_context,
164                                     obj_surface,
165                                     BINDING_TABLE_OFFSET(index),
166                                     SURFACE_STATE_OFFSET(index));
167 }
168
169 static void
170 gen7_vme_media_source_surface_state(VADriverContextP ctx,
171                                     int index,
172                                     struct object_surface *obj_surface,
173                                     struct intel_encoder_context *encoder_context)
174 {
175     struct gen6_vme_context *vme_context = encoder_context->vme_context;
176
177     vme_context->vme_media_rw_surface_setup(ctx,
178                                             &vme_context->gpe_context,
179                                             obj_surface,
180                                             BINDING_TABLE_OFFSET(index),
181                                             SURFACE_STATE_OFFSET(index));
182 }
183
184 static void
185 gen7_vme_output_buffer_setup(VADriverContextP ctx,
186                              struct encode_state *encode_state,
187                              int index,
188                              struct intel_encoder_context *encoder_context)
189
190 {
191     struct i965_driver_data *i965 = i965_driver_data(ctx);
192     struct gen6_vme_context *vme_context = encoder_context->vme_context;
193     VAEncSequenceParameterBufferH264 *pSequenceParameter = (VAEncSequenceParameterBufferH264 *)encode_state->seq_param_ext->buffer;
194     VAEncSliceParameterBufferH264 *pSliceParameter = (VAEncSliceParameterBufferH264 *)encode_state->slice_params_ext[0]->buffer;
195     int is_intra = pSliceParameter->slice_type == SLICE_TYPE_I;
196     int width_in_mbs = pSequenceParameter->picture_width_in_mbs;
197     int height_in_mbs = pSequenceParameter->picture_height_in_mbs;
198
199     vme_context->vme_output.num_blocks = width_in_mbs * height_in_mbs;
200     vme_context->vme_output.pitch = 16; /* in bytes, always 16 */
201
202     if (is_intra)
203         vme_context->vme_output.size_block = INTRA_VME_OUTPUT_IN_BYTES;
204     else
205         vme_context->vme_output.size_block = INTER_VME_OUTPUT_IN_BYTES;
206
207     vme_context->vme_output.bo = dri_bo_alloc(i965->intel.bufmgr, 
208                                               "VME output buffer",
209                                               vme_context->vme_output.num_blocks * vme_context->vme_output.size_block,
210                                               0x1000);
211     assert(vme_context->vme_output.bo);
212     vme_context->vme_buffer_suface_setup(ctx,
213                                          &vme_context->gpe_context,
214                                          &vme_context->vme_output,
215                                          BINDING_TABLE_OFFSET(index),
216                                          SURFACE_STATE_OFFSET(index));
217 }
218
219 static void
220 gen7_vme_output_vme_batchbuffer_setup(VADriverContextP ctx,
221                                       struct encode_state *encode_state,
222                                       int index,
223                                       struct intel_encoder_context *encoder_context)
224
225 {
226     struct i965_driver_data *i965 = i965_driver_data(ctx);
227     struct gen6_vme_context *vme_context = encoder_context->vme_context;
228     VAEncSequenceParameterBufferH264 *pSequenceParameter = (VAEncSequenceParameterBufferH264 *)encode_state->seq_param_ext->buffer;
229     int width_in_mbs = pSequenceParameter->picture_width_in_mbs;
230     int height_in_mbs = pSequenceParameter->picture_height_in_mbs;
231
232     vme_context->vme_batchbuffer.num_blocks = width_in_mbs * height_in_mbs + 1;
233     vme_context->vme_batchbuffer.size_block = 32; /* 2 OWORDs */
234     vme_context->vme_batchbuffer.pitch = 16;
235     vme_context->vme_batchbuffer.bo = dri_bo_alloc(i965->intel.bufmgr, 
236                                                    "VME batchbuffer",
237                                                    vme_context->vme_batchbuffer.num_blocks * vme_context->vme_batchbuffer.size_block,
238                                                    0x1000);
239     vme_context->vme_buffer_suface_setup(ctx,
240                                          &vme_context->gpe_context,
241                                          &vme_context->vme_batchbuffer,
242                                          BINDING_TABLE_OFFSET(index),
243                                          SURFACE_STATE_OFFSET(index));
244 }
245
246 static VAStatus
247 gen7_vme_surface_setup(VADriverContextP ctx, 
248                        struct encode_state *encode_state,
249                        int is_intra,
250                        struct intel_encoder_context *encoder_context)
251 {
252     struct object_surface *obj_surface;
253
254     /*Setup surfaces state*/
255     /* current picture for encoding */
256     obj_surface = encode_state->input_yuv_object;
257     gen7_vme_source_surface_state(ctx, 0, obj_surface, encoder_context);
258     gen7_vme_media_source_surface_state(ctx, 4, obj_surface, encoder_context);
259
260     if (!is_intra) {
261         /* reference 0 */
262         obj_surface = encode_state->reference_objects[0];
263
264         if (obj_surface && obj_surface->bo)
265             gen7_vme_source_surface_state(ctx, 1, obj_surface, encoder_context);
266
267         /* reference 1 */
268         obj_surface = encode_state->reference_objects[1];
269
270         if (obj_surface && obj_surface->bo)
271             gen7_vme_source_surface_state(ctx, 2, obj_surface, encoder_context);
272     }
273
274     /* VME output */
275     gen7_vme_output_buffer_setup(ctx, encode_state, 3, encoder_context);
276     gen7_vme_output_vme_batchbuffer_setup(ctx, encode_state, 5, encoder_context);
277
278     return VA_STATUS_SUCCESS;
279 }
280
/*
 * Fill the interface descriptor table (IDRT): one 32-byte descriptor
 * per VME kernel, each pointing at its kernel binary, the shared VME
 * state buffer (exposed as sampler state) and the common binding
 * table, plus the CURBE read window.  Relocations are emitted so the
 * kernel and state GPU addresses are patched at exec time.
 */
static VAStatus gen7_vme_interface_setup(VADriverContextP ctx, 
                                         struct encode_state *encode_state,
                                         struct intel_encoder_context *encoder_context)
{
    struct gen6_vme_context *vme_context = encoder_context->vme_context;
    struct gen6_interface_descriptor_data *desc;
    int i;
    dri_bo *bo;

    bo = vme_context->gpe_context.idrt.bo;
    dri_bo_map(bo, 1);          /* map writable */
    assert(bo->virtual);
    desc = bo->virtual;

    for (i = 0; i < vme_context->vme_kernel_sum; i++) {
        struct i965_kernel *kernel;
        kernel = &vme_context->gpe_context.kernels[i];
        assert(sizeof(*desc) == 32);
        /* Set up the descriptor table entry. */
        memset(desc, 0, sizeof(*desc));
        desc->desc0.kernel_start_pointer = (kernel->bo->offset >> 6); /* 64-byte units */
        desc->desc2.sampler_count = 1; /* FIXME: */
        desc->desc2.sampler_state_pointer = (vme_context->vme_state.bo->offset >> 5); /* 32-byte units */
        desc->desc3.binding_table_entry_count = 1; /* FIXME: */
        desc->desc3.binding_table_pointer = (BINDING_TABLE_OFFSET(0) >> 5);
        desc->desc4.constant_urb_entry_read_offset = 0;
        desc->desc4.constant_urb_entry_read_length = CURBE_URB_ENTRY_LENGTH;
                
        /* Relocation for the kernel start pointer (desc0). */
        dri_bo_emit_reloc(bo,   
                          I915_GEM_DOMAIN_INSTRUCTION, 0,
                          0,
                          i * sizeof(*desc) + offsetof(struct gen6_interface_descriptor_data, desc0),
                          kernel->bo);
        /* Relocation for the sampler state (VME state) pointer (desc2);
         * delta (1 << 2) keeps the low bits of desc2 intact. */
        dri_bo_emit_reloc(bo,
                          I915_GEM_DOMAIN_INSTRUCTION, 0,
                          (1 << 2),                                                                     //
                          i * sizeof(*desc) + offsetof(struct gen6_interface_descriptor_data, desc2),
                          vme_context->vme_state.bo);
        desc++;
    }
    dri_bo_unmap(bo);

    return VA_STATUS_SUCCESS;
}
327
328 static VAStatus gen7_vme_constant_setup(VADriverContextP ctx, 
329                                         struct encode_state *encode_state,
330                                         struct intel_encoder_context *encoder_context)
331 {
332     struct gen6_vme_context *vme_context = encoder_context->vme_context;
333     // unsigned char *constant_buffer;
334     unsigned int *vme_state_message;
335     int mv_num = 32;
336     if (vme_context->h264_level >= 30) {
337         mv_num = 16;
338         if (vme_context->h264_level >= 31)
339                 mv_num = 8;
340     } 
341
342     dri_bo_map(vme_context->gpe_context.curbe.bo, 1);
343     assert(vme_context->gpe_context.curbe.bo->virtual);
344     // constant_buffer = vme_context->curbe.bo->virtual;
345     vme_state_message = (unsigned int *)vme_context->gpe_context.curbe.bo->virtual;
346     vme_state_message[31] = mv_num;
347         
348     /*TODO copy buffer into CURB*/
349
350     dri_bo_unmap( vme_context->gpe_context.curbe.bo);
351
352     return VA_STATUS_SUCCESS;
353 }
354
/*
 * Per-QP intra MB mode cost LUT: one packed dword for each H.264 QP
 * value 0..51.  The selected entry is written into dword 16 of the
 * VME state message by gen7_vme_state_setup_fixup().
 */
static const unsigned int intra_mb_mode_cost_table[] = {
    0x31110001, // for qp0
    0x09110001, // for qp1
    0x15030001, // for qp2
    0x0b030001, // for qp3
    0x0d030011, // for qp4
    0x17210011, // for qp5
    0x41210011, // for qp6
    0x19210011, // for qp7
    0x25050003, // for qp8
    0x1b130003, // for qp9
    0x1d130003, // for qp10
    0x27070021, // for qp11
    0x51310021, // for qp12
    0x29090021, // for qp13
    0x35150005, // for qp14
    0x2b0b0013, // for qp15
    0x2d0d0013, // for qp16
    0x37170007, // for qp17
    0x61410031, // for qp18
    0x39190009, // for qp19
    0x45250015, // for qp20
    0x3b1b000b, // for qp21
    0x3d1d000d, // for qp22
    0x47270017, // for qp23
    0x71510041, // for qp24 ! center for qp=0..30
    0x49290019, // for qp25
    0x55350025, // for qp26
    0x4b2b001b, // for qp27
    0x4d2d001d, // for qp28
    0x57370027, // for qp29
    0x81610051, // for qp30
    0x57270017, // for qp31
    0x81510041, // for qp32 ! center for qp=31..51
    0x59290019, // for qp33
    0x65350025, // for qp34
    0x5b2b001b, // for qp35
    0x5d2d001d, // for qp36
    0x67370027, // for qp37
    0x91610051, // for qp38
    0x69390029, // for qp39
    0x75450035, // for qp40
    0x6b3b002b, // for qp41
    0x6d3d002d, // for qp42
    0x77470037, // for qp43
    0xa1710061, // for qp44
    0x79490039, // for qp45
    0x85550045, // for qp46
    0x7b4b003b, // for qp47
    0x7d4d003d, // for qp48
    0x87570047, // for qp49
    0xb1810071, // for qp50
    0x89590049  // for qp51
};
409
/*
 * Patch dword 16 (the intra MB mode cost) of the VME state message
 * for I/SI slices, picking the LUT entry for the effective QP.
 * No-op for all other slice types.
 */
static void gen7_vme_state_setup_fixup(VADriverContextP ctx,
                                       struct encode_state *encode_state,
                                       struct intel_encoder_context *encoder_context,
                                       unsigned int *vme_state_message)
{
    struct gen6_mfc_context *mfc_context = encoder_context->mfc_context;
    VAEncPictureParameterBufferH264 *pic_param = (VAEncPictureParameterBufferH264 *)encode_state->pic_param_ext->buffer;
    VAEncSliceParameterBufferH264 *slice_param = (VAEncSliceParameterBufferH264 *)encode_state->slice_params_ext[0]->buffer;

    if (slice_param->slice_type != SLICE_TYPE_I &&
        slice_param->slice_type != SLICE_TYPE_SI)
        return;
    if (encoder_context->rate_control_mode == VA_RC_CQP)
        /* NOTE(review): assumes pic_init_qp + slice_qp_delta lies in
         * 0..51 (the LUT bounds) -- confirm callers validate the QP. */
        vme_state_message[16] = intra_mb_mode_cost_table[pic_param->pic_init_qp + slice_param->slice_qp_delta];
    else
        vme_state_message[16] = intra_mb_mode_cost_table[mfc_context->bit_rate_control_context[SLICE_TYPE_I].QpPrimeY];
}
427
428 static VAStatus gen7_vme_avc_state_setup(VADriverContextP ctx,
429                                          struct encode_state *encode_state,
430                                          int is_intra,
431                                          struct intel_encoder_context *encoder_context)
432 {
433     struct gen6_vme_context *vme_context = encoder_context->vme_context;
434     unsigned int *vme_state_message;
435         unsigned int *mb_cost_table;
436     int i;
437     VAEncSliceParameterBufferH264 *slice_param = (VAEncSliceParameterBufferH264 *)encode_state->slice_params_ext[0]->buffer;
438
439         mb_cost_table = (unsigned int *)vme_context->vme_state_message;
440     //building VME state message
441     dri_bo_map(vme_context->vme_state.bo, 1);
442     assert(vme_context->vme_state.bo->virtual);
443     vme_state_message = (unsigned int *)vme_context->vme_state.bo->virtual;
444
445     if ((slice_param->slice_type == SLICE_TYPE_P) ||
446         (slice_param->slice_type == SLICE_TYPE_SP)) {
447             vme_state_message[0] = 0x01010101;
448             vme_state_message[1] = 0x10010101;
449             vme_state_message[2] = 0x0F0F0F0F;
450             vme_state_message[3] = 0x100F0F0F;
451             vme_state_message[4] = 0x01010101;
452             vme_state_message[5] = 0x10010101;
453             vme_state_message[6] = 0x0F0F0F0F;
454             vme_state_message[7] = 0x100F0F0F;
455             vme_state_message[8] = 0x01010101;
456             vme_state_message[9] = 0x10010101;
457             vme_state_message[10] = 0x0F0F0F0F;
458             vme_state_message[11] = 0x000F0F0F;
459             vme_state_message[12] = 0x00;
460             vme_state_message[13] = 0x00;
461         } else {
462             vme_state_message[0] = 0x10010101;
463             vme_state_message[1] = 0x100F0F0F;
464             vme_state_message[2] = 0x10010101;
465             vme_state_message[3] = 0x000F0F0F;
466             vme_state_message[4] = 0;
467             vme_state_message[5] = 0;
468             vme_state_message[6] = 0;
469             vme_state_message[7] = 0;
470             vme_state_message[8] = 0;
471             vme_state_message[9] = 0;
472             vme_state_message[10] = 0;
473             vme_state_message[11] = 0;
474             vme_state_message[12] = 0;
475             vme_state_message[13] = 0;
476         }
477
478     vme_state_message[14] = (mb_cost_table[2] & 0xFFFF);
479     vme_state_message[15] = 0;
480     vme_state_message[16] = mb_cost_table[0];
481     vme_state_message[17] = mb_cost_table[1];
482     vme_state_message[18] = mb_cost_table[3];
483     vme_state_message[19] = mb_cost_table[4];
484
485     for(i = 20; i < 32; i++) {
486         vme_state_message[i] = 0;
487     }
488
489     dri_bo_unmap( vme_context->vme_state.bo);
490     return VA_STATUS_SUCCESS;
491 }
492
493 static VAStatus gen7_vme_vme_state_setup(VADriverContextP ctx,
494                                          struct encode_state *encode_state,
495                                          int is_intra,
496                                          struct intel_encoder_context *encoder_context)
497 {
498     struct gen6_vme_context *vme_context = encoder_context->vme_context;
499     unsigned int *vme_state_message;
500     int i;
501         
502     //building VME state message
503     dri_bo_map(vme_context->vme_state.bo, 1);
504     assert(vme_context->vme_state.bo->virtual);
505     vme_state_message = (unsigned int *)vme_context->vme_state.bo->virtual;
506
507     vme_state_message[0] = 0x01010101;
508     vme_state_message[1] = 0x10010101;
509     vme_state_message[2] = 0x0F0F0F0F;
510     vme_state_message[3] = 0x100F0F0F;
511     vme_state_message[4] = 0x01010101;
512     vme_state_message[5] = 0x10010101;
513     vme_state_message[6] = 0x0F0F0F0F;
514     vme_state_message[7] = 0x100F0F0F;
515     vme_state_message[8] = 0x01010101;
516     vme_state_message[9] = 0x10010101;
517     vme_state_message[10] = 0x0F0F0F0F;
518     vme_state_message[11] = 0x000F0F0F;
519     vme_state_message[12] = 0x00;
520     vme_state_message[13] = 0x00;
521
522     vme_state_message[14] = 0x4a4a;
523     vme_state_message[15] = 0x0;
524     vme_state_message[16] = 0x4a4a4a4a;
525     vme_state_message[17] = 0x4a4a4a4a;
526     vme_state_message[18] = 0x21110100;
527     vme_state_message[19] = 0x61514131;
528
529     for(i = 20; i < 32; i++) {
530         vme_state_message[i] = 0;
531     }
532     //vme_state_message[16] = 0x42424242;                       //cost function LUT set 0 for Intra
533
534     gen7_vme_state_setup_fixup(ctx, encode_state, encoder_context, vme_state_message);
535
536     dri_bo_unmap( vme_context->vme_state.bo);
537     return VA_STATUS_SUCCESS;
538 }
539
/*
 * CPU-side fallback for building the second-level batch buffer: emit
 * one 8-dword MEDIA_OBJECT command per macroblock, walking the slices
 * in raster order.  mb_intra_ub carries the intra-prediction neighbor
 * availability flags (left/top/top-left/top-right), which are masked
 * off at slice boundaries since neighbors outside the slice cannot be
 * used for prediction.
 *
 * NOTE(review): the slice buffer is cast to
 * VAEncSliceParameterBufferMPEG2 although the visible caller
 * (gen7_vme_pipeline_programing) passes H.264 slice parameters; the
 * fields read (macroblock_address, num_macroblocks) exist in both, but
 * the slice_param++ stride differs between the structs when
 * num_elements > 1 -- confirm against the MPEG2 path before changing.
 */
static void
gen7_vme_fill_vme_batchbuffer(VADriverContextP ctx, 
                              struct encode_state *encode_state,
                              int mb_width, int mb_height,
                              int kernel,
                              int transform_8x8_mode_flag,
                              struct intel_encoder_context *encoder_context)
{
    struct gen6_vme_context *vme_context = encoder_context->vme_context;
    int mb_x = 0, mb_y = 0;
    int i, s, j;
    unsigned int *command_ptr;


    dri_bo_map(vme_context->vme_batchbuffer.bo, 1);
    command_ptr = vme_context->vme_batchbuffer.bo->virtual;

    for (s = 0; s < encode_state->num_slice_params_ext; s++) {
        VAEncSliceParameterBufferMPEG2 *slice_param = (VAEncSliceParameterBufferMPEG2 *)encode_state->slice_params_ext[s]->buffer;

        for (j = 0; j < encode_state->slice_params_ext[s]->num_elements; j++) {
            int slice_mb_begin = slice_param->macroblock_address;
            int slice_mb_number = slice_param->num_macroblocks;
            unsigned int mb_intra_ub;
            int slice_mb_x = slice_param->macroblock_address % mb_width;

            for (i = 0; i < slice_mb_number;) {
                int mb_count = i + slice_mb_begin;    

                mb_x = mb_count % mb_width;
                mb_y = mb_count / mb_width;
                mb_intra_ub = 0;

                /* Left neighbor available unless on the frame edge. */
                if (mb_x != 0) {
                    mb_intra_ub |= INTRA_PRED_AVAIL_FLAG_AE;
                }

                /* Top row neighbors (B/D/C) for non-top macroblocks. */
                if (mb_y != 0) {
                    mb_intra_ub |= INTRA_PRED_AVAIL_FLAG_B;

                    if (mb_x != 0)
                        mb_intra_ub |= INTRA_PRED_AVAIL_FLAG_D;

                    if (mb_x != (mb_width -1))
                        mb_intra_ub |= INTRA_PRED_AVAIL_FLAG_C;
                }

                /* First MB row of the slice: neighbors above belong to
                 * another slice, so they are not available. */
                if (i < mb_width) {
                    if (i == 0)
                        mb_intra_ub &= ~(INTRA_PRED_AVAIL_FLAG_AE);

                    mb_intra_ub &= ~(INTRA_PRED_AVAIL_FLAG_BCD_MASK);

                    if ((i == (mb_width - 1)) && slice_mb_x) {
                        mb_intra_ub |= INTRA_PRED_AVAIL_FLAG_C;
                    }
                }
                
                /* Second row of a mid-row-starting slice: the top-left
                 * neighbor still lies outside the slice. */
                if ((i == mb_width) && slice_mb_x) {
                    mb_intra_ub &= ~(INTRA_PRED_AVAIL_FLAG_D);
                }

                /* 8-dword MEDIA_OBJECT: header, interface descriptor
                 * index, 4 reserved dwords, then 2 dwords inline data. */
                *command_ptr++ = (CMD_MEDIA_OBJECT | (8 - 2));
                *command_ptr++ = kernel;
                *command_ptr++ = 0;
                *command_ptr++ = 0;
                *command_ptr++ = 0;
                *command_ptr++ = 0;
   
                /* Inline data: MB position, then flags for the kernel. */
                *command_ptr++ = (mb_width << 16 | mb_y << 8 | mb_x);
                *command_ptr++ = ( (1 << 16) | transform_8x8_mode_flag | (mb_intra_ub << 8));

                i += 1;
            }

            slice_param++;
        }
    }

    *command_ptr++ = 0;
    *command_ptr++ = MI_BATCH_BUFFER_END;

    dri_bo_unmap(vme_context->vme_batchbuffer.bo);
}
625
626
627 static void gen7_vme_media_init(VADriverContextP ctx, struct intel_encoder_context *encoder_context)
628 {
629     struct i965_driver_data *i965 = i965_driver_data(ctx);
630     struct gen6_vme_context *vme_context = encoder_context->vme_context;
631     dri_bo *bo;
632
633     i965_gpe_context_init(ctx, &vme_context->gpe_context);
634
635     /* VME output buffer */
636     dri_bo_unreference(vme_context->vme_output.bo);
637     vme_context->vme_output.bo = NULL;
638
639     dri_bo_unreference(vme_context->vme_batchbuffer.bo);
640     vme_context->vme_batchbuffer.bo = NULL;
641
642     /* VME state */
643     dri_bo_unreference(vme_context->vme_state.bo);
644     bo = dri_bo_alloc(i965->intel.bufmgr,
645                       "Buffer",
646                       1024*16, 64);
647     assert(bo);
648     vme_context->vme_state.bo = bo;
649 }
650
651 static void gen7_vme_pipeline_programing(VADriverContextP ctx, 
652                                          struct encode_state *encode_state,
653                                          struct intel_encoder_context *encoder_context)
654 {
655     struct gen6_vme_context *vme_context = encoder_context->vme_context;
656     struct intel_batchbuffer *batch = encoder_context->base.batch;
657     VAEncPictureParameterBufferH264 *pPicParameter = (VAEncPictureParameterBufferH264 *)encode_state->pic_param_ext->buffer;
658     VAEncSliceParameterBufferH264 *pSliceParameter = (VAEncSliceParameterBufferH264 *)encode_state->slice_params_ext[0]->buffer;
659     VAEncSequenceParameterBufferH264 *pSequenceParameter = (VAEncSequenceParameterBufferH264 *)encode_state->seq_param_ext->buffer;
660     int width_in_mbs = pSequenceParameter->picture_width_in_mbs;
661     int height_in_mbs = pSequenceParameter->picture_height_in_mbs;
662     int s;
663     bool allow_hwscore = true;
664     int kernel_shader;
665
666     for (s = 0; s < encode_state->num_slice_params_ext; s++) {
667         pSliceParameter = (VAEncSliceParameterBufferH264 *)encode_state->slice_params_ext[s]->buffer; 
668         if ((pSliceParameter->macroblock_address % width_in_mbs)) {
669                 allow_hwscore = false;
670                 break;
671         }
672     }
673
674     if ((pSliceParameter->slice_type == SLICE_TYPE_I) ||
675         (pSliceParameter->slice_type == SLICE_TYPE_I)) {
676         kernel_shader = AVC_VME_INTRA_SHADER;
677     } else if ((pSliceParameter->slice_type == SLICE_TYPE_P) ||
678         (pSliceParameter->slice_type == SLICE_TYPE_SP)) {
679         kernel_shader = AVC_VME_INTER_SHADER;
680     } else {
681         kernel_shader = AVC_VME_BINTER_SHADER;
682         if (!allow_hwscore)
683              kernel_shader = AVC_VME_INTER_SHADER;
684     }
685
686     if (allow_hwscore)
687         gen7_vme_walker_fill_vme_batchbuffer(ctx, 
688                                   encode_state,
689                                   width_in_mbs, height_in_mbs,
690                                   kernel_shader,
691                                   pPicParameter->pic_fields.bits.transform_8x8_mode_flag,
692                                   encoder_context);
693         
694     else
695         gen7_vme_fill_vme_batchbuffer(ctx, 
696                                   encode_state,
697                                   width_in_mbs, height_in_mbs,
698                                   kernel_shader, 
699                                   pPicParameter->pic_fields.bits.transform_8x8_mode_flag,
700                                   encoder_context);
701
702     intel_batchbuffer_start_atomic(batch, 0x1000);
703     gen6_gpe_pipeline_setup(ctx, &vme_context->gpe_context, batch);
704     BEGIN_BATCH(batch, 2);
705     OUT_BATCH(batch, MI_BATCH_BUFFER_START | (2 << 6));
706     OUT_RELOC(batch,
707               vme_context->vme_batchbuffer.bo,
708               I915_GEM_DOMAIN_COMMAND, 0, 
709               0);
710     ADVANCE_BATCH(batch);
711
712     intel_batchbuffer_end_atomic(batch);        
713 }
714
715 static VAStatus gen7_vme_prepare(VADriverContextP ctx, 
716                                  struct encode_state *encode_state,
717                                  struct intel_encoder_context *encoder_context)
718 {
719     VAStatus vaStatus = VA_STATUS_SUCCESS;
720     VAEncSliceParameterBufferH264 *pSliceParameter = (VAEncSliceParameterBufferH264 *)encode_state->slice_params_ext[0]->buffer;
721     int is_intra = pSliceParameter->slice_type == SLICE_TYPE_I;
722     VAEncSequenceParameterBufferH264 *pSequenceParameter = (VAEncSequenceParameterBufferH264 *)encode_state->seq_param_ext->buffer;
723     struct gen6_vme_context *vme_context = encoder_context->vme_context;
724
725     if (!vme_context->h264_level ||
726                 (vme_context->h264_level != pSequenceParameter->level_idc)) {
727         vme_context->h264_level = pSequenceParameter->level_idc;        
728     }
729         
730     intel_vme_update_mbmv_cost(ctx, encode_state, encoder_context);
731     /*Setup all the memory object*/
732     gen7_vme_surface_setup(ctx, encode_state, is_intra, encoder_context);
733     gen7_vme_interface_setup(ctx, encode_state, encoder_context);
734     gen7_vme_constant_setup(ctx, encode_state, encoder_context);
735     gen7_vme_avc_state_setup(ctx, encode_state, is_intra, encoder_context);
736
737     /*Programing media pipeline*/
738     gen7_vme_pipeline_programing(ctx, encode_state, encoder_context);
739
740     return vaStatus;
741 }
742
743 static VAStatus gen7_vme_run(VADriverContextP ctx, 
744                              struct encode_state *encode_state,
745                              struct intel_encoder_context *encoder_context)
746 {
747     struct intel_batchbuffer *batch = encoder_context->base.batch;
748
749     intel_batchbuffer_flush(batch);
750
751     return VA_STATUS_SUCCESS;
752 }
753
/*
 * Per-frame teardown hook.  Nothing to do here: the per-frame buffers
 * are unreferenced in gen7_vme_media_init() at the start of the next
 * frame.
 */
static VAStatus gen7_vme_stop(VADriverContextP ctx, 
                              struct encode_state *encode_state,
                              struct intel_encoder_context *encoder_context)
{
    return VA_STATUS_SUCCESS;
}
760
/* Top-level H.264 VME stage: (re)initialize backing buffers, build the
 * GPU state and command stream, submit it, then finalize.
 * NOTE(review): the return values of prepare/run are ignored; errors in
 * those steps are not propagated to the caller. */
static VAStatus
gen7_vme_pipeline(VADriverContextP ctx,
                  VAProfile profile,
                  struct encode_state *encode_state,
                  struct intel_encoder_context *encoder_context)
{
    gen7_vme_media_init(ctx, encoder_context);
    gen7_vme_prepare(ctx, encode_state, encoder_context);
    gen7_vme_run(ctx, encode_state, encoder_context);
    gen7_vme_stop(ctx, encode_state, encoder_context);

    return VA_STATUS_SUCCESS;
}
774
775 static void
776 gen7_vme_mpeg2_output_buffer_setup(VADriverContextP ctx,
777                                     struct encode_state *encode_state,
778                                     int index,
779                                     int is_intra,
780                                     struct intel_encoder_context *encoder_context)
781
782 {
783     struct i965_driver_data *i965 = i965_driver_data(ctx);
784     struct gen6_vme_context *vme_context = encoder_context->vme_context;
785     VAEncSequenceParameterBufferMPEG2 *seq_param = (VAEncSequenceParameterBufferMPEG2 *)encode_state->seq_param_ext->buffer;
786     int width_in_mbs = ALIGN(seq_param->picture_width, 16) / 16;
787     int height_in_mbs = ALIGN(seq_param->picture_height, 16) / 16;
788
789     vme_context->vme_output.num_blocks = width_in_mbs * height_in_mbs;
790     vme_context->vme_output.pitch = 16; /* in bytes, always 16 */
791
792     if (is_intra)
793         vme_context->vme_output.size_block = INTRA_VME_OUTPUT_IN_BYTES;
794     else
795         vme_context->vme_output.size_block = INTER_VME_OUTPUT_IN_BYTES;
796
797     vme_context->vme_output.bo = dri_bo_alloc(i965->intel.bufmgr,
798                                               "VME output buffer",
799                                               vme_context->vme_output.num_blocks * vme_context->vme_output.size_block,
800                                               0x1000);
801     assert(vme_context->vme_output.bo);
802     vme_context->vme_buffer_suface_setup(ctx,
803                                          &vme_context->gpe_context,
804                                          &vme_context->vme_output,
805                                          BINDING_TABLE_OFFSET(index),
806                                          SURFACE_STATE_OFFSET(index));
807 }
808
809 static void
810 gen7_vme_mpeg2_output_vme_batchbuffer_setup(VADriverContextP ctx,
811                                              struct encode_state *encode_state,
812                                              int index,
813                                              struct intel_encoder_context *encoder_context)
814
815 {
816     struct i965_driver_data *i965 = i965_driver_data(ctx);
817     struct gen6_vme_context *vme_context = encoder_context->vme_context;
818     VAEncSequenceParameterBufferMPEG2 *seq_param = (VAEncSequenceParameterBufferMPEG2 *)encode_state->seq_param_ext->buffer;
819     int width_in_mbs = ALIGN(seq_param->picture_width, 16) / 16;
820     int height_in_mbs = ALIGN(seq_param->picture_height, 16) / 16;
821
822     vme_context->vme_batchbuffer.num_blocks = width_in_mbs * height_in_mbs + 1;
823     vme_context->vme_batchbuffer.size_block = 32; /* 4 OWORDs */
824     vme_context->vme_batchbuffer.pitch = 16;
825     vme_context->vme_batchbuffer.bo = dri_bo_alloc(i965->intel.bufmgr, 
826                                                    "VME batchbuffer",
827                                                    vme_context->vme_batchbuffer.num_blocks * vme_context->vme_batchbuffer.size_block,
828                                                    0x1000);
829     vme_context->vme_buffer_suface_setup(ctx,
830                                          &vme_context->gpe_context,
831                                          &vme_context->vme_batchbuffer,
832                                          BINDING_TABLE_OFFSET(index),
833                                          SURFACE_STATE_OFFSET(index));
834 }
835
836 static VAStatus
837 gen7_vme_mpeg2_surface_setup(VADriverContextP ctx, 
838                               struct encode_state *encode_state,
839                               int is_intra,
840                               struct intel_encoder_context *encoder_context)
841 {
842     struct object_surface *obj_surface;
843
844     /*Setup surfaces state*/
845     /* current picture for encoding */
846     obj_surface = encode_state->input_yuv_object;
847     gen7_vme_source_surface_state(ctx, 0, obj_surface, encoder_context);
848     gen7_vme_media_source_surface_state(ctx, 4, obj_surface, encoder_context);
849
850     if (!is_intra) {
851         /* reference 0 */
852         obj_surface = encode_state->reference_objects[0];
853         if (obj_surface->bo != NULL)
854             gen7_vme_source_surface_state(ctx, 1, obj_surface, encoder_context);
855
856         /* reference 1 */
857         obj_surface = encode_state->reference_objects[1];
858         if (obj_surface && obj_surface->bo != NULL) 
859             gen7_vme_source_surface_state(ctx, 2, obj_surface, encoder_context);
860     }
861
862     /* VME output */
863     gen7_vme_mpeg2_output_buffer_setup(ctx, encode_state, 3, is_intra, encoder_context);
864     gen7_vme_mpeg2_output_vme_batchbuffer_setup(ctx, encode_state, 5, encoder_context);
865
866     return VA_STATUS_SUCCESS;
867 }
868
/* Fill the secondary (VME) batch buffer with one MEDIA_OBJECT command per
 * chunk of macroblocks for every MPEG-2 slice, terminated by
 * MI_BATCH_BUFFER_END.  The primary batch chains to this buffer via
 * MI_BATCH_BUFFER_START. */
static void
gen7_vme_mpeg2_fill_vme_batchbuffer(VADriverContextP ctx,
                                     struct encode_state *encode_state,
                                     int mb_width, int mb_height,
                                     int kernel,
                                     int transform_8x8_mode_flag,
                                     struct intel_encoder_context *encoder_context)
{
    struct gen6_vme_context *vme_context = encoder_context->vme_context;
    int number_mb_cmds;
    int mb_x = 0, mb_y = 0;
    int i, s, j;
    unsigned int *command_ptr;

    dri_bo_map(vme_context->vme_batchbuffer.bo, 1); /* 1 = map for writing */
    command_ptr = vme_context->vme_batchbuffer.bo->virtual;

    for (s = 0; s < encode_state->num_slice_params_ext; s++) {
        VAEncSliceParameterBufferMPEG2 *slice_param = (VAEncSliceParameterBufferMPEG2 *)encode_state->slice_params_ext[s]->buffer;

        for (j = 0; j < encode_state->slice_params_ext[s]->num_elements; j++) {
            int slice_mb_begin = slice_param->macroblock_address;
            int slice_mb_number = slice_param->num_macroblocks;

            for (i = 0; i < slice_mb_number;) {
                /* Convert the linear MB index to x/y coordinates. */
                int mb_count = i + slice_mb_begin;

                mb_x = mb_count % mb_width;
                mb_y = mb_count / mb_width;

                /* Chunk size: the first command of a slice covers one MB
                 * row, subsequent commands up to 128 MBs, and the final
                 * command the remainder. */
                if( i == 0) {
                    number_mb_cmds = mb_width;
                } else if ((i + 128) <= slice_mb_number) {
                    number_mb_cmds = 128;
                } else {
                    number_mb_cmds = slice_mb_number - i;
                }

                /* MEDIA_OBJECT: 6 command DWords + 2 inline-data DWords
                 * (8 total, hence the 8 - 2 length field). */
                *command_ptr++ = (CMD_MEDIA_OBJECT | (8 - 2));
                *command_ptr++ = kernel;
                *command_ptr++ = 0;
                *command_ptr++ = 0;
                *command_ptr++ = 0;
                *command_ptr++ = 0;
 
                /*inline data: DW0 packs mb_width/mb_y/mb_x, DW1 packs the
                 * MB count, the 8x8 transform flag, and a first-command-
                 * of-slice bit.
                 * NOTE(review): mb_x/mb_y get 8 bits each, which assumes
                 * pictures no larger than 256 MBs in either dimension —
                 * verify against the supported MPEG-2 levels. */
                *command_ptr++ = (mb_width << 16 | mb_y << 8 | mb_x);
                *command_ptr++ = ( (number_mb_cmds << 16) | transform_8x8_mode_flag | ((i == 0) << 1));

                i += number_mb_cmds;
            }

            slice_param++;
        }
    }

    /* Terminate the secondary batch. */
    *command_ptr++ = 0;
    *command_ptr++ = MI_BATCH_BUFFER_END;

    dri_bo_unmap(vme_context->vme_batchbuffer.bo);
}
930
/* Program the MPEG-2 VME media pipeline: build the per-macroblock
 * secondary batch, then emit into the primary batch the GPE pipeline
 * state followed by an MI_BATCH_BUFFER_START that chains to it. */
static void
gen7_vme_mpeg2_pipeline_programing(VADriverContextP ctx, 
                                    struct encode_state *encode_state,
                                    int is_intra,
                                    struct intel_encoder_context *encoder_context)
{
    struct gen6_vme_context *vme_context = encoder_context->vme_context;
    struct intel_batchbuffer *batch = encoder_context->base.batch;
    VAEncSequenceParameterBufferMPEG2 *seq_param = (VAEncSequenceParameterBufferMPEG2 *)encode_state->seq_param_ext->buffer;
    int width_in_mbs = ALIGN(seq_param->picture_width, 16) / 16;
    int height_in_mbs = ALIGN(seq_param->picture_height, 16) / 16;

    /* NOTE(review): the INTER shader is always selected here and the
     * is_intra argument is ignored; intra slices never reach this path
     * (see gen7_vme_mpeg2_pipeline). */
    gen7_vme_mpeg2_fill_vme_batchbuffer(ctx, 
                                         encode_state,
                                         width_in_mbs, height_in_mbs,
                                         MPEG2_VME_INTER_SHADER,
                                         0,
                                         encoder_context);

    intel_batchbuffer_start_atomic(batch, 0x1000);
    gen6_gpe_pipeline_setup(ctx, &vme_context->gpe_context, batch);
    BEGIN_BATCH(batch, 2);
    /* NOTE(review): (2 << 6) flag on MI_BATCH_BUFFER_START — confirm
     * against the Gen7 command encoding. */
    OUT_BATCH(batch, MI_BATCH_BUFFER_START | (2 << 6));
    OUT_RELOC(batch,
              vme_context->vme_batchbuffer.bo,
              I915_GEM_DOMAIN_COMMAND, 0, 
              0);
    ADVANCE_BATCH(batch);

    intel_batchbuffer_end_atomic(batch);
}
962
963 static VAStatus
964 gen7_vme_mpeg2_prepare(VADriverContextP ctx, 
965                         struct encode_state *encode_state,
966                         struct intel_encoder_context *encoder_context)
967 {
968     VAStatus vaStatus = VA_STATUS_SUCCESS;
969
970    /*Setup all the memory object*/
971     gen7_vme_mpeg2_surface_setup(ctx, encode_state, 0, encoder_context);
972     gen7_vme_interface_setup(ctx, encode_state, encoder_context);
973     gen7_vme_vme_state_setup(ctx, encode_state, 0, encoder_context);
974     gen7_vme_constant_setup(ctx, encode_state, encoder_context);
975
976     /*Programing media pipeline*/
977     gen7_vme_mpeg2_pipeline_programing(ctx, encode_state, 0, encoder_context);
978
979     return vaStatus;
980 }
981
982 static VAStatus
983 gen7_vme_mpeg2_pipeline(VADriverContextP ctx,
984                          VAProfile profile,
985                          struct encode_state *encode_state,
986                          struct intel_encoder_context *encoder_context)
987 {
988     struct i965_driver_data *i965 = i965_driver_data(ctx);
989     struct gen6_vme_context *vme_context = encoder_context->vme_context;
990     VAEncSliceParameterBufferMPEG2 *slice_param = 
991         (VAEncSliceParameterBufferMPEG2 *)encode_state->slice_params_ext[0]->buffer;
992     VAEncSequenceParameterBufferMPEG2 *seq_param = 
993        (VAEncSequenceParameterBufferMPEG2 *)encode_state->seq_param_ext->buffer;
994  
995     /*No need of to exec VME for Intra slice */
996     if (slice_param->is_intra_slice) {
997          if(!vme_context->vme_output.bo) {
998              int w_in_mbs = ALIGN(seq_param->picture_width, 16) / 16;
999              int h_in_mbs = ALIGN(seq_param->picture_height, 16) / 16;
1000
1001              vme_context->vme_output.num_blocks = w_in_mbs * h_in_mbs;
1002              vme_context->vme_output.pitch = 16; /* in bytes, always 16 */
1003              vme_context->vme_output.size_block = INTRA_VME_OUTPUT_IN_BYTES;
1004              vme_context->vme_output.bo = dri_bo_alloc(i965->intel.bufmgr,
1005                                                        "MPEG2 VME output buffer",
1006                                                        vme_context->vme_output.num_blocks
1007                                                            * vme_context->vme_output.size_block,
1008                                                        0x1000);
1009          }
1010
1011          return VA_STATUS_SUCCESS;
1012     }
1013
1014     gen7_vme_media_init(ctx, encoder_context);
1015     gen7_vme_mpeg2_prepare(ctx, encode_state, encoder_context);
1016     gen7_vme_run(ctx, encode_state, encoder_context);
1017     gen7_vme_stop(ctx, encode_state, encoder_context);
1018
1019     return VA_STATUS_SUCCESS;
1020 }
1021
1022 static void
1023 gen7_vme_context_destroy(void *context)
1024 {
1025     struct gen6_vme_context *vme_context = context;
1026
1027     i965_gpe_context_destroy(&vme_context->gpe_context);
1028
1029     dri_bo_unreference(vme_context->vme_output.bo);
1030     vme_context->vme_output.bo = NULL;
1031
1032     dri_bo_unreference(vme_context->vme_state.bo);
1033     vme_context->vme_state.bo = NULL;
1034
1035     dri_bo_unreference(vme_context->vme_batchbuffer.bo);
1036     vme_context->vme_batchbuffer.bo = NULL;
1037
1038     if (vme_context->vme_state_message) {
1039         free(vme_context->vme_state_message);
1040         vme_context->vme_state_message = NULL;
1041     }
1042
1043     free(vme_context);
1044 }
1045
1046 Bool gen7_vme_context_init(VADriverContextP ctx, struct intel_encoder_context *encoder_context)
1047 {
1048     struct gen6_vme_context *vme_context = calloc(1, sizeof(struct gen6_vme_context));
1049     struct i965_kernel *vme_kernel_list = NULL;
1050
1051     vme_context->gpe_context.surface_state_binding_table.length =
1052               (SURFACE_STATE_PADDED_SIZE + sizeof(unsigned int)) * MAX_MEDIA_SURFACES_GEN6;
1053
1054     vme_context->gpe_context.idrt.max_entries = MAX_INTERFACE_DESC_GEN6;
1055     vme_context->gpe_context.idrt.entry_size = sizeof(struct gen6_interface_descriptor_data);
1056     vme_context->gpe_context.curbe.length = CURBE_TOTAL_DATA_LENGTH;
1057
1058     vme_context->gpe_context.vfe_state.max_num_threads = 60 - 1;
1059     vme_context->gpe_context.vfe_state.num_urb_entries = 16;
1060     vme_context->gpe_context.vfe_state.gpgpu_mode = 0;
1061     vme_context->gpe_context.vfe_state.urb_entry_size = 59 - 1;
1062     vme_context->gpe_context.vfe_state.curbe_allocation_size = CURBE_ALLOCATION_SIZE - 1;
1063
1064     gen7_vme_scoreboard_init(ctx, vme_context);
1065
1066     if(encoder_context->profile == VAProfileH264Baseline ||
1067        encoder_context->profile == VAProfileH264Main     ||
1068        encoder_context->profile == VAProfileH264High ){
1069         vme_kernel_list = gen7_vme_kernels;
1070         vme_context->video_coding_type = VIDEO_CODING_AVC;
1071         vme_context->vme_kernel_sum = AVC_VME_KERNEL_SUM; 
1072         encoder_context->vme_pipeline = gen7_vme_pipeline; 
1073     } else if (encoder_context->profile == VAProfileMPEG2Simple ||
1074                encoder_context->profile == VAProfileMPEG2Main ){
1075         vme_kernel_list = gen7_vme_mpeg2_kernels;
1076         vme_context->video_coding_type = VIDEO_CODING_MPEG2;
1077         vme_context->vme_kernel_sum = MPEG2_VME_KERNEL_SUM;
1078         encoder_context->vme_pipeline = gen7_vme_mpeg2_pipeline;
1079     } else {
1080         /* Unsupported encoding profile */
1081         assert(0);
1082     }
1083
1084     i965_gpe_load_kernels(ctx,
1085                           &vme_context->gpe_context,
1086                           vme_kernel_list,
1087                           vme_context->vme_kernel_sum);
1088
1089     vme_context->vme_surface2_setup = gen7_gpe_surface2_setup;
1090     vme_context->vme_media_rw_surface_setup = gen7_gpe_media_rw_surface_setup;
1091     vme_context->vme_buffer_suface_setup = gen7_gpe_buffer_suface_setup;
1092
1093     encoder_context->vme_context = vme_context;
1094     encoder_context->vme_context_destroy = gen7_vme_context_destroy;
1095     vme_context->vme_state_message = malloc(VME_MSG_LENGTH * sizeof(int));
1096
1097     return True;
1098 }