[profile/ivi/vaapi-intel-driver.git] / src / gen6_vme.c
1 /*
2  * Copyright © 2010-2011 Intel Corporation
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the
6  * "Software"), to deal in the Software without restriction, including
7  * without limitation the rights to use, copy, modify, merge, publish,
8  * distribute, sub license, and/or sell copies of the Software, and to
9  * permit persons to whom the Software is furnished to do so, subject to
10  * the following conditions:
11  *
12  * The above copyright notice and this permission notice (including the
13  * next paragraph) shall be included in all copies or substantial portions
14  * of the Software.
15  *
16  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
17  * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
18  * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
19  * IN NO EVENT SHALL PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR
20  * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
21  * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
22  * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
23  *
24  * Authors:
25  *    Zhou Chang <chang.zhou@intel.com>
26  *
27  */
28
29 #include <stdio.h>
30 #include <stdlib.h>
31 #include <string.h>
32 #include <assert.h>
33
34 #include "intel_batchbuffer.h"
35 #include "intel_driver.h"
36
37 #include "i965_defines.h"
38 #include "i965_drv_video.h"
39 #include "i965_encoder.h"
40 #include "gen6_vme.h"
41 #include "gen6_mfc.h"
42
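/*
 * Layout of the per-context surface state / binding table buffer used by the
 * VME kernels: MAX_MEDIA_SURFACES_GEN6 surface state entries, each padded to
 * the largest Gen6/Gen7 surface state size, followed by the binding table
 * itself (one 32-bit offset per surface).
 */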
43 #define SURFACE_STATE_PADDED_SIZE_0_GEN7        ALIGN(sizeof(struct gen7_surface_state), 32)
44 #define SURFACE_STATE_PADDED_SIZE_1_GEN7        ALIGN(sizeof(struct gen7_surface_state2), 32)
45 #define SURFACE_STATE_PADDED_SIZE_GEN7          MAX(SURFACE_STATE_PADDED_SIZE_0_GEN7, SURFACE_STATE_PADDED_SIZE_1_GEN7)
46
47 #define SURFACE_STATE_PADDED_SIZE_0_GEN6        ALIGN(sizeof(struct i965_surface_state), 32)
48 #define SURFACE_STATE_PADDED_SIZE_1_GEN6        ALIGN(sizeof(struct i965_surface_state2), 32)
49 #define SURFACE_STATE_PADDED_SIZE_GEN6          MAX(SURFACE_STATE_PADDED_SIZE_0_GEN6, SURFACE_STATE_PADDED_SIZE_1_GEN6)
50
51 #define SURFACE_STATE_PADDED_SIZE               MAX(SURFACE_STATE_PADDED_SIZE_GEN6, SURFACE_STATE_PADDED_SIZE_GEN7)
52 #define SURFACE_STATE_OFFSET(index)             (SURFACE_STATE_PADDED_SIZE * index)
53 #define BINDING_TABLE_OFFSET(index)             (SURFACE_STATE_OFFSET(MAX_MEDIA_SURFACES_GEN6) + sizeof(unsigned int) * index)
54
55 #define CURBE_ALLOCATION_SIZE   37              /* in 256-bit units */
56 #define CURBE_TOTAL_DATA_LENGTH (4 * 32)        /* in bytes, it should be less than or equal to CURBE_ALLOCATION_SIZE * 32 */
57 #define CURBE_URB_ENTRY_LENGTH  4               /* in 256-bit units, it should be less than or equal to CURBE_TOTAL_DATA_LENGTH / 32 */
58
59 enum VIDEO_CODING_TYPE{
60     VIDEO_CODING_AVC = 0,
61     VIDEO_CODING_MPEG2,
62     VIDEO_CODING_SUM
63 };
64
65 enum AVC_VME_KERNEL_TYPE{ 
66     AVC_VME_INTRA_SHADER = 0,
67     AVC_VME_INTER_SHADER,
68     AVC_VME_BATCHBUFFER,
69     AVC_VME_KERNEL_SUM
70 };
71
72 enum MPEG2_VME_KERNEL_TYPE{
73     MPEG2_VME_INTER_SHADER = 0,
74     MPEG2_VME_BATCHBUFFER,
75     MPEG2_VME_KERNEL_SUM
76 };
77  
78 static const uint32_t gen6_vme_intra_frame[][4] = {
79 #include "shaders/vme/intra_frame.g6b"
80 };
81
82 static const uint32_t gen6_vme_inter_frame[][4] = {
83 #include "shaders/vme/inter_frame.g6b"
84 };
85
86 static const uint32_t gen6_vme_batchbuffer[][4] = {
87 #include "shaders/vme/batchbuffer.g6b"
88 };
89
90 static struct i965_kernel gen6_vme_kernels[] = {
91     {
92         "AVC VME Intra Frame",
93         AVC_VME_INTRA_SHADER,                   /*index*/
94         gen6_vme_intra_frame,                   
95         sizeof(gen6_vme_intra_frame),           
96         NULL
97     },
98     {
99         "AVC VME Inter Frame",
100         AVC_VME_INTER_SHADER,
101         gen6_vme_inter_frame,
102         sizeof(gen6_vme_inter_frame),
103         NULL
104     },
105     {
106         "AVC VME BATCHBUFFER",
107         AVC_VME_BATCHBUFFER,
108         gen6_vme_batchbuffer,
109         sizeof(gen6_vme_batchbuffer),
110         NULL
111     },
112 };
113
114 static const uint32_t gen7_vme_intra_frame[][4] = {
115 #include "shaders/vme/intra_frame.g7b"
116 };
117
118 static const uint32_t gen7_vme_inter_frame[][4] = {
119 #include "shaders/vme/inter_frame.g7b"
120 };
121
122 static const uint32_t gen7_vme_batchbuffer[][4] = {
123 #include "shaders/vme/batchbuffer.g7b"
124 };
125
126 static struct i965_kernel gen7_vme_kernels[] = {
127     {
128         "AVC VME Intra Frame",
129         AVC_VME_INTRA_SHADER,                   /*index*/
130         gen7_vme_intra_frame,                   
131         sizeof(gen7_vme_intra_frame),           
132         NULL
133     },
134     {
135         "AVC VME Inter Frame",
136         AVC_VME_INTER_SHADER,
137         gen7_vme_inter_frame,
138         sizeof(gen7_vme_inter_frame),
139         NULL
140     },
141     {
142         "AVC VME BATCHBUFFER",
143         AVC_VME_BATCHBUFFER,
144         gen7_vme_batchbuffer,
145         sizeof(gen7_vme_batchbuffer),
146         NULL
147     },
148 };
149
150 static const uint32_t gen7_vme_mpeg2_inter_frame[][4] = {
151 #include "shaders/vme/mpeg2_inter_frame.g7b"
152 };
153
154 static const uint32_t gen7_vme_mpeg2_batchbuffer[][4] = {
155 #include "shaders/vme/batchbuffer.g7b"
156 };
157
158 static struct i965_kernel gen7_vme_mpeg2_kernels[] = {
159     {
160         "MPEG2 VME Inter Frame",
161         MPEG2_VME_INTER_SHADER,
162         gen7_vme_mpeg2_inter_frame,
163         sizeof(gen7_vme_mpeg2_inter_frame),
164         NULL
165     },
166     {
167         "MPEG2 VME BATCHBUFFER",
168         MPEG2_VME_BATCHBUFFER,
169         gen7_vme_mpeg2_batchbuffer,
170         sizeof(gen7_vme_mpeg2_batchbuffer),
171         NULL
172     },
173 };
174
175 /* only used for VME source surface state */
176 static void 
177 gen6_vme_source_surface_state(VADriverContextP ctx,
178                               int index,
179                               struct object_surface *obj_surface,
180                               struct intel_encoder_context *encoder_context)
181 {
182     struct gen6_vme_context *vme_context = encoder_context->vme_context;
183
184     vme_context->vme_surface2_setup(ctx,
185                                     &vme_context->gpe_context,
186                                     obj_surface,
187                                     BINDING_TABLE_OFFSET(index),
188                                     SURFACE_STATE_OFFSET(index));
189 }
190
191 static void
192 gen6_vme_media_source_surface_state(VADriverContextP ctx,
193                                     int index,
194                                     struct object_surface *obj_surface,
195                                     struct intel_encoder_context *encoder_context)
196 {
197     struct gen6_vme_context *vme_context = encoder_context->vme_context;
198
199     vme_context->vme_media_rw_surface_setup(ctx,
200                                             &vme_context->gpe_context,
201                                             obj_surface,
202                                             BINDING_TABLE_OFFSET(index),
203                                             SURFACE_STATE_OFFSET(index));
204 }
205
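/*
 * Allocate the VME output buffer (one fixed-size block per macroblock, sized
 * for intra or inter output) and bind it as a buffer surface at the given
 * binding table index.
 */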
206 static void
207 gen6_vme_output_buffer_setup(VADriverContextP ctx,
208                              struct encode_state *encode_state,
209                              int index,
210                              struct intel_encoder_context *encoder_context)
211
212 {
213     struct i965_driver_data *i965 = i965_driver_data(ctx);
214     struct gen6_vme_context *vme_context = encoder_context->vme_context;
215     VAEncSequenceParameterBufferH264 *pSequenceParameter = (VAEncSequenceParameterBufferH264 *)encode_state->seq_param_ext->buffer;
216     VAEncSliceParameterBufferH264 *pSliceParameter = (VAEncSliceParameterBufferH264 *)encode_state->slice_params_ext[0]->buffer;
217     int is_intra = pSliceParameter->slice_type == SLICE_TYPE_I;
218     int width_in_mbs = pSequenceParameter->picture_width_in_mbs;
219     int height_in_mbs = pSequenceParameter->picture_height_in_mbs;
220
221     vme_context->vme_output.num_blocks = width_in_mbs * height_in_mbs;
222     vme_context->vme_output.pitch = 16; /* in bytes, always 16 */
223
224     if (is_intra)
225         vme_context->vme_output.size_block = INTRA_VME_OUTPUT_IN_BYTES;
226     else
227         vme_context->vme_output.size_block = INTER_VME_OUTPUT_IN_BYTES;
228
229     vme_context->vme_output.bo = dri_bo_alloc(i965->intel.bufmgr, 
230                                               "VME output buffer",
231                                               vme_context->vme_output.num_blocks * vme_context->vme_output.size_block,
232                                               0x1000);
233     assert(vme_context->vme_output.bo);
234     vme_context->vme_buffer_suface_setup(ctx,
235                                          &vme_context->gpe_context,
236                                          &vme_context->vme_output,
237                                          BINDING_TABLE_OFFSET(index),
238                                          SURFACE_STATE_OFFSET(index));
239 }
240
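/*
 * Allocate the second-level batch buffer that gen6_vme_fill_vme_batchbuffer()
 * later fills with MEDIA_OBJECT commands, and bind it as a buffer surface.
 */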
241 static void
242 gen6_vme_output_vme_batchbuffer_setup(VADriverContextP ctx,
243                                       struct encode_state *encode_state,
244                                       int index,
245                                       struct intel_encoder_context *encoder_context)
246
247 {
248     struct i965_driver_data *i965 = i965_driver_data(ctx);
249     struct gen6_vme_context *vme_context = encoder_context->vme_context;
250     VAEncSequenceParameterBufferH264 *pSequenceParameter = (VAEncSequenceParameterBufferH264 *)encode_state->seq_param_ext->buffer;
251     int width_in_mbs = pSequenceParameter->picture_width_in_mbs;
252     int height_in_mbs = pSequenceParameter->picture_height_in_mbs;
253
254     vme_context->vme_batchbuffer.num_blocks = width_in_mbs * height_in_mbs + 1;
255     vme_context->vme_batchbuffer.size_block = 32; /* 2 OWORDs */
256     vme_context->vme_batchbuffer.pitch = 16;
257     vme_context->vme_batchbuffer.bo = dri_bo_alloc(i965->intel.bufmgr, 
258                                                    "VME batchbuffer",
259                                                    vme_context->vme_batchbuffer.num_blocks * vme_context->vme_batchbuffer.size_block,
260                                                    0x1000);
261     vme_context->vme_buffer_suface_setup(ctx,
262                                          &vme_context->gpe_context,
263                                          &vme_context->vme_batchbuffer,
264                                          BINDING_TABLE_OFFSET(index),
265                                          SURFACE_STATE_OFFSET(index));
266 }
267
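/*
 * Binding table layout used by the AVC VME kernels:
 *   0: current input picture (VME surface)
 *   1: reference 0 (VME surface)
 *   2: reference 1 (VME surface)
 *   3: VME output buffer
 *   4: current input picture (media read/write surface)
 *   5: second-level VME batch buffer
 */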
268 static VAStatus
269 gen6_vme_surface_setup(VADriverContextP ctx, 
270                        struct encode_state *encode_state,
271                        int is_intra,
272                        struct intel_encoder_context *encoder_context)
273 {
274     struct i965_driver_data *i965 = i965_driver_data(ctx);
275     struct object_surface *obj_surface;
276     VAEncPictureParameterBufferH264 *pPicParameter = (VAEncPictureParameterBufferH264 *)encode_state->pic_param_ext->buffer;
277
278     /* Set up the surface states */
279     /* current picture for encoding */
280     obj_surface = SURFACE(encoder_context->input_yuv_surface);
281     assert(obj_surface);
282     gen6_vme_source_surface_state(ctx, 0, obj_surface, encoder_context);
283     gen6_vme_media_source_surface_state(ctx, 4, obj_surface, encoder_context);
284
285     if (!is_intra) {
286         /* reference 0 */
287         obj_surface = SURFACE(pPicParameter->ReferenceFrames[0].picture_id);
288         assert(obj_surface);
289         if ( obj_surface->bo != NULL)
290             gen6_vme_source_surface_state(ctx, 1, obj_surface, encoder_context);
291
292         /* reference 1 */
293         obj_surface = SURFACE(pPicParameter->ReferenceFrames[1].picture_id);
294         assert(obj_surface);
295         if ( obj_surface->bo != NULL ) 
296             gen6_vme_source_surface_state(ctx, 2, obj_surface, encoder_context);
297     }
298
299     /* VME output */
300     gen6_vme_output_buffer_setup(ctx, encode_state, 3, encoder_context);
301     gen6_vme_output_vme_batchbuffer_setup(ctx, encode_state, 5, encoder_context);
302
303     return VA_STATUS_SUCCESS;
304 }
305
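/*
 * Fill the interface descriptor remap table: one descriptor per VME kernel,
 * pointing at the kernel, the VME state buffer (in the sampler state slot),
 * the shared binding table and the CURBE read length.  Relocations patch in
 * the kernel and VME state buffer addresses.
 */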
306 static VAStatus gen6_vme_interface_setup(VADriverContextP ctx, 
307                                          struct encode_state *encode_state,
308                                          struct intel_encoder_context *encoder_context)
309 {
310     struct gen6_vme_context *vme_context = encoder_context->vme_context;
311     struct gen6_interface_descriptor_data *desc;   
312     int i;
313     dri_bo *bo;
314
315     bo = vme_context->gpe_context.idrt.bo;
316     dri_bo_map(bo, 1);
317     assert(bo->virtual);
318     desc = bo->virtual;
319
320     for (i = 0; i < vme_context->vme_kernel_sum; i++) {
321         struct i965_kernel *kernel;
322         kernel = &vme_context->gpe_context.kernels[i];
323         assert(sizeof(*desc) == 32);
324         /* Set up the descriptor table */
325         memset(desc, 0, sizeof(*desc));
326         desc->desc0.kernel_start_pointer = (kernel->bo->offset >> 6);
327         desc->desc2.sampler_count = 1; /* FIXME: */
328         desc->desc2.sampler_state_pointer = (vme_context->vme_state.bo->offset >> 5);
329         desc->desc3.binding_table_entry_count = 1; /* FIXME: */
330         desc->desc3.binding_table_pointer = (BINDING_TABLE_OFFSET(0) >> 5);
331         desc->desc4.constant_urb_entry_read_offset = 0;
332         desc->desc4.constant_urb_entry_read_length = CURBE_URB_ENTRY_LENGTH;
333                 
334         /*kernel start*/
335         dri_bo_emit_reloc(bo,   
336                           I915_GEM_DOMAIN_INSTRUCTION, 0,
337                           0,
338                           i * sizeof(*desc) + offsetof(struct gen6_interface_descriptor_data, desc0),
339                           kernel->bo);
340         /*Sampler State(VME state pointer)*/
341         dri_bo_emit_reloc(bo,
342                           I915_GEM_DOMAIN_INSTRUCTION, 0,
343                           (1 << 2), /* delta: keep sampler_count = 1 in the relocated desc2 dword */
344                           i * sizeof(*desc) + offsetof(struct gen6_interface_descriptor_data, desc2),
345                           vme_context->vme_state.bo);
346         desc++;
347     }
348     dri_bo_unmap(bo);
349
350     return VA_STATUS_SUCCESS;
351 }
352
353 static VAStatus gen6_vme_constant_setup(VADriverContextP ctx, 
354                                         struct encode_state *encode_state,
355                                         struct intel_encoder_context *encoder_context)
356 {
357     struct gen6_vme_context *vme_context = encoder_context->vme_context;
358     // unsigned char *constant_buffer;
359     unsigned int *vme_state_message;
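    /*
     * Maximum number of motion vectors handed to the VME kernel through the
     * CURBE, reduced for higher H.264 levels (presumably to stay within the
     * level's MV limits).
     */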
360     int mv_num = 32;
361     if (vme_context->h264_level >= 30) {
362         mv_num = 16;
363         if (vme_context->h264_level >= 31)
364             mv_num = 8;
365     }
366
367     dri_bo_map(vme_context->gpe_context.curbe.bo, 1);
368     assert(vme_context->gpe_context.curbe.bo->virtual);
369     // constant_buffer = vme_context->curbe.bo->virtual;
370     vme_state_message = (unsigned int *)vme_context->gpe_context.curbe.bo->virtual;
371     vme_state_message[31] = mv_num;
372         
373     /* TODO: copy buffer into CURBE */
374
375     dri_bo_unmap( vme_context->gpe_context.curbe.bo);
376
377     return VA_STATUS_SUCCESS;
378 }
379
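/*
 * Per-QP intra macroblock mode cost LUT; gen6_vme_state_setup_fixup() copies
 * the entry for the slice QP into dword 16 of the VME state message (cost
 * function LUT set 0 for intra).
 */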
380 static const unsigned int intra_mb_mode_cost_table[] = {
381     0x31110001, // for qp0
382     0x09110001, // for qp1
383     0x15030001, // for qp2
384     0x0b030001, // for qp3
385     0x0d030011, // for qp4
386     0x17210011, // for qp5
387     0x41210011, // for qp6
388     0x19210011, // for qp7
389     0x25050003, // for qp8
390     0x1b130003, // for qp9
391     0x1d130003, // for qp10
392     0x27070021, // for qp11
393     0x51310021, // for qp12
394     0x29090021, // for qp13
395     0x35150005, // for qp14
396     0x2b0b0013, // for qp15
397     0x2d0d0013, // for qp16
398     0x37170007, // for qp17
399     0x61410031, // for qp18
400     0x39190009, // for qp19
401     0x45250015, // for qp20
402     0x3b1b000b, // for qp21
403     0x3d1d000d, // for qp22
404     0x47270017, // for qp23
405     0x71510041, // for qp24 ! center for qp=0..30
406     0x49290019, // for qp25
407     0x55350025, // for qp26
408     0x4b2b001b, // for qp27
409     0x4d2d001d, // for qp28
410     0x57370027, // for qp29
411     0x81610051, // for qp30
412     0x57270017, // for qp31
413     0x81510041, // for qp32 ! center for qp=31..51
414     0x59290019, // for qp33
415     0x65350025, // for qp34
416     0x5b2b001b, // for qp35
417     0x5d2d001d, // for qp36
418     0x67370027, // for qp37
419     0x91610051, // for qp38
420     0x69390029, // for qp39
421     0x75450035, // for qp40
422     0x6b3b002b, // for qp41
423     0x6d3d002d, // for qp42
424     0x77470037, // for qp43
425     0xa1710061, // for qp44
426     0x79490039, // for qp45
427     0x85550045, // for qp46
428     0x7b4b003b, // for qp47
429     0x7d4d003d, // for qp48
430     0x87570047, // for qp49
431     0xb1810071, // for qp50
432     0x89590049  // for qp51
433 };
434
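/*
 * For I/SI slices, patch the intra cost LUT entry of the VME state message:
 * under CQP the QP comes from the picture/slice parameters, otherwise from
 * the bit rate control context.
 */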
435 static void gen6_vme_state_setup_fixup(VADriverContextP ctx,
436                                        struct encode_state *encode_state,
437                                        struct intel_encoder_context *encoder_context,
438                                        unsigned int *vme_state_message)
439 {
440     struct gen6_mfc_context *mfc_context = encoder_context->mfc_context;
441     VAEncPictureParameterBufferH264 *pic_param = (VAEncPictureParameterBufferH264 *)encode_state->pic_param_ext->buffer;
442     VAEncSliceParameterBufferH264 *slice_param = (VAEncSliceParameterBufferH264 *)encode_state->slice_params_ext[0]->buffer;
443
444     if (slice_param->slice_type != SLICE_TYPE_I &&
445         slice_param->slice_type != SLICE_TYPE_SI)
446         return;
447     if (encoder_context->rate_control_mode == VA_RC_CQP)
448         vme_state_message[16] = intra_mb_mode_cost_table[pic_param->pic_init_qp + slice_param->slice_qp_delta];
449     else
450         vme_state_message[16] = intra_mb_mode_cost_table[mfc_context->bit_rate_control_context[slice_param->slice_type].QpPrimeY];
451 }
452
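/*
 * Fill the VME state buffer that reaches the kernels through the sampler
 * state pointer of the interface descriptor; the dwords below appear to hold
 * the search path and the mode/MV cost LUTs.
 */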
453 static VAStatus gen6_vme_vme_state_setup(VADriverContextP ctx,
454                                          struct encode_state *encode_state,
455                                          int is_intra,
456                                          struct intel_encoder_context *encoder_context)
457 {
458     struct gen6_vme_context *vme_context = encoder_context->vme_context;
459     unsigned int *vme_state_message;
460     int i;
461         
462     //building VME state message
463     dri_bo_map(vme_context->vme_state.bo, 1);
464     assert(vme_context->vme_state.bo->virtual);
465     vme_state_message = (unsigned int *)vme_context->vme_state.bo->virtual;
466
467     vme_state_message[0] = 0x01010101;
468     vme_state_message[1] = 0x10010101;
469     vme_state_message[2] = 0x0F0F0F0F;
470     vme_state_message[3] = 0x100F0F0F;
471     vme_state_message[4] = 0x01010101;
472     vme_state_message[5] = 0x10010101;
473     vme_state_message[6] = 0x0F0F0F0F;
474     vme_state_message[7] = 0x100F0F0F;
475     vme_state_message[8] = 0x01010101;
476     vme_state_message[9] = 0x10010101;
477     vme_state_message[10] = 0x0F0F0F0F;
478     vme_state_message[11] = 0x000F0F0F;
479     vme_state_message[12] = 0x00;
480     vme_state_message[13] = 0x00;
481
482     vme_state_message[14] = 0x4a4a;
483     vme_state_message[15] = 0x0;
484     vme_state_message[16] = 0x4a4a4a4a;
485     vme_state_message[17] = 0x4a4a4a4a;
486     vme_state_message[18] = 0x21110100;
487     vme_state_message[19] = 0x61514131;
488
489     for(i = 20; i < 32; i++) {
490         vme_state_message[i] = 0;
491     }
492     //vme_state_message[16] = 0x42424242;                       //cost function LUT set 0 for Intra
493
494     gen6_vme_state_setup_fixup(ctx, encode_state, encoder_context, vme_state_message);
495
496     dri_bo_unmap( vme_context->vme_state.bo);
497     return VA_STATUS_SUCCESS;
498 }
499
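/*
 * CPU-fill the second-level batch: for every slice, emit one MEDIA_OBJECT
 * command per run of macroblocks (one picture-width row at the slice start,
 * then chunks of up to 128 MBs), passing the MB position, the run length and
 * a slice-edge flag as inline data, and terminate with MI_BATCH_BUFFER_END.
 */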
500 static void
501 gen6_vme_fill_vme_batchbuffer(VADriverContextP ctx, 
502                               struct encode_state *encode_state,
503                               int mb_width, int mb_height,
504                               int kernel,
505                               int transform_8x8_mode_flag,
506                               struct intel_encoder_context *encoder_context)
507 {
508     struct gen6_vme_context *vme_context = encoder_context->vme_context;
509     int number_mb_cmds;
510     int mb_x = 0, mb_y = 0;
511     int i, s;
512     unsigned int *command_ptr;
513
514     dri_bo_map(vme_context->vme_batchbuffer.bo, 1);
515     command_ptr = vme_context->vme_batchbuffer.bo->virtual;
516
517     for (s = 0; s < encode_state->num_slice_params_ext; s++) {
518         VAEncSliceParameterBufferH264 *pSliceParameter = (VAEncSliceParameterBufferH264 *)encode_state->slice_params_ext[s]->buffer; 
519         int slice_mb_begin = pSliceParameter->macroblock_address;
520         int slice_mb_number = pSliceParameter->num_macroblocks;
521         
522         for (i = 0; i < slice_mb_number;  ) {
523             int mb_count = i + slice_mb_begin;    
524             mb_x = mb_count % mb_width;
525             mb_y = mb_count / mb_width;
526             if( i == 0 ) {
527                 number_mb_cmds = mb_width;          // we must mark the slice edge. 
528             } else if ( (i + 128 ) <= slice_mb_number) {
529                 number_mb_cmds = 128;
530             } else {
531                 number_mb_cmds = slice_mb_number - i;
532             }
533
534             *command_ptr++ = (CMD_MEDIA_OBJECT | (8 - 2));
535             *command_ptr++ = kernel;
536             *command_ptr++ = 0;
537             *command_ptr++ = 0;
538             *command_ptr++ = 0;
539             *command_ptr++ = 0;
540    
541             /*inline data */
542             *command_ptr++ = (mb_width << 16 | mb_y << 8 | mb_x);
543             *command_ptr++ = (number_mb_cmds << 16 | transform_8x8_mode_flag | ((i==0) << 1));
544
545             i += number_mb_cmds;
546         } 
547     }
548
549     *command_ptr++ = 0;
550     *command_ptr++ = MI_BATCH_BUFFER_END;
551
552     dri_bo_unmap(vme_context->vme_batchbuffer.bo);
553 }
554
555 static void gen6_vme_media_init(VADriverContextP ctx, struct intel_encoder_context *encoder_context)
556 {
557     struct i965_driver_data *i965 = i965_driver_data(ctx);
558     struct gen6_vme_context *vme_context = encoder_context->vme_context;
559     dri_bo *bo;
560
561     i965_gpe_context_init(ctx, &vme_context->gpe_context);
562
563     /* VME output buffer */
564     dri_bo_unreference(vme_context->vme_output.bo);
565     vme_context->vme_output.bo = NULL;
566
567     dri_bo_unreference(vme_context->vme_batchbuffer.bo);
568     vme_context->vme_batchbuffer.bo = NULL;
569
570     /* VME state */
571     dri_bo_unreference(vme_context->vme_state.bo);
572     bo = dri_bo_alloc(i965->intel.bufmgr,
573                       "Buffer",
574                       1024*16, 64);
575     assert(bo);
576     vme_context->vme_state.bo = bo;
577 }
578
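/*
 * Build the second-level batch for the intra or inter AVC kernel, then emit
 * the GPE pipeline setup and chain to that batch with MI_BATCH_BUFFER_START.
 */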
579 static void gen6_vme_pipeline_programing(VADriverContextP ctx, 
580                                          struct encode_state *encode_state,
581                                          struct intel_encoder_context *encoder_context)
582 {
583     struct gen6_vme_context *vme_context = encoder_context->vme_context;
584     struct intel_batchbuffer *batch = encoder_context->base.batch;
585     VAEncPictureParameterBufferH264 *pPicParameter = (VAEncPictureParameterBufferH264 *)encode_state->pic_param_ext->buffer;
586     VAEncSliceParameterBufferH264 *pSliceParameter = (VAEncSliceParameterBufferH264 *)encode_state->slice_params_ext[0]->buffer;
587     VAEncSequenceParameterBufferH264 *pSequenceParameter = (VAEncSequenceParameterBufferH264 *)encode_state->seq_param_ext->buffer;
588     int is_intra = pSliceParameter->slice_type == SLICE_TYPE_I;
589     int width_in_mbs = pSequenceParameter->picture_width_in_mbs;
590     int height_in_mbs = pSequenceParameter->picture_height_in_mbs;
591
592     gen6_vme_fill_vme_batchbuffer(ctx, 
593                                   encode_state,
594                                   width_in_mbs, height_in_mbs,
595                                   is_intra ? AVC_VME_INTRA_SHADER : AVC_VME_INTER_SHADER, 
596                                   pPicParameter->pic_fields.bits.transform_8x8_mode_flag,
597                                   encoder_context);
598
599     intel_batchbuffer_start_atomic(batch, 0x1000);
600     gen6_gpe_pipeline_setup(ctx, &vme_context->gpe_context, batch);
601     BEGIN_BATCH(batch, 2);
602     OUT_BATCH(batch, MI_BATCH_BUFFER_START | (2 << 6));
603     OUT_RELOC(batch,
604               vme_context->vme_batchbuffer.bo,
605               I915_GEM_DOMAIN_COMMAND, 0, 
606               0);
607     ADVANCE_BATCH(batch);
608
609     intel_batchbuffer_end_atomic(batch);        
610 }
611
612 static VAStatus gen6_vme_prepare(VADriverContextP ctx, 
613                                  struct encode_state *encode_state,
614                                  struct intel_encoder_context *encoder_context)
615 {
616     VAStatus vaStatus = VA_STATUS_SUCCESS;
617     VAEncSliceParameterBufferH264 *pSliceParameter = (VAEncSliceParameterBufferH264 *)encode_state->slice_params_ext[0]->buffer;
618     int is_intra = pSliceParameter->slice_type == SLICE_TYPE_I;
619     VAEncSequenceParameterBufferH264 *pSequenceParameter = (VAEncSequenceParameterBufferH264 *)encode_state->seq_param_ext->buffer;
620     struct gen6_vme_context *vme_context = encoder_context->vme_context;
621
622     if (!vme_context->h264_level ||
623         (vme_context->h264_level != pSequenceParameter->level_idc)) {
624         vme_context->h264_level = pSequenceParameter->level_idc;
625     }
626     /* Set up all the memory objects */
627     gen6_vme_surface_setup(ctx, encode_state, is_intra, encoder_context);
628     gen6_vme_interface_setup(ctx, encode_state, encoder_context);
629     gen6_vme_constant_setup(ctx, encode_state, encoder_context);
630     gen6_vme_vme_state_setup(ctx, encode_state, is_intra, encoder_context);
631
632     /* Program the media pipeline */
633     gen6_vme_pipeline_programing(ctx, encode_state, encoder_context);
634
635     return vaStatus;
636 }
637
638 static VAStatus gen6_vme_run(VADriverContextP ctx, 
639                              struct encode_state *encode_state,
640                              struct intel_encoder_context *encoder_context)
641 {
642     struct intel_batchbuffer *batch = encoder_context->base.batch;
643
644     intel_batchbuffer_flush(batch);
645
646     return VA_STATUS_SUCCESS;
647 }
648
649 static VAStatus gen6_vme_stop(VADriverContextP ctx, 
650                               struct encode_state *encode_state,
651                               struct intel_encoder_context *encoder_context)
652 {
653     return VA_STATUS_SUCCESS;
654 }
655
656 static VAStatus
657 gen6_vme_pipeline(VADriverContextP ctx,
658                   VAProfile profile,
659                   struct encode_state *encode_state,
660                   struct intel_encoder_context *encoder_context)
661 {
662     gen6_vme_media_init(ctx, encoder_context);
663     gen6_vme_prepare(ctx, encode_state, encoder_context);
664     gen6_vme_run(ctx, encode_state, encoder_context);
665     gen6_vme_stop(ctx, encode_state, encoder_context);
666
667     return VA_STATUS_SUCCESS;
668 }
669
670 static void
671 gen6_vme_mpeg2_output_buffer_setup(VADriverContextP ctx,
672                                     struct encode_state *encode_state,
673                                     int index,
674                                     int is_intra,
675                                     struct intel_encoder_context *encoder_context)
676
677 {
678     struct i965_driver_data *i965 = i965_driver_data(ctx);
679     struct gen6_vme_context *vme_context = encoder_context->vme_context;
680     VAEncSequenceParameterBufferMPEG2 *seq_param = (VAEncSequenceParameterBufferMPEG2 *)encode_state->seq_param_ext->buffer;
681     int width_in_mbs = ALIGN(seq_param->picture_width, 16) / 16;
682     int height_in_mbs = ALIGN(seq_param->picture_height, 16) / 16;
683
684     vme_context->vme_output.num_blocks = width_in_mbs * height_in_mbs;
685     vme_context->vme_output.pitch = 16; /* in bytes, always 16 */
686
687     if (is_intra)
688         vme_context->vme_output.size_block = INTRA_VME_OUTPUT_IN_BYTES;
689     else
690         vme_context->vme_output.size_block = INTER_VME_OUTPUT_IN_BYTES;
691
692     vme_context->vme_output.bo = dri_bo_alloc(i965->intel.bufmgr,
693                                               "VME output buffer",
694                                               vme_context->vme_output.num_blocks * vme_context->vme_output.size_block,
695                                               0x1000);
696     assert(vme_context->vme_output.bo);
697     vme_context->vme_buffer_suface_setup(ctx,
698                                          &vme_context->gpe_context,
699                                          &vme_context->vme_output,
700                                          BINDING_TABLE_OFFSET(index),
701                                          SURFACE_STATE_OFFSET(index));
702 }
703
704 static void
705 gen6_vme_mpeg2_output_vme_batchbuffer_setup(VADriverContextP ctx,
706                                              struct encode_state *encode_state,
707                                              int index,
708                                              struct intel_encoder_context *encoder_context)
709
710 {
711     struct i965_driver_data *i965 = i965_driver_data(ctx);
712     struct gen6_vme_context *vme_context = encoder_context->vme_context;
713     VAEncSequenceParameterBufferMPEG2 *seq_param = (VAEncSequenceParameterBufferMPEG2 *)encode_state->seq_param_ext->buffer;
714     int width_in_mbs = ALIGN(seq_param->picture_width, 16) / 16;
715     int height_in_mbs = ALIGN(seq_param->picture_height, 16) / 16;
716
717     vme_context->vme_batchbuffer.num_blocks = width_in_mbs * height_in_mbs + 1;
718     vme_context->vme_batchbuffer.size_block = 32; /* 2 OWORDs */
719     vme_context->vme_batchbuffer.pitch = 16;
720     vme_context->vme_batchbuffer.bo = dri_bo_alloc(i965->intel.bufmgr, 
721                                                    "VME batchbuffer",
722                                                    vme_context->vme_batchbuffer.num_blocks * vme_context->vme_batchbuffer.size_block,
723                                                    0x1000);
724     vme_context->vme_buffer_suface_setup(ctx,
725                                          &vme_context->gpe_context,
726                                          &vme_context->vme_batchbuffer,
727                                          BINDING_TABLE_OFFSET(index),
728                                          SURFACE_STATE_OFFSET(index));
729 }
730
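/*
 * MPEG-2 variant of the surface setup: same binding table layout as AVC,
 * with the forward and backward reference pictures in slots 1 and 2.
 */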
731 static VAStatus
732 gen6_vme_mpeg2_surface_setup(VADriverContextP ctx, 
733                               struct encode_state *encode_state,
734                               int is_intra,
735                               struct intel_encoder_context *encoder_context)
736 {
737     struct i965_driver_data *i965 = i965_driver_data(ctx);
738     struct object_surface *obj_surface;
739     VAEncPictureParameterBufferMPEG2 *pic_param = (VAEncPictureParameterBufferMPEG2 *)encode_state->pic_param_ext->buffer;
740
741     /* Set up the surface states */
742     /* current picture for encoding */
743     obj_surface = SURFACE(encoder_context->input_yuv_surface);
744     assert(obj_surface);
745     gen6_vme_source_surface_state(ctx, 0, obj_surface, encoder_context);
746     gen6_vme_media_source_surface_state(ctx, 4, obj_surface, encoder_context);
747
748     if (!is_intra) {
749         /* reference 0 */
750         obj_surface = SURFACE(pic_param->forward_reference_picture);
751         assert(obj_surface);
752         if ( obj_surface->bo != NULL)
753             gen6_vme_source_surface_state(ctx, 1, obj_surface, encoder_context);
754
755         /* reference 1 */
756         obj_surface = SURFACE(pic_param->backward_reference_picture);
757         if (obj_surface && obj_surface->bo != NULL) 
758             gen6_vme_source_surface_state(ctx, 2, obj_surface, encoder_context);
759     }
760
761     /* VME output */
762     gen6_vme_mpeg2_output_buffer_setup(ctx, encode_state, 3, is_intra, encoder_context);
763     gen6_vme_mpeg2_output_vme_batchbuffer_setup(ctx, encode_state, 5, encoder_context);
764
765     return VA_STATUS_SUCCESS;
766 }
767
768 static void
769 gen6_vme_mpeg2_fill_vme_batchbuffer(VADriverContextP ctx,
770                                      struct encode_state *encode_state,
771                                      int mb_width, int mb_height,
772                                      int kernel,
773                                      int transform_8x8_mode_flag,
774                                      struct intel_encoder_context *encoder_context)
775 {
776     struct gen6_vme_context *vme_context = encoder_context->vme_context;
777     int number_mb_cmds;
778     int mb_x = 0, mb_y = 0;
779     int i, s, j;
780     unsigned int *command_ptr;
781
782     dri_bo_map(vme_context->vme_batchbuffer.bo, 1);
783     command_ptr = vme_context->vme_batchbuffer.bo->virtual;
784
785     for (s = 0; s < encode_state->num_slice_params_ext; s++) {
786         VAEncSliceParameterBufferMPEG2 *slice_param = (VAEncSliceParameterBufferMPEG2 *)encode_state->slice_params_ext[s]->buffer;
787
788         for (j = 0; j < encode_state->slice_params_ext[s]->num_elements; j++) {
789             int slice_mb_begin = slice_param->macroblock_address;
790             int slice_mb_number = slice_param->num_macroblocks;
791
792             for (i = 0; i < slice_mb_number;) {
793                 int mb_count = i + slice_mb_begin;
794
795                 mb_x = mb_count % mb_width;
796                 mb_y = mb_count / mb_width;
797
798                 if( i == 0) {
799                     number_mb_cmds = mb_width;
800                 } else if ((i + 128) <= slice_mb_number) {
801                     number_mb_cmds = 128;
802                 } else {
803                     number_mb_cmds = slice_mb_number - i;
804                 }
805
806                 *command_ptr++ = (CMD_MEDIA_OBJECT | (8 - 2));
807                 *command_ptr++ = kernel;
808                 *command_ptr++ = 0;
809                 *command_ptr++ = 0;
810                 *command_ptr++ = 0;
811                 *command_ptr++ = 0;
812  
813                 /*inline data */
814                 *command_ptr++ = (mb_width << 16 | mb_y << 8 | mb_x);
815                 *command_ptr++ = ( (number_mb_cmds << 16) | transform_8x8_mode_flag | ((i == 0) << 1));
816
817                 i += number_mb_cmds;
818             }
819
820             slice_param++;
821         }
822     }
823
824     *command_ptr++ = 0;
825     *command_ptr++ = MI_BATCH_BUFFER_END;
826
827     dri_bo_unmap(vme_context->vme_batchbuffer.bo);
828 }
829
830 static void
831 gen6_vme_mpeg2_pipeline_programing(VADriverContextP ctx, 
832                                     struct encode_state *encode_state,
833                                     int is_intra,
834                                     struct intel_encoder_context *encoder_context)
835 {
836     struct gen6_vme_context *vme_context = encoder_context->vme_context;
837     struct intel_batchbuffer *batch = encoder_context->base.batch;
838     VAEncSequenceParameterBufferMPEG2 *seq_param = (VAEncSequenceParameterBufferMPEG2 *)encode_state->seq_param_ext->buffer;
839     int width_in_mbs = ALIGN(seq_param->picture_width, 16) / 16;
840     int height_in_mbs = ALIGN(seq_param->picture_height, 16) / 16;
841
842     gen6_vme_mpeg2_fill_vme_batchbuffer(ctx, 
843                                          encode_state,
844                                          width_in_mbs, height_in_mbs,
845                                          MPEG2_VME_INTER_SHADER,
846                                          0,
847                                          encoder_context);
848
849     intel_batchbuffer_start_atomic(batch, 0x1000);
850     gen6_gpe_pipeline_setup(ctx, &vme_context->gpe_context, batch);
851     BEGIN_BATCH(batch, 2);
852     OUT_BATCH(batch, MI_BATCH_BUFFER_START | (2 << 6));
853     OUT_RELOC(batch,
854               vme_context->vme_batchbuffer.bo,
855               I915_GEM_DOMAIN_COMMAND, 0, 
856               0);
857     ADVANCE_BATCH(batch);
858
859     intel_batchbuffer_end_atomic(batch);
860 }
861
862 static VAStatus
863 gen6_vme_mpeg2_prepare(VADriverContextP ctx, 
864                         struct encode_state *encode_state,
865                         struct intel_encoder_context *encoder_context)
866 {
867     VAStatus vaStatus = VA_STATUS_SUCCESS;
868     VAEncSliceParameterBufferMPEG2 *slice_param = (VAEncSliceParameterBufferMPEG2 *)encode_state->slice_params_ext[0]->buffer;
869
870     /* No need of VME for an intra slice */
871     if (slice_param->is_intra_slice) {
872         return vaStatus;
873     }
874
875     /* Set up all the memory objects */
876     gen6_vme_mpeg2_surface_setup(ctx, encode_state, slice_param->is_intra_slice, encoder_context);
877     gen6_vme_interface_setup(ctx, encode_state, encoder_context);
878     gen6_vme_vme_state_setup(ctx, encode_state, slice_param->is_intra_slice, encoder_context);
879     gen6_vme_constant_setup(ctx, encode_state, encoder_context);
880
881     /* Program the media pipeline */
882     gen6_vme_mpeg2_pipeline_programing(ctx, encode_state, slice_param->is_intra_slice, encoder_context);
883
884     return vaStatus;
885 }
886
887 static VAStatus
888 gen6_vme_mpeg2_pipeline(VADriverContextP ctx,
889                          VAProfile profile,
890                          struct encode_state *encode_state,
891                          struct intel_encoder_context *encoder_context)
892 {
893     gen6_vme_media_init(ctx, encoder_context);
894     gen6_vme_mpeg2_prepare(ctx, encode_state, encoder_context);
895     gen6_vme_run(ctx, encode_state, encoder_context);
896     gen6_vme_stop(ctx, encode_state, encoder_context);
897
898     return VA_STATUS_SUCCESS;
899 }
900
901 static void
902 gen6_vme_context_destroy(void *context)
903 {
904     struct gen6_vme_context *vme_context = context;
905
906     i965_gpe_context_destroy(&vme_context->gpe_context);
907
908     dri_bo_unreference(vme_context->vme_output.bo);
909     vme_context->vme_output.bo = NULL;
910
911     dri_bo_unreference(vme_context->vme_state.bo);
912     vme_context->vme_state.bo = NULL;
913
914     dri_bo_unreference(vme_context->vme_batchbuffer.bo);
915     vme_context->vme_batchbuffer.bo = NULL;
916
917     free(vme_context);
918 }
919
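/*
 * Create the VME context: size the GPE state (binding table, interface
 * descriptors, CURBE, VFE), pick the AVC or MPEG-2 kernel set from the
 * encoding profile, and select Gen6 or Gen7 kernel binaries and surface
 * setup callbacks from the device.  Only AVC has Gen6 VME kernels here;
 * MPEG-2 VME is Gen7 only.
 */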
920 Bool gen6_vme_context_init(VADriverContextP ctx, struct intel_encoder_context *encoder_context)
921 {
922     struct i965_driver_data *i965 = i965_driver_data(ctx);
923     struct gen6_vme_context *vme_context = calloc(1, sizeof(struct gen6_vme_context));
924
925     vme_context->gpe_context.surface_state_binding_table.length =
926               (SURFACE_STATE_PADDED_SIZE + sizeof(unsigned int)) * MAX_MEDIA_SURFACES_GEN6;
927
928     vme_context->gpe_context.idrt.max_entries = MAX_INTERFACE_DESC_GEN6;
929     vme_context->gpe_context.idrt.entry_size = sizeof(struct gen6_interface_descriptor_data);
930     vme_context->gpe_context.curbe.length = CURBE_TOTAL_DATA_LENGTH;
931
932     vme_context->gpe_context.vfe_state.max_num_threads = 60 - 1;
933     vme_context->gpe_context.vfe_state.num_urb_entries = 16;
934     vme_context->gpe_context.vfe_state.gpgpu_mode = 0;
935     vme_context->gpe_context.vfe_state.urb_entry_size = 59 - 1;
936     vme_context->gpe_context.vfe_state.curbe_allocation_size = CURBE_ALLOCATION_SIZE - 1;
937
938     if(encoder_context->profile == VAProfileH264Baseline ||
939        encoder_context->profile == VAProfileH264Main     ||
940        encoder_context->profile == VAProfileH264High ){
941        vme_context->video_coding_type = VIDEO_CODING_AVC;
942        vme_context->vme_kernel_sum = AVC_VME_KERNEL_SUM; 
943  
944     } else if (encoder_context->profile == VAProfileMPEG2Simple ||
945                encoder_context->profile == VAProfileMPEG2Main ){
946        vme_context->video_coding_type = VIDEO_CODING_MPEG2;
947        vme_context->vme_kernel_sum = MPEG2_VME_KERNEL_SUM; 
948     } else {
949         /* Unsupported encoding profile */
950         assert(0);
951     }
952
953     if (IS_GEN7(i965->intel.device_id)) {
954         if (vme_context->video_coding_type == VIDEO_CODING_AVC) {
955               i965_gpe_load_kernels(ctx,
956                                     &vme_context->gpe_context,
957                                     gen7_vme_kernels,
958                                     vme_context->vme_kernel_sum);
959               encoder_context->vme_pipeline = gen6_vme_pipeline;
960  
961         } else {
962               i965_gpe_load_kernels(ctx,
963                                     &vme_context->gpe_context,
964                                     gen7_vme_mpeg2_kernels,
965                                     vme_context->vme_kernel_sum);
966               encoder_context->vme_pipeline = gen6_vme_mpeg2_pipeline;
967  
968         }
969
970         vme_context->vme_surface2_setup = gen7_gpe_surface2_setup;
971         vme_context->vme_media_rw_surface_setup = gen7_gpe_media_rw_surface_setup;
972         vme_context->vme_buffer_suface_setup = gen7_gpe_buffer_suface_setup;
973     } else {
974         if (vme_context->video_coding_type == VIDEO_CODING_AVC) {
975             i965_gpe_load_kernels(ctx,
976                                   &vme_context->gpe_context,
977                                   gen6_vme_kernels,
978                                   vme_context->vme_kernel_sum);
979
980             encoder_context->vme_pipeline = gen6_vme_pipeline;
981         } else {
982             /* Unsupported encoding profile for gen6 */
983             assert(0);
984         }
985
986         vme_context->vme_surface2_setup = i965_gpe_surface2_setup;
987         vme_context->vme_media_rw_surface_setup = i965_gpe_media_rw_surface_setup;
988         vme_context->vme_buffer_suface_setup = i965_gpe_buffer_suface_setup;
989     }
990
991     encoder_context->vme_context = vme_context;
992     encoder_context->vme_context_destroy = gen6_vme_context_destroy;
993
994     return True;
995 }