i965_drv_video: handle the VPP (video post-processing) pipeline
File: src/i965_post_processing.c (repository: profile/ivi/vaapi-intel-driver.git)
1 /*
2  * Copyright © 2010 Intel Corporation
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the
6  * "Software"), to deal in the Software without restriction, including
7  * without limitation the rights to use, copy, modify, merge, publish,
8  * distribute, sub license, and/or sell copies of the Software, and to
9  * permit persons to whom the Software is furnished to do so, subject to
10  * the following conditions:
11  *
12  * The above copyright notice and this permission notice (including the
13  * next paragraph) shall be included in all copies or substantial portions
14  * of the Software.
15  *
16  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
17  * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
18  * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
19  * IN NO EVENT SHALL PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR
20  * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
21  * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
22  * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
23  *
24  * Authors:
25  *    Xiang Haihao <haihao.xiang@intel.com>
26  *
27  */
28
29 #include <stdio.h>
30 #include <stdlib.h>
31 #include <string.h>
32 #include <assert.h>
33
34 #include <va/va_backend.h>
35
36 #include "intel_batchbuffer.h"
37 #include "intel_driver.h"
38 #include "i965_defines.h"
39 #include "i965_structs.h"
40 #include "i965_drv_video.h"
41 #include "i965_post_processing.h"
42 #include "i965_render.h"
43
/* True when the device generation has a post-processing (VPP) pipeline:
 * Ironlake (Gen5), Sandy Bridge (Gen6) or Ivy Bridge (Gen7). */
#define HAS_PP(ctx) (IS_IRONLAKE((ctx)->intel.device_id) ||     \
                     IS_GEN6((ctx)->intel.device_id) ||         \
                     IS_GEN7((ctx)->intel.device_id))
47
/* Gen5 (Ironlake) post-processing kernel binaries.  Each array is filled
 * from a pre-assembled .g4b.gen5 file generated from the shader sources
 * under shaders/post_processing/. */
static const uint32_t pp_null_gen5[][4] = {
#include "shaders/post_processing/null.g4b.gen5"
};

static const uint32_t pp_nv12_load_save_nv12_gen5[][4] = {
#include "shaders/post_processing/nv12_load_save_nv12.g4b.gen5"
};

static const uint32_t pp_nv12_load_save_pl3_gen5[][4] = {
#include "shaders/post_processing/nv12_load_save_pl3.g4b.gen5"
};

static const uint32_t pp_pl3_load_save_nv12_gen5[][4] = {
#include "shaders/post_processing/pl3_load_save_nv12.g4b.gen5"
};

static const uint32_t pp_pl3_load_save_pl3_gen5[][4] = {
#include "shaders/post_processing/pl3_load_save_pl3.g4b.gen5"
};

static const uint32_t pp_nv12_scaling_gen5[][4] = {
#include "shaders/post_processing/nv12_scaling_nv12.g4b.gen5"
};

static const uint32_t pp_nv12_avs_gen5[][4] = {
#include "shaders/post_processing/nv12_avs_nv12.g4b.gen5"
};

static const uint32_t pp_nv12_dndi_gen5[][4] = {
#include "shaders/post_processing/nv12_dndi_nv12.g4b.gen5"
};
79
/* Forward declarations of the per-module initialization hooks.  Each hook
 * sets up the surface states, the walker-step callbacks and the kernel
 * parameters for one post-processing kernel (see pp_modules_gen5/gen6). */
static void pp_null_initialize(VADriverContextP ctx, struct i965_post_processing_context *pp_context,
                               const struct i965_surface *src_surface,
                               const VARectangle *src_rect,
                               const struct i965_surface *dst_surface,
                               const VARectangle *dst_rect,
                               void *filter_param);
static void pp_nv12_avs_initialize(VADriverContextP ctx, struct i965_post_processing_context *pp_context,
                                   const struct i965_surface *src_surface,
                                   const VARectangle *src_rect,
                                   const struct i965_surface *dst_surface,
                                   const VARectangle *dst_rect,
                                   void *filter_param);
static void pp_nv12_scaling_initialize(VADriverContextP ctx, struct i965_post_processing_context *pp_context,
                                       const struct i965_surface *src_surface,
                                       const VARectangle *src_rect,
                                       const struct i965_surface *dst_surface,
                                       const VARectangle *dst_rect,
                                       void *filter_param);
static void pp_plx_load_save_plx_initialize(VADriverContextP ctx, struct i965_post_processing_context *pp_context,
                                            const struct i965_surface *src_surface,
                                            const VARectangle *src_rect,
                                            const struct i965_surface *dst_surface,
                                            const VARectangle *dst_rect,
                                            void *filter_param);
static void pp_nv12_dndi_initialize(VADriverContextP ctx, struct i965_post_processing_context *pp_context,
                                    const struct i965_surface *src_surface,
                                    const VARectangle *src_rect,
                                    const struct i965_surface *dst_surface,
                                    const VARectangle *dst_rect,
                                    void *filter_param);
110
111 static struct pp_module pp_modules_gen5[] = {
112     {
113         {
114             "NULL module (for testing)",
115             PP_NULL,
116             pp_null_gen5,
117             sizeof(pp_null_gen5),
118             NULL,
119         },
120
121         pp_null_initialize,
122     },
123
124     {
125         {
126             "NV12_NV12",
127             PP_NV12_LOAD_SAVE_N12,
128             pp_nv12_load_save_nv12_gen5,
129             sizeof(pp_nv12_load_save_nv12_gen5),
130             NULL,
131         },
132
133         pp_plx_load_save_plx_initialize,
134     },
135
136     {
137         {
138             "NV12_PL3",
139             PP_NV12_LOAD_SAVE_PL3,
140             pp_nv12_load_save_pl3_gen5,
141             sizeof(pp_nv12_load_save_pl3_gen5),
142             NULL,
143         },
144
145         pp_plx_load_save_plx_initialize,
146     },
147
148     {
149         {
150             "PL3_NV12",
151             PP_PL3_LOAD_SAVE_N12,
152             pp_pl3_load_save_nv12_gen5,
153             sizeof(pp_pl3_load_save_nv12_gen5),
154             NULL,
155         },
156
157         pp_plx_load_save_plx_initialize,
158     },
159
160     {
161         {
162             "PL3_PL3",
163             PP_PL3_LOAD_SAVE_N12,
164             pp_pl3_load_save_pl3_gen5,
165             sizeof(pp_pl3_load_save_pl3_gen5),
166             NULL,
167         },
168
169         pp_plx_load_save_plx_initialize
170     },
171
172     {
173         {
174             "NV12 Scaling module",
175             PP_NV12_SCALING,
176             pp_nv12_scaling_gen5,
177             sizeof(pp_nv12_scaling_gen5),
178             NULL,
179         },
180
181         pp_nv12_scaling_initialize,
182     },
183
184     {
185         {
186             "NV12 AVS module",
187             PP_NV12_AVS,
188             pp_nv12_avs_gen5,
189             sizeof(pp_nv12_avs_gen5),
190             NULL,
191         },
192
193         pp_nv12_avs_initialize,
194     },
195
196     {
197         {
198             "NV12 DNDI module",
199             PP_NV12_DNDI,
200             pp_nv12_dndi_gen5,
201             sizeof(pp_nv12_dndi_gen5),
202             NULL,
203         },
204
205         pp_nv12_dndi_initialize,
206     },
207 };
208
/* Gen6 (Sandy Bridge) post-processing kernel binaries, generated from the
 * shader sources under shaders/post_processing/. */
static const uint32_t pp_null_gen6[][4] = {
#include "shaders/post_processing/null.g6b"
};

static const uint32_t pp_nv12_load_save_nv12_gen6[][4] = {
#include "shaders/post_processing/nv12_load_save_nv12.g6b"
};

static const uint32_t pp_nv12_load_save_pl3_gen6[][4] = {
#include "shaders/post_processing/nv12_load_save_pl3.g6b"
};

static const uint32_t pp_pl3_load_save_nv12_gen6[][4] = {
#include "shaders/post_processing/pl3_load_save_nv12.g6b"
};

static const uint32_t pp_pl3_load_save_pl3_gen6[][4] = {
#include "shaders/post_processing/pl3_load_save_pl3.g6b"
};

static const uint32_t pp_nv12_scaling_gen6[][4] = {
#include "shaders/post_processing/nv12_scaling_nv12.g6b"
};

static const uint32_t pp_nv12_avs_gen6[][4] = {
#include "shaders/post_processing/nv12_avs_nv12.g6b"
};

static const uint32_t pp_nv12_dndi_gen6[][4] = {
#include "shaders/post_processing/nv12_dndi_nv12.g6b"
};
240
241 static struct pp_module pp_modules_gen6[] = {
242     {
243         {
244             "NULL module (for testing)",
245             PP_NULL,
246             pp_null_gen6,
247             sizeof(pp_null_gen6),
248             NULL,
249         },
250
251         pp_null_initialize,
252     },
253
254     {
255         {
256             "NV12_NV12",
257             PP_NV12_LOAD_SAVE_N12,
258             pp_nv12_load_save_nv12_gen6,
259             sizeof(pp_nv12_load_save_nv12_gen6),
260             NULL,
261         },
262
263         pp_plx_load_save_plx_initialize,
264     },
265
266     {
267         {
268             "NV12_PL3",
269             PP_NV12_LOAD_SAVE_PL3,
270             pp_nv12_load_save_pl3_gen6,
271             sizeof(pp_nv12_load_save_pl3_gen6),
272             NULL,
273         },
274         
275         pp_plx_load_save_plx_initialize,
276     },
277
278     {
279         {
280             "PL3_NV12",
281             PP_PL3_LOAD_SAVE_N12,
282             pp_pl3_load_save_nv12_gen6,
283             sizeof(pp_pl3_load_save_nv12_gen6),
284             NULL,
285         },
286
287         pp_plx_load_save_plx_initialize,
288     },
289
290     {
291         {
292             "PL3_PL3",
293             PP_PL3_LOAD_SAVE_N12,
294             pp_pl3_load_save_pl3_gen6,
295             sizeof(pp_pl3_load_save_pl3_gen6),
296             NULL,
297         },
298
299         pp_plx_load_save_plx_initialize,
300     },
301
302     {
303         {
304             "NV12 Scaling module",
305             PP_NV12_SCALING,
306             pp_nv12_scaling_gen6,
307             sizeof(pp_nv12_scaling_gen6),
308             NULL,
309         },
310
311         pp_nv12_scaling_initialize,
312     },
313
314     {
315         {
316             "NV12 AVS module",
317             PP_NV12_AVS,
318             pp_nv12_avs_gen6,
319             sizeof(pp_nv12_avs_gen6),
320             NULL,
321         },
322
323         pp_nv12_avs_initialize,
324     },
325
326     {
327         {
328             "NV12 DNDI module",
329             PP_NV12_DNDI,
330             pp_nv12_dndi_gen6,
331             sizeof(pp_nv12_dndi_gen6),
332             NULL,
333         },
334
335         pp_nv12_dndi_initialize,
336     },
337 };
338
/* Shorthand for the CURBE (static) and per-MEDIA_OBJECT (inline) kernel
 * parameter blocks; both live in the pp_context visible in each function. */
#define pp_static_parameter     pp_context->pp_static_parameter
#define pp_inline_parameter     pp_context->pp_inline_parameter
341
342 static int
343 pp_get_surface_fourcc(VADriverContextP ctx, const struct i965_surface *surface)
344 {
345     struct i965_driver_data *i965 = i965_driver_data(ctx);
346     int fourcc;
347
348     if (surface->flag == I965_SURFACE_IMAGE) {
349         struct object_image *obj_image = IMAGE(surface->id);
350         fourcc = obj_image->image.format.fourcc;
351     } else {
352         struct object_surface *obj_surface = SURFACE(surface->id);
353         fourcc = obj_surface->fourcc;
354     }
355
356     return fourcc;
357 }
358
359 static void
360 pp_set_surface_tiling(struct i965_surface_state *ss, unsigned int tiling)
361 {
362     switch (tiling) {
363     case I915_TILING_NONE:
364         ss->ss3.tiled_surface = 0;
365         ss->ss3.tile_walk = 0;
366         break;
367     case I915_TILING_X:
368         ss->ss3.tiled_surface = 1;
369         ss->ss3.tile_walk = I965_TILEWALK_XMAJOR;
370         break;
371     case I915_TILING_Y:
372         ss->ss3.tiled_surface = 1;
373         ss->ss3.tile_walk = I965_TILEWALK_YMAJOR;
374         break;
375     }
376 }
377
378 static void
379 pp_set_surface2_tiling(struct i965_surface_state2 *ss, unsigned int tiling)
380 {
381     switch (tiling) {
382     case I915_TILING_NONE:
383         ss->ss2.tiled_surface = 0;
384         ss->ss2.tile_walk = 0;
385         break;
386     case I915_TILING_X:
387         ss->ss2.tiled_surface = 1;
388         ss->ss2.tile_walk = I965_TILEWALK_XMAJOR;
389         break;
390     case I915_TILING_Y:
391         ss->ss2.tiled_surface = 1;
392         ss->ss2.tile_walk = I965_TILEWALK_YMAJOR;
393         break;
394     }
395 }
396
static void
ironlake_pp_surface_state(struct i965_post_processing_context *pp_context)
{
    /* Intentionally empty: surface states are created per-module through
     * i965_pp_set_surface_state()/i965_pp_set_surface2_state() during
     * module initialization, so there is nothing left to do here. */
}
402
/* Build the single interface descriptor for the currently selected pp
 * kernel: kernel entry point, CURBE read layout, sampler state and
 * binding table pointers, each backed by a GEM relocation. */
static void
ironlake_pp_interface_descriptor_table(struct i965_post_processing_context *pp_context)
{
    struct i965_interface_descriptor *desc;
    dri_bo *bo;
    int pp_index = pp_context->current_pp;

    bo = pp_context->idrt.bo;
    dri_bo_map(bo, 1);
    assert(bo->virtual);
    desc = bo->virtual;
    memset(desc, 0, sizeof(*desc));
    desc->desc0.grf_reg_blocks = 10;
    desc->desc0.kernel_start_pointer = pp_context->pp_modules[pp_index].kernel.bo->offset >> 6; /* reloc */
    desc->desc1.const_urb_entry_read_offset = 0;
    desc->desc1.const_urb_entry_read_len = 4; /* grf 1-4 */
    desc->desc2.sampler_state_pointer = pp_context->sampler_state_table.bo->offset >> 5;
    desc->desc2.sampler_count = 0;
    desc->desc3.binding_table_entry_count = 0;
    desc->desc3.binding_table_pointer = 
        pp_context->binding_table.bo->offset >> 5; /*reloc */

    /* The low bits of each descriptor dword hold flag fields, so they are
     * passed as the relocation delta to be combined with the bo offset. */
    dri_bo_emit_reloc(bo,
                      I915_GEM_DOMAIN_INSTRUCTION, 0,
                      desc->desc0.grf_reg_blocks,
                      offsetof(struct i965_interface_descriptor, desc0),
                      pp_context->pp_modules[pp_index].kernel.bo);

    dri_bo_emit_reloc(bo,
                      I915_GEM_DOMAIN_INSTRUCTION, 0,
                      desc->desc2.sampler_count << 2,
                      offsetof(struct i965_interface_descriptor, desc2),
                      pp_context->sampler_state_table.bo);

    dri_bo_emit_reloc(bo,
                      I915_GEM_DOMAIN_INSTRUCTION, 0,
                      desc->desc3.binding_table_entry_count,
                      offsetof(struct i965_interface_descriptor, desc3),
                      pp_context->binding_table.bo);

    dri_bo_unmap(bo);
    pp_context->idrt.num_interface_descriptors++;
}
446
/* Fill the binding table with one entry per surface state the current
 * module registered in pp_context->surfaces[]; unused slots stay zero. */
static void
ironlake_pp_binding_table(struct i965_post_processing_context *pp_context)
{
    unsigned int *binding_table;
    dri_bo *bo = pp_context->binding_table.bo;
    int i;

    dri_bo_map(bo, 1);
    assert(bo->virtual);
    binding_table = bo->virtual;
    memset(binding_table, 0, bo->size);

    for (i = 0; i < MAX_PP_SURFACES; i++) {
        if (pp_context->surfaces[i].ss_bo) {
            assert(pp_context->surfaces[i].s_bo);

            /* The written offset is patched by the kernel via the
             * relocation below (delta 0). */
            binding_table[i] = pp_context->surfaces[i].ss_bo->offset;
            dri_bo_emit_reloc(bo,
                              I915_GEM_DOMAIN_INSTRUCTION, 0,
                              0,
                              i * sizeof(*binding_table),
                              pp_context->surfaces[i].ss_bo);
        }
    
    }

    dri_bo_unmap(bo);
}
475
/* Program the VFE (video front end) fixed-function state: thread count,
 * URB entry layout, and a relocated pointer to the interface descriptor
 * table built in ironlake_pp_interface_descriptor_table(). */
static void
ironlake_pp_vfe_state(struct i965_post_processing_context *pp_context)
{
    struct i965_vfe_state *vfe_state;
    dri_bo *bo;

    bo = pp_context->vfe_state.bo;
    dri_bo_map(bo, 1);
    assert(bo->virtual);
    vfe_state = bo->virtual;
    memset(vfe_state, 0, sizeof(*vfe_state));
    vfe_state->vfe1.max_threads = pp_context->urb.num_vfe_entries - 1;      /* field is N-1 encoded */
    vfe_state->vfe1.urb_entry_alloc_size = pp_context->urb.size_vfe_entry - 1;
    vfe_state->vfe1.num_urb_entries = pp_context->urb.num_vfe_entries;
    vfe_state->vfe1.vfe_mode = VFE_GENERIC_MODE;
    vfe_state->vfe1.children_present = 0;
    vfe_state->vfe2.interface_descriptor_base = 
        pp_context->idrt.bo->offset >> 4; /* reloc */
    dri_bo_emit_reloc(bo,
                      I915_GEM_DOMAIN_INSTRUCTION, 0,
                      0,
                      offsetof(struct i965_vfe_state, vfe2),
                      pp_context->idrt.bo);
    dri_bo_unmap(bo);
}
501
502 static void
503 ironlake_pp_upload_constants(struct i965_post_processing_context *pp_context)
504 {
505     unsigned char *constant_buffer;
506
507     assert(sizeof(pp_static_parameter) == 128);
508     dri_bo_map(pp_context->curbe.bo, 1);
509     assert(pp_context->curbe.bo->virtual);
510     constant_buffer = pp_context->curbe.bo->virtual;
511     memcpy(constant_buffer, &pp_static_parameter, sizeof(pp_static_parameter));
512     dri_bo_unmap(pp_context->curbe.bo);
513 }
514
/* Build all indirect state buffers needed by one pp run.  The binding
 * table must be built after the module registered its surfaces, and the
 * interface descriptor references both it and the sampler state. */
static void
ironlake_pp_states_setup(VADriverContextP ctx,
                         struct i965_post_processing_context *pp_context)
{
    ironlake_pp_surface_state(pp_context);
    ironlake_pp_binding_table(pp_context);
    ironlake_pp_interface_descriptor_table(pp_context);
    ironlake_pp_vfe_state(pp_context);
    ironlake_pp_upload_constants(pp_context);
}
525
/* Switch the GPU to the media pipeline for the following commands. */
static void
ironlake_pp_pipeline_select(VADriverContextP ctx,
                            struct i965_post_processing_context *pp_context)
{
    struct intel_batchbuffer *batch = pp_context->batch;

    BEGIN_BATCH(batch, 1);
    OUT_BATCH(batch, CMD_PIPELINE_SELECT | PIPELINE_SELECT_MEDIA);
    ADVANCE_BATCH(batch);
}
536
537 static void
538 ironlake_pp_urb_layout(VADriverContextP ctx,
539                        struct i965_post_processing_context *pp_context)
540 {
541     struct intel_batchbuffer *batch = pp_context->batch;
542     unsigned int vfe_fence, cs_fence;
543
544     vfe_fence = pp_context->urb.cs_start;
545     cs_fence = pp_context->urb.size;
546
547     BEGIN_BATCH(batch, 3);
548     OUT_BATCH(batch, CMD_URB_FENCE | UF0_VFE_REALLOC | UF0_CS_REALLOC | 1);
549     OUT_BATCH(batch, 0);
550     OUT_BATCH(batch, 
551               (vfe_fence << UF2_VFE_FENCE_SHIFT) |      /* VFE_SIZE */
552               (cs_fence << UF2_CS_FENCE_SHIFT));        /* CS_SIZE */
553     ADVANCE_BATCH(batch);
554 }
555
556 static void
557 ironlake_pp_state_base_address(VADriverContextP ctx,
558                                struct i965_post_processing_context *pp_context)
559 {
560     struct intel_batchbuffer *batch = pp_context->batch;
561
562     BEGIN_BATCH(batch, 8);
563     OUT_BATCH(batch, CMD_STATE_BASE_ADDRESS | 6);
564     OUT_BATCH(batch, 0 | BASE_ADDRESS_MODIFY);
565     OUT_BATCH(batch, 0 | BASE_ADDRESS_MODIFY);
566     OUT_BATCH(batch, 0 | BASE_ADDRESS_MODIFY);
567     OUT_BATCH(batch, 0 | BASE_ADDRESS_MODIFY);
568     OUT_BATCH(batch, 0 | BASE_ADDRESS_MODIFY);
569     OUT_BATCH(batch, 0 | BASE_ADDRESS_MODIFY);
570     OUT_BATCH(batch, 0 | BASE_ADDRESS_MODIFY);
571     ADVANCE_BATCH(batch);
572 }
573
/* Point the media pipeline at the VFE state buffer built earlier. */
static void
ironlake_pp_state_pointers(VADriverContextP ctx,
                           struct i965_post_processing_context *pp_context)
{
    struct intel_batchbuffer *batch = pp_context->batch;

    BEGIN_BATCH(batch, 3);
    OUT_BATCH(batch, CMD_MEDIA_STATE_POINTERS | 1);
    OUT_BATCH(batch, 0);
    OUT_RELOC(batch, pp_context->vfe_state.bo, I915_GEM_DOMAIN_INSTRUCTION, 0, 0);
    ADVANCE_BATCH(batch);
}
586
/* Emit CS_URB_STATE describing the constant buffer region of the URB
 * (entry size is N-1 encoded, per hardware convention). */
static void 
ironlake_pp_cs_urb_layout(VADriverContextP ctx,
                          struct i965_post_processing_context *pp_context)
{
    struct intel_batchbuffer *batch = pp_context->batch;

    BEGIN_BATCH(batch, 2);
    OUT_BATCH(batch, CMD_CS_URB_STATE | 0);
    OUT_BATCH(batch,
              ((pp_context->urb.size_cs_entry - 1) << 4) |     /* URB Entry Allocation Size */
              (pp_context->urb.num_cs_entries << 0));          /* Number of URB Entries */
    ADVANCE_BATCH(batch);
}
600
/* Emit CONSTANT_BUFFER pointing at the CURBE bo.  The relocation delta
 * carries the buffer length field (size_cs_entry - 1) that occupies the
 * low bits of the relocated address dword. */
static void
ironlake_pp_constant_buffer(VADriverContextP ctx,
                            struct i965_post_processing_context *pp_context)
{
    struct intel_batchbuffer *batch = pp_context->batch;

    BEGIN_BATCH(batch, 2);
    OUT_BATCH(batch, CMD_CONSTANT_BUFFER | (1 << 8) | (2 - 2));
    OUT_RELOC(batch, pp_context->curbe.bo,
              I915_GEM_DOMAIN_INSTRUCTION, 0,
              pp_context->urb.size_cs_entry - 1);
    ADVANCE_BATCH(batch);    
}
614
/* Walk the destination block grid and emit one MEDIA_OBJECT per block.
 * The module's step callbacks define the grid dimensions, and its
 * pp_set_block_parameter() fills pp_inline_parameter for each block; a
 * non-zero return from it skips that block entirely. */
static void
ironlake_pp_object_walker(VADriverContextP ctx,
                          struct i965_post_processing_context *pp_context)
{
    struct intel_batchbuffer *batch = pp_context->batch;
    int x, x_steps, y, y_steps;

    x_steps = pp_context->pp_x_steps(&pp_context->private_context);
    y_steps = pp_context->pp_y_steps(&pp_context->private_context);

    for (y = 0; y < y_steps; y++) {
        for (x = 0; x < x_steps; x++) {
            if (!pp_context->pp_set_block_parameter(pp_context, x, y)) {
                BEGIN_BATCH(batch, 20);
                OUT_BATCH(batch, CMD_MEDIA_OBJECT | 18);
                OUT_BATCH(batch, 0);
                OUT_BATCH(batch, 0); /* no indirect data */
                OUT_BATCH(batch, 0);

                /* inline data grf 5-6 */
                assert(sizeof(pp_inline_parameter) == 64);
                intel_batchbuffer_data(batch, &pp_inline_parameter, sizeof(pp_inline_parameter));

                ADVANCE_BATCH(batch);
            }
        }
    }
}
643
/* Emit the full Ironlake media pipeline for one pp run; the command
 * order follows the hardware's required programming sequence. */
static void
ironlake_pp_pipeline_setup(VADriverContextP ctx,
                           struct i965_post_processing_context *pp_context)
{
    struct intel_batchbuffer *batch = pp_context->batch;

    intel_batchbuffer_start_atomic(batch, 0x1000);
    intel_batchbuffer_emit_mi_flush(batch);
    ironlake_pp_pipeline_select(ctx, pp_context);
    ironlake_pp_state_base_address(ctx, pp_context);
    ironlake_pp_state_pointers(ctx, pp_context);
    ironlake_pp_urb_layout(ctx, pp_context);
    ironlake_pp_cs_urb_layout(ctx, pp_context);
    ironlake_pp_constant_buffer(ctx, pp_context);
    ironlake_pp_object_walker(ctx, pp_context);
    intel_batchbuffer_end_atomic(batch);
}
661
/* Allocate and fill a 2D SURFACE_STATE for one plane of surf_bo and
 * register it in pp_context->surfaces[index].  Takes a reference on
 * surf_bo; is_target selects a writable (render) relocation domain. */
static void
i965_pp_set_surface_state(VADriverContextP ctx, struct i965_post_processing_context *pp_context,
                          dri_bo *surf_bo, unsigned long surf_bo_offset,
                          int width, int height, int pitch, int format, 
                          int index, int is_target)
{
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    struct i965_surface_state *ss;
    dri_bo *ss_bo;
    unsigned int tiling;
    unsigned int swizzle;

    dri_bo_get_tiling(surf_bo, &tiling, &swizzle);
    pp_context->surfaces[index].s_bo = surf_bo;
    dri_bo_reference(pp_context->surfaces[index].s_bo);
    ss_bo = dri_bo_alloc(i965->intel.bufmgr, 
                         "surface state", 
                         sizeof(struct i965_surface_state), 
                         4096);
    assert(ss_bo);
    pp_context->surfaces[index].ss_bo = ss_bo;
    dri_bo_map(ss_bo, True);
    assert(ss_bo->virtual);
    ss = ss_bo->virtual;
    memset(ss, 0, sizeof(*ss));
    ss->ss0.surface_type = I965_SURFACE_2D;
    ss->ss0.surface_format = format;
    ss->ss1.base_addr = surf_bo->offset + surf_bo_offset;  /* patched by the reloc below */
    ss->ss2.width = width - 1;    /* hardware fields are N-1 encoded */
    ss->ss2.height = height - 1;
    ss->ss3.pitch = pitch - 1;
    pp_set_surface_tiling(ss, tiling);
    dri_bo_emit_reloc(ss_bo,
                      I915_GEM_DOMAIN_RENDER, is_target ? I915_GEM_DOMAIN_RENDER : 0,
                      surf_bo_offset,
                      offsetof(struct i965_surface_state, ss1),
                      pp_context->surfaces[index].s_bo);
    dri_bo_unmap(ss_bo);
}
701
/* Allocate and fill a SURFACE_STATE2 (planar YUV surface state, used by
 * the sampler-based kernels) for surf_bo and register it in
 * pp_context->surfaces[index].  Takes a reference on surf_bo.  Unlike
 * i965_pp_set_surface_state(), these surfaces are always read-only
 * (write domain 0 in the relocation). */
static void
i965_pp_set_surface2_state(VADriverContextP ctx, struct i965_post_processing_context *pp_context,
                           dri_bo *surf_bo, unsigned long surf_bo_offset,
                           int width, int height, int wpitch,
                           int xoffset, int yoffset,
                           int format, int interleave_chroma,
                           int index)
{
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    struct i965_surface_state2 *ss2;
    dri_bo *ss2_bo;
    unsigned int tiling;
    unsigned int swizzle;

    dri_bo_get_tiling(surf_bo, &tiling, &swizzle);
    pp_context->surfaces[index].s_bo = surf_bo;
    dri_bo_reference(pp_context->surfaces[index].s_bo);
    ss2_bo = dri_bo_alloc(i965->intel.bufmgr, 
                          "YUV surface state", 
                          sizeof(struct i965_surface_state2), 
                          4096);
    assert(ss2_bo);
    pp_context->surfaces[index].ss_bo = ss2_bo;
    dri_bo_map(ss2_bo, True);
    assert(ss2_bo->virtual);
    ss2 = ss2_bo->virtual;
    memset(ss2, 0, sizeof(*ss2));
    ss2->ss0.surface_base_address = surf_bo->offset + surf_bo_offset;  /* patched by the reloc below */
    ss2->ss1.cbcr_pixel_offset_v_direction = 0;
    ss2->ss1.width = width - 1;     /* hardware fields are N-1 encoded */
    ss2->ss1.height = height - 1;
    ss2->ss2.pitch = wpitch - 1;
    ss2->ss2.interleave_chroma = interleave_chroma;
    ss2->ss2.surface_format = format;
    ss2->ss3.x_offset_for_cb = xoffset;   /* chroma plane offset within the bo */
    ss2->ss3.y_offset_for_cb = yoffset;
    pp_set_surface2_tiling(ss2, tiling);
    dri_bo_emit_reloc(ss2_bo,
                      I915_GEM_DOMAIN_RENDER, 0,
                      surf_bo_offset,
                      offsetof(struct i965_surface_state2, ss0),
                      surf_bo);
    dri_bo_unmap(ss2_bo);
}
746
/* Create the media read/write surface states for all planes of a surface
 * or image (NV12: Y + interleaved UV; I420/YV12: Y + U + V), starting at
 * binding-table slot base_index.  Also returns each plane's geometry via
 * the width/height/pitch/offset arrays (indexed 0=Y, then chroma).
 * For YV12 the U/V plane order is swapped relative to I420. */
static void 
pp_set_media_rw_message_surface(VADriverContextP ctx, struct i965_post_processing_context *pp_context,
                                const struct i965_surface *surface, 
                                int base_index, int is_target,
                                int *width, int *height, int *pitch, int *offset)
{
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    struct object_surface *obj_surface;
    struct object_image *obj_image;
    dri_bo *bo;
    int fourcc = pp_get_surface_fourcc(ctx, surface);
    const int Y = 0;
    const int U = fourcc == VA_FOURCC('Y', 'V', '1', '2') ? 2 : 1;
    const int V = fourcc == VA_FOURCC('Y', 'V', '1', '2') ? 1 : 2;
    const int UV = 1;
    int interleaved_uv = fourcc == VA_FOURCC('N', 'V', '1', '2');

    if (surface->flag == I965_SURFACE_SURFACE) {
        /* VA surface: planes are tightly packed after the padded Y plane;
         * orig_* is the requested size, width/height the padded size. */
        obj_surface = SURFACE(surface->id);
        bo = obj_surface->bo;
        width[0] = obj_surface->orig_width;
        height[0] = obj_surface->orig_height;
        pitch[0] = obj_surface->width;
        offset[0] = 0;

        if (interleaved_uv) {
            width[1] = obj_surface->orig_width;
            height[1] = obj_surface->orig_height / 2;
            pitch[1] = obj_surface->width;
            offset[1] = offset[0] + obj_surface->width * obj_surface->height;
        } else {
            width[1] = obj_surface->orig_width / 2;
            height[1] = obj_surface->orig_height / 2;
            pitch[1] = obj_surface->width / 2;
            offset[1] = offset[0] + obj_surface->width * obj_surface->height;
            width[2] = obj_surface->orig_width / 2;
            height[2] = obj_surface->orig_height / 2;
            pitch[2] = obj_surface->width / 2;
            offset[2] = offset[1] + (obj_surface->width / 2) * (obj_surface->height / 2);
        }
    } else {
        /* VA image: plane layout is taken from the image descriptor. */
        obj_image = IMAGE(surface->id);
        bo = obj_image->bo;
        width[0] = obj_image->image.width;
        height[0] = obj_image->image.height;
        pitch[0] = obj_image->image.pitches[0];
        offset[0] = obj_image->image.offsets[0];

        if (interleaved_uv) {
            width[1] = obj_image->image.width;
            height[1] = obj_image->image.height / 2;
            pitch[1] = obj_image->image.pitches[1];
            offset[1] = obj_image->image.offsets[1];
        } else {
            width[1] = obj_image->image.width / 2;
            height[1] = obj_image->image.height / 2;
            pitch[1] = obj_image->image.pitches[1];
            offset[1] = obj_image->image.offsets[1];
            width[2] = obj_image->image.width / 2;
            height[2] = obj_image->image.height / 2;
            pitch[2] = obj_image->image.pitches[2];
            offset[2] = obj_image->image.offsets[2];
        }
    }

    /* Y surface.  Width is divided by 4 — presumably the R8 kernels
     * address the plane in 4-byte units; confirm against the kernel
     * sources under shaders/post_processing/. */
    i965_pp_set_surface_state(ctx, pp_context,
                              bo, offset[Y],
                              width[Y] / 4, height[Y], pitch[Y], I965_SURFACEFORMAT_R8_UNORM,
                              base_index, is_target);

    if (interleaved_uv) {
        i965_pp_set_surface_state(ctx, pp_context,
                                  bo, offset[UV],
                                  width[UV] / 4, height[UV], pitch[UV], I965_SURFACEFORMAT_R8_UNORM,
                                  base_index + 1, is_target);
    } else {
        /* U surface */
        i965_pp_set_surface_state(ctx, pp_context,
                                  bo, offset[U],
                                  width[U] / 4, height[U], pitch[U], I965_SURFACEFORMAT_R8_UNORM,
                                  base_index + 1, is_target);

        /* V surface */
        i965_pp_set_surface_state(ctx, pp_context,
                                  bo, offset[V],
                                  width[V] / 4, height[V], pitch[V], I965_SURFACEFORMAT_R8_UNORM,
                                  base_index + 2, is_target);
    }

}
838
/* The NULL module walks a single 1x1 block grid. */
static int
pp_null_x_steps(void *private_context)
{
    (void)private_context;      /* unused */

    return 1;
}
844
/* The NULL module walks a single 1x1 block grid. */
static int
pp_null_y_steps(void *private_context)
{
    (void)private_context;      /* unused */

    return 1;
}
850
/* NULL module: nothing to program per block; 0 means "emit the block". */
static int
pp_null_set_block_parameter(struct i965_post_processing_context *pp_context, int x, int y)
{
    (void)pp_context;
    (void)x;
    (void)y;

    return 0;
}
856
/* NULL (pass-through test) module: no surfaces or kernel parameters to
 * set up, only the walker callbacks are installed. */
static void
pp_null_initialize(VADriverContextP ctx, struct i965_post_processing_context *pp_context,
                   const struct i965_surface *src_surface,
                   const VARectangle *src_rect,
                   const struct i965_surface *dst_surface,
                   const VARectangle *dst_rect,
                   void *filter_param)
{
    /* private function & data */
    pp_context->pp_x_steps = pp_null_x_steps;
    pp_context->pp_y_steps = pp_null_y_steps;
    pp_context->pp_set_block_parameter = pp_null_set_block_parameter;
}
870
/* Load/save kernels process a full row of 16-pixel blocks per thread,
 * so the walker only steps in Y. */
static int
pp_load_save_x_steps(void *private_context)
{
    (void)private_context;      /* unused */

    return 1;
}
876
/* One walker step per 8-row block; dest_h was aligned to 16 by
 * pp_plx_load_save_plx_initialize(), so the division is exact. */
static int
pp_load_save_y_steps(void *private_context)
{
    struct pp_load_save_context *pp_load_save_context = private_context;

    return pp_load_save_context->dest_h / 8;
}
884
/* Program the inline parameters for one 16x8 destination block: full
 * row/column masks and the block's pixel origin.  Always returns 0
 * ("emit the block"). */
static int
pp_load_save_set_block_parameter(struct i965_post_processing_context *pp_context, int x, int y)
{
    pp_inline_parameter.grf5.block_vertical_mask = 0xff;
    pp_inline_parameter.grf5.block_horizontal_mask = 0xffff;
    pp_inline_parameter.grf5.destination_block_horizontal_origin = x * 16;
    pp_inline_parameter.grf5.destination_block_vertical_origin = y * 8;

    return 0;
}
895
static void
pp_plx_load_save_plx_initialize(VADriverContextP ctx, struct i965_post_processing_context *pp_context,
                                const struct i965_surface *src_surface,
                                const VARectangle *src_rect,
                                const struct i965_surface *dst_surface,
                                const VARectangle *dst_rect,
                                void *filter_param)
{
    /*
     * Set up a planar-to-planar copy (load/save) pass.  Both surfaces
     * are bound through pp_set_media_rw_message_surface(): the source
     * at binding table base index 1 (read), the destination at base
     * index 7 (write target).  src_rect/dst_rect/filter_param are
     * ignored — the copy always covers the whole surface.
     */
    struct pp_load_save_context *pp_load_save_context = (struct pp_load_save_context *)&pp_context->private_context;
    int width[3], height[3], pitch[3], offset[3];
    const int Y = 0;

    /* source surface */
    pp_set_media_rw_message_surface(ctx, pp_context, src_surface, 1, 0,
                                    width, height, pitch, offset);

    /* destination surface */
    /* NOTE: this second call overwrites width/height/pitch/offset, so
     * the block counts below are derived from the destination planes. */
    pp_set_media_rw_message_surface(ctx, pp_context, dst_surface, 7, 1,
                                    width, height, pitch, offset);

    /* private function & data */
    pp_context->pp_x_steps = pp_load_save_x_steps;
    pp_context->pp_y_steps = pp_load_save_y_steps;
    pp_context->pp_set_block_parameter = pp_load_save_set_block_parameter;
    pp_load_save_context->dest_h = ALIGN(height[Y], 16);    /* Y-plane rows, 16-aligned */
    pp_load_save_context->dest_w = ALIGN(width[Y], 16);

    pp_inline_parameter.grf5.block_count_x = ALIGN(width[Y], 16) / 16;   /* 1 x N */
    pp_inline_parameter.grf5.number_blocks = ALIGN(width[Y], 16) / 16;
}
926
static int
pp_scaling_x_steps(void *private_context)
{
    /* Scaling walks one step horizontally; grf5.block_count_x
     * covers the horizontal extent. */
    (void)private_context;      /* unused */

    return 1;
}
932
933 static int
934 pp_scaling_y_steps(void *private_context)
935 {
936     struct pp_scaling_context *pp_scaling_context = private_context;
937
938     return pp_scaling_context->dest_h / 8;
939 }
940
static int
pp_scaling_set_block_parameter(struct i965_post_processing_context *pp_context, int x, int y)
{
    /*
     * Per-block parameters for the bilinear scaling pass: derive the
     * normalized source-sampling origin of this 16x8 destination block
     * from the per-pixel scaling steps, and set the destination block
     * origin in pixels.
     */
    struct pp_scaling_context *pp_scaling_context = (struct pp_scaling_context *)&pp_context->private_context;
    float src_x_steping = pp_inline_parameter.grf5.normalized_video_x_scaling_step;
    float src_y_steping = pp_static_parameter.grf1.r1_6.normalized_video_y_scaling_step;

    /* 16 pixels per block horizontally, 8 rows per block vertically */
    pp_inline_parameter.grf5.r5_1.source_surface_block_normalized_horizontal_origin = src_x_steping * x * 16 + pp_scaling_context->src_normalized_x;
    pp_inline_parameter.grf5.source_surface_block_normalized_vertical_origin = src_y_steping * y * 8 + pp_scaling_context->src_normalized_y;
    pp_inline_parameter.grf5.destination_block_horizontal_origin = x * 16 + pp_scaling_context->dest_x;
    pp_inline_parameter.grf5.destination_block_vertical_origin = y * 8 + pp_scaling_context->dest_y;

    return 0;
}
955
static void
pp_nv12_scaling_initialize(VADriverContextP ctx, struct i965_post_processing_context *pp_context,
                           const struct i965_surface *src_surface,
                           const VARectangle *src_rect,
                           const struct i965_surface *dst_surface,
                           const VARectangle *dst_rect,
                           void *filter_param)
{
    /*
     * Set up the NV12 bilinear scaling pass: the source Y and
     * interleaved-UV planes are bound as sampled surfaces (binding
     * table indices 1 and 2), the destination planes as media-write
     * targets (indices 7 and 8), linear min/mag samplers are
     * programmed for both planes, and the normalized scaling steps
     * are written into the static/inline kernel parameters.
     * filter_param is ignored.
     */
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    struct pp_scaling_context *pp_scaling_context = (struct pp_scaling_context *)&pp_context->private_context;
    struct object_surface *obj_surface;
    struct i965_sampler_state *sampler_state;
    int in_w, in_h, in_wpitch, in_hpitch;
    int out_w, out_h, out_wpitch, out_hpitch;

    /* source surface */
    obj_surface = SURFACE(src_surface->id);
    in_w = obj_surface->orig_width;
    in_h = obj_surface->orig_height;
    in_wpitch = obj_surface->width;     /* allocated pitch in bytes */
    in_hpitch = obj_surface->height;    /* allocated rows of the Y plane */

    /* source Y surface index 1 */
    i965_pp_set_surface_state(ctx, pp_context,
                              obj_surface->bo, 0,
                              in_w, in_h, in_wpitch, I965_SURFACEFORMAT_R8_UNORM,
                              1, 0);

    /* source UV surface index 2 — NV12 UV plane starts right after the
     * Y plane (offset = pitch * allocated height), half size, R8G8 */
    i965_pp_set_surface_state(ctx, pp_context,
                              obj_surface->bo, in_wpitch * in_hpitch,
                              in_w / 2, in_h / 2, in_wpitch, I965_SURFACEFORMAT_R8G8_UNORM,
                              2, 0);

    /* destination surface */
    obj_surface = SURFACE(dst_surface->id);
    out_w = obj_surface->orig_width;
    out_h = obj_surface->orig_height;
    out_wpitch = obj_surface->width;
    out_hpitch = obj_surface->height;

    /* destination Y surface index 7 — write targets are programmed
     * with width/4, matching the convention used for the other media
     * write targets in this file */
    i965_pp_set_surface_state(ctx, pp_context,
                              obj_surface->bo, 0,
                              out_w / 4, out_h, out_wpitch, I965_SURFACEFORMAT_R8_UNORM,
                              7, 1);

    /* destination UV surface index 8 */
    i965_pp_set_surface_state(ctx, pp_context,
                              obj_surface->bo, out_wpitch * out_hpitch,
                              out_w / 4, out_h / 2, out_wpitch, I965_SURFACEFORMAT_R8G8_UNORM,
                              8, 1);

    /* sampler state */
    dri_bo_map(pp_context->sampler_state_table.bo, True);
    assert(pp_context->sampler_state_table.bo->virtual);
    sampler_state = pp_context->sampler_state_table.bo->virtual;

    /* SIMD16 Y index 1 — bilinear filtering, clamp at the edges */
    sampler_state[1].ss0.min_filter = I965_MAPFILTER_LINEAR;
    sampler_state[1].ss0.mag_filter = I965_MAPFILTER_LINEAR;
    sampler_state[1].ss1.r_wrap_mode = I965_TEXCOORDMODE_CLAMP;
    sampler_state[1].ss1.s_wrap_mode = I965_TEXCOORDMODE_CLAMP;
    sampler_state[1].ss1.t_wrap_mode = I965_TEXCOORDMODE_CLAMP;

    /* SIMD16 UV index 2 */
    sampler_state[2].ss0.min_filter = I965_MAPFILTER_LINEAR;
    sampler_state[2].ss0.mag_filter = I965_MAPFILTER_LINEAR;
    sampler_state[2].ss1.r_wrap_mode = I965_TEXCOORDMODE_CLAMP;
    sampler_state[2].ss1.s_wrap_mode = I965_TEXCOORDMODE_CLAMP;
    sampler_state[2].ss1.t_wrap_mode = I965_TEXCOORDMODE_CLAMP;

    dri_bo_unmap(pp_context->sampler_state_table.bo);

    /* private function & data */
    pp_context->pp_x_steps = pp_scaling_x_steps;
    pp_context->pp_y_steps = pp_scaling_y_steps;
    pp_context->pp_set_block_parameter = pp_scaling_set_block_parameter;

    pp_scaling_context->dest_x = dst_rect->x;
    pp_scaling_context->dest_y = dst_rect->y;
    pp_scaling_context->dest_w = ALIGN(dst_rect->width, 16);
    pp_scaling_context->dest_h = ALIGN(dst_rect->height, 16);
    /*
     * NOTE(review): the normalized origins and steps divide by both the
     * source dimension and the destination surface dimension
     * (e.g. / in_w / out_w) — presumably the scaling kernel multiplies
     * by the destination size internally.  TODO confirm against the
     * shader sources; dividing only by in_w/in_h would otherwise be the
     * expected normalization.
     */
    pp_scaling_context->src_normalized_x = (float)src_rect->x / in_w / out_w;
    pp_scaling_context->src_normalized_y = (float)src_rect->y / in_h / out_h;

    pp_static_parameter.grf1.r1_6.normalized_video_y_scaling_step = (float) src_rect->height / in_h / out_h;

    pp_inline_parameter.grf5.normalized_video_x_scaling_step = (float) src_rect->width / in_w / out_w;
    pp_inline_parameter.grf5.block_count_x = pp_scaling_context->dest_w / 16;   /* 1 x N */
    pp_inline_parameter.grf5.number_blocks = pp_scaling_context->dest_w / 16;
    pp_inline_parameter.grf5.block_vertical_mask = 0xff;
    pp_inline_parameter.grf5.block_horizontal_mask = 0xffff;
}
1050
1051 static int
1052 pp_avs_x_steps(void *private_context)
1053 {
1054     struct pp_avs_context *pp_avs_context = private_context;
1055
1056     return pp_avs_context->dest_w / 16;
1057 }
1058
static int
pp_avs_y_steps(void *private_context)
{
    /* AVS walks M x 1: the full column height is handled per step
     * via grf5.number_blocks. */
    (void)private_context;      /* unused */

    return 1;
}
1064
static int
pp_avs_set_block_parameter(struct i965_post_processing_context *pp_context, int x, int y)
{
    /*
     * Per-block parameters for the AVS pass with NLAS (non-linear
     * scaling, enabled via grf4.r4_2.avs.nlas in the initializer).
     * When the destination is wider than the aspect-preserving width,
     * the picture is split into three horizontal bands: left and right
     * ramps whose per-pixel step varies linearly (video_step_delta)
     * and a center band with a constant step, so stretching is pushed
     * to the edges.  The horizontal origin accumulates across blocks
     * in pp_inline_parameter, so blocks must be walked left to right.
     */
    struct pp_avs_context *pp_avs_context = (struct pp_avs_context *)&pp_context->private_context;
    float src_x_steping, src_y_steping, video_step_delta;
    /* destination width that would preserve the source aspect ratio */
    int tmp_w = ALIGN(pp_avs_context->dest_h * pp_avs_context->src_w / pp_avs_context->src_h, 16);

    if (tmp_w >= pp_avs_context->dest_w) {
        /* aspect-preserving width covers the target: uniform step,
         * center the visible window inside tmp_w */
        pp_inline_parameter.grf5.normalized_video_x_scaling_step = 1.0 / tmp_w;
        pp_inline_parameter.grf6.video_step_delta = 0;
        
        if (x == 0) {
            pp_inline_parameter.grf5.r5_1.source_surface_block_normalized_horizontal_origin = (float)(tmp_w - pp_avs_context->dest_w) / tmp_w / 2 +
                pp_avs_context->src_normalized_x;
        } else {
            /* advance origin by the 16 pixels of the previous block */
            src_x_steping = pp_inline_parameter.grf5.normalized_video_x_scaling_step;
            video_step_delta = pp_inline_parameter.grf6.video_step_delta;
            pp_inline_parameter.grf5.r5_1.source_surface_block_normalized_horizontal_origin += src_x_steping * 16 +
                16 * 15 * video_step_delta / 2;
        }
    } else {
        /* destination is wider: set up the three NLAS bands */
        int n0, n1, n2, nls_left, nls_right;
        int factor_a = 5, factor_b = 4;
        float f;

        /* n0/n1: surplus 16-pixel blocks split between the two sides;
         * n2: blocks borrowed from the center to lengthen the ramps */
        n0 = (pp_avs_context->dest_w - tmp_w) / (16 * 2);
        n1 = (pp_avs_context->dest_w - tmp_w) / 16 - n0;
        n2 = tmp_w / (16 * factor_a);
        nls_left = n0 + n2;
        nls_right = n1 + n2;
        f = (float) n2 * 16 / tmp_w;    /* normalized source span covered by a ramp */
        
        if (n0 < 5) {
            /* stretch is small: fall back to a uniform step over dest_w */
            pp_inline_parameter.grf6.video_step_delta = 0.0;

            if (x == 0) {
                pp_inline_parameter.grf5.normalized_video_x_scaling_step = 1.0 / pp_avs_context->dest_w;
                pp_inline_parameter.grf5.r5_1.source_surface_block_normalized_horizontal_origin = pp_avs_context->src_normalized_x;
            } else {
                src_x_steping = pp_inline_parameter.grf5.normalized_video_x_scaling_step;
                video_step_delta = pp_inline_parameter.grf6.video_step_delta;
                pp_inline_parameter.grf5.r5_1.source_surface_block_normalized_horizontal_origin += src_x_steping * 16 +
                    16 * 15 * video_step_delta / 2;
            }
        } else {
            if (x < nls_left) {
                /* left ramp: step starts at a and grows by b per pixel */
                /* f = a * nls_left * 16 + b * nls_left * 16 * (nls_left * 16 - 1) / 2 */
                float a = f / (nls_left * 16 * factor_b);
                float b = (f - nls_left * 16 * a) * 2 / (nls_left * 16 * (nls_left * 16 - 1));
                
                pp_inline_parameter.grf6.video_step_delta = b;

                if (x == 0) {
                    pp_inline_parameter.grf5.r5_1.source_surface_block_normalized_horizontal_origin = pp_avs_context->src_normalized_x;
                    pp_inline_parameter.grf5.normalized_video_x_scaling_step = a;
                } else {
                    src_x_steping = pp_inline_parameter.grf5.normalized_video_x_scaling_step;
                    video_step_delta = pp_inline_parameter.grf6.video_step_delta;
                    pp_inline_parameter.grf5.r5_1.source_surface_block_normalized_horizontal_origin += src_x_steping * 16 +
                        16 * 15 * video_step_delta / 2;
                    pp_inline_parameter.grf5.normalized_video_x_scaling_step += 16 * b;
                }
            } else if (x < (pp_avs_context->dest_w / 16 - nls_right)) {
                /* scale the center linearly */
                src_x_steping = pp_inline_parameter.grf5.normalized_video_x_scaling_step;
                video_step_delta = pp_inline_parameter.grf6.video_step_delta;
                pp_inline_parameter.grf5.r5_1.source_surface_block_normalized_horizontal_origin += src_x_steping * 16 +
                    16 * 15 * video_step_delta / 2;
                pp_inline_parameter.grf6.video_step_delta = 0.0;
                pp_inline_parameter.grf5.normalized_video_x_scaling_step = 1.0 / tmp_w;
            } else {
                /* right ramp: mirror of the left one, step shrinks by b */
                float a = f / (nls_right * 16 * factor_b);
                float b = (f - nls_right * 16 * a) * 2 / (nls_right * 16 * (nls_right * 16 - 1));

                src_x_steping = pp_inline_parameter.grf5.normalized_video_x_scaling_step;
                video_step_delta = pp_inline_parameter.grf6.video_step_delta;
                pp_inline_parameter.grf5.r5_1.source_surface_block_normalized_horizontal_origin += src_x_steping * 16 +
                    16 * 15 * video_step_delta / 2;
                pp_inline_parameter.grf6.video_step_delta = -b;

                if (x == (pp_avs_context->dest_w / 16 - nls_right))
                    pp_inline_parameter.grf5.normalized_video_x_scaling_step = a + (nls_right * 16  - 1) * b;
                else
                    pp_inline_parameter.grf5.normalized_video_x_scaling_step -= b * 16;
            }
        }
    }

    /* vertical sampling is always a uniform step: 8 rows per block */
    src_y_steping = pp_static_parameter.grf1.r1_6.normalized_video_y_scaling_step;
    pp_inline_parameter.grf5.source_surface_block_normalized_vertical_origin = src_y_steping * y * 8 + pp_avs_context->src_normalized_y;
    pp_inline_parameter.grf5.destination_block_horizontal_origin = x * 16 + pp_avs_context->dest_x;
    pp_inline_parameter.grf5.destination_block_vertical_origin = y * 8 + pp_avs_context->dest_y;

    return 0;
}
1160
static void
pp_nv12_avs_initialize(VADriverContextP ctx, struct i965_post_processing_context *pp_context,
                       const struct i965_surface *src_surface,
                       const VARectangle *src_rect,
                       const struct i965_surface *dst_surface,
                       const VARectangle *dst_rect,
                       void *filter_param)
{
    /*
     * Set up the NV12 AVS scaling pass: bind the source Y plane and
     * the full planar-420 view as sampled SURFACE2 states (indices 1
     * and 2), the destination Y/UV planes as media-write targets
     * (indices 7 and 8), program a pair of 8x8 AVS/IEF samplers (8-tap
     * adaptive for Y, nearest for UV) with their sampler-8x8 state
     * buffers, then fill the static/inline kernel parameters for the
     * NLAS walker.  filter_param is ignored.
     */
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    struct pp_avs_context *pp_avs_context = (struct pp_avs_context *)&pp_context->private_context;
    struct object_surface *obj_surface;
    struct i965_sampler_8x8 *sampler_8x8;
    struct i965_sampler_8x8_state *sampler_8x8_state;
    int index;
    int in_w, in_h, in_wpitch, in_hpitch;
    int out_w, out_h, out_wpitch, out_hpitch;

    /* surface */
    obj_surface = SURFACE(src_surface->id);
    in_w = obj_surface->orig_width;
    in_h = obj_surface->orig_height;
    in_wpitch = obj_surface->width;     /* allocated pitch */
    in_hpitch = obj_surface->height;    /* allocated Y-plane rows */

    /* source Y surface index 1 */
    i965_pp_set_surface2_state(ctx, pp_context,
                               obj_surface->bo, 0,
                               in_w, in_h, in_wpitch,
                               0, 0,
                               SURFACE_FORMAT_Y8_UNORM, 0,
                               1);

    /* source UV surface index 2 — planar 420 view; the UV plane begins
     * at pitch * allocated height */
    i965_pp_set_surface2_state(ctx, pp_context,
                               obj_surface->bo, in_wpitch * in_hpitch,
                               in_w, in_h, in_wpitch,
                               0, 0,
                               SURFACE_FORMAT_PLANAR_420_8, 1,
                               2);

    /* destination surface */
    obj_surface = SURFACE(dst_surface->id);
    out_w = obj_surface->orig_width;
    out_h = obj_surface->orig_height;
    out_wpitch = obj_surface->width;
    out_hpitch = obj_surface->height;
    assert(out_w <= out_wpitch && out_h <= out_hpitch);

    /* destination Y surface index 7 — write targets use width/4,
     * matching the other media-write targets in this file */
    i965_pp_set_surface_state(ctx, pp_context,
                              obj_surface->bo, 0,
                              out_w / 4, out_h, out_wpitch, I965_SURFACEFORMAT_R8_UNORM,
                              7, 1);

    /* destination UV surface index 8 */
    i965_pp_set_surface_state(ctx, pp_context,
                              obj_surface->bo, out_wpitch * out_hpitch,
                              out_w / 4, out_h / 2, out_wpitch, I965_SURFACEFORMAT_R8G8_UNORM,
                              8, 1);

    /* sampler 8x8 state (for the Y sampler): adaptive filtering bypassed */
    dri_bo_map(pp_context->sampler_state_table.bo_8x8, True);
    assert(pp_context->sampler_state_table.bo_8x8->virtual);
    assert(sizeof(*sampler_8x8_state) == sizeof(int) * 138);
    sampler_8x8_state = pp_context->sampler_state_table.bo_8x8->virtual;
    memset(sampler_8x8_state, 0, sizeof(*sampler_8x8_state));
    sampler_8x8_state->dw136.default_sharpness_level = 0;
    sampler_8x8_state->dw137.adaptive_filter_for_all_channel = 1;
    sampler_8x8_state->dw137.bypass_y_adaptive_filtering = 1;
    sampler_8x8_state->dw137.bypass_x_adaptive_filtering = 1;
    dri_bo_unmap(pp_context->sampler_state_table.bo_8x8);

    /* sampler 8x8 — mapped here, written for both indices below, and
     * unmapped only after the UV sampler is programmed */
    dri_bo_map(pp_context->sampler_state_table.bo, True);
    assert(pp_context->sampler_state_table.bo->virtual);
    assert(sizeof(*sampler_8x8) == sizeof(int) * 16);
    sampler_8x8 = pp_context->sampler_state_table.bo->virtual;

    /* sample_8x8 Y index 1 — 8-tap adaptive scaler with 5x5 detail IEF */
    index = 1;
    memset(&sampler_8x8[index], 0, sizeof(*sampler_8x8));
    sampler_8x8[index].dw0.avs_filter_type = AVS_FILTER_ADAPTIVE_8_TAP;
    sampler_8x8[index].dw0.ief_bypass = 0;
    sampler_8x8[index].dw0.ief_filter_type = IEF_FILTER_DETAIL;
    sampler_8x8[index].dw0.ief_filter_size = IEF_FILTER_SIZE_5X5;
    sampler_8x8[index].dw1.sampler_8x8_state_pointer = pp_context->sampler_state_table.bo_8x8->offset >> 5;
    /* dw2-dw4: noise estimation and edge thresholds/weights */
    sampler_8x8[index].dw2.global_noise_estimation = 22;
    sampler_8x8[index].dw2.strong_edge_threshold = 8;
    sampler_8x8[index].dw2.weak_edge_threshold = 1;
    sampler_8x8[index].dw3.strong_edge_weight = 7;
    sampler_8x8[index].dw3.regular_weight = 2;
    sampler_8x8[index].dw3.non_edge_weight = 0;
    sampler_8x8[index].dw3.gain_factor = 40;
    sampler_8x8[index].dw4.steepness_boost = 0;
    sampler_8x8[index].dw4.steepness_threshold = 0;
    sampler_8x8[index].dw4.mr_boost = 0;
    sampler_8x8[index].dw4.mr_threshold = 5;
    /* dw5-dw13: IEF piece-wise-linear curve points, biases and slopes */
    sampler_8x8[index].dw5.pwl1_point_1 = 4;
    sampler_8x8[index].dw5.pwl1_point_2 = 12;
    sampler_8x8[index].dw5.pwl1_point_3 = 16;
    sampler_8x8[index].dw5.pwl1_point_4 = 26;
    sampler_8x8[index].dw6.pwl1_point_5 = 40;
    sampler_8x8[index].dw6.pwl1_point_6 = 160;
    sampler_8x8[index].dw6.pwl1_r3_bias_0 = 127;
    sampler_8x8[index].dw6.pwl1_r3_bias_1 = 98;
    sampler_8x8[index].dw7.pwl1_r3_bias_2 = 88;
    sampler_8x8[index].dw7.pwl1_r3_bias_3 = 64;
    sampler_8x8[index].dw7.pwl1_r3_bias_4 = 44;
    sampler_8x8[index].dw7.pwl1_r3_bias_5 = 0;
    sampler_8x8[index].dw8.pwl1_r3_bias_6 = 0;
    sampler_8x8[index].dw8.pwl1_r5_bias_0 = 3;
    sampler_8x8[index].dw8.pwl1_r5_bias_1 = 32;
    sampler_8x8[index].dw8.pwl1_r5_bias_2 = 32;
    sampler_8x8[index].dw9.pwl1_r5_bias_3 = 58;
    sampler_8x8[index].dw9.pwl1_r5_bias_4 = 100;
    sampler_8x8[index].dw9.pwl1_r5_bias_5 = 108;
    sampler_8x8[index].dw9.pwl1_r5_bias_6 = 88;
    sampler_8x8[index].dw10.pwl1_r3_slope_0 = -116;
    sampler_8x8[index].dw10.pwl1_r3_slope_1 = -20;
    sampler_8x8[index].dw10.pwl1_r3_slope_2 = -96;
    sampler_8x8[index].dw10.pwl1_r3_slope_3 = -32;
    sampler_8x8[index].dw11.pwl1_r3_slope_4 = -50;
    sampler_8x8[index].dw11.pwl1_r3_slope_5 = 0;
    sampler_8x8[index].dw11.pwl1_r3_slope_6 = 0;
    sampler_8x8[index].dw11.pwl1_r5_slope_0 = 116;
    sampler_8x8[index].dw12.pwl1_r5_slope_1 = 0;
    sampler_8x8[index].dw12.pwl1_r5_slope_2 = 114;
    sampler_8x8[index].dw12.pwl1_r5_slope_3 = 67;
    sampler_8x8[index].dw12.pwl1_r5_slope_4 = 9;
    sampler_8x8[index].dw13.pwl1_r5_slope_5 = -3;
    sampler_8x8[index].dw13.pwl1_r5_slope_6 = -15;
    sampler_8x8[index].dw13.limiter_boost = 0;
    sampler_8x8[index].dw13.minimum_limiter = 10;
    sampler_8x8[index].dw13.maximum_limiter = 11;
    sampler_8x8[index].dw14.clip_limiter = 130;
    /* relocate dw1 so the GPU sees the final address of the Y
     * sampler-8x8 state buffer once the bo is pinned */
    dri_bo_emit_reloc(pp_context->sampler_state_table.bo,
                      I915_GEM_DOMAIN_RENDER, 
                      0,
                      0,
                      sizeof(*sampler_8x8) * index + offsetof(struct i965_sampler_8x8, dw1),
                      pp_context->sampler_state_table.bo_8x8);

    /* sampler 8x8 state for the UV sampler: no adaptive filtering */
    dri_bo_map(pp_context->sampler_state_table.bo_8x8_uv, True);
    assert(pp_context->sampler_state_table.bo_8x8_uv->virtual);
    assert(sizeof(*sampler_8x8_state) == sizeof(int) * 138);
    sampler_8x8_state = pp_context->sampler_state_table.bo_8x8_uv->virtual;
    memset(sampler_8x8_state, 0, sizeof(*sampler_8x8_state));
    sampler_8x8_state->dw136.default_sharpness_level = 0;
    sampler_8x8_state->dw137.adaptive_filter_for_all_channel = 0;
    sampler_8x8_state->dw137.bypass_y_adaptive_filtering = 1;
    sampler_8x8_state->dw137.bypass_x_adaptive_filtering = 1;
    dri_bo_unmap(pp_context->sampler_state_table.bo_8x8_uv);

    /* sample_8x8 UV index 2 — nearest filtering; IEF/PWL settings
     * mirror the Y sampler above */
    index = 2;
    memset(&sampler_8x8[index], 0, sizeof(*sampler_8x8));
    sampler_8x8[index].dw0.avs_filter_type = AVS_FILTER_NEAREST;
    sampler_8x8[index].dw0.ief_bypass = 0;
    sampler_8x8[index].dw0.ief_filter_type = IEF_FILTER_DETAIL;
    sampler_8x8[index].dw0.ief_filter_size = IEF_FILTER_SIZE_5X5;
    sampler_8x8[index].dw1.sampler_8x8_state_pointer = pp_context->sampler_state_table.bo_8x8_uv->offset >> 5;
    sampler_8x8[index].dw2.global_noise_estimation = 22;
    sampler_8x8[index].dw2.strong_edge_threshold = 8;
    sampler_8x8[index].dw2.weak_edge_threshold = 1;
    sampler_8x8[index].dw3.strong_edge_weight = 7;
    sampler_8x8[index].dw3.regular_weight = 2;
    sampler_8x8[index].dw3.non_edge_weight = 0;
    sampler_8x8[index].dw3.gain_factor = 40;
    sampler_8x8[index].dw4.steepness_boost = 0;
    sampler_8x8[index].dw4.steepness_threshold = 0;
    sampler_8x8[index].dw4.mr_boost = 0;
    sampler_8x8[index].dw4.mr_threshold = 5;
    sampler_8x8[index].dw5.pwl1_point_1 = 4;
    sampler_8x8[index].dw5.pwl1_point_2 = 12;
    sampler_8x8[index].dw5.pwl1_point_3 = 16;
    sampler_8x8[index].dw5.pwl1_point_4 = 26;
    sampler_8x8[index].dw6.pwl1_point_5 = 40;
    sampler_8x8[index].dw6.pwl1_point_6 = 160;
    sampler_8x8[index].dw6.pwl1_r3_bias_0 = 127;
    sampler_8x8[index].dw6.pwl1_r3_bias_1 = 98;
    sampler_8x8[index].dw7.pwl1_r3_bias_2 = 88;
    sampler_8x8[index].dw7.pwl1_r3_bias_3 = 64;
    sampler_8x8[index].dw7.pwl1_r3_bias_4 = 44;
    sampler_8x8[index].dw7.pwl1_r3_bias_5 = 0;
    sampler_8x8[index].dw8.pwl1_r3_bias_6 = 0;
    sampler_8x8[index].dw8.pwl1_r5_bias_0 = 3;
    sampler_8x8[index].dw8.pwl1_r5_bias_1 = 32;
    sampler_8x8[index].dw8.pwl1_r5_bias_2 = 32;
    sampler_8x8[index].dw9.pwl1_r5_bias_3 = 58;
    sampler_8x8[index].dw9.pwl1_r5_bias_4 = 100;
    sampler_8x8[index].dw9.pwl1_r5_bias_5 = 108;
    sampler_8x8[index].dw9.pwl1_r5_bias_6 = 88;
    sampler_8x8[index].dw10.pwl1_r3_slope_0 = -116;
    sampler_8x8[index].dw10.pwl1_r3_slope_1 = -20;
    sampler_8x8[index].dw10.pwl1_r3_slope_2 = -96;
    sampler_8x8[index].dw10.pwl1_r3_slope_3 = -32;
    sampler_8x8[index].dw11.pwl1_r3_slope_4 = -50;
    sampler_8x8[index].dw11.pwl1_r3_slope_5 = 0;
    sampler_8x8[index].dw11.pwl1_r3_slope_6 = 0;
    sampler_8x8[index].dw11.pwl1_r5_slope_0 = 116;
    sampler_8x8[index].dw12.pwl1_r5_slope_1 = 0;
    sampler_8x8[index].dw12.pwl1_r5_slope_2 = 114;
    sampler_8x8[index].dw12.pwl1_r5_slope_3 = 67;
    sampler_8x8[index].dw12.pwl1_r5_slope_4 = 9;
    sampler_8x8[index].dw13.pwl1_r5_slope_5 = -3;
    sampler_8x8[index].dw13.pwl1_r5_slope_6 = -15;
    sampler_8x8[index].dw13.limiter_boost = 0;
    sampler_8x8[index].dw13.minimum_limiter = 10;
    sampler_8x8[index].dw13.maximum_limiter = 11;
    sampler_8x8[index].dw14.clip_limiter = 130;
    /* relocate dw1 to the UV sampler-8x8 state buffer */
    dri_bo_emit_reloc(pp_context->sampler_state_table.bo,
                      I915_GEM_DOMAIN_RENDER, 
                      0,
                      0,
                      sizeof(*sampler_8x8) * index + offsetof(struct i965_sampler_8x8, dw1),
                      pp_context->sampler_state_table.bo_8x8_uv);

    dri_bo_unmap(pp_context->sampler_state_table.bo);

    /* private function & data */
    pp_context->pp_x_steps = pp_avs_x_steps;
    pp_context->pp_y_steps = pp_avs_y_steps;
    pp_context->pp_set_block_parameter = pp_avs_set_block_parameter;

    pp_avs_context->dest_x = dst_rect->x;
    pp_avs_context->dest_y = dst_rect->y;
    pp_avs_context->dest_w = ALIGN(dst_rect->width, 16);
    pp_avs_context->dest_h = ALIGN(dst_rect->height, 16);
    /* NOTE(review): origins/steps divide by both source and destination
     * surface dimensions (same convention as the scaling pass) —
     * TODO confirm against the kernel sources */
    pp_avs_context->src_normalized_x = (float)src_rect->x / in_w / out_w;
    pp_avs_context->src_normalized_y = (float)src_rect->y / in_h / out_h;
    pp_avs_context->src_w = src_rect->width;
    pp_avs_context->src_h = src_rect->height;

    pp_static_parameter.grf4.r4_2.avs.nlas = 1;     /* enable non-linear scaling */
    pp_static_parameter.grf1.r1_6.normalized_video_y_scaling_step = (float) src_rect->height / in_h / out_h;

    pp_inline_parameter.grf5.normalized_video_x_scaling_step = (float) src_rect->width / in_w / out_w;
    pp_inline_parameter.grf5.block_count_x = 1;        /* M x 1 */
    pp_inline_parameter.grf5.number_blocks = pp_avs_context->dest_h / 8;
    pp_inline_parameter.grf5.block_vertical_mask = 0xff;
    pp_inline_parameter.grf5.block_horizontal_mask = 0xffff;
    pp_inline_parameter.grf6.video_step_delta = 0.0;
}
1404
static int
pp_dndi_x_steps(void *private_context)
{
    /* DN/DI walks a single step horizontally. */
    (void)private_context;      /* unused */

    return 1;
}
1410
1411 static int
1412 pp_dndi_y_steps(void *private_context)
1413 {
1414     struct pp_dndi_context *pp_dndi_context = private_context;
1415
1416     return pp_dndi_context->dest_h / 4;
1417 }
1418
static int
pp_dndi_set_block_parameter(struct i965_post_processing_context *pp_context, int x, int y)
{
    /*
     * Per-block parameters for the denoise/deinterlace pass: blocks
     * are 16 pixels wide and 4 rows tall (cf. pp_dndi_y_steps).
     */
    pp_inline_parameter.grf5.destination_block_horizontal_origin = x * 16;
    pp_inline_parameter.grf5.destination_block_vertical_origin = y * 4;

    return 0;
}
1427
1428 static 
1429 void pp_nv12_dndi_initialize(VADriverContextP ctx, struct i965_post_processing_context *pp_context,
1430                              const struct i965_surface *src_surface,
1431                              const VARectangle *src_rect,
1432                              const struct i965_surface *dst_surface,
1433                              const VARectangle *dst_rect,
1434                              void *filter_param)
1435 {
1436     struct i965_driver_data *i965 = i965_driver_data(ctx);
1437     struct pp_dndi_context *pp_dndi_context = (struct pp_dndi_context *)&pp_context->private_context;
1438     struct object_surface *obj_surface;
1439     struct i965_sampler_dndi *sampler_dndi;
1440     int index;
1441     int w, h;
1442     int orig_w, orig_h;
1443
1444     /* surface */
1445     obj_surface = SURFACE(src_surface->id);
1446     orig_w = obj_surface->orig_width;
1447     orig_h = obj_surface->orig_height;
1448     w = obj_surface->width;
1449     h = obj_surface->height;
1450
1451     if (pp_context->stmm.bo == NULL) {
1452         pp_context->stmm.bo = dri_bo_alloc(i965->intel.bufmgr,
1453                                            "STMM surface",
1454                                            w * h,
1455                                            4096);
1456         assert(pp_context->stmm.bo);
1457     }
1458
1459     /* source UV surface index 2 */
1460     i965_pp_set_surface_state(ctx, pp_context,
1461                               obj_surface->bo, w * h,
1462                               orig_w / 4, orig_h / 2, w, I965_SURFACEFORMAT_R8G8_UNORM,
1463                               2, 0);
1464
1465     /* source YUV surface index 4 */
1466     i965_pp_set_surface2_state(ctx, pp_context,
1467                                obj_surface->bo, 0,
1468                                orig_w, orig_w, w,
1469                                0, h,
1470                                SURFACE_FORMAT_PLANAR_420_8, 1,
1471                                4);
1472
1473     /* source STMM surface index 20 */
1474     i965_pp_set_surface_state(ctx, pp_context,
1475                               pp_context->stmm.bo, 0,
1476                               orig_w, orig_h, w, I965_SURFACEFORMAT_R8_UNORM,
1477                               20, 1);
1478
1479     /* destination surface */
1480     obj_surface = SURFACE(dst_surface->id);
1481     orig_w = obj_surface->orig_width;
1482     orig_h = obj_surface->orig_height;
1483     w = obj_surface->width;
1484     h = obj_surface->height;
1485
1486     /* destination Y surface index 7 */
1487     i965_pp_set_surface_state(ctx, pp_context,
1488                               obj_surface->bo, 0,
1489                               orig_w / 4, orig_h, w, I965_SURFACEFORMAT_R8_UNORM,
1490                               7, 1);
1491
1492     /* destination UV surface index 8 */
1493     i965_pp_set_surface_state(ctx, pp_context,
1494                               obj_surface->bo, w * h,
1495                               orig_w / 4, orig_h / 2, w, I965_SURFACEFORMAT_R8G8_UNORM,
1496                               8, 1);
1497     /* sampler dndi */
1498     dri_bo_map(pp_context->sampler_state_table.bo, True);
1499     assert(pp_context->sampler_state_table.bo->virtual);
1500     assert(sizeof(*sampler_dndi) == sizeof(int) * 8);
1501     sampler_dndi = pp_context->sampler_state_table.bo->virtual;
1502
1503     /* sample dndi index 1 */
1504     index = 0;
1505     sampler_dndi[index].dw0.denoise_asd_threshold = 0;
1506     sampler_dndi[index].dw0.denoise_history_delta = 8;          // 0-15, default is 8
1507     sampler_dndi[index].dw0.denoise_maximum_history = 128;      // 128-240
1508     sampler_dndi[index].dw0.denoise_stad_threshold = 0;
1509
1510     sampler_dndi[index].dw1.denoise_threshold_for_sum_of_complexity_measure = 64;
1511     sampler_dndi[index].dw1.denoise_moving_pixel_threshold = 0;
1512     sampler_dndi[index].dw1.stmm_c2 = 0;
1513     sampler_dndi[index].dw1.low_temporal_difference_threshold = 8;
1514     sampler_dndi[index].dw1.temporal_difference_threshold = 16;
1515
1516     sampler_dndi[index].dw2.block_noise_estimate_noise_threshold = 15;   // 0-31
1517     sampler_dndi[index].dw2.block_noise_estimate_edge_threshold = 7;    // 0-15
1518     sampler_dndi[index].dw2.denoise_edge_threshold = 7;                 // 0-15
1519     sampler_dndi[index].dw2.good_neighbor_threshold = 7;                // 0-63
1520
1521     sampler_dndi[index].dw3.maximum_stmm = 128;
1522     sampler_dndi[index].dw3.multipler_for_vecm = 2;
1523     sampler_dndi[index].dw3.blending_constant_across_time_for_small_values_of_stmm = 0;
1524     sampler_dndi[index].dw3.blending_constant_across_time_for_large_values_of_stmm = 64;
1525     sampler_dndi[index].dw3.stmm_blending_constant_select = 0;
1526
1527     sampler_dndi[index].dw4.sdi_delta = 8;
1528     sampler_dndi[index].dw4.sdi_threshold = 128;
1529     sampler_dndi[index].dw4.stmm_output_shift = 7;                      // stmm_max - stmm_min = 2 ^ stmm_output_shift
1530     sampler_dndi[index].dw4.stmm_shift_up = 0;
1531     sampler_dndi[index].dw4.stmm_shift_down = 0;
1532     sampler_dndi[index].dw4.minimum_stmm = 0;
1533
1534     sampler_dndi[index].dw5.fmd_temporal_difference_threshold = 0;
1535     sampler_dndi[index].dw5.sdi_fallback_mode_2_constant = 0;
1536     sampler_dndi[index].dw5.sdi_fallback_mode_1_t2_constant = 0;
1537     sampler_dndi[index].dw5.sdi_fallback_mode_1_t1_constant = 0;
1538
1539     sampler_dndi[index].dw6.dn_enable = 1;
1540     sampler_dndi[index].dw6.di_enable = 1;
1541     sampler_dndi[index].dw6.di_partial = 0;
1542     sampler_dndi[index].dw6.dndi_top_first = 1;
1543     sampler_dndi[index].dw6.dndi_stream_id = 1;
1544     sampler_dndi[index].dw6.dndi_first_frame = 1;
1545     sampler_dndi[index].dw6.progressive_dn = 0;
1546     sampler_dndi[index].dw6.fmd_tear_threshold = 32;
1547     sampler_dndi[index].dw6.fmd2_vertical_difference_threshold = 32;
1548     sampler_dndi[index].dw6.fmd1_vertical_difference_threshold = 32;
1549
1550     sampler_dndi[index].dw7.fmd_for_1st_field_of_current_frame = 2;
1551     sampler_dndi[index].dw7.fmd_for_2nd_field_of_previous_frame = 1;
1552     sampler_dndi[index].dw7.vdi_walker_enable = 0;
1553     sampler_dndi[index].dw7.column_width_minus1 = w / 16;
1554
1555     dri_bo_unmap(pp_context->sampler_state_table.bo);
1556
1557     /* private function & data */
1558     pp_context->pp_x_steps = pp_dndi_x_steps;
1559     pp_context->pp_y_steps = pp_dndi_y_steps;
1560     pp_context->pp_set_block_parameter = pp_dndi_set_block_parameter;
1561
1562     pp_static_parameter.grf1.statistics_surface_picth = w / 2;
1563     pp_static_parameter.grf1.r1_6.di.top_field_first = 0;
1564     pp_static_parameter.grf4.r4_2.di.motion_history_coefficient_m2 = 64;
1565     pp_static_parameter.grf4.r4_2.di.motion_history_coefficient_m1 = 192;
1566
1567     pp_inline_parameter.grf5.block_count_x = w / 16;   /* 1 x N */
1568     pp_inline_parameter.grf5.number_blocks = w / 16;
1569     pp_inline_parameter.grf5.block_vertical_mask = 0xff;
1570     pp_inline_parameter.grf5.block_horizontal_mask = 0xffff;
1571
1572     pp_dndi_context->dest_w = w;
1573     pp_dndi_context->dest_h = h;
1574 }
1575
/*
 * Per-run (re)initialization of the post-processing context for Ironlake.
 *
 * Frees any buffer objects left from a previous run, allocates fresh GPU
 * state buffers (CURBE, binding table, interface descriptors, sampler
 * state, VFE state), clears the per-surface slots and the static/inline
 * kernel parameters, then hands off to the selected PP module's own
 * initialize hook.
 *
 * pp_index selects an entry in pp_context->pp_modules (must be within
 * [PP_NULL, NUM_PP_MODULES)); filter_param is forwarded verbatim to the
 * module and its meaning is module-specific.
 */
static void
ironlake_pp_initialize(
    VADriverContextP   ctx,
    struct i965_post_processing_context *pp_context,
    const struct i965_surface *src_surface,
    const VARectangle *src_rect,
    const struct i965_surface *dst_surface,
    const VARectangle *dst_rect,
    int                pp_index,
    void *filter_param
)
{
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    struct pp_module *pp_module;
    dri_bo *bo;
    int i;

    /* Constant buffer (CURBE) — filled later by the constant upload step. */
    dri_bo_unreference(pp_context->curbe.bo);
    bo = dri_bo_alloc(i965->intel.bufmgr,
                      "constant buffer",
                      4096, 
                      4096);
    assert(bo);
    pp_context->curbe.bo = bo;

    /* Binding table: one dword entry per surface state. */
    dri_bo_unreference(pp_context->binding_table.bo);
    bo = dri_bo_alloc(i965->intel.bufmgr, 
                      "binding table",
                      sizeof(unsigned int), 
                      4096);
    assert(bo);
    pp_context->binding_table.bo = bo;

    /* Interface descriptor table (Ironlake layout); count reset to zero
     * and incremented as descriptors are written. */
    dri_bo_unreference(pp_context->idrt.bo);
    bo = dri_bo_alloc(i965->intel.bufmgr, 
                      "interface discriptor", 
                      sizeof(struct i965_interface_descriptor), 
                      4096);
    assert(bo);
    pp_context->idrt.bo = bo;
    pp_context->idrt.num_interface_descriptors = 0;

    /* Sampler state table, zero-filled so unused samplers are inert. */
    dri_bo_unreference(pp_context->sampler_state_table.bo);
    bo = dri_bo_alloc(i965->intel.bufmgr, 
                      "sampler state table", 
                      4096,
                      4096);
    assert(bo);
    dri_bo_map(bo, True);
    memset(bo->virtual, 0, bo->size);
    dri_bo_unmap(bo);
    pp_context->sampler_state_table.bo = bo;

    /* 8x8 (AVS) sampler state, luma plane. */
    dri_bo_unreference(pp_context->sampler_state_table.bo_8x8);
    bo = dri_bo_alloc(i965->intel.bufmgr, 
                      "sampler 8x8 state ",
                      4096,
                      4096);
    assert(bo);
    pp_context->sampler_state_table.bo_8x8 = bo;

    /* 8x8 (AVS) sampler state, chroma (UV) plane. */
    dri_bo_unreference(pp_context->sampler_state_table.bo_8x8_uv);
    bo = dri_bo_alloc(i965->intel.bufmgr, 
                      "sampler 8x8 state ",
                      4096,
                      4096);
    assert(bo);
    pp_context->sampler_state_table.bo_8x8_uv = bo;

    /* Video front-end state. */
    dri_bo_unreference(pp_context->vfe_state.bo);
    bo = dri_bo_alloc(i965->intel.bufmgr, 
                      "vfe state", 
                      sizeof(struct i965_vfe_state), 
                      4096);
    assert(bo);
    pp_context->vfe_state.bo = bo;
    
    /* Drop all per-surface state/buffer references from the previous run;
     * the module's initialize hook repopulates the slots it needs. */
    for (i = 0; i < MAX_PP_SURFACES; i++) {
        dri_bo_unreference(pp_context->surfaces[i].ss_bo);
        pp_context->surfaces[i].ss_bo = NULL;

        dri_bo_unreference(pp_context->surfaces[i].s_bo);
        pp_context->surfaces[i].s_bo = NULL;
    }

    /* Reset kernel parameters, then let the selected module fill them in. */
    memset(&pp_static_parameter, 0, sizeof(pp_static_parameter));
    memset(&pp_inline_parameter, 0, sizeof(pp_inline_parameter));
    assert(pp_index >= PP_NULL && pp_index < NUM_PP_MODULES);
    pp_context->current_pp = pp_index;
    pp_module = &pp_context->pp_modules[pp_index];
    
    if (pp_module->initialize)
        pp_module->initialize(ctx, pp_context,
                              src_surface,
                              src_rect,
                              dst_surface,
                              dst_rect,
                              filter_param);
}
1675
/*
 * Run one post-processing operation on Ironlake: (re)initialize the PP
 * context for the selected module, write the GPU state buffers, then emit
 * the media pipeline into the batch buffer.  The three steps are order
 * dependent — states reference buffers created by initialize, and the
 * pipeline references the states.
 */
static void
ironlake_post_processing(
    VADriverContextP   ctx,
    struct i965_post_processing_context *pp_context,
    const struct i965_surface *src_surface,
    const VARectangle *src_rect,
    const struct i965_surface *dst_surface,
    const VARectangle *dst_rect,
    int                pp_index,
    void *filter_param
)
{
    ironlake_pp_initialize(ctx, pp_context, src_surface, src_rect, dst_surface, dst_rect, pp_index, filter_param);
    ironlake_pp_states_setup(ctx, pp_context);
    ironlake_pp_pipeline_setup(ctx, pp_context);
}
1692
/*
 * Per-run (re)initialization of the post-processing context for Gen6/Gen7.
 *
 * Mirrors ironlake_pp_initialize: releases buffer objects from the
 * previous run, allocates fresh GPU state buffers, clears the per-surface
 * slots and the static/inline kernel parameters, and invokes the selected
 * PP module's initialize hook.  The only structural difference from the
 * Ironlake path is the Gen6 interface descriptor layout
 * (struct gen6_interface_descriptor_data).
 */
static void
gen6_pp_initialize(
    VADriverContextP   ctx,
    struct i965_post_processing_context *pp_context,
    const struct i965_surface *src_surface,
    const VARectangle *src_rect,
    const struct i965_surface *dst_surface,
    const VARectangle *dst_rect,
    int                pp_index,
    void *filter_param
)
{
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    struct pp_module *pp_module;
    dri_bo *bo;
    int i;

    /* Constant buffer (CURBE) — filled by gen6_pp_upload_constants. */
    dri_bo_unreference(pp_context->curbe.bo);
    bo = dri_bo_alloc(i965->intel.bufmgr,
                      "constant buffer",
                      4096, 
                      4096);
    assert(bo);
    pp_context->curbe.bo = bo;

    /* Binding table: one dword entry per surface state. */
    dri_bo_unreference(pp_context->binding_table.bo);
    bo = dri_bo_alloc(i965->intel.bufmgr, 
                      "binding table",
                      sizeof(unsigned int), 
                      4096);
    assert(bo);
    pp_context->binding_table.bo = bo;

    /* Gen6 interface descriptor table; descriptor count starts at zero. */
    dri_bo_unreference(pp_context->idrt.bo);
    bo = dri_bo_alloc(i965->intel.bufmgr, 
                      "interface discriptor", 
                      sizeof(struct gen6_interface_descriptor_data), 
                      4096);
    assert(bo);
    pp_context->idrt.bo = bo;
    pp_context->idrt.num_interface_descriptors = 0;

    /* Sampler state table, zero-filled so unused samplers are inert. */
    dri_bo_unreference(pp_context->sampler_state_table.bo);
    bo = dri_bo_alloc(i965->intel.bufmgr, 
                      "sampler state table", 
                      4096,
                      4096);
    assert(bo);
    dri_bo_map(bo, True);
    memset(bo->virtual, 0, bo->size);
    dri_bo_unmap(bo);
    pp_context->sampler_state_table.bo = bo;

    /* 8x8 (AVS) sampler state, luma plane. */
    dri_bo_unreference(pp_context->sampler_state_table.bo_8x8);
    bo = dri_bo_alloc(i965->intel.bufmgr, 
                      "sampler 8x8 state ",
                      4096,
                      4096);
    assert(bo);
    pp_context->sampler_state_table.bo_8x8 = bo;

    /* 8x8 (AVS) sampler state, chroma (UV) plane. */
    dri_bo_unreference(pp_context->sampler_state_table.bo_8x8_uv);
    bo = dri_bo_alloc(i965->intel.bufmgr, 
                      "sampler 8x8 state ",
                      4096,
                      4096);
    assert(bo);
    pp_context->sampler_state_table.bo_8x8_uv = bo;

    /* Video front-end state. */
    dri_bo_unreference(pp_context->vfe_state.bo);
    bo = dri_bo_alloc(i965->intel.bufmgr, 
                      "vfe state", 
                      sizeof(struct i965_vfe_state), 
                      4096);
    assert(bo);
    pp_context->vfe_state.bo = bo;
    
    /* Drop all per-surface state/buffer references from the previous run;
     * the module's initialize hook repopulates the slots it needs. */
    for (i = 0; i < MAX_PP_SURFACES; i++) {
        dri_bo_unreference(pp_context->surfaces[i].ss_bo);
        pp_context->surfaces[i].ss_bo = NULL;

        dri_bo_unreference(pp_context->surfaces[i].s_bo);
        pp_context->surfaces[i].s_bo = NULL;
    }

    /* Reset kernel parameters, then let the selected module fill them in. */
    memset(&pp_static_parameter, 0, sizeof(pp_static_parameter));
    memset(&pp_inline_parameter, 0, sizeof(pp_inline_parameter));
    assert(pp_index >= PP_NULL && pp_index < NUM_PP_MODULES);
    pp_context->current_pp = pp_index;
    pp_module = &pp_context->pp_modules[pp_index];
    
    if (pp_module->initialize)
        pp_module->initialize(ctx, pp_context,
                              src_surface,
                              src_rect,
                              dst_surface,
                              dst_rect,
                              filter_param);
}
1792
1793 static void
1794 gen6_pp_binding_table(struct i965_post_processing_context *pp_context)
1795 {
1796     unsigned int *binding_table;
1797     dri_bo *bo = pp_context->binding_table.bo;
1798     int i;
1799
1800     dri_bo_map(bo, 1);
1801     assert(bo->virtual);
1802     binding_table = bo->virtual;
1803     memset(binding_table, 0, bo->size);
1804
1805     for (i = 0; i < MAX_PP_SURFACES; i++) {
1806         if (pp_context->surfaces[i].ss_bo) {
1807             assert(pp_context->surfaces[i].s_bo);
1808
1809             binding_table[i] = pp_context->surfaces[i].ss_bo->offset;
1810             dri_bo_emit_reloc(bo,
1811                               I915_GEM_DOMAIN_INSTRUCTION, 0,
1812                               0,
1813                               i * sizeof(*binding_table),
1814                               pp_context->surfaces[i].ss_bo);
1815         }
1816     
1817     }
1818
1819     dri_bo_unmap(bo);
1820 }
1821
/*
 * Fill the single Gen6 interface descriptor for the currently selected PP
 * kernel and emit relocations for the three buffer addresses it embeds
 * (kernel start pointer, sampler state pointer, binding table pointer).
 * Increments idrt.num_interface_descriptors, which the descriptor-load
 * command uses as the table length.
 */
static void
gen6_pp_interface_descriptor_table(struct i965_post_processing_context *pp_context)
{
    struct gen6_interface_descriptor_data *desc;
    dri_bo *bo;
    int pp_index = pp_context->current_pp;

    bo = pp_context->idrt.bo;
    dri_bo_map(bo, True);
    assert(bo->virtual);
    desc = bo->virtual;
    memset(desc, 0, sizeof(*desc));
    /* Kernel start pointer is stored in 64-byte units (>> 6); patched by
     * the reloc on desc0 below. */
    desc->desc0.kernel_start_pointer = 
        pp_context->pp_modules[pp_index].kernel.bo->offset >> 6; /* reloc */
    desc->desc1.single_program_flow = 1;
    desc->desc1.floating_point_mode = FLOATING_POINT_IEEE_754;
    desc->desc2.sampler_count = 1;      /* 1 - 4 samplers used */
    /* Sampler state and binding table pointers are in 32-byte units (>> 5). */
    desc->desc2.sampler_state_pointer = 
        pp_context->sampler_state_table.bo->offset >> 5;
    desc->desc3.binding_table_entry_count = 0;
    desc->desc3.binding_table_pointer = 
        pp_context->binding_table.bo->offset >> 5; /*reloc */
    desc->desc4.constant_urb_entry_read_offset = 0;
    desc->desc4.constant_urb_entry_read_length = 4; /* grf 1-4 */

    /* Relocations: the delta argument encodes the low bits already written
     * into the descriptor dword above. */
    dri_bo_emit_reloc(bo,
                      I915_GEM_DOMAIN_INSTRUCTION, 0,
                      0,
                      offsetof(struct gen6_interface_descriptor_data, desc0),
                      pp_context->pp_modules[pp_index].kernel.bo);

    dri_bo_emit_reloc(bo,
                      I915_GEM_DOMAIN_INSTRUCTION, 0,
                      desc->desc2.sampler_count << 2,
                      offsetof(struct gen6_interface_descriptor_data, desc2),
                      pp_context->sampler_state_table.bo);

    dri_bo_emit_reloc(bo,
                      I915_GEM_DOMAIN_INSTRUCTION, 0,
                      desc->desc3.binding_table_entry_count,
                      offsetof(struct gen6_interface_descriptor_data, desc3),
                      pp_context->binding_table.bo);

    dri_bo_unmap(bo);
    pp_context->idrt.num_interface_descriptors++;
}
1868
1869 static void
1870 gen6_pp_upload_constants(struct i965_post_processing_context *pp_context)
1871 {
1872     unsigned char *constant_buffer;
1873
1874     assert(sizeof(pp_static_parameter) == 128);
1875     dri_bo_map(pp_context->curbe.bo, 1);
1876     assert(pp_context->curbe.bo->virtual);
1877     constant_buffer = pp_context->curbe.bo->virtual;
1878     memcpy(constant_buffer, &pp_static_parameter, sizeof(pp_static_parameter));
1879     dri_bo_unmap(pp_context->curbe.bo);
1880 }
1881
/*
 * Write all Gen6 GPU state buffers for the current run: binding table,
 * interface descriptor table, and the CURBE constants.  Must run after
 * gen6_pp_initialize (which allocates the buffers) and before
 * gen6_pp_pipeline_setup (which references them from the batch).
 */
static void
gen6_pp_states_setup(VADriverContextP ctx,
                     struct i965_post_processing_context *pp_context)
{
    gen6_pp_binding_table(pp_context);
    gen6_pp_interface_descriptor_table(pp_context);
    gen6_pp_upload_constants(pp_context);
}
1890
/*
 * Emit PIPELINE_SELECT to switch the GPU to the media pipeline, which the
 * following media commands (VFE state, CURBE load, MEDIA_OBJECT) require.
 */
static void
gen6_pp_pipeline_select(VADriverContextP ctx,
                        struct i965_post_processing_context *pp_context)
{
    struct intel_batchbuffer *batch = pp_context->batch;

    BEGIN_BATCH(batch, 1);
    OUT_BATCH(batch, CMD_PIPELINE_SELECT | PIPELINE_SELECT_MEDIA);
    ADVANCE_BATCH(batch);
}
1901
1902 static void
1903 gen6_pp_state_base_address(VADriverContextP ctx,
1904                            struct i965_post_processing_context *pp_context)
1905 {
1906     struct intel_batchbuffer *batch = pp_context->batch;
1907
1908     BEGIN_BATCH(batch, 10);
1909     OUT_BATCH(batch, CMD_STATE_BASE_ADDRESS | (10 - 2));
1910     OUT_BATCH(batch, 0 | BASE_ADDRESS_MODIFY);
1911     OUT_BATCH(batch, 0 | BASE_ADDRESS_MODIFY);
1912     OUT_BATCH(batch, 0 | BASE_ADDRESS_MODIFY);
1913     OUT_BATCH(batch, 0 | BASE_ADDRESS_MODIFY);
1914     OUT_BATCH(batch, 0 | BASE_ADDRESS_MODIFY);
1915     OUT_BATCH(batch, 0 | BASE_ADDRESS_MODIFY);
1916     OUT_BATCH(batch, 0 | BASE_ADDRESS_MODIFY);
1917     OUT_BATCH(batch, 0 | BASE_ADDRESS_MODIFY);
1918     OUT_BATCH(batch, 0 | BASE_ADDRESS_MODIFY);
1919     ADVANCE_BATCH(batch);
1920 }
1921
/*
 * Emit MEDIA_VFE_STATE configuring the video front-end from the URB
 * layout chosen in i965_post_processing_context_init.
 *
 * NOTE(review): the packed fields below (entry counts in dw2, URB entry /
 * CURBE allocation sizes in dw4) follow the Gen6 MEDIA_VFE_STATE layout —
 * confirm bit positions against the PRM before changing.
 */
static void
gen6_pp_vfe_state(VADriverContextP ctx,
                  struct i965_post_processing_context *pp_context)
{
    struct intel_batchbuffer *batch = pp_context->batch;

    BEGIN_BATCH(batch, 8);
    OUT_BATCH(batch, CMD_MEDIA_VFE_STATE | (8 - 2));
    OUT_BATCH(batch, 0);
    OUT_BATCH(batch,
              (pp_context->urb.num_vfe_entries - 1) << 16 |
              pp_context->urb.num_vfe_entries << 8);
    OUT_BATCH(batch, 0);
    OUT_BATCH(batch,
              (pp_context->urb.size_vfe_entry * 2) << 16 |  /* in 256 bits unit */
              (pp_context->urb.size_cs_entry * pp_context->urb.num_cs_entries * 2 - 1));            /* in 256 bits unit */
    OUT_BATCH(batch, 0);
    OUT_BATCH(batch, 0);
    OUT_BATCH(batch, 0);
    ADVANCE_BATCH(batch);
}
1943
/*
 * Emit MEDIA_CURBE_LOAD pointing the constant URB at the CURBE buffer
 * filled by gen6_pp_upload_constants.  The length is derived from the URB
 * CS allocation (size_cs_entry * num_cs_entries * 512 bytes) and must fit
 * inside the allocated buffer object, which the assert enforces.
 */
static void
gen6_pp_curbe_load(VADriverContextP ctx,
                   struct i965_post_processing_context *pp_context)
{
    struct intel_batchbuffer *batch = pp_context->batch;

    assert(pp_context->urb.size_cs_entry * pp_context->urb.num_cs_entries * 512 <= pp_context->curbe.bo->size);

    BEGIN_BATCH(batch, 4);
    OUT_BATCH(batch, CMD_MEDIA_CURBE_LOAD | (4 - 2));
    OUT_BATCH(batch, 0);
    OUT_BATCH(batch,
              pp_context->urb.size_cs_entry * pp_context->urb.num_cs_entries * 512);
    OUT_RELOC(batch, 
              pp_context->curbe.bo,
              I915_GEM_DOMAIN_INSTRUCTION, 0,
              0);
    ADVANCE_BATCH(batch);
}
1963
/*
 * Emit MEDIA_INTERFACE_DESCRIPTOR_LOAD referencing the descriptor table
 * built by gen6_pp_interface_descriptor_table.  The length dword is the
 * total size in bytes of the descriptors written so far.
 */
static void
gen6_interface_descriptor_load(VADriverContextP ctx,
                               struct i965_post_processing_context *pp_context)
{
    struct intel_batchbuffer *batch = pp_context->batch;

    BEGIN_BATCH(batch, 4);
    OUT_BATCH(batch, CMD_MEDIA_INTERFACE_DESCRIPTOR_LOAD | (4 - 2));
    OUT_BATCH(batch, 0);
    OUT_BATCH(batch,
              pp_context->idrt.num_interface_descriptors * sizeof(struct gen6_interface_descriptor_data));
    OUT_RELOC(batch, 
              pp_context->idrt.bo,
              I915_GEM_DOMAIN_INSTRUCTION, 0,
              0);
    ADVANCE_BATCH(batch);
}
1981
1982 static void
1983 gen6_pp_object_walker(VADriverContextP ctx,
1984                       struct i965_post_processing_context *pp_context)
1985 {
1986     struct intel_batchbuffer *batch = pp_context->batch;
1987     int x, x_steps, y, y_steps;
1988
1989     x_steps = pp_context->pp_x_steps(&pp_context->private_context);
1990     y_steps = pp_context->pp_y_steps(&pp_context->private_context);
1991
1992     for (y = 0; y < y_steps; y++) {
1993         for (x = 0; x < x_steps; x++) {
1994             if (!pp_context->pp_set_block_parameter(pp_context, x, y)) {
1995                 BEGIN_BATCH(batch, 22);
1996                 OUT_BATCH(batch, CMD_MEDIA_OBJECT | 20);
1997                 OUT_BATCH(batch, 0);
1998                 OUT_BATCH(batch, 0); /* no indirect data */
1999                 OUT_BATCH(batch, 0);
2000                 OUT_BATCH(batch, 0); /* scoreboard */
2001                 OUT_BATCH(batch, 0);
2002
2003                 /* inline data grf 5-6 */
2004                 assert(sizeof(pp_inline_parameter) == 64);
2005                 intel_batchbuffer_data(batch, &pp_inline_parameter, sizeof(pp_inline_parameter));
2006
2007                 ADVANCE_BATCH(batch);
2008             }
2009         }
2010     }
2011 }
2012
/*
 * Emit the full Gen6 media pipeline for one PP run, in the required
 * order: pipeline select, CURBE load, interface descriptor load, state
 * base addresses, VFE state, then the per-block MEDIA_OBJECT walk.  The
 * batch is built atomically so the sequence is not split across buffers.
 */
static void
gen6_pp_pipeline_setup(VADriverContextP ctx,
                       struct i965_post_processing_context *pp_context)
{
    struct intel_batchbuffer *batch = pp_context->batch;

    intel_batchbuffer_start_atomic(batch, 0x1000);
    intel_batchbuffer_emit_mi_flush(batch);
    gen6_pp_pipeline_select(ctx, pp_context);
    gen6_pp_curbe_load(ctx, pp_context);
    gen6_interface_descriptor_load(ctx, pp_context);
    gen6_pp_state_base_address(ctx, pp_context);
    gen6_pp_vfe_state(ctx, pp_context);
    gen6_pp_object_walker(ctx, pp_context);
    intel_batchbuffer_end_atomic(batch);
}
2029
2030 static void
2031 gen6_post_processing(
2032     VADriverContextP   ctx,
2033     struct i965_post_processing_context *pp_context,
2034     const struct i965_surface *src_surface,
2035     const VARectangle *src_rect,
2036     const struct i965_surface *dst_surface,
2037     const VARectangle *dst_rect,
2038     int                pp_index,
2039     void * filter_param
2040 )
2041 {
2042     gen6_pp_initialize(ctx, pp_context,
2043                        src_surface,
2044                        src_rect,
2045                        dst_surface,
2046                        dst_rect,
2047                        pp_index,
2048                        filter_param);
2049     gen6_pp_states_setup(ctx, pp_context);
2050     gen6_pp_pipeline_setup(ctx, pp_context);
2051 }
2052
2053 static void
2054 i965_post_processing_internal(
2055     VADriverContextP   ctx,
2056     struct i965_post_processing_context *pp_context,
2057     const struct i965_surface *src_surface,
2058     const VARectangle *src_rect,
2059     const struct i965_surface *dst_surface,
2060     const VARectangle *dst_rect,
2061     int                pp_index,
2062     void *filter_param
2063 )
2064 {
2065     struct i965_driver_data *i965 = i965_driver_data(ctx);
2066
2067     if (IS_GEN6(i965->intel.device_id) ||
2068         IS_GEN7(i965->intel.device_id))
2069         gen6_post_processing(ctx, pp_context, src_surface, src_rect, dst_surface, dst_rect, pp_index, filter_param);
2070     else
2071         ironlake_post_processing(ctx, pp_context, src_surface, src_rect, dst_surface, dst_rect, pp_index, filter_param);
2072 }
2073
2074 VAStatus 
2075 i965_DestroySurfaces(VADriverContextP ctx,
2076                      VASurfaceID *surface_list,
2077                      int num_surfaces);
2078 VAStatus 
2079 i965_CreateSurfaces(VADriverContextP ctx,
2080                     int width,
2081                     int height,
2082                     int format,
2083                     int num_surfaces,
2084                     VASurfaceID *surfaces);
2085 VASurfaceID
2086 i965_post_processing(
2087     VADriverContextP   ctx,
2088     VASurfaceID        surface,
2089     const VARectangle *src_rect,
2090     const VARectangle *dst_rect,
2091     unsigned int       flags,
2092     int               *has_done_scaling  
2093 )
2094 {
2095     struct i965_driver_data *i965 = i965_driver_data(ctx);
2096     VASurfaceID in_surface_id = surface;
2097     VASurfaceID out_surface_id = VA_INVALID_ID;
2098     
2099     *has_done_scaling = 0;
2100
2101     if (HAS_PP(i965)) {
2102         struct object_surface *obj_surface;
2103         VAStatus status;
2104         struct i965_surface src_surface;
2105         struct i965_surface dst_surface;
2106
2107         obj_surface = SURFACE(in_surface_id);
2108
2109         /* Currently only support post processing for NV12 surface */
2110         if (obj_surface->fourcc != VA_FOURCC('N', 'V', '1', '2'))
2111             return out_surface_id;
2112
2113         if (flags & I965_PP_FLAG_DEINTERLACING) {
2114             status = i965_CreateSurfaces(ctx,
2115                                          obj_surface->orig_width,
2116                                          obj_surface->orig_height,
2117                                          VA_RT_FORMAT_YUV420,
2118                                          1,
2119                                          &out_surface_id);
2120             assert(status == VA_STATUS_SUCCESS);
2121             obj_surface = SURFACE(out_surface_id);
2122             i965_check_alloc_surface_bo(ctx, obj_surface, 0, VA_FOURCC('N','V','1','2'));
2123
2124             src_surface.id = in_surface_id;
2125             src_surface.flag = I965_SURFACE_SURFACE;
2126             dst_surface.id = out_surface_id;
2127             dst_surface.flag = I965_SURFACE_SURFACE;
2128
2129             i965_post_processing_internal(ctx, i965->pp_context,
2130                                           &src_surface,
2131                                           src_rect,
2132                                           &dst_surface,
2133                                           dst_rect,
2134                                           PP_NV12_DNDI,
2135                                           NULL);
2136         }
2137
2138         if (flags & I965_PP_FLAG_AVS) {
2139             struct i965_render_state *render_state = &i965->render_state;
2140             struct intel_region *dest_region = render_state->draw_region;
2141
2142             if (out_surface_id != VA_INVALID_ID)
2143                 in_surface_id = out_surface_id;
2144
2145             status = i965_CreateSurfaces(ctx,
2146                                          dest_region->width,
2147                                          dest_region->height,
2148                                          VA_RT_FORMAT_YUV420,
2149                                          1,
2150                                          &out_surface_id);
2151             assert(status == VA_STATUS_SUCCESS);
2152             obj_surface = SURFACE(out_surface_id);
2153             i965_check_alloc_surface_bo(ctx, obj_surface, 0, VA_FOURCC('N','V','1','2'));
2154
2155             src_surface.id = in_surface_id;
2156             src_surface.flag = I965_SURFACE_SURFACE;
2157             dst_surface.id = out_surface_id;
2158             dst_surface.flag = I965_SURFACE_SURFACE;
2159
2160             i965_post_processing_internal(ctx, i965->pp_context,
2161                                           &src_surface,
2162                                           src_rect,
2163                                           &dst_surface,
2164                                           dst_rect,
2165                                           PP_NV12_AVS,
2166                                           NULL);
2167
2168             if (in_surface_id != surface)
2169                 i965_DestroySurfaces(ctx, &in_surface_id, 1);
2170                 
2171             *has_done_scaling = 1;
2172         }
2173     }
2174
2175     return out_surface_id;
2176 }       
2177
2178 static VAStatus
2179 i965_image_i420_processing(VADriverContextP ctx,
2180                            const struct i965_surface *src_surface,
2181                            const VARectangle *src_rect,
2182                            const struct i965_surface *dst_surface,
2183                            const VARectangle *dst_rect)
2184 {
2185     struct i965_driver_data *i965 = i965_driver_data(ctx);
2186     struct i965_post_processing_context *pp_context = i965->pp_context;
2187     int fourcc = pp_get_surface_fourcc(ctx, dst_surface);
2188
2189     if (fourcc == VA_FOURCC('N', 'V', '1', '2')) {
2190         i965_post_processing_internal(ctx, i965->pp_context,
2191                                       src_surface,
2192                                       src_rect,
2193                                       dst_surface,
2194                                       dst_rect,
2195                                       PP_PL3_LOAD_SAVE_N12,
2196                                       NULL);
2197     } else {
2198         i965_post_processing_internal(ctx, i965->pp_context,
2199                                       src_surface,
2200                                       src_rect,
2201                                       dst_surface,
2202                                       dst_rect,
2203                                       PP_PL3_LOAD_SAVE_PL3,
2204                                       NULL);
2205     }
2206
2207     intel_batchbuffer_flush(pp_context->batch);
2208
2209     return VA_STATUS_SUCCESS;
2210 }
2211
2212 static VAStatus
2213 i965_image_nv12_processing(VADriverContextP ctx,
2214                            const struct i965_surface *src_surface,
2215                            const VARectangle *src_rect,
2216                            const struct i965_surface *dst_surface,
2217                            const VARectangle *dst_rect)
2218 {
2219     struct i965_driver_data *i965 = i965_driver_data(ctx);
2220     struct i965_post_processing_context *pp_context = i965->pp_context;
2221     int fourcc = pp_get_surface_fourcc(ctx, dst_surface);
2222
2223     if (fourcc == VA_FOURCC('N', 'V', '1', '2')) {
2224         i965_post_processing_internal(ctx, i965->pp_context,
2225                                       src_surface,
2226                                       src_rect,
2227                                       dst_surface,
2228                                       dst_rect,
2229                                       PP_NV12_LOAD_SAVE_N12,
2230                                       NULL);
2231     } else {
2232         i965_post_processing_internal(ctx, i965->pp_context,
2233                                       src_surface,
2234                                       src_rect,
2235                                       dst_surface,
2236                                       dst_rect,
2237                                       PP_NV12_LOAD_SAVE_PL3,
2238                                       NULL);
2239     }
2240
2241     intel_batchbuffer_flush(pp_context->batch);
2242
2243     return VA_STATUS_SUCCESS;
2244 }
2245
2246 VAStatus
2247 i965_image_processing(VADriverContextP ctx,
2248                       const struct i965_surface *src_surface,
2249                       const VARectangle *src_rect,
2250                       const struct i965_surface *dst_surface,
2251                       const VARectangle *dst_rect)
2252 {
2253     struct i965_driver_data *i965 = i965_driver_data(ctx);
2254     VAStatus status = VA_STATUS_ERROR_UNIMPLEMENTED;
2255
2256     if (HAS_PP(i965)) {
2257         int fourcc = pp_get_surface_fourcc(ctx, src_surface);
2258
2259         switch (fourcc) {
2260         case VA_FOURCC('Y', 'V', '1', '2'):
2261         case VA_FOURCC('I', '4', '2', '0'):
2262             status = i965_image_i420_processing(ctx,
2263                                                 src_surface,
2264                                                 src_rect,
2265                                                 dst_surface,
2266                                                 dst_rect);
2267             break;
2268
2269         case  VA_FOURCC('N', 'V', '1', '2'):
2270             status = i965_image_nv12_processing(ctx,
2271                                                 src_surface,
2272                                                 src_rect,
2273                                                 dst_surface,
2274                                                 dst_rect);
2275             break;
2276
2277         default:
2278             status = VA_STATUS_ERROR_UNIMPLEMENTED;
2279             break;
2280         }
2281     }
2282
2283     return status;
2284 }       
2285
2286 static void
2287 i965_post_processing_context_finalize(struct i965_post_processing_context *pp_context)
2288 {
2289     int i;
2290
2291     dri_bo_unreference(pp_context->curbe.bo);
2292     pp_context->curbe.bo = NULL;
2293
2294     for (i = 0; i < MAX_PP_SURFACES; i++) {
2295         dri_bo_unreference(pp_context->surfaces[i].ss_bo);
2296         pp_context->surfaces[i].ss_bo = NULL;
2297
2298         dri_bo_unreference(pp_context->surfaces[i].s_bo);
2299         pp_context->surfaces[i].s_bo = NULL;
2300     }
2301
2302     dri_bo_unreference(pp_context->sampler_state_table.bo);
2303     pp_context->sampler_state_table.bo = NULL;
2304
2305     dri_bo_unreference(pp_context->sampler_state_table.bo_8x8);
2306     pp_context->sampler_state_table.bo_8x8 = NULL;
2307
2308     dri_bo_unreference(pp_context->sampler_state_table.bo_8x8_uv);
2309     pp_context->sampler_state_table.bo_8x8_uv = NULL;
2310
2311     dri_bo_unreference(pp_context->binding_table.bo);
2312     pp_context->binding_table.bo = NULL;
2313
2314     dri_bo_unreference(pp_context->idrt.bo);
2315     pp_context->idrt.bo = NULL;
2316     pp_context->idrt.num_interface_descriptors = 0;
2317
2318     dri_bo_unreference(pp_context->vfe_state.bo);
2319     pp_context->vfe_state.bo = NULL;
2320
2321     dri_bo_unreference(pp_context->stmm.bo);
2322     pp_context->stmm.bo = NULL;
2323
2324     for (i = 0; i < NUM_PP_MODULES; i++) {
2325         struct pp_module *pp_module = &pp_context->pp_modules[i];
2326
2327         dri_bo_unreference(pp_module->kernel.bo);
2328         pp_module->kernel.bo = NULL;
2329     }
2330
2331 }
2332
2333 Bool
2334 i965_post_processing_terminate(VADriverContextP ctx)
2335 {
2336     struct i965_driver_data *i965 = i965_driver_data(ctx);
2337     struct i965_post_processing_context *pp_context = i965->pp_context;
2338
2339     if (pp_context) {
2340         i965_post_processing_context_finalize(pp_context);
2341         free(pp_context);
2342     }
2343
2344     i965->pp_context = NULL;
2345
2346     return True;
2347 }
2348
2349 static void
2350 i965_post_processing_context_init(VADriverContextP ctx,
2351                                   struct i965_post_processing_context *pp_context,
2352                                   struct intel_batchbuffer *batch)
2353 {
2354     struct i965_driver_data *i965 = i965_driver_data(ctx);
2355     int i;
2356
2357     pp_context->urb.size = URB_SIZE((&i965->intel));
2358     pp_context->urb.num_vfe_entries = 32;
2359     pp_context->urb.size_vfe_entry = 1;     /* in 512 bits unit */
2360     pp_context->urb.num_cs_entries = 1;
2361     pp_context->urb.size_cs_entry = 2;      /* in 512 bits unit */
2362     pp_context->urb.vfe_start = 0;
2363     pp_context->urb.cs_start = pp_context->urb.vfe_start + 
2364         pp_context->urb.num_vfe_entries * pp_context->urb.size_vfe_entry;
2365     assert(pp_context->urb.cs_start + 
2366            pp_context->urb.num_cs_entries * pp_context->urb.size_cs_entry <= URB_SIZE((&i965->intel)));
2367
2368     assert(NUM_PP_MODULES == ARRAY_ELEMS(pp_modules_gen5));
2369     assert(NUM_PP_MODULES == ARRAY_ELEMS(pp_modules_gen6));
2370
2371     if (IS_GEN6(i965->intel.device_id) ||
2372         IS_GEN7(i965->intel.device_id))
2373         memcpy(pp_context->pp_modules, pp_modules_gen6, sizeof(pp_context->pp_modules));
2374     else if (IS_IRONLAKE(i965->intel.device_id))
2375         memcpy(pp_context->pp_modules, pp_modules_gen5, sizeof(pp_context->pp_modules));
2376
2377     for (i = 0; i < NUM_PP_MODULES; i++) {
2378         struct pp_module *pp_module = &pp_context->pp_modules[i];
2379         dri_bo_unreference(pp_module->kernel.bo);
2380         if (pp_module->kernel.bin) {
2381             pp_module->kernel.bo = dri_bo_alloc(i965->intel.bufmgr,
2382                                                 pp_module->kernel.name,
2383                                                 pp_module->kernel.size,
2384                                                 4096);
2385             assert(pp_module->kernel.bo);
2386             dri_bo_subdata(pp_module->kernel.bo, 0, pp_module->kernel.size, pp_module->kernel.bin);
2387         } else {
2388             pp_module->kernel.bo = NULL;
2389         }
2390     }
2391
2392     pp_context->batch = batch;
2393 }
2394
2395 Bool
2396 i965_post_processing_init(VADriverContextP ctx)
2397 {
2398     struct i965_driver_data *i965 = i965_driver_data(ctx);
2399     struct i965_post_processing_context *pp_context = i965->pp_context;
2400
2401     if (HAS_PP(i965)) {
2402         if (pp_context == NULL) {
2403             pp_context = calloc(1, sizeof(*pp_context));
2404             i965_post_processing_context_init(ctx, pp_context, i965->batch);
2405             i965->pp_context = pp_context;
2406         }
2407     }
2408
2409     return True;
2410 }
2411
/*
 * Maps a VAProcFilterType to the internal PP kernel selector.
 * Only noise reduction and deinterlacing are wired up (both use the
 * NV12 DN/DI kernel); every other filter maps to PP_NULL (plain copy).
 * Indexed directly by VAProcFilterType, so its length must cover all
 * filter enum values used by callers.
 */
static const int procfilter_to_pp_flag[10] = {
    PP_NULL,    /* VAProcFilterNone */
    PP_NULL,    /* VAProcFilterDering */
    PP_NULL,    /* VAProcFilterDeblocking */
    PP_NV12_DNDI, /* VAProcFilterNoiseReduction */
    PP_NV12_DNDI, /* VAProcFilterDeinterlacing */
    PP_NULL,    /* VAProcFilterSharpening */
    PP_NULL,    /* VAProcFilterColorEnhancement */
    PP_NULL,    /* VAProcFilterProcAmp */
    PP_NULL,    /* VAProcFilterComposition */
    PP_NULL,    /* VAProcFilterFrameRateConversion */
};
2424
2425 static void 
2426 i965_proc_picture(VADriverContextP ctx, 
2427                   VAProfile profile, 
2428                   union codec_state *codec_state,
2429                   struct hw_context *hw_context)
2430 {
2431     struct i965_driver_data *i965 = i965_driver_data(ctx);
2432     struct i965_proc_context *proc_context = (struct i965_proc_context *)hw_context;
2433     struct proc_state *proc_state = &codec_state->proc;
2434     VAProcPipelineParameterBuffer *pipeline_param = (VAProcPipelineParameterBuffer *)proc_state->pipeline_param->buffer;
2435     VAProcInputParameterBuffer *input_param = (VAProcInputParameterBuffer *)proc_state->input_param->buffer;
2436     struct object_surface *obj_surface;
2437     struct i965_surface src_surface, dst_surface;
2438     VAStatus status;
2439     int i;
2440     VASurfaceID tmp_surfaces[VA_PROC_PIPELINE_MAX_NUM_FILTERS];
2441     int num_tmp_surfaces = 0;
2442
2443     assert(input_param->surface != VA_INVALID_ID);
2444     assert(proc_state->current_render_target != VA_INVALID_ID);
2445
2446     obj_surface = SURFACE(proc_state->current_render_target);
2447     i965_check_alloc_surface_bo(ctx, obj_surface, 0, VA_FOURCC('N','V','1','2'));
2448
2449     obj_surface = SURFACE(input_param->surface);
2450     assert(obj_surface->fourcc == VA_FOURCC('N', 'V', '1', '2'));
2451
2452     src_surface.id = input_param->surface;
2453     src_surface.flag = I965_SURFACE_SURFACE;
2454     
2455     for (i = 0; i < VA_PROC_PIPELINE_MAX_NUM_FILTERS; i++) {
2456         VAProcFilterType filter_type = pipeline_param->filter_pipeline[i];
2457         VASurfaceID out_surface_id = VA_INVALID_ID;
2458         void *filter_param = NULL;
2459
2460         if (procfilter_to_pp_flag[filter_type] != PP_NULL) {
2461             if (proc_state->filter_param[filter_type])
2462                 filter_param = proc_state->filter_param[filter_type]->buffer;
2463
2464             status = i965_CreateSurfaces(ctx,
2465                                          obj_surface->orig_width,
2466                                          obj_surface->orig_height,
2467                                          VA_RT_FORMAT_YUV420,
2468                                          1,
2469                                          &out_surface_id);
2470             assert(status == VA_STATUS_SUCCESS);
2471             tmp_surfaces[num_tmp_surfaces++] = out_surface_id;
2472             obj_surface = SURFACE(out_surface_id);
2473             i965_check_alloc_surface_bo(ctx, obj_surface, 0, VA_FOURCC('N','V','1','2'));
2474             dst_surface.id = out_surface_id;
2475             dst_surface.flag = I965_SURFACE_SURFACE;
2476             i965_post_processing_internal(ctx, &proc_context->pp_context,
2477                                           &src_surface,
2478                                           &input_param->region,
2479                                           &dst_surface,
2480                                           &input_param->region,
2481                                           procfilter_to_pp_flag[filter_type],
2482                                           filter_param);
2483             src_surface.id = dst_surface.id;
2484         }
2485     }
2486
2487     dst_surface.id = proc_state->current_render_target;
2488     dst_surface.flag = I965_SURFACE_SURFACE;
2489     i965_post_processing_internal(ctx, &proc_context->pp_context,
2490                                   &src_surface,
2491                                   &input_param->region,
2492                                   &dst_surface,
2493                                   &pipeline_param->output_region,
2494                                   PP_NV12_AVS,
2495                                   NULL);
2496
2497     if (num_tmp_surfaces)
2498         i965_DestroySurfaces(ctx,
2499                              tmp_surfaces,
2500                              num_tmp_surfaces);
2501
2502     intel_batchbuffer_flush(hw_context->batch);
2503 }
2504
2505 static void
2506 i965_proc_context_destroy(void *hw_context)
2507 {
2508     struct i965_proc_context *proc_context = (struct i965_proc_context *)hw_context;
2509
2510     i965_post_processing_context_finalize(&proc_context->pp_context);
2511     intel_batchbuffer_free(proc_context->base.batch);
2512     free(proc_context);
2513 }
2514
2515 struct hw_context *
2516 i965_proc_context_init(VADriverContextP ctx, VAProfile profile)
2517 {
2518     struct intel_driver_data *intel = intel_driver_data(ctx);
2519     struct i965_proc_context *proc_context = calloc(1, sizeof(struct i965_proc_context));
2520
2521     proc_context->base.destroy = i965_proc_context_destroy;
2522     proc_context->base.run = i965_proc_picture;
2523     proc_context->base.batch = intel_batchbuffer_new(intel, I915_EXEC_RENDER);
2524     i965_post_processing_context_init(ctx, &proc_context->pp_context, proc_context->base.batch);
2525
2526     return (struct hw_context *)proc_context;
2527 }