2 * Copyright © 2010 Intel Corporation
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the
6 * "Software"), to deal in the Software without restriction, including
7 * without limitation the rights to use, copy, modify, merge, publish,
8 * distribute, sub license, and/or sell copies of the Software, and to
9 * permit persons to whom the Software is furnished to do so, subject to
10 * the following conditions:
12 * The above copyright notice and this permission notice (including the
13 * next paragraph) shall be included in all copies or substantial portions
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
17 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
18 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
19 * IN NO EVENT SHALL PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR
20 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
21 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
22 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
25 * Xiang Haihao <haihao.xiang@intel.com>
34 #include <va/va_backend.h>
36 #include "intel_batchbuffer.h"
37 #include "intel_driver.h"
38 #include "i965_defines.h"
39 #include "i965_structs.h"
40 #include "i965_drv_video.h"
41 #include "i965_post_processing.h"
42 #include "i965_render.h"
/*
 * HAS_PP: true when the device generation supports video post-processing
 * (Ironlake, Gen6 or Gen7, judged from the PCI device id in the context).
 */
#define HAS_PP(ctx) (IS_IRONLAKE((ctx)->intel.device_id) || \
                     IS_GEN6((ctx)->intel.device_id) ||     \
                     IS_GEN7((ctx)->intel.device_id))

/*
 * Layout of the combined surface-state + binding-table buffer object:
 * every surface-state slot is padded to the larger of the two state
 * structure sizes (32-byte aligned) so either kind of state fits in any
 * slot, and the binding table starts right after the MAX_PP_SURFACES
 * slots.
 */
#define SURFACE_STATE_PADDED_SIZE_0_I965 ALIGN(sizeof(struct i965_surface_state), 32)
#define SURFACE_STATE_PADDED_SIZE_1_I965 ALIGN(sizeof(struct i965_surface_state2), 32)
#define SURFACE_STATE_PADDED_SIZE_I965   MAX(SURFACE_STATE_PADDED_SIZE_0_I965, SURFACE_STATE_PADDED_SIZE_1_I965)
#define SURFACE_STATE_PADDED_SIZE        SURFACE_STATE_PADDED_SIZE_I965
/* Parenthesize the macro argument so callers may pass expressions
 * (e.g. SURFACE_STATE_OFFSET(i + 1)) without mis-scaling. */
#define SURFACE_STATE_OFFSET(index)      (SURFACE_STATE_PADDED_SIZE * (index))
#define BINDING_TABLE_OFFSET             SURFACE_STATE_OFFSET(MAX_PP_SURFACES)
/*
 * Gen5 (Ironlake) post-processing kernels.  Each array holds one
 * pre-compiled GPU program as rows of four dwords; the .g4b.gen5 files
 * are generated shader binaries pulled in as data at compile time.
 */
56 static const uint32_t pp_null_gen5[][4] = {
57 #include "shaders/post_processing/gen5_6/null.g4b.gen5"
/* NV12 -> NV12 load/save (copy) */
60 static const uint32_t pp_nv12_load_save_nv12_gen5[][4] = {
61 #include "shaders/post_processing/gen5_6/nv12_load_save_nv12.g4b.gen5"
/* NV12 -> 3-plane (PL3) load/save */
64 static const uint32_t pp_nv12_load_save_pl3_gen5[][4] = {
65 #include "shaders/post_processing/gen5_6/nv12_load_save_pl3.g4b.gen5"
/* PL3 -> NV12 load/save */
68 static const uint32_t pp_pl3_load_save_nv12_gen5[][4] = {
69 #include "shaders/post_processing/gen5_6/pl3_load_save_nv12.g4b.gen5"
/* PL3 -> PL3 load/save */
72 static const uint32_t pp_pl3_load_save_pl3_gen5[][4] = {
73 #include "shaders/post_processing/gen5_6/pl3_load_save_pl3.g4b.gen5"
/* NV12 scaling kernel */
76 static const uint32_t pp_nv12_scaling_gen5[][4] = {
77 #include "shaders/post_processing/gen5_6/nv12_scaling_nv12.g4b.gen5"
/* NV12 AVS kernel */
80 static const uint32_t pp_nv12_avs_gen5[][4] = {
81 #include "shaders/post_processing/gen5_6/nv12_avs_nv12.g4b.gen5"
/* NV12 DNDI kernel (presumably de-noise/de-interlace -- name only, verify) */
84 static const uint32_t pp_nv12_dndi_gen5[][4] = {
85 #include "shaders/post_processing/gen5_6/nv12_dndi_nv12.g4b.gen5"
/* NV12 DN kernel (presumably de-noise -- name only, verify) */
88 static const uint32_t pp_nv12_dn_gen5[][4] = {
89 #include "shaders/post_processing/gen5_6/nv12_dn_nv12.g4b.gen5"
/*
 * Forward declarations of the per-module initialize hooks.  The
 * pp_modules_gen5/gen6 tables below reference them before their
 * definitions, which appear later in this file.
 */
92 static void pp_null_initialize(VADriverContextP ctx, struct i965_post_processing_context *pp_context,
93 const struct i965_surface *src_surface,
94 const VARectangle *src_rect,
95 const struct i965_surface *dst_surface,
96 const VARectangle *dst_rect,
98 static void pp_nv12_avs_initialize(VADriverContextP ctx, struct i965_post_processing_context *pp_context,
99 const struct i965_surface *src_surface,
100 const VARectangle *src_rect,
101 const struct i965_surface *dst_surface,
102 const VARectangle *dst_rect,
104 static void pp_nv12_scaling_initialize(VADriverContextP ctx, struct i965_post_processing_context *pp_context,
105 const struct i965_surface *src_surface,
106 const VARectangle *src_rect,
107 const struct i965_surface *dst_surface,
108 const VARectangle *dst_rect,
110 static void pp_plx_load_save_plx_initialize(VADriverContextP ctx, struct i965_post_processing_context *pp_context,
111 const struct i965_surface *src_surface,
112 const VARectangle *src_rect,
113 const struct i965_surface *dst_surface,
114 const VARectangle *dst_rect,
116 static void pp_nv12_dndi_initialize(VADriverContextP ctx, struct i965_post_processing_context *pp_context,
117 const struct i965_surface *src_surface,
118 const VARectangle *src_rect,
119 const struct i965_surface *dst_surface,
120 const VARectangle *dst_rect,
122 static void pp_nv12_dn_initialize(VADriverContextP ctx, struct i965_post_processing_context *pp_context,
123 const struct i965_surface *src_surface,
124 const VARectangle *src_rect,
125 const struct i965_surface *dst_surface,
126 const VARectangle *dst_rect,
/*
 * Gen5 module table: one entry per post-processing operation, pairing a
 * PP_* index, the kernel binary (and its size) and the initialize hook
 * that programs surfaces, samplers and walker callbacks for it.
 */
129 static struct pp_module pp_modules_gen5[] = {
132 "NULL module (for testing)",
135 sizeof(pp_null_gen5),
145 PP_NV12_LOAD_SAVE_N12,
146 pp_nv12_load_save_nv12_gen5,
147 sizeof(pp_nv12_load_save_nv12_gen5),
151 pp_plx_load_save_plx_initialize,
157 PP_NV12_LOAD_SAVE_PL3,
158 pp_nv12_load_save_pl3_gen5,
159 sizeof(pp_nv12_load_save_pl3_gen5),
163 pp_plx_load_save_plx_initialize,
169 PP_PL3_LOAD_SAVE_N12,
170 pp_pl3_load_save_nv12_gen5,
171 sizeof(pp_pl3_load_save_nv12_gen5),
175 pp_plx_load_save_plx_initialize,
/* NOTE(review): this PL3 -> PL3 entry reuses the ..._N12 index name while
 * its kernel is pp_pl3_load_save_pl3_gen5 -- verify the intended PP_* enum. */
181 PP_PL3_LOAD_SAVE_N12,
182 pp_pl3_load_save_pl3_gen5,
183 sizeof(pp_pl3_load_save_pl3_gen5),
187 pp_plx_load_save_plx_initialize
192 "NV12 Scaling module",
194 pp_nv12_scaling_gen5,
195 sizeof(pp_nv12_scaling_gen5),
199 pp_nv12_scaling_initialize,
207 sizeof(pp_nv12_avs_gen5),
211 pp_nv12_avs_initialize,
219 sizeof(pp_nv12_dndi_gen5),
223 pp_nv12_dndi_initialize,
231 sizeof(pp_nv12_dn_gen5),
235 pp_nv12_dn_initialize,
/*
 * Gen6 (Sandy Bridge) post-processing kernels: the same operations as
 * the gen5 set above, compiled for Gen6 (.g6b binaries).
 */
239 static const uint32_t pp_null_gen6[][4] = {
240 #include "shaders/post_processing/gen5_6/null.g6b"
243 static const uint32_t pp_nv12_load_save_nv12_gen6[][4] = {
244 #include "shaders/post_processing/gen5_6/nv12_load_save_nv12.g6b"
247 static const uint32_t pp_nv12_load_save_pl3_gen6[][4] = {
248 #include "shaders/post_processing/gen5_6/nv12_load_save_pl3.g6b"
251 static const uint32_t pp_pl3_load_save_nv12_gen6[][4] = {
252 #include "shaders/post_processing/gen5_6/pl3_load_save_nv12.g6b"
255 static const uint32_t pp_pl3_load_save_pl3_gen6[][4] = {
256 #include "shaders/post_processing/gen5_6/pl3_load_save_pl3.g6b"
259 static const uint32_t pp_nv12_scaling_gen6[][4] = {
260 #include "shaders/post_processing/gen5_6/nv12_scaling_nv12.g6b"
263 static const uint32_t pp_nv12_avs_gen6[][4] = {
264 #include "shaders/post_processing/gen5_6/nv12_avs_nv12.g6b"
267 static const uint32_t pp_nv12_dndi_gen6[][4] = {
268 #include "shaders/post_processing/gen5_6/nv12_dndi_nv12.g6b"
271 static const uint32_t pp_nv12_dn_gen6[][4] = {
272 #include "shaders/post_processing/gen5_6/nv12_dn_nv12.g6b"
/*
 * Gen6 module table: mirrors pp_modules_gen5 entry-for-entry, but with
 * the Gen6-compiled kernels.  The initialize hooks are shared between
 * generations.
 */
275 static struct pp_module pp_modules_gen6[] = {
278 "NULL module (for testing)",
281 sizeof(pp_null_gen6),
291 PP_NV12_LOAD_SAVE_N12,
292 pp_nv12_load_save_nv12_gen6,
293 sizeof(pp_nv12_load_save_nv12_gen6),
297 pp_plx_load_save_plx_initialize,
303 PP_NV12_LOAD_SAVE_PL3,
304 pp_nv12_load_save_pl3_gen6,
305 sizeof(pp_nv12_load_save_pl3_gen6),
309 pp_plx_load_save_plx_initialize,
315 PP_PL3_LOAD_SAVE_N12,
316 pp_pl3_load_save_nv12_gen6,
317 sizeof(pp_pl3_load_save_nv12_gen6),
321 pp_plx_load_save_plx_initialize,
/* NOTE(review): PL3 -> PL3 entry reuses the ..._N12 index name (same as the
 * gen5 table) -- verify against the PP_* enum. */
327 PP_PL3_LOAD_SAVE_N12,
328 pp_pl3_load_save_pl3_gen6,
329 sizeof(pp_pl3_load_save_pl3_gen6),
333 pp_plx_load_save_plx_initialize,
338 "NV12 Scaling module",
340 pp_nv12_scaling_gen6,
341 sizeof(pp_nv12_scaling_gen6),
345 pp_nv12_scaling_initialize,
353 sizeof(pp_nv12_avs_gen6),
357 pp_nv12_avs_initialize,
365 sizeof(pp_nv12_dndi_gen6),
369 pp_nv12_dndi_initialize,
377 sizeof(pp_nv12_dn_gen6),
381 pp_nv12_dn_initialize,
/*
 * Shorthand for the static (CURBE) and inline (MEDIA_OBJECT) parameter
 * blocks of the current post-processing context.  Both expand to
 * pp_context->..., so a variable named "pp_context" must be in scope at
 * every use site.
 */
385 #define pp_static_parameter pp_context->pp_static_parameter
386 #define pp_inline_parameter pp_context->pp_inline_parameter
/*
 * Return the FOURCC of a post-processing operand, which may wrap either
 * a VAImage (I965_SURFACE_IMAGE flag) or a VA surface.
 */
389 pp_get_surface_fourcc(VADriverContextP ctx, const struct i965_surface *surface)
391 struct i965_driver_data *i965 = i965_driver_data(ctx);
/* Image operand: fourcc comes from the VAImage format descriptor. */
394 if (surface->flag == I965_SURFACE_IMAGE) {
395 struct object_image *obj_image = IMAGE(surface->id);
396 fourcc = obj_image->image.format.fourcc;
/* Surface operand: fourcc is cached on the surface object. */
398 struct object_surface *obj_surface = SURFACE(surface->id);
399 fourcc = obj_surface->fourcc;
/*
 * Translate an I915_TILING_* mode into the tiled_surface / tile_walk
 * fields of a SURFACE_STATE (dword 3).
 */
406 pp_set_surface_tiling(struct i965_surface_state *ss, unsigned int tiling)
/* Linear: not tiled, walk field cleared. */
409 case I915_TILING_NONE:
410 ss->ss3.tiled_surface = 0;
411 ss->ss3.tile_walk = 0;
/* X-tiled. */
414 ss->ss3.tiled_surface = 1;
415 ss->ss3.tile_walk = I965_TILEWALK_XMAJOR;
/* Y-tiled. */
418 ss->ss3.tiled_surface = 1;
419 ss->ss3.tile_walk = I965_TILEWALK_YMAJOR;
/*
 * Same as pp_set_surface_tiling(), but for the SURFACE_STATE2 layout
 * whose tiling fields live in dword 2.
 */
425 pp_set_surface2_tiling(struct i965_surface_state2 *ss, unsigned int tiling)
/* Linear. */
428 case I915_TILING_NONE:
429 ss->ss2.tiled_surface = 0;
430 ss->ss2.tile_walk = 0;
/* X-tiled. */
433 ss->ss2.tiled_surface = 1;
434 ss->ss2.tile_walk = I965_TILEWALK_XMAJOR;
/* Y-tiled. */
437 ss->ss2.tiled_surface = 1;
438 ss->ss2.tile_walk = I965_TILEWALK_YMAJOR;
/*
 * Fill one interface descriptor in the IDRT buffer for the currently
 * selected kernel (pp_context->current_pp): kernel entry point, CURBE
 * read window (grf 1-4), sampler state and binding table pointers.
 * The two relocations let the kernel GTT adjust the kernel-start and
 * sampler-state addresses at execution time; the field values written
 * here are only presumed offsets until then (marked "reloc").
 */
444 ironlake_pp_interface_descriptor_table(struct i965_post_processing_context *pp_context)
446 struct i965_interface_descriptor *desc;
448 int pp_index = pp_context->current_pp;
450 bo = pp_context->idrt.bo;
454 memset(desc, 0, sizeof(*desc));
455 desc->desc0.grf_reg_blocks = 10;
456 desc->desc0.kernel_start_pointer = pp_context->pp_modules[pp_index].kernel.bo->offset >> 6; /* reloc */
457 desc->desc1.const_urb_entry_read_offset = 0;
458 desc->desc1.const_urb_entry_read_len = 4; /* grf 1-4 */
459 desc->desc2.sampler_state_pointer = pp_context->sampler_state_table.bo->offset >> 5;
460 desc->desc2.sampler_count = 0;
461 desc->desc3.binding_table_entry_count = 0;
462 desc->desc3.binding_table_pointer = (BINDING_TABLE_OFFSET >> 5);
/* Relocation for the kernel start pointer (desc0); the delta keeps the
 * low grf_reg_blocks bits intact. */
464 dri_bo_emit_reloc(bo,
465 I915_GEM_DOMAIN_INSTRUCTION, 0,
466 desc->desc0.grf_reg_blocks,
467 offsetof(struct i965_interface_descriptor, desc0),
468 pp_context->pp_modules[pp_index].kernel.bo);
/* Relocation for the sampler state table pointer (desc2). */
470 dri_bo_emit_reloc(bo,
471 I915_GEM_DOMAIN_INSTRUCTION, 0,
472 desc->desc2.sampler_count << 2,
473 offsetof(struct i965_interface_descriptor, desc2),
474 pp_context->sampler_state_table.bo);
477 pp_context->idrt.num_interface_descriptors++;
/*
 * Program the VFE (video front end) fixed-function state from the URB
 * configuration held in pp_context.  max_threads and
 * urb_entry_alloc_size use the hardware's "value minus one" encoding;
 * the interface-descriptor base is patched by a relocation.
 */
481 ironlake_pp_vfe_state(struct i965_post_processing_context *pp_context)
483 struct i965_vfe_state *vfe_state;
486 bo = pp_context->vfe_state.bo;
489 vfe_state = bo->virtual;
490 memset(vfe_state, 0, sizeof(*vfe_state));
491 vfe_state->vfe1.max_threads = pp_context->urb.num_vfe_entries - 1;
492 vfe_state->vfe1.urb_entry_alloc_size = pp_context->urb.size_vfe_entry - 1;
493 vfe_state->vfe1.num_urb_entries = pp_context->urb.num_vfe_entries;
494 vfe_state->vfe1.vfe_mode = VFE_GENERIC_MODE;
495 vfe_state->vfe1.children_present = 0;
496 vfe_state->vfe2.interface_descriptor_base =
497 pp_context->idrt.bo->offset >> 4; /* reloc */
498 dri_bo_emit_reloc(bo,
499 I915_GEM_DOMAIN_INSTRUCTION, 0,
501 offsetof(struct i965_vfe_state, vfe2),
502 pp_context->idrt.bo);
/*
 * Copy the 128-byte static parameter block (CURBE data, grf 1-4) into
 * the constant buffer object.  The size assert guards the layout the
 * interface descriptor's const_urb_entry_read_len = 4 relies on.
 */
507 ironlake_pp_upload_constants(struct i965_post_processing_context *pp_context)
509 unsigned char *constant_buffer;
511 assert(sizeof(pp_static_parameter) == 128);
512 dri_bo_map(pp_context->curbe.bo, 1);
513 assert(pp_context->curbe.bo->virtual);
514 constant_buffer = pp_context->curbe.bo->virtual;
515 memcpy(constant_buffer, &pp_static_parameter, sizeof(pp_static_parameter));
516 dri_bo_unmap(pp_context->curbe.bo);
/*
 * Build all indirect state for one post-processing run: the interface
 * descriptor table, the VFE state and the CURBE constants.
 */
520 ironlake_pp_states_setup(VADriverContextP ctx,
521 struct i965_post_processing_context *pp_context)
523 ironlake_pp_interface_descriptor_table(pp_context);
524 ironlake_pp_vfe_state(pp_context);
525 ironlake_pp_upload_constants(pp_context);
/*
 * Emit PIPELINE_SELECT switching the GPU to the media pipeline.
 */
529 ironlake_pp_pipeline_select(VADriverContextP ctx,
530 struct i965_post_processing_context *pp_context)
532 struct intel_batchbuffer *batch = pp_context->batch;
534 BEGIN_BATCH(batch, 1);
535 OUT_BATCH(batch, CMD_PIPELINE_SELECT | PIPELINE_SELECT_MEDIA);
536 ADVANCE_BATCH(batch);
/*
 * Emit URB_FENCE partitioning the URB: the VFE section ends at
 * urb.cs_start and the constant (CS) section at urb.size.
 */
540 ironlake_pp_urb_layout(VADriverContextP ctx,
541 struct i965_post_processing_context *pp_context)
543 struct intel_batchbuffer *batch = pp_context->batch;
544 unsigned int vfe_fence, cs_fence;
546 vfe_fence = pp_context->urb.cs_start;
547 cs_fence = pp_context->urb.size;
549 BEGIN_BATCH(batch, 3);
550 OUT_BATCH(batch, CMD_URB_FENCE | UF0_VFE_REALLOC | UF0_CS_REALLOC | 1);
553 (vfe_fence << UF2_VFE_FENCE_SHIFT) | /* VFE_SIZE */
554 (cs_fence << UF2_CS_FENCE_SHIFT)); /* CS_SIZE */
555 ADVANCE_BATCH(batch);
/*
 * Emit STATE_BASE_ADDRESS (8 dwords).  Only the surface state base is
 * set, via a relocation to the combined surface-state/binding-table
 * buffer; every other base address stays zero (with the MODIFY bit so
 * the zero actually takes effect).
 */
559 ironlake_pp_state_base_address(VADriverContextP ctx,
560 struct i965_post_processing_context *pp_context)
562 struct intel_batchbuffer *batch = pp_context->batch;
564 BEGIN_BATCH(batch, 8);
565 OUT_BATCH(batch, CMD_STATE_BASE_ADDRESS | 6);
566 OUT_BATCH(batch, 0 | BASE_ADDRESS_MODIFY);
/* Surface state base -> surface-state/binding-table bo. */
567 OUT_RELOC(batch, pp_context->surface_state_binding_table.bo, I915_GEM_DOMAIN_INSTRUCTION, 0, BASE_ADDRESS_MODIFY);
568 OUT_BATCH(batch, 0 | BASE_ADDRESS_MODIFY);
569 OUT_BATCH(batch, 0 | BASE_ADDRESS_MODIFY);
570 OUT_BATCH(batch, 0 | BASE_ADDRESS_MODIFY);
571 OUT_BATCH(batch, 0 | BASE_ADDRESS_MODIFY);
572 OUT_BATCH(batch, 0 | BASE_ADDRESS_MODIFY);
573 ADVANCE_BATCH(batch);
/*
 * Emit MEDIA_STATE_POINTERS with a relocation to the VFE state buffer.
 */
577 ironlake_pp_state_pointers(VADriverContextP ctx,
578 struct i965_post_processing_context *pp_context)
580 struct intel_batchbuffer *batch = pp_context->batch;
582 BEGIN_BATCH(batch, 3);
583 OUT_BATCH(batch, CMD_MEDIA_STATE_POINTERS | 1);
585 OUT_RELOC(batch, pp_context->vfe_state.bo, I915_GEM_DOMAIN_INSTRUCTION, 0, 0);
586 ADVANCE_BATCH(batch);
/*
 * Emit CS_URB_STATE describing the constant-buffer URB entries
 * (allocation size in "minus one" encoding, plus the entry count).
 */
590 ironlake_pp_cs_urb_layout(VADriverContextP ctx,
591 struct i965_post_processing_context *pp_context)
593 struct intel_batchbuffer *batch = pp_context->batch;
595 BEGIN_BATCH(batch, 2);
596 OUT_BATCH(batch, CMD_CS_URB_STATE | 0);
598 ((pp_context->urb.size_cs_entry - 1) << 4) | /* URB Entry Allocation Size */
599 (pp_context->urb.num_cs_entries << 0)); /* Number of URB Entries */
600 ADVANCE_BATCH(batch);
/*
 * Emit CONSTANT_BUFFER pointing at the CURBE bo.  The relocation delta
 * (size_cs_entry - 1) carries the buffer-length field in the low bits
 * of the patched address.
 */
604 ironlake_pp_constant_buffer(VADriverContextP ctx,
605 struct i965_post_processing_context *pp_context)
607 struct intel_batchbuffer *batch = pp_context->batch;
609 BEGIN_BATCH(batch, 2);
610 OUT_BATCH(batch, CMD_CONSTANT_BUFFER | (1 << 8) | (2 - 2));
611 OUT_RELOC(batch, pp_context->curbe.bo,
612 I915_GEM_DOMAIN_INSTRUCTION, 0,
613 pp_context->urb.size_cs_entry - 1);
614 ADVANCE_BATCH(batch);
/*
 * Walk the block grid defined by the active module's pp_x_steps /
 * pp_y_steps callbacks.  For each (x, y) block where
 * pp_set_block_parameter() returns 0, emit one MEDIA_OBJECT command
 * carrying the per-block inline parameters (64 bytes -> grf 5-6).
 */
618 ironlake_pp_object_walker(VADriverContextP ctx,
619 struct i965_post_processing_context *pp_context)
621 struct intel_batchbuffer *batch = pp_context->batch;
622 int x, x_steps, y, y_steps;
624 x_steps = pp_context->pp_x_steps(&pp_context->private_context);
625 y_steps = pp_context->pp_y_steps(&pp_context->private_context);
627 for (y = 0; y < y_steps; y++) {
628 for (x = 0; x < x_steps; x++) {
/* Hook fills pp_inline_parameter for this block; 0 means "emit it". */
629 if (!pp_context->pp_set_block_parameter(pp_context, x, y)) {
630 BEGIN_BATCH(batch, 20);
631 OUT_BATCH(batch, CMD_MEDIA_OBJECT | 18);
633 OUT_BATCH(batch, 0); /* no indirect data */
636 /* inline data grf 5-6 */
637 assert(sizeof(pp_inline_parameter) == 64);
638 intel_batchbuffer_data(batch, &pp_inline_parameter, sizeof(pp_inline_parameter));
640 ADVANCE_BATCH(batch);
/*
 * Emit the whole Ironlake post-processing command sequence as one
 * atomic batch section: flush, pipeline select, base addresses, state
 * pointers, URB layout, constants, then the MEDIA_OBJECT walker.
 */
647 ironlake_pp_pipeline_setup(VADriverContextP ctx,
648 struct i965_post_processing_context *pp_context)
650 struct intel_batchbuffer *batch = pp_context->batch;
652 intel_batchbuffer_start_atomic(batch, 0x1000);
653 intel_batchbuffer_emit_mi_flush(batch);
654 ironlake_pp_pipeline_select(ctx, pp_context);
655 ironlake_pp_state_base_address(ctx, pp_context);
656 ironlake_pp_state_pointers(ctx, pp_context);
657 ironlake_pp_urb_layout(ctx, pp_context);
658 ironlake_pp_cs_urb_layout(ctx, pp_context);
659 ironlake_pp_constant_buffer(ctx, pp_context);
660 ironlake_pp_object_walker(ctx, pp_context);
661 intel_batchbuffer_end_atomic(batch);
/*
 * Program one 2D SURFACE_STATE slot in the shared surface-state/
 * binding-table bo and point binding-table entry `index` at it.
 * width/height/pitch use the hardware "minus one" encoding; tiling is
 * copied from the buffer object.  A render-domain relocation patches
 * the base address; is_target additionally marks the surface as a GPU
 * write target.
 */
665 i965_pp_set_surface_state(VADriverContextP ctx, struct i965_post_processing_context *pp_context,
666 dri_bo *surf_bo, unsigned long surf_bo_offset,
667 int width, int height, int pitch, int format,
668 int index, int is_target)
670 struct i965_surface_state *ss;
673 unsigned int swizzle;
675 dri_bo_get_tiling(surf_bo, &tiling, &swizzle);
676 ss_bo = pp_context->surface_state_binding_table.bo;
679 dri_bo_map(ss_bo, True);
680 assert(ss_bo->virtual);
681 ss = (struct i965_surface_state *)((char *)ss_bo->virtual + SURFACE_STATE_OFFSET(index));
682 memset(ss, 0, sizeof(*ss));
683 ss->ss0.surface_type = I965_SURFACE_2D;
684 ss->ss0.surface_format = format;
685 ss->ss1.base_addr = surf_bo->offset + surf_bo_offset;
686 ss->ss2.width = width - 1;
687 ss->ss2.height = height - 1;
688 ss->ss3.pitch = pitch - 1;
689 pp_set_surface_tiling(ss, tiling);
/* Relocation patches ss1.base_addr at exec; delta keeps surf_bo_offset. */
690 dri_bo_emit_reloc(ss_bo,
691 I915_GEM_DOMAIN_RENDER, is_target ? I915_GEM_DOMAIN_RENDER : 0,
693 SURFACE_STATE_OFFSET(index) + offsetof(struct i965_surface_state, ss1),
/* Binding table entry points at the surface-state slot just written. */
695 ((unsigned int *)((char *)ss_bo->virtual + BINDING_TABLE_OFFSET))[index] = SURFACE_STATE_OFFSET(index);
/*
 * Program one SURFACE_STATE2 slot (the media/sampler-8x8 surface
 * layout used by the AVS path) in the shared surface-state/binding-
 * table bo, and point binding-table entry `index` at it.  xoffset /
 * yoffset locate the Cb plane within the buffer; interleave_chroma
 * selects NV12-style interleaved CbCr.  The relocation patches the
 * base address in ss0 at execution time.
 */
700 i965_pp_set_surface2_state(VADriverContextP ctx, struct i965_post_processing_context *pp_context,
701 dri_bo *surf_bo, unsigned long surf_bo_offset,
702 int width, int height, int wpitch,
703 int xoffset, int yoffset,
704 int format, int interleave_chroma,
707 struct i965_surface_state2 *ss2;
710 unsigned int swizzle;
712 dri_bo_get_tiling(surf_bo, &tiling, &swizzle);
713 ss2_bo = pp_context->surface_state_binding_table.bo;
716 dri_bo_map(ss2_bo, True);
717 assert(ss2_bo->virtual);
718 ss2 = (struct i965_surface_state2 *)((char *)ss2_bo->virtual + SURFACE_STATE_OFFSET(index));
719 memset(ss2, 0, sizeof(*ss2));
720 ss2->ss0.surface_base_address = surf_bo->offset + surf_bo_offset;
721 ss2->ss1.cbcr_pixel_offset_v_direction = 0;
722 ss2->ss1.width = width - 1;
723 ss2->ss1.height = height - 1;
724 ss2->ss2.pitch = wpitch - 1;
725 ss2->ss2.interleave_chroma = interleave_chroma;
726 ss2->ss2.surface_format = format;
727 ss2->ss3.x_offset_for_cb = xoffset;
728 ss2->ss3.y_offset_for_cb = yoffset;
729 pp_set_surface2_tiling(ss2, tiling);
730 dri_bo_emit_reloc(ss2_bo,
731 I915_GEM_DOMAIN_RENDER, 0,
733 SURFACE_STATE_OFFSET(index) + offsetof(struct i965_surface_state2, ss0),
735 ((unsigned int *)((char *)ss2_bo->virtual + BINDING_TABLE_OFFSET))[index] = SURFACE_STATE_OFFSET(index);
736 dri_bo_unmap(ss2_bo);
/*
 * Compute per-plane geometry (width/height/pitch/byte offset, arrays
 * indexed 0=Y, then chroma plane(s)) for a surface or image operand and
 * bind its planes for the media read/write message: Y at base_index,
 * then either the interleaved UV plane (NV12) at base_index+1, or the
 * U and V planes (3-plane formats) at base_index+1/+2.  For YV12 the
 * U/V plane order is swapped via the U/V index constants.  Widths are
 * divided by 4 when binding -- presumably one R8 surface element per
 * 4-byte unit read by the kernel; confirm against the shader. The
 * out-arrays are also outputs for the caller (grid sizing).
 */
740 pp_set_media_rw_message_surface(VADriverContextP ctx, struct i965_post_processing_context *pp_context,
741 const struct i965_surface *surface,
742 int base_index, int is_target,
743 int *width, int *height, int *pitch, int *offset)
745 struct i965_driver_data *i965 = i965_driver_data(ctx);
746 struct object_surface *obj_surface;
747 struct object_image *obj_image;
749 int fourcc = pp_get_surface_fourcc(ctx, surface);
/* YV12 stores V before U, so swap the logical U/V plane indices. */
751 const int U = fourcc == VA_FOURCC('Y', 'V', '1', '2') ? 2 : 1;
752 const int V = fourcc == VA_FOURCC('Y', 'V', '1', '2') ? 1 : 2;
754 int interleaved_uv = fourcc == VA_FOURCC('N', 'V', '1', '2');
/* Surface operand: geometry from the surface object (orig_* = logical
 * size, width/height = padded pitch allocation). */
756 if (surface->flag == I965_SURFACE_SURFACE) {
757 obj_surface = SURFACE(surface->id);
758 bo = obj_surface->bo;
759 width[0] = obj_surface->orig_width;
760 height[0] = obj_surface->orig_height;
761 pitch[0] = obj_surface->width;
764 if (interleaved_uv) {
765 width[1] = obj_surface->orig_width;
766 height[1] = obj_surface->orig_height / 2;
767 pitch[1] = obj_surface->width;
768 offset[1] = offset[0] + obj_surface->width * obj_surface->height;
770 width[1] = obj_surface->orig_width / 2;
771 height[1] = obj_surface->orig_height / 2;
772 pitch[1] = obj_surface->width / 2;
773 offset[1] = offset[0] + obj_surface->width * obj_surface->height;
774 width[2] = obj_surface->orig_width / 2;
775 height[2] = obj_surface->orig_height / 2;
776 pitch[2] = obj_surface->width / 2;
777 offset[2] = offset[1] + (obj_surface->width / 2) * (obj_surface->height / 2);
/* Image operand: geometry straight from the VAImage descriptor. */
780 obj_image = IMAGE(surface->id);
782 width[0] = obj_image->image.width;
783 height[0] = obj_image->image.height;
784 pitch[0] = obj_image->image.pitches[0];
785 offset[0] = obj_image->image.offsets[0];
787 if (interleaved_uv) {
788 width[1] = obj_image->image.width;
789 height[1] = obj_image->image.height / 2;
790 pitch[1] = obj_image->image.pitches[1];
791 offset[1] = obj_image->image.offsets[1];
793 width[1] = obj_image->image.width / 2;
794 height[1] = obj_image->image.height / 2;
795 pitch[1] = obj_image->image.pitches[1];
796 offset[1] = obj_image->image.offsets[1];
797 width[2] = obj_image->image.width / 2;
798 height[2] = obj_image->image.height / 2;
799 pitch[2] = obj_image->image.pitches[2];
800 offset[2] = obj_image->image.offsets[2];
/* Bind the Y plane at base_index. */
805 i965_pp_set_surface_state(ctx, pp_context,
807 width[Y] / 4, height[Y], pitch[Y], I965_SURFACEFORMAT_R8_UNORM,
808 base_index, is_target);
/* Interleaved chroma: one UV plane at base_index + 1. */
810 if (interleaved_uv) {
811 i965_pp_set_surface_state(ctx, pp_context,
813 width[UV] / 4, height[UV], pitch[UV], I965_SURFACEFORMAT_R8_UNORM,
814 base_index + 1, is_target);
/* Planar chroma: U then V at base_index + 1 / + 2. */
817 i965_pp_set_surface_state(ctx, pp_context,
819 width[U] / 4, height[U], pitch[U], I965_SURFACEFORMAT_R8_UNORM,
820 base_index + 1, is_target);
823 i965_pp_set_surface_state(ctx, pp_context,
825 width[V] / 4, height[V], pitch[V], I965_SURFACEFORMAT_R8_UNORM,
826 base_index + 2, is_target);
/* Walker callbacks for the NULL (testing) module.  Bodies are trivial --
 * presumably a fixed step count and a no-op block setup; verify against
 * the full source. */
832 pp_null_x_steps(void *private_context)
838 pp_null_y_steps(void *private_context)
844 pp_null_set_block_parameter(struct i965_post_processing_context *pp_context, int x, int y)
/*
 * Initialize hook for the NULL module: installs the trivial walker
 * callbacks; no surfaces or samplers are programmed.
 */
850 pp_null_initialize(VADriverContextP ctx, struct i965_post_processing_context *pp_context,
851 const struct i965_surface *src_surface,
852 const VARectangle *src_rect,
853 const struct i965_surface *dst_surface,
854 const VARectangle *dst_rect,
857 /* private function & data */
858 pp_context->pp_x_steps = pp_null_x_steps;
859 pp_context->pp_y_steps = pp_null_y_steps;
860 pp_context->pp_set_block_parameter = pp_null_set_block_parameter;
/*
 * Walker callbacks for the load/save (plane copy) modules.  Blocks are
 * 16x8 pixels: the y step count is dest_h / 8, and the per-block setup
 * enables the full block (all-ones masks) at origin (x*16, y*8).
 */
864 pp_load_save_x_steps(void *private_context)
870 pp_load_save_y_steps(void *private_context)
872 struct pp_load_save_context *pp_load_save_context = private_context;
874 return pp_load_save_context->dest_h / 8;
878 pp_load_save_set_block_parameter(struct i965_post_processing_context *pp_context, int x, int y)
880 pp_inline_parameter.grf5.block_vertical_mask = 0xff;
881 pp_inline_parameter.grf5.block_horizontal_mask = 0xffff;
882 pp_inline_parameter.grf5.destination_block_horizontal_origin = x * 16;
883 pp_inline_parameter.grf5.destination_block_vertical_origin = y * 8;
/*
 * Initialize hook shared by all load/save (planar copy) modules: binds
 * the source planes starting at binding-table index 1 (read-only) and
 * the destination planes at index 7 (write target), installs the
 * load/save walker callbacks and sizes the 16x8-block grid.  Note the
 * geometry arrays are overwritten by the second call, so the grid is
 * based on the destination's Y-plane dimensions, rounded up to 16.
 */
889 pp_plx_load_save_plx_initialize(VADriverContextP ctx, struct i965_post_processing_context *pp_context,
890 const struct i965_surface *src_surface,
891 const VARectangle *src_rect,
892 const struct i965_surface *dst_surface,
893 const VARectangle *dst_rect,
896 struct pp_load_save_context *pp_load_save_context = (struct pp_load_save_context *)&pp_context->private_context;
897 int width[3], height[3], pitch[3], offset[3];
901 pp_set_media_rw_message_surface(ctx, pp_context, src_surface, 1, 0,
902 width, height, pitch, offset);
904 /* destination surface */
905 pp_set_media_rw_message_surface(ctx, pp_context, dst_surface, 7, 1,
906 width, height, pitch, offset);
908 /* private function & data */
909 pp_context->pp_x_steps = pp_load_save_x_steps;
910 pp_context->pp_y_steps = pp_load_save_y_steps;
911 pp_context->pp_set_block_parameter = pp_load_save_set_block_parameter;
912 pp_load_save_context->dest_h = ALIGN(height[Y], 16);
913 pp_load_save_context->dest_w = ALIGN(width[Y], 16);
915 pp_inline_parameter.grf5.block_count_x = ALIGN(width[Y], 16) / 16; /* 1 x N */
916 pp_inline_parameter.grf5.number_blocks = ALIGN(width[Y], 16) / 16;
/*
 * Walker callbacks for the scaling module.  Blocks are 16x8 destination
 * pixels; per block, the source origin is derived from the normalized
 * scaling steps (x step from the inline params, y step from the static
 * params) plus the normalized source-rect origin, and the destination
 * origin is offset by the destination rectangle.
 */
920 pp_scaling_x_steps(void *private_context)
926 pp_scaling_y_steps(void *private_context)
928 struct pp_scaling_context *pp_scaling_context = private_context;
930 return pp_scaling_context->dest_h / 8;
934 pp_scaling_set_block_parameter(struct i965_post_processing_context *pp_context, int x, int y)
936 struct pp_scaling_context *pp_scaling_context = (struct pp_scaling_context *)&pp_context->private_context;
937 float src_x_steping = pp_inline_parameter.grf5.normalized_video_x_scaling_step;
938 float src_y_steping = pp_static_parameter.grf1.r1_6.normalized_video_y_scaling_step;
940 pp_inline_parameter.grf5.r5_1.source_surface_block_normalized_horizontal_origin = src_x_steping * x * 16 + pp_scaling_context->src_normalized_x;
941 pp_inline_parameter.grf5.source_surface_block_normalized_vertical_origin = src_y_steping * y * 8 + pp_scaling_context->src_normalized_y;
942 pp_inline_parameter.grf5.destination_block_horizontal_origin = x * 16 + pp_scaling_context->dest_x;
943 pp_inline_parameter.grf5.destination_block_vertical_origin = y * 8 + pp_scaling_context->dest_y;
949 pp_nv12_scaling_initialize(VADriverContextP ctx, struct i965_post_processing_context *pp_context,
950 const struct i965_surface *src_surface,
951 const VARectangle *src_rect,
952 const struct i965_surface *dst_surface,
953 const VARectangle *dst_rect,
956 struct i965_driver_data *i965 = i965_driver_data(ctx);
957 struct pp_scaling_context *pp_scaling_context = (struct pp_scaling_context *)&pp_context->private_context;
958 struct object_surface *obj_surface;
959 struct i965_sampler_state *sampler_state;
960 int in_w, in_h, in_wpitch, in_hpitch;
961 int out_w, out_h, out_wpitch, out_hpitch;
964 obj_surface = SURFACE(src_surface->id);
965 in_w = obj_surface->orig_width;
966 in_h = obj_surface->orig_height;
967 in_wpitch = obj_surface->width;
968 in_hpitch = obj_surface->height;
970 /* source Y surface index 1 */
971 i965_pp_set_surface_state(ctx, pp_context,
973 in_w, in_h, in_wpitch, I965_SURFACEFORMAT_R8_UNORM,
976 /* source UV surface index 2 */
977 i965_pp_set_surface_state(ctx, pp_context,
978 obj_surface->bo, in_wpitch * in_hpitch,
979 in_w / 2, in_h / 2, in_wpitch, I965_SURFACEFORMAT_R8G8_UNORM,
982 /* destination surface */
983 obj_surface = SURFACE(dst_surface->id);
984 out_w = obj_surface->orig_width;
985 out_h = obj_surface->orig_height;
986 out_wpitch = obj_surface->width;
987 out_hpitch = obj_surface->height;
989 /* destination Y surface index 7 */
990 i965_pp_set_surface_state(ctx, pp_context,
992 out_w / 4, out_h, out_wpitch, I965_SURFACEFORMAT_R8_UNORM,
995 /* destination UV surface index 8 */
996 i965_pp_set_surface_state(ctx, pp_context,
997 obj_surface->bo, out_wpitch * out_hpitch,
998 out_w / 4, out_h / 2, out_wpitch, I965_SURFACEFORMAT_R8G8_UNORM,
1002 dri_bo_map(pp_context->sampler_state_table.bo, True);
1003 assert(pp_context->sampler_state_table.bo->virtual);
1004 sampler_state = pp_context->sampler_state_table.bo->virtual;
1006 /* SIMD16 Y index 1 */
1007 sampler_state[1].ss0.min_filter = I965_MAPFILTER_LINEAR;
1008 sampler_state[1].ss0.mag_filter = I965_MAPFILTER_LINEAR;
1009 sampler_state[1].ss1.r_wrap_mode = I965_TEXCOORDMODE_CLAMP;
1010 sampler_state[1].ss1.s_wrap_mode = I965_TEXCOORDMODE_CLAMP;
1011 sampler_state[1].ss1.t_wrap_mode = I965_TEXCOORDMODE_CLAMP;
1013 /* SIMD16 UV index 2 */
1014 sampler_state[2].ss0.min_filter = I965_MAPFILTER_LINEAR;
1015 sampler_state[2].ss0.mag_filter = I965_MAPFILTER_LINEAR;
1016 sampler_state[2].ss1.r_wrap_mode = I965_TEXCOORDMODE_CLAMP;
1017 sampler_state[2].ss1.s_wrap_mode = I965_TEXCOORDMODE_CLAMP;
1018 sampler_state[2].ss1.t_wrap_mode = I965_TEXCOORDMODE_CLAMP;
1020 dri_bo_unmap(pp_context->sampler_state_table.bo);
1022 /* private function & data */
1023 pp_context->pp_x_steps = pp_scaling_x_steps;
1024 pp_context->pp_y_steps = pp_scaling_y_steps;
1025 pp_context->pp_set_block_parameter = pp_scaling_set_block_parameter;
1027 pp_scaling_context->dest_x = dst_rect->x;
1028 pp_scaling_context->dest_y = dst_rect->y;
1029 pp_scaling_context->dest_w = ALIGN(dst_rect->width, 16);
1030 pp_scaling_context->dest_h = ALIGN(dst_rect->height, 16);
1031 pp_scaling_context->src_normalized_x = (float)src_rect->x / in_w / out_w;
1032 pp_scaling_context->src_normalized_y = (float)src_rect->y / in_h / out_h;
1034 pp_static_parameter.grf1.r1_6.normalized_video_y_scaling_step = (float) src_rect->height / in_h / out_h;
1036 pp_inline_parameter.grf5.normalized_video_x_scaling_step = (float) src_rect->width / in_w / out_w;
1037 pp_inline_parameter.grf5.block_count_x = pp_scaling_context->dest_w / 16; /* 1 x N */
1038 pp_inline_parameter.grf5.number_blocks = pp_scaling_context->dest_w / 16;
1039 pp_inline_parameter.grf5.block_vertical_mask = 0xff;
1040 pp_inline_parameter.grf5.block_horizontal_mask = 0xffff;
/*
 * Walker callbacks for the AVS module.  The x step count covers dest_w
 * in 16-pixel blocks.  pp_avs_set_block_parameter preserves the source
 * aspect ratio: tmp_w is the destination width that would keep the
 * source aspect at the destination height.  When tmp_w >= dest_w the
 * scaling is linear and the picture is centered horizontally; when
 * tmp_w < dest_w a non-linear ramp (per-block step change via
 * grf6.video_step_delta) is applied over nls_left / nls_right edge
 * blocks, with linear scaling in the middle.  The horizontal source
 * origin is advanced incrementally from the previous block's step and
 * delta, so blocks must be visited in increasing x order within a row.
 */
1044 pp_avs_x_steps(void *private_context)
1046 struct pp_avs_context *pp_avs_context = private_context;
1048 return pp_avs_context->dest_w / 16;
1052 pp_avs_y_steps(void *private_context)
1058 pp_avs_set_block_parameter(struct i965_post_processing_context *pp_context, int x, int y)
1060 struct pp_avs_context *pp_avs_context = (struct pp_avs_context *)&pp_context->private_context;
1061 float src_x_steping, src_y_steping, video_step_delta;
/* Aspect-preserving destination width at the destination height. */
1062 int tmp_w = ALIGN(pp_avs_context->dest_h * pp_avs_context->src_w / pp_avs_context->src_h, 16);
/* Case 1: aspect width fills (or overflows) the destination -> linear
 * step, image centered by shifting the first block's source origin. */
1064 if (tmp_w >= pp_avs_context->dest_w) {
1065 pp_inline_parameter.grf5.normalized_video_x_scaling_step = 1.0 / tmp_w;
1066 pp_inline_parameter.grf6.video_step_delta = 0;
1069 pp_inline_parameter.grf5.r5_1.source_surface_block_normalized_horizontal_origin = (float)(tmp_w - pp_avs_context->dest_w) / tmp_w / 2 +
1070 pp_avs_context->src_normalized_x;
/* Subsequent blocks: advance the origin by the previous block's 16
 * steps plus the accumulated per-pixel delta. */
1072 src_x_steping = pp_inline_parameter.grf5.normalized_video_x_scaling_step;
1073 video_step_delta = pp_inline_parameter.grf6.video_step_delta;
1074 pp_inline_parameter.grf5.r5_1.source_surface_block_normalized_horizontal_origin += src_x_steping * 16 +
1075 16 * 15 * video_step_delta / 2;
/* Case 2: destination is wider than the aspect width -> split the row
 * into left ramp / linear middle / right ramp regions. */
1078 int n0, n1, n2, nls_left, nls_right;
1079 int factor_a = 5, factor_b = 4;
1082 n0 = (pp_avs_context->dest_w - tmp_w) / (16 * 2);
1083 n1 = (pp_avs_context->dest_w - tmp_w) / 16 - n0;
1084 n2 = tmp_w / (16 * factor_a);
1086 nls_right = n1 + n2;
1087 f = (float) n2 * 16 / tmp_w;
1090 pp_inline_parameter.grf6.video_step_delta = 0.0;
/* First block of the row: reset origin and a provisional linear step. */
1093 pp_inline_parameter.grf5.normalized_video_x_scaling_step = 1.0 / pp_avs_context->dest_w;
1094 pp_inline_parameter.grf5.r5_1.source_surface_block_normalized_horizontal_origin = pp_avs_context->src_normalized_x;
1096 src_x_steping = pp_inline_parameter.grf5.normalized_video_x_scaling_step;
1097 video_step_delta = pp_inline_parameter.grf6.video_step_delta;
1098 pp_inline_parameter.grf5.r5_1.source_surface_block_normalized_horizontal_origin += src_x_steping * 16 +
1099 16 * 15 * video_step_delta / 2;
/* Left ramp: solve the step a and per-pixel increment b so that the
 * ramp covers fraction f of the source over nls_left blocks. */
1103 /* f = a * nls_left * 16 + b * nls_left * 16 * (nls_left * 16 - 1) / 2 */
1104 float a = f / (nls_left * 16 * factor_b);
1105 float b = (f - nls_left * 16 * a) * 2 / (nls_left * 16 * (nls_left * 16 - 1));
1107 pp_inline_parameter.grf6.video_step_delta = b;
1110 pp_inline_parameter.grf5.r5_1.source_surface_block_normalized_horizontal_origin = pp_avs_context->src_normalized_x;
1111 pp_inline_parameter.grf5.normalized_video_x_scaling_step = a;
1113 src_x_steping = pp_inline_parameter.grf5.normalized_video_x_scaling_step;
1114 video_step_delta = pp_inline_parameter.grf6.video_step_delta;
1115 pp_inline_parameter.grf5.r5_1.source_surface_block_normalized_horizontal_origin += src_x_steping * 16 +
1116 16 * 15 * video_step_delta / 2;
1117 pp_inline_parameter.grf5.normalized_video_x_scaling_step += 16 * b;
/* Middle region: constant (linear) step, no delta. */
1119 } else if (x < (pp_avs_context->dest_w / 16 - nls_right)) {
1120 /* scale the center linearly */
1121 src_x_steping = pp_inline_parameter.grf5.normalized_video_x_scaling_step;
1122 video_step_delta = pp_inline_parameter.grf6.video_step_delta;
1123 pp_inline_parameter.grf5.r5_1.source_surface_block_normalized_horizontal_origin += src_x_steping * 16 +
1124 16 * 15 * video_step_delta / 2;
1125 pp_inline_parameter.grf6.video_step_delta = 0.0;
1126 pp_inline_parameter.grf5.normalized_video_x_scaling_step = 1.0 / tmp_w;
/* Right ramp: mirror of the left ramp with a decreasing step (-b). */
1128 float a = f / (nls_right * 16 * factor_b);
1129 float b = (f - nls_right * 16 * a) * 2 / (nls_right * 16 * (nls_right * 16 - 1));
1131 src_x_steping = pp_inline_parameter.grf5.normalized_video_x_scaling_step;
1132 video_step_delta = pp_inline_parameter.grf6.video_step_delta;
1133 pp_inline_parameter.grf5.r5_1.source_surface_block_normalized_horizontal_origin += src_x_steping * 16 +
1134 16 * 15 * video_step_delta / 2;
1135 pp_inline_parameter.grf6.video_step_delta = -b;
1137 if (x == (pp_avs_context->dest_w / 16 - nls_right))
1138 pp_inline_parameter.grf5.normalized_video_x_scaling_step = a + (nls_right * 16 - 1) * b;
1140 pp_inline_parameter.grf5.normalized_video_x_scaling_step -= b * 16;
/* Vertical origin is linear in y; destination origin offset by the
 * destination rectangle. */
1145 src_y_steping = pp_static_parameter.grf1.r1_6.normalized_video_y_scaling_step;
1146 pp_inline_parameter.grf5.source_surface_block_normalized_vertical_origin = src_y_steping * y * 8 + pp_avs_context->src_normalized_y;
1147 pp_inline_parameter.grf5.destination_block_horizontal_origin = x * 16 + pp_avs_context->dest_x;
1148 pp_inline_parameter.grf5.destination_block_vertical_origin = y * 8 + pp_avs_context->dest_y;
/*
 * pp_nv12_avs_initialize - set up the AVS (adaptive video scaler) path for
 * an NV12 source -> NV12 destination: programs surface states for the Y and
 * UV planes of both surfaces, fills the SAMPLER_8x8 / SAMPLER_8x8_STATE
 * tables (8-tap adaptive filter for Y, nearest for UV, plus IEF detail
 * filter tuning), and seeds the static/inline kernel parameters used by
 * the per-block walker (pp_avs_x_steps/pp_avs_y_steps/
 * pp_avs_set_block_parameter).
 * NOTE(review): this excerpt is sampled — gaps in the embedded original
 * line numbers mean some statements (e.g. closing braces, some call
 * arguments) are not visible here.
 */
1154 pp_nv12_avs_initialize(VADriverContextP ctx, struct i965_post_processing_context *pp_context,
1155 const struct i965_surface *src_surface,
1156 const VARectangle *src_rect,
1157 const struct i965_surface *dst_surface,
1158 const VARectangle *dst_rect,
1161 struct i965_driver_data *i965 = i965_driver_data(ctx);
1162 struct pp_avs_context *pp_avs_context = (struct pp_avs_context *)&pp_context->private_context;
1163 struct object_surface *obj_surface;
1164 struct i965_sampler_8x8 *sampler_8x8;
1165 struct i965_sampler_8x8_state *sampler_8x8_state;
1167 int in_w, in_h, in_wpitch, in_hpitch;
1168 int out_w, out_h, out_wpitch, out_hpitch;
/* Source surface: orig_* is the logical size, width/height the allocated pitch. */
1171 obj_surface = SURFACE(src_surface->id);
1172 in_w = obj_surface->orig_width;
1173 in_h = obj_surface->orig_height;
1174 in_wpitch = obj_surface->width;
1175 in_hpitch = obj_surface->height;
1177 /* source Y surface index 1 */
1178 i965_pp_set_surface2_state(ctx, pp_context,
1180 in_w, in_h, in_wpitch,
1182 SURFACE_FORMAT_Y8_UNORM, 0,
1185 /* source UV surface index 2 */
/* UV plane lives after the Y plane: byte offset = wpitch * hpitch. */
1186 i965_pp_set_surface2_state(ctx, pp_context,
1187 obj_surface->bo, in_wpitch * in_hpitch,
1188 in_w, in_h, in_wpitch,
1190 SURFACE_FORMAT_PLANAR_420_8, 1,
1193 /* destination surface */
1194 obj_surface = SURFACE(dst_surface->id);
1195 out_w = obj_surface->orig_width;
1196 out_h = obj_surface->orig_height;
1197 out_wpitch = obj_surface->width;
1198 out_hpitch = obj_surface->height;
1199 assert(out_w <= out_wpitch && out_h <= out_hpitch);
1201 /* destination Y surface index 7 */
/* out_w / 4: the destination is written 4 bytes per element through an R8 view. */
1202 i965_pp_set_surface_state(ctx, pp_context,
1204 out_w / 4, out_h, out_wpitch, I965_SURFACEFORMAT_R8_UNORM,
1207 /* destination UV surface index 8 */
1208 i965_pp_set_surface_state(ctx, pp_context,
1209 obj_surface->bo, out_wpitch * out_hpitch,
1210 out_w / 4, out_h / 2, out_wpitch, I965_SURFACEFORMAT_R8G8_UNORM,
1213 /* sampler 8x8 state */
/* SAMPLER_8x8_STATE (138 dwords) for the Y channel: adaptive filtering bypassed. */
1214 dri_bo_map(pp_context->sampler_state_table.bo_8x8, True);
1215 assert(pp_context->sampler_state_table.bo_8x8->virtual);
1216 assert(sizeof(*sampler_8x8_state) == sizeof(int) * 138);
1217 sampler_8x8_state = pp_context->sampler_state_table.bo_8x8->virtual;
1218 memset(sampler_8x8_state, 0, sizeof(*sampler_8x8_state));
1219 sampler_8x8_state->dw136.default_sharpness_level = 0;
1220 sampler_8x8_state->dw137.adaptive_filter_for_all_channel = 1;
1221 sampler_8x8_state->dw137.bypass_y_adaptive_filtering = 1;
1222 sampler_8x8_state->dw137.bypass_x_adaptive_filtering = 1;
1223 dri_bo_unmap(pp_context->sampler_state_table.bo_8x8);
1226 dri_bo_map(pp_context->sampler_state_table.bo, True);
1227 assert(pp_context->sampler_state_table.bo->virtual);
1228 assert(sizeof(*sampler_8x8) == sizeof(int) * 16);
1229 sampler_8x8 = pp_context->sampler_state_table.bo->virtual;
1231 /* sample_8x8 Y index 1 */
/* Y sampler: 8-tap adaptive scaler with the IEF 5x5 detail filter enabled.
 * The dw2..dw14 constants below are IEF threshold/gain/PWL tuning values;
 * presumably hardware-validated defaults — TODO confirm against the PRM. */
1233 memset(&sampler_8x8[index], 0, sizeof(*sampler_8x8));
1234 sampler_8x8[index].dw0.avs_filter_type = AVS_FILTER_ADAPTIVE_8_TAP;
1235 sampler_8x8[index].dw0.ief_bypass = 0;
1236 sampler_8x8[index].dw0.ief_filter_type = IEF_FILTER_DETAIL;
1237 sampler_8x8[index].dw0.ief_filter_size = IEF_FILTER_SIZE_5X5;
1238 sampler_8x8[index].dw1.sampler_8x8_state_pointer = pp_context->sampler_state_table.bo_8x8->offset >> 5;
1239 sampler_8x8[index].dw2.global_noise_estimation = 22;
1240 sampler_8x8[index].dw2.strong_edge_threshold = 8;
1241 sampler_8x8[index].dw2.weak_edge_threshold = 1;
1242 sampler_8x8[index].dw3.strong_edge_weight = 7;
1243 sampler_8x8[index].dw3.regular_weight = 2;
1244 sampler_8x8[index].dw3.non_edge_weight = 0;
1245 sampler_8x8[index].dw3.gain_factor = 40;
1246 sampler_8x8[index].dw4.steepness_boost = 0;
1247 sampler_8x8[index].dw4.steepness_threshold = 0;
1248 sampler_8x8[index].dw4.mr_boost = 0;
1249 sampler_8x8[index].dw4.mr_threshold = 5;
1250 sampler_8x8[index].dw5.pwl1_point_1 = 4;
1251 sampler_8x8[index].dw5.pwl1_point_2 = 12;
1252 sampler_8x8[index].dw5.pwl1_point_3 = 16;
1253 sampler_8x8[index].dw5.pwl1_point_4 = 26;
1254 sampler_8x8[index].dw6.pwl1_point_5 = 40;
1255 sampler_8x8[index].dw6.pwl1_point_6 = 160;
1256 sampler_8x8[index].dw6.pwl1_r3_bias_0 = 127;
1257 sampler_8x8[index].dw6.pwl1_r3_bias_1 = 98;
1258 sampler_8x8[index].dw7.pwl1_r3_bias_2 = 88;
1259 sampler_8x8[index].dw7.pwl1_r3_bias_3 = 64;
1260 sampler_8x8[index].dw7.pwl1_r3_bias_4 = 44;
1261 sampler_8x8[index].dw7.pwl1_r3_bias_5 = 0;
1262 sampler_8x8[index].dw8.pwl1_r3_bias_6 = 0;
1263 sampler_8x8[index].dw8.pwl1_r5_bias_0 = 3;
1264 sampler_8x8[index].dw8.pwl1_r5_bias_1 = 32;
1265 sampler_8x8[index].dw8.pwl1_r5_bias_2 = 32;
1266 sampler_8x8[index].dw9.pwl1_r5_bias_3 = 58;
1267 sampler_8x8[index].dw9.pwl1_r5_bias_4 = 100;
1268 sampler_8x8[index].dw9.pwl1_r5_bias_5 = 108;
1269 sampler_8x8[index].dw9.pwl1_r5_bias_6 = 88;
1270 sampler_8x8[index].dw10.pwl1_r3_slope_0 = -116;
1271 sampler_8x8[index].dw10.pwl1_r3_slope_1 = -20;
1272 sampler_8x8[index].dw10.pwl1_r3_slope_2 = -96;
1273 sampler_8x8[index].dw10.pwl1_r3_slope_3 = -32;
1274 sampler_8x8[index].dw11.pwl1_r3_slope_4 = -50;
1275 sampler_8x8[index].dw11.pwl1_r3_slope_5 = 0;
1276 sampler_8x8[index].dw11.pwl1_r3_slope_6 = 0;
1277 sampler_8x8[index].dw11.pwl1_r5_slope_0 = 116;
1278 sampler_8x8[index].dw12.pwl1_r5_slope_1 = 0;
1279 sampler_8x8[index].dw12.pwl1_r5_slope_2 = 114;
1280 sampler_8x8[index].dw12.pwl1_r5_slope_3 = 67;
1281 sampler_8x8[index].dw12.pwl1_r5_slope_4 = 9;
1282 sampler_8x8[index].dw13.pwl1_r5_slope_5 = -3;
1283 sampler_8x8[index].dw13.pwl1_r5_slope_6 = -15;
1284 sampler_8x8[index].dw13.limiter_boost = 0;
1285 sampler_8x8[index].dw13.minimum_limiter = 10;
1286 sampler_8x8[index].dw13.maximum_limiter = 11;
1287 sampler_8x8[index].dw14.clip_limiter = 130;
/* Patch dw1's GPU address of the 8x8 state table via a relocation. */
1288 dri_bo_emit_reloc(pp_context->sampler_state_table.bo,
1289 I915_GEM_DOMAIN_RENDER,
1292 sizeof(*sampler_8x8) * index + offsetof(struct i965_sampler_8x8, dw1),
1293 pp_context->sampler_state_table.bo_8x8);
/* Second SAMPLER_8x8_STATE for the UV channel (adaptive filter disabled). */
1295 dri_bo_map(pp_context->sampler_state_table.bo_8x8_uv, True);
1296 assert(pp_context->sampler_state_table.bo_8x8_uv->virtual);
1297 assert(sizeof(*sampler_8x8_state) == sizeof(int) * 138);
1298 sampler_8x8_state = pp_context->sampler_state_table.bo_8x8_uv->virtual;
1299 memset(sampler_8x8_state, 0, sizeof(*sampler_8x8_state));
1300 sampler_8x8_state->dw136.default_sharpness_level = 0;
1301 sampler_8x8_state->dw137.adaptive_filter_for_all_channel = 0;
1302 sampler_8x8_state->dw137.bypass_y_adaptive_filtering = 1;
1303 sampler_8x8_state->dw137.bypass_x_adaptive_filtering = 1;
1304 dri_bo_unmap(pp_context->sampler_state_table.bo_8x8_uv);
1306 /* sample_8x8 UV index 2 */
/* UV sampler: identical IEF tuning to Y, but nearest-neighbour scaling. */
1308 memset(&sampler_8x8[index], 0, sizeof(*sampler_8x8));
1309 sampler_8x8[index].dw0.avs_filter_type = AVS_FILTER_NEAREST;
1310 sampler_8x8[index].dw0.ief_bypass = 0;
1311 sampler_8x8[index].dw0.ief_filter_type = IEF_FILTER_DETAIL;
1312 sampler_8x8[index].dw0.ief_filter_size = IEF_FILTER_SIZE_5X5;
1313 sampler_8x8[index].dw1.sampler_8x8_state_pointer = pp_context->sampler_state_table.bo_8x8_uv->offset >> 5;
1314 sampler_8x8[index].dw2.global_noise_estimation = 22;
1315 sampler_8x8[index].dw2.strong_edge_threshold = 8;
1316 sampler_8x8[index].dw2.weak_edge_threshold = 1;
1317 sampler_8x8[index].dw3.strong_edge_weight = 7;
1318 sampler_8x8[index].dw3.regular_weight = 2;
1319 sampler_8x8[index].dw3.non_edge_weight = 0;
1320 sampler_8x8[index].dw3.gain_factor = 40;
1321 sampler_8x8[index].dw4.steepness_boost = 0;
1322 sampler_8x8[index].dw4.steepness_threshold = 0;
1323 sampler_8x8[index].dw4.mr_boost = 0;
1324 sampler_8x8[index].dw4.mr_threshold = 5;
1325 sampler_8x8[index].dw5.pwl1_point_1 = 4;
1326 sampler_8x8[index].dw5.pwl1_point_2 = 12;
1327 sampler_8x8[index].dw5.pwl1_point_3 = 16;
1328 sampler_8x8[index].dw5.pwl1_point_4 = 26;
1329 sampler_8x8[index].dw6.pwl1_point_5 = 40;
1330 sampler_8x8[index].dw6.pwl1_point_6 = 160;
1331 sampler_8x8[index].dw6.pwl1_r3_bias_0 = 127;
1332 sampler_8x8[index].dw6.pwl1_r3_bias_1 = 98;
1333 sampler_8x8[index].dw7.pwl1_r3_bias_2 = 88;
1334 sampler_8x8[index].dw7.pwl1_r3_bias_3 = 64;
1335 sampler_8x8[index].dw7.pwl1_r3_bias_4 = 44;
1336 sampler_8x8[index].dw7.pwl1_r3_bias_5 = 0;
1337 sampler_8x8[index].dw8.pwl1_r3_bias_6 = 0;
1338 sampler_8x8[index].dw8.pwl1_r5_bias_0 = 3;
1339 sampler_8x8[index].dw8.pwl1_r5_bias_1 = 32;
1340 sampler_8x8[index].dw8.pwl1_r5_bias_2 = 32;
1341 sampler_8x8[index].dw9.pwl1_r5_bias_3 = 58;
1342 sampler_8x8[index].dw9.pwl1_r5_bias_4 = 100;
1343 sampler_8x8[index].dw9.pwl1_r5_bias_5 = 108;
1344 sampler_8x8[index].dw9.pwl1_r5_bias_6 = 88;
1345 sampler_8x8[index].dw10.pwl1_r3_slope_0 = -116;
1346 sampler_8x8[index].dw10.pwl1_r3_slope_1 = -20;
1347 sampler_8x8[index].dw10.pwl1_r3_slope_2 = -96;
1348 sampler_8x8[index].dw10.pwl1_r3_slope_3 = -32;
1349 sampler_8x8[index].dw11.pwl1_r3_slope_4 = -50;
1350 sampler_8x8[index].dw11.pwl1_r3_slope_5 = 0;
1351 sampler_8x8[index].dw11.pwl1_r3_slope_6 = 0;
1352 sampler_8x8[index].dw11.pwl1_r5_slope_0 = 116;
1353 sampler_8x8[index].dw12.pwl1_r5_slope_1 = 0;
1354 sampler_8x8[index].dw12.pwl1_r5_slope_2 = 114;
1355 sampler_8x8[index].dw12.pwl1_r5_slope_3 = 67;
1356 sampler_8x8[index].dw12.pwl1_r5_slope_4 = 9;
1357 sampler_8x8[index].dw13.pwl1_r5_slope_5 = -3;
1358 sampler_8x8[index].dw13.pwl1_r5_slope_6 = -15;
1359 sampler_8x8[index].dw13.limiter_boost = 0;
1360 sampler_8x8[index].dw13.minimum_limiter = 10;
1361 sampler_8x8[index].dw13.maximum_limiter = 11;
1362 sampler_8x8[index].dw14.clip_limiter = 130;
1363 dri_bo_emit_reloc(pp_context->sampler_state_table.bo,
1364 I915_GEM_DOMAIN_RENDER,
1367 sizeof(*sampler_8x8) * index + offsetof(struct i965_sampler_8x8, dw1),
1368 pp_context->sampler_state_table.bo_8x8_uv);
1370 dri_bo_unmap(pp_context->sampler_state_table.bo)
1372 /* private function & data */
1373 pp_context->pp_x_steps = pp_avs_x_steps;
1374 pp_context->pp_y_steps = pp_avs_y_steps;
1375 pp_context->pp_set_block_parameter = pp_avs_set_block_parameter;
/* Destination rect is aligned up to the 16x16 walker block size. */
1377 pp_avs_context->dest_x = dst_rect->x;
1378 pp_avs_context->dest_y = dst_rect->y;
1379 pp_avs_context->dest_w = ALIGN(dst_rect->width, 16);
1380 pp_avs_context->dest_h = ALIGN(dst_rect->height, 16);
/* Origins/steps are normalized against both source size and destination
 * size (divided by in_* and out_*) — see the per-block math in
 * pp_avs_set_block_parameter. */
1381 pp_avs_context->src_normalized_x = (float)src_rect->x / in_w / out_w;
1382 pp_avs_context->src_normalized_y = (float)src_rect->y / in_h / out_h;
1383 pp_avs_context->src_w = src_rect->width;
1384 pp_avs_context->src_h = src_rect->height;
/* nlas = 1: enable non-linear anamorphic scaling on the x axis. */
1386 pp_static_parameter.grf4.r4_2.avs.nlas = 1;
1387 pp_static_parameter.grf1.r1_6.normalized_video_y_scaling_step = (float) src_rect->height / in_h / out_h;
1389 pp_inline_parameter.grf5.normalized_video_x_scaling_step = (float) src_rect->width / in_w / out_w;
1390 pp_inline_parameter.grf5.block_count_x = 1; /* M x 1 */
1391 pp_inline_parameter.grf5.number_blocks = pp_avs_context->dest_h / 8;
1392 pp_inline_parameter.grf5.block_vertical_mask = 0xff;
1393 pp_inline_parameter.grf5.block_horizontal_mask = 0xffff;
1394 pp_inline_parameter.grf6.video_step_delta = 0.0;
/* Walker x-step count for the DN/DI path.
 * NOTE(review): body sampled out of this excerpt; the walker emits one
 * MEDIA_OBJECT per x/y step (see gen6_pp_object_walker). */
1398 pp_dndi_x_steps(void *private_context)
/* Walker y-step count for the DN/DI path: one step per 4 destination rows. */
1404 pp_dndi_y_steps(void *private_context)
1406 struct pp_dndi_context *pp_dndi_context = private_context;
1408 return pp_dndi_context->dest_h / 4;
/* Per-block inline parameters for DN/DI: each block covers 16x4 destination
 * pixels at (x*16, y*4). */
1412 pp_dndi_set_block_parameter(struct i965_post_processing_context *pp_context, int x, int y)
1414 pp_inline_parameter.grf5.destination_block_horizontal_origin = x * 16;
1415 pp_inline_parameter.grf5.destination_block_vertical_origin = y * 4;
/*
 * pp_nv12_dndi_initialize - set up the combined denoise + deinterlace
 * (DN/DI) pipeline for an NV12 surface: surface states for source,
 * destination and the STMM (motion-history) buffer, the SAMPLER_DNDI
 * state, and the walker/kernel parameters.
 * NOTE(review): excerpt is sampled — some original lines (e.g. closing
 * braces, some call arguments) are not visible here.
 */
1421 void pp_nv12_dndi_initialize(VADriverContextP ctx, struct i965_post_processing_context *pp_context,
1422 const struct i965_surface *src_surface,
1423 const VARectangle *src_rect,
1424 const struct i965_surface *dst_surface,
1425 const VARectangle *dst_rect,
1428 struct i965_driver_data *i965 = i965_driver_data(ctx);
1429 struct pp_dndi_context *pp_dndi_context = (struct pp_dndi_context *)&pp_context->private_context;
1430 struct object_surface *obj_surface;
1431 struct i965_sampler_dndi *sampler_dndi;
1437 obj_surface = SURFACE(src_surface->id);
1438 orig_w = obj_surface->orig_width;
1439 orig_h = obj_surface->orig_height;
1440 w = obj_surface->width;
1441 h = obj_surface->height;
/* Lazily allocate the STMM buffer; it persists across frames to carry
 * motion history between invocations. */
1443 if (pp_context->stmm.bo == NULL) {
1444 pp_context->stmm.bo = dri_bo_alloc(i965->intel.bufmgr,
1448 assert(pp_context->stmm.bo);
1451 /* source UV surface index 2 */
1452 i965_pp_set_surface_state(ctx, pp_context,
1453 obj_surface->bo, w * h,
1454 orig_w / 4, orig_h / 2, w, I965_SURFACEFORMAT_R8G8_UNORM,
1457 /* source YUV surface index 4 */
1458 i965_pp_set_surface2_state(ctx, pp_context,
1462 SURFACE_FORMAT_PLANAR_420_8, 1,
1465 /* source STMM surface index 20 */
1466 i965_pp_set_surface_state(ctx, pp_context,
1467 pp_context->stmm.bo, 0,
1468 orig_w, orig_h, w, I965_SURFACEFORMAT_R8_UNORM,
1471 /* destination surface */
1472 obj_surface = SURFACE(dst_surface->id);
1473 orig_w = obj_surface->orig_width;
1474 orig_h = obj_surface->orig_height;
1475 w = obj_surface->width;
1476 h = obj_surface->height;
1478 /* destination Y surface index 7 */
1479 i965_pp_set_surface_state(ctx, pp_context,
1481 orig_w / 4, orig_h, w, I965_SURFACEFORMAT_R8_UNORM,
1484 /* destination UV surface index 8 */
1485 i965_pp_set_surface_state(ctx, pp_context,
1486 obj_surface->bo, w * h,
1487 orig_w / 4, orig_h / 2, w, I965_SURFACEFORMAT_R8G8_UNORM,
/* Fill the SAMPLER_DNDI state (8 dwords). The thresholds below are fixed
 * tuning values; ranges are noted where known. */
1490 dri_bo_map(pp_context->sampler_state_table.bo, True);
1491 assert(pp_context->sampler_state_table.bo->virtual);
1492 assert(sizeof(*sampler_dndi) == sizeof(int) * 8);
1493 sampler_dndi = pp_context->sampler_state_table.bo->virtual;
1495 /* sample dndi index 1 */
1497 sampler_dndi[index].dw0.denoise_asd_threshold = 0;
1498 sampler_dndi[index].dw0.denoise_history_delta = 8; // 0-15, default is 8
1499 sampler_dndi[index].dw0.denoise_maximum_history = 128; // 128-240
1500 sampler_dndi[index].dw0.denoise_stad_threshold = 0;
1502 sampler_dndi[index].dw1.denoise_threshold_for_sum_of_complexity_measure = 64;
1503 sampler_dndi[index].dw1.denoise_moving_pixel_threshold = 0;
1504 sampler_dndi[index].dw1.stmm_c2 = 0;
1505 sampler_dndi[index].dw1.low_temporal_difference_threshold = 8;
1506 sampler_dndi[index].dw1.temporal_difference_threshold = 16;
1508 sampler_dndi[index].dw2.block_noise_estimate_noise_threshold = 15; // 0-31
1509 sampler_dndi[index].dw2.block_noise_estimate_edge_threshold = 7; // 0-15
1510 sampler_dndi[index].dw2.denoise_edge_threshold = 7; // 0-15
1511 sampler_dndi[index].dw2.good_neighbor_threshold = 7; // 0-63
1513 sampler_dndi[index].dw3.maximum_stmm = 128;
1514 sampler_dndi[index].dw3.multipler_for_vecm = 2;
1515 sampler_dndi[index].dw3.blending_constant_across_time_for_small_values_of_stmm = 0;
1516 sampler_dndi[index].dw3.blending_constant_across_time_for_large_values_of_stmm = 64;
1517 sampler_dndi[index].dw3.stmm_blending_constant_select = 0;
1519 sampler_dndi[index].dw4.sdi_delta = 8;
1520 sampler_dndi[index].dw4.sdi_threshold = 128;
1521 sampler_dndi[index].dw4.stmm_output_shift = 7; // stmm_max - stmm_min = 2 ^ stmm_output_shift
1522 sampler_dndi[index].dw4.stmm_shift_up = 0;
1523 sampler_dndi[index].dw4.stmm_shift_down = 0;
1524 sampler_dndi[index].dw4.minimum_stmm = 0;
1526 sampler_dndi[index].dw5.fmd_temporal_difference_threshold = 0;
1527 sampler_dndi[index].dw5.sdi_fallback_mode_2_constant = 0;
1528 sampler_dndi[index].dw5.sdi_fallback_mode_1_t2_constant = 0;
1529 sampler_dndi[index].dw5.sdi_fallback_mode_1_t1_constant = 0;
/* Both denoise (dn_enable) and deinterlace (di_enable) are on here;
 * dndi_first_frame = 1 presumably suppresses use of uninitialized history
 * on the first frame — TODO confirm. */
1531 sampler_dndi[index].dw6.dn_enable = 1;
1532 sampler_dndi[index].dw6.di_enable = 1;
1533 sampler_dndi[index].dw6.di_partial = 0;
1534 sampler_dndi[index].dw6.dndi_top_first = 1;
1535 sampler_dndi[index].dw6.dndi_stream_id = 1;
1536 sampler_dndi[index].dw6.dndi_first_frame = 1;
1537 sampler_dndi[index].dw6.progressive_dn = 0;
1538 sampler_dndi[index].dw6.fmd_tear_threshold = 32;
1539 sampler_dndi[index].dw6.fmd2_vertical_difference_threshold = 32;
1540 sampler_dndi[index].dw6.fmd1_vertical_difference_threshold = 32;
1542 sampler_dndi[index].dw7.fmd_for_1st_field_of_current_frame = 2;
1543 sampler_dndi[index].dw7.fmd_for_2nd_field_of_previous_frame = 1;
1544 sampler_dndi[index].dw7.vdi_walker_enable = 0;
1545 sampler_dndi[index].dw7.column_width_minus1 = w / 16;
1547 dri_bo_unmap(pp_context->sampler_state_table.bo);
1549 /* private function & data */
1550 pp_context->pp_x_steps = pp_dndi_x_steps;
1551 pp_context->pp_y_steps = pp_dndi_y_steps;
1552 pp_context->pp_set_block_parameter = pp_dndi_set_block_parameter;
1554 pp_static_parameter.grf1.statistics_surface_picth = w / 2;
1555 pp_static_parameter.grf1.r1_6.di.top_field_first = 0;
1556 pp_static_parameter.grf4.r4_2.di.motion_history_coefficient_m2 = 64;
1557 pp_static_parameter.grf4.r4_2.di.motion_history_coefficient_m1 = 192;
/* Walker covers w/16 blocks per row; origins are filled per block by
 * pp_dndi_set_block_parameter. */
1559 pp_inline_parameter.grf5.block_count_x = w / 16; /* 1 x N */
1560 pp_inline_parameter.grf5.number_blocks = w / 16;
1561 pp_inline_parameter.grf5.block_vertical_mask = 0xff;
1562 pp_inline_parameter.grf5.block_horizontal_mask = 0xffff;
1564 pp_dndi_context->dest_w = w;
1565 pp_dndi_context->dest_h = h;
/* Walker x-step count for the denoise-only path.
 * NOTE(review): body sampled out of this excerpt. */
1569 pp_dn_x_steps(void *private_context)
/* Walker y-step count for the denoise-only path: one step per 8 rows. */
1575 pp_dn_y_steps(void *private_context)
1577 struct pp_dn_context *pp_dn_context = private_context;
1579 return pp_dn_context->dest_h / 8;
/* Per-block inline parameters for denoise: each block covers 16x8
 * destination pixels at (x*16, y*8). */
1583 pp_dn_set_block_parameter(struct i965_post_processing_context *pp_context, int x, int y)
1585 pp_inline_parameter.grf5.destination_block_horizontal_origin = x * 16;
1586 pp_inline_parameter.grf5.destination_block_vertical_origin = y * 8;
/*
 * pp_nv12_dn_initialize - set up the denoise-only pipeline for an NV12
 * surface. Mirrors pp_nv12_dndi_initialize but with deinterlacing off
 * (dw6.di_enable = 0) and the noise threshold driven by the caller's
 * VAProc filter parameter.
 * NOTE(review): excerpt is sampled — some original lines are not visible.
 */
1592 void pp_nv12_dn_initialize(VADriverContextP ctx, struct i965_post_processing_context *pp_context,
1593 const struct i965_surface *src_surface,
1594 const VARectangle *src_rect,
1595 const struct i965_surface *dst_surface,
1596 const VARectangle *dst_rect,
1599 struct i965_driver_data *i965 = i965_driver_data(ctx);
1600 struct pp_dn_context *pp_dn_context = (struct pp_dn_context *)&pp_context->private_context;
1601 struct object_surface *obj_surface;
1602 struct i965_sampler_dndi *sampler_dndi;
1603 VAProcFilterBaseParameterBuffer *dn_filter_param = filter_param;
/* Default strength when no filter parameter is supplied; otherwise the
 * normalized value is scaled into the hardware's 0-31 range. */
1607 int dn_strength = 15;
1609 if (dn_filter_param) {
1610 int value = dn_filter_param->value;
1618 dn_strength = (int)(value * 31.0F);
1622 obj_surface = SURFACE(src_surface->id);
1623 orig_w = obj_surface->orig_width;
1624 orig_h = obj_surface->orig_height;
1625 w = obj_surface->width;
1626 h = obj_surface->height;
/* Lazily allocate the persistent STMM (motion history) buffer. */
1628 if (pp_context->stmm.bo == NULL) {
1629 pp_context->stmm.bo = dri_bo_alloc(i965->intel.bufmgr,
1633 assert(pp_context->stmm.bo);
1636 /* source UV surface index 2 */
1637 i965_pp_set_surface_state(ctx, pp_context,
1638 obj_surface->bo, w * h,
1639 orig_w / 4, orig_h / 2, w, I965_SURFACEFORMAT_R8G8_UNORM,
1642 /* source YUV surface index 4 */
1643 i965_pp_set_surface2_state(ctx, pp_context,
1647 SURFACE_FORMAT_PLANAR_420_8, 1,
1650 /* source STMM surface index 20 */
1651 i965_pp_set_surface_state(ctx, pp_context,
1652 pp_context->stmm.bo, 0,
1653 orig_w, orig_h, w, I965_SURFACEFORMAT_R8_UNORM,
1656 /* destination surface */
1657 obj_surface = SURFACE(dst_surface->id);
1658 orig_w = obj_surface->orig_width;
1659 orig_h = obj_surface->orig_height;
1660 w = obj_surface->width;
1661 h = obj_surface->height;
1663 /* destination Y surface index 7 */
1664 i965_pp_set_surface_state(ctx, pp_context,
1666 orig_w / 4, orig_h, w, I965_SURFACEFORMAT_R8_UNORM,
1669 /* destination UV surface index 8 */
1670 i965_pp_set_surface_state(ctx, pp_context,
1671 obj_surface->bo, w * h,
1672 orig_w / 4, orig_h / 2, w, I965_SURFACEFORMAT_R8G8_UNORM,
/* SAMPLER_DNDI state (8 dwords); same fixed tuning as the DN/DI path
 * except for dn_strength and di_enable. */
1675 dri_bo_map(pp_context->sampler_state_table.bo, True);
1676 assert(pp_context->sampler_state_table.bo->virtual);
1677 assert(sizeof(*sampler_dndi) == sizeof(int) * 8);
1678 sampler_dndi = pp_context->sampler_state_table.bo->virtual;
1680 /* sample dndi index 1 */
1682 sampler_dndi[index].dw0.denoise_asd_threshold = 0;
1683 sampler_dndi[index].dw0.denoise_history_delta = 8; // 0-15, default is 8
1684 sampler_dndi[index].dw0.denoise_maximum_history = 128; // 128-240
1685 sampler_dndi[index].dw0.denoise_stad_threshold = 0;
1687 sampler_dndi[index].dw1.denoise_threshold_for_sum_of_complexity_measure = 64;
1688 sampler_dndi[index].dw1.denoise_moving_pixel_threshold = 0;
1689 sampler_dndi[index].dw1.stmm_c2 = 0;
1690 sampler_dndi[index].dw1.low_temporal_difference_threshold = 8;
1691 sampler_dndi[index].dw1.temporal_difference_threshold = 16;
/* Caller-controlled strength lands here (0-31). */
1693 sampler_dndi[index].dw2.block_noise_estimate_noise_threshold = dn_strength; // 0-31
1694 sampler_dndi[index].dw2.block_noise_estimate_edge_threshold = 7; // 0-15
1695 sampler_dndi[index].dw2.denoise_edge_threshold = 7; // 0-15
1696 sampler_dndi[index].dw2.good_neighbor_threshold = 7; // 0-63
1698 sampler_dndi[index].dw3.maximum_stmm = 128;
1699 sampler_dndi[index].dw3.multipler_for_vecm = 2;
1700 sampler_dndi[index].dw3.blending_constant_across_time_for_small_values_of_stmm = 0;
1701 sampler_dndi[index].dw3.blending_constant_across_time_for_large_values_of_stmm = 64;
1702 sampler_dndi[index].dw3.stmm_blending_constant_select = 0;
1704 sampler_dndi[index].dw4.sdi_delta = 8;
1705 sampler_dndi[index].dw4.sdi_threshold = 128;
1706 sampler_dndi[index].dw4.stmm_output_shift = 7; // stmm_max - stmm_min = 2 ^ stmm_output_shift
1707 sampler_dndi[index].dw4.stmm_shift_up = 0;
1708 sampler_dndi[index].dw4.stmm_shift_down = 0;
1709 sampler_dndi[index].dw4.minimum_stmm = 0;
1711 sampler_dndi[index].dw5.fmd_temporal_difference_threshold = 0;
1712 sampler_dndi[index].dw5.sdi_fallback_mode_2_constant = 0;
1713 sampler_dndi[index].dw5.sdi_fallback_mode_1_t2_constant = 0;
1714 sampler_dndi[index].dw5.sdi_fallback_mode_1_t1_constant = 0;
/* Denoise on, deinterlace off — this is what distinguishes the DN path. */
1716 sampler_dndi[index].dw6.dn_enable = 1;
1717 sampler_dndi[index].dw6.di_enable = 0;
1718 sampler_dndi[index].dw6.di_partial = 0;
1719 sampler_dndi[index].dw6.dndi_top_first = 1;
1720 sampler_dndi[index].dw6.dndi_stream_id = 1;
1721 sampler_dndi[index].dw6.dndi_first_frame = 1;
1722 sampler_dndi[index].dw6.progressive_dn = 0;
1723 sampler_dndi[index].dw6.fmd_tear_threshold = 32;
1724 sampler_dndi[index].dw6.fmd2_vertical_difference_threshold = 32;
1725 sampler_dndi[index].dw6.fmd1_vertical_difference_threshold = 32;
1727 sampler_dndi[index].dw7.fmd_for_1st_field_of_current_frame = 2;
1728 sampler_dndi[index].dw7.fmd_for_2nd_field_of_previous_frame = 1;
1729 sampler_dndi[index].dw7.vdi_walker_enable = 0;
1730 sampler_dndi[index].dw7.column_width_minus1 = w / 16;
1732 dri_bo_unmap(pp_context->sampler_state_table.bo);
1734 /* private function & data */
1735 pp_context->pp_x_steps = pp_dn_x_steps;
1736 pp_context->pp_y_steps = pp_dn_y_steps;
1737 pp_context->pp_set_block_parameter = pp_dn_set_block_parameter;
1739 pp_static_parameter.grf1.statistics_surface_picth = w / 2;
1740 pp_static_parameter.grf1.r1_6.di.top_field_first = 0;
1741 pp_static_parameter.grf4.r4_2.di.motion_history_coefficient_m2 = 64;
1742 pp_static_parameter.grf4.r4_2.di.motion_history_coefficient_m1 = 192;
1744 pp_inline_parameter.grf5.block_count_x = w / 16; /* 1 x N */
1745 pp_inline_parameter.grf5.number_blocks = w / 16;
1746 pp_inline_parameter.grf5.block_vertical_mask = 0xff;
1747 pp_inline_parameter.grf5.block_horizontal_mask = 0xffff;
1749 pp_dn_context->dest_w = w;
1750 pp_dn_context->dest_h = h;
/*
 * ironlake_pp_initialize - (re)allocate the per-run GPU state buffers for
 * the Ironlake post-processing path (surface state/binding table, CURBE,
 * interface descriptors, sampler tables, VFE state), reset the shared
 * static/inline kernel parameters, then dispatch the selected pp module's
 * initialize hook.
 * NOTE(review): excerpt is sampled — alloc sizes/flags on some dri_bo_alloc
 * calls are not visible here.
 */
1754 ironlake_pp_initialize(
1755 VADriverContextP ctx,
1756 struct i965_post_processing_context *pp_context,
1757 const struct i965_surface *src_surface,
1758 const VARectangle *src_rect,
1759 const struct i965_surface *dst_surface,
1760 const VARectangle *dst_rect,
1765 struct i965_driver_data *i965 = i965_driver_data(ctx);
1766 struct pp_module *pp_module;
/* Drop any buffer from a previous run before allocating a fresh one. */
1769 dri_bo_unreference(pp_context->surface_state_binding_table.bo);
1770 bo = dri_bo_alloc(i965->intel.bufmgr,
1771 "surface state & binding table",
1772 (SURFACE_STATE_PADDED_SIZE + sizeof(unsigned int)) * MAX_PP_SURFACES,
1775 pp_context->surface_state_binding_table.bo = bo;
1777 dri_bo_unreference(pp_context->curbe.bo);
1778 bo = dri_bo_alloc(i965->intel.bufmgr,
1783 pp_context->curbe.bo = bo;
1785 dri_bo_unreference(pp_context->idrt.bo);
1786 bo = dri_bo_alloc(i965->intel.bufmgr,
1787 "interface discriptor",
1788 sizeof(struct i965_interface_descriptor),
1791 pp_context->idrt.bo = bo;
1792 pp_context->idrt.num_interface_descriptors = 0;
/* Sampler table is zeroed up front; module initializers fill it in. */
1794 dri_bo_unreference(pp_context->sampler_state_table.bo);
1795 bo = dri_bo_alloc(i965->intel.bufmgr,
1796 "sampler state table",
1800 dri_bo_map(bo, True);
1801 memset(bo->virtual, 0, bo->size);
1803 pp_context->sampler_state_table.bo = bo;
1805 dri_bo_unreference(pp_context->sampler_state_table.bo_8x8);
1806 bo = dri_bo_alloc(i965->intel.bufmgr,
1807 "sampler 8x8 state ",
1811 pp_context->sampler_state_table.bo_8x8 = bo;
1813 dri_bo_unreference(pp_context->sampler_state_table.bo_8x8_uv);
1814 bo = dri_bo_alloc(i965->intel.bufmgr,
1815 "sampler 8x8 state ",
1819 pp_context->sampler_state_table.bo_8x8_uv = bo;
1821 dri_bo_unreference(pp_context->vfe_state.bo);
1822 bo = dri_bo_alloc(i965->intel.bufmgr,
1824 sizeof(struct i965_vfe_state),
1827 pp_context->vfe_state.bo = bo;
/* Clear the shared kernel parameter blocks, then let the chosen module
 * (PP_NULL..NUM_PP_MODULES-1) populate them. */
1829 memset(&pp_static_parameter, 0, sizeof(pp_static_parameter));
1830 memset(&pp_inline_parameter, 0, sizeof(pp_inline_parameter));
1831 assert(pp_index >= PP_NULL && pp_index < NUM_PP_MODULES);
1832 pp_context->current_pp = pp_index;
1833 pp_module = &pp_context->pp_modules[pp_index];
1835 if (pp_module->initialize)
1836 pp_module->initialize(ctx, pp_context,
/*
 * ironlake_post_processing - top-level Ironlake pp entry point: allocate
 * state and run the selected module's initialize hook, program the fixed
 * GPU state, then build and submit the media pipeline batch.
 */
1845 ironlake_post_processing(
1846 VADriverContextP ctx,
1847 struct i965_post_processing_context *pp_context,
1848 const struct i965_surface *src_surface,
1849 const VARectangle *src_rect,
1850 const struct i965_surface *dst_surface,
1851 const VARectangle *dst_rect,
1856 ironlake_pp_initialize(ctx, pp_context, src_surface, src_rect, dst_surface, dst_rect, pp_index, filter_param);
1857 ironlake_pp_states_setup(ctx, pp_context);
1858 ironlake_pp_pipeline_setup(ctx, pp_context);
/*
 * Gen6 counterpart of ironlake_pp_initialize (the function-name line is
 * missing from this excerpt — presumably gen6_pp_initialize; TODO confirm):
 * reallocates the same set of state buffers, but uses the Gen6 interface
 * descriptor layout (struct gen6_interface_descriptor_data), then resets
 * the kernel parameters and dispatches the module's initialize hook.
 */
1863 VADriverContextP ctx,
1864 struct i965_post_processing_context *pp_context,
1865 const struct i965_surface *src_surface,
1866 const VARectangle *src_rect,
1867 const struct i965_surface *dst_surface,
1868 const VARectangle *dst_rect,
1873 struct i965_driver_data *i965 = i965_driver_data(ctx);
1874 struct pp_module *pp_module;
/* Drop any buffer from a previous run before allocating a fresh one. */
1877 dri_bo_unreference(pp_context->surface_state_binding_table.bo);
1878 bo = dri_bo_alloc(i965->intel.bufmgr,
1879 "surface state & binding table",
1880 (SURFACE_STATE_PADDED_SIZE + sizeof(unsigned int)) * MAX_PP_SURFACES,
1883 pp_context->surface_state_binding_table.bo = bo;
1885 dri_bo_unreference(pp_context->curbe.bo);
1886 bo = dri_bo_alloc(i965->intel.bufmgr,
1891 pp_context->curbe.bo = bo;
1893 dri_bo_unreference(pp_context->idrt.bo);
1894 bo = dri_bo_alloc(i965->intel.bufmgr,
1895 "interface discriptor",
1896 sizeof(struct gen6_interface_descriptor_data),
1899 pp_context->idrt.bo = bo;
1900 pp_context->idrt.num_interface_descriptors = 0;
1902 dri_bo_unreference(pp_context->sampler_state_table.bo);
1903 bo = dri_bo_alloc(i965->intel.bufmgr,
1904 "sampler state table",
1908 dri_bo_map(bo, True);
1909 memset(bo->virtual, 0, bo->size);
1911 pp_context->sampler_state_table.bo = bo;
1913 dri_bo_unreference(pp_context->sampler_state_table.bo_8x8);
1914 bo = dri_bo_alloc(i965->intel.bufmgr,
1915 "sampler 8x8 state ",
1919 pp_context->sampler_state_table.bo_8x8 = bo;
1921 dri_bo_unreference(pp_context->sampler_state_table.bo_8x8_uv);
1922 bo = dri_bo_alloc(i965->intel.bufmgr,
1923 "sampler 8x8 state ",
1927 pp_context->sampler_state_table.bo_8x8_uv = bo;
1929 dri_bo_unreference(pp_context->vfe_state.bo);
1930 bo = dri_bo_alloc(i965->intel.bufmgr,
1932 sizeof(struct i965_vfe_state),
1935 pp_context->vfe_state.bo = bo;
/* Reset shared kernel parameters, then hand off to the selected module. */
1937 memset(&pp_static_parameter, 0, sizeof(pp_static_parameter));
1938 memset(&pp_inline_parameter, 0, sizeof(pp_inline_parameter));
1939 assert(pp_index >= PP_NULL && pp_index < NUM_PP_MODULES);
1940 pp_context->current_pp = pp_index;
1941 pp_module = &pp_context->pp_modules[pp_index];
1943 if (pp_module->initialize)
1944 pp_module->initialize(ctx, pp_context,
/*
 * gen6_pp_interface_descriptor_table - fill one Gen6 interface descriptor
 * for the current pp module: kernel start pointer, sampler state pointer,
 * binding table offset and CURBE read length, with relocations so the GPU
 * addresses are patched at exec time.
 */
1953 gen6_pp_interface_descriptor_table(struct i965_post_processing_context *pp_context)
1955 struct gen6_interface_descriptor_data *desc;
1957 int pp_index = pp_context->current_pp;
1959 bo = pp_context->idrt.bo;
1960 dri_bo_map(bo, True);
1961 assert(bo->virtual);
1963 memset(desc, 0, sizeof(*desc));
/* Kernel address in 64-byte units; fixed up by the reloc emitted below. */
1964 desc->desc0.kernel_start_pointer =
1965 pp_context->pp_modules[pp_index].kernel.bo->offset >> 6; /* reloc */
1966 desc->desc1.single_program_flow = 1;
1967 desc->desc1.floating_point_mode = FLOATING_POINT_IEEE_754;
1968 desc->desc2.sampler_count = 1; /* 1 - 4 samplers used */
1969 desc->desc2.sampler_state_pointer =
1970 pp_context->sampler_state_table.bo->offset >> 5;
1971 desc->desc3.binding_table_entry_count = 0;
1972 desc->desc3.binding_table_pointer = (BINDING_TABLE_OFFSET >> 5);
1973 desc->desc4.constant_urb_entry_read_offset = 0;
1974 desc->desc4.constant_urb_entry_read_length = 4; /* grf 1-4 */
1976 dri_bo_emit_reloc(bo,
1977 I915_GEM_DOMAIN_INSTRUCTION, 0,
1979 offsetof(struct gen6_interface_descriptor_data, desc0),
1980 pp_context->pp_modules[pp_index].kernel.bo);
1982 dri_bo_emit_reloc(bo,
1983 I915_GEM_DOMAIN_INSTRUCTION, 0,
1984 desc->desc2.sampler_count << 2,
1985 offsetof(struct gen6_interface_descriptor_data, desc2),
1986 pp_context->sampler_state_table.bo);
1989 pp_context->idrt.num_interface_descriptors++;
/*
 * gen6_pp_upload_constants - copy the 128-byte static kernel parameter
 * block into the CURBE buffer object (read by the kernel as grf 1-4).
 */
1993 gen6_pp_upload_constants(struct i965_post_processing_context *pp_context)
1995 unsigned char *constant_buffer;
1997 assert(sizeof(pp_static_parameter) == 128);
1998 dri_bo_map(pp_context->curbe.bo, 1);
1999 assert(pp_context->curbe.bo->virtual);
2000 constant_buffer = pp_context->curbe.bo->virtual;
2001 memcpy(constant_buffer, &pp_static_parameter, sizeof(pp_static_parameter));
2002 dri_bo_unmap(pp_context->curbe.bo);
/* gen6_pp_states_setup - program the indirect GPU state for Gen6 pp:
 * interface descriptors plus the CURBE constant upload. */
2006 gen6_pp_states_setup(VADriverContextP ctx,
2007 struct i965_post_processing_context *pp_context)
2009 gen6_pp_interface_descriptor_table(pp_context);
2010 gen6_pp_upload_constants(pp_context);
/* gen6_pp_pipeline_select - emit PIPELINE_SELECT to switch the GPU to the
 * media pipeline for the following commands. */
2014 gen6_pp_pipeline_select(VADriverContextP ctx,
2015 struct i965_post_processing_context *pp_context)
2017 struct intel_batchbuffer *batch = pp_context->batch;
2019 BEGIN_BATCH(batch, 1);
2020 OUT_BATCH(batch, CMD_PIPELINE_SELECT | PIPELINE_SELECT_MEDIA);
2021 ADVANCE_BATCH(batch);
/*
 * gen6_pp_state_base_address - emit STATE_BASE_ADDRESS (10 dwords): only
 * the surface-state base points at a real buffer (relocated to the
 * surface state / binding table bo); all other bases stay zero with the
 * modify-enable bit set.
 */
2025 gen6_pp_state_base_address(VADriverContextP ctx,
2026 struct i965_post_processing_context *pp_context)
2028 struct intel_batchbuffer *batch = pp_context->batch;
2030 BEGIN_BATCH(batch, 10);
2031 OUT_BATCH(batch, CMD_STATE_BASE_ADDRESS | (10 - 2));
2032 OUT_BATCH(batch, 0 | BASE_ADDRESS_MODIFY);
2033 OUT_RELOC(batch, pp_context->surface_state_binding_table.bo, I915_GEM_DOMAIN_INSTRUCTION, 0, BASE_ADDRESS_MODIFY); /* Surface state base address */
2034 OUT_BATCH(batch, 0 | BASE_ADDRESS_MODIFY);
2035 OUT_BATCH(batch, 0 | BASE_ADDRESS_MODIFY);
2036 OUT_BATCH(batch, 0 | BASE_ADDRESS_MODIFY);
2037 OUT_BATCH(batch, 0 | BASE_ADDRESS_MODIFY);
2038 OUT_BATCH(batch, 0 | BASE_ADDRESS_MODIFY);
2039 OUT_BATCH(batch, 0 | BASE_ADDRESS_MODIFY);
2040 OUT_BATCH(batch, 0 | BASE_ADDRESS_MODIFY);
2041 ADVANCE_BATCH(batch);
/*
 * gen6_pp_vfe_state - emit MEDIA_VFE_STATE (8 dwords) configuring the
 * video front end: URB entry count/size and the constant (CURBE) buffer
 * size, both expressed in 256-bit units.
 */
2045 gen6_pp_vfe_state(VADriverContextP ctx,
2046 struct i965_post_processing_context *pp_context)
2048 struct intel_batchbuffer *batch = pp_context->batch;
2050 BEGIN_BATCH(batch, 8);
2051 OUT_BATCH(batch, CMD_MEDIA_VFE_STATE | (8 - 2));
2052 OUT_BATCH(batch, 0);
2054 (pp_context->urb.num_vfe_entries - 1) << 16 |
2055 pp_context->urb.num_vfe_entries << 8);
2056 OUT_BATCH(batch, 0);
2058 (pp_context->urb.size_vfe_entry * 2) << 16 | /* in 256 bits unit */
2059 (pp_context->urb.size_cs_entry * pp_context->urb.num_cs_entries * 2 - 1)); /* in 256 bits unit */
2060 OUT_BATCH(batch, 0);
2061 OUT_BATCH(batch, 0);
2062 OUT_BATCH(batch, 0);
2063 ADVANCE_BATCH(batch);
/*
 * gen6_pp_curbe_load — emit MEDIA_CURBE_LOAD pointing the hardware at the
 * constant buffer (pp_context->curbe.bo) holding the filter parameters.
 * The assert guards that the programmed CURBE length fits the BO.
 */
2067 gen6_pp_curbe_load(VADriverContextP ctx,
2068 struct i965_post_processing_context *pp_context)
2070 struct intel_batchbuffer *batch = pp_context->batch;
2072 assert(pp_context->urb.size_cs_entry * pp_context->urb.num_cs_entries * 512 <= pp_context->curbe.bo->size);
2074 BEGIN_BATCH(batch, 4);
2075 OUT_BATCH(batch, CMD_MEDIA_CURBE_LOAD | (4 - 2));
2076 OUT_BATCH(batch, 0);
/* CURBE total length in bytes (OUT_BATCH( opener missing in this listing). */
2078 pp_context->urb.size_cs_entry * pp_context->urb.num_cs_entries * 512);
/* Relocation to the CURBE buffer object. */
2080 pp_context->curbe.bo,
2081 I915_GEM_DOMAIN_INSTRUCTION, 0,
2083 ADVANCE_BATCH(batch);
/*
 * gen6_interface_descriptor_load — emit MEDIA_INTERFACE_DESCRIPTOR_LOAD,
 * handing the hardware the table of interface descriptors (kernel entry
 * points) built earlier into pp_context->idrt.bo.
 */
2087 gen6_interface_descriptor_load(VADriverContextP ctx,
2088 struct i965_post_processing_context *pp_context)
2090 struct intel_batchbuffer *batch = pp_context->batch;
2092 BEGIN_BATCH(batch, 4);
2093 OUT_BATCH(batch, CMD_MEDIA_INTERFACE_DESCRIPTOR_LOAD | (4 - 2));
2094 OUT_BATCH(batch, 0);
/* Total descriptor-table length in bytes. */
2096 pp_context->idrt.num_interface_descriptors * sizeof(struct gen6_interface_descriptor_data));
/* Relocation to the descriptor table BO. */
2098 pp_context->idrt.bo,
2099 I915_GEM_DOMAIN_INSTRUCTION, 0,
2101 ADVANCE_BATCH(batch);
/*
 * gen6_pp_object_walker — walk the destination in x/y blocks and emit one
 * MEDIA_OBJECT per block. The per-PP-type callbacks provide the step
 * counts and per-block parameters; a pp_set_block_parameter return of 0
 * means "this block should be processed", so a MEDIA_OBJECT is emitted
 * carrying pp_inline_parameter (64 bytes = 16 inline dwords) for GRF 5-6.
 */
2105 gen6_pp_object_walker(VADriverContextP ctx,
2106 struct i965_post_processing_context *pp_context)
2108 struct intel_batchbuffer *batch = pp_context->batch;
2109 int x, x_steps, y, y_steps;
2111 x_steps = pp_context->pp_x_steps(&pp_context->private_context);
2112 y_steps = pp_context->pp_y_steps(&pp_context->private_context);
2114 for (y = 0; y < y_steps; y++) {
2115 for (x = 0; x < x_steps; x++) {
2116 if (!pp_context->pp_set_block_parameter(pp_context, x, y)) {
/* 22 dwords total: 6 fixed header dwords + 16 dwords of inline data. */
2117 BEGIN_BATCH(batch, 22);
2118 OUT_BATCH(batch, CMD_MEDIA_OBJECT | 20);
2119 OUT_BATCH(batch, 0);
2120 OUT_BATCH(batch, 0); /* no indirect data */
2121 OUT_BATCH(batch, 0);
2122 OUT_BATCH(batch, 0); /* scoreboard */
2123 OUT_BATCH(batch, 0);
2125 /* inline data grf 5-6 */
2126 assert(sizeof(pp_inline_parameter) == 64);
2127 intel_batchbuffer_data(batch, &pp_inline_parameter, sizeof(pp_inline_parameter));
2129 ADVANCE_BATCH(batch);
/*
 * gen6_pp_pipeline_setup — assemble the full Gen6+ PP command sequence in
 * one atomic batch region: flush, pipe select, base addresses, CURBE,
 * interface descriptors, VFE state, then the per-block object walker.
 * The 0x1000 argument reserves batch space for the atomic section.
 */
2136 gen6_pp_pipeline_setup(VADriverContextP ctx,
2137 struct i965_post_processing_context *pp_context)
2139 struct intel_batchbuffer *batch = pp_context->batch;
2141 intel_batchbuffer_start_atomic(batch, 0x1000);
/* Flush first so prior rendering is complete before PP reads surfaces. */
2142 intel_batchbuffer_emit_mi_flush(batch);
2143 gen6_pp_pipeline_select(ctx, pp_context);
2144 gen6_pp_state_base_address(ctx, pp_context);
2145 gen6_pp_curbe_load(ctx, pp_context);
2146 gen6_interface_descriptor_load(ctx, pp_context);
2147 gen6_pp_vfe_state(ctx, pp_context);
2148 gen6_pp_object_walker(ctx, pp_context);
2149 intel_batchbuffer_end_atomic(batch);
/*
 * gen6_post_processing — top-level Gen6/Gen7 PP entry: initialize the PP
 * context for the requested operation, set up states, then emit the
 * pipeline. NOTE(review): trailing parameters (pp_index, filter_param —
 * see the caller at i965_post_processing_internal) and the argument list
 * of gen6_pp_initialize are missing from this listing.
 */
2153 gen6_post_processing(
2154 VADriverContextP ctx,
2155 struct i965_post_processing_context *pp_context,
2156 const struct i965_surface *src_surface,
2157 const VARectangle *src_rect,
2158 const struct i965_surface *dst_surface,
2159 const VARectangle *dst_rect,
2164 gen6_pp_initialize(ctx, pp_context,
2171 gen6_pp_states_setup(ctx, pp_context);
2172 gen6_pp_pipeline_setup(ctx, pp_context);
/*
 * i965_post_processing_internal — generation dispatch: route the PP
 * request to the Gen6/Gen7 path or fall back to the Ironlake (Gen5) path.
 * NOTE(review): the trailing parameters (pp_index, filter_param) of this
 * function's signature are missing from this listing.
 */
2176 i965_post_processing_internal(
2177 VADriverContextP ctx,
2178 struct i965_post_processing_context *pp_context,
2179 const struct i965_surface *src_surface,
2180 const VARectangle *src_rect,
2181 const struct i965_surface *dst_surface,
2182 const VARectangle *dst_rect,
2187 struct i965_driver_data *i965 = i965_driver_data(ctx);
2189 if (IS_GEN6(i965->intel.device_id) ||
2190 IS_GEN7(i965->intel.device_id))
2191 gen6_post_processing(ctx, pp_context, src_surface, src_rect, dst_surface, dst_rect, pp_index, filter_param);
/* else branch (missing 'else' line in this listing): Ironlake path. */
2193 ironlake_post_processing(ctx, pp_context, src_surface, src_rect, dst_surface, dst_rect, pp_index, filter_param);
/*
 * Forward declarations for the surface create/destroy entry points defined
 * elsewhere in the driver; used below for temporary PP surfaces.
 * (Return types and some parameter lines are missing from this listing.)
 */
2197 i965_DestroySurfaces(VADriverContextP ctx,
2198 VASurfaceID *surface_list,
2201 i965_CreateSurfaces(VADriverContextP ctx,
2206 VASurfaceID *surfaces);
/*
 * i965_post_processing — public PP entry used by the putsurface path.
 * Optionally deinterlaces the input into a temporary NV12 surface, then
 * optionally scales (AVS) to the drawable size into a second temporary
 * surface. Returns the id of the last output surface created, or
 * VA_INVALID_ID when no PP was applied; *has_done_scaling reports whether
 * the AVS pass ran. Only NV12 inputs are supported.
 */
2208 i965_post_processing(
2209 VADriverContextP ctx,
2210 VASurfaceID surface,
2211 const VARectangle *src_rect,
2212 const VARectangle *dst_rect,
2214 int *has_done_scaling
2217 struct i965_driver_data *i965 = i965_driver_data(ctx);
2218 VASurfaceID in_surface_id = surface;
2219 VASurfaceID out_surface_id = VA_INVALID_ID;
2221 *has_done_scaling = 0;
2224 struct object_surface *obj_surface;
2226 struct i965_surface src_surface;
2227 struct i965_surface dst_surface;
/* NOTE(review): SURFACE() result is used without a NULL check — confirm
 * callers guarantee a valid surface id. */
2229 obj_surface = SURFACE(in_surface_id);
2231 /* Currently only support post processing for NV12 surface */
2232 if (obj_surface->fourcc != VA_FOURCC('N', 'V', '1', '2'))
2233 return out_surface_id;
/* Pass 1: deinterlacing into a fresh NV12 surface of the same size. */
2235 if (flags & I965_PP_FLAG_DEINTERLACING) {
2236 status = i965_CreateSurfaces(ctx,
2237 obj_surface->orig_width,
2238 obj_surface->orig_height,
2239 VA_RT_FORMAT_YUV420,
2242 assert(status == VA_STATUS_SUCCESS);
2243 obj_surface = SURFACE(out_surface_id);
2244 i965_check_alloc_surface_bo(ctx, obj_surface, 0, VA_FOURCC('N','V','1','2'));
2246 src_surface.id = in_surface_id;
2247 src_surface.flag = I965_SURFACE_SURFACE;
2248 dst_surface.id = out_surface_id;
2249 dst_surface.flag = I965_SURFACE_SURFACE;
2251 i965_post_processing_internal(ctx, i965->pp_context,
/* Pass 2: AVS scaling to the drawable (dest_region) size. */
2260 if (flags & I965_PP_FLAG_AVS) {
2261 struct i965_render_state *render_state = &i965->render_state;
2262 struct intel_region *dest_region = render_state->draw_region;
/* Chain: if pass 1 produced a surface, it becomes the scaling input. */
2264 if (out_surface_id != VA_INVALID_ID)
2265 in_surface_id = out_surface_id;
2267 status = i965_CreateSurfaces(ctx,
2269 dest_region->height,
2270 VA_RT_FORMAT_YUV420,
2273 assert(status == VA_STATUS_SUCCESS);
2274 obj_surface = SURFACE(out_surface_id);
2275 i965_check_alloc_surface_bo(ctx, obj_surface, 0, VA_FOURCC('N','V','1','2'));
2277 src_surface.id = in_surface_id;
2278 src_surface.flag = I965_SURFACE_SURFACE;
2279 dst_surface.id = out_surface_id;
2280 dst_surface.flag = I965_SURFACE_SURFACE;
2282 i965_post_processing_internal(ctx, i965->pp_context,
/* Free the intermediate deinterlaced surface once it has been consumed. */
2290 if (in_surface_id != surface)
2291 i965_DestroySurfaces(ctx, &in_surface_id, 1);
2293 *has_done_scaling = 1;
2297 return out_surface_id;
/*
 * i965_image_i420_processing — copy/convert a planar I420/YV12 source to
 * the destination: pick the PL3->NV12 kernel when the destination is NV12,
 * otherwise the PL3->PL3 kernel. Flushes the batch and returns success.
 * (The argument lists of the internal calls are missing from this listing.)
 */
2301 i965_image_i420_processing(VADriverContextP ctx,
2302 const struct i965_surface *src_surface,
2303 const VARectangle *src_rect,
2304 const struct i965_surface *dst_surface,
2305 const VARectangle *dst_rect)
2307 struct i965_driver_data *i965 = i965_driver_data(ctx);
2308 struct i965_post_processing_context *pp_context = i965->pp_context;
2309 int fourcc = pp_get_surface_fourcc(ctx, dst_surface);
2311 if (fourcc == VA_FOURCC('N', 'V', '1', '2')) {
2312 i965_post_processing_internal(ctx, i965->pp_context,
2317 PP_PL3_LOAD_SAVE_N12,
2320 i965_post_processing_internal(ctx, i965->pp_context,
2325 PP_PL3_LOAD_SAVE_PL3,
2329 intel_batchbuffer_flush(pp_context->batch);
2331 return VA_STATUS_SUCCESS;
/*
 * i965_image_nv12_processing — copy/convert an NV12 source to the
 * destination: NV12->NV12 kernel when the destination is NV12, otherwise
 * NV12->PL3. Mirrors i965_image_i420_processing; flushes the batch.
 * (The argument lists of the internal calls are missing from this listing.)
 */
2335 i965_image_nv12_processing(VADriverContextP ctx,
2336 const struct i965_surface *src_surface,
2337 const VARectangle *src_rect,
2338 const struct i965_surface *dst_surface,
2339 const VARectangle *dst_rect)
2341 struct i965_driver_data *i965 = i965_driver_data(ctx);
2342 struct i965_post_processing_context *pp_context = i965->pp_context;
2343 int fourcc = pp_get_surface_fourcc(ctx, dst_surface);
2345 if (fourcc == VA_FOURCC('N', 'V', '1', '2')) {
2346 i965_post_processing_internal(ctx, i965->pp_context,
2351 PP_NV12_LOAD_SAVE_N12,
2354 i965_post_processing_internal(ctx, i965->pp_context,
2359 PP_NV12_LOAD_SAVE_PL3,
2363 intel_batchbuffer_flush(pp_context->batch);
2365 return VA_STATUS_SUCCESS;
/*
 * i965_image_processing — public image-conversion entry: dispatch on the
 * SOURCE surface's fourcc to the planar (I420/YV12) or NV12 handler.
 * Unsupported formats return VA_STATUS_ERROR_UNIMPLEMENTED.
 * (The switch statement and call argument lines are partly missing here.)
 */
2369 i965_image_processing(VADriverContextP ctx,
2370 const struct i965_surface *src_surface,
2371 const VARectangle *src_rect,
2372 const struct i965_surface *dst_surface,
2373 const VARectangle *dst_rect)
2375 struct i965_driver_data *i965 = i965_driver_data(ctx);
2376 VAStatus status = VA_STATUS_ERROR_UNIMPLEMENTED;
2379 int fourcc = pp_get_surface_fourcc(ctx, src_surface);
/* YV12 and I420 share the 3-plane path. */
2382 case VA_FOURCC('Y', 'V', '1', '2'):
2383 case VA_FOURCC('I', '4', '2', '0'):
2384 status = i965_image_i420_processing(ctx,
2391 case VA_FOURCC('N', 'V', '1', '2'):
2392 status = i965_image_nv12_processing(ctx,
/* default: format not handled. */
2400 status = VA_STATUS_ERROR_UNIMPLEMENTED;
/*
 * i965_post_processing_context_finalize — release every buffer object the
 * PP context owns and NULL the pointers so the context can be finalized
 * safely more than once (dri_bo_unreference accepts the resulting NULLs).
 */
2409 i965_post_processing_context_finalize(struct i965_post_processing_context *pp_context)
2413 dri_bo_unreference(pp_context->surface_state_binding_table.bo);
2414 pp_context->surface_state_binding_table.bo = NULL;
2416 dri_bo_unreference(pp_context->curbe.bo);
2417 pp_context->curbe.bo = NULL;
2419 dri_bo_unreference(pp_context->sampler_state_table.bo);
2420 pp_context->sampler_state_table.bo = NULL;
2422 dri_bo_unreference(pp_context->sampler_state_table.bo_8x8);
2423 pp_context->sampler_state_table.bo_8x8 = NULL;
2425 dri_bo_unreference(pp_context->sampler_state_table.bo_8x8_uv);
2426 pp_context->sampler_state_table.bo_8x8_uv = NULL;
2428 dri_bo_unreference(pp_context->idrt.bo);
2429 pp_context->idrt.bo = NULL;
2430 pp_context->idrt.num_interface_descriptors = 0;
2432 dri_bo_unreference(pp_context->vfe_state.bo);
2433 pp_context->vfe_state.bo = NULL;
2435 dri_bo_unreference(pp_context->stmm.bo);
2436 pp_context->stmm.bo = NULL;
/* Drop the per-module kernel BOs allocated in context_init. */
2438 for (i = 0; i < NUM_PP_MODULES; i++) {
2439 struct pp_module *pp_module = &pp_context->pp_modules[i];
2441 dri_bo_unreference(pp_module->kernel.bo);
2442 pp_module->kernel.bo = NULL;
/*
 * i965_post_processing_terminate — tear down the driver-wide PP context:
 * finalize its GPU resources and clear the driver-data pointer.
 * NOTE(review): a NULL-check / free(pp_context) presumably sits on the
 * lines missing from this listing — confirm against the full source.
 */
2448 i965_post_processing_terminate(VADriverContextP ctx)
2450 struct i965_driver_data *i965 = i965_driver_data(ctx);
2451 struct i965_post_processing_context *pp_context = i965->pp_context;
2454 i965_post_processing_context_finalize(pp_context);
2458 i965->pp_context = NULL;
/*
 * i965_post_processing_context_init — one-time setup of a PP context:
 * partition the URB between VFE entries and the constant (CS) area, select
 * the kernel table for this hardware generation, upload each kernel binary
 * into its own BO, and attach the batchbuffer the context will emit into.
 */
2464 i965_post_processing_context_init(VADriverContextP ctx,
2465 struct i965_post_processing_context *pp_context,
2466 struct intel_batchbuffer *batch)
2468 struct i965_driver_data *i965 = i965_driver_data(ctx);
/* URB layout: VFE entries first, CS (constant) area immediately after. */
2471 pp_context->urb.size = URB_SIZE((&i965->intel));
2472 pp_context->urb.num_vfe_entries = 32;
2473 pp_context->urb.size_vfe_entry = 1; /* in 512 bits unit */
2474 pp_context->urb.num_cs_entries = 1;
2475 pp_context->urb.size_cs_entry = 2; /* in 512 bits unit */
2476 pp_context->urb.vfe_start = 0;
2477 pp_context->urb.cs_start = pp_context->urb.vfe_start +
2478 pp_context->urb.num_vfe_entries * pp_context->urb.size_vfe_entry;
/* Sanity: the partition must fit within the device URB. */
2479 assert(pp_context->urb.cs_start +
2480 pp_context->urb.num_cs_entries * pp_context->urb.size_cs_entry <= URB_SIZE((&i965->intel)));
2482 assert(NUM_PP_MODULES == ARRAY_ELEMS(pp_modules_gen5));
2483 assert(NUM_PP_MODULES == ARRAY_ELEMS(pp_modules_gen6));
/* Gen6/Gen7 share one kernel set; Ironlake (Gen5) uses another. */
2485 if (IS_GEN6(i965->intel.device_id) ||
2486 IS_GEN7(i965->intel.device_id))
2487 memcpy(pp_context->pp_modules, pp_modules_gen6, sizeof(pp_context->pp_modules));
2488 else if (IS_IRONLAKE(i965->intel.device_id))
2489 memcpy(pp_context->pp_modules, pp_modules_gen5, sizeof(pp_context->pp_modules));
/* Upload each kernel binary into a dedicated BO; modules without a
 * binary get a NULL BO (see the else at original line 2502). */
2491 for (i = 0; i < NUM_PP_MODULES; i++) {
2492 struct pp_module *pp_module = &pp_context->pp_modules[i];
2493 dri_bo_unreference(pp_module->kernel.bo);
2494 if (pp_module->kernel.bin) {
2495 pp_module->kernel.bo = dri_bo_alloc(i965->intel.bufmgr,
2496 pp_module->kernel.name,
2497 pp_module->kernel.size,
2499 assert(pp_module->kernel.bo);
2500 dri_bo_subdata(pp_module->kernel.bo, 0, pp_module->kernel.size, pp_module->kernel.bin);
2502 pp_module->kernel.bo = NULL;
2506 pp_context->batch = batch;
/*
 * i965_post_processing_init — lazily create the driver-wide PP context on
 * first use and initialize it against the driver's shared batchbuffer.
 * NOTE(review): the calloc result is passed to context_init without a NULL
 * check — an OOM here would dereference NULL; confirm against full source.
 */
2510 i965_post_processing_init(VADriverContextP ctx)
2512 struct i965_driver_data *i965 = i965_driver_data(ctx);
2513 struct i965_post_processing_context *pp_context = i965->pp_context;
2516 if (pp_context == NULL) {
2517 pp_context = calloc(1, sizeof(*pp_context));
2518 i965_post_processing_context_init(ctx, pp_context, i965->batch);
2519 i965->pp_context = pp_context;
/*
 * procfilter_to_pp_flag — maps each VAProcFilterType ordinal to the PP
 * kernel index that implements it; PP_NULL marks unimplemented filters.
 * Only noise reduction (DN) and deinterlacing (DNDI) are wired up.
 * (The closing "};" falls on a line missing from this listing.)
 */
2526 static const int procfilter_to_pp_flag[10] = {
2527 PP_NULL, /* VAProcFilterNone */
2528 PP_NULL, /* VAProcFilterDering */
2529 PP_NULL, /* VAProcFilterDeblocking */
2530 PP_NV12_DN, /* VAProcFilterNoiseReduction */
2531 PP_NV12_DNDI, /* VAProcFilterDeinterlacing */
2532 PP_NULL, /* VAProcFilterSharpening */
2533 PP_NULL, /* VAProcFilterColorEnhancement */
2534 PP_NULL, /* VAProcFilterProcAmp */
2535 PP_NULL, /* VAProcFilterComposition */
2536 PP_NULL, /* VAProcFilterFrameRateConversion */
/*
 * i965_proc_picture — VPP render entry point (hw_context->run): applies
 * the filter pipeline from the pipeline parameter buffer, chaining each
 * implemented filter through a freshly created temporary NV12 surface,
 * then scales/copies the final result into the current render target via
 * PP_NV12_AVS. All temporaries are destroyed and the batch is flushed.
 */
2540 i965_proc_picture(VADriverContextP ctx,
2542 union codec_state *codec_state,
2543 struct hw_context *hw_context)
2545 struct i965_driver_data *i965 = i965_driver_data(ctx);
2546 struct i965_proc_context *proc_context = (struct i965_proc_context *)hw_context;
2547 struct proc_state *proc_state = &codec_state->proc;
2548 VAProcPipelineParameterBuffer *pipeline_param = (VAProcPipelineParameterBuffer *)proc_state->pipeline_param->buffer;
2549 VAProcInputParameterBuffer *input_param = (VAProcInputParameterBuffer *)proc_state->input_param->buffer;
2550 struct object_surface *obj_surface;
2551 struct i965_surface src_surface, dst_surface;
2554 VASurfaceID tmp_surfaces[VA_PROC_PIPELINE_MAX_NUM_FILTERS];
2555 int num_tmp_surfaces = 0;
2557 assert(input_param->surface != VA_INVALID_ID);
2558 assert(proc_state->current_render_target != VA_INVALID_ID);
/* Make sure the render target has an NV12 BO to write into. */
2560 obj_surface = SURFACE(proc_state->current_render_target);
2561 i965_check_alloc_surface_bo(ctx, obj_surface, 0, VA_FOURCC('N','V','1','2'));
2563 obj_surface = SURFACE(input_param->surface);
2564 assert(obj_surface->fourcc == VA_FOURCC('N', 'V', '1', '2'));
2566 src_surface.id = input_param->surface;
2567 src_surface.flag = I965_SURFACE_SURFACE;
/* Run each requested filter; the output of one becomes the input of the
 * next (src_surface.id = dst_surface.id at the end of the iteration). */
2569 for (i = 0; i < VA_PROC_PIPELINE_MAX_NUM_FILTERS; i++) {
/* NOTE(review): filter_type comes from an app-supplied buffer and is
 * used to index the 10-entry procfilter_to_pp_flag table with no bounds
 * check — confirm validation happens before this point. */
2570 VAProcFilterType filter_type = pipeline_param->filter_pipeline[i];
2571 VASurfaceID out_surface_id = VA_INVALID_ID;
2572 void *filter_param = NULL;
2574 if (procfilter_to_pp_flag[filter_type] != PP_NULL) {
2575 if (proc_state->filter_param[filter_type])
2576 filter_param = proc_state->filter_param[filter_type]->buffer;
2578 status = i965_CreateSurfaces(ctx,
2579 obj_surface->orig_width,
2580 obj_surface->orig_height,
2581 VA_RT_FORMAT_YUV420,
2584 assert(status == VA_STATUS_SUCCESS);
2585 tmp_surfaces[num_tmp_surfaces++] = out_surface_id;
2586 obj_surface = SURFACE(out_surface_id);
2587 i965_check_alloc_surface_bo(ctx, obj_surface, 0, VA_FOURCC('N','V','1','2'));
2588 dst_surface.id = out_surface_id;
2589 dst_surface.flag = I965_SURFACE_SURFACE;
2590 i965_post_processing_internal(ctx, &proc_context->pp_context,
2592 &input_param->region,
2594 &input_param->region,
2595 procfilter_to_pp_flag[filter_type],
2597 src_surface.id = dst_surface.id;
/* Final pass: scale the (possibly filtered) source into the render
 * target's output region. */
2601 dst_surface.id = proc_state->current_render_target;
2602 dst_surface.flag = I965_SURFACE_SURFACE;
2603 i965_post_processing_internal(ctx, &proc_context->pp_context,
2605 &input_param->region,
2607 &pipeline_param->output_region,
/* Clean up every temporary surface created above. */
2611 if (num_tmp_surfaces)
2612 i965_DestroySurfaces(ctx,
2616 intel_batchbuffer_flush(hw_context->batch);
/*
 * i965_proc_context_destroy — hw_context destroy hook: release the PP
 * context's GPU resources and free the private batchbuffer.
 * NOTE(review): the free(proc_context) presumably sits on a line missing
 * from this listing — confirm against the full source.
 */
2620 i965_proc_context_destroy(void *hw_context)
2622 struct i965_proc_context *proc_context = (struct i965_proc_context *)hw_context;
2624 i965_post_processing_context_finalize(&proc_context->pp_context);
2625 intel_batchbuffer_free(proc_context->base.batch);
2630 i965_proc_context_init(VADriverContextP ctx, VAProfile profile)
2632 struct intel_driver_data *intel = intel_driver_data(ctx);
2633 struct i965_proc_context *proc_context = calloc(1, sizeof(struct i965_proc_context));
2635 proc_context->base.destroy = i965_proc_context_destroy;
2636 proc_context->base.run = i965_proc_picture;
2637 proc_context->base.batch = intel_batchbuffer_new(intel, I915_EXEC_RENDER);
2638 i965_post_processing_context_init(ctx, &proc_context->pp_context, proc_context->base.batch);
2640 return (struct hw_context *)proc_context;