2 * Copyright © 2009 Intel Corporation
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the
6 * "Software"), to deal in the Software without restriction, including
7 * without limitation the rights to use, copy, modify, merge, publish,
8 * distribute, sub license, and/or sell copies of the Software, and to
9 * permit persons to whom the Software is furnished to do so, subject to
10 * the following conditions:
12 * The above copyright notice and this permission notice (including the
13 * next paragraph) shall be included in all copies or substantial portions
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
17 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
18 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
19 * IN NO EVENT SHALL PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR
20 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
21 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
22 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
25 * Xiang Haihao <haihao.xiang@intel.com>
26 * Zou Nan hai <nanhai.zou@intel.com>
36 #include <va/va_dricommon.h>
38 #include "intel_driver.h"
39 #include "intel_memman.h"
40 #include "intel_batchbuffer.h"
41 #include "i965_defines.h"
42 #include "i965_drv_video.h"
/* Object ID namespaces: each VA object type is allocated from its own
 * heap and carries a distinct high-bit prefix, so IDs from different
 * object kinds can never collide. */
44 #define CONFIG_ID_OFFSET 0x01000000
45 #define CONTEXT_ID_OFFSET 0x02000000
46 #define SURFACE_ID_OFFSET 0x04000000
47 #define BUFFER_ID_OFFSET 0x08000000
48 #define IMAGE_ID_OFFSET 0x0a000000
49 #define SUBPIC_ID_OFFSET 0x10000000
/* Per-generation capability predicates, keyed off the PCI device id.
 * NOTE(review): several macro bodies are truncated in this view
 * (HAS_H264 / HAS_VC1 / HAS_ENCODER / HAS_JPEG end mid-expression);
 * the trailing feature-flag term, e.g. (ctx)->intel.has_bsd, is not
 * visible here -- confirm against the full file. */
51 #define HAS_MPEG2(ctx) (IS_G4X((ctx)->intel.device_id) || \
52 IS_IRONLAKE((ctx)->intel.device_id) || \
53 ((IS_GEN6((ctx)->intel.device_id) || \
54 IS_GEN7((ctx)->intel.device_id)) && \
55 (ctx)->intel.has_bsd))
57 #define HAS_H264(ctx) ((IS_GEN7((ctx)->intel.device_id) || \
58 IS_GEN6((ctx)->intel.device_id) || \
59 IS_IRONLAKE((ctx)->intel.device_id)) && \
62 #define HAS_VC1(ctx) ((IS_GEN7((ctx)->intel.device_id) || \
63 IS_GEN6((ctx)->intel.device_id)) && \
66 #define HAS_TILED_SURFACE(ctx) ((IS_GEN7((ctx)->intel.device_id) || \
67 IS_GEN6((ctx)->intel.device_id)))
69 #define HAS_ENCODER(ctx) ((IS_GEN7((ctx)->intel.device_id) || \
70 IS_GEN6((ctx)->intel.device_id)) && \
73 #define HAS_VPP(ctx) (IS_IRONLAKE((ctx)->intel.device_id) || \
74 IS_GEN6((ctx)->intel.device_id) || \
75 IS_GEN7((ctx)->intel.device_id))
77 #define HAS_JPEG(ctx) (IS_GEN7((ctx)->intel.device_id) && \
80 #define HAS_ACCELERATED_GETIMAGE(ctx) (IS_GEN6((ctx)->intel.device_id) || \
81 IS_GEN7((ctx)->intel.device_id))
83 #define HAS_ACCELERATED_PUTIMAGE(ctx) HAS_VPP(ctx)
/* Surface type tags used by the image/subpicture format tables below.
 * NOTE(review): the enum opening and its first enumerator (presumably
 * a YUV type) are not visible in this chunk. */
86 I965_SURFACETYPE_RGBA = 1,
88 I965_SURFACETYPE_INDEXED
91 /* List of supported image formats */
/* Table-entry type pairing a surface-type tag with a VAImageFormat.
 * NOTE(review): the struct opening and its first member are not
 * visible here. */
94 VAImageFormat va_format;
95 } i965_image_format_map_t;
/* Supported VAImage formats: planar YV12/I420 and interleaved NV12,
 * all 12 bpp 4:2:0.  The table is sized for a zero terminator entry
 * that i965_QueryImageFormats relies on (fourcc == 0 stops iteration).
 * NOTE(review): the closing brace of the initializer is not visible
 * in this chunk. */
97 static const i965_image_format_map_t
98 i965_image_formats_map[I965_MAX_IMAGE_FORMATS + 1] = {
99 { I965_SURFACETYPE_YUV,
100 { VA_FOURCC('Y','V','1','2'), VA_LSB_FIRST, 12, } },
101 { I965_SURFACETYPE_YUV,
102 { VA_FOURCC('I','4','2','0'), VA_LSB_FIRST, 12, } },
103 { I965_SURFACETYPE_YUV,
104 { VA_FOURCC('N','V','1','2'), VA_LSB_FIRST, 12, } },
107 /* List of supported subpicture formats */
/* Table-entry type: surface-type tag, hardware surface format, the
 * matching VAImageFormat, and the VA_SUBPICTURE_* flags advertised
 * for it.  NOTE(review): the struct opening and its first members
 * are not visible in this chunk. */
111 VAImageFormat va_format;
112 unsigned int va_flags;
113 } i965_subpic_format_map_t;
/* Supported subpicture formats: two 8-bit indexed-alpha formats and
 * two 32-bit RGBA layouts.  Terminated by a zero entry (type == 0)
 * that get_subpic_format relies on. */
115 static const i965_subpic_format_map_t
116 i965_subpic_formats_map[I965_MAX_SUBPIC_FORMATS + 1] = {
117 { I965_SURFACETYPE_INDEXED, I965_SURFACEFORMAT_P4A4_UNORM,
118 { VA_FOURCC('I','A','4','4'), VA_MSB_FIRST, 8, },
119 VA_SUBPICTURE_DESTINATION_IS_SCREEN_COORD },
120 { I965_SURFACETYPE_INDEXED, I965_SURFACEFORMAT_A4P4_UNORM,
121 { VA_FOURCC('A','I','4','4'), VA_MSB_FIRST, 8, },
122 VA_SUBPICTURE_DESTINATION_IS_SCREEN_COORD },
123 { I965_SURFACETYPE_RGBA, I965_SURFACEFORMAT_B8G8R8A8_UNORM,
124 { VA_FOURCC('B','G','R','A'), VA_LSB_FIRST, 32,
125 32, 0x00ff0000, 0x0000ff00, 0x000000ff, 0xff000000 },
126 VA_SUBPICTURE_DESTINATION_IS_SCREEN_COORD },
127 { I965_SURFACETYPE_RGBA, I965_SURFACEFORMAT_R8G8B8A8_UNORM,
128 { VA_FOURCC('R','G','B','A'), VA_LSB_FIRST, 32,
129 32, 0x000000ff, 0x0000ff00, 0x00ff0000, 0xff000000 },
130 VA_SUBPICTURE_DESTINATION_IS_SCREEN_COORD },
/* Look up the subpicture table entry matching va_format, or NULL-like
 * failure (the return statements are not visible in this chunk).
 * For RGBA formats the byte order and all four channel masks must
 * match exactly; for indexed formats the fourcc alone suffices.
 * NOTE(review): the opening brace, the declaration of i, and the
 * function tail are missing from this view. */
133 static const i965_subpic_format_map_t *
134 get_subpic_format(const VAImageFormat *va_format)
/* Table is terminated by an entry whose type field is 0. */
137 for (i = 0; i965_subpic_formats_map[i].type != 0; i++) {
138 const i965_subpic_format_map_t * const m = &i965_subpic_formats_map[i];
139 if (m->va_format.fourcc == va_format->fourcc &&
140 (m->type == I965_SURFACETYPE_RGBA ?
141 (m->va_format.byte_order == va_format->byte_order &&
142 m->va_format.red_mask == va_format->red_mask &&
143 m->va_format.green_mask == va_format->green_mask &&
144 m->va_format.blue_mask == va_format->blue_mask &&
145 m->va_format.alpha_mask == va_format->alpha_mask) : 1))
/* Per-generation codec vtables: each hw_codec_info bundles the
 * decoder / encoder / video-processing context constructors for one
 * hardware generation.  NULL means the capability is absent (e.g. no
 * encoder before Gen6, no VPP on G4X).  NOTE(review): the closing
 * braces and any max_width/max_height members of these initializers
 * are not visible in this chunk. */
151 extern struct hw_context *i965_proc_context_init(VADriverContextP, struct object_config *);
152 extern struct hw_context *g4x_dec_hw_context_init(VADriverContextP, struct object_config *);
153 static struct hw_codec_info g4x_hw_codec_info = {
154 .dec_hw_context_init = g4x_dec_hw_context_init,
155 .enc_hw_context_init = NULL,
156 .proc_hw_context_init = NULL,
161 extern struct hw_context *ironlake_dec_hw_context_init(VADriverContextP, struct object_config *);
162 static struct hw_codec_info ironlake_hw_codec_info = {
163 .dec_hw_context_init = ironlake_dec_hw_context_init,
164 .enc_hw_context_init = NULL,
165 .proc_hw_context_init = i965_proc_context_init,
170 extern struct hw_context *gen6_dec_hw_context_init(VADriverContextP, struct object_config *);
171 extern struct hw_context *gen6_enc_hw_context_init(VADriverContextP, struct object_config *);
172 static struct hw_codec_info gen6_hw_codec_info = {
173 .dec_hw_context_init = gen6_dec_hw_context_init,
174 .enc_hw_context_init = gen6_enc_hw_context_init,
175 .proc_hw_context_init = i965_proc_context_init,
180 extern struct hw_context *gen7_dec_hw_context_init(VADriverContextP, struct object_config *);
181 extern struct hw_context *gen7_enc_hw_context_init(VADriverContextP, struct object_config *);
182 static struct hw_codec_info gen7_hw_codec_info = {
183 .dec_hw_context_init = gen7_dec_hw_context_init,
184 .enc_hw_context_init = gen7_enc_hw_context_init,
185 .proc_hw_context_init = i965_proc_context_init,
/* vaQueryConfigProfiles: fill profile_list (caller-provided, at least
 * I965_MAX_PROFILES entries) with every VAProfile this device can
 * handle, gated by the per-generation HAS_* predicates.
 * NOTE(review): the opening brace, the counter declaration, the
 * HAS_VC1 / HAS_VPP guards and the *num_profiles store are not
 * visible in this chunk. */
191 i965_QueryConfigProfiles(VADriverContextP ctx,
192 VAProfile *profile_list, /* out */
193 int *num_profiles) /* out */
195 struct i965_driver_data * const i965 = i965_driver_data(ctx);
198 if (HAS_MPEG2(i965)) {
199 profile_list[i++] = VAProfileMPEG2Simple;
200 profile_list[i++] = VAProfileMPEG2Main;
203 if (HAS_H264(i965)) {
204 profile_list[i++] = VAProfileH264Baseline;
205 profile_list[i++] = VAProfileH264Main;
206 profile_list[i++] = VAProfileH264High;
/* VC-1 profiles; guard not visible here -- presumably HAS_VC1. */
210 profile_list[i++] = VAProfileVC1Simple;
211 profile_list[i++] = VAProfileVC1Main;
212 profile_list[i++] = VAProfileVC1Advanced;
/* VAProfileNone is the video-processing profile; guard presumably HAS_VPP. */
216 profile_list[i++] = VAProfileNone;
219 if (HAS_JPEG(i965)) {
220 profile_list[i++] = VAProfileJPEGBaseline;
223 /* If the assert fails then I965_MAX_PROFILES needs to be bigger */
224 assert(i <= I965_MAX_PROFILES);
227 return VA_STATUS_SUCCESS;
/* vaQueryConfigEntrypoints: list the entrypoints supported for a
 * given profile -- VLD for all decodable codecs, plus EncSlice for
 * H.264 when the encoder is present, and VideoProc for VAProfileNone.
 * NOTE(review): the profile parameter, the switch statement opening,
 * the per-case HAS_* guards and break statements are not visible in
 * this chunk. */
231 i965_QueryConfigEntrypoints(VADriverContextP ctx,
233 VAEntrypoint *entrypoint_list, /* out */
234 int *num_entrypoints) /* out */
236 struct i965_driver_data * const i965 = i965_driver_data(ctx);
240 case VAProfileMPEG2Simple:
241 case VAProfileMPEG2Main:
243 entrypoint_list[n++] = VAEntrypointVLD;
246 case VAProfileH264Baseline:
247 case VAProfileH264Main:
248 case VAProfileH264High:
250 entrypoint_list[n++] = VAEntrypointVLD;
252 if (HAS_ENCODER(i965))
253 entrypoint_list[n++] = VAEntrypointEncSlice;
257 case VAProfileVC1Simple:
258 case VAProfileVC1Main:
259 case VAProfileVC1Advanced:
261 entrypoint_list[n++] = VAEntrypointVLD;
/* VAProfileNone case: video processing entrypoint. */
266 entrypoint_list[n++] = VAEntrypointVideoProc;
269 case VAProfileJPEGBaseline:
271 entrypoint_list[n++] = VAEntrypointVLD;
278 /* If the assert fails then I965_MAX_ENTRYPOINTS needs to be bigger */
279 assert(n <= I965_MAX_ENTRYPOINTS);
280 *num_entrypoints = n;
281 return n > 0 ? VA_STATUS_SUCCESS : VA_STATUS_ERROR_UNSUPPORTED_PROFILE;
/* vaGetConfigAttributes: report the supported value set for each
 * queried attribute.  Only YUV 4:2:0 render targets are advertised;
 * rate control and packed-header support are reported only for the
 * encode entrypoint.  Unknown attributes get VA_ATTRIB_NOT_SUPPORTED.
 * NOTE(review): the profile/num_attribs parameters, opening brace,
 * break statements and the default case label are not visible in
 * this chunk. */
285 i965_GetConfigAttributes(VADriverContextP ctx,
287 VAEntrypoint entrypoint,
288 VAConfigAttrib *attrib_list, /* in/out */
293 /* Other attributes don't seem to be defined */
294 /* What to do if we don't know the attribute? */
295 for (i = 0; i < num_attribs; i++) {
296 switch (attrib_list[i].type) {
297 case VAConfigAttribRTFormat:
298 attrib_list[i].value = VA_RT_FORMAT_YUV420;
301 case VAConfigAttribRateControl:
302 if (entrypoint == VAEntrypointEncSlice) {
303 attrib_list[i].value = VA_RC_CBR | VA_RC_CQP;
307 case VAConfigAttribEncPackedHeaders:
308 if (entrypoint == VAEntrypointEncSlice) {
309 attrib_list[i].value = VA_ENC_PACKED_HEADER_SEQUENCE | VA_ENC_PACKED_HEADER_PICTURE;
/* Fallback for unrecognized attribute types. */
315 attrib_list[i].value = VA_ATTRIB_NOT_SUPPORTED;
320 return VA_STATUS_SUCCESS;
/* Heap destructor for config objects: nothing to release beyond the
 * heap slot itself. */
324 i965_destroy_config(struct object_heap *heap, struct object_base *obj)
326 object_heap_free(heap, obj);
/* Set or update a single attribute on a config object: overwrite the
 * value if the type already exists, otherwise append, failing with
 * MAX_NUM_EXCEEDED once the fixed attrib_list array is full.
 * NOTE(review): the return type line, opening brace and the
 * declaration of i are not visible in this chunk. */
330 i965_update_attribute(struct object_config *obj_config, VAConfigAttrib *attrib)
334 /* Check existing attributes */
335 for (i = 0; i < obj_config->num_attribs; i++) {
336 if (obj_config->attrib_list[i].type == attrib->type) {
337 /* Update existing attribute */
338 obj_config->attrib_list[i].value = attrib->value;
339 return VA_STATUS_SUCCESS;
/* Not found: append if there is still room in the fixed array. */
343 if (obj_config->num_attribs < I965_MAX_CONFIG_ATTRIBUTES) {
344 i = obj_config->num_attribs;
345 obj_config->attrib_list[i].type = attrib->type;
346 obj_config->attrib_list[i].value = attrib->value;
347 obj_config->num_attribs++;
348 return VA_STATUS_SUCCESS;
351 return VA_STATUS_ERROR_MAX_NUM_EXCEEDED;
/* vaCreateConfig: validate the profile/entrypoint pair against the
 * device capabilities, allocate a config object, seed it with the
 * default RTFormat attribute, then merge the caller's attributes.
 * On any failure after allocation the object is destroyed again.
 * NOTE(review): the profile and num_attribs parameters, the switch
 * opening, break statements, default case, error labels and the
 * final return are not visible in this chunk. */
355 i965_CreateConfig(VADriverContextP ctx,
357 VAEntrypoint entrypoint,
358 VAConfigAttrib *attrib_list,
360 VAConfigID *config_id) /* out */
362 struct i965_driver_data * const i965 = i965_driver_data(ctx);
363 struct object_config *obj_config;
368 /* Validate profile & entrypoint */
370 case VAProfileMPEG2Simple:
371 case VAProfileMPEG2Main:
372 if (HAS_MPEG2(i965) && VAEntrypointVLD == entrypoint) {
373 vaStatus = VA_STATUS_SUCCESS;
375 vaStatus = VA_STATUS_ERROR_UNSUPPORTED_ENTRYPOINT;
379 case VAProfileH264Baseline:
380 case VAProfileH264Main:
381 case VAProfileH264High:
382 if ((HAS_H264(i965) && VAEntrypointVLD == entrypoint) ||
383 (HAS_ENCODER(i965) && VAEntrypointEncSlice == entrypoint)) {
384 vaStatus = VA_STATUS_SUCCESS;
386 vaStatus = VA_STATUS_ERROR_UNSUPPORTED_ENTRYPOINT;
391 case VAProfileVC1Simple:
392 case VAProfileVC1Main:
393 case VAProfileVC1Advanced:
394 if (HAS_VC1(i965) && VAEntrypointVLD == entrypoint) {
395 vaStatus = VA_STATUS_SUCCESS;
397 vaStatus = VA_STATUS_ERROR_UNSUPPORTED_ENTRYPOINT;
/* VAProfileNone case (video processing). */
403 if (HAS_VPP(i965) && VAEntrypointVideoProc == entrypoint) {
404 vaStatus = VA_STATUS_SUCCESS;
406 vaStatus = VA_STATUS_ERROR_UNSUPPORTED_ENTRYPOINT;
411 case VAProfileJPEGBaseline:
412 if (HAS_JPEG(i965) && VAEntrypointVLD == entrypoint) {
413 vaStatus = VA_STATUS_SUCCESS;
415 vaStatus = VA_STATUS_ERROR_UNSUPPORTED_ENTRYPOINT;
/* Default case: profile not recognized at all. */
421 vaStatus = VA_STATUS_ERROR_UNSUPPORTED_PROFILE;
425 if (VA_STATUS_SUCCESS != vaStatus) {
/* Allocate the config object from its ID heap. */
429 configID = NEW_CONFIG_ID();
430 obj_config = CONFIG(configID);
432 if (NULL == obj_config) {
433 vaStatus = VA_STATUS_ERROR_ALLOCATION_FAILED;
/* Seed the default attribute set before merging the caller's. */
437 obj_config->profile = profile;
438 obj_config->entrypoint = entrypoint;
439 obj_config->attrib_list[0].type = VAConfigAttribRTFormat;
440 obj_config->attrib_list[0].value = VA_RT_FORMAT_YUV420;
441 obj_config->num_attribs = 1;
443 for(i = 0; i < num_attribs; i++) {
444 vaStatus = i965_update_attribute(obj_config, &(attrib_list[i]));
446 if (VA_STATUS_SUCCESS != vaStatus) {
/* Error recovery: tear down the half-built config object. */
452 if (VA_STATUS_SUCCESS != vaStatus) {
453 i965_destroy_config(&i965->config_heap, (struct object_base *)obj_config);
455 *config_id = configID;
/* vaDestroyConfig: validate the handle, then release the config
 * object back to its heap.  NOTE(review): the opening brace and the
 * error-path return are not visible in this chunk. */
462 i965_DestroyConfig(VADriverContextP ctx, VAConfigID config_id)
464 struct i965_driver_data *i965 = i965_driver_data(ctx);
465 struct object_config *obj_config = CONFIG(config_id);
468 if (NULL == obj_config) {
469 vaStatus = VA_STATUS_ERROR_INVALID_CONFIG;
473 i965_destroy_config(&i965->config_heap, (struct object_base *)obj_config);
474 return VA_STATUS_SUCCESS;
/* vaQueryConfigAttributes: copy a config's profile, entrypoint and
 * full attribute list back to the caller.  NOTE(review): the opening
 * brace, any NULL-check on obj_config, the declaration of i and the
 * final return are not visible in this chunk -- obj_config appears
 * to be dereferenced without validation; confirm against the full
 * file. */
477 VAStatus i965_QueryConfigAttributes(VADriverContextP ctx,
478 VAConfigID config_id,
479 VAProfile *profile, /* out */
480 VAEntrypoint *entrypoint, /* out */
481 VAConfigAttrib *attrib_list, /* out */
482 int *num_attribs) /* out */
484 struct i965_driver_data *i965 = i965_driver_data(ctx);
485 struct object_config *obj_config = CONFIG(config_id);
486 VAStatus vaStatus = VA_STATUS_SUCCESS;
490 *profile = obj_config->profile;
491 *entrypoint = obj_config->entrypoint;
492 *num_attribs = obj_config->num_attribs;
494 for(i = 0; i < obj_config->num_attribs; i++) {
495 attrib_list[i] = obj_config->attrib_list[i];
/* Heap destructor for surface objects: drop the GEM buffer reference,
 * run the codec's private-data destructor if one was installed, then
 * release the heap slot. */
502 i965_destroy_surface(struct object_heap *heap, struct object_base *obj)
504 struct object_surface *obj_surface = (struct object_surface *)obj;
506 dri_bo_unreference(obj_surface->bo);
507 obj_surface->bo = NULL;
509 if (obj_surface->free_private_data != NULL) {
510 obj_surface->free_private_data(&obj_surface->private_data);
511 obj_surface->private_data = NULL;
514 object_heap_free(heap, obj);
/* vaCreateSurfaces2: allocate num_surfaces surface objects.  Only
 * VA_RT_FORMAT_YUV420 is supported.  A settable PixelFormat surface
 * attribute selects the fourcc; when present, the backing buffer
 * object is allocated eagerly (tiled where the hardware supports it,
 * except for non-NV12 fourccs -- the line forcing tiling off is
 * partially missing here).  On failure, already-created surfaces are
 * torn down.  NOTE(review): the width/height/format parameters, the
 * opening brace, loop bodies' closing braces and the final return
 * are not visible in this chunk. */
518 i965_CreateSurfaces2(
519 VADriverContextP ctx,
523 VASurfaceID *surfaces,
524 unsigned int num_surfaces,
525 VASurfaceAttrib *attrib_list,
526 unsigned int num_attribs
529 struct i965_driver_data *i965 = i965_driver_data(ctx);
531 VAStatus vaStatus = VA_STATUS_SUCCESS;
532 int expected_fourcc = 0;
/* Scan the attributes for a caller-chosen pixel format. */
534 for (i = 0; i < num_attribs && attrib_list; i++) {
535 if ((attrib_list[i].type == VASurfaceAttribPixelFormat) &&
536 (attrib_list[i].flags & VA_SURFACE_ATTRIB_SETTABLE)) {
537 assert(attrib_list[i].value.type == VAGenericValueTypeInteger);
538 expected_fourcc = attrib_list[i].value.value.i;
543 /* We only support one format */
544 if (VA_RT_FORMAT_YUV420 != format) {
545 return VA_STATUS_ERROR_UNSUPPORTED_RT_FORMAT;
548 for (i = 0; i < num_surfaces; i++) {
549 int surfaceID = NEW_SURFACE_ID();
550 struct object_surface *obj_surface = SURFACE(surfaceID);
552 if (NULL == obj_surface) {
553 vaStatus = VA_STATUS_ERROR_ALLOCATION_FAILED;
557 surfaces[i] = surfaceID;
558 obj_surface->status = VASurfaceReady;
559 obj_surface->subpic = VA_INVALID_ID;
560 obj_surface->orig_width = width;
561 obj_surface->orig_height = height;
/* Hardware surfaces are 16-aligned in both dimensions. */
563 obj_surface->width = ALIGN(width, 16);
564 obj_surface->height = ALIGN(height, 16);
565 obj_surface->flags = SURFACE_REFERENCED;
566 obj_surface->fourcc = 0;
567 obj_surface->bo = NULL;
568 obj_surface->locked_image_id = VA_INVALID_ID;
569 obj_surface->private_data = NULL;
570 obj_surface->free_private_data = NULL;
571 obj_surface->subsampling = SUBSAMPLE_YUV420;
573 if (expected_fourcc) {
574 int tiling = HAS_TILED_SURFACE(i965);
/* NOTE(review): the statement disabling tiling for non-NV12
 * fourccs is truncated here -- confirm against the full file. */
576 if (expected_fourcc != VA_FOURCC('N', 'V', '1', '2'))
579 i965_check_alloc_surface_bo(ctx, obj_surface, tiling, expected_fourcc, SUBSAMPLE_YUV420);
584 if (VA_STATUS_SUCCESS != vaStatus) {
585 /* surfaces[i-1] was the last successful allocation */
587 struct object_surface *obj_surface = SURFACE(surfaces[i]);
589 surfaces[i] = VA_INVALID_SURFACE;
591 i965_destroy_surface(&i965->surface_heap, (struct object_base *)obj_surface);
/* vaCreateSurfaces (legacy entrypoint): thin wrapper that forwards to
 * i965_CreateSurfaces2.  NOTE(review): the width/height/format/
 * num_surfaces parameters and the forwarded argument list are not
 * visible in this chunk. */
599 i965_CreateSurfaces(VADriverContextP ctx,
604 VASurfaceID *surfaces) /* out */
606 return i965_CreateSurfaces2(ctx,
/* vaDestroySurfaces: release each listed surface, iterating in
 * reverse allocation order.  NOTE(review): the num_surfaces
 * parameter and any per-surface NULL assertion are not visible in
 * this chunk. */
617 i965_DestroySurfaces(VADriverContextP ctx,
618 VASurfaceID *surface_list,
621 struct i965_driver_data *i965 = i965_driver_data(ctx);
624 for (i = num_surfaces; i--; ) {
625 struct object_surface *obj_surface = SURFACE(surface_list[i]);
628 i965_destroy_surface(&i965->surface_heap, (struct object_base *)obj_surface);
631 return VA_STATUS_SUCCESS;
/* vaQueryImageFormats: copy the static image-format table (terminated
 * by a zero-fourcc entry) into the caller's array.  NOTE(review): the
 * format_list NULL-guard and the *num_formats store are not visible
 * in this chunk. */
635 i965_QueryImageFormats(VADriverContextP ctx,
636 VAImageFormat *format_list, /* out */
637 int *num_formats) /* out */
641 for (n = 0; i965_image_formats_map[n].va_format.fourcc != 0; n++) {
642 const i965_image_format_map_t * const m = &i965_image_formats_map[n];
644 format_list[n] = m->va_format;
650 return VA_STATUS_SUCCESS;
654 * Guess the format when the usage of a VA surface is unknown
655 * 1. Without a valid context: YV12
656 * 2. The current context is valid:
657 * a) always NV12 on GEN6 and later
658 * b) I420 for MPEG-2 and NV12 for other codec on GEN4 & GEN5
/* Guess the fourcc (and, per the signature, tiling) for a surface
 * whose usage is unknown -- see the comment block above: YV12 without
 * a current context, NV12 on Gen6+, and on Gen4/Gen5 I420 for MPEG-2
 * vs NV12 for other codecs.  NOTE(review): the surface parameter,
 * *is_tiled initialization, the out/return labels and several break
 * statements are not visible in this chunk. */
661 i965_guess_surface_format(VADriverContextP ctx,
663 unsigned int *fourcc,
664 unsigned int *is_tiled)
666 struct i965_driver_data *i965 = i965_driver_data(ctx);
667 struct object_context *obj_context = NULL;
668 struct object_config *obj_config = NULL;
/* Default when no context is current. */
670 *fourcc = VA_FOURCC('Y', 'V', '1', '2');
673 if (i965->current_context_id == VA_INVALID_ID)
676 obj_context = CONTEXT(i965->current_context_id);
678 if (!obj_context || obj_context->config_id == VA_INVALID_ID)
681 obj_config = CONFIG(obj_context->config_id);
/* Gen6+ hardware always wants NV12. */
686 if (IS_GEN6(i965->intel.device_id) || IS_GEN7(i965->intel.device_id)) {
687 *fourcc = VA_FOURCC('N', 'V', '1', '2');
692 switch (obj_config->profile) {
693 case VAProfileMPEG2Simple:
694 case VAProfileMPEG2Main:
695 *fourcc = VA_FOURCC('I', '4', '2', '0');
/* Default case for the remaining codecs. */
700 *fourcc = VA_FOURCC('N', 'V', '1', '2');
/* vaQuerySubpictureFormats: copy the static subpicture-format table
 * and its per-format flag words into the caller's arrays.
 * NOTE(review): the NULL-guards on format_list/flags and the
 * *num_formats store are not visible in this chunk. */
707 i965_QuerySubpictureFormats(VADriverContextP ctx,
708 VAImageFormat *format_list, /* out */
709 unsigned int *flags, /* out */
710 unsigned int *num_formats) /* out */
714 for (n = 0; i965_subpic_formats_map[n].va_format.fourcc != 0; n++) {
715 const i965_subpic_format_map_t * const m = &i965_subpic_formats_map[n];
717 format_list[n] = m->va_format;
719 flags[n] = m->va_flags;
725 return VA_STATUS_SUCCESS;
/* Heap destructor for subpicture objects.  The subpicture does not
 * own its buffer object (it borrows the backing VAImage's bo -- see
 * i965_CreateSubpicture), so only the heap slot is released. */
729 i965_destroy_subpic(struct object_heap *heap, struct object_base *obj)
731 // struct object_subpic *obj_subpic = (struct object_subpic *)obj;
733 object_heap_free(heap, obj);
737 i965_CreateSubpicture(VADriverContextP ctx,
739 VASubpictureID *subpicture) /* out */
741 struct i965_driver_data *i965 = i965_driver_data(ctx);
742 VASubpictureID subpicID = NEW_SUBPIC_ID()
743 struct object_subpic *obj_subpic = SUBPIC(subpicID);
746 return VA_STATUS_ERROR_ALLOCATION_FAILED;
748 struct object_image *obj_image = IMAGE(image);
750 return VA_STATUS_ERROR_INVALID_IMAGE;
752 const i965_subpic_format_map_t * const m = get_subpic_format(&obj_image->image.format);
754 return VA_STATUS_ERROR_UNKNOWN; /* XXX: VA_STATUS_ERROR_UNSUPPORTED_FORMAT? */
756 *subpicture = subpicID;
757 obj_subpic->image = image;
758 obj_subpic->format = m->format;
759 obj_subpic->width = obj_image->image.width;
760 obj_subpic->height = obj_image->image.height;
761 obj_subpic->pitch = obj_image->image.pitches[0];
762 obj_subpic->bo = obj_image->bo;
763 return VA_STATUS_SUCCESS;
767 i965_DestroySubpicture(VADriverContextP ctx,
768 VASubpictureID subpicture)
770 struct i965_driver_data *i965 = i965_driver_data(ctx);
771 struct object_subpic *obj_subpic = SUBPIC(subpicture);
772 i965_destroy_subpic(&i965->subpic_heap, (struct object_base *)obj_subpic);
773 return VA_STATUS_SUCCESS;
/* vaSetSubpictureImage: not implemented by this driver.
 * NOTE(review): the image parameter and opening brace are not
 * visible in this chunk. */
777 i965_SetSubpictureImage(VADriverContextP ctx,
778 VASubpictureID subpicture,
782 return VA_STATUS_ERROR_UNIMPLEMENTED;
/* vaSetSubpictureChromakey: not implemented by this driver. */
786 i965_SetSubpictureChromakey(VADriverContextP ctx,
787 VASubpictureID subpicture,
788 unsigned int chromakey_min,
789 unsigned int chromakey_max,
790 unsigned int chromakey_mask)
793 return VA_STATUS_ERROR_UNIMPLEMENTED;
/* vaSetSubpictureGlobalAlpha: not implemented by this driver.
 * NOTE(review): the global_alpha parameter is not visible in this
 * chunk. */
797 i965_SetSubpictureGlobalAlpha(VADriverContextP ctx,
798 VASubpictureID subpicture,
802 return VA_STATUS_ERROR_UNIMPLEMENTED;
/* vaAssociateSubpicture: record the source and destination rectangles
 * plus flags on the subpicture object, then attach it to every target
 * surface.  Each surface holds at most one subpicture (the subpic
 * field is a single ID).  NOTE(review): the num_surfaces parameter,
 * src_y/dest_y parameters, opening brace and the NULL-checks that
 * precede the two early returns are not visible in this chunk. */
806 i965_AssociateSubpicture(VADriverContextP ctx,
807 VASubpictureID subpicture,
808 VASurfaceID *target_surfaces,
810 short src_x, /* upper left offset in subpicture */
812 unsigned short src_width,
813 unsigned short src_height,
814 short dest_x, /* upper left offset in surface */
816 unsigned short dest_width,
817 unsigned short dest_height,
819 * whether to enable chroma-keying or global-alpha
820 * see VA_SUBPICTURE_XXX values
824 struct i965_driver_data *i965 = i965_driver_data(ctx);
825 struct object_subpic *obj_subpic = SUBPIC(subpicture);
828 obj_subpic->src_rect.x = src_x;
829 obj_subpic->src_rect.y = src_y;
830 obj_subpic->src_rect.width = src_width;
831 obj_subpic->src_rect.height = src_height;
832 obj_subpic->dst_rect.x = dest_x;
833 obj_subpic->dst_rect.y = dest_y;
834 obj_subpic->dst_rect.width = dest_width;
835 obj_subpic->dst_rect.height = dest_height;
836 obj_subpic->flags = flags;
838 for (i = 0; i < num_surfaces; i++) {
839 struct object_surface *obj_surface = SURFACE(target_surfaces[i]);
841 return VA_STATUS_ERROR_INVALID_SURFACE;
842 obj_surface->subpic = subpicture;
844 return VA_STATUS_SUCCESS;
/* vaDeassociateSubpicture: detach the subpicture from each listed
 * surface, clearing the surface's subpic field only when it matches.
 * NOTE(review): the num_surfaces parameter, opening brace and the
 * NULL-check preceding the early return are not visible in this
 * chunk. */
849 i965_DeassociateSubpicture(VADriverContextP ctx,
850 VASubpictureID subpicture,
851 VASurfaceID *target_surfaces,
854 struct i965_driver_data *i965 = i965_driver_data(ctx);
857 for (i = 0; i < num_surfaces; i++) {
858 struct object_surface *obj_surface = SURFACE(target_surfaces[i]);
860 return VA_STATUS_ERROR_INVALID_SURFACE;
861 if (obj_surface->subpic == subpicture)
862 obj_surface->subpic = VA_INVALID_ID;
864 return VA_STATUS_SUCCESS;
/* Store buffer_store into *ptr and take a reference.  The assert
 * enforces that the destination slot is empty, so references are
 * never silently dropped.  NOTE(review): the assignment of *ptr is
 * not visible in this chunk. */
868 i965_reference_buffer_store(struct buffer_store **ptr,
869 struct buffer_store *buffer_store)
871 assert(*ptr == NULL);
874 buffer_store->ref_count++;
/* Drop one reference on *ptr's buffer store; on the last release,
 * free whichever backing it has (GEM bo or malloc'd buffer -- the
 * asserts enforce exactly one of the two).  NOTE(review): the final
 * free() of the store struct itself and the *ptr = NULL reset are
 * not visible in this chunk. */
880 i965_release_buffer_store(struct buffer_store **ptr)
882 struct buffer_store *buffer_store = *ptr;
884 if (buffer_store == NULL)
/* Exactly one of bo / buffer must back the store. */
887 assert(buffer_store->bo || buffer_store->buffer);
888 assert(!(buffer_store->bo && buffer_store->buffer));
889 buffer_store->ref_count--;
891 if (buffer_store->ref_count == 0) {
892 dri_bo_unreference(buffer_store->bo);
893 free(buffer_store->buffer);
894 buffer_store->bo = NULL;
895 buffer_store->buffer = NULL;
/* Heap destructor for context objects: tear down the hardware
 * context, release every buffer store referenced by the codec state
 * (the branch taken depends on codec_type: PROC / ENC / decode),
 * free the slice arrays and render-target list, then release the
 * heap slot.  NOTE(review): the declaration of i, several closing
 * braces and parts of the PROC branch are not visible in this
 * chunk. */
903 i965_destroy_context(struct object_heap *heap, struct object_base *obj)
905 struct object_context *obj_context = (struct object_context *)obj;
908 if (obj_context->hw_context) {
909 obj_context->hw_context->destroy(obj_context->hw_context);
910 obj_context->hw_context = NULL;
913 if (obj_context->codec_type == CODEC_PROC) {
914 i965_release_buffer_store(&obj_context->codec_state.proc.pipeline_param);
916 } else if (obj_context->codec_type == CODEC_ENC) {
917 assert(obj_context->codec_state.encode.num_slice_params <= obj_context->codec_state.encode.max_slice_params);
918 i965_release_buffer_store(&obj_context->codec_state.encode.pic_param);
919 i965_release_buffer_store(&obj_context->codec_state.encode.seq_param);
921 for (i = 0; i < obj_context->codec_state.encode.num_slice_params; i++)
922 i965_release_buffer_store(&obj_context->codec_state.encode.slice_params[i]);
924 free(obj_context->codec_state.encode.slice_params);
926 assert(obj_context->codec_state.encode.num_slice_params_ext <= obj_context->codec_state.encode.max_slice_params_ext);
927 i965_release_buffer_store(&obj_context->codec_state.encode.pic_param_ext);
928 i965_release_buffer_store(&obj_context->codec_state.encode.seq_param_ext);
930 for (i = 0; i < ARRAY_ELEMS(obj_context->codec_state.encode.packed_header_param); i++)
931 i965_release_buffer_store(&obj_context->codec_state.encode.packed_header_param[i]);
933 for (i = 0; i < ARRAY_ELEMS(obj_context->codec_state.encode.packed_header_data); i++)
934 i965_release_buffer_store(&obj_context->codec_state.encode.packed_header_data[i]);
936 for (i = 0; i < ARRAY_ELEMS(obj_context->codec_state.encode.misc_param); i++)
937 i965_release_buffer_store(&obj_context->codec_state.encode.misc_param[i]);
939 for (i = 0; i < obj_context->codec_state.encode.num_slice_params_ext; i++)
940 i965_release_buffer_store(&obj_context->codec_state.encode.slice_params_ext[i]);
942 free(obj_context->codec_state.encode.slice_params_ext);
/* Decode branch (the else line itself is not visible here). */
944 assert(obj_context->codec_state.decode.num_slice_params <= obj_context->codec_state.decode.max_slice_params);
945 assert(obj_context->codec_state.decode.num_slice_datas <= obj_context->codec_state.decode.max_slice_datas);
947 i965_release_buffer_store(&obj_context->codec_state.decode.pic_param);
948 i965_release_buffer_store(&obj_context->codec_state.decode.iq_matrix);
949 i965_release_buffer_store(&obj_context->codec_state.decode.bit_plane);
951 for (i = 0; i < obj_context->codec_state.decode.num_slice_params; i++)
952 i965_release_buffer_store(&obj_context->codec_state.decode.slice_params[i]);
954 for (i = 0; i < obj_context->codec_state.decode.num_slice_datas; i++)
955 i965_release_buffer_store(&obj_context->codec_state.decode.slice_datas[i]);
957 free(obj_context->codec_state.decode.slice_params);
958 free(obj_context->codec_state.decode.slice_datas);
961 free(obj_context->render_targets);
962 object_heap_free(heap, obj);
/* vaCreateContext: validate the config and picture dimensions,
 * allocate a context object, configure render state for the codec,
 * copy the render-target list, then build the codec-specific state
 * (PROC / ENC / decode) and its hardware context.  The new context
 * becomes the driver's current context.  NOTE(review): the
 * picture_width/picture_height/flag parameters, switch/case labels,
 * error labels and the final return are not visible in this chunk. */
966 i965_CreateContext(VADriverContextP ctx,
967 VAConfigID config_id,
971 VASurfaceID *render_targets,
972 int num_render_targets,
973 VAContextID *context) /* out */
975 struct i965_driver_data *i965 = i965_driver_data(ctx);
976 struct i965_render_state *render_state = &i965->render_state;
977 struct object_config *obj_config = CONFIG(config_id);
978 struct object_context *obj_context = NULL;
979 VAStatus vaStatus = VA_STATUS_SUCCESS;
983 if (NULL == obj_config) {
984 vaStatus = VA_STATUS_ERROR_INVALID_CONFIG;
988 if (picture_width > i965->codec_info->max_width ||
989 picture_height > i965->codec_info->max_height) {
990 vaStatus = VA_STATUS_ERROR_INVALID_PARAMETER;
995 /* Validate picture dimensions */
996 contextID = NEW_CONTEXT_ID();
997 obj_context = CONTEXT(contextID);
999 if (NULL == obj_context) {
1000 vaStatus = VA_STATUS_ERROR_ALLOCATION_FAILED;
1004 render_state->inited = 1;
1006 switch (obj_config->profile) {
1007 case VAProfileH264Baseline:
1008 case VAProfileH264Main:
1009 case VAProfileH264High:
1010 if (!HAS_H264(i965))
/* NOTE(review): this early return leaks obj_context, which was
 * allocated above and is never freed on this path -- it should go
 * through the error-recovery tail instead. */
1011 return VA_STATUS_ERROR_UNSUPPORTED_PROFILE;
1012 render_state->interleaved_uv = 1;
/* Default case: NV12 (interleaved UV) only on Gen6+. */
1015 render_state->interleaved_uv = !!(IS_GEN6(i965->intel.device_id) || IS_GEN7(i965->intel.device_id));
1019 *context = contextID;
1020 obj_context->flags = flag;
1021 obj_context->context_id = contextID;
1022 obj_context->config_id = config_id;
1023 obj_context->picture_width = picture_width;
1024 obj_context->picture_height = picture_height;
1025 obj_context->num_render_targets = num_render_targets;
/* NOTE(review): calloc result is not checked before the copy loop
 * below dereferences it. */
1026 obj_context->render_targets =
1027 (VASurfaceID *)calloc(num_render_targets, sizeof(VASurfaceID));
1028 obj_context->hw_context = NULL;
1030 for(i = 0; i < num_render_targets; i++) {
1031 if (NULL == SURFACE(render_targets[i])) {
1032 vaStatus = VA_STATUS_ERROR_INVALID_SURFACE;
1036 obj_context->render_targets[i] = render_targets[i];
1039 if (VA_STATUS_SUCCESS == vaStatus) {
1040 if (VAEntrypointVideoProc == obj_config->entrypoint) {
1041 obj_context->codec_type = CODEC_PROC;
1042 memset(&obj_context->codec_state.proc, 0, sizeof(obj_context->codec_state.proc));
1043 obj_context->codec_state.proc.current_render_target = VA_INVALID_ID;
1044 assert(i965->codec_info->proc_hw_context_init);
1045 obj_context->hw_context = i965->codec_info->proc_hw_context_init(ctx, obj_config);
1046 } else if (VAEntrypointEncSlice == obj_config->entrypoint) { /*encode routine only*/
1047 obj_context->codec_type = CODEC_ENC;
1048 memset(&obj_context->codec_state.encode, 0, sizeof(obj_context->codec_state.encode));
1049 obj_context->codec_state.encode.current_render_target = VA_INVALID_ID;
1050 obj_context->codec_state.encode.max_slice_params = NUM_SLICES;
1051 obj_context->codec_state.encode.slice_params = calloc(obj_context->codec_state.encode.max_slice_params,
1052 sizeof(*obj_context->codec_state.encode.slice_params));
1053 assert(i965->codec_info->enc_hw_context_init);
1054 obj_context->hw_context = i965->codec_info->enc_hw_context_init(ctx, obj_config);
/* Decode branch (the else line itself is not visible here). */
1056 obj_context->codec_type = CODEC_DEC;
1057 memset(&obj_context->codec_state.decode, 0, sizeof(obj_context->codec_state.decode));
1058 obj_context->codec_state.decode.current_render_target = -1;
1059 obj_context->codec_state.decode.max_slice_params = NUM_SLICES;
1060 obj_context->codec_state.decode.max_slice_datas = NUM_SLICES;
1061 obj_context->codec_state.decode.slice_params = calloc(obj_context->codec_state.decode.max_slice_params,
1062 sizeof(*obj_context->codec_state.decode.slice_params));
1063 obj_context->codec_state.decode.slice_datas = calloc(obj_context->codec_state.decode.max_slice_datas,
1064 sizeof(*obj_context->codec_state.decode.slice_datas));
1066 assert(i965->codec_info->dec_hw_context_init);
1067 obj_context->hw_context = i965->codec_info->dec_hw_context_init(ctx, obj_config);
1071 /* Error recovery */
1072 if (VA_STATUS_SUCCESS != vaStatus) {
1073 i965_destroy_context(&i965->context_heap, (struct object_base *)obj_context);
1076 i965->current_context_id = contextID;
/* vaDestroyContext: clear the current-context pointer if it refers to
 * this context, then destroy the context object.  The handle is only
 * assert-checked, so an invalid ID in a release build would reach
 * i965_destroy_context as NULL. */
1082 i965_DestroyContext(VADriverContextP ctx, VAContextID context)
1084 struct i965_driver_data *i965 = i965_driver_data(ctx);
1085 struct object_context *obj_context = CONTEXT(context);
1087 assert(obj_context);
1089 if (i965->current_context_id == context)
1090 i965->current_context_id = VA_INVALID_ID;
1092 i965_destroy_context(&i965->context_heap, (struct object_base *)obj_context);
1094 return VA_STATUS_SUCCESS;
/* Heap destructor for buffer objects: drop the buffer-store reference
 * (freeing it on last release) and return the heap slot. */
1098 i965_destroy_buffer(struct object_heap *heap, struct object_base *obj)
1100 struct object_buffer *obj_buffer = (struct object_buffer *)obj;
1102 assert(obj_buffer->buffer_store);
1103 i965_release_buffer_store(&obj_buffer->buffer_store);
1104 object_heap_free(heap, obj);
/* Common backend for vaCreateBuffer (and image/coded-buffer creation
 * paths that pass an existing store_bo): validates the buffer type,
 * allocates a buffer object plus a refcounted buffer_store, and backs
 * the store with either the supplied bo, a freshly allocated GEM bo
 * (slice-data / image / coded buffers), or plain malloc'd memory.
 * Coded buffers get a VACodedBufferSegment header and extra guard
 * space.  NOTE(review): the type/size/data/store_bo/buf_id
 * parameters, the switch opening, the *buf_id store and several
 * closing braces are not visible in this chunk. */
1108 i965_create_buffer_internal(VADriverContextP ctx,
1109 VAContextID context,
1112 unsigned int num_elements,
1117 struct i965_driver_data *i965 = i965_driver_data(ctx);
1118 struct object_buffer *obj_buffer = NULL;
1119 struct buffer_store *buffer_store = NULL;
/* Whitelist of buffer types this driver accepts; everything else
 * falls through to UNSUPPORTED_BUFFERTYPE below. */
1124 case VAPictureParameterBufferType:
1125 case VAIQMatrixBufferType:
1126 case VAQMatrixBufferType:
1127 case VABitPlaneBufferType:
1128 case VASliceGroupMapBufferType:
1129 case VASliceParameterBufferType:
1130 case VASliceDataBufferType:
1131 case VAMacroblockParameterBufferType:
1132 case VAResidualDataBufferType:
1133 case VADeblockingParameterBufferType:
1134 case VAImageBufferType:
1135 case VAEncCodedBufferType:
1136 case VAEncSequenceParameterBufferType:
1137 case VAEncPictureParameterBufferType:
1138 case VAEncSliceParameterBufferType:
1139 case VAEncPackedHeaderParameterBufferType:
1140 case VAEncPackedHeaderDataBufferType:
1141 case VAEncMiscParameterBufferType:
1142 case VAProcPipelineParameterBufferType:
1143 case VAProcFilterParameterBufferType:
1144 #ifdef HAVE_JPEG_DECODING
1145 case VAHuffmanTableBufferType:
1151 return VA_STATUS_ERROR_UNSUPPORTED_BUFFERTYPE;
1154 bufferID = NEW_BUFFER_ID();
1155 obj_buffer = BUFFER(bufferID);
1157 if (NULL == obj_buffer) {
1158 return VA_STATUS_ERROR_ALLOCATION_FAILED;
/* Coded buffers carry a segment header plus guard space for the
 * encoder's upper-bound check. */
1161 if (type == VAEncCodedBufferType) {
1162 size += ALIGN(sizeof(VACodedBufferSegment), 64);
1163 size += 0x1000; /* for upper bound check */
1166 obj_buffer->max_num_elements = num_elements;
1167 obj_buffer->num_elements = num_elements;
1168 obj_buffer->size_element = size;
1169 obj_buffer->type = type;
1170 obj_buffer->buffer_store = NULL;
1171 buffer_store = calloc(1, sizeof(struct buffer_store));
1172 assert(buffer_store);
1173 buffer_store->ref_count = 1;
1175 if (store_bo != NULL) {
/* Caller supplied an existing GEM bo: share it. */
1176 buffer_store->bo = store_bo;
1177 dri_bo_reference(buffer_store->bo);
1180 dri_bo_subdata(buffer_store->bo, 0, size * num_elements, data);
1181 } else if (type == VASliceDataBufferType ||
1182 type == VAImageBufferType ||
1183 type == VAEncCodedBufferType) {
/* These large buffer types live in GEM memory. */
1184 buffer_store->bo = dri_bo_alloc(i965->intel.bufmgr,
1186 size * num_elements, 64);
1187 assert(buffer_store->bo);
1189 if (type == VAEncCodedBufferType) {
/* Initialize the coded-buffer segment header in place. */
1190 VACodedBufferSegment *coded_buffer_segment;
1191 dri_bo_map(buffer_store->bo, 1);
1192 coded_buffer_segment = (VACodedBufferSegment *)buffer_store->bo->virtual;
1193 coded_buffer_segment->size = size - ALIGN(sizeof(VACodedBufferSegment), 64);
1194 coded_buffer_segment->bit_offset = 0;
1195 coded_buffer_segment->status = 0;
1196 coded_buffer_segment->buf = NULL;
1197 coded_buffer_segment->next = NULL;
1198 dri_bo_unmap(buffer_store->bo);
1200 dri_bo_subdata(buffer_store->bo, 0, size * num_elements, data);
/* Otherwise back the store with plain malloc'd memory; packed
 * header data is padded to a 4-byte multiple. */
1206 if (type == VAEncPackedHeaderDataBufferType) {
1207 msize = ALIGN(size, 4);
1210 buffer_store->buffer = malloc(msize * num_elements);
1211 assert(buffer_store->buffer);
1214 memcpy(buffer_store->buffer, data, size * num_elements);
1217 buffer_store->num_elements = obj_buffer->num_elements;
1218 i965_reference_buffer_store(&obj_buffer->buffer_store, buffer_store);
/* Drop the local reference; the object now owns the store. */
1219 i965_release_buffer_store(&buffer_store);
1222 return VA_STATUS_SUCCESS;
1226 i965_CreateBuffer(VADriverContextP ctx,
1227 VAContextID context, /* in */
1228 VABufferType type, /* in */
1229 unsigned int size, /* in */
1230 unsigned int num_elements, /* in */
1231 void *data, /* in */
1232 VABufferID *buf_id) /* out */
1234 return i965_create_buffer_internal(ctx, context, type, size, num_elements, data, NULL, buf_id);
1239 i965_BufferSetNumElements(VADriverContextP ctx,
1240 VABufferID buf_id, /* in */
1241 unsigned int num_elements) /* in */
1243 struct i965_driver_data *i965 = i965_driver_data(ctx);
1244 struct object_buffer *obj_buffer = BUFFER(buf_id);
1245 VAStatus vaStatus = VA_STATUS_SUCCESS;
1249 if ((num_elements < 0) ||
1250 (num_elements > obj_buffer->max_num_elements)) {
1251 vaStatus = VA_STATUS_ERROR_UNKNOWN;
1253 obj_buffer->num_elements = num_elements;
1254 if (obj_buffer->buffer_store != NULL) {
1255 obj_buffer->buffer_store->num_elements = num_elements;
/*
 * vaMapBuffer implementation.
 *
 * Returns a CPU pointer to the buffer's backing storage via *pbuf:
 * GEM-backed stores are mapped through libdrm (GTT mapping when the BO
 * is tiled), malloc-backed stores are returned directly.  For coded
 * buffers (VAEncCodedBufferType) the VACodedBufferSegment embedded at
 * the start of the BO is fixed up so its buf pointer references the
 * bitstream payload following the 64-byte-aligned segment header.
 */
i965_MapBuffer(VADriverContextP ctx,
               VABufferID buf_id,  /* in */
               void **pbuf)        /* out */
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    struct object_buffer *obj_buffer = BUFFER(buf_id);
    VAStatus vaStatus = VA_STATUS_ERROR_UNKNOWN;

    /* A buffer store is backed by exactly one of a GEM BO or malloc'ed
     * memory -- never both, never neither. */
    assert(obj_buffer && obj_buffer->buffer_store);
    assert(obj_buffer->buffer_store->bo || obj_buffer->buffer_store->buffer);
    assert(!(obj_buffer->buffer_store->bo && obj_buffer->buffer_store->buffer));

    if (NULL != obj_buffer->buffer_store->bo) {
        unsigned int tiling, swizzle;

        dri_bo_get_tiling(obj_buffer->buffer_store->bo, &tiling, &swizzle);

        /* Tiled BOs must be mapped through the GTT so the CPU sees a
         * linear view of the data. */
        if (tiling != I915_TILING_NONE)
            drm_intel_gem_bo_map_gtt(obj_buffer->buffer_store->bo);
        dri_bo_map(obj_buffer->buffer_store->bo, 1);
        assert(obj_buffer->buffer_store->bo->virtual);
        *pbuf = obj_buffer->buffer_store->bo->virtual;

        if (obj_buffer->type == VAEncCodedBufferType) {
            unsigned char *buffer = NULL;
            VACodedBufferSegment *coded_buffer_segment = (VACodedBufferSegment *)(obj_buffer->buffer_store->bo->virtual);

            /* Payload starts right after the aligned segment header. */
            coded_buffer_segment->buf = buffer = (unsigned char *)(obj_buffer->buffer_store->bo->virtual) + ALIGN(sizeof(VACodedBufferSegment), 64);
            /* Scan for the end of the coded data.  NOTE(review): the bound
             * subtracts the segment header, a 3-byte margin and the 0x1000
             * guard added at allocation time -- keep in sync with
             * i965_create_buffer_internal(). */
            for (i = 0; i < obj_buffer->size_element - ALIGN(sizeof(VACodedBufferSegment), 64) - 3 - 0x1000; i++) {
            /* Reaching the bound means no terminator was found: the slice
             * overflowed the coded buffer. */
            if (i == obj_buffer->size_element - ALIGN(sizeof(VACodedBufferSegment), 64) - 3 - 0x1000) {
                coded_buffer_segment->status |= VA_CODED_BUF_STATUS_SLICE_OVERFLOW_MASK;
            coded_buffer_segment->size = i;

        vaStatus = VA_STATUS_SUCCESS;
    } else if (NULL != obj_buffer->buffer_store->buffer) {
        /* Plain malloc-backed store: hand out the pointer directly. */
        *pbuf = obj_buffer->buffer_store->buffer;
        vaStatus = VA_STATUS_SUCCESS;
1321 i965_UnmapBuffer(VADriverContextP ctx, VABufferID buf_id)
1323 struct i965_driver_data *i965 = i965_driver_data(ctx);
1324 struct object_buffer *obj_buffer = BUFFER(buf_id);
1325 VAStatus vaStatus = VA_STATUS_ERROR_UNKNOWN;
1327 assert(obj_buffer && obj_buffer->buffer_store);
1328 assert(obj_buffer->buffer_store->bo || obj_buffer->buffer_store->buffer);
1329 assert(!(obj_buffer->buffer_store->bo && obj_buffer->buffer_store->buffer));
1331 if (NULL != obj_buffer->buffer_store->bo) {
1332 unsigned int tiling, swizzle;
1334 dri_bo_get_tiling(obj_buffer->buffer_store->bo, &tiling, &swizzle);
1336 if (tiling != I915_TILING_NONE)
1337 drm_intel_gem_bo_unmap_gtt(obj_buffer->buffer_store->bo);
1339 dri_bo_unmap(obj_buffer->buffer_store->bo);
1341 vaStatus = VA_STATUS_SUCCESS;
1342 } else if (NULL != obj_buffer->buffer_store->buffer) {
1344 vaStatus = VA_STATUS_SUCCESS;
1351 i965_DestroyBuffer(VADriverContextP ctx, VABufferID buffer_id)
1353 struct i965_driver_data *i965 = i965_driver_data(ctx);
1354 struct object_buffer *obj_buffer = BUFFER(buffer_id);
1357 i965_destroy_buffer(&i965->buffer_heap, (struct object_base *)obj_buffer);
1359 return VA_STATUS_SUCCESS;
/*
 * vaBeginPicture implementation.
 *
 * Validates the context/surface pair and the configured profile, then
 * resets the per-picture codec state so the following vaRenderPicture()
 * calls start from a clean slate.  The render target is recorded in the
 * state matching the context's codec type (proc / encode / decode).
 */
i965_BeginPicture(VADriverContextP ctx,
                  VAContextID context,
                  VASurfaceID render_target)
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    struct object_context *obj_context = CONTEXT(context);
    struct object_surface *obj_surface = SURFACE(render_target);
    struct object_config *obj_config;

    assert(obj_context);
    assert(obj_surface);

    config = obj_context->config_id;
    obj_config = CONFIG(config);

    /* Accept only profiles this driver can have created a config for. */
    switch (obj_config->profile) {
    case VAProfileMPEG2Simple:
    case VAProfileMPEG2Main:
        vaStatus = VA_STATUS_SUCCESS;

    case VAProfileH264Baseline:
    case VAProfileH264Main:
    case VAProfileH264High:
        vaStatus = VA_STATUS_SUCCESS;

    case VAProfileVC1Simple:
    case VAProfileVC1Main:
    case VAProfileVC1Advanced:
        vaStatus = VA_STATUS_SUCCESS;

    case VAProfileJPEGBaseline:
        vaStatus = VA_STATUS_SUCCESS;

        vaStatus = VA_STATUS_SUCCESS;

        vaStatus = VA_STATUS_ERROR_UNSUPPORTED_PROFILE;

    if (obj_context->codec_type == CODEC_PROC) {
        /* Video processing: only the render target needs tracking. */
        obj_context->codec_state.proc.current_render_target = render_target;
    } else if (obj_context->codec_type == CODEC_ENC) {
        /* Encoding: drop every buffer reference accumulated for the
         * previous frame (legacy and "_ext" variants, packed headers,
         * slice parameters) before accepting new ones. */
        i965_release_buffer_store(&obj_context->codec_state.encode.pic_param);
        i965_release_buffer_store(&obj_context->codec_state.encode.seq_param);

        for (i = 0; i < obj_context->codec_state.encode.num_slice_params; i++) {
            i965_release_buffer_store(&obj_context->codec_state.encode.slice_params[i]);

        obj_context->codec_state.encode.num_slice_params = 0;

        i965_release_buffer_store(&obj_context->codec_state.encode.pic_param_ext);
        i965_release_buffer_store(&obj_context->codec_state.encode.seq_param_ext);

        for (i = 0; i < ARRAY_ELEMS(obj_context->codec_state.encode.packed_header_param); i++)
            i965_release_buffer_store(&obj_context->codec_state.encode.packed_header_param[i]);

        for (i = 0; i < ARRAY_ELEMS(obj_context->codec_state.encode.packed_header_data); i++)
            i965_release_buffer_store(&obj_context->codec_state.encode.packed_header_data[i]);

        for (i = 0; i < obj_context->codec_state.encode.num_slice_params_ext; i++)
            i965_release_buffer_store(&obj_context->codec_state.encode.slice_params_ext[i]);

        obj_context->codec_state.encode.num_slice_params_ext = 0;
        obj_context->codec_state.encode.current_render_target = render_target; /*This is input new frame*/
        obj_context->codec_state.encode.last_packed_header_type = 0;
        /* Decode: reset picture-level buffers and per-slice arrays. */
        obj_context->codec_state.decode.current_render_target = render_target;
        i965_release_buffer_store(&obj_context->codec_state.decode.pic_param);
        i965_release_buffer_store(&obj_context->codec_state.decode.iq_matrix);
        i965_release_buffer_store(&obj_context->codec_state.decode.bit_plane);
        i965_release_buffer_store(&obj_context->codec_state.decode.huffman_table);

        for (i = 0; i < obj_context->codec_state.decode.num_slice_params; i++) {
            i965_release_buffer_store(&obj_context->codec_state.decode.slice_params[i]);
            i965_release_buffer_store(&obj_context->codec_state.decode.slice_datas[i]);

        obj_context->codec_state.decode.num_slice_params = 0;
        obj_context->codec_state.decode.num_slice_datas = 0;
/* Invoke the generated render helper for the given codec-state category
 * (decode/encode/proc) and buffer name, forwarding the local ctx,
 * obj_context and obj_buffer variables at the call site. */
#define I965_RENDER_BUFFER(category, name) i965_render_##category##_##name##_buffer(ctx, obj_context, obj_buffer)

/* Generates a render function for buffer types of which at most one
 * instance is attached per picture: the previous reference held in
 * codec_state.<category>.<member> is released and replaced by the new
 * buffer's store.  Only malloc-backed stores are expected here (the
 * asserts reject BO-backed ones). */
#define DEF_RENDER_SINGLE_BUFFER_FUNC(category, name, member)           \
    i965_render_##category##_##name##_buffer(VADriverContextP ctx,      \
                                             struct object_context *obj_context, \
                                             struct object_buffer *obj_buffer) \
        struct category##_state *category = &obj_context->codec_state.category; \
        assert(obj_buffer->buffer_store->bo == NULL);                   \
        assert(obj_buffer->buffer_store->buffer);                       \
        i965_release_buffer_store(&category->member);                   \
        i965_reference_buffer_store(&category->member, obj_buffer->buffer_store); \
        return VA_STATUS_SUCCESS;                                       \
/*
 * Generates a render function for buffer types that may occur several
 * times per picture (e.g. slice parameters/data).  The backing array in
 * codec_state.<category>.<member> grows in chunks of NUM_SLICES entries
 * on demand; each call appends a reference to the new buffer's store.
 *
 * Fix vs. the original: the realloc() result is taken through a
 * temporary, so an allocation failure no longer overwrites (and leaks)
 * the old array or crashes in the subsequent memset(); the function
 * reports VA_STATUS_ERROR_ALLOCATION_FAILED instead.
 */
#define DEF_RENDER_MULTI_BUFFER_FUNC(category, name, member)            \
    static VAStatus                                                     \
    i965_render_##category##_##name##_buffer(VADriverContextP ctx,      \
                                             struct object_context *obj_context, \
                                             struct object_buffer *obj_buffer) \
    {                                                                   \
        struct category##_state *category = &obj_context->codec_state.category; \
        if (category->num_##member == category->max_##member) {         \
            void *tmp = realloc(category->member, (category->max_##member + NUM_SLICES) * sizeof(*category->member)); \
            if (!tmp)                                                   \
                return VA_STATUS_ERROR_ALLOCATION_FAILED;               \
            category->member = tmp;                                     \
            memset(category->member + category->max_##member, 0, NUM_SLICES * sizeof(*category->member)); \
            category->max_##member += NUM_SLICES;                       \
        }                                                               \
        i965_release_buffer_store(&category->member[category->num_##member]); \
        i965_reference_buffer_store(&category->member[category->num_##member], obj_buffer->buffer_store); \
        category->num_##member++;                                       \
        return VA_STATUS_SUCCESS;                                       \
    }
/* Convenience wrapper: render a decode buffer of the given name into the
 * current context's decode state. */
#define I965_RENDER_DECODE_BUFFER(name) I965_RENDER_BUFFER(decode, name)

/* Single-instance decode buffers: one per picture; a new buffer replaces
 * the previous reference. */
#define DEF_RENDER_DECODE_SINGLE_BUFFER_FUNC(name, member) DEF_RENDER_SINGLE_BUFFER_FUNC(decode, name, member)
DEF_RENDER_DECODE_SINGLE_BUFFER_FUNC(picture_parameter, pic_param)
DEF_RENDER_DECODE_SINGLE_BUFFER_FUNC(iq_matrix, iq_matrix)
DEF_RENDER_DECODE_SINGLE_BUFFER_FUNC(bit_plane, bit_plane)
DEF_RENDER_DECODE_SINGLE_BUFFER_FUNC(huffman_table, huffman_table)

/* Multi-instance decode buffers: accumulated per picture (one entry per
 * slice). */
#define DEF_RENDER_DECODE_MULTI_BUFFER_FUNC(name, member) DEF_RENDER_MULTI_BUFFER_FUNC(decode, name, member)
DEF_RENDER_DECODE_MULTI_BUFFER_FUNC(slice_parameter, slice_params)
DEF_RENDER_DECODE_MULTI_BUFFER_FUNC(slice_data, slice_datas)
/*
 * Dispatch decode-path vaRenderPicture buffers: each buffer is attached
 * to the context's decode state according to its type.  The loop stops
 * at the first buffer that fails to attach.
 */
i965_decoder_render_picture(VADriverContextP ctx,
                            VAContextID context,
                            VABufferID *buffers,
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    struct object_context *obj_context = CONTEXT(context);
    VAStatus vaStatus = VA_STATUS_SUCCESS;

    for (i = 0; i < num_buffers && vaStatus == VA_STATUS_SUCCESS; i++) {
        struct object_buffer *obj_buffer = BUFFER(buffers[i]);

        switch (obj_buffer->type) {
        case VAPictureParameterBufferType:
            vaStatus = I965_RENDER_DECODE_BUFFER(picture_parameter);

        case VAIQMatrixBufferType:
            vaStatus = I965_RENDER_DECODE_BUFFER(iq_matrix);

        case VABitPlaneBufferType:
            vaStatus = I965_RENDER_DECODE_BUFFER(bit_plane);

        case VASliceParameterBufferType:
            vaStatus = I965_RENDER_DECODE_BUFFER(slice_parameter);

        case VASliceDataBufferType:
            vaStatus = I965_RENDER_DECODE_BUFFER(slice_data);

/* Huffman tables are only consumed by the JPEG decode path. */
#ifdef HAVE_JPEG_DECODING
        case VAHuffmanTableBufferType:
            vaStatus = I965_RENDER_DECODE_BUFFER(huffman_table);
            /* Any other buffer type is invalid on the decode path. */
            vaStatus = VA_STATUS_ERROR_UNSUPPORTED_BUFFERTYPE;
/* Convenience wrapper: render an encode buffer of the given name into the
 * current context's encode state. */
#define I965_RENDER_ENCODE_BUFFER(name) I965_RENDER_BUFFER(encode, name)

/* Single-instance encode buffers (one per picture). */
#define DEF_RENDER_ENCODE_SINGLE_BUFFER_FUNC(name, member) DEF_RENDER_SINGLE_BUFFER_FUNC(encode, name, member)
DEF_RENDER_ENCODE_SINGLE_BUFFER_FUNC(sequence_parameter, seq_param)
DEF_RENDER_ENCODE_SINGLE_BUFFER_FUNC(picture_parameter, pic_param)
DEF_RENDER_ENCODE_SINGLE_BUFFER_FUNC(picture_control, pic_control)
DEF_RENDER_ENCODE_SINGLE_BUFFER_FUNC(qmatrix, q_matrix)
DEF_RENDER_ENCODE_SINGLE_BUFFER_FUNC(iqmatrix, iq_matrix)
/* extended buffer */
DEF_RENDER_ENCODE_SINGLE_BUFFER_FUNC(sequence_parameter_ext, seq_param_ext)
DEF_RENDER_ENCODE_SINGLE_BUFFER_FUNC(picture_parameter_ext, pic_param_ext)

/* Multi-instance encode buffers (one entry per slice). */
#define DEF_RENDER_ENCODE_MULTI_BUFFER_FUNC(name, member) DEF_RENDER_MULTI_BUFFER_FUNC(encode, name, member)
DEF_RENDER_ENCODE_MULTI_BUFFER_FUNC(slice_parameter, slice_params)
DEF_RENDER_ENCODE_MULTI_BUFFER_FUNC(slice_parameter_ext, slice_params_ext)
1575 i965_encoder_render_packed_header_parameter_buffer(VADriverContextP ctx,
1576 struct object_context *obj_context,
1577 struct object_buffer *obj_buffer,
1578 VAEncPackedHeaderType type)
1580 struct encode_state *encode = &obj_context->codec_state.encode;
1582 assert(obj_buffer->buffer_store->bo == NULL);
1583 assert(obj_buffer->buffer_store->buffer);
1584 i965_release_buffer_store(&encode->packed_header_param[type]);
1585 i965_reference_buffer_store(&encode->packed_header_param[type], obj_buffer->buffer_store);
1587 return VA_STATUS_SUCCESS;
1591 i965_encoder_render_packed_header_data_buffer(VADriverContextP ctx,
1592 struct object_context *obj_context,
1593 struct object_buffer *obj_buffer,
1594 VAEncPackedHeaderType type)
1596 struct encode_state *encode = &obj_context->codec_state.encode;
1598 assert(obj_buffer->buffer_store->bo == NULL);
1599 assert(obj_buffer->buffer_store->buffer);
1600 i965_release_buffer_store(&encode->packed_header_data[type]);
1601 i965_reference_buffer_store(&encode->packed_header_data[type], obj_buffer->buffer_store);
1603 return VA_STATUS_SUCCESS;
1607 i965_encoder_render_misc_parameter_buffer(VADriverContextP ctx,
1608 struct object_context *obj_context,
1609 struct object_buffer *obj_buffer)
1611 struct encode_state *encode = &obj_context->codec_state.encode;
1612 VAEncMiscParameterBuffer *param = NULL;
1614 assert(obj_buffer->buffer_store->bo == NULL);
1615 assert(obj_buffer->buffer_store->buffer);
1617 param = (VAEncMiscParameterBuffer *)obj_buffer->buffer_store->buffer;
1618 i965_release_buffer_store(&encode->misc_param[param->type]);
1619 i965_reference_buffer_store(&encode->misc_param[param->type], obj_buffer->buffer_store);
1621 return VA_STATUS_SUCCESS;
/*
 * Dispatch encode-path vaRenderPicture buffers to the encode state.
 * Packed-header *data* buffers are associated with the most recently
 * seen packed-header *parameter* buffer via last_packed_header_type.
 */
i965_encoder_render_picture(VADriverContextP ctx,
                            VAContextID context,
                            VABufferID *buffers,
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    struct object_context *obj_context = CONTEXT(context);
    VAStatus vaStatus = VA_STATUS_ERROR_UNKNOWN;

    for (i = 0; i < num_buffers; i++) {
        struct object_buffer *obj_buffer = BUFFER(buffers[i]);

        switch (obj_buffer->type) {
        case VAQMatrixBufferType:
            vaStatus = I965_RENDER_ENCODE_BUFFER(qmatrix);

        case VAIQMatrixBufferType:
            vaStatus = I965_RENDER_ENCODE_BUFFER(iqmatrix);

        case VAEncSequenceParameterBufferType:
            vaStatus = I965_RENDER_ENCODE_BUFFER(sequence_parameter_ext);

        case VAEncPictureParameterBufferType:
            vaStatus = I965_RENDER_ENCODE_BUFFER(picture_parameter_ext);

        case VAEncSliceParameterBufferType:
            vaStatus = I965_RENDER_ENCODE_BUFFER(slice_parameter_ext);

        case VAEncPackedHeaderParameterBufferType:
            /* Remember which header type the following data buffer is for. */
            struct encode_state *encode = &obj_context->codec_state.encode;
            VAEncPackedHeaderParameterBuffer *param = (VAEncPackedHeaderParameterBuffer *)obj_buffer->buffer_store->buffer;
            encode->last_packed_header_type = param->type;

            vaStatus = i965_encoder_render_packed_header_parameter_buffer(ctx,
                                                                          encode->last_packed_header_type);

        case VAEncPackedHeaderDataBufferType:
            struct encode_state *encode = &obj_context->codec_state.encode;

            /* A parameter buffer announcing the header type must precede
             * its data buffer. */
            assert(encode->last_packed_header_type == VAEncPackedHeaderSequence ||
                   encode->last_packed_header_type == VAEncPackedHeaderPicture ||
                   encode->last_packed_header_type == VAEncPackedHeaderSlice);
            vaStatus = i965_encoder_render_packed_header_data_buffer(ctx,
                                                                     encode->last_packed_header_type);

        case VAEncMiscParameterBufferType:
            vaStatus = i965_encoder_render_misc_parameter_buffer(ctx,

            /* Anything else is invalid on the encode path. */
            vaStatus = VA_STATUS_ERROR_UNSUPPORTED_BUFFERTYPE;
/* Convenience wrapper: render a video-processing buffer of the given name
 * into the current context's proc state. */
#define I965_RENDER_PROC_BUFFER(name) I965_RENDER_BUFFER(proc, name)

/* Single-instance proc buffers (one per picture). */
#define DEF_RENDER_PROC_SINGLE_BUFFER_FUNC(name, member) DEF_RENDER_SINGLE_BUFFER_FUNC(proc, name, member)
DEF_RENDER_PROC_SINGLE_BUFFER_FUNC(pipeline_parameter, pipeline_param)
/*
 * Dispatch video-processing vaRenderPicture buffers: only pipeline
 * parameter buffers are accepted on this path.  The loop stops at the
 * first buffer that fails to attach.
 */
i965_proc_render_picture(VADriverContextP ctx,
                         VAContextID context,
                         VABufferID *buffers,
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    struct object_context *obj_context = CONTEXT(context);
    VAStatus vaStatus = VA_STATUS_SUCCESS;

    for (i = 0; i < num_buffers && vaStatus == VA_STATUS_SUCCESS; i++) {
        struct object_buffer *obj_buffer = BUFFER(buffers[i]);

        switch (obj_buffer->type) {
        case VAProcPipelineParameterBufferType:
            vaStatus = I965_RENDER_PROC_BUFFER(pipeline_parameter);

            /* Anything else is invalid on the proc path. */
            vaStatus = VA_STATUS_ERROR_UNSUPPORTED_BUFFERTYPE;
/*
 * vaRenderPicture implementation: look up the context's config and
 * dispatch the buffer list to the proc, encode or decode handler based
 * on the configured entrypoint.
 */
i965_RenderPicture(VADriverContextP ctx,
                   VAContextID context,
                   VABufferID *buffers,
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    struct object_context *obj_context;
    struct object_config *obj_config;

    VAStatus vaStatus = VA_STATUS_ERROR_UNKNOWN;

    obj_context = CONTEXT(context);
    assert(obj_context);

    config = obj_context->config_id;
    obj_config = CONFIG(config);

    if (VAEntrypointVideoProc == obj_config->entrypoint) {
        vaStatus = i965_proc_render_picture(ctx, context, buffers, num_buffers);
    } else if (VAEntrypointEncSlice == obj_config->entrypoint ) {
        vaStatus = i965_encoder_render_picture(ctx, context, buffers, num_buffers);
        /* Every other entrypoint is handled by the decode path. */
        vaStatus = i965_decoder_render_picture(ctx, context, buffers, num_buffers);
/*
 * vaEndPicture implementation: sanity-check that the mandatory state for
 * the context's codec type was supplied by vaRenderPicture(), then hand
 * the accumulated codec_state to the backend via hw_context->run().
 */
i965_EndPicture(VADriverContextP ctx, VAContextID context)
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    struct object_context *obj_context = CONTEXT(context);
    struct object_config *obj_config;

    assert(obj_context);
    config = obj_context->config_id;
    obj_config = CONFIG(config);

    if (obj_context->codec_type == CODEC_PROC) {
        assert(VAEntrypointVideoProc == obj_config->entrypoint);
    } else if (obj_context->codec_type == CODEC_ENC) {
        assert(VAEntrypointEncSlice == obj_config->entrypoint);

        /* Either the legacy or the "_ext" variant of the picture and
         * sequence parameters must be present, plus at least one slice. */
        assert(obj_context->codec_state.encode.pic_param ||
               obj_context->codec_state.encode.pic_param_ext);
        assert(obj_context->codec_state.encode.seq_param ||
               obj_context->codec_state.encode.seq_param_ext);
        assert(obj_context->codec_state.encode.num_slice_params >= 1 ||
               obj_context->codec_state.encode.num_slice_params_ext >= 1);
        /* Decode: picture parameters plus matched slice param/data pairs. */
        assert(obj_context->codec_state.decode.pic_param);
        assert(obj_context->codec_state.decode.num_slice_params >= 1);
        assert(obj_context->codec_state.decode.num_slice_datas >= 1);
        assert(obj_context->codec_state.decode.num_slice_params == obj_context->codec_state.decode.num_slice_datas);

    assert(obj_context->hw_context->run);
    obj_context->hw_context->run(ctx, obj_config->profile, &obj_context->codec_state, obj_context->hw_context);

    return VA_STATUS_SUCCESS;
1804 i965_SyncSurface(VADriverContextP ctx,
1805 VASurfaceID render_target)
1807 struct i965_driver_data *i965 = i965_driver_data(ctx);
1808 struct object_surface *obj_surface = SURFACE(render_target);
1810 assert(obj_surface);
1812 return VA_STATUS_SUCCESS;
/*
 * vaQuerySurfaceStatus implementation: wait for any pending GPU work on
 * the surface BO, then report the surface's status field.
 */
i965_QuerySurfaceStatus(VADriverContextP ctx,
                        VASurfaceID render_target,
                        VASurfaceStatus *status) /* out */
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    struct object_surface *obj_surface = SURFACE(render_target);

    assert(obj_surface);

    /* Usually GEM will handle synchronization with the graphics hardware */
    /* A map/unmap round-trip blocks until rendering into the BO is done. */
    if (obj_surface->bo) {
        dri_bo_map(obj_surface->bo, 0);
        dri_bo_unmap(obj_surface->bo);

    *status = obj_surface->status;

    return VA_STATUS_SUCCESS;
1840 * Query display attributes
 * The caller must provide an "attr_list" array that can hold at
1842 * least vaMaxNumDisplayAttributes() entries. The actual number of attributes
1843 * returned in "attr_list" is returned in "num_attributes".
1846 i965_QueryDisplayAttributes(VADriverContextP ctx,
1847 VADisplayAttribute *attr_list, /* out */
1848 int *num_attributes) /* out */
1851 *num_attributes = 0;
1853 return VA_STATUS_SUCCESS;
1857 * Get display attributes
1858 * This function returns the current attribute values in "attr_list".
1859 * Only attributes returned with VA_DISPLAY_ATTRIB_GETTABLE set in the "flags" field
1860 * from vaQueryDisplayAttributes() can have their values retrieved.
1863 i965_GetDisplayAttributes(VADriverContextP ctx,
1864 VADisplayAttribute *attr_list, /* in/out */
1868 return VA_STATUS_ERROR_UNIMPLEMENTED;
1872 * Set display attributes
1873 * Only attributes returned with VA_DISPLAY_ATTRIB_SETTABLE set in the "flags" field
1874 * from vaQueryDisplayAttributes() can be set. If the attribute is not settable or
1875 * the value is out of range, the function returns VA_STATUS_ERROR_ATTR_NOT_SUPPORTED
1878 i965_SetDisplayAttributes(VADriverContextP ctx,
1879 VADisplayAttribute *attr_list,
1883 return VA_STATUS_ERROR_UNIMPLEMENTED;
1887 i965_DbgCopySurfaceToBuffer(VADriverContextP ctx,
1888 VASurfaceID surface,
1889 void **buffer, /* out */
1890 unsigned int *stride) /* out */
1893 return VA_STATUS_ERROR_UNIMPLEMENTED;
/*
 * Driver initialization: bring up the DRI/DRM layer, select the codec
 * capability table for the detected device generation, create the render
 * batch buffer and initialize the post-processing and render subsystems.
 *
 * NOTE(review): the error paths return without tearing down what was
 * already initialized (intel_driver_init, the batch buffer) -- confirm
 * whether the caller performs cleanup on failure.
 */
i965_Init(VADriverContextP ctx)
    struct i965_driver_data *i965 = i965_driver_data(ctx);

    if (intel_driver_init(ctx) == False)
        return VA_STATUS_ERROR_UNKNOWN;

    /* Per-generation codec capability tables. */
    if (IS_G4X(i965->intel.device_id))
        i965->codec_info = &g4x_hw_codec_info;
    else if (IS_IRONLAKE(i965->intel.device_id))
        i965->codec_info = &ironlake_hw_codec_info;
    else if (IS_GEN6(i965->intel.device_id))
        i965->codec_info = &gen6_hw_codec_info;
    else if (IS_GEN7(i965->intel.device_id))
        i965->codec_info = &gen7_hw_codec_info;
        /* Unrecognized device: refuse to load. */
        return VA_STATUS_ERROR_UNKNOWN;

    i965->batch = intel_batchbuffer_new(&i965->intel, I915_EXEC_RENDER);

    if (i965_post_processing_init(ctx) == False)
        return VA_STATUS_ERROR_UNKNOWN;

    if (i965_render_init(ctx) == False)
        return VA_STATUS_ERROR_UNKNOWN;

    _i965InitMutex(&i965->render_mutex);
    _i965InitMutex(&i965->pp_mutex);

    return VA_STATUS_SUCCESS;
/*
 * Walk every live object in the heap, releasing each one via the
 * supplied destructor, then destroy the heap itself.
 */
i965_destroy_heap(struct object_heap *heap,
                  void (*func)(struct object_heap *heap, struct object_base *object))
    struct object_base *object;
    object_heap_iterator iter;

    object = object_heap_first(heap, &iter);
        /* NOTE(review): the loop body (not shown here) is expected to
         * apply func(heap, object) to each object -- confirm. */
        object = object_heap_next(heap, &iter);

    object_heap_destroy(heap);
1950 i965_DestroyImage(VADriverContextP ctx, VAImageID image);
/*
 * vaCreateImage implementation: allocate an image object, fill in the
 * per-fourcc plane layout (pitches/offsets/data_size), back it with a
 * VAImageBufferType buffer and, for paletted formats, allocate the
 * palette array.
 */
i965_CreateImage(VADriverContextP ctx,
                 VAImageFormat *format,
                 VAImage *out_image)        /* out */
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    struct object_image *obj_image;
    VAStatus va_status = VA_STATUS_ERROR_OPERATION_FAILED;

    unsigned int width2, height2, size2, size;

    /* Make the out parameter safe on every error path. */
    out_image->image_id = VA_INVALID_ID;
    out_image->buf = VA_INVALID_ID;

    image_id = NEW_IMAGE_ID();
    if (image_id == VA_INVALID_ID)
        return VA_STATUS_ERROR_ALLOCATION_FAILED;

    obj_image = IMAGE(image_id);
        return VA_STATUS_ERROR_ALLOCATION_FAILED;
    obj_image->bo = NULL;
    obj_image->palette = NULL;
    obj_image->derived_surface = VA_INVALID_ID;

    VAImage * const image = &obj_image->image;
    image->image_id = image_id;
    image->buf = VA_INVALID_ID;

    /* Luma plane size and (2x2-subsampled) chroma plane size. */
    size = width * height;
    width2 = (width + 1) / 2;
    height2 = (height + 1) / 2;
    size2 = width2 * height2;

    image->num_palette_entries = 0;
    image->entry_bytes = 0;
    memset(image->component_order, 0, sizeof(image->component_order));

    switch (format->fourcc) {
    /* 8-bit paletted formats: one plane plus a 16-entry RGB palette. */
    case VA_FOURCC('I','A','4','4'):
    case VA_FOURCC('A','I','4','4'):
        image->num_planes = 1;
        image->pitches[0] = width;
        image->offsets[0] = 0;
        image->data_size = image->offsets[0] + image->pitches[0] * height;
        image->num_palette_entries = 16;
        image->entry_bytes = 3;
        image->component_order[0] = 'R';
        image->component_order[1] = 'G';
        image->component_order[2] = 'B';

    /* 32-bit packed RGB: one plane, 4 bytes per pixel. */
    case VA_FOURCC('A','R','G','B'):
    case VA_FOURCC('A','B','G','R'):
    case VA_FOURCC('B','G','R','A'):
    case VA_FOURCC('R','G','B','A'):
        image->num_planes = 1;
        image->pitches[0] = width * 4;
        image->offsets[0] = 0;
        image->data_size = image->offsets[0] + image->pitches[0] * height;

    /* Planar 4:2:0 with V plane before U. */
    case VA_FOURCC('Y','V','1','2'):
        image->num_planes = 3;
        image->pitches[0] = width;
        image->offsets[0] = 0;
        image->pitches[1] = width2;
        image->offsets[1] = size + size2;
        image->pitches[2] = width2;
        image->offsets[2] = size;
        image->data_size = size + 2 * size2;

    /* Planar 4:2:0 with U plane before V. */
    case VA_FOURCC('I','4','2','0'):
        image->num_planes = 3;
        image->pitches[0] = width;
        image->offsets[0] = 0;
        image->pitches[1] = width2;
        image->offsets[1] = size;
        image->pitches[2] = width2;
        image->offsets[2] = size + size2;
        image->data_size = size + 2 * size2;

    /* Semi-planar 4:2:0: Y plane plus interleaved UV plane. */
    case VA_FOURCC('N','V','1','2'):
        image->num_planes = 2;
        image->pitches[0] = width;
        image->offsets[0] = 0;
        image->pitches[1] = width;
        image->offsets[1] = size;
        image->data_size = size + 2 * size2;

    /* Allocate the backing VA buffer for the pixel data. */
    va_status = i965_CreateBuffer(ctx, 0, VAImageBufferType,
                                  image->data_size, 1, NULL, &image->buf);
    if (va_status != VA_STATUS_SUCCESS)

    obj_image->bo = BUFFER(image->buf)->buffer_store->bo;
    dri_bo_reference(obj_image->bo);

    if (image->num_palette_entries > 0 && image->entry_bytes > 0) {
        /* NOTE(review): sizeof(obj_image->palette) is the size of the
         * POINTER, not of a palette entry; this looks like it should be
         * sizeof(*obj_image->palette) -- confirm against the struct. */
        obj_image->palette = malloc(image->num_palette_entries * sizeof(obj_image->palette));
        if (!obj_image->palette)

    image->image_id = image_id;
    image->format = *format;
    image->width = width;
    image->height = height;

    *out_image = *image;
    return VA_STATUS_SUCCESS;

    /* Error path: tear down the partially constructed image. */
    i965_DestroyImage(ctx, image_id);
/*
 * Lazily allocate (or validate) the GEM BO backing a surface for the
 * given fourcc/subsampling, computing per-plane offsets and pitches.
 * Tiled surfaces are padded to 128x32 alignment and allocated with
 * drm_intel_bo_alloc_tiled(); linear surfaces use plain dri_bo_alloc().
 */
i965_check_alloc_surface_bo(VADriverContextP ctx,
                            struct object_surface *obj_surface,
                            unsigned int fourcc,
                            unsigned int subsampling)
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    int region_width, region_height;

    /* Already allocated: just verify the requested layout matches. */
    if (obj_surface->bo) {
        assert(obj_surface->fourcc);
        assert(obj_surface->fourcc == fourcc);
        assert(obj_surface->subsampling == subsampling);

    obj_surface->x_cb_offset = 0; /* X offset is always 0 */
    obj_surface->x_cr_offset = 0;

    /* Tiled path: only NV12 / IMC1 / IMC3 layouts are supported. */
    assert(fourcc == VA_FOURCC('N', 'V', '1', '2') ||
           fourcc == VA_FOURCC('I', 'M', 'C', '1') ||
           fourcc == VA_FOURCC('I', 'M', 'C', '3'));

    obj_surface->width = ALIGN(obj_surface->orig_width, 128);
    obj_surface->height = ALIGN(obj_surface->orig_height, 32);
    obj_surface->cb_cr_pitch = obj_surface->width;
    region_width = obj_surface->width;
    region_height = obj_surface->height;

    if (fourcc == VA_FOURCC('N', 'V', '1', '2')) {
        assert(subsampling == SUBSAMPLE_YUV420);
        obj_surface->y_cb_offset = obj_surface->height;
        obj_surface->y_cr_offset = obj_surface->height;
        obj_surface->cb_cr_width = obj_surface->orig_width / 2;
        obj_surface->cb_cr_height = obj_surface->orig_height / 2;
        region_height = obj_surface->height + ALIGN(obj_surface->cb_cr_height, 32);
    } else if (fourcc == VA_FOURCC('I', 'M', 'C', '1') ||
               fourcc == VA_FOURCC('I', 'M', 'C', '3')) {
        /* IMC1/IMC3 support every JPEG chroma subsampling mode. */
        switch (subsampling) {
        case SUBSAMPLE_YUV400:
            obj_surface->cb_cr_width = 0;
            obj_surface->cb_cr_height = 0;

        case SUBSAMPLE_YUV420:
            obj_surface->cb_cr_width = obj_surface->orig_width / 2;
            obj_surface->cb_cr_height = obj_surface->orig_height / 2;

        case SUBSAMPLE_YUV422H:
            obj_surface->cb_cr_width = obj_surface->orig_width / 2;
            obj_surface->cb_cr_height = obj_surface->orig_height;

        case SUBSAMPLE_YUV422V:
            obj_surface->cb_cr_width = obj_surface->orig_width;
            obj_surface->cb_cr_height = obj_surface->orig_height / 2;

        case SUBSAMPLE_YUV444:
            obj_surface->cb_cr_width = obj_surface->orig_width;
            obj_surface->cb_cr_height = obj_surface->orig_height;

        case SUBSAMPLE_YUV411:
            obj_surface->cb_cr_width = obj_surface->orig_width / 4;
            obj_surface->cb_cr_height = obj_surface->orig_height;

        region_height = obj_surface->height + ALIGN(obj_surface->cb_cr_height, 32) * 2;

        /* IMC1 stores V before U; otherwise the order is reversed. */
        if (fourcc == VA_FOURCC('I', 'M', 'C', '1')) {
            obj_surface->y_cr_offset = obj_surface->height;
            obj_surface->y_cb_offset = obj_surface->y_cr_offset + ALIGN(obj_surface->cb_cr_height, 32);
            obj_surface->y_cb_offset = obj_surface->height;
            obj_surface->y_cr_offset = obj_surface->y_cb_offset + ALIGN(obj_surface->cb_cr_height, 32);

    /* Linear path: IMC formats are not supported, only 4:2:0. */
    assert(fourcc != VA_FOURCC('I', 'M', 'C', '1') &&
           fourcc != VA_FOURCC('I', 'M', 'C', '3'));
    assert(subsampling == SUBSAMPLE_YUV420);

    region_width = obj_surface->width;
    region_height = obj_surface->height;

    case VA_FOURCC('N', 'V', '1', '2'):
        obj_surface->y_cb_offset = obj_surface->height;
        obj_surface->y_cr_offset = obj_surface->height;
        obj_surface->cb_cr_width = obj_surface->orig_width / 2;
        obj_surface->cb_cr_height = obj_surface->orig_height / 2;
        obj_surface->cb_cr_pitch = obj_surface->width;
        region_height = obj_surface->height + obj_surface->height / 2;

    case VA_FOURCC('Y', 'V', '1', '2'):
    case VA_FOURCC('I', '4', '2', '0'):
        /* Same footprint for both; only the U/V plane order differs. */
        if (fourcc == VA_FOURCC('Y', 'V', '1', '2')) {
            obj_surface->y_cr_offset = obj_surface->height;
            obj_surface->y_cb_offset = obj_surface->height + obj_surface->height / 4;
            obj_surface->y_cb_offset = obj_surface->height;
            obj_surface->y_cr_offset = obj_surface->height + obj_surface->height / 4;

        obj_surface->cb_cr_width = obj_surface->orig_width / 2;
        obj_surface->cb_cr_height = obj_surface->orig_height / 2;
        obj_surface->cb_cr_pitch = obj_surface->width / 2;
        region_height = obj_surface->height + obj_surface->height / 2;

    /* Round the total footprint up to a whole 4 KiB page. */
    obj_surface->size = ALIGN(region_width * region_height, 0x1000);

        uint32_t tiling_mode = I915_TILING_Y; /* always uses Y-tiled format */
        unsigned long pitch;

        obj_surface->bo = drm_intel_bo_alloc_tiled(i965->intel.bufmgr,
        assert(tiling_mode == I915_TILING_Y);
        assert(pitch == obj_surface->width);

        obj_surface->bo = dri_bo_alloc(i965->intel.bufmgr,

    obj_surface->fourcc = fourcc;
    obj_surface->subsampling = subsampling;
    assert(obj_surface->bo);
/*
 * vaDeriveImage implementation: create a VAImage that aliases the
 * surface's own BO (zero-copy), so the application can map the surface
 * contents directly.  If the surface has no BO yet, one is allocated on
 * demand with a guessed format.
 */
VAStatus i965_DeriveImage(VADriverContextP ctx,
                          VASurfaceID surface,
                          VAImage *out_image)        /* out */
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    struct object_image *obj_image;
    struct object_surface *obj_surface;

    unsigned int w_pitch, h_pitch;
    VAStatus va_status = VA_STATUS_ERROR_OPERATION_FAILED;

    out_image->image_id = VA_INVALID_ID;
    obj_surface = SURFACE(surface);
        return VA_STATUS_ERROR_INVALID_SURFACE;

    if (!obj_surface->bo) {
        unsigned int is_tiled = 0;
        unsigned int fourcc = VA_FOURCC('Y', 'V', '1', '2');
        i965_guess_surface_format(ctx, surface, &fourcc, &is_tiled);
        i965_check_alloc_surface_bo(ctx, obj_surface, is_tiled, fourcc, SUBSAMPLE_YUV420);

    assert(obj_surface->fourcc);

    /* Aligned surface dimensions, not the caller-visible ones. */
    w_pitch = obj_surface->width;
    h_pitch = obj_surface->height;

    image_id = NEW_IMAGE_ID();

    if (image_id == VA_INVALID_ID)
        return VA_STATUS_ERROR_ALLOCATION_FAILED;

    obj_image = IMAGE(image_id);
        return VA_STATUS_ERROR_ALLOCATION_FAILED;

    obj_image->bo = NULL;
    obj_image->palette = NULL;
    obj_image->derived_surface = VA_INVALID_ID;

    VAImage * const image = &obj_image->image;

    memset(image, 0, sizeof(*image));
    image->image_id = image_id;
    image->buf = VA_INVALID_ID;
    image->num_palette_entries = 0;
    image->entry_bytes = 0;
    image->width = obj_surface->orig_width;
    image->height = obj_surface->orig_height;
    image->data_size = obj_surface->size;

    image->format.fourcc = obj_surface->fourcc;
    image->format.byte_order = VA_LSB_FIRST;
    /* 12 bpp: every layout handled below is 4:2:0. */
    image->format.bits_per_pixel = 12;

    /* Plane pitches/offsets come from the surface's own layout fields. */
    switch (image->format.fourcc) {
    case VA_FOURCC('Y', 'V', '1', '2'):
        image->num_planes = 3;
        image->pitches[0] = w_pitch; /* Y */
        image->offsets[0] = 0;
        image->pitches[1] = obj_surface->cb_cr_pitch; /* V */
        image->offsets[1] = w_pitch * obj_surface->y_cr_offset;
        image->pitches[2] = obj_surface->cb_cr_pitch; /* U */
        image->offsets[2] = w_pitch * obj_surface->y_cb_offset;

    case VA_FOURCC('N', 'V', '1', '2'):
        image->num_planes = 2;
        image->pitches[0] = w_pitch; /* Y */
        image->offsets[0] = 0;
        image->pitches[1] = obj_surface->cb_cr_pitch; /* UV */
        image->offsets[1] = w_pitch * obj_surface->y_cb_offset;

    case VA_FOURCC('I', '4', '2', '0'):
        image->num_planes = 3;
        image->pitches[0] = w_pitch; /* Y */
        image->offsets[0] = 0;
        image->pitches[1] = obj_surface->cb_cr_pitch; /* U */
        image->offsets[1] = w_pitch * obj_surface->y_cb_offset;
        image->pitches[2] = obj_surface->cb_cr_pitch; /* V */
        image->offsets[2] = w_pitch * obj_surface->y_cr_offset;

    /* Wrap the surface BO in a VA buffer without copying pixel data. */
    va_status = i965_create_buffer_internal(ctx, 0, VAImageBufferType,
                                            obj_surface->size, 1, NULL, obj_surface->bo, &image->buf);
    if (va_status != VA_STATUS_SUCCESS)

    obj_image->bo = BUFFER(image->buf)->buffer_store->bo;
    dri_bo_reference(obj_image->bo);

    if (image->num_palette_entries > 0 && image->entry_bytes > 0) {
        /* NOTE(review): sizeof(obj_image->palette) is the pointer size,
         * not the entry size -- likely should be
         * sizeof(*obj_image->palette); confirm. */
        obj_image->palette = malloc(image->num_palette_entries * sizeof(obj_image->palette));
        if (!obj_image->palette) {
            va_status = VA_STATUS_ERROR_ALLOCATION_FAILED;

    *out_image = *image;
    /* Mark the surface so destruction order can be handled correctly. */
    obj_surface->flags |= SURFACE_DERIVED;
    obj_image->derived_surface = surface;

    return VA_STATUS_SUCCESS;

    /* Error path: tear down the partially constructed image. */
    i965_DestroyImage(ctx, image_id);
/*
 * Heap destructor callback for VAImage objects: returns the object's slot
 * to the image heap.  Also installed as the destroy callback passed to
 * i965_destroy_heap() in i965_Terminate().
 * NOTE(review): the "static void" line and braces are elided in this
 * extract — confirm against the full file.
 */
2346 i965_destroy_image(struct object_heap *heap, struct object_base *obj)
2348 object_heap_free(heap, obj);
/*
 * vaDestroyImage entry point: releases every resource attached to the
 * image object (GEM bo reference, backing VA buffer, palette), clears the
 * SURFACE_DERIVED flag on a derived surface, then frees the heap slot.
 * Returns VA_STATUS_SUCCESS even for an unknown image id (early return).
 * NOTE(review): several structural lines (braces, the NULL check guarding
 * the first early return) are elided in this extract.
 */
2353 i965_DestroyImage(VADriverContextP ctx, VAImageID image)
2355 struct i965_driver_data *i965 = i965_driver_data(ctx);
2356 struct object_image *obj_image = IMAGE(image);
2357 struct object_surface *obj_surface;
/* Unknown image id: nothing to destroy, treated as success. */
2360 return VA_STATUS_SUCCESS;
/* Drop our extra reference on the image bo taken at creation time. */
2362 dri_bo_unreference(obj_image->bo);
2363 obj_image->bo = NULL;
/* The image's VAImageBufferType buffer owns the other bo reference. */
2365 if (obj_image->image.buf != VA_INVALID_ID) {
2366 i965_DestroyBuffer(ctx, obj_image->image.buf);
2367 obj_image->image.buf = VA_INVALID_ID;
2370 if (obj_image->palette) {
2371 free(obj_image->palette);
2372 obj_image->palette = NULL;
/* If this image was created via vaDeriveImage, detach it from the surface. */
2375 obj_surface = SURFACE(obj_image->derived_surface);
2378 obj_surface->flags &= ~SURFACE_DERIVED;
2381 i965_destroy_image(&i965->image_heap, (struct object_base *)obj_image);
2383 return VA_STATUS_SUCCESS;
2387 * pointer to an array holding the palette data. The size of the array is
2388 * num_palette_entries * entry_bytes in size. The order of the components
2389 * in the palette is described by the component_order in VASubpicture struct
/*
 * vaSetImagePalette entry point: packs caller-supplied 3-byte (R,G,B)
 * palette entries into the image's internal palette as 0x00RRGGBB words
 * (R in bits 23:16, G in 15:8, B in 7:0).
 * Fails with ALLOCATION_FAILED when the image has no palette storage
 * (i.e. a non-paletted format) — see the XXX below.
 * NOTE(review): the VAImageID parameter line, "int i;" declaration and
 * braces are elided in this extract.
 */
2392 i965_SetImagePalette(VADriverContextP ctx,
2394 unsigned char *palette)
2396 struct i965_driver_data *i965 = i965_driver_data(ctx);
2399 struct object_image *obj_image = IMAGE(image);
2401 return VA_STATUS_ERROR_INVALID_IMAGE;
2403 if (!obj_image->palette)
2404 return VA_STATUS_ERROR_ALLOCATION_FAILED; /* XXX: unpaletted/error */
2406 for (i = 0; i < obj_image->image.num_palette_entries; i++)
2407 obj_image->palette[i] = (((unsigned int)palette[3*i + 0] << 16) |
2408 ((unsigned int)palette[3*i + 1] << 8) |
2409 (unsigned int)palette[3*i + 2]);
2410 return VA_STATUS_SUCCESS;
/*
 * Row-by-row copy between two 2D pixel buffers with independent strides:
 * copies 'height' rows of 'len' bytes each.  Used by all the sw
 * get/put-image helpers below.
 * NOTE(review): the per-row pointer advances (dst += dst_stride;
 * src += src_stride;) and closing braces are elided in this extract —
 * without them the visible loop would copy the same row repeatedly, so
 * they must exist in the full file.
 */
2414 memcpy_pic(uint8_t *dst, unsigned int dst_stride,
2415 const uint8_t *src, unsigned int src_stride,
2416 unsigned int len, unsigned int height)
2420 for (i = 0; i < height; i++) {
2421 memcpy(dst, src, len);
/*
 * CPU path of vaGetImage for planar YUV 4:2:0: copies the rectangle
 * 'rect' from the surface's bo into the user image data.
 *
 * The destination VAImage may be I420 or YV12; the source surface layout
 * is I420 (Y, then U, then V).  U/V destination plane indices are swapped
 * via the fourcc comparison below, so a YV12 image receives the planes in
 * its own (Y, V, U) order without a separate code path.
 *
 * Tiled bos are mapped through the GTT (which untiles on access);
 * linear bos are CPU-mapped directly.
 */
2428 get_image_i420(struct object_image *obj_image, uint8_t *image_data,
2429 struct object_surface *obj_surface,
2430 const VARectangle *rect)
2432 uint8_t *dst[3], *src[3];
/* Image plane index for U/V: identity when image fourcc matches the
 * surface fourcc, swapped (1<->2) otherwise (I420 vs YV12). */
2434 const int U = obj_image->image.format.fourcc == obj_surface->fourcc ? 1 : 2;
2435 const int V = obj_image->image.format.fourcc == obj_surface->fourcc ? 2 : 1;
2436 unsigned int tiling, swizzle;
2438 if (!obj_surface->bo)
2441 assert(obj_surface->fourcc);
2442 dri_bo_get_tiling(obj_surface->bo, &tiling, &swizzle);
2444 if (tiling != I915_TILING_NONE)
2445 drm_intel_gem_bo_map_gtt(obj_surface->bo);
2447 dri_bo_map(obj_surface->bo, 0);
2449 if (!obj_surface->bo->virtual)
2452 /* Dest VA image has either I420 or YV12 format.
2453 Source VA surface alway has I420 format */
2454 dst[Y] = image_data + obj_image->image.offsets[Y];
2455 src[0] = (uint8_t *)obj_surface->bo->virtual;
2456 dst[U] = image_data + obj_image->image.offsets[U];
/* Source planes are contiguous: U follows the full-size Y plane,
 * V follows the quarter-size U plane. */
2457 src[1] = src[0] + obj_surface->width * obj_surface->height;
2458 dst[V] = image_data + obj_image->image.offsets[V];
2459 src[2] = src[1] + (obj_surface->width / 2) * (obj_surface->height / 2);
/* Y plane: full-resolution rectangle. */
2462 dst[Y] += rect->y * obj_image->image.pitches[Y] + rect->x;
2463 src[0] += rect->y * obj_surface->width + rect->x;
2464 memcpy_pic(dst[Y], obj_image->image.pitches[Y],
2465 src[0], obj_surface->width,
2466 rect->width, rect->height);
/* Chroma planes: rectangle halved in both dimensions (4:2:0). */
2469 dst[U] += (rect->y / 2) * obj_image->image.pitches[U] + rect->x / 2;
2470 src[1] += (rect->y / 2) * obj_surface->width / 2 + rect->x / 2;
2471 memcpy_pic(dst[U], obj_image->image.pitches[U],
2472 src[1], obj_surface->width / 2,
2473 rect->width / 2, rect->height / 2);
2476 dst[V] += (rect->y / 2) * obj_image->image.pitches[V] + rect->x / 2;
2477 src[2] += (rect->y / 2) * obj_surface->width / 2 + rect->x / 2;
2478 memcpy_pic(dst[V], obj_image->image.pitches[V],
2479 src[2], obj_surface->width / 2,
2480 rect->width / 2, rect->height / 2);
/* Unmap with the method matching the map above. */
2482 if (tiling != I915_TILING_NONE)
2483 drm_intel_gem_bo_unmap_gtt(obj_surface->bo);
2485 dri_bo_unmap(obj_surface->bo);
/*
 * CPU path of vaGetImage for NV12: copies the rectangle 'rect' from the
 * surface's bo (Y plane followed by an interleaved UV plane) into the
 * user image data.  Both image and surface are NV12, so no plane
 * reordering is needed.  Tiled bos are mapped through the GTT.
 */
2489 get_image_nv12(struct object_image *obj_image, uint8_t *image_data,
2490 struct object_surface *obj_surface,
2491 const VARectangle *rect)
2493 uint8_t *dst[2], *src[2];
2494 unsigned int tiling, swizzle;
2496 if (!obj_surface->bo)
2499 assert(obj_surface->fourcc);
2500 dri_bo_get_tiling(obj_surface->bo, &tiling, &swizzle);
2502 if (tiling != I915_TILING_NONE)
2503 drm_intel_gem_bo_map_gtt(obj_surface->bo);
2505 dri_bo_map(obj_surface->bo, 0);
2507 if (!obj_surface->bo->virtual)
2510 /* Both dest VA image and source surface have NV12 format */
2511 dst[0] = image_data + obj_image->image.offsets[0];
2512 src[0] = (uint8_t *)obj_surface->bo->virtual;
2513 dst[1] = image_data + obj_image->image.offsets[1];
/* UV plane starts right after the full-size Y plane. */
2514 src[1] = src[0] + obj_surface->width * obj_surface->height;
/* Y plane: full-resolution rectangle. */
2517 dst[0] += rect->y * obj_image->image.pitches[0] + rect->x;
2518 src[0] += rect->y * obj_surface->width + rect->x;
2519 memcpy_pic(dst[0], obj_image->image.pitches[0],
2520 src[0], obj_surface->width,
2521 rect->width, rect->height);
/* UV plane: half height, full byte width (U and V interleaved);
 * x is rounded down to even so a U/V pair is never split. */
2524 dst[1] += (rect->y / 2) * obj_image->image.pitches[1] + (rect->x & -2);
2525 src[1] += (rect->y / 2) * obj_surface->width + (rect->x & -2);
2526 memcpy_pic(dst[1], obj_image->image.pitches[1],
2527 src[1], obj_surface->width,
2528 rect->width, rect->height / 2);
2530 if (tiling != I915_TILING_NONE)
2531 drm_intel_gem_bo_unmap_gtt(obj_surface->bo);
2533 dri_bo_unmap(obj_surface->bo);
/*
 * Software (CPU-copy) implementation of vaGetImage.
 *
 * Validates the surface/image ids and the requested rectangle against
 * both the surface and image extents, maps the image buffer, then
 * dispatches on the image fourcc:
 *   - YV12/I420: only valid when the render state is planar
 *     (!interleaved_uv) — I420 is the native decode format there;
 *   - NV12: only valid when the render state is interleaved_uv.
 * A format/state mismatch yields VA_STATUS_ERROR_OPERATION_FAILED.
 * NOTE(review): rect.x/rect.y/rect.width assignments, break statements,
 * and the goto targets are elided in this extract.
 */
2537 i965_sw_getimage(VADriverContextP ctx,
2538 VASurfaceID surface,
2539 int x, /* coordinates of the upper left source pixel */
2541 unsigned int width, /* width and height of the region */
2542 unsigned int height,
2545 struct i965_driver_data *i965 = i965_driver_data(ctx);
2546 struct i965_render_state *render_state = &i965->render_state;
2548 struct object_surface *obj_surface = SURFACE(surface);
2550 return VA_STATUS_ERROR_INVALID_SURFACE;
2552 struct object_image *obj_image = IMAGE(image);
2554 return VA_STATUS_ERROR_INVALID_IMAGE;
2557 return VA_STATUS_ERROR_INVALID_PARAMETER;
/* Rectangle must fit inside both the surface and the image. */
2558 if (x + width > obj_surface->orig_width ||
2559 y + height > obj_surface->orig_height)
2560 return VA_STATUS_ERROR_INVALID_PARAMETER;
2561 if (x + width > obj_image->image.width ||
2562 y + height > obj_image->image.height)
2563 return VA_STATUS_ERROR_INVALID_PARAMETER;
2566 void *image_data = NULL;
2568 va_status = i965_MapBuffer(ctx, obj_image->image.buf, &image_data);
2569 if (va_status != VA_STATUS_SUCCESS)
2576 rect.height = height;
2578 switch (obj_image->image.format.fourcc) {
2579 case VA_FOURCC('Y','V','1','2'):
2580 case VA_FOURCC('I','4','2','0'):
2581 /* I420 is native format for MPEG-2 decoded surfaces */
2582 if (render_state->interleaved_uv)
2583 goto operation_failed;
2584 get_image_i420(obj_image, image_data, obj_surface, &rect);
2586 case VA_FOURCC('N','V','1','2'):
2587 /* NV12 is native format for H.264 decoded surfaces */
2588 if (!render_state->interleaved_uv)
2589 goto operation_failed;
2590 get_image_nv12(obj_image, image_data, obj_surface, &rect);
2594 va_status = VA_STATUS_ERROR_OPERATION_FAILED;
/* Always unmap the image buffer before returning. */
2598 i965_UnmapBuffer(ctx, obj_image->image.buf);
/*
 * Hardware-accelerated implementation of vaGetImage: after the same
 * validation as the sw path, builds i965_surface descriptors for the
 * source surface and destination image and hands the copy to
 * i965_image_processing() (the GPU post-processing pipeline).
 * A surface without a bo has nothing to read and returns success.
 * NOTE(review): rect.x/rect.y/rect.width assignments and the closing
 * i965_image_processing() argument list are elided in this extract.
 */
2603 i965_hw_getimage(VADriverContextP ctx,
2604 VASurfaceID surface,
2605 int x, /* coordinates of the upper left source pixel */
2607 unsigned int width, /* width and height of the region */
2608 unsigned int height,
2611 struct i965_driver_data *i965 = i965_driver_data(ctx);
2612 struct i965_surface src_surface;
2613 struct i965_surface dst_surface;
2616 struct object_surface *obj_surface = SURFACE(surface);
2617 struct object_image *obj_image = IMAGE(image);
2620 return VA_STATUS_ERROR_INVALID_SURFACE;
2623 return VA_STATUS_ERROR_INVALID_IMAGE;
2626 return VA_STATUS_ERROR_INVALID_PARAMETER;
/* Rectangle must fit inside both the surface and the image. */
2627 if (x + width > obj_surface->orig_width ||
2628 y + height > obj_surface->orig_height)
2629 return VA_STATUS_ERROR_INVALID_PARAMETER;
2630 if (x + width > obj_image->image.width ||
2631 y + height > obj_image->image.height)
2632 return VA_STATUS_ERROR_INVALID_PARAMETER;
/* Nothing rendered into the surface yet: treated as success. */
2634 if (!obj_surface->bo)
2635 return VA_STATUS_SUCCESS;
2640 rect.height = height;
2642 src_surface.id = surface;
2643 src_surface.type = I965_SURFACE_TYPE_SURFACE;
2644 src_surface.flags = I965_SURFACE_FLAG_FRAME;
2646 dst_surface.id = image;
2647 dst_surface.type = I965_SURFACE_TYPE_IMAGE;
2648 dst_surface.flags = I965_SURFACE_FLAG_FRAME;
2650 va_status = i965_image_processing(ctx,
/*
 * vaGetImage entry point: dispatches to the GPU path when the chipset
 * supports accelerated GetImage (HAS_ACCELERATED_GETIMAGE), otherwise
 * falls back to the CPU copy.  Both paths take identical arguments.
 * NOTE(review): the argument lists of the two forwarded calls and the
 * final return are elided in this extract.
 */
2661 i965_GetImage(VADriverContextP ctx,
2662 VASurfaceID surface,
2663 int x, /* coordinates of the upper left source pixel */
2665 unsigned int width, /* width and height of the region */
2666 unsigned int height,
2669 struct i965_driver_data * const i965 = i965_driver_data(ctx);
2672 if (HAS_ACCELERATED_GETIMAGE(i965))
2673 va_status = i965_hw_getimage(ctx,
2679 va_status = i965_sw_getimage(ctx,
/*
 * CPU path of vaPutImage for planar YUV 4:2:0: copies src_rect from the
 * user image into dst_rect of the surface's bo (mirror image of
 * get_image_i420 — same U/V index swap handles I420 vs YV12 images, and
 * scaling is not supported, hence the equal-size asserts).
 */
2689 put_image_i420(struct object_surface *obj_surface,
2690 const VARectangle *dst_rect,
2691 struct object_image *obj_image, uint8_t *image_data,
2692 const VARectangle *src_rect)
2694 uint8_t *dst[3], *src[3];
/* Image plane index for U/V: identity when fourccs match, swapped
 * (1<->2) otherwise (I420 vs YV12). */
2696 const int U = obj_image->image.format.fourcc == obj_surface->fourcc ? 1 : 2;
2697 const int V = obj_image->image.format.fourcc == obj_surface->fourcc ? 2 : 1;
2698 unsigned int tiling, swizzle;
2700 if (!obj_surface->bo)
2703 assert(obj_surface->fourcc);
2704 assert(dst_rect->width == src_rect->width);
2705 assert(dst_rect->height == src_rect->height);
2706 dri_bo_get_tiling(obj_surface->bo, &tiling, &swizzle);
2708 if (tiling != I915_TILING_NONE)
2709 drm_intel_gem_bo_map_gtt(obj_surface->bo);
2711 dri_bo_map(obj_surface->bo, 0);
2713 if (!obj_surface->bo->virtual)
2716 /* Dest VA image has either I420 or YV12 format.
2717 Source VA surface alway has I420 format */
2718 dst[0] = (uint8_t *)obj_surface->bo->virtual;
2719 src[Y] = image_data + obj_image->image.offsets[Y];
/* Surface planes are contiguous: U after Y, V after U. */
2720 dst[1] = dst[0] + obj_surface->width * obj_surface->height;
2721 src[U] = image_data + obj_image->image.offsets[U];
2722 dst[2] = dst[1] + (obj_surface->width / 2) * (obj_surface->height / 2);
2723 src[V] = image_data + obj_image->image.offsets[V];
/* Y plane: full-resolution rectangle. */
2726 dst[0] += dst_rect->y * obj_surface->width + dst_rect->x;
2727 src[Y] += src_rect->y * obj_image->image.pitches[Y] + src_rect->x;
2728 memcpy_pic(dst[0], obj_surface->width,
2729 src[Y], obj_image->image.pitches[Y],
2730 src_rect->width, src_rect->height);
/* Chroma planes: rectangle halved in both dimensions (4:2:0). */
2733 dst[1] += (dst_rect->y / 2) * obj_surface->width / 2 + dst_rect->x / 2;
2734 src[U] += (src_rect->y / 2) * obj_image->image.pitches[U] + src_rect->x / 2;
2735 memcpy_pic(dst[1], obj_surface->width / 2,
2736 src[U], obj_image->image.pitches[U],
2737 src_rect->width / 2, src_rect->height / 2);
2740 dst[2] += (dst_rect->y / 2) * obj_surface->width / 2 + dst_rect->x / 2;
2741 src[V] += (src_rect->y / 2) * obj_image->image.pitches[V] + src_rect->x / 2;
2742 memcpy_pic(dst[2], obj_surface->width / 2,
2743 src[V], obj_image->image.pitches[V],
2744 src_rect->width / 2, src_rect->height / 2);
2746 if (tiling != I915_TILING_NONE)
2747 drm_intel_gem_bo_unmap_gtt(obj_surface->bo);
2749 dri_bo_unmap(obj_surface->bo);
/*
 * CPU path of vaPutImage for NV12: copies src_rect from the user image
 * into dst_rect of the surface's bo (Y plane + interleaved UV plane).
 * Scaling is not supported (equal-size asserts).  Mirror image of
 * get_image_nv12.
 */
2753 put_image_nv12(struct object_surface *obj_surface,
2754 const VARectangle *dst_rect,
2755 struct object_image *obj_image, uint8_t *image_data,
2756 const VARectangle *src_rect)
2758 uint8_t *dst[2], *src[2];
2759 unsigned int tiling, swizzle;
2761 if (!obj_surface->bo)
2764 assert(obj_surface->fourcc);
2765 assert(dst_rect->width == src_rect->width);
2766 assert(dst_rect->height == src_rect->height);
2767 dri_bo_get_tiling(obj_surface->bo, &tiling, &swizzle);
2769 if (tiling != I915_TILING_NONE)
2770 drm_intel_gem_bo_map_gtt(obj_surface->bo);
2772 dri_bo_map(obj_surface->bo, 0);
2774 if (!obj_surface->bo->virtual)
2777 /* Both dest VA image and source surface have NV12 format */
2778 dst[0] = (uint8_t *)obj_surface->bo->virtual;
2779 src[0] = image_data + obj_image->image.offsets[0];
/* UV plane follows the full-size Y plane in the surface bo. */
2780 dst[1] = dst[0] + obj_surface->width * obj_surface->height;
2781 src[1] = image_data + obj_image->image.offsets[1];
/* Y plane: full-resolution rectangle. */
2784 dst[0] += dst_rect->y * obj_surface->width + dst_rect->x;
2785 src[0] += src_rect->y * obj_image->image.pitches[0] + src_rect->x;
2786 memcpy_pic(dst[0], obj_surface->width,
2787 src[0], obj_image->image.pitches[0],
2788 src_rect->width, src_rect->height);
/* UV plane: half height, x rounded down to even to keep U/V pairs. */
2791 dst[1] += (dst_rect->y / 2) * obj_surface->width + (dst_rect->x & -2);
2792 src[1] += (src_rect->y / 2) * obj_image->image.pitches[1] + (src_rect->x & -2);
2793 memcpy_pic(dst[1], obj_surface->width,
2794 src[1], obj_image->image.pitches[1],
2795 src_rect->width, src_rect->height / 2);
2797 if (tiling != I915_TILING_NONE)
2798 drm_intel_gem_bo_unmap_gtt(obj_surface->bo);
2800 dri_bo_unmap(obj_surface->bo);
/*
 * Software (CPU-copy) implementation of vaPutImage.
 *
 * Validates ids and both rectangles, rejects scaling (src and dest sizes
 * must match), and enforces an exact fourcc match when the surface has
 * already been bound to a format.  A surface not yet used for decoding
 * gets an untiled bo allocated in the image's own format.  The copy is
 * then dispatched per fourcc to put_image_i420/put_image_nv12.
 * NOTE(review): src_rect.x/y assignments, break statements and some
 * braces are elided in this extract.
 */
2804 i965_sw_putimage(VADriverContextP ctx,
2805 VASurfaceID surface,
2809 unsigned int src_width,
2810 unsigned int src_height,
2813 unsigned int dest_width,
2814 unsigned int dest_height)
2816 struct i965_driver_data *i965 = i965_driver_data(ctx);
2817 struct object_surface *obj_surface = SURFACE(surface);
2820 return VA_STATUS_ERROR_INVALID_SURFACE;
2822 struct object_image *obj_image = IMAGE(image);
2824 return VA_STATUS_ERROR_INVALID_IMAGE;
2826 if (src_x < 0 || src_y < 0)
2827 return VA_STATUS_ERROR_INVALID_PARAMETER;
2828 if (src_x + src_width > obj_image->image.width ||
2829 src_y + src_height > obj_image->image.height)
2830 return VA_STATUS_ERROR_INVALID_PARAMETER;
2831 if (dest_x < 0 || dest_y < 0)
2832 return VA_STATUS_ERROR_INVALID_PARAMETER;
2833 if (dest_x + dest_width > obj_surface->orig_width ||
2834 dest_y + dest_height > obj_surface->orig_height)
2835 return VA_STATUS_ERROR_INVALID_PARAMETER;
2837 /* XXX: don't allow scaling */
2838 if (src_width != dest_width || src_height != dest_height)
2839 return VA_STATUS_ERROR_INVALID_PARAMETER;
2841 if (obj_surface->fourcc) {
2842 /* Don't allow format mismatch */
2843 if (obj_surface->fourcc != obj_image->image.format.fourcc)
2844 return VA_STATUS_ERROR_INVALID_IMAGE_FORMAT;
2848 /* VA is surface not used for decoding, use same VA image format */
2849 i965_check_alloc_surface_bo(
2852 0, /* XXX: don't use tiled surface */
2853 obj_image->image.format.fourcc,
2858 void *image_data = NULL;
2860 va_status = i965_MapBuffer(ctx, obj_image->image.buf, &image_data);
2861 if (va_status != VA_STATUS_SUCCESS)
2864 VARectangle src_rect, dest_rect;
2867 src_rect.width = src_width;
2868 src_rect.height = src_height;
2869 dest_rect.x = dest_x;
2870 dest_rect.y = dest_y;
2871 dest_rect.width = dest_width;
2872 dest_rect.height = dest_height;
2874 switch (obj_image->image.format.fourcc) {
2875 case VA_FOURCC('Y','V','1','2'):
2876 case VA_FOURCC('I','4','2','0'):
2877 put_image_i420(obj_surface, &dest_rect, obj_image, image_data, &src_rect);
2879 case VA_FOURCC('N','V','1','2'):
2880 put_image_nv12(obj_surface, &dest_rect, obj_image, image_data, &src_rect);
2883 va_status = VA_STATUS_ERROR_OPERATION_FAILED;
/* Always unmap the image buffer before returning. */
2887 i965_UnmapBuffer(ctx, obj_image->image.buf);
/*
 * Hardware-accelerated implementation of vaPutImage: validates ids and
 * rectangles, lazily allocates the surface bo (matching the image bo's
 * tiling and fourcc) when the surface has none, then performs the
 * image -> surface blit via i965_image_processing().  Unlike the sw
 * path, scaling is permitted (src and dest rects are passed separately).
 * NOTE(review): parameter lines for image/src_x/src_y/dest_x/dest_y,
 * the negative-coordinate checks, src_rect.x/y assignments and the
 * closing i965_image_processing() argument list are elided here.
 */
2892 i965_hw_putimage(VADriverContextP ctx,
2893 VASurfaceID surface,
2897 unsigned int src_width,
2898 unsigned int src_height,
2901 unsigned int dest_width,
2902 unsigned int dest_height)
2904 struct i965_driver_data *i965 = i965_driver_data(ctx);
2905 struct object_surface *obj_surface = SURFACE(surface);
2906 struct object_image *obj_image = IMAGE(image);
2907 struct i965_surface src_surface, dst_surface;
2908 VAStatus va_status = VA_STATUS_SUCCESS;
2909 VARectangle src_rect, dst_rect;
2912 return VA_STATUS_ERROR_INVALID_SURFACE;
/* The GPU path needs a source bo to read from. */
2914 if (!obj_image || !obj_image->bo)
2915 return VA_STATUS_ERROR_INVALID_IMAGE;
2919 src_x + src_width > obj_image->image.width ||
2920 src_y + src_height > obj_image->image.height)
2921 return VA_STATUS_ERROR_INVALID_PARAMETER;
2925 dest_x + dest_width > obj_surface->orig_width ||
2926 dest_y + dest_height > obj_surface->orig_height)
2927 return VA_STATUS_ERROR_INVALID_PARAMETER;
2929 if (!obj_surface->bo) {
2930 unsigned int tiling, swizzle;
/* Match the image bo's tiling so the blit source/dest agree. */
2931 dri_bo_get_tiling(obj_image->bo, &tiling, &swizzle);
2933 i965_check_alloc_surface_bo(ctx,
2936 obj_image->image.format.fourcc,
2940 assert(obj_surface->fourcc);
2942 src_surface.id = image;
2943 src_surface.type = I965_SURFACE_TYPE_IMAGE;
2944 src_surface.flags = I965_SURFACE_FLAG_FRAME;
2947 src_rect.width = src_width;
2948 src_rect.height = src_height;
2950 dst_surface.id = surface;
2951 dst_surface.type = I965_SURFACE_TYPE_SURFACE;
2952 dst_surface.flags = I965_SURFACE_FLAG_FRAME;
2953 dst_rect.x = dest_x;
2954 dst_rect.y = dest_y;
2955 dst_rect.width = dest_width;
2956 dst_rect.height = dest_height;
2958 va_status = i965_image_processing(ctx,
/*
 * vaPutImage entry point: dispatches to the GPU path when the chipset
 * supports accelerated PutImage (HAS_ACCELERATED_PUTIMAGE), otherwise
 * falls back to the CPU copy.  Both paths take identical arguments.
 * NOTE(review): the forwarded argument lists and the final return are
 * elided in this extract.
 */
2968 i965_PutImage(VADriverContextP ctx,
2969 VASurfaceID surface,
2973 unsigned int src_width,
2974 unsigned int src_height,
2977 unsigned int dest_width,
2978 unsigned int dest_height)
2980 struct i965_driver_data *i965 = i965_driver_data(ctx);
2981 VAStatus va_status = VA_STATUS_SUCCESS;
2983 if (HAS_ACCELERATED_PUTIMAGE(i965))
2984 va_status = i965_hw_putimage(ctx,
2996 va_status = i965_sw_putimage(ctx,
/*
 * vaPutSurface entry point (DRI2 only): renders a decoded surface into
 * an X drawable.
 *
 * Flow: look up the DRI2 rendering buffer for the drawable; if its flink
 * name changed (window resized / buffer swapped), rebuild the cached
 * intel_region wrapping it; derive post-processing flags from the
 * caller's scaling/deinterlacing flags; blit via intel_render_put_surface
 * (plus subpictures if attached); swap buffers; finally, once a surface
 * has been displayed and is no longer referenced, its bo and private
 * decoder data are released to save memory.
 * All of this runs under render_mutex.
 * NOTE(review): several lines (new_region assignment, pp_flag init,
 * rect x/y assignments, some braces) are elided in this extract.
 * "DEINTERLACING_TOP_FISRT" is the macro's actual (misspelled) name as
 * defined elsewhere in the project — do not "fix" it locally.
 */
3012 i965_PutSurface(VADriverContextP ctx,
3013 VASurfaceID surface,
3014 void *draw, /* X Drawable */
3017 unsigned short srcw,
3018 unsigned short srch,
3021 unsigned short destw,
3022 unsigned short desth,
3023 VARectangle *cliprects, /* client supplied clip list */
3024 unsigned int number_cliprects, /* number of clip rects in the clip list */
3025 unsigned int flags) /* de-interlacing flags */
3027 struct i965_driver_data *i965 = i965_driver_data(ctx);
3028 struct dri_state *dri_state = (struct dri_state *)ctx->dri_state;
3029 struct i965_render_state *render_state = &i965->render_state;
3030 struct dri_drawable *dri_drawable;
3031 union dri_buffer *buffer;
3032 struct intel_region *dest_region;
3033 struct object_surface *obj_surface;
3034 VARectangle src_rect, dst_rect;
3037 Bool new_region = False;
3040 /* Currently don't support DRI1 */
3041 if (dri_state->driConnectedFlag != VA_DRI2)
3042 return VA_STATUS_ERROR_UNKNOWN;
3044 /* Some broken sources such as H.264 conformance case FM2_SVA_C
3047 obj_surface = SURFACE(surface);
3048 if (!obj_surface || !obj_surface->bo)
3049 return VA_STATUS_SUCCESS;
3051 _i965LockMutex(&i965->render_mutex);
3053 dri_drawable = dri_get_drawable(ctx, (Drawable)draw);
3054 assert(dri_drawable);
3056 buffer = dri_get_rendering_buffer(ctx, dri_drawable);
3059 dest_region = render_state->draw_region;
/* Detect a stale cached region by comparing GEM flink names. */
3062 assert(dest_region->bo);
3063 dri_bo_flink(dest_region->bo, &name);
3065 if (buffer->dri2.name != name) {
3067 dri_bo_unreference(dest_region->bo);
3070 dest_region = (struct intel_region *)calloc(1, sizeof(*dest_region));
3071 assert(dest_region);
3072 render_state->draw_region = dest_region;
/* (Re)describe the drawable's buffer as an intel_region. */
3077 dest_region->x = dri_drawable->x;
3078 dest_region->y = dri_drawable->y;
3079 dest_region->width = dri_drawable->width;
3080 dest_region->height = dri_drawable->height;
3081 dest_region->cpp = buffer->dri2.cpp;
3082 dest_region->pitch = buffer->dri2.pitch;
3084 dest_region->bo = intel_bo_gem_create_from_name(i965->intel.bufmgr, "rendering buffer", buffer->dri2.name);
3085 assert(dest_region->bo);
3087 ret = dri_bo_get_tiling(dest_region->bo, &(dest_region->tiling), &(dest_region->swizzle));
/* Translate VA flags into post-processing flags. */
3091 if ((flags & VA_FILTER_SCALING_MASK) == VA_FILTER_SCALING_NL_ANAMORPHIC)
3092 pp_flag |= I965_PP_FLAG_AVS;
3094 if (flags & VA_TOP_FIELD)
3095 pp_flag |= I965_PP_FLAG_DEINTERLACING_TOP_FISRT;
3096 else if (flags & VA_BOTTOM_FIELD)
3097 pp_flag |= I965_PP_FLAG_DEINTERLACING_BOTTOM_FIRST;
3101 src_rect.width = srcw;
3102 src_rect.height = srch;
3106 dst_rect.width = destw;
3107 dst_rect.height = desth;
3109 intel_render_put_surface(ctx, surface, &src_rect, &dst_rect, pp_flag);
3111 if(obj_surface->subpic != VA_INVALID_ID) {
3112 intel_render_put_subpicture(ctx, surface, &src_rect, &dst_rect);
3115 dri_swap_buffer(ctx, dri_drawable);
3116 obj_surface->flags |= SURFACE_DISPLAYED;
/* Displayed and otherwise unreferenced: drop the bo to save memory. */
3118 if ((obj_surface->flags & SURFACE_ALL_MASK) == SURFACE_DISPLAYED) {
3119 dri_bo_unreference(obj_surface->bo);
3120 obj_surface->bo = NULL;
3121 obj_surface->flags &= ~SURFACE_REF_DIS_MASK;
3123 if (obj_surface->free_private_data)
3124 obj_surface->free_private_data(&obj_surface->private_data);
3127 _i965UnlockMutex(&i965->render_mutex);
3129 return VA_STATUS_SUCCESS;
/*
 * vaTerminate entry point: tears the driver down in dependency order —
 * batchbuffer, mutexes, render and post-processing engines, the intel
 * driver core, then every object heap, and finally the driver-data
 * allocation itself.  Any sub-terminate failure aborts with UNKNOWN
 * (leaving later resources unfreed — acceptable at process teardown).
 */
3133 i965_Terminate(VADriverContextP ctx)
3135 struct i965_driver_data *i965 = i965_driver_data(ctx);
3138 intel_batchbuffer_free(i965->batch);
3140 _i965DestroyMutex(&i965->pp_mutex);
3141 _i965DestroyMutex(&i965->render_mutex);
3143 if (i965_render_terminate(ctx) == False)
3144 return VA_STATUS_ERROR_UNKNOWN;
3146 if (i965_post_processing_terminate(ctx) == False)
3147 return VA_STATUS_ERROR_UNKNOWN;
3149 if (intel_driver_terminate(ctx) == False)
3150 return VA_STATUS_ERROR_UNKNOWN;
/* Heaps are destroyed with their per-object destructor callbacks. */
3152 i965_destroy_heap(&i965->buffer_heap, i965_destroy_buffer);
3153 i965_destroy_heap(&i965->image_heap, i965_destroy_image);
3154 i965_destroy_heap(&i965->subpic_heap, i965_destroy_subpic);
3155 i965_destroy_heap(&i965->surface_heap, i965_destroy_surface);
3156 i965_destroy_heap(&i965->context_heap, i965_destroy_context);
3157 i965_destroy_heap(&i965->config_heap, i965_destroy_config);
3159 free(ctx->pDriverData);
3160 ctx->pDriverData = NULL;
3162 return VA_STATUS_SUCCESS;
/*
 * vaBufferInfo entry point (function name line not visible in this
 * extract — presumably i965_BufferInfo; verify against the full file):
 * reports a buffer's type, element size and element count.
 * NOTE(review): no visible NULL check on obj_buffer before dereference —
 * the guard may exist on an elided line; confirm.
 */
3167 VADriverContextP ctx, /* in */
3168 VABufferID buf_id, /* in */
3169 VABufferType *type, /* out */
3170 unsigned int *size, /* out */
3171 unsigned int *num_elements /* out */
3174 struct i965_driver_data *i965 = NULL;
3175 struct object_buffer *obj_buffer = NULL;
3177 i965 = i965_driver_data(ctx);
3178 obj_buffer = BUFFER(buf_id);
3180 *type = obj_buffer->type;
3181 *size = obj_buffer->size_element;
3182 *num_elements = obj_buffer->num_elements;
3184 return VA_STATUS_SUCCESS;
/*
 * vaLockSurface entry point (function name line not visible in this
 * extract — presumably i965_LockSurface; verify against the full file):
 * "locks" a surface by deriving a VAImage from it, mapping the image
 * buffer, and reporting the fourcc, per-plane strides/offsets and the
 * buffer handle to the caller.  The derived image id is remembered in
 * obj_surface->locked_image_id so i965_UnlockSurface can undo it; a
 * second lock while one is outstanding is rejected.
 * NOTE(review): the error-cleanup tail (after the final status check)
 * and the VAImage tmpImage declaration line are elided in this extract.
 */
3189 VADriverContextP ctx, /* in */
3190 VASurfaceID surface, /* in */
3191 unsigned int *fourcc, /* out */
3192 unsigned int *luma_stride, /* out */
3193 unsigned int *chroma_u_stride, /* out */
3194 unsigned int *chroma_v_stride, /* out */
3195 unsigned int *luma_offset, /* out */
3196 unsigned int *chroma_u_offset, /* out */
3197 unsigned int *chroma_v_offset, /* out */
3198 unsigned int *buffer_name, /* out */
3199 void **buffer /* out */
3202 VAStatus vaStatus = VA_STATUS_SUCCESS;
3203 struct i965_driver_data *i965 = i965_driver_data(ctx);
3204 struct object_surface *obj_surface = NULL;
3208 assert(luma_stride);
3209 assert(chroma_u_stride);
3210 assert(chroma_v_stride);
3211 assert(luma_offset);
3212 assert(chroma_u_offset);
3213 assert(chroma_v_offset);
3214 assert(buffer_name);
3217 tmpImage.image_id = VA_INVALID_ID;
3219 obj_surface = SURFACE(surface);
3220 if (obj_surface == NULL) {
3221 // Surface is absent.
3222 vaStatus = VA_STATUS_ERROR_INVALID_PARAMETER;
3226 // Lock functionality is absent now.
3227 if (obj_surface->locked_image_id != VA_INVALID_ID) {
3228 // Surface is locked already.
3229 vaStatus = VA_STATUS_ERROR_INVALID_PARAMETER;
3233 vaStatus = i965_DeriveImage(
3237 if (vaStatus != VA_STATUS_SUCCESS) {
3241 obj_surface->locked_image_id = tmpImage.image_id;
3243 vaStatus = i965_MapBuffer(
3247 if (vaStatus != VA_STATUS_SUCCESS) {
/* Publish the derived image's layout to the caller. */
3251 *fourcc = tmpImage.format.fourcc;
3252 *luma_offset = tmpImage.offsets[0];
3253 *luma_stride = tmpImage.pitches[0];
3254 *chroma_u_offset = tmpImage.offsets[1];
3255 *chroma_u_stride = tmpImage.pitches[1];
3256 *chroma_v_offset = tmpImage.offsets[2];
3257 *chroma_v_stride = tmpImage.pitches[2];
3258 *buffer_name = tmpImage.buf;
3261 if (vaStatus != VA_STATUS_SUCCESS) {
/*
 * vaUnlockSurface entry point (function name line not visible in this
 * extract — presumably i965_UnlockSurface; verify against the full
 * file): reverses i965_LockSurface by unmapping the derived image's
 * buffer and destroying the image, then invalidating the surface's
 * locked_image_id.  Rejects surfaces that are absent, not locked, or
 * whose work image was already destroyed.
 * NOTE(review): goto targets / error-exit tail and final return are
 * elided in this extract.
 */
3270 VADriverContextP ctx, /* in */
3271 VASurfaceID surface /* in */
3274 VAStatus vaStatus = VA_STATUS_SUCCESS;
3275 struct i965_driver_data *i965 = i965_driver_data(ctx);
3276 struct object_image *locked_img = NULL;
3277 struct object_surface *obj_surface = NULL;
3279 obj_surface = SURFACE(surface);
3281 if (obj_surface == NULL) {
3282 vaStatus = VA_STATUS_ERROR_INVALID_PARAMETER; // Surface is absent
3285 if (obj_surface->locked_image_id == VA_INVALID_ID) {
3286 vaStatus = VA_STATUS_ERROR_INVALID_PARAMETER; // Surface is not locked
3290 locked_img = IMAGE(obj_surface->locked_image_id);
3291 if (locked_img == NULL || (locked_img->image.image_id == VA_INVALID_ID)) {
3292 // Work image was deallocated before i965_UnlockSurface()
3293 vaStatus = VA_STATUS_ERROR_INVALID_PARAMETER;
3297 vaStatus = i965_UnmapBuffer(
3299 locked_img->image.buf);
3300 if (vaStatus != VA_STATUS_SUCCESS) {
3304 vaStatus = i965_DestroyImage(
3306 locked_img->image.image_id);
3307 if (vaStatus != VA_STATUS_SUCCESS) {
/* Mark the work image slot as free again. */
3311 locked_img->image.image_id = VA_INVALID_ID;
3318 i965_GetSurfaceAttributes(
3319 VADriverContextP ctx,
3321 VASurfaceAttrib *attrib_list,
3322 unsigned int num_attribs
3325 VAStatus vaStatus = VA_STATUS_SUCCESS;
3326 struct i965_driver_data *i965 = i965_driver_data(ctx);
3327 struct object_config *obj_config;
3330 if (config == VA_INVALID_ID)
3331 return VA_STATUS_ERROR_INVALID_CONFIG;
3333 obj_config = CONFIG(config);
3335 if (obj_config == NULL)
3336 return VA_STATUS_ERROR_INVALID_CONFIG;
3338 if (attrib_list == NULL || num_attribs)
3339 return VA_STATUS_ERROR_INVALID_PARAMETER;
3341 for (i = 0; i < num_attribs; i++) {
3342 switch (attrib_list[i].type) {
3343 case VASurfaceAttribPixelFormat:
3344 attrib_list[i].value.type = VAGenericValueTypeInteger;
3345 attrib_list[i].flags = VA_SURFACE_ATTRIB_GETTABLE | VA_SURFACE_ATTRIB_SETTABLE;
3347 if (attrib_list[i].value.value.i == 0) {
3348 if (IS_G4X(i965->intel.device_id)) {
3349 if (obj_config->profile == VAProfileMPEG2Simple ||
3350 obj_config->profile == VAProfileMPEG2Main) {
3351 attrib_list[i].value.value.i = VA_FOURCC('I', '4', '2', '0');
3354 attrib_list[i].flags = VA_SURFACE_ATTRIB_NOT_SUPPORTED;
3356 } else if (IS_IRONLAKE(i965->intel.device_id)) {
3357 if (obj_config->profile == VAProfileMPEG2Simple ||
3358 obj_config->profile == VAProfileMPEG2Main) {
3359 attrib_list[i].value.value.i = VA_FOURCC('I', '4', '2', '0');
3360 } else if (obj_config->profile == VAProfileH264Baseline ||
3361 obj_config->profile == VAProfileH264Main ||
3362 obj_config->profile == VAProfileH264High) {
3363 attrib_list[i].value.value.i = VA_FOURCC('N', 'V', '1', '2');
3364 } else if (obj_config->profile == VAProfileNone) {
3365 attrib_list[i].value.value.i = VA_FOURCC('N', 'V', '1', '2');
3368 attrib_list[i].flags = VA_SURFACE_ATTRIB_NOT_SUPPORTED;
3370 } else if (IS_GEN6(i965->intel.device_id)) {
3371 attrib_list[i].value.value.i = VA_FOURCC('N', 'V', '1', '2');
3372 } else if (IS_GEN7(i965->intel.device_id)) {
3373 if (obj_config->profile == VAProfileJPEGBaseline)
3374 attrib_list[i].value.value.i = 0; /* internal format */
3376 attrib_list[i].value.value.i = VA_FOURCC('N', 'V', '1', '2');
3379 if (IS_G4X(i965->intel.device_id)) {
3380 if (obj_config->profile == VAProfileMPEG2Simple ||
3381 obj_config->profile == VAProfileMPEG2Main) {
3382 if (attrib_list[i].value.value.i != VA_FOURCC('I', '4', '2', '0')) {
3383 attrib_list[i].value.value.i = 0;
3384 attrib_list[i].flags &= ~VA_SURFACE_ATTRIB_SETTABLE;
3388 attrib_list[i].flags = VA_SURFACE_ATTRIB_NOT_SUPPORTED;
3390 } else if (IS_IRONLAKE(i965->intel.device_id)) {
3391 if (obj_config->profile == VAProfileMPEG2Simple ||
3392 obj_config->profile == VAProfileMPEG2Main) {
3393 if (attrib_list[i].value.value.i != VA_FOURCC('I', '4', '2', '0')) {
3394 attrib_list[i].value.value.i = 0;
3395 attrib_list[i].flags &= ~VA_SURFACE_ATTRIB_SETTABLE;
3397 } else if (obj_config->profile == VAProfileH264Baseline ||
3398 obj_config->profile == VAProfileH264Main ||
3399 obj_config->profile == VAProfileH264High) {
3400 if (attrib_list[i].value.value.i != VA_FOURCC('N', 'V', '1', '2')) {
3401 attrib_list[i].value.value.i = 0;
3402 attrib_list[i].flags &= ~VA_SURFACE_ATTRIB_SETTABLE;
3404 } else if (obj_config->profile == VAProfileNone) {
3405 if (attrib_list[i].value.value.i != VA_FOURCC('N', 'V', '1', '2') &&
3406 attrib_list[i].value.value.i != VA_FOURCC('I', '4', '2', '0') &&
3407 attrib_list[i].value.value.i != VA_FOURCC('Y', 'V', '1', '2')) {
3408 attrib_list[i].value.value.i = 0;
3409 attrib_list[i].flags &= ~VA_SURFACE_ATTRIB_SETTABLE;
3413 attrib_list[i].flags = VA_SURFACE_ATTRIB_NOT_SUPPORTED;
3415 } else if (IS_GEN6(i965->intel.device_id)) {
3416 if (obj_config->entrypoint == VAEntrypointEncSlice ||
3417 obj_config->entrypoint == VAEntrypointVideoProc) {
3418 if (attrib_list[i].value.value.i != VA_FOURCC('N', 'V', '1', '2') &&
3419 attrib_list[i].value.value.i != VA_FOURCC('I', '4', '2', '0') &&
3420 attrib_list[i].value.value.i != VA_FOURCC('Y', 'V', '1', '2')) {
3421 attrib_list[i].value.value.i = 0;
3422 attrib_list[i].flags &= ~VA_SURFACE_ATTRIB_SETTABLE;
3425 if (attrib_list[i].value.value.i != VA_FOURCC('N', 'V', '1', '2')) {
3426 attrib_list[i].value.value.i = 0;
3427 attrib_list[i].flags &= ~VA_SURFACE_ATTRIB_SETTABLE;
3430 } else if (IS_GEN7(i965->intel.device_id)) {
3431 if (obj_config->entrypoint == VAEntrypointEncSlice ||
3432 obj_config->entrypoint == VAEntrypointVideoProc) {
3433 if (attrib_list[i].value.value.i != VA_FOURCC('N', 'V', '1', '2') &&
3434 attrib_list[i].value.value.i != VA_FOURCC('I', '4', '2', '0') &&
3435 attrib_list[i].value.value.i != VA_FOURCC('Y', 'V', '1', '2')) {
3436 attrib_list[i].value.value.i = 0;
3437 attrib_list[i].flags &= ~VA_SURFACE_ATTRIB_SETTABLE;
3440 if (obj_config->profile == VAProfileJPEGBaseline) {
3441 attrib_list[i].value.value.i = 0; /* JPEG decoding always uses an internal format */
3442 attrib_list[i].flags &= ~VA_SURFACE_ATTRIB_SETTABLE;
3444 if (attrib_list[i].value.value.i != VA_FOURCC('N', 'V', '1', '2')) {
3445 attrib_list[i].value.value.i = 0;
3446 attrib_list[i].flags &= ~VA_SURFACE_ATTRIB_SETTABLE;
3454 case VASurfaceAttribMinWidth:
3455 /* FIXME: add support for it later */
3456 attrib_list[i].flags = VA_SURFACE_ATTRIB_NOT_SUPPORTED;
3458 case VASurfaceAttribMaxWidth:
3459 attrib_list[i].flags = VA_SURFACE_ATTRIB_NOT_SUPPORTED;
3461 case VASurfaceAttribMinHeight:
3462 attrib_list[i].flags = VA_SURFACE_ATTRIB_NOT_SUPPORTED;
3464 case VASurfaceAttribMaxHeight:
3465 attrib_list[i].flags = VA_SURFACE_ATTRIB_NOT_SUPPORTED;
3468 attrib_list[i].flags = VA_SURFACE_ATTRIB_NOT_SUPPORTED;
3477 * Query video processing pipeline
/*
 * vaQueryVideoProcFilters entry point: lists the video-processing
 * filters available on this chipset.  With VPP support the driver
 * advertises noise reduction and deinterlacing; otherwise the list is
 * empty.  The filled count is written back through num_filters.
 * NOTE(review): the "unsigned int i = 0;" declaration, the
 * "*num_filters = i;" store and braces are elided in this extract.
 */
3479 VAStatus i965_QueryVideoProcFilters(
3480 VADriverContextP ctx,
3481 VAContextID context,
3482 VAProcFilterType *filters,
3483 unsigned int *num_filters
3486 struct i965_driver_data *const i965 = i965_driver_data(ctx);
3489 if (HAS_VPP(i965)) {
3490 filters[i++] = VAProcFilterNoiseReduction;
3491 filters[i++] = VAProcFilterDeinterlacing;
3496 return VA_STATUS_SUCCESS;
/*
 * vaQueryVideoProcFilterCaps entry point: reports per-filter
 * capabilities.
 *   - Noise reduction: a single strength range [0.0, 1.0], default 0.5,
 *     in steps of 1/32 (matching the hardware's 5-bit control);
 *   - Deinterlacing: bob only.
 * The filled cap count is written back through num_filter_caps.
 * NOTE(review): the filter_caps parameter line, count increments and
 * braces are elided in this extract.
 */
3499 VAStatus i965_QueryVideoProcFilterCaps(
3500 VADriverContextP ctx,
3501 VAContextID context,
3502 VAProcFilterType type,
3504 unsigned int *num_filter_caps
3509 if (type == VAProcFilterNoiseReduction) {
3510 VAProcFilterCap *cap = filter_caps;
3512 cap->range.min_value = 0.0;
3513 cap->range.max_value = 1.0;
3514 cap->range.default_value = 0.5;
3515 cap->range.step = 0.03125; /* 1.0 / 32 */
3517 } else if (type == VAProcFilterDeinterlacing) {
3518 VAProcFilterCapDeinterlacing *cap = filter_caps;
3520 cap->type = VAProcDeinterlacingBob;
3525 *num_filter_caps = i;
3527 return VA_STATUS_SUCCESS;
/*
 * Color standards accepted on the VPP input side.  Only BT.601 is listed;
 * the remaining VAProcColorStandardCount slots are zero-initialised.
 * (The closing `};` of each array is elided in this listing.)
 */
3530 static VAProcColorStandardType vpp_input_color_standards[VAProcColorStandardCount] = {
3531 VAProcColorStandardBT601,
/* Color standards produced on the VPP output side — likewise BT.601 only. */
3534 static VAProcColorStandardType vpp_output_color_standards[VAProcColorStandardCount] = {
3535 VAProcColorStandardBT601,
/*
 * vaQueryVideoProcPipelineCaps entry point: describe the VPP pipeline for
 * a given set of filter parameter buffers.  All flag/reference counts are
 * reset to zero and exactly one input/output color standard (BT.601) is
 * reported; the loop then inspects each filter buffer to adjust the caps.
 *
 * NOTE(review): the listing elides lines here (opening brace, the
 * declaration of `i`, and whatever the noise-reduction branch does with
 * `denoise` — it is unused in the visible lines).
 * NOTE(review): BUFFER(filters[i]) can return NULL for an invalid buffer
 * ID, yet obj_buffer->buffer_store is dereferenced without a visible NULL
 * check — confirm whether validation happens in the elided lines.
 * NOTE(review): the assert below accepts VAProcDeinterlacingWeave, but
 * i965_QueryVideoProcFilterCaps advertises only Bob — apparent
 * inconsistency worth confirming upstream.
 */
3538 VAStatus i965_QueryVideoProcPipelineCaps(
3539 VADriverContextP ctx,
3540 VAContextID context,
3541 VABufferID *filters,
3542 unsigned int num_filters,
3543 VAProcPipelineCaps *pipeline_cap /* out */
3546 struct i965_driver_data * const i965 = i965_driver_data(ctx);
3549 pipeline_cap->flags = 0;
3550 pipeline_cap->pipeline_flags = 0;
3551 pipeline_cap->filter_flags = 0;
3552 pipeline_cap->num_forward_references = 0;
3553 pipeline_cap->num_backward_references = 0;
3554 pipeline_cap->num_input_color_standards = 1;
3555 pipeline_cap->input_color_standards = vpp_input_color_standards;
3556 pipeline_cap->num_output_color_standards = 1;
3557 pipeline_cap->output_color_standards = vpp_output_color_standards;
3559 for (i = 0; i < num_filters; i++) {
3560 struct object_buffer *obj_buffer = BUFFER(filters[i]);
3561 VAProcFilterParameterBufferBase *base = (VAProcFilterParameterBufferBase *)obj_buffer->buffer_store->buffer;
3563 if (base->type == VAProcFilterNoiseReduction) {
3564 VAProcFilterParameterBuffer *denoise = (VAProcFilterParameterBuffer *)base;
3566 } else if (base->type == VAProcFilterDeinterlacing) {
3567 VAProcFilterParameterBufferDeinterlacing *deint = (VAProcFilterParameterBufferDeinterlacing *)base;
3569 assert(deint->algorithm == VAProcDeinterlacingWeave ||
3570 deint->algorithm == VAProcDeinterlacingBob);
3574 return VA_STATUS_SUCCESS;
/* Forward declaration of the libva driver entry point. */
3578 VA_DRIVER_INIT_FUNC(VADriverContextP ctx);
/*
 * Driver initialisation entry point, resolved by libva at load time.
 * Publishes driver limits and version, wires every vtable slot to the
 * i965_* implementation, allocates the per-driver data, initialises the
 * object heaps (configs, contexts, surfaces, buffers, images,
 * subpictures), builds the vendor string, then hands off to i965_Init().
 *
 * NOTE(review): this listing elides lines throughout — the opening brace,
 * `int result;`, the third (ID-offset) argument of each object_heap_init
 * call, and the closing brace are not visible here.
 */
3581 VA_DRIVER_INIT_FUNC( VADriverContextP ctx )
3583 struct VADriverVTable * const vtable = ctx->vtable;
3584 struct VADriverVTableVPP * const vtable_vpp = ctx->vtable_vpp;
3586 struct i965_driver_data *i965;
/* Advertise the driver's limits to libva before any vtable call is made. */
3589 ctx->version_major = VA_MAJOR_VERSION;
3590 ctx->version_minor = VA_MINOR_VERSION;
3591 ctx->max_profiles = I965_MAX_PROFILES;
3592 ctx->max_entrypoints = I965_MAX_ENTRYPOINTS;
3593 ctx->max_attributes = I965_MAX_CONFIG_ATTRIBUTES;
3594 ctx->max_image_formats = I965_MAX_IMAGE_FORMATS;
3595 ctx->max_subpic_formats = I965_MAX_SUBPIC_FORMATS;
3596 ctx->max_display_attributes = I965_MAX_DISPLAY_ATTRIBUTES;
3598 vtable->vaTerminate = i965_Terminate;
3599 vtable->vaQueryConfigEntrypoints = i965_QueryConfigEntrypoints;
3600 vtable->vaQueryConfigProfiles = i965_QueryConfigProfiles;
3601 vtable->vaQueryConfigEntrypoints = i965_QueryConfigEntrypoints; /* NOTE(review): duplicate of the assignment two lines above — one of the two should be removed. */
3602 vtable->vaQueryConfigAttributes = i965_QueryConfigAttributes;
3603 vtable->vaCreateConfig = i965_CreateConfig;
3604 vtable->vaDestroyConfig = i965_DestroyConfig;
3605 vtable->vaGetConfigAttributes = i965_GetConfigAttributes;
3606 vtable->vaCreateSurfaces = i965_CreateSurfaces;
3607 vtable->vaDestroySurfaces = i965_DestroySurfaces;
3608 vtable->vaCreateContext = i965_CreateContext;
3609 vtable->vaDestroyContext = i965_DestroyContext;
3610 vtable->vaCreateBuffer = i965_CreateBuffer;
3611 vtable->vaBufferSetNumElements = i965_BufferSetNumElements;
3612 vtable->vaMapBuffer = i965_MapBuffer;
3613 vtable->vaUnmapBuffer = i965_UnmapBuffer;
3614 vtable->vaDestroyBuffer = i965_DestroyBuffer;
3615 vtable->vaBeginPicture = i965_BeginPicture;
3616 vtable->vaRenderPicture = i965_RenderPicture;
3617 vtable->vaEndPicture = i965_EndPicture;
3618 vtable->vaSyncSurface = i965_SyncSurface;
3619 vtable->vaQuerySurfaceStatus = i965_QuerySurfaceStatus;
3620 vtable->vaPutSurface = i965_PutSurface;
3621 vtable->vaQueryImageFormats = i965_QueryImageFormats;
3622 vtable->vaCreateImage = i965_CreateImage;
3623 vtable->vaDeriveImage = i965_DeriveImage;
3624 vtable->vaDestroyImage = i965_DestroyImage;
3625 vtable->vaSetImagePalette = i965_SetImagePalette;
3626 vtable->vaGetImage = i965_GetImage;
3627 vtable->vaPutImage = i965_PutImage;
3628 vtable->vaQuerySubpictureFormats = i965_QuerySubpictureFormats;
3629 vtable->vaCreateSubpicture = i965_CreateSubpicture;
3630 vtable->vaDestroySubpicture = i965_DestroySubpicture;
3631 vtable->vaSetSubpictureImage = i965_SetSubpictureImage;
3632 vtable->vaSetSubpictureChromakey = i965_SetSubpictureChromakey;
3633 vtable->vaSetSubpictureGlobalAlpha = i965_SetSubpictureGlobalAlpha;
3634 vtable->vaAssociateSubpicture = i965_AssociateSubpicture;
3635 vtable->vaDeassociateSubpicture = i965_DeassociateSubpicture;
3636 vtable->vaQueryDisplayAttributes = i965_QueryDisplayAttributes;
3637 vtable->vaGetDisplayAttributes = i965_GetDisplayAttributes;
3638 vtable->vaSetDisplayAttributes = i965_SetDisplayAttributes;
3639 vtable->vaBufferInfo = i965_BufferInfo;
3640 vtable->vaLockSurface = i965_LockSurface;
3641 vtable->vaUnlockSurface = i965_UnlockSurface;
3642 vtable->vaGetSurfaceAttributes = i965_GetSurfaceAttributes;
3643 vtable->vaCreateSurfaces2 = i965_CreateSurfaces2;
/* Video-processing (VPP) vtable — the three query hooks defined above. */
3645 vtable_vpp->vaQueryVideoProcFilters = i965_QueryVideoProcFilters;
3646 vtable_vpp->vaQueryVideoProcFilterCaps = i965_QueryVideoProcFilterCaps;
3647 vtable_vpp->vaQueryVideoProcPipelineCaps = i965_QueryVideoProcPipelineCaps;
/* NOTE(review): calloc can return NULL; no check is visible before i965 is
 * stored and used by the heap initialisation below — confirm whether the
 * elided lines validate it. */
3649 i965 = (struct i965_driver_data *)calloc(1, sizeof(*i965));
3651 ctx->pDriverData = (void *)i965;
/* Each object heap gets its element size; the ID-offset argument
 * (CONFIG_ID_OFFSET etc., see the defines at the top of the file) is on
 * the lines elided from this listing. */
3653 result = object_heap_init(&i965->config_heap,
3654 sizeof(struct object_config),
3656 assert(result == 0);
3658 result = object_heap_init(&i965->context_heap,
3659 sizeof(struct object_context),
3661 assert(result == 0);
3663 result = object_heap_init(&i965->surface_heap,
3664 sizeof(struct object_surface),
3666 assert(result == 0);
3668 result = object_heap_init(&i965->buffer_heap,
3669 sizeof(struct object_buffer),
3671 assert(result == 0);
3673 result = object_heap_init(&i965->image_heap,
3674 sizeof(struct object_image),
3676 assert(result == 0);
3678 result = object_heap_init(&i965->subpic_heap,
3679 sizeof(struct object_subpic),
3681 assert(result == 0);
/* NOTE(review): unbounded sprintf into i965->va_vendor — safe only if the
 * buffer is sized for the longest possible vendor string; the buffer's
 * declaration is outside this view, so verify (snprintf would be safer). */
3683 sprintf(i965->va_vendor, "%s %s driver - %d.%d.%d",
3684 INTEL_STR_DRIVER_VENDOR,
3685 INTEL_STR_DRIVER_NAME,
3686 INTEL_DRIVER_MAJOR_VERSION,
3687 INTEL_DRIVER_MINOR_VERSION,
3688 INTEL_DRIVER_MICRO_VERSION);
/* Pre-release builds append a ".preN" suffix to the vendor string. */
3690 if (INTEL_DRIVER_PRE_VERSION > 0) {
3691 const int len = strlen(i965->va_vendor);
3692 sprintf(&i965->va_vendor[len], ".pre%d", INTEL_DRIVER_PRE_VERSION);
3695 i965->current_context_id = VA_INVALID_ID;
3697 ctx->str_vendor = i965->va_vendor;
/* Final hardware/render-state initialisation; its status is the driver's
 * load result. */
3699 return i965_Init(ctx);