2 * Copyright © 2009 Intel Corporation
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the
6 * "Software"), to deal in the Software without restriction, including
7 * without limitation the rights to use, copy, modify, merge, publish,
8 * distribute, sub license, and/or sell copies of the Software, and to
9 * permit persons to whom the Software is furnished to do so, subject to
10 * the following conditions:
12 * The above copyright notice and this permission notice (including the
13 * next paragraph) shall be included in all copies or substantial portions of the Software.
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
17 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
18 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
19 * IN NO EVENT SHALL PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR
20 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
21 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
22 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
25 * Xiang Haihao <haihao.xiang@intel.com>
26 * Zou Nan hai <nanhai.zou@intel.com>
30 #include "config_android.h"
41 # include "i965_output_dri.h"
44 #ifdef HAVE_VA_WAYLAND
45 # include "i965_output_wayland.h"
48 #include "intel_driver.h"
49 #include "intel_memman.h"
50 #include "intel_batchbuffer.h"
51 #include "i965_defines.h"
52 #include "i965_drv_video.h"
53 #include "i965_decoder.h"
54 #include "i965_encoder.h"
/* NOTE(review): this file is a partial/garbled excerpt — many original lines
 * are missing and each surviving line carries a stray leading line number.
 * Comments below describe only what is visible; verify against the full file. */

/* Object IDs are partitioned into disjoint per-type ranges so a bare VA
 * handle reveals which object heap it belongs to. */
56 #define CONFIG_ID_OFFSET 0x01000000
57 #define CONTEXT_ID_OFFSET 0x02000000
58 #define SURFACE_ID_OFFSET 0x04000000
59 #define BUFFER_ID_OFFSET 0x08000000
60 #define IMAGE_ID_OFFSET 0x0a000000
61 #define SUBPIC_ID_OFFSET 0x10000000

/* Per-codec capability probes: each tests a flag in ctx->codec_info.
 * NOTE(review): the '\'-continued conditions are truncated in this excerpt;
 * the full macros also test additional state — confirm in the original. */
63 #define HAS_MPEG2_DECODING(ctx) ((ctx)->codec_info->has_mpeg2_decoding && \
66 #define HAS_H264_DECODING(ctx) ((ctx)->codec_info->has_h264_decoding && \
69 #define HAS_H264_ENCODING(ctx) ((ctx)->codec_info->has_h264_encoding && \
72 #define HAS_VC1_DECODING(ctx) ((ctx)->codec_info->has_vc1_decoding && \
75 #define HAS_JPEG_DECODING(ctx) ((ctx)->codec_info->has_jpeg_decoding && \
78 #define HAS_VPP(ctx) ((ctx)->codec_info->has_vpp)
80 #define HAS_ACCELERATED_GETIMAGE(ctx) ((ctx)->codec_info->has_accelerated_getimage)
82 #define HAS_ACCELERATED_PUTIMAGE(ctx) ((ctx)->codec_info->has_accelerated_putimage)
84 #define HAS_TILED_SURFACE(ctx) ((ctx)->codec_info->has_tiled_surface)

/* Forward declaration; definition is elsewhere in this file. */
86 static int get_sampling_from_fourcc(unsigned int fourcc);

88 /* Check whether we are rendering to X11 (VA/X11 or VA/GLX API) */
89 #define IS_VA_X11(ctx) \
90 (((ctx)->display_type & VA_DISPLAY_MAJOR_MASK) == VA_DISPLAY_X11)

92 /* Check whether we are rendering to Wayland */
93 #define IS_VA_WAYLAND(ctx) \
94 (((ctx)->display_type & VA_DISPLAY_MAJOR_MASK) == VA_DISPLAY_WAYLAND)
/* Surface-type tags used to classify image/subpicture formats below.
 * NOTE(review): the enum header (and the YUV member, presumably value 0)
 * is missing from this excerpt. */
97 I965_SURFACETYPE_RGBA = 1,
99 I965_SURFACETYPE_INDEXED

102 /* List of supported display attributes */
103 static const VADisplayAttribute i965_display_attributes[] = {
105 VADisplayAttribRotation,
106 0, 3, VA_ROTATION_NONE,
107 VA_DISPLAY_ATTRIB_GETTABLE|VA_DISPLAY_ATTRIB_SETTABLE

111 /* List of supported image formats */
/* NOTE(review): the struct header of i965_image_format_map_t is missing here;
 * only the VAImageFormat member and the typedef tail survive. */
114 VAImageFormat va_format;
115 } i965_image_format_map_t;

/* Table terminated by a zero fourcc (see the +1 sizing and the loops that
 * stop on va_format.fourcc == 0). RGB entries also carry channel masks. */
117 static const i965_image_format_map_t
118 i965_image_formats_map[I965_MAX_IMAGE_FORMATS + 1] = {
119 { I965_SURFACETYPE_YUV,
120 { VA_FOURCC('Y','V','1','2'), VA_LSB_FIRST, 12, } },
121 { I965_SURFACETYPE_YUV,
122 { VA_FOURCC('I','4','2','0'), VA_LSB_FIRST, 12, } },
123 { I965_SURFACETYPE_YUV,
124 { VA_FOURCC('N','V','1','2'), VA_LSB_FIRST, 12, } },
125 { I965_SURFACETYPE_YUV,
126 { VA_FOURCC('Y','U','Y','2'), VA_LSB_FIRST, 16, } },
127 { I965_SURFACETYPE_YUV,
128 { VA_FOURCC('U','Y','V','Y'), VA_LSB_FIRST, 16, } },
129 { I965_SURFACETYPE_RGBA,
130 { VA_FOURCC('R','G','B','X'), VA_LSB_FIRST, 32, 24, 0x000000ff, 0x0000ff00, 0x00ff0000 } },
131 { I965_SURFACETYPE_RGBA,
132 { VA_FOURCC('B','G','R','X'), VA_LSB_FIRST, 32, 24, 0x00ff0000, 0x0000ff00, 0x000000ff } },

135 /* List of supported subpicture formats */
/* NOTE(review): struct header of i965_subpic_format_map_t is missing;
 * each entry pairs a VAImageFormat with a hardware surface format and
 * the VA subpicture flags it supports. */
139 VAImageFormat va_format;
140 unsigned int va_flags;
141 } i965_subpic_format_map_t;

143 static const i965_subpic_format_map_t
144 i965_subpic_formats_map[I965_MAX_SUBPIC_FORMATS + 1] = {
145 { I965_SURFACETYPE_INDEXED, I965_SURFACEFORMAT_P4A4_UNORM,
146 { VA_FOURCC('I','A','4','4'), VA_MSB_FIRST, 8, },
147 VA_SUBPICTURE_DESTINATION_IS_SCREEN_COORD },
148 { I965_SURFACETYPE_INDEXED, I965_SURFACEFORMAT_A4P4_UNORM,
149 { VA_FOURCC('A','I','4','4'), VA_MSB_FIRST, 8, },
150 VA_SUBPICTURE_DESTINATION_IS_SCREEN_COORD },
151 { I965_SURFACETYPE_INDEXED, I965_SURFACEFORMAT_P8A8_UNORM,
152 { VA_FOURCC('I','A','8','8'), VA_MSB_FIRST, 16, },
153 VA_SUBPICTURE_DESTINATION_IS_SCREEN_COORD },
154 { I965_SURFACETYPE_INDEXED, I965_SURFACEFORMAT_A8P8_UNORM,
155 { VA_FOURCC('A','I','8','8'), VA_MSB_FIRST, 16, },
156 VA_SUBPICTURE_DESTINATION_IS_SCREEN_COORD },
157 { I965_SURFACETYPE_RGBA, I965_SURFACEFORMAT_B8G8R8A8_UNORM,
158 { VA_FOURCC('B','G','R','A'), VA_LSB_FIRST, 32,
159 32, 0x00ff0000, 0x0000ff00, 0x000000ff, 0xff000000 },
160 VA_SUBPICTURE_DESTINATION_IS_SCREEN_COORD },
161 { I965_SURFACETYPE_RGBA, I965_SURFACEFORMAT_R8G8B8A8_UNORM,
162 { VA_FOURCC('R','G','B','A'), VA_LSB_FIRST, 32,
163 32, 0x000000ff, 0x0000ff00, 0x00ff0000, 0xff000000 },
164 VA_SUBPICTURE_DESTINATION_IS_SCREEN_COORD },
/* Look up the driver's subpicture-format entry matching va_format.
 * Fourcc must match; for RGBA types the byte order and all four channel
 * masks must match too (indexed formats match on fourcc alone).
 * NOTE(review): the return statements are missing from this excerpt —
 * presumably returns the matching entry, or NULL when none matches. */
167 static const i965_subpic_format_map_t *
168 get_subpic_format(const VAImageFormat *va_format)
171 for (i = 0; i965_subpic_formats_map[i].type != 0; i++) {
172 const i965_subpic_format_map_t * const m = &i965_subpic_formats_map[i];
173 if (m->va_format.fourcc == va_format->fourcc &&
174 (m->type == I965_SURFACETYPE_RGBA ?
175 (m->va_format.byte_order == va_format->byte_order &&
176 m->va_format.red_mask == va_format->red_mask &&
177 m->va_format.green_mask == va_format->green_mask &&
178 m->va_format.blue_mask == va_format->blue_mask &&
179 m->va_format.alpha_mask == va_format->alpha_mask) : 1))
/* Per-generation capability tables.  Each struct hw_codec_info names the
 * context-init entry points for decode/encode/video-processing and sets
 * capability flags consumed by the HAS_* macros above.  Capability grows
 * monotonically from G4X (MPEG-2 decode only) through GEN7.5. */
185 extern struct hw_context *i965_proc_context_init(VADriverContextP, struct object_config *);
186 extern struct hw_context *g4x_dec_hw_context_init(VADriverContextP, struct object_config *);
/* G4X: MPEG-2 decode only; no encode or VPP. */
187 static struct hw_codec_info g4x_hw_codec_info = {
188 .dec_hw_context_init = g4x_dec_hw_context_init,
189 .enc_hw_context_init = NULL,
190 .proc_hw_context_init = NULL,
194 .has_mpeg2_decoding = 1,

197 extern struct hw_context *ironlake_dec_hw_context_init(VADriverContextP, struct object_config *);
/* Ironlake (GEN5): adds H.264 decode, accelerated PutImage, and VPP. */
198 static struct hw_codec_info ironlake_hw_codec_info = {
199 .dec_hw_context_init = ironlake_dec_hw_context_init,
200 .enc_hw_context_init = NULL,
201 .proc_hw_context_init = i965_proc_context_init,
205 .has_mpeg2_decoding = 1,
206 .has_h264_decoding = 1,
208 .has_accelerated_putimage = 1,

211 extern struct hw_context *gen6_dec_hw_context_init(VADriverContextP, struct object_config *);
212 extern struct hw_context *gen6_enc_hw_context_init(VADriverContextP, struct object_config *);
/* Sandybridge (GEN6): adds H.264 encode, VC-1 decode, tiled surfaces. */
213 static struct hw_codec_info gen6_hw_codec_info = {
214 .dec_hw_context_init = gen6_dec_hw_context_init,
215 .enc_hw_context_init = gen6_enc_hw_context_init,
216 .proc_hw_context_init = i965_proc_context_init,
220 .has_mpeg2_decoding = 1,
221 .has_h264_decoding = 1,
222 .has_h264_encoding = 1,
223 .has_vc1_decoding = 1,
225 .has_accelerated_getimage = 1,
226 .has_accelerated_putimage = 1,
227 .has_tiled_surface = 1,

230 extern struct hw_context *gen7_dec_hw_context_init(VADriverContextP, struct object_config *);
231 extern struct hw_context *gen7_enc_hw_context_init(VADriverContextP, struct object_config *);
/* Ivybridge (GEN7): adds JPEG decode on top of GEN6 features. */
232 static struct hw_codec_info gen7_hw_codec_info = {
233 .dec_hw_context_init = gen7_dec_hw_context_init,
234 .enc_hw_context_init = gen7_enc_hw_context_init,
235 .proc_hw_context_init = i965_proc_context_init,
239 .has_mpeg2_decoding = 1,
240 .has_h264_decoding = 1,
241 .has_h264_encoding = 1,
242 .has_vc1_decoding = 1,
243 .has_jpeg_decoding = 1,
245 .has_accelerated_getimage = 1,
246 .has_accelerated_putimage = 1,
247 .has_tiled_surface = 1,

/* Haswell (GEN7.5): same feature set as GEN7 but with its own dedicated
 * dec/enc/proc context initializers.
 * NOTE(review): the extern declarations for gen75_dec/enc_hw_context_init
 * are missing from this excerpt. */
250 extern struct hw_context *gen75_proc_context_init(VADriverContextP, struct object_config *);
251 static struct hw_codec_info gen75_hw_codec_info = {
252 .dec_hw_context_init = gen75_dec_hw_context_init,
253 .enc_hw_context_init = gen75_enc_hw_context_init,
254 .proc_hw_context_init = gen75_proc_context_init,
258 .has_mpeg2_decoding = 1,
259 .has_h264_decoding = 1,
260 .has_h264_encoding = 1,
261 .has_vc1_decoding = 1,
262 .has_jpeg_decoding = 1,
264 .has_accelerated_getimage = 1,
265 .has_accelerated_putimage = 1,
266 .has_tiled_surface = 1,
/* Index bases for the packed-header storage arrays: slots 0..2 hold the
 * sequence/picture/slice headers, slots 3+ hold "misc" packed headers. */
269 #define I965_PACKED_HEADER_BASE 0
270 #define I965_PACKED_MISC_HEADER_BASE 3

/* Map a VAEncPackedHeaderType to an index into the packed_header_param /
 * packed_header_data arrays.  Misc types are offset past the three fixed
 * slots; the fixed types map to base+0/1/2.
 * NOTE(review): braces, 'break's, and the final return are missing from
 * this excerpt. */
273 va_enc_packed_type_to_idx(int packed_type)
277 if (packed_type & VAEncPackedHeaderMiscMask) {
278 idx = I965_PACKED_MISC_HEADER_BASE;
279 packed_type = (~VAEncPackedHeaderMiscMask & packed_type);
280 assert(packed_type > 0);
281 idx += (packed_type - 1);
283 idx = I965_PACKED_HEADER_BASE;
285 switch (packed_type) {
286 case VAEncPackedHeaderSequence:
287 idx = I965_PACKED_HEADER_BASE + 0;
290 case VAEncPackedHeaderPicture:
291 idx = I965_PACKED_HEADER_BASE + 1;
294 case VAEncPackedHeaderSlice:
295 idx = I965_PACKED_HEADER_BASE + 2;
299 /* Should not get here */
/* vaQueryConfigProfiles: append every profile this hardware supports
 * (per the HAS_* capability flags) to profile_list and report the count.
 * NOTE(review): the 'int i = 0' declaration, closing braces, and the
 * '*num_profiles = i' store are missing from this excerpt. */
311 i965_QueryConfigProfiles(VADriverContextP ctx,
312 VAProfile *profile_list, /* out */
313 int *num_profiles) /* out */
315 struct i965_driver_data * const i965 = i965_driver_data(ctx);
318 if (HAS_MPEG2_DECODING(i965)) {
319 profile_list[i++] = VAProfileMPEG2Simple;
320 profile_list[i++] = VAProfileMPEG2Main;
/* H.264 profiles are advertised if either decode or encode is present. */
323 if (HAS_H264_DECODING(i965) ||
324 HAS_H264_ENCODING(i965)) {
325 profile_list[i++] = VAProfileH264Baseline;
326 profile_list[i++] = VAProfileH264Main;
327 profile_list[i++] = VAProfileH264High;
330 if (HAS_VC1_DECODING(i965)) {
331 profile_list[i++] = VAProfileVC1Simple;
332 profile_list[i++] = VAProfileVC1Main;
333 profile_list[i++] = VAProfileVC1Advanced;
/* VAProfileNone is the video-processing (VPP) pseudo-profile. */
337 profile_list[i++] = VAProfileNone;
340 if (HAS_JPEG_DECODING(i965)) {
341 profile_list[i++] = VAProfileJPEGBaseline;
344 /* If the assert fails then I965_MAX_PROFILES needs to be bigger */
345 assert(i <= I965_MAX_PROFILES);
348 return VA_STATUS_SUCCESS;
/* vaQueryConfigEntrypoints: list the entrypoints (VLD decode, EncSlice
 * encode, VideoProc) available for the given profile.  Returns
 * UNSUPPORTED_PROFILE when nothing is available.
 * NOTE(review): the switch header, 'break' statements, and the VPP-profile
 * case label are missing from this excerpt. */
352 i965_QueryConfigEntrypoints(VADriverContextP ctx,
354 VAEntrypoint *entrypoint_list, /* out */
355 int *num_entrypoints) /* out */
357 struct i965_driver_data * const i965 = i965_driver_data(ctx);
361 case VAProfileMPEG2Simple:
362 case VAProfileMPEG2Main:
363 if (HAS_MPEG2_DECODING(i965))
364 entrypoint_list[n++] = VAEntrypointVLD;
367 case VAProfileH264Baseline:
368 case VAProfileH264Main:
369 case VAProfileH264High:
370 if (HAS_H264_DECODING(i965))
371 entrypoint_list[n++] = VAEntrypointVLD;
373 if (HAS_H264_ENCODING(i965))
374 entrypoint_list[n++] = VAEntrypointEncSlice;
378 case VAProfileVC1Simple:
379 case VAProfileVC1Main:
380 case VAProfileVC1Advanced:
381 if (HAS_VC1_DECODING(i965))
382 entrypoint_list[n++] = VAEntrypointVLD;
/* presumably under 'case VAProfileNone: if (HAS_VPP...)' — the guard is
 * missing from this excerpt; confirm against the full file. */
387 entrypoint_list[n++] = VAEntrypointVideoProc;
390 case VAProfileJPEGBaseline:
391 if (HAS_JPEG_DECODING(i965))
392 entrypoint_list[n++] = VAEntrypointVLD;
399 /* If the assert fails then I965_MAX_ENTRYPOINTS needs to be bigger */
400 assert(n <= I965_MAX_ENTRYPOINTS);
401 *num_entrypoints = n;
402 return n > 0 ? VA_STATUS_SUCCESS : VA_STATUS_ERROR_UNSUPPORTED_PROFILE;
/* vaGetConfigAttributes: fill in the supported value for each requested
 * attribute; unknown attributes get VA_ATTRIB_NOT_SUPPORTED.
 * NOTE(review): 'break's, the default label, and closing braces are
 * missing from this excerpt. */
406 i965_GetConfigAttributes(VADriverContextP ctx,
408 VAEntrypoint entrypoint,
409 VAConfigAttrib *attrib_list, /* in/out */
414 /* Other attributes don't seem to be defined */
415 /* What to do if we don't know the attribute? */
416 for (i = 0; i < num_attribs; i++) {
417 switch (attrib_list[i].type) {
418 case VAConfigAttribRTFormat:
/* Only YUV 4:2:0 render targets are advertised. */
419 attrib_list[i].value = VA_RT_FORMAT_YUV420;
422 case VAConfigAttribRateControl:
423 if (entrypoint == VAEntrypointEncSlice) {
/* Encoder supports constant-bitrate and constant-QP rate control. */
424 attrib_list[i].value = VA_RC_CBR | VA_RC_CQP;
428 case VAConfigAttribEncPackedHeaders:
429 if (entrypoint == VAEntrypointEncSlice) {
430 attrib_list[i].value = VA_ENC_PACKED_HEADER_SEQUENCE | VA_ENC_PACKED_HEADER_PICTURE | VA_ENC_PACKED_HEADER_MISC;
436 attrib_list[i].value = VA_ATTRIB_NOT_SUPPORTED;
441 return VA_STATUS_SUCCESS;
/* Heap destructor for config objects: configs own no external resources,
 * so releasing the heap slot is sufficient. */
445 i965_destroy_config(struct object_heap *heap, struct object_base *obj)
447 object_heap_free(heap, obj);
/* Set or update one attribute on a config object: overwrite the value if
 * the type already exists, otherwise append; fails with MAX_NUM_EXCEEDED
 * when the fixed attrib_list array is full. */
451 i965_update_attribute(struct object_config *obj_config, VAConfigAttrib *attrib)
455 /* Check existing attrbiutes */
456 for (i = 0; i < obj_config->num_attribs; i++) {
457 if (obj_config->attrib_list[i].type == attrib->type) {
458 /* Update existing attribute */
459 obj_config->attrib_list[i].value = attrib->value;
460 return VA_STATUS_SUCCESS;
/* Not found: append as a new attribute if there is room. */
464 if (obj_config->num_attribs < I965_MAX_CONFIG_ATTRIBUTES) {
465 i = obj_config->num_attribs;
466 obj_config->attrib_list[i].type = attrib->type;
467 obj_config->attrib_list[i].value = attrib->value;
468 obj_config->num_attribs++;
469 return VA_STATUS_SUCCESS;
472 return VA_STATUS_ERROR_MAX_NUM_EXCEEDED;
/* vaCreateConfig: validate the profile/entrypoint pair against hardware
 * capabilities, allocate a config object, seed it with the default
 * RTFormat attribute, then merge the caller-supplied attributes.
 * On any failure after allocation the config is destroyed again.
 * NOTE(review): switch header, 'break's, and error-exit labels are
 * missing from this excerpt. */
476 i965_CreateConfig(VADriverContextP ctx,
478 VAEntrypoint entrypoint,
479 VAConfigAttrib *attrib_list,
481 VAConfigID *config_id) /* out */
483 struct i965_driver_data * const i965 = i965_driver_data(ctx);
484 struct object_config *obj_config;
489 /* Validate profile & entrypoint */
491 case VAProfileMPEG2Simple:
492 case VAProfileMPEG2Main:
493 if (HAS_MPEG2_DECODING(i965) && VAEntrypointVLD == entrypoint) {
494 vaStatus = VA_STATUS_SUCCESS;
496 vaStatus = VA_STATUS_ERROR_UNSUPPORTED_ENTRYPOINT;
500 case VAProfileH264Baseline:
501 case VAProfileH264Main:
502 case VAProfileH264High:
503 if ((HAS_H264_DECODING(i965) && VAEntrypointVLD == entrypoint) ||
504 (HAS_H264_ENCODING(i965) && VAEntrypointEncSlice == entrypoint)) {
505 vaStatus = VA_STATUS_SUCCESS;
507 vaStatus = VA_STATUS_ERROR_UNSUPPORTED_ENTRYPOINT;
512 case VAProfileVC1Simple:
513 case VAProfileVC1Main:
514 case VAProfileVC1Advanced:
515 if (HAS_VC1_DECODING(i965) && VAEntrypointVLD == entrypoint) {
516 vaStatus = VA_STATUS_SUCCESS;
518 vaStatus = VA_STATUS_ERROR_UNSUPPORTED_ENTRYPOINT;
/* presumably 'case VAProfileNone:' (VPP) — label missing in excerpt. */
524 if (HAS_VPP(i965) && VAEntrypointVideoProc == entrypoint) {
525 vaStatus = VA_STATUS_SUCCESS;
527 vaStatus = VA_STATUS_ERROR_UNSUPPORTED_ENTRYPOINT;
532 case VAProfileJPEGBaseline:
533 if (HAS_JPEG_DECODING(i965) && VAEntrypointVLD == entrypoint) {
534 vaStatus = VA_STATUS_SUCCESS;
536 vaStatus = VA_STATUS_ERROR_UNSUPPORTED_ENTRYPOINT;
542 vaStatus = VA_STATUS_ERROR_UNSUPPORTED_PROFILE;
546 if (VA_STATUS_SUCCESS != vaStatus) {
/* Allocate the config object from the config heap. */
550 configID = NEW_CONFIG_ID();
551 obj_config = CONFIG(configID);
553 if (NULL == obj_config) {
554 vaStatus = VA_STATUS_ERROR_ALLOCATION_FAILED;
/* Default attribute: YUV 4:2:0 render-target format. */
558 obj_config->profile = profile;
559 obj_config->entrypoint = entrypoint;
560 obj_config->attrib_list[0].type = VAConfigAttribRTFormat;
561 obj_config->attrib_list[0].value = VA_RT_FORMAT_YUV420;
562 obj_config->num_attribs = 1;
564 for(i = 0; i < num_attribs; i++) {
565 vaStatus = i965_update_attribute(obj_config, &(attrib_list[i]));
567 if (VA_STATUS_SUCCESS != vaStatus) {
/* Error recovery: drop the half-built config on attribute failure. */
573 if (VA_STATUS_SUCCESS != vaStatus) {
574 i965_destroy_config(&i965->config_heap, (struct object_base *)obj_config);
576 *config_id = configID;
/* vaDestroyConfig: look up the config by ID and free it; invalid IDs
 * yield VA_STATUS_ERROR_INVALID_CONFIG.
 * NOTE(review): the error-return path after setting vaStatus is missing
 * from this excerpt. */
583 i965_DestroyConfig(VADriverContextP ctx, VAConfigID config_id)
585 struct i965_driver_data *i965 = i965_driver_data(ctx);
586 struct object_config *obj_config = CONFIG(config_id);
589 if (NULL == obj_config) {
590 vaStatus = VA_STATUS_ERROR_INVALID_CONFIG;
594 i965_destroy_config(&i965->config_heap, (struct object_base *)obj_config);
595 return VA_STATUS_SUCCESS;
/* vaQueryConfigAttributes: report the profile, entrypoint, and the full
 * attribute list stored on an existing config object.
 * NOTE(review): the NULL-config check and the final return are missing
 * from this excerpt. */
598 VAStatus i965_QueryConfigAttributes(VADriverContextP ctx,
599 VAConfigID config_id,
600 VAProfile *profile, /* out */
601 VAEntrypoint *entrypoint, /* out */
602 VAConfigAttrib *attrib_list, /* out */
603 int *num_attribs) /* out */
605 struct i965_driver_data *i965 = i965_driver_data(ctx);
606 struct object_config *obj_config = CONFIG(config_id);
607 VAStatus vaStatus = VA_STATUS_SUCCESS;
611 *profile = obj_config->profile;
612 *entrypoint = obj_config->entrypoint;
613 *num_attribs = obj_config->num_attribs;
615 for(i = 0; i < obj_config->num_attribs; i++) {
616 attrib_list[i] = obj_config->attrib_list[i];
/* Heap destructor for surface objects: drop the GEM buffer reference,
 * run the codec-private-data destructor if one was installed, then free
 * the heap slot. */
623 i965_destroy_surface(struct object_heap *heap, struct object_base *obj)
625 struct object_surface *obj_surface = (struct object_surface *)obj;
627 dri_bo_unreference(obj_surface->bo);
628 obj_surface->bo = NULL;
630 if (obj_surface->free_private_data != NULL) {
631 obj_surface->free_private_data(&obj_surface->private_data);
632 obj_surface->private_data = NULL;
635 object_heap_free(heap, obj);
/* vaCreateSurfaces2: allocate num_surfaces surface objects.  An optional
 * VASurfaceAttribPixelFormat attribute pins the fourcc; otherwise the BO
 * allocation is deferred until first use.  On failure, every surface
 * allocated so far is rolled back.
 * NOTE(review): several lines (width/height params, tiling override,
 * rollback loop header) are missing from this excerpt. */
639 i965_CreateSurfaces2(
640 VADriverContextP ctx,
644 VASurfaceID *surfaces,
645 unsigned int num_surfaces,
646 VASurfaceAttrib *attrib_list,
647 unsigned int num_attribs
650 struct i965_driver_data *i965 = i965_driver_data(ctx);
652 VAStatus vaStatus = VA_STATUS_SUCCESS;
653 int expected_fourcc = 0;
/* Scan the attributes for a settable pixel-format request. */
655 for (i = 0; i < num_attribs && attrib_list; i++) {
656 if ((attrib_list[i].type == VASurfaceAttribPixelFormat) &&
657 (attrib_list[i].flags & VA_SURFACE_ATTRIB_SETTABLE)) {
658 assert(attrib_list[i].value.type == VAGenericValueTypeInteger);
659 expected_fourcc = attrib_list[i].value.value.i;
664 /* support 420 & 422 & RGB32 format, 422 and RGB32 are only used
665 * for post-processing (including color conversion) */
666 if (VA_RT_FORMAT_YUV420 != format &&
667 VA_RT_FORMAT_YUV422 != format &&
668 VA_RT_FORMAT_RGB32 != format) {
669 return VA_STATUS_ERROR_UNSUPPORTED_RT_FORMAT;
672 for (i = 0; i < num_surfaces; i++) {
673 int surfaceID = NEW_SURFACE_ID();
674 struct object_surface *obj_surface = SURFACE(surfaceID);
676 if (NULL == obj_surface) {
677 vaStatus = VA_STATUS_ERROR_ALLOCATION_FAILED;
/* Initialize the new surface; BO and fourcc stay unset for now. */
681 surfaces[i] = surfaceID;
682 obj_surface->status = VASurfaceReady;
683 obj_surface->orig_width = width;
684 obj_surface->orig_height = height;
686 obj_surface->subpic_render_idx = 0;
687 for(j = 0; j < I965_MAX_SUBPIC_SUM; j++){
688 obj_surface->subpic[j] = VA_INVALID_ID;
/* Stored dimensions are aligned up to 16-pixel granularity. */
691 obj_surface->width = ALIGN(width, 16);
692 obj_surface->height = ALIGN(height, 16);
693 obj_surface->flags = SURFACE_REFERENCED;
694 obj_surface->fourcc = 0;
695 obj_surface->bo = NULL;
696 obj_surface->locked_image_id = VA_INVALID_ID;
697 obj_surface->private_data = NULL;
698 obj_surface->free_private_data = NULL;
699 obj_surface->subsampling = SUBSAMPLE_YUV420;
/* A pinned fourcc forces the BO to be allocated immediately. */
701 if (expected_fourcc) {
702 int tiling = HAS_TILED_SURFACE(i965);
704 if (expected_fourcc != VA_FOURCC('N', 'V', '1', '2') &&
705 expected_fourcc != VA_FOURCC('R', 'G', 'B', 'X') &&
706 expected_fourcc != VA_FOURCC('R', 'G', 'B', 'A') )
708 // todo, should we disable tiling for 422 format?
710 if (VA_RT_FORMAT_YUV420 == format) {
711 obj_surface->subsampling = SUBSAMPLE_YUV420;
713 else if (VA_RT_FORMAT_YUV422 == format) {
714 obj_surface->subsampling = SUBSAMPLE_YUV422H;
716 else if (VA_RT_FORMAT_RGB32 == format) {
717 obj_surface->subsampling = SUBSAMPLE_RGBX;
723 i965_check_alloc_surface_bo(ctx, obj_surface, tiling, expected_fourcc, obj_surface->subsampling);
/* Error recovery: unwind every surface created in this call. */
728 if (VA_STATUS_SUCCESS != vaStatus) {
729 /* surfaces[i-1] was the last successful allocation */
731 struct object_surface *obj_surface = SURFACE(surfaces[i]);
733 surfaces[i] = VA_INVALID_SURFACE;
735 i965_destroy_surface(&i965->surface_heap, (struct object_base *)obj_surface);
/* Legacy vaCreateSurfaces entry point: thin wrapper that forwards to
 * i965_CreateSurfaces2 (forwarded arguments are missing from this
 * excerpt; presumably no surface attributes are passed). */
743 i965_CreateSurfaces(VADriverContextP ctx,
748 VASurfaceID *surfaces) /* out */
750 return i965_CreateSurfaces2(ctx,
/* vaDestroySurfaces: free each listed surface.  Iterates in reverse
 * order over surface_list. */
761 i965_DestroySurfaces(VADriverContextP ctx,
762 VASurfaceID *surface_list,
765 struct i965_driver_data *i965 = i965_driver_data(ctx);
768 for (i = num_surfaces; i--; ) {
769 struct object_surface *obj_surface = SURFACE(surface_list[i]);
772 i965_destroy_surface(&i965->surface_heap, (struct object_base *)obj_surface);
775 return VA_STATUS_SUCCESS;
/* vaQueryImageFormats: copy every entry of the sentinel-terminated
 * i965_image_formats_map into format_list and report the count.
 * NOTE(review): the '*num_formats = n' store is missing from this
 * excerpt. */
779 i965_QueryImageFormats(VADriverContextP ctx,
780 VAImageFormat *format_list, /* out */
781 int *num_formats) /* out */
785 for (n = 0; i965_image_formats_map[n].va_format.fourcc != 0; n++) {
786 const i965_image_format_map_t * const m = &i965_image_formats_map[n];
788 format_list[n] = m->va_format;
794 return VA_STATUS_SUCCESS;
/*
798 * Guess the format when the usage of a VA surface is unknown
799 * 1. Without a valid context: YV12
800 * 2. The current context is valid:
801 * a) always NV12 on GEN6 and later
802 * b) I420 for MPEG-2 and NV12 for other codec on GEN4 & GEN5
 */
/* NOTE(review): is_tiled handling and the early-return paths are missing
 * from this excerpt; only the fourcc selection survives. */
805 i965_guess_surface_format(VADriverContextP ctx,
807 unsigned int *fourcc,
808 unsigned int *is_tiled)
810 struct i965_driver_data *i965 = i965_driver_data(ctx);
811 struct object_context *obj_context = NULL;
812 struct object_config *obj_config = NULL;
/* Default guess: planar YV12 (no context available). */
814 *fourcc = VA_FOURCC('Y', 'V', '1', '2');
817 if (i965->current_context_id == VA_INVALID_ID)
820 obj_context = CONTEXT(i965->current_context_id);
822 if (!obj_context || obj_context->config_id == VA_INVALID_ID)
825 obj_config = CONFIG(obj_context->config_id);
/* GEN6+: NV12 regardless of codec. */
830 if (IS_GEN6(i965->intel.device_id) || IS_GEN7(i965->intel.device_id)) {
831 *fourcc = VA_FOURCC('N', 'V', '1', '2');
/* GEN4/GEN5: I420 for MPEG-2, NV12 otherwise. */
836 switch (obj_config->profile) {
837 case VAProfileMPEG2Simple:
838 case VAProfileMPEG2Main:
839 *fourcc = VA_FOURCC('I', '4', '2', '0');
844 *fourcc = VA_FOURCC('N', 'V', '1', '2');
/* vaQuerySubpictureFormats: copy every entry of the sentinel-terminated
 * i965_subpic_formats_map (format plus its VA flags) to the caller.
 * NOTE(review): the '*num_formats = n' store is missing from this
 * excerpt. */
851 i965_QuerySubpictureFormats(VADriverContextP ctx,
852 VAImageFormat *format_list, /* out */
853 unsigned int *flags, /* out */
854 unsigned int *num_formats) /* out */
858 for (n = 0; i965_subpic_formats_map[n].va_format.fourcc != 0; n++) {
859 const i965_subpic_format_map_t * const m = &i965_subpic_formats_map[n];
861 format_list[n] = m->va_format;
863 flags[n] = m->va_flags;
869 return VA_STATUS_SUCCESS;
/* Heap destructor for subpicture objects: the subpicture borrows its BO
 * from the backing image, so only the heap slot is released. */
873 i965_destroy_subpic(struct object_heap *heap, struct object_base *obj)
875 // struct object_subpic *obj_subpic = (struct object_subpic *)obj;
877 object_heap_free(heap, obj);
/* vaCreateSubpicture: wrap an existing VAImage as a subpicture.  Fails if
 * the heap slot cannot be allocated, the image is invalid, or the image
 * format has no entry in i965_subpic_formats_map.  The subpicture shares
 * the image's BO (no extra reference is visibly taken here).
 * NOTE(review): the if-guards in front of the three error returns, and a
 * missing ';' after NEW_SUBPIC_ID(), are lost in this excerpt. */
881 i965_CreateSubpicture(VADriverContextP ctx,
883 VASubpictureID *subpicture) /* out */
885 struct i965_driver_data *i965 = i965_driver_data(ctx);
886 VASubpictureID subpicID = NEW_SUBPIC_ID()
887 struct object_subpic *obj_subpic = SUBPIC(subpicID);
890 return VA_STATUS_ERROR_ALLOCATION_FAILED;
892 struct object_image *obj_image = IMAGE(image);
894 return VA_STATUS_ERROR_INVALID_IMAGE;
896 const i965_subpic_format_map_t * const m = get_subpic_format(&obj_image->image.format);
898 return VA_STATUS_ERROR_UNKNOWN; /* XXX: VA_STATUS_ERROR_UNSUPPORTED_FORMAT? */
900 *subpicture = subpicID;
901 obj_subpic->image = image;
902 obj_subpic->format = m->format;
903 obj_subpic->width = obj_image->image.width;
904 obj_subpic->height = obj_image->image.height;
905 obj_subpic->pitch = obj_image->image.pitches[0];
906 obj_subpic->bo = obj_image->bo;
907 obj_subpic->global_alpha = 1.0;
909 return VA_STATUS_SUCCESS;
/* vaDestroySubpicture: free the subpicture object.
 * NOTE(review): no visible NULL check on obj_subpic before destroying —
 * confirm whether the full file validates the handle. */
913 i965_DestroySubpicture(VADriverContextP ctx,
914 VASubpictureID subpicture)
916 struct i965_driver_data *i965 = i965_driver_data(ctx);
917 struct object_subpic *obj_subpic = SUBPIC(subpicture);
918 i965_destroy_subpic(&i965->subpic_heap, (struct object_base *)obj_subpic);
919 return VA_STATUS_SUCCESS;
/* vaSetSubpictureImage: not implemented by this driver. */
923 i965_SetSubpictureImage(VADriverContextP ctx,
924 VASubpictureID subpicture,
928 return VA_STATUS_ERROR_UNIMPLEMENTED;
/* vaSetSubpictureChromakey: not implemented by this driver. */
932 i965_SetSubpictureChromakey(VADriverContextP ctx,
933 VASubpictureID subpicture,
934 unsigned int chromakey_min,
935 unsigned int chromakey_max,
936 unsigned int chromakey_mask)
939 return VA_STATUS_ERROR_UNIMPLEMENTED;
/* vaSetSubpictureGlobalAlpha: store a blending factor in [0.0, 1.0] on
 * the subpicture; out-of-range values are rejected. */
943 i965_SetSubpictureGlobalAlpha(VADriverContextP ctx,
944 VASubpictureID subpicture,
947 struct i965_driver_data *i965 = i965_driver_data(ctx);
948 struct object_subpic *obj_subpic = SUBPIC(subpicture);
950 if(global_alpha > 1.0 || global_alpha < 0.0){
951 return VA_STATUS_ERROR_INVALID_PARAMETER;
953 obj_subpic->global_alpha = global_alpha;
955 return VA_STATUS_SUCCESS;
/* vaAssociateSubpicture: record the src/dst rectangles and flags on the
 * subpicture, then attach it to the first free subpic slot of every
 * target surface.  Fails when a surface is invalid or all
 * I965_MAX_SUBPIC_SUM slots on a surface are taken.
 * NOTE(review): the 'break' inside the slot loop and the surface NULL
 * check are missing from this excerpt. */
959 i965_AssociateSubpicture(VADriverContextP ctx,
960 VASubpictureID subpicture,
961 VASurfaceID *target_surfaces,
963 short src_x, /* upper left offset in subpicture */
965 unsigned short src_width,
966 unsigned short src_height,
967 short dest_x, /* upper left offset in surface */
969 unsigned short dest_width,
970 unsigned short dest_height,
972 * whether to enable chroma-keying or global-alpha
973 * see VA_SUBPICTURE_XXX values
977 struct i965_driver_data *i965 = i965_driver_data(ctx);
978 struct object_subpic *obj_subpic = SUBPIC(subpicture);
981 obj_subpic->src_rect.x = src_x;
982 obj_subpic->src_rect.y = src_y;
983 obj_subpic->src_rect.width = src_width;
984 obj_subpic->src_rect.height = src_height;
985 obj_subpic->dst_rect.x = dest_x;
986 obj_subpic->dst_rect.y = dest_y;
987 obj_subpic->dst_rect.width = dest_width;
988 obj_subpic->dst_rect.height = dest_height;
989 obj_subpic->flags = flags;
991 for (i = 0; i < num_surfaces; i++) {
992 struct object_surface *obj_surface = SURFACE(target_surfaces[i]);
994 return VA_STATUS_ERROR_INVALID_SURFACE;
996 for(j = 0; j < I965_MAX_SUBPIC_SUM; j ++){
997 if(obj_surface->subpic[j] == VA_INVALID_ID){
998 obj_surface->subpic[j] = subpicture;
/* j reaching the limit means no free slot was found on this surface. */
1003 if(j == I965_MAX_SUBPIC_SUM){
1004 return VA_STATUS_ERROR_MAX_NUM_EXCEEDED;
1008 return VA_STATUS_SUCCESS;
/* vaDeassociateSubpicture: clear this subpicture from each target
 * surface's subpic slots.  Mirrors i965_AssociateSubpicture; fails with
 * MAX_NUM_EXCEEDED if the subpicture was not attached to a surface.
 * NOTE(review): 'break' in the slot loop and the surface NULL check are
 * missing from this excerpt. */
1013 i965_DeassociateSubpicture(VADriverContextP ctx,
1014 VASubpictureID subpicture,
1015 VASurfaceID *target_surfaces,
1018 struct i965_driver_data *i965 = i965_driver_data(ctx);
1021 for (i = 0; i < num_surfaces; i++) {
1022 struct object_surface *obj_surface = SURFACE(target_surfaces[i]);
1024 return VA_STATUS_ERROR_INVALID_SURFACE;
1026 for(j = 0; j < I965_MAX_SUBPIC_SUM; j ++){
1027 if(obj_surface->subpic[j] == subpicture){
1028 obj_surface->subpic[j] = VA_INVALID_ID;
1033 if(j == I965_MAX_SUBPIC_SUM){
1034 return VA_STATUS_ERROR_MAX_NUM_EXCEEDED;
1037 return VA_STATUS_SUCCESS;
/* Take a new reference on buffer_store and publish it through *ptr.
 * *ptr must be NULL on entry (asserted) so references are never leaked
 * by overwriting. */
1041 i965_reference_buffer_store(struct buffer_store **ptr,
1042 struct buffer_store *buffer_store)
1044 assert(*ptr == NULL);
1047 buffer_store->ref_count++;
1048 *ptr = buffer_store;
/* Drop one reference on *ptr's buffer store; on the last reference free
 * the backing storage (exactly one of bo/buffer is set, as asserted).
 * NOTE(review): the presumable trailing '*ptr = NULL;' and closing brace
 * are missing from this excerpt. */
1053 i965_release_buffer_store(struct buffer_store **ptr)
1055 struct buffer_store *buffer_store = *ptr;
1057 if (buffer_store == NULL)
1060 assert(buffer_store->bo || buffer_store->buffer);
1061 assert(!(buffer_store->bo && buffer_store->buffer));
1062 buffer_store->ref_count--;
1064 if (buffer_store->ref_count == 0) {
1065 dri_bo_unreference(buffer_store->bo);
1066 free(buffer_store->buffer);
1067 buffer_store->bo = NULL;
1068 buffer_store->buffer = NULL;
/* Heap destructor for context objects: tear down the backend hw_context,
 * then release every buffer_store reference held by the codec state
 * (proc, encode, or decode variant of the union), free the slice arrays
 * and render-target list, and finally free the heap slot. */
1076 i965_destroy_context(struct object_heap *heap, struct object_base *obj)
1078 struct object_context *obj_context = (struct object_context *)obj;
1081 if (obj_context->hw_context) {
1082 obj_context->hw_context->destroy(obj_context->hw_context);
1083 obj_context->hw_context = NULL;
1086 if (obj_context->codec_type == CODEC_PROC) {
1087 i965_release_buffer_store(&obj_context->codec_state.proc.pipeline_param);
1089 } else if (obj_context->codec_type == CODEC_ENC) {
1090 assert(obj_context->codec_state.encode.num_slice_params <= obj_context->codec_state.encode.max_slice_params);
1091 i965_release_buffer_store(&obj_context->codec_state.encode.pic_param);
1092 i965_release_buffer_store(&obj_context->codec_state.encode.seq_param);
1094 for (i = 0; i < obj_context->codec_state.encode.num_slice_params; i++)
1095 i965_release_buffer_store(&obj_context->codec_state.encode.slice_params[i]);
1097 free(obj_context->codec_state.encode.slice_params);
/* "_ext" variants mirror the legacy fields for the newer encode API. */
1099 assert(obj_context->codec_state.encode.num_slice_params_ext <= obj_context->codec_state.encode.max_slice_params_ext);
1100 i965_release_buffer_store(&obj_context->codec_state.encode.pic_param_ext);
1101 i965_release_buffer_store(&obj_context->codec_state.encode.seq_param_ext);
1103 for (i = 0; i < ARRAY_ELEMS(obj_context->codec_state.encode.packed_header_param); i++)
1104 i965_release_buffer_store(&obj_context->codec_state.encode.packed_header_param[i]);
1106 for (i = 0; i < ARRAY_ELEMS(obj_context->codec_state.encode.packed_header_data); i++)
1107 i965_release_buffer_store(&obj_context->codec_state.encode.packed_header_data[i]);
1109 for (i = 0; i < ARRAY_ELEMS(obj_context->codec_state.encode.misc_param); i++)
1110 i965_release_buffer_store(&obj_context->codec_state.encode.misc_param[i]);
1112 for (i = 0; i < obj_context->codec_state.encode.num_slice_params_ext; i++)
1113 i965_release_buffer_store(&obj_context->codec_state.encode.slice_params_ext[i]);
1115 free(obj_context->codec_state.encode.slice_params_ext);
/* Remaining branch: decode state (presumably inside an 'else'). */
1117 assert(obj_context->codec_state.decode.num_slice_params <= obj_context->codec_state.decode.max_slice_params);
1118 assert(obj_context->codec_state.decode.num_slice_datas <= obj_context->codec_state.decode.max_slice_datas);
1120 i965_release_buffer_store(&obj_context->codec_state.decode.pic_param);
1121 i965_release_buffer_store(&obj_context->codec_state.decode.iq_matrix);
1122 i965_release_buffer_store(&obj_context->codec_state.decode.bit_plane);
1124 for (i = 0; i < obj_context->codec_state.decode.num_slice_params; i++)
1125 i965_release_buffer_store(&obj_context->codec_state.decode.slice_params[i]);
1127 for (i = 0; i < obj_context->codec_state.decode.num_slice_datas; i++)
1128 i965_release_buffer_store(&obj_context->codec_state.decode.slice_datas[i]);
1130 free(obj_context->codec_state.decode.slice_params);
1131 free(obj_context->codec_state.decode.slice_datas);
1134 free(obj_context->render_targets);
1135 object_heap_free(heap, obj);
/* vaCreateContext: validate config and picture dimensions, allocate a
 * context object, copy the render-target list, then initialize the
 * codec-specific state (proc/encode/decode) and the backend hw_context
 * chosen from the config's entrypoint.  On failure the half-built
 * context is destroyed.  The new context becomes current on success.
 * NOTE(review): NULL-check of the render_targets calloc result is not
 * visible in this excerpt — confirm against the full file. */
1139 i965_CreateContext(VADriverContextP ctx,
1140 VAConfigID config_id,
1144 VASurfaceID *render_targets,
1145 int num_render_targets,
1146 VAContextID *context) /* out */
1148 struct i965_driver_data *i965 = i965_driver_data(ctx);
1149 struct i965_render_state *render_state = &i965->render_state;
1150 struct object_config *obj_config = CONFIG(config_id);
1151 struct object_context *obj_context = NULL;
1152 VAStatus vaStatus = VA_STATUS_SUCCESS;
1156 if (NULL == obj_config) {
1157 vaStatus = VA_STATUS_ERROR_INVALID_CONFIG;
/* Reject pictures larger than the hardware maximum. */
1161 if (picture_width > i965->codec_info->max_width ||
1162 picture_height > i965->codec_info->max_height) {
1163 vaStatus = VA_STATUS_ERROR_RESOLUTION_NOT_SUPPORTED;
1168 /* Validate picture dimensions */
1169 contextID = NEW_CONTEXT_ID();
1170 obj_context = CONTEXT(contextID);
1172 if (NULL == obj_context) {
1173 vaStatus = VA_STATUS_ERROR_ALLOCATION_FAILED;
1177 render_state->inited = 1;
/* H.264 always renders from NV12 (interleaved UV); other codecs only on
 * GEN6/GEN7. */
1179 switch (obj_config->profile) {
1180 case VAProfileH264Baseline:
1181 case VAProfileH264Main:
1182 case VAProfileH264High:
1183 if (!HAS_H264_DECODING(i965) &&
1184 !HAS_H264_ENCODING(i965))
1185 return VA_STATUS_ERROR_UNSUPPORTED_PROFILE;
1186 render_state->interleaved_uv = 1;
1189 render_state->interleaved_uv = !!(IS_GEN6(i965->intel.device_id) || IS_GEN7(i965->intel.device_id));
1193 *context = contextID;
1194 obj_context->flags = flag;
1195 obj_context->context_id = contextID;
1196 obj_context->config_id = config_id;
1197 obj_context->picture_width = picture_width;
1198 obj_context->picture_height = picture_height;
1199 obj_context->num_render_targets = num_render_targets;
1200 obj_context->render_targets =
1201 (VASurfaceID *)calloc(num_render_targets, sizeof(VASurfaceID));
1202 obj_context->hw_context = NULL;
1204 for(i = 0; i < num_render_targets; i++) {
1205 if (NULL == SURFACE(render_targets[i])) {
1206 vaStatus = VA_STATUS_ERROR_INVALID_SURFACE;
1210 obj_context->render_targets[i] = render_targets[i];
1213 if (VA_STATUS_SUCCESS == vaStatus) {
1214 if (VAEntrypointVideoProc == obj_config->entrypoint) {
1215 obj_context->codec_type = CODEC_PROC;
1216 memset(&obj_context->codec_state.proc, 0, sizeof(obj_context->codec_state.proc));
1217 obj_context->codec_state.proc.current_render_target = VA_INVALID_ID;
1218 assert(i965->codec_info->proc_hw_context_init);
1219 obj_context->hw_context = i965->codec_info->proc_hw_context_init(ctx, obj_config);
1220 } else if (VAEntrypointEncSlice == obj_config->entrypoint) { /*encode routin only*/
1221 obj_context->codec_type = CODEC_ENC;
1222 memset(&obj_context->codec_state.encode, 0, sizeof(obj_context->codec_state.encode));
1223 obj_context->codec_state.encode.current_render_target = VA_INVALID_ID;
1224 obj_context->codec_state.encode.max_slice_params = NUM_SLICES;
1225 obj_context->codec_state.encode.slice_params = calloc(obj_context->codec_state.encode.max_slice_params,
1226 sizeof(*obj_context->codec_state.encode.slice_params));
1227 assert(i965->codec_info->enc_hw_context_init);
1228 obj_context->hw_context = i965->codec_info->enc_hw_context_init(ctx, obj_config);
1230 obj_context->codec_type = CODEC_DEC;
1231 memset(&obj_context->codec_state.decode, 0, sizeof(obj_context->codec_state.decode));
1232 obj_context->codec_state.decode.current_render_target = -1;
1233 obj_context->codec_state.decode.max_slice_params = NUM_SLICES;
1234 obj_context->codec_state.decode.max_slice_datas = NUM_SLICES;
1235 obj_context->codec_state.decode.slice_params = calloc(obj_context->codec_state.decode.max_slice_params,
1236 sizeof(*obj_context->codec_state.decode.slice_params));
1237 obj_context->codec_state.decode.slice_datas = calloc(obj_context->codec_state.decode.max_slice_datas,
1238 sizeof(*obj_context->codec_state.decode.slice_datas));
1240 assert(i965->codec_info->dec_hw_context_init);
1241 obj_context->hw_context = i965->codec_info->dec_hw_context_init(ctx, obj_config);
1245 /* Error recovery */
1246 if (VA_STATUS_SUCCESS != vaStatus) {
1247 i965_destroy_context(&i965->context_heap, (struct object_base *)obj_context);
1250 i965->current_context_id = contextID;
1256 i965_DestroyContext(VADriverContextP ctx, VAContextID context)
1258 struct i965_driver_data *i965 = i965_driver_data(ctx);
1259 struct object_context *obj_context = CONTEXT(context);
1261 assert(obj_context);
1263 if (i965->current_context_id == context)
1264 i965->current_context_id = VA_INVALID_ID;
1266 i965_destroy_context(&i965->context_heap, (struct object_base *)obj_context);
1268 return VA_STATUS_SUCCESS;
1272 i965_destroy_buffer(struct object_heap *heap, struct object_base *obj)
1274 struct object_buffer *obj_buffer = (struct object_buffer *)obj;
1276 assert(obj_buffer->buffer_store);
1277 i965_release_buffer_store(&obj_buffer->buffer_store);
1278 object_heap_free(heap, obj);
/*
 * i965_create_buffer_internal:
 * Creates a buffer object of the given type and backs it with either a
 * caller-supplied BO (store_bo), a freshly allocated BO (slice data /
 * image / coded buffers), or a malloc'ed copy of "data".
 */
1282 i965_create_buffer_internal(VADriverContextP ctx,
1283 VAContextID context,
1286 unsigned int num_elements,
1291 struct i965_driver_data *i965 = i965_driver_data(ctx);
1292 struct object_buffer *obj_buffer = NULL;
1293 struct buffer_store *buffer_store = NULL;
/* Whitelist of buffer types this driver accepts; anything else is rejected. */
1298 case VAPictureParameterBufferType:
1299 case VAIQMatrixBufferType:
1300 case VAQMatrixBufferType:
1301 case VABitPlaneBufferType:
1302 case VASliceGroupMapBufferType:
1303 case VASliceParameterBufferType:
1304 case VASliceDataBufferType:
1305 case VAMacroblockParameterBufferType:
1306 case VAResidualDataBufferType:
1307 case VADeblockingParameterBufferType:
1308 case VAImageBufferType:
1309 case VAEncCodedBufferType:
1310 case VAEncSequenceParameterBufferType:
1311 case VAEncPictureParameterBufferType:
1312 case VAEncSliceParameterBufferType:
1313 case VAEncPackedHeaderParameterBufferType:
1314 case VAEncPackedHeaderDataBufferType:
1315 case VAEncMiscParameterBufferType:
1316 case VAProcPipelineParameterBufferType:
1317 case VAProcFilterParameterBufferType:
1318 case VAHuffmanTableBufferType:
1323 return VA_STATUS_ERROR_UNSUPPORTED_BUFFERTYPE;
1326 bufferID = NEW_BUFFER_ID();
1327 obj_buffer = BUFFER(bufferID);
1329 if (NULL == obj_buffer) {
1330 return VA_STATUS_ERROR_ALLOCATION_FAILED;
/* Coded buffers get extra room for the VACodedBufferSegment header plus
 * a page used by the overflow upper-bound check in i965_MapBuffer. */
1333 if (type == VAEncCodedBufferType) {
1334 size += I965_CODEDBUFFER_SIZE;
1335 size += 0x1000; /* for upper bound check */
1338 obj_buffer->max_num_elements = num_elements;
1339 obj_buffer->num_elements = num_elements;
1340 obj_buffer->size_element = size;
1341 obj_buffer->type = type;
1342 obj_buffer->buffer_store = NULL;
/* NOTE(review): calloc failure is only caught by assert; in NDEBUG builds
 * a NULL buffer_store would be dereferenced below — verify policy. */
1343 buffer_store = calloc(1, sizeof(struct buffer_store));
1344 assert(buffer_store);
1345 buffer_store->ref_count = 1;
1347 if (store_bo != NULL) {
/* Reuse the caller's BO; optionally seed it with "data". */
1348 buffer_store->bo = store_bo;
1349 dri_bo_reference(buffer_store->bo);
1352 dri_bo_subdata(buffer_store->bo, 0, size * num_elements, data);
1353 } else if (type == VASliceDataBufferType ||
1354 type == VAImageBufferType ||
1355 type == VAEncCodedBufferType) {
/* GPU-visible buffer types are backed by a freshly allocated BO. */
1356 buffer_store->bo = dri_bo_alloc(i965->intel.bufmgr,
1358 size * num_elements, 64);
1359 assert(buffer_store->bo);
1361 if (type == VAEncCodedBufferType) {
/* Pre-initialize the coded-buffer segment header at the start of the BO. */
1362 VACodedBufferSegment *coded_buffer_segment;
1363 unsigned char *flag = NULL;
1364 dri_bo_map(buffer_store->bo, 1);
1365 coded_buffer_segment = (VACodedBufferSegment *)buffer_store->bo->virtual;
1366 coded_buffer_segment->size = size - I965_CODEDBUFFER_SIZE;
1367 coded_buffer_segment->bit_offset = 0;
1368 coded_buffer_segment->status = 0;
1369 coded_buffer_segment->buf = NULL;
1370 coded_buffer_segment->next = NULL;
1371 flag = (unsigned char *)(coded_buffer_segment + 1);
1373 dri_bo_unmap(buffer_store->bo);
1375 dri_bo_subdata(buffer_store->bo, 0, size * num_elements, data);
/* All remaining buffer types live in plain system memory. */
1381 if (type == VAEncPackedHeaderDataBufferType) {
1382 msize = ALIGN(size, 4);
1385 buffer_store->buffer = malloc(msize * num_elements);
1386 assert(buffer_store->buffer);
1389 memcpy(buffer_store->buffer, data, size * num_elements);
/* Hand the store to the object; our local reference is then dropped. */
1392 buffer_store->num_elements = obj_buffer->num_elements;
1393 i965_reference_buffer_store(&obj_buffer->buffer_store, buffer_store);
1394 i965_release_buffer_store(&buffer_store);
1397 return VA_STATUS_SUCCESS;
1401 i965_CreateBuffer(VADriverContextP ctx,
1402 VAContextID context, /* in */
1403 VABufferType type, /* in */
1404 unsigned int size, /* in */
1405 unsigned int num_elements, /* in */
1406 void *data, /* in */
1407 VABufferID *buf_id) /* out */
1409 return i965_create_buffer_internal(ctx, context, type, size, num_elements, data, NULL, buf_id);
1414 i965_BufferSetNumElements(VADriverContextP ctx,
1415 VABufferID buf_id, /* in */
1416 unsigned int num_elements) /* in */
1418 struct i965_driver_data *i965 = i965_driver_data(ctx);
1419 struct object_buffer *obj_buffer = BUFFER(buf_id);
1420 VAStatus vaStatus = VA_STATUS_SUCCESS;
1424 if ((num_elements < 0) ||
1425 (num_elements > obj_buffer->max_num_elements)) {
1426 vaStatus = VA_STATUS_ERROR_UNKNOWN;
1428 obj_buffer->num_elements = num_elements;
1429 if (obj_buffer->buffer_store != NULL) {
1430 obj_buffer->buffer_store->num_elements = num_elements;
/*
 * i965_MapBuffer:
 * Maps a buffer into CPU address space. BO-backed buffers are mapped via
 * libdrm (GTT mapping when tiled); malloc-backed buffers return the
 * pointer directly. Coded buffers additionally get their segment
 * size/status fixed up from the hardware-written bitstream.
 */
1438 i965_MapBuffer(VADriverContextP ctx,
1439 VABufferID buf_id, /* in */
1440 void **pbuf) /* out */
1442 struct i965_driver_data *i965 = i965_driver_data(ctx);
1443 struct object_buffer *obj_buffer = BUFFER(buf_id);
1444 VAStatus vaStatus = VA_STATUS_ERROR_UNKNOWN;
/* Exactly one of bo / buffer must back the store. */
1446 assert(obj_buffer && obj_buffer->buffer_store);
1447 assert(obj_buffer->buffer_store->bo || obj_buffer->buffer_store->buffer);
1448 assert(!(obj_buffer->buffer_store->bo && obj_buffer->buffer_store->buffer));
1450 if (NULL != obj_buffer->buffer_store->bo) {
1451 unsigned int tiling, swizzle;
1453 dri_bo_get_tiling(obj_buffer->buffer_store->bo, &tiling, &swizzle);
/* Tiled BOs must go through the GTT so the CPU sees linear data. */
1455 if (tiling != I915_TILING_NONE)
1456 drm_intel_gem_bo_map_gtt(obj_buffer->buffer_store->bo);
1458 dri_bo_map(obj_buffer->buffer_store->bo, 1);
1460 assert(obj_buffer->buffer_store->bo->virtual);
1461 *pbuf = obj_buffer->buffer_store->bo->virtual;
1463 if (obj_buffer->type == VAEncCodedBufferType) {
1465 unsigned char *buffer = NULL;
1466 VACodedBufferSegment *coded_buffer_segment = (VACodedBufferSegment *)(obj_buffer->buffer_store->bo->virtual);
1467 unsigned char *flag = (unsigned char *)(coded_buffer_segment + 1);
/* Payload starts after the I965_CODEDBUFFER_SIZE header area. */
1470 coded_buffer_segment->buf = buffer = (unsigned char *)(obj_buffer->buffer_store->bo->virtual) + I965_CODEDBUFFER_SIZE;
/* Scan for the end of the coded bitstream; reaching the loop bound means
 * the slice data overflowed the buffer. */
1472 for (i = 0; i < obj_buffer->size_element - I965_CODEDBUFFER_SIZE - 3 - 0x1000; i++) {
1481 if (i == obj_buffer->size_element - I965_CODEDBUFFER_SIZE - 3 - 0x1000) {
1482 coded_buffer_segment->status |= VA_CODED_BUF_STATUS_SLICE_OVERFLOW_MASK;
1485 coded_buffer_segment->size = i;
1488 assert(coded_buffer_segment->buf);
1492 vaStatus = VA_STATUS_SUCCESS;
1493 } else if (NULL != obj_buffer->buffer_store->buffer) {
/* malloc-backed store: hand out the pointer directly. */
1494 *pbuf = obj_buffer->buffer_store->buffer;
1495 vaStatus = VA_STATUS_SUCCESS;
/*
 * i965_UnmapBuffer:
 * Releases the CPU mapping created by i965_MapBuffer, using the same
 * tiled-vs-linear rule so map/unmap calls stay paired.
 */
1502 i965_UnmapBuffer(VADriverContextP ctx, VABufferID buf_id)
1504 struct i965_driver_data *i965 = i965_driver_data(ctx);
1505 struct object_buffer *obj_buffer = BUFFER(buf_id);
1506 VAStatus vaStatus = VA_STATUS_ERROR_UNKNOWN;
/* Same invariants as in i965_MapBuffer: exactly one backing kind. */
1508 assert(obj_buffer && obj_buffer->buffer_store);
1509 assert(obj_buffer->buffer_store->bo || obj_buffer->buffer_store->buffer);
1510 assert(!(obj_buffer->buffer_store->bo && obj_buffer->buffer_store->buffer));
1512 if (NULL != obj_buffer->buffer_store->bo) {
1513 unsigned int tiling, swizzle;
1515 dri_bo_get_tiling(obj_buffer->buffer_store->bo, &tiling, &swizzle);
/* Mirror of the mapping path: GTT unmap for tiled BOs. */
1517 if (tiling != I915_TILING_NONE)
1518 drm_intel_gem_bo_unmap_gtt(obj_buffer->buffer_store->bo);
1520 dri_bo_unmap(obj_buffer->buffer_store->bo);
1522 vaStatus = VA_STATUS_SUCCESS;
1523 } else if (NULL != obj_buffer->buffer_store->buffer) {
/* malloc-backed stores need no unmapping. */
1525 vaStatus = VA_STATUS_SUCCESS;
1532 i965_DestroyBuffer(VADriverContextP ctx, VABufferID buffer_id)
1534 struct i965_driver_data *i965 = i965_driver_data(ctx);
1535 struct object_buffer *obj_buffer = BUFFER(buffer_id);
1538 i965_destroy_buffer(&i965->buffer_heap, (struct object_base *)obj_buffer);
1540 return VA_STATUS_SUCCESS;
/*
 * i965_BeginPicture:
 * Starts decode/encode/processing of one frame: checks the configured
 * profile, records the render target and releases per-frame parameter
 * buffers left over from the previous frame.
 */
1544 i965_BeginPicture(VADriverContextP ctx,
1545 VAContextID context,
1546 VASurfaceID render_target)
1548 struct i965_driver_data *i965 = i965_driver_data(ctx);
1549 struct object_context *obj_context = CONTEXT(context);
1550 struct object_surface *obj_surface = SURFACE(render_target);
1551 struct object_config *obj_config;
1556 assert(obj_context);
1557 assert(obj_surface);
1559 config = obj_context->config_id;
1560 obj_config = CONFIG(config);
/* Profile gate: known profiles pass, anything else is rejected below. */
1563 switch (obj_config->profile) {
1564 case VAProfileMPEG2Simple:
1565 case VAProfileMPEG2Main:
1566 vaStatus = VA_STATUS_SUCCESS;
1569 case VAProfileH264Baseline:
1570 case VAProfileH264Main:
1571 case VAProfileH264High:
1572 vaStatus = VA_STATUS_SUCCESS;
1575 case VAProfileVC1Simple:
1576 case VAProfileVC1Main:
1577 case VAProfileVC1Advanced:
1578 vaStatus = VA_STATUS_SUCCESS;
1581 case VAProfileJPEGBaseline:
1582 vaStatus = VA_STATUS_SUCCESS;
1586 vaStatus = VA_STATUS_SUCCESS;
1591 vaStatus = VA_STATUS_ERROR_UNSUPPORTED_PROFILE;
/* Reset per-frame state for whichever codec type this context runs. */
1595 if (obj_context->codec_type == CODEC_PROC) {
1596 obj_context->codec_state.proc.current_render_target = render_target;
1597 } else if (obj_context->codec_type == CODEC_ENC) {
/* Encode: drop all parameter stores captured for the previous frame. */
1598 i965_release_buffer_store(&obj_context->codec_state.encode.pic_param);
1600 for (i = 0; i < obj_context->codec_state.encode.num_slice_params; i++) {
1601 i965_release_buffer_store(&obj_context->codec_state.encode.slice_params[i]);
1604 obj_context->codec_state.encode.num_slice_params = 0;
1607 i965_release_buffer_store(&obj_context->codec_state.encode.pic_param_ext);
1609 for (i = 0; i < ARRAY_ELEMS(obj_context->codec_state.encode.packed_header_param); i++)
1610 i965_release_buffer_store(&obj_context->codec_state.encode.packed_header_param[i]);
1612 for (i = 0; i < ARRAY_ELEMS(obj_context->codec_state.encode.packed_header_data); i++)
1613 i965_release_buffer_store(&obj_context->codec_state.encode.packed_header_data[i]);
1615 for (i = 0; i < obj_context->codec_state.encode.num_slice_params_ext; i++)
1616 i965_release_buffer_store(&obj_context->codec_state.encode.slice_params_ext[i]);
1618 obj_context->codec_state.encode.num_slice_params_ext = 0;
1619 obj_context->codec_state.encode.current_render_target = render_target; /*This is input new frame*/
1620 obj_context->codec_state.encode.last_packed_header_type = 0;
/* Decode: likewise release the previous frame's parameter stores. */
1622 obj_context->codec_state.decode.current_render_target = render_target;
1623 i965_release_buffer_store(&obj_context->codec_state.decode.pic_param);
1624 i965_release_buffer_store(&obj_context->codec_state.decode.iq_matrix);
1625 i965_release_buffer_store(&obj_context->codec_state.decode.bit_plane);
1626 i965_release_buffer_store(&obj_context->codec_state.decode.huffman_table);
1628 for (i = 0; i < obj_context->codec_state.decode.num_slice_params; i++) {
1629 i965_release_buffer_store(&obj_context->codec_state.decode.slice_params[i]);
1630 i965_release_buffer_store(&obj_context->codec_state.decode.slice_datas[i]);
1633 obj_context->codec_state.decode.num_slice_params = 0;
1634 obj_context->codec_state.decode.num_slice_datas = 0;
/* Expands to a call of the per-category render helper generated below;
 * relies on ctx/obj_context/obj_buffer being in scope at the call site. */
1640 #define I965_RENDER_BUFFER(category, name) i965_render_##category##_##name##_buffer(ctx, obj_context, obj_buffer)
/* Generates a renderer for a buffer the codec state keeps exactly one of:
 * the previous store is released and the new one reference-counted in.
 * Packed CPU-side data only — BO-backed stores are rejected by assert. */
1642 #define DEF_RENDER_SINGLE_BUFFER_FUNC(category, name, member) \
1644 i965_render_##category##_##name##_buffer(VADriverContextP ctx, \
1645 struct object_context *obj_context, \
1646 struct object_buffer *obj_buffer) \
1648 struct category##_state *category = &obj_context->codec_state.category; \
1649 assert(obj_buffer->buffer_store->bo == NULL); \
1650 assert(obj_buffer->buffer_store->buffer); \
1651 i965_release_buffer_store(&category->member); \
1652 i965_reference_buffer_store(&category->member, obj_buffer->buffer_store); \
1653 return VA_STATUS_SUCCESS; \
/*
 * Generates a renderer for buffers the codec state keeps an array of
 * (slice parameters / slice data): grows the array in NUM_SLICES steps,
 * then references the new store at the next free slot.
 *
 * Fix: the old expansion assigned realloc()'s result straight back to the
 * array pointer, leaking the original allocation on failure and then
 * dereferencing NULL in memset(); grow through a temporary instead and
 * fail cleanly with VA_STATUS_ERROR_ALLOCATION_FAILED.
 */
#define DEF_RENDER_MULTI_BUFFER_FUNC(category, name, member)            \
    static VAStatus                                                     \
    i965_render_##category##_##name##_buffer(VADriverContextP ctx,      \
                                             struct object_context *obj_context, \
                                             struct object_buffer *obj_buffer) \
    {                                                                   \
        struct category##_state *category = &obj_context->codec_state.category; \
        if (category->num_##member == category->max_##member) {         \
            void *grown_array = realloc(category->member, (category->max_##member + NUM_SLICES) * sizeof(*category->member)); \
            if (!grown_array)                                           \
                return VA_STATUS_ERROR_ALLOCATION_FAILED;               \
            category->member = grown_array;                             \
            memset(category->member + category->max_##member, 0, NUM_SLICES * sizeof(*category->member)); \
            category->max_##member += NUM_SLICES;                       \
        }                                                               \
        i965_release_buffer_store(&category->member[category->num_##member]); \
        i965_reference_buffer_store(&category->member[category->num_##member], obj_buffer->buffer_store); \
        category->num_##member++;                                       \
        return VA_STATUS_SUCCESS;                                       \
    }
/* Decode-side render helpers: single per-frame buffers plus growable
 * slice arrays, instantiated from the templates above. */
1674 #define I965_RENDER_DECODE_BUFFER(name) I965_RENDER_BUFFER(decode, name)
1676 #define DEF_RENDER_DECODE_SINGLE_BUFFER_FUNC(name, member) DEF_RENDER_SINGLE_BUFFER_FUNC(decode, name, member)
1677 DEF_RENDER_DECODE_SINGLE_BUFFER_FUNC(picture_parameter, pic_param)
1678 DEF_RENDER_DECODE_SINGLE_BUFFER_FUNC(iq_matrix, iq_matrix)
1679 DEF_RENDER_DECODE_SINGLE_BUFFER_FUNC(bit_plane, bit_plane)
1680 DEF_RENDER_DECODE_SINGLE_BUFFER_FUNC(huffman_table, huffman_table)
1682 #define DEF_RENDER_DECODE_MULTI_BUFFER_FUNC(name, member) DEF_RENDER_MULTI_BUFFER_FUNC(decode, name, member)
1683 DEF_RENDER_DECODE_MULTI_BUFFER_FUNC(slice_parameter, slice_params)
1684 DEF_RENDER_DECODE_MULTI_BUFFER_FUNC(slice_data, slice_datas)
/*
 * i965_decoder_render_picture:
 * Attaches each client buffer to the decode state; the loop stops at the
 * first unsupported buffer type (vaStatus is checked per iteration).
 */
1687 i965_decoder_render_picture(VADriverContextP ctx,
1688 VAContextID context,
1689 VABufferID *buffers,
1692 struct i965_driver_data *i965 = i965_driver_data(ctx);
1693 struct object_context *obj_context = CONTEXT(context);
1694 VAStatus vaStatus = VA_STATUS_SUCCESS;
1697 for (i = 0; i < num_buffers && vaStatus == VA_STATUS_SUCCESS; i++) {
1698 struct object_buffer *obj_buffer = BUFFER(buffers[i]);
1701 switch (obj_buffer->type) {
1702 case VAPictureParameterBufferType:
1703 vaStatus = I965_RENDER_DECODE_BUFFER(picture_parameter);
1706 case VAIQMatrixBufferType:
1707 vaStatus = I965_RENDER_DECODE_BUFFER(iq_matrix);
1710 case VABitPlaneBufferType:
1711 vaStatus = I965_RENDER_DECODE_BUFFER(bit_plane);
1714 case VASliceParameterBufferType:
1715 vaStatus = I965_RENDER_DECODE_BUFFER(slice_parameter);
1718 case VASliceDataBufferType:
1719 vaStatus = I965_RENDER_DECODE_BUFFER(slice_data);
1722 case VAHuffmanTableBufferType:
1723 vaStatus = I965_RENDER_DECODE_BUFFER(huffman_table);
1727 vaStatus = VA_STATUS_ERROR_UNSUPPORTED_BUFFERTYPE;
/* Encode-side render helpers instantiated from the templates above;
 * both the legacy and the "_ext" parameter variants are generated. */
1735 #define I965_RENDER_ENCODE_BUFFER(name) I965_RENDER_BUFFER(encode, name)
1737 #define DEF_RENDER_ENCODE_SINGLE_BUFFER_FUNC(name, member) DEF_RENDER_SINGLE_BUFFER_FUNC(encode, name, member)
1738 DEF_RENDER_ENCODE_SINGLE_BUFFER_FUNC(sequence_parameter, seq_param)
1739 DEF_RENDER_ENCODE_SINGLE_BUFFER_FUNC(picture_parameter, pic_param)
1740 DEF_RENDER_ENCODE_SINGLE_BUFFER_FUNC(picture_control, pic_control)
1741 DEF_RENDER_ENCODE_SINGLE_BUFFER_FUNC(qmatrix, q_matrix)
1742 DEF_RENDER_ENCODE_SINGLE_BUFFER_FUNC(iqmatrix, iq_matrix)
1743 /* extended buffer */
1744 DEF_RENDER_ENCODE_SINGLE_BUFFER_FUNC(sequence_parameter_ext, seq_param_ext)
1745 DEF_RENDER_ENCODE_SINGLE_BUFFER_FUNC(picture_parameter_ext, pic_param_ext)
1747 #define DEF_RENDER_ENCODE_MULTI_BUFFER_FUNC(name, member) DEF_RENDER_MULTI_BUFFER_FUNC(encode, name, member)
1748 DEF_RENDER_ENCODE_MULTI_BUFFER_FUNC(slice_parameter, slice_params)
1749 DEF_RENDER_ENCODE_MULTI_BUFFER_FUNC(slice_parameter_ext, slice_params_ext)
/*
 * Stores a packed-header parameter buffer at the slot selected by the
 * type index (derived via va_enc_packed_type_to_idx by the caller).
 */
1752 i965_encoder_render_packed_header_parameter_buffer(VADriverContextP ctx,
1753 struct object_context *obj_context,
1754 struct object_buffer *obj_buffer,
1757 struct encode_state *encode = &obj_context->codec_state.encode;
/* Packed headers are CPU-side data, never BO-backed. */
1759 assert(obj_buffer->buffer_store->bo == NULL);
1760 assert(obj_buffer->buffer_store->buffer);
1761 i965_release_buffer_store(&encode->packed_header_param[type_index]);
1762 i965_reference_buffer_store(&encode->packed_header_param[type_index], obj_buffer->buffer_store);
1764 return VA_STATUS_SUCCESS;
/*
 * Stores a packed-header data buffer at the slot selected by the type
 * index; paired with the parameter buffer stored above.
 */
1768 i965_encoder_render_packed_header_data_buffer(VADriverContextP ctx,
1769 struct object_context *obj_context,
1770 struct object_buffer *obj_buffer,
1773 struct encode_state *encode = &obj_context->codec_state.encode;
/* Packed headers are CPU-side data, never BO-backed. */
1775 assert(obj_buffer->buffer_store->bo == NULL);
1776 assert(obj_buffer->buffer_store->buffer);
1777 i965_release_buffer_store(&encode->packed_header_data[type_index]);
1778 i965_reference_buffer_store(&encode->packed_header_data[type_index], obj_buffer->buffer_store);
1780 return VA_STATUS_SUCCESS;
/*
 * Stores a misc encoder parameter buffer, keyed by the type field
 * embedded in the buffer itself.
 * NOTE(review): param->type comes straight from the client and indexes
 * misc_param[] unchecked — verify it is bounded by the array size.
 */
1784 i965_encoder_render_misc_parameter_buffer(VADriverContextP ctx,
1785 struct object_context *obj_context,
1786 struct object_buffer *obj_buffer)
1788 struct encode_state *encode = &obj_context->codec_state.encode;
1789 VAEncMiscParameterBuffer *param = NULL;
1791 assert(obj_buffer->buffer_store->bo == NULL);
1792 assert(obj_buffer->buffer_store->buffer);
1794 param = (VAEncMiscParameterBuffer *)obj_buffer->buffer_store->buffer;
1795 i965_release_buffer_store(&encode->misc_param[param->type]);
1796 i965_reference_buffer_store(&encode->misc_param[param->type], obj_buffer->buffer_store);
1798 return VA_STATUS_SUCCESS;
1802 i965_encoder_render_picture(VADriverContextP ctx,
1803 VAContextID context,
1804 VABufferID *buffers,
1807 struct i965_driver_data *i965 = i965_driver_data(ctx);
1808 struct object_context *obj_context = CONTEXT(context);
1809 VAStatus vaStatus = VA_STATUS_ERROR_UNKNOWN;
1812 for (i = 0; i < num_buffers; i++) {
1813 struct object_buffer *obj_buffer = BUFFER(buffers[i]);
1816 switch (obj_buffer->type) {
1817 case VAQMatrixBufferType:
1818 vaStatus = I965_RENDER_ENCODE_BUFFER(qmatrix);
1821 case VAIQMatrixBufferType:
1822 vaStatus = I965_RENDER_ENCODE_BUFFER(iqmatrix);
1825 case VAEncSequenceParameterBufferType:
1826 vaStatus = I965_RENDER_ENCODE_BUFFER(sequence_parameter_ext);
1829 case VAEncPictureParameterBufferType:
1830 vaStatus = I965_RENDER_ENCODE_BUFFER(picture_parameter_ext);
1833 case VAEncSliceParameterBufferType:
1834 vaStatus = I965_RENDER_ENCODE_BUFFER(slice_parameter_ext);
1837 case VAEncPackedHeaderParameterBufferType:
1839 struct encode_state *encode = &obj_context->codec_state.encode;
1840 VAEncPackedHeaderParameterBuffer *param = (VAEncPackedHeaderParameterBuffer *)obj_buffer->buffer_store->buffer;
1841 encode->last_packed_header_type = param->type;
1843 vaStatus = i965_encoder_render_packed_header_parameter_buffer(ctx,
1846 va_enc_packed_type_to_idx(encode->last_packed_header_type));
1850 case VAEncPackedHeaderDataBufferType:
1852 struct encode_state *encode = &obj_context->codec_state.encode;
1854 assert(encode->last_packed_header_type == VAEncPackedHeaderSequence ||
1855 encode->last_packed_header_type == VAEncPackedHeaderPicture ||
1856 encode->last_packed_header_type == VAEncPackedHeaderSlice ||
1857 ((encode->last_packed_header_type & VAEncPackedHeaderMiscMask == VAEncPackedHeaderMiscMask) &&
1858 (encode->last_packed_header_type & (~VAEncPackedHeaderMiscMask) != 0)));
1859 vaStatus = i965_encoder_render_packed_header_data_buffer(ctx,
1862 va_enc_packed_type_to_idx(encode->last_packed_header_type));
1866 case VAEncMiscParameterBufferType:
1867 vaStatus = i965_encoder_render_misc_parameter_buffer(ctx,
1873 vaStatus = VA_STATUS_ERROR_UNSUPPORTED_BUFFERTYPE;
/* Video-processing render helper, generated from the single-buffer template. */
1881 #define I965_RENDER_PROC_BUFFER(name) I965_RENDER_BUFFER(proc, name)
1883 #define DEF_RENDER_PROC_SINGLE_BUFFER_FUNC(name, member) DEF_RENDER_SINGLE_BUFFER_FUNC(proc, name, member)
1884 DEF_RENDER_PROC_SINGLE_BUFFER_FUNC(pipeline_parameter, pipeline_param)
/*
 * i965_proc_render_picture:
 * Attaches video-processing pipeline parameter buffers to the proc state;
 * the loop stops at the first unsupported buffer type.
 */
1887 i965_proc_render_picture(VADriverContextP ctx,
1888 VAContextID context,
1889 VABufferID *buffers,
1892 struct i965_driver_data *i965 = i965_driver_data(ctx);
1893 struct object_context *obj_context = CONTEXT(context);
1894 VAStatus vaStatus = VA_STATUS_SUCCESS;
1897 for (i = 0; i < num_buffers && vaStatus == VA_STATUS_SUCCESS; i++) {
1898 struct object_buffer *obj_buffer = BUFFER(buffers[i]);
1901 switch (obj_buffer->type) {
1902 case VAProcPipelineParameterBufferType:
1903 vaStatus = I965_RENDER_PROC_BUFFER(pipeline_parameter);
1907 vaStatus = VA_STATUS_ERROR_UNSUPPORTED_BUFFERTYPE;
/*
 * i965_RenderPicture:
 * Routes the buffer list to the proc / encode / decode handler based on
 * the entrypoint of the context's configuration.
 */
1916 i965_RenderPicture(VADriverContextP ctx,
1917 VAContextID context,
1918 VABufferID *buffers,
1921 struct i965_driver_data *i965 = i965_driver_data(ctx);
1922 struct object_context *obj_context;
1923 struct object_config *obj_config;
1925 VAStatus vaStatus = VA_STATUS_ERROR_UNKNOWN;
1927 obj_context = CONTEXT(context);
1928 assert(obj_context);
1930 config = obj_context->config_id;
1931 obj_config = CONFIG(config);
/* Dispatch on entrypoint: video processing, slice encode, or decode. */
1934 if (VAEntrypointVideoProc == obj_config->entrypoint) {
1935 vaStatus = i965_proc_render_picture(ctx, context, buffers, num_buffers);
1936 } else if (VAEntrypointEncSlice == obj_config->entrypoint ) {
1937 vaStatus = i965_encoder_render_picture(ctx, context, buffers, num_buffers);
1939 vaStatus = i965_decoder_render_picture(ctx, context, buffers, num_buffers);
/*
 * i965_EndPicture:
 * Validates that the per-frame state required by the context's codec type
 * is present, then kicks the hardware pipeline via hw_context->run().
 */
1946 i965_EndPicture(VADriverContextP ctx, VAContextID context)
1948 struct i965_driver_data *i965 = i965_driver_data(ctx);
1949 struct object_context *obj_context = CONTEXT(context);
1950 struct object_config *obj_config;
1953 assert(obj_context);
1954 config = obj_context->config_id;
1955 obj_config = CONFIG(config);
1958 if (obj_context->codec_type == CODEC_PROC) {
1959 assert(VAEntrypointVideoProc == obj_config->entrypoint);
1960 } else if (obj_context->codec_type == CODEC_ENC) {
1961 assert(VAEntrypointEncSlice == obj_config->entrypoint);
/* Encoding needs picture + sequence parameters and at least one slice. */
1963 assert(obj_context->codec_state.encode.pic_param ||
1964 obj_context->codec_state.encode.pic_param_ext);
1965 assert(obj_context->codec_state.encode.seq_param ||
1966 obj_context->codec_state.encode.seq_param_ext);
1967 assert(obj_context->codec_state.encode.num_slice_params >= 1 ||
1968 obj_context->codec_state.encode.num_slice_params_ext >= 1);
/* Decoding needs a picture parameter and matching slice param/data pairs. */
1970 assert(obj_context->codec_state.decode.pic_param);
1971 assert(obj_context->codec_state.decode.num_slice_params >= 1);
1972 assert(obj_context->codec_state.decode.num_slice_datas >= 1);
1973 assert(obj_context->codec_state.decode.num_slice_params == obj_context->codec_state.decode.num_slice_datas);
1976 assert(obj_context->hw_context->run);
1977 obj_context->hw_context->run(ctx, obj_config->profile, &obj_context->codec_state, obj_context->hw_context);
1979 return VA_STATUS_SUCCESS;
/*
 * i965_SyncSurface:
 * Blocks until all pending GPU rendering to the surface's BO has finished.
 */
1983 i965_SyncSurface(VADriverContextP ctx,
1984 VASurfaceID render_target)
1986 struct i965_driver_data *i965 = i965_driver_data(ctx);
1987 struct object_surface *obj_surface = SURFACE(render_target);
1989 assert(obj_surface);
1992 drm_intel_bo_wait_rendering(obj_surface->bo);
1994 return VA_STATUS_SUCCESS;
1998 i965_QuerySurfaceStatus(VADriverContextP ctx,
1999 VASurfaceID render_target,
2000 VASurfaceStatus *status) /* out */
2002 struct i965_driver_data *i965 = i965_driver_data(ctx);
2003 struct object_surface *obj_surface = SURFACE(render_target);
2005 assert(obj_surface);
2007 if (obj_surface->bo) {
2008 if (drm_intel_bo_busy(obj_surface->bo)){
2009 *status = VASurfaceRendering;
2012 *status = VASurfaceReady;
2015 *status = VASurfaceReady;
2018 return VA_STATUS_SUCCESS;
2021 static VADisplayAttribute *
2022 get_display_attribute(VADriverContextP ctx, VADisplayAttribType type)
2024 struct i965_driver_data * const i965 = i965_driver_data(ctx);
2027 if (!i965->display_attributes)
2030 for (i = 0; i < i965->num_display_attributes; i++) {
2031 if (i965->display_attributes[i].type == type)
2032 return &i965->display_attributes[i];
/*
 * i965_display_attributes_init:
 * Copies the static attribute table into per-driver mutable storage and
 * caches a pointer to the rotation attribute.
 */
2038 i965_display_attributes_init(VADriverContextP ctx)
2040 struct i965_driver_data * const i965 = i965_driver_data(ctx);
2042 i965->num_display_attributes = ARRAY_ELEMS(i965_display_attributes);
2043 i965->display_attributes = malloc(
2044 i965->num_display_attributes * sizeof(i965->display_attributes[0]));
2045 if (!i965->display_attributes)
/* Seed the mutable copy from the immutable defaults. */
2049 i965->display_attributes,
2050 i965_display_attributes,
2051 sizeof(i965_display_attributes)
/* Rotation is consulted on hot paths, so cache its slot up front. */
2054 i965->rotation_attrib = get_display_attribute(ctx, VADisplayAttribRotation);
2055 if (!i965->rotation_attrib)
2061 i965_display_attributes_terminate(VADriverContextP ctx)
2063 struct i965_driver_data * const i965 = i965_driver_data(ctx);
2065 if (i965->display_attributes) {
2066 free(i965->display_attributes);
2067 i965->display_attributes = NULL;
2068 i965->num_display_attributes = 0;
2073 * Query display attributes
2074 * The caller must provide a "attr_list" array that can hold at
2075 * least vaMaxNumDisplayAttributes() entries. The actual number of attributes
2076 * returned in "attr_list" is returned in "num_attributes".
2079 i965_QueryDisplayAttributes(
2080 VADriverContextP ctx,
2081 VADisplayAttribute *attribs, /* out */
2082 int *num_attribs_ptr /* out */
2085 const int num_attribs = ARRAY_ELEMS(i965_display_attributes);
2087 if (attribs && num_attribs > 0)
2088 memcpy(attribs, i965_display_attributes, sizeof(i965_display_attributes));
2090 if (num_attribs_ptr)
2091 *num_attribs_ptr = num_attribs;
2093 return VA_STATUS_SUCCESS;
2097 * Get display attributes
2098 * This function returns the current attribute values in "attr_list".
2099 * Only attributes returned with VA_DISPLAY_ATTRIB_GETTABLE set in the "flags" field
2100 * from vaQueryDisplayAttributes() can have their values retrieved.
2103 i965_GetDisplayAttributes(
2104 VADriverContextP ctx,
2105 VADisplayAttribute *attribs, /* inout */
2106 int num_attribs /* in */
2111 for (i = 0; i < num_attribs; i++) {
2112 VADisplayAttribute *src_attrib, * const dst_attrib = &attribs[i];
2114 src_attrib = get_display_attribute(ctx, dst_attrib->type);
2115 if (src_attrib && (src_attrib->flags & VA_DISPLAY_ATTRIB_GETTABLE)) {
2116 dst_attrib->min_value = src_attrib->min_value;
2117 dst_attrib->max_value = src_attrib->max_value;
2118 dst_attrib->value = src_attrib->value;
2121 dst_attrib->flags = VA_DISPLAY_ATTRIB_NOT_SUPPORTED;
2123 return VA_STATUS_SUCCESS;
2127 * Set display attributes
2128 * Only attributes returned with VA_DISPLAY_ATTRIB_SETTABLE set in the "flags" field
2129 * from vaQueryDisplayAttributes() can be set. If the attribute is not settable or
2130 * the value is out of range, the function returns VA_STATUS_ERROR_ATTR_NOT_SUPPORTED
2133 i965_SetDisplayAttributes(
2134 VADriverContextP ctx,
2135 VADisplayAttribute *attribs, /* in */
2136 int num_attribs /* in */
2141 for (i = 0; i < num_attribs; i++) {
2142 VADisplayAttribute *dst_attrib, * const src_attrib = &attribs[i];
2144 dst_attrib = get_display_attribute(ctx, src_attrib->type);
/* Unknown or non-settable attribute types are rejected outright. */
2146 return VA_STATUS_ERROR_ATTR_NOT_SUPPORTED;
2148 if (!(dst_attrib->flags & VA_DISPLAY_ATTRIB_SETTABLE))
/* Values outside the advertised [min, max] range are invalid. */
2151 if (src_attrib->value < dst_attrib->min_value ||
2152 src_attrib->value > dst_attrib->max_value)
2153 return VA_STATUS_ERROR_INVALID_PARAMETER;
2155 dst_attrib->value = src_attrib->value;
2156 /* XXX: track modified attributes through timestamps */
2158 return VA_STATUS_SUCCESS;
2162 i965_DbgCopySurfaceToBuffer(VADriverContextP ctx,
2163 VASurfaceID surface,
2164 void **buffer, /* out */
2165 unsigned int *stride) /* out */
2168 return VA_STATUS_ERROR_UNIMPLEMENTED;
/*
 * i965_Init:
 * Driver bring-up: opens the DRM device, selects the per-generation codec
 * table, then initializes the batchbuffer, display attributes,
 * post-processing, rendering and the window-system output path.
 */
2172 i965_Init(VADriverContextP ctx)
2174 struct i965_driver_data *i965 = i965_driver_data(ctx);
2176 if (intel_driver_init(ctx) == False)
2177 return VA_STATUS_ERROR_UNKNOWN;
/* Pick codec capabilities from the detected PCI device id; unknown
 * hardware fails initialization. */
2179 if (IS_HASWELL(i965->intel.device_id))
2180 i965->codec_info = &gen75_hw_codec_info;
2181 else if (IS_G4X(i965->intel.device_id))
2182 i965->codec_info = &g4x_hw_codec_info;
2183 else if (IS_IRONLAKE(i965->intel.device_id))
2184 i965->codec_info = &ironlake_hw_codec_info;
2185 else if (IS_GEN6(i965->intel.device_id))
2186 i965->codec_info = &gen6_hw_codec_info;
2187 else if (IS_GEN7(i965->intel.device_id))
2188 i965->codec_info = &gen7_hw_codec_info;
2190 return VA_STATUS_ERROR_UNKNOWN;
2192 i965->batch = intel_batchbuffer_new(&i965->intel, I915_EXEC_RENDER, 0);
2194 if (!i965_display_attributes_init(ctx))
2195 return VA_STATUS_ERROR_UNKNOWN;
2197 if (i965_post_processing_init(ctx) == False)
2198 return VA_STATUS_ERROR_UNKNOWN;
2200 if (i965_render_init(ctx) == False)
2201 return VA_STATUS_ERROR_UNKNOWN;
2203 #ifdef HAVE_VA_WAYLAND
2204 if (IS_VA_WAYLAND(ctx) && !i965_output_wayland_init(ctx))
2205 return VA_STATUS_ERROR_UNKNOWN;
2209 if (IS_VA_X11(ctx) && !i965_output_dri_init(ctx))
2210 return VA_STATUS_ERROR_UNKNOWN;
2213 _i965InitMutex(&i965->render_mutex);
2214 _i965InitMutex(&i965->pp_mutex);
2216 return VA_STATUS_SUCCESS;
/*
 * i965_destroy_heap:
 * Iterates every live object in the heap, destroys each via "func", then
 * tears down the heap itself.
 */
2220 i965_destroy_heap(struct object_heap *heap, 
2221 void (*func)(struct object_heap *heap, struct object_base *object))
2223 struct object_base *object;
2224 object_heap_iterator iter;
2226 object = object_heap_first(heap, &iter);
2232 object = object_heap_next(heap, &iter);
2235 object_heap_destroy(heap);
2240 i965_DestroyImage(VADriverContextP ctx, VAImageID image);
/*
 * i965_CreateImage:
 * Allocates a VAImage of the requested fourcc/size: fills in the
 * per-format plane layout, creates the backing VA buffer and, for
 * paletted formats, the palette array.
 */
2243 i965_CreateImage(VADriverContextP ctx,
2244 VAImageFormat *format,
2247 VAImage *out_image) /* out */
2249 struct i965_driver_data *i965 = i965_driver_data(ctx);
2250 struct object_image *obj_image;
2251 VAStatus va_status = VA_STATUS_ERROR_OPERATION_FAILED;
2253 unsigned int width2, height2, size2, size;
/* Mark the out-params invalid up front so error paths stay consistent. */
2255 out_image->image_id = VA_INVALID_ID;
2256 out_image->buf = VA_INVALID_ID;
2258 image_id = NEW_IMAGE_ID();
2259 if (image_id == VA_INVALID_ID)
2260 return VA_STATUS_ERROR_ALLOCATION_FAILED;
2262 obj_image = IMAGE(image_id);
2264 return VA_STATUS_ERROR_ALLOCATION_FAILED;
2265 obj_image->bo = NULL;
2266 obj_image->palette = NULL;
2267 obj_image->derived_surface = VA_INVALID_ID;
2269 VAImage * const image = &obj_image->image;
2270 image->image_id = image_id;
2271 image->buf = VA_INVALID_ID;
/* Luma size plus rounded-up chroma plane dimensions for 4:2:0 layouts. */
2273 size = width * height;
2274 width2 = (width + 1) / 2;
2275 height2 = (height + 1) / 2;
2276 size2 = width2 * height2;
2278 image->num_palette_entries = 0;
2279 image->entry_bytes = 0;
2280 memset(image->component_order, 0, sizeof(image->component_order));
/* Per-fourcc plane layout: pitches, offsets and total data size. */
2282 switch (format->fourcc) {
2283 case VA_FOURCC('I','A','4','4'):
2284 case VA_FOURCC('A','I','4','4'):
2285 image->num_planes = 1;
2286 image->pitches[0] = width;
2287 image->offsets[0] = 0;
2288 image->data_size = image->offsets[0] + image->pitches[0] * height;
2289 image->num_palette_entries = 16;
2290 image->entry_bytes = 3;
2291 image->component_order[0] = 'R';
2292 image->component_order[1] = 'G';
2293 image->component_order[2] = 'B';
2295 case VA_FOURCC('B','G','R','A'):
2296 case VA_FOURCC('R','G','B','A'):
2297 case VA_FOURCC('B','G','R','X'):
2298 case VA_FOURCC('R','G','B','X'):
2299 image->num_planes = 1;
2300 image->pitches[0] = width * 4;
2301 image->offsets[0] = 0;
2302 image->data_size = image->offsets[0] + image->pitches[0] * height;
2304 case VA_FOURCC('Y','V','1','2'):
/* YV12: V plane precedes U, hence the swapped offsets. */
2305 image->num_planes = 3;
2306 image->pitches[0] = width;
2307 image->offsets[0] = 0;
2308 image->pitches[1] = width2;
2309 image->offsets[1] = size + size2;
2310 image->pitches[2] = width2;
2311 image->offsets[2] = size;
2312 image->data_size = size + 2 * size2;
2314 case VA_FOURCC('I','4','2','0'):
2315 image->num_planes = 3;
2316 image->pitches[0] = width;
2317 image->offsets[0] = 0;
2318 image->pitches[1] = width2;
2319 image->offsets[1] = size;
2320 image->pitches[2] = width2;
2321 image->offsets[2] = size + size2;
2322 image->data_size = size + 2 * size2;
2324 case VA_FOURCC('N','V','1','2'):
2325 image->num_planes = 2;
2326 image->pitches[0] = width;
2327 image->offsets[0] = 0;
2328 image->pitches[1] = width;
2329 image->offsets[1] = size;
2330 image->data_size = size + 2 * size2;
2332 case VA_FOURCC('Y','U','Y','2'):
2333 case VA_FOURCC('U','Y','V','Y'):
2334 image->num_planes = 1;
2335 image->pitches[0] = width * 2;
2336 image->offsets[0] = 0;
2337 image->data_size = size * 2;
/* Back the image with a VA buffer; the BO it wraps is referenced here. */
2343 va_status = i965_CreateBuffer(ctx, 0, VAImageBufferType,
2344 image->data_size, 1, NULL, &image->buf);
2345 if (va_status != VA_STATUS_SUCCESS)
2348 obj_image->bo = BUFFER(image->buf)->buffer_store->bo;
2349 dri_bo_reference(obj_image->bo);
2351 if (image->num_palette_entries > 0 && image->entry_bytes > 0) {
/* NOTE(review): sizeof(obj_image->palette) is the size of the POINTER,
 * not of a palette entry — harmless over-allocation on LP64, but
 * sizeof(*obj_image->palette) looks intended; verify. */
2352 obj_image->palette = malloc(image->num_palette_entries * sizeof(obj_image->palette));
2353 if (!obj_image->palette)
2357 image->image_id = image_id;
2358 image->format = *format;
2359 image->width = width;
2360 image->height = height;
2362 *out_image = *image;
2363 return VA_STATUS_SUCCESS;
/* Error path: tear down the partially constructed image. */
2366 i965_DestroyImage(ctx, image_id);
/*
 * Ensure obj_surface has a backing buffer object (bo) whose layout matches
 * the requested fourcc/subsampling.  If a bo already exists, the existing
 * layout must match (asserted) and nothing is reallocated.  Otherwise the
 * aligned width/height, per-plane offsets/pitches and total size are
 * computed, and a new bo is allocated: Y-tiled via drm_intel_bo_alloc_tiled
 * on the tiled path, linear via dri_bo_alloc otherwise.
 *
 * NOTE(review): the tiled/linear selector parameter between ctx and fourcc
 * is elided in this listing — confirm against the full source.
 */
2371 i965_check_alloc_surface_bo(VADriverContextP ctx,
2372 struct object_surface *obj_surface,
2374 unsigned int fourcc,
2375 unsigned int subsampling)
2377 struct i965_driver_data *i965 = i965_driver_data(ctx);
2378 int region_width, region_height;
/* A surface is bound to a single layout for its lifetime; re-entry with a
   different fourcc/subsampling is a caller bug. */
2380 if (obj_surface->bo) {
2381 assert(obj_surface->fourcc);
2382 assert(obj_surface->fourcc == fourcc);
2383 assert(obj_surface->subsampling == subsampling);
2387 obj_surface->x_cb_offset = 0; /* X offset is always 0 */
2388 obj_surface->x_cr_offset = 0;
/* Tiled path: only these formats are supported as Y-tiled surfaces. */
2391 assert(fourcc == VA_FOURCC('N', 'V', '1', '2') ||
2392 fourcc == VA_FOURCC('I', 'M', 'C', '1') ||
2393 fourcc == VA_FOURCC('I', 'M', 'C', '3') ||
2394 fourcc == VA_FOURCC('R', 'G', 'B', 'A') ||
2395 fourcc == VA_FOURCC('R', 'G', 'B', 'X') ||
2396 fourcc == VA_FOURCC('B', 'G', 'R', 'A') ||
2397 fourcc == VA_FOURCC('B', 'G', 'R', 'X') ||
2398 fourcc == VA_FOURCC('Y', 'U', 'Y', '2'));
/* Hardware alignment: pitch to 128 bytes, rows to 32 (Y-tile granularity). */
2400 obj_surface->width = ALIGN(obj_surface->orig_width, 128);
2401 obj_surface->height = ALIGN(obj_surface->orig_height, 32);
2402 region_height = obj_surface->height;
2404 if (fourcc == VA_FOURCC('N', 'V', '1', '2') ||
2405 fourcc == VA_FOURCC('I', 'M', 'C', '1') ||
2406 fourcc == VA_FOURCC('I', 'M', 'C', '3')) {
2407 obj_surface->cb_cr_pitch = obj_surface->width;
2408 region_width = obj_surface->width;
2410 else if (fourcc == VA_FOURCC('Y','U', 'Y', '2')) {
2411 obj_surface->cb_cr_pitch = obj_surface->width * 2;
2412 region_width = obj_surface->width * 2;
2414 else if (fourcc == VA_FOURCC('R', 'G', 'B', 'A') ||
2415 fourcc == VA_FOURCC('R', 'G', 'B', 'X') ||
2416 fourcc == VA_FOURCC('B', 'G', 'R', 'A') ||
2417 fourcc == VA_FOURCC('B', 'G', 'R', 'X')) {
2418 region_width = obj_surface->width * 4;
/* Plane offsets: NV12 puts the interleaved CbCr plane right after Y. */
2425 if (fourcc == VA_FOURCC('N', 'V', '1', '2')) {
2426 assert(subsampling == SUBSAMPLE_YUV420);
2427 obj_surface->y_cb_offset = obj_surface->height;
2428 obj_surface->y_cr_offset = obj_surface->height;
2429 obj_surface->cb_cr_width = obj_surface->orig_width / 2;
2430 obj_surface->cb_cr_height = obj_surface->orig_height / 2;
2431 region_height = obj_surface->height + ALIGN(obj_surface->cb_cr_height, 32);
2432 } else if (fourcc == VA_FOURCC('I', 'M', 'C', '1') ||
2433 fourcc == VA_FOURCC('I', 'M', 'C', '3') ||
2434 fourcc == VA_FOURCC('R', 'G', 'B', 'A') ||
2435 fourcc == VA_FOURCC('R', 'G', 'B', 'X') ||
2436 fourcc == VA_FOURCC('B', 'G', 'R', 'A') ||
2437 fourcc == VA_FOURCC('B', 'G', 'R', 'X') ||
2438 fourcc == VA_FOURCC('Y', 'U', 'Y', '2')) {
/* Chroma plane dimensions depend only on the subsampling scheme. */
2439 switch (subsampling) {
2440 case SUBSAMPLE_YUV400:
2441 obj_surface->cb_cr_width = 0;
2442 obj_surface->cb_cr_height = 0;
2445 case SUBSAMPLE_YUV420:
2446 obj_surface->cb_cr_width = obj_surface->orig_width / 2;
2447 obj_surface->cb_cr_height = obj_surface->orig_height / 2;
2450 case SUBSAMPLE_YUV422H:
2451 obj_surface->cb_cr_width = obj_surface->orig_width / 2;
2452 obj_surface->cb_cr_height = obj_surface->orig_height;
2455 case SUBSAMPLE_YUV422V:
2456 obj_surface->cb_cr_width = obj_surface->orig_width;
2457 obj_surface->cb_cr_height = obj_surface->orig_height / 2;
2460 case SUBSAMPLE_YUV444:
2461 obj_surface->cb_cr_width = obj_surface->orig_width;
2462 obj_surface->cb_cr_height = obj_surface->orig_height;
2465 case SUBSAMPLE_YUV411:
2466 obj_surface->cb_cr_width = obj_surface->orig_width / 4;
2467 obj_surface->cb_cr_height = obj_surface->orig_height;
2469 case SUBSAMPLE_RGBX:
/* Y plane followed by two 32-row-aligned chroma planes. */
2476 region_height = obj_surface->height + ALIGN(obj_surface->cb_cr_height, 32) * 2;
/* IMC1 stores Cr before Cb; IMC3 stores Cb before Cr. */
2478 if (fourcc == VA_FOURCC('I', 'M', 'C', '1')) {
2479 obj_surface->y_cr_offset = obj_surface->height;
2480 obj_surface->y_cb_offset = obj_surface->y_cr_offset + ALIGN(obj_surface->cb_cr_height, 32);
2481 } else if (fourcc == VA_FOURCC('I', 'M', 'C', '3')){
2482 obj_surface->y_cb_offset = obj_surface->height;
2483 obj_surface->y_cr_offset = obj_surface->y_cb_offset + ALIGN(obj_surface->cb_cr_height, 32);
2485 else if (fourcc == VA_FOURCC('Y','U', 'Y', '2')) {
/* Packed formats: a single plane, no separate chroma offsets. */
2486 obj_surface->y_cb_offset = 0;
2487 obj_surface->y_cr_offset = 0;
2488 region_height = obj_surface->height;
2490 else if (fourcc == VA_FOURCC('R', 'G', 'B', 'A') ||
2491 fourcc == VA_FOURCC('R', 'G', 'B', 'X') ||
2492 fourcc == VA_FOURCC('B', 'G', 'R', 'A') ||
2493 fourcc == VA_FOURCC('B', 'G', 'R', 'X')) {
2494 region_height = obj_surface->height;
/* Linear (untiled) path: IMC1/IMC3 are not supported here. */
2498 assert(fourcc != VA_FOURCC('I', 'M', 'C', '1') &&
2499 fourcc != VA_FOURCC('I', 'M', 'C', '3'));
2500 assert(subsampling == SUBSAMPLE_YUV420 ||
2501 subsampling == SUBSAMPLE_YUV422H ||
2502 subsampling == SUBSAMPLE_YUV422V ||
2503 subsampling == SUBSAMPLE_RGBX);
2505 region_width = obj_surface->width;
2506 region_height = obj_surface->height;
2509 case VA_FOURCC('N', 'V', '1', '2'):
2510 obj_surface->y_cb_offset = obj_surface->height;
2511 obj_surface->y_cr_offset = obj_surface->height;
2512 obj_surface->cb_cr_width = obj_surface->orig_width / 2;
2513 obj_surface->cb_cr_height = obj_surface->orig_height / 2;
2514 obj_surface->cb_cr_pitch = obj_surface->width;
2515 region_height = obj_surface->height + obj_surface->height / 2;
2518 case VA_FOURCC('Y', 'V', '1', '2'):
2519 case VA_FOURCC('I', '4', '2', '0'):
/* YV12 and I420 differ only in U/V plane order. */
2520 if (fourcc == VA_FOURCC('Y', 'V', '1', '2')) {
2521 obj_surface->y_cr_offset = obj_surface->height;
2522 obj_surface->y_cb_offset = obj_surface->height + obj_surface->height / 4;
2524 obj_surface->y_cb_offset = obj_surface->height;
2525 obj_surface->y_cr_offset = obj_surface->height + obj_surface->height / 4;
2528 obj_surface->cb_cr_width = obj_surface->orig_width / 2;
2529 obj_surface->cb_cr_height = obj_surface->orig_height / 2;
2530 obj_surface->cb_cr_pitch = obj_surface->width / 2;
2531 region_height = obj_surface->height + obj_surface->height / 2;
2534 case VA_FOURCC('Y','U', 'Y', '2'):
2535 obj_surface->y_cb_offset = 0;
2536 obj_surface->y_cr_offset = 0;
2537 obj_surface->cb_cr_width = obj_surface->orig_width / 2;
2538 obj_surface->cb_cr_height = obj_surface->orig_height;
2539 obj_surface->cb_cr_pitch = obj_surface->width * 2;
2540 region_width = obj_surface->width * 2;
2541 region_height = obj_surface->height;
2543 case VA_FOURCC('R', 'G', 'B', 'A'):
2544 case VA_FOURCC('R', 'G', 'B', 'X'):
2545 case VA_FOURCC('B', 'G', 'R', 'A'):
2546 case VA_FOURCC('B', 'G', 'R', 'X'):
2547 region_width = obj_surface->width * 4;
2548 region_height = obj_surface->height;
/* Round the total size up to a whole page (4 KiB). */
2557 obj_surface->size = ALIGN(region_width * region_height, 0x1000);
2560 uint32_t tiling_mode = I915_TILING_Y; /* always uses Y-tiled format */
2561 unsigned long pitch;
2563 obj_surface->bo = drm_intel_bo_alloc_tiled(i965->intel.bufmgr,
/* libdrm may adjust tiling/pitch; verify it kept what we asked for. */
2571 assert(tiling_mode == I915_TILING_Y);
2572 assert(pitch == obj_surface->width ||
2573 pitch == obj_surface->width * 2 ||
2574 pitch == obj_surface->width * 4) ;
2576 obj_surface->bo = dri_bo_alloc(i965->intel.bufmgr,
/* Record the committed layout so later calls can validate against it. */
2582 obj_surface->fourcc = fourcc;
2583 obj_surface->subsampling = subsampling;
2584 assert(obj_surface->bo);
/*
 * vaDeriveImage entry point: create a VAImage that aliases the surface's
 * backing bo directly (no copy).  Pitches/offsets mirror the surface
 * layout computed by i965_check_alloc_surface_bo().  On success the image
 * buffer shares the surface bo, the surface is flagged SURFACE_DERIVED,
 * and *out_image is filled in.
 */
2587 VAStatus i965_DeriveImage(VADriverContextP ctx,
2588 VASurfaceID surface,
2589 VAImage *out_image) /* out */
2591 struct i965_driver_data *i965 = i965_driver_data(ctx);
2592 struct object_image *obj_image;
2593 struct object_surface *obj_surface;
2595 unsigned int w_pitch, h_pitch;
2596 VAStatus va_status = VA_STATUS_ERROR_OPERATION_FAILED;
2598 out_image->image_id = VA_INVALID_ID;
2599 obj_surface = SURFACE(surface);
2602 return VA_STATUS_ERROR_INVALID_SURFACE;
/* Surface not yet rendered to: guess a format (default YV12) and
   allocate its bo now so there is something to derive from. */
2604 if (!obj_surface->bo) {
2605 unsigned int is_tiled = 0;
2606 unsigned int fourcc = VA_FOURCC('Y', 'V', '1', '2');
2607 i965_guess_surface_format(ctx, surface, &fourcc, &is_tiled);
2608 int sampling = get_sampling_from_fourcc(fourcc);
2609 i965_check_alloc_surface_bo(ctx, obj_surface, is_tiled, fourcc, sampling);
2612 assert(obj_surface->fourcc);
2614 w_pitch = obj_surface->width;
2615 h_pitch = obj_surface->height;
2617 image_id = NEW_IMAGE_ID();
2619 if (image_id == VA_INVALID_ID)
2620 return VA_STATUS_ERROR_ALLOCATION_FAILED;
2622 obj_image = IMAGE(image_id);
2625 return VA_STATUS_ERROR_ALLOCATION_FAILED;
2627 obj_image->bo = NULL;
2628 obj_image->palette = NULL;
2629 obj_image->derived_surface = VA_INVALID_ID;
2631 VAImage * const image = &obj_image->image;
2633 memset(image, 0, sizeof(*image));
2634 image->image_id = image_id;
2635 image->buf = VA_INVALID_ID;
2636 image->num_palette_entries = 0;
2637 image->entry_bytes = 0;
2638 image->width = obj_surface->orig_width;
2639 image->height = obj_surface->orig_height;
2640 image->data_size = obj_surface->size;
2642 image->format.fourcc = obj_surface->fourcc;
2643 image->format.byte_order = VA_LSB_FIRST;
/* NOTE(review): 12 bpp is correct for 4:2:0 formats only; it is not
   updated for YUY2 (16) or RGB (32) below — confirm intent. */
2644 image->format.bits_per_pixel = 12;
/* Per-format plane description, aliasing the surface layout. */
2646 switch (image->format.fourcc) {
2647 case VA_FOURCC('Y', 'V', '1', '2'):
2648 image->num_planes = 3;
2649 image->pitches[0] = w_pitch; /* Y */
2650 image->offsets[0] = 0;
2651 image->pitches[1] = obj_surface->cb_cr_pitch; /* V */
2652 image->offsets[1] = w_pitch * obj_surface->y_cr_offset;
2653 image->pitches[2] = obj_surface->cb_cr_pitch; /* U */
2654 image->offsets[2] = w_pitch * obj_surface->y_cb_offset;
2657 case VA_FOURCC('N', 'V', '1', '2'):
2658 image->num_planes = 2;
2659 image->pitches[0] = w_pitch; /* Y */
2660 image->offsets[0] = 0;
2661 image->pitches[1] = obj_surface->cb_cr_pitch; /* UV */
2662 image->offsets[1] = w_pitch * obj_surface->y_cb_offset;
2665 case VA_FOURCC('I', '4', '2', '0'):
2666 image->num_planes = 3;
2667 image->pitches[0] = w_pitch; /* Y */
2668 image->offsets[0] = 0;
2669 image->pitches[1] = obj_surface->cb_cr_pitch; /* U */
2670 image->offsets[1] = w_pitch * obj_surface->y_cb_offset;
2671 image->pitches[2] = obj_surface->cb_cr_pitch; /* V */
2672 image->offsets[2] = w_pitch * obj_surface->y_cr_offset;
2674 case VA_FOURCC('Y', 'U', 'Y', '2'):
2675 image->num_planes = 1;
2676 image->pitches[0] = obj_surface->width * 2; /* Y, width is aligned already */
2677 image->offsets[0] = 0;
2678 image->pitches[1] = obj_surface->width * 2; /* U */
2679 image->offsets[1] = 0;
2680 image->pitches[2] = obj_surface->width * 2; /* V */
2681 image->offsets[2] = 0;
2683 case VA_FOURCC('R', 'G', 'B', 'A'):
2684 case VA_FOURCC('R', 'G', 'B', 'X'):
2685 case VA_FOURCC('B', 'G', 'R', 'A'):
2686 case VA_FOURCC('B', 'G', 'R', 'X'):
2687 image->num_planes = 1;
2688 image->pitches[0] = obj_surface->width * 4;
/* Wrap the surface bo in a VA buffer so vaMapBuffer can expose it. */
2694 va_status = i965_create_buffer_internal(ctx, 0, VAImageBufferType,
2695 obj_surface->size, 1, NULL, obj_surface->bo, &image->buf);
2696 if (va_status != VA_STATUS_SUCCESS)
2699 obj_image->bo = BUFFER(image->buf)->buffer_store->bo;
2700 dri_bo_reference(obj_image->bo);
2702 if (image->num_palette_entries > 0 && image->entry_bytes > 0) {
/* NOTE(review): sizeof(obj_image->palette) is the POINTER size; the
   element size sizeof(*obj_image->palette) was probably intended
   (over-allocates on LP64) — confirm against upstream. */
2703 obj_image->palette = malloc(image->num_palette_entries * sizeof(obj_image->palette));
2704 if (!obj_image->palette) {
2705 va_status = VA_STATUS_ERROR_ALLOCATION_FAILED;
2710 *out_image = *image;
2711 obj_surface->flags |= SURFACE_DERIVED;
2712 obj_image->derived_surface = surface;
2714 return VA_STATUS_SUCCESS;
/* Error path: tear down the partially constructed image. */
2717 i965_DestroyImage(ctx, image_id);
/* Heap destructor callback for image objects: returns the object to its
 * heap.  Resources (bo, buffer, palette) are released by the caller
 * (i965_DestroyImage) before this runs. */
2722 i965_destroy_image(struct object_heap *heap, struct object_base *obj)
2724 object_heap_free(heap, obj);
/*
 * vaDestroyImage entry point: release all resources owned by the image —
 * bo reference, backing VA buffer, palette — clear the derived-surface
 * link, and free the image object.  An unknown image id is treated as
 * success (idempotent destroy).
 */
2729 i965_DestroyImage(VADriverContextP ctx, VAImageID image)
2731 struct i965_driver_data *i965 = i965_driver_data(ctx);
2732 struct object_image *obj_image = IMAGE(image);
2733 struct object_surface *obj_surface;
2736 return VA_STATUS_SUCCESS;
2738 dri_bo_unreference(obj_image->bo);
2739 obj_image->bo = NULL;
2741 if (obj_image->image.buf != VA_INVALID_ID) {
2742 i965_DestroyBuffer(ctx, obj_image->image.buf);
2743 obj_image->image.buf = VA_INVALID_ID;
2746 if (obj_image->palette) {
2747 free(obj_image->palette);
2748 obj_image->palette = NULL;
/* If this image was derived from a surface, drop the derived flag so the
   surface can be derived again later. */
2751 obj_surface = SURFACE(obj_image->derived_surface);
2754 obj_surface->flags &= ~SURFACE_DERIVED;
2757 i965_destroy_image(&i965->image_heap, (struct object_base *)obj_image);
2759 return VA_STATUS_SUCCESS;
* pointer to an array holding the palette data. The size of the array is
* num_palette_entries * entry_bytes in size. The order of the components
* in the palette is described by the component_order in VASubpicture struct
/* vaSetImagePalette entry point: pack each 3-byte R,G,B palette entry
 * into one 0x00RRGGBB word in the image's palette array.  Fails if the
 * image was created without a palette. */
2768 i965_SetImagePalette(VADriverContextP ctx,
2770 unsigned char *palette)
2772 struct i965_driver_data *i965 = i965_driver_data(ctx);
2775 struct object_image *obj_image = IMAGE(image);
2777 return VA_STATUS_ERROR_INVALID_IMAGE;
2779 if (!obj_image->palette)
2780 return VA_STATUS_ERROR_ALLOCATION_FAILED; /* XXX: unpaletted/error */
/* Entries are R,G,B byte triplets; pack as R<<16 | G<<8 | B. */
2782 for (i = 0; i < obj_image->image.num_palette_entries; i++)
2783 obj_image->palette[i] = (((unsigned int)palette[3*i + 0] << 16) |
2784 ((unsigned int)palette[3*i + 1] << 8) |
2785 (unsigned int)palette[3*i + 2]);
2786 return VA_STATUS_SUCCESS;
/* Map a pixel-format fourcc to its SUBSAMPLE_* scheme.
 * Returns -1 for unrecognized fourccs. */
2790 get_sampling_from_fourcc(unsigned int fourcc)
2792 int surface_sampling = -1;
2794 case VA_FOURCC('N', 'V', '1', '2'):
2795 case VA_FOURCC('Y', 'V', '1', '2'):
2796 case VA_FOURCC('I', '4', '2', '0'):
2797 case VA_FOURCC('I', 'M', 'C', '1'):
2798 case VA_FOURCC('I', 'M', 'C', '3'):
2799 surface_sampling = SUBSAMPLE_YUV420;
2801 case VA_FOURCC('Y', 'U', 'Y', '2'):
2802 surface_sampling = SUBSAMPLE_YUV422H;
2804 case VA_FOURCC('R','G','B','A'):
2805 case VA_FOURCC('R','G','B','X'):
2806 case VA_FOURCC('B','G','R','A'):
2807 case VA_FOURCC('B','G','R','X'):
2808 surface_sampling = SUBSAMPLE_RGBX;
2813 return surface_sampling;
/* Copy a rectangular block of pixel data: `height` rows of `len` bytes,
 * advancing src/dst by their own strides each row.  Handles the common
 * case where image pitch differs from the copied width. */
2817 memcpy_pic(uint8_t *dst, unsigned int dst_stride,
2818 const uint8_t *src, unsigned int src_stride,
2819 unsigned int len, unsigned int height)
2823 for (i = 0; i < height; i++) {
2824 memcpy(dst, src, len);
/*
 * Copy the `rect` region of a planar 4:2:0 surface into a mapped VA image
 * (vaGetImage software path).  The image may be I420 or YV12; the U/V
 * index swap below reorders the destination planes when the image fourcc
 * differs from the surface's.  Maps the surface bo through the GTT when
 * tiled (CPU-correct detiled view), directly otherwise.
 */
2831 get_image_i420(struct object_image *obj_image, uint8_t *image_data,
2832 struct object_surface *obj_surface,
2833 const VARectangle *rect)
2835 uint8_t *dst[3], *src[3];
/* Same fourcc -> planes line up (U=1, V=2); different -> swap U/V. */
2837 const int U = obj_image->image.format.fourcc == obj_surface->fourcc ? 1 : 2;
2838 const int V = obj_image->image.format.fourcc == obj_surface->fourcc ? 2 : 1;
2839 unsigned int tiling, swizzle;
2841 if (!obj_surface->bo)
2844 assert(obj_surface->fourcc);
2845 dri_bo_get_tiling(obj_surface->bo, &tiling, &swizzle);
2847 if (tiling != I915_TILING_NONE)
2848 drm_intel_gem_bo_map_gtt(obj_surface->bo);
2850 dri_bo_map(obj_surface->bo, 0);
2852 if (!obj_surface->bo->virtual)
2855 /* Dest VA image has either I420 or YV12 format.
2856 Source VA surface always has I420 format */
2857 dst[Y] = image_data + obj_image->image.offsets[Y];
2858 src[0] = (uint8_t *)obj_surface->bo->virtual;
2859 dst[U] = image_data + obj_image->image.offsets[U];
2860 src[1] = src[0] + obj_surface->width * obj_surface->height;
2861 dst[V] = image_data + obj_image->image.offsets[V];
2862 src[2] = src[1] + (obj_surface->width / 2) * (obj_surface->height / 2);
/* Y plane: full-resolution copy of the rect. */
2865 dst[Y] += rect->y * obj_image->image.pitches[Y] + rect->x;
2866 src[0] += rect->y * obj_surface->width + rect->x;
2867 memcpy_pic(dst[Y], obj_image->image.pitches[Y],
2868 src[0], obj_surface->width,
2869 rect->width, rect->height);
/* Chroma planes: rect coordinates and extents halved (4:2:0). */
2872 dst[U] += (rect->y / 2) * obj_image->image.pitches[U] + rect->x / 2;
2873 src[1] += (rect->y / 2) * obj_surface->width / 2 + rect->x / 2;
2874 memcpy_pic(dst[U], obj_image->image.pitches[U],
2875 src[1], obj_surface->width / 2,
2876 rect->width / 2, rect->height / 2);
2879 dst[V] += (rect->y / 2) * obj_image->image.pitches[V] + rect->x / 2;
2880 src[2] += (rect->y / 2) * obj_surface->width / 2 + rect->x / 2;
2881 memcpy_pic(dst[V], obj_image->image.pitches[V],
2882 src[2], obj_surface->width / 2,
2883 rect->width / 2, rect->height / 2);
2885 if (tiling != I915_TILING_NONE)
2886 drm_intel_gem_bo_unmap_gtt(obj_surface->bo);
2888 dri_bo_unmap(obj_surface->bo);
/*
 * Copy the `rect` region of an NV12 surface (Y plane + interleaved CbCr
 * plane) into a mapped NV12 VA image.  Uses a GTT mapping when the bo is
 * tiled so the CPU sees a linear view.
 */
2892 get_image_nv12(struct object_image *obj_image, uint8_t *image_data,
2893 struct object_surface *obj_surface,
2894 const VARectangle *rect)
2896 uint8_t *dst[2], *src[2];
2897 unsigned int tiling, swizzle;
2899 if (!obj_surface->bo)
2902 assert(obj_surface->fourcc);
2903 dri_bo_get_tiling(obj_surface->bo, &tiling, &swizzle);
2905 if (tiling != I915_TILING_NONE)
2906 drm_intel_gem_bo_map_gtt(obj_surface->bo);
2908 dri_bo_map(obj_surface->bo, 0);
2910 if (!obj_surface->bo->virtual)
2913 /* Both dest VA image and source surface have NV12 format */
2914 dst[0] = image_data + obj_image->image.offsets[0];
2915 src[0] = (uint8_t *)obj_surface->bo->virtual;
2916 dst[1] = image_data + obj_image->image.offsets[1];
2917 src[1] = src[0] + obj_surface->width * obj_surface->height;
/* Y plane. */
2920 dst[0] += rect->y * obj_image->image.pitches[0] + rect->x;
2921 src[0] += rect->y * obj_surface->width + rect->x;
2922 memcpy_pic(dst[0], obj_image->image.pitches[0],
2923 src[0], obj_surface->width,
2924 rect->width, rect->height);
/* UV plane: half the rows; x is rounded down to even (rect->x & -2) so
   the copy starts on a full Cb/Cr byte pair. */
2927 dst[1] += (rect->y / 2) * obj_image->image.pitches[1] + (rect->x & -2);
2928 src[1] += (rect->y / 2) * obj_surface->width + (rect->x & -2);
2929 memcpy_pic(dst[1], obj_image->image.pitches[1],
2930 src[1], obj_surface->width,
2931 rect->width, rect->height / 2);
2933 if (tiling != I915_TILING_NONE)
2934 drm_intel_gem_bo_unmap_gtt(obj_surface->bo);
2936 dri_bo_unmap(obj_surface->bo);
/*
 * Copy the `rect` region of a packed YUY2 surface into a mapped YUY2 VA
 * image.  Single packed plane at 2 bytes per pixel; GTT-mapped when tiled.
 */
2940 get_image_yuy2(struct object_image *obj_image, uint8_t *image_data,
2941 struct object_surface *obj_surface,
2942 const VARectangle *rect)
2945 unsigned int tiling, swizzle;
2947 if (!obj_surface->bo)
2950 assert(obj_surface->fourcc);
2951 dri_bo_get_tiling(obj_surface->bo, &tiling, &swizzle);
2953 if (tiling != I915_TILING_NONE)
2954 drm_intel_gem_bo_map_gtt(obj_surface->bo);
2956 dri_bo_map(obj_surface->bo, 0);
2958 if (!obj_surface->bo->virtual)
2961 /* Both dest VA image and source surface have YUYV format */
2962 dst = image_data + obj_image->image.offsets[0];
2963 src = (uint8_t *)obj_surface->bo->virtual;
/* Packed plane: 2 bytes per pixel, so x offsets and widths double.
   NOTE(review): src row step uses obj_surface->width, not width*2, while
   the memcpy_pic stride below uses width*2 — confirm against full source. */
2966 dst += rect->y * obj_image->image.pitches[0] + rect->x*2;
2967 src += rect->y * obj_surface->width + rect->x*2;
2968 memcpy_pic(dst, obj_image->image.pitches[0],
2969 src, obj_surface->width*2,
2970 rect->width*2, rect->height);
2972 if (tiling != I915_TILING_NONE)
2973 drm_intel_gem_bo_unmap_gtt(obj_surface->bo);
2975 dri_bo_unmap(obj_surface->bo);
/*
 * Software vaGetImage: validate surface/image/rect, require matching
 * fourccs, map the image buffer, and dispatch to the per-format CPU copy
 * helper.  The interleaved_uv render-state flag gates which planar layout
 * (NV12 vs I420) the decoder actually produced.
 */
2979 i965_sw_getimage(VADriverContextP ctx,
2980 VASurfaceID surface,
2981 int x, /* coordinates of the upper left source pixel */
2983 unsigned int width, /* width and height of the region */
2984 unsigned int height,
2987 struct i965_driver_data *i965 = i965_driver_data(ctx);
2988 struct i965_render_state *render_state = &i965->render_state;
2990 struct object_surface *obj_surface = SURFACE(surface);
2992 return VA_STATUS_ERROR_INVALID_SURFACE;
2994 struct object_image *obj_image = IMAGE(image);
2996 return VA_STATUS_ERROR_INVALID_IMAGE;
2999 return VA_STATUS_ERROR_INVALID_PARAMETER;
/* Requested region must fit inside both the surface and the image. */
3000 if (x + width > obj_surface->orig_width ||
3001 y + height > obj_surface->orig_height)
3002 return VA_STATUS_ERROR_INVALID_PARAMETER;
3003 if (x + width > obj_image->image.width ||
3004 y + height > obj_image->image.height)
3005 return VA_STATUS_ERROR_INVALID_PARAMETER;
/* The software path cannot convert formats. */
3007 if (obj_surface->fourcc != obj_image->image.format.fourcc)
3008 return VA_STATUS_ERROR_INVALID_IMAGE_FORMAT;
3011 void *image_data = NULL;
3013 va_status = i965_MapBuffer(ctx, obj_image->image.buf, &image_data);
3014 if (va_status != VA_STATUS_SUCCESS)
3021 rect.height = height;
3023 switch (obj_image->image.format.fourcc) {
3024 case VA_FOURCC('Y','V','1','2'):
3025 case VA_FOURCC('I','4','2','0'):
3026 /* I420 is native format for MPEG-2 decoded surfaces */
3027 if (render_state->interleaved_uv)
3028 goto operation_failed;
3029 get_image_i420(obj_image, image_data, obj_surface, &rect);
3031 case VA_FOURCC('N','V','1','2'):
3032 /* NV12 is native format for H.264 decoded surfaces */
3033 if (!render_state->interleaved_uv)
3034 goto operation_failed;
3035 get_image_nv12(obj_image, image_data, obj_surface, &rect);
3037 case VA_FOURCC('Y','U','Y','2'):
3038 /* YUY2 is the format supported by overlay plane */
3039 get_image_yuy2(obj_image, image_data, obj_surface, &rect);
3043 va_status = VA_STATUS_ERROR_OPERATION_FAILED;
/* Always unmap, on success and failure alike. */
3047 i965_UnmapBuffer(ctx, obj_image->image.buf);
/*
 * Hardware vaGetImage: validate arguments, then hand the surface->image
 * copy (including any format conversion) to the GPU via
 * i965_image_processing().  An unrendered surface (no bo) is treated as
 * success — there is nothing to read back.
 */
3052 i965_hw_getimage(VADriverContextP ctx,
3053 VASurfaceID surface,
3054 int x, /* coordinates of the upper left source pixel */
3056 unsigned int width, /* width and height of the region */
3057 unsigned int height,
3060 struct i965_driver_data *i965 = i965_driver_data(ctx);
3061 struct i965_surface src_surface;
3062 struct i965_surface dst_surface;
3065 struct object_surface *obj_surface = SURFACE(surface);
3066 struct object_image *obj_image = IMAGE(image);
3069 return VA_STATUS_ERROR_INVALID_SURFACE;
3072 return VA_STATUS_ERROR_INVALID_IMAGE;
3075 return VA_STATUS_ERROR_INVALID_PARAMETER;
3076 if (x + width > obj_surface->orig_width ||
3077 y + height > obj_surface->orig_height)
3078 return VA_STATUS_ERROR_INVALID_PARAMETER;
3079 if (x + width > obj_image->image.width ||
3080 y + height > obj_image->image.height)
3081 return VA_STATUS_ERROR_INVALID_PARAMETER;
3083 if (!obj_surface->bo)
3084 return VA_STATUS_SUCCESS;
3085 assert(obj_image->bo); // image bo is always created, see i965_CreateImage()
3090 rect.height = height;
/* GPU blit descriptors: surface is the source, image is the destination. */
3092 src_surface.id = surface;
3093 src_surface.type = I965_SURFACE_TYPE_SURFACE;
3094 src_surface.flags = I965_SURFACE_FLAG_FRAME;
3096 dst_surface.id = image;
3097 dst_surface.type = I965_SURFACE_TYPE_IMAGE;
3098 dst_surface.flags = I965_SURFACE_FLAG_FRAME;
3100 va_status = i965_image_processing(ctx,
/* vaGetImage entry point: dispatch to the GPU path when the platform
 * advertises accelerated GetImage, otherwise fall back to the CPU copy. */
3111 i965_GetImage(VADriverContextP ctx,
3112 VASurfaceID surface,
3113 int x, /* coordinates of the upper left source pixel */
3115 unsigned int width, /* width and height of the region */
3116 unsigned int height,
3119 struct i965_driver_data * const i965 = i965_driver_data(ctx);
3122 if (HAS_ACCELERATED_GETIMAGE(i965))
3123 va_status = i965_hw_getimage(ctx,
3129 va_status = i965_sw_getimage(ctx,
/*
 * Inverse of get_image_i420: copy the src_rect region of a mapped I420 or
 * YV12 VA image into the dst_rect region of the surface bo (vaPutImage
 * software path).  The surface layout is written in I420 plane order; the
 * U/V index swap reorders reads from the image when its fourcc differs.
 * Rects must be equal in size (asserted) — no scaling.
 */
3139 put_image_i420(struct object_surface *obj_surface,
3140 const VARectangle *dst_rect,
3141 struct object_image *obj_image, uint8_t *image_data,
3142 const VARectangle *src_rect)
3144 uint8_t *dst[3], *src[3];
3146 const int U = obj_image->image.format.fourcc == obj_surface->fourcc ? 1 : 2;
3147 const int V = obj_image->image.format.fourcc == obj_surface->fourcc ? 2 : 1;
3148 unsigned int tiling, swizzle;
3150 if (!obj_surface->bo)
3153 assert(obj_surface->fourcc);
3154 assert(dst_rect->width == src_rect->width);
3155 assert(dst_rect->height == src_rect->height);
3156 dri_bo_get_tiling(obj_surface->bo, &tiling, &swizzle);
3158 if (tiling != I915_TILING_NONE)
3159 drm_intel_gem_bo_map_gtt(obj_surface->bo);
3161 dri_bo_map(obj_surface->bo, 0);
3163 if (!obj_surface->bo->virtual)
3166 /* Source VA image has either I420 or YV12 format.
3167 Dest VA surface always has I420 plane layout */
3168 dst[0] = (uint8_t *)obj_surface->bo->virtual;
3169 src[Y] = image_data + obj_image->image.offsets[Y];
3170 dst[1] = dst[0] + obj_surface->width * obj_surface->height;
3171 src[U] = image_data + obj_image->image.offsets[U];
3172 dst[2] = dst[1] + (obj_surface->width / 2) * (obj_surface->height / 2);
3173 src[V] = image_data + obj_image->image.offsets[V];
/* Y plane. */
3176 dst[0] += dst_rect->y * obj_surface->width + dst_rect->x;
3177 src[Y] += src_rect->y * obj_image->image.pitches[Y] + src_rect->x;
3178 memcpy_pic(dst[0], obj_surface->width,
3179 src[Y], obj_image->image.pitches[Y],
3180 src_rect->width, src_rect->height);
/* Chroma planes at quarter resolution (half each dimension). */
3183 dst[1] += (dst_rect->y / 2) * obj_surface->width / 2 + dst_rect->x / 2;
3184 src[U] += (src_rect->y / 2) * obj_image->image.pitches[U] + src_rect->x / 2;
3185 memcpy_pic(dst[1], obj_surface->width / 2,
3186 src[U], obj_image->image.pitches[U],
3187 src_rect->width / 2, src_rect->height / 2);
3190 dst[2] += (dst_rect->y / 2) * obj_surface->width / 2 + dst_rect->x / 2;
3191 src[V] += (src_rect->y / 2) * obj_image->image.pitches[V] + src_rect->x / 2;
3192 memcpy_pic(dst[2], obj_surface->width / 2,
3193 src[V], obj_image->image.pitches[V],
3194 src_rect->width / 2, src_rect->height / 2);
3196 if (tiling != I915_TILING_NONE)
3197 drm_intel_gem_bo_unmap_gtt(obj_surface->bo);
3199 dri_bo_unmap(obj_surface->bo);
/*
 * Copy the src_rect region of a mapped NV12 VA image into the dst_rect
 * region of an NV12 surface bo (vaPutImage software path).  Rects must
 * match in size (asserted) — no scaling.
 */
3203 put_image_nv12(struct object_surface *obj_surface,
3204 const VARectangle *dst_rect,
3205 struct object_image *obj_image, uint8_t *image_data,
3206 const VARectangle *src_rect)
3208 uint8_t *dst[2], *src[2];
3209 unsigned int tiling, swizzle;
3211 if (!obj_surface->bo)
3214 assert(obj_surface->fourcc);
3215 assert(dst_rect->width == src_rect->width);
3216 assert(dst_rect->height == src_rect->height);
3217 dri_bo_get_tiling(obj_surface->bo, &tiling, &swizzle);
3219 if (tiling != I915_TILING_NONE)
3220 drm_intel_gem_bo_map_gtt(obj_surface->bo);
3222 dri_bo_map(obj_surface->bo, 0);
3224 if (!obj_surface->bo->virtual)
3227 /* Both dest VA image and source surface have NV12 format */
3228 dst[0] = (uint8_t *)obj_surface->bo->virtual;
3229 src[0] = image_data + obj_image->image.offsets[0];
3230 dst[1] = dst[0] + obj_surface->width * obj_surface->height;
3231 src[1] = image_data + obj_image->image.offsets[1];
/* Y plane. */
3234 dst[0] += dst_rect->y * obj_surface->width + dst_rect->x;
3235 src[0] += src_rect->y * obj_image->image.pitches[0] + src_rect->x;
3236 memcpy_pic(dst[0], obj_surface->width,
3237 src[0], obj_image->image.pitches[0],
3238 src_rect->width, src_rect->height);
/* UV plane: half the rows; x rounded down to even so the copy starts on
   a whole Cb/Cr byte pair. */
3241 dst[1] += (dst_rect->y / 2) * obj_surface->width + (dst_rect->x & -2);
3242 src[1] += (src_rect->y / 2) * obj_image->image.pitches[1] + (src_rect->x & -2);
3243 memcpy_pic(dst[1], obj_surface->width,
3244 src[1], obj_image->image.pitches[1],
3245 src_rect->width, src_rect->height / 2);
3247 if (tiling != I915_TILING_NONE)
3248 drm_intel_gem_bo_unmap_gtt(obj_surface->bo);
3250 dri_bo_unmap(obj_surface->bo);
/*
 * Copy the src_rect region of a mapped YUY2 VA image into the dst_rect
 * region of a YUY2 surface bo.  Single packed plane, 2 bytes per pixel.
 */
3254 put_image_yuy2(struct object_surface *obj_surface,
3255 const VARectangle *dst_rect,
3256 struct object_image *obj_image, uint8_t *image_data,
3257 const VARectangle *src_rect)
3260 unsigned int tiling, swizzle;
3262 if (!obj_surface->bo)
3265 assert(obj_surface->fourcc);
3266 assert(dst_rect->width == src_rect->width);
3267 assert(dst_rect->height == src_rect->height);
3268 dri_bo_get_tiling(obj_surface->bo, &tiling, &swizzle);
3270 if (tiling != I915_TILING_NONE)
3271 drm_intel_gem_bo_map_gtt(obj_surface->bo);
3273 dri_bo_map(obj_surface->bo, 0);
3275 if (!obj_surface->bo->virtual)
3278 /* Both dest VA image and source surface have YUY2 format */
3279 dst = (uint8_t *)obj_surface->bo->virtual;
3280 src = image_data + obj_image->image.offsets[0];
3282 /* YUYV packed plane */
/* NOTE(review): dst row step uses obj_surface->width, while the
   memcpy_pic stride uses width*2 — confirm against full source. */
3283 dst += dst_rect->y * obj_surface->width + dst_rect->x*2;
3284 src += src_rect->y * obj_image->image.pitches[0] + src_rect->x*2;
3285 memcpy_pic(dst, obj_surface->width*2,
3286 src, obj_image->image.pitches[0],
3287 src_rect->width*2, src_rect->height);
3289 if (tiling != I915_TILING_NONE)
3290 drm_intel_gem_bo_unmap_gtt(obj_surface->bo);
3292 dri_bo_unmap(obj_surface->bo);
/*
 * Software vaPutImage: validate rects (no scaling allowed), require the
 * surface fourcc to match the image's (or allocate the surface bo in the
 * image's format if the surface is still unused), then map the image
 * buffer and dispatch to the per-format CPU copy helper.
 */
3297 i965_sw_putimage(VADriverContextP ctx,
3298 VASurfaceID surface,
3302 unsigned int src_width,
3303 unsigned int src_height,
3306 unsigned int dest_width,
3307 unsigned int dest_height)
3309 struct i965_driver_data *i965 = i965_driver_data(ctx);
3310 struct object_surface *obj_surface = SURFACE(surface);
3313 return VA_STATUS_ERROR_INVALID_SURFACE;
3315 struct object_image *obj_image = IMAGE(image);
3317 return VA_STATUS_ERROR_INVALID_IMAGE;
3319 if (src_x < 0 || src_y < 0)
3320 return VA_STATUS_ERROR_INVALID_PARAMETER;
3321 if (src_x + src_width > obj_image->image.width ||
3322 src_y + src_height > obj_image->image.height)
3323 return VA_STATUS_ERROR_INVALID_PARAMETER;
3324 if (dest_x < 0 || dest_y < 0)
3325 return VA_STATUS_ERROR_INVALID_PARAMETER;
3326 if (dest_x + dest_width > obj_surface->orig_width ||
3327 dest_y + dest_height > obj_surface->orig_height)
3328 return VA_STATUS_ERROR_INVALID_PARAMETER;
3330 /* XXX: don't allow scaling */
3331 if (src_width != dest_width || src_height != dest_height)
3332 return VA_STATUS_ERROR_INVALID_PARAMETER;
3334 if (obj_surface->fourcc) {
3335 /* Don't allow format mismatch */
3336 if (obj_surface->fourcc != obj_image->image.format.fourcc)
3337 return VA_STATUS_ERROR_INVALID_IMAGE_FORMAT;
3341 /* VA is surface not used for decoding, use same VA image format */
3342 i965_check_alloc_surface_bo(
3345 0, /* XXX: don't use tiled surface */
3346 obj_image->image.format.fourcc,
3347 get_sampling_from_fourcc (obj_image->image.format.fourcc));
3351 void *image_data = NULL;
3353 va_status = i965_MapBuffer(ctx, obj_image->image.buf, &image_data);
3354 if (va_status != VA_STATUS_SUCCESS)
3357 VARectangle src_rect, dest_rect;
3360 src_rect.width = src_width;
3361 src_rect.height = src_height;
3362 dest_rect.x = dest_x;
3363 dest_rect.y = dest_y;
3364 dest_rect.width = dest_width;
3365 dest_rect.height = dest_height;
3367 switch (obj_image->image.format.fourcc) {
3368 case VA_FOURCC('Y','V','1','2'):
3369 case VA_FOURCC('I','4','2','0'):
3370 put_image_i420(obj_surface, &dest_rect, obj_image, image_data, &src_rect);
3372 case VA_FOURCC('N','V','1','2'):
3373 put_image_nv12(obj_surface, &dest_rect, obj_image, image_data, &src_rect);
3375 case VA_FOURCC('Y','U','Y','2'):
3376 put_image_yuy2(obj_surface, &dest_rect, obj_image, image_data, &src_rect);
3379 va_status = VA_STATUS_ERROR_OPERATION_FAILED;
/* Always unmap, on success and failure alike. */
3383 i965_UnmapBuffer(ctx, obj_image->image.buf);
/*
 * Hardware vaPutImage: validate arguments, allocate the surface bo in the
 * image's format if the surface has none yet, then run the image->surface
 * copy (with optional scaling/conversion) on the GPU via
 * i965_image_processing().
 */
3388 i965_hw_putimage(VADriverContextP ctx,
3389 VASurfaceID surface,
3393 unsigned int src_width,
3394 unsigned int src_height,
3397 unsigned int dest_width,
3398 unsigned int dest_height)
3400 struct i965_driver_data *i965 = i965_driver_data(ctx);
3401 struct object_surface *obj_surface = SURFACE(surface);
3402 struct object_image *obj_image = IMAGE(image);
3403 struct i965_surface src_surface, dst_surface;
3404 VAStatus va_status = VA_STATUS_SUCCESS;
3405 VARectangle src_rect, dst_rect;
3408 return VA_STATUS_ERROR_INVALID_SURFACE;
3410 if (!obj_image || !obj_image->bo)
3411 return VA_STATUS_ERROR_INVALID_IMAGE;
3415 src_x + src_width > obj_image->image.width ||
3416 src_y + src_height > obj_image->image.height)
3417 return VA_STATUS_ERROR_INVALID_PARAMETER;
3421 dest_x + dest_width > obj_surface->orig_width ||
3422 dest_y + dest_height > obj_surface->orig_height)
3423 return VA_STATUS_ERROR_INVALID_PARAMETER;
/* First use of the surface: create its bo with the image's format and
   the image bo's tiling. */
3425 if (!obj_surface->bo) {
3426 unsigned int tiling, swizzle;
/* NOTE(review): stray double semicolon below — harmless, drop it. */
3427 int surface_sampling = get_sampling_from_fourcc (obj_image->image.format.fourcc);;
3428 dri_bo_get_tiling(obj_image->bo, &tiling, &swizzle);
3430 i965_check_alloc_surface_bo(ctx,
3433 obj_image->image.format.fourcc,
3437 assert(obj_surface->fourcc);
/* GPU blit descriptors: image is the source, surface the destination. */
3439 src_surface.id = image;
3440 src_surface.type = I965_SURFACE_TYPE_IMAGE;
3441 src_surface.flags = I965_SURFACE_FLAG_FRAME;
3444 src_rect.width = src_width;
3445 src_rect.height = src_height;
3447 dst_surface.id = surface;
3448 dst_surface.type = I965_SURFACE_TYPE_SURFACE;
3449 dst_surface.flags = I965_SURFACE_FLAG_FRAME;
3450 dst_rect.x = dest_x;
3451 dst_rect.y = dest_y;
3452 dst_rect.width = dest_width;
3453 dst_rect.height = dest_height;
3455 va_status = i965_image_processing(ctx,
/* vaPutImage entry point: dispatch to the GPU path when the platform
 * advertises accelerated PutImage, otherwise fall back to the CPU copy. */
3465 i965_PutImage(VADriverContextP ctx,
3466 VASurfaceID surface,
3470 unsigned int src_width,
3471 unsigned int src_height,
3474 unsigned int dest_width,
3475 unsigned int dest_height)
3477 struct i965_driver_data *i965 = i965_driver_data(ctx);
3478 VAStatus va_status = VA_STATUS_SUCCESS;
3480 if (HAS_ACCELERATED_PUTIMAGE(i965))
3481 va_status = i965_hw_putimage(ctx,
3493 va_status = i965_sw_putimage(ctx,
/*
 * vaPutSurface: presents a decoded/processed surface on a drawable.
 * Only implemented for X11 displays (routed through the DRI output
 * path); any other display type returns VA_STATUS_ERROR_UNIMPLEMENTED.
 */
3509 i965_PutSurface(VADriverContextP ctx,
3510 VASurfaceID surface,
3511 void *draw, /* X Drawable */
3514 unsigned short srcw,
3515 unsigned short srch,
3518 unsigned short destw,
3519 unsigned short desth,
3520 VARectangle *cliprects, /* client supplied clip list */
3521 unsigned int number_cliprects, /* number of clip rects in the clip list */
3522 unsigned int flags) /* de-interlacing flags */
3525 if (IS_VA_X11(ctx)) {
3526 VARectangle src_rect, dst_rect;
/* build source and destination rectangles from the scalar args */
3530 src_rect.width = srcw;
3531 src_rect.height = srch;
3535 dst_rect.width = destw;
3536 dst_rect.height = desth;
3538 return i965_put_surface_dri(ctx, surface, draw, &src_rect, &dst_rect,
3539 cliprects, number_cliprects, flags);
/* non-X11 display servers are not supported by this entry point */
3542 return VA_STATUS_ERROR_UNIMPLEMENTED;
/*
 * vaTerminate: tears down the driver in reverse order of initialization:
 * batchbuffer, mutexes, display outputs (DRI / Wayland), render and
 * post-processing engines, display attributes, all object heaps, the
 * low-level intel driver, and finally the driver-data allocation itself.
 */
3546 i965_Terminate(VADriverContextP ctx)
3548 struct i965_driver_data *i965 = i965_driver_data(ctx);
3551 intel_batchbuffer_free(i965->batch);
3553 _i965DestroyMutex(&i965->pp_mutex);
3554 _i965DestroyMutex(&i965->render_mutex);
3558 i965_output_dri_terminate(ctx);
3561 #ifdef HAVE_VA_WAYLAND
3562 if (IS_VA_WAYLAND(ctx))
3563 i965_output_wayland_terminate(ctx);
/* engine teardown can fail; propagate as a generic error */
3566 if (i965_render_terminate(ctx) == False)
3567 return VA_STATUS_ERROR_UNKNOWN;
3569 if (i965_post_processing_terminate(ctx) == False)
3570 return VA_STATUS_ERROR_UNKNOWN;
3572 i965_display_attributes_terminate(ctx);
/* release every remaining VA object, one heap per object type */
3574 i965_destroy_heap(&i965->buffer_heap, i965_destroy_buffer);
3575 i965_destroy_heap(&i965->image_heap, i965_destroy_image);
3576 i965_destroy_heap(&i965->subpic_heap, i965_destroy_subpic);
3577 i965_destroy_heap(&i965->surface_heap, i965_destroy_surface);
3578 i965_destroy_heap(&i965->context_heap, i965_destroy_context);
3579 i965_destroy_heap(&i965->config_heap, i965_destroy_config);
3581 if (intel_driver_terminate(ctx) == False)
3582 return VA_STATUS_ERROR_UNKNOWN;
/* i965 aliases ctx->pDriverData; clear the pointer after freeing */
3584 free(ctx->pDriverData);
3585 ctx->pDriverData = NULL;
3587 return VA_STATUS_SUCCESS;
/*
 * vaBufferInfo: reports the type, per-element size, and element count of
 * an existing VA buffer.
 * NOTE(review): obj_buffer is dereferenced without a visible NULL check
 * for an invalid buf_id — confirm a check/assert exists in elided lines.
 */
3592 VADriverContextP ctx, /* in */
3593 VABufferID buf_id, /* in */
3594 VABufferType *type, /* out */
3595 unsigned int *size, /* out */
3596 unsigned int *num_elements /* out */
3599 struct i965_driver_data *i965 = NULL;
3600 struct object_buffer *obj_buffer = NULL;
3602 i965 = i965_driver_data(ctx);
3603 obj_buffer = BUFFER(buf_id);
3605 *type = obj_buffer->type;
3606 *size = obj_buffer->size_element;
3607 *num_elements = obj_buffer->num_elements;
3609 return VA_STATUS_SUCCESS;
/*
 * vaLockSurface: gives the caller CPU access to a surface by deriving a
 * VAImage from it and mapping the image buffer.  On success the derived
 * image id is remembered in obj_surface->locked_image_id so that
 * i965_UnlockSurface() can unmap and destroy it later.  All out
 * parameters (fourcc, per-plane strides/offsets, buffer name) are filled
 * from the derived image descriptor.
 */
3614 VADriverContextP ctx, /* in */
3615 VASurfaceID surface, /* in */
3616 unsigned int *fourcc, /* out */
3617 unsigned int *luma_stride, /* out */
3618 unsigned int *chroma_u_stride, /* out */
3619 unsigned int *chroma_v_stride, /* out */
3620 unsigned int *luma_offset, /* out */
3621 unsigned int *chroma_u_offset, /* out */
3622 unsigned int *chroma_v_offset, /* out */
3623 unsigned int *buffer_name, /* out */
3624 void **buffer /* out */
3627 VAStatus vaStatus = VA_STATUS_SUCCESS;
3628 struct i965_driver_data *i965 = i965_driver_data(ctx);
3629 struct object_surface *obj_surface = NULL;
/* all out pointers are mandatory */
3633 assert(luma_stride);
3634 assert(chroma_u_stride);
3635 assert(chroma_v_stride);
3636 assert(luma_offset);
3637 assert(chroma_u_offset);
3638 assert(chroma_v_offset);
3639 assert(buffer_name);
3642 tmpImage.image_id = VA_INVALID_ID;
3644 obj_surface = SURFACE(surface);
3645 if (obj_surface == NULL) {
3646 // Surface is absent.
3647 vaStatus = VA_STATUS_ERROR_INVALID_PARAMETER;
3651 // Lock functionality is absent now.
3652 if (obj_surface->locked_image_id != VA_INVALID_ID) {
3653 // Surface is locked already.
3654 vaStatus = VA_STATUS_ERROR_INVALID_PARAMETER;
/* derive a VAImage aliasing the surface's memory */
3658 vaStatus = i965_DeriveImage(
3662 if (vaStatus != VA_STATUS_SUCCESS) {
/* record the lock so UnlockSurface can find and release the image */
3666 obj_surface->locked_image_id = tmpImage.image_id;
/* map the derived image's buffer for CPU access */
3668 vaStatus = i965_MapBuffer(
3672 if (vaStatus != VA_STATUS_SUCCESS) {
/* report layout of the mapped data back to the caller */
3676 *fourcc = tmpImage.format.fourcc;
3677 *luma_offset = tmpImage.offsets[0];
3678 *luma_stride = tmpImage.pitches[0];
3679 *chroma_u_offset = tmpImage.offsets[1];
3680 *chroma_u_stride = tmpImage.pitches[1];
3681 *chroma_v_offset = tmpImage.offsets[2];
3682 *chroma_v_stride = tmpImage.pitches[2];
3683 *buffer_name = tmpImage.buf;
/* error path: clean up partially-acquired resources */
3686 if (vaStatus != VA_STATUS_SUCCESS) {
/*
 * vaUnlockSurface: releases a lock taken by i965_LockSurface().  Looks up
 * the image recorded in obj_surface->locked_image_id, unmaps its buffer,
 * destroys the derived image, and clears the lock marker.  Returns
 * VA_STATUS_ERROR_INVALID_PARAMETER if the surface is missing, was never
 * locked, or its work image was already deallocated.
 */
3695 VADriverContextP ctx, /* in */
3696 VASurfaceID surface /* in */
3699 VAStatus vaStatus = VA_STATUS_SUCCESS;
3700 struct i965_driver_data *i965 = i965_driver_data(ctx);
3701 struct object_image *locked_img = NULL;
3702 struct object_surface *obj_surface = NULL;
3704 obj_surface = SURFACE(surface);
3706 if (obj_surface == NULL) {
3707 vaStatus = VA_STATUS_ERROR_INVALID_PARAMETER; // Surface is absent
3710 if (obj_surface->locked_image_id == VA_INVALID_ID) {
3711 vaStatus = VA_STATUS_ERROR_INVALID_PARAMETER; // Surface is not locked
3715 locked_img = IMAGE(obj_surface->locked_image_id);
3716 if (locked_img == NULL || (locked_img->image.image_id == VA_INVALID_ID)) {
3717 // Work image was deallocated before i965_UnlockSurface()
3718 vaStatus = VA_STATUS_ERROR_INVALID_PARAMETER;
/* undo the MapBuffer done at lock time */
3722 vaStatus = i965_UnmapBuffer(
3724 locked_img->image.buf);
3725 if (vaStatus != VA_STATUS_SUCCESS) {
/* destroy the derived image created at lock time */
3729 vaStatus = i965_DestroyImage(
3731 locked_img->image.image_id);
3732 if (vaStatus != VA_STATUS_SUCCESS) {
/* invalidate both the image object and the surface's lock marker */
3736 locked_img->image.image_id = VA_INVALID_ID;
3739 obj_surface->locked_image_id = VA_INVALID_ID;
/*
 * vaGetSurfaceAttributes: for each requested attribute, reports support
 * and valid values for surfaces created with the given config.  For
 * VASurfaceAttribPixelFormat there are two modes: when value.i == 0 the
 * driver fills in a default fourcc for the config's profile on this
 * hardware generation; otherwise it validates the caller-supplied fourcc
 * and clears it (dropping SETTABLE) when unsupported.  The min/max
 * width/height attributes are currently reported as not supported.
 */
3745 i965_GetSurfaceAttributes(
3746 VADriverContextP ctx,
3748 VASurfaceAttrib *attrib_list,
3749 unsigned int num_attribs
3752 VAStatus vaStatus = VA_STATUS_SUCCESS;
3753 struct i965_driver_data *i965 = i965_driver_data(ctx);
3754 struct object_config *obj_config;
3757 if (config == VA_INVALID_ID)
3758 return VA_STATUS_ERROR_INVALID_CONFIG;
3760 obj_config = CONFIG(config);
3762 if (obj_config == NULL)
3763 return VA_STATUS_ERROR_INVALID_CONFIG;
3765 if (attrib_list == NULL || num_attribs == 0)
3766 return VA_STATUS_ERROR_INVALID_PARAMETER;
3768 for (i = 0; i < num_attribs; i++) {
3769 switch (attrib_list[i].type) {
3770 case VASurfaceAttribPixelFormat:
3771 attrib_list[i].value.type = VAGenericValueTypeInteger;
3772 attrib_list[i].flags = VA_SURFACE_ATTRIB_GETTABLE | VA_SURFACE_ATTRIB_SETTABLE;
/* query mode: value.i == 0 means "tell me the default fourcc" */
3774 if (attrib_list[i].value.value.i == 0) {
/* G4X: only MPEG-2 decode, I420 surfaces */
3775 if (IS_G4X(i965->intel.device_id)) {
3776 if (obj_config->profile == VAProfileMPEG2Simple ||
3777 obj_config->profile == VAProfileMPEG2Main) {
3778 attrib_list[i].value.value.i = VA_FOURCC('I', '4', '2', '0');
3781 attrib_list[i].flags = VA_SURFACE_ATTRIB_NOT_SUPPORTED;
/* Ironlake: MPEG-2 uses I420; H.264 and VPP (None) use NV12 */
3783 } else if (IS_IRONLAKE(i965->intel.device_id)) {
3784 if (obj_config->profile == VAProfileMPEG2Simple ||
3785 obj_config->profile == VAProfileMPEG2Main) {
3786 attrib_list[i].value.value.i = VA_FOURCC('I', '4', '2', '0');
3787 } else if (obj_config->profile == VAProfileH264Baseline ||
3788 obj_config->profile == VAProfileH264Main ||
3789 obj_config->profile == VAProfileH264High) {
3790 attrib_list[i].value.value.i = VA_FOURCC('N', 'V', '1', '2');
3791 } else if (obj_config->profile == VAProfileNone) {
3792 attrib_list[i].value.value.i = VA_FOURCC('N', 'V', '1', '2');
3795 attrib_list[i].flags = VA_SURFACE_ATTRIB_NOT_SUPPORTED;
/* Gen6 (Sandy Bridge): NV12 everywhere */
3797 } else if (IS_GEN6(i965->intel.device_id)) {
3798 attrib_list[i].value.value.i = VA_FOURCC('N', 'V', '1', '2');
/* Gen7 (Ivy Bridge): NV12, except JPEG which uses an internal format */
3799 } else if (IS_GEN7(i965->intel.device_id)) {
3800 if (obj_config->profile == VAProfileJPEGBaseline)
3801 attrib_list[i].value.value.i = 0; /* internal format */
3803 attrib_list[i].value.value.i = VA_FOURCC('N', 'V', '1', '2');
/* validate mode: caller supplied a fourcc; clear it if unsupported */
3806 if (IS_G4X(i965->intel.device_id)) {
3807 if (obj_config->profile == VAProfileMPEG2Simple ||
3808 obj_config->profile == VAProfileMPEG2Main) {
3809 if (attrib_list[i].value.value.i != VA_FOURCC('I', '4', '2', '0')) {
3810 attrib_list[i].value.value.i = 0;
3811 attrib_list[i].flags &= ~VA_SURFACE_ATTRIB_SETTABLE;
3815 attrib_list[i].flags = VA_SURFACE_ATTRIB_NOT_SUPPORTED;
3817 } else if (IS_IRONLAKE(i965->intel.device_id)) {
3818 if (obj_config->profile == VAProfileMPEG2Simple ||
3819 obj_config->profile == VAProfileMPEG2Main) {
3820 if (attrib_list[i].value.value.i != VA_FOURCC('I', '4', '2', '0')) {
3821 attrib_list[i].value.value.i = 0;
3822 attrib_list[i].flags &= ~VA_SURFACE_ATTRIB_SETTABLE;
3824 } else if (obj_config->profile == VAProfileH264Baseline ||
3825 obj_config->profile == VAProfileH264Main ||
3826 obj_config->profile == VAProfileH264High) {
3827 if (attrib_list[i].value.value.i != VA_FOURCC('N', 'V', '1', '2')) {
3828 attrib_list[i].value.value.i = 0;
3829 attrib_list[i].flags &= ~VA_SURFACE_ATTRIB_SETTABLE;
/* VPP configs accept a wider set of formats */
3831 } else if (obj_config->profile == VAProfileNone) {
3832 switch (attrib_list[i].value.value.i) {
3833 case VA_FOURCC('N', 'V', '1', '2'):
3834 case VA_FOURCC('I', '4', '2', '0'):
3835 case VA_FOURCC('Y', 'V', '1', '2'):
3836 case VA_FOURCC('Y', 'U', 'Y', '2'):
3837 case VA_FOURCC('B', 'G', 'R', 'A'):
3838 case VA_FOURCC('B', 'G', 'R', 'X'):
3839 case VA_FOURCC('R', 'G', 'B', 'X'):
3840 case VA_FOURCC('R', 'G', 'B', 'A'):
3843 attrib_list[i].value.value.i = 0;
3844 attrib_list[i].flags &= ~VA_SURFACE_ATTRIB_SETTABLE;
3849 attrib_list[i].flags = VA_SURFACE_ATTRIB_NOT_SUPPORTED;
3851 } else if (IS_GEN6(i965->intel.device_id)) {
/* Gen6 encode/VPP accept several formats; decode is NV12-only */
3852 if (obj_config->entrypoint == VAEntrypointEncSlice ||
3853 obj_config->entrypoint == VAEntrypointVideoProc) {
3854 switch (attrib_list[i].value.value.i) {
3855 case VA_FOURCC('N', 'V', '1', '2'):
3856 case VA_FOURCC('I', '4', '2', '0'):
3857 case VA_FOURCC('Y', 'V', '1', '2'):
3858 case VA_FOURCC('Y', 'U', 'Y', '2'):
3859 case VA_FOURCC('B', 'G', 'R', 'A'):
3860 case VA_FOURCC('B', 'G', 'R', 'X'):
3861 case VA_FOURCC('R', 'G', 'B', 'X'):
3862 case VA_FOURCC('R', 'G', 'B', 'A'):
3865 attrib_list[i].value.value.i = 0;
3866 attrib_list[i].flags &= ~VA_SURFACE_ATTRIB_SETTABLE;
3870 if (attrib_list[i].value.value.i != VA_FOURCC('N', 'V', '1', '2')) {
3871 attrib_list[i].value.value.i = 0;
3872 attrib_list[i].flags &= ~VA_SURFACE_ATTRIB_SETTABLE;
3875 } else if (IS_GEN7(i965->intel.device_id)) {
/* Gen7 encode/VPP: planar YUV only */
3876 if (obj_config->entrypoint == VAEntrypointEncSlice ||
3877 obj_config->entrypoint == VAEntrypointVideoProc) {
3878 switch (attrib_list[i].value.value.i) {
3879 case VA_FOURCC('N', 'V', '1', '2'):
3880 case VA_FOURCC('I', '4', '2', '0'):
3881 case VA_FOURCC('Y', 'V', '1', '2'):
3884 attrib_list[i].value.value.i = 0;
3885 attrib_list[i].flags &= ~VA_SURFACE_ATTRIB_SETTABLE;
3889 if (obj_config->profile == VAProfileJPEGBaseline) {
3890 attrib_list[i].value.value.i = 0; /* JPEG decoding always uses an internal format */
3891 attrib_list[i].flags &= ~VA_SURFACE_ATTRIB_SETTABLE;
3893 if (attrib_list[i].value.value.i != VA_FOURCC('N', 'V', '1', '2')) {
3894 attrib_list[i].value.value.i = 0;
3895 attrib_list[i].flags &= ~VA_SURFACE_ATTRIB_SETTABLE;
3903 case VASurfaceAttribMinWidth:
3904 /* FIXME: add support for it later */
3905 attrib_list[i].flags = VA_SURFACE_ATTRIB_NOT_SUPPORTED;
3907 case VASurfaceAttribMaxWidth:
3908 attrib_list[i].flags = VA_SURFACE_ATTRIB_NOT_SUPPORTED;
3910 case VASurfaceAttribMinHeight:
3911 attrib_list[i].flags = VA_SURFACE_ATTRIB_NOT_SUPPORTED;
3913 case VASurfaceAttribMaxHeight:
3914 attrib_list[i].flags = VA_SURFACE_ATTRIB_NOT_SUPPORTED;
3917 attrib_list[i].flags = VA_SURFACE_ATTRIB_NOT_SUPPORTED;
3926 * Query video processing pipeline
/*
 * vaQueryVideoProcFilters: lists the VPP filter types available on this
 * platform.  Noise reduction and deinterlacing require VPP support;
 * Haswell additionally exposes None/Sharpening/ColorBalance/
 * ColorStandard.  *num_filters receives the count written to filters[]
 * (assignment presumably in elided lines — TODO confirm).
 */
3928 VAStatus i965_QueryVideoProcFilters(
3929 VADriverContextP ctx,
3930 VAContextID context,
3931 VAProcFilterType *filters,
3932 unsigned int *num_filters
3935 struct i965_driver_data *const i965 = i965_driver_data(ctx);
3938 if (HAS_VPP(i965)) {
3939 filters[i++] = VAProcFilterNoiseReduction;
3940 filters[i++] = VAProcFilterDeinterlacing;
/* Haswell-only filters */
3943 if(IS_HASWELL(i965->intel.device_id)){
3944 filters[i++] = VAProcFilterNone;
3945 filters[i++] = VAProcFilterSharpening;
3946 filters[i++] = VAProcFilterColorBalance;
3947 filters[i++] = VAProcFilterColorStandard;
3952 return VA_STATUS_SUCCESS;
/*
 * vaQueryVideoProcFilterCaps: fills filter_caps with the capability
 * ranges for one filter type.  Noise reduction exposes a [0,1] strength
 * in 1/32 steps; deinterlacing exposes Bob; Haswell's color balance
 * exposes hue/saturation/brightness/contrast ranges.  *num_filter_caps
 * receives the number of caps written.
 */
3955 VAStatus i965_QueryVideoProcFilterCaps(
3956 VADriverContextP ctx,
3957 VAContextID context,
3958 VAProcFilterType type,
3960 unsigned int *num_filter_caps
3963 struct i965_driver_data *const i965 = i965_driver_data(ctx);
3966 if (type == VAProcFilterNoiseReduction) {
3967 VAProcFilterCap *cap = filter_caps;
/* denoise strength: [0, 1] in 1/32 increments, default 0.5 */
3969 cap->range.min_value = 0.0;
3970 cap->range.max_value = 1.0;
3971 cap->range.default_value = 0.5;
3972 cap->range.step = 0.03125; /* 1.0 / 32 */
3974 } else if (type == VAProcFilterDeinterlacing) {
3975 VAProcFilterCapDeinterlacing *cap = filter_caps;
/* only Bob deinterlacing is advertised */
3977 cap->type = VAProcDeinterlacingBob;
3982 if(IS_HASWELL(i965->intel.device_id)){
3983 if(type == VAProcFilterColorBalance){
3984 VAProcFilterCapColorBalance *cap = filter_caps;
/* hue: degrees in [-180, 180] */
3985 cap->type = VAProcColorBalanceHue;
3986 cap->range.min_value = -180.0;
3987 cap->range.max_value = 180.0;
3988 cap->range.default_value = 0.0;
3989 cap->range.step = 1.0;
/* saturation: multiplier in [0, 10] */
3993 cap->type = VAProcColorBalanceSaturation;
3994 cap->range.min_value = 0.0;
3995 cap->range.max_value = 10.0;
3996 cap->range.default_value = 0.0;
3997 cap->range.step = 0.1;
/* brightness: offset in [-100, 100] */
4001 cap->type = VAProcColorBalanceBrightness;
4002 cap->range.min_value = -100.0;
4003 cap->range.max_value = 100.0;
4004 cap->range.default_value = 0.0;
4005 cap->range.step = 1.0;
/* contrast: multiplier in [0, 10] */
4009 cap->type = VAProcColorBalanceContrast;
4010 cap->range.min_value = 0.0;
4011 cap->range.max_value = 10.0;
4012 cap->range.default_value = 0.0;
4013 cap->range.step = 0.1;
4020 *num_filter_caps = i;
4022 return VA_STATUS_SUCCESS;
/* Color standards advertised by vaQueryVideoProcPipelineCaps: BT.601
 * for both the input and the output side of the VPP pipeline. */
4025 static VAProcColorStandardType vpp_input_color_standards[VAProcColorStandardCount] = {
4026 VAProcColorStandardBT601,
4029 static VAProcColorStandardType vpp_output_color_standards[VAProcColorStandardCount] = {
4030 VAProcColorStandardBT601,
/*
 * vaQueryVideoProcPipelineCaps: reports pipeline-level capabilities
 * (reference frame counts, color standards) and inspects the supplied
 * filter parameter buffers — only Weave and Bob deinterlacing are
 * accepted.
 * NOTE(review): obj_buffer from BUFFER(filters[i]) is dereferenced with
 * no visible NULL check — an invalid filter buffer id would crash.
 */
4033 VAStatus i965_QueryVideoProcPipelineCaps(
4034 VADriverContextP ctx,
4035 VAContextID context,
4036 VABufferID *filters,
4037 unsigned int num_filters,
4038 VAProcPipelineCaps *pipeline_cap /* out */
4041 struct i965_driver_data * const i965 = i965_driver_data(ctx);
/* static pipeline description: no extra references, BT.601 in/out */
4044 pipeline_cap->pipeline_flags = 0;
4045 pipeline_cap->filter_flags = 0;
4046 pipeline_cap->num_forward_references = 0;
4047 pipeline_cap->num_backward_references = 0;
4048 pipeline_cap->num_input_color_standards = 1;
4049 pipeline_cap->input_color_standards = vpp_input_color_standards;
4050 pipeline_cap->num_output_color_standards = 1;
4051 pipeline_cap->output_color_standards = vpp_output_color_standards;
/* validate each filter parameter buffer supplied by the caller */
4053 for (i = 0; i < num_filters; i++) {
4054 struct object_buffer *obj_buffer = BUFFER(filters[i]);
4055 VAProcFilterParameterBufferBase *base = (VAProcFilterParameterBufferBase *)obj_buffer->buffer_store->buffer;
4057 if (base->type == VAProcFilterNoiseReduction) {
4058 VAProcFilterParameterBuffer *denoise = (VAProcFilterParameterBuffer *)base;
4060 } else if (base->type == VAProcFilterDeinterlacing) {
4061 VAProcFilterParameterBufferDeinterlacing *deint = (VAProcFilterParameterBufferDeinterlacing *)base;
/* only Weave and Bob deinterlacing algorithms are supported */
4063 assert(deint->algorithm == VAProcDeinterlacingWeave ||
4064 deint->algorithm == VAProcDeinterlacingBob);
4068 return VA_STATUS_SUCCESS;
4072 VA_DRIVER_INIT_FUNC(VADriverContextP ctx);
/*
 * Driver entry point called by libva when the i965 backend is loaded.
 * Advertises driver limits, installs the core and VPP vtables, allocates
 * the per-display driver data, initializes the six object heaps, builds
 * the vendor string, and delegates the rest to i965_Init().
 */
4075 VA_DRIVER_INIT_FUNC( VADriverContextP ctx )
4077 struct VADriverVTable * const vtable = ctx->vtable;
4078 struct VADriverVTableVPP * const vtable_vpp = ctx->vtable_vpp;
4080 struct i965_driver_data *i965;
/* advertise API version and driver capacity limits */
4083 ctx->version_major = VA_MAJOR_VERSION;
4084 ctx->version_minor = VA_MINOR_VERSION;
4085 ctx->max_profiles = I965_MAX_PROFILES;
4086 ctx->max_entrypoints = I965_MAX_ENTRYPOINTS;
4087 ctx->max_attributes = I965_MAX_CONFIG_ATTRIBUTES;
4088 ctx->max_image_formats = I965_MAX_IMAGE_FORMATS;
4089 ctx->max_subpic_formats = I965_MAX_SUBPIC_FORMATS;
4090 ctx->max_display_attributes = 1 + ARRAY_ELEMS(i965_display_attributes);
/* install the core VA vtable */
4092 vtable->vaTerminate = i965_Terminate;
/* NOTE(review): vaQueryConfigEntrypoints is assigned twice (here and
 * two lines below) — the duplicate is harmless but should be removed */
4093 vtable->vaQueryConfigEntrypoints = i965_QueryConfigEntrypoints;
4094 vtable->vaQueryConfigProfiles = i965_QueryConfigProfiles;
4095 vtable->vaQueryConfigEntrypoints = i965_QueryConfigEntrypoints;
4096 vtable->vaQueryConfigAttributes = i965_QueryConfigAttributes;
4097 vtable->vaCreateConfig = i965_CreateConfig;
4098 vtable->vaDestroyConfig = i965_DestroyConfig;
4099 vtable->vaGetConfigAttributes = i965_GetConfigAttributes;
4100 vtable->vaCreateSurfaces = i965_CreateSurfaces;
4101 vtable->vaDestroySurfaces = i965_DestroySurfaces;
4102 vtable->vaCreateContext = i965_CreateContext;
4103 vtable->vaDestroyContext = i965_DestroyContext;
4104 vtable->vaCreateBuffer = i965_CreateBuffer;
4105 vtable->vaBufferSetNumElements = i965_BufferSetNumElements;
4106 vtable->vaMapBuffer = i965_MapBuffer;
4107 vtable->vaUnmapBuffer = i965_UnmapBuffer;
4108 vtable->vaDestroyBuffer = i965_DestroyBuffer;
4109 vtable->vaBeginPicture = i965_BeginPicture;
4110 vtable->vaRenderPicture = i965_RenderPicture;
4111 vtable->vaEndPicture = i965_EndPicture;
4112 vtable->vaSyncSurface = i965_SyncSurface;
4113 vtable->vaQuerySurfaceStatus = i965_QuerySurfaceStatus;
4114 vtable->vaPutSurface = i965_PutSurface;
4115 vtable->vaQueryImageFormats = i965_QueryImageFormats;
4116 vtable->vaCreateImage = i965_CreateImage;
4117 vtable->vaDeriveImage = i965_DeriveImage;
4118 vtable->vaDestroyImage = i965_DestroyImage;
4119 vtable->vaSetImagePalette = i965_SetImagePalette;
4120 vtable->vaGetImage = i965_GetImage;
4121 vtable->vaPutImage = i965_PutImage;
4122 vtable->vaQuerySubpictureFormats = i965_QuerySubpictureFormats;
4123 vtable->vaCreateSubpicture = i965_CreateSubpicture;
4124 vtable->vaDestroySubpicture = i965_DestroySubpicture;
4125 vtable->vaSetSubpictureImage = i965_SetSubpictureImage;
4126 vtable->vaSetSubpictureChromakey = i965_SetSubpictureChromakey;
4127 vtable->vaSetSubpictureGlobalAlpha = i965_SetSubpictureGlobalAlpha;
4128 vtable->vaAssociateSubpicture = i965_AssociateSubpicture;
4129 vtable->vaDeassociateSubpicture = i965_DeassociateSubpicture;
4130 vtable->vaQueryDisplayAttributes = i965_QueryDisplayAttributes;
4131 vtable->vaGetDisplayAttributes = i965_GetDisplayAttributes;
4132 vtable->vaSetDisplayAttributes = i965_SetDisplayAttributes;
4133 vtable->vaBufferInfo = i965_BufferInfo;
4134 vtable->vaLockSurface = i965_LockSurface;
4135 vtable->vaUnlockSurface = i965_UnlockSurface;
4136 vtable->vaGetSurfaceAttributes = i965_GetSurfaceAttributes;
4137 vtable->vaCreateSurfaces2 = i965_CreateSurfaces2;
/* install the video-processing vtable */
4139 vtable_vpp->vaQueryVideoProcFilters = i965_QueryVideoProcFilters;
4140 vtable_vpp->vaQueryVideoProcFilterCaps = i965_QueryVideoProcFilterCaps;
4141 vtable_vpp->vaQueryVideoProcPipelineCaps = i965_QueryVideoProcPipelineCaps;
/* NOTE(review): calloc result is stored without a visible NULL check */
4143 i965 = (struct i965_driver_data *)calloc(1, sizeof(*i965));
4145 ctx->pDriverData = (void *)i965;
/* one object heap per VA object type; asserts vanish in NDEBUG builds */
4147 result = object_heap_init(&i965->config_heap,
4148 sizeof(struct object_config),
4150 assert(result == 0);
4152 result = object_heap_init(&i965->context_heap,
4153 sizeof(struct object_context),
4155 assert(result == 0);
4157 result = object_heap_init(&i965->surface_heap,
4158 sizeof(struct object_surface),
4160 assert(result == 0);
4162 result = object_heap_init(&i965->buffer_heap,
4163 sizeof(struct object_buffer),
4165 assert(result == 0);
4167 result = object_heap_init(&i965->image_heap,
4168 sizeof(struct object_image),
4170 assert(result == 0);
4172 result = object_heap_init(&i965->subpic_heap,
4173 sizeof(struct object_subpic),
4175 assert(result == 0);
/* build the vendor string, e.g. "Intel i965 driver - 1.0.20"
 * NOTE(review): unbounded sprintf — assumes va_vendor is large enough;
 * consider snprintf */
4177 sprintf(i965->va_vendor, "%s %s driver - %d.%d.%d",
4178 INTEL_STR_DRIVER_VENDOR,
4179 INTEL_STR_DRIVER_NAME,
4180 INTEL_DRIVER_MAJOR_VERSION,
4181 INTEL_DRIVER_MINOR_VERSION,
4182 INTEL_DRIVER_MICRO_VERSION);
/* append ".preN" for pre-release builds */
4184 if (INTEL_DRIVER_PRE_VERSION > 0) {
4185 const int len = strlen(i965->va_vendor);
4186 sprintf(&i965->va_vendor[len], ".pre%d", INTEL_DRIVER_PRE_VERSION);
4189 i965->current_context_id = VA_INVALID_ID;
4191 ctx->str_vendor = i965->va_vendor;
/* remaining hardware/codec initialization happens in i965_Init() */
4193 return i965_Init(ctx);