2 * Copyright © 2009 Intel Corporation
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the
6 * "Software"), to deal in the Software without restriction, including
7 * without limitation the rights to use, copy, modify, merge, publish,
8 * distribute, sub license, and/or sell copies of the Software, and to
9 * permit persons to whom the Software is furnished to do so, subject to
10 * the following conditions:
12 * The above copyright notice and this permission notice (including the
13 * next paragraph) shall be included in all copies or substantial portions
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
17 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
18 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
19 * IN NO EVENT SHALL PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR
20 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
21 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
22 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
25 * Xiang Haihao <haihao.xiang@intel.com>
26 * Zou Nan hai <nanhai.zou@intel.com>
30 #include "config_android.h"
41 # include "i965_output_dri.h"
44 #ifdef HAVE_VA_WAYLAND
45 # include "i965_output_wayland.h"
48 #include "intel_driver.h"
49 #include "intel_memman.h"
50 #include "intel_batchbuffer.h"
51 #include "i965_defines.h"
52 #include "i965_drv_video.h"
53 #include "i965_decoder.h"
54 #include "i965_encoder.h"
56 #define CONFIG_ID_OFFSET 0x01000000
57 #define CONTEXT_ID_OFFSET 0x02000000
58 #define SURFACE_ID_OFFSET 0x04000000
59 #define BUFFER_ID_OFFSET 0x08000000
60 #define IMAGE_ID_OFFSET 0x0a000000
61 #define SUBPIC_ID_OFFSET 0x10000000
63 #define HAS_MPEG2_DECODING(ctx) ((ctx)->codec_info->has_mpeg2_decoding && \
66 #define HAS_MPEG2_ENCODING(ctx) ((ctx)->codec_info->has_mpeg2_encoding && \
69 #define HAS_H264_DECODING(ctx) ((ctx)->codec_info->has_h264_decoding && \
72 #define HAS_H264_ENCODING(ctx) ((ctx)->codec_info->has_h264_encoding && \
75 #define HAS_VC1_DECODING(ctx) ((ctx)->codec_info->has_vc1_decoding && \
78 #define HAS_JPEG_DECODING(ctx) ((ctx)->codec_info->has_jpeg_decoding && \
81 #define HAS_VPP(ctx) ((ctx)->codec_info->has_vpp)
83 #define HAS_ACCELERATED_GETIMAGE(ctx) ((ctx)->codec_info->has_accelerated_getimage)
85 #define HAS_ACCELERATED_PUTIMAGE(ctx) ((ctx)->codec_info->has_accelerated_putimage)
87 #define HAS_TILED_SURFACE(ctx) ((ctx)->codec_info->has_tiled_surface)
89 static int get_sampling_from_fourcc(unsigned int fourcc);
91 /* Check whether we are rendering to X11 (VA/X11 or VA/GLX API) */
92 #define IS_VA_X11(ctx) \
93 (((ctx)->display_type & VA_DISPLAY_MAJOR_MASK) == VA_DISPLAY_X11)
95 /* Check whether we are rendering to Wayland */
96 #define IS_VA_WAYLAND(ctx) \
97 (((ctx)->display_type & VA_DISPLAY_MAJOR_MASK) == VA_DISPLAY_WAYLAND)
100 I965_SURFACETYPE_RGBA = 1,
101 I965_SURFACETYPE_YUV,
102 I965_SURFACETYPE_INDEXED
105 /* List of supported display attributes */
106 static const VADisplayAttribute i965_display_attributes[] = {
108 VADisplayAttribRotation,
109 0, 3, VA_ROTATION_NONE,
110 VA_DISPLAY_ATTRIB_GETTABLE|VA_DISPLAY_ATTRIB_SETTABLE
114 /* List of supported image formats */
117 VAImageFormat va_format;
118 } i965_image_format_map_t;
120 static const i965_image_format_map_t
121 i965_image_formats_map[I965_MAX_IMAGE_FORMATS + 1] = {
122 { I965_SURFACETYPE_YUV,
123 { VA_FOURCC('Y','V','1','2'), VA_LSB_FIRST, 12, } },
124 { I965_SURFACETYPE_YUV,
125 { VA_FOURCC('I','4','2','0'), VA_LSB_FIRST, 12, } },
126 { I965_SURFACETYPE_YUV,
127 { VA_FOURCC('N','V','1','2'), VA_LSB_FIRST, 12, } },
128 { I965_SURFACETYPE_YUV,
129 { VA_FOURCC('Y','U','Y','2'), VA_LSB_FIRST, 16, } },
130 { I965_SURFACETYPE_YUV,
131 { VA_FOURCC('U','Y','V','Y'), VA_LSB_FIRST, 16, } },
132 { I965_SURFACETYPE_RGBA,
133 { VA_FOURCC('R','G','B','X'), VA_LSB_FIRST, 32, 24, 0x000000ff, 0x0000ff00, 0x00ff0000 } },
134 { I965_SURFACETYPE_RGBA,
135 { VA_FOURCC('B','G','R','X'), VA_LSB_FIRST, 32, 24, 0x00ff0000, 0x0000ff00, 0x000000ff } },
138 /* List of supported subpicture formats */
142 VAImageFormat va_format;
143 unsigned int va_flags;
144 } i965_subpic_format_map_t;
146 #define COMMON_SUBPICTURE_FLAGS \
147 (VA_SUBPICTURE_DESTINATION_IS_SCREEN_COORD| \
148 VA_SUBPICTURE_GLOBAL_ALPHA)
150 static const i965_subpic_format_map_t
151 i965_subpic_formats_map[I965_MAX_SUBPIC_FORMATS + 1] = {
152 { I965_SURFACETYPE_INDEXED, I965_SURFACEFORMAT_P4A4_UNORM,
153 { VA_FOURCC('I','A','4','4'), VA_MSB_FIRST, 8, },
154 COMMON_SUBPICTURE_FLAGS },
155 { I965_SURFACETYPE_INDEXED, I965_SURFACEFORMAT_A4P4_UNORM,
156 { VA_FOURCC('A','I','4','4'), VA_MSB_FIRST, 8, },
157 COMMON_SUBPICTURE_FLAGS },
158 { I965_SURFACETYPE_INDEXED, I965_SURFACEFORMAT_P8A8_UNORM,
159 { VA_FOURCC('I','A','8','8'), VA_MSB_FIRST, 16, },
160 COMMON_SUBPICTURE_FLAGS },
161 { I965_SURFACETYPE_INDEXED, I965_SURFACEFORMAT_A8P8_UNORM,
162 { VA_FOURCC('A','I','8','8'), VA_MSB_FIRST, 16, },
163 COMMON_SUBPICTURE_FLAGS },
164 { I965_SURFACETYPE_RGBA, I965_SURFACEFORMAT_B8G8R8A8_UNORM,
165 { VA_FOURCC('B','G','R','A'), VA_LSB_FIRST, 32,
166 32, 0x00ff0000, 0x0000ff00, 0x000000ff, 0xff000000 },
167 COMMON_SUBPICTURE_FLAGS },
168 { I965_SURFACETYPE_RGBA, I965_SURFACEFORMAT_R8G8B8A8_UNORM,
169 { VA_FOURCC('R','G','B','A'), VA_LSB_FIRST, 32,
170 32, 0x000000ff, 0x0000ff00, 0x00ff0000, 0xff000000 },
171 COMMON_SUBPICTURE_FLAGS },
/*
 * Find the subpicture format descriptor matching @va_format in
 * i965_subpic_formats_map[], or fail if none matches.
 * NOTE(review): the match-return and end-of-function lines are elided
 * in this excerpt; presumably returns the matching entry or NULL.
 */
174 static const i965_subpic_format_map_t *
175 get_subpic_format(const VAImageFormat *va_format)
/* The map is terminated by an entry whose .type is 0 (sentinel). */
178 for (i = 0; i965_subpic_formats_map[i].type != 0; i++) {
179 const i965_subpic_format_map_t * const m = &i965_subpic_formats_map[i];
/* FourCC must always match; RGBA formats additionally require the
 * byte order and all four channel masks to match, since distinct
 * RGBA layouts can share a FourCC-compatible description. */
180 if (m->va_format.fourcc == va_format->fourcc &&
181 (m->type == I965_SURFACETYPE_RGBA ?
182 (m->va_format.byte_order == va_format->byte_order &&
183 m->va_format.red_mask == va_format->red_mask &&
184 m->va_format.green_mask == va_format->green_mask &&
185 m->va_format.blue_mask == va_format->blue_mask &&
186 m->va_format.alpha_mask == va_format->alpha_mask) : 1))
192 extern struct hw_context *i965_proc_context_init(VADriverContextP, struct object_config *);
193 extern struct hw_context *g4x_dec_hw_context_init(VADriverContextP, struct object_config *);
/* GEN4 (G4x): MPEG-2 VLD decode only — no encode and no video-processing
 * context initializers. */
194 static struct hw_codec_info g4x_hw_codec_info = {
195 .dec_hw_context_init = g4x_dec_hw_context_init,
196 .enc_hw_context_init = NULL,
197 .proc_hw_context_init = NULL,
201 .has_mpeg2_decoding = 1,
204 extern struct hw_context *ironlake_dec_hw_context_init(VADriverContextP, struct object_config *);
/* GEN5 (Ironlake): MPEG-2 and H.264 decode, generic i965 VPP,
 * accelerated PutImage; no encode support. */
205 static struct hw_codec_info ironlake_hw_codec_info = {
206 .dec_hw_context_init = ironlake_dec_hw_context_init,
207 .enc_hw_context_init = NULL,
208 .proc_hw_context_init = i965_proc_context_init,
212 .has_mpeg2_decoding = 1,
213 .has_h264_decoding = 1,
215 .has_accelerated_putimage = 1,
218 extern struct hw_context *gen6_dec_hw_context_init(VADriverContextP, struct object_config *);
219 extern struct hw_context *gen6_enc_hw_context_init(VADriverContextP, struct object_config *);
/* GEN6 (Sandy Bridge): adds MPEG-2/H.264 encode, VC-1 decode,
 * accelerated Get/PutImage and tiled surface support. */
220 static struct hw_codec_info gen6_hw_codec_info = {
221 .dec_hw_context_init = gen6_dec_hw_context_init,
222 .enc_hw_context_init = gen6_enc_hw_context_init,
223 .proc_hw_context_init = i965_proc_context_init,
227 .has_mpeg2_decoding = 1,
228 .has_mpeg2_encoding = 1,
229 .has_h264_decoding = 1,
230 .has_h264_encoding = 1,
231 .has_vc1_decoding = 1,
233 .has_accelerated_getimage = 1,
234 .has_accelerated_putimage = 1,
235 .has_tiled_surface = 1,
238 extern struct hw_context *gen7_dec_hw_context_init(VADriverContextP, struct object_config *);
239 extern struct hw_context *gen7_enc_hw_context_init(VADriverContextP, struct object_config *);
/* GEN7 (Ivy Bridge): everything GEN6 has, plus JPEG decode. */
240 static struct hw_codec_info gen7_hw_codec_info = {
241 .dec_hw_context_init = gen7_dec_hw_context_init,
242 .enc_hw_context_init = gen7_enc_hw_context_init,
243 .proc_hw_context_init = i965_proc_context_init,
247 .has_mpeg2_decoding = 1,
248 .has_mpeg2_encoding = 1,
249 .has_h264_decoding = 1,
250 .has_h264_encoding = 1,
251 .has_vc1_decoding = 1,
252 .has_jpeg_decoding = 1,
254 .has_accelerated_getimage = 1,
255 .has_accelerated_putimage = 1,
256 .has_tiled_surface = 1,
259 extern struct hw_context *gen75_proc_context_init(VADriverContextP, struct object_config *);
/* GEN7.5 (Haswell): same codec feature set as GEN7 but with its own
 * VPP pipeline.  NOTE(review): the extern declarations for
 * gen75_dec_hw_context_init/gen75_enc_hw_context_init are on lines
 * elided from this excerpt. */
260 static struct hw_codec_info gen75_hw_codec_info = {
261 .dec_hw_context_init = gen75_dec_hw_context_init,
262 .enc_hw_context_init = gen75_enc_hw_context_init,
263 .proc_hw_context_init = gen75_proc_context_init,
267 .has_mpeg2_decoding = 1,
268 .has_mpeg2_encoding = 1,
269 .has_h264_decoding = 1,
270 .has_h264_encoding = 1,
271 .has_vc1_decoding = 1,
272 .has_jpeg_decoding = 1,
274 .has_accelerated_getimage = 1,
275 .has_accelerated_putimage = 1,
276 .has_tiled_surface = 1,
279 #define I965_PACKED_HEADER_BASE 0
280 #define I965_PACKED_MISC_HEADER_BASE 3
/*
 * Map a VAEncPackedHeaderType value onto a dense array index used for
 * the per-context packed_header_param/packed_header_data arrays.
 * Misc headers are offset by I965_PACKED_MISC_HEADER_BASE (3);
 * sequence/picture/slice headers occupy indices 0..2.
 */
283 va_enc_packed_type_to_idx(int packed_type)
/* Misc packed headers carry VAEncPackedHeaderMiscMask in the high bits;
 * strip the mask and use the remaining value (1-based) as the offset. */
287 if (packed_type & VAEncPackedHeaderMiscMask) {
288 idx = I965_PACKED_MISC_HEADER_BASE;
289 packed_type = (~VAEncPackedHeaderMiscMask & packed_type);
290 assert(packed_type > 0);
291 idx += (packed_type - 1);
293 idx = I965_PACKED_HEADER_BASE;
295 switch (packed_type) {
296 case VAEncPackedHeaderSequence:
297 idx = I965_PACKED_HEADER_BASE + 0;
300 case VAEncPackedHeaderPicture:
301 idx = I965_PACKED_HEADER_BASE + 1;
304 case VAEncPackedHeaderSlice:
305 idx = I965_PACKED_HEADER_BASE + 2;
309 /* Should not get here */
/*
 * vaQueryConfigProfiles() entry point: fill @profile_list with every
 * profile this hardware generation supports (per the codec_info HAS_*
 * capability macros) and report the count via @num_profiles.
 * The caller must provide room for at least I965_MAX_PROFILES entries.
 */
321 i965_QueryConfigProfiles(VADriverContextP ctx,
322 VAProfile *profile_list, /* out */
323 int *num_profiles) /* out */
325 struct i965_driver_data * const i965 = i965_driver_data(ctx);
/* A profile is advertised if either its decoder or encoder exists. */
328 if (HAS_MPEG2_DECODING(i965) ||
329 HAS_MPEG2_ENCODING(i965)) {
330 profile_list[i++] = VAProfileMPEG2Simple;
331 profile_list[i++] = VAProfileMPEG2Main;
334 if (HAS_H264_DECODING(i965) ||
335 HAS_H264_ENCODING(i965)) {
336 profile_list[i++] = VAProfileH264Baseline;
337 profile_list[i++] = VAProfileH264Main;
338 profile_list[i++] = VAProfileH264High;
341 if (HAS_VC1_DECODING(i965)) {
342 profile_list[i++] = VAProfileVC1Simple;
343 profile_list[i++] = VAProfileVC1Main;
344 profile_list[i++] = VAProfileVC1Advanced;
/* VAProfileNone is used for video-processing-only configs (VPP). */
348 profile_list[i++] = VAProfileNone;
351 if (HAS_JPEG_DECODING(i965)) {
352 profile_list[i++] = VAProfileJPEGBaseline;
355 /* If the assert fails then I965_MAX_PROFILES needs to be bigger */
356 assert(i <= I965_MAX_PROFILES);
359 return VA_STATUS_SUCCESS;
/*
 * vaQueryConfigEntrypoints() entry point: list the entrypoints
 * supported for a given profile — VLD for decoders, EncSlice for
 * encoders, VideoProc for VPP (the VAProfileNone case, whose label is
 * on a line elided from this excerpt).
 * Returns VA_STATUS_ERROR_UNSUPPORTED_PROFILE when nothing matches.
 */
363 i965_QueryConfigEntrypoints(VADriverContextP ctx,
365 VAEntrypoint *entrypoint_list, /* out */
366 int *num_entrypoints) /* out */
368 struct i965_driver_data * const i965 = i965_driver_data(ctx);
372 case VAProfileMPEG2Simple:
373 case VAProfileMPEG2Main:
374 if (HAS_MPEG2_DECODING(i965))
375 entrypoint_list[n++] = VAEntrypointVLD;
377 if (HAS_MPEG2_ENCODING(i965))
378 entrypoint_list[n++] = VAEntrypointEncSlice;
382 case VAProfileH264Baseline:
383 case VAProfileH264Main:
384 case VAProfileH264High:
385 if (HAS_H264_DECODING(i965))
386 entrypoint_list[n++] = VAEntrypointVLD;
388 if (HAS_H264_ENCODING(i965))
389 entrypoint_list[n++] = VAEntrypointEncSlice;
393 case VAProfileVC1Simple:
394 case VAProfileVC1Main:
395 case VAProfileVC1Advanced:
396 if (HAS_VC1_DECODING(i965))
397 entrypoint_list[n++] = VAEntrypointVLD;
/* NOTE(review): this belongs to the VAProfileNone/VPP case; the case
 * label and HAS_VPP() check are on lines elided from this excerpt. */
402 entrypoint_list[n++] = VAEntrypointVideoProc;
405 case VAProfileJPEGBaseline:
406 if (HAS_JPEG_DECODING(i965))
407 entrypoint_list[n++] = VAEntrypointVLD;
414 /* If the assert fails then I965_MAX_ENTRYPOINTS needs to be bigger */
415 assert(n <= I965_MAX_ENTRYPOINTS);
416 *num_entrypoints = n;
417 return n > 0 ? VA_STATUS_SUCCESS : VA_STATUS_ERROR_UNSUPPORTED_PROFILE;
/*
 * vaGetConfigAttributes() entry point: for each requested attribute,
 * report the values this driver supports.  Only YUV420 render targets
 * are advertised; rate control and packed-header support are reported
 * only for the encode (EncSlice) entrypoint.  Unknown attributes are
 * marked VA_ATTRIB_NOT_SUPPORTED rather than failing the call.
 */
421 i965_GetConfigAttributes(VADriverContextP ctx,
423 VAEntrypoint entrypoint,
424 VAConfigAttrib *attrib_list, /* in/out */
429 /* Other attributes don't seem to be defined */
430 /* What to do if we don't know the attribute? */
431 for (i = 0; i < num_attribs; i++) {
432 switch (attrib_list[i].type) {
433 case VAConfigAttribRTFormat:
434 attrib_list[i].value = VA_RT_FORMAT_YUV420;
437 case VAConfigAttribRateControl:
438 if (entrypoint == VAEntrypointEncSlice) {
/* Constant bitrate and constant QP are the only RC modes offered. */
439 attrib_list[i].value = VA_RC_CBR | VA_RC_CQP;
443 case VAConfigAttribEncPackedHeaders:
444 if (entrypoint == VAEntrypointEncSlice) {
445 attrib_list[i].value = VA_ENC_PACKED_HEADER_SEQUENCE | VA_ENC_PACKED_HEADER_PICTURE | VA_ENC_PACKED_HEADER_MISC;
/* Default case: flag unrecognized attribute types as unsupported. */
451 attrib_list[i].value = VA_ATTRIB_NOT_SUPPORTED;
456 return VA_STATUS_SUCCESS;
/* Heap destructor for config objects: configs own no extra resources,
 * so releasing the heap slot is all that is required. */
460 i965_destroy_config(struct object_heap *heap, struct object_base *obj)
462 object_heap_free(heap, obj);
/*
 * Set @attrib on @obj_config: overwrite the value if an attribute of
 * the same type already exists, otherwise append it.
 * Returns VA_STATUS_ERROR_MAX_NUM_EXCEEDED when the fixed-size
 * attrib_list (I965_MAX_CONFIG_ATTRIBUTES entries) is full.
 */
466 i965_update_attribute(struct object_config *obj_config, VAConfigAttrib *attrib)
470 /* Check existing attributes */
471 for (i = 0; i < obj_config->num_attribs; i++) {
472 if (obj_config->attrib_list[i].type == attrib->type) {
473 /* Update existing attribute */
474 obj_config->attrib_list[i].value = attrib->value;
475 return VA_STATUS_SUCCESS;
/* Not found: append as a new attribute if there is room left. */
479 if (obj_config->num_attribs < I965_MAX_CONFIG_ATTRIBUTES) {
480 i = obj_config->num_attribs;
481 obj_config->attrib_list[i].type = attrib->type;
482 obj_config->attrib_list[i].value = attrib->value;
483 obj_config->num_attribs++;
484 return VA_STATUS_SUCCESS;
487 return VA_STATUS_ERROR_MAX_NUM_EXCEEDED;
/*
 * vaCreateConfig() entry point: validate the (profile, entrypoint)
 * pair against hardware capabilities, allocate a config object from
 * the config heap, seed it with the default RTFormat attribute, then
 * merge in the caller-supplied attributes.  On any failure after
 * allocation the object is destroyed before returning.
 */
491 i965_CreateConfig(VADriverContextP ctx,
493 VAEntrypoint entrypoint,
494 VAConfigAttrib *attrib_list,
496 VAConfigID *config_id) /* out */
498 struct i965_driver_data * const i965 = i965_driver_data(ctx);
499 struct object_config *obj_config;
504 /* Validate profile & entrypoint */
506 case VAProfileMPEG2Simple:
507 case VAProfileMPEG2Main:
508 if ((HAS_MPEG2_DECODING(i965) && VAEntrypointVLD == entrypoint) ||
509 (HAS_MPEG2_ENCODING(i965) && VAEntrypointEncSlice == entrypoint)) {
510 vaStatus = VA_STATUS_SUCCESS;
512 vaStatus = VA_STATUS_ERROR_UNSUPPORTED_ENTRYPOINT;
516 case VAProfileH264Baseline:
517 case VAProfileH264Main:
518 case VAProfileH264High:
519 if ((HAS_H264_DECODING(i965) && VAEntrypointVLD == entrypoint) ||
520 (HAS_H264_ENCODING(i965) && VAEntrypointEncSlice == entrypoint)) {
521 vaStatus = VA_STATUS_SUCCESS;
523 vaStatus = VA_STATUS_ERROR_UNSUPPORTED_ENTRYPOINT;
528 case VAProfileVC1Simple:
529 case VAProfileVC1Main:
530 case VAProfileVC1Advanced:
531 if (HAS_VC1_DECODING(i965) && VAEntrypointVLD == entrypoint) {
532 vaStatus = VA_STATUS_SUCCESS;
534 vaStatus = VA_STATUS_ERROR_UNSUPPORTED_ENTRYPOINT;
/* NOTE(review): this branch belongs to the VAProfileNone (VPP) case;
 * its case label is on a line elided from this excerpt. */
540 if (HAS_VPP(i965) && VAEntrypointVideoProc == entrypoint) {
541 vaStatus = VA_STATUS_SUCCESS;
543 vaStatus = VA_STATUS_ERROR_UNSUPPORTED_ENTRYPOINT;
548 case VAProfileJPEGBaseline:
549 if (HAS_JPEG_DECODING(i965) && VAEntrypointVLD == entrypoint) {
550 vaStatus = VA_STATUS_SUCCESS;
552 vaStatus = VA_STATUS_ERROR_UNSUPPORTED_ENTRYPOINT;
558 vaStatus = VA_STATUS_ERROR_UNSUPPORTED_PROFILE;
/* Bail out before allocating anything if validation failed. */
562 if (VA_STATUS_SUCCESS != vaStatus) {
566 configID = NEW_CONFIG_ID();
567 obj_config = CONFIG(configID);
569 if (NULL == obj_config) {
570 vaStatus = VA_STATUS_ERROR_ALLOCATION_FAILED;
/* Every config starts with the mandatory RTFormat attribute. */
574 obj_config->profile = profile;
575 obj_config->entrypoint = entrypoint;
576 obj_config->attrib_list[0].type = VAConfigAttribRTFormat;
577 obj_config->attrib_list[0].value = VA_RT_FORMAT_YUV420;
578 obj_config->num_attribs = 1;
580 for(i = 0; i < num_attribs; i++) {
581 vaStatus = i965_update_attribute(obj_config, &(attrib_list[i]));
583 if (VA_STATUS_SUCCESS != vaStatus) {
/* Error recovery: drop the half-built config so the heap slot is
 * not leaked. */
589 if (VA_STATUS_SUCCESS != vaStatus) {
590 i965_destroy_config(&i965->config_heap, (struct object_base *)obj_config);
592 *config_id = configID;
/*
 * vaDestroyConfig() entry point: look up @config_id and release it.
 * Returns VA_STATUS_ERROR_INVALID_CONFIG for an unknown id.
 */
599 i965_DestroyConfig(VADriverContextP ctx, VAConfigID config_id)
601 struct i965_driver_data *i965 = i965_driver_data(ctx);
602 struct object_config *obj_config = CONFIG(config_id);
605 if (NULL == obj_config) {
606 vaStatus = VA_STATUS_ERROR_INVALID_CONFIG;
610 i965_destroy_config(&i965->config_heap, (struct object_base *)obj_config);
611 return VA_STATUS_SUCCESS;
/*
 * vaQueryConfigAttributes() entry point: copy a config's profile,
 * entrypoint and full attribute list back to the caller.
 * The caller's attrib_list must hold at least num_attribs entries
 * (bounded by I965_MAX_CONFIG_ATTRIBUTES).
 */
614 VAStatus i965_QueryConfigAttributes(VADriverContextP ctx,
615 VAConfigID config_id,
616 VAProfile *profile, /* out */
617 VAEntrypoint *entrypoint, /* out */
618 VAConfigAttrib *attrib_list, /* out */
619 int *num_attribs) /* out */
621 struct i965_driver_data *i965 = i965_driver_data(ctx);
622 struct object_config *obj_config = CONFIG(config_id);
623 VAStatus vaStatus = VA_STATUS_SUCCESS;
627 *profile = obj_config->profile;
628 *entrypoint = obj_config->entrypoint;
629 *num_attribs = obj_config->num_attribs;
631 for(i = 0; i < obj_config->num_attribs; i++) {
632 attrib_list[i] = obj_config->attrib_list[i];
/*
 * Heap destructor for surface objects: drop the GEM buffer reference,
 * invoke the codec-registered private-data destructor if one was set,
 * then release the heap slot.
 */
639 i965_destroy_surface(struct object_heap *heap, struct object_base *obj)
641 struct object_surface *obj_surface = (struct object_surface *)obj;
643 dri_bo_unreference(obj_surface->bo);
644 obj_surface->bo = NULL;
646 if (obj_surface->free_private_data != NULL) {
647 obj_surface->free_private_data(&obj_surface->private_data);
648 obj_surface->private_data = NULL;
651 object_heap_free(heap, obj);
/*
 * vaCreateSurfaces2() entry point: allocate @num_surfaces surface
 * objects of the given render-target format, honoring an optional
 * VASurfaceAttribPixelFormat attribute that pins the FourCC up front.
 * Surfaces without an expected FourCC defer buffer-object allocation
 * until first use.  On failure, all surfaces created so far are torn
 * down before returning.
 */
655 i965_CreateSurfaces2(
656 VADriverContextP ctx,
660 VASurfaceID *surfaces,
661 unsigned int num_surfaces,
662 VASurfaceAttrib *attrib_list,
663 unsigned int num_attribs
666 struct i965_driver_data *i965 = i965_driver_data(ctx);
668 VAStatus vaStatus = VA_STATUS_SUCCESS;
669 int expected_fourcc = 0;
/* Scan the attributes for a settable pixel-format request. */
671 for (i = 0; i < num_attribs && attrib_list; i++) {
672 if ((attrib_list[i].type == VASurfaceAttribPixelFormat) &&
673 (attrib_list[i].flags & VA_SURFACE_ATTRIB_SETTABLE)) {
674 assert(attrib_list[i].value.type == VAGenericValueTypeInteger);
675 expected_fourcc = attrib_list[i].value.value.i;
680 /* support 420 & 422 & RGB32 format, 422 and RGB32 are only used
681 * for post-processing (including color conversion) */
682 if (VA_RT_FORMAT_YUV420 != format &&
683 VA_RT_FORMAT_YUV422 != format &&
684 VA_RT_FORMAT_RGB32 != format) {
685 return VA_STATUS_ERROR_UNSUPPORTED_RT_FORMAT;
688 for (i = 0; i < num_surfaces; i++) {
689 int surfaceID = NEW_SURFACE_ID();
690 struct object_surface *obj_surface = SURFACE(surfaceID);
692 if (NULL == obj_surface) {
693 vaStatus = VA_STATUS_ERROR_ALLOCATION_FAILED;
697 surfaces[i] = surfaceID;
698 obj_surface->status = VASurfaceReady;
699 obj_surface->orig_width = width;
700 obj_surface->orig_height = height;
/* Clear the per-surface subpicture association slots. */
702 obj_surface->subpic_render_idx = 0;
703 for(j = 0; j < I965_MAX_SUBPIC_SUM; j++){
704 obj_surface->subpic[j] = VA_INVALID_ID;
705 obj_surface->obj_subpic[j] = NULL;
/* Physical dimensions are rounded up to 16-pixel alignment. */
708 obj_surface->width = ALIGN(width, 16);
709 obj_surface->height = ALIGN(height, 16);
710 obj_surface->flags = SURFACE_REFERENCED;
711 obj_surface->fourcc = 0;
712 obj_surface->bo = NULL;
713 obj_surface->locked_image_id = VA_INVALID_ID;
714 obj_surface->private_data = NULL;
715 obj_surface->free_private_data = NULL;
716 obj_surface->subsampling = SUBSAMPLE_YUV420;
/* If a FourCC was requested, allocate the backing bo immediately. */
718 if (expected_fourcc) {
719 int tiling = HAS_TILED_SURFACE(i965);
/* NOTE(review): the body of this condition is elided; presumably it
 * clears `tiling` for FourCCs other than NV12/RGBX/RGBA — confirm
 * against the full source. */
721 if (expected_fourcc != VA_FOURCC('N', 'V', '1', '2') &&
722 expected_fourcc != VA_FOURCC('R', 'G', 'B', 'X') &&
723 expected_fourcc != VA_FOURCC('R', 'G', 'B', 'A') )
725 // todo, should we disable tiling for 422 format?
727 if (VA_RT_FORMAT_YUV420 == format) {
728 obj_surface->subsampling = SUBSAMPLE_YUV420;
730 else if (VA_RT_FORMAT_YUV422 == format) {
731 obj_surface->subsampling = SUBSAMPLE_YUV422H;
733 else if (VA_RT_FORMAT_RGB32 == format) {
734 obj_surface->subsampling = SUBSAMPLE_RGBX;
740 i965_check_alloc_surface_bo(ctx, obj_surface, tiling, expected_fourcc, obj_surface->subsampling);
/* Error recovery: unwind every surface allocated in this call. */
745 if (VA_STATUS_SUCCESS != vaStatus) {
746 /* surfaces[i-1] was the last successful allocation */
748 struct object_surface *obj_surface = SURFACE(surfaces[i]);
750 surfaces[i] = VA_INVALID_SURFACE;
752 i965_destroy_surface(&i965->surface_heap, (struct object_base *)obj_surface);
/* Legacy vaCreateSurfaces() entry point: thin wrapper that forwards to
 * i965_CreateSurfaces2() (attribute arguments elided in this excerpt). */
760 i965_CreateSurfaces(VADriverContextP ctx,
765 VASurfaceID *surfaces) /* out */
767 return i965_CreateSurfaces2(ctx,
/*
 * vaDestroySurfaces() entry point: destroy each listed surface.
 * Iterates in reverse order over @surface_list.
 */
778 i965_DestroySurfaces(VADriverContextP ctx,
779 VASurfaceID *surface_list,
782 struct i965_driver_data *i965 = i965_driver_data(ctx);
785 for (i = num_surfaces; i--; ) {
786 struct object_surface *obj_surface = SURFACE(surface_list[i]);
789 i965_destroy_surface(&i965->surface_heap, (struct object_base *)obj_surface);
792 return VA_STATUS_SUCCESS;
/*
 * vaQueryImageFormats() entry point: copy the static image-format
 * table (terminated by a zero-FourCC sentinel) into @format_list and
 * report the count.  Caller provides I965_MAX_IMAGE_FORMATS entries.
 */
796 i965_QueryImageFormats(VADriverContextP ctx,
797 VAImageFormat *format_list, /* out */
798 int *num_formats) /* out */
802 for (n = 0; i965_image_formats_map[n].va_format.fourcc != 0; n++) {
803 const i965_image_format_map_t * const m = &i965_image_formats_map[n];
805 format_list[n] = m->va_format;
811 return VA_STATUS_SUCCESS;
815 * Guess the format when the usage of a VA surface is unknown
816 * 1. Without a valid context: YV12
817 * 2. The current context is valid:
818 * a) always NV12 on GEN6 and later
819 * b) I420 for MPEG-2 and NV12 for other codec on GEN4 & GEN5
/* Outputs via @fourcc/@is_tiled; defaults to YV12 when no current
 * context (or its config) can be resolved. */
822 i965_guess_surface_format(VADriverContextP ctx,
824 unsigned int *fourcc,
825 unsigned int *is_tiled)
827 struct i965_driver_data *i965 = i965_driver_data(ctx);
828 struct object_context *obj_context = NULL;
829 struct object_config *obj_config = NULL;
/* Default guess used whenever no context information is available. */
831 *fourcc = VA_FOURCC('Y', 'V', '1', '2');
834 if (i965->current_context_id == VA_INVALID_ID)
837 obj_context = CONTEXT(i965->current_context_id);
842 obj_config = obj_context->obj_config;
/* GEN6+ hardware always works with NV12 regardless of codec. */
848 if (IS_GEN6(i965->intel.device_id) || IS_GEN7(i965->intel.device_id)) {
849 *fourcc = VA_FOURCC('N', 'V', '1', '2');
/* Pre-GEN6: MPEG-2 uses planar I420, everything else NV12. */
854 switch (obj_config->profile) {
855 case VAProfileMPEG2Simple:
856 case VAProfileMPEG2Main:
857 *fourcc = VA_FOURCC('I', '4', '2', '0');
862 *fourcc = VA_FOURCC('N', 'V', '1', '2');
/*
 * vaQuerySubpictureFormats() entry point: copy the static subpicture
 * format table (zero-FourCC sentinel terminated) plus the per-format
 * capability flags into the caller's arrays.
 */
869 i965_QuerySubpictureFormats(VADriverContextP ctx,
870 VAImageFormat *format_list, /* out */
871 unsigned int *flags, /* out */
872 unsigned int *num_formats) /* out */
876 for (n = 0; i965_subpic_formats_map[n].va_format.fourcc != 0; n++) {
877 const i965_subpic_format_map_t * const m = &i965_subpic_formats_map[n];
879 format_list[n] = m->va_format;
881 flags[n] = m->va_flags;
887 return VA_STATUS_SUCCESS;
/* Heap destructor for subpicture objects: subpictures do not own their
 * image's bo (it belongs to the image), so only the slot is released. */
891 i965_destroy_subpic(struct object_heap *heap, struct object_base *obj)
893 // struct object_subpic *obj_subpic = (struct object_subpic *)obj;
895 object_heap_free(heap, obj);
/*
 * vaCreateSubpicture() entry point: allocate a subpicture object bound
 * to an existing VAImage.  The subpicture borrows the image's buffer
 * object and geometry; global alpha defaults to fully opaque (1.0).
 * NOTE(review): NEW_SUBPIC_ID() appears without a trailing ';' —
 * presumably the ID-allocation macros expand to a full statement
 * including the semicolon; confirm against the macro definitions.
 */
899 i965_CreateSubpicture(VADriverContextP ctx,
901 VASubpictureID *subpicture) /* out */
903 struct i965_driver_data *i965 = i965_driver_data(ctx);
904 VASubpictureID subpicID = NEW_SUBPIC_ID()
905 struct object_subpic *obj_subpic = SUBPIC(subpicID);
908 return VA_STATUS_ERROR_ALLOCATION_FAILED;
910 struct object_image *obj_image = IMAGE(image);
912 return VA_STATUS_ERROR_INVALID_IMAGE;
/* Reject images whose format is not in the subpicture format table. */
914 const i965_subpic_format_map_t * const m = get_subpic_format(&obj_image->image.format);
916 return VA_STATUS_ERROR_UNKNOWN; /* XXX: VA_STATUS_ERROR_UNSUPPORTED_FORMAT? */
918 *subpicture = subpicID;
919 obj_subpic->image = image;
920 obj_subpic->obj_image = obj_image;
921 obj_subpic->format = m->format;
922 obj_subpic->width = obj_image->image.width;
923 obj_subpic->height = obj_image->image.height;
924 obj_subpic->pitch = obj_image->image.pitches[0];
925 obj_subpic->bo = obj_image->bo;
926 obj_subpic->global_alpha = 1.0;
928 return VA_STATUS_SUCCESS;
/*
 * vaDestroySubpicture() entry point: validate the id and release the
 * subpicture object.  The backing image is owned by the caller and is
 * not destroyed here.
 */
932 i965_DestroySubpicture(VADriverContextP ctx,
933 VASubpictureID subpicture)
935 struct i965_driver_data *i965 = i965_driver_data(ctx);
936 struct object_subpic *obj_subpic = SUBPIC(subpicture);
939 return VA_STATUS_ERROR_INVALID_SUBPICTURE;
941 assert(obj_subpic->obj_image);
942 i965_destroy_subpic(&i965->subpic_heap, (struct object_base *)obj_subpic);
943 return VA_STATUS_SUCCESS;
/* vaSetSubpictureImage() entry point — not implemented by this driver. */
947 i965_SetSubpictureImage(VADriverContextP ctx,
948 VASubpictureID subpicture,
952 return VA_STATUS_ERROR_UNIMPLEMENTED;
/* vaSetSubpictureChromakey() entry point — not implemented by this
 * driver; chroma-key blending is unsupported. */
956 i965_SetSubpictureChromakey(VADriverContextP ctx,
957 VASubpictureID subpicture,
958 unsigned int chromakey_min,
959 unsigned int chromakey_max,
960 unsigned int chromakey_mask)
963 return VA_STATUS_ERROR_UNIMPLEMENTED;
/*
 * vaSetSubpictureGlobalAlpha() entry point: store a blend factor in
 * [0.0, 1.0] on the subpicture.  Out-of-range values are rejected with
 * VA_STATUS_ERROR_INVALID_PARAMETER.
 */
967 i965_SetSubpictureGlobalAlpha(VADriverContextP ctx,
968 VASubpictureID subpicture,
971 struct i965_driver_data *i965 = i965_driver_data(ctx);
972 struct object_subpic *obj_subpic = SUBPIC(subpicture);
974 if(global_alpha > 1.0 || global_alpha < 0.0){
975 return VA_STATUS_ERROR_INVALID_PARAMETER;
979 return VA_STATUS_ERROR_INVALID_SUBPICTURE;
981 obj_subpic->global_alpha = global_alpha;
983 return VA_STATUS_SUCCESS;
/*
 * vaAssociateSubpicture() entry point: record the src/dst rectangles
 * and flags on the subpicture, then attach it to each target surface
 * in the first free slot of that surface's subpic[] array (up to
 * I965_MAX_SUBPIC_SUM concurrent subpictures per surface).
 */
987 i965_AssociateSubpicture(VADriverContextP ctx,
988 VASubpictureID subpicture,
989 VASurfaceID *target_surfaces,
991 short src_x, /* upper left offset in subpicture */
993 unsigned short src_width,
994 unsigned short src_height,
995 short dest_x, /* upper left offset in surface */
997 unsigned short dest_width,
998 unsigned short dest_height,
1000 * whether to enable chroma-keying or global-alpha
1001 * see VA_SUBPICTURE_XXX values
1005 struct i965_driver_data *i965 = i965_driver_data(ctx);
1006 struct object_subpic *obj_subpic = SUBPIC(subpicture);
1010 return VA_STATUS_ERROR_INVALID_SUBPICTURE;
1012 assert(obj_subpic->obj_image);
/* The rectangles are shared by all target surfaces of this call. */
1014 obj_subpic->src_rect.x = src_x;
1015 obj_subpic->src_rect.y = src_y;
1016 obj_subpic->src_rect.width = src_width;
1017 obj_subpic->src_rect.height = src_height;
1018 obj_subpic->dst_rect.x = dest_x;
1019 obj_subpic->dst_rect.y = dest_y;
1020 obj_subpic->dst_rect.width = dest_width;
1021 obj_subpic->dst_rect.height = dest_height;
1022 obj_subpic->flags = flags;
1024 for (i = 0; i < num_surfaces; i++) {
1025 struct object_surface *obj_surface = SURFACE(target_surfaces[i]);
1027 return VA_STATUS_ERROR_INVALID_SURFACE;
/* Claim the first unused association slot on this surface. */
1029 for(j = 0; j < I965_MAX_SUBPIC_SUM; j ++){
1030 if(obj_surface->subpic[j] == VA_INVALID_ID){
1031 assert(obj_surface->obj_subpic[j] == NULL);
1032 obj_surface->subpic[j] = subpicture;
1033 obj_surface->obj_subpic[j] = obj_subpic;
/* j reaching the limit means every slot was occupied: give up. */
1038 if(j == I965_MAX_SUBPIC_SUM){
1039 return VA_STATUS_ERROR_MAX_NUM_EXCEEDED;
1043 return VA_STATUS_SUCCESS;
/*
 * vaDeassociateSubpicture() entry point: detach @subpicture from each
 * target surface by clearing the matching slot in its subpic[] array.
 * Returns VA_STATUS_ERROR_MAX_NUM_EXCEEDED if a surface did not have
 * this subpicture attached (no slot matched).
 */
1048 i965_DeassociateSubpicture(VADriverContextP ctx,
1049 VASubpictureID subpicture,
1050 VASurfaceID *target_surfaces,
1053 struct i965_driver_data *i965 = i965_driver_data(ctx);
1054 struct object_subpic *obj_subpic = SUBPIC(subpicture);
1058 return VA_STATUS_ERROR_INVALID_SUBPICTURE;
1060 for (i = 0; i < num_surfaces; i++) {
1061 struct object_surface *obj_surface = SURFACE(target_surfaces[i]);
1063 return VA_STATUS_ERROR_INVALID_SURFACE;
1065 for(j = 0; j < I965_MAX_SUBPIC_SUM; j ++){
1066 if (obj_surface->subpic[j] == subpicture) {
1067 assert(obj_surface->obj_subpic[j] == obj_subpic);
1068 obj_surface->subpic[j] = VA_INVALID_ID;
1069 obj_surface->obj_subpic[j] = NULL;
1074 if(j == I965_MAX_SUBPIC_SUM){
1075 return VA_STATUS_ERROR_MAX_NUM_EXCEEDED;
1078 return VA_STATUS_SUCCESS;
/*
 * Take a reference on @buffer_store and publish it through @ptr.
 * @ptr must be empty (NULL) on entry — references are paired with
 * i965_release_buffer_store().
 */
1082 i965_reference_buffer_store(struct buffer_store **ptr,
1083 struct buffer_store *buffer_store)
1085 assert(*ptr == NULL);
1088 buffer_store->ref_count++;
1089 *ptr = buffer_store;
/*
 * Drop one reference from the buffer store held in *@ptr.  When the
 * count reaches zero the backing storage is freed — exactly one of
 * bo (GEM buffer) or buffer (malloc'ed memory) must be set, never
 * both.  NOTE(review): clearing *ptr presumably happens on a line
 * elided from this excerpt.
 */
1094 i965_release_buffer_store(struct buffer_store **ptr)
1096 struct buffer_store *buffer_store = *ptr;
1098 if (buffer_store == NULL)
1101 assert(buffer_store->bo || buffer_store->buffer);
1102 assert(!(buffer_store->bo && buffer_store->buffer));
1103 buffer_store->ref_count--;
1105 if (buffer_store->ref_count == 0) {
1106 dri_bo_unreference(buffer_store->bo);
1107 free(buffer_store->buffer);
1108 buffer_store->bo = NULL;
1109 buffer_store->buffer = NULL;
/*
 * Heap destructor for context objects: tear down the hardware context,
 * then release every buffer store held in the codec_state union.
 * Which union member is live is selected by codec_type
 * (CODEC_PROC / CODEC_ENC / implicit CODEC_DEC fallback).
 */
1117 i965_destroy_context(struct object_heap *heap, struct object_base *obj)
1119 struct object_context *obj_context = (struct object_context *)obj;
1122 if (obj_context->hw_context) {
1123 obj_context->hw_context->destroy(obj_context->hw_context);
1124 obj_context->hw_context = NULL;
1127 if (obj_context->codec_type == CODEC_PROC) {
1128 i965_release_buffer_store(&obj_context->codec_state.proc.pipeline_param);
1130 } else if (obj_context->codec_type == CODEC_ENC) {
/* Encoder state: legacy and _ext parameter sets, packed headers,
 * misc parameters, and the dynamically allocated slice arrays. */
1131 assert(obj_context->codec_state.encode.num_slice_params <= obj_context->codec_state.encode.max_slice_params);
1132 i965_release_buffer_store(&obj_context->codec_state.encode.pic_param);
1133 i965_release_buffer_store(&obj_context->codec_state.encode.seq_param);
1135 for (i = 0; i < obj_context->codec_state.encode.num_slice_params; i++)
1136 i965_release_buffer_store(&obj_context->codec_state.encode.slice_params[i]);
1138 free(obj_context->codec_state.encode.slice_params);
1140 assert(obj_context->codec_state.encode.num_slice_params_ext <= obj_context->codec_state.encode.max_slice_params_ext);
1141 i965_release_buffer_store(&obj_context->codec_state.encode.pic_param_ext);
1142 i965_release_buffer_store(&obj_context->codec_state.encode.seq_param_ext);
1144 for (i = 0; i < ARRAY_ELEMS(obj_context->codec_state.encode.packed_header_param); i++)
1145 i965_release_buffer_store(&obj_context->codec_state.encode.packed_header_param[i]);
1147 for (i = 0; i < ARRAY_ELEMS(obj_context->codec_state.encode.packed_header_data); i++)
1148 i965_release_buffer_store(&obj_context->codec_state.encode.packed_header_data[i]);
1150 for (i = 0; i < ARRAY_ELEMS(obj_context->codec_state.encode.misc_param); i++)
1151 i965_release_buffer_store(&obj_context->codec_state.encode.misc_param[i]);
1153 for (i = 0; i < obj_context->codec_state.encode.num_slice_params_ext; i++)
1154 i965_release_buffer_store(&obj_context->codec_state.encode.slice_params_ext[i]);
1156 free(obj_context->codec_state.encode.slice_params_ext);
/* Decoder state (else branch — the `} else {` line is elided in this
 * excerpt): picture params, IQ matrix, bit plane, slice arrays. */
1158 assert(obj_context->codec_state.decode.num_slice_params <= obj_context->codec_state.decode.max_slice_params);
1159 assert(obj_context->codec_state.decode.num_slice_datas <= obj_context->codec_state.decode.max_slice_datas);
1161 i965_release_buffer_store(&obj_context->codec_state.decode.pic_param);
1162 i965_release_buffer_store(&obj_context->codec_state.decode.iq_matrix);
1163 i965_release_buffer_store(&obj_context->codec_state.decode.bit_plane);
1165 for (i = 0; i < obj_context->codec_state.decode.num_slice_params; i++)
1166 i965_release_buffer_store(&obj_context->codec_state.decode.slice_params[i]);
1168 for (i = 0; i < obj_context->codec_state.decode.num_slice_datas; i++)
1169 i965_release_buffer_store(&obj_context->codec_state.decode.slice_datas[i]);
1171 free(obj_context->codec_state.decode.slice_params);
1172 free(obj_context->codec_state.decode.slice_datas);
1175 free(obj_context->render_targets);
1176 object_heap_free(heap, obj);
/*
 * vaCreateContext: create a decode/encode/video-processing context bound
 * to config_id and the supplied render-target surfaces, returning the new
 * context id through "context".
 * NOTE(review): several lines of this function (picture_width/height/flag
 * parameters, some closing braces, intermediate cases) are elided in this
 * chunk; comments annotate only the visible statements.
 */
i965_CreateContext(VADriverContextP ctx,
                   VAConfigID config_id,
                   VASurfaceID *render_targets,
                   int num_render_targets,
                   VAContextID *context) /* out */
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    struct i965_render_state *render_state = &i965->render_state;
    struct object_config *obj_config = CONFIG(config_id);
    struct object_context *obj_context = NULL;
    VAStatus vaStatus = VA_STATUS_SUCCESS;

    /* config_id must resolve to a config created by i965_CreateConfig. */
    if (NULL == obj_config) {
        vaStatus = VA_STATUS_ERROR_INVALID_CONFIG;

    /* Reject dimensions beyond what this hardware generation supports. */
    if (picture_width > i965->codec_info->max_width ||
        picture_height > i965->codec_info->max_height) {
        vaStatus = VA_STATUS_ERROR_RESOLUTION_NOT_SUPPORTED;

    /* Validate picture dimensions */
    contextID = NEW_CONTEXT_ID();
    obj_context = CONTEXT(contextID);

    if (NULL == obj_context) {
        vaStatus = VA_STATUS_ERROR_ALLOCATION_FAILED;

    render_state->inited = 1;

    switch (obj_config->profile) {
    case VAProfileH264Baseline:
    case VAProfileH264Main:
    case VAProfileH264High:
        if (!HAS_H264_DECODING(i965) &&
            !HAS_H264_ENCODING(i965))
            return VA_STATUS_ERROR_UNSUPPORTED_PROFILE;
        /* H.264 render surfaces use interleaved U/V (NV12). */
        render_state->interleaved_uv = 1;
        /* Other profiles: interleaved U/V only on Gen6/Gen7 devices. */
        render_state->interleaved_uv = !!(IS_GEN6(i965->intel.device_id) || IS_GEN7(i965->intel.device_id));

    *context = contextID;
    obj_context->flags = flag;
    obj_context->context_id = contextID;
    obj_context->obj_config = obj_config;
    obj_context->picture_width = picture_width;
    obj_context->picture_height = picture_height;
    obj_context->num_render_targets = num_render_targets;
    obj_context->render_targets =
        (VASurfaceID *)calloc(num_render_targets, sizeof(VASurfaceID));
    obj_context->hw_context = NULL;

    /* Every render target must be an already-created surface object. */
    for(i = 0; i < num_render_targets; i++) {
        if (NULL == SURFACE(render_targets[i])) {
            vaStatus = VA_STATUS_ERROR_INVALID_SURFACE;

        obj_context->render_targets[i] = render_targets[i];

    if (VA_STATUS_SUCCESS == vaStatus) {
        if (VAEntrypointVideoProc == obj_config->entrypoint) {
            /* Video-processing (VPP) context. */
            obj_context->codec_type = CODEC_PROC;
            memset(&obj_context->codec_state.proc, 0, sizeof(obj_context->codec_state.proc));
            obj_context->codec_state.proc.current_render_target = VA_INVALID_ID;
            assert(i965->codec_info->proc_hw_context_init);
            obj_context->hw_context = i965->codec_info->proc_hw_context_init(ctx, obj_config);
        } else if (VAEntrypointEncSlice == obj_config->entrypoint) { /*encode routin only*/
            /* Encoder context: pre-allocate NUM_SLICES slice-parameter slots
             * (grown on demand by the render-buffer helpers). */
            obj_context->codec_type = CODEC_ENC;
            memset(&obj_context->codec_state.encode, 0, sizeof(obj_context->codec_state.encode));
            obj_context->codec_state.encode.current_render_target = VA_INVALID_ID;
            obj_context->codec_state.encode.max_slice_params = NUM_SLICES;
            obj_context->codec_state.encode.slice_params = calloc(obj_context->codec_state.encode.max_slice_params,
                                                                  sizeof(*obj_context->codec_state.encode.slice_params));
            assert(i965->codec_info->enc_hw_context_init);
            obj_context->hw_context = i965->codec_info->enc_hw_context_init(ctx, obj_config);
            /* Decoder context: parallel arrays of slice parameters and data. */
            obj_context->codec_type = CODEC_DEC;
            memset(&obj_context->codec_state.decode, 0, sizeof(obj_context->codec_state.decode));
            obj_context->codec_state.decode.current_render_target = -1;
            obj_context->codec_state.decode.max_slice_params = NUM_SLICES;
            obj_context->codec_state.decode.max_slice_datas = NUM_SLICES;
            obj_context->codec_state.decode.slice_params = calloc(obj_context->codec_state.decode.max_slice_params,
                                                                  sizeof(*obj_context->codec_state.decode.slice_params));
            obj_context->codec_state.decode.slice_datas = calloc(obj_context->codec_state.decode.max_slice_datas,
                                                                 sizeof(*obj_context->codec_state.decode.slice_datas));

            assert(i965->codec_info->dec_hw_context_init);
            obj_context->hw_context = i965->codec_info->dec_hw_context_init(ctx, obj_config);

    /* Error recovery */
    if (VA_STATUS_SUCCESS != vaStatus) {
        /* Release the partially-initialized context on any failure above. */
        i965_destroy_context(&i965->context_heap, (struct object_base *)obj_context);

    i965->current_context_id = contextID;
/*
 * vaDestroyContext: free the context object, clearing the driver's
 * current-context id first if it refers to this context.
 */
i965_DestroyContext(VADriverContextP ctx, VAContextID context)
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    struct object_context *obj_context = CONTEXT(context);

    assert(obj_context);

    /* Don't leave a dangling "current" context id behind. */
    if (i965->current_context_id == context)
        i965->current_context_id = VA_INVALID_ID;

    i965_destroy_context(&i965->context_heap, (struct object_base *)obj_context);

    return VA_STATUS_SUCCESS;
/*
 * Heap destructor for buffer objects: drop this object's reference on
 * its buffer store, then return the object to the heap.
 */
i965_destroy_buffer(struct object_heap *heap, struct object_base *obj)
    struct object_buffer *obj_buffer = (struct object_buffer *)obj;

    assert(obj_buffer->buffer_store);
    i965_release_buffer_store(&obj_buffer->buffer_store);
    object_heap_free(heap, obj);
/*
 * Common buffer-creation path shared by vaCreateBuffer and the internal
 * callers that supply an existing bo (store_bo).  Allocates the buffer
 * object, then backs it with either the given bo, a fresh GEM bo (for
 * slice data / image / coded buffers), or plain malloc'ed memory, and
 * copies "data" in when provided.
 * NOTE(review): the type/size/data/store_bo/buf_id parameter lines and
 * some braces are elided in this chunk.
 */
i965_create_buffer_internal(VADriverContextP ctx,
                            VAContextID context,
                            unsigned int num_elements,
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    struct object_buffer *obj_buffer = NULL;
    struct buffer_store *buffer_store = NULL;
    /* Whitelist of buffer types this driver knows how to store. */
    case VAPictureParameterBufferType:
    case VAIQMatrixBufferType:
    case VAQMatrixBufferType:
    case VABitPlaneBufferType:
    case VASliceGroupMapBufferType:
    case VASliceParameterBufferType:
    case VASliceDataBufferType:
    case VAMacroblockParameterBufferType:
    case VAResidualDataBufferType:
    case VADeblockingParameterBufferType:
    case VAImageBufferType:
    case VAEncCodedBufferType:
    case VAEncSequenceParameterBufferType:
    case VAEncPictureParameterBufferType:
    case VAEncSliceParameterBufferType:
    case VAEncPackedHeaderParameterBufferType:
    case VAEncPackedHeaderDataBufferType:
    case VAEncMiscParameterBufferType:
    case VAProcPipelineParameterBufferType:
    case VAProcFilterParameterBufferType:
    case VAHuffmanTableBufferType:
        /* Any other buffer type is rejected (default branch elided here). */
        return VA_STATUS_ERROR_UNSUPPORTED_BUFFERTYPE;

    bufferID = NEW_BUFFER_ID();
    obj_buffer = BUFFER(bufferID);

    if (NULL == obj_buffer) {
        return VA_STATUS_ERROR_ALLOCATION_FAILED;

    /* Coded buffers carry a status header plus padding used by the
     * overflow scan in i965_MapBuffer. */
    if (type == VAEncCodedBufferType) {
        size += I965_CODEDBUFFER_HEADER_SIZE;
        size += 0x1000; /* for upper bound check */

    obj_buffer->max_num_elements = num_elements;
    obj_buffer->num_elements = num_elements;
    obj_buffer->size_element = size;
    obj_buffer->type = type;
    obj_buffer->buffer_store = NULL;
    buffer_store = calloc(1, sizeof(struct buffer_store));
    assert(buffer_store);
    buffer_store->ref_count = 1;

    if (store_bo != NULL) {
        /* Caller supplied a bo: take a reference and (if data was given,
         * per the elided condition) upload it. */
        buffer_store->bo = store_bo;
        dri_bo_reference(buffer_store->bo);

        dri_bo_subdata(buffer_store->bo, 0, size * num_elements, data);
    } else if (type == VASliceDataBufferType ||
               type == VAImageBufferType ||
               type == VAEncCodedBufferType) {
        /* GPU-visible types get a fresh 64-byte-aligned GEM bo. */
        buffer_store->bo = dri_bo_alloc(i965->intel.bufmgr,
                                        size * num_elements, 64);
        assert(buffer_store->bo);

        if (type == VAEncCodedBufferType) {
            struct i965_coded_buffer_segment *coded_buffer_segment;

            /* Initialize the coded-buffer header at the start of the bo. */
            dri_bo_map(buffer_store->bo, 1);
            coded_buffer_segment = (struct i965_coded_buffer_segment *)buffer_store->bo->virtual;
            coded_buffer_segment->base.size = size - I965_CODEDBUFFER_HEADER_SIZE;
            coded_buffer_segment->base.bit_offset = 0;
            coded_buffer_segment->base.status = 0;
            coded_buffer_segment->base.buf = NULL;
            coded_buffer_segment->base.next = NULL;
            coded_buffer_segment->mapped = 0;
            coded_buffer_segment->codec = 0;
            dri_bo_unmap(buffer_store->bo);

            dri_bo_subdata(buffer_store->bo, 0, size * num_elements, data);

        /* CPU-side storage; packed-header data is padded to a 4-byte
         * multiple (presumably for downstream DWORD access — elided). */
        if (type == VAEncPackedHeaderDataBufferType) {
            msize = ALIGN(size, 4);

        buffer_store->buffer = malloc(msize * num_elements);
        assert(buffer_store->buffer);

        memcpy(buffer_store->buffer, data, size * num_elements);

    buffer_store->num_elements = obj_buffer->num_elements;
    i965_reference_buffer_store(&obj_buffer->buffer_store, buffer_store);
    /* Drop the local reference; the object now owns the store. */
    i965_release_buffer_store(&buffer_store);

    return VA_STATUS_SUCCESS;
/*
 * vaCreateBuffer: thin wrapper over i965_create_buffer_internal with no
 * pre-existing bo (store_bo == NULL).
 */
i965_CreateBuffer(VADriverContextP ctx,
                  VAContextID context, /* in */
                  VABufferType type, /* in */
                  unsigned int size, /* in */
                  unsigned int num_elements, /* in */
                  void *data, /* in */
                  VABufferID *buf_id) /* out */
    return i965_create_buffer_internal(ctx, context, type, size, num_elements, data, NULL, buf_id);
/*
 * vaBufferSetNumElements: shrink/adjust the element count of an existing
 * buffer; the count may not exceed the count the buffer was created with.
 */
i965_BufferSetNumElements(VADriverContextP ctx,
                          VABufferID buf_id, /* in */
                          unsigned int num_elements) /* in */
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    struct object_buffer *obj_buffer = BUFFER(buf_id);
    VAStatus vaStatus = VA_STATUS_SUCCESS;

        return VA_STATUS_ERROR_INVALID_BUFFER;

    /* NOTE(review): num_elements is unsigned, so the "< 0" arm can never
     * be true; only the max_num_elements bound is effective. */
    if ((num_elements < 0) ||
        (num_elements > obj_buffer->max_num_elements)) {
        vaStatus = VA_STATUS_ERROR_MAX_NUM_EXCEEDED;
        obj_buffer->num_elements = num_elements;
        if (obj_buffer->buffer_store != NULL) {
            obj_buffer->buffer_store->num_elements = num_elements;
/*
 * vaMapBuffer: expose the buffer's storage to the application.  GEM-backed
 * buffers are mapped (GTT map for tiled bos); malloc-backed buffers return
 * the raw pointer.  For coded (encoder output) buffers, the first map also
 * scans the bitstream for the end delimiter to compute the coded size and
 * detect slice overflow.
 */
i965_MapBuffer(VADriverContextP ctx,
               VABufferID buf_id, /* in */
               void **pbuf) /* out */
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    struct object_buffer *obj_buffer = BUFFER(buf_id);
    VAStatus vaStatus = VA_STATUS_ERROR_UNKNOWN;

    /* NOTE(review): these asserts dereference obj_buffer before the NULL
     * check below; in release builds the check still guards, but in debug
     * builds an invalid id aborts instead of returning an error. */
    assert(obj_buffer && obj_buffer->buffer_store);
    /* Exactly one backing store must exist: a bo or a malloc buffer. */
    assert(obj_buffer->buffer_store->bo || obj_buffer->buffer_store->buffer);
    assert(!(obj_buffer->buffer_store->bo && obj_buffer->buffer_store->buffer));

    if (!obj_buffer || !obj_buffer->buffer_store)
        return VA_STATUS_ERROR_INVALID_BUFFER;

    if (NULL != obj_buffer->buffer_store->bo) {
        unsigned int tiling, swizzle;

        dri_bo_get_tiling(obj_buffer->buffer_store->bo, &tiling, &swizzle);

        /* Tiled bos must be mapped through the GTT to get linear access. */
        if (tiling != I915_TILING_NONE)
            drm_intel_gem_bo_map_gtt(obj_buffer->buffer_store->bo);
            dri_bo_map(obj_buffer->buffer_store->bo, 1);

        assert(obj_buffer->buffer_store->bo->virtual);
        *pbuf = obj_buffer->buffer_store->bo->virtual;

        if (obj_buffer->type == VAEncCodedBufferType) {
            unsigned char *buffer = NULL;
            struct i965_coded_buffer_segment *coded_buffer_segment = (struct i965_coded_buffer_segment *)(obj_buffer->buffer_store->bo->virtual);

            /* Only compute the coded size once per buffer. */
            if (!coded_buffer_segment->mapped) {
                unsigned char delimiter0, delimiter1, delimiter2, delimiter3, delimiter4;

                /* Bitstream starts right after the driver's header. */
                coded_buffer_segment->base.buf = buffer = (unsigned char *)(obj_buffer->buffer_store->bo->virtual) + I965_CODEDBUFFER_HEADER_SIZE;

                /* Pick the 5-byte end-of-stream delimiter for the codec. */
                if (coded_buffer_segment->codec == CODED_H264) {
                    delimiter0 = H264_DELIMITER0;
                    delimiter1 = H264_DELIMITER1;
                    delimiter2 = H264_DELIMITER2;
                    delimiter3 = H264_DELIMITER3;
                    delimiter4 = H264_DELIMITER4;
                } else if (coded_buffer_segment->codec == CODED_MPEG2) {
                    delimiter0 = MPEG2_DELIMITER0;
                    delimiter1 = MPEG2_DELIMITER1;
                    delimiter2 = MPEG2_DELIMITER2;
                    delimiter3 = MPEG2_DELIMITER3;
                    delimiter4 = MPEG2_DELIMITER4;

                /* Scan for the delimiter; the 0x1000 pad added at creation
                 * keeps the i+4 reads inside the allocation. */
                for (i = 0; i < obj_buffer->size_element - I965_CODEDBUFFER_HEADER_SIZE - 3 - 0x1000; i++) {
                    if ((buffer[i] == delimiter0) &&
                        (buffer[i + 1] == delimiter1) &&
                        (buffer[i + 2] == delimiter2) &&
                        (buffer[i + 3] == delimiter3) &&
                        (buffer[i + 4] == delimiter4))

                /* No delimiter found => encoder overran the buffer. */
                if (i == obj_buffer->size_element - I965_CODEDBUFFER_HEADER_SIZE - 3 - 0x1000) {
                    coded_buffer_segment->base.status |= VA_CODED_BUF_STATUS_SLICE_OVERFLOW_MASK;

                coded_buffer_segment->base.size = i;
                coded_buffer_segment->mapped = 1;

                assert(coded_buffer_segment->base.buf);

        vaStatus = VA_STATUS_SUCCESS;
    } else if (NULL != obj_buffer->buffer_store->buffer) {
        *pbuf = obj_buffer->buffer_store->buffer;
        vaStatus = VA_STATUS_SUCCESS;
/*
 * vaUnmapBuffer: undo i965_MapBuffer, choosing the GTT or regular unmap
 * path to mirror how the bo was mapped.  Malloc-backed buffers need no
 * unmap work.
 */
i965_UnmapBuffer(VADriverContextP ctx, VABufferID buf_id)
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    struct object_buffer *obj_buffer = BUFFER(buf_id);
    VAStatus vaStatus = VA_STATUS_ERROR_UNKNOWN;

    /* NOTE(review): asserts dereference obj_buffer before the NULL check
     * below (same pattern as i965_MapBuffer). */
    assert(obj_buffer && obj_buffer->buffer_store);
    assert(obj_buffer->buffer_store->bo || obj_buffer->buffer_store->buffer);
    assert(!(obj_buffer->buffer_store->bo && obj_buffer->buffer_store->buffer));

    if (!obj_buffer || !obj_buffer->buffer_store)
        return VA_STATUS_ERROR_INVALID_BUFFER;

    if (NULL != obj_buffer->buffer_store->bo) {
        unsigned int tiling, swizzle;

        dri_bo_get_tiling(obj_buffer->buffer_store->bo, &tiling, &swizzle);

        /* Tiled bos were mapped via the GTT; unmap the same way. */
        if (tiling != I915_TILING_NONE)
            drm_intel_gem_bo_unmap_gtt(obj_buffer->buffer_store->bo);
            dri_bo_unmap(obj_buffer->buffer_store->bo);

        vaStatus = VA_STATUS_SUCCESS;
    } else if (NULL != obj_buffer->buffer_store->buffer) {
        vaStatus = VA_STATUS_SUCCESS;
/*
 * vaDestroyBuffer: validate the id and return the buffer object (and its
 * store reference) to the buffer heap.
 */
i965_DestroyBuffer(VADriverContextP ctx, VABufferID buffer_id)
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    struct object_buffer *obj_buffer = BUFFER(buffer_id);

        return VA_STATUS_ERROR_INVALID_BUFFER;

    i965_destroy_buffer(&i965->buffer_heap, (struct object_base *)obj_buffer);

    return VA_STATUS_SUCCESS;
/*
 * vaBeginPicture: start a new frame on "context" targeting the surface
 * "render_target".  Validates context/surface/profile, then resets the
 * per-frame codec state (releasing any buffer stores kept from the
 * previous frame) and records the new render target.
 */
i965_BeginPicture(VADriverContextP ctx,
                  VAContextID context,
                  VASurfaceID render_target)
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    struct object_context *obj_context = CONTEXT(context);
    struct object_surface *obj_surface = SURFACE(render_target);
    struct object_config *obj_config;

    assert(obj_context);
        return VA_STATUS_ERROR_INVALID_CONTEXT;

    assert(obj_surface);
        return VA_STATUS_ERROR_INVALID_SURFACE;

    obj_config = obj_context->obj_config;

    /* Accept only the profiles this driver implements. */
    switch (obj_config->profile) {
    case VAProfileMPEG2Simple:
    case VAProfileMPEG2Main:
        vaStatus = VA_STATUS_SUCCESS;

    case VAProfileH264Baseline:
    case VAProfileH264Main:
    case VAProfileH264High:
        vaStatus = VA_STATUS_SUCCESS;

    case VAProfileVC1Simple:
    case VAProfileVC1Main:
    case VAProfileVC1Advanced:
        vaStatus = VA_STATUS_SUCCESS;

    case VAProfileJPEGBaseline:
        vaStatus = VA_STATUS_SUCCESS;

        vaStatus = VA_STATUS_SUCCESS;

        vaStatus = VA_STATUS_ERROR_UNSUPPORTED_PROFILE;

    if (obj_context->codec_type == CODEC_PROC) {
        obj_context->codec_state.proc.current_render_target = render_target;
    } else if (obj_context->codec_type == CODEC_ENC) {
        /* Drop per-frame encoder state carried over from the last frame. */
        i965_release_buffer_store(&obj_context->codec_state.encode.pic_param);

        for (i = 0; i < obj_context->codec_state.encode.num_slice_params; i++) {
            i965_release_buffer_store(&obj_context->codec_state.encode.slice_params[i]);

        obj_context->codec_state.encode.num_slice_params = 0;

        /* extended buffers */
        i965_release_buffer_store(&obj_context->codec_state.encode.pic_param_ext);

        for (i = 0; i < ARRAY_ELEMS(obj_context->codec_state.encode.packed_header_param); i++)
            i965_release_buffer_store(&obj_context->codec_state.encode.packed_header_param[i]);

        for (i = 0; i < ARRAY_ELEMS(obj_context->codec_state.encode.packed_header_data); i++)
            i965_release_buffer_store(&obj_context->codec_state.encode.packed_header_data[i]);

        for (i = 0; i < obj_context->codec_state.encode.num_slice_params_ext; i++)
            i965_release_buffer_store(&obj_context->codec_state.encode.slice_params_ext[i]);

        obj_context->codec_state.encode.num_slice_params_ext = 0;
        obj_context->codec_state.encode.current_render_target = render_target; /*This is input new frame*/
        obj_context->codec_state.encode.last_packed_header_type = 0;
        /* Decoder: reset per-frame state and release old parameter stores. */
        obj_context->codec_state.decode.current_render_target = render_target;
        i965_release_buffer_store(&obj_context->codec_state.decode.pic_param);
        i965_release_buffer_store(&obj_context->codec_state.decode.iq_matrix);
        i965_release_buffer_store(&obj_context->codec_state.decode.bit_plane);
        i965_release_buffer_store(&obj_context->codec_state.decode.huffman_table);

        for (i = 0; i < obj_context->codec_state.decode.num_slice_params; i++) {
            i965_release_buffer_store(&obj_context->codec_state.decode.slice_params[i]);
            i965_release_buffer_store(&obj_context->codec_state.decode.slice_datas[i]);

        obj_context->codec_state.decode.num_slice_params = 0;
        obj_context->codec_state.decode.num_slice_datas = 0;
/* Expands to a call of the per-category/per-name render helper generated
 * by the DEF_RENDER_*_BUFFER_FUNC macros below; relies on ctx,
 * obj_context and obj_buffer being in scope at the expansion site. */
#define I965_RENDER_BUFFER(category, name) i965_render_##category##_##name##_buffer(ctx, obj_context, obj_buffer)

/* Generates a renderer for a buffer held as a single reference in the
 * codec state: releases the previous store and references the new one.
 * Only malloc-backed stores are accepted (asserted). */
#define DEF_RENDER_SINGLE_BUFFER_FUNC(category, name, member)           \
i965_render_##category##_##name##_buffer(VADriverContextP ctx,          \
                                         struct object_context *obj_context, \
                                         struct object_buffer *obj_buffer) \
    struct category##_state *category = &obj_context->codec_state.category; \
    assert(obj_buffer->buffer_store->bo == NULL);                       \
    assert(obj_buffer->buffer_store->buffer);                           \
    i965_release_buffer_store(&category->member);                       \
    i965_reference_buffer_store(&category->member, obj_buffer->buffer_store); \
    return VA_STATUS_SUCCESS;                                           \

/* Generates a renderer for array-valued buffers (e.g. slice params):
 * grows the array by NUM_SLICES entries when full, then appends a
 * reference to the incoming store.
 * NOTE(review): the realloc result overwrites the pointer directly, so
 * an allocation failure here would leak the old array. */
#define DEF_RENDER_MULTI_BUFFER_FUNC(category, name, member)            \
i965_render_##category##_##name##_buffer(VADriverContextP ctx,          \
                                         struct object_context *obj_context, \
                                         struct object_buffer *obj_buffer) \
    struct category##_state *category = &obj_context->codec_state.category; \
    if (category->num_##member == category->max_##member) {             \
        category->member = realloc(category->member, (category->max_##member + NUM_SLICES) * sizeof(*category->member)); \
        memset(category->member + category->max_##member, 0, NUM_SLICES * sizeof(*category->member)); \
        category->max_##member += NUM_SLICES;                           \
    i965_release_buffer_store(&category->member[category->num_##member]); \
    i965_reference_buffer_store(&category->member[category->num_##member], obj_buffer->buffer_store); \
    category->num_##member++;                                           \
    return VA_STATUS_SUCCESS;                                           \
/* Decoder-side instantiations of the render-buffer helpers. */
#define I965_RENDER_DECODE_BUFFER(name) I965_RENDER_BUFFER(decode, name)

/* Single-instance decode buffers (latest one wins). */
#define DEF_RENDER_DECODE_SINGLE_BUFFER_FUNC(name, member) DEF_RENDER_SINGLE_BUFFER_FUNC(decode, name, member)
DEF_RENDER_DECODE_SINGLE_BUFFER_FUNC(picture_parameter, pic_param)
DEF_RENDER_DECODE_SINGLE_BUFFER_FUNC(iq_matrix, iq_matrix)
DEF_RENDER_DECODE_SINGLE_BUFFER_FUNC(bit_plane, bit_plane)
DEF_RENDER_DECODE_SINGLE_BUFFER_FUNC(huffman_table, huffman_table)

/* Per-slice decode buffers, accumulated across RenderPicture calls. */
#define DEF_RENDER_DECODE_MULTI_BUFFER_FUNC(name, member) DEF_RENDER_MULTI_BUFFER_FUNC(decode, name, member)
DEF_RENDER_DECODE_MULTI_BUFFER_FUNC(slice_parameter, slice_params)
DEF_RENDER_DECODE_MULTI_BUFFER_FUNC(slice_data, slice_datas)
/*
 * Decode path of vaRenderPicture: dispatch each submitted buffer to the
 * matching decode-state slot; stops early on the first error.
 */
i965_decoder_render_picture(VADriverContextP ctx,
                            VAContextID context,
                            VABufferID *buffers,
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    struct object_context *obj_context = CONTEXT(context);
    VAStatus vaStatus = VA_STATUS_SUCCESS;

    assert(obj_context);
        return VA_STATUS_ERROR_INVALID_CONTEXT;

    /* Stop at the first buffer that fails to render. */
    for (i = 0; i < num_buffers && vaStatus == VA_STATUS_SUCCESS; i++) {
        struct object_buffer *obj_buffer = BUFFER(buffers[i]);

            return VA_STATUS_ERROR_INVALID_BUFFER;

        switch (obj_buffer->type) {
        case VAPictureParameterBufferType:
            vaStatus = I965_RENDER_DECODE_BUFFER(picture_parameter);

        case VAIQMatrixBufferType:
            vaStatus = I965_RENDER_DECODE_BUFFER(iq_matrix);

        case VABitPlaneBufferType:
            vaStatus = I965_RENDER_DECODE_BUFFER(bit_plane);

        case VASliceParameterBufferType:
            vaStatus = I965_RENDER_DECODE_BUFFER(slice_parameter);

        case VASliceDataBufferType:
            vaStatus = I965_RENDER_DECODE_BUFFER(slice_data);

        case VAHuffmanTableBufferType:
            vaStatus = I965_RENDER_DECODE_BUFFER(huffman_table);

            vaStatus = VA_STATUS_ERROR_UNSUPPORTED_BUFFERTYPE;
/* Encoder-side instantiations of the render-buffer helpers. */
#define I965_RENDER_ENCODE_BUFFER(name) I965_RENDER_BUFFER(encode, name)

#define DEF_RENDER_ENCODE_SINGLE_BUFFER_FUNC(name, member) DEF_RENDER_SINGLE_BUFFER_FUNC(encode, name, member)
// DEF_RENDER_ENCODE_SINGLE_BUFFER_FUNC(sequence_parameter, seq_param)
// DEF_RENDER_ENCODE_SINGLE_BUFFER_FUNC(picture_parameter, pic_param)
// DEF_RENDER_ENCODE_SINGLE_BUFFER_FUNC(picture_control, pic_control)
DEF_RENDER_ENCODE_SINGLE_BUFFER_FUNC(qmatrix, q_matrix)
DEF_RENDER_ENCODE_SINGLE_BUFFER_FUNC(iqmatrix, iq_matrix)
/* extended buffer */
DEF_RENDER_ENCODE_SINGLE_BUFFER_FUNC(sequence_parameter_ext, seq_param_ext)
DEF_RENDER_ENCODE_SINGLE_BUFFER_FUNC(picture_parameter_ext, pic_param_ext)

/* Per-slice encode parameters (extended form only). */
#define DEF_RENDER_ENCODE_MULTI_BUFFER_FUNC(name, member) DEF_RENDER_MULTI_BUFFER_FUNC(encode, name, member)
// DEF_RENDER_ENCODE_MULTI_BUFFER_FUNC(slice_parameter, slice_params)
DEF_RENDER_ENCODE_MULTI_BUFFER_FUNC(slice_parameter_ext, slice_params_ext)
/*
 * Store a packed-header *parameter* buffer in the encode state at slot
 * type_index (derived from the packed header type by the caller).
 * NOTE(review): the type_index parameter line is elided in this chunk.
 */
i965_encoder_render_packed_header_parameter_buffer(VADriverContextP ctx,
                                                   struct object_context *obj_context,
                                                   struct object_buffer *obj_buffer,
    struct encode_state *encode = &obj_context->codec_state.encode;

    /* Packed headers are always CPU-side (malloc-backed) buffers. */
    assert(obj_buffer->buffer_store->bo == NULL);
    assert(obj_buffer->buffer_store->buffer);
    i965_release_buffer_store(&encode->packed_header_param[type_index]);
    i965_reference_buffer_store(&encode->packed_header_param[type_index], obj_buffer->buffer_store);

    return VA_STATUS_SUCCESS;
/*
 * Store a packed-header *data* buffer in the encode state at slot
 * type_index; mirrors the parameter-buffer helper above.
 * NOTE(review): the type_index parameter line is elided in this chunk.
 */
i965_encoder_render_packed_header_data_buffer(VADriverContextP ctx,
                                              struct object_context *obj_context,
                                              struct object_buffer *obj_buffer,
    struct encode_state *encode = &obj_context->codec_state.encode;

    /* Packed headers are always CPU-side (malloc-backed) buffers. */
    assert(obj_buffer->buffer_store->bo == NULL);
    assert(obj_buffer->buffer_store->buffer);
    i965_release_buffer_store(&encode->packed_header_data[type_index]);
    i965_reference_buffer_store(&encode->packed_header_data[type_index], obj_buffer->buffer_store);

    return VA_STATUS_SUCCESS;
/*
 * Store a misc-parameter buffer, indexed by the VAEncMiscParameterType
 * embedded in the buffer contents.
 * NOTE(review): param->type comes straight from application data and is
 * used as an array index without a visible bounds check — verify the
 * misc_param array covers all possible type values.
 */
i965_encoder_render_misc_parameter_buffer(VADriverContextP ctx,
                                          struct object_context *obj_context,
                                          struct object_buffer *obj_buffer)
    struct encode_state *encode = &obj_context->codec_state.encode;
    VAEncMiscParameterBuffer *param = NULL;

    assert(obj_buffer->buffer_store->bo == NULL);
    assert(obj_buffer->buffer_store->buffer);

    param = (VAEncMiscParameterBuffer *)obj_buffer->buffer_store->buffer;
    i965_release_buffer_store(&encode->misc_param[param->type]);
    i965_reference_buffer_store(&encode->misc_param[param->type], obj_buffer->buffer_store);

    return VA_STATUS_SUCCESS;
/*
 * Encode path of vaRenderPicture: dispatch each submitted buffer into the
 * encode state.  Packed-header data buffers pair with the most recently
 * seen packed-header parameter buffer via last_packed_header_type.
 */
i965_encoder_render_picture(VADriverContextP ctx,
                            VAContextID context,
                            VABufferID *buffers,
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    struct object_context *obj_context = CONTEXT(context);
    VAStatus vaStatus = VA_STATUS_ERROR_UNKNOWN;

    assert(obj_context);
        return VA_STATUS_ERROR_INVALID_CONTEXT;

    for (i = 0; i < num_buffers; i++) {
        struct object_buffer *obj_buffer = BUFFER(buffers[i]);

            return VA_STATUS_ERROR_INVALID_BUFFER;

        switch (obj_buffer->type) {
        case VAQMatrixBufferType:
            vaStatus = I965_RENDER_ENCODE_BUFFER(qmatrix);

        case VAIQMatrixBufferType:
            vaStatus = I965_RENDER_ENCODE_BUFFER(iqmatrix);

        case VAEncSequenceParameterBufferType:
            vaStatus = I965_RENDER_ENCODE_BUFFER(sequence_parameter_ext);

        case VAEncPictureParameterBufferType:
            vaStatus = I965_RENDER_ENCODE_BUFFER(picture_parameter_ext);

        case VAEncSliceParameterBufferType:
            vaStatus = I965_RENDER_ENCODE_BUFFER(slice_parameter_ext);

        case VAEncPackedHeaderParameterBufferType:
            /* Remember the header type so the following data buffer can be
             * filed in the matching slot. */
            struct encode_state *encode = &obj_context->codec_state.encode;
            VAEncPackedHeaderParameterBuffer *param = (VAEncPackedHeaderParameterBuffer *)obj_buffer->buffer_store->buffer;
            encode->last_packed_header_type = param->type;

            vaStatus = i965_encoder_render_packed_header_parameter_buffer(ctx,
                                                                          va_enc_packed_type_to_idx(encode->last_packed_header_type));

        case VAEncPackedHeaderDataBufferType:
            struct encode_state *encode = &obj_context->codec_state.encode;

            /* A data buffer must follow a valid packed-header parameter:
             * sequence, picture, slice, or a misc header sub-type. */
            assert(encode->last_packed_header_type == VAEncPackedHeaderSequence ||
                   encode->last_packed_header_type == VAEncPackedHeaderPicture ||
                   encode->last_packed_header_type == VAEncPackedHeaderSlice ||
                   (((encode->last_packed_header_type & VAEncPackedHeaderMiscMask) == VAEncPackedHeaderMiscMask) &&
                    ((encode->last_packed_header_type & (~VAEncPackedHeaderMiscMask)) != 0)));
            vaStatus = i965_encoder_render_packed_header_data_buffer(ctx,
                                                                     va_enc_packed_type_to_idx(encode->last_packed_header_type));

        case VAEncMiscParameterBufferType:
            vaStatus = i965_encoder_render_misc_parameter_buffer(ctx,

            vaStatus = VA_STATUS_ERROR_UNSUPPORTED_BUFFERTYPE;
/* Video-processing-side instantiation of the render-buffer helpers. */
#define I965_RENDER_PROC_BUFFER(name) I965_RENDER_BUFFER(proc, name)

#define DEF_RENDER_PROC_SINGLE_BUFFER_FUNC(name, member) DEF_RENDER_SINGLE_BUFFER_FUNC(proc, name, member)
DEF_RENDER_PROC_SINGLE_BUFFER_FUNC(pipeline_parameter, pipeline_param)
/*
 * VPP path of vaRenderPicture: only pipeline-parameter buffers are
 * accepted; processing stops at the first error.
 */
i965_proc_render_picture(VADriverContextP ctx,
                         VAContextID context,
                         VABufferID *buffers,
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    struct object_context *obj_context = CONTEXT(context);
    VAStatus vaStatus = VA_STATUS_SUCCESS;

    assert(obj_context);
        return VA_STATUS_ERROR_INVALID_CONTEXT;

    for (i = 0; i < num_buffers && vaStatus == VA_STATUS_SUCCESS; i++) {
        struct object_buffer *obj_buffer = BUFFER(buffers[i]);

            return VA_STATUS_ERROR_INVALID_BUFFER;

        switch (obj_buffer->type) {
        case VAProcPipelineParameterBufferType:
            vaStatus = I965_RENDER_PROC_BUFFER(pipeline_parameter);

            vaStatus = VA_STATUS_ERROR_UNSUPPORTED_BUFFERTYPE;
/*
 * vaRenderPicture: route the buffer list to the proc, encode, or decode
 * renderer based on the context's configured entrypoint.
 */
i965_RenderPicture(VADriverContextP ctx,
                   VAContextID context,
                   VABufferID *buffers,
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    struct object_context *obj_context;
    struct object_config *obj_config;
    VAStatus vaStatus = VA_STATUS_ERROR_UNKNOWN;

    obj_context = CONTEXT(context);
    assert(obj_context);
        return VA_STATUS_ERROR_INVALID_CONTEXT;

    obj_config = obj_context->obj_config;

    /* Entrypoint selects the pipeline; decode is the fallback. */
    if (VAEntrypointVideoProc == obj_config->entrypoint) {
        vaStatus = i965_proc_render_picture(ctx, context, buffers, num_buffers);
    } else if (VAEntrypointEncSlice == obj_config->entrypoint ) {
        vaStatus = i965_encoder_render_picture(ctx, context, buffers, num_buffers);
        vaStatus = i965_decoder_render_picture(ctx, context, buffers, num_buffers);
/*
 * vaEndPicture: sanity-check that the frame received the mandatory
 * buffers for its pipeline, then hand the accumulated codec state to the
 * hardware context's run() hook, which submits the actual GPU work.
 */
i965_EndPicture(VADriverContextP ctx, VAContextID context)
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    struct object_context *obj_context = CONTEXT(context);
    struct object_config *obj_config;

    assert(obj_context);
        return VA_STATUS_ERROR_INVALID_CONTEXT;

    obj_config = obj_context->obj_config;

    if (obj_context->codec_type == CODEC_PROC) {
        assert(VAEntrypointVideoProc == obj_config->entrypoint);
    } else if (obj_context->codec_type == CODEC_ENC) {
        assert(VAEntrypointEncSlice == obj_config->entrypoint);

        /* An encode frame needs picture + sequence params and >= 1 slice. */
        assert(obj_context->codec_state.encode.pic_param ||
               obj_context->codec_state.encode.pic_param_ext);
        assert(obj_context->codec_state.encode.seq_param ||
               obj_context->codec_state.encode.seq_param_ext);
        assert(obj_context->codec_state.encode.num_slice_params >= 1 ||
               obj_context->codec_state.encode.num_slice_params_ext >= 1);
        /* A decode frame needs picture params and matched slice
         * parameter / slice data pairs. */
        assert(obj_context->codec_state.decode.pic_param);
        assert(obj_context->codec_state.decode.num_slice_params >= 1);
        assert(obj_context->codec_state.decode.num_slice_datas >= 1);
        assert(obj_context->codec_state.decode.num_slice_params == obj_context->codec_state.decode.num_slice_datas);

    assert(obj_context->hw_context->run);
    return obj_context->hw_context->run(ctx, obj_config->profile, &obj_context->codec_state, obj_context->hw_context);
/*
 * vaSyncSurface: block until all GPU rendering targeting the surface's
 * bo has completed.
 */
i965_SyncSurface(VADriverContextP ctx,
                 VASurfaceID render_target)
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    struct object_surface *obj_surface = SURFACE(render_target);

    assert(obj_surface);

        drm_intel_bo_wait_rendering(obj_surface->bo);

    return VA_STATUS_SUCCESS;
/*
 * vaQuerySurfaceStatus: non-blocking check — the surface is "rendering"
 * while its bo is still busy on the GPU, otherwise "ready".  A surface
 * with no bo yet is trivially ready.
 */
i965_QuerySurfaceStatus(VADriverContextP ctx,
                        VASurfaceID render_target,
                        VASurfaceStatus *status) /* out */
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    struct object_surface *obj_surface = SURFACE(render_target);

    assert(obj_surface);

    if (obj_surface->bo) {
        if (drm_intel_bo_busy(obj_surface->bo)){
            *status = VASurfaceRendering;
            *status = VASurfaceReady;
        *status = VASurfaceReady;

    return VA_STATUS_SUCCESS;
/*
 * Linear lookup of a display attribute by type in the driver's attribute
 * table; returns NULL when the table is absent (elided return path) or
 * the type is not present.
 */
static VADisplayAttribute *
get_display_attribute(VADriverContextP ctx, VADisplayAttribType type)
    struct i965_driver_data * const i965 = i965_driver_data(ctx);

    if (!i965->display_attributes)

    for (i = 0; i < i965->num_display_attributes; i++) {
        if (i965->display_attributes[i].type == type)
            return &i965->display_attributes[i];
/*
 * Free the display-attribute table and reset the count so init can be
 * safely re-run.
 */
i965_display_attributes_terminate(VADriverContextP ctx)
    struct i965_driver_data * const i965 = i965_driver_data(ctx);

    if (i965->display_attributes) {
        free(i965->display_attributes);
        i965->display_attributes = NULL;
        i965->num_display_attributes = 0;
/*
 * Allocate the driver's mutable copy of the static display-attribute
 * table and cache a pointer to the rotation attribute.  On any failure
 * the table is torn down again via the error path at the bottom.
 */
i965_display_attributes_init(VADriverContextP ctx)
    struct i965_driver_data * const i965 = i965_driver_data(ctx);

    i965->num_display_attributes = ARRAY_ELEMS(i965_display_attributes);
    i965->display_attributes = malloc(
        i965->num_display_attributes * sizeof(i965->display_attributes[0]));
    if (!i965->display_attributes)

    /* Seed the mutable table from the static defaults. */
        i965->display_attributes,
        i965_display_attributes,
        sizeof(i965_display_attributes)

    /* Cache the rotation attribute; it must exist in the table. */
    i965->rotation_attrib = get_display_attribute(ctx, VADisplayAttribRotation);
    if (!i965->rotation_attrib) {

    /* Error path: release the partially-built table. */
    i965_display_attributes_terminate(ctx);
2179 * Query display attributes
 * The caller must provide an "attr_list" array that can hold at
2181 * least vaMaxNumDisplayAttributes() entries. The actual number of attributes
2182 * returned in "attr_list" is returned in "num_attributes".
/*
 * vaQueryDisplayAttributes: copy the full static attribute table to the
 * caller and report how many entries it holds.  Both output pointers are
 * optional.
 */
i965_QueryDisplayAttributes(
    VADriverContextP ctx,
    VADisplayAttribute *attribs, /* out */
    int *num_attribs_ptr /* out */
    const int num_attribs = ARRAY_ELEMS(i965_display_attributes);

    if (attribs && num_attribs > 0)
        memcpy(attribs, i965_display_attributes, sizeof(i965_display_attributes));

    if (num_attribs_ptr)
        *num_attribs_ptr = num_attribs;

    return VA_STATUS_SUCCESS;
2203 * Get display attributes
2204 * This function returns the current attribute values in "attr_list".
2205 * Only attributes returned with VA_DISPLAY_ATTRIB_GETTABLE set in the "flags" field
2206 * from vaQueryDisplayAttributes() can have their values retrieved.
/*
 * vaGetDisplayAttributes: fill in current value and range for each
 * requested attribute; attributes that are unknown or not gettable are
 * flagged VA_DISPLAY_ATTRIB_NOT_SUPPORTED instead of failing the call.
 */
i965_GetDisplayAttributes(
    VADriverContextP ctx,
    VADisplayAttribute *attribs, /* inout */
    int num_attribs /* in */

    for (i = 0; i < num_attribs; i++) {
        VADisplayAttribute *src_attrib, * const dst_attrib = &attribs[i];

        src_attrib = get_display_attribute(ctx, dst_attrib->type);
        if (src_attrib && (src_attrib->flags & VA_DISPLAY_ATTRIB_GETTABLE)) {
            dst_attrib->min_value = src_attrib->min_value;
            dst_attrib->max_value = src_attrib->max_value;
            dst_attrib->value = src_attrib->value;
            /* Unknown or write-only attribute: mark per-entry, don't fail. */
            dst_attrib->flags = VA_DISPLAY_ATTRIB_NOT_SUPPORTED;

    return VA_STATUS_SUCCESS;
2233 * Set display attributes
2234 * Only attributes returned with VA_DISPLAY_ATTRIB_SETTABLE set in the "flags" field
2235 * from vaQueryDisplayAttributes() can be set. If the attribute is not settable or
2236 * the value is out of range, the function returns VA_STATUS_ERROR_ATTR_NOT_SUPPORTED
/*
 * vaSetDisplayAttributes: validate each attribute (known, settable, value
 * in range) and store the new value; unlike Get, any invalid entry fails
 * the whole call.
 */
i965_SetDisplayAttributes(
    VADriverContextP ctx,
    VADisplayAttribute *attribs, /* in */
    int num_attribs /* in */

    for (i = 0; i < num_attribs; i++) {
        VADisplayAttribute *dst_attrib, * const src_attrib = &attribs[i];

        dst_attrib = get_display_attribute(ctx, src_attrib->type);
            return VA_STATUS_ERROR_ATTR_NOT_SUPPORTED;

        if (!(dst_attrib->flags & VA_DISPLAY_ATTRIB_SETTABLE))

        if (src_attrib->value < dst_attrib->min_value ||
            src_attrib->value > dst_attrib->max_value)
            return VA_STATUS_ERROR_INVALID_PARAMETER;

        dst_attrib->value = src_attrib->value;
        /* XXX: track modified attributes through timestamps */

    return VA_STATUS_SUCCESS;
/*
 * vaDbgCopySurfaceToBuffer: debug hook, intentionally not implemented in
 * this driver.
 */
i965_DbgCopySurfaceToBuffer(VADriverContextP ctx,
                            VASurfaceID surface,
                            void **buffer, /* out */
                            unsigned int *stride) /* out */

    return VA_STATUS_ERROR_UNIMPLEMENTED;
/*
 * Drain an object heap: walk every live object, invoke the supplied
 * per-object destructor (the iteration body is elided in this chunk),
 * then destroy the heap itself.
 */
i965_destroy_heap(struct object_heap *heap,
                  void (*func)(struct object_heap *heap, struct object_base *object))
    struct object_base *object;
    object_heap_iterator iter;

    object = object_heap_first(heap, &iter);

        object = object_heap_next(heap, &iter);

    object_heap_destroy(heap);
2298 i965_DestroyImage(VADriverContextP ctx, VAImageID image);
/* vaCreateImage(): create a standalone VAImage of the requested format.
 * Computes per-plane pitches/offsets, backs the pixels with a
 * VAImageBufferType buffer, and allocates a palette for indexed formats.
 * NOTE(review): the width/height parameter declarations fall on lines not
 * visible in this chunk; the layout math below reads them directly. */
2301 i965_CreateImage(VADriverContextP ctx,
2302 VAImageFormat *format,
2305 VAImage *out_image) /* out */
2307 struct i965_driver_data *i965 = i965_driver_data(ctx);
2308 struct object_image *obj_image;
2309 VAStatus va_status = VA_STATUS_ERROR_OPERATION_FAILED;
2311 unsigned int width2, height2, size2, size;
/* Initialize the out parameter so callers see invalid IDs on failure. */
2313 out_image->image_id = VA_INVALID_ID;
2314 out_image->buf = VA_INVALID_ID;
2316 image_id = NEW_IMAGE_ID();
2317 if (image_id == VA_INVALID_ID)
2318 return VA_STATUS_ERROR_ALLOCATION_FAILED;
2320 obj_image = IMAGE(image_id);
2322 return VA_STATUS_ERROR_ALLOCATION_FAILED;
2323 obj_image->bo = NULL;
2324 obj_image->palette = NULL;
2325 obj_image->derived_surface = VA_INVALID_ID;
2327 VAImage * const image = &obj_image->image;
2328 image->image_id = image_id;
2329 image->buf = VA_INVALID_ID;
/* Luma plane byte count, and chroma dimensions rounded up for odd sizes. */
2331 size = width * height;
2332 width2 = (width + 1) / 2;
2333 height2 = (height + 1) / 2;
2334 size2 = width2 * height2;
2336 image->num_palette_entries = 0;
2337 image->entry_bytes = 0;
2338 memset(image->component_order, 0, sizeof(image->component_order));
/* Fill in the plane layout for each supported fourcc. */
2340 switch (format->fourcc) {
/* 8bpp index+alpha formats: one plane, 16-entry 3-byte RGB palette. */
2341 case VA_FOURCC('I','A','4','4'):
2342 case VA_FOURCC('A','I','4','4'):
2343 image->num_planes = 1;
2344 image->pitches[0] = width;
2345 image->offsets[0] = 0;
2346 image->data_size = image->offsets[0] + image->pitches[0] * height;
2347 image->num_palette_entries = 16;
2348 image->entry_bytes = 3;
2349 image->component_order[0] = 'R';
2350 image->component_order[1] = 'G';
2351 image->component_order[2] = 'B';
/* 16bpp index+alpha formats: one plane, 256-entry RGB palette. */
2353 case VA_FOURCC('I','A','8','8'):
2354 case VA_FOURCC('A','I','8','8'):
2355 image->num_planes = 1;
2356 image->pitches[0] = width * 2;
2357 image->offsets[0] = 0;
2358 image->data_size = image->offsets[0] + image->pitches[0] * height;
2359 image->num_palette_entries = 256;
2360 image->entry_bytes = 3;
2361 image->component_order[0] = 'R';
2362 image->component_order[1] = 'G';
2363 image->component_order[2] = 'B';
/* Packed 32bpp RGB variants: one plane, 4 bytes per pixel. */
2365 case VA_FOURCC('B','G','R','A'):
2366 case VA_FOURCC('R','G','B','A'):
2367 case VA_FOURCC('B','G','R','X'):
2368 case VA_FOURCC('R','G','B','X'):
2369 image->num_planes = 1;
2370 image->pitches[0] = width * 4;
2371 image->offsets[0] = 0;
2372 image->data_size = image->offsets[0] + image->pitches[0] * height;
/* YV12: planar 4:2:0; note plane 1's data sits after plane 2 in memory
 * (chroma plane order is swapped relative to I420). */
2374 case VA_FOURCC('Y','V','1','2'):
2375 image->num_planes = 3;
2376 image->pitches[0] = width;
2377 image->offsets[0] = 0;
2378 image->pitches[1] = width2;
2379 image->offsets[1] = size + size2;
2380 image->pitches[2] = width2;
2381 image->offsets[2] = size;
2382 image->data_size = size + 2 * size2;
/* I420: planar 4:2:0, chroma planes in ascending memory order. */
2384 case VA_FOURCC('I','4','2','0'):
2385 image->num_planes = 3;
2386 image->pitches[0] = width;
2387 image->offsets[0] = 0;
2388 image->pitches[1] = width2;
2389 image->offsets[1] = size;
2390 image->pitches[2] = width2;
2391 image->offsets[2] = size + size2;
2392 image->data_size = size + 2 * size2;
/* NV12: luma plane plus one interleaved UV plane at full pitch. */
2394 case VA_FOURCC('N','V','1','2'):
2395 image->num_planes = 2;
2396 image->pitches[0] = width;
2397 image->offsets[0] = 0;
2398 image->pitches[1] = width;
2399 image->offsets[1] = size;
2400 image->data_size = size + 2 * size2;
/* Packed 4:2:2 formats: single plane, 2 bytes per pixel. */
2402 case VA_FOURCC('Y','U','Y','2'):
2403 case VA_FOURCC('U','Y','V','Y'):
2404 image->num_planes = 1;
2405 image->pitches[0] = width * 2;
2406 image->offsets[0] = 0;
2407 image->data_size = size * 2;
/* Back the pixels with a VA buffer the client can map via vaMapBuffer(). */
2413 va_status = i965_CreateBuffer(ctx, 0, VAImageBufferType,
2414 image->data_size, 1, NULL, &image->buf);
2415 if (va_status != VA_STATUS_SUCCESS)
2418 struct object_buffer *obj_buffer = BUFFER(image->buf);
2421 !obj_buffer->buffer_store ||
2422 !obj_buffer->buffer_store->bo)
2423 return VA_STATUS_ERROR_ALLOCATION_FAILED;
/* Hold a reference on the buffer's bo for the image's lifetime. */
2425 obj_image->bo = obj_buffer->buffer_store->bo;
2426 dri_bo_reference(obj_image->bo);
/* Indexed formats get a CPU-side palette, filled by vaSetImagePalette(). */
2428 if (image->num_palette_entries > 0 && image->entry_bytes > 0) {
2429 obj_image->palette = malloc(image->num_palette_entries * sizeof(*obj_image->palette));
2430 if (!obj_image->palette)
2434 image->image_id = image_id;
2435 image->format = *format;
2436 image->width = width;
2437 image->height = height;
/* Success: copy the completed descriptor out to the caller. */
2439 *out_image = *image;
2440 return VA_STATUS_SUCCESS;
/* Error path: tear down the partially constructed image. */
2443 i965_DestroyImage(ctx, image_id);
/* Lazily allocate (or validate) the GEM buffer object backing a surface
 * for the given fourcc + chroma subsampling, and derive its geometry:
 * aligned width/height, chroma plane offsets/pitch, and total size.
 * NOTE(review): a 'tiled' selector parameter falls on a line not visible
 * in this chunk; the 128x32-aligned branch below appears to be the tiled
 * path and the later switch the linear path — confirm against upstream. */
2448 i965_check_alloc_surface_bo(VADriverContextP ctx,
2449 struct object_surface *obj_surface,
2451 unsigned int fourcc,
2452 unsigned int subsampling)
2454 struct i965_driver_data *i965 = i965_driver_data(ctx);
2455 int region_width, region_height;
/* Already allocated: only assert that format/subsampling still agree. */
2457 if (obj_surface->bo) {
2458 assert(obj_surface->fourcc);
2459 assert(obj_surface->fourcc == fourcc);
2460 assert(obj_surface->subsampling == subsampling);
2464 obj_surface->x_cb_offset = 0; /* X offset is always 0 */
2465 obj_surface->x_cr_offset = 0;
/* Tiled path supports only this fourcc subset. */
2468 assert(fourcc == VA_FOURCC('N', 'V', '1', '2') ||
2469 fourcc == VA_FOURCC('I', 'M', 'C', '1') ||
2470 fourcc == VA_FOURCC('I', 'M', 'C', '3') ||
2471 fourcc == VA_FOURCC('R', 'G', 'B', 'A') ||
2472 fourcc == VA_FOURCC('R', 'G', 'B', 'X') ||
2473 fourcc == VA_FOURCC('B', 'G', 'R', 'A') ||
2474 fourcc == VA_FOURCC('B', 'G', 'R', 'X') ||
2475 fourcc == VA_FOURCC('Y', 'U', 'Y', '2'));
/* Y-tiling requires 128-byte x 32-row alignment of the surface. */
2477 obj_surface->width = ALIGN(obj_surface->orig_width, 128);
2478 obj_surface->height = ALIGN(obj_surface->orig_height, 32);
2479 region_height = obj_surface->height;
/* Bytes-per-row of the allocation depends on the pixel packing. */
2481 if (fourcc == VA_FOURCC('N', 'V', '1', '2') ||
2482 fourcc == VA_FOURCC('I', 'M', 'C', '1') ||
2483 fourcc == VA_FOURCC('I', 'M', 'C', '3')) {
2484 obj_surface->cb_cr_pitch = obj_surface->width;
2485 region_width = obj_surface->width;
2487 else if (fourcc == VA_FOURCC('Y','U', 'Y', '2')) {
2488 obj_surface->cb_cr_pitch = obj_surface->width * 2;
2489 region_width = obj_surface->width * 2;
2491 else if (fourcc == VA_FOURCC('R', 'G', 'B', 'A') ||
2492 fourcc == VA_FOURCC('R', 'G', 'B', 'X') ||
2493 fourcc == VA_FOURCC('B', 'G', 'R', 'A') ||
2494 fourcc == VA_FOURCC('B', 'G', 'R', 'X')) {
2495 region_width = obj_surface->width * 4;
/* NV12: interleaved UV plane immediately after the aligned luma plane. */
2502 if (fourcc == VA_FOURCC('N', 'V', '1', '2')) {
2503 assert(subsampling == SUBSAMPLE_YUV420);
2504 obj_surface->y_cb_offset = obj_surface->height;
2505 obj_surface->y_cr_offset = obj_surface->height;
2506 obj_surface->cb_cr_width = obj_surface->orig_width / 2;
2507 obj_surface->cb_cr_height = obj_surface->orig_height / 2;
2508 region_height = obj_surface->height + ALIGN(obj_surface->cb_cr_height, 32);
/* IMC1/IMC3/RGB/YUY2: chroma dimensions depend on the subsampling code. */
2509 } else if (fourcc == VA_FOURCC('I', 'M', 'C', '1') ||
2510 fourcc == VA_FOURCC('I', 'M', 'C', '3') ||
2511 fourcc == VA_FOURCC('R', 'G', 'B', 'A') ||
2512 fourcc == VA_FOURCC('R', 'G', 'B', 'X') ||
2513 fourcc == VA_FOURCC('B', 'G', 'R', 'A') ||
2514 fourcc == VA_FOURCC('B', 'G', 'R', 'X') ||
2515 fourcc == VA_FOURCC('Y', 'U', 'Y', '2')) {
2516 switch (subsampling) {
2517 case SUBSAMPLE_YUV400:
2518 obj_surface->cb_cr_width = 0;
2519 obj_surface->cb_cr_height = 0;
2522 case SUBSAMPLE_YUV420:
2523 obj_surface->cb_cr_width = obj_surface->orig_width / 2;
2524 obj_surface->cb_cr_height = obj_surface->orig_height / 2;
2527 case SUBSAMPLE_YUV422H:
2528 obj_surface->cb_cr_width = obj_surface->orig_width / 2;
2529 obj_surface->cb_cr_height = obj_surface->orig_height;
2532 case SUBSAMPLE_YUV422V:
2533 obj_surface->cb_cr_width = obj_surface->orig_width;
2534 obj_surface->cb_cr_height = obj_surface->orig_height / 2;
2537 case SUBSAMPLE_YUV444:
2538 obj_surface->cb_cr_width = obj_surface->orig_width;
2539 obj_surface->cb_cr_height = obj_surface->orig_height;
2542 case SUBSAMPLE_YUV411:
2543 obj_surface->cb_cr_width = obj_surface->orig_width / 4;
2544 obj_surface->cb_cr_height = obj_surface->orig_height;
2546 case SUBSAMPLE_RGBX:
/* Two full chroma planes (each 32-row aligned) follow the luma plane. */
2553 region_height = obj_surface->height + ALIGN(obj_surface->cb_cr_height, 32) * 2;
/* IMC1 stores V before U; IMC3 stores U before V. */
2555 if (fourcc == VA_FOURCC('I', 'M', 'C', '1')) {
2556 obj_surface->y_cr_offset = obj_surface->height;
2557 obj_surface->y_cb_offset = obj_surface->y_cr_offset + ALIGN(obj_surface->cb_cr_height, 32);
2558 } else if (fourcc == VA_FOURCC('I', 'M', 'C', '3')){
2559 obj_surface->y_cb_offset = obj_surface->height;
2560 obj_surface->y_cr_offset = obj_surface->y_cb_offset + ALIGN(obj_surface->cb_cr_height, 32);
/* Packed formats carry chroma inline, so no extra plane rows. */
2562 else if (fourcc == VA_FOURCC('Y','U', 'Y', '2')) {
2563 obj_surface->y_cb_offset = 0;
2564 obj_surface->y_cr_offset = 0;
2565 region_height = obj_surface->height;
2567 else if (fourcc == VA_FOURCC('R', 'G', 'B', 'A') ||
2568 fourcc == VA_FOURCC('R', 'G', 'B', 'X') ||
2569 fourcc == VA_FOURCC('B', 'G', 'R', 'A') ||
2570 fourcc == VA_FOURCC('B', 'G', 'R', 'X')) {
2571 region_height = obj_surface->height;
/* Linear (untiled) path: IMC1/IMC3 not supported here. */
2575 assert(fourcc != VA_FOURCC('I', 'M', 'C', '1') &&
2576 fourcc != VA_FOURCC('I', 'M', 'C', '3'));
2577 assert(subsampling == SUBSAMPLE_YUV420 ||
2578 subsampling == SUBSAMPLE_YUV422H ||
2579 subsampling == SUBSAMPLE_YUV422V ||
2580 subsampling == SUBSAMPLE_RGBX);
2582 region_width = obj_surface->width;
2583 region_height = obj_surface->height;
/* Linear per-fourcc layout: chroma packed tightly after the luma plane. */
2586 case VA_FOURCC('N', 'V', '1', '2'):
2587 obj_surface->y_cb_offset = obj_surface->height;
2588 obj_surface->y_cr_offset = obj_surface->height;
2589 obj_surface->cb_cr_width = obj_surface->orig_width / 2;
2590 obj_surface->cb_cr_height = obj_surface->orig_height / 2;
2591 obj_surface->cb_cr_pitch = obj_surface->width;
2592 region_height = obj_surface->height + obj_surface->height / 2;
/* YV12 and I420 differ only in which chroma plane comes first. */
2595 case VA_FOURCC('Y', 'V', '1', '2'):
2596 case VA_FOURCC('I', '4', '2', '0'):
2597 if (fourcc == VA_FOURCC('Y', 'V', '1', '2')) {
2598 obj_surface->y_cr_offset = obj_surface->height;
2599 obj_surface->y_cb_offset = obj_surface->height + obj_surface->height / 4;
2601 obj_surface->y_cb_offset = obj_surface->height;
2602 obj_surface->y_cr_offset = obj_surface->height + obj_surface->height / 4;
2605 obj_surface->cb_cr_width = obj_surface->orig_width / 2;
2606 obj_surface->cb_cr_height = obj_surface->orig_height / 2;
2607 obj_surface->cb_cr_pitch = obj_surface->width / 2;
2608 region_height = obj_surface->height + obj_surface->height / 2;
2611 case VA_FOURCC('Y','U', 'Y', '2'):
2612 obj_surface->y_cb_offset = 0;
2613 obj_surface->y_cr_offset = 0;
2614 obj_surface->cb_cr_width = obj_surface->orig_width / 2;
2615 obj_surface->cb_cr_height = obj_surface->orig_height;
2616 obj_surface->cb_cr_pitch = obj_surface->width * 2;
2617 region_width = obj_surface->width * 2;
2618 region_height = obj_surface->height;
2620 case VA_FOURCC('R', 'G', 'B', 'A'):
2621 case VA_FOURCC('R', 'G', 'B', 'X'):
2622 case VA_FOURCC('B', 'G', 'R', 'A'):
2623 case VA_FOURCC('B', 'G', 'R', 'X'):
2624 region_width = obj_surface->width * 4;
2625 region_height = obj_surface->height;
/* Total allocation, rounded up to a 4KiB page. */
2634 obj_surface->size = ALIGN(region_width * region_height, 0x1000);
2637 uint32_t tiling_mode = I915_TILING_Y; /* always uses Y-tiled format */
2638 unsigned long pitch;
/* Tiled allocation: let libdrm pick the fenceable pitch, then verify
 * it matches the pitch assumed by the layout math above. */
2640 obj_surface->bo = drm_intel_bo_alloc_tiled(i965->intel.bufmgr,
2648 assert(tiling_mode == I915_TILING_Y);
2649 assert(pitch == obj_surface->width ||
2650 pitch == obj_surface->width * 2 ||
2651 pitch == obj_surface->width * 4) ;
/* Linear allocation. */
2653 obj_surface->bo = dri_bo_alloc(i965->intel.bufmgr,
2659 obj_surface->fourcc = fourcc;
2660 obj_surface->subsampling = subsampling;
2661 assert(obj_surface->bo);
/* vaDeriveImage(): expose a surface's own bo as a VAImage so clients can
 * map the pixels in place (zero-copy).  Allocates the surface bo first if
 * decoding has not created it yet, then wraps it in a buffer object. */
2664 VAStatus i965_DeriveImage(VADriverContextP ctx,
2665 VASurfaceID surface,
2666 VAImage *out_image) /* out */
2668 struct i965_driver_data *i965 = i965_driver_data(ctx);
2669 struct object_image *obj_image;
2670 struct object_surface *obj_surface;
2672 unsigned int w_pitch;
2673 VAStatus va_status = VA_STATUS_ERROR_OPERATION_FAILED;
2675 out_image->image_id = VA_INVALID_ID;
2676 obj_surface = SURFACE(surface);
2679 return VA_STATUS_ERROR_INVALID_SURFACE;
/* Surface never rendered to: guess a format (default YV12) and allocate
 * its backing store now so there is something to derive. */
2681 if (!obj_surface->bo) {
2682 unsigned int is_tiled = 0;
2683 unsigned int fourcc = VA_FOURCC('Y', 'V', '1', '2');
2684 i965_guess_surface_format(ctx, surface, &fourcc, &is_tiled);
2685 int sampling = get_sampling_from_fourcc(fourcc);
2686 i965_check_alloc_surface_bo(ctx, obj_surface, is_tiled, fourcc, sampling);
2689 assert(obj_surface->fourcc);
/* Aligned surface width doubles as the luma pitch. */
2691 w_pitch = obj_surface->width;
2693 image_id = NEW_IMAGE_ID();
2695 if (image_id == VA_INVALID_ID)
2696 return VA_STATUS_ERROR_ALLOCATION_FAILED;
2698 obj_image = IMAGE(image_id);
2701 return VA_STATUS_ERROR_ALLOCATION_FAILED;
2703 obj_image->bo = NULL;
2704 obj_image->palette = NULL;
2705 obj_image->derived_surface = VA_INVALID_ID;
2707 VAImage * const image = &obj_image->image;
2709 memset(image, 0, sizeof(*image));
2710 image->image_id = image_id;
2711 image->buf = VA_INVALID_ID;
2712 image->num_palette_entries = 0;
2713 image->entry_bytes = 0;
2714 image->width = obj_surface->orig_width;
2715 image->height = obj_surface->orig_height;
2716 image->data_size = obj_surface->size;
/* The derived image inherits the surface's fourcc.
 * NOTE(review): bits_per_pixel is hard-coded to 12 (correct for 4:2:0)
 * even for YUY2/RGB fourccs handled below — verify against upstream. */
2718 image->format.fourcc = obj_surface->fourcc;
2719 image->format.byte_order = VA_LSB_FIRST;
2720 image->format.bits_per_pixel = 12;
/* Plane pitches/offsets mirror the layout chosen at bo allocation time. */
2722 switch (image->format.fourcc) {
2723 case VA_FOURCC('Y', 'V', '1', '2'):
2724 image->num_planes = 3;
2725 image->pitches[0] = w_pitch; /* Y */
2726 image->offsets[0] = 0;
2727 image->pitches[1] = obj_surface->cb_cr_pitch; /* V */
2728 image->offsets[1] = w_pitch * obj_surface->y_cr_offset;
2729 image->pitches[2] = obj_surface->cb_cr_pitch; /* U */
2730 image->offsets[2] = w_pitch * obj_surface->y_cb_offset;
2733 case VA_FOURCC('N', 'V', '1', '2'):
2734 image->num_planes = 2;
2735 image->pitches[0] = w_pitch; /* Y */
2736 image->offsets[0] = 0;
2737 image->pitches[1] = obj_surface->cb_cr_pitch; /* UV */
2738 image->offsets[1] = w_pitch * obj_surface->y_cb_offset;
2741 case VA_FOURCC('I', '4', '2', '0'):
2742 image->num_planes = 3;
2743 image->pitches[0] = w_pitch; /* Y */
2744 image->offsets[0] = 0;
2745 image->pitches[1] = obj_surface->cb_cr_pitch; /* U */
2746 image->offsets[1] = w_pitch * obj_surface->y_cb_offset;
2747 image->pitches[2] = obj_surface->cb_cr_pitch; /* V */
2748 image->offsets[2] = w_pitch * obj_surface->y_cr_offset;
2750 case VA_FOURCC('Y', 'U', 'Y', '2'):
2751 image->num_planes = 1;
2752 image->pitches[0] = obj_surface->width * 2; /* Y, width is aligned already */
2753 image->offsets[0] = 0;
2754 image->pitches[1] = obj_surface->width * 2; /* U */
2755 image->offsets[1] = 0;
2756 image->pitches[2] = obj_surface->width * 2; /* V */
2757 image->offsets[2] = 0;
2759 case VA_FOURCC('R', 'G', 'B', 'A'):
2760 case VA_FOURCC('R', 'G', 'B', 'X'):
2761 case VA_FOURCC('B', 'G', 'R', 'A'):
2762 case VA_FOURCC('B', 'G', 'R', 'X'):
2763 image->num_planes = 1;
2764 image->pitches[0] = obj_surface->width * 4;
/* Wrap the surface's existing bo in a VA buffer — no pixel copy. */
2770 va_status = i965_create_buffer_internal(ctx, 0, VAImageBufferType,
2771 obj_surface->size, 1, NULL, obj_surface->bo, &image->buf);
2772 if (va_status != VA_STATUS_SUCCESS)
2775 struct object_buffer *obj_buffer = BUFFER(image->buf);
2778 !obj_buffer->buffer_store ||
2779 !obj_buffer->buffer_store->bo)
2780 return VA_STATUS_ERROR_ALLOCATION_FAILED;
2782 obj_image->bo = obj_buffer->buffer_store->bo;
2783 dri_bo_reference(obj_image->bo);
2785 if (image->num_palette_entries > 0 && image->entry_bytes > 0) {
2786 obj_image->palette = malloc(image->num_palette_entries * sizeof(*obj_image->palette));
2787 if (!obj_image->palette) {
2788 va_status = VA_STATUS_ERROR_ALLOCATION_FAILED;
/* Success: link image and surface so teardown can clear the flag. */
2793 *out_image = *image;
2794 obj_surface->flags |= SURFACE_DERIVED;
2795 obj_image->derived_surface = surface;
2797 return VA_STATUS_SUCCESS;
/* Error path: tear down the partially constructed image. */
2800 i965_DestroyImage(ctx, image_id);
/* Heap destructor callback for image objects: returns the object's slot
 * to the image heap (per-image resources are freed by i965_DestroyImage). */
2805 i965_destroy_image(struct object_heap *heap, struct object_base *obj)
2807 object_heap_free(heap, obj);
/* vaDestroyImage(): drop the bo reference, destroy the backing VA buffer
 * and palette, unlink any derived surface, then free the heap object. */
2812 i965_DestroyImage(VADriverContextP ctx, VAImageID image)
2814 struct i965_driver_data *i965 = i965_driver_data(ctx);
2815 struct object_image *obj_image = IMAGE(image);
2816 struct object_surface *obj_surface;
/* An unknown or already-destroyed image id is treated as success. */
2819 return VA_STATUS_SUCCESS;
2821 dri_bo_unreference(obj_image->bo);
2822 obj_image->bo = NULL;
2824 if (obj_image->image.buf != VA_INVALID_ID) {
2825 i965_DestroyBuffer(ctx, obj_image->image.buf);
2826 obj_image->image.buf = VA_INVALID_ID;
2829 if (obj_image->palette) {
2830 free(obj_image->palette);
2831 obj_image->palette = NULL;
/* If this image was created via vaDeriveImage(), clear the surface's
 * "derived" flag so it can be derived again later. */
2834 obj_surface = SURFACE(obj_image->derived_surface);
2837 obj_surface->flags &= ~SURFACE_DERIVED;
2840 i965_destroy_image(&i965->image_heap, (struct object_base *)obj_image);
2842 return VA_STATUS_SUCCESS;
2846 * pointer to an array holding the palette data. The size of the array is
2847 * num_palette_entries * entry_bytes in size. The order of the components
2848 * in the palette is described by the component_order in VASubpicture struct
/* vaSetImagePalette(): load a client-supplied indexed-color palette.
 * 'palette' holds num_palette_entries consecutive 3-byte entries, ordered
 * per the image's component_order (R,G,B for the formats created here). */
2851 i965_SetImagePalette(VADriverContextP ctx,
2853 unsigned char *palette)
2855 struct i965_driver_data *i965 = i965_driver_data(ctx);
2858 struct object_image *obj_image = IMAGE(image);
2860 return VA_STATUS_ERROR_INVALID_IMAGE;
/* No palette array means the image's format is not a paletted one. */
2862 if (!obj_image->palette)
2863 return VA_STATUS_ERROR_ALLOCATION_FAILED; /* XXX: unpaletted/error */
/* Pack each 3-byte entry into one 32-bit word with the first component
 * in bits 23:16, the second in 15:8, the third in 7:0. */
2865 for (i = 0; i < obj_image->image.num_palette_entries; i++)
2866 obj_image->palette[i] = (((unsigned int)palette[3*i + 0] << 16) |
2867 ((unsigned int)palette[3*i + 1] << 8) |
2868 (unsigned int)palette[3*i + 2]);
2869 return VA_STATUS_SUCCESS;
/* Map a pixel fourcc to the driver's SUBSAMPLE_* chroma-layout code.
 * surface_sampling stays -1 for fourccs not handled below. */
2873 get_sampling_from_fourcc(unsigned int fourcc)
2875 int surface_sampling = -1;
/* All 4:2:0 layouts, planar and semi-planar. */
2877 case VA_FOURCC('N', 'V', '1', '2'):
2878 case VA_FOURCC('Y', 'V', '1', '2'):
2879 case VA_FOURCC('I', '4', '2', '0'):
2880 case VA_FOURCC('I', 'M', 'C', '1'):
2881 case VA_FOURCC('I', 'M', 'C', '3'):
2882 surface_sampling = SUBSAMPLE_YUV420;
/* Packed 4:2:2, horizontally subsampled chroma. */
2884 case VA_FOURCC('Y', 'U', 'Y', '2'):
2885 surface_sampling = SUBSAMPLE_YUV422H;
/* Packed 32-bit RGB variants: no chroma subsampling. */
2887 case VA_FOURCC('R','G','B','A'):
2888 case VA_FOURCC('R','G','B','X'):
2889 case VA_FOURCC('B','G','R','A'):
2890 case VA_FOURCC('B','G','R','X'):
2891 surface_sampling = SUBSAMPLE_RGBX;
2896 return surface_sampling;
/*
 * Copy a rectangular block of pixels row by row.
 *
 * 'len' bytes are copied for each of 'height' rows; dst_stride and
 * src_stride give the byte distance between the starts of consecutive
 * rows in the destination and source, so differing pitches are handled.
 */
static void
memcpy_pic(uint8_t *dst, unsigned int dst_stride,
           const uint8_t *src, unsigned int src_stride,
           unsigned int len, unsigned int height)
{
    unsigned int row;

    for (row = 0; row < height; row++) {
        memcpy(dst, src, len);
        dst += dst_stride;
        src += src_stride;
    }
}
/* CPU read-back of a planar 4:2:0 surface into a mapped VAImage buffer.
 * The image may be I420 or YV12: when its fourcc differs from the
 * surface's, the chroma plane indices are swapped (U<->V). */
2914 get_image_i420(struct object_image *obj_image, uint8_t *image_data,
2915 struct object_surface *obj_surface,
2916 const VARectangle *rect)
2918 uint8_t *dst[3], *src[3];
2920 const int U = obj_image->image.format.fourcc == obj_surface->fourcc ? 1 : 2;
2921 const int V = obj_image->image.format.fourcc == obj_surface->fourcc ? 2 : 1;
2922 unsigned int tiling, swizzle;
2924 if (!obj_surface->bo)
2927 assert(obj_surface->fourcc);
2928 dri_bo_get_tiling(obj_surface->bo, &tiling, &swizzle);
/* Tiled bos must be read through a GTT mapping so the CPU sees
 * detiled, linear data; linear bos are mapped directly. */
2930 if (tiling != I915_TILING_NONE)
2931 drm_intel_gem_bo_map_gtt(obj_surface->bo);
2933 dri_bo_map(obj_surface->bo, 0);
2935 if (!obj_surface->bo->virtual)
2938 /* Dest VA image has either I420 or YV12 format.
2939 Source VA surface always has I420 plane order (Y, then U, then V). */
2940 dst[Y] = image_data + obj_image->image.offsets[Y];
2941 src[0] = (uint8_t *)obj_surface->bo->virtual;
2942 dst[U] = image_data + obj_image->image.offsets[U];
2943 src[1] = src[0] + obj_surface->width * obj_surface->height;
2944 dst[V] = image_data + obj_image->image.offsets[V];
2945 src[2] = src[1] + (obj_surface->width / 2) * (obj_surface->height / 2);
/* Y plane: full-resolution copy of the requested rectangle. */
2948 dst[Y] += rect->y * obj_image->image.pitches[Y] + rect->x;
2949 src[0] += rect->y * obj_surface->width + rect->x;
2950 memcpy_pic(dst[Y], obj_image->image.pitches[Y],
2951 src[0], obj_surface->width,
2952 rect->width, rect->height);
/* U plane at half resolution in both dimensions. */
2955 dst[U] += (rect->y / 2) * obj_image->image.pitches[U] + rect->x / 2;
2956 src[1] += (rect->y / 2) * obj_surface->width / 2 + rect->x / 2;
2957 memcpy_pic(dst[U], obj_image->image.pitches[U],
2958 src[1], obj_surface->width / 2,
2959 rect->width / 2, rect->height / 2);
/* V plane at half resolution in both dimensions. */
2962 dst[V] += (rect->y / 2) * obj_image->image.pitches[V] + rect->x / 2;
2963 src[2] += (rect->y / 2) * obj_surface->width / 2 + rect->x / 2;
2964 memcpy_pic(dst[V], obj_image->image.pitches[V],
2965 src[2], obj_surface->width / 2,
2966 rect->width / 2, rect->height / 2);
/* Undo whichever mapping was taken above. */
2968 if (tiling != I915_TILING_NONE)
2969 drm_intel_gem_bo_unmap_gtt(obj_surface->bo);
2971 dri_bo_unmap(obj_surface->bo);
/* CPU read-back of an NV12 surface (Y plane + interleaved UV plane)
 * into a mapped VAImage buffer of the same format. */
2975 get_image_nv12(struct object_image *obj_image, uint8_t *image_data,
2976 struct object_surface *obj_surface,
2977 const VARectangle *rect)
2979 uint8_t *dst[2], *src[2];
2980 unsigned int tiling, swizzle;
2982 if (!obj_surface->bo)
2985 assert(obj_surface->fourcc);
2986 dri_bo_get_tiling(obj_surface->bo, &tiling, &swizzle);
/* GTT-map tiled bos so reads see detiled data; map linear bos directly. */
2988 if (tiling != I915_TILING_NONE)
2989 drm_intel_gem_bo_map_gtt(obj_surface->bo);
2991 dri_bo_map(obj_surface->bo, 0);
2993 if (!obj_surface->bo->virtual)
2996 /* Both dest VA image and source surface have NV12 format */
2997 dst[0] = image_data + obj_image->image.offsets[0];
2998 src[0] = (uint8_t *)obj_surface->bo->virtual;
2999 dst[1] = image_data + obj_image->image.offsets[1];
3000 src[1] = src[0] + obj_surface->width * obj_surface->height;
/* Y plane: full-resolution copy of the requested rectangle. */
3003 dst[0] += rect->y * obj_image->image.pitches[0] + rect->x;
3004 src[0] += rect->y * obj_surface->width + rect->x;
3005 memcpy_pic(dst[0], obj_image->image.pitches[0],
3006 src[0], obj_surface->width,
3007 rect->width, rect->height);
/* UV plane: half height, full row width; x is rounded down to an even
 * byte offset (& -2) so U/V pairs stay aligned. */
3010 dst[1] += (rect->y / 2) * obj_image->image.pitches[1] + (rect->x & -2);
3011 src[1] += (rect->y / 2) * obj_surface->width + (rect->x & -2);
3012 memcpy_pic(dst[1], obj_image->image.pitches[1],
3013 src[1], obj_surface->width,
3014 rect->width, rect->height / 2);
3016 if (tiling != I915_TILING_NONE)
3017 drm_intel_gem_bo_unmap_gtt(obj_surface->bo);
3019 dri_bo_unmap(obj_surface->bo);
/* CPU read-back of a packed YUY2 surface (2 bytes per pixel, single
 * plane) into a mapped VAImage buffer of the same format. */
3023 get_image_yuy2(struct object_image *obj_image, uint8_t *image_data,
3024 struct object_surface *obj_surface,
3025 const VARectangle *rect)
3028 unsigned int tiling, swizzle;
3030 if (!obj_surface->bo)
3033 assert(obj_surface->fourcc);
3034 dri_bo_get_tiling(obj_surface->bo, &tiling, &swizzle);
/* GTT-map tiled bos so reads see detiled data; map linear bos directly. */
3036 if (tiling != I915_TILING_NONE)
3037 drm_intel_gem_bo_map_gtt(obj_surface->bo);
3039 dri_bo_map(obj_surface->bo, 0);
3041 if (!obj_surface->bo->virtual)
3044 /* Both dest VA image and source surface have YUYV format */
3045 dst = image_data + obj_image->image.offsets[0];
3046 src = (uint8_t *)obj_surface->bo->virtual;
/* Single packed plane: x offsets and widths are in bytes (2 per pixel).
 * NOTE(review): the source row advance uses obj_surface->width while the
 * per-row pitch below is width*2 — verify against the full file. */
3049 dst += rect->y * obj_image->image.pitches[0] + rect->x*2;
3050 src += rect->y * obj_surface->width + rect->x*2;
3051 memcpy_pic(dst, obj_image->image.pitches[0],
3052 src, obj_surface->width*2,
3053 rect->width*2, rect->height);
3055 if (tiling != I915_TILING_NONE)
3056 drm_intel_gem_bo_unmap_gtt(obj_surface->bo);
3058 dri_bo_unmap(obj_surface->bo);
/* vaGetImage() software path: validate ids and bounds, map the image's
 * VA buffer, then copy pixels out with the per-format CPU helper.  The
 * image fourcc must match the surface's exactly (no conversion). */
3062 i965_sw_getimage(VADriverContextP ctx,
3063 VASurfaceID surface,
3064 int x, /* coordinates of the upper left source pixel */
3066 unsigned int width, /* width and height of the region */
3067 unsigned int height,
3070 struct i965_driver_data *i965 = i965_driver_data(ctx);
3071 struct i965_render_state *render_state = &i965->render_state;
3073 struct object_surface *obj_surface = SURFACE(surface);
3075 return VA_STATUS_ERROR_INVALID_SURFACE;
3077 struct object_image *obj_image = IMAGE(image);
3079 return VA_STATUS_ERROR_INVALID_IMAGE;
/* The requested rectangle must lie inside both surface and image. */
3082 return VA_STATUS_ERROR_INVALID_PARAMETER;
3083 if (x + width > obj_surface->orig_width ||
3084 y + height > obj_surface->orig_height)
3085 return VA_STATUS_ERROR_INVALID_PARAMETER;
3086 if (x + width > obj_image->image.width ||
3087 y + height > obj_image->image.height)
3088 return VA_STATUS_ERROR_INVALID_PARAMETER;
/* This path does no format conversion. */
3090 if (obj_surface->fourcc != obj_image->image.format.fourcc)
3091 return VA_STATUS_ERROR_INVALID_IMAGE_FORMAT;
3094 void *image_data = NULL;
3096 va_status = i965_MapBuffer(ctx, obj_image->image.buf, &image_data);
3097 if (va_status != VA_STATUS_SUCCESS)
3104 rect.height = height;
/* Dispatch on format; each case also checks that the surface's actual
 * chroma layout (interleaved_uv) matches what the copier expects. */
3106 switch (obj_image->image.format.fourcc) {
3107 case VA_FOURCC('Y','V','1','2'):
3108 case VA_FOURCC('I','4','2','0'):
3109 /* I420 is native format for MPEG-2 decoded surfaces */
3110 if (render_state->interleaved_uv)
3111 goto operation_failed;
3112 get_image_i420(obj_image, image_data, obj_surface, &rect);
3114 case VA_FOURCC('N','V','1','2'):
3115 /* NV12 is native format for H.264 decoded surfaces */
3116 if (!render_state->interleaved_uv)
3117 goto operation_failed;
3118 get_image_nv12(obj_image, image_data, obj_surface, &rect);
3120 case VA_FOURCC('Y','U','Y','2'):
3121 /* YUY2 is the format supported by overlay plane */
3122 get_image_yuy2(obj_image, image_data, obj_surface, &rect);
3126 va_status = VA_STATUS_ERROR_OPERATION_FAILED;
3130 i965_UnmapBuffer(ctx, obj_image->image.buf);
/* vaGetImage() accelerated path: wrap the surface (source) and image
 * (destination) as i965_surface descriptors and let
 * i965_image_processing() perform the copy/convert on the GPU. */
3135 i965_hw_getimage(VADriverContextP ctx,
3136 VASurfaceID surface,
3137 int x, /* coordinates of the upper left source pixel */
3139 unsigned int width, /* width and height of the region */
3140 unsigned int height,
3143 struct i965_driver_data *i965 = i965_driver_data(ctx);
3144 struct i965_surface src_surface;
3145 struct i965_surface dst_surface;
3148 struct object_surface *obj_surface = SURFACE(surface);
3149 struct object_image *obj_image = IMAGE(image);
3152 return VA_STATUS_ERROR_INVALID_SURFACE;
3155 return VA_STATUS_ERROR_INVALID_IMAGE;
/* The requested rectangle must lie inside both surface and image. */
3158 return VA_STATUS_ERROR_INVALID_PARAMETER;
3159 if (x + width > obj_surface->orig_width ||
3160 y + height > obj_surface->orig_height)
3161 return VA_STATUS_ERROR_INVALID_PARAMETER;
3162 if (x + width > obj_image->image.width ||
3163 y + height > obj_image->image.height)
3164 return VA_STATUS_ERROR_INVALID_PARAMETER;
/* Surface never rendered: nothing to read back, succeed trivially. */
3166 if (!obj_surface->bo)
3167 return VA_STATUS_SUCCESS;
3168 assert(obj_image->bo); // image bo is always created, see i965_CreateImage()
3173 rect.height = height;
/* Describe source (surface) and destination (image) for the GPU blit. */
3175 src_surface.base = (struct object_base *)obj_surface;
3176 src_surface.type = I965_SURFACE_TYPE_SURFACE;
3177 src_surface.flags = I965_SURFACE_FLAG_FRAME;
3179 dst_surface.base = (struct object_base *)obj_image;
3180 dst_surface.type = I965_SURFACE_TYPE_IMAGE;
3181 dst_surface.flags = I965_SURFACE_FLAG_FRAME;
3183 va_status = i965_image_processing(ctx,
/* vaGetImage() entry point: use the GPU-accelerated path when the
 * platform advertises it, otherwise fall back to the CPU mapping path. */
3194 i965_GetImage(VADriverContextP ctx,
3195 VASurfaceID surface,
3196 int x, /* coordinates of the upper left source pixel */
3198 unsigned int width, /* width and height of the region */
3199 unsigned int height,
3202 struct i965_driver_data * const i965 = i965_driver_data(ctx);
/* Both helpers take the same (ctx, surface, rect, image) arguments. */
3205 if (HAS_ACCELERATED_GETIMAGE(i965))
3206 va_status = i965_hw_getimage(ctx,
3212 va_status = i965_sw_getimage(ctx,
/* CPU upload of a planar 4:2:0 VAImage into a surface.  The image may be
 * I420 or YV12 (U/V indices swap when its fourcc differs from the
 * surface's); the surface is written in I420 plane order (Y, U, V). */
3222 put_image_i420(struct object_surface *obj_surface,
3223 const VARectangle *dst_rect,
3224 struct object_image *obj_image, uint8_t *image_data,
3225 const VARectangle *src_rect)
3227 uint8_t *dst[3], *src[3];
3229 const int U = obj_image->image.format.fourcc == obj_surface->fourcc ? 1 : 2;
3230 const int V = obj_image->image.format.fourcc == obj_surface->fourcc ? 2 : 1;
3231 unsigned int tiling, swizzle;
3233 if (!obj_surface->bo)
/* This path copies 1:1 — no scaling. */
3236 assert(obj_surface->fourcc);
3237 assert(dst_rect->width == src_rect->width);
3238 assert(dst_rect->height == src_rect->height);
3239 dri_bo_get_tiling(obj_surface->bo, &tiling, &swizzle);
/* GTT-map tiled bos so linear CPU writes are tiled correctly. */
3241 if (tiling != I915_TILING_NONE)
3242 drm_intel_gem_bo_map_gtt(obj_surface->bo);
3244 dri_bo_map(obj_surface->bo, 0);
3246 if (!obj_surface->bo->virtual)
/* Source VA image has either I420 or YV12 format; the destination
 * surface is laid out as Y plane, then U/2, then V/2. */
3251 dst[0] = (uint8_t *)obj_surface->bo->virtual;
3252 src[Y] = image_data + obj_image->image.offsets[Y];
3253 dst[1] = dst[0] + obj_surface->width * obj_surface->height;
3254 src[U] = image_data + obj_image->image.offsets[U];
3255 dst[2] = dst[1] + (obj_surface->width / 2) * (obj_surface->height / 2);
3256 src[V] = image_data + obj_image->image.offsets[V];
/* Y plane: full-resolution copy. */
3259 dst[0] += dst_rect->y * obj_surface->width + dst_rect->x;
3260 src[Y] += src_rect->y * obj_image->image.pitches[Y] + src_rect->x;
3261 memcpy_pic(dst[0], obj_surface->width,
3262 src[Y], obj_image->image.pitches[Y],
3263 src_rect->width, src_rect->height);
/* U plane at half resolution in both dimensions. */
3266 dst[1] += (dst_rect->y / 2) * obj_surface->width / 2 + dst_rect->x / 2;
3267 src[U] += (src_rect->y / 2) * obj_image->image.pitches[U] + src_rect->x / 2;
3268 memcpy_pic(dst[1], obj_surface->width / 2,
3269 src[U], obj_image->image.pitches[U],
3270 src_rect->width / 2, src_rect->height / 2);
/* V plane at half resolution in both dimensions. */
3273 dst[2] += (dst_rect->y / 2) * obj_surface->width / 2 + dst_rect->x / 2;
3274 src[V] += (src_rect->y / 2) * obj_image->image.pitches[V] + src_rect->x / 2;
3275 memcpy_pic(dst[2], obj_surface->width / 2,
3276 src[V], obj_image->image.pitches[V],
3277 src_rect->width / 2, src_rect->height / 2);
3279 if (tiling != I915_TILING_NONE)
3280 drm_intel_gem_bo_unmap_gtt(obj_surface->bo);
3282 dri_bo_unmap(obj_surface->bo);
/* CPU upload of an NV12 VAImage into an NV12 surface (Y plane plus one
 * interleaved UV plane).  Copies 1:1 — no scaling or conversion. */
3286 put_image_nv12(struct object_surface *obj_surface,
3287 const VARectangle *dst_rect,
3288 struct object_image *obj_image, uint8_t *image_data,
3289 const VARectangle *src_rect)
3291 uint8_t *dst[2], *src[2];
3292 unsigned int tiling, swizzle;
3294 if (!obj_surface->bo)
3297 assert(obj_surface->fourcc);
3298 assert(dst_rect->width == src_rect->width);
3299 assert(dst_rect->height == src_rect->height);
3300 dri_bo_get_tiling(obj_surface->bo, &tiling, &swizzle);
/* GTT-map tiled bos so linear CPU writes are tiled correctly. */
3302 if (tiling != I915_TILING_NONE)
3303 drm_intel_gem_bo_map_gtt(obj_surface->bo);
3305 dri_bo_map(obj_surface->bo, 0);
3307 if (!obj_surface->bo->virtual)
3310 /* Both dest VA image and source surface have NV12 format */
3311 dst[0] = (uint8_t *)obj_surface->bo->virtual;
3312 src[0] = image_data + obj_image->image.offsets[0];
3313 dst[1] = dst[0] + obj_surface->width * obj_surface->height;
3314 src[1] = image_data + obj_image->image.offsets[1];
/* Y plane: full-resolution copy. */
3317 dst[0] += dst_rect->y * obj_surface->width + dst_rect->x;
3318 src[0] += src_rect->y * obj_image->image.pitches[0] + src_rect->x;
3319 memcpy_pic(dst[0], obj_surface->width,
3320 src[0], obj_image->image.pitches[0],
3321 src_rect->width, src_rect->height);
/* UV plane: half height; x rounded down to even (& -2) to keep U/V
 * byte pairs aligned. */
3324 dst[1] += (dst_rect->y / 2) * obj_surface->width + (dst_rect->x & -2);
3325 src[1] += (src_rect->y / 2) * obj_image->image.pitches[1] + (src_rect->x & -2);
3326 memcpy_pic(dst[1], obj_surface->width,
3327 src[1], obj_image->image.pitches[1],
3328 src_rect->width, src_rect->height / 2);
3330 if (tiling != I915_TILING_NONE)
3331 drm_intel_gem_bo_unmap_gtt(obj_surface->bo);
3333 dri_bo_unmap(obj_surface->bo);
/* CPU upload of a packed YUY2 VAImage into a YUY2 surface (single plane,
 * 2 bytes per pixel).  Copies 1:1 — no scaling or conversion. */
3337 put_image_yuy2(struct object_surface *obj_surface,
3338 const VARectangle *dst_rect,
3339 struct object_image *obj_image, uint8_t *image_data,
3340 const VARectangle *src_rect)
3343 unsigned int tiling, swizzle;
3345 if (!obj_surface->bo)
3348 assert(obj_surface->fourcc);
3349 assert(dst_rect->width == src_rect->width);
3350 assert(dst_rect->height == src_rect->height);
3351 dri_bo_get_tiling(obj_surface->bo, &tiling, &swizzle);
/* GTT-map tiled bos so linear CPU writes are tiled correctly. */
3353 if (tiling != I915_TILING_NONE)
3354 drm_intel_gem_bo_map_gtt(obj_surface->bo);
3356 dri_bo_map(obj_surface->bo, 0);
3358 if (!obj_surface->bo->virtual)
3361 /* Both dest VA image and source surface have YUY2 format */
3362 dst = (uint8_t *)obj_surface->bo->virtual;
3363 src = image_data + obj_image->image.offsets[0];
3365 /* YUYV packed plane */
/* x offsets and widths are in bytes (2 per pixel).
 * NOTE(review): the dest row advance uses obj_surface->width while the
 * per-row pitch below is width*2 — verify against the full file. */
3366 dst += dst_rect->y * obj_surface->width + dst_rect->x*2;
3367 src += src_rect->y * obj_image->image.pitches[0] + src_rect->x*2;
3368 memcpy_pic(dst, obj_surface->width*2,
3369 src, obj_image->image.pitches[0],
3370 src_rect->width*2, src_rect->height);
3372 if (tiling != I915_TILING_NONE)
3373 drm_intel_gem_bo_unmap_gtt(obj_surface->bo);
3375 dri_bo_unmap(obj_surface->bo);
/* vaPutImage() software path: validate ids and bounds, disallow scaling,
 * require a matching fourcc on an already-allocated surface (otherwise
 * allocate the surface bo linear in the image's format), then map the
 * image buffer and copy pixels in with the per-format CPU helper. */
3380 i965_sw_putimage(VADriverContextP ctx,
3381 VASurfaceID surface,
3385 unsigned int src_width,
3386 unsigned int src_height,
3389 unsigned int dest_width,
3390 unsigned int dest_height)
3392 struct i965_driver_data *i965 = i965_driver_data(ctx);
3393 struct object_surface *obj_surface = SURFACE(surface);
3396 return VA_STATUS_ERROR_INVALID_SURFACE;
3398 struct object_image *obj_image = IMAGE(image);
3400 return VA_STATUS_ERROR_INVALID_IMAGE;
/* Source rectangle must lie inside the image. */
3402 if (src_x < 0 || src_y < 0)
3403 return VA_STATUS_ERROR_INVALID_PARAMETER;
3404 if (src_x + src_width > obj_image->image.width ||
3405 src_y + src_height > obj_image->image.height)
3406 return VA_STATUS_ERROR_INVALID_PARAMETER;
/* Destination rectangle must lie inside the surface. */
3407 if (dest_x < 0 || dest_y < 0)
3408 return VA_STATUS_ERROR_INVALID_PARAMETER;
3409 if (dest_x + dest_width > obj_surface->orig_width ||
3410 dest_y + dest_height > obj_surface->orig_height)
3411 return VA_STATUS_ERROR_INVALID_PARAMETER;
3413 /* XXX: don't allow scaling */
3414 if (src_width != dest_width || src_height != dest_height)
3415 return VA_STATUS_ERROR_INVALID_PARAMETER;
3417 if (obj_surface->fourcc) {
3418 /* Don't allow format mismatch */
3419 if (obj_surface->fourcc != obj_image->image.format.fourcc)
3420 return VA_STATUS_ERROR_INVALID_IMAGE_FORMAT;
3424 /* VA is surface not used for decoding, use same VA image format */
3425 i965_check_alloc_surface_bo(
3428 0, /* XXX: don't use tiled surface */
3429 obj_image->image.format.fourcc,
3430 get_sampling_from_fourcc (obj_image->image.format.fourcc));
3434 void *image_data = NULL;
3436 va_status = i965_MapBuffer(ctx, obj_image->image.buf, &image_data);
3437 if (va_status != VA_STATUS_SUCCESS)
3440 VARectangle src_rect, dest_rect;
3443 src_rect.width = src_width;
3444 src_rect.height = src_height;
3445 dest_rect.x = dest_x;
3446 dest_rect.y = dest_y;
3447 dest_rect.width = dest_width;
3448 dest_rect.height = dest_height;
/* Dispatch on the (already validated) common fourcc. */
3450 switch (obj_image->image.format.fourcc) {
3451 case VA_FOURCC('Y','V','1','2'):
3452 case VA_FOURCC('I','4','2','0'):
3453 put_image_i420(obj_surface, &dest_rect, obj_image, image_data, &src_rect);
3455 case VA_FOURCC('N','V','1','2'):
3456 put_image_nv12(obj_surface, &dest_rect, obj_image, image_data, &src_rect);
3458 case VA_FOURCC('Y','U','Y','2'):
3459 put_image_yuy2(obj_surface, &dest_rect, obj_image, image_data, &src_rect);
3462 va_status = VA_STATUS_ERROR_OPERATION_FAILED;
3466 i965_UnmapBuffer(ctx, obj_image->image.buf);
/*
 * i965_hw_putimage — GPU-accelerated vaPutImage path: copies a region of a
 * VAImage into a VASurface through the driver's image-processing (VPP)
 * entry point i965_image_processing().
 * NOTE(review): interior lines are elided in this extract (see numbering
 * gaps); comments describe only what the visible code shows.
 */
3471 i965_hw_putimage(VADriverContextP ctx,
3472 VASurfaceID surface,
3476 unsigned int src_width,
3477 unsigned int src_height,
3480 unsigned int dest_width,
3481 unsigned int dest_height)
3483 struct i965_driver_data *i965 = i965_driver_data(ctx);
3484 struct object_surface *obj_surface = SURFACE(surface);
3485 struct object_image *obj_image = IMAGE(image);
3486 struct i965_surface src_surface, dst_surface;
3487 VAStatus va_status = VA_STATUS_SUCCESS;
3488 VARectangle src_rect, dst_rect;
/* Validation: reject a missing surface/image and any source or destination
 * rectangle that exceeds the image/surface bounds. */
3491 return VA_STATUS_ERROR_INVALID_SURFACE;
3493 if (!obj_image || !obj_image->bo)
3494 return VA_STATUS_ERROR_INVALID_IMAGE;
3498 src_x + src_width > obj_image->image.width ||
3499 src_y + src_height > obj_image->image.height)
3500 return VA_STATUS_ERROR_INVALID_PARAMETER;
3504 dest_x + dest_width > obj_surface->orig_width ||
3505 dest_y + dest_height > obj_surface->orig_height)
3506 return VA_STATUS_ERROR_INVALID_PARAMETER;
/* Lazily allocate the destination surface BO when it does not exist yet,
 * matching the source image's fourcc and chroma sampling. */
3508 if (!obj_surface->bo) {
3509 unsigned int tiling, swizzle;
/* NOTE(review): stray double semicolon on the next line — harmless, worth cleaning up. */
3510 int surface_sampling = get_sampling_from_fourcc (obj_image->image.format.fourcc);;
3511 dri_bo_get_tiling(obj_image->bo, &tiling, &swizzle);
3513 i965_check_alloc_surface_bo(ctx,
3516 obj_image->image.format.fourcc,
3520 assert(obj_surface->fourcc);
/* Wrap the image (source) and surface (destination) in i965_surface
 * descriptors so the common VPP entry point can consume both. */
3522 src_surface.base = (struct object_base *)obj_image;
3523 src_surface.type = I965_SURFACE_TYPE_IMAGE;
3524 src_surface.flags = I965_SURFACE_FLAG_FRAME;
3527 src_rect.width = src_width;
3528 src_rect.height = src_height;
3530 dst_surface.base = (struct object_base *)obj_surface;
3531 dst_surface.type = I965_SURFACE_TYPE_SURFACE;
3532 dst_surface.flags = I965_SURFACE_FLAG_FRAME;
3533 dst_rect.x = dest_x;
3534 dst_rect.y = dest_y;
3535 dst_rect.width = dest_width;
3536 dst_rect.height = dest_height;
/* Hand the actual copy/conversion off to the hardware pipeline. */
3538 va_status = i965_image_processing(ctx,
/*
 * i965_PutImage — vaPutImage dispatcher: uses the hardware (VPP) path when
 * the platform advertises HAS_ACCELERATED_PUTIMAGE, otherwise falls back to
 * the CPU copy in i965_sw_putimage. Argument lists of both calls are elided
 * in this extract.
 */
3548 i965_PutImage(VADriverContextP ctx,
3549 VASurfaceID surface,
3553 unsigned int src_width,
3554 unsigned int src_height,
3557 unsigned int dest_width,
3558 unsigned int dest_height)
3560 struct i965_driver_data *i965 = i965_driver_data(ctx);
3561 VAStatus va_status = VA_STATUS_SUCCESS;
/* Prefer the GPU blit when available on this device. */
3563 if (HAS_ACCELERATED_PUTIMAGE(i965))
3564 va_status = i965_hw_putimage(ctx,
/* Software fallback path (memcpy-based, per-fourcc helpers). */
3576 va_status = i965_sw_putimage(ctx,
/*
 * i965_PutSurface — vaPutSurface implementation. Only the X11 display path
 * is supported: it builds source/destination rectangles and forwards to the
 * DRI output module; every other display type returns UNIMPLEMENTED.
 * (src/dst x,y assignments are elided in this extract.)
 */
3592 i965_PutSurface(VADriverContextP ctx,
3593 VASurfaceID surface,
3594 void *draw, /* X Drawable */
3597 unsigned short srcw,
3598 unsigned short srch,
3601 unsigned short destw,
3602 unsigned short desth,
3603 VARectangle *cliprects, /* client supplied clip list */
3604 unsigned int number_cliprects, /* number of clip rects in the clip list */
3605 unsigned int flags) /* de-interlacing flags */
3608 if (IS_VA_X11(ctx)) {
3609 VARectangle src_rect, dst_rect;
3613 src_rect.width = srcw;
3614 src_rect.height = srch;
3618 dst_rect.width = destw;
3619 dst_rect.height = desth;
/* Delegate the presentation to the DRI/X11 output backend. */
3621 return i965_put_surface_dri(ctx, surface, draw, &src_rect, &dst_rect,
3622 cliprects, number_cliprects, flags);
3625 return VA_STATUS_ERROR_UNIMPLEMENTED;
/*
 * i965_BufferInfo (function-name line elided in this extract; identity
 * confirmed by the vtable->vaBufferInfo assignment later in the file).
 * Reports a buffer's type, element size, and element count back to the
 * caller via the out parameters.
 */
3630 VADriverContextP ctx, /* in */
3631 VABufferID buf_id, /* in */
3632 VABufferType *type, /* out */
3633 unsigned int *size, /* out */
3634 unsigned int *num_elements /* out */
3637 struct i965_driver_data *i965 = NULL;
3638 struct object_buffer *obj_buffer = NULL;
3640 i965 = i965_driver_data(ctx);
3641 obj_buffer = BUFFER(buf_id);
/* Lookup failure (elided condition) reports an invalid buffer id. */
3646 return VA_STATUS_ERROR_INVALID_BUFFER;
3648 *type = obj_buffer->type;
3649 *size = obj_buffer->size_element;
3650 *num_elements = obj_buffer->num_elements;
3652 return VA_STATUS_SUCCESS;
/*
 * i965_LockSurface (function-name line elided; identity confirmed by the
 * vtable->vaLockSurface assignment later in the file).
 * "Locks" a surface by deriving a temporary VAImage from it, mapping that
 * image's buffer, and reporting fourcc/strides/offsets and the mapped
 * pointer through the out parameters. The image id is remembered in
 * obj_surface->locked_image_id so i965_UnlockSurface can undo the work.
 * Error/goto labels are elided in this extract.
 */
3657 VADriverContextP ctx, /* in */
3658 VASurfaceID surface, /* in */
3659 unsigned int *fourcc, /* out */
3660 unsigned int *luma_stride, /* out */
3661 unsigned int *chroma_u_stride, /* out */
3662 unsigned int *chroma_v_stride, /* out */
3663 unsigned int *luma_offset, /* out */
3664 unsigned int *chroma_u_offset, /* out */
3665 unsigned int *chroma_v_offset, /* out */
3666 unsigned int *buffer_name, /* out */
3667 void **buffer /* out */
3670 VAStatus vaStatus = VA_STATUS_SUCCESS;
3671 struct i965_driver_data *i965 = i965_driver_data(ctx);
3672 struct object_surface *obj_surface = NULL;
/* All out pointers are required; asserted rather than error-returned. */
3676 assert(luma_stride);
3677 assert(chroma_u_stride);
3678 assert(chroma_v_stride);
3679 assert(luma_offset);
3680 assert(chroma_u_offset);
3681 assert(chroma_v_offset);
3682 assert(buffer_name);
3685 tmpImage.image_id = VA_INVALID_ID;
3687 obj_surface = SURFACE(surface);
3688 if (obj_surface == NULL) {
3689 // Surface is absent.
3690 vaStatus = VA_STATUS_ERROR_INVALID_PARAMETER;
3694 // Lock functionality is absent now.
3695 if (obj_surface->locked_image_id != VA_INVALID_ID) {
3696 // Surface is locked already.
3697 vaStatus = VA_STATUS_ERROR_INVALID_PARAMETER;
/* Derive a VAImage view of the surface (arguments elided). */
3701 vaStatus = i965_DeriveImage(
3705 if (vaStatus != VA_STATUS_SUCCESS) {
/* Remember the derived image so the surface counts as locked. */
3709 obj_surface->locked_image_id = tmpImage.image_id;
/* Map the derived image's buffer to get a CPU pointer (arguments elided). */
3711 vaStatus = i965_MapBuffer(
3715 if (vaStatus != VA_STATUS_SUCCESS) {
/* Publish the layout of the mapped image to the caller. */
3719 *fourcc = tmpImage.format.fourcc;
3720 *luma_offset = tmpImage.offsets[0];
3721 *luma_stride = tmpImage.pitches[0];
3722 *chroma_u_offset = tmpImage.offsets[1];
3723 *chroma_u_stride = tmpImage.pitches[1];
3724 *chroma_v_offset = tmpImage.offsets[2];
3725 *chroma_v_stride = tmpImage.pitches[2];
3726 *buffer_name = tmpImage.buf;
/* Error-path cleanup follows here (body elided in this extract). */
3729 if (vaStatus != VA_STATUS_SUCCESS) {
/*
 * i965_UnlockSurface (function-name line elided; identity confirmed by the
 * vtable->vaUnlockSurface assignment later in the file).
 * Reverses i965_LockSurface: unmaps and destroys the image that was derived
 * at lock time, then clears locked_image_id. Goto labels for the error
 * paths are elided in this extract.
 */
3738 VADriverContextP ctx, /* in */
3739 VASurfaceID surface /* in */
3742 VAStatus vaStatus = VA_STATUS_SUCCESS;
3743 struct i965_driver_data *i965 = i965_driver_data(ctx);
3744 struct object_image *locked_img = NULL;
3745 struct object_surface *obj_surface = NULL;
3747 obj_surface = SURFACE(surface);
3749 if (obj_surface == NULL) {
3750 vaStatus = VA_STATUS_ERROR_INVALID_PARAMETER; // Surface is absent
3753 if (obj_surface->locked_image_id == VA_INVALID_ID) {
3754 vaStatus = VA_STATUS_ERROR_INVALID_PARAMETER; // Surface is not locked
/* Resolve the image stashed by LockSurface; it may have been destroyed by
 * the application in the meantime, which is also treated as an error. */
3758 locked_img = IMAGE(obj_surface->locked_image_id);
3759 if (locked_img == NULL || (locked_img->image.image_id == VA_INVALID_ID)) {
3760 // Work image was deallocated before i965_UnlockSurface()
3761 vaStatus = VA_STATUS_ERROR_INVALID_PARAMETER;
3765 vaStatus = i965_UnmapBuffer(
3767 locked_img->image.buf);
3768 if (vaStatus != VA_STATUS_SUCCESS) {
3772 vaStatus = i965_DestroyImage(
3774 locked_img->image.image_id);
3775 if (vaStatus != VA_STATUS_SUCCESS) {
/* Invalidate both references so a double-unlock is caught above. */
3779 locked_img->image.image_id = VA_INVALID_ID;
3782 obj_surface->locked_image_id = VA_INVALID_ID;
/*
 * i965_GetSurfaceAttributes — vaGetSurfaceAttributes implementation.
 * For each requested attribute, fills in value/flags according to the
 * hardware generation (G4X / Ironlake / Gen6 / Gen7) and the config's
 * profile/entrypoint. Only VASurfaceAttribPixelFormat is truly handled:
 * value.i == 0 means "report the default fourcc", non-zero means "validate
 * the caller-proposed fourcc" (clearing SETTABLE if unsupported). Min/max
 * width/height attributes are reported as not supported.
 * NOTE(review): break statements and closing braces are elided throughout
 * this extract (see numbering gaps).
 */
3788 i965_GetSurfaceAttributes(
3789 VADriverContextP ctx,
3791 VASurfaceAttrib *attrib_list,
3792 unsigned int num_attribs
3795 VAStatus vaStatus = VA_STATUS_SUCCESS;
3796 struct i965_driver_data *i965 = i965_driver_data(ctx);
3797 struct object_config *obj_config;
3800 if (config == VA_INVALID_ID)
3801 return VA_STATUS_ERROR_INVALID_CONFIG;
3803 obj_config = CONFIG(config);
3805 if (obj_config == NULL)
3806 return VA_STATUS_ERROR_INVALID_CONFIG;
3808 if (attrib_list == NULL || num_attribs == 0)
3809 return VA_STATUS_ERROR_INVALID_PARAMETER;
3811 for (i = 0; i < num_attribs; i++) {
3812 switch (attrib_list[i].type) {
3813 case VASurfaceAttribPixelFormat:
3814 attrib_list[i].value.type = VAGenericValueTypeInteger;
3815 attrib_list[i].flags = VA_SURFACE_ATTRIB_GETTABLE | VA_SURFACE_ATTRIB_SETTABLE;
/* Query mode: value.i == 0 asks for the per-generation default fourcc. */
3817 if (attrib_list[i].value.value.i == 0) {
3818 if (IS_G4X(i965->intel.device_id)) {
3819 if (obj_config->profile == VAProfileMPEG2Simple ||
3820 obj_config->profile == VAProfileMPEG2Main) {
3821 attrib_list[i].value.value.i = VA_FOURCC('I', '4', '2', '0');
3824 attrib_list[i].flags = VA_SURFACE_ATTRIB_NOT_SUPPORTED;
3826 } else if (IS_IRONLAKE(i965->intel.device_id)) {
3827 if (obj_config->profile == VAProfileMPEG2Simple ||
3828 obj_config->profile == VAProfileMPEG2Main) {
3829 attrib_list[i].value.value.i = VA_FOURCC('I', '4', '2', '0');
3830 } else if (obj_config->profile == VAProfileH264Baseline ||
3831 obj_config->profile == VAProfileH264Main ||
3832 obj_config->profile == VAProfileH264High) {
3833 attrib_list[i].value.value.i = VA_FOURCC('N', 'V', '1', '2');
3834 } else if (obj_config->profile == VAProfileNone) {
3835 attrib_list[i].value.value.i = VA_FOURCC('N', 'V', '1', '2');
3838 attrib_list[i].flags = VA_SURFACE_ATTRIB_NOT_SUPPORTED;
3840 } else if (IS_GEN6(i965->intel.device_id)) {
3841 attrib_list[i].value.value.i = VA_FOURCC('N', 'V', '1', '2');
3842 } else if (IS_GEN7(i965->intel.device_id)) {
3843 if (obj_config->profile == VAProfileJPEGBaseline)
3844 attrib_list[i].value.value.i = 0; /* internal format */
3846 attrib_list[i].value.value.i = VA_FOURCC('N', 'V', '1', '2');
/* Validation mode: non-zero value.i is a caller-proposed fourcc; reject by
 * zeroing the value and dropping the SETTABLE flag. */
3849 if (IS_G4X(i965->intel.device_id)) {
3850 if (obj_config->profile == VAProfileMPEG2Simple ||
3851 obj_config->profile == VAProfileMPEG2Main) {
3852 if (attrib_list[i].value.value.i != VA_FOURCC('I', '4', '2', '0')) {
3853 attrib_list[i].value.value.i = 0;
3854 attrib_list[i].flags &= ~VA_SURFACE_ATTRIB_SETTABLE;
3858 attrib_list[i].flags = VA_SURFACE_ATTRIB_NOT_SUPPORTED;
3860 } else if (IS_IRONLAKE(i965->intel.device_id)) {
3861 if (obj_config->profile == VAProfileMPEG2Simple ||
3862 obj_config->profile == VAProfileMPEG2Main) {
3863 if (attrib_list[i].value.value.i != VA_FOURCC('I', '4', '2', '0')) {
3864 attrib_list[i].value.value.i = 0;
3865 attrib_list[i].flags &= ~VA_SURFACE_ATTRIB_SETTABLE;
3867 } else if (obj_config->profile == VAProfileH264Baseline ||
3868 obj_config->profile == VAProfileH264Main ||
3869 obj_config->profile == VAProfileH264High) {
3870 if (attrib_list[i].value.value.i != VA_FOURCC('N', 'V', '1', '2')) {
3871 attrib_list[i].value.value.i = 0;
3872 attrib_list[i].flags &= ~VA_SURFACE_ATTRIB_SETTABLE;
3874 } else if (obj_config->profile == VAProfileNone) {
/* VAProfileNone (video processing): a wider set of fourccs is accepted. */
3875 switch (attrib_list[i].value.value.i) {
3876 case VA_FOURCC('N', 'V', '1', '2'):
3877 case VA_FOURCC('I', '4', '2', '0'):
3878 case VA_FOURCC('Y', 'V', '1', '2'):
3879 case VA_FOURCC('Y', 'U', 'Y', '2'):
3880 case VA_FOURCC('B', 'G', 'R', 'A'):
3881 case VA_FOURCC('B', 'G', 'R', 'X'):
3882 case VA_FOURCC('R', 'G', 'B', 'X'):
3883 case VA_FOURCC('R', 'G', 'B', 'A'):
3886 attrib_list[i].value.value.i = 0;
3887 attrib_list[i].flags &= ~VA_SURFACE_ATTRIB_SETTABLE;
3892 attrib_list[i].flags = VA_SURFACE_ATTRIB_NOT_SUPPORTED;
3894 } else if (IS_GEN6(i965->intel.device_id)) {
3895 if (obj_config->entrypoint == VAEntrypointEncSlice ||
3896 obj_config->entrypoint == VAEntrypointVideoProc) {
3897 switch (attrib_list[i].value.value.i) {
3898 case VA_FOURCC('N', 'V', '1', '2'):
3899 case VA_FOURCC('I', '4', '2', '0'):
3900 case VA_FOURCC('Y', 'V', '1', '2'):
3901 case VA_FOURCC('Y', 'U', 'Y', '2'):
3902 case VA_FOURCC('B', 'G', 'R', 'A'):
3903 case VA_FOURCC('B', 'G', 'R', 'X'):
3904 case VA_FOURCC('R', 'G', 'B', 'X'):
3905 case VA_FOURCC('R', 'G', 'B', 'A'):
3908 attrib_list[i].value.value.i = 0;
3909 attrib_list[i].flags &= ~VA_SURFACE_ATTRIB_SETTABLE;
/* Decoding entrypoints on Gen6 only accept NV12. */
3913 if (attrib_list[i].value.value.i != VA_FOURCC('N', 'V', '1', '2')) {
3914 attrib_list[i].value.value.i = 0;
3915 attrib_list[i].flags &= ~VA_SURFACE_ATTRIB_SETTABLE;
3918 } else if (IS_GEN7(i965->intel.device_id)) {
3919 if (obj_config->entrypoint == VAEntrypointEncSlice ||
3920 obj_config->entrypoint == VAEntrypointVideoProc) {
3921 switch (attrib_list[i].value.value.i) {
3922 case VA_FOURCC('N', 'V', '1', '2'):
3923 case VA_FOURCC('I', '4', '2', '0'):
3924 case VA_FOURCC('Y', 'V', '1', '2'):
3927 attrib_list[i].value.value.i = 0;
3928 attrib_list[i].flags &= ~VA_SURFACE_ATTRIB_SETTABLE;
3932 if (obj_config->profile == VAProfileJPEGBaseline) {
3933 attrib_list[i].value.value.i = 0; /* JPEG decoding always uses an internal format */
3934 attrib_list[i].flags &= ~VA_SURFACE_ATTRIB_SETTABLE;
3936 if (attrib_list[i].value.value.i != VA_FOURCC('N', 'V', '1', '2')) {
3937 attrib_list[i].value.value.i = 0;
3938 attrib_list[i].flags &= ~VA_SURFACE_ATTRIB_SETTABLE;
/* Remaining attribute types are acknowledged but unsupported. */
3946 case VASurfaceAttribMinWidth:
3947 /* FIXME: add support for it later */
3948 attrib_list[i].flags = VA_SURFACE_ATTRIB_NOT_SUPPORTED;
3950 case VASurfaceAttribMaxWidth:
3951 attrib_list[i].flags = VA_SURFACE_ATTRIB_NOT_SUPPORTED;
3953 case VASurfaceAttribMinHeight:
3954 attrib_list[i].flags = VA_SURFACE_ATTRIB_NOT_SUPPORTED;
3956 case VASurfaceAttribMaxHeight:
3957 attrib_list[i].flags = VA_SURFACE_ATTRIB_NOT_SUPPORTED;
3960 attrib_list[i].flags = VA_SURFACE_ATTRIB_NOT_SUPPORTED;
/*
 * i965_QueryVideoProcFilters — reports the VPP filters this platform
 * supports. Noise reduction and deinterlacing are advertised when VPP is
 * available; Haswell additionally advertises None/Sharpening/ColorBalance/
 * ColorStandard. (The `i` counter declaration and the *num_filters store
 * are elided in this extract.)
 */
3969 * Query video processing pipeline
3971 VAStatus i965_QueryVideoProcFilters(
3972 VADriverContextP ctx,
3973 VAContextID context,
3974 VAProcFilterType *filters,
3975 unsigned int *num_filters
3978 struct i965_driver_data *const i965 = i965_driver_data(ctx);
3981 if (HAS_VPP(i965)) {
3982 filters[i++] = VAProcFilterNoiseReduction;
3983 filters[i++] = VAProcFilterDeinterlacing;
/* Haswell-only extras. */
3986 if(IS_HASWELL(i965->intel.device_id)){
3987 filters[i++] = VAProcFilterNone;
3988 filters[i++] = VAProcFilterSharpening;
3989 filters[i++] = VAProcFilterColorBalance;
3990 filters[i++] = VAProcFilterColorStandard;
3995 return VA_STATUS_SUCCESS;
/*
 * i965_QueryVideoProcFilterCaps — fills filter_caps for the requested
 * filter type: a 0..1 strength range for noise reduction, Bob for
 * deinterlacing, and (Haswell only) hue/saturation/brightness/contrast
 * ranges for color balance. The running count `i` (declaration elided) is
 * stored to *num_filter_caps at the end.
 * NOTE(review): cap-pointer advancement between the color-balance entries
 * is elided in this extract.
 */
3998 VAStatus i965_QueryVideoProcFilterCaps(
3999 VADriverContextP ctx,
4000 VAContextID context,
4001 VAProcFilterType type,
4003 unsigned int *num_filter_caps
4006 struct i965_driver_data *const i965 = i965_driver_data(ctx);
4009 if (type == VAProcFilterNoiseReduction) {
4010 VAProcFilterCap *cap = filter_caps;
/* Denoise strength: 0.0 .. 1.0 in 1/32 steps, default 0.5. */
4012 cap->range.min_value = 0.0;
4013 cap->range.max_value = 1.0;
4014 cap->range.default_value = 0.5;
4015 cap->range.step = 0.03125; /* 1.0 / 32 */
4017 } else if (type == VAProcFilterDeinterlacing) {
4018 VAProcFilterCapDeinterlacing *cap = filter_caps;
/* Only Bob deinterlacing is advertised here. */
4020 cap->type = VAProcDeinterlacingBob;
4025 if(IS_HASWELL(i965->intel.device_id)){
4026 if(type == VAProcFilterColorBalance){
4027 VAProcFilterCapColorBalance *cap = filter_caps;
4028 cap->type = VAProcColorBalanceHue;
4029 cap->range.min_value = -180.0;
4030 cap->range.max_value = 180.0;
4031 cap->range.default_value = 0.0;
4032 cap->range.step = 1.0;
4036 cap->type = VAProcColorBalanceSaturation;
4037 cap->range.min_value = 0.0;
4038 cap->range.max_value = 10.0;
4039 cap->range.default_value = 0.0;
4040 cap->range.step = 0.1;
4044 cap->type = VAProcColorBalanceBrightness;
4045 cap->range.min_value = -100.0;
4046 cap->range.max_value = 100.0;
4047 cap->range.default_value = 0.0;
4048 cap->range.step = 1.0;
4052 cap->type = VAProcColorBalanceContrast;
4053 cap->range.min_value = 0.0;
4054 cap->range.max_value = 10.0;
4055 cap->range.default_value = 0.0;
4056 cap->range.step = 0.1;
4063 *num_filter_caps = i;
4065 return VA_STATUS_SUCCESS;
/* Color standards advertised by i965_QueryVideoProcPipelineCaps below;
 * only BT.601 is listed for both input and output (array terminators are
 * elided in this extract). */
4068 static VAProcColorStandardType vpp_input_color_standards[VAProcColorStandardCount] = {
4069 VAProcColorStandardBT601,
4072 static VAProcColorStandardType vpp_output_color_standards[VAProcColorStandardCount] = {
4073 VAProcColorStandardBT601,
/*
 * i965_QueryVideoProcPipelineCaps — describes the VPP pipeline for a given
 * set of filter parameter buffers: zero forward/backward references by
 * default, BT.601-only color standards, and validates each supplied filter
 * buffer (deinterlacing must be Weave or Bob).
 */
4076 VAStatus i965_QueryVideoProcPipelineCaps(
4077 VADriverContextP ctx,
4078 VAContextID context,
4079 VABufferID *filters,
4080 unsigned int num_filters,
4081 VAProcPipelineCaps *pipeline_cap /* out */
4084 struct i965_driver_data * const i965 = i965_driver_data(ctx);
/* Baseline capabilities before inspecting the individual filters. */
4087 pipeline_cap->pipeline_flags = 0;
4088 pipeline_cap->filter_flags = 0;
4089 pipeline_cap->num_forward_references = 0;
4090 pipeline_cap->num_backward_references = 0;
4091 pipeline_cap->num_input_color_standards = 1;
4092 pipeline_cap->input_color_standards = vpp_input_color_standards;
4093 pipeline_cap->num_output_color_standards = 1;
4094 pipeline_cap->output_color_standards = vpp_output_color_standards;
/* Each filter buffer must exist and be backed by a buffer store/BO. */
4096 for (i = 0; i < num_filters; i++) {
4097 struct object_buffer *obj_buffer = BUFFER(filters[i]);
4100 !obj_buffer->buffer_store ||
4101 !obj_buffer->buffer_store->bo)
4102 return VA_STATUS_ERROR_INVALID_BUFFER;
4104 VAProcFilterParameterBufferBase *base = (VAProcFilterParameterBufferBase *)obj_buffer->buffer_store->buffer;
4106 if (base->type == VAProcFilterNoiseReduction) {
4107 VAProcFilterParameterBuffer *denoise = (VAProcFilterParameterBuffer *)base;
4109 } else if (base->type == VAProcFilterDeinterlacing) {
4110 VAProcFilterParameterBufferDeinterlacing *deint = (VAProcFilterParameterBufferDeinterlacing *)base;
/* Only Weave and Bob deinterlacing algorithms are accepted. */
4112 assert(deint->algorithm == VAProcDeinterlacingWeave ||
4113 deint->algorithm == VAProcDeinterlacingBob);
4117 return VA_STATUS_SUCCESS;
/*
 * i965_driver_data_init — one of the i965_sub_ops init hooks: selects the
 * per-generation codec_info table, initializes the six object heaps
 * (config/context/surface/buffer/image/subpic), then creates the render
 * batchbuffer and the render/pp mutexes. On heap-init failure it unwinds
 * via the err_* labels, destroying the heaps created so far (labels and
 * return statements are elided in this extract).
 */
4121 i965_driver_data_init(VADriverContextP ctx)
4123 struct i965_driver_data *i965 = i965_driver_data(ctx);
/* Pick the codec capability table matching the detected hardware. */
4125 if (IS_HASWELL(i965->intel.device_id))
4126 i965->codec_info = &gen75_hw_codec_info;
4127 else if (IS_G4X(i965->intel.device_id))
4128 i965->codec_info = &g4x_hw_codec_info;
4129 else if (IS_IRONLAKE(i965->intel.device_id))
4130 i965->codec_info = &ironlake_hw_codec_info;
4131 else if (IS_GEN6(i965->intel.device_id))
4132 i965->codec_info = &gen6_hw_codec_info;
4133 else if (IS_GEN7(i965->intel.device_id))
4134 i965->codec_info = &gen7_hw_codec_info;
/* Object heaps: each failure unwinds everything initialized before it. */
4138 if (object_heap_init(&i965->config_heap,
4139 sizeof(struct object_config),
4141 goto err_config_heap;
4142 if (object_heap_init(&i965->context_heap,
4143 sizeof(struct object_context),
4145 goto err_context_heap;
4147 if (object_heap_init(&i965->surface_heap,
4148 sizeof(struct object_surface),
4150 goto err_surface_heap;
4151 if (object_heap_init(&i965->buffer_heap,
4152 sizeof(struct object_buffer),
4154 goto err_buffer_heap;
4155 if (object_heap_init(&i965->image_heap,
4156 sizeof(struct object_image),
4158 goto err_image_heap;
4159 if (object_heap_init(&i965->subpic_heap,
4160 sizeof(struct object_subpic),
4162 goto err_subpic_heap;
4164 i965->batch = intel_batchbuffer_new(&i965->intel, I915_EXEC_RENDER, 0);
4165 _i965InitMutex(&i965->render_mutex);
4166 _i965InitMutex(&i965->pp_mutex);
/* Error unwinding (labels elided): destroy heaps in reverse order. */
4171 object_heap_destroy(&i965->image_heap);
4173 object_heap_destroy(&i965->buffer_heap);
4175 object_heap_destroy(&i965->surface_heap);
4177 object_heap_destroy(&i965->context_heap);
4179 object_heap_destroy(&i965->config_heap);
/*
 * i965_driver_data_terminate — counterpart of i965_driver_data_init:
 * destroys the mutexes, frees the batchbuffer, and tears down all object
 * heaps, destroying any objects still alive in them.
 */
4186 i965_driver_data_terminate(VADriverContextP ctx)
4188 struct i965_driver_data *i965 = i965_driver_data(ctx);
4190 _i965DestroyMutex(&i965->pp_mutex);
4191 _i965DestroyMutex(&i965->render_mutex);
4194 intel_batchbuffer_free(i965->batch);
/* Heaps are destroyed in the reverse order of their creation in init. */
4196 i965_destroy_heap(&i965->subpic_heap, i965_destroy_subpic);
4197 i965_destroy_heap(&i965->image_heap, i965_destroy_image);
4198 i965_destroy_heap(&i965->buffer_heap, i965_destroy_buffer);
4199 i965_destroy_heap(&i965->surface_heap, i965_destroy_surface);
4200 i965_destroy_heap(&i965->context_heap, i965_destroy_context);
4201 i965_destroy_heap(&i965->config_heap, i965_destroy_config);
/*
 * i965_sub_ops — table of driver subsystems, each with an init/terminate
 * pair (the enclosing `static struct {` line and the display_type member
 * and its initializers are elided in this extract). i965_Init runs the
 * table in order; i965_Terminate walks it in reverse. Wayland and DRI
 * output backends are compiled in conditionally.
 */
4205 bool (*init)(VADriverContextP ctx);
4206 void (*terminate)(VADriverContextP ctx);
4208 } i965_sub_ops[] = {
4211 intel_driver_terminate,
4216 i965_driver_data_init,
4217 i965_driver_data_terminate,
4222 i965_display_attributes_init,
4223 i965_display_attributes_terminate,
4228 i965_post_processing_init,
4229 i965_post_processing_terminate,
4235 i965_render_terminate,
4239 #ifdef HAVE_VA_WAYLAND
4241 i965_output_wayland_init,
4242 i965_output_wayland_terminate,
4249 i965_output_dri_init,
4250 i965_output_dri_terminate,
/*
 * i965_Init — runs every applicable i965_sub_ops entry (those matching the
 * current display type, or display_type == 0 for "always"). On full
 * success it formats the vendor string and returns success; if any init
 * fails, the already-initialized entries are terminated in reverse order
 * and VA_STATUS_ERROR_UNKNOWN is returned.
 */
4257 i965_Init(VADriverContextP ctx)
4259 struct i965_driver_data *i965 = i965_driver_data(ctx);
4262 for (i = 0; i < ARRAY_ELEMS(i965_sub_ops); i++) {
4263 if ((i965_sub_ops[i].display_type == 0 ||
4264 i965_sub_ops[i].display_type == (ctx->display_type & VA_DISPLAY_MAJOR_MASK)) &&
4265 !i965_sub_ops[i].init(ctx))
/* i == count means every subsystem initialized successfully. */
4269 if (i == ARRAY_ELEMS(i965_sub_ops)) {
4270 sprintf(i965->va_vendor, "%s %s driver - %d.%d.%d",
4271 INTEL_STR_DRIVER_VENDOR,
4272 INTEL_STR_DRIVER_NAME,
4273 INTEL_DRIVER_MAJOR_VERSION,
4274 INTEL_DRIVER_MINOR_VERSION,
4275 INTEL_DRIVER_MICRO_VERSION);
/* Append ".preN" for pre-release builds. */
4277 if (INTEL_DRIVER_PRE_VERSION > 0) {
4278 const int len = strlen(i965->va_vendor);
4279 sprintf(&i965->va_vendor[len], ".pre%d", INTEL_DRIVER_PRE_VERSION);
4282 i965->current_context_id = VA_INVALID_ID;
4284 return VA_STATUS_SUCCESS;
/* Failure: unwind the subsystems that did initialize, newest first. */
4288 for (; i >= 0; i--) {
4289 if (i965_sub_ops[i].display_type == 0 ||
4290 i965_sub_ops[i].display_type == (ctx->display_type & VA_DISPLAY_MAJOR_MASK)) {
4291 i965_sub_ops[i].terminate(ctx);
4295 return VA_STATUS_ERROR_UNKNOWN;
/*
 * i965_Terminate — vaTerminate implementation: walks i965_sub_ops in
 * reverse, calling terminate on every entry applicable to the current
 * display type. Always reports success.
 */
4300 i965_Terminate(VADriverContextP ctx)
4302 struct i965_driver_data *i965 = i965_driver_data(ctx);
/* Reverse order of i965_Init so dependencies unwind cleanly. */
4306 for (i = ARRAY_ELEMS(i965_sub_ops); i > 0; i--)
4307 if (i965_sub_ops[i - 1].display_type == 0 ||
4308 i965_sub_ops[i - 1].display_type == (ctx->display_type & VA_DISPLAY_MAJOR_MASK)) {
4309 i965_sub_ops[i - 1].terminate(ctx);
4313 return VA_STATUS_SUCCESS;
/* Forward declaration of the libva driver entry point defined below;
 * libva resolves this symbol when loading the driver shared object. */
4317 VA_DRIVER_INIT_FUNC(VADriverContextP ctx);
4320 VA_DRIVER_INIT_FUNC( VADriverContextP ctx )
4322 struct VADriverVTable * const vtable = ctx->vtable;
4323 struct VADriverVTableVPP * const vtable_vpp = ctx->vtable_vpp;
4325 struct i965_driver_data *i965;
4326 VAStatus ret = VA_STATUS_ERROR_UNKNOWN;
4328 ctx->version_major = VA_MAJOR_VERSION;
4329 ctx->version_minor = VA_MINOR_VERSION;
4330 ctx->max_profiles = I965_MAX_PROFILES;
4331 ctx->max_entrypoints = I965_MAX_ENTRYPOINTS;
4332 ctx->max_attributes = I965_MAX_CONFIG_ATTRIBUTES;
4333 ctx->max_image_formats = I965_MAX_IMAGE_FORMATS;
4334 ctx->max_subpic_formats = I965_MAX_SUBPIC_FORMATS;
4335 ctx->max_display_attributes = 1 + ARRAY_ELEMS(i965_display_attributes);
4337 vtable->vaTerminate = i965_Terminate;
4338 vtable->vaQueryConfigEntrypoints = i965_QueryConfigEntrypoints;
4339 vtable->vaQueryConfigProfiles = i965_QueryConfigProfiles;
4340 vtable->vaQueryConfigEntrypoints = i965_QueryConfigEntrypoints;
4341 vtable->vaQueryConfigAttributes = i965_QueryConfigAttributes;
4342 vtable->vaCreateConfig = i965_CreateConfig;
4343 vtable->vaDestroyConfig = i965_DestroyConfig;
4344 vtable->vaGetConfigAttributes = i965_GetConfigAttributes;
4345 vtable->vaCreateSurfaces = i965_CreateSurfaces;
4346 vtable->vaDestroySurfaces = i965_DestroySurfaces;
4347 vtable->vaCreateContext = i965_CreateContext;
4348 vtable->vaDestroyContext = i965_DestroyContext;
4349 vtable->vaCreateBuffer = i965_CreateBuffer;
4350 vtable->vaBufferSetNumElements = i965_BufferSetNumElements;
4351 vtable->vaMapBuffer = i965_MapBuffer;
4352 vtable->vaUnmapBuffer = i965_UnmapBuffer;
4353 vtable->vaDestroyBuffer = i965_DestroyBuffer;
4354 vtable->vaBeginPicture = i965_BeginPicture;
4355 vtable->vaRenderPicture = i965_RenderPicture;
4356 vtable->vaEndPicture = i965_EndPicture;
4357 vtable->vaSyncSurface = i965_SyncSurface;
4358 vtable->vaQuerySurfaceStatus = i965_QuerySurfaceStatus;
4359 vtable->vaPutSurface = i965_PutSurface;
4360 vtable->vaQueryImageFormats = i965_QueryImageFormats;
4361 vtable->vaCreateImage = i965_CreateImage;
4362 vtable->vaDeriveImage = i965_DeriveImage;
4363 vtable->vaDestroyImage = i965_DestroyImage;
4364 vtable->vaSetImagePalette = i965_SetImagePalette;
4365 vtable->vaGetImage = i965_GetImage;
4366 vtable->vaPutImage = i965_PutImage;
4367 vtable->vaQuerySubpictureFormats = i965_QuerySubpictureFormats;
4368 vtable->vaCreateSubpicture = i965_CreateSubpicture;
4369 vtable->vaDestroySubpicture = i965_DestroySubpicture;
4370 vtable->vaSetSubpictureImage = i965_SetSubpictureImage;
4371 vtable->vaSetSubpictureChromakey = i965_SetSubpictureChromakey;
4372 vtable->vaSetSubpictureGlobalAlpha = i965_SetSubpictureGlobalAlpha;
4373 vtable->vaAssociateSubpicture = i965_AssociateSubpicture;
4374 vtable->vaDeassociateSubpicture = i965_DeassociateSubpicture;
4375 vtable->vaQueryDisplayAttributes = i965_QueryDisplayAttributes;
4376 vtable->vaGetDisplayAttributes = i965_GetDisplayAttributes;
4377 vtable->vaSetDisplayAttributes = i965_SetDisplayAttributes;
4378 vtable->vaBufferInfo = i965_BufferInfo;
4379 vtable->vaLockSurface = i965_LockSurface;
4380 vtable->vaUnlockSurface = i965_UnlockSurface;
4381 vtable->vaGetSurfaceAttributes = i965_GetSurfaceAttributes;
4382 vtable->vaCreateSurfaces2 = i965_CreateSurfaces2;
4384 vtable_vpp->vaQueryVideoProcFilters = i965_QueryVideoProcFilters;
4385 vtable_vpp->vaQueryVideoProcFilterCaps = i965_QueryVideoProcFilterCaps;
4386 vtable_vpp->vaQueryVideoProcPipelineCaps = i965_QueryVideoProcPipelineCaps;
4388 i965 = (struct i965_driver_data *)calloc(1, sizeof(*i965));
4391 ctx->pDriverData = NULL;
4393 return VA_STATUS_ERROR_ALLOCATION_FAILED;
4396 ctx->pDriverData = (void *)i965;
4397 ret = i965_Init(ctx);
4399 if (ret == VA_STATUS_SUCCESS) {
4400 ctx->str_vendor = i965->va_vendor;
4403 ctx->pDriverData = NULL;