2 * Copyright © 2009 Intel Corporation
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the
6 * "Software"), to deal in the Software without restriction, including
7 * without limitation the rights to use, copy, modify, merge, publish,
8 * distribute, sub license, and/or sell copies of the Software, and to
9 * permit persons to whom the Software is furnished to do so, subject to
10 * the following conditions:
12 * The above copyright notice and this permission notice (including the
13 * next paragraph) shall be included in all copies or substantial portions
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
17 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
18 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
19 * IN NO EVENT SHALL PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR
20 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
21 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
22 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
25 * Xiang Haihao <haihao.xiang@intel.com>
26 * Zou Nan hai <nanhai.zou@intel.com>
33 # include "i965_output_dri.h"
36 #ifdef HAVE_VA_WAYLAND
37 # include "i965_output_wayland.h"
40 #include "intel_driver.h"
41 #include "intel_memman.h"
42 #include "intel_batchbuffer.h"
43 #include "i965_defines.h"
44 #include "i965_drv_video.h"
45 #include "i965_decoder.h"
46 #include "i965_encoder.h"
48 #define CONFIG_ID_OFFSET 0x01000000
49 #define CONTEXT_ID_OFFSET 0x02000000
50 #define SURFACE_ID_OFFSET 0x04000000
51 #define BUFFER_ID_OFFSET 0x08000000
52 #define IMAGE_ID_OFFSET 0x0a000000
53 #define SUBPIC_ID_OFFSET 0x10000000
55 #define HAS_MPEG2(ctx) (IS_G4X((ctx)->intel.device_id) || \
56 IS_IRONLAKE((ctx)->intel.device_id) || \
57 ((IS_GEN6((ctx)->intel.device_id) || \
58 IS_GEN7((ctx)->intel.device_id)) && \
59 (ctx)->intel.has_bsd))
61 #define HAS_H264(ctx) ((IS_GEN7((ctx)->intel.device_id) || \
62 IS_GEN6((ctx)->intel.device_id) || \
63 IS_IRONLAKE((ctx)->intel.device_id)) && \
66 #define HAS_VC1(ctx) ((IS_GEN7((ctx)->intel.device_id) || \
67 IS_GEN6((ctx)->intel.device_id)) && \
70 #define HAS_TILED_SURFACE(ctx) ((IS_GEN7((ctx)->intel.device_id) || \
71 IS_GEN6((ctx)->intel.device_id)) && \
72 (ctx)->render_state.interleaved_uv)
74 #define HAS_ENCODER(ctx) ((IS_GEN7((ctx)->intel.device_id) || \
75 IS_GEN6((ctx)->intel.device_id)) && \
78 #define HAS_JPEG(ctx) (IS_GEN7((ctx)->intel.device_id) && \
81 #define HAS_ACCELERATED_GETIMAGE(ctx) (IS_GEN6((ctx)->intel.device_id) || \
82 IS_GEN7((ctx)->intel.device_id))
84 #define HAS_ACCELERATED_PUTIMAGE(ctx) HAS_VPP(ctx)
85 static int get_sampling_from_fourcc(unsigned int fourcc);
87 #if VA_CHECK_VERSION(0,33,0)
88 /* Check whether we are rendering to X11 (VA/X11 or VA/GLX API) */
89 #define IS_VA_X11(ctx) \
90 (((ctx)->display_type & VA_DISPLAY_MAJOR_MASK) == VA_DISPLAY_X11)
92 /* Check whether we are rendering to Wayland */
93 #define IS_VA_WAYLAND(ctx) \
94 (((ctx)->display_type & VA_DISPLAY_MAJOR_MASK) == VA_DISPLAY_WAYLAND)
96 /* Previous VA-API versions only supported VA/X11 (and VA/GLX) API */
97 #define IS_VA_X11(ctx) 1
98 #define IS_VA_WAYLAND(ctx) 0
102 I965_SURFACETYPE_RGBA = 1,
103 I965_SURFACETYPE_YUV,
104 I965_SURFACETYPE_INDEXED
107 /* List of supported display attributes */
108 static const VADisplayAttribute i965_display_attributes[] = {
110 VADisplayAttribRotation,
111 0, 3, VA_ROTATION_NONE,
112 VA_DISPLAY_ATTRIB_GETTABLE|VA_DISPLAY_ATTRIB_SETTABLE
116 /* List of supported image formats */
119 VAImageFormat va_format;
120 } i965_image_format_map_t;
122 static const i965_image_format_map_t
123 i965_image_formats_map[I965_MAX_IMAGE_FORMATS + 1] = {
124 { I965_SURFACETYPE_YUV,
125 { VA_FOURCC('Y','V','1','2'), VA_LSB_FIRST, 12, } },
126 { I965_SURFACETYPE_YUV,
127 { VA_FOURCC('I','4','2','0'), VA_LSB_FIRST, 12, } },
128 { I965_SURFACETYPE_YUV,
129 { VA_FOURCC('N','V','1','2'), VA_LSB_FIRST, 12, } },
132 /* List of supported subpicture formats */
136 VAImageFormat va_format;
137 unsigned int va_flags;
138 } i965_subpic_format_map_t;
140 static const i965_subpic_format_map_t
141 i965_subpic_formats_map[I965_MAX_SUBPIC_FORMATS + 1] = {
142 { I965_SURFACETYPE_INDEXED, I965_SURFACEFORMAT_P4A4_UNORM,
143 { VA_FOURCC('I','A','4','4'), VA_MSB_FIRST, 8, },
144 VA_SUBPICTURE_DESTINATION_IS_SCREEN_COORD },
145 { I965_SURFACETYPE_INDEXED, I965_SURFACEFORMAT_A4P4_UNORM,
146 { VA_FOURCC('A','I','4','4'), VA_MSB_FIRST, 8, },
147 VA_SUBPICTURE_DESTINATION_IS_SCREEN_COORD },
148 { I965_SURFACETYPE_RGBA, I965_SURFACEFORMAT_B8G8R8A8_UNORM,
149 { VA_FOURCC('B','G','R','A'), VA_LSB_FIRST, 32,
150 32, 0x00ff0000, 0x0000ff00, 0x000000ff, 0xff000000 },
151 VA_SUBPICTURE_DESTINATION_IS_SCREEN_COORD },
152 { I965_SURFACETYPE_RGBA, I965_SURFACEFORMAT_R8G8B8A8_UNORM,
153 { VA_FOURCC('R','G','B','A'), VA_LSB_FIRST, 32,
154 32, 0x000000ff, 0x0000ff00, 0x00ff0000, 0xff000000 },
155 VA_SUBPICTURE_DESTINATION_IS_SCREEN_COORD },
/* Look up the subpicture-format table entry matching @va_format.
 * For RGBA entries the byte order and all four channel masks must also
 * match; for indexed (palette) entries the fourcc alone is enough.
 * NOTE(review): the hit/miss return statements are not visible in this
 * excerpt -- presumably the loop returns m on a match and NULL at the
 * end of the table; confirm against the full source. */
158 static const i965_subpic_format_map_t *
159 get_subpic_format(const VAImageFormat *va_format)
/* The table is terminated by an entry whose type field is 0. */
162 for (i = 0; i965_subpic_formats_map[i].type != 0; i++) {
163 const i965_subpic_format_map_t * const m = &i965_subpic_formats_map[i];
164 if (m->va_format.fourcc == va_format->fourcc &&
165 (m->type == I965_SURFACETYPE_RGBA ?
166 (m->va_format.byte_order == va_format->byte_order &&
167 m->va_format.red_mask == va_format->red_mask &&
168 m->va_format.green_mask == va_format->green_mask &&
169 m->va_format.blue_mask == va_format->blue_mask &&
170 m->va_format.alpha_mask == va_format->alpha_mask) : 1))
/* Per-generation codec vtables: each hw_codec_info binds the decoder
 * (and, where available, encoder) hardware-context constructors for one
 * GPU generation.  Note Gen7 reuses the Gen6 encoder init function. */
176 extern struct hw_context *g4x_dec_hw_context_init(VADriverContextP, VAProfile);
177 static struct hw_codec_info g4x_hw_codec_info = {
178 .dec_hw_context_init = g4x_dec_hw_context_init,
179 .enc_hw_context_init = NULL,
184 extern struct hw_context *ironlake_dec_hw_context_init(VADriverContextP, VAProfile);
185 static struct hw_codec_info ironlake_hw_codec_info = {
186 .dec_hw_context_init = ironlake_dec_hw_context_init,
187 .enc_hw_context_init = NULL,
192 extern struct hw_context *gen6_dec_hw_context_init(VADriverContextP, VAProfile);
193 extern struct hw_context *gen6_enc_hw_context_init(VADriverContextP, VAProfile);
194 static struct hw_codec_info gen6_hw_codec_info = {
195 .dec_hw_context_init = gen6_dec_hw_context_init,
196 .enc_hw_context_init = gen6_enc_hw_context_init,
201 extern struct hw_context *gen7_dec_hw_context_init(VADriverContextP, VAProfile);
202 static struct hw_codec_info gen7_hw_codec_info = {
203 .dec_hw_context_init = gen7_dec_hw_context_init,
/* Gen7 has no dedicated encoder init; Gen6's is compatible. */
204 .enc_hw_context_init = gen6_enc_hw_context_init,
/* NOTE(review): gen75_* extern declarations are not visible here --
 * presumably declared in the elided lines above. */
209 static struct hw_codec_info gen75_hw_codec_info = {
210 .dec_hw_context_init = gen75_dec_hw_context_init,
211 .enc_hw_context_init = gen75_enc_hw_context_init,
/* vaQueryConfigProfiles: list every VAProfile the current GPU generation
 * supports (per the HAS_* capability macros) and return the count through
 * num_profiles.  profile_list must hold I965_MAX_PROFILES entries. */
217 i965_QueryConfigProfiles(VADriverContextP ctx,
218 VAProfile *profile_list, /* out */
219 int *num_profiles) /* out */
221 struct i965_driver_data * const i965 = i965_driver_data(ctx);
224 if (HAS_MPEG2(i965)) {
225 profile_list[i++] = VAProfileMPEG2Simple;
226 profile_list[i++] = VAProfileMPEG2Main;
229 if (HAS_H264(i965)) {
230 profile_list[i++] = VAProfileH264Baseline;
231 profile_list[i++] = VAProfileH264Main;
232 profile_list[i++] = VAProfileH264High;
/* NOTE(review): the HAS_VC1 guard is in an elided line; these VC-1
 * entries are presumably conditional on it. */
236 profile_list[i++] = VAProfileVC1Simple;
237 profile_list[i++] = VAProfileVC1Main;
238 profile_list[i++] = VAProfileVC1Advanced;
/* JPEG decode support is a compile-time option. */
241 #ifdef HAVE_VA_JPEG_DECODE
242 if (HAS_JPEG(i965)) {
243 profile_list[i++] = VAProfileJPEGBaseline;
247 /* If the assert fails then I965_MAX_PROFILES needs to be bigger */
248 assert(i <= I965_MAX_PROFILES);
251 return VA_STATUS_SUCCESS;
/* vaQueryConfigEntrypoints: report the entrypoints available for a given
 * profile.  All decodable profiles expose VAEntrypointVLD; H.264 also
 * exposes VAEntrypointEncSlice when the generation has an encoder. */
255 i965_QueryConfigEntrypoints(VADriverContextP ctx,
257 VAEntrypoint *entrypoint_list, /* out */
258 int *num_entrypoints) /* out */
260 struct i965_driver_data * const i965 = i965_driver_data(ctx);
264 case VAProfileMPEG2Simple:
265 case VAProfileMPEG2Main:
267 entrypoint_list[n++] = VAEntrypointVLD;
270 case VAProfileH264Baseline:
271 case VAProfileH264Main:
272 case VAProfileH264High:
274 entrypoint_list[n++] = VAEntrypointVLD;
276 if (HAS_ENCODER(i965))
277 entrypoint_list[n++] = VAEntrypointEncSlice;
281 case VAProfileVC1Simple:
282 case VAProfileVC1Main:
283 case VAProfileVC1Advanced:
285 entrypoint_list[n++] = VAEntrypointVLD;
288 case VAProfileJPEGBaseline:
290 entrypoint_list[n++] = VAEntrypointVLD;
297 /* If the assert fails then I965_MAX_ENTRYPOINTS needs to be bigger */
298 assert(n <= I965_MAX_ENTRYPOINTS);
299 *num_entrypoints = n;
/* An empty list means the profile itself is unsupported. */
300 return n > 0 ? VA_STATUS_SUCCESS : VA_STATUS_ERROR_UNSUPPORTED_PROFILE;
/* vaGetConfigAttributes: fill in the value field for each requested
 * attribute.  Only RTFormat (YUV420) and RateControl (VBR) are known;
 * anything else is marked VA_ATTRIB_NOT_SUPPORTED. */
304 i965_GetConfigAttributes(VADriverContextP ctx,
306 VAEntrypoint entrypoint,
307 VAConfigAttrib *attrib_list, /* in/out */
312 /* Other attributes don't seem to be defined */
313 /* What to do if we don't know the attribute? */
314 for (i = 0; i < num_attribs; i++) {
315 switch (attrib_list[i].type) {
316 case VAConfigAttribRTFormat:
317 attrib_list[i].value = VA_RT_FORMAT_YUV420;
320 case VAConfigAttribRateControl:
321 attrib_list[i].value = VA_RC_VBR;
/* Unknown attribute types fall through to "not supported". */
326 attrib_list[i].value = VA_ATTRIB_NOT_SUPPORTED;
331 return VA_STATUS_SUCCESS;
/* Heap destructor for config objects: a config owns no extra resources,
 * so releasing its heap slot is all that is required. */
335 i965_destroy_config(struct object_heap *heap, struct object_base *obj)
337     object_heap_free(heap, obj);
341 i965_update_attribute(struct object_config *obj_config, VAConfigAttrib *attrib)
345 /* Check existing attrbiutes */
346 for (i = 0; obj_config->num_attribs < i; i++) {
347 if (obj_config->attrib_list[i].type == attrib->type) {
348 /* Update existing attribute */
349 obj_config->attrib_list[i].value = attrib->value;
350 return VA_STATUS_SUCCESS;
354 if (obj_config->num_attribs < I965_MAX_CONFIG_ATTRIBUTES) {
355 i = obj_config->num_attribs;
356 obj_config->attrib_list[i].type = attrib->type;
357 obj_config->attrib_list[i].value = attrib->value;
358 obj_config->num_attribs++;
359 return VA_STATUS_SUCCESS;
362 return VA_STATUS_ERROR_MAX_NUM_EXCEEDED;
/* vaCreateConfig: validate the (profile, entrypoint) pair against the
 * hardware capabilities, allocate a config object from the heap, seed it
 * with the default RTFormat attribute, then merge caller attributes. */
366 i965_CreateConfig(VADriverContextP ctx,
368 VAEntrypoint entrypoint,
369 VAConfigAttrib *attrib_list,
371 VAConfigID *config_id) /* out */
373 struct i965_driver_data * const i965 = i965_driver_data(ctx);
374 struct object_config *obj_config;
379 /* Validate profile & entrypoint */
381 case VAProfileMPEG2Simple:
382 case VAProfileMPEG2Main:
383 if (HAS_MPEG2(i965) && VAEntrypointVLD == entrypoint) {
384 vaStatus = VA_STATUS_SUCCESS;
386 vaStatus = VA_STATUS_ERROR_UNSUPPORTED_ENTRYPOINT;
390 case VAProfileH264Baseline:
391 case VAProfileH264Main:
392 case VAProfileH264High:
/* H.264 accepts both decode (VLD) and, with encoder HW, encode. */
393 if ((HAS_H264(i965) && VAEntrypointVLD == entrypoint) ||
394 (HAS_ENCODER(i965) && VAEntrypointEncSlice == entrypoint)) {
395 vaStatus = VA_STATUS_SUCCESS;
397 vaStatus = VA_STATUS_ERROR_UNSUPPORTED_ENTRYPOINT;
402 case VAProfileVC1Simple:
403 case VAProfileVC1Main:
404 case VAProfileVC1Advanced:
405 if (HAS_VC1(i965) && VAEntrypointVLD == entrypoint) {
406 vaStatus = VA_STATUS_SUCCESS;
408 vaStatus = VA_STATUS_ERROR_UNSUPPORTED_ENTRYPOINT;
413 case VAProfileJPEGBaseline:
414 if (HAS_JPEG(i965) && VAEntrypointVLD == entrypoint) {
415 vaStatus = VA_STATUS_SUCCESS;
417 vaStatus = VA_STATUS_ERROR_UNSUPPORTED_ENTRYPOINT;
423 vaStatus = VA_STATUS_ERROR_UNSUPPORTED_PROFILE;
427 if (VA_STATUS_SUCCESS != vaStatus) {
431 configID = NEW_CONFIG_ID();
432 obj_config = CONFIG(configID);
434 if (NULL == obj_config) {
435 vaStatus = VA_STATUS_ERROR_ALLOCATION_FAILED;
/* Every config starts with the mandatory RTFormat attribute. */
439 obj_config->profile = profile;
440 obj_config->entrypoint = entrypoint;
441 obj_config->attrib_list[0].type = VAConfigAttribRTFormat;
442 obj_config->attrib_list[0].value = VA_RT_FORMAT_YUV420;
443 obj_config->num_attribs = 1;
445 for(i = 0; i < num_attribs; i++) {
446 vaStatus = i965_update_attribute(obj_config, &(attrib_list[i]));
448 if (VA_STATUS_SUCCESS != vaStatus) {
/* On any failure the half-built config is torn down again. */
454 if (VA_STATUS_SUCCESS != vaStatus) {
455 i965_destroy_config(&i965->config_heap, (struct object_base *)obj_config);
457 *config_id = configID;
/* vaDestroyConfig: resolve the id in the config heap and release it.
 * Unknown ids yield VA_STATUS_ERROR_INVALID_CONFIG. */
464 i965_DestroyConfig(VADriverContextP ctx, VAConfigID config_id)
466 struct i965_driver_data *i965 = i965_driver_data(ctx);
467 struct object_config *obj_config = CONFIG(config_id);
470 if (NULL == obj_config) {
471 vaStatus = VA_STATUS_ERROR_INVALID_CONFIG;
475 i965_destroy_config(&i965->config_heap, (struct object_base *)obj_config);
476 return VA_STATUS_SUCCESS;
/* vaQueryConfigAttributes: copy back the profile, entrypoint, and full
 * attribute list stored on the config object.  attrib_list must hold at
 * least obj_config->num_attribs entries. */
479 VAStatus i965_QueryConfigAttributes(VADriverContextP ctx,
480 VAConfigID config_id,
481 VAProfile *profile, /* out */
482 VAEntrypoint *entrypoint, /* out */
483 VAConfigAttrib *attrib_list, /* out */
484 int *num_attribs) /* out */
486 struct i965_driver_data *i965 = i965_driver_data(ctx);
487 struct object_config *obj_config = CONFIG(config_id);
488 VAStatus vaStatus = VA_STATUS_SUCCESS;
/* NOTE(review): no visible NULL check on obj_config in this excerpt --
 * presumably asserted in an elided line; confirm. */
492 *profile = obj_config->profile;
493 *entrypoint = obj_config->entrypoint;
494 *num_attribs = obj_config->num_attribs;
496 for(i = 0; i < obj_config->num_attribs; i++) {
497 attrib_list[i] = obj_config->attrib_list[i];
/* Heap destructor for surface objects: drop the GEM buffer reference,
 * run the codec-private destructor if one was installed, then release
 * the heap slot. */
504 i965_destroy_surface(struct object_heap *heap, struct object_base *obj)
506 struct object_surface *obj_surface = (struct object_surface *)obj;
508 dri_bo_unreference(obj_surface->bo);
509 obj_surface->bo = NULL;
/* Decoder contexts may attach private data with its own free routine. */
511 if (obj_surface->free_private_data != NULL) {
512 obj_surface->free_private_data(&obj_surface->private_data);
513 obj_surface->private_data = NULL;
516 object_heap_free(heap, obj);
/* vaCreateSurfaces: allocate num_surfaces YUV420 surface objects.  The
 * backing BO is allocated lazily (bo stays NULL here); only bookkeeping
 * and aligned dimensions are set up.  On partial failure, all surfaces
 * allocated so far are rolled back. */
520 i965_CreateSurfaces(VADriverContextP ctx,
525 VASurfaceID *surfaces) /* out */
527 struct i965_driver_data *i965 = i965_driver_data(ctx);
529 VAStatus vaStatus = VA_STATUS_SUCCESS;
531 /* We only support one format */
532 if (VA_RT_FORMAT_YUV420 != format) {
533 return VA_STATUS_ERROR_UNSUPPORTED_RT_FORMAT;
536 for (i = 0; i < num_surfaces; i++) {
537 int surfaceID = NEW_SURFACE_ID();
538 struct object_surface *obj_surface = SURFACE(surfaceID);
540 if (NULL == obj_surface) {
541 vaStatus = VA_STATUS_ERROR_ALLOCATION_FAILED;
545 surfaces[i] = surfaceID;
546 obj_surface->status = VASurfaceReady;
547 obj_surface->subpic = VA_INVALID_ID;
548 obj_surface->orig_width = width;
549 obj_surface->orig_height = height;
/* Older generations need 16-pixel alignment; Gen6+ uses 128x32. */
551 if (IS_G4X(i965->intel.device_id) || IS_IRONLAKE(i965->intel.device_id)) {
552 obj_surface->width = ALIGN(width, 16);
553 obj_surface->height = ALIGN(height, 16);
555 obj_surface->width = ALIGN(width, 128);
556 obj_surface->height = ALIGN(height, 32);
559 obj_surface->flags = SURFACE_REFERENCED;
560 obj_surface->fourcc = 0;
561 obj_surface->bo = NULL;
562 obj_surface->locked_image_id = VA_INVALID_ID;
563 obj_surface->private_data = NULL;
564 obj_surface->free_private_data = NULL;
565 obj_surface->subsampling = SUBSAMPLE_YUV420;
/* Error path: undo every surface created before the failure. */
569 if (VA_STATUS_SUCCESS != vaStatus) {
570 /* surfaces[i-1] was the last successful allocation */
572 struct object_surface *obj_surface = SURFACE(surfaces[i]);
574 surfaces[i] = VA_INVALID_SURFACE;
576 i965_destroy_surface(&i965->surface_heap, (struct object_base *)obj_surface);
/* vaDestroySurfaces: tear down each listed surface.  Iterates backwards
 * so the list can be freed in reverse creation order. */
584 i965_DestroySurfaces(VADriverContextP ctx,
585 VASurfaceID *surface_list,
588 struct i965_driver_data *i965 = i965_driver_data(ctx);
591 for (i = num_surfaces; i--; ) {
592 struct object_surface *obj_surface = SURFACE(surface_list[i]);
595 i965_destroy_surface(&i965->surface_heap, (struct object_base *)obj_surface);
598 return VA_STATUS_SUCCESS;
/* vaQueryImageFormats: copy the supported image formats (YV12, I420,
 * NV12 per the table above) into format_list and report the count. */
602 i965_QueryImageFormats(VADriverContextP ctx,
603 VAImageFormat *format_list, /* out */
604 int *num_formats) /* out */
/* The format table is terminated by a zero fourcc. */
608 for (n = 0; i965_image_formats_map[n].va_format.fourcc != 0; n++) {
609 const i965_image_format_map_t * const m = &i965_image_formats_map[n];
611 format_list[n] = m->va_format;
617 return VA_STATUS_SUCCESS;
/* vaPutImage stub: performs no copy at all but still reports success.
 * NOTE(review): returning VA_STATUS_SUCCESS from an unimplemented path
 * silently drops the caller's data -- consider
 * VA_STATUS_ERROR_UNIMPLEMENTED like the other stubs below. */
621 i965_PutImage(VADriverContextP ctx,
626 unsigned int src_width,
627 unsigned int src_height,
630 unsigned int dest_width,
631 unsigned int dest_height)
633 return VA_STATUS_SUCCESS;
/* vaQuerySubpictureFormats: copy the supported subpicture formats
 * (IA44/AI44/BGRA/RGBA per the table above) and their per-format flag
 * bits into the caller's arrays, reporting the count. */
637 i965_QuerySubpictureFormats(VADriverContextP ctx,
638 VAImageFormat *format_list, /* out */
639 unsigned int *flags, /* out */
640 unsigned int *num_formats) /* out */
/* The subpicture table is terminated by a zero fourcc. */
644 for (n = 0; i965_subpic_formats_map[n].va_format.fourcc != 0; n++) {
645 const i965_subpic_format_map_t * const m = &i965_subpic_formats_map[n];
647 format_list[n] = m->va_format;
649 flags[n] = m->va_flags;
655 return VA_STATUS_SUCCESS;
/* Heap destructor for subpicture objects.  The subpicture does not own
 * its BO (it borrows the image's), so only the heap slot is released. */
659 i965_destroy_subpic(struct object_heap *heap, struct object_base *obj)
661 //    struct object_subpic *obj_subpic = (struct object_subpic *)obj;
663 object_heap_free(heap, obj);
667 i965_CreateSubpicture(VADriverContextP ctx,
669 VASubpictureID *subpicture) /* out */
671 struct i965_driver_data *i965 = i965_driver_data(ctx);
672 VASubpictureID subpicID = NEW_SUBPIC_ID()
673 struct object_subpic *obj_subpic = SUBPIC(subpicID);
676 return VA_STATUS_ERROR_ALLOCATION_FAILED;
678 struct object_image *obj_image = IMAGE(image);
680 return VA_STATUS_ERROR_INVALID_IMAGE;
682 const i965_subpic_format_map_t * const m = get_subpic_format(&obj_image->image.format);
684 return VA_STATUS_ERROR_UNKNOWN; /* XXX: VA_STATUS_ERROR_UNSUPPORTED_FORMAT? */
686 *subpicture = subpicID;
687 obj_subpic->image = image;
688 obj_subpic->format = m->format;
689 obj_subpic->width = obj_image->image.width;
690 obj_subpic->height = obj_image->image.height;
691 obj_subpic->pitch = obj_image->image.pitches[0];
692 obj_subpic->bo = obj_image->bo;
693 return VA_STATUS_SUCCESS;
/* vaDestroySubpicture: release the subpicture object back to its heap.
 * NOTE(review): no visible NULL check on obj_subpic in this excerpt --
 * an invalid id would pass NULL to i965_destroy_subpic; confirm. */
697 i965_DestroySubpicture(VADriverContextP ctx,
698 VASubpictureID subpicture)
700 struct i965_driver_data *i965 = i965_driver_data(ctx);
701 struct object_subpic *obj_subpic = SUBPIC(subpicture);
702 i965_destroy_subpic(&i965->subpic_heap, (struct object_base *)obj_subpic);
703 return VA_STATUS_SUCCESS;
/* vaSetSubpictureImage: not implemented by this driver. */
707 i965_SetSubpictureImage(VADriverContextP ctx,
708 VASubpictureID subpicture,
712 return VA_STATUS_ERROR_UNIMPLEMENTED;
/* vaSetSubpictureChromakey: not implemented by this driver. */
716 i965_SetSubpictureChromakey(VADriverContextP ctx,
717 VASubpictureID subpicture,
718 unsigned int chromakey_min,
719 unsigned int chromakey_max,
720 unsigned int chromakey_mask)
723 return VA_STATUS_ERROR_UNIMPLEMENTED;
/* vaSetSubpictureGlobalAlpha: not implemented by this driver. */
727 i965_SetSubpictureGlobalAlpha(VADriverContextP ctx,
728 VASubpictureID subpicture,
732 return VA_STATUS_ERROR_UNIMPLEMENTED;
/* vaAssociateSubpicture: record the source/destination rectangles and
 * flags on the subpicture, then attach it to each target surface.  Each
 * surface tracks at most one subpicture (the subpic field). */
736 i965_AssociateSubpicture(VADriverContextP ctx,
737 VASubpictureID subpicture,
738 VASurfaceID *target_surfaces,
740 short src_x, /* upper left offset in subpicture */
742 unsigned short src_width,
743 unsigned short src_height,
744 short dest_x, /* upper left offset in surface */
746 unsigned short dest_width,
747 unsigned short dest_height,
749 * whether to enable chroma-keying or global-alpha
750 * see VA_SUBPICTURE_XXX values
754 struct i965_driver_data *i965 = i965_driver_data(ctx);
755 struct object_subpic *obj_subpic = SUBPIC(subpicture);
758 obj_subpic->src_rect.x = src_x;
759 obj_subpic->src_rect.y = src_y;
760 obj_subpic->src_rect.width = src_width;
761 obj_subpic->src_rect.height = src_height;
762 obj_subpic->dst_rect.x = dest_x;
763 obj_subpic->dst_rect.y = dest_y;
764 obj_subpic->dst_rect.width = dest_width;
765 obj_subpic->dst_rect.height = dest_height;
766 obj_subpic->flags = flags;
/* Bail out on the first invalid surface id; earlier surfaces in the
 * list keep their association. */
768 for (i = 0; i < num_surfaces; i++) {
769 struct object_surface *obj_surface = SURFACE(target_surfaces[i]);
771 return VA_STATUS_ERROR_INVALID_SURFACE;
772 obj_surface->subpic = subpicture;
774 return VA_STATUS_SUCCESS;
/* vaDeassociateSubpicture: detach the subpicture from each listed
 * surface, but only if that surface actually references it. */
779 i965_DeassociateSubpicture(VADriverContextP ctx,
780 VASubpictureID subpicture,
781 VASurfaceID *target_surfaces,
784 struct i965_driver_data *i965 = i965_driver_data(ctx);
787 for (i = 0; i < num_surfaces; i++) {
788 struct object_surface *obj_surface = SURFACE(target_surfaces[i]);
790 return VA_STATUS_ERROR_INVALID_SURFACE;
791 if (obj_surface->subpic == subpicture)
792 obj_surface->subpic = VA_INVALID_ID;
794 return VA_STATUS_SUCCESS;
/* Take a new reference on @buffer_store and park it in *@ptr.  The
 * destination must be empty (asserted) so references are never leaked
 * by overwriting. */
798 i965_reference_buffer_store(struct buffer_store **ptr,
799 struct buffer_store *buffer_store)
801 assert(*ptr == NULL);
804 buffer_store->ref_count++;
/* Drop the reference held in *@ptr.  A buffer store is backed by exactly
 * one of a GEM bo or a malloc'd buffer (asserted); when the refcount
 * hits zero the backing storage is released.
 * NOTE(review): the free of the buffer_store struct itself and the
 * clearing of *ptr are not visible in this excerpt -- presumably in the
 * elided tail; confirm. */
810 i965_release_buffer_store(struct buffer_store **ptr)
812 struct buffer_store *buffer_store = *ptr;
/* Releasing an empty slot is a no-op. */
814 if (buffer_store == NULL)
817 assert(buffer_store->bo || buffer_store->buffer);
818 assert(!(buffer_store->bo && buffer_store->buffer));
819 buffer_store->ref_count--;
821 if (buffer_store->ref_count == 0) {
822 dri_bo_unreference(buffer_store->bo);
823 free(buffer_store->buffer);
824 buffer_store->bo = NULL;
825 buffer_store->buffer = NULL;
/* Heap destructor for context objects: tear down the codec hardware
 * context, release every buffer store held in the codec state (encode
 * or decode variant, selected by codec_type), free the slice arrays and
 * render-target list, then release the heap slot. */
833 i965_destroy_context(struct object_heap *heap, struct object_base *obj)
835 struct object_context *obj_context = (struct object_context *)obj;
838 if (obj_context->hw_context) {
839 obj_context->hw_context->destroy(obj_context->hw_context);
840 obj_context->hw_context = NULL;
843 if (obj_context->codec_type == CODEC_ENC) {
844 assert(obj_context->codec_state.encode.num_slice_params <= obj_context->codec_state.encode.max_slice_params);
845 i965_release_buffer_store(&obj_context->codec_state.encode.pic_param);
846 i965_release_buffer_store(&obj_context->codec_state.encode.seq_param);
848 for (i = 0; i < obj_context->codec_state.encode.num_slice_params; i++)
849 i965_release_buffer_store(&obj_context->codec_state.encode.slice_params[i]);
851 free(obj_context->codec_state.encode.slice_params);
/* Decode contexts additionally own iq_matrix, bit_plane, and the
 * slice data array. */
853 assert(obj_context->codec_state.decode.num_slice_params <= obj_context->codec_state.decode.max_slice_params);
854 assert(obj_context->codec_state.decode.num_slice_datas <= obj_context->codec_state.decode.max_slice_datas);
856 i965_release_buffer_store(&obj_context->codec_state.decode.pic_param);
857 i965_release_buffer_store(&obj_context->codec_state.decode.iq_matrix);
858 i965_release_buffer_store(&obj_context->codec_state.decode.bit_plane);
860 for (i = 0; i < obj_context->codec_state.decode.num_slice_params; i++)
861 i965_release_buffer_store(&obj_context->codec_state.decode.slice_params[i]);
863 for (i = 0; i < obj_context->codec_state.decode.num_slice_datas; i++)
864 i965_release_buffer_store(&obj_context->codec_state.decode.slice_datas[i]);
866 free(obj_context->codec_state.decode.slice_params);
867 free(obj_context->codec_state.decode.slice_datas);
870 free(obj_context->render_targets);
871 object_heap_free(heap, obj);
/* vaCreateContext: validate the config and picture dimensions, allocate
 * a context object, wire up the render-target list, and initialize the
 * encode or decode codec state plus the per-generation hw_context. */
875 i965_CreateContext(VADriverContextP ctx,
876 VAConfigID config_id,
880 VASurfaceID *render_targets,
881 int num_render_targets,
882 VAContextID *context) /* out */
884 struct i965_driver_data *i965 = i965_driver_data(ctx);
885 struct i965_render_state *render_state = &i965->render_state;
886 struct object_config *obj_config = CONFIG(config_id);
887 struct object_context *obj_context = NULL;
888 VAStatus vaStatus = VA_STATUS_SUCCESS;
892 if (NULL == obj_config) {
893 vaStatus = VA_STATUS_ERROR_INVALID_CONFIG;
/* Reject dimensions beyond what this generation's codec supports. */
897 if (picture_width > i965->codec_info->max_width ||
898 picture_height > i965->codec_info->max_height) {
899 vaStatus = VA_STATUS_ERROR_RESOLUTION_NOT_SUPPORTED;
904 /* Validate picture dimensions */
905 contextID = NEW_CONTEXT_ID();
906 obj_context = CONTEXT(contextID);
908 if (NULL == obj_context) {
909 vaStatus = VA_STATUS_ERROR_ALLOCATION_FAILED;
913 render_state->inited = 1;
/* H.264 requires NV12 (interleaved UV) render surfaces; other
 * profiles use it only on Gen6/Gen7. */
915 switch (obj_config->profile) {
916 case VAProfileH264Baseline:
917 case VAProfileH264Main:
918 case VAProfileH264High:
920 return VA_STATUS_ERROR_UNSUPPORTED_PROFILE;
921 render_state->interleaved_uv = 1;
924 render_state->interleaved_uv = !!(IS_GEN6(i965->intel.device_id) || IS_GEN7(i965->intel.device_id));
928 *context = contextID;
929 obj_context->flags = flag;
930 obj_context->context_id = contextID;
931 obj_context->config_id = config_id;
932 obj_context->picture_width = picture_width;
933 obj_context->picture_height = picture_height;
934 obj_context->num_render_targets = num_render_targets;
935 obj_context->render_targets =
936 (VASurfaceID *)calloc(num_render_targets, sizeof(VASurfaceID));
937 obj_context->hw_context = NULL;
939 for(i = 0; i < num_render_targets; i++) {
940 if (NULL == SURFACE(render_targets[i])) {
941 vaStatus = VA_STATUS_ERROR_INVALID_SURFACE;
945 obj_context->render_targets[i] = render_targets[i];
948 if (VA_STATUS_SUCCESS == vaStatus) {
949 if (VAEntrypointEncSlice == obj_config->entrypoint ) { /*encode routin only*/
950 obj_context->codec_type = CODEC_ENC;
951 memset(&obj_context->codec_state.encode, 0, sizeof(obj_context->codec_state.encode));
952 obj_context->codec_state.encode.current_render_target = VA_INVALID_ID;
953 obj_context->codec_state.encode.max_slice_params = NUM_SLICES;
954 obj_context->codec_state.encode.slice_params = calloc(obj_context->codec_state.encode.max_slice_params,
955 sizeof(*obj_context->codec_state.encode.slice_params));
956 assert(i965->codec_info->enc_hw_context_init);
957 obj_context->hw_context = i965->codec_info->enc_hw_context_init(ctx, obj_config->profile);
959 obj_context->codec_type = CODEC_DEC;
960 memset(&obj_context->codec_state.decode, 0, sizeof(obj_context->codec_state.decode));
961 obj_context->codec_state.decode.current_render_target = -1;
962 obj_context->codec_state.decode.max_slice_params = NUM_SLICES;
963 obj_context->codec_state.decode.max_slice_datas = NUM_SLICES;
964 obj_context->codec_state.decode.slice_params = calloc(obj_context->codec_state.decode.max_slice_params,
965 sizeof(*obj_context->codec_state.decode.slice_params));
966 obj_context->codec_state.decode.slice_datas = calloc(obj_context->codec_state.decode.max_slice_datas,
967 sizeof(*obj_context->codec_state.decode.slice_datas));
969 assert(i965->codec_info->dec_hw_context_init);
970 obj_context->hw_context = i965->codec_info->dec_hw_context_init(ctx, obj_config->profile);
/* On any failure the partially-built context is torn down. */
975 if (VA_STATUS_SUCCESS != vaStatus) {
976 i965_destroy_context(&i965->context_heap, (struct object_base *)obj_context);
/* vaDestroyContext: resolve the context id and release the object.
 * NOTE(review): no visible NULL check on obj_context in this excerpt --
 * presumably asserted in an elided line; confirm. */
983 i965_DestroyContext(VADriverContextP ctx, VAContextID context)
985 struct i965_driver_data *i965 = i965_driver_data(ctx);
986 struct object_context *obj_context = CONTEXT(context);
989 i965_destroy_context(&i965->context_heap, (struct object_base *)obj_context);
991 return VA_STATUS_SUCCESS;
/* Heap destructor for buffer objects: drop the reference on the backing
 * buffer store, then release the heap slot. */
995 i965_destroy_buffer(struct object_heap *heap, struct object_base *obj)
997 struct object_buffer *obj_buffer = (struct object_buffer *)obj;
999 assert(obj_buffer->buffer_store);
1000 i965_release_buffer_store(&obj_buffer->buffer_store);
1001 object_heap_free(heap, obj);
/* Common buffer-creation path behind vaCreateBuffer.  Validates the
 * buffer type, allocates a buffer object plus a refcounted buffer_store,
 * and picks the backing: an existing BO (store_bo), a fresh GEM BO for
 * slice-data/image/coded buffers, or plain malloc'd memory otherwise.
 * Coded buffers are prefixed with an aligned VACodedBufferSegment
 * header that is initialized in place. */
1005 i965_create_buffer_internal(VADriverContextP ctx,
1006 VAContextID context,
1009 unsigned int num_elements,
1014 struct i965_driver_data *i965 = i965_driver_data(ctx);
1015 struct object_buffer *obj_buffer = NULL;
1016 struct buffer_store *buffer_store = NULL;
/* Whitelist of buffer types this driver understands. */
1021 case VAPictureParameterBufferType:
1022 case VAIQMatrixBufferType:
1023 case VABitPlaneBufferType:
1024 case VASliceGroupMapBufferType:
1025 case VASliceParameterBufferType:
1026 case VASliceDataBufferType:
1027 case VAMacroblockParameterBufferType:
1028 case VAResidualDataBufferType:
1029 case VADeblockingParameterBufferType:
1030 case VAImageBufferType:
1031 case VAEncCodedBufferType:
1032 case VAEncSequenceParameterBufferType:
1033 case VAEncPictureParameterBufferType:
1034 case VAEncSliceParameterBufferType:
1035 #ifdef HAVE_VA_JPEG_DECODE
1036 case VAHuffmanTableBufferType:
1042 return VA_STATUS_ERROR_UNSUPPORTED_BUFFERTYPE;
1045 bufferID = NEW_BUFFER_ID();
1046 obj_buffer = BUFFER(bufferID);
1048 if (NULL == obj_buffer) {
1049 return VA_STATUS_ERROR_ALLOCATION_FAILED;
/* Reserve room for the coded-buffer segment header. */
1052 if (type == VAEncCodedBufferType) {
1053 size += ALIGN(sizeof(VACodedBufferSegment), 64);
1056 obj_buffer->max_num_elements = num_elements;
1057 obj_buffer->num_elements = num_elements;
1058 obj_buffer->size_element = size;
1059 obj_buffer->type = type;
1060 obj_buffer->buffer_store = NULL;
1061 buffer_store = calloc(1, sizeof(struct buffer_store));
1062 assert(buffer_store);
1063 buffer_store->ref_count = 1;
/* Case 1: caller supplied a BO -- share it (and optionally seed it
 * with data). */
1065 if (store_bo != NULL) {
1066 buffer_store->bo = store_bo;
1067 dri_bo_reference(buffer_store->bo);
1070 dri_bo_subdata(buffer_store->bo, 0, size * num_elements, data);
/* Case 2: GPU-visible types get a fresh GEM BO. */
1071 } else if (type == VASliceDataBufferType || type == VAImageBufferType || type == VAEncCodedBufferType) {
1072 buffer_store->bo = dri_bo_alloc(i965->intel.bufmgr,
1074 size * num_elements, 64);
1075 assert(buffer_store->bo);
1077 if (type == VAEncCodedBufferType) {
1078 VACodedBufferSegment *coded_buffer_segment;
1079 dri_bo_map(buffer_store->bo, 1);
1080 coded_buffer_segment = (VACodedBufferSegment *)buffer_store->bo->virtual;
1081 coded_buffer_segment->size = size - ALIGN(sizeof(VACodedBufferSegment), 64);
1082 coded_buffer_segment->bit_offset = 0;
1083 coded_buffer_segment->status = 0;
1084 coded_buffer_segment->buf = NULL;
1085 coded_buffer_segment->next = NULL;
1086 dri_bo_unmap(buffer_store->bo);
1088 dri_bo_subdata(buffer_store->bo, 0, size * num_elements, data);
/* Case 3: everything else lives in ordinary heap memory. */
1092 buffer_store->buffer = malloc(size * num_elements);
1093 assert(buffer_store->buffer);
1096 memcpy(buffer_store->buffer, data, size * num_elements);
1099 buffer_store->num_elements = obj_buffer->num_elements;
/* Hand ownership to the buffer object, then drop our local ref. */
1100 i965_reference_buffer_store(&obj_buffer->buffer_store, buffer_store);
1101 i965_release_buffer_store(&buffer_store);
1104 return VA_STATUS_SUCCESS;
/* vaCreateBuffer: thin wrapper around i965_create_buffer_internal with
 * no pre-existing BO (a backing store is allocated as needed). */
1108 i965_CreateBuffer(VADriverContextP ctx,
1109 VAContextID context, /* in */
1110 VABufferType type, /* in */
1111 unsigned int size, /* in */
1112 unsigned int num_elements, /* in */
1113 void *data, /* in */
1114 VABufferID *buf_id) /* out */
1116 return i965_create_buffer_internal(ctx, context, type, size, num_elements, data, NULL, buf_id);
1121 i965_BufferSetNumElements(VADriverContextP ctx,
1122 VABufferID buf_id, /* in */
1123 unsigned int num_elements) /* in */
1125 struct i965_driver_data *i965 = i965_driver_data(ctx);
1126 struct object_buffer *obj_buffer = BUFFER(buf_id);
1127 VAStatus vaStatus = VA_STATUS_SUCCESS;
1131 if ((num_elements < 0) ||
1132 (num_elements > obj_buffer->max_num_elements)) {
1133 vaStatus = VA_STATUS_ERROR_UNKNOWN;
1135 obj_buffer->num_elements = num_elements;
1136 if (obj_buffer->buffer_store != NULL) {
1137 obj_buffer->buffer_store->num_elements = num_elements;
/* vaMapBuffer: expose the buffer's storage to the caller.  BO-backed
 * buffers are mapped (GTT mapping for tiled BOs, CPU map otherwise);
 * malloc-backed buffers simply return the heap pointer.  For coded
 * buffers the segment header's buf pointer is fixed up to point past
 * the aligned header. */
1145 i965_MapBuffer(VADriverContextP ctx,
1146 VABufferID buf_id, /* in */
1147 void **pbuf) /* out */
1149 struct i965_driver_data *i965 = i965_driver_data(ctx);
1150 struct object_buffer *obj_buffer = BUFFER(buf_id);
1151 VAStatus vaStatus = VA_STATUS_ERROR_UNKNOWN;
/* Exactly one backing kind must be present. */
1153 assert(obj_buffer && obj_buffer->buffer_store);
1154 assert(obj_buffer->buffer_store->bo || obj_buffer->buffer_store->buffer);
1155 assert(!(obj_buffer->buffer_store->bo && obj_buffer->buffer_store->buffer));
1157 if (NULL != obj_buffer->buffer_store->bo) {
1158 unsigned int tiling, swizzle;
1160 dri_bo_get_tiling(obj_buffer->buffer_store->bo, &tiling, &swizzle);
/* Tiled BOs must go through the GTT so the CPU sees linear data. */
1162 if (tiling != I915_TILING_NONE)
1163 drm_intel_gem_bo_map_gtt(obj_buffer->buffer_store->bo);
1165 dri_bo_map(obj_buffer->buffer_store->bo, 1);
1167 assert(obj_buffer->buffer_store->bo->virtual);
1168 *pbuf = obj_buffer->buffer_store->bo->virtual;
1170 if (obj_buffer->type == VAEncCodedBufferType) {
1171 VACodedBufferSegment *coded_buffer_segment = (VACodedBufferSegment *)(obj_buffer->buffer_store->bo->virtual);
1172 coded_buffer_segment->buf = (unsigned char *)(obj_buffer->buffer_store->bo->virtual) + ALIGN(sizeof(VACodedBufferSegment), 64);
1175 vaStatus = VA_STATUS_SUCCESS;
1176 } else if (NULL != obj_buffer->buffer_store->buffer) {
1177 *pbuf = obj_buffer->buffer_store->buffer;
1178 vaStatus = VA_STATUS_SUCCESS;
/* vaUnmapBuffer: mirror of i965_MapBuffer -- unmap via GTT for tiled
 * BOs, regular unmap otherwise; malloc-backed buffers need no work. */
1185 i965_UnmapBuffer(VADriverContextP ctx, VABufferID buf_id)
1187 struct i965_driver_data *i965 = i965_driver_data(ctx);
1188 struct object_buffer *obj_buffer = BUFFER(buf_id);
1189 VAStatus vaStatus = VA_STATUS_ERROR_UNKNOWN;
1191 assert(obj_buffer && obj_buffer->buffer_store);
1192 assert(obj_buffer->buffer_store->bo || obj_buffer->buffer_store->buffer);
1193 assert(!(obj_buffer->buffer_store->bo && obj_buffer->buffer_store->buffer));
1195 if (NULL != obj_buffer->buffer_store->bo) {
1196 unsigned int tiling, swizzle;
1198 dri_bo_get_tiling(obj_buffer->buffer_store->bo, &tiling, &swizzle);
/* Must match the mapping path chosen in i965_MapBuffer. */
1200 if (tiling != I915_TILING_NONE)
1201 drm_intel_gem_bo_unmap_gtt(obj_buffer->buffer_store->bo);
1203 dri_bo_unmap(obj_buffer->buffer_store->bo);
1205 vaStatus = VA_STATUS_SUCCESS;
1206 } else if (NULL != obj_buffer->buffer_store->buffer) {
1208 vaStatus = VA_STATUS_SUCCESS;
/* vaDestroyBuffer: resolve the buffer id and release the object.
 * NOTE(review): no visible NULL check on obj_buffer in this excerpt --
 * presumably asserted in an elided line; confirm. */
1215 i965_DestroyBuffer(VADriverContextP ctx, VABufferID buffer_id)
1217 struct i965_driver_data *i965 = i965_driver_data(ctx);
1218 struct object_buffer *obj_buffer = BUFFER(buffer_id);
1221 i965_destroy_buffer(&i965->buffer_heap, (struct object_base *)obj_buffer);
1223 return VA_STATUS_SUCCESS;
/* vaBeginPicture: begin decoding/encoding a frame into render_target.
 * Validates that the context's configured profile is supported, then resets
 * the per-frame codec state by dropping parameter/slice buffer stores left
 * over from the previous frame.
 * NOTE(review): obj_config is dereferenced without a visible NULL check —
 * confirm an invalid config_id cannot reach this point. */
1227 i965_BeginPicture(VADriverContextP ctx,
1228 VAContextID context,
1229 VASurfaceID render_target)
1231 struct i965_driver_data *i965 = i965_driver_data(ctx);
1232 struct object_context *obj_context = CONTEXT(context);
1233 struct object_surface *obj_surface = SURFACE(render_target);
1234 struct object_config *obj_config;
1239 assert(obj_context);
1240 assert(obj_surface);
1242 config = obj_context->config_id;
1243 obj_config = CONFIG(config);
/* Only the profiles listed below may start a picture. */
1246 switch (obj_config->profile) {
1247 case VAProfileMPEG2Simple:
1248 case VAProfileMPEG2Main:
1249 vaStatus = VA_STATUS_SUCCESS;
1252 case VAProfileH264Baseline:
1253 case VAProfileH264Main:
1254 case VAProfileH264High:
1255 vaStatus = VA_STATUS_SUCCESS;
1258 case VAProfileVC1Simple:
1259 case VAProfileVC1Main:
1260 case VAProfileVC1Advanced:
1261 vaStatus = VA_STATUS_SUCCESS;
1264 case VAProfileJPEGBaseline:
1265 vaStatus = VA_STATUS_SUCCESS;
1270 vaStatus = VA_STATUS_ERROR_UNSUPPORTED_PROFILE;
/* Reset encode-side per-frame state: previous pic/seq/slice params. */
1274 if (obj_context->codec_type == CODEC_ENC) {
1275 i965_release_buffer_store(&obj_context->codec_state.encode.pic_param);
1276 i965_release_buffer_store(&obj_context->codec_state.encode.seq_param);
1278 for (i = 0; i < obj_context->codec_state.encode.num_slice_params; i++) {
1279 i965_release_buffer_store(&obj_context->codec_state.encode.slice_params[i]);
1282 obj_context->codec_state.encode.num_slice_params = 0;
1283 obj_context->codec_state.encode.current_render_target = render_target; /*This is input new frame*/
/* Decode path: also drop IQ matrix, bit plane, Huffman table, slice data. */
1285 obj_context->codec_state.decode.current_render_target = render_target;
1286 i965_release_buffer_store(&obj_context->codec_state.decode.pic_param);
1287 i965_release_buffer_store(&obj_context->codec_state.decode.iq_matrix);
1288 i965_release_buffer_store(&obj_context->codec_state.decode.bit_plane);
1289 i965_release_buffer_store(&obj_context->codec_state.decode.huffman_table);
1291 for (i = 0; i < obj_context->codec_state.decode.num_slice_params; i++) {
1292 i965_release_buffer_store(&obj_context->codec_state.decode.slice_params[i]);
1293 i965_release_buffer_store(&obj_context->codec_state.decode.slice_datas[i]);
1296 obj_context->codec_state.decode.num_slice_params = 0;
1297 obj_context->codec_state.decode.num_slice_datas = 0;
/* Dispatch helper: expands to a call of the generated render function for
 * the given codec state category (decode/encode) and buffer name. */
1303 #define I965_RENDER_BUFFER(category, name) i965_render_##category##_##name##_buffer(ctx, obj_context, obj_buffer)
/* Generates a function that stores a single (non-repeating) parameter buffer:
 * releases the previous store and takes a reference on the new one.
 * Only user-memory buffers are expected here (asserted, no BO). */
1305 #define DEF_RENDER_SINGLE_BUFFER_FUNC(category, name, member) \
1307 i965_render_##category##_##name##_buffer(VADriverContextP ctx, \
1308 struct object_context *obj_context, \
1309 struct object_buffer *obj_buffer) \
1311 struct category##_state *category = &obj_context->codec_state.category; \
1312 assert(obj_buffer->buffer_store->bo == NULL); \
1313 assert(obj_buffer->buffer_store->buffer); \
1314 i965_release_buffer_store(&category->member); \
1315 i965_reference_buffer_store(&category->member, obj_buffer->buffer_store); \
1316 return VA_STATUS_SUCCESS; \
/* Generates a function that appends to a growable array of buffer stores
 * (slice parameters / slice data), growing by NUM_SLICES entries at a time.
 * NOTE(review): the realloc return value is not checked for NULL — an OOM
 * here would lose the old pointer; confirm acceptable for this driver. */
1319 #define DEF_RENDER_MULTI_BUFFER_FUNC(category, name, member) \
1321 i965_render_##category##_##name##_buffer(VADriverContextP ctx, \
1322 struct object_context *obj_context, \
1323 struct object_buffer *obj_buffer) \
1325 struct category##_state *category = &obj_context->codec_state.category; \
1326 if (category->num_##member == category->max_##member) { \
1327 category->member = realloc(category->member, (category->max_##member + NUM_SLICES) * sizeof(*category->member)); \
1328 memset(category->member + category->max_##member, 0, NUM_SLICES * sizeof(*category->member)); \
1329 category->max_##member += NUM_SLICES; \
1331 i965_release_buffer_store(&category->member[category->num_##member]); \
1332 i965_reference_buffer_store(&category->member[category->num_##member], obj_buffer->buffer_store); \
1333 category->num_##member++; \
1334 return VA_STATUS_SUCCESS; \
/* Instantiate the decode-side render functions. */
1337 #define I965_RENDER_DECODE_BUFFER(name) I965_RENDER_BUFFER(decode, name)
1339 #define DEF_RENDER_DECODE_SINGLE_BUFFER_FUNC(name, member) DEF_RENDER_SINGLE_BUFFER_FUNC(decode, name, member)
1340 DEF_RENDER_DECODE_SINGLE_BUFFER_FUNC(picture_parameter, pic_param)
1341 DEF_RENDER_DECODE_SINGLE_BUFFER_FUNC(iq_matrix, iq_matrix)
1342 DEF_RENDER_DECODE_SINGLE_BUFFER_FUNC(bit_plane, bit_plane)
1343 DEF_RENDER_DECODE_SINGLE_BUFFER_FUNC(huffman_table, huffman_table)
1345 #define DEF_RENDER_DECODE_MULTI_BUFFER_FUNC(name, member) DEF_RENDER_MULTI_BUFFER_FUNC(decode, name, member)
1346 DEF_RENDER_DECODE_MULTI_BUFFER_FUNC(slice_parameter, slice_params)
1347 DEF_RENDER_DECODE_MULTI_BUFFER_FUNC(slice_data, slice_datas)
/* Decode path of vaRenderPicture: attach each submitted buffer to the
 * context's decode state, dispatching on the VA buffer type.
 * Stops at the first buffer that fails to attach. */
1350 i965_decoder_render_picture(VADriverContextP ctx,
1351 VAContextID context,
1352 VABufferID *buffers,
1355 struct i965_driver_data *i965 = i965_driver_data(ctx);
1356 struct object_context *obj_context = CONTEXT(context);
1357 VAStatus vaStatus = VA_STATUS_SUCCESS;
1360 for (i = 0; i < num_buffers && vaStatus == VA_STATUS_SUCCESS; i++) {
1361 struct object_buffer *obj_buffer = BUFFER(buffers[i]);
1364 switch (obj_buffer->type) {
1365 case VAPictureParameterBufferType:
1366 vaStatus = I965_RENDER_DECODE_BUFFER(picture_parameter);
1369 case VAIQMatrixBufferType:
1370 vaStatus = I965_RENDER_DECODE_BUFFER(iq_matrix);
1373 case VABitPlaneBufferType:
1374 vaStatus = I965_RENDER_DECODE_BUFFER(bit_plane);
1377 case VASliceParameterBufferType:
1378 vaStatus = I965_RENDER_DECODE_BUFFER(slice_parameter);
1381 case VASliceDataBufferType:
1382 vaStatus = I965_RENDER_DECODE_BUFFER(slice_data);
/* Huffman tables only exist when JPEG decode support is compiled in. */
1385 #ifdef HAVE_VA_JPEG_DECODE
1386 case VAHuffmanTableBufferType:
1387 vaStatus = I965_RENDER_DECODE_BUFFER(huffman_table);
1392 vaStatus = VA_STATUS_ERROR_UNSUPPORTED_BUFFERTYPE;
/* Instantiate the encode-side render functions (same generator macros as
 * the decode side above). */
1400 #define I965_RENDER_ENCODE_BUFFER(name) I965_RENDER_BUFFER(encode, name)
1402 #define DEF_RENDER_ENCODE_SINGLE_BUFFER_FUNC(name, member) DEF_RENDER_SINGLE_BUFFER_FUNC(encode, name, member)
1403 DEF_RENDER_ENCODE_SINGLE_BUFFER_FUNC(sequence_parameter, seq_param)
1404 DEF_RENDER_ENCODE_SINGLE_BUFFER_FUNC(picture_parameter, pic_param)
1405 DEF_RENDER_ENCODE_SINGLE_BUFFER_FUNC(picture_control, pic_control)
1406 DEF_RENDER_ENCODE_SINGLE_BUFFER_FUNC(qmatrix, q_matrix)
1407 DEF_RENDER_ENCODE_SINGLE_BUFFER_FUNC(iqmatrix, iq_matrix)
1409 #define DEF_RENDER_ENCODE_MULTI_BUFFER_FUNC(name, member) DEF_RENDER_MULTI_BUFFER_FUNC(encode, name, member)
1410 DEF_RENDER_ENCODE_MULTI_BUFFER_FUNC(slice_parameter, slice_params)
/* Encode path of vaRenderPicture: attach each submitted buffer to the
 * context's encode state, dispatching on the VA buffer type. */
1413 i965_encoder_render_picture(VADriverContextP ctx,
1414 VAContextID context,
1415 VABufferID *buffers,
1418 struct i965_driver_data *i965 = i965_driver_data(ctx);
1419 struct object_context *obj_context = CONTEXT(context);
1420 VAStatus vaStatus = VA_STATUS_SUCCESS;
1423 for (i = 0; i < num_buffers && vaStatus == VA_STATUS_SUCCESS; i++) {
1424 struct object_buffer *obj_buffer = BUFFER(buffers[i]);
1427 switch (obj_buffer->type) {
1428 case VAEncSequenceParameterBufferType:
1429 vaStatus = I965_RENDER_ENCODE_BUFFER(sequence_parameter);
1432 case VAEncPictureParameterBufferType:
1433 vaStatus = I965_RENDER_ENCODE_BUFFER(picture_parameter);
1436 case VAEncSliceParameterBufferType:
1437 vaStatus = I965_RENDER_ENCODE_BUFFER(slice_parameter);
/* A decode-style picture parameter buffer acts as picture control here. */
1440 case VAPictureParameterBufferType:
1441 vaStatus = I965_RENDER_ENCODE_BUFFER(picture_control);
1444 case VAQMatrixBufferType:
1445 vaStatus = I965_RENDER_ENCODE_BUFFER(qmatrix);
1448 case VAIQMatrixBufferType:
1449 vaStatus = I965_RENDER_ENCODE_BUFFER(iqmatrix);
1453 vaStatus = VA_STATUS_ERROR_UNSUPPORTED_BUFFERTYPE;
/* vaRenderPicture: route the submitted buffers to the encoder or decoder
 * path based on the config's entrypoint. */
1462 i965_RenderPicture(VADriverContextP ctx,
1463 VAContextID context,
1464 VABufferID *buffers,
1467 struct i965_driver_data *i965 = i965_driver_data(ctx);
1468 struct object_context *obj_context;
1469 struct object_config *obj_config;
1471 VAStatus vaStatus = VA_STATUS_ERROR_UNKNOWN;
1473 obj_context = CONTEXT(context);
1474 assert(obj_context);
1476 config = obj_context->config_id;
1477 obj_config = CONFIG(config);
/* Encode entrypoint takes the encoder path, everything else decodes. */
1480 if (VAEntrypointEncSlice == obj_config->entrypoint ){
1481 vaStatus = i965_encoder_render_picture(ctx, context, buffers, num_buffers);
1483 vaStatus = i965_decoder_render_picture(ctx, context, buffers, num_buffers);
/* vaEndPicture: verify that all mandatory per-frame buffers were submitted,
 * then hand the accumulated codec state to the HW backend's run() hook,
 * which builds and submits the batch buffer. */
1490 i965_EndPicture(VADriverContextP ctx, VAContextID context)
1492 struct i965_driver_data *i965 = i965_driver_data(ctx);
1493 struct object_context *obj_context = CONTEXT(context);
1494 struct object_config *obj_config;
1497 assert(obj_context);
1498 config = obj_context->config_id;
1499 obj_config = CONFIG(config);
1502 if (obj_context->codec_type == CODEC_ENC) {
1503 assert(VAEntrypointEncSlice == obj_config->entrypoint);
/* Encoding needs picture + sequence params and at least one slice. */
1505 assert(obj_context->codec_state.encode.pic_param);
1506 assert(obj_context->codec_state.encode.seq_param);
1507 assert(obj_context->codec_state.encode.num_slice_params >= 1);
/* Decoding needs picture params and matched slice param/data pairs. */
1509 assert(obj_context->codec_state.decode.pic_param);
1510 assert(obj_context->codec_state.decode.num_slice_params >= 1);
1511 assert(obj_context->codec_state.decode.num_slice_datas >= 1);
1512 assert(obj_context->codec_state.decode.num_slice_params == obj_context->codec_state.decode.num_slice_datas);
1515 assert(obj_context->hw_context->run);
1516 obj_context->hw_context->run(ctx, obj_config->profile, &obj_context->codec_state, obj_context->hw_context);
1518 return VA_STATUS_SUCCESS;
/* vaSyncSurface: block until all GPU rendering to the surface's BO has
 * completed. */
1522 i965_SyncSurface(VADriverContextP ctx,
1523 VASurfaceID render_target)
1525 struct i965_driver_data *i965 = i965_driver_data(ctx);
1526 struct object_surface *obj_surface = SURFACE(render_target);
1528 assert(obj_surface);
1531 drm_intel_bo_wait_rendering(obj_surface->bo);
1533 return VA_STATUS_SUCCESS;
/* vaQuerySurfaceStatus: non-blocking status poll. A surface whose BO is
 * still busy on the GPU reports VASurfaceRendering; a surface with no BO
 * (nothing ever rendered) reports VASurfaceReady. */
1537 i965_QuerySurfaceStatus(VADriverContextP ctx,
1538 VASurfaceID render_target,
1539 VASurfaceStatus *status) /* out */
1541 struct i965_driver_data *i965 = i965_driver_data(ctx);
1542 struct object_surface *obj_surface = SURFACE(render_target);
1544 assert(obj_surface);
1546 if (obj_surface->bo) {
1547 if (drm_intel_bo_busy(obj_surface->bo)){
1548 *status = VASurfaceRendering;
1551 *status = VASurfaceReady;
1554 *status = VASurfaceReady;
1557 return VA_STATUS_SUCCESS;
/* Look up the driver's stored display attribute of the given type.
 * Returns NULL if the attribute table was never allocated or the type
 * is not present. */
1560 static VADisplayAttribute *
1561 get_display_attribute(VADriverContextP ctx, VADisplayAttribType type)
1563 struct i965_driver_data * const i965 = i965_driver_data(ctx);
1566 if (!i965->display_attributes)
1569 for (i = 0; i < i965->num_display_attributes; i++) {
1570 if (i965->display_attributes[i].type == type)
1571 return &i965->display_attributes[i];
/* Allocate the driver's mutable copy of the default display-attribute
 * table and cache a pointer to the rotation attribute.
 * Fails (falls through to error handling) on OOM or if the rotation
 * attribute is missing from the defaults. */
1577 i965_display_attributes_init(VADriverContextP ctx)
1579 struct i965_driver_data * const i965 = i965_driver_data(ctx);
1581 i965->num_display_attributes = ARRAY_ELEMS(i965_display_attributes);
1582 i965->display_attributes = malloc(
1583 i965->num_display_attributes * sizeof(i965->display_attributes[0]));
1584 if (!i965->display_attributes)
/* Seed the mutable table from the static defaults. */
1588 i965->display_attributes,
1589 i965_display_attributes,
1590 sizeof(i965_display_attributes)
1593 i965->rotation_attrib = get_display_attribute(ctx, VADisplayAttribRotation);
1594 if (!i965->rotation_attrib)
/* Free the driver's display-attribute table and reset the bookkeeping. */
1600 i965_display_attributes_terminate(VADriverContextP ctx)
1602 struct i965_driver_data * const i965 = i965_driver_data(ctx);
1604 if (i965->display_attributes) {
1605 free(i965->display_attributes);
1606 i965->display_attributes = NULL;
1607 i965->num_display_attributes = 0;
1612 * Query display attributes
1613 * The caller must provide a "attr_list" array that can hold at
1614 * least vaMaxNumDisplayAttributes() entries. The actual number of attributes
1615 * returned in "attr_list" is returned in "num_attributes".
/* Copy the static default attribute table to the caller; both output
 * pointers are optional. */
1618 i965_QueryDisplayAttributes(
1619 VADriverContextP ctx,
1620 VADisplayAttribute *attribs, /* out */
1621 int *num_attribs_ptr /* out */
1624 const int num_attribs = ARRAY_ELEMS(i965_display_attributes);
1626 if (attribs && num_attribs > 0)
1627 memcpy(attribs, i965_display_attributes, sizeof(i965_display_attributes));
1629 if (num_attribs_ptr)
1630 *num_attribs_ptr = num_attribs;
1632 return VA_STATUS_SUCCESS;
1636 * Get display attributes
1637 * This function returns the current attribute values in "attr_list".
1638 * Only attributes returned with VA_DISPLAY_ATTRIB_GETTABLE set in the "flags" field
1639 * from vaQueryDisplayAttributes() can have their values retrieved.
/* For each requested attribute, copy the current value/range if it exists
 * and is gettable; otherwise flag it VA_DISPLAY_ATTRIB_NOT_SUPPORTED.
 * Always returns success — unsupported entries are reported per-attribute. */
1642 i965_GetDisplayAttributes(
1643 VADriverContextP ctx,
1644 VADisplayAttribute *attribs, /* inout */
1645 int num_attribs /* in */
1650 for (i = 0; i < num_attribs; i++) {
1651 VADisplayAttribute *src_attrib, * const dst_attrib = &attribs[i];
1653 src_attrib = get_display_attribute(ctx, dst_attrib->type);
1654 if (src_attrib && (src_attrib->flags & VA_DISPLAY_ATTRIB_GETTABLE)) {
1655 dst_attrib->min_value = src_attrib->min_value;
1656 dst_attrib->max_value = src_attrib->max_value;
1657 dst_attrib->value = src_attrib->value;
1660 dst_attrib->flags = VA_DISPLAY_ATTRIB_NOT_SUPPORTED;
1662 return VA_STATUS_SUCCESS;
1666 * Set display attributes
1667 * Only attributes returned with VA_DISPLAY_ATTRIB_SETTABLE set in the "flags" field
1668 * from vaQueryDisplayAttributes() can be set. If the attribute is not settable or
1669 * the value is out of range, the function returns VA_STATUS_ERROR_ATTR_NOT_SUPPORTED
/* For each supplied attribute: fail on unknown or non-settable types, and
 * range-check the value before storing it in the driver's table. */
1672 i965_SetDisplayAttributes(
1673 VADriverContextP ctx,
1674 VADisplayAttribute *attribs, /* in */
1675 int num_attribs /* in */
1680 for (i = 0; i < num_attribs; i++) {
1681 VADisplayAttribute *dst_attrib, * const src_attrib = &attribs[i];
1683 dst_attrib = get_display_attribute(ctx, src_attrib->type);
1685 return VA_STATUS_ERROR_ATTR_NOT_SUPPORTED;
1687 if (!(dst_attrib->flags & VA_DISPLAY_ATTRIB_SETTABLE))
/* Reject out-of-range values rather than clamping them. */
1690 if (src_attrib->value < dst_attrib->min_value ||
1691 src_attrib->value > dst_attrib->max_value)
1692 return VA_STATUS_ERROR_INVALID_PARAMETER;
1694 dst_attrib->value = src_attrib->value;
1695 /* XXX: track modified attributes through timestamps */
1697 return VA_STATUS_SUCCESS;
/* Debug entry point: intentionally unimplemented in this driver. */
1701 i965_DbgCopySurfaceToBuffer(VADriverContextP ctx,
1702 VASurfaceID surface,
1703 void **buffer, /* out */
1704 unsigned int *stride) /* out */
1707 return VA_STATUS_ERROR_UNIMPLEMENTED;
/* Driver initialization: bring up the DRM connection, select the HW codec
 * table for the detected GPU generation, create the batch buffer, and
 * initialize display attributes, post-processing, rendering and the
 * windowing-system output (Wayland and/or X11/DRI).
 * NOTE(review): early-return error paths do not unwind the pieces already
 * initialized (e.g. intel_driver_init) — confirm cleanup happens in
 * the corresponding Terminate path. */
1711 i965_Init(VADriverContextP ctx)
1713 struct i965_driver_data *i965 = i965_driver_data(ctx);
1715 if (intel_driver_init(ctx) == False)
1716 return VA_STATUS_ERROR_UNKNOWN;
/* Pick the per-generation codec capability table; Haswell is checked
 * before the generic GEN7 test since IS_GEN7 would also match it. */
1718 if (IS_HASWELL(i965->intel.device_id))
1719 i965->codec_info = &gen75_hw_codec_info;
1720 else if (IS_G4X(i965->intel.device_id))
1721 i965->codec_info = &g4x_hw_codec_info;
1722 else if (IS_IRONLAKE(i965->intel.device_id))
1723 i965->codec_info = &ironlake_hw_codec_info;
1724 else if (IS_GEN6(i965->intel.device_id))
1725 i965->codec_info = &gen6_hw_codec_info;
1726 else if (IS_GEN7(i965->intel.device_id))
1727 i965->codec_info = &gen7_hw_codec_info;
1729 return VA_STATUS_ERROR_UNKNOWN;
1731 i965->batch = intel_batchbuffer_new(&i965->intel, I915_EXEC_RENDER);
1733 if (!i965_display_attributes_init(ctx))
1734 return VA_STATUS_ERROR_UNKNOWN;
1736 if (i965_post_processing_init(ctx) == False)
1737 return VA_STATUS_ERROR_UNKNOWN;
1739 if (i965_render_init(ctx) == False)
1740 return VA_STATUS_ERROR_UNKNOWN;
/* Output backends are compile-time optional. */
1742 #ifdef HAVE_VA_WAYLAND
1743 if (IS_VA_WAYLAND(ctx) && !i965_output_wayland_init(ctx))
1744 return VA_STATUS_ERROR_UNKNOWN;
1748 if (IS_VA_X11(ctx) && !i965_output_dri_init(ctx))
1749 return VA_STATUS_ERROR_UNKNOWN;
1752 _i965InitMutex(&i965->render_mutex);
1754 return VA_STATUS_SUCCESS;
/* Walk an object heap, invoking the supplied destructor on every live
 * object, then destroy the heap itself. */
1758 i965_destroy_heap(struct object_heap *heap,
1759 void (*func)(struct object_heap *heap, struct object_base *object))
1761 struct object_base *object;
1762 object_heap_iterator iter;
1764 object = object_heap_first(heap, &iter);
1770 object = object_heap_next(heap, &iter);
1773 object_heap_destroy(heap);
1778 i965_DestroyImage(VADriverContextP ctx, VAImageID image);
/* vaCreateImage: allocate an image object, compute the plane layout for
 * the requested fourcc, and back it with a VAImageBufferType buffer.
 * On any failure after ID allocation, control reaches the error path that
 * destroys the partially-built image. */
1781 i965_CreateImage(VADriverContextP ctx,
1782 VAImageFormat *format,
1785 VAImage *out_image) /* out */
1787 struct i965_driver_data *i965 = i965_driver_data(ctx);
1788 struct object_image *obj_image;
1789 VAStatus va_status = VA_STATUS_ERROR_OPERATION_FAILED;
1791 unsigned int width2, height2, size2, size;
/* Pre-set outputs to invalid so a failed call leaves a safe VAImage. */
1793 out_image->image_id = VA_INVALID_ID;
1794 out_image->buf = VA_INVALID_ID;
1796 image_id = NEW_IMAGE_ID();
1797 if (image_id == VA_INVALID_ID)
1798 return VA_STATUS_ERROR_ALLOCATION_FAILED;
1800 obj_image = IMAGE(image_id);
1802 return VA_STATUS_ERROR_ALLOCATION_FAILED;
1803 obj_image->bo = NULL;
1804 obj_image->palette = NULL;
1805 obj_image->derived_surface = VA_INVALID_ID;
1807 VAImage * const image = &obj_image->image;
1808 image->image_id = image_id;
1809 image->buf = VA_INVALID_ID;
/* Chroma planes are half-size in each dimension (rounded up). */
1811 size = width * height;
1812 width2 = (width + 1) / 2;
1813 height2 = (height + 1) / 2;
1814 size2 = width2 * height2;
1816 image->num_palette_entries = 0;
1817 image->entry_bytes = 0;
1818 memset(image->component_order, 0, sizeof(image->component_order));
/* Per-fourcc plane layout: pitches, offsets and total data size. */
1820 switch (format->fourcc) {
1821 case VA_FOURCC('I','A','4','4'):
1822 case VA_FOURCC('A','I','4','4'):
1823 image->num_planes = 1;
1824 image->pitches[0] = width;
1825 image->offsets[0] = 0;
1826 image->data_size = image->offsets[0] + image->pitches[0] * height;
1827 image->num_palette_entries = 16;
1828 image->entry_bytes = 3;
1829 image->component_order[0] = 'R';
1830 image->component_order[1] = 'G';
1831 image->component_order[2] = 'B';
1833 case VA_FOURCC('A','R','G','B'):
1834 case VA_FOURCC('A','B','G','R'):
1835 case VA_FOURCC('B','G','R','A'):
1836 case VA_FOURCC('R','G','B','A'):
1837 image->num_planes = 1;
1838 image->pitches[0] = width * 4;
1839 image->offsets[0] = 0;
1840 image->data_size = image->offsets[0] + image->pitches[0] * height;
/* YV12 stores V before U, hence offsets[1] (U) after both Y and V. */
1842 case VA_FOURCC('Y','V','1','2'):
1843 image->num_planes = 3;
1844 image->pitches[0] = width;
1845 image->offsets[0] = 0;
1846 image->pitches[1] = width2;
1847 image->offsets[1] = size + size2;
1848 image->pitches[2] = width2;
1849 image->offsets[2] = size;
1850 image->data_size = size + 2 * size2;
1852 case VA_FOURCC('I','4','2','0'):
1853 image->num_planes = 3;
1854 image->pitches[0] = width;
1855 image->offsets[0] = 0;
1856 image->pitches[1] = width2;
1857 image->offsets[1] = size;
1858 image->pitches[2] = width2;
1859 image->offsets[2] = size + size2;
1860 image->data_size = size + 2 * size2;
1862 case VA_FOURCC('N','V','1','2'):
1863 image->num_planes = 2;
1864 image->pitches[0] = width;
1865 image->offsets[0] = 0;
1866 image->pitches[1] = width;
1867 image->offsets[1] = size;
1868 image->data_size = size + 2 * size2;
1874 va_status = i965_CreateBuffer(ctx, 0, VAImageBufferType,
1875 image->data_size, 1, NULL, &image->buf);
1876 if (va_status != VA_STATUS_SUCCESS)
/* Keep our own reference on the backing BO beyond the buffer's. */
1879 obj_image->bo = BUFFER(image->buf)->buffer_store->bo;
1880 dri_bo_reference(obj_image->bo);
/* NOTE(review): sizeof(obj_image->palette) is the POINTER size, not the
 * entry size — likely intended sizeof(*obj_image->palette); harmless
 * over-allocation on LP64 but worth confirming upstream. */
1882 if (image->num_palette_entries > 0 && image->entry_bytes > 0) {
1883 obj_image->palette = malloc(image->num_palette_entries * sizeof(obj_image->palette));
1884 if (!obj_image->palette)
1888 image->image_id = image_id;
1889 image->format = *format;
1890 image->width = width;
1891 image->height = height;
1893 *out_image = *image;
1894 return VA_STATUS_SUCCESS;
/* Error path: tear down the partially-constructed image. */
1897 i965_DestroyImage(ctx, image_id);
/* Ensure the surface has a backing BO with the requested fourcc and
 * subsampling, allocating one (tiled or linear) if necessary. Computes
 * plane offsets/pitches for the surface's layout. If a BO already exists
 * it must match the requested format (asserted). */
1902 i965_check_alloc_surface_bo(VADriverContextP ctx,
1903 struct object_surface *obj_surface,
1905 unsigned int fourcc,
1906 unsigned int subsampling)
1908 struct i965_driver_data *i965 = i965_driver_data(ctx);
1909 int region_width, region_height;
1911 if (obj_surface->bo) {
1912 assert(obj_surface->fourcc);
1913 assert(obj_surface->fourcc == fourcc);
1914 assert(obj_surface->subsampling == subsampling);
1918 obj_surface->x_cb_offset = 0; /* X offset is always 0 */
1919 obj_surface->x_cr_offset = 0;
/* Tiled path: width aligned to 128, height to 32, limited fourcc set. */
1922 assert(fourcc == VA_FOURCC('N', 'V', '1', '2') ||
1923 fourcc == VA_FOURCC('I', 'M', 'C', '1') ||
1924 fourcc == VA_FOURCC('I', 'M', 'C', '3'));
1926 obj_surface->width = ALIGN(obj_surface->orig_width, 128);
1927 obj_surface->height = ALIGN(obj_surface->orig_height, 32);
1928 obj_surface->cb_cr_pitch = obj_surface->width;
1929 region_width = obj_surface->width;
1930 region_height = obj_surface->height;
1932 if (fourcc == VA_FOURCC('N', 'V', '1', '2')) {
1933 assert(subsampling == SUBSAMPLE_YUV420);
/* NV12 interleaves Cb/Cr in a single plane after Y. */
1934 obj_surface->y_cb_offset = obj_surface->height;
1935 obj_surface->y_cr_offset = obj_surface->height;
1936 obj_surface->cb_cr_width = obj_surface->orig_width / 2;
1937 obj_surface->cb_cr_height = obj_surface->orig_height / 2;
1938 region_height = obj_surface->height + ALIGN(obj_surface->cb_cr_height, 32);
1939 } else if (fourcc == VA_FOURCC('I', 'M', 'C', '1') ||
1940 fourcc == VA_FOURCC('I', 'M', 'C', '3')) {
/* Chroma plane dimensions depend on the JPEG subsampling mode. */
1941 switch (subsampling) {
1942 case SUBSAMPLE_YUV400:
1943 obj_surface->cb_cr_width = 0;
1944 obj_surface->cb_cr_height = 0;
1947 case SUBSAMPLE_YUV420:
1948 obj_surface->cb_cr_width = obj_surface->orig_width / 2;
1949 obj_surface->cb_cr_height = obj_surface->orig_height / 2;
1952 case SUBSAMPLE_YUV422H:
1953 obj_surface->cb_cr_width = obj_surface->orig_width / 2;
1954 obj_surface->cb_cr_height = obj_surface->orig_height;
1957 case SUBSAMPLE_YUV422V:
1958 obj_surface->cb_cr_width = obj_surface->orig_width;
1959 obj_surface->cb_cr_height = obj_surface->orig_height / 2;
1962 case SUBSAMPLE_YUV444:
1963 obj_surface->cb_cr_width = obj_surface->orig_width;
1964 obj_surface->cb_cr_height = obj_surface->orig_height;
1967 case SUBSAMPLE_YUV411:
1968 obj_surface->cb_cr_width = obj_surface->orig_width / 4;
1969 obj_surface->cb_cr_height = obj_surface->orig_height;
1977 region_height = obj_surface->height + ALIGN(obj_surface->cb_cr_height, 32) * 2;
/* IMC1 stores Cr before Cb; IMC3 stores Cb before Cr. */
1979 if (fourcc == VA_FOURCC('I', 'M', 'C', '1')) {
1980 obj_surface->y_cr_offset = obj_surface->height;
1981 obj_surface->y_cb_offset = obj_surface->y_cr_offset + ALIGN(obj_surface->cb_cr_height, 32);
1983 obj_surface->y_cb_offset = obj_surface->height;
1984 obj_surface->y_cr_offset = obj_surface->y_cb_offset + ALIGN(obj_surface->cb_cr_height, 32);
/* Linear path: only 4:2:0 formats, no IMC variants. */
1988 assert(fourcc != VA_FOURCC('I', 'M', 'C', '1') &&
1989 fourcc != VA_FOURCC('I', 'M', 'C', '3'));
1990 assert(subsampling == SUBSAMPLE_YUV420);
1992 region_width = obj_surface->width;
1993 region_height = obj_surface->height;
1996 case VA_FOURCC('N', 'V', '1', '2'):
1997 obj_surface->y_cb_offset = obj_surface->height;
1998 obj_surface->y_cr_offset = obj_surface->height;
1999 obj_surface->cb_cr_width = obj_surface->orig_width / 2;
2000 obj_surface->cb_cr_height = obj_surface->orig_height / 2;
2001 obj_surface->cb_cr_pitch = obj_surface->width;
2002 region_height = obj_surface->height + obj_surface->height / 2;
2005 case VA_FOURCC('Y', 'V', '1', '2'):
2006 case VA_FOURCC('I', '4', '2', '0'):
/* YV12 and I420 differ only in U/V plane order. */
2007 if (fourcc == VA_FOURCC('Y', 'V', '1', '2')) {
2008 obj_surface->y_cr_offset = obj_surface->height;
2009 obj_surface->y_cb_offset = obj_surface->height + obj_surface->height / 4;
2011 obj_surface->y_cb_offset = obj_surface->height;
2012 obj_surface->y_cr_offset = obj_surface->height + obj_surface->height / 4;
2015 obj_surface->cb_cr_width = obj_surface->orig_width / 2;
2016 obj_surface->cb_cr_height = obj_surface->orig_height / 2;
2017 obj_surface->cb_cr_pitch = obj_surface->width / 2;
2018 region_height = obj_surface->height + obj_surface->height / 2;
/* Round total size up to a 4 KiB page. */
2027 obj_surface->size = ALIGN(region_width * region_height, 0x1000);
2030 uint32_t tiling_mode = I915_TILING_Y; /* always uses Y-tiled format */
2031 unsigned long pitch;
2033 obj_surface->bo = drm_intel_bo_alloc_tiled(i965->intel.bufmgr,
2041 assert(tiling_mode == I915_TILING_Y);
2042 assert(pitch == obj_surface->width);
2044 obj_surface->bo = dri_bo_alloc(i965->intel.bufmgr,
2050 obj_surface->fourcc = fourcc;
2051 obj_surface->subsampling = subsampling;
2052 assert(obj_surface->bo);
/* vaDeriveImage: expose a surface's backing BO directly as a VAImage so the
 * application can map it without a copy. The image fourcc follows the
 * render state: YV12 before render init, then NV12 or I420 depending on
 * whether UV is interleaved. Marks the surface SURFACE_DERIVED and records
 * the link so DestroyImage can clear it. */
2055 VAStatus i965_DeriveImage(VADriverContextP ctx,
2056 VASurfaceID surface,
2057 VAImage *out_image) /* out */
2059 struct i965_driver_data *i965 = i965_driver_data(ctx);
2060 struct i965_render_state *render_state = &i965->render_state;
2061 struct object_image *obj_image;
2062 struct object_surface *obj_surface;
2064 unsigned int w_pitch, h_pitch;
2067 out_image->image_id = VA_INVALID_ID;
2068 obj_surface = SURFACE(surface);
2071 return VA_STATUS_ERROR_INVALID_SURFACE;
/* Use the surface's aligned (allocated) dimensions as pitches. */
2073 w_pitch = obj_surface->width;
2074 h_pitch = obj_surface->height;
2076 image_id = NEW_IMAGE_ID();
2078 if (image_id == VA_INVALID_ID)
2079 return VA_STATUS_ERROR_ALLOCATION_FAILED;
2081 obj_image = IMAGE(image_id);
2084 return VA_STATUS_ERROR_ALLOCATION_FAILED;
2086 obj_image->bo = NULL;
2087 obj_image->palette = NULL;
2088 obj_image->derived_surface = VA_INVALID_ID;
2090 VAImage * const image = &obj_image->image;
2092 memset(image, 0, sizeof(*image));
2093 image->image_id = image_id;
2094 image->buf = VA_INVALID_ID;
2095 image->num_palette_entries = 0;
2096 image->entry_bytes = 0;
2097 image->width = obj_surface->orig_width;
2098 image->height = obj_surface->orig_height;
2099 image->data_size = obj_surface->size;
/* Render state not yet initialized: default to YV12 layout. */
2101 if (!render_state->inited) {
2102 image->format.fourcc = VA_FOURCC('Y','V','1','2');
2103 image->format.byte_order = VA_LSB_FIRST;
2104 image->format.bits_per_pixel = 12;
2105 image->num_planes = 3;
2106 image->pitches[0] = w_pitch;
2107 image->offsets[0] = 0;
2108 image->pitches[1] = w_pitch / 2;
2109 image->offsets[1] = w_pitch * h_pitch;
2110 image->pitches[2] = w_pitch / 2;
2111 image->offsets[2] = w_pitch * h_pitch + (w_pitch / 2) * (h_pitch / 2);
2113 if (render_state->interleaved_uv) {
2114 image->format.fourcc = VA_FOURCC('N','V','1','2');
2115 image->format.byte_order = VA_LSB_FIRST;
2116 image->format.bits_per_pixel = 12;
2117 image->num_planes = 2;
2118 image->pitches[0] = w_pitch;
2119 image->offsets[0] = 0;
2120 image->pitches[1] = w_pitch;
2121 image->offsets[1] = w_pitch * h_pitch;
2123 image->format.fourcc = VA_FOURCC('I','4','2','0');
2124 image->format.byte_order = VA_LSB_FIRST;
2125 image->format.bits_per_pixel = 12;
2126 image->num_planes = 3;
2127 image->pitches[0] = w_pitch;
2128 image->offsets[0] = 0;
2129 image->pitches[1] = w_pitch / 2;
2130 image->offsets[1] = w_pitch * h_pitch;
2131 image->pitches[2] = w_pitch / 2;
2132 image->offsets[2] = w_pitch * h_pitch + (w_pitch / 2) * (h_pitch / 2);
/* Make sure the surface BO exists, then wrap it (no copy) in a buffer. */
2136 i965_check_alloc_surface_bo(ctx, obj_surface, HAS_TILED_SURFACE(i965), image->format.fourcc, SUBSAMPLE_YUV420);
2137 va_status = i965_create_buffer_internal(ctx, 0, VAImageBufferType,
2138 obj_surface->size, 1, NULL, obj_surface->bo, &image->buf);
2139 if (va_status != VA_STATUS_SUCCESS)
2142 obj_image->bo = BUFFER(image->buf)->buffer_store->bo;
2143 dri_bo_reference(obj_image->bo);
/* NOTE(review): sizeof(obj_image->palette) is the pointer size, not the
 * entry size — likely intended sizeof(*obj_image->palette); confirm. */
2145 if (image->num_palette_entries > 0 && image->entry_bytes > 0) {
2146 obj_image->palette = malloc(image->num_palette_entries * sizeof(obj_image->palette));
2147 if (!obj_image->palette) {
2148 va_status = VA_STATUS_ERROR_ALLOCATION_FAILED;
2153 *out_image = *image;
2154 obj_surface->flags |= SURFACE_DERIVED;
2155 obj_image->derived_surface = surface;
2157 return VA_STATUS_SUCCESS;
/* Error path: tear down the partially-constructed image. */
2160 i965_DestroyImage(ctx, image_id);
/* Heap destructor callback for image objects: just release the heap slot. */
2165 i965_destroy_image(struct object_heap *heap, struct object_base *obj)
2167 object_heap_free(heap, obj);
/* vaDestroyImage: drop the BO reference, destroy the backing buffer, free
 * the palette, clear the derived-surface link, and release the object.
 * Destroying an unknown image ID is treated as success. */
2172 i965_DestroyImage(VADriverContextP ctx, VAImageID image)
2174 struct i965_driver_data *i965 = i965_driver_data(ctx);
2175 struct object_image *obj_image = IMAGE(image);
2176 struct object_surface *obj_surface;
2179 return VA_STATUS_SUCCESS;
2181 dri_bo_unreference(obj_image->bo);
2182 obj_image->bo = NULL;
2184 if (obj_image->image.buf != VA_INVALID_ID) {
2185 i965_DestroyBuffer(ctx, obj_image->image.buf);
2186 obj_image->image.buf = VA_INVALID_ID;
2189 if (obj_image->palette) {
2190 free(obj_image->palette);
2191 obj_image->palette = NULL;
/* If this image was derived from a surface, un-mark that surface. */
2194 obj_surface = SURFACE(obj_image->derived_surface);
2197 obj_surface->flags &= ~SURFACE_DERIVED;
2200 i965_destroy_image(&i965->image_heap, (struct object_base *)obj_image);
2202 return VA_STATUS_SUCCESS;
2206 * pointer to an array holding the palette data. The size of the array is
2207 * num_palette_entries * entry_bytes in size. The order of the components
2208 * in the palette is described by the component_order in VASubpicture struct
/* vaSetImagePalette: pack the caller's 3-bytes-per-entry palette into the
 * image's stored 32-bit 0x00RRGGBB entries. */
2211 i965_SetImagePalette(VADriverContextP ctx,
2213 unsigned char *palette)
2215 struct i965_driver_data *i965 = i965_driver_data(ctx);
2218 struct object_image *obj_image = IMAGE(image);
2220 return VA_STATUS_ERROR_INVALID_IMAGE;
2222 if (!obj_image->palette)
2223 return VA_STATUS_ERROR_ALLOCATION_FAILED; /* XXX: unpaletted/error */
/* Each source entry is R,G,B byte triplets; pack as R<<16 | G<<8 | B. */
2225 for (i = 0; i < obj_image->image.num_palette_entries; i++)
2226 obj_image->palette[i] = (((unsigned int)palette[3*i + 0] << 16) |
2227 ((unsigned int)palette[3*i + 1] << 8) |
2228 (unsigned int)palette[3*i + 2]);
2229 return VA_STATUS_SUCCESS;
/* Copy a 2-D pixel region row by row between buffers with (possibly)
 * different strides; `len` is the byte width of each row. */
2233 memcpy_pic(uint8_t *dst, unsigned int dst_stride,
2234 const uint8_t *src, unsigned int src_stride,
2235 unsigned int len, unsigned int height)
2239 for (i = 0; i < height; i++) {
2240 memcpy(dst, src, len);
/* Copy the requested rectangle of a planar 4:2:0 surface into image_data.
 * U/V destination plane indices are swapped when the image fourcc differs
 * from the surface fourcc (I420 vs YV12 plane order).
 * Assumes `Y` is a plane-index constant defined nearby — TODO confirm
 * (not visible in this chunk). */
2247 get_image_i420(struct object_image *obj_image, uint8_t *image_data,
2248 struct object_surface *obj_surface,
2249 const VARectangle *rect)
2251 uint8_t *dst[3], *src[3];
/* Same fourcc: planes map 1:1; different fourcc: swap U and V targets. */
2253 const int U = obj_image->image.format.fourcc == obj_surface->fourcc ? 1 : 2;
2254 const int V = obj_image->image.format.fourcc == obj_surface->fourcc ? 2 : 1;
2255 unsigned int tiling, swizzle;
2257 if (!obj_surface->bo)
2260 assert(obj_surface->fourcc);
2261 dri_bo_get_tiling(obj_surface->bo, &tiling, &swizzle);
/* Tiled BOs must be mapped through the GTT for linear CPU access. */
2263 if (tiling != I915_TILING_NONE)
2264 drm_intel_gem_bo_map_gtt(obj_surface->bo);
2266 dri_bo_map(obj_surface->bo, 0);
2268 if (!obj_surface->bo->virtual)
2271 /* Dest VA image has either I420 or YV12 format.
2272 Source VA surface always has I420 format */
2273 dst[Y] = image_data + obj_image->image.offsets[Y];
2274 src[0] = (uint8_t *)obj_surface->bo->virtual;
2275 dst[U] = image_data + obj_image->image.offsets[U];
2276 src[1] = src[0] + obj_surface->width * obj_surface->height;
2277 dst[V] = image_data + obj_image->image.offsets[V];
2278 src[2] = src[1] + (obj_surface->width / 2) * (obj_surface->height / 2);
/* Luma: full-resolution rectangle. */
2281 dst[Y] += rect->y * obj_image->image.pitches[Y] + rect->x;
2282 src[0] += rect->y * obj_surface->width + rect->x;
2283 memcpy_pic(dst[Y], obj_image->image.pitches[Y],
2284 src[0], obj_surface->width,
2285 rect->width, rect->height);
/* Chroma: half-resolution rectangle in both dimensions. */
2288 dst[U] += (rect->y / 2) * obj_image->image.pitches[U] + rect->x / 2;
2289 src[1] += (rect->y / 2) * obj_surface->width / 2 + rect->x / 2;
2290 memcpy_pic(dst[U], obj_image->image.pitches[U],
2291 src[1], obj_surface->width / 2,
2292 rect->width / 2, rect->height / 2);
2295 dst[V] += (rect->y / 2) * obj_image->image.pitches[V] + rect->x / 2;
2296 src[2] += (rect->y / 2) * obj_surface->width / 2 + rect->x / 2;
2297 memcpy_pic(dst[V], obj_image->image.pitches[V],
2298 src[2], obj_surface->width / 2,
2299 rect->width / 2, rect->height / 2);
2301 if (tiling != I915_TILING_NONE)
2302 drm_intel_gem_bo_unmap_gtt(obj_surface->bo);
2304 dri_bo_unmap(obj_surface->bo);
/* Copy the requested rectangle of an NV12 surface (Y plane + interleaved
 * UV plane) into image_data. */
2308 get_image_nv12(struct object_image *obj_image, uint8_t *image_data,
2309 struct object_surface *obj_surface,
2310 const VARectangle *rect)
2312 uint8_t *dst[2], *src[2];
2313 unsigned int tiling, swizzle;
2315 if (!obj_surface->bo)
2318 assert(obj_surface->fourcc);
2319 dri_bo_get_tiling(obj_surface->bo, &tiling, &swizzle);
/* Tiled BOs must be mapped through the GTT for linear CPU access. */
2321 if (tiling != I915_TILING_NONE)
2322 drm_intel_gem_bo_map_gtt(obj_surface->bo);
2324 dri_bo_map(obj_surface->bo, 0);
2326 if (!obj_surface->bo->virtual)
2329 /* Both dest VA image and source surface have NV12 format */
2330 dst[0] = image_data + obj_image->image.offsets[0];
2331 src[0] = (uint8_t *)obj_surface->bo->virtual;
2332 dst[1] = image_data + obj_image->image.offsets[1];
2333 src[1] = src[0] + obj_surface->width * obj_surface->height;
/* Luma: full-resolution rectangle. */
2336 dst[0] += rect->y * obj_image->image.pitches[0] + rect->x;
2337 src[0] += rect->y * obj_surface->width + rect->x;
2338 memcpy_pic(dst[0], obj_image->image.pitches[0],
2339 src[0], obj_surface->width,
2340 rect->width, rect->height);
/* UV plane: half vertical resolution; x rounded down to an even offset
 * (rect->x & -2) so the copy starts on a U byte. */
2343 dst[1] += (rect->y / 2) * obj_image->image.pitches[1] + (rect->x & -2);
2344 src[1] += (rect->y / 2) * obj_surface->width + (rect->x & -2);
2345 memcpy_pic(dst[1], obj_image->image.pitches[1],
2346 src[1], obj_surface->width,
2347 rect->width, rect->height / 2);
2349 if (tiling != I915_TILING_NONE)
2350 drm_intel_gem_bo_unmap_gtt(obj_surface->bo);
2352 dri_bo_unmap(obj_surface->bo);
2356 i965_GetImage(VADriverContextP ctx,
2357 VASurfaceID surface,
2358 int x, /* coordinates of the upper left source pixel */
2360 unsigned int width, /* width and height of the region */
2361 unsigned int height,
2364 struct i965_driver_data *i965 = i965_driver_data(ctx);
2365 struct i965_render_state *render_state = &i965->render_state;
2367 struct object_surface *obj_surface = SURFACE(surface);
2369 return VA_STATUS_ERROR_INVALID_SURFACE;
2371 struct object_image *obj_image = IMAGE(image);
2373 return VA_STATUS_ERROR_INVALID_IMAGE;
2376 return VA_STATUS_ERROR_INVALID_PARAMETER;
2377 if (x + width > obj_surface->orig_width ||
2378 y + height > obj_surface->orig_height)
2379 return VA_STATUS_ERROR_INVALID_PARAMETER;
2380 if (x + width > obj_image->image.width ||
2381 y + height > obj_image->image.height)
2382 return VA_STATUS_ERROR_INVALID_PARAMETER;
2385 void *image_data = NULL;
2387 va_status = i965_MapBuffer(ctx, obj_image->image.buf, &image_data);
2388 if (va_status != VA_STATUS_SUCCESS)
2395 rect.height = height;
2397 switch (obj_image->image.format.fourcc) {
2398 case VA_FOURCC('Y','V','1','2'):
2399 case VA_FOURCC('I','4','2','0'):
2400 /* I420 is native format for MPEG-2 decoded surfaces */
2401 if (render_state->interleaved_uv)
2402 goto operation_failed;
2403 get_image_i420(obj_image, image_data, obj_surface, &rect);
2405 case VA_FOURCC('N','V','1','2'):
2406 /* NV12 is native format for H.264 decoded surfaces */
2407 if (!render_state->interleaved_uv)
2408 goto operation_failed;
2409 get_image_nv12(obj_image, image_data, obj_surface, &rect);
2413 va_status = VA_STATUS_ERROR_OPERATION_FAILED;
2417 i965_UnmapBuffer(ctx, obj_image->image.buf);
2422 i965_PutSurface(VADriverContextP ctx,
2423 VASurfaceID surface,
2424 void *draw, /* X Drawable */
2427 unsigned short srcw,
2428 unsigned short srch,
2431 unsigned short destw,
2432 unsigned short desth,
2433 VARectangle *cliprects, /* client supplied clip list */
2434 unsigned int number_cliprects, /* number of clip rects in the clip list */
2435 unsigned int flags) /* de-interlacing flags */
2438 if (IS_VA_X11(ctx)) {
2439 VARectangle src_rect, dst_rect;
2443 src_rect.width = srcw;
2444 src_rect.height = srch;
2448 dst_rect.width = destw;
2449 dst_rect.height = desth;
2451 return i965_put_surface_dri(ctx, surface, draw, &src_rect, &dst_rect,
2452 cliprects, number_cliprects, flags);
2455 return VA_STATUS_ERROR_UNIMPLEMENTED;
2459 i965_Terminate(VADriverContextP ctx)
2461 struct i965_driver_data *i965 = i965_driver_data(ctx);
2464 intel_batchbuffer_free(i965->batch);
2466 _i965DestroyMutex(&i965->render_mutex);
2470 i965_output_dri_terminate(ctx);
2473 #ifdef HAVE_VA_WAYLAND
2474 if (IS_VA_WAYLAND(ctx))
2475 i965_output_wayland_terminate(ctx);
2478 if (i965_render_terminate(ctx) == False)
2479 return VA_STATUS_ERROR_UNKNOWN;
2481 if (i965_post_processing_terminate(ctx) == False)
2482 return VA_STATUS_ERROR_UNKNOWN;
2484 i965_display_attributes_terminate(ctx);
2486 i965_destroy_heap(&i965->buffer_heap, i965_destroy_buffer);
2487 i965_destroy_heap(&i965->image_heap, i965_destroy_image);
2488 i965_destroy_heap(&i965->subpic_heap, i965_destroy_subpic);
2489 i965_destroy_heap(&i965->surface_heap, i965_destroy_surface);
2490 i965_destroy_heap(&i965->context_heap, i965_destroy_context);
2491 i965_destroy_heap(&i965->config_heap, i965_destroy_config);
2493 if (intel_driver_terminate(ctx) == False)
2494 return VA_STATUS_ERROR_UNKNOWN;
2496 free(ctx->pDriverData);
2497 ctx->pDriverData = NULL;
2499 return VA_STATUS_SUCCESS;
2504 VADriverContextP ctx, /* in */
2505 VABufferID buf_id, /* in */
2506 VABufferType *type, /* out */
2507 unsigned int *size, /* out */
2508 unsigned int *num_elements /* out */
2511 struct i965_driver_data *i965 = NULL;
2512 struct object_buffer *obj_buffer = NULL;
2514 i965 = i965_driver_data(ctx);
2515 obj_buffer = BUFFER(buf_id);
2517 *type = obj_buffer->type;
2518 *size = obj_buffer->size_element;
2519 *num_elements = obj_buffer->num_elements;
2521 return VA_STATUS_SUCCESS;
2526 VADriverContextP ctx, /* in */
2527 VASurfaceID surface, /* in */
2528 unsigned int *fourcc, /* out */
2529 unsigned int *luma_stride, /* out */
2530 unsigned int *chroma_u_stride, /* out */
2531 unsigned int *chroma_v_stride, /* out */
2532 unsigned int *luma_offset, /* out */
2533 unsigned int *chroma_u_offset, /* out */
2534 unsigned int *chroma_v_offset, /* out */
2535 unsigned int *buffer_name, /* out */
2536 void **buffer /* out */
2539 VAStatus vaStatus = VA_STATUS_SUCCESS;
2540 struct i965_driver_data *i965 = i965_driver_data(ctx);
2541 struct object_surface *obj_surface = NULL;
2545 assert(luma_stride);
2546 assert(chroma_u_stride);
2547 assert(chroma_v_stride);
2548 assert(luma_offset);
2549 assert(chroma_u_offset);
2550 assert(chroma_v_offset);
2551 assert(buffer_name);
2554 tmpImage.image_id = VA_INVALID_ID;
2556 obj_surface = SURFACE(surface);
2557 if (obj_surface == NULL) {
2558 // Surface is absent.
2559 vaStatus = VA_STATUS_ERROR_INVALID_PARAMETER;
2563 // Lock functionality is absent now.
2564 if (obj_surface->locked_image_id != VA_INVALID_ID) {
2565 // Surface is locked already.
2566 vaStatus = VA_STATUS_ERROR_INVALID_PARAMETER;
2570 vaStatus = i965_DeriveImage(
2574 if (vaStatus != VA_STATUS_SUCCESS) {
2578 obj_surface->locked_image_id = tmpImage.image_id;
2580 vaStatus = i965_MapBuffer(
2584 if (vaStatus != VA_STATUS_SUCCESS) {
2588 *fourcc = tmpImage.format.fourcc;
2589 *luma_offset = tmpImage.offsets[0];
2590 *luma_stride = tmpImage.pitches[0];
2591 *chroma_u_offset = tmpImage.offsets[1];
2592 *chroma_u_stride = tmpImage.pitches[1];
2593 *chroma_v_offset = tmpImage.offsets[2];
2594 *chroma_v_stride = tmpImage.pitches[2];
2595 *buffer_name = tmpImage.buf;
2598 if (vaStatus != VA_STATUS_SUCCESS) {
2607 VADriverContextP ctx, /* in */
2608 VASurfaceID surface /* in */
2611 VAStatus vaStatus = VA_STATUS_SUCCESS;
2612 struct i965_driver_data *i965 = i965_driver_data(ctx);
2613 struct object_image *locked_img = NULL;
2614 struct object_surface *obj_surface = NULL;
2616 obj_surface = SURFACE(surface);
2618 if (obj_surface == NULL) {
2619 vaStatus = VA_STATUS_ERROR_INVALID_PARAMETER; // Surface is absent
2622 if (obj_surface->locked_image_id == VA_INVALID_ID) {
2623 vaStatus = VA_STATUS_ERROR_INVALID_PARAMETER; // Surface is not locked
2627 locked_img = IMAGE(obj_surface->locked_image_id);
2628 if (locked_img == NULL || (locked_img->image.image_id == VA_INVALID_ID)) {
2629 // Work image was deallocated before i965_UnlockSurface()
2630 vaStatus = VA_STATUS_ERROR_INVALID_PARAMETER;
2634 vaStatus = i965_UnmapBuffer(
2636 locked_img->image.buf);
2637 if (vaStatus != VA_STATUS_SUCCESS) {
2641 vaStatus = i965_DestroyImage(
2643 locked_img->image.image_id);
2644 if (vaStatus != VA_STATUS_SUCCESS) {
2648 locked_img->image.image_id = VA_INVALID_ID;
2651 obj_surface->locked_image_id = VA_INVALID_ID;
2657 VA_DRIVER_INIT_FUNC(VADriverContextP ctx);
2660 VA_DRIVER_INIT_FUNC( VADriverContextP ctx )
2662 struct VADriverVTable * const vtable = ctx->vtable;
2663 struct i965_driver_data *i965;
2666 ctx->version_major = VA_MAJOR_VERSION;
2667 ctx->version_minor = VA_MINOR_VERSION;
2668 ctx->max_profiles = I965_MAX_PROFILES;
2669 ctx->max_entrypoints = I965_MAX_ENTRYPOINTS;
2670 ctx->max_attributes = I965_MAX_CONFIG_ATTRIBUTES;
2671 ctx->max_image_formats = I965_MAX_IMAGE_FORMATS;
2672 ctx->max_subpic_formats = I965_MAX_SUBPIC_FORMATS;
2673 ctx->max_display_attributes = 1 + ARRAY_ELEMS(i965_display_attributes);
2675 vtable->vaTerminate = i965_Terminate;
2676 vtable->vaQueryConfigEntrypoints = i965_QueryConfigEntrypoints;
2677 vtable->vaQueryConfigProfiles = i965_QueryConfigProfiles;
2678 vtable->vaQueryConfigEntrypoints = i965_QueryConfigEntrypoints;
2679 vtable->vaQueryConfigAttributes = i965_QueryConfigAttributes;
2680 vtable->vaCreateConfig = i965_CreateConfig;
2681 vtable->vaDestroyConfig = i965_DestroyConfig;
2682 vtable->vaGetConfigAttributes = i965_GetConfigAttributes;
2683 vtable->vaCreateSurfaces = i965_CreateSurfaces;
2684 vtable->vaDestroySurfaces = i965_DestroySurfaces;
2685 vtable->vaCreateContext = i965_CreateContext;
2686 vtable->vaDestroyContext = i965_DestroyContext;
2687 vtable->vaCreateBuffer = i965_CreateBuffer;
2688 vtable->vaBufferSetNumElements = i965_BufferSetNumElements;
2689 vtable->vaMapBuffer = i965_MapBuffer;
2690 vtable->vaUnmapBuffer = i965_UnmapBuffer;
2691 vtable->vaDestroyBuffer = i965_DestroyBuffer;
2692 vtable->vaBeginPicture = i965_BeginPicture;
2693 vtable->vaRenderPicture = i965_RenderPicture;
2694 vtable->vaEndPicture = i965_EndPicture;
2695 vtable->vaSyncSurface = i965_SyncSurface;
2696 vtable->vaQuerySurfaceStatus = i965_QuerySurfaceStatus;
2697 vtable->vaPutSurface = i965_PutSurface;
2698 vtable->vaQueryImageFormats = i965_QueryImageFormats;
2699 vtable->vaCreateImage = i965_CreateImage;
2700 vtable->vaDeriveImage = i965_DeriveImage;
2701 vtable->vaDestroyImage = i965_DestroyImage;
2702 vtable->vaSetImagePalette = i965_SetImagePalette;
2703 vtable->vaGetImage = i965_GetImage;
2704 vtable->vaPutImage = i965_PutImage;
2705 vtable->vaQuerySubpictureFormats = i965_QuerySubpictureFormats;
2706 vtable->vaCreateSubpicture = i965_CreateSubpicture;
2707 vtable->vaDestroySubpicture = i965_DestroySubpicture;
2708 vtable->vaSetSubpictureImage = i965_SetSubpictureImage;
2709 vtable->vaSetSubpictureChromakey = i965_SetSubpictureChromakey;
2710 vtable->vaSetSubpictureGlobalAlpha = i965_SetSubpictureGlobalAlpha;
2711 vtable->vaAssociateSubpicture = i965_AssociateSubpicture;
2712 vtable->vaDeassociateSubpicture = i965_DeassociateSubpicture;
2713 vtable->vaQueryDisplayAttributes = i965_QueryDisplayAttributes;
2714 vtable->vaGetDisplayAttributes = i965_GetDisplayAttributes;
2715 vtable->vaSetDisplayAttributes = i965_SetDisplayAttributes;
2716 vtable->vaBufferInfo = i965_BufferInfo;
2717 vtable->vaLockSurface = i965_LockSurface;
2718 vtable->vaUnlockSurface = i965_UnlockSurface;
2719 // vtable->vaDbgCopySurfaceToBuffer = i965_DbgCopySurfaceToBuffer;
2721 i965 = (struct i965_driver_data *)calloc(1, sizeof(*i965));
2723 ctx->pDriverData = (void *)i965;
2725 result = object_heap_init(&i965->config_heap,
2726 sizeof(struct object_config),
2728 assert(result == 0);
2730 result = object_heap_init(&i965->context_heap,
2731 sizeof(struct object_context),
2733 assert(result == 0);
2735 result = object_heap_init(&i965->surface_heap,
2736 sizeof(struct object_surface),
2738 assert(result == 0);
2740 result = object_heap_init(&i965->buffer_heap,
2741 sizeof(struct object_buffer),
2743 assert(result == 0);
2745 result = object_heap_init(&i965->image_heap,
2746 sizeof(struct object_image),
2748 assert(result == 0);
2750 result = object_heap_init(&i965->subpic_heap,
2751 sizeof(struct object_subpic),
2753 assert(result == 0);
2755 sprintf(i965->va_vendor, "%s %s driver - %d.%d.%d",
2756 INTEL_STR_DRIVER_VENDOR,
2757 INTEL_STR_DRIVER_NAME,
2758 INTEL_DRIVER_MAJOR_VERSION,
2759 INTEL_DRIVER_MINOR_VERSION,
2760 INTEL_DRIVER_MICRO_VERSION);
2762 if (INTEL_DRIVER_PRE_VERSION > 0) {
2763 const int len = strlen(i965->va_vendor);
2764 sprintf(&i965->va_vendor[len], ".pre%d", INTEL_DRIVER_PRE_VERSION);
2766 ctx->str_vendor = i965->va_vendor;
2768 return i965_Init(ctx);