2 * Copyright © 2009 Intel Corporation
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the
6 * "Software"), to deal in the Software without restriction, including
7 * without limitation the rights to use, copy, modify, merge, publish,
8 * distribute, sub license, and/or sell copies of the Software, and to
9 * permit persons to whom the Software is furnished to do so, subject to
10 * the following conditions:
12 * The above copyright notice and this permission notice (including the
13 * next paragraph) shall be included in all copies or substantial portions
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
17 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
18 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
19 * IN NO EVENT SHALL PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR
20 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
21 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
22 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
25 * Xiang Haihao <haihao.xiang@intel.com>
26 * Zou Nan hai <nanhai.zou@intel.com>
35 #include "va/x11/va_dricommon.h"
37 #include "intel_driver.h"
38 #include "intel_memman.h"
39 #include "intel_batchbuffer.h"
40 #include "i965_defines.h"
41 #include "i965_drv_video.h"
/*
 * Object IDs handed back to libva are offset per object type so that a
 * bare ID value encodes which internal heap it belongs to.
 */
#define CONFIG_ID_OFFSET 0x01000000
#define CONTEXT_ID_OFFSET 0x02000000
#define SURFACE_ID_OFFSET 0x04000000
#define BUFFER_ID_OFFSET 0x08000000
#define IMAGE_ID_OFFSET 0x0a000000
#define SUBPIC_ID_OFFSET 0x10000000
/* Device capability predicates, keyed off the PCI device id (and, for
 * Gen6, the presence of the BSD ring). */
#define HAS_MPEG2(ctx) (IS_G4X((ctx)->intel.device_id) || \
                        IS_IRONLAKE((ctx)->intel.device_id) || \
                        (IS_GEN6((ctx)->intel.device_id) && (ctx)->intel.has_bsd))
/* NOTE(review): HAS_H264 ends with '&&' plus a line continuation but no
 * final operand is visible here -- confirm the trailing term (has_bsd?). */
#define HAS_H264(ctx) ((IS_GEN6((ctx)->intel.device_id) || \
                        IS_IRONLAKE((ctx)->intel.device_id)) && \
#define HAS_VC1(ctx) (IS_GEN6((ctx)->intel.device_id) && (ctx)->intel.has_bsd)
/* Interleaved-UV surfaces are only used on Gen6 (see render_state). */
#define HAS_TILED_SURFACE(ctx) (IS_GEN6((ctx)->intel.device_id) && \
                                (ctx)->render_state.interleaved_uv)
/* NOTE(review): HAS_ENCODER's trailing condition continues past this line. */
#define HAS_ENCODER(ctx) (IS_GEN6((ctx)->intel.device_id) && \
67 I965_SURFACETYPE_RGBA = 1,
69 I965_SURFACETYPE_INDEXED
/* List of supported image formats */
    VAImageFormat va_format; /* VA-level description (fourcc, byte order, bpp) */
} i965_image_format_map_t;

/* Format table walked by i965_QueryImageFormats(); the [+ 1] slot leaves
 * room for a zero terminator (lookups stop at fourcc == 0). */
static const i965_image_format_map_t
i965_image_formats_map[I965_MAX_IMAGE_FORMATS + 1] = {
    /* All entries are 12bpp YUV layouts. */
    { I965_SURFACETYPE_YUV,
      { VA_FOURCC('Y','V','1','2'), VA_LSB_FIRST, 12, } },
    { I965_SURFACETYPE_YUV,
      { VA_FOURCC('I','4','2','0'), VA_LSB_FIRST, 12, } },
    { I965_SURFACETYPE_YUV,
      { VA_FOURCC('N','V','1','2'), VA_LSB_FIRST, 12, } },
/* List of supported subpicture formats */
    VAImageFormat va_format;  /* VA-level format description */
    unsigned int va_flags;    /* per-format flags reported to the client */
} i965_subpic_format_map_t;

/* Format table walked by i965_QuerySubpictureFormats() and
 * get_subpic_format(); the [+ 1] slot leaves room for a terminator. */
static const i965_subpic_format_map_t
i965_subpic_formats_map[I965_MAX_SUBPIC_FORMATS + 1] = {
    { I965_SURFACETYPE_INDEXED, I965_SURFACEFORMAT_P4A4_UNORM,
      { VA_FOURCC('I','A','4','4'), VA_MSB_FIRST, 8, },
    { I965_SURFACETYPE_INDEXED, I965_SURFACEFORMAT_A4P4_UNORM,
      { VA_FOURCC('A','I','4','4'), VA_MSB_FIRST, 8, },
    /* RGBA entries also carry depth and channel masks so lookups can
     * tell BGRA from RGBA. */
    { I965_SURFACETYPE_RGBA, I965_SURFACEFORMAT_B8G8R8A8_UNORM,
      { VA_FOURCC('B','G','R','A'), VA_LSB_FIRST, 32,
        32, 0x00ff0000, 0x0000ff00, 0x000000ff, 0xff000000 },
    { I965_SURFACETYPE_RGBA, I965_SURFACEFORMAT_R8G8B8A8_UNORM,
      { VA_FOURCC('R','G','B','A'), VA_LSB_FIRST, 32,
        32, 0x000000ff, 0x0000ff00, 0x00ff0000, 0xff000000 },
/*
 * Look up the subpicture format table entry matching va_format.
 * Non-RGBA formats match on fourcc alone; RGBA formats must also match
 * byte order and all four channel masks.
 */
static const i965_subpic_format_map_t *
get_subpic_format(const VAImageFormat *va_format)
    /* The table is terminated by an entry with type == 0. */
    for (i = 0; i965_subpic_formats_map[i].type != 0; i++) {
        const i965_subpic_format_map_t * const m = &i965_subpic_formats_map[i];
        if (m->va_format.fourcc == va_format->fourcc &&
            (m->type == I965_SURFACETYPE_RGBA ?
             (m->va_format.byte_order == va_format->byte_order &&
              m->va_format.red_mask == va_format->red_mask &&
              m->va_format.green_mask == va_format->green_mask &&
              m->va_format.blue_mask == va_format->blue_mask &&
              m->va_format.alpha_mask == va_format->alpha_mask) : 1))
/* Per-generation codec vtables: each pairs a decoder hw-context factory
 * with an (optional) encoder hw-context factory. */
extern struct hw_context *g4x_dec_hw_context_init(VADriverContextP, VAProfile);
static struct hw_codec_info g4x_hw_codec_info = {
    .dec_hw_context_init = g4x_dec_hw_context_init,
    .enc_hw_context_init = NULL, /* no encoder on G4X */
extern struct hw_context *ironlake_dec_hw_context_init(VADriverContextP, VAProfile);
static struct hw_codec_info ironlake_hw_codec_info = {
    .dec_hw_context_init = ironlake_dec_hw_context_init,
    .enc_hw_context_init = NULL, /* no encoder on Ironlake */
extern struct hw_context *gen6_dec_hw_context_init(VADriverContextP, VAProfile);
extern struct hw_context *gen6_enc_hw_context_init(VADriverContextP, VAProfile);
static struct hw_codec_info gen6_hw_codec_info = {
    .dec_hw_context_init = gen6_dec_hw_context_init,
    .enc_hw_context_init = gen6_enc_hw_context_init, /* Gen6 also encodes */
/*
 * vaQueryConfigProfiles() implementation: append every profile the
 * device supports to profile_list[] (caller-provided, must hold at
 * least I965_MAX_PROFILES entries).
 */
i965_QueryConfigProfiles(VADriverContextP ctx,
                         VAProfile *profile_list, /* out */
                         int *num_profiles) /* out */
    struct i965_driver_data * const i965 = i965_driver_data(ctx);

    if (HAS_MPEG2(i965)) {
        profile_list[i++] = VAProfileMPEG2Simple;
        profile_list[i++] = VAProfileMPEG2Main;

    if (HAS_H264(i965)) {
        profile_list[i++] = VAProfileH264Baseline;
        profile_list[i++] = VAProfileH264Main;
        profile_list[i++] = VAProfileH264High;

    /* NOTE(review): a HAS_VC1(i965) guard is expected ahead of the VC-1
     * profiles (mirroring i965_CreateConfig) -- confirm. */
        profile_list[i++] = VAProfileVC1Simple;
        profile_list[i++] = VAProfileVC1Main;
        profile_list[i++] = VAProfileVC1Advanced;

    /* If the assert fails then I965_MAX_PROFILES needs to be bigger */
    assert(i <= I965_MAX_PROFILES);

    return VA_STATUS_SUCCESS;
/*
 * vaQueryConfigEntrypoints() implementation: list the entrypoints
 * available for the given profile -- VLD decode everywhere, plus
 * EncSlice for H.264 when the device has an encoder.
 */
i965_QueryConfigEntrypoints(VADriverContextP ctx,
                            VAEntrypoint *entrypoint_list, /* out */
                            int *num_entrypoints) /* out */
    struct i965_driver_data * const i965 = i965_driver_data(ctx);

    case VAProfileMPEG2Simple:
    case VAProfileMPEG2Main:
        entrypoint_list[n++] = VAEntrypointVLD;

    case VAProfileH264Baseline:
    case VAProfileH264Main:
    case VAProfileH264High:
        entrypoint_list[n++] = VAEntrypointVLD;

        /* H.264 additionally offers encode when the device supports it. */
        if (HAS_ENCODER(i965))
            entrypoint_list[n++] = VAEntrypointEncSlice;

    case VAProfileVC1Simple:
    case VAProfileVC1Main:
    case VAProfileVC1Advanced:
        entrypoint_list[n++] = VAEntrypointVLD;

    /* If the assert fails then I965_MAX_ENTRYPOINTS needs to be bigger */
    assert(n <= I965_MAX_ENTRYPOINTS);
    *num_entrypoints = n;
    /* No entrypoints means the profile itself is unsupported. */
    return n > 0 ? VA_STATUS_SUCCESS : VA_STATUS_ERROR_UNSUPPORTED_PROFILE;
/*
 * vaGetConfigAttributes() implementation: fill in the value field for
 * each requested attribute.  Unknown attribute types get
 * VA_ATTRIB_NOT_SUPPORTED rather than an error.
 */
i965_GetConfigAttributes(VADriverContextP ctx,
                         VAEntrypoint entrypoint,
                         VAConfigAttrib *attrib_list, /* in/out */
    /* Other attributes don't seem to be defined */
    /* What to do if we don't know the attribute? */
    for (i = 0; i < num_attribs; i++) {
        switch (attrib_list[i].type) {
        case VAConfigAttribRTFormat:
            /* Only YUV 4:2:0 render targets are supported (matches the
             * check in i965_CreateSurfaces). */
            attrib_list[i].value = VA_RT_FORMAT_YUV420;

        case VAConfigAttribRateControl:
            attrib_list[i].value = VA_RC_VBR;

            attrib_list[i].value = VA_ATTRIB_NOT_SUPPORTED;

    return VA_STATUS_SUCCESS;
/* Return a config object to its heap; configs own no other resources. */
i965_destroy_config(struct object_heap *heap, struct object_base *obj)
    object_heap_free(heap, obj);
265 i965_update_attribute(struct object_config *obj_config, VAConfigAttrib *attrib)
269 /* Check existing attrbiutes */
270 for (i = 0; obj_config->num_attribs < i; i++) {
271 if (obj_config->attrib_list[i].type == attrib->type) {
272 /* Update existing attribute */
273 obj_config->attrib_list[i].value = attrib->value;
274 return VA_STATUS_SUCCESS;
278 if (obj_config->num_attribs < I965_MAX_CONFIG_ATTRIBUTES) {
279 i = obj_config->num_attribs;
280 obj_config->attrib_list[i].type = attrib->type;
281 obj_config->attrib_list[i].value = attrib->value;
282 obj_config->num_attribs++;
283 return VA_STATUS_SUCCESS;
286 return VA_STATUS_ERROR_MAX_NUM_EXCEEDED;
/*
 * vaCreateConfig() implementation: validate the profile/entrypoint pair
 * against hardware capabilities, allocate a config object, seed it with
 * the implied YUV420 RT-format attribute, then merge in the caller's
 * attributes via i965_update_attribute().
 */
i965_CreateConfig(VADriverContextP ctx,
                  VAEntrypoint entrypoint,
                  VAConfigAttrib *attrib_list,
                  VAConfigID *config_id) /* out */
    struct i965_driver_data * const i965 = i965_driver_data(ctx);
    struct object_config *obj_config;

    /* Validate profile & entrypoint */
    case VAProfileMPEG2Simple:
    case VAProfileMPEG2Main:
        if (HAS_MPEG2(i965) && VAEntrypointVLD == entrypoint) {
            vaStatus = VA_STATUS_SUCCESS;
            vaStatus = VA_STATUS_ERROR_UNSUPPORTED_ENTRYPOINT;

    case VAProfileH264Baseline:
    case VAProfileH264Main:
    case VAProfileH264High:
        /* H.264 accepts either decode (VLD) or encode (EncSlice). */
        if ((HAS_H264(i965) && VAEntrypointVLD == entrypoint) ||
            (HAS_ENCODER(i965) && VAEntrypointEncSlice == entrypoint)) {
            vaStatus = VA_STATUS_SUCCESS;
            vaStatus = VA_STATUS_ERROR_UNSUPPORTED_ENTRYPOINT;

    case VAProfileVC1Simple:
    case VAProfileVC1Main:
    case VAProfileVC1Advanced:
        if (HAS_VC1(i965) && VAEntrypointVLD == entrypoint) {
            vaStatus = VA_STATUS_SUCCESS;
            vaStatus = VA_STATUS_ERROR_UNSUPPORTED_ENTRYPOINT;

        vaStatus = VA_STATUS_ERROR_UNSUPPORTED_PROFILE;

    if (VA_STATUS_SUCCESS != vaStatus) {

    configID = NEW_CONFIG_ID();
    obj_config = CONFIG(configID);

    if (NULL == obj_config) {
        vaStatus = VA_STATUS_ERROR_ALLOCATION_FAILED;

    obj_config->profile = profile;
    obj_config->entrypoint = entrypoint;
    /* Every config implicitly carries the YUV420 RT-format attribute. */
    obj_config->attrib_list[0].type = VAConfigAttribRTFormat;
    obj_config->attrib_list[0].value = VA_RT_FORMAT_YUV420;
    obj_config->num_attribs = 1;

    for(i = 0; i < num_attribs; i++) {
        vaStatus = i965_update_attribute(obj_config, &(attrib_list[i]));

        if (VA_STATUS_SUCCESS != vaStatus) {

    /* Roll back the allocation if attribute merging failed. */
    if (VA_STATUS_SUCCESS != vaStatus) {
        i965_destroy_config(&i965->config_heap, (struct object_base *)obj_config);
        *config_id = configID;
/* vaDestroyConfig() implementation: look up and free the config object. */
i965_DestroyConfig(VADriverContextP ctx, VAConfigID config_id)
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    struct object_config *obj_config = CONFIG(config_id);

    if (NULL == obj_config) {
        vaStatus = VA_STATUS_ERROR_INVALID_CONFIG;

    i965_destroy_config(&i965->config_heap, (struct object_base *)obj_config);
    return VA_STATUS_SUCCESS;
/*
 * vaQueryConfigAttributes() implementation: copy back the profile,
 * entrypoint and attribute list stored on the config object.
 * attrib_list must hold at least obj_config->num_attribs entries.
 * NOTE(review): no NULL check on obj_config is visible here -- confirm
 * an upstream guard (assert?) exists for invalid config_id values.
 */
VAStatus i965_QueryConfigAttributes(VADriverContextP ctx,
                                    VAConfigID config_id,
                                    VAProfile *profile, /* out */
                                    VAEntrypoint *entrypoint, /* out */
                                    VAConfigAttrib *attrib_list, /* out */
                                    int *num_attribs) /* out */
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    struct object_config *obj_config = CONFIG(config_id);
    VAStatus vaStatus = VA_STATUS_SUCCESS;

    *profile = obj_config->profile;
    *entrypoint = obj_config->entrypoint;
    *num_attribs = obj_config->num_attribs;

    for(i = 0; i < obj_config->num_attribs; i++) {
        attrib_list[i] = obj_config->attrib_list[i];
/*
 * Free a surface object: drop the GEM buffer references, let the codec
 * release any per-surface private data via its callback, then return
 * the object to the heap.
 */
i965_destroy_surface(struct object_heap *heap, struct object_base *obj)
    struct object_surface *obj_surface = (struct object_surface *)obj;

    dri_bo_unreference(obj_surface->bo);
    obj_surface->bo = NULL;
    dri_bo_unreference(obj_surface->pp_out_bo);
    obj_surface->pp_out_bo = NULL;

    if (obj_surface->free_private_data != NULL) {
        obj_surface->free_private_data(&obj_surface->private_data);
        obj_surface->private_data = NULL;

    object_heap_free(heap, obj);
/*
 * vaCreateSurfaces() implementation.  Only VA_RT_FORMAT_YUV420 is
 * accepted.  Surface objects are created lazily: geometry and flags are
 * recorded here while the backing bo stays NULL until first use.  On
 * failure, every surface created so far is rolled back.
 */
i965_CreateSurfaces(VADriverContextP ctx,
                    VASurfaceID *surfaces) /* out */
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    VAStatus vaStatus = VA_STATUS_SUCCESS;

    /* We only support one format */
    if (VA_RT_FORMAT_YUV420 != format) {
        return VA_STATUS_ERROR_UNSUPPORTED_RT_FORMAT;

    for (i = 0; i < num_surfaces; i++) {
        int surfaceID = NEW_SURFACE_ID();
        struct object_surface *obj_surface = SURFACE(surfaceID);

        if (NULL == obj_surface) {
            vaStatus = VA_STATUS_ERROR_ALLOCATION_FAILED;

        surfaces[i] = surfaceID;
        obj_surface->status = VASurfaceReady;
        obj_surface->subpic = VA_INVALID_ID;
        obj_surface->orig_width = width;
        obj_surface->orig_height = height;

        /* Gen6 uses coarser alignment (128x32) than earlier
         * generations (16x16). */
        if (IS_GEN6(i965->intel.device_id)) {
            obj_surface->width = ALIGN(obj_surface->orig_width, 128);
            obj_surface->height = ALIGN(obj_surface->orig_height, 32);
            obj_surface->width = ALIGN(obj_surface->orig_width, 16);
            obj_surface->height = ALIGN(obj_surface->orig_height, 16);

        obj_surface->size = SIZE_YUV420(obj_surface->width, obj_surface->height);
        obj_surface->flags = SURFACE_REFERENCED;
        obj_surface->bo = NULL;
        obj_surface->pp_out_bo = NULL;
        obj_surface->locked_image_id = VA_INVALID_ID;
        obj_surface->private_data = NULL;
        obj_surface->free_private_data = NULL;

    /* Error recovery: tear down partially-created surfaces. */
    if (VA_STATUS_SUCCESS != vaStatus) {
        /* surfaces[i-1] was the last successful allocation */
            struct object_surface *obj_surface = SURFACE(surfaces[i]);

            surfaces[i] = VA_INVALID_SURFACE;

            i965_destroy_surface(&i965->surface_heap, (struct object_base *)obj_surface);
/* vaDestroySurfaces() implementation: free in reverse creation order. */
i965_DestroySurfaces(VADriverContextP ctx,
                     VASurfaceID *surface_list,
    struct i965_driver_data *i965 = i965_driver_data(ctx);

    /* Iterates num_surfaces-1 down to 0. */
    for (i = num_surfaces; i--; ) {
        struct object_surface *obj_surface = SURFACE(surface_list[i]);

        i965_destroy_surface(&i965->surface_heap, (struct object_base *)obj_surface);

    return VA_STATUS_SUCCESS;
/*
 * vaQueryImageFormats() implementation: copy the static image-format
 * table into the caller's array (sized >= I965_MAX_IMAGE_FORMATS).
 */
i965_QueryImageFormats(VADriverContextP ctx,
                       VAImageFormat *format_list, /* out */
                       int *num_formats) /* out */
    /* The table is zero-terminated on fourcc. */
    for (n = 0; i965_image_formats_map[n].va_format.fourcc != 0; n++) {
        const i965_image_format_map_t * const m = &i965_image_formats_map[n];

        format_list[n] = m->va_format;

    return VA_STATUS_SUCCESS;
/*
 * vaPutImage() entry point.
 * NOTE(review): this is a stub -- no pixel data is transferred, yet it
 * reports VA_STATUS_SUCCESS.  Consider returning an UNIMPLEMENTED
 * status (as the subpicture setters below do) until implemented.
 */
i965_PutImage(VADriverContextP ctx,
              unsigned int src_width,
              unsigned int src_height,
              unsigned int dest_width,
              unsigned int dest_height)
    return VA_STATUS_SUCCESS;
/*
 * vaQuerySubpictureFormats() implementation: copy the subpicture format
 * table and per-format flags into the caller's parallel arrays.
 */
i965_QuerySubpictureFormats(VADriverContextP ctx,
                            VAImageFormat *format_list, /* out */
                            unsigned int *flags, /* out */
                            unsigned int *num_formats) /* out */
    /* The table is zero-terminated on fourcc. */
    for (n = 0; i965_subpic_formats_map[n].va_format.fourcc != 0; n++) {
        const i965_subpic_format_map_t * const m = &i965_subpic_formats_map[n];

        format_list[n] = m->va_format;

        flags[n] = m->va_flags;

    return VA_STATUS_SUCCESS;
/* Release a subpicture object.  Its bo appears to belong to the source
 * image (no reference is taken at creation -- see i965_CreateSubpicture),
 * so nothing is unreferenced here. */
i965_destroy_subpic(struct object_heap *heap, struct object_base *obj)
    // struct object_subpic *obj_subpic = (struct object_subpic *)obj;

    object_heap_free(heap, obj);
584 i965_CreateSubpicture(VADriverContextP ctx,
586 VASubpictureID *subpicture) /* out */
588 struct i965_driver_data *i965 = i965_driver_data(ctx);
589 VASubpictureID subpicID = NEW_SUBPIC_ID()
590 struct object_subpic *obj_subpic = SUBPIC(subpicID);
593 return VA_STATUS_ERROR_ALLOCATION_FAILED;
595 struct object_image *obj_image = IMAGE(image);
597 return VA_STATUS_ERROR_INVALID_IMAGE;
599 const i965_subpic_format_map_t * const m = get_subpic_format(&obj_image->image.format);
601 return VA_STATUS_ERROR_UNKNOWN; /* XXX: VA_STATUS_ERROR_UNSUPPORTED_FORMAT? */
603 *subpicture = subpicID;
604 obj_subpic->image = image;
605 obj_subpic->format = m->format;
606 obj_subpic->width = obj_image->image.width;
607 obj_subpic->height = obj_image->image.height;
608 obj_subpic->pitch = obj_image->image.pitches[0];
609 obj_subpic->bo = obj_image->bo;
610 return VA_STATUS_SUCCESS;
/*
 * vaDestroySubpicture() implementation.
 * NOTE(review): obj_subpic is passed to i965_destroy_subpic without a
 * NULL check -- confirm SUBPIC() behavior for invalid ids.
 */
i965_DestroySubpicture(VADriverContextP ctx,
                       VASubpictureID subpicture)
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    struct object_subpic *obj_subpic = SUBPIC(subpicture);
    i965_destroy_subpic(&i965->subpic_heap, (struct object_base *)obj_subpic);
    return VA_STATUS_SUCCESS;
/* vaSetSubpictureImage() entry point -- not implemented. */
i965_SetSubpictureImage(VADriverContextP ctx,
                        VASubpictureID subpicture,
    return VA_STATUS_ERROR_UNIMPLEMENTED;
/* vaSetSubpictureChromakey() entry point -- not implemented. */
i965_SetSubpictureChromakey(VADriverContextP ctx,
                            VASubpictureID subpicture,
                            unsigned int chromakey_min,
                            unsigned int chromakey_max,
                            unsigned int chromakey_mask)
    return VA_STATUS_ERROR_UNIMPLEMENTED;
/* vaSetSubpictureGlobalAlpha() entry point -- not implemented. */
i965_SetSubpictureGlobalAlpha(VADriverContextP ctx,
                              VASubpictureID subpicture,
    return VA_STATUS_ERROR_UNIMPLEMENTED;
/*
 * vaAssociateSubpicture() implementation: record the src/dst rectangles
 * on the subpicture object and point each target surface at it.  A
 * surface stores a single subpic id, so a later association replaces
 * any earlier one.
 */
i965_AssociateSubpicture(VADriverContextP ctx,
                         VASubpictureID subpicture,
                         VASurfaceID *target_surfaces,
                         short src_x, /* upper left offset in subpicture */
                         unsigned short src_width,
                         unsigned short src_height,
                         short dest_x, /* upper left offset in surface */
                         unsigned short dest_width,
                         unsigned short dest_height,
                         * whether to enable chroma-keying or global-alpha
                         * see VA_SUBPICTURE_XXX values
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    struct object_subpic *obj_subpic = SUBPIC(subpicture);

    obj_subpic->src_rect.x = src_x;
    obj_subpic->src_rect.y = src_y;
    obj_subpic->src_rect.width = src_width;
    obj_subpic->src_rect.height = src_height;
    obj_subpic->dst_rect.x = dest_x;
    obj_subpic->dst_rect.y = dest_y;
    obj_subpic->dst_rect.width = dest_width;
    obj_subpic->dst_rect.height = dest_height;

    for (i = 0; i < num_surfaces; i++) {
        struct object_surface *obj_surface = SURFACE(target_surfaces[i]);

            return VA_STATUS_ERROR_INVALID_SURFACE;

        obj_surface->subpic = subpicture;

    return VA_STATUS_SUCCESS;
/*
 * vaDeassociateSubpicture() implementation: clear the subpic link on
 * each target surface, but only where it still points at this
 * subpicture (another association may have replaced it).
 */
i965_DeassociateSubpicture(VADriverContextP ctx,
                           VASubpictureID subpicture,
                           VASurfaceID *target_surfaces,
    struct i965_driver_data *i965 = i965_driver_data(ctx);

    for (i = 0; i < num_surfaces; i++) {
        struct object_surface *obj_surface = SURFACE(target_surfaces[i]);

            return VA_STATUS_ERROR_INVALID_SURFACE;

        if (obj_surface->subpic == subpicture)
            obj_surface->subpic = VA_INVALID_ID;

    return VA_STATUS_SUCCESS;
/*
 * Take a reference on buffer_store and stash it in *ptr.
 * *ptr must be empty (asserted); callers release the old store first.
 */
i965_reference_buffer_store(struct buffer_store **ptr,
                            struct buffer_store *buffer_store)
    assert(*ptr == NULL);

        buffer_store->ref_count++;
/*
 * Drop *ptr's reference on its buffer store; on the last reference the
 * backing storage -- either a GEM bo or a malloc'd buffer, never both
 * -- is freed.  Safe to call when *ptr is already NULL.
 */
i965_release_buffer_store(struct buffer_store **ptr)
    struct buffer_store *buffer_store = *ptr;

    if (buffer_store == NULL)

    /* Invariant: exactly one of bo/buffer backs the store. */
    assert(buffer_store->bo || buffer_store->buffer);
    assert(!(buffer_store->bo && buffer_store->buffer));
    buffer_store->ref_count--;

    if (buffer_store->ref_count == 0) {
        dri_bo_unreference(buffer_store->bo);
        free(buffer_store->buffer);
        buffer_store->bo = NULL;
        buffer_store->buffer = NULL;
/*
 * Free a context object: destroy the codec hw context, release every
 * referenced parameter/data buffer store (encoder or decoder flavour),
 * then the render-target array and finally the object itself.
 */
i965_destroy_context(struct object_heap *heap, struct object_base *obj)
    struct object_context *obj_context = (struct object_context *)obj;

    if (obj_context->hw_context) {
        obj_context->hw_context->destroy(obj_context->hw_context);
        obj_context->hw_context = NULL;

    if (obj_context->codec_type == CODEC_ENC) {
        assert(obj_context->codec_state.enc.num_slice_params <= obj_context->codec_state.enc.max_slice_params);
        i965_release_buffer_store(&obj_context->codec_state.enc.pic_param);
        i965_release_buffer_store(&obj_context->codec_state.enc.seq_param);
        /* NOTE(review): codec_state.enc.slice_params (allocated in
         * i965_CreateContext) does not appear to be freed here --
         * possible leak; confirm. */
        /* Decoder branch: release the per-slice params/datas as well. */
        assert(obj_context->codec_state.dec.num_slice_params <= obj_context->codec_state.dec.max_slice_params);
        assert(obj_context->codec_state.dec.num_slice_datas <= obj_context->codec_state.dec.max_slice_datas);

        i965_release_buffer_store(&obj_context->codec_state.dec.pic_param);
        i965_release_buffer_store(&obj_context->codec_state.dec.iq_matrix);
        i965_release_buffer_store(&obj_context->codec_state.dec.bit_plane);

        for (i = 0; i < obj_context->codec_state.dec.num_slice_params; i++)
            i965_release_buffer_store(&obj_context->codec_state.dec.slice_params[i]);

        for (i = 0; i < obj_context->codec_state.dec.num_slice_datas; i++)
            i965_release_buffer_store(&obj_context->codec_state.dec.slice_datas[i]);

        free(obj_context->codec_state.dec.slice_params);
        free(obj_context->codec_state.dec.slice_datas);

    free(obj_context->render_targets);
    object_heap_free(heap, obj);
/*
 * vaCreateContext() implementation: allocate the context object, record
 * picture geometry and render targets, initialize encoder or decoder
 * state (slice arrays start at NUM_SLICES entries and grow on demand),
 * and create the generation-specific hw context via i965->codec_info.
 */
i965_CreateContext(VADriverContextP ctx,
                   VAConfigID config_id,
                   VASurfaceID *render_targets,
                   int num_render_targets,
                   VAContextID *context) /* out */
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    struct i965_render_state *render_state = &i965->render_state;
    struct object_config *obj_config = CONFIG(config_id);
    struct object_context *obj_context = NULL;
    VAStatus vaStatus = VA_STATUS_SUCCESS;

    if (NULL == obj_config) {
        vaStatus = VA_STATUS_ERROR_INVALID_CONFIG;

    /* Validate picture dimensions */
    contextID = NEW_CONTEXT_ID();
    obj_context = CONTEXT(contextID);

    if (NULL == obj_context) {
        vaStatus = VA_STATUS_ERROR_ALLOCATION_FAILED;

    render_state->inited = 1;

    /* H.264 requires the interleaved-UV render path. */
    switch (obj_config->profile) {
    case VAProfileH264Baseline:
    case VAProfileH264Main:
    case VAProfileH264High:
        /* NOTE(review): this early return does not free the obj_context
         * allocated above -- apparent heap-object leak; confirm. */
        return VA_STATUS_ERROR_UNSUPPORTED_PROFILE;
        render_state->interleaved_uv = 1;
        render_state->interleaved_uv = !!IS_GEN6(i965->intel.device_id);

    *context = contextID;
    obj_context->flags = flag;
    obj_context->context_id = contextID;
    obj_context->config_id = config_id;
    obj_context->picture_width = picture_width;
    obj_context->picture_height = picture_height;
    obj_context->num_render_targets = num_render_targets;
    obj_context->render_targets =
        (VASurfaceID *)calloc(num_render_targets, sizeof(VASurfaceID));
    obj_context->hw_context = NULL;

    /* Every render target must be an existing surface. */
    for(i = 0; i < num_render_targets; i++) {
        if (NULL == SURFACE(render_targets[i])) {
            vaStatus = VA_STATUS_ERROR_INVALID_SURFACE;

        obj_context->render_targets[i] = render_targets[i];

    if (VA_STATUS_SUCCESS == vaStatus) {
        if (VAEntrypointEncSlice == obj_config->entrypoint ) { /*encode routine only*/
            obj_context->codec_type = CODEC_ENC;
            memset(&obj_context->codec_state.enc, 0, sizeof(obj_context->codec_state.enc));
            obj_context->codec_state.enc.current_render_target = VA_INVALID_ID;
            obj_context->codec_state.enc.max_slice_params = NUM_SLICES;
            obj_context->codec_state.enc.slice_params = calloc(obj_context->codec_state.enc.max_slice_params,
                                                               sizeof(*obj_context->codec_state.enc.slice_params));
            assert(i965->codec_info->enc_hw_context_init);
            obj_context->hw_context = i965->codec_info->enc_hw_context_init(ctx, obj_config->profile);
            /* Decoder state: slice arrays grow in NUM_SLICES chunks
             * (see i965_render_slice_parameter_buffer). */
            obj_context->codec_type = CODEC_DEC;
            memset(&obj_context->codec_state.dec, 0, sizeof(obj_context->codec_state.dec));
            obj_context->codec_state.dec.current_render_target = -1;
            obj_context->codec_state.dec.max_slice_params = NUM_SLICES;
            obj_context->codec_state.dec.max_slice_datas = NUM_SLICES;
            obj_context->codec_state.dec.slice_params = calloc(obj_context->codec_state.dec.max_slice_params,
                                                               sizeof(*obj_context->codec_state.dec.slice_params));
            obj_context->codec_state.dec.slice_datas = calloc(obj_context->codec_state.dec.max_slice_datas,
                                                              sizeof(*obj_context->codec_state.dec.slice_datas));

            assert(i965->codec_info->dec_hw_context_init);
            obj_context->hw_context = i965->codec_info->dec_hw_context_init(ctx, obj_config->profile);

    /* Error recovery: tear down the partially-built context. */
    if (VA_STATUS_SUCCESS != vaStatus) {
        i965_destroy_context(&i965->context_heap, (struct object_base *)obj_context);
/* vaDestroyContext() implementation: free the context and all its state. */
i965_DestroyContext(VADriverContextP ctx, VAContextID context)
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    struct object_context *obj_context = CONTEXT(context);

    i965_destroy_context(&i965->context_heap, (struct object_base *)obj_context);

    return VA_STATUS_SUCCESS;
/* Free a buffer object: drop its store reference, return it to the heap. */
i965_destroy_buffer(struct object_heap *heap, struct object_base *obj)
    struct object_buffer *obj_buffer = (struct object_buffer *)obj;

    assert(obj_buffer->buffer_store);
    i965_release_buffer_store(&obj_buffer->buffer_store);
    object_heap_free(heap, obj);
/*
 * Shared implementation behind vaCreateBuffer(): validates the buffer
 * type, allocates the object plus a ref-counted buffer store, and backs
 * the store with either a GEM bo (slice data, images, coded buffers, or
 * a caller-supplied store_bo) or malloc'd memory.  When data is
 * non-NULL it is copied into the new storage.
 */
i965_create_buffer_internal(VADriverContextP ctx,
                            unsigned int num_elements,
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    struct object_buffer *obj_buffer = NULL;
    struct buffer_store *buffer_store = NULL;

    /* Only the parameter/data buffer types listed here are supported. */
    case VAPictureParameterBufferType:
    case VAIQMatrixBufferType:
    case VABitPlaneBufferType:
    case VASliceGroupMapBufferType:
    case VASliceParameterBufferType:
    case VASliceDataBufferType:
    case VAMacroblockParameterBufferType:
    case VAResidualDataBufferType:
    case VADeblockingParameterBufferType:
    case VAImageBufferType:
    case VAEncCodedBufferType:
    case VAEncSequenceParameterBufferType:
    case VAEncPictureParameterBufferType:
    case VAEncSliceParameterBufferType:

        return VA_STATUS_ERROR_UNSUPPORTED_BUFFERTYPE;

    bufferID = NEW_BUFFER_ID();
    obj_buffer = BUFFER(bufferID);

    if (NULL == obj_buffer) {
        return VA_STATUS_ERROR_ALLOCATION_FAILED;

    /* Coded buffers are prefixed by an in-bo VACodedBufferSegment
     * header; reserve 64-byte-aligned room for it. */
    if (type == VAEncCodedBufferType) {
        size += ALIGN(sizeof(VACodedBufferSegment), 64);

    obj_buffer->max_num_elements = num_elements;
    obj_buffer->num_elements = num_elements;
    obj_buffer->size_element = size;
    obj_buffer->type = type;
    obj_buffer->buffer_store = NULL;
    buffer_store = calloc(1, sizeof(struct buffer_store));
    assert(buffer_store);
    buffer_store->ref_count = 1;

    if (store_bo != NULL) {
        /* Caller supplied a bo: share it (extra reference) and
         * optionally seed its contents. */
        buffer_store->bo = store_bo;
        dri_bo_reference(buffer_store->bo);

            dri_bo_subdata(buffer_store->bo, 0, size * num_elements, data);
    } else if (type == VASliceDataBufferType || type == VAImageBufferType || type == VAEncCodedBufferType) {
        /* Bulk-data buffer types live in a freshly-allocated GEM bo. */
        buffer_store->bo = dri_bo_alloc(i965->intel.bufmgr,
                                        size * num_elements, 64);
        assert(buffer_store->bo);

        if (type == VAEncCodedBufferType) {
            /* Initialize the coded-buffer segment header in place. */
            VACodedBufferSegment *coded_buffer_segment;
            dri_bo_map(buffer_store->bo, 1);
            coded_buffer_segment = (VACodedBufferSegment *)buffer_store->bo->virtual;
            coded_buffer_segment->size = size - ALIGN(sizeof(VACodedBufferSegment), 64);
            coded_buffer_segment->bit_offset = 0;
            coded_buffer_segment->status = 0;
            coded_buffer_segment->buf = NULL;
            coded_buffer_segment->next = NULL;
            dri_bo_unmap(buffer_store->bo);

            dri_bo_subdata(buffer_store->bo, 0, size * num_elements, data);
        /* All other types: plain CPU-side storage. */
        buffer_store->buffer = malloc(size * num_elements);
        assert(buffer_store->buffer);

            memcpy(buffer_store->buffer, data, size * num_elements);

    buffer_store->num_elements = obj_buffer->num_elements;
    i965_reference_buffer_store(&obj_buffer->buffer_store, buffer_store);
    /* Drop the local reference; the object now owns the store. */
    i965_release_buffer_store(&buffer_store);

    return VA_STATUS_SUCCESS;
/*
 * vaCreateBuffer() entry point: thin wrapper that forwards to
 * i965_create_buffer_internal() with no pre-existing bo.
 */
i965_CreateBuffer(VADriverContextP ctx,
                  VAContextID context, /* in */
                  VABufferType type, /* in */
                  unsigned int size, /* in */
                  unsigned int num_elements, /* in */
                  void *data, /* in */
                  VABufferID *buf_id) /* out */
    return i965_create_buffer_internal(ctx, context, type, size, num_elements, data, NULL, buf_id);
1023 i965_BufferSetNumElements(VADriverContextP ctx,
1024 VABufferID buf_id, /* in */
1025 unsigned int num_elements) /* in */
1027 struct i965_driver_data *i965 = i965_driver_data(ctx);
1028 struct object_buffer *obj_buffer = BUFFER(buf_id);
1029 VAStatus vaStatus = VA_STATUS_SUCCESS;
1033 if ((num_elements < 0) ||
1034 (num_elements > obj_buffer->max_num_elements)) {
1035 vaStatus = VA_STATUS_ERROR_UNKNOWN;
1037 obj_buffer->num_elements = num_elements;
1038 if (obj_buffer->buffer_store != NULL) {
1039 obj_buffer->buffer_store->num_elements = num_elements;
/*
 * vaMapBuffer() implementation: expose the buffer's backing storage to
 * the CPU.  bo-backed stores are mapped through the GTT when tiled,
 * CPU-mapped otherwise; malloc-backed stores are returned directly.
 * For coded buffers, the segment header's buf pointer is fixed up to
 * point just past the (64-byte-aligned) header.
 */
i965_MapBuffer(VADriverContextP ctx,
               VABufferID buf_id, /* in */
               void **pbuf) /* out */
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    struct object_buffer *obj_buffer = BUFFER(buf_id);
    VAStatus vaStatus = VA_STATUS_ERROR_UNKNOWN;

    /* Invariant: exactly one of bo/buffer backs the store. */
    assert(obj_buffer && obj_buffer->buffer_store);
    assert(obj_buffer->buffer_store->bo || obj_buffer->buffer_store->buffer);
    assert(!(obj_buffer->buffer_store->bo && obj_buffer->buffer_store->buffer));

    if (NULL != obj_buffer->buffer_store->bo) {
        unsigned int tiling, swizzle;

        dri_bo_get_tiling(obj_buffer->buffer_store->bo, &tiling, &swizzle);

        /* Tiled bos are mapped via the GTT. */
        if (tiling != I915_TILING_NONE)
            drm_intel_gem_bo_map_gtt(obj_buffer->buffer_store->bo);
            dri_bo_map(obj_buffer->buffer_store->bo, 1);

        assert(obj_buffer->buffer_store->bo->virtual);
        *pbuf = obj_buffer->buffer_store->bo->virtual;

        if (obj_buffer->type == VAEncCodedBufferType) {
            VACodedBufferSegment *coded_buffer_segment = (VACodedBufferSegment *)(obj_buffer->buffer_store->bo->virtual);
            coded_buffer_segment->buf = (unsigned char *)(obj_buffer->buffer_store->bo->virtual) + ALIGN(sizeof(VACodedBufferSegment), 64);

        vaStatus = VA_STATUS_SUCCESS;
    } else if (NULL != obj_buffer->buffer_store->buffer) {
        *pbuf = obj_buffer->buffer_store->buffer;
        vaStatus = VA_STATUS_SUCCESS;
/*
 * vaUnmapBuffer() implementation: mirror of i965_MapBuffer -- GTT unmap
 * for tiled bos, regular unmap otherwise.  malloc-backed stores need no
 * unmapping.
 */
i965_UnmapBuffer(VADriverContextP ctx, VABufferID buf_id)
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    struct object_buffer *obj_buffer = BUFFER(buf_id);
    VAStatus vaStatus = VA_STATUS_ERROR_UNKNOWN;

    assert(obj_buffer && obj_buffer->buffer_store);
    assert(obj_buffer->buffer_store->bo || obj_buffer->buffer_store->buffer);
    assert(!(obj_buffer->buffer_store->bo && obj_buffer->buffer_store->buffer));

    if (NULL != obj_buffer->buffer_store->bo) {
        unsigned int tiling, swizzle;

        dri_bo_get_tiling(obj_buffer->buffer_store->bo, &tiling, &swizzle);

        if (tiling != I915_TILING_NONE)
            drm_intel_gem_bo_unmap_gtt(obj_buffer->buffer_store->bo);
            dri_bo_unmap(obj_buffer->buffer_store->bo);

        vaStatus = VA_STATUS_SUCCESS;
    } else if (NULL != obj_buffer->buffer_store->buffer) {
        vaStatus = VA_STATUS_SUCCESS;
/* vaDestroyBuffer() implementation: free the buffer object. */
i965_DestroyBuffer(VADriverContextP ctx, VABufferID buffer_id)
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    struct object_buffer *obj_buffer = BUFFER(buffer_id);

    i965_destroy_buffer(&i965->buffer_heap, (struct object_base *)obj_buffer);

    return VA_STATUS_SUCCESS;
/*
 * vaBeginPicture() implementation: validate the context's profile and
 * latch render_target as the current frame on the encoder or decoder
 * state machine.
 */
i965_BeginPicture(VADriverContextP ctx,
                  VAContextID context,
                  VASurfaceID render_target)
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    struct object_context *obj_context = CONTEXT(context);
    struct object_surface *obj_surface = SURFACE(render_target);
    struct object_config *obj_config;

    assert(obj_context);
    assert(obj_surface);

    config = obj_context->config_id;
    obj_config = CONFIG(config);

    /* All three supported codec families are accepted here. */
    switch (obj_config->profile) {
    case VAProfileMPEG2Simple:
    case VAProfileMPEG2Main:
        vaStatus = VA_STATUS_SUCCESS;

    case VAProfileH264Baseline:
    case VAProfileH264Main:
    case VAProfileH264High:
        vaStatus = VA_STATUS_SUCCESS;

    case VAProfileVC1Simple:
    case VAProfileVC1Main:
    case VAProfileVC1Advanced:
        vaStatus = VA_STATUS_SUCCESS;

        vaStatus = VA_STATUS_ERROR_UNSUPPORTED_PROFILE;

    if (obj_context->codec_type == CODEC_ENC)
        obj_context->codec_state.enc.current_render_target = render_target; /*This is input new frame*/
        obj_context->codec_state.dec.current_render_target = render_target;
/*
 * Stash the picture-parameter buffer on the decoder state, replacing
 * any previous one.  Parameter buffers are CPU-side, never bo-backed
 * (asserted).
 */
i965_render_picture_parameter_buffer(VADriverContextP ctx,
                                     struct object_context *obj_context,
                                     struct object_buffer *obj_buffer)
    assert(obj_buffer->buffer_store->bo == NULL);
    assert(obj_buffer->buffer_store->buffer);
    /* Drop the old reference before taking the new one. */
    i965_release_buffer_store(&obj_context->codec_state.dec.pic_param);
    i965_reference_buffer_store(&obj_context->codec_state.dec.pic_param,
                                obj_buffer->buffer_store);

    return VA_STATUS_SUCCESS;
/*
 * Stash the IQ-matrix buffer on the decoder state, replacing any
 * previous one.  CPU-side only (asserted).
 */
i965_render_iq_matrix_buffer(VADriverContextP ctx,
                             struct object_context *obj_context,
                             struct object_buffer *obj_buffer)
    assert(obj_buffer->buffer_store->bo == NULL);
    assert(obj_buffer->buffer_store->buffer);
    i965_release_buffer_store(&obj_context->codec_state.dec.iq_matrix);
    i965_reference_buffer_store(&obj_context->codec_state.dec.iq_matrix,
                                obj_buffer->buffer_store);

    return VA_STATUS_SUCCESS;
/*
 * Stash the bit-plane buffer (VC-1) on the decoder state, replacing any
 * previous one.  CPU-side only (asserted).
 */
i965_render_bit_plane_buffer(VADriverContextP ctx,
                             struct object_context *obj_context,
                             struct object_buffer *obj_buffer)
    assert(obj_buffer->buffer_store->bo == NULL);
    assert(obj_buffer->buffer_store->buffer);
    i965_release_buffer_store(&obj_context->codec_state.dec.bit_plane);
    i965_reference_buffer_store(&obj_context->codec_state.dec.bit_plane,
                                obj_buffer->buffer_store);

    return VA_STATUS_SUCCESS;
1222 i965_render_slice_parameter_buffer(VADriverContextP ctx,
1223 struct object_context *obj_context,
1224 struct object_buffer *obj_buffer)
1226 assert(obj_buffer->buffer_store->bo == NULL);
1227 assert(obj_buffer->buffer_store->buffer);
1229 if (obj_context->codec_state.dec.num_slice_params == obj_context->codec_state.dec.max_slice_params) {
1230 obj_context->codec_state.dec.slice_params = realloc(obj_context->codec_state.dec.slice_params,
1231 (obj_context->codec_state.dec.max_slice_params + NUM_SLICES) * sizeof(*obj_context->codec_state.dec.slice_params));
1232 memset(obj_context->codec_state.dec.slice_params + obj_context->codec_state.dec.max_slice_params, 0, NUM_SLICES * sizeof(*obj_context->codec_state.dec.slice_params));
1233 obj_context->codec_state.dec.max_slice_params += NUM_SLICES;
1236 i965_release_buffer_store(&obj_context->codec_state.dec.slice_params[obj_context->codec_state.dec.num_slice_params]);
1237 i965_reference_buffer_store(&obj_context->codec_state.dec.slice_params[obj_context->codec_state.dec.num_slice_params],
1238 obj_buffer->buffer_store);
1239 obj_context->codec_state.dec.num_slice_params++;
1241 return VA_STATUS_SUCCESS;
1245 i965_render_slice_data_buffer(VADriverContextP ctx,
1246 struct object_context *obj_context,
1247 struct object_buffer *obj_buffer)
1249 assert(obj_buffer->buffer_store->buffer == NULL);
1250 assert(obj_buffer->buffer_store->bo);
1252 if (obj_context->codec_state.dec.num_slice_datas == obj_context->codec_state.dec.max_slice_datas) {
1253 obj_context->codec_state.dec.slice_datas = realloc(obj_context->codec_state.dec.slice_datas,
1254 (obj_context->codec_state.dec.max_slice_datas + NUM_SLICES) * sizeof(*obj_context->codec_state.dec.slice_datas));
1255 memset(obj_context->codec_state.dec.slice_datas + obj_context->codec_state.dec.max_slice_datas, 0, NUM_SLICES * sizeof(*obj_context->codec_state.dec.slice_datas));
1256 obj_context->codec_state.dec.max_slice_datas += NUM_SLICES;
1259 i965_release_buffer_store(&obj_context->codec_state.dec.slice_datas[obj_context->codec_state.dec.num_slice_datas]);
1260 i965_reference_buffer_store(&obj_context->codec_state.dec.slice_datas[obj_context->codec_state.dec.num_slice_datas],
1261 obj_buffer->buffer_store);
1262 obj_context->codec_state.dec.num_slice_datas++;
1264 return VA_STATUS_SUCCESS;
/*
 * Decode-side vaRenderPicture(): attach each submitted buffer to the
 * context's decode state (codec_state.dec) according to its type.
 * Unrecognized types yield VA_STATUS_ERROR_UNSUPPORTED_BUFFERTYPE.
 * NOTE(review): this listing is elided -- the return-type line, the
 * `break;` after each case, local declarations (i, vaStatus, the
 * num_buffers parameter) and closing braces are not visible here.
 */
1268 i965_decoder_render_picture(VADriverContextP ctx,
1269 VAContextID context,
1270 VABufferID *buffers,
1273     struct i965_driver_data *i965 = i965_driver_data(ctx);
1274     struct object_context *obj_context = CONTEXT(context);
1278     for (i = 0; i < num_buffers; i++) {
1279         struct object_buffer *obj_buffer = BUFFER(buffers[i]);
1282         switch (obj_buffer->type) {
1283         case VAPictureParameterBufferType:
1284             vaStatus = i965_render_picture_parameter_buffer(ctx, obj_context, obj_buffer);
1287         case VAIQMatrixBufferType:
1288             vaStatus = i965_render_iq_matrix_buffer(ctx, obj_context, obj_buffer);
1291         case VABitPlaneBufferType:
1292             vaStatus = i965_render_bit_plane_buffer(ctx, obj_context, obj_buffer);
1295         case VASliceParameterBufferType:
1296             vaStatus = i965_render_slice_parameter_buffer(ctx, obj_context, obj_buffer);
1299         case VASliceDataBufferType:
1300             vaStatus = i965_render_slice_data_buffer(ctx, obj_context, obj_buffer);
1304             vaStatus = VA_STATUS_ERROR_UNSUPPORTED_BUFFERTYPE;
1313 i965_encoder_render_squence_parameter_buffer(VADriverContextP ctx,
1314 struct object_context *obj_context,
1315 struct object_buffer *obj_buffer)
1317 assert(obj_buffer->buffer_store->bo == NULL);
1318 assert(obj_buffer->buffer_store->buffer);
1319 i965_release_buffer_store(&obj_context->codec_state.enc.seq_param);
1320 i965_reference_buffer_store(&obj_context->codec_state.enc.seq_param,
1321 obj_buffer->buffer_store);
1323 return VA_STATUS_SUCCESS;
1328 i965_encoder_render_picture_parameter_buffer(VADriverContextP ctx,
1329 struct object_context *obj_context,
1330 struct object_buffer *obj_buffer)
1332 assert(obj_buffer->buffer_store->bo == NULL);
1333 assert(obj_buffer->buffer_store->buffer);
1334 i965_release_buffer_store(&obj_context->codec_state.enc.pic_param);
1335 i965_reference_buffer_store(&obj_context->codec_state.enc.pic_param,
1336 obj_buffer->buffer_store);
1338 return VA_STATUS_SUCCESS;
1342 i965_encoder_render_slice_parameter_buffer(VADriverContextP ctx,
1343 struct object_context *obj_context,
1344 struct object_buffer *obj_buffer)
1346 if (obj_context->codec_state.enc.num_slice_params == obj_context->codec_state.enc.max_slice_params) {
1347 obj_context->codec_state.enc.slice_params = realloc(obj_context->codec_state.enc.slice_params,
1348 (obj_context->codec_state.enc.max_slice_params + NUM_SLICES) * sizeof(*obj_context->codec_state.enc.slice_params));
1349 memset(obj_context->codec_state.enc.slice_params + obj_context->codec_state.enc.max_slice_params, 0, NUM_SLICES * sizeof(*obj_context->codec_state.enc.slice_params));
1350 obj_context->codec_state.enc.max_slice_params += NUM_SLICES;
1353 i965_release_buffer_store(&obj_context->codec_state.enc.slice_params[obj_context->codec_state.enc.num_slice_params]);
1354 i965_reference_buffer_store(&obj_context->codec_state.enc.slice_params[obj_context->codec_state.enc.num_slice_params],
1355 obj_buffer->buffer_store);
1356 obj_context->codec_state.enc.num_slice_params++;
1358 return VA_STATUS_SUCCESS;
1362 i965_encoder_render_picture_control_buffer(VADriverContextP ctx,
1363 struct object_context *obj_context,
1364 struct object_buffer *obj_buffer)
1366 assert(obj_buffer->buffer_store->bo == NULL);
1367 assert(obj_buffer->buffer_store->buffer);
1368 i965_release_buffer_store(&obj_context->codec_state.enc.pic_control);
1369 i965_reference_buffer_store(&obj_context->codec_state.enc.pic_control,
1370 obj_buffer->buffer_store);
1372 return VA_STATUS_SUCCESS;
1376 i965_encoder_render_qmatrix_buffer(VADriverContextP ctx,
1377 struct object_context *obj_context,
1378 struct object_buffer *obj_buffer)
1380 assert(obj_buffer->buffer_store->bo == NULL);
1381 assert(obj_buffer->buffer_store->buffer);
1382 i965_release_buffer_store(&obj_context->codec_state.enc.q_matrix);
1383 i965_reference_buffer_store(&obj_context->codec_state.enc.iq_matrix,
1384 obj_buffer->buffer_store);
1386 return VA_STATUS_SUCCESS;
1390 i965_encoder_render_iqmatrix_buffer(VADriverContextP ctx,
1391 struct object_context *obj_context,
1392 struct object_buffer *obj_buffer)
1394 assert(obj_buffer->buffer_store->bo == NULL);
1395 assert(obj_buffer->buffer_store->buffer);
1396 i965_release_buffer_store(&obj_context->codec_state.enc.iq_matrix);
1397 i965_reference_buffer_store(&obj_context->codec_state.enc.iq_matrix,
1398 obj_buffer->buffer_store);
1400 return VA_STATUS_SUCCESS;
/*
 * Encode-side vaRenderPicture(): dispatch each submitted buffer to the
 * matching codec_state.enc slot by buffer type.  Unrecognized types
 * yield VA_STATUS_ERROR_UNSUPPORTED_BUFFERTYPE.
 * NOTE(review): elided listing -- return type, `break;` statements,
 * local declarations (i, vaStatus, num_buffers) and closing braces are
 * not visible in this chunk.
 */
1404 i965_encoder_render_picture(VADriverContextP ctx,
1405 VAContextID context,
1406 VABufferID *buffers,
1409     struct i965_driver_data *i965 = i965_driver_data(ctx);
1410     struct object_context *obj_context = CONTEXT(context);
1414     for (i = 0; i < num_buffers; i++) {
1415         struct object_buffer *obj_buffer = BUFFER(buffers[i]);
1418         switch (obj_buffer->type) {
1419         case VAEncSequenceParameterBufferType:
1420             vaStatus = i965_encoder_render_squence_parameter_buffer(ctx, obj_context, obj_buffer);
1423         case VAEncPictureParameterBufferType:
1424             vaStatus = i965_encoder_render_picture_parameter_buffer(ctx, obj_context, obj_buffer);
1427         case VAEncSliceParameterBufferType:
1428             vaStatus = i965_encoder_render_slice_parameter_buffer(ctx, obj_context, obj_buffer);
1431         case VAPictureParameterBufferType:
1432             vaStatus = i965_encoder_render_picture_control_buffer(ctx, obj_context, obj_buffer);
1435         case VAQMatrixBufferType:
1436             vaStatus = i965_encoder_render_qmatrix_buffer(ctx, obj_context, obj_buffer);
1439         case VAIQMatrixBufferType:
1440             vaStatus = i965_encoder_render_iqmatrix_buffer(ctx, obj_context, obj_buffer);
1444             vaStatus = VA_STATUS_ERROR_UNSUPPORTED_BUFFERTYPE;
/*
 * vaRenderPicture() entry point: route the buffer list to the encoder
 * or decoder dispatcher depending on the context's entrypoint.
 * NOTE(review): elided listing -- the `config` declaration, the else
 * branch keyword, and the closing return are not visible here.
 */
1453 i965_RenderPicture(VADriverContextP ctx,
1454 VAContextID context,
1455 VABufferID *buffers,
1458     struct i965_driver_data *i965 = i965_driver_data(ctx);
1459     struct object_context *obj_context;
1460     struct object_config *obj_config;
1462     VAStatus vaStatus = VA_STATUS_ERROR_UNKNOWN;
1464     obj_context = CONTEXT(context);
1465     assert(obj_context);
1467     config = obj_context->config_id;
1468     obj_config = CONFIG(config);
/* Encode contexts take the EncSlice path; everything else decodes. */
1471     if (VAEntrypointEncSlice == obj_config->entrypoint ){
1472         vaStatus = i965_encoder_render_picture(ctx, context, buffers, num_buffers);
1474         vaStatus = i965_decoder_render_picture(ctx, context, buffers, num_buffers);
1481 i965_EndPicture(VADriverContextP ctx, VAContextID context)
1483 struct i965_driver_data *i965 = i965_driver_data(ctx);
1484 struct object_context *obj_context = CONTEXT(context);
1485 struct object_config *obj_config;
1489 assert(obj_context);
1490 config = obj_context->config_id;
1491 obj_config = CONFIG(config);
1494 if (obj_context->codec_type == CODEC_ENC) {
1495 assert(VAEntrypointEncSlice == obj_config->entrypoint);
1497 assert(obj_context->codec_state.enc.pic_param);
1498 assert(obj_context->codec_state.enc.seq_param);
1499 assert(obj_context->codec_state.enc.num_slice_params >= 1);
1501 assert(obj_context->codec_state.dec.pic_param);
1502 assert(obj_context->codec_state.dec.num_slice_params >= 1);
1503 assert(obj_context->codec_state.dec.num_slice_datas >= 1);
1504 assert(obj_context->codec_state.dec.num_slice_params == obj_context->codec_state.dec.num_slice_datas);
1507 assert(obj_context->hw_context->run);
1508 obj_context->hw_context->run(ctx, obj_config->profile, &obj_context->codec_state, obj_context->hw_context);
1510 if (obj_context->codec_type == CODEC_ENC) {
1511 obj_context->codec_state.enc.current_render_target = VA_INVALID_SURFACE;
1512 obj_context->codec_state.enc.num_slice_params = 0;
1513 i965_release_buffer_store(&obj_context->codec_state.enc.pic_param);
1514 i965_release_buffer_store(&obj_context->codec_state.enc.seq_param);
1516 for (i = 0; i < obj_context->codec_state.enc.num_slice_params; i++) {
1517 i965_release_buffer_store(&obj_context->codec_state.enc.slice_params[i]);
1520 obj_context->codec_state.dec.current_render_target = -1;
1521 obj_context->codec_state.dec.num_slice_params = 0;
1522 obj_context->codec_state.dec.num_slice_datas = 0;
1523 i965_release_buffer_store(&obj_context->codec_state.dec.pic_param);
1524 i965_release_buffer_store(&obj_context->codec_state.dec.iq_matrix);
1525 i965_release_buffer_store(&obj_context->codec_state.dec.bit_plane);
1527 for (i = 0; i < obj_context->codec_state.dec.num_slice_params; i++) {
1528 i965_release_buffer_store(&obj_context->codec_state.dec.slice_params[i]);
1529 i965_release_buffer_store(&obj_context->codec_state.dec.slice_datas[i]);
1533 return VA_STATUS_SUCCESS;
1537 i965_SyncSurface(VADriverContextP ctx,
1538 VASurfaceID render_target)
1540 struct i965_driver_data *i965 = i965_driver_data(ctx);
1541 struct object_surface *obj_surface = SURFACE(render_target);
1543 assert(obj_surface);
1545 return VA_STATUS_SUCCESS;
1549 i965_QuerySurfaceStatus(VADriverContextP ctx,
1550 VASurfaceID render_target,
1551 VASurfaceStatus *status) /* out */
1553 struct i965_driver_data *i965 = i965_driver_data(ctx);
1554 struct object_surface *obj_surface = SURFACE(render_target);
1556 assert(obj_surface);
1558 /* Usually GEM will handle synchronization with the graphics hardware */
1560 if (obj_surface->bo) {
1561 dri_bo_map(obj_surface->bo, 0);
1562 dri_bo_unmap(obj_surface->bo);
1566 *status = obj_surface->status;
1568 return VA_STATUS_SUCCESS;
1573 * Query display attributes
1574 * The caller must provide an "attr_list" array that can hold at
1575 * least vaMaxNumDisplayAttributes() entries. The actual number of attributes
1576 * returned in "attr_list" is returned in "num_attributes".
1579 i965_QueryDisplayAttributes(VADriverContextP ctx,
1580 VADisplayAttribute *attr_list, /* out */
1581 int *num_attributes) /* out */
1584 *num_attributes = 0;
1586 return VA_STATUS_SUCCESS;
1590 * Get display attributes
1591 * This function returns the current attribute values in "attr_list".
1592 * Only attributes returned with VA_DISPLAY_ATTRIB_GETTABLE set in the "flags" field
1593 * from vaQueryDisplayAttributes() can have their values retrieved.
1596 i965_GetDisplayAttributes(VADriverContextP ctx,
1597 VADisplayAttribute *attr_list, /* in/out */
1601 return VA_STATUS_ERROR_UNIMPLEMENTED;
1605 * Set display attributes
1606 * Only attributes returned with VA_DISPLAY_ATTRIB_SETTABLE set in the "flags" field
1607 * from vaQueryDisplayAttributes() can be set. If the attribute is not settable or
1608 * the value is out of range, the function returns VA_STATUS_ERROR_ATTR_NOT_SUPPORTED
1611 i965_SetDisplayAttributes(VADriverContextP ctx,
1612 VADisplayAttribute *attr_list,
1616 return VA_STATUS_ERROR_UNIMPLEMENTED;
1620 i965_DbgCopySurfaceToBuffer(VADriverContextP ctx,
1621 VASurfaceID surface,
1622 void **buffer, /* out */
1623 unsigned int *stride) /* out */
1626 return VA_STATUS_ERROR_UNIMPLEMENTED;
/*
 * Driver bring-up: initialize the DRM/Intel layer, pick the per-GPU
 * codec capability table from the PCI device id, then set up the
 * post-processing and render subsystems, the render mutex and the
 * batch buffer.  Any failure aborts with VA_STATUS_ERROR_UNKNOWN.
 * NOTE(review): elided listing -- the final `else` keyword before the
 * unknown-device error return and the closing brace are not visible.
 */
1630 i965_Init(VADriverContextP ctx)
1632     struct i965_driver_data *i965 = i965_driver_data(ctx);
1634     if (intel_driver_init(ctx) == False)
1635         return VA_STATUS_ERROR_UNKNOWN;
1637     if (IS_G4X(i965->intel.device_id))
1638         i965->codec_info = &g4x_hw_codec_info;
1639     else if (IS_IRONLAKE(i965->intel.device_id))
1640         i965->codec_info = &ironlake_hw_codec_info;
1641     else if (IS_GEN6(i965->intel.device_id))
1642         i965->codec_info = &gen6_hw_codec_info;
1644         return VA_STATUS_ERROR_UNKNOWN;
1646     if (i965_post_processing_init(ctx) == False)
1647         return VA_STATUS_ERROR_UNKNOWN;
1649     if (i965_render_init(ctx) == False)
1650         return VA_STATUS_ERROR_UNKNOWN;
1652     _i965InitMutex(&i965->render_mutex);
1653     i965->batch = intel_batchbuffer_new(&i965->intel, I915_EXEC_RENDER);
1655     return VA_STATUS_SUCCESS;
/*
 * Walk an object heap, apply the supplied destructor to each live
 * object, then destroy the heap itself.
 * NOTE(review): elided listing -- the while-loop that invokes `func`
 * on each object (original lines between 1665 and 1671) is not visible.
 */
1659 i965_destroy_heap(struct object_heap *heap,
1660 void (*func)(struct object_heap *heap, struct object_base *object))
1662     struct object_base *object;
1663     object_heap_iterator iter;
1665     object = object_heap_first(heap, &iter);
1671         object = object_heap_next(heap, &iter);
1674     object_heap_destroy(heap);
1679 i965_DestroyImage(VADriverContextP ctx, VAImageID image);
/*
 * vaCreateImage(): allocate an image object, compute the per-plane
 * layout for the requested fourcc, back it with a VAImageBufferType
 * buffer, and (for paletted formats) allocate a host palette.
 * On any failure after the id is allocated, control reaches the
 * error path that calls i965_DestroyImage().
 * NOTE(review): elided listing -- `break;` statements between fourcc
 * cases, the default case, the goto/error label and closing braces are
 * not visible in this chunk.
 */
1682 i965_CreateImage(VADriverContextP ctx,
1683 VAImageFormat *format,
1686 VAImage *out_image) /* out */
1688     struct i965_driver_data *i965 = i965_driver_data(ctx);
1689     struct object_image *obj_image;
1690     VAStatus va_status = VA_STATUS_ERROR_OPERATION_FAILED;
1692     unsigned int width2, height2, size2, size;
1694     out_image->image_id = VA_INVALID_ID;
1695     out_image->buf = VA_INVALID_ID;
1697     image_id = NEW_IMAGE_ID();
1698     if (image_id == VA_INVALID_ID)
1699         return VA_STATUS_ERROR_ALLOCATION_FAILED;
1701     obj_image = IMAGE(image_id);
1703         return VA_STATUS_ERROR_ALLOCATION_FAILED;
1704     obj_image->bo = NULL;
1705     obj_image->palette = NULL;
1706     obj_image->derived_surface = VA_INVALID_ID;
1708     VAImage * const image = &obj_image->image;
1709     image->image_id = image_id;
1710     image->buf = VA_INVALID_ID;
/* Luma size and half-resolution chroma plane size (rounded up). */
1712     size = width * height;
1713     width2 = (width + 1) / 2;
1714     height2 = (height + 1) / 2;
1715     size2 = width2 * height2;
1717     image->num_palette_entries = 0;
1718     image->entry_bytes = 0;
1719     memset(image->component_order, 0, sizeof(image->component_order));
1721     switch (format->fourcc) {
1722     case VA_FOURCC('I','A','4','4'):
1723     case VA_FOURCC('A','I','4','4'):
1724         image->num_planes = 1;
1725         image->pitches[0] = width;
1726         image->offsets[0] = 0;
1727         image->data_size = image->offsets[0] + image->pitches[0] * height;
1728         image->num_palette_entries = 16;
1729         image->entry_bytes = 3;
1730         image->component_order[0] = 'R';
1731         image->component_order[1] = 'G';
1732         image->component_order[2] = 'B';
1734     case VA_FOURCC('A','R','G','B'):
1735     case VA_FOURCC('A','B','G','R'):
1736     case VA_FOURCC('B','G','R','A'):
1737     case VA_FOURCC('R','G','B','A'):
1738         image->num_planes = 1;
1739         image->pitches[0] = width * 4;
1740         image->offsets[0] = 0;
1741         image->data_size = image->offsets[0] + image->pitches[0] * height;
/* YV12: V plane precedes U plane, hence offsets[1] > offsets[2]. */
1743     case VA_FOURCC('Y','V','1','2'):
1744         image->num_planes = 3;
1745         image->pitches[0] = width;
1746         image->offsets[0] = 0;
1747         image->pitches[1] = width2;
1748         image->offsets[1] = size + size2;
1749         image->pitches[2] = width2;
1750         image->offsets[2] = size;
1751         image->data_size = size + 2 * size2;
1753     case VA_FOURCC('I','4','2','0'):
1754         image->num_planes = 3;
1755         image->pitches[0] = width;
1756         image->offsets[0] = 0;
1757         image->pitches[1] = width2;
1758         image->offsets[1] = size;
1759         image->pitches[2] = width2;
1760         image->offsets[2] = size + size2;
1761         image->data_size = size + 2 * size2;
1763     case VA_FOURCC('N','V','1','2'):
1764         image->num_planes = 2;
1765         image->pitches[0] = width;
1766         image->offsets[0] = 0;
1767         image->pitches[1] = width;
1768         image->offsets[1] = size;
1769         image->data_size = size + 2 * size2;
1775     va_status = i965_CreateBuffer(ctx, 0, VAImageBufferType,
1776 image->data_size, 1, NULL, &image->buf);
1777     if (va_status != VA_STATUS_SUCCESS)
1780     obj_image->bo = BUFFER(image->buf)->buffer_store->bo;
1781     dri_bo_reference(obj_image->bo);
1783     if (image->num_palette_entries > 0 && image->entry_bytes > 0) {
/* NOTE(review): sizeof(obj_image->palette) is the size of the pointer,
 * not of one entry -- over-allocates but is harmless; verify intended. */
1784         obj_image->palette = malloc(image->num_palette_entries * sizeof(obj_image->palette));
1785         if (!obj_image->palette)
1789     image->image_id = image_id;
1790     image->format = *format;
1791     image->width = width;
1792     image->height = height;
1794     *out_image = *image;
1795     return VA_STATUS_SUCCESS;
/* Error path: tear down the partially constructed image. */
1798     i965_DestroyImage(ctx, image_id);
/*
 * vaDeriveImage(): expose a surface's backing BO directly as a VAImage
 * without a copy.  Picks YV12 / NV12 / I420 layout to match the render
 * state (interleaved_uv selects NV12), allocating the surface BO first
 * if it does not exist yet (tiled Y when the GPU supports it).
 * NOTE(review): elided listing -- several if/else keywords, the
 * drm_intel_bo_alloc_tiled()/dri_bo_alloc() argument lines, the error
 * label and closing braces are not visible here.
 */
1802 VAStatus i965_DeriveImage(VADriverContextP ctx,
1803 VASurfaceID surface,
1804 VAImage *out_image) /* out */
1806     struct i965_driver_data *i965 = i965_driver_data(ctx);
1807     struct i965_render_state *render_state = &i965->render_state;
1808     struct object_image *obj_image;
1809     struct object_surface *obj_surface;
1811     unsigned int w_pitch, h_pitch;
1812     unsigned int data_size;
1815     out_image->image_id = VA_INVALID_ID;
1816     obj_surface = SURFACE(surface);
1819         return VA_STATUS_ERROR_INVALID_SURFACE;
1821     w_pitch = obj_surface->width;
1822     h_pitch = obj_surface->height;
/* 4:2:0 footprint: luma + two quarter-size chroma planes. */
1823     data_size = obj_surface->orig_width * obj_surface->orig_height +
1824 2 * (((obj_surface->orig_width + 1) / 2) * ((obj_surface->orig_height + 1) / 2));
1826     image_id = NEW_IMAGE_ID();
1828     if (image_id == VA_INVALID_ID)
1829         return VA_STATUS_ERROR_ALLOCATION_FAILED;
1831     obj_image = IMAGE(image_id);
1834         return VA_STATUS_ERROR_ALLOCATION_FAILED;
1836     obj_image->bo = NULL;
1837     obj_image->palette = NULL;
1838     obj_image->derived_surface = VA_INVALID_ID;
1840     VAImage * const image = &obj_image->image;
1842     memset(image, 0, sizeof(*image));
1843     image->image_id = image_id;
1844     image->buf = VA_INVALID_ID;
1845     image->num_palette_entries = 0;
1846     image->entry_bytes = 0;
1847     image->width = obj_surface->orig_width;
1848     image->height = obj_surface->orig_height;
1849     image->data_size = data_size;
/* Render state not initialized yet: default to planar YV12 layout. */
1851     if (!render_state->inited) {
1852         image->format.fourcc = VA_FOURCC('Y','V','1','2');
1853         image->format.byte_order = VA_LSB_FIRST;
1854         image->format.bits_per_pixel = 12;
1855         image->num_planes = 3;
1856         image->pitches[0] = w_pitch;
1857         image->offsets[0] = 0;
1858         image->pitches[1] = w_pitch / 2;
1859         image->offsets[1] = w_pitch * h_pitch;
1860         image->pitches[2] = w_pitch / 2;
1861         image->offsets[2] = w_pitch * h_pitch + (w_pitch / 2) * (h_pitch / 2);
1863         if (render_state->interleaved_uv) {
1864             image->format.fourcc = VA_FOURCC('N','V','1','2');
1865             image->format.byte_order = VA_LSB_FIRST;
1866             image->format.bits_per_pixel = 12;
1867             image->num_planes = 2;
1868             image->pitches[0] = w_pitch;
1869             image->offsets[0] = 0;
1870             image->pitches[1] = w_pitch;
1871             image->offsets[1] = w_pitch * h_pitch;
1873             image->format.fourcc = VA_FOURCC('I','4','2','0');
1874             image->format.byte_order = VA_LSB_FIRST;
1875             image->format.bits_per_pixel = 12;
1876             image->num_planes = 3;
1877             image->pitches[0] = w_pitch;
1878             image->offsets[0] = 0;
1879             image->pitches[1] = w_pitch / 2;
1880             image->offsets[1] = w_pitch * h_pitch;
1881             image->pitches[2] = w_pitch / 2;
1882             image->offsets[2] = w_pitch * h_pitch + (w_pitch / 2) * (h_pitch / 2);
/* Lazily allocate the surface BO so there is something to derive. */
1886     if (obj_surface->bo == NULL) {
1887         if (HAS_TILED_SURFACE(i965)) {
1889             uint32_t tiling_mode = I915_TILING_Y;
1890             unsigned long pitch;
1892             obj_surface->bo = drm_intel_bo_alloc_tiled(i965->intel.bufmgr,
1895 obj_surface->height + obj_surface->height / 2,
1900             assert(obj_surface->bo);
1901             assert(tiling_mode == I915_TILING_Y);
1902             assert(pitch == obj_surface->width);
1904             obj_surface->bo = dri_bo_alloc(i965->intel.bufmgr,
1911     assert(obj_surface->bo);
/* Wrap the surface BO in an image buffer -- no data copy occurs. */
1912     va_status = i965_create_buffer_internal(ctx, 0, VAImageBufferType,
1913 obj_surface->size, 1, NULL, obj_surface->bo, &image->buf);
1914     if (va_status != VA_STATUS_SUCCESS)
1917     obj_image->bo = BUFFER(image->buf)->buffer_store->bo;
1918     dri_bo_reference(obj_image->bo);
1920     if (image->num_palette_entries > 0 && image->entry_bytes > 0) {
/* NOTE(review): sizeof(obj_image->palette) is pointer-sized, not
 * entry-sized -- over-allocates but is harmless; verify intended. */
1921         obj_image->palette = malloc(image->num_palette_entries * sizeof(obj_image->palette));
1922         if (!obj_image->palette) {
1923             va_status = VA_STATUS_ERROR_ALLOCATION_FAILED;
1928     *out_image = *image;
1929     obj_surface->flags |= SURFACE_DERIVED;
1930     obj_image->derived_surface = surface;
1932     return VA_STATUS_SUCCESS;
/* Error path: tear down the partially constructed image. */
1935     i965_DestroyImage(ctx, image_id);
static void 
i965_destroy_image(struct object_heap *heap, struct object_base *obj)
{
    /* Heap destructor callback: image resources are released in
     * i965_DestroyImage(); here only the heap slot is returned. */
    object_heap_free(heap, obj);
}
/*
 * vaDestroyImage(): drop the BO reference, destroy the backing buffer,
 * free the palette, detach from any derived surface, and release the
 * heap slot.  A NULL lookup returns success (idempotent destroy).
 * NOTE(review): elided listing -- the NULL check before the early
 * return, the derived-surface guard and closing braces are not
 * visible here.
 */
1947 i965_DestroyImage(VADriverContextP ctx, VAImageID image)
1949     struct i965_driver_data *i965 = i965_driver_data(ctx);
1950     struct object_image *obj_image = IMAGE(image);
1951     struct object_surface *obj_surface;
1954         return VA_STATUS_SUCCESS;
1956     dri_bo_unreference(obj_image->bo);
1957     obj_image->bo = NULL;
1959     if (obj_image->image.buf != VA_INVALID_ID) {
1960         i965_DestroyBuffer(ctx, obj_image->image.buf);
1961         obj_image->image.buf = VA_INVALID_ID;
1964     if (obj_image->palette) {
1965         free(obj_image->palette);
1966         obj_image->palette = NULL;
/* Un-mark the surface this image was derived from, if any. */
1969     obj_surface = SURFACE(obj_image->derived_surface);
1972         obj_surface->flags &= ~SURFACE_DERIVED;
1975     i965_destroy_image(&i965->image_heap, (struct object_base *)obj_image);
1977     return VA_STATUS_SUCCESS;
1981 * pointer to an array holding the palette data. The size of the array is
1982 * num_palette_entries * entry_bytes in size. The order of the components
1983 * in the palette is described by the component_order in VASubpicture struct
/*
 * vaSetImagePalette(): pack the caller's byte-triplet palette into the
 * image's 32-bit 0x00RRGGBB entries.  Fails if the image has no
 * palette storage allocated.
 * NOTE(review): elided listing -- the `image` parameter line, the `i`
 * declaration, the NULL-image check and closing brace are not visible.
 */
1986 i965_SetImagePalette(VADriverContextP ctx,
1988 unsigned char *palette)
1990     struct i965_driver_data *i965 = i965_driver_data(ctx);
1993     struct object_image *obj_image = IMAGE(image);
1995         return VA_STATUS_ERROR_INVALID_IMAGE;
1997     if (!obj_image->palette)
1998         return VA_STATUS_ERROR_ALLOCATION_FAILED; /* XXX: unpaletted/error */
2000     for (i = 0; i < obj_image->image.num_palette_entries; i++)
2001         obj_image->palette[i] = (((unsigned int)palette[3*i + 0] << 16) |
2002 ((unsigned int)palette[3*i + 1] << 8) |
2003 (unsigned int)palette[3*i + 2]);
2004     return VA_STATUS_SUCCESS;
/*
 * Copy a `len` x `height` pixel block between two 2-D buffers with
 * independent row strides (in bytes).  Rows must not overlap.
 */
static void
memcpy_pic(uint8_t *dst, unsigned int dst_stride,
           const uint8_t *src, unsigned int src_stride,
           unsigned int len, unsigned int height)
{
    unsigned int i;

    for (i = 0; i < height; i++) {
        memcpy(dst, src, len);
        /* BUGFIX/restore: advance both pointers by their strides; the
         * visible listing copied the same row `height` times. */
        dst += dst_stride;
        src += src_stride;
    }
}
/*
 * Copy the `rect` region of a planar 4:2:0 surface into an I420 or
 * YV12 VAImage.  The U/V destination plane indices are swapped for
 * YV12 so the same copy loop serves both layouts.  Tiled BOs are
 * mapped through the GTT so the CPU sees a linear view.
 * NOTE(review): elided listing -- the `Y` constant, early returns,
 * else keywords and closing braces are not visible here.
 */
2022 get_image_i420(struct object_image *obj_image, uint8_t *image_data,
2023 struct object_surface *obj_surface,
2024 const VARectangle *rect)
2026     uint8_t *dst[3], *src[3];
/* YV12 stores V before U, so swap the destination plane indices. */
2028     const int U = obj_image->image.format.fourcc == VA_FOURCC_YV12 ? 2 : 1;
2029     const int V = obj_image->image.format.fourcc == VA_FOURCC_YV12 ? 1 : 2;
2030     unsigned int tiling, swizzle;
2032     if (!obj_surface->bo)
2035     dri_bo_get_tiling(obj_surface->bo, &tiling, &swizzle);
2037     if (tiling != I915_TILING_NONE)
2038         drm_intel_gem_bo_map_gtt(obj_surface->bo);
2040         dri_bo_map(obj_surface->bo, 0);
2042     if (!obj_surface->bo->virtual)
2045 /* Dest VA image has either I420 or YV12 format.
2046    Source VA surface always has I420 format */
2047     dst[Y] = image_data + obj_image->image.offsets[Y];
2048     src[0] = (uint8_t *)obj_surface->bo->virtual;
2049     dst[U] = image_data + obj_image->image.offsets[U];
2050     src[1] = src[0] + obj_surface->width * obj_surface->height;
2051     dst[V] = image_data + obj_image->image.offsets[V];
2052     src[2] = src[1] + (obj_surface->width / 2) * (obj_surface->height / 2);
2055     dst[Y] += rect->y * obj_image->image.pitches[Y] + rect->x;
2056     src[0] += rect->y * obj_surface->width + rect->x;
2057     memcpy_pic(dst[Y], obj_image->image.pitches[Y],
2058 src[0], obj_surface->width,
2059 rect->width, rect->height);
2062     dst[U] += (rect->y / 2) * obj_image->image.pitches[U] + rect->x / 2;
2063     src[1] += (rect->y / 2) * obj_surface->width / 2 + rect->x / 2;
2064     memcpy_pic(dst[U], obj_image->image.pitches[U],
2065 src[1], obj_surface->width / 2,
2066 rect->width / 2, rect->height / 2);
2069     dst[V] += (rect->y / 2) * obj_image->image.pitches[V] + rect->x / 2;
2070     src[2] += (rect->y / 2) * obj_surface->width / 2 + rect->x / 2;
2071     memcpy_pic(dst[V], obj_image->image.pitches[V],
2072 src[2], obj_surface->width / 2,
2073 rect->width / 2, rect->height / 2);
2075     if (tiling != I915_TILING_NONE)
2076         drm_intel_gem_bo_unmap_gtt(obj_surface->bo);
2078         dri_bo_unmap(obj_surface->bo);
/*
 * Copy the `rect` region of an NV12 surface (luma plane + interleaved
 * UV plane) into an NV12 VAImage.  Tiled BOs are mapped through the
 * GTT so the CPU sees a linear view.
 * NOTE(review): elided listing -- early returns, else keywords and
 * closing braces are not visible here.
 */
2082 get_image_nv12(struct object_image *obj_image, uint8_t *image_data,
2083 struct object_surface *obj_surface,
2084 const VARectangle *rect)
2086     uint8_t *dst[2], *src[2];
2087     unsigned int tiling, swizzle;
2089     if (!obj_surface->bo)
2092     dri_bo_get_tiling(obj_surface->bo, &tiling, &swizzle);
2094     if (tiling != I915_TILING_NONE)
2095         drm_intel_gem_bo_map_gtt(obj_surface->bo);
2097         dri_bo_map(obj_surface->bo, 0);
2099     if (!obj_surface->bo->virtual)
2102 /* Both dest VA image and source surface have NV12 format */
2103     dst[0] = image_data + obj_image->image.offsets[0];
2104     src[0] = (uint8_t *)obj_surface->bo->virtual;
2105     dst[1] = image_data + obj_image->image.offsets[1];
2106     src[1] = src[0] + obj_surface->width * obj_surface->height;
2109     dst[0] += rect->y * obj_image->image.pitches[0] + rect->x;
2110     src[0] += rect->y * obj_surface->width + rect->x;
2111     memcpy_pic(dst[0], obj_image->image.pitches[0],
2112 src[0], obj_surface->width,
2113 rect->width, rect->height);
/* UV plane: half vertical resolution; x is rounded down to an even
 * byte offset so U/V pairs stay aligned. */
2116     dst[1] += (rect->y / 2) * obj_image->image.pitches[1] + (rect->x & -2);
2117     src[1] += (rect->y / 2) * obj_surface->width + (rect->x & -2);
2118     memcpy_pic(dst[1], obj_image->image.pitches[1],
2119 src[1], obj_surface->width,
2120 rect->width, rect->height / 2);
2122     if (tiling != I915_TILING_NONE)
2123         drm_intel_gem_bo_unmap_gtt(obj_surface->bo);
2125         dri_bo_unmap(obj_surface->bo);
/*
 * vaGetImage(): validate the surface/image pair and requested region,
 * map the image buffer, then copy the region out of the surface using
 * the format-specific helper.  The image format must match the
 * surface's native layout (I420/YV12 vs NV12 per render state).
 * NOTE(review): elided listing -- the `y`/`image` parameters, several
 * NULL/negative checks, `break;` statements, the rect x/y/width
 * assignments, the operation_failed label and closing braces are not
 * visible in this chunk.
 */
2129 i965_GetImage(VADriverContextP ctx,
2130 VASurfaceID surface,
2131 int x, /* coordinates of the upper left source pixel */
2133 unsigned int width, /* width and height of the region */
2134 unsigned int height,
2137     struct i965_driver_data *i965 = i965_driver_data(ctx);
2138     struct i965_render_state *render_state = &i965->render_state;
2140     struct object_surface *obj_surface = SURFACE(surface);
2142         return VA_STATUS_ERROR_INVALID_SURFACE;
2144     struct object_image *obj_image = IMAGE(image);
2146         return VA_STATUS_ERROR_INVALID_IMAGE;
2149         return VA_STATUS_ERROR_INVALID_PARAMETER;
/* The requested region must fit inside both surface and image. */
2150     if (x + width > obj_surface->orig_width ||
2151 y + height > obj_surface->orig_height)
2152         return VA_STATUS_ERROR_INVALID_PARAMETER;
2153     if (x + width > obj_image->image.width ||
2154 y + height > obj_image->image.height)
2155         return VA_STATUS_ERROR_INVALID_PARAMETER;
2158     void *image_data = NULL;
2160     va_status = i965_MapBuffer(ctx, obj_image->image.buf, &image_data);
2161     if (va_status != VA_STATUS_SUCCESS)
2168     rect.height = height;
2170     switch (obj_image->image.format.fourcc) {
2171     case VA_FOURCC('Y','V','1','2'):
2172     case VA_FOURCC('I','4','2','0'):
2173         /* I420 is native format for MPEG-2 decoded surfaces */
2174         if (render_state->interleaved_uv)
2175             goto operation_failed;
2176         get_image_i420(obj_image, image_data, obj_surface, &rect);
2178     case VA_FOURCC('N','V','1','2'):
2179         /* NV12 is native format for H.264 decoded surfaces */
2180         if (!render_state->interleaved_uv)
2181             goto operation_failed;
2182         get_image_nv12(obj_image, image_data, obj_surface, &rect);
2186         va_status = VA_STATUS_ERROR_OPERATION_FAILED;
2190     i965_UnmapBuffer(ctx, obj_image->image.buf);
/*
 * vaPutSurface(): render a surface (plus any attached subpicture) into
 * a DRI2 drawable and swap.  Caches the destination region; when the
 * DRI2 buffer name changes the region BO is re-created via flink name.
 * DRI1 is rejected.  After display, a surface that is displayed-only
 * (not referenced by decode) has its BO and private data released.
 * NOTE(review): elided listing -- parameter lines (srcx/srcy,
 * destx/desty), local declarations (name, ret, pp_flag), several
 * if/else keywords, the intel_render_put_surface() trailing arguments
 * and closing braces are not visible in this chunk.
 */
2195 i965_PutSurface(VADriverContextP ctx,
2196 VASurfaceID surface,
2197 void *draw, /* X Drawable */
2200 unsigned short srcw,
2201 unsigned short srch,
2204 unsigned short destw,
2205 unsigned short desth,
2206 VARectangle *cliprects, /* client supplied clip list */
2207 unsigned int number_cliprects, /* number of clip rects in the clip list */
2208 unsigned int flags) /* de-interlacing flags */
2210     struct i965_driver_data *i965 = i965_driver_data(ctx);
2211     struct dri_state *dri_state = (struct dri_state *)ctx->dri_state;
2212     struct i965_render_state *render_state = &i965->render_state;
2213     struct dri_drawable *dri_drawable;
2214     union dri_buffer *buffer;
2215     struct intel_region *dest_region;
2216     struct object_surface *obj_surface;
2219     Bool new_region = False;
2221     /* Currently don't support DRI1 */
2222     if (dri_state->driConnectedFlag != VA_DRI2)
2223         return VA_STATUS_ERROR_UNKNOWN;
2225     /* Some broken sources such as H.264 conformance case FM2_SVA_C
2228     obj_surface = SURFACE(surface);
2229     if (!obj_surface || !obj_surface->bo)
2230         return VA_STATUS_SUCCESS;
2232     _i965LockMutex(&i965->render_mutex);
2234     dri_drawable = dri_get_drawable(ctx, (Drawable)draw);
2235     assert(dri_drawable);
2237     buffer = dri_get_rendering_buffer(ctx, dri_drawable);
2240     dest_region = render_state->draw_region;
2243     assert(dest_region->bo);
2244     dri_bo_flink(dest_region->bo, &name);
/* The DRI2 back buffer changed: drop the cached region BO. */
2246     if (buffer->dri2.name != name) {
2248         dri_bo_unreference(dest_region->bo);
2251     dest_region = (struct intel_region *)calloc(1, sizeof(*dest_region));
2252     assert(dest_region);
2253     render_state->draw_region = dest_region;
2258     dest_region->x = dri_drawable->x;
2259     dest_region->y = dri_drawable->y;
2260     dest_region->width = dri_drawable->width;
2261     dest_region->height = dri_drawable->height;
2262     dest_region->cpp = buffer->dri2.cpp;
2263     dest_region->pitch = buffer->dri2.pitch;
/* Re-open the shared DRI2 buffer by its global (flink) name. */
2265     dest_region->bo = intel_bo_gem_create_from_name(i965->intel.bufmgr, "rendering buffer", buffer->dri2.name);
2266     assert(dest_region->bo);
2268     ret = dri_bo_get_tiling(dest_region->bo, &(dest_region->tiling), &(dest_region->swizzle));
2272     if ((flags & VA_FILTER_SCALING_MASK) == VA_FILTER_SCALING_NL_ANAMORPHIC)
2273         pp_flag |= I965_PP_FLAG_AVS;
2275     if (flags & (VA_BOTTOM_FIELD | VA_TOP_FIELD))
2276         pp_flag |= I965_PP_FLAG_DEINTERLACING;
2278     intel_render_put_surface(ctx, surface,
2279 srcx, srcy, srcw, srch,
2280 destx, desty, destw, desth,
2283     if(obj_surface->subpic != VA_INVALID_ID) {
2284         intel_render_put_subpicture(ctx, surface,
2285 srcx, srcy, srcw, srch,
2286 destx, desty, destw, desth);
2289     dri_swap_buffer(ctx, dri_drawable);
2290     obj_surface->flags |= SURFACE_DISPLAYED;
/* Displayed and no longer referenced by decode: free the BO early. */
2292     if ((obj_surface->flags & SURFACE_ALL_MASK) == SURFACE_DISPLAYED) {
2293         dri_bo_unreference(obj_surface->bo);
2294         obj_surface->bo = NULL;
2295         obj_surface->flags &= ~SURFACE_REF_DIS_MASK;
2297         if (obj_surface->free_private_data)
2298             obj_surface->free_private_data(&obj_surface->private_data);
2301     _i965UnlockMutex(&i965->render_mutex);
2303     return VA_STATUS_SUCCESS;
/*
 * Driver teardown: free the batch buffer and render mutex, shut down
 * the render / post-processing / DRM layers, destroy every object heap
 * with its type-specific destructor, and release the driver data.
 * NOTE(review): elided listing -- any guard around
 * intel_batchbuffer_free() and the closing brace are not visible.
 */
2307 i965_Terminate(VADriverContextP ctx)
2309     struct i965_driver_data *i965 = i965_driver_data(ctx);
2312     intel_batchbuffer_free(i965->batch);
2314     _i965DestroyMutex(&i965->render_mutex);
2316     if (i965_render_terminate(ctx) == False)
2317         return VA_STATUS_ERROR_UNKNOWN;
2319     if (i965_post_processing_terminate(ctx) == False)
2320         return VA_STATUS_ERROR_UNKNOWN;
2322     if (intel_driver_terminate(ctx) == False)
2323         return VA_STATUS_ERROR_UNKNOWN;
/* Buffers first: surfaces/images may still hold buffer references. */
2325     i965_destroy_heap(&i965->buffer_heap, i965_destroy_buffer);
2326     i965_destroy_heap(&i965->image_heap, i965_destroy_image);
2327     i965_destroy_heap(&i965->subpic_heap, i965_destroy_subpic);
2328     i965_destroy_heap(&i965->surface_heap, i965_destroy_surface);
2329     i965_destroy_heap(&i965->context_heap, i965_destroy_context);
2330     i965_destroy_heap(&i965->config_heap, i965_destroy_config);
2332     free(ctx->pDriverData);
2333     ctx->pDriverData = NULL;
2335     return VA_STATUS_SUCCESS;
/*
 * vaBufferInfo(): report a buffer's type, element size and element
 * count.  NOTE(review): the function-name line is not visible in this
 * chunk (presumably i965_BufferInfo -- confirm against the full file);
 * the obj_buffer NULL check and closing brace are also elided.
 */
2340 VADriverContextP ctx, /* in */
2341 VABufferID buf_id, /* in */
2342 VABufferType *type, /* out */
2343 unsigned int *size, /* out */
2344 unsigned int *num_elements /* out */
2347     struct i965_driver_data *i965 = NULL;
2348     struct object_buffer *obj_buffer = NULL;
2350     i965 = i965_driver_data(ctx);
2351     obj_buffer = BUFFER(buf_id);
2353     *type = obj_buffer->type;
2354     *size = obj_buffer->size_element;
2355     *num_elements = obj_buffer->num_elements;
2357     return VA_STATUS_SUCCESS;
2362 VADriverContextP ctx, /* in */
2363 VASurfaceID surface, /* in */
2364 unsigned int *fourcc, /* out */
2365 unsigned int *luma_stride, /* out */
2366 unsigned int *chroma_u_stride, /* out */
2367 unsigned int *chroma_v_stride, /* out */
2368 unsigned int *luma_offset, /* out */
2369 unsigned int *chroma_u_offset, /* out */
2370 unsigned int *chroma_v_offset, /* out */
2371 unsigned int *buffer_name, /* out */
2372 void **buffer /* out */
2375 VAStatus vaStatus = VA_STATUS_SUCCESS;
2376 struct i965_driver_data *i965 = i965_driver_data(ctx);
2377 struct object_surface *obj_surface = NULL;
2381 assert(luma_stride);
2382 assert(chroma_u_stride);
2383 assert(chroma_v_stride);
2384 assert(luma_offset);
2385 assert(chroma_u_offset);
2386 assert(chroma_v_offset);
2387 assert(buffer_name);
2390 tmpImage.image_id = VA_INVALID_ID;
2392 obj_surface = SURFACE(surface);
2393 if (obj_surface == NULL) {
2394 // Surface is absent.
2395 vaStatus = VA_STATUS_ERROR_INVALID_PARAMETER;
2399 // Lock functionality is absent now.
2400 if (obj_surface->locked_image_id != VA_INVALID_ID) {
2401 // Surface is locked already.
2402 vaStatus = VA_STATUS_ERROR_INVALID_PARAMETER;
2406 vaStatus = i965_DeriveImage(
2410 if (vaStatus != VA_STATUS_SUCCESS) {
2414 obj_surface->locked_image_id = tmpImage.image_id;
2416 vaStatus = i965_MapBuffer(
2420 if (vaStatus != VA_STATUS_SUCCESS) {
2424 *fourcc = tmpImage.format.fourcc;
2425 *luma_offset = tmpImage.offsets[0];
2426 *luma_stride = tmpImage.pitches[0];
2427 *chroma_u_offset = tmpImage.offsets[1];
2428 *chroma_u_stride = tmpImage.pitches[1];
2429 *chroma_v_offset = tmpImage.offsets[2];
2430 *chroma_v_stride = tmpImage.pitches[2];
2431 *buffer_name = tmpImage.buf;
2434 if (vaStatus != VA_STATUS_SUCCESS) {
2443 VADriverContextP ctx, /* in */
2444 VASurfaceID surface /* in */
2447 VAStatus vaStatus = VA_STATUS_SUCCESS;
2448 struct i965_driver_data *i965 = i965_driver_data(ctx);
2449 struct object_image *locked_img = NULL;
2450 struct object_surface *obj_surface = NULL;
2452 obj_surface = SURFACE(surface);
2454 if (obj_surface == NULL) {
2455 vaStatus = VA_STATUS_ERROR_INVALID_PARAMETER; // Surface is absent
2458 if (obj_surface->locked_image_id == VA_INVALID_ID) {
2459 vaStatus = VA_STATUS_ERROR_INVALID_PARAMETER; // Surface is not locked
2463 locked_img = IMAGE(obj_surface->locked_image_id);
2464 if (locked_img == NULL || (locked_img->image.image_id == VA_INVALID_ID)) {
2465 // Work image was deallocated before i965_UnlockSurface()
2466 vaStatus = VA_STATUS_ERROR_INVALID_PARAMETER;
2470 vaStatus = i965_UnmapBuffer(
2472 locked_img->image.buf);
2473 if (vaStatus != VA_STATUS_SUCCESS) {
2477 vaStatus = i965_DestroyImage(
2479 locked_img->image.image_id);
2480 if (vaStatus != VA_STATUS_SUCCESS) {
2484 locked_img->image.image_id = VA_INVALID_ID;
2491 VA_DRIVER_INIT_FUNC( VADriverContextP ctx )
2493 struct VADriverVTable * const vtable = ctx->vtable;
2494 struct i965_driver_data *i965;
2497 ctx->version_major = VA_MAJOR_VERSION;
2498 ctx->version_minor = VA_MINOR_VERSION;
2499 ctx->max_profiles = I965_MAX_PROFILES;
2500 ctx->max_entrypoints = I965_MAX_ENTRYPOINTS;
2501 ctx->max_attributes = I965_MAX_CONFIG_ATTRIBUTES;
2502 ctx->max_image_formats = I965_MAX_IMAGE_FORMATS;
2503 ctx->max_subpic_formats = I965_MAX_SUBPIC_FORMATS;
2504 ctx->max_display_attributes = I965_MAX_DISPLAY_ATTRIBUTES;
2505 ctx->str_vendor = I965_STR_VENDOR;
2507 vtable->vaTerminate = i965_Terminate;
2508 vtable->vaQueryConfigEntrypoints = i965_QueryConfigEntrypoints;
2509 vtable->vaQueryConfigProfiles = i965_QueryConfigProfiles;
2510 vtable->vaQueryConfigEntrypoints = i965_QueryConfigEntrypoints;
2511 vtable->vaQueryConfigAttributes = i965_QueryConfigAttributes;
2512 vtable->vaCreateConfig = i965_CreateConfig;
2513 vtable->vaDestroyConfig = i965_DestroyConfig;
2514 vtable->vaGetConfigAttributes = i965_GetConfigAttributes;
2515 vtable->vaCreateSurfaces = i965_CreateSurfaces;
2516 vtable->vaDestroySurfaces = i965_DestroySurfaces;
2517 vtable->vaCreateContext = i965_CreateContext;
2518 vtable->vaDestroyContext = i965_DestroyContext;
2519 vtable->vaCreateBuffer = i965_CreateBuffer;
2520 vtable->vaBufferSetNumElements = i965_BufferSetNumElements;
2521 vtable->vaMapBuffer = i965_MapBuffer;
2522 vtable->vaUnmapBuffer = i965_UnmapBuffer;
2523 vtable->vaDestroyBuffer = i965_DestroyBuffer;
2524 vtable->vaBeginPicture = i965_BeginPicture;
2525 vtable->vaRenderPicture = i965_RenderPicture;
2526 vtable->vaEndPicture = i965_EndPicture;
2527 vtable->vaSyncSurface = i965_SyncSurface;
2528 vtable->vaQuerySurfaceStatus = i965_QuerySurfaceStatus;
2529 vtable->vaPutSurface = i965_PutSurface;
2530 vtable->vaQueryImageFormats = i965_QueryImageFormats;
2531 vtable->vaCreateImage = i965_CreateImage;
2532 vtable->vaDeriveImage = i965_DeriveImage;
2533 vtable->vaDestroyImage = i965_DestroyImage;
2534 vtable->vaSetImagePalette = i965_SetImagePalette;
2535 vtable->vaGetImage = i965_GetImage;
2536 vtable->vaPutImage = i965_PutImage;
2537 vtable->vaQuerySubpictureFormats = i965_QuerySubpictureFormats;
2538 vtable->vaCreateSubpicture = i965_CreateSubpicture;
2539 vtable->vaDestroySubpicture = i965_DestroySubpicture;
2540 vtable->vaSetSubpictureImage = i965_SetSubpictureImage;
2541 vtable->vaSetSubpictureChromakey = i965_SetSubpictureChromakey;
2542 vtable->vaSetSubpictureGlobalAlpha = i965_SetSubpictureGlobalAlpha;
2543 vtable->vaAssociateSubpicture = i965_AssociateSubpicture;
2544 vtable->vaDeassociateSubpicture = i965_DeassociateSubpicture;
2545 vtable->vaQueryDisplayAttributes = i965_QueryDisplayAttributes;
2546 vtable->vaGetDisplayAttributes = i965_GetDisplayAttributes;
2547 vtable->vaSetDisplayAttributes = i965_SetDisplayAttributes;
2548 vtable->vaBufferInfo = i965_BufferInfo;
2549 vtable->vaLockSurface = i965_LockSurface;
2550 vtable->vaUnlockSurface = i965_UnlockSurface;
2551 // vtable->vaDbgCopySurfaceToBuffer = i965_DbgCopySurfaceToBuffer;
2553 i965 = (struct i965_driver_data *)calloc(1, sizeof(*i965));
2555 ctx->pDriverData = (void *)i965;
2557 result = object_heap_init(&i965->config_heap,
2558 sizeof(struct object_config),
2560 assert(result == 0);
2562 result = object_heap_init(&i965->context_heap,
2563 sizeof(struct object_context),
2565 assert(result == 0);
2567 result = object_heap_init(&i965->surface_heap,
2568 sizeof(struct object_surface),
2570 assert(result == 0);
2572 result = object_heap_init(&i965->buffer_heap,
2573 sizeof(struct object_buffer),
2575 assert(result == 0);
2577 result = object_heap_init(&i965->image_heap,
2578 sizeof(struct object_image),
2580 assert(result == 0);
2582 result = object_heap_init(&i965->subpic_heap,
2583 sizeof(struct object_subpic),
2585 assert(result == 0);
2587 return i965_Init(ctx);