2 * Copyright © 2009 Intel Corporation
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the
6 * "Software"), to deal in the Software without restriction, including
7 * without limitation the rights to use, copy, modify, merge, publish,
8 * distribute, sub license, and/or sell copies of the Software, and to
9 * permit persons to whom the Software is furnished to do so, subject to
10 * the following conditions:
12 * The above copyright notice and this permission notice (including the
13 * next paragraph) shall be included in all copies or substantial portions
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
17 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
18 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
19 * IN NO EVENT SHALL PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR
20 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
21 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
22 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
25 * Xiang Haihao <haihao.xiang@intel.com>
26 * Zou Nan hai <nanhai.zou@intel.com>
35 #include "va/x11/va_dricommon.h"
37 #include "intel_driver.h"
38 #include "intel_memman.h"
39 #include "intel_batchbuffer.h"
40 #include "i965_defines.h"
41 #include "i965_drv_video.h"
/* Base offsets OR-ed into object-heap indices so that each VA object class
 * (config, context, surface, buffer, image, subpicture) lives in a disjoint
 * ID range and an ID's class can be recognized from its high bits.
 * NOTE(review): IMAGE_ID_OFFSET is 0x0a000000, which is NOT a one-hot bit
 * (it equals BUFFER|CONTEXT bits); IDs stay unique in practice, but confirm
 * this value was intended rather than 0x0A being a typo for a single bit. */
43 #define CONFIG_ID_OFFSET 0x01000000
44 #define CONTEXT_ID_OFFSET 0x02000000
45 #define SURFACE_ID_OFFSET 0x04000000
46 #define BUFFER_ID_OFFSET 0x08000000
47 #define IMAGE_ID_OFFSET 0x0a000000
48 #define SUBPIC_ID_OFFSET 0x10000000
/*
 * Per-generation codec capability tests, evaluated against the device id
 * stashed in the driver data ("ctx" here is struct i965_driver_data *).
 * MPEG-2 decode: G4X and Ironlake natively, Gen6/Gen7 only when the BSD
 * (bitstream decoder) ring is present.
 */
#define HAS_MPEG2(ctx)  (IS_G4X((ctx)->intel.device_id) ||      \
                         IS_IRONLAKE((ctx)->intel.device_id) || \
                         ((IS_GEN6((ctx)->intel.device_id) ||   \
                           IS_GEN7((ctx)->intel.device_id)) &&  \
                          (ctx)->intel.has_bsd))

/* H.264 decode requires the BSD ring on Ironlake/Gen6/Gen7.
 * (The trailing has_bsd term was truncated in the mangled source;
 * restored here to close the expression.) */
#define HAS_H264(ctx)   ((IS_GEN7((ctx)->intel.device_id) ||    \
                          IS_GEN6((ctx)->intel.device_id) ||    \
                          IS_IRONLAKE((ctx)->intel.device_id)) && \
                         (ctx)->intel.has_bsd)

/* VC-1 decode: Gen6/Gen7 with the BSD ring. */
#define HAS_VC1(ctx)    ((IS_GEN7((ctx)->intel.device_id) ||    \
                          IS_GEN6((ctx)->intel.device_id)) &&   \
                         (ctx)->intel.has_bsd)

/* Tiled NV12 surfaces are only used when UV is interleaved (Gen6/Gen7). */
#define HAS_TILED_SURFACE(ctx) ((IS_GEN7((ctx)->intel.device_id) || \
                                 IS_GEN6((ctx)->intel.device_id)) && \
                                (ctx)->render_state.interleaved_uv)

/* H.264 encode: Gen6/Gen7 with the BSD ring. */
#define HAS_ENCODER(ctx) ((IS_GEN7((ctx)->intel.device_id) ||   \
                           IS_GEN6((ctx)->intel.device_id)) &&  \
                          (ctx)->intel.has_bsd)
/* Internal surface-type tags used by the image/subpicture format tables.
 * 0 is reserved as the table terminator, hence RGBA starts at 1. */
enum {
    I965_SURFACETYPE_RGBA = 1,
    I965_SURFACETYPE_YUV,
    I965_SURFACETYPE_INDEXED
};
79 /* List of supported image formats */
82 VAImageFormat va_format;
83 } i965_image_format_map_t;
85 static const i965_image_format_map_t
86 i965_image_formats_map[I965_MAX_IMAGE_FORMATS + 1] = {
87 { I965_SURFACETYPE_YUV,
88 { VA_FOURCC('Y','V','1','2'), VA_LSB_FIRST, 12, } },
89 { I965_SURFACETYPE_YUV,
90 { VA_FOURCC('I','4','2','0'), VA_LSB_FIRST, 12, } },
91 { I965_SURFACETYPE_YUV,
92 { VA_FOURCC('N','V','1','2'), VA_LSB_FIRST, 12, } },
95 /* List of supported subpicture formats */
/* Each entry pairs an internal surface type + hardware surface format with
 * the VA image format it serves, plus VA_SUBPICTURE_xxx capability flags.
 * NOTE(review): the struct's leading `type`/`format` field declarations and
 * each entry's trailing flags initializer were lost in extraction — the
 * field accesses in get_subpic_format/i965_QuerySubpictureFormats (m->type,
 * m->format, m->va_flags) confirm they exist; restore from upstream. */
99 VAImageFormat va_format;
100 unsigned int va_flags;
101 } i965_subpic_format_map_t;
/* Sized +1 so a zero entry (fourcc == 0) terminates iteration. */
103 static const i965_subpic_format_map_t
104 i965_subpic_formats_map[I965_MAX_SUBPIC_FORMATS + 1] = {
/* 4-bit palette index + 4-bit alpha, index in high nibble. */
105 { I965_SURFACETYPE_INDEXED, I965_SURFACEFORMAT_P4A4_UNORM,
106 { VA_FOURCC('I','A','4','4'), VA_MSB_FIRST, 8, },
/* 4-bit alpha + 4-bit palette index, alpha in high nibble. */
108 { I965_SURFACETYPE_INDEXED, I965_SURFACEFORMAT_A4P4_UNORM,
109 { VA_FOURCC('A','I','4','4'), VA_MSB_FIRST, 8, },
/* 32-bit BGRA; channel masks distinguish it from RGBA below. */
111 { I965_SURFACETYPE_RGBA, I965_SURFACEFORMAT_B8G8R8A8_UNORM,
112 { VA_FOURCC('B','G','R','A'), VA_LSB_FIRST, 32,
113 32, 0x00ff0000, 0x0000ff00, 0x000000ff, 0xff000000 },
115 { I965_SURFACETYPE_RGBA, I965_SURFACEFORMAT_R8G8B8A8_UNORM,
116 { VA_FOURCC('R','G','B','A'), VA_LSB_FIRST, 32,
117 32, 0x000000ff, 0x0000ff00, 0x00ff0000, 0xff000000 },
121 static const i965_subpic_format_map_t *
122 get_subpic_format(const VAImageFormat *va_format)
125 for (i = 0; i965_subpic_formats_map[i].type != 0; i++) {
126 const i965_subpic_format_map_t * const m = &i965_subpic_formats_map[i];
127 if (m->va_format.fourcc == va_format->fourcc &&
128 (m->type == I965_SURFACETYPE_RGBA ?
129 (m->va_format.byte_order == va_format->byte_order &&
130 m->va_format.red_mask == va_format->red_mask &&
131 m->va_format.green_mask == va_format->green_mask &&
132 m->va_format.blue_mask == va_format->blue_mask &&
133 m->va_format.alpha_mask == va_format->alpha_mask) : 1))
139 extern struct hw_context *g4x_dec_hw_context_init(VADriverContextP, VAProfile);
140 static struct hw_codec_info g4x_hw_codec_info = {
141 .dec_hw_context_init = g4x_dec_hw_context_init,
142 .enc_hw_context_init = NULL,
145 extern struct hw_context *ironlake_dec_hw_context_init(VADriverContextP, VAProfile);
146 static struct hw_codec_info ironlake_hw_codec_info = {
147 .dec_hw_context_init = ironlake_dec_hw_context_init,
148 .enc_hw_context_init = NULL,
151 extern struct hw_context *gen6_dec_hw_context_init(VADriverContextP, VAProfile);
152 extern struct hw_context *gen6_enc_hw_context_init(VADriverContextP, VAProfile);
153 static struct hw_codec_info gen6_hw_codec_info = {
154 .dec_hw_context_init = gen6_dec_hw_context_init,
155 .enc_hw_context_init = gen6_enc_hw_context_init,
159 i965_QueryConfigProfiles(VADriverContextP ctx,
160 VAProfile *profile_list, /* out */
161 int *num_profiles) /* out */
163 struct i965_driver_data * const i965 = i965_driver_data(ctx);
166 if (HAS_MPEG2(i965)) {
167 profile_list[i++] = VAProfileMPEG2Simple;
168 profile_list[i++] = VAProfileMPEG2Main;
171 if (HAS_H264(i965)) {
172 profile_list[i++] = VAProfileH264Baseline;
173 profile_list[i++] = VAProfileH264Main;
174 profile_list[i++] = VAProfileH264High;
178 profile_list[i++] = VAProfileVC1Simple;
179 profile_list[i++] = VAProfileVC1Main;
180 profile_list[i++] = VAProfileVC1Advanced;
183 /* If the assert fails then I965_MAX_PROFILES needs to be bigger */
184 assert(i <= I965_MAX_PROFILES);
187 return VA_STATUS_SUCCESS;
191 i965_QueryConfigEntrypoints(VADriverContextP ctx,
193 VAEntrypoint *entrypoint_list, /* out */
194 int *num_entrypoints) /* out */
196 struct i965_driver_data * const i965 = i965_driver_data(ctx);
200 case VAProfileMPEG2Simple:
201 case VAProfileMPEG2Main:
203 entrypoint_list[n++] = VAEntrypointVLD;
206 case VAProfileH264Baseline:
207 case VAProfileH264Main:
208 case VAProfileH264High:
210 entrypoint_list[n++] = VAEntrypointVLD;
212 if (HAS_ENCODER(i965))
213 entrypoint_list[n++] = VAEntrypointEncSlice;
217 case VAProfileVC1Simple:
218 case VAProfileVC1Main:
219 case VAProfileVC1Advanced:
221 entrypoint_list[n++] = VAEntrypointVLD;
228 /* If the assert fails then I965_MAX_ENTRYPOINTS needs to be bigger */
229 assert(n <= I965_MAX_ENTRYPOINTS);
230 *num_entrypoints = n;
231 return n > 0 ? VA_STATUS_SUCCESS : VA_STATUS_ERROR_UNSUPPORTED_PROFILE;
235 i965_GetConfigAttributes(VADriverContextP ctx,
237 VAEntrypoint entrypoint,
238 VAConfigAttrib *attrib_list, /* in/out */
243 /* Other attributes don't seem to be defined */
244 /* What to do if we don't know the attribute? */
245 for (i = 0; i < num_attribs; i++) {
246 switch (attrib_list[i].type) {
247 case VAConfigAttribRTFormat:
248 attrib_list[i].value = VA_RT_FORMAT_YUV420;
251 case VAConfigAttribRateControl:
252 attrib_list[i].value = VA_RC_VBR;
257 attrib_list[i].value = VA_ATTRIB_NOT_SUPPORTED;
262 return VA_STATUS_SUCCESS;
266 i965_destroy_config(struct object_heap *heap, struct object_base *obj)
268 object_heap_free(heap, obj);
272 i965_update_attribute(struct object_config *obj_config, VAConfigAttrib *attrib)
276 /* Check existing attrbiutes */
277 for (i = 0; obj_config->num_attribs < i; i++) {
278 if (obj_config->attrib_list[i].type == attrib->type) {
279 /* Update existing attribute */
280 obj_config->attrib_list[i].value = attrib->value;
281 return VA_STATUS_SUCCESS;
285 if (obj_config->num_attribs < I965_MAX_CONFIG_ATTRIBUTES) {
286 i = obj_config->num_attribs;
287 obj_config->attrib_list[i].type = attrib->type;
288 obj_config->attrib_list[i].value = attrib->value;
289 obj_config->num_attribs++;
290 return VA_STATUS_SUCCESS;
293 return VA_STATUS_ERROR_MAX_NUM_EXCEEDED;
297 i965_CreateConfig(VADriverContextP ctx,
299 VAEntrypoint entrypoint,
300 VAConfigAttrib *attrib_list,
302 VAConfigID *config_id) /* out */
304 struct i965_driver_data * const i965 = i965_driver_data(ctx);
305 struct object_config *obj_config;
310 /* Validate profile & entrypoint */
312 case VAProfileMPEG2Simple:
313 case VAProfileMPEG2Main:
314 if (HAS_MPEG2(i965) && VAEntrypointVLD == entrypoint) {
315 vaStatus = VA_STATUS_SUCCESS;
317 vaStatus = VA_STATUS_ERROR_UNSUPPORTED_ENTRYPOINT;
321 case VAProfileH264Baseline:
322 case VAProfileH264Main:
323 case VAProfileH264High:
324 if ((HAS_H264(i965) && VAEntrypointVLD == entrypoint) ||
325 (HAS_ENCODER(i965) && VAEntrypointEncSlice == entrypoint)) {
326 vaStatus = VA_STATUS_SUCCESS;
328 vaStatus = VA_STATUS_ERROR_UNSUPPORTED_ENTRYPOINT;
333 case VAProfileVC1Simple:
334 case VAProfileVC1Main:
335 case VAProfileVC1Advanced:
336 if (HAS_VC1(i965) && VAEntrypointVLD == entrypoint) {
337 vaStatus = VA_STATUS_SUCCESS;
339 vaStatus = VA_STATUS_ERROR_UNSUPPORTED_ENTRYPOINT;
345 vaStatus = VA_STATUS_ERROR_UNSUPPORTED_PROFILE;
349 if (VA_STATUS_SUCCESS != vaStatus) {
353 configID = NEW_CONFIG_ID();
354 obj_config = CONFIG(configID);
356 if (NULL == obj_config) {
357 vaStatus = VA_STATUS_ERROR_ALLOCATION_FAILED;
361 obj_config->profile = profile;
362 obj_config->entrypoint = entrypoint;
363 obj_config->attrib_list[0].type = VAConfigAttribRTFormat;
364 obj_config->attrib_list[0].value = VA_RT_FORMAT_YUV420;
365 obj_config->num_attribs = 1;
367 for(i = 0; i < num_attribs; i++) {
368 vaStatus = i965_update_attribute(obj_config, &(attrib_list[i]));
370 if (VA_STATUS_SUCCESS != vaStatus) {
376 if (VA_STATUS_SUCCESS != vaStatus) {
377 i965_destroy_config(&i965->config_heap, (struct object_base *)obj_config);
379 *config_id = configID;
386 i965_DestroyConfig(VADriverContextP ctx, VAConfigID config_id)
388 struct i965_driver_data *i965 = i965_driver_data(ctx);
389 struct object_config *obj_config = CONFIG(config_id);
392 if (NULL == obj_config) {
393 vaStatus = VA_STATUS_ERROR_INVALID_CONFIG;
397 i965_destroy_config(&i965->config_heap, (struct object_base *)obj_config);
398 return VA_STATUS_SUCCESS;
401 VAStatus i965_QueryConfigAttributes(VADriverContextP ctx,
402 VAConfigID config_id,
403 VAProfile *profile, /* out */
404 VAEntrypoint *entrypoint, /* out */
405 VAConfigAttrib *attrib_list, /* out */
406 int *num_attribs) /* out */
408 struct i965_driver_data *i965 = i965_driver_data(ctx);
409 struct object_config *obj_config = CONFIG(config_id);
410 VAStatus vaStatus = VA_STATUS_SUCCESS;
414 *profile = obj_config->profile;
415 *entrypoint = obj_config->entrypoint;
416 *num_attribs = obj_config->num_attribs;
418 for(i = 0; i < obj_config->num_attribs; i++) {
419 attrib_list[i] = obj_config->attrib_list[i];
426 i965_destroy_surface(struct object_heap *heap, struct object_base *obj)
428 struct object_surface *obj_surface = (struct object_surface *)obj;
430 dri_bo_unreference(obj_surface->bo);
431 obj_surface->bo = NULL;
432 dri_bo_unreference(obj_surface->pp_out_bo);
433 obj_surface->pp_out_bo = NULL;
435 if (obj_surface->free_private_data != NULL) {
436 obj_surface->free_private_data(&obj_surface->private_data);
437 obj_surface->private_data = NULL;
440 object_heap_free(heap, obj);
444 i965_CreateSurfaces(VADriverContextP ctx,
449 VASurfaceID *surfaces) /* out */
451 struct i965_driver_data *i965 = i965_driver_data(ctx);
453 VAStatus vaStatus = VA_STATUS_SUCCESS;
455 /* We only support one format */
456 if (VA_RT_FORMAT_YUV420 != format) {
457 return VA_STATUS_ERROR_UNSUPPORTED_RT_FORMAT;
460 for (i = 0; i < num_surfaces; i++) {
461 int surfaceID = NEW_SURFACE_ID();
462 struct object_surface *obj_surface = SURFACE(surfaceID);
464 if (NULL == obj_surface) {
465 vaStatus = VA_STATUS_ERROR_ALLOCATION_FAILED;
469 surfaces[i] = surfaceID;
470 obj_surface->status = VASurfaceReady;
471 obj_surface->subpic = VA_INVALID_ID;
472 obj_surface->orig_width = width;
473 obj_surface->orig_height = height;
475 if (IS_GEN6(i965->intel.device_id) ||
476 IS_GEN7(i965->intel.device_id)) {
477 obj_surface->width = ALIGN(obj_surface->orig_width, 128);
478 obj_surface->height = ALIGN(obj_surface->orig_height, 32);
480 obj_surface->width = ALIGN(obj_surface->orig_width, 16);
481 obj_surface->height = ALIGN(obj_surface->orig_height, 16);
484 obj_surface->size = SIZE_YUV420(obj_surface->width, obj_surface->height);
485 obj_surface->flags = SURFACE_REFERENCED;
486 obj_surface->bo = NULL;
487 obj_surface->pp_out_bo = NULL;
488 obj_surface->locked_image_id = VA_INVALID_ID;
489 obj_surface->private_data = NULL;
490 obj_surface->free_private_data = NULL;
494 if (VA_STATUS_SUCCESS != vaStatus) {
495 /* surfaces[i-1] was the last successful allocation */
497 struct object_surface *obj_surface = SURFACE(surfaces[i]);
499 surfaces[i] = VA_INVALID_SURFACE;
501 i965_destroy_surface(&i965->surface_heap, (struct object_base *)obj_surface);
509 i965_DestroySurfaces(VADriverContextP ctx,
510 VASurfaceID *surface_list,
513 struct i965_driver_data *i965 = i965_driver_data(ctx);
516 for (i = num_surfaces; i--; ) {
517 struct object_surface *obj_surface = SURFACE(surface_list[i]);
520 i965_destroy_surface(&i965->surface_heap, (struct object_base *)obj_surface);
523 return VA_STATUS_SUCCESS;
527 i965_QueryImageFormats(VADriverContextP ctx,
528 VAImageFormat *format_list, /* out */
529 int *num_formats) /* out */
533 for (n = 0; i965_image_formats_map[n].va_format.fourcc != 0; n++) {
534 const i965_image_format_map_t * const m = &i965_image_formats_map[n];
536 format_list[n] = m->va_format;
542 return VA_STATUS_SUCCESS;
546 i965_PutImage(VADriverContextP ctx,
551 unsigned int src_width,
552 unsigned int src_height,
555 unsigned int dest_width,
556 unsigned int dest_height)
558 return VA_STATUS_SUCCESS;
562 i965_QuerySubpictureFormats(VADriverContextP ctx,
563 VAImageFormat *format_list, /* out */
564 unsigned int *flags, /* out */
565 unsigned int *num_formats) /* out */
569 for (n = 0; i965_subpic_formats_map[n].va_format.fourcc != 0; n++) {
570 const i965_subpic_format_map_t * const m = &i965_subpic_formats_map[n];
572 format_list[n] = m->va_format;
574 flags[n] = m->va_flags;
580 return VA_STATUS_SUCCESS;
/* Heap-destroy callback for subpictures: nothing to release beyond the
 * heap slot (the image's bo is owned by the image object). */
static void
i965_destroy_subpic(struct object_heap *heap, struct object_base *obj)
{
    object_heap_free(heap, obj);
}
592 i965_CreateSubpicture(VADriverContextP ctx,
594 VASubpictureID *subpicture) /* out */
596 struct i965_driver_data *i965 = i965_driver_data(ctx);
597 VASubpictureID subpicID = NEW_SUBPIC_ID()
598 struct object_subpic *obj_subpic = SUBPIC(subpicID);
601 return VA_STATUS_ERROR_ALLOCATION_FAILED;
603 struct object_image *obj_image = IMAGE(image);
605 return VA_STATUS_ERROR_INVALID_IMAGE;
607 const i965_subpic_format_map_t * const m = get_subpic_format(&obj_image->image.format);
609 return VA_STATUS_ERROR_UNKNOWN; /* XXX: VA_STATUS_ERROR_UNSUPPORTED_FORMAT? */
611 *subpicture = subpicID;
612 obj_subpic->image = image;
613 obj_subpic->format = m->format;
614 obj_subpic->width = obj_image->image.width;
615 obj_subpic->height = obj_image->image.height;
616 obj_subpic->pitch = obj_image->image.pitches[0];
617 obj_subpic->bo = obj_image->bo;
618 return VA_STATUS_SUCCESS;
622 i965_DestroySubpicture(VADriverContextP ctx,
623 VASubpictureID subpicture)
625 struct i965_driver_data *i965 = i965_driver_data(ctx);
626 struct object_subpic *obj_subpic = SUBPIC(subpicture);
627 i965_destroy_subpic(&i965->subpic_heap, (struct object_base *)obj_subpic);
628 return VA_STATUS_SUCCESS;
632 i965_SetSubpictureImage(VADriverContextP ctx,
633 VASubpictureID subpicture,
637 return VA_STATUS_ERROR_UNIMPLEMENTED;
641 i965_SetSubpictureChromakey(VADriverContextP ctx,
642 VASubpictureID subpicture,
643 unsigned int chromakey_min,
644 unsigned int chromakey_max,
645 unsigned int chromakey_mask)
648 return VA_STATUS_ERROR_UNIMPLEMENTED;
652 i965_SetSubpictureGlobalAlpha(VADriverContextP ctx,
653 VASubpictureID subpicture,
657 return VA_STATUS_ERROR_UNIMPLEMENTED;
661 i965_AssociateSubpicture(VADriverContextP ctx,
662 VASubpictureID subpicture,
663 VASurfaceID *target_surfaces,
665 short src_x, /* upper left offset in subpicture */
667 unsigned short src_width,
668 unsigned short src_height,
669 short dest_x, /* upper left offset in surface */
671 unsigned short dest_width,
672 unsigned short dest_height,
674 * whether to enable chroma-keying or global-alpha
675 * see VA_SUBPICTURE_XXX values
679 struct i965_driver_data *i965 = i965_driver_data(ctx);
680 struct object_subpic *obj_subpic = SUBPIC(subpicture);
683 obj_subpic->src_rect.x = src_x;
684 obj_subpic->src_rect.y = src_y;
685 obj_subpic->src_rect.width = src_width;
686 obj_subpic->src_rect.height = src_height;
687 obj_subpic->dst_rect.x = dest_x;
688 obj_subpic->dst_rect.y = dest_y;
689 obj_subpic->dst_rect.width = dest_width;
690 obj_subpic->dst_rect.height = dest_height;
692 for (i = 0; i < num_surfaces; i++) {
693 struct object_surface *obj_surface = SURFACE(target_surfaces[i]);
695 return VA_STATUS_ERROR_INVALID_SURFACE;
696 obj_surface->subpic = subpicture;
698 return VA_STATUS_SUCCESS;
703 i965_DeassociateSubpicture(VADriverContextP ctx,
704 VASubpictureID subpicture,
705 VASurfaceID *target_surfaces,
708 struct i965_driver_data *i965 = i965_driver_data(ctx);
711 for (i = 0; i < num_surfaces; i++) {
712 struct object_surface *obj_surface = SURFACE(target_surfaces[i]);
714 return VA_STATUS_ERROR_INVALID_SURFACE;
715 if (obj_surface->subpic == subpicture)
716 obj_surface->subpic = VA_INVALID_ID;
718 return VA_STATUS_SUCCESS;
722 i965_reference_buffer_store(struct buffer_store **ptr,
723 struct buffer_store *buffer_store)
725 assert(*ptr == NULL);
728 buffer_store->ref_count++;
734 i965_release_buffer_store(struct buffer_store **ptr)
736 struct buffer_store *buffer_store = *ptr;
738 if (buffer_store == NULL)
741 assert(buffer_store->bo || buffer_store->buffer);
742 assert(!(buffer_store->bo && buffer_store->buffer));
743 buffer_store->ref_count--;
745 if (buffer_store->ref_count == 0) {
746 dri_bo_unreference(buffer_store->bo);
747 free(buffer_store->buffer);
748 buffer_store->bo = NULL;
749 buffer_store->buffer = NULL;
757 i965_destroy_context(struct object_heap *heap, struct object_base *obj)
759 struct object_context *obj_context = (struct object_context *)obj;
762 if (obj_context->hw_context) {
763 obj_context->hw_context->destroy(obj_context->hw_context);
764 obj_context->hw_context = NULL;
767 if (obj_context->codec_type == CODEC_ENC) {
768 assert(obj_context->codec_state.enc.num_slice_params <= obj_context->codec_state.enc.max_slice_params);
769 i965_release_buffer_store(&obj_context->codec_state.enc.pic_param);
770 i965_release_buffer_store(&obj_context->codec_state.enc.seq_param);
772 assert(obj_context->codec_state.dec.num_slice_params <= obj_context->codec_state.dec.max_slice_params);
773 assert(obj_context->codec_state.dec.num_slice_datas <= obj_context->codec_state.dec.max_slice_datas);
775 i965_release_buffer_store(&obj_context->codec_state.dec.pic_param);
776 i965_release_buffer_store(&obj_context->codec_state.dec.iq_matrix);
777 i965_release_buffer_store(&obj_context->codec_state.dec.bit_plane);
779 for (i = 0; i < obj_context->codec_state.dec.num_slice_params; i++)
780 i965_release_buffer_store(&obj_context->codec_state.dec.slice_params[i]);
782 for (i = 0; i < obj_context->codec_state.dec.num_slice_datas; i++)
783 i965_release_buffer_store(&obj_context->codec_state.dec.slice_datas[i]);
785 free(obj_context->codec_state.dec.slice_params);
786 free(obj_context->codec_state.dec.slice_datas);
789 free(obj_context->render_targets);
790 object_heap_free(heap, obj);
794 i965_CreateContext(VADriverContextP ctx,
795 VAConfigID config_id,
799 VASurfaceID *render_targets,
800 int num_render_targets,
801 VAContextID *context) /* out */
803 struct i965_driver_data *i965 = i965_driver_data(ctx);
804 struct i965_render_state *render_state = &i965->render_state;
805 struct object_config *obj_config = CONFIG(config_id);
806 struct object_context *obj_context = NULL;
807 VAStatus vaStatus = VA_STATUS_SUCCESS;
811 if (NULL == obj_config) {
812 vaStatus = VA_STATUS_ERROR_INVALID_CONFIG;
817 /* Validate picture dimensions */
818 contextID = NEW_CONTEXT_ID();
819 obj_context = CONTEXT(contextID);
821 if (NULL == obj_context) {
822 vaStatus = VA_STATUS_ERROR_ALLOCATION_FAILED;
826 render_state->inited = 1;
828 switch (obj_config->profile) {
829 case VAProfileH264Baseline:
830 case VAProfileH264Main:
831 case VAProfileH264High:
833 return VA_STATUS_ERROR_UNSUPPORTED_PROFILE;
834 render_state->interleaved_uv = 1;
837 render_state->interleaved_uv = !!(IS_GEN6(i965->intel.device_id) || IS_GEN7(i965->intel.device_id));
841 *context = contextID;
842 obj_context->flags = flag;
843 obj_context->context_id = contextID;
844 obj_context->config_id = config_id;
845 obj_context->picture_width = picture_width;
846 obj_context->picture_height = picture_height;
847 obj_context->num_render_targets = num_render_targets;
848 obj_context->render_targets =
849 (VASurfaceID *)calloc(num_render_targets, sizeof(VASurfaceID));
850 obj_context->hw_context = NULL;
852 for(i = 0; i < num_render_targets; i++) {
853 if (NULL == SURFACE(render_targets[i])) {
854 vaStatus = VA_STATUS_ERROR_INVALID_SURFACE;
858 obj_context->render_targets[i] = render_targets[i];
861 if (VA_STATUS_SUCCESS == vaStatus) {
862 if (VAEntrypointEncSlice == obj_config->entrypoint ) { /*encode routin only*/
863 obj_context->codec_type = CODEC_ENC;
864 memset(&obj_context->codec_state.enc, 0, sizeof(obj_context->codec_state.enc));
865 obj_context->codec_state.enc.current_render_target = VA_INVALID_ID;
866 obj_context->codec_state.enc.max_slice_params = NUM_SLICES;
867 obj_context->codec_state.enc.slice_params = calloc(obj_context->codec_state.enc.max_slice_params,
868 sizeof(*obj_context->codec_state.enc.slice_params));
869 assert(i965->codec_info->enc_hw_context_init);
870 obj_context->hw_context = i965->codec_info->enc_hw_context_init(ctx, obj_config->profile);
872 obj_context->codec_type = CODEC_DEC;
873 memset(&obj_context->codec_state.dec, 0, sizeof(obj_context->codec_state.dec));
874 obj_context->codec_state.dec.current_render_target = -1;
875 obj_context->codec_state.dec.max_slice_params = NUM_SLICES;
876 obj_context->codec_state.dec.max_slice_datas = NUM_SLICES;
877 obj_context->codec_state.dec.slice_params = calloc(obj_context->codec_state.dec.max_slice_params,
878 sizeof(*obj_context->codec_state.dec.slice_params));
879 obj_context->codec_state.dec.slice_datas = calloc(obj_context->codec_state.dec.max_slice_datas,
880 sizeof(*obj_context->codec_state.dec.slice_datas));
882 assert(i965->codec_info->dec_hw_context_init);
883 obj_context->hw_context = i965->codec_info->dec_hw_context_init(ctx, obj_config->profile);
888 if (VA_STATUS_SUCCESS != vaStatus) {
889 i965_destroy_context(&i965->context_heap, (struct object_base *)obj_context);
896 i965_DestroyContext(VADriverContextP ctx, VAContextID context)
898 struct i965_driver_data *i965 = i965_driver_data(ctx);
899 struct object_context *obj_context = CONTEXT(context);
902 i965_destroy_context(&i965->context_heap, (struct object_base *)obj_context);
904 return VA_STATUS_SUCCESS;
908 i965_destroy_buffer(struct object_heap *heap, struct object_base *obj)
910 struct object_buffer *obj_buffer = (struct object_buffer *)obj;
912 assert(obj_buffer->buffer_store);
913 i965_release_buffer_store(&obj_buffer->buffer_store);
914 object_heap_free(heap, obj);
918 i965_create_buffer_internal(VADriverContextP ctx,
922 unsigned int num_elements,
927 struct i965_driver_data *i965 = i965_driver_data(ctx);
928 struct object_buffer *obj_buffer = NULL;
929 struct buffer_store *buffer_store = NULL;
934 case VAPictureParameterBufferType:
935 case VAIQMatrixBufferType:
936 case VABitPlaneBufferType:
937 case VASliceGroupMapBufferType:
938 case VASliceParameterBufferType:
939 case VASliceDataBufferType:
940 case VAMacroblockParameterBufferType:
941 case VAResidualDataBufferType:
942 case VADeblockingParameterBufferType:
943 case VAImageBufferType:
944 case VAEncCodedBufferType:
945 case VAEncSequenceParameterBufferType:
946 case VAEncPictureParameterBufferType:
947 case VAEncSliceParameterBufferType:
952 return VA_STATUS_ERROR_UNSUPPORTED_BUFFERTYPE;
955 bufferID = NEW_BUFFER_ID();
956 obj_buffer = BUFFER(bufferID);
958 if (NULL == obj_buffer) {
959 return VA_STATUS_ERROR_ALLOCATION_FAILED;
962 if (type == VAEncCodedBufferType) {
963 size += ALIGN(sizeof(VACodedBufferSegment), 64);
966 obj_buffer->max_num_elements = num_elements;
967 obj_buffer->num_elements = num_elements;
968 obj_buffer->size_element = size;
969 obj_buffer->type = type;
970 obj_buffer->buffer_store = NULL;
971 buffer_store = calloc(1, sizeof(struct buffer_store));
972 assert(buffer_store);
973 buffer_store->ref_count = 1;
975 if (store_bo != NULL) {
976 buffer_store->bo = store_bo;
977 dri_bo_reference(buffer_store->bo);
980 dri_bo_subdata(buffer_store->bo, 0, size * num_elements, data);
981 } else if (type == VASliceDataBufferType || type == VAImageBufferType || type == VAEncCodedBufferType) {
982 buffer_store->bo = dri_bo_alloc(i965->intel.bufmgr,
984 size * num_elements, 64);
985 assert(buffer_store->bo);
987 if (type == VAEncCodedBufferType) {
988 VACodedBufferSegment *coded_buffer_segment;
989 dri_bo_map(buffer_store->bo, 1);
990 coded_buffer_segment = (VACodedBufferSegment *)buffer_store->bo->virtual;
991 coded_buffer_segment->size = size - ALIGN(sizeof(VACodedBufferSegment), 64);
992 coded_buffer_segment->bit_offset = 0;
993 coded_buffer_segment->status = 0;
994 coded_buffer_segment->buf = NULL;
995 coded_buffer_segment->next = NULL;
996 dri_bo_unmap(buffer_store->bo);
998 dri_bo_subdata(buffer_store->bo, 0, size * num_elements, data);
1002 buffer_store->buffer = malloc(size * num_elements);
1003 assert(buffer_store->buffer);
1006 memcpy(buffer_store->buffer, data, size * num_elements);
1009 buffer_store->num_elements = obj_buffer->num_elements;
1010 i965_reference_buffer_store(&obj_buffer->buffer_store, buffer_store);
1011 i965_release_buffer_store(&buffer_store);
1014 return VA_STATUS_SUCCESS;
1018 i965_CreateBuffer(VADriverContextP ctx,
1019 VAContextID context, /* in */
1020 VABufferType type, /* in */
1021 unsigned int size, /* in */
1022 unsigned int num_elements, /* in */
1023 void *data, /* in */
1024 VABufferID *buf_id) /* out */
1026 return i965_create_buffer_internal(ctx, context, type, size, num_elements, data, NULL, buf_id);
1031 i965_BufferSetNumElements(VADriverContextP ctx,
1032 VABufferID buf_id, /* in */
1033 unsigned int num_elements) /* in */
1035 struct i965_driver_data *i965 = i965_driver_data(ctx);
1036 struct object_buffer *obj_buffer = BUFFER(buf_id);
1037 VAStatus vaStatus = VA_STATUS_SUCCESS;
1041 if ((num_elements < 0) ||
1042 (num_elements > obj_buffer->max_num_elements)) {
1043 vaStatus = VA_STATUS_ERROR_UNKNOWN;
1045 obj_buffer->num_elements = num_elements;
1046 if (obj_buffer->buffer_store != NULL) {
1047 obj_buffer->buffer_store->num_elements = num_elements;
1055 i965_MapBuffer(VADriverContextP ctx,
1056 VABufferID buf_id, /* in */
1057 void **pbuf) /* out */
1059 struct i965_driver_data *i965 = i965_driver_data(ctx);
1060 struct object_buffer *obj_buffer = BUFFER(buf_id);
1061 VAStatus vaStatus = VA_STATUS_ERROR_UNKNOWN;
1063 assert(obj_buffer && obj_buffer->buffer_store);
1064 assert(obj_buffer->buffer_store->bo || obj_buffer->buffer_store->buffer);
1065 assert(!(obj_buffer->buffer_store->bo && obj_buffer->buffer_store->buffer));
1067 if (NULL != obj_buffer->buffer_store->bo) {
1068 unsigned int tiling, swizzle;
1070 dri_bo_get_tiling(obj_buffer->buffer_store->bo, &tiling, &swizzle);
1072 if (tiling != I915_TILING_NONE)
1073 drm_intel_gem_bo_map_gtt(obj_buffer->buffer_store->bo);
1075 dri_bo_map(obj_buffer->buffer_store->bo, 1);
1077 assert(obj_buffer->buffer_store->bo->virtual);
1078 *pbuf = obj_buffer->buffer_store->bo->virtual;
1080 if (obj_buffer->type == VAEncCodedBufferType) {
1081 VACodedBufferSegment *coded_buffer_segment = (VACodedBufferSegment *)(obj_buffer->buffer_store->bo->virtual);
1082 coded_buffer_segment->buf = (unsigned char *)(obj_buffer->buffer_store->bo->virtual) + ALIGN(sizeof(VACodedBufferSegment), 64);
1085 vaStatus = VA_STATUS_SUCCESS;
1086 } else if (NULL != obj_buffer->buffer_store->buffer) {
1087 *pbuf = obj_buffer->buffer_store->buffer;
1088 vaStatus = VA_STATUS_SUCCESS;
1095 i965_UnmapBuffer(VADriverContextP ctx, VABufferID buf_id)
1097 struct i965_driver_data *i965 = i965_driver_data(ctx);
1098 struct object_buffer *obj_buffer = BUFFER(buf_id);
1099 VAStatus vaStatus = VA_STATUS_ERROR_UNKNOWN;
1101 assert(obj_buffer && obj_buffer->buffer_store);
1102 assert(obj_buffer->buffer_store->bo || obj_buffer->buffer_store->buffer);
1103 assert(!(obj_buffer->buffer_store->bo && obj_buffer->buffer_store->buffer));
1105 if (NULL != obj_buffer->buffer_store->bo) {
1106 unsigned int tiling, swizzle;
1108 dri_bo_get_tiling(obj_buffer->buffer_store->bo, &tiling, &swizzle);
1110 if (tiling != I915_TILING_NONE)
1111 drm_intel_gem_bo_unmap_gtt(obj_buffer->buffer_store->bo);
1113 dri_bo_unmap(obj_buffer->buffer_store->bo);
1115 vaStatus = VA_STATUS_SUCCESS;
1116 } else if (NULL != obj_buffer->buffer_store->buffer) {
1118 vaStatus = VA_STATUS_SUCCESS;
1125 i965_DestroyBuffer(VADriverContextP ctx, VABufferID buffer_id)
1127 struct i965_driver_data *i965 = i965_driver_data(ctx);
1128 struct object_buffer *obj_buffer = BUFFER(buffer_id);
1131 i965_destroy_buffer(&i965->buffer_heap, (struct object_base *)obj_buffer);
1133 return VA_STATUS_SUCCESS;
1137 i965_BeginPicture(VADriverContextP ctx,
1138 VAContextID context,
1139 VASurfaceID render_target)
1141 struct i965_driver_data *i965 = i965_driver_data(ctx);
1142 struct object_context *obj_context = CONTEXT(context);
1143 struct object_surface *obj_surface = SURFACE(render_target);
1144 struct object_config *obj_config;
1148 assert(obj_context);
1149 assert(obj_surface);
1151 config = obj_context->config_id;
1152 obj_config = CONFIG(config);
1155 switch (obj_config->profile) {
1156 case VAProfileMPEG2Simple:
1157 case VAProfileMPEG2Main:
1158 vaStatus = VA_STATUS_SUCCESS;
1161 case VAProfileH264Baseline:
1162 case VAProfileH264Main:
1163 case VAProfileH264High:
1164 vaStatus = VA_STATUS_SUCCESS;
1167 case VAProfileVC1Simple:
1168 case VAProfileVC1Main:
1169 case VAProfileVC1Advanced:
1170 vaStatus = VA_STATUS_SUCCESS;
1175 vaStatus = VA_STATUS_ERROR_UNSUPPORTED_PROFILE;
1179 if (obj_context->codec_type == CODEC_ENC)
1180 obj_context->codec_state.enc.current_render_target = render_target; /*This is input new frame*/
1182 obj_context->codec_state.dec.current_render_target = render_target;
1188 i965_render_picture_parameter_buffer(VADriverContextP ctx,
1189 struct object_context *obj_context,
1190 struct object_buffer *obj_buffer)
1192 assert(obj_buffer->buffer_store->bo == NULL);
1193 assert(obj_buffer->buffer_store->buffer);
1194 i965_release_buffer_store(&obj_context->codec_state.dec.pic_param);
1195 i965_reference_buffer_store(&obj_context->codec_state.dec.pic_param,
1196 obj_buffer->buffer_store);
1198 return VA_STATUS_SUCCESS;
1202 i965_render_iq_matrix_buffer(VADriverContextP ctx,
1203 struct object_context *obj_context,
1204 struct object_buffer *obj_buffer)
1206 assert(obj_buffer->buffer_store->bo == NULL);
1207 assert(obj_buffer->buffer_store->buffer);
1208 i965_release_buffer_store(&obj_context->codec_state.dec.iq_matrix);
1209 i965_reference_buffer_store(&obj_context->codec_state.dec.iq_matrix,
1210 obj_buffer->buffer_store);
1212 return VA_STATUS_SUCCESS;
/*
 * Queue a decode VABitPlaneBufferType buffer (VC-1 bit-plane data) on the
 * context.  Same release/reference pattern as the other decode buffers.
 * Always returns VA_STATUS_SUCCESS.
 */
1216 i965_render_bit_plane_buffer(VADriverContextP ctx,
1217 struct object_context *obj_context,
1218 struct object_buffer *obj_buffer)
/* Bit-plane data is user data, never a GEM bo. */
1220 assert(obj_buffer->buffer_store->bo == NULL);
1221 assert(obj_buffer->buffer_store->buffer);
1222 i965_release_buffer_store(&obj_context->codec_state.dec.bit_plane);
1223 i965_reference_buffer_store(&obj_context->codec_state.dec.bit_plane,
1224 obj_buffer->buffer_store);
1226 return VA_STATUS_SUCCESS;
1230 i965_render_slice_parameter_buffer(VADriverContextP ctx,
1231 struct object_context *obj_context,
1232 struct object_buffer *obj_buffer)
1234 assert(obj_buffer->buffer_store->bo == NULL);
1235 assert(obj_buffer->buffer_store->buffer);
1237 if (obj_context->codec_state.dec.num_slice_params == obj_context->codec_state.dec.max_slice_params) {
1238 obj_context->codec_state.dec.slice_params = realloc(obj_context->codec_state.dec.slice_params,
1239 (obj_context->codec_state.dec.max_slice_params + NUM_SLICES) * sizeof(*obj_context->codec_state.dec.slice_params));
1240 memset(obj_context->codec_state.dec.slice_params + obj_context->codec_state.dec.max_slice_params, 0, NUM_SLICES * sizeof(*obj_context->codec_state.dec.slice_params));
1241 obj_context->codec_state.dec.max_slice_params += NUM_SLICES;
1244 i965_release_buffer_store(&obj_context->codec_state.dec.slice_params[obj_context->codec_state.dec.num_slice_params]);
1245 i965_reference_buffer_store(&obj_context->codec_state.dec.slice_params[obj_context->codec_state.dec.num_slice_params],
1246 obj_buffer->buffer_store);
1247 obj_context->codec_state.dec.num_slice_params++;
1249 return VA_STATUS_SUCCESS;
1253 i965_render_slice_data_buffer(VADriverContextP ctx,
1254 struct object_context *obj_context,
1255 struct object_buffer *obj_buffer)
1257 assert(obj_buffer->buffer_store->buffer == NULL);
1258 assert(obj_buffer->buffer_store->bo);
1260 if (obj_context->codec_state.dec.num_slice_datas == obj_context->codec_state.dec.max_slice_datas) {
1261 obj_context->codec_state.dec.slice_datas = realloc(obj_context->codec_state.dec.slice_datas,
1262 (obj_context->codec_state.dec.max_slice_datas + NUM_SLICES) * sizeof(*obj_context->codec_state.dec.slice_datas));
1263 memset(obj_context->codec_state.dec.slice_datas + obj_context->codec_state.dec.max_slice_datas, 0, NUM_SLICES * sizeof(*obj_context->codec_state.dec.slice_datas));
1264 obj_context->codec_state.dec.max_slice_datas += NUM_SLICES;
1267 i965_release_buffer_store(&obj_context->codec_state.dec.slice_datas[obj_context->codec_state.dec.num_slice_datas]);
1268 i965_reference_buffer_store(&obj_context->codec_state.dec.slice_datas[obj_context->codec_state.dec.num_slice_datas],
1269 obj_buffer->buffer_store);
1270 obj_context->codec_state.dec.num_slice_datas++;
1272 return VA_STATUS_SUCCESS;
/*
 * vaRenderPicture for decode contexts: walk the submitted buffer list and
 * queue each buffer on the context's decode state by type.  Nothing is sent
 * to hardware here; submission happens in vaEndPicture.  Unknown buffer
 * types yield VA_STATUS_ERROR_UNSUPPORTED_BUFFERTYPE.
 */
1276 i965_decoder_render_picture(VADriverContextP ctx,
1277 VAContextID context,
1278 VABufferID *buffers,
1281 struct i965_driver_data *i965 = i965_driver_data(ctx);
1282 struct object_context *obj_context = CONTEXT(context);
1286 for (i = 0; i < num_buffers; i++) {
1287 struct object_buffer *obj_buffer = BUFFER(buffers[i]);
/* Dispatch on the VA buffer type to the matching queue-on-context helper. */
1290 switch (obj_buffer->type) {
1291 case VAPictureParameterBufferType:
1292 vaStatus = i965_render_picture_parameter_buffer(ctx, obj_context, obj_buffer);
1295 case VAIQMatrixBufferType:
1296 vaStatus = i965_render_iq_matrix_buffer(ctx, obj_context, obj_buffer);
1299 case VABitPlaneBufferType:
1300 vaStatus = i965_render_bit_plane_buffer(ctx, obj_context, obj_buffer);
1303 case VASliceParameterBufferType:
1304 vaStatus = i965_render_slice_parameter_buffer(ctx, obj_context, obj_buffer);
1307 case VASliceDataBufferType:
1308 vaStatus = i965_render_slice_data_buffer(ctx, obj_context, obj_buffer);
/* default: buffer type this decoder path does not understand. */
1312 vaStatus = VA_STATUS_ERROR_UNSUPPORTED_BUFFERTYPE;
/*
 * Queue an encode VAEncSequenceParameterBufferType buffer on the context.
 * (Name keeps the historical "squence" typo; renaming would break callers.)
 * Always returns VA_STATUS_SUCCESS.
 */
1321 i965_encoder_render_squence_parameter_buffer(VADriverContextP ctx,
1322 struct object_context *obj_context,
1323 struct object_buffer *obj_buffer)
/* Sequence parameters are user data, never a GEM bo. */
1325 assert(obj_buffer->buffer_store->bo == NULL);
1326 assert(obj_buffer->buffer_store->buffer);
1327 i965_release_buffer_store(&obj_context->codec_state.enc.seq_param);
1328 i965_reference_buffer_store(&obj_context->codec_state.enc.seq_param,
1329 obj_buffer->buffer_store);
1331 return VA_STATUS_SUCCESS;
/*
 * Queue an encode VAEncPictureParameterBufferType buffer on the context.
 * Always returns VA_STATUS_SUCCESS.
 */
1336 i965_encoder_render_picture_parameter_buffer(VADriverContextP ctx,
1337 struct object_context *obj_context,
1338 struct object_buffer *obj_buffer)
/* Picture parameters are user data, never a GEM bo. */
1340 assert(obj_buffer->buffer_store->bo == NULL);
1341 assert(obj_buffer->buffer_store->buffer);
1342 i965_release_buffer_store(&obj_context->codec_state.enc.pic_param);
1343 i965_reference_buffer_store(&obj_context->codec_state.enc.pic_param,
1344 obj_buffer->buffer_store);
1346 return VA_STATUS_SUCCESS;
1350 i965_encoder_render_slice_parameter_buffer(VADriverContextP ctx,
1351 struct object_context *obj_context,
1352 struct object_buffer *obj_buffer)
1354 if (obj_context->codec_state.enc.num_slice_params == obj_context->codec_state.enc.max_slice_params) {
1355 obj_context->codec_state.enc.slice_params = realloc(obj_context->codec_state.enc.slice_params,
1356 (obj_context->codec_state.enc.max_slice_params + NUM_SLICES) * sizeof(*obj_context->codec_state.enc.slice_params));
1357 memset(obj_context->codec_state.enc.slice_params + obj_context->codec_state.enc.max_slice_params, 0, NUM_SLICES * sizeof(*obj_context->codec_state.enc.slice_params));
1358 obj_context->codec_state.enc.max_slice_params += NUM_SLICES;
1361 i965_release_buffer_store(&obj_context->codec_state.enc.slice_params[obj_context->codec_state.enc.num_slice_params]);
1362 i965_reference_buffer_store(&obj_context->codec_state.enc.slice_params[obj_context->codec_state.enc.num_slice_params],
1363 obj_buffer->buffer_store);
1364 obj_context->codec_state.enc.num_slice_params++;
1366 return VA_STATUS_SUCCESS;
/*
 * Queue an encode picture-control buffer (arrives as
 * VAPictureParameterBufferType on encode contexts) on the context.
 * Always returns VA_STATUS_SUCCESS.
 */
1370 i965_encoder_render_picture_control_buffer(VADriverContextP ctx,
1371 struct object_context *obj_context,
1372 struct object_buffer *obj_buffer)
/* Picture-control data is user data, never a GEM bo. */
1374 assert(obj_buffer->buffer_store->bo == NULL);
1375 assert(obj_buffer->buffer_store->buffer);
1376 i965_release_buffer_store(&obj_context->codec_state.enc.pic_control);
1377 i965_reference_buffer_store(&obj_context->codec_state.enc.pic_control,
1378 obj_buffer->buffer_store);
1380 return VA_STATUS_SUCCESS;
1384 i965_encoder_render_qmatrix_buffer(VADriverContextP ctx,
1385 struct object_context *obj_context,
1386 struct object_buffer *obj_buffer)
1388 assert(obj_buffer->buffer_store->bo == NULL);
1389 assert(obj_buffer->buffer_store->buffer);
1390 i965_release_buffer_store(&obj_context->codec_state.enc.q_matrix);
1391 i965_reference_buffer_store(&obj_context->codec_state.enc.iq_matrix,
1392 obj_buffer->buffer_store);
1394 return VA_STATUS_SUCCESS;
/*
 * Queue an encode VAIQMatrixBufferType buffer on the context.
 * Always returns VA_STATUS_SUCCESS.
 */
1398 i965_encoder_render_iqmatrix_buffer(VADriverContextP ctx,
1399 struct object_context *obj_context,
1400 struct object_buffer *obj_buffer)
/* IQ matrices are user data, never a GEM bo. */
1402 assert(obj_buffer->buffer_store->bo == NULL);
1403 assert(obj_buffer->buffer_store->buffer);
1404 i965_release_buffer_store(&obj_context->codec_state.enc.iq_matrix);
1405 i965_reference_buffer_store(&obj_context->codec_state.enc.iq_matrix,
1406 obj_buffer->buffer_store);
1408 return VA_STATUS_SUCCESS;
/*
 * vaRenderPicture for encode contexts: walk the submitted buffer list and
 * queue each buffer on the context's encode state by type.  Submission to
 * hardware is deferred to vaEndPicture.  Unknown buffer types yield
 * VA_STATUS_ERROR_UNSUPPORTED_BUFFERTYPE.
 */
1412 i965_encoder_render_picture(VADriverContextP ctx,
1413 VAContextID context,
1414 VABufferID *buffers,
1417 struct i965_driver_data *i965 = i965_driver_data(ctx);
1418 struct object_context *obj_context = CONTEXT(context);
1422 for (i = 0; i < num_buffers; i++) {
1423 struct object_buffer *obj_buffer = BUFFER(buffers[i]);
1426 switch (obj_buffer->type) {
1427 case VAEncSequenceParameterBufferType:
1428 vaStatus = i965_encoder_render_squence_parameter_buffer(ctx, obj_context, obj_buffer);
1431 case VAEncPictureParameterBufferType:
1432 vaStatus = i965_encoder_render_picture_parameter_buffer(ctx, obj_context, obj_buffer);
1435 case VAEncSliceParameterBufferType:
1436 vaStatus = i965_encoder_render_slice_parameter_buffer(ctx, obj_context, obj_buffer);
/* Plain decode-style picture parameters are treated as picture control data on encode. */
1439 case VAPictureParameterBufferType:
1440 vaStatus = i965_encoder_render_picture_control_buffer(ctx, obj_context, obj_buffer);
1443 case VAQMatrixBufferType:
1444 vaStatus = i965_encoder_render_qmatrix_buffer(ctx, obj_context, obj_buffer);
1447 case VAIQMatrixBufferType:
1448 vaStatus = i965_encoder_render_iqmatrix_buffer(ctx, obj_context, obj_buffer);
/* default: buffer type this encoder path does not understand. */
1452 vaStatus = VA_STATUS_ERROR_UNSUPPORTED_BUFFERTYPE;
/*
 * vaRenderPicture entry point: look up the context's config and dispatch
 * the buffer list to the encoder or decoder path based on the configured
 * entrypoint.  Returns the status of whichever path ran.
 */
1461 i965_RenderPicture(VADriverContextP ctx,
1462 VAContextID context,
1463 VABufferID *buffers,
1466 struct i965_driver_data *i965 = i965_driver_data(ctx);
1467 struct object_context *obj_context;
1468 struct object_config *obj_config;
1470 VAStatus vaStatus = VA_STATUS_ERROR_UNKNOWN;
1472 obj_context = CONTEXT(context);
1473 assert(obj_context);
1475 config = obj_context->config_id;
1476 obj_config = CONFIG(config);
/* Encode configs use VAEntrypointEncSlice; everything else decodes. */
1479 if (VAEntrypointEncSlice == obj_config->entrypoint ){
1480 vaStatus = i965_encoder_render_picture(ctx, context, buffers, num_buffers);
1482 vaStatus = i965_decoder_render_picture(ctx, context, buffers, num_buffers);
1489 i965_EndPicture(VADriverContextP ctx, VAContextID context)
1491 struct i965_driver_data *i965 = i965_driver_data(ctx);
1492 struct object_context *obj_context = CONTEXT(context);
1493 struct object_config *obj_config;
1497 assert(obj_context);
1498 config = obj_context->config_id;
1499 obj_config = CONFIG(config);
1502 if (obj_context->codec_type == CODEC_ENC) {
1503 assert(VAEntrypointEncSlice == obj_config->entrypoint);
1505 assert(obj_context->codec_state.enc.pic_param);
1506 assert(obj_context->codec_state.enc.seq_param);
1507 assert(obj_context->codec_state.enc.num_slice_params >= 1);
1509 assert(obj_context->codec_state.dec.pic_param);
1510 assert(obj_context->codec_state.dec.num_slice_params >= 1);
1511 assert(obj_context->codec_state.dec.num_slice_datas >= 1);
1512 assert(obj_context->codec_state.dec.num_slice_params == obj_context->codec_state.dec.num_slice_datas);
1515 assert(obj_context->hw_context->run);
1516 obj_context->hw_context->run(ctx, obj_config->profile, &obj_context->codec_state, obj_context->hw_context);
1518 if (obj_context->codec_type == CODEC_ENC) {
1519 obj_context->codec_state.enc.current_render_target = VA_INVALID_SURFACE;
1520 obj_context->codec_state.enc.num_slice_params = 0;
1521 i965_release_buffer_store(&obj_context->codec_state.enc.pic_param);
1522 i965_release_buffer_store(&obj_context->codec_state.enc.seq_param);
1524 for (i = 0; i < obj_context->codec_state.enc.num_slice_params; i++) {
1525 i965_release_buffer_store(&obj_context->codec_state.enc.slice_params[i]);
1528 obj_context->codec_state.dec.current_render_target = -1;
1529 obj_context->codec_state.dec.num_slice_params = 0;
1530 obj_context->codec_state.dec.num_slice_datas = 0;
1531 i965_release_buffer_store(&obj_context->codec_state.dec.pic_param);
1532 i965_release_buffer_store(&obj_context->codec_state.dec.iq_matrix);
1533 i965_release_buffer_store(&obj_context->codec_state.dec.bit_plane);
1535 for (i = 0; i < obj_context->codec_state.dec.num_slice_params; i++) {
1536 i965_release_buffer_store(&obj_context->codec_state.dec.slice_params[i]);
1537 i965_release_buffer_store(&obj_context->codec_state.dec.slice_datas[i]);
1541 return VA_STATUS_SUCCESS;
/*
 * vaSyncSurface entry point.
 * NOTE(review): no explicit wait is performed here — the surface is only
 * validated.  Presumably GEM serializes access when the bo is next mapped
 * (see i965_QuerySurfaceStatus); confirm this satisfies vaSyncSurface's
 * "block until complete" contract.
 */
1545 i965_SyncSurface(VADriverContextP ctx,
1546 VASurfaceID render_target)
1548 struct i965_driver_data *i965 = i965_driver_data(ctx);
1549 struct object_surface *obj_surface = SURFACE(render_target);
1551 assert(obj_surface);
1553 return VA_STATUS_SUCCESS;
/*
 * vaQuerySurfaceStatus entry point: report the surface's cached status.
 * The map/unmap pair below forces GEM to wait for any GPU work still
 * touching the bo before the status is read.
 */
1557 i965_QuerySurfaceStatus(VADriverContextP ctx,
1558 VASurfaceID render_target,
1559 VASurfaceStatus *status) /* out */
1561 struct i965_driver_data *i965 = i965_driver_data(ctx);
1562 struct object_surface *obj_surface = SURFACE(render_target);
1564 assert(obj_surface);
1566 /* Usually GEM will handle synchronization with the graphics hardware */
/* Mapping blocks until the GPU is done with the bo; unmap immediately after. */
1568 if (obj_surface->bo) {
1569 dri_bo_map(obj_surface->bo, 0);
1570 dri_bo_unmap(obj_surface->bo);
1574 *status = obj_surface->status;
1576 return VA_STATUS_SUCCESS;
1581 * Query display attributes
1582 * The caller must provide a "attr_list" array that can hold at
1583 * least vaMaxNumDisplayAttributes() entries. The actual number of attributes
1584 * returned in "attr_list" is returned in "num_attributes".
/* No display attributes are supported: report zero entries and succeed. */
1587 i965_QueryDisplayAttributes(VADriverContextP ctx,
1588 VADisplayAttribute *attr_list, /* out */
1589 int *num_attributes) /* out */
1592 *num_attributes = 0;
1594 return VA_STATUS_SUCCESS;
1598 * Get display attributes
1599 * This function returns the current attribute values in "attr_list".
1600 * Only attributes returned with VA_DISPLAY_ATTRIB_GETTABLE set in the "flags" field
1601 * from vaQueryDisplayAttributes() can have their values retrieved.
/* Display attributes are not implemented for this driver. */
1604 i965_GetDisplayAttributes(VADriverContextP ctx,
1605 VADisplayAttribute *attr_list, /* in/out */
1609 return VA_STATUS_ERROR_UNIMPLEMENTED;
1613 * Set display attributes
1614 * Only attributes returned with VA_DISPLAY_ATTRIB_SETTABLE set in the "flags" field
1615 * from vaQueryDisplayAttributes() can be set. If the attribute is not settable or
1616 * the value is out of range, the function returns VA_STATUS_ERROR_ATTR_NOT_SUPPORTED
/* Display attributes are not implemented for this driver. */
1619 i965_SetDisplayAttributes(VADriverContextP ctx,
1620 VADisplayAttribute *attr_list,
1624 return VA_STATUS_ERROR_UNIMPLEMENTED;
/* Debug surface-dump hook; not implemented for this driver. */
1628 i965_DbgCopySurfaceToBuffer(VADriverContextP ctx,
1629 VASurfaceID surface,
1630 void **buffer, /* out */
1631 unsigned int *stride) /* out */
1634 return VA_STATUS_ERROR_UNIMPLEMENTED;
/*
 * Driver initialization: bring up the DRM/intel layer, select the codec
 * capability table for the detected GPU generation, then initialize the
 * post-processing and render subsystems, the render mutex, and the batch
 * buffer.  Any sub-init failure maps to VA_STATUS_ERROR_UNKNOWN.
 */
1638 i965_Init(VADriverContextP ctx)
1640 struct i965_driver_data *i965 = i965_driver_data(ctx);
1642 if (intel_driver_init(ctx) == False)
1643 return VA_STATUS_ERROR_UNKNOWN;
/* Pick the per-generation codec capability table. */
1645 if (IS_G4X(i965->intel.device_id))
1646 i965->codec_info = &g4x_hw_codec_info;
1647 else if (IS_IRONLAKE(i965->intel.device_id))
1648 i965->codec_info = &ironlake_hw_codec_info;
1649 else if (IS_GEN6(i965->intel.device_id))
1650 i965->codec_info = &gen6_hw_codec_info;
1651 else if (IS_GEN7(i965->intel.device_id))
/* NOTE(review): GEN7 reuses the GEN6 table — presumably intentional until a
 * dedicated gen7_hw_codec_info exists; confirm. */
1652 i965->codec_info = &gen6_hw_codec_info;
/* Unrecognized device: refuse to load. */
1654 return VA_STATUS_ERROR_UNKNOWN;
1656 if (i965_post_processing_init(ctx) == False)
1657 return VA_STATUS_ERROR_UNKNOWN;
1659 if (i965_render_init(ctx) == False)
1660 return VA_STATUS_ERROR_UNKNOWN;
1662 _i965InitMutex(&i965->render_mutex);
1663 i965->batch = intel_batchbuffer_new(&i965->intel, I915_EXEC_RENDER);
1665 return VA_STATUS_SUCCESS;
/*
 * Walk an object heap, invoking the supplied destructor on every live
 * object, then destroy the heap itself.
 */
1669 i965_destroy_heap(struct object_heap *heap,
1670 void (*func)(struct object_heap *heap, struct object_base *object))
1672 struct object_base *object;
1673 object_heap_iterator iter;
1675 object = object_heap_first(heap, &iter);
1681 object = object_heap_next(heap, &iter);
1684 object_heap_destroy(heap);
1689 i965_DestroyImage(VADriverContextP ctx, VAImageID image);
/*
 * vaCreateImage entry point: allocate an image object, lay out the planes
 * for the requested fourcc, back the pixel data with a VAImageBufferType
 * buffer, and (for paletted formats) allocate a host-side palette.
 * On failure after the object exists, cleanup goes through
 * i965_DestroyImage().
 */
1692 i965_CreateImage(VADriverContextP ctx,
1693 VAImageFormat *format,
1696 VAImage *out_image) /* out */
1698 struct i965_driver_data *i965 = i965_driver_data(ctx);
1699 struct object_image *obj_image;
1700 VAStatus va_status = VA_STATUS_ERROR_OPERATION_FAILED;
1702 unsigned int width2, height2, size2, size;
/* Mark the out-image invalid until everything below has succeeded. */
1704 out_image->image_id = VA_INVALID_ID;
1705 out_image->buf = VA_INVALID_ID;
1707 image_id = NEW_IMAGE_ID();
1708 if (image_id == VA_INVALID_ID)
1709 return VA_STATUS_ERROR_ALLOCATION_FAILED;
1711 obj_image = IMAGE(image_id);
1713 return VA_STATUS_ERROR_ALLOCATION_FAILED;
1714 obj_image->bo = NULL;
1715 obj_image->palette = NULL;
1716 obj_image->derived_surface = VA_INVALID_ID;
1718 VAImage * const image = &obj_image->image;
1719 image->image_id = image_id;
1720 image->buf = VA_INVALID_ID;
/* Luma plane size, and half-resolution chroma dimensions (rounded up). */
1722 size = width * height;
1723 width2 = (width + 1) / 2;
1724 height2 = (height + 1) / 2;
1725 size2 = width2 * height2;
1727 image->num_palette_entries = 0;
1728 image->entry_bytes = 0;
1729 memset(image->component_order, 0, sizeof(image->component_order));
/* Per-format plane layout. */
1731 switch (format->fourcc) {
1732 case VA_FOURCC('I','A','4','4'):
1733 case VA_FOURCC('A','I','4','4'):
/* Paletted 8bpp: single plane plus a 16-entry RGB palette. */
1734 image->num_planes = 1;
1735 image->pitches[0] = width;
1736 image->offsets[0] = 0;
1737 image->data_size = image->offsets[0] + image->pitches[0] * height;
1738 image->num_palette_entries = 16;
1739 image->entry_bytes = 3;
1740 image->component_order[0] = 'R';
1741 image->component_order[1] = 'G';
1742 image->component_order[2] = 'B';
1744 case VA_FOURCC('A','R','G','B'):
1745 case VA_FOURCC('A','B','G','R'):
1746 case VA_FOURCC('B','G','R','A'):
1747 case VA_FOURCC('R','G','B','A'):
/* Packed 32bpp RGB: single plane, 4 bytes per pixel. */
1748 image->num_planes = 1;
1749 image->pitches[0] = width * 4;
1750 image->offsets[0] = 0;
1751 image->data_size = image->offsets[0] + image->pitches[0] * height;
1753 case VA_FOURCC('Y','V','1','2'):
/* Planar 4:2:0 with V before U (offsets[1]=V? note plane 1 sits after
 * both luma and one chroma block — V plane at offset `size`). */
1754 image->num_planes = 3;
1755 image->pitches[0] = width;
1756 image->offsets[0] = 0;
1757 image->pitches[1] = width2;
1758 image->offsets[1] = size + size2;
1759 image->pitches[2] = width2;
1760 image->offsets[2] = size;
1761 image->data_size = size + 2 * size2;
1763 case VA_FOURCC('I','4','2','0'):
/* Planar 4:2:0, U then V. */
1764 image->num_planes = 3;
1765 image->pitches[0] = width;
1766 image->offsets[0] = 0;
1767 image->pitches[1] = width2;
1768 image->offsets[1] = size;
1769 image->pitches[2] = width2;
1770 image->offsets[2] = size + size2;
1771 image->data_size = size + 2 * size2;
1773 case VA_FOURCC('N','V','1','2'):
/* Semi-planar 4:2:0: luma plane plus interleaved UV plane. */
1774 image->num_planes = 2;
1775 image->pitches[0] = width;
1776 image->offsets[0] = 0;
1777 image->pitches[1] = width;
1778 image->offsets[1] = size;
1779 image->data_size = size + 2 * size2;
/* Back the pixels with a driver buffer object. */
1785 va_status = i965_CreateBuffer(ctx, 0, VAImageBufferType,
1786 image->data_size, 1, NULL, &image->buf);
1787 if (va_status != VA_STATUS_SUCCESS)
1790 obj_image->bo = BUFFER(image->buf)->buffer_store->bo;
1791 dri_bo_reference(obj_image->bo);
1793 if (image->num_palette_entries > 0 && image->entry_bytes > 0) {
/* NOTE(review): sizeof(obj_image->palette) is the POINTER size, not the
 * element size — presumably sizeof(*obj_image->palette) was intended.
 * Harmlessly over-allocates on LP64; confirm and fix file-wide. */
1794 obj_image->palette = malloc(image->num_palette_entries * sizeof(obj_image->palette));
1795 if (!obj_image->palette)
1799 image->image_id = image_id;
1800 image->format = *format;
1801 image->width = width;
1802 image->height = height;
1804 *out_image = *image;
1805 return VA_STATUS_SUCCESS;
/* Error path: tear down the partially constructed image. */
1808 i965_DestroyImage(ctx, image_id);
/*
 * vaDeriveImage entry point: expose a surface's backing storage directly as
 * a VAImage (zero-copy).  The image format is chosen to match how the
 * render path lays out the surface (NV12 when UV is interleaved, otherwise
 * an I420/YV12-style planar layout).  Allocates the surface bo on demand if
 * the surface has not been rendered to yet.
 */
1812 VAStatus i965_DeriveImage(VADriverContextP ctx,
1813 VASurfaceID surface,
1814 VAImage *out_image) /* out */
1816 struct i965_driver_data *i965 = i965_driver_data(ctx);
1817 struct i965_render_state *render_state = &i965->render_state;
1818 struct object_image *obj_image;
1819 struct object_surface *obj_surface;
1821 unsigned int w_pitch, h_pitch;
1822 unsigned int data_size;
1825 out_image->image_id = VA_INVALID_ID;
1826 obj_surface = SURFACE(surface);
1829 return VA_STATUS_ERROR_INVALID_SURFACE;
/* Padded (allocated) dimensions; visible size uses orig_width/orig_height. */
1831 w_pitch = obj_surface->width;
1832 h_pitch = obj_surface->height;
/* 4:2:0 total: luma + two half-resolution chroma planes. */
1833 data_size = obj_surface->orig_width * obj_surface->orig_height +
1834 2 * (((obj_surface->orig_width + 1) / 2) * ((obj_surface->orig_height + 1) / 2));
1836 image_id = NEW_IMAGE_ID();
1838 if (image_id == VA_INVALID_ID)
1839 return VA_STATUS_ERROR_ALLOCATION_FAILED;
1841 obj_image = IMAGE(image_id);
1844 return VA_STATUS_ERROR_ALLOCATION_FAILED;
1846 obj_image->bo = NULL;
1847 obj_image->palette = NULL;
1848 obj_image->derived_surface = VA_INVALID_ID;
1850 VAImage * const image = &obj_image->image;
1852 memset(image, 0, sizeof(*image));
1853 image->image_id = image_id;
1854 image->buf = VA_INVALID_ID;
1855 image->num_palette_entries = 0;
1856 image->entry_bytes = 0;
1857 image->width = obj_surface->orig_width;
1858 image->height = obj_surface->orig_height;
1859 image->data_size = data_size;
/* Render state not initialized yet: default to a planar YV12 view. */
1861 if (!render_state->inited) {
1862 image->format.fourcc = VA_FOURCC('Y','V','1','2');
1863 image->format.byte_order = VA_LSB_FIRST;
1864 image->format.bits_per_pixel = 12;
1865 image->num_planes = 3;
1866 image->pitches[0] = w_pitch;
1867 image->offsets[0] = 0;
1868 image->pitches[1] = w_pitch / 2;
1869 image->offsets[1] = w_pitch * h_pitch;
1870 image->pitches[2] = w_pitch / 2;
/* NOTE(review): these offsets match an I420 plane order even though the
 * fourcc says YV12 (U/V swapped) — confirm against the consumers. */
1871 image->offsets[2] = w_pitch * h_pitch + (w_pitch / 2) * (h_pitch / 2);
/* Interleaved UV means the surface is NV12 in memory. */
1873 if (render_state->interleaved_uv) {
1874 image->format.fourcc = VA_FOURCC('N','V','1','2');
1875 image->format.byte_order = VA_LSB_FIRST;
1876 image->format.bits_per_pixel = 12;
1877 image->num_planes = 2;
1878 image->pitches[0] = w_pitch;
1879 image->offsets[0] = 0;
1880 image->pitches[1] = w_pitch;
1881 image->offsets[1] = w_pitch * h_pitch;
/* Otherwise the surface is planar I420. */
1883 image->format.fourcc = VA_FOURCC('I','4','2','0');
1884 image->format.byte_order = VA_LSB_FIRST;
1885 image->format.bits_per_pixel = 12;
1886 image->num_planes = 3;
1887 image->pitches[0] = w_pitch;
1888 image->offsets[0] = 0;
1889 image->pitches[1] = w_pitch / 2;
1890 image->offsets[1] = w_pitch * h_pitch;
1891 image->pitches[2] = w_pitch / 2;
1892 image->offsets[2] = w_pitch * h_pitch + (w_pitch / 2) * (h_pitch / 2);
/* Surface never rendered: allocate its bo now so it can be shared. */
1896 if (obj_surface->bo == NULL) {
1897 if (HAS_TILED_SURFACE(i965)) {
1899 uint32_t tiling_mode = I915_TILING_Y;
1900 unsigned long pitch;
1902 obj_surface->bo = drm_intel_bo_alloc_tiled(i965->intel.bufmgr,
/* 4:2:0: height plus half again for chroma. */
1905 obj_surface->height + obj_surface->height / 2,
1910 assert(obj_surface->bo);
1911 assert(tiling_mode == I915_TILING_Y);
1912 assert(pitch == obj_surface->width);
1914 obj_surface->bo = dri_bo_alloc(i965->intel.bufmgr,
1921 assert(obj_surface->bo);
/* Wrap the surface bo in a VA buffer without copying. */
1922 va_status = i965_create_buffer_internal(ctx, 0, VAImageBufferType,
1923 obj_surface->size, 1, NULL, obj_surface->bo, &image->buf);
1924 if (va_status != VA_STATUS_SUCCESS)
1927 obj_image->bo = BUFFER(image->buf)->buffer_store->bo;
1928 dri_bo_reference(obj_image->bo);
1930 if (image->num_palette_entries > 0 && image->entry_bytes > 0) {
/* NOTE(review): sizeof(obj_image->palette) is pointer size, presumably
 * sizeof(*obj_image->palette) intended — see i965_CreateImage. */
1931 obj_image->palette = malloc(image->num_palette_entries * sizeof(obj_image->palette));
1932 if (!obj_image->palette) {
1933 va_status = VA_STATUS_ERROR_ALLOCATION_FAILED;
1938 *out_image = *image;
/* Remember the linkage so DestroyImage can clear SURFACE_DERIVED. */
1939 obj_surface->flags |= SURFACE_DERIVED;
1940 obj_image->derived_surface = surface;
1942 return VA_STATUS_SUCCESS;
/* Error path: tear down the partially constructed image. */
1945 i965_DestroyImage(ctx, image_id);
/* Heap destructor callback for image objects: return the slot to the heap. */
1950 i965_destroy_image(struct object_heap *heap, struct object_base *obj)
1952 object_heap_free(heap, obj);
/*
 * vaDestroyImage entry point: release the image's bo reference, its backing
 * VA buffer and palette, detach it from any derived surface, and free the
 * image object.  Destroying an unknown image id succeeds silently.
 */
1957 i965_DestroyImage(VADriverContextP ctx, VAImageID image)
1959 struct i965_driver_data *i965 = i965_driver_data(ctx);
1960 struct object_image *obj_image = IMAGE(image);
1961 struct object_surface *obj_surface;
/* Unknown id: nothing to do. */
1964 return VA_STATUS_SUCCESS;
1966 dri_bo_unreference(obj_image->bo);
1967 obj_image->bo = NULL;
1969 if (obj_image->image.buf != VA_INVALID_ID) {
1970 i965_DestroyBuffer(ctx, obj_image->image.buf);
1971 obj_image->image.buf = VA_INVALID_ID;
1974 if (obj_image->palette) {
1975 free(obj_image->palette);
1976 obj_image->palette = NULL;
/* If this image was derived from a surface, clear the derived flag. */
1979 obj_surface = SURFACE(obj_image->derived_surface);
1982 obj_surface->flags &= ~SURFACE_DERIVED;
1985 i965_destroy_image(&i965->image_heap, (struct object_base *)obj_image);
1987 return VA_STATUS_SUCCESS;
1991 * pointer to an array holding the palette data. The size of the array is
1992 * num_palette_entries * entry_bytes in size. The order of the components
1993 * in the palette is described by the component_order in VASubpicture struct
/*
 * vaSetImagePalette entry point: pack the caller's 3-bytes-per-entry RGB
 * palette into the image's host palette as 0x00RRGGBB words.
 * Fails if the image does not exist or was created without a palette.
 */
1996 i965_SetImagePalette(VADriverContextP ctx,
1998 unsigned char *palette)
2000 struct i965_driver_data *i965 = i965_driver_data(ctx);
2003 struct object_image *obj_image = IMAGE(image);
/* Unknown image id. */
2005 return VA_STATUS_ERROR_INVALID_IMAGE;
2007 if (!obj_image->palette)
2008 return VA_STATUS_ERROR_ALLOCATION_FAILED; /* XXX: unpaletted/error */
/* Input layout is R,G,B triplets (entry_bytes == 3; see i965_CreateImage). */
2010 for (i = 0; i < obj_image->image.num_palette_entries; i++)
2011 obj_image->palette[i] = (((unsigned int)palette[3*i + 0] << 16) |
2012 ((unsigned int)palette[3*i + 1] << 8) |
2013 (unsigned int)palette[3*i + 2]);
2014 return VA_STATUS_SUCCESS;
/*
 * Copy a `len` x `height` pixel rectangle row by row between two buffers
 * with independent strides (dst_stride / src_stride may differ from len).
 */
2018 memcpy_pic(uint8_t *dst, unsigned int dst_stride,
2019 const uint8_t *src, unsigned int src_stride,
2020 unsigned int len, unsigned int height)
2024 for (i = 0; i < height; i++) {
2025 memcpy(dst, src, len);
/*
 * Copy the `rect` region of a planar I420 surface bo into an I420 or YV12
 * VAImage.  The U/V destination plane indices are swapped for YV12 so the
 * same copy loop serves both fourccs.  Tiled bos are mapped through the
 * GTT so the CPU sees a linear view.
 */
2032 get_image_i420(struct object_image *obj_image, uint8_t *image_data,
2033 struct object_surface *obj_surface,
2034 const VARectangle *rect)
2036 uint8_t *dst[3], *src[3];
/* For YV12 the V plane precedes U in the image; pick indices accordingly. */
2038 const int U = obj_image->image.format.fourcc == VA_FOURCC_YV12 ? 2 : 1;
2039 const int V = obj_image->image.format.fourcc == VA_FOURCC_YV12 ? 1 : 2;
2040 unsigned int tiling, swizzle;
2042 if (!obj_surface->bo)
2045 dri_bo_get_tiling(obj_surface->bo, &tiling, &swizzle);
/* GTT mapping detiles Y-tiled surfaces for CPU access. */
2047 if (tiling != I915_TILING_NONE)
2048 drm_intel_gem_bo_map_gtt(obj_surface->bo);
2050 dri_bo_map(obj_surface->bo, 0);
2052 if (!obj_surface->bo->virtual)
2055 /* Dest VA image has either I420 or YV12 format.
2056 Source VA surface alway has I420 format */
2057 dst[Y] = image_data + obj_image->image.offsets[Y];
2058 src[0] = (uint8_t *)obj_surface->bo->virtual;
2059 dst[U] = image_data + obj_image->image.offsets[U];
2060 src[1] = src[0] + obj_surface->width * obj_surface->height;
2061 dst[V] = image_data + obj_image->image.offsets[V];
2062 src[2] = src[1] + (obj_surface->width / 2) * (obj_surface->height / 2);
/* Luma rectangle. */
2065 dst[Y] += rect->y * obj_image->image.pitches[Y] + rect->x;
2066 src[0] += rect->y * obj_surface->width + rect->x;
2067 memcpy_pic(dst[Y], obj_image->image.pitches[Y],
2068 src[0], obj_surface->width,
2069 rect->width, rect->height);
/* Chroma rectangles at half resolution in each dimension. */
2072 dst[U] += (rect->y / 2) * obj_image->image.pitches[U] + rect->x / 2;
2073 src[1] += (rect->y / 2) * obj_surface->width / 2 + rect->x / 2;
2074 memcpy_pic(dst[U], obj_image->image.pitches[U],
2075 src[1], obj_surface->width / 2,
2076 rect->width / 2, rect->height / 2);
2079 dst[V] += (rect->y / 2) * obj_image->image.pitches[V] + rect->x / 2;
2080 src[2] += (rect->y / 2) * obj_surface->width / 2 + rect->x / 2;
2081 memcpy_pic(dst[V], obj_image->image.pitches[V],
2082 src[2], obj_surface->width / 2,
2083 rect->width / 2, rect->height / 2);
2085 if (tiling != I915_TILING_NONE)
2086 drm_intel_gem_bo_unmap_gtt(obj_surface->bo);
2088 dri_bo_unmap(obj_surface->bo);
/*
 * Copy the `rect` region of an NV12 surface bo into an NV12 VAImage
 * (luma plane plus interleaved UV plane).  Tiled bos are mapped through
 * the GTT so the CPU sees a linear view.
 */
2092 get_image_nv12(struct object_image *obj_image, uint8_t *image_data,
2093 struct object_surface *obj_surface,
2094 const VARectangle *rect)
2096 uint8_t *dst[2], *src[2];
2097 unsigned int tiling, swizzle;
2099 if (!obj_surface->bo)
2102 dri_bo_get_tiling(obj_surface->bo, &tiling, &swizzle);
/* GTT mapping detiles Y-tiled surfaces for CPU access. */
2104 if (tiling != I915_TILING_NONE)
2105 drm_intel_gem_bo_map_gtt(obj_surface->bo);
2107 dri_bo_map(obj_surface->bo, 0);
2109 if (!obj_surface->bo->virtual)
2112 /* Both dest VA image and source surface have NV12 format */
2113 dst[0] = image_data + obj_image->image.offsets[0];
2114 src[0] = (uint8_t *)obj_surface->bo->virtual;
2115 dst[1] = image_data + obj_image->image.offsets[1];
2116 src[1] = src[0] + obj_surface->width * obj_surface->height;
/* Luma rectangle. */
2119 dst[0] += rect->y * obj_image->image.pitches[0] + rect->x;
2120 src[0] += rect->y * obj_surface->width + rect->x;
2121 memcpy_pic(dst[0], obj_image->image.pitches[0],
2122 src[0], obj_surface->width,
2123 rect->width, rect->height);
/* UV plane: half height, full-width rows; x rounded down to a UV pair. */
2126 dst[1] += (rect->y / 2) * obj_image->image.pitches[1] + (rect->x & -2);
2127 src[1] += (rect->y / 2) * obj_surface->width + (rect->x & -2);
2128 memcpy_pic(dst[1], obj_image->image.pitches[1],
2129 src[1], obj_surface->width,
2130 rect->width, rect->height / 2);
2132 if (tiling != I915_TILING_NONE)
2133 drm_intel_gem_bo_unmap_gtt(obj_surface->bo);
2135 dri_bo_unmap(obj_surface->bo);
/*
 * vaGetImage entry point: validate the surface/image pair and the requested
 * rectangle, map the image buffer, and copy the region out of the surface
 * in whichever layout the render state says the surface uses (planar vs
 * interleaved UV).  A format mismatch with the surface's native layout
 * fails with VA_STATUS_ERROR_OPERATION_FAILED.
 */
2139 i965_GetImage(VADriverContextP ctx,
2140 VASurfaceID surface,
2141 int x, /* coordinates of the upper left source pixel */
2143 unsigned int width, /* width and height of the region */
2144 unsigned int height,
2147 struct i965_driver_data *i965 = i965_driver_data(ctx);
2148 struct i965_render_state *render_state = &i965->render_state;
2150 struct object_surface *obj_surface = SURFACE(surface);
2152 return VA_STATUS_ERROR_INVALID_SURFACE;
2154 struct object_image *obj_image = IMAGE(image);
2156 return VA_STATUS_ERROR_INVALID_IMAGE;
2159 return VA_STATUS_ERROR_INVALID_PARAMETER;
/* Rectangle must fit inside both the surface and the image. */
2160 if (x + width > obj_surface->orig_width ||
2161 y + height > obj_surface->orig_height)
2162 return VA_STATUS_ERROR_INVALID_PARAMETER;
2163 if (x + width > obj_image->image.width ||
2164 y + height > obj_image->image.height)
2165 return VA_STATUS_ERROR_INVALID_PARAMETER;
2168 void *image_data = NULL;
2170 va_status = i965_MapBuffer(ctx, obj_image->image.buf, &image_data);
2171 if (va_status != VA_STATUS_SUCCESS)
2178 rect.height = height;
2180 switch (obj_image->image.format.fourcc) {
2181 case VA_FOURCC('Y','V','1','2'):
2182 case VA_FOURCC('I','4','2','0'):
2183 /* I420 is native format for MPEG-2 decoded surfaces */
2184 if (render_state->interleaved_uv)
2185 goto operation_failed;
2186 get_image_i420(obj_image, image_data, obj_surface, &rect);
2188 case VA_FOURCC('N','V','1','2'):
2189 /* NV12 is native format for H.264 decoded surfaces */
2190 if (!render_state->interleaved_uv)
2191 goto operation_failed;
2192 get_image_nv12(obj_image, image_data, obj_surface, &rect);
/* default / operation_failed: unsupported image format for this surface. */
2196 va_status = VA_STATUS_ERROR_OPERATION_FAILED;
2200 i965_UnmapBuffer(ctx, obj_image->image.buf);
/*
 * vaPutSurface entry point (DRI2 only): blit the surface (and any attached
 * subpicture) into the drawable's DRI2 rendering buffer with optional
 * scaling/deinterlacing, then swap.  The cached destination region is
 * re-created whenever the drawable's DRI2 buffer name changes.
 */
2205 i965_PutSurface(VADriverContextP ctx,
2206 VASurfaceID surface,
2207 void *draw, /* X Drawable */
2210 unsigned short srcw,
2211 unsigned short srch,
2214 unsigned short destw,
2215 unsigned short desth,
2216 VARectangle *cliprects, /* client supplied clip list */
2217 unsigned int number_cliprects, /* number of clip rects in the clip list */
2218 unsigned int flags) /* de-interlacing flags */
2220 struct i965_driver_data *i965 = i965_driver_data(ctx);
2221 struct dri_state *dri_state = (struct dri_state *)ctx->dri_state;
2222 struct i965_render_state *render_state = &i965->render_state;
2223 struct dri_drawable *dri_drawable;
2224 union dri_buffer *buffer;
2225 struct intel_region *dest_region;
2226 struct object_surface *obj_surface;
2229 Bool new_region = False;
2231 /* Currently don't support DRI1 */
2232 if (dri_state->driConnectedFlag != VA_DRI2)
2233 return VA_STATUS_ERROR_UNKNOWN;
2235 /* Some broken sources such as H.264 conformance case FM2_SVA_C
/* Missing surfaces are tolerated (broken streams); succeed silently. */
2238 obj_surface = SURFACE(surface);
2239 if (!obj_surface || !obj_surface->bo)
2240 return VA_STATUS_SUCCESS;
2242 _i965LockMutex(&i965->render_mutex);
2244 dri_drawable = dri_get_drawable(ctx, (Drawable)draw);
2245 assert(dri_drawable);
2247 buffer = dri_get_rendering_buffer(ctx, dri_drawable);
2250 dest_region = render_state->draw_region;
/* Invalidate the cached region if the DRI2 buffer changed underneath us. */
2253 assert(dest_region->bo);
2254 dri_bo_flink(dest_region->bo, &name);
2256 if (buffer->dri2.name != name) {
2258 dri_bo_unreference(dest_region->bo);
2261 dest_region = (struct intel_region *)calloc(1, sizeof(*dest_region));
2262 assert(dest_region);
2263 render_state->draw_region = dest_region;
/* (Re)describe the destination from the drawable + DRI2 buffer. */
2268 dest_region->x = dri_drawable->x;
2269 dest_region->y = dri_drawable->y;
2270 dest_region->width = dri_drawable->width;
2271 dest_region->height = dri_drawable->height;
2272 dest_region->cpp = buffer->dri2.cpp;
2273 dest_region->pitch = buffer->dri2.pitch;
2275 dest_region->bo = intel_bo_gem_create_from_name(i965->intel.bufmgr, "rendering buffer", buffer->dri2.name);
2276 assert(dest_region->bo);
2278 ret = dri_bo_get_tiling(dest_region->bo, &(dest_region->tiling), &(dest_region->swizzle));
/* Translate VA flags into post-processing flags. */
2282 if ((flags & VA_FILTER_SCALING_MASK) == VA_FILTER_SCALING_NL_ANAMORPHIC)
2283 pp_flag |= I965_PP_FLAG_AVS;
2285 if (flags & (VA_BOTTOM_FIELD | VA_TOP_FIELD))
2286 pp_flag |= I965_PP_FLAG_DEINTERLACING;
2288 intel_render_put_surface(ctx, surface,
2289 srcx, srcy, srcw, srch,
2290 destx, desty, destw, desth,
2293 if(obj_surface->subpic != VA_INVALID_ID) {
2294 intel_render_put_subpicture(ctx, surface,
2295 srcx, srcy, srcw, srch,
2296 destx, desty, destw, desth);
2299 dri_swap_buffer(ctx, dri_drawable);
2300 obj_surface->flags |= SURFACE_DISPLAYED;
/* Displayed and no longer referenced by the decoder: release the bo early. */
2302 if ((obj_surface->flags & SURFACE_ALL_MASK) == SURFACE_DISPLAYED) {
2303 dri_bo_unreference(obj_surface->bo);
2304 obj_surface->bo = NULL;
2305 obj_surface->flags &= ~SURFACE_REF_DIS_MASK;
2307 if (obj_surface->free_private_data)
2308 obj_surface->free_private_data(&obj_surface->private_data);
2311 _i965UnlockMutex(&i965->render_mutex);
2313 return VA_STATUS_SUCCESS;
/*
 * Driver teardown: release the batch buffer and render mutex, shut down the
 * render, post-processing, and intel driver layers, destroy every object
 * heap (destructors free per-object resources), then free the per-driver
 * data itself.
 */
2317 i965_Terminate(VADriverContextP ctx)
2319 struct i965_driver_data *i965 = i965_driver_data(ctx);
2322 intel_batchbuffer_free(i965->batch);
2324 _i965DestroyMutex(&i965->render_mutex);
2326 if (i965_render_terminate(ctx) == False)
2327 return VA_STATUS_ERROR_UNKNOWN;
2329 if (i965_post_processing_terminate(ctx) == False)
2330 return VA_STATUS_ERROR_UNKNOWN;
2332 if (intel_driver_terminate(ctx) == False)
2333 return VA_STATUS_ERROR_UNKNOWN;
/* Heaps are destroyed with their type-specific object destructors. */
2335 i965_destroy_heap(&i965->buffer_heap, i965_destroy_buffer);
2336 i965_destroy_heap(&i965->image_heap, i965_destroy_image);
2337 i965_destroy_heap(&i965->subpic_heap, i965_destroy_subpic);
2338 i965_destroy_heap(&i965->surface_heap, i965_destroy_surface);
2339 i965_destroy_heap(&i965->context_heap, i965_destroy_context);
2340 i965_destroy_heap(&i965->config_heap, i965_destroy_config);
2342 free(ctx->pDriverData);
2343 ctx->pDriverData = NULL;
2345 return VA_STATUS_SUCCESS;
2350 VADriverContextP ctx, /* in */
2351 VABufferID buf_id, /* in */
2352 VABufferType *type, /* out */
2353 unsigned int *size, /* out */
2354 unsigned int *num_elements /* out */
2357 struct i965_driver_data *i965 = NULL;
2358 struct object_buffer *obj_buffer = NULL;
2360 i965 = i965_driver_data(ctx);
2361 obj_buffer = BUFFER(buf_id);
2363 *type = obj_buffer->type;
2364 *size = obj_buffer->size_element;
2365 *num_elements = obj_buffer->num_elements;
2367 return VA_STATUS_SUCCESS;
/*
 * Parameter list and body of i965_LockSurface() (vaLockSurface() entry
 * point).  The "VAStatus i965_LockSurface(" header line, the tmpImage
 * declaration, closing braces and the goto-style error exits are elided
 * from this chunk.
 *
 * Gives the caller direct CPU access to a surface: derives a VAImage
 * aliasing the surface, maps that image's buffer, and returns the
 * mapped layout (fourcc, plane strides/offsets, buffer name/pointer)
 * through the out-parameters.
 */
    VADriverContextP ctx, /* in */
    VASurfaceID surface, /* in */
    unsigned int *fourcc, /* out */
    unsigned int *luma_stride, /* out */
    unsigned int *chroma_u_stride, /* out */
    unsigned int *chroma_v_stride, /* out */
    unsigned int *luma_offset, /* out */
    unsigned int *chroma_u_offset, /* out */
    unsigned int *chroma_v_offset, /* out */
    unsigned int *buffer_name, /* out */
    void **buffer /* out */

    VAStatus vaStatus = VA_STATUS_SUCCESS;
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    struct object_surface *obj_surface = NULL;

    /* All out-parameters are mandatory (asserts for fourcc/buffer appear
     * to be on lines elided from this chunk — confirm). */
    assert(luma_stride);
    assert(chroma_u_stride);
    assert(chroma_v_stride);
    assert(luma_offset);
    assert(chroma_u_offset);
    assert(chroma_v_offset);
    assert(buffer_name);

    /* Presumably marks "no image derived yet" for the error-cleanup
     * path — TODO confirm against the elided cleanup code. */
    tmpImage.image_id = VA_INVALID_ID;

    obj_surface = SURFACE(surface);
    if (obj_surface == NULL) {
        // Surface is absent.
        vaStatus = VA_STATUS_ERROR_INVALID_PARAMETER;

    // Lock functionality is absent now.
    if (obj_surface->locked_image_id != VA_INVALID_ID) {
        // Surface is locked already.
        vaStatus = VA_STATUS_ERROR_INVALID_PARAMETER;

    /* Derive a VAImage aliasing the surface's buffer
     * (argument lines of this call are elided from this chunk). */
    vaStatus = i965_DeriveImage(
    if (vaStatus != VA_STATUS_SUCCESS) {

    /* Remember the derived image so i965_UnlockSurface() can undo this. */
    obj_surface->locked_image_id = tmpImage.image_id;

    /* Map the derived image's backing buffer into CPU address space
     * (argument lines elided from this chunk). */
    vaStatus = i965_MapBuffer(
    if (vaStatus != VA_STATUS_SUCCESS) {

    /* Hand the mapped image layout back to the caller. */
    *fourcc = tmpImage.format.fourcc;
    *luma_offset = tmpImage.offsets[0];
    *luma_stride = tmpImage.pitches[0];
    *chroma_u_offset = tmpImage.offsets[1];
    *chroma_u_stride = tmpImage.pitches[1];
    *chroma_v_offset = tmpImage.offsets[2];
    *chroma_v_stride = tmpImage.pitches[2];
    *buffer_name = tmpImage.buf;

    /* Error-cleanup label and body are elided from this chunk. */
    if (vaStatus != VA_STATUS_SUCCESS) {
/*
 * Parameter list and body of i965_UnlockSurface() (vaUnlockSurface()
 * entry point).  The "VAStatus i965_UnlockSurface(" header line,
 * closing braces and the goto-style error exits are elided from this
 * chunk.
 *
 * Reverses i965_LockSurface(): unmaps and destroys the VAImage that
 * was derived when the surface was locked.
 */
    VADriverContextP ctx, /* in */
    VASurfaceID surface /* in */

    VAStatus vaStatus = VA_STATUS_SUCCESS;
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    struct object_image *locked_img = NULL;
    struct object_surface *obj_surface = NULL;

    obj_surface = SURFACE(surface);

    if (obj_surface == NULL) {
        vaStatus = VA_STATUS_ERROR_INVALID_PARAMETER; // Surface is absent

    if (obj_surface->locked_image_id == VA_INVALID_ID) {
        vaStatus = VA_STATUS_ERROR_INVALID_PARAMETER; // Surface is not locked

    locked_img = IMAGE(obj_surface->locked_image_id);
    if (locked_img == NULL || (locked_img->image.image_id == VA_INVALID_ID)) {
        // Work image was deallocated before i965_UnlockSurface()
        vaStatus = VA_STATUS_ERROR_INVALID_PARAMETER;

    /* Undo the vaMapBuffer() performed at lock time
     * (first argument line of this call is elided from this chunk). */
    vaStatus = i965_UnmapBuffer(
        locked_img->image.buf);
    if (vaStatus != VA_STATUS_SUCCESS) {

    /* Destroy the derived image itself
     * (first argument line of this call is elided from this chunk). */
    vaStatus = i965_DestroyImage(
        locked_img->image.image_id);
    if (vaStatus != VA_STATUS_SUCCESS) {

    /* Mark the work image slot invalid; clearing
     * obj_surface->locked_image_id presumably happens on elided lines
     * — TODO confirm. */
    locked_img->image.image_id = VA_INVALID_ID;
2501 VA_DRIVER_INIT_FUNC( VADriverContextP ctx )
2503 struct VADriverVTable * const vtable = ctx->vtable;
2504 struct i965_driver_data *i965;
2507 ctx->version_major = VA_MAJOR_VERSION;
2508 ctx->version_minor = VA_MINOR_VERSION;
2509 ctx->max_profiles = I965_MAX_PROFILES;
2510 ctx->max_entrypoints = I965_MAX_ENTRYPOINTS;
2511 ctx->max_attributes = I965_MAX_CONFIG_ATTRIBUTES;
2512 ctx->max_image_formats = I965_MAX_IMAGE_FORMATS;
2513 ctx->max_subpic_formats = I965_MAX_SUBPIC_FORMATS;
2514 ctx->max_display_attributes = I965_MAX_DISPLAY_ATTRIBUTES;
2515 ctx->str_vendor = I965_STR_VENDOR;
2517 vtable->vaTerminate = i965_Terminate;
2518 vtable->vaQueryConfigEntrypoints = i965_QueryConfigEntrypoints;
2519 vtable->vaQueryConfigProfiles = i965_QueryConfigProfiles;
2520 vtable->vaQueryConfigEntrypoints = i965_QueryConfigEntrypoints;
2521 vtable->vaQueryConfigAttributes = i965_QueryConfigAttributes;
2522 vtable->vaCreateConfig = i965_CreateConfig;
2523 vtable->vaDestroyConfig = i965_DestroyConfig;
2524 vtable->vaGetConfigAttributes = i965_GetConfigAttributes;
2525 vtable->vaCreateSurfaces = i965_CreateSurfaces;
2526 vtable->vaDestroySurfaces = i965_DestroySurfaces;
2527 vtable->vaCreateContext = i965_CreateContext;
2528 vtable->vaDestroyContext = i965_DestroyContext;
2529 vtable->vaCreateBuffer = i965_CreateBuffer;
2530 vtable->vaBufferSetNumElements = i965_BufferSetNumElements;
2531 vtable->vaMapBuffer = i965_MapBuffer;
2532 vtable->vaUnmapBuffer = i965_UnmapBuffer;
2533 vtable->vaDestroyBuffer = i965_DestroyBuffer;
2534 vtable->vaBeginPicture = i965_BeginPicture;
2535 vtable->vaRenderPicture = i965_RenderPicture;
2536 vtable->vaEndPicture = i965_EndPicture;
2537 vtable->vaSyncSurface = i965_SyncSurface;
2538 vtable->vaQuerySurfaceStatus = i965_QuerySurfaceStatus;
2539 vtable->vaPutSurface = i965_PutSurface;
2540 vtable->vaQueryImageFormats = i965_QueryImageFormats;
2541 vtable->vaCreateImage = i965_CreateImage;
2542 vtable->vaDeriveImage = i965_DeriveImage;
2543 vtable->vaDestroyImage = i965_DestroyImage;
2544 vtable->vaSetImagePalette = i965_SetImagePalette;
2545 vtable->vaGetImage = i965_GetImage;
2546 vtable->vaPutImage = i965_PutImage;
2547 vtable->vaQuerySubpictureFormats = i965_QuerySubpictureFormats;
2548 vtable->vaCreateSubpicture = i965_CreateSubpicture;
2549 vtable->vaDestroySubpicture = i965_DestroySubpicture;
2550 vtable->vaSetSubpictureImage = i965_SetSubpictureImage;
2551 vtable->vaSetSubpictureChromakey = i965_SetSubpictureChromakey;
2552 vtable->vaSetSubpictureGlobalAlpha = i965_SetSubpictureGlobalAlpha;
2553 vtable->vaAssociateSubpicture = i965_AssociateSubpicture;
2554 vtable->vaDeassociateSubpicture = i965_DeassociateSubpicture;
2555 vtable->vaQueryDisplayAttributes = i965_QueryDisplayAttributes;
2556 vtable->vaGetDisplayAttributes = i965_GetDisplayAttributes;
2557 vtable->vaSetDisplayAttributes = i965_SetDisplayAttributes;
2558 vtable->vaBufferInfo = i965_BufferInfo;
2559 vtable->vaLockSurface = i965_LockSurface;
2560 vtable->vaUnlockSurface = i965_UnlockSurface;
2561 // vtable->vaDbgCopySurfaceToBuffer = i965_DbgCopySurfaceToBuffer;
2563 i965 = (struct i965_driver_data *)calloc(1, sizeof(*i965));
2565 ctx->pDriverData = (void *)i965;
2567 result = object_heap_init(&i965->config_heap,
2568 sizeof(struct object_config),
2570 assert(result == 0);
2572 result = object_heap_init(&i965->context_heap,
2573 sizeof(struct object_context),
2575 assert(result == 0);
2577 result = object_heap_init(&i965->surface_heap,
2578 sizeof(struct object_surface),
2580 assert(result == 0);
2582 result = object_heap_init(&i965->buffer_heap,
2583 sizeof(struct object_buffer),
2585 assert(result == 0);
2587 result = object_heap_init(&i965->image_heap,
2588 sizeof(struct object_image),
2590 assert(result == 0);
2592 result = object_heap_init(&i965->subpic_heap,
2593 sizeof(struct object_subpic),
2595 assert(result == 0);
2597 return i965_Init(ctx);